From d8406e4752abb58d7c34c86c501473c313aea2bc Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Fri, 11 Oct 2024 16:39:50 +0300 Subject: [PATCH 001/449] [Failure store - selector syntax] Refactor IndicesOptions builder (#114597) **Introduction** > In order to make adoption of failure stores simpler for all users, we are introducing a new syntactical feature to index expression resolution: The selector. > > Selectors, denoted with a :: followed by a recognized suffix will allow users to specify which component of an index abstraction they would like to operate on within an API call. In this case, an index abstraction is a concrete index, data stream, or alias; Any abstraction that can be resolved to a set of indices/shards. We define a component of an index abstraction to be some searchable unit of the index abstraction. > > To start, we will support two components: data and failures. Concrete indices are their own data components, while the data component for index aliases are all of the indices contained therein. For data streams, the data component corresponds to their backing indices. Data stream aliases mirror this, treating all backing indices of the data streams they correspond to as their data component. > > The failure component is only supported by data streams and data stream aliases. The failure component of these abstractions refers to the data streams' failure stores. Indices and index aliases do not have a failure component. For more details and examples see https://github.com/elastic/elasticsearch/pull/113144. All this work has been cherry picked from there. **Purpose of this PR** This PR is replacing the indices options boolean constructor with the builders. The goal is to give me and the reviewer a very narrow scope change so that we can ensure we did not make any mistakes during the conversion. Also it will reduce a bit the change list in https://github.com/elastic/elasticsearch/pull/113144/files. 
--- .../DeleteDataStreamLifecycleAction.java | 21 +++++++++++++- .../indices/alias/IndicesAliasesRequest.java | 21 +++++++++++++- .../indices/delete/DeleteIndexRequest.java | 29 ++++++++++++------- .../datastreams/DataStreamsStatsAction.java | 27 ++++++++++++++++- .../datastreams/DeleteDataStreamAction.java | 20 ++++++++++++- .../datastreams/GetDataStreamAction.java | 21 +++++++++++++- .../GetDataStreamLifecycleAction.java | 21 +++++++++++++- .../PutDataStreamLifecycleAction.java | 21 +++++++++++++- 8 files changed, 164 insertions(+), 17 deletions(-) diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamLifecycleAction.java index 1c4659efc2f8b..1595348649528 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamLifecycleAction.java @@ -34,7 +34,26 @@ private DeleteDataStreamLifecycleAction() {/* no instances */} public static final class Request extends AcknowledgedRequest implements IndicesRequest.Replaceable { private String[] names; - private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, true, true, true, false, false, true, false); + private IndicesOptions indicesOptions = IndicesOptions.builder() + .concreteTargetOptions(IndicesOptions.ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) + .wildcardOptions( + IndicesOptions.WildcardOptions.builder() + .matchOpen(true) + .matchClosed(true) + .includeHidden(false) + .resolveAliases(false) + .allowEmptyExpressions(true) + .build() + ) + .gatekeeperOptions( + IndicesOptions.GatekeeperOptions.builder() + .allowAliasToMultipleIndices(false) + .allowClosedIndices(true) + .ignoreThrottled(false) + .allowFailureIndices(false) + .build() + ) + 
.build(); public Request(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java index cf06dd34fd5ca..d66cab1d2d717 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java @@ -58,7 +58,26 @@ public class IndicesAliasesRequest extends AcknowledgedRequest implements IndicesRequest.Replaceable { - public static final IndicesOptions DEFAULT_INDICES_OPTIONS = IndicesOptions.fromOptions( - false, - true, - true, - true, - false, - false, - true, - false - ); + public static final IndicesOptions DEFAULT_INDICES_OPTIONS = IndicesOptions.builder() + .concreteTargetOptions(IndicesOptions.ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) + .wildcardOptions( + IndicesOptions.WildcardOptions.builder() + .matchOpen(true) + .matchClosed(true) + .allowEmptyExpressions(true) + .resolveAliases(false) + .build() + ) + .gatekeeperOptions( + IndicesOptions.GatekeeperOptions.builder() + .allowAliasToMultipleIndices(false) + .allowClosedIndices(true) + .ignoreThrottled(false) + .allowFailureIndices(true) + .build() + ) + .build(); private String[] indices; // Delete index should work by default on both open and closed indices. 
diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsStatsAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsStatsAction.java index 2bd4d223bc4ae..fbb084e8cd121 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsStatsAction.java @@ -40,7 +40,32 @@ public static class Request extends BroadcastRequest { public Request() { // this doesn't really matter since data stream name resolution isn't affected by IndicesOptions and // a data stream's backing indices are retrieved from its metadata - super(null, IndicesOptions.fromOptions(false, true, true, true, true, false, true, false)); + super( + null, + IndicesOptions.builder() + .concreteTargetOptions(IndicesOptions.ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) + .wildcardOptions( + IndicesOptions.WildcardOptions.builder() + .matchOpen(true) + .matchClosed(true) + .includeHidden(false) + .resolveAliases(false) + .allowEmptyExpressions(true) + .build() + ) + .gatekeeperOptions( + IndicesOptions.GatekeeperOptions.builder() + .allowAliasToMultipleIndices(true) + .allowClosedIndices(true) + .ignoreThrottled(false) + .allowFailureIndices(true) + .build() + ) + .failureStoreOptions( + IndicesOptions.FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(true).build() + ) + .build() + ); } public Request(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/DeleteDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/DeleteDataStreamAction.java index 4f3e238796ed6..4f647d4f02884 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/DeleteDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/DeleteDataStreamAction.java @@ -46,7 +46,25 @@ public static class Request extends 
MasterNodeRequest implements Indice // empty response can be returned in case wildcards were used or // 404 status code returned in case no wildcard were used. private final boolean wildcardExpressionsOriginallySpecified; - private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, true, true, true, false, false, true, false); + private IndicesOptions indicesOptions = IndicesOptions.builder() + .concreteTargetOptions(IndicesOptions.ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) + .wildcardOptions( + IndicesOptions.WildcardOptions.builder() + .matchOpen(true) + .matchClosed(true) + .resolveAliases(false) + .allowEmptyExpressions(true) + .build() + ) + .gatekeeperOptions( + IndicesOptions.GatekeeperOptions.builder() + .allowAliasToMultipleIndices(false) + .allowClosedIndices(true) + .ignoreThrottled(false) + .allowFailureIndices(true) + .build() + ) + .build(); public Request(TimeValue masterNodeTimeout, String... names) { super(masterNodeTimeout); diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java index 8d7f440ab20e4..c1cf0fa7aab42 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java @@ -56,7 +56,26 @@ private GetDataStreamAction() { public static class Request extends MasterNodeReadRequest implements IndicesRequest.Replaceable { private String[] names; - private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, true, true, true, false, false, true, false); + private IndicesOptions indicesOptions = IndicesOptions.builder() + .concreteTargetOptions(IndicesOptions.ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) + .wildcardOptions( + IndicesOptions.WildcardOptions.builder() + .matchOpen(true) + .matchClosed(true) + .includeHidden(false) + .resolveAliases(false) + 
.allowEmptyExpressions(true) + .build() + ) + .gatekeeperOptions( + IndicesOptions.GatekeeperOptions.builder() + .allowAliasToMultipleIndices(false) + .allowClosedIndices(true) + .ignoreThrottled(false) + .allowFailureIndices(true) + .build() + ) + .build(); private boolean includeDefaults = false; private boolean verbose = false; diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/GetDataStreamLifecycleAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/GetDataStreamLifecycleAction.java index 6314f47ab9516..bd628c88a1b1e 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/GetDataStreamLifecycleAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/GetDataStreamLifecycleAction.java @@ -47,7 +47,26 @@ private GetDataStreamLifecycleAction() {/* no instances */} public static class Request extends MasterNodeReadRequest implements IndicesRequest.Replaceable { private String[] names; - private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, true, true, true, false, false, true, false); + private IndicesOptions indicesOptions = IndicesOptions.builder() + .concreteTargetOptions(IndicesOptions.ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) + .wildcardOptions( + IndicesOptions.WildcardOptions.builder() + .matchOpen(true) + .matchClosed(true) + .includeHidden(false) + .resolveAliases(false) + .allowEmptyExpressions(true) + .build() + ) + .gatekeeperOptions( + IndicesOptions.GatekeeperOptions.builder() + .allowAliasToMultipleIndices(false) + .allowClosedIndices(true) + .ignoreThrottled(false) + .allowFailureIndices(true) + .build() + ) + .build(); private boolean includeDefaults = false; public Request(TimeValue masterNodeTimeout, String[] names) { diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/PutDataStreamLifecycleAction.java 
b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/PutDataStreamLifecycleAction.java index 77f723a46f168..b054d12890366 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/PutDataStreamLifecycleAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/PutDataStreamLifecycleAction.java @@ -78,7 +78,26 @@ public static Request parseRequest(XContentParser parser, Factory factory) { } private String[] names; - private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, true, true, true, false, false, true, false); + private IndicesOptions indicesOptions = IndicesOptions.builder() + .concreteTargetOptions(IndicesOptions.ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) + .wildcardOptions( + IndicesOptions.WildcardOptions.builder() + .matchOpen(true) + .matchClosed(true) + .includeHidden(false) + .resolveAliases(false) + .allowEmptyExpressions(true) + .build() + ) + .gatekeeperOptions( + IndicesOptions.GatekeeperOptions.builder() + .allowAliasToMultipleIndices(false) + .allowClosedIndices(true) + .ignoreThrottled(false) + .allowFailureIndices(false) + .build() + ) + .build(); private final DataStreamLifecycle lifecycle; public Request(StreamInput in) throws IOException { From 3529f92e52636c3751b96c456f237ab8e4a41801 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Fri, 11 Oct 2024 15:24:09 +0100 Subject: [PATCH 002/449] [ML] Add sentence overlap option to the sentence chunking settings (#114461) --- .../org/elasticsearch/TransportVersions.java | 1 + .../chunking/ChunkingSettingsOptions.java | 3 +- .../chunking/SentenceBoundaryChunker.java | 87 ++++++++- .../SentenceBoundaryChunkingSettings.java | 33 +++- .../WordBoundaryChunkingSettings.java | 2 +- .../ChunkingSettingsBuilderTests.java | 2 +- .../chunking/ChunkingSettingsTests.java | 2 +- .../SentenceBoundaryChunkerTests.java | 182 +++++++++++++++++- ...SentenceBoundaryChunkingSettingsTests.java | 5 +- 
.../chunking/WordBoundaryChunkerTests.java | 5 +- 10 files changed, 291 insertions(+), 31 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 721a0e8314927..0f9c27a7877b8 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -240,6 +240,7 @@ static TransportVersion def(int id) { public static final TransportVersion SIMULATE_INDEX_TEMPLATES_SUBSTITUTIONS = def(8_764_00_0); public static final TransportVersion RETRIEVERS_TELEMETRY_ADDED = def(8_765_00_0); public static final TransportVersion ESQL_CACHED_STRING_SERIALIZATION = def(8_766_00_0); + public static final TransportVersion CHUNK_SENTENCE_OVERLAP_SETTING_ADDED = def(8_767_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsOptions.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsOptions.java index a85b92dd1a055..93d435eb0b69f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsOptions.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsOptions.java @@ -10,7 +10,8 @@ public enum ChunkingSettingsOptions { STRATEGY("strategy"), MAX_CHUNK_SIZE("max_chunk_size"), - OVERLAP("overlap"); + OVERLAP("overlap"), + SENTENCE_OVERLAP("sentence_overlap"); private final String chunkingSettingsOption; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunker.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunker.java index 3a53ecc7ae958..5df940d6a3fba 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunker.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunker.java @@ -34,6 +34,7 @@ public class SentenceBoundaryChunker implements Chunker { public SentenceBoundaryChunker() { sentenceIterator = BreakIterator.getSentenceInstance(Locale.ROOT); wordIterator = BreakIterator.getWordInstance(Locale.ROOT); + } /** @@ -46,7 +47,7 @@ public SentenceBoundaryChunker() { @Override public List chunk(String input, ChunkingSettings chunkingSettings) { if (chunkingSettings instanceof SentenceBoundaryChunkingSettings sentenceBoundaryChunkingSettings) { - return chunk(input, sentenceBoundaryChunkingSettings.maxChunkSize); + return chunk(input, sentenceBoundaryChunkingSettings.maxChunkSize, sentenceBoundaryChunkingSettings.sentenceOverlap > 0); } else { throw new IllegalArgumentException( Strings.format( @@ -64,7 +65,7 @@ public List chunk(String input, ChunkingSettings chunkingSettings) { * @param maxNumberWordsPerChunk Maximum size of the chunk * @return The input text chunked */ - public List chunk(String input, int maxNumberWordsPerChunk) { + public List chunk(String input, int maxNumberWordsPerChunk, boolean includePrecedingSentence) { var chunks = new ArrayList(); sentenceIterator.setText(input); @@ -75,24 +76,46 @@ public List chunk(String input, int maxNumberWordsPerChunk) { int sentenceStart = 0; int chunkWordCount = 0; + int wordsInPrecedingSentenceCount = 0; + int previousSentenceStart = 0; + int boundary = sentenceIterator.next(); while (boundary != BreakIterator.DONE) { int sentenceEnd = sentenceIterator.current(); - int countWordsInSentence = countWords(sentenceStart, sentenceEnd); + int wordsInSentenceCount = countWords(sentenceStart, sentenceEnd); - if (chunkWordCount + countWordsInSentence > maxNumberWordsPerChunk) { + if (chunkWordCount + wordsInSentenceCount > maxNumberWordsPerChunk) { // over the max 
chunk size, roll back to the last sentence + int nextChunkWordCount = wordsInSentenceCount; if (chunkWordCount > 0) { // add a new chunk containing all the input up to this sentence chunks.add(input.substring(chunkStart, chunkEnd)); - chunkStart = chunkEnd; - chunkWordCount = countWordsInSentence; // the next chunk will contain this sentence + + if (includePrecedingSentence) { + if (wordsInPrecedingSentenceCount + wordsInSentenceCount > maxNumberWordsPerChunk) { + // cut the last sentence + int numWordsToSkip = numWordsToSkipInPreviousSentence(wordsInPrecedingSentenceCount, maxNumberWordsPerChunk); + + chunkStart = skipWords(input, previousSentenceStart, numWordsToSkip); + chunkWordCount = (wordsInPrecedingSentenceCount - numWordsToSkip) + wordsInSentenceCount; + } else { + chunkWordCount = wordsInPrecedingSentenceCount + wordsInSentenceCount; + chunkStart = previousSentenceStart; + } + + nextChunkWordCount = chunkWordCount; + } else { + chunkStart = chunkEnd; + chunkWordCount = wordsInSentenceCount; // the next chunk will contain this sentence + } } - if (countWordsInSentence > maxNumberWordsPerChunk) { - // This sentence is bigger than the max chunk size. + // Is the next chunk larger than max chunk size? + // If so split it + if (nextChunkWordCount > maxNumberWordsPerChunk) { + // This sentence (and optional overlap) is bigger than the max chunk size. 
// Split the sentence on the word boundary var sentenceSplits = splitLongSentence( input.substring(chunkStart, sentenceEnd), @@ -113,7 +136,12 @@ public List chunk(String input, int maxNumberWordsPerChunk) { chunkWordCount = sentenceSplits.get(i).wordCount(); } } else { - chunkWordCount += countWordsInSentence; + chunkWordCount += wordsInSentenceCount; + } + + if (includePrecedingSentence) { + previousSentenceStart = sentenceStart; + wordsInPrecedingSentenceCount = wordsInSentenceCount; } sentenceStart = sentenceEnd; @@ -133,6 +161,45 @@ static List splitLongSentence(String text, in return new WordBoundaryChunker().chunkPositions(text, maxNumberOfWords, overlap); } + static int numWordsToSkipInPreviousSentence(int wordsInPrecedingSentenceCount, int maxNumberWordsPerChunk) { + var maxWordsInOverlap = maxWordsInOverlap(maxNumberWordsPerChunk); + if (wordsInPrecedingSentenceCount > maxWordsInOverlap) { + return wordsInPrecedingSentenceCount - maxWordsInOverlap; + } else { + return 0; + } + } + + static int maxWordsInOverlap(int maxNumberWordsPerChunk) { + return Math.min(maxNumberWordsPerChunk / 2, 20); + } + + private int skipWords(String input, int start, int numWords) { + var itr = BreakIterator.getWordInstance(Locale.ROOT); + itr.setText(input); + return skipWords(start, numWords, itr); + } + + static int skipWords(int start, int numWords, BreakIterator wordIterator) { + wordIterator.preceding(start); // start of the current word + + int boundary = wordIterator.current(); + int wordCount = 0; + while (boundary != BreakIterator.DONE && wordCount < numWords) { + int wordStatus = wordIterator.getRuleStatus(); + if (wordStatus != BreakIterator.WORD_NONE) { + wordCount++; + } + boundary = wordIterator.next(); + } + + if (boundary == BreakIterator.DONE) { + return wordIterator.last(); + } else { + return boundary; + } + } + private int countWords(int start, int end) { return countWords(start, end, this.wordIterator); } @@ -157,6 +224,6 @@ static int countWords(int 
start, int end, BreakIterator wordIterator) { } private static int overlapForChunkSize(int chunkSize) { - return (chunkSize - 1) / 2; + return Math.min(20, (chunkSize - 1) / 2); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettings.java index 0d1903895f615..758dd5d04e268 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettings.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.ChunkingStrategy; import org.elasticsearch.inference.ModelConfigurations; @@ -30,16 +31,25 @@ public class SentenceBoundaryChunkingSettings implements ChunkingSettings { private static final ChunkingStrategy STRATEGY = ChunkingStrategy.SENTENCE; private static final Set VALID_KEYS = Set.of( ChunkingSettingsOptions.STRATEGY.toString(), - ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString() + ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), + ChunkingSettingsOptions.SENTENCE_OVERLAP.toString() ); + + private static int DEFAULT_OVERLAP = 0; + protected final int maxChunkSize; + protected int sentenceOverlap = DEFAULT_OVERLAP; - public SentenceBoundaryChunkingSettings(Integer maxChunkSize) { + public SentenceBoundaryChunkingSettings(Integer maxChunkSize, @Nullable Integer sentenceOverlap) { this.maxChunkSize = maxChunkSize; + this.sentenceOverlap = sentenceOverlap == null ? 
DEFAULT_OVERLAP : sentenceOverlap; } public SentenceBoundaryChunkingSettings(StreamInput in) throws IOException { maxChunkSize = in.readInt(); + if (in.getTransportVersion().onOrAfter(TransportVersions.CHUNK_SENTENCE_OVERLAP_SETTING_ADDED)) { + sentenceOverlap = in.readVInt(); + } } public static SentenceBoundaryChunkingSettings fromMap(Map map) { @@ -59,11 +69,24 @@ public static SentenceBoundaryChunkingSettings fromMap(Map map) validationException ); + Integer sentenceOverlap = ServiceUtils.extractOptionalPositiveInteger( + map, + ChunkingSettingsOptions.SENTENCE_OVERLAP.toString(), + ModelConfigurations.CHUNKING_SETTINGS, + validationException + ); + + if (sentenceOverlap != null && sentenceOverlap > 1) { + validationException.addValidationError( + ChunkingSettingsOptions.SENTENCE_OVERLAP.toString() + "[" + sentenceOverlap + "] must be either 0 or 1" + ); // todo better + } + if (validationException.validationErrors().isEmpty() == false) { throw validationException; } - return new SentenceBoundaryChunkingSettings(maxChunkSize); + return new SentenceBoundaryChunkingSettings(maxChunkSize, sentenceOverlap); } @Override @@ -72,6 +95,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws { builder.field(ChunkingSettingsOptions.STRATEGY.toString(), STRATEGY); builder.field(ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), maxChunkSize); + builder.field(ChunkingSettingsOptions.SENTENCE_OVERLAP.toString(), sentenceOverlap); } builder.endObject(); return builder; @@ -90,6 +114,9 @@ public TransportVersion getMinimalSupportedVersion() { @Override public void writeTo(StreamOutput out) throws IOException { out.writeInt(maxChunkSize); + if (out.getTransportVersion().onOrAfter(TransportVersions.CHUNK_SENTENCE_OVERLAP_SETTING_ADDED)) { + out.writeVInt(sentenceOverlap); + } } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkingSettings.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkingSettings.java index 6517e0eea14d9..5b91e122b9c80 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkingSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkingSettings.java @@ -52,7 +52,7 @@ public static WordBoundaryChunkingSettings fromMap(Map map) { var invalidSettings = map.keySet().stream().filter(key -> VALID_KEYS.contains(key) == false).toArray(); if (invalidSettings.length > 0) { validationException.addValidationError( - Strings.format("Sentence based chunking settings can not have the following settings: %s", Arrays.toString(invalidSettings)) + Strings.format("Word based chunking settings can not have the following settings: %s", Arrays.toString(invalidSettings)) ); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilderTests.java index 061ea677e6fe1..3c09984ac0162 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilderTests.java @@ -56,7 +56,7 @@ private Map, ChunkingSettings> chunkingSettingsMapToChunking ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), maxChunkSize ), - new SentenceBoundaryChunkingSettings(maxChunkSize) + new SentenceBoundaryChunkingSettings(maxChunkSize, 1) ); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsTests.java index 2482586c75595..8373ae93354b1 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsTests.java @@ -25,7 +25,7 @@ public static ChunkingSettings createRandomChunkingSettings() { return new WordBoundaryChunkingSettings(maxChunkSize, randomIntBetween(1, maxChunkSize / 2)); } case SENTENCE -> { - return new SentenceBoundaryChunkingSettings(randomNonNegativeInt()); + return new SentenceBoundaryChunkingSettings(randomNonNegativeInt(), randomBoolean() ? 0 : 1); } default -> throw new IllegalArgumentException("Unsupported random strategy [" + randomStrategy + "]"); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkerTests.java index 335752faa6b22..5687ebc4dbae7 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkerTests.java @@ -13,19 +13,24 @@ import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; +import java.util.ArrayList; import java.util.Arrays; import java.util.Locale; import static org.elasticsearch.xpack.inference.chunking.WordBoundaryChunkerTests.TEST_TEXT; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.startsWith; public class SentenceBoundaryChunkerTests extends ESTestCase { public void testChunkSplitLargeChunkSizes() { for (int maxWordsPerChunk : new int[] { 100, 200 }) { var chunker = new SentenceBoundaryChunker(); - 
var chunks = chunker.chunk(TEST_TEXT, maxWordsPerChunk); + var chunks = chunker.chunk(TEST_TEXT, maxWordsPerChunk, false); int numChunks = expectedNumberOfChunks(sentenceSizes(TEST_TEXT), maxWordsPerChunk); assertThat("words per chunk " + maxWordsPerChunk, chunks, hasSize(numChunks)); @@ -39,11 +44,94 @@ public void testChunkSplitLargeChunkSizes() { } } + public void testChunkSplitLargeChunkSizes_withOverlap() { + boolean overlap = true; + for (int maxWordsPerChunk : new int[] { 70, 80, 100, 120, 150, 200 }) { + var chunker = new SentenceBoundaryChunker(); + var chunks = chunker.chunk(TEST_TEXT, maxWordsPerChunk, overlap); + + int[] overlaps = chunkOverlaps(sentenceSizes(TEST_TEXT), maxWordsPerChunk, overlap); + assertThat("words per chunk " + maxWordsPerChunk, chunks, hasSize(overlaps.length)); + + assertTrue(Character.isUpperCase(chunks.get(0).charAt(0))); + + for (int i = 0; i < overlaps.length; i++) { + if (overlaps[i] == 0) { + // start of a sentence + assertTrue(Character.isUpperCase(chunks.get(i).charAt(0))); + } else { + // The start of this chunk should contain some text from the end of the previous + var previousChunk = chunks.get(i - 1); + assertThat(chunks.get(i), containsString(previousChunk.substring(previousChunk.length() - 20))); + } + } + + var trailingWhiteSpaceRemoved = chunks.get(0).strip(); + var lastChar = trailingWhiteSpaceRemoved.charAt(trailingWhiteSpaceRemoved.length() - 1); + assertThat(lastChar, Matchers.is('.')); + trailingWhiteSpaceRemoved = chunks.get(chunks.size() - 1).strip(); + lastChar = trailingWhiteSpaceRemoved.charAt(trailingWhiteSpaceRemoved.length() - 1); + assertThat(lastChar, Matchers.is('.')); + } + } + + public void testWithOverlap_SentencesFitInChunks() { + int numChunks = 4; + int chunkSize = 100; + + var sb = new StringBuilder(); + + int[] sentenceStartIndexes = new int[numChunks]; + sentenceStartIndexes[0] = 0; + + int numSentences = randomIntBetween(2, 5); + int sentenceIndex = 0; + int lastSentenceSize = 0; + int 
roughSentenceSize = (chunkSize / numSentences) - 1; + for (int j = 0; j < numSentences; j++) { + sb.append(makeSentence(roughSentenceSize, sentenceIndex++)); + lastSentenceSize = roughSentenceSize; + } + + for (int i = 1; i < numChunks; i++) { + sentenceStartIndexes[i] = sentenceIndex - 1; + + roughSentenceSize = (chunkSize / numSentences) - 1; + int wordCount = lastSentenceSize; + + while (wordCount + roughSentenceSize < chunkSize) { + sb.append(makeSentence(roughSentenceSize, sentenceIndex++)); + lastSentenceSize = roughSentenceSize; + wordCount += roughSentenceSize; + } + } + + var chunker = new SentenceBoundaryChunker(); + var chunks = chunker.chunk(sb.toString(), chunkSize, true); + assertThat(chunks, hasSize(numChunks)); + for (int i = 0; i < numChunks; i++) { + assertThat("num sentences " + numSentences, chunks.get(i), startsWith("SStart" + sentenceStartIndexes[i])); + assertThat("num sentences " + numSentences, chunks.get(i).trim(), endsWith(".")); + } + } + + private String makeSentence(int numWords, int sentenceIndex) { + StringBuilder sb = new StringBuilder(); + sb.append("SStart").append(sentenceIndex).append(' '); + for (int i = 1; i < numWords - 1; i++) { + sb.append(i).append(' '); + } + sb.append(numWords - 1).append(". 
"); + return sb.toString(); + } + public void testChunk_ChunkSizeLargerThanText() { int maxWordsPerChunk = 500; var chunker = new SentenceBoundaryChunker(); - var chunks = chunker.chunk(TEST_TEXT, maxWordsPerChunk); + var chunks = chunker.chunk(TEST_TEXT, maxWordsPerChunk, false); + assertEquals(chunks.get(0), TEST_TEXT); + chunks = chunker.chunk(TEST_TEXT, maxWordsPerChunk, true); assertEquals(chunks.get(0), TEST_TEXT); } @@ -54,7 +142,7 @@ public void testChunkSplit_SentencesLongerThanChunkSize() { for (int i = 0; i < chunkSizes.length; i++) { int maxWordsPerChunk = chunkSizes[i]; var chunker = new SentenceBoundaryChunker(); - var chunks = chunker.chunk(TEST_TEXT, maxWordsPerChunk); + var chunks = chunker.chunk(TEST_TEXT, maxWordsPerChunk, false); assertThat("words per chunk " + maxWordsPerChunk, chunks, hasSize(expectedNumberOFChunks[i])); for (var chunk : chunks) { @@ -76,6 +164,48 @@ public void testChunkSplit_SentencesLongerThanChunkSize() { } } + public void testChunkSplit_SentencesLongerThanChunkSize_WithOverlap() { + var chunkSizes = new int[] { 10, 30, 50 }; + + // Chunk sizes are shorter the sentences most of the sentences will be split. + for (int i = 0; i < chunkSizes.length; i++) { + int maxWordsPerChunk = chunkSizes[i]; + var chunker = new SentenceBoundaryChunker(); + var chunks = chunker.chunk(TEST_TEXT, maxWordsPerChunk, true); + assertThat(chunks.get(0), containsString("Word segmentation is the problem of dividing")); + assertThat(chunks.get(chunks.size() - 1), containsString(", with solidification being a stronger norm.")); + } + } + + public void testShortLongShortSentences_WithOverlap() { + int maxWordsPerChunk = 40; + var sb = new StringBuilder(); + int[] sentenceLengths = new int[] { 15, 30, 20, 5 }; + for (int l = 0; l < sentenceLengths.length; l++) { + sb.append("SStart").append(l).append(" "); + for (int i = 1; i < sentenceLengths[l] - 1; i++) { + sb.append(i).append(' '); + } + sb.append(sentenceLengths[l] - 1).append(". 
"); + } + + var chunker = new SentenceBoundaryChunker(); + var chunks = chunker.chunk(sb.toString(), maxWordsPerChunk, true); + assertThat(chunks, hasSize(5)); + assertTrue(chunks.get(0).trim().startsWith("SStart0")); // Entire sentence + assertTrue(chunks.get(0).trim().endsWith(".")); // Entire sentence + + assertTrue(chunks.get(1).trim().startsWith("SStart0")); // contains previous sentence + assertFalse(chunks.get(1).trim().endsWith(".")); // not a full sentence(s) + + assertTrue(chunks.get(2).trim().endsWith(".")); + assertTrue(chunks.get(3).trim().endsWith(".")); + + assertTrue(chunks.get(4).trim().startsWith("SStart2")); // contains previous sentence + assertThat(chunks.get(4), containsString("SStart3")); // last chunk contains 2 sentences + assertTrue(chunks.get(4).trim().endsWith(".")); // full sentence(s) + } + public void testCountWords() { // Test word count matches the whitespace separated word count. var splitByWhiteSpaceSentenceSizes = sentenceSizes(TEST_TEXT); @@ -102,6 +232,30 @@ public void testCountWords() { assertEquals(BreakIterator.DONE, sentenceIterator.next()); } + public void testSkipWords() { + int numWords = 50; + StringBuilder sb = new StringBuilder(); + for (int i = 0; i < numWords; i++) { + sb.append("word").append(i).append(" "); + } + var text = sb.toString(); + + var wordIterator = BreakIterator.getWordInstance(Locale.ROOT); + wordIterator.setText(text); + + int start = 0; + int pos = SentenceBoundaryChunker.skipWords(start, 3, wordIterator); + assertThat(text.substring(pos), startsWith("word3 ")); + pos = SentenceBoundaryChunker.skipWords(pos + 1, 1, wordIterator); + assertThat(text.substring(pos), startsWith("word4 ")); + pos = SentenceBoundaryChunker.skipWords(pos + 1, 5, wordIterator); + assertThat(text.substring(pos), startsWith("word9 ")); + + // past the end of the input + pos = SentenceBoundaryChunker.skipWords(0, numWords + 10, wordIterator); + assertThat(pos, greaterThan(0)); + } + public void testCountWords_short() { // 
Test word count matches the whitespace separated word count. var text = "This is a short sentence. Followed by another."; @@ -148,7 +302,7 @@ public void testCountWords_WithSymbols() { public void testChunkSplitLargeChunkSizesWithChunkingSettings() { for (int maxWordsPerChunk : new int[] { 100, 200 }) { var chunker = new SentenceBoundaryChunker(); - SentenceBoundaryChunkingSettings chunkingSettings = new SentenceBoundaryChunkingSettings(maxWordsPerChunk); + SentenceBoundaryChunkingSettings chunkingSettings = new SentenceBoundaryChunkingSettings(maxWordsPerChunk, 0); var chunks = chunker.chunk(TEST_TEXT, chunkingSettings); int numChunks = expectedNumberOfChunks(sentenceSizes(TEST_TEXT), maxWordsPerChunk); @@ -182,16 +336,30 @@ private int[] sentenceSizes(String text) { } private int expectedNumberOfChunks(int[] sentenceLengths, int maxWordsPerChunk) { - int numChunks = 1; + return chunkOverlaps(sentenceLengths, maxWordsPerChunk, false).length; + } + + private int[] chunkOverlaps(int[] sentenceLengths, int maxWordsPerChunk, boolean includeSingleSentenceOverlap) { + int maxOverlap = SentenceBoundaryChunker.maxWordsInOverlap(maxWordsPerChunk); + + var overlaps = new ArrayList(); + overlaps.add(0); int runningWordCount = 0; for (int i = 0; i < sentenceLengths.length; i++) { if (runningWordCount + sentenceLengths[i] > maxWordsPerChunk) { - numChunks++; runningWordCount = sentenceLengths[i]; + if (includeSingleSentenceOverlap && i > 0) { + // include what is carried over from the previous + int overlap = Math.min(maxOverlap, sentenceLengths[i - 1]); + overlaps.add(overlap); + runningWordCount += overlap; + } else { + overlaps.add(0); + } } else { runningWordCount += sentenceLengths[i]; } } - return numChunks; + return overlaps.stream().mapToInt(Integer::intValue).toArray(); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettingsTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettingsTests.java index 3f304a593144b..fe97d7eb3af54 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettingsTests.java @@ -59,13 +59,12 @@ protected Writeable.Reader instanceReader() { @Override protected SentenceBoundaryChunkingSettings createTestInstance() { - return new SentenceBoundaryChunkingSettings(randomNonNegativeInt()); + return new SentenceBoundaryChunkingSettings(randomNonNegativeInt(), randomBoolean() ? 0 : 1); } @Override protected SentenceBoundaryChunkingSettings mutateInstance(SentenceBoundaryChunkingSettings instance) throws IOException { var chunkSize = randomValueOtherThan(instance.maxChunkSize, ESTestCase::randomNonNegativeInt); - - return new SentenceBoundaryChunkingSettings(chunkSize); + return new SentenceBoundaryChunkingSettings(chunkSize, instance.sentenceOverlap); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkerTests.java index 21d8c65ad7dcd..08c0724f36270 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkerTests.java @@ -54,9 +54,6 @@ public class WordBoundaryChunkerTests extends ESTestCase { + " خليفہ المومنين يا خليفہ المسلمين يا صحابی يا رضي الله عنه چئي۔ (ب) آنحضور ﷺ جي گھروارين کان علاوه ڪنھن کي ام المومنين " + "چئي۔ (ج) آنحضور ﷺ جي خاندان جي اھل بيت کان علاوہڍه ڪنھن کي اھل بيت چئي۔ (د) پنھنجي عبادت گاھ کي مسجد چئي۔" }; - private static final int DEFAULT_MAX_CHUNK_SIZE = 250; - 
private static final int DEFAULT_OVERLAP = 100; - public static int NUM_WORDS_IN_TEST_TEXT; static { var wordIterator = BreakIterator.getWordInstance(Locale.ROOT); @@ -139,7 +136,7 @@ public void testNumberOfChunksWithWordBoundaryChunkingSettings() { } public void testInvalidChunkingSettingsProvided() { - ChunkingSettings chunkingSettings = new SentenceBoundaryChunkingSettings(randomNonNegativeInt()); + ChunkingSettings chunkingSettings = new SentenceBoundaryChunkingSettings(randomNonNegativeInt(), 0); assertThrows(IllegalArgumentException.class, () -> { new WordBoundaryChunker().chunk(TEST_TEXT, chunkingSettings); }); } From 6cc77020c9e6c337c76dff727a7a24190c22979b Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 11 Oct 2024 10:26:49 -0400 Subject: [PATCH 003/449] ESQL: Improve error message in test (#114524) Improve an error message in the test for `profile`ing the ordinals-based grouping operator. It's failed in the past with a rather cryptic error message. This will either keep it passing fully or give us a better error message when it does fail. 
Closes #114380 --- muted-tests.yml | 3 --- .../xpack/esql/qa/single_node/RestEsqlIT.java | 10 +++++----- 2 files changed, 5 insertions(+), 8 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 6b88435f5c0b3..4f9503607e430 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -340,9 +340,6 @@ tests: - class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT method: test {yaml=cluster.stats/30_ccs_stats/cross-cluster search stats search} issue: https://github.com/elastic/elasticsearch/issues/114371 -- class: org.elasticsearch.xpack.esql.qa.single_node.RestEsqlIT - method: testProfileOrdinalsGroupingOperator {SYNC} - issue: https://github.com/elastic/elasticsearch/issues/114380 - class: org.elasticsearch.xpack.inference.services.cohere.CohereServiceTests method: testInfer_StreamRequest issue: https://github.com/elastic/elasticsearch/issues/114385 diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java index 60399bc2e3e00..3388f6f517bdf 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java @@ -45,6 +45,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.startsWith; @@ -331,14 +332,13 @@ public void testProfile() throws IOException { } public void testProfileOrdinalsGroupingOperator() throws IOException { + assumeTrue("requires pragmas", 
Build.current().isSnapshot()); indexTimestampData(1); RequestObjectBuilder builder = requestObjectBuilder().query(fromIndex() + " | STATS AVG(value) BY test.keyword"); builder.profile(true); - if (Build.current().isSnapshot()) { - // Lock to shard level partitioning, so we get consistent profile output - builder.pragmas(Settings.builder().put("data_partitioning", "shard").build()); - } + // Lock to shard level partitioning, so we get consistent profile output + builder.pragmas(Settings.builder().put("data_partitioning", "shard").build()); Map result = runEsql(builder); List> signatures = new ArrayList<>(); @@ -356,7 +356,7 @@ public void testProfileOrdinalsGroupingOperator() throws IOException { signatures.add(sig); } - assertThat(signatures.get(0).get(2), equalTo("OrdinalsGroupingOperator[aggregators=[\"sum of longs\", \"count\"]]")); + assertThat(signatures, hasItem(hasItem("OrdinalsGroupingOperator[aggregators=[\"sum of longs\", \"count\"]]"))); } public void testInlineStatsProfile() throws IOException { From 6e788406568566fb38984bc68456a47c1b068e8e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 12 Oct 2024 01:41:28 +1100 Subject: [PATCH 004/449] Mute org.elasticsearch.xpack.esql.optimizer.PhysicalPlanOptimizerTests testPushSpatialIntersectsEvalToSource {default} #114627 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 4f9503607e430..af3f321045d29 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -381,6 +381,9 @@ tests: - class: org.elasticsearch.datastreams.logsdb.qa.LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT method: testMatchAllQuery issue: https://github.com/elastic/elasticsearch/issues/114607 +- class: org.elasticsearch.xpack.esql.optimizer.PhysicalPlanOptimizerTests + method: testPushSpatialIntersectsEvalToSource {default} + issue: https://github.com/elastic/elasticsearch/issues/114627 # Examples: # 
From 8d935fbdf098a635b3821e25f4bc783c54fa331e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 12 Oct 2024 01:41:43 +1100 Subject: [PATCH 005/449] Mute org.elasticsearch.xpack.esql.optimizer.PhysicalPlanOptimizerTests testPushWhereEvalToSource {default} #114628 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index af3f321045d29..5d14cabdd46ce 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -384,6 +384,9 @@ tests: - class: org.elasticsearch.xpack.esql.optimizer.PhysicalPlanOptimizerTests method: testPushSpatialIntersectsEvalToSource {default} issue: https://github.com/elastic/elasticsearch/issues/114627 +- class: org.elasticsearch.xpack.esql.optimizer.PhysicalPlanOptimizerTests + method: testPushWhereEvalToSource {default} + issue: https://github.com/elastic/elasticsearch/issues/114628 # Examples: # From 589cb8fcbce1ab703253d8626cdd6977f1f21c82 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Fri, 11 Oct 2024 10:55:27 -0400 Subject: [PATCH 006/449] Fixing test failure for #114556 (#114617) --- .../test/search.vectors/70_dense_vector_telemetry.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/70_dense_vector_telemetry.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/70_dense_vector_telemetry.yml index 66b05e4d0d156..16574ceb587b4 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/70_dense_vector_telemetry.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/70_dense_vector_telemetry.yml @@ -21,13 +21,13 @@ setup: element_type: byte index_options: type: hnsw + m: 16 + ef_construction: 100 vector2: type: dense_vector dims: 1024 index: true similarity: dot_product - index_options: - type: int8_hnsw vector3: type: dense_vector dims: 100 
From d37c1c636bfc87df0906a6427e824f6bca722245 Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 11 Oct 2024 15:52:28 +0100 Subject: [PATCH 007/449] AwaitsFixes for #114625 --- .../xpack/esql/optimizer/PhysicalPlanOptimizerTests.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 6746b8ff61268..8c7588b9cb5ca 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -3235,6 +3235,7 @@ public void testPushSpatialIntersectsStringToSource() { * }][_doc{f}#23], limit[], sort[] estimatedRowSize[304] * */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/114625") public void testPushWhereEvalToSource() { String query = """ FROM airports @@ -3269,6 +3270,7 @@ public void testPushWhereEvalToSource() { assertThat("Expected range to be less than 4", range.to(), equalTo(4)); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/114625") public void testPushSpatialIntersectsEvalToSource() { for (String query : new String[] { """ FROM airports From ddd576e8e4ca04b7913293dc14cdfc3184361a66 Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 11 Oct 2024 15:58:49 +0100 Subject: [PATCH 008/449] Revert "AwaitsFixes for #114625" This reverts commit d37c1c636bfc87df0906a6427e824f6bca722245. The automuter got there first. 
--- .../xpack/esql/optimizer/PhysicalPlanOptimizerTests.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 8c7588b9cb5ca..6746b8ff61268 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -3235,7 +3235,6 @@ public void testPushSpatialIntersectsStringToSource() { * }][_doc{f}#23], limit[], sort[] estimatedRowSize[304] * */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/114625") public void testPushWhereEvalToSource() { String query = """ FROM airports @@ -3270,7 +3269,6 @@ public void testPushWhereEvalToSource() { assertThat("Expected range to be less than 4", range.to(), equalTo(4)); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/114625") public void testPushSpatialIntersectsEvalToSource() { for (String query : new String[] { """ FROM airports From e56f24ffd00ee9746d84ba6055acce7ed781be4a Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Fri, 11 Oct 2024 17:56:31 +0200 Subject: [PATCH 009/449] SQL: Remove dependency on `org.elasticsearch.Version` (#112094) This removes SQL's use of `org.elasticsearch.Version` class and usages replaced by `SqlVersion`. All the currently considered released versions (`7.0.0` to `8.16.0`) have been declared as `SqlVersion` instances. These are still tested against. The last "known release" (`8.16.0`) is considered the "server compatibility version" and all clients at or past this release are compatible with the server; notably, they can be also on a newer version than server's. 
Clients released before this "server compatibility version" respect existing compatibility requirements (must/can be older up to one major lower, but past `7.7.0`). The "server compatibility version" will not be updated with newer stack releases (at least not until #112745 is addressed). Fixes #102689 --- .../xpack/sql/jdbc/EsDataSource.java | 2 +- .../xpack/sql/jdbc/EsDriver.java | 2 +- .../jdbc/DriverManagerRegistrationTests.java | 5 +- .../xpack/sql/jdbc/VersionParityTests.java | 39 ++-- .../xpack/sql/jdbc/VersionTests.java | 11 +- .../xpack/sql/jdbc/WebServerTestCase.java | 6 +- .../xpack/sql/qa/jdbc/ConnectionTestCase.java | 6 +- .../xpack/sql/qa/jdbc/JdbcTestUtils.java | 26 ++- .../sql/qa/jdbc/JdbcWarningsTestCase.java | 6 +- .../xpack/sql/qa/mixed_node/SqlSearchIT.java | 7 +- .../sql/qa/rest/BaseRestSqlTestCase.java | 19 +- .../xpack/sql/qa/rest/RestSqlTestCase.java | 9 +- .../sql/action/AbstractSqlQueryRequest.java | 11 +- .../xpack/sql/action/SqlQueryResponse.java | 9 +- .../sql/action/SqlQueryResponseTests.java | 6 +- .../sql/action/SqlRequestParsersTests.java | 8 +- .../xpack/sql/cli/CliSessionTests.java | 17 +- .../xpack/sql/cli/VersionTests.java | 7 +- .../xpack/sql/client/ClientVersion.java | 3 +- .../xpack/sql/proto/SqlVersion.java | 30 +-- .../xpack/sql/proto/SqlVersions.java | 219 ++++++++++++++++++ .../xpack/sql/proto/StringUtils.java | 8 +- .../xpack/sql/proto/VersionCompatibility.java | 59 +++++ .../xpack/sql/proto/SqlVersionTests.java | 43 +++- .../xpack/sql/action/SqlActionIT.java | 6 +- .../xpack/sql/analysis/analyzer/Verifier.java | 8 +- .../xpack/sql/execution/search/Querier.java | 5 +- .../extractor/CompositeKeyExtractor.java | 2 +- .../xpack/sql}/index/IndexCompatibility.java | 14 +- .../index/VersionCompatibilityChecks.java | 30 +-- .../sql/plan/logical/command/ShowColumns.java | 10 +- .../plan/logical/command/sys/SysColumns.java | 6 +- .../plan/logical/command/sys/SysTypes.java | 7 +- .../xpack/sql/session/SqlConfiguration.java | 4 +- 
.../xpack/sql/session/SqlSession.java | 5 +- .../xpack/sql/action/BasicFormatterTests.java | 4 +- .../analyzer/FieldAttributeTests.java | 66 +++--- .../analysis/index/IndexResolverTests.java | 20 +- .../logical/command/ShowColumnsTests.java | 19 +- .../logical/command/sys/SysColumnsTests.java | 45 ++-- .../logical/command/sys/SysTypesTests.java | 20 +- .../xpack/sql/plugin/TextFormatTests.java | 20 +- .../xpack/sql/util/SqlVersionUtils.java | 39 ++++ 43 files changed, 593 insertions(+), 295 deletions(-) create mode 100644 x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlVersions.java create mode 100644 x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/VersionCompatibility.java rename x-pack/plugin/{ql/src/main/java/org/elasticsearch/xpack/ql => sql/src/main/java/org/elasticsearch/xpack/sql}/index/IndexCompatibility.java (78%) rename x-pack/plugin/{ql/src/main/java/org/elasticsearch/xpack/ql => sql/src/main/java/org/elasticsearch/xpack/sql}/index/VersionCompatibilityChecks.java (58%) create mode 100644 x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/util/SqlVersionUtils.java diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsDataSource.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsDataSource.java index ad8c39b6345ba..b54a425836df2 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsDataSource.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsDataSource.java @@ -25,7 +25,7 @@ public class EsDataSource implements DataSource, Wrapper { static { - // invoke Version to perform classpath/jar sanity checks + // invoke version to perform classpath/jar sanity checks ClientVersion.CURRENT.toString(); } diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsDriver.java 
b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsDriver.java index 9c5734368bcd1..7a22f0df0f798 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsDriver.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsDriver.java @@ -23,7 +23,7 @@ public class EsDriver implements Driver { private static final EsDriver INSTANCE = new EsDriver(); static { - // invoke Version to perform classpath/jar sanity checks + // invoke version to perform classpath/jar sanity checks ClientVersion.CURRENT.toString(); try { diff --git a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/DriverManagerRegistrationTests.java b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/DriverManagerRegistrationTests.java index 273b7f75c5a2d..eff050993ed4e 100644 --- a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/DriverManagerRegistrationTests.java +++ b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/DriverManagerRegistrationTests.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.sql.jdbc; -import org.elasticsearch.Version; import org.elasticsearch.test.ESTestCase; import java.security.AccessController; @@ -27,8 +26,8 @@ public void testVersioning() throws Exception { /* This test will only work properly in gradle because in gradle we run the tests * using the jar. 
*/ - assertNotEquals(String.valueOf(Version.CURRENT.major), d.getMajorVersion()); - assertNotEquals(String.valueOf(Version.CURRENT.minor), d.getMinorVersion()); + assertNotEquals(String.valueOf(VersionTests.current().major), d.getMajorVersion()); + assertNotEquals(String.valueOf(VersionTests.current().minor), d.getMinorVersion()); }); } diff --git a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/VersionParityTests.java b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/VersionParityTests.java index 524a653de7e76..8fc11681c2303 100644 --- a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/VersionParityTests.java +++ b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/VersionParityTests.java @@ -7,17 +7,21 @@ package org.elasticsearch.xpack.sql.jdbc; -import org.elasticsearch.Version; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.rest.root.MainResponse; -import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.sql.client.ClientVersion; import org.elasticsearch.xpack.sql.proto.SqlVersion; +import org.elasticsearch.xpack.sql.proto.SqlVersions; import java.io.IOException; import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +import static org.elasticsearch.xpack.sql.proto.VersionCompatibility.INTRODUCING_VERSION_COMPATIBILITY; /** * Test class for JDBC-ES server versions checks. 
@@ -29,15 +33,11 @@ public class VersionParityTests extends WebServerTestCase { public void testExceptionThrownOnIncompatibleVersions() throws IOException, SQLException { String url = JdbcConfiguration.URL_PREFIX + webServerAddress(); - Version firstVersion = VersionUtils.getFirstVersion(); - Version version = Version.V_7_7_0; - do { - version = VersionUtils.getPreviousVersion(version); + for (var version = SqlVersions.getFirstVersion(); version.onOrAfter(INTRODUCING_VERSION_COMPATIBILITY) == false; version = + SqlVersions.getNextVersion(version)) { logger.info("Checking exception is thrown for version {}", version); prepareResponse(version); - // Client's version is wired up to patch level, excluding the qualifier => generate the test version as the server does it. - String versionString = SqlVersion.fromString(version.toString()).toString(); SQLException ex = expectThrows( SQLException.class, @@ -48,27 +48,30 @@ public void testExceptionThrownOnIncompatibleVersions() throws IOException, SQLE + ClientVersion.CURRENT.majorMinorToString() + " or newer; attempting to connect to a server " + "version " - + versionString, + + version, ex.getMessage() ); - } while (version.compareTo(firstVersion) > 0); + } } public void testNoExceptionThrownForCompatibleVersions() throws IOException { String url = JdbcConfiguration.URL_PREFIX + webServerAddress(); - Version version = Version.CURRENT; - try { - do { + List afterVersionCompatibility = SqlVersions.getAllVersions() + .stream() + .filter(v -> v.onOrAfter(INTRODUCING_VERSION_COMPATIBILITY)) + .collect(Collectors.toCollection(ArrayList::new)); + afterVersionCompatibility.add(VersionTests.current()); + for (var version : afterVersionCompatibility) { + try { prepareResponse(version); new JdbcHttpClient(new JdbcConnection(JdbcConfiguration.create(url, null, 0), false)); - version = VersionUtils.getPreviousVersion(version); - } while (version.compareTo(Version.V_7_7_0) >= 0); - } catch (SQLException sqle) { - fail("JDBC driver 
version and Elasticsearch server version should be compatible. Error: " + sqle); + } catch (SQLException sqle) { + fail("JDBC driver version and Elasticsearch server version should be compatible. Error: " + sqle); + } } } - void prepareResponse(Version version) throws IOException { + void prepareResponse(SqlVersion version) throws IOException { MainResponse response = version == null ? createCurrentVersionMainResponse() : createMainResponse(version); webServer().enqueue( new MockResponse().setResponseCode(200) diff --git a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/VersionTests.java b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/VersionTests.java index 1553077250c7e..de6a67ab9805a 100644 --- a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/VersionTests.java +++ b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/VersionTests.java @@ -6,15 +6,20 @@ */ package org.elasticsearch.xpack.sql.jdbc; +import org.elasticsearch.Build; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.client.ClientVersion; +import org.elasticsearch.xpack.sql.proto.SqlVersion; public class VersionTests extends ESTestCase { public void testVersionIsCurrent() { /* This test will only work properly in gradle because in gradle we run the tests * using the jar. */ - assertEquals(org.elasticsearch.Version.CURRENT.major, ClientVersion.CURRENT.major); - assertEquals(org.elasticsearch.Version.CURRENT.minor, ClientVersion.CURRENT.minor); - assertEquals(org.elasticsearch.Version.CURRENT.revision, ClientVersion.CURRENT.revision); + assertEquals(current(), ClientVersion.CURRENT); + } + + /** Returns the current stack version. Can be unreleased. 
*/ + public static SqlVersion current() { + return SqlVersion.fromString(Build.current().version()); } } diff --git a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/WebServerTestCase.java b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/WebServerTestCase.java index 85e296935676a..1d940e059af4d 100644 --- a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/WebServerTestCase.java +++ b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/WebServerTestCase.java @@ -8,13 +8,13 @@ package org.elasticsearch.xpack.sql.jdbc; import org.elasticsearch.Build; -import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.rest.root.MainResponse; import org.elasticsearch.test.BuildUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.xpack.sql.proto.SqlVersion; import org.junit.After; import org.junit.Before; @@ -42,10 +42,10 @@ public MockWebServer webServer() { } MainResponse createCurrentVersionMainResponse() { - return createMainResponse(Version.CURRENT); + return createMainResponse(VersionTests.current()); } - MainResponse createMainResponse(Version version) { + MainResponse createMainResponse(SqlVersion version) { // the SQL client only cares about node version, // so ignore index & transport versions here (just set them to current) String clusterUuid = randomAlphaOfLength(10); diff --git a/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ConnectionTestCase.java b/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ConnectionTestCase.java index db384964b3c84..15b9d3613bbb1 100644 --- a/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ConnectionTestCase.java +++ 
b/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ConnectionTestCase.java @@ -6,8 +6,6 @@ */ package org.elasticsearch.xpack.sql.qa.jdbc; -import org.elasticsearch.Version; - import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.SQLException; @@ -22,8 +20,8 @@ public void testConnectionProperties() throws SQLException { assertFalse(c.isClosed()); assertTrue(c.isReadOnly()); DatabaseMetaData md = c.getMetaData(); - assertEquals(Version.CURRENT.major, md.getDatabaseMajorVersion()); - assertEquals(Version.CURRENT.minor, md.getDatabaseMinorVersion()); + assertEquals(JdbcTestUtils.CURRENT.major, md.getDatabaseMajorVersion()); + assertEquals(JdbcTestUtils.CURRENT.minor, md.getDatabaseMinorVersion()); } } diff --git a/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcTestUtils.java b/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcTestUtils.java index fb5b69a053f13..01d2df1d710d2 100644 --- a/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcTestUtils.java +++ b/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcTestUtils.java @@ -6,8 +6,9 @@ */ package org.elasticsearch.xpack.sql.qa.jdbc; -import org.elasticsearch.Version; +import org.elasticsearch.Build; import org.elasticsearch.xpack.sql.jdbc.EsType; +import org.elasticsearch.xpack.sql.proto.SqlVersion; import org.elasticsearch.xpack.sql.proto.StringUtils; import java.math.BigInteger; @@ -26,10 +27,11 @@ import java.util.LinkedHashMap; import java.util.Map; -import static org.elasticsearch.Version.V_8_2_0; -import static org.elasticsearch.Version.V_8_4_0; import static org.elasticsearch.common.time.DateUtils.toMilliSeconds; import static org.elasticsearch.test.ESTestCase.randomLongBetween; +import static org.elasticsearch.xpack.sql.proto.VersionCompatibility.supportsDateNanos; +import static 
org.elasticsearch.xpack.sql.proto.VersionCompatibility.supportsUnsignedLong; +import static org.elasticsearch.xpack.sql.proto.VersionCompatibility.supportsVersionType; final class JdbcTestUtils { @@ -43,7 +45,9 @@ private JdbcTestUtils() {} static final LocalDate EPOCH = LocalDate.of(1970, 1, 1); static final String UNSIGNED_LONG_TYPE_NAME = "UNSIGNED_LONG"; - static final BigInteger UNSIGNED_LONG_MAX = BigInteger.ONE.shiftLeft(Long.SIZE).subtract(BigInteger.ONE); + + // Build's version is always a SemVer in JDBC tests + public static final SqlVersion CURRENT = SqlVersion.fromString(Build.current().version()); /* * The version of the driver that the QA (bwc-)tests run against. @@ -59,12 +63,11 @@ private JdbcTestUtils() {} * } * */ - static final Version JDBC_DRIVER_VERSION; + static final SqlVersion JDBC_DRIVER_VERSION; static { - // master's version is x.0.0-SNAPSHOT, tho Version#fromString() won't accept that back for recent versions - String jdbcDriverVersion = System.getProperty(DRIVER_VERSION_PROPERTY_NAME, "").replace("-SNAPSHOT", ""); - JDBC_DRIVER_VERSION = Version.fromString(jdbcDriverVersion); // takes empty and null strings, resolves them to CURRENT + String jdbcDriverVersion = System.getProperty(DRIVER_VERSION_PROPERTY_NAME, ""); + JDBC_DRIVER_VERSION = jdbcDriverVersion.isEmpty() ? 
CURRENT : SqlVersion.fromString(jdbcDriverVersion); // Note: keep in sync with org.elasticsearch.xpack.sql.jdbc.TypeUtils#CLASS_TO_TYPE Map, EsType> aMap = new LinkedHashMap<>(); @@ -150,15 +153,14 @@ static int extractNanosOnly(long nanos) { } static boolean versionSupportsDateNanos() { - return JDBC_DRIVER_VERSION.onOrAfter(Version.V_7_12_0); + return supportsDateNanos(JDBC_DRIVER_VERSION); } public static boolean isUnsignedLongSupported() { - return JDBC_DRIVER_VERSION.onOrAfter(V_8_2_0); + return supportsUnsignedLong(JDBC_DRIVER_VERSION); } public static boolean isVersionFieldTypeSupported() { - return JDBC_DRIVER_VERSION.onOrAfter(V_8_4_0); + return supportsVersionType(JDBC_DRIVER_VERSION); } - } diff --git a/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcWarningsTestCase.java b/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcWarningsTestCase.java index 663f2bffae2fa..d0ec4a13de2fa 100644 --- a/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcWarningsTestCase.java +++ b/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcWarningsTestCase.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.sql.qa.jdbc; -import org.elasticsearch.Version; import org.junit.Before; import java.io.IOException; @@ -20,14 +19,13 @@ import java.util.List; import java.util.Properties; +import static org.elasticsearch.xpack.sql.proto.VersionCompatibility.INTRODUCING_WARNING_HANDLING; import static org.elasticsearch.xpack.sql.qa.jdbc.JdbcTestUtils.JDBC_DRIVER_VERSION; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; public abstract class JdbcWarningsTestCase extends JdbcIntegrationTestCase { - private static final Version WARNING_HANDLING_ADDED_VERSION = Version.V_8_2_0; - @Before public void setupData() throws IOException { index("test_data", b -> b.field("foo", 1)); @@ -89,7 +87,7 @@ public void 
testClearWarnings() throws SQLException { } private void assumeWarningHandlingDriverVersion() { - assumeTrue("Driver does not yet handle deprecation warnings", JDBC_DRIVER_VERSION.onOrAfter(WARNING_HANDLING_ADDED_VERSION)); + assumeTrue("Driver does not yet handle deprecation warnings", JDBC_DRIVER_VERSION.onOrAfter(INTRODUCING_WARNING_HANDLING)); } } diff --git a/x-pack/plugin/sql/qa/mixed-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlSearchIT.java b/x-pack/plugin/sql/qa/mixed-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlSearchIT.java index f05eccb737ca2..78dce9d128cab 100644 --- a/x-pack/plugin/sql/qa/mixed-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlSearchIT.java +++ b/x-pack/plugin/sql/qa/mixed-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlSearchIT.java @@ -9,7 +9,6 @@ import org.apache.http.HttpHost; import org.apache.lucene.sandbox.document.HalfFloatPoint; -import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; @@ -19,6 +18,7 @@ import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.ql.TestNode; import org.elasticsearch.xpack.ql.TestNodes; +import org.elasticsearch.xpack.sql.proto.SqlVersion; import org.junit.After; import org.junit.Before; @@ -36,13 +36,14 @@ import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.xpack.ql.TestUtils.buildNodeAndVersions; import static org.elasticsearch.xpack.ql.TestUtils.readResource; +import static org.elasticsearch.xpack.sql.proto.VersionCompatibility.INTRODUCING_VERSION_FIELD_TYPE; public class SqlSearchIT extends ESRestTestCase { private static final String BWC_NODES_VERSION = System.getProperty("tests.bwc_nodes_version"); - // TODO[lor]: replace this with feature-based checks when we have one - private static final boolean 
SUPPORTS_VERSION_FIELD_QL_INTRODUCTION = Version.fromString(BWC_NODES_VERSION).onOrAfter(Version.V_8_4_0); + private static final boolean SUPPORTS_VERSION_FIELD_QL_INTRODUCTION = SqlVersion.fromString(BWC_NODES_VERSION) + .onOrAfter(INTRODUCING_VERSION_FIELD_TYPE); private static final String index = "test_sql_mixed_versions"; private static int numShards; diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/BaseRestSqlTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/BaseRestSqlTestCase.java index bd43d3d651e52..ece8409a022cd 100644 --- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/BaseRestSqlTestCase.java +++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/BaseRestSqlTestCase.java @@ -9,7 +9,7 @@ import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; -import org.elasticsearch.Version; +import org.elasticsearch.Build; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -20,6 +20,8 @@ import org.elasticsearch.xcontent.cbor.CborXContent; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.sql.proto.Mode; +import org.elasticsearch.xpack.sql.proto.SqlVersion; +import org.elasticsearch.xpack.sql.proto.SqlVersions; import org.elasticsearch.xpack.sql.proto.StringUtils; import java.io.IOException; @@ -54,6 +56,11 @@ public abstract class BaseRestSqlTestCase extends RemoteClusterAwareSqlRestTestC private static final String TEST_INDEX = "test"; private static final String DATA_STREAM_TEMPLATE = "test-ds-index-template"; + /** + * What's the version of the server that the clients should be compatible with? + * This will be either the stack version, or SqlVersions.getLatestVersion() if the stack version is not available. 
+ */ + private static final SqlVersion SERVER_COMPAT_VERSION = getServerCompatVersion(); public static class RequestObjectBuilder { private StringBuilder request; @@ -83,7 +90,7 @@ public RequestObjectBuilder mode(Object m) { if (isQuery) { Mode mode = (m instanceof Mode) ? (Mode) m : Mode.fromString(modeString); if (Mode.isDedicatedClient(mode)) { - version(Version.CURRENT.toString()); + version(SERVER_COMPAT_VERSION.toString()); } } return this; @@ -301,4 +308,12 @@ public static Tuple runSqlAsText(RequestObjectBuilder requestObj response.getHeader("Cursor") ); } + + private static SqlVersion getServerCompatVersion() { + try { + return SqlVersion.fromString(Build.current().version()); + } catch (Exception e) { + return SqlVersions.getLatestVersion(); + } + } } diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java index ca9532d8dc7d0..aa1cabe17161a 100644 --- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java +++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java @@ -10,7 +10,6 @@ import org.apache.http.HttpEntity; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; -import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -23,12 +22,13 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.NotEqualMessageBuilder; -import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonStringEncoder; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.sql.proto.CoreProtocol; import 
org.elasticsearch.xpack.sql.proto.Mode; +import org.elasticsearch.xpack.sql.proto.SqlVersion; +import org.elasticsearch.xpack.sql.proto.SqlVersions; import org.elasticsearch.xpack.sql.proto.StringUtils; import org.elasticsearch.xpack.sql.qa.ErrorsTestCase; import org.hamcrest.Matcher; @@ -60,7 +60,6 @@ import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.common.Strings.hasText; import static org.elasticsearch.xpack.ql.TestUtils.getNumberOfSearchContexts; -import static org.elasticsearch.xpack.ql.index.VersionCompatibilityChecks.INTRODUCING_UNSIGNED_LONG; import static org.elasticsearch.xpack.sql.proto.CoreProtocol.COLUMNS_NAME; import static org.elasticsearch.xpack.sql.proto.CoreProtocol.HEADER_NAME_ASYNC_ID; import static org.elasticsearch.xpack.sql.proto.CoreProtocol.HEADER_NAME_ASYNC_PARTIAL; @@ -76,6 +75,7 @@ import static org.elasticsearch.xpack.sql.proto.CoreProtocol.URL_PARAM_DELIMITER; import static org.elasticsearch.xpack.sql.proto.CoreProtocol.URL_PARAM_FORMAT; import static org.elasticsearch.xpack.sql.proto.CoreProtocol.WAIT_FOR_COMPLETION_TIMEOUT_NAME; +import static org.elasticsearch.xpack.sql.proto.VersionCompatibility.INTRODUCING_UNSIGNED_LONG; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -1159,7 +1159,8 @@ public void testBinaryFieldFiltering() throws IOException { public void testPreventedUnsignedLongMaskedAccess() throws IOException { loadUnsignedLongTestData(); - Version version = VersionUtils.randomVersionBetween(random(), null, VersionUtils.getPreviousVersion(INTRODUCING_UNSIGNED_LONG)); + var preVersionCompat = SqlVersions.getAllVersions().stream().filter(v -> v.onOrAfter(INTRODUCING_UNSIGNED_LONG) == false).toList(); + SqlVersion version = preVersionCompat.get(random().nextInt(preVersionCompat.size())); String query = query("SELECT unsigned_long::STRING FROM " + 
indexPattern("test")).version(version.toString()).toString(); expectBadRequest( () -> runSql(new StringEntity(query, ContentType.APPLICATION_JSON), "", randomMode()), diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/AbstractSqlQueryRequest.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/AbstractSqlQueryRequest.java index d711538ad1d09..46308790dc91a 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/AbstractSqlQueryRequest.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/AbstractSqlQueryRequest.java @@ -6,9 +6,7 @@ */ package org.elasticsearch.xpack.sql.action; -import org.elasticsearch.Build; import org.elasticsearch.TransportVersions; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.CompositeIndicesRequest; import org.elasticsearch.common.Strings; @@ -30,7 +28,8 @@ import org.elasticsearch.xpack.sql.proto.Mode; import org.elasticsearch.xpack.sql.proto.RequestInfo; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; -import org.elasticsearch.xpack.sql.proto.SqlVersion; +import org.elasticsearch.xpack.sql.proto.SqlVersions; +import org.elasticsearch.xpack.sql.proto.VersionCompatibility; import java.io.IOException; import java.time.ZoneId; @@ -288,15 +287,15 @@ public ActionRequestValidationException validate() { validationException ); } - } else if (SqlVersion.isClientCompatible(SqlVersion.fromId(Version.CURRENT.id), requestInfo().version()) == false) { + } else if (VersionCompatibility.isClientCompatible(SqlVersions.SERVER_COMPAT_VERSION, requestInfo().version()) == false) { validationException = addValidationError( "The [" + requestInfo().version() + "] version of the [" + mode.toString() + "] " - + "client is not compatible with Elasticsearch version [" - + Build.current().version() + + "client is not compatible 
with Elasticsearch server compatibility version [" + + SqlVersions.SERVER_COMPAT_VERSION + "]", validationException ); diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryResponse.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryResponse.java index 725eb0a2e3b01..978f42f7f50dc 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryResponse.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryResponse.java @@ -9,7 +9,6 @@ import com.fasterxml.jackson.core.JsonGenerator; import org.elasticsearch.TransportVersions; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -21,6 +20,7 @@ import org.elasticsearch.xpack.sql.proto.ColumnInfo; import org.elasticsearch.xpack.sql.proto.Mode; import org.elasticsearch.xpack.sql.proto.SqlVersion; +import org.elasticsearch.xpack.sql.proto.SqlVersions; import org.elasticsearch.xpack.sql.proto.StringUtils; import java.io.IOException; @@ -33,8 +33,7 @@ import static org.elasticsearch.xpack.sql.action.AbstractSqlQueryRequest.CURSOR; import static org.elasticsearch.xpack.sql.proto.Mode.CLI; import static org.elasticsearch.xpack.sql.proto.Mode.JDBC; -import static org.elasticsearch.xpack.sql.proto.SqlVersion.fromId; -import static org.elasticsearch.xpack.sql.proto.SqlVersion.isClientCompatible; +import static org.elasticsearch.xpack.sql.proto.VersionCompatibility.isClientCompatible; /** * Response to perform an sql query @@ -108,7 +107,7 @@ public SqlQueryResponse( ) { this.cursor = cursor; this.mode = mode; - this.sqlVersion = sqlVersion != null ? sqlVersion : fromId(Version.CURRENT.id); + this.sqlVersion = sqlVersion != null ? 
sqlVersion : SqlVersions.SERVER_COMPAT_VERSION; this.columnar = columnar; this.columns = columns; this.rows = rows; @@ -276,7 +275,7 @@ private static XContentBuilder toXContent(ColumnInfo info, XContentBuilder build public static XContentBuilder value(XContentBuilder builder, Mode mode, SqlVersion sqlVersion, Object value) throws IOException { if (value instanceof ZonedDateTime zdt) { // use the ISO format - if (mode == JDBC && isClientCompatible(SqlVersion.fromId(Version.CURRENT.id), sqlVersion)) { + if (mode == JDBC && isClientCompatible(SqlVersions.SERVER_COMPAT_VERSION, sqlVersion)) { builder.value(StringUtils.toString(zdt, sqlVersion)); } else { builder.value(StringUtils.toString(zdt)); diff --git a/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlQueryResponseTests.java b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlQueryResponseTests.java index 78c4ebf378770..e8637d8bd80c2 100644 --- a/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlQueryResponseTests.java +++ b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlQueryResponseTests.java @@ -33,7 +33,7 @@ import static org.elasticsearch.xpack.sql.action.Protocol.ID_NAME; import static org.elasticsearch.xpack.sql.action.Protocol.IS_PARTIAL_NAME; import static org.elasticsearch.xpack.sql.action.Protocol.IS_RUNNING_NAME; -import static org.elasticsearch.xpack.sql.proto.SqlVersion.DATE_NANOS_SUPPORT_VERSION; +import static org.elasticsearch.xpack.sql.proto.VersionCompatibility.INTRODUCING_DATE_NANOS; import static org.hamcrest.Matchers.hasSize; public class SqlQueryResponseTests extends AbstractXContentSerializingTestCase { @@ -118,7 +118,7 @@ public static SqlQueryResponse createRandomInstance( rows.add(row); } } - return new SqlQueryResponse(cursor, mode, DATE_NANOS_SUPPORT_VERSION, false, columns, rows, asyncExecutionId, isPartial, isRunning); + return new SqlQueryResponse(cursor, 
mode, INTRODUCING_DATE_NANOS, false, columns, rows, asyncExecutionId, isPartial, isRunning); } public void testToXContent() throws IOException { @@ -177,7 +177,7 @@ protected SqlQueryResponse doParseInstance(XContentParser parser) throws IOExcep return new SqlQueryResponse( protoResponse.cursor(), Mode.JDBC, - DATE_NANOS_SUPPORT_VERSION, + INTRODUCING_DATE_NANOS, false, protoResponse.columns(), protoResponse.rows(), diff --git a/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlRequestParsersTests.java b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlRequestParsersTests.java index 871e559eedf75..42229faa2981c 100644 --- a/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlRequestParsersTests.java +++ b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlRequestParsersTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.sql.action; -import org.elasticsearch.Version; import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESTestCase; @@ -18,6 +17,7 @@ import org.elasticsearch.xpack.sql.proto.CoreProtocol; import org.elasticsearch.xpack.sql.proto.Mode; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; +import org.elasticsearch.xpack.sql.proto.SqlVersions; import java.io.IOException; import java.util.ArrayList; @@ -154,7 +154,9 @@ public void testQueryRequestParser() throws IOException { String params; List list = new ArrayList<>(1); - final String clientVersion = Mode.isDedicatedClient(randomMode) ? "\"version\": \"" + Version.CURRENT.toString() + "\"," : ""; + final String clientVersion = Mode.isDedicatedClient(randomMode) + ? 
"\"version\": \"" + SqlVersions.SERVER_COMPAT_VERSION + "\"," + : ""; if (Mode.isDriver(randomMode)) { params = "{\"value\":123, \"type\":\"whatever\"}"; list.add(new SqlTypedParamValue("whatever", 123, true)); @@ -178,7 +180,7 @@ public void testQueryRequestParser() throws IOException { assertNull(request.clientId()); assertEquals(randomMode, request.mode()); if (Mode.isDedicatedClient(randomMode)) { - assertEquals(Version.CURRENT.toString(), request.version().toString()); + assertEquals(SqlVersions.SERVER_COMPAT_VERSION.toString(), request.version().toString()); } assertEquals("whatever", request.cursor()); assertEquals("select", request.query()); diff --git a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/CliSessionTests.java b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/CliSessionTests.java index bcb7fefec4559..b4fb2ed419310 100644 --- a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/CliSessionTests.java +++ b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/CliSessionTests.java @@ -6,19 +6,19 @@ */ package org.elasticsearch.xpack.sql.cli; -import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.UUIDs; -import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xpack.sql.cli.command.CliSession; import org.elasticsearch.xpack.sql.client.ClientException; import org.elasticsearch.xpack.sql.client.ClientVersion; import org.elasticsearch.xpack.sql.client.HttpClient; import org.elasticsearch.xpack.sql.proto.MainResponse; import org.elasticsearch.xpack.sql.proto.SqlVersion; +import org.elasticsearch.xpack.sql.proto.SqlVersions; import java.sql.SQLException; +import static org.elasticsearch.xpack.sql.proto.VersionCompatibility.INTRODUCING_VERSION_COMPATIBILITY; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -55,8 +55,11 @@ 
public void testConnection() throws Exception { public void testWrongServerVersion() throws Exception { HttpClient httpClient = mock(HttpClient.class); - Version v = VersionUtils.randomVersionBetween(random(), null, VersionUtils.getPreviousVersion(Version.V_7_7_0)); - SqlVersion version = new SqlVersion(v.major, v.minor, v.revision); + var preVersionCompat = SqlVersions.getAllVersions() + .stream() + .filter(v -> v.onOrAfter(INTRODUCING_VERSION_COMPATIBILITY) == false) + .toList(); + SqlVersion version = preVersionCompat.get(random().nextInt(preVersionCompat.size())); when(httpClient.serverInfo()).thenReturn( new MainResponse(randomAlphaOfLength(5), version.toString(), ClusterName.DEFAULT.value(), UUIDs.randomBase64UUID()) ); @@ -66,7 +69,7 @@ public void testWrongServerVersion() throws Exception { "This version of the CLI is only compatible with Elasticsearch version " + ClientVersion.CURRENT.majorMinorToString() + " or newer; attempting to connect to a server version " - + version.toString(), + + version, throwable.getMessage() ); verify(httpClient, times(1)).serverInfo(); @@ -75,8 +78,8 @@ public void testWrongServerVersion() throws Exception { public void testHigherServerVersion() throws Exception { HttpClient httpClient = mock(HttpClient.class); - Version v = VersionUtils.randomVersionBetween(random(), Version.V_7_7_0, null); - SqlVersion version = new SqlVersion(v.major, v.minor, v.revision); + var postVersionCompat = SqlVersions.getAllVersions().stream().filter(v -> v.onOrAfter(INTRODUCING_VERSION_COMPATIBILITY)).toList(); + SqlVersion version = postVersionCompat.get(random().nextInt(postVersionCompat.size())); when(httpClient.serverInfo()).thenReturn( new MainResponse(randomAlphaOfLength(5), version.toString(), ClusterName.DEFAULT.value(), UUIDs.randomBase64UUID()) ); diff --git a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/VersionTests.java 
b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/VersionTests.java index 9677a08c337e1..fc46da074147a 100644 --- a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/VersionTests.java +++ b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/VersionTests.java @@ -6,16 +6,15 @@ */ package org.elasticsearch.xpack.sql.cli; -import org.elasticsearch.Version; +import org.elasticsearch.Build; import org.elasticsearch.xpack.sql.client.ClientVersion; +import org.elasticsearch.xpack.sql.proto.SqlVersion; public class VersionTests extends SqlCliTestCase { public void testVersionIsCurrent() { /* This test will only work properly in gradle because in gradle we run the tests * using the jar. */ - assertEquals(Version.CURRENT.major, ClientVersion.CURRENT.major); - assertEquals(Version.CURRENT.minor, ClientVersion.CURRENT.minor); - assertEquals(Version.CURRENT.revision, ClientVersion.CURRENT.revision); + assertEquals(SqlVersion.fromString(Build.current().version()), ClientVersion.CURRENT); } } diff --git a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/ClientVersion.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/ClientVersion.java index 63e31e6c9a107..1e3ccbf22b718 100644 --- a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/ClientVersion.java +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/ClientVersion.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.client; import org.elasticsearch.xpack.sql.proto.SqlVersion; +import org.elasticsearch.xpack.sql.proto.VersionCompatibility; import java.io.IOException; import java.net.JarURLConnection; @@ -119,7 +120,7 @@ static SqlVersion extractVersion(URL url) { // as well. public static boolean isServerCompatible(SqlVersion server) { // Starting with this version, the compatibility logic moved from the client to the server. 
- return SqlVersion.hasVersionCompatibility(server); + return VersionCompatibility.hasVersionCompatibility(server); } public static int jdbcMajorVersion() { diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlVersion.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlVersion.java index 09a931dc7204a..856ff2bcb1ea1 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlVersion.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlVersion.java @@ -30,10 +30,6 @@ public class SqlVersion implements Comparable { public static final int MINOR_MULTIPLIER = REVISION_MULTIPLIER * REVISION_MULTIPLIER; public static final int MAJOR_MULTIPLIER = REVISION_MULTIPLIER * MINOR_MULTIPLIER; - public static final SqlVersion V_7_7_0 = new SqlVersion(7, 7, 0); - public static final SqlVersion V_7_12_0 = new SqlVersion(7, 12, 0); - public static final SqlVersion DATE_NANOS_SUPPORT_VERSION = V_7_12_0; // TODO: move to VersionCompatibilityChecks - public SqlVersion(byte major, byte minor, byte revision) { this(toString(major, minor, revision), major, minor, revision); } @@ -148,29 +144,7 @@ public int compareTo(SqlVersion o) { return id - o.id; } - public static int majorMinorId(SqlVersion v) { - return v.major * MAJOR_MULTIPLIER + v.minor * MINOR_MULTIPLIER; - } - - public int compareToMajorMinor(SqlVersion o) { - return majorMinorId(this) - majorMinorId(o); - } - - public static boolean hasVersionCompatibility(SqlVersion version) { - return version.compareTo(V_7_7_0) >= 0; - } - - // A client is version-compatible with the server if: - // - it supports version compatibility (past or on 7.7.0); and - // - it's not on a version newer than server's; and - // - it's major version is at most one unit behind server's. 
- public static boolean isClientCompatible(SqlVersion server, SqlVersion client) { - // ES's Version.CURRENT not available (core not a dependency), so it needs to be passed in as a parameter. - return hasVersionCompatibility(client) && server.compareTo(client) >= 0 && server.major - client.major <= 1; - } - - // TODO: move to VersionCompatibilityChecks - public static boolean supportsDateNanos(SqlVersion version) { - return DATE_NANOS_SUPPORT_VERSION.compareTo(version) <= 0; + public boolean onOrAfter(SqlVersion other) { + return this.compareTo(other) >= 0; } } diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlVersions.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlVersions.java new file mode 100644 index 0000000000000..7e0960550e634 --- /dev/null +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlVersions.java @@ -0,0 +1,219 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.sql.proto; + +import java.lang.reflect.Field; +import java.lang.reflect.Modifier; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import static org.elasticsearch.xpack.sql.proto.SqlVersion.fromId; + +public final class SqlVersions { + + public static final SqlVersion V_7_0_0 = fromId(7_00_00_99); + public static final SqlVersion V_7_0_1 = fromId(7_00_01_99); + public static final SqlVersion V_7_1_0 = fromId(7_01_00_99); + public static final SqlVersion V_7_1_1 = fromId(7_01_01_99); + public static final SqlVersion V_7_2_0 = fromId(7_02_00_99); + public static final SqlVersion V_7_2_1 = fromId(7_02_01_99); + public static final SqlVersion V_7_3_0 = fromId(7_03_00_99); + public static final SqlVersion V_7_3_1 = fromId(7_03_01_99); + public static final SqlVersion V_7_3_2 = fromId(7_03_02_99); + public static final SqlVersion V_7_4_0 = fromId(7_04_00_99); + public static final SqlVersion V_7_4_1 = fromId(7_04_01_99); + public static final SqlVersion V_7_4_2 = fromId(7_04_02_99); + public static final SqlVersion V_7_5_0 = fromId(7_05_00_99); + public static final SqlVersion V_7_5_1 = fromId(7_05_01_99); + public static final SqlVersion V_7_5_2 = fromId(7_05_02_99); + public static final SqlVersion V_7_6_0 = fromId(7_06_00_99); + public static final SqlVersion V_7_6_1 = fromId(7_06_01_99); + public static final SqlVersion V_7_6_2 = fromId(7_06_02_99); + public static final SqlVersion V_7_7_0 = fromId(7_07_00_99); + public static final SqlVersion V_7_7_1 = fromId(7_07_01_99); + public static final SqlVersion V_7_8_0 = fromId(7_08_00_99); + public static final SqlVersion V_7_8_1 = fromId(7_08_01_99); + public static final SqlVersion V_7_9_0 = fromId(7_09_00_99); + public static final SqlVersion V_7_9_1 = fromId(7_09_01_99); + public static final SqlVersion V_7_9_2 = fromId(7_09_02_99); + public static final SqlVersion V_7_9_3 = fromId(7_09_03_99); + public static final SqlVersion V_7_10_0 = 
fromId(7_10_00_99); + public static final SqlVersion V_7_10_1 = fromId(7_10_01_99); + public static final SqlVersion V_7_10_2 = fromId(7_10_02_99); + public static final SqlVersion V_7_11_0 = fromId(7_11_00_99); + public static final SqlVersion V_7_11_1 = fromId(7_11_01_99); + public static final SqlVersion V_7_11_2 = fromId(7_11_02_99); + public static final SqlVersion V_7_12_0 = fromId(7_12_00_99); + public static final SqlVersion V_7_12_1 = fromId(7_12_01_99); + public static final SqlVersion V_7_13_0 = fromId(7_13_00_99); + public static final SqlVersion V_7_13_1 = fromId(7_13_01_99); + public static final SqlVersion V_7_13_2 = fromId(7_13_02_99); + public static final SqlVersion V_7_13_3 = fromId(7_13_03_99); + public static final SqlVersion V_7_13_4 = fromId(7_13_04_99); + public static final SqlVersion V_7_14_0 = fromId(7_14_00_99); + public static final SqlVersion V_7_14_1 = fromId(7_14_01_99); + public static final SqlVersion V_7_14_2 = fromId(7_14_02_99); + public static final SqlVersion V_7_15_0 = fromId(7_15_00_99); + public static final SqlVersion V_7_15_1 = fromId(7_15_01_99); + public static final SqlVersion V_7_15_2 = fromId(7_15_02_99); + public static final SqlVersion V_7_16_0 = fromId(7_16_00_99); + public static final SqlVersion V_7_16_1 = fromId(7_16_01_99); + public static final SqlVersion V_7_16_2 = fromId(7_16_02_99); + public static final SqlVersion V_7_16_3 = fromId(7_16_03_99); + public static final SqlVersion V_7_17_0 = fromId(7_17_00_99); + public static final SqlVersion V_7_17_1 = fromId(7_17_01_99); + public static final SqlVersion V_7_17_2 = fromId(7_17_02_99); + public static final SqlVersion V_7_17_3 = fromId(7_17_03_99); + public static final SqlVersion V_7_17_4 = fromId(7_17_04_99); + public static final SqlVersion V_7_17_5 = fromId(7_17_05_99); + public static final SqlVersion V_7_17_6 = fromId(7_17_06_99); + public static final SqlVersion V_7_17_7 = fromId(7_17_07_99); + public static final SqlVersion V_7_17_8 = 
fromId(7_17_08_99); + public static final SqlVersion V_7_17_9 = fromId(7_17_09_99); + public static final SqlVersion V_7_17_10 = fromId(7_17_10_99); + public static final SqlVersion V_7_17_11 = fromId(7_17_11_99); + public static final SqlVersion V_7_17_12 = fromId(7_17_12_99); + public static final SqlVersion V_7_17_13 = fromId(7_17_13_99); + public static final SqlVersion V_7_17_14 = fromId(7_17_14_99); + public static final SqlVersion V_7_17_15 = fromId(7_17_15_99); + public static final SqlVersion V_7_17_16 = fromId(7_17_16_99); + public static final SqlVersion V_7_17_17 = fromId(7_17_17_99); + public static final SqlVersion V_7_17_18 = fromId(7_17_18_99); + public static final SqlVersion V_7_17_19 = fromId(7_17_19_99); + public static final SqlVersion V_7_17_20 = fromId(7_17_20_99); + public static final SqlVersion V_7_17_21 = fromId(7_17_21_99); + public static final SqlVersion V_7_17_22 = fromId(7_17_22_99); + public static final SqlVersion V_7_17_23 = fromId(7_17_23_99); + public static final SqlVersion V_7_17_24 = fromId(7_17_24_99); + + public static final SqlVersion V_8_0_0 = fromId(8_00_00_99); + public static final SqlVersion V_8_0_1 = fromId(8_00_01_99); + public static final SqlVersion V_8_1_0 = fromId(8_01_00_99); + public static final SqlVersion V_8_1_1 = fromId(8_01_01_99); + public static final SqlVersion V_8_1_2 = fromId(8_01_02_99); + public static final SqlVersion V_8_1_3 = fromId(8_01_03_99); + public static final SqlVersion V_8_2_0 = fromId(8_02_00_99); + public static final SqlVersion V_8_2_1 = fromId(8_02_01_99); + public static final SqlVersion V_8_2_2 = fromId(8_02_02_99); + public static final SqlVersion V_8_2_3 = fromId(8_02_03_99); + public static final SqlVersion V_8_3_0 = fromId(8_03_00_99); + public static final SqlVersion V_8_3_1 = fromId(8_03_01_99); + public static final SqlVersion V_8_3_2 = fromId(8_03_02_99); + public static final SqlVersion V_8_3_3 = fromId(8_03_03_99); + public static final SqlVersion V_8_4_0 = 
fromId(8_04_00_99); + public static final SqlVersion V_8_4_1 = fromId(8_04_01_99); + public static final SqlVersion V_8_4_2 = fromId(8_04_02_99); + public static final SqlVersion V_8_4_3 = fromId(8_04_03_99); + public static final SqlVersion V_8_5_0 = fromId(8_05_00_99); + public static final SqlVersion V_8_5_1 = fromId(8_05_01_99); + public static final SqlVersion V_8_5_2 = fromId(8_05_02_99); + public static final SqlVersion V_8_5_3 = fromId(8_05_03_99); + public static final SqlVersion V_8_6_0 = fromId(8_06_00_99); + public static final SqlVersion V_8_6_1 = fromId(8_06_01_99); + public static final SqlVersion V_8_6_2 = fromId(8_06_02_99); + public static final SqlVersion V_8_7_0 = fromId(8_07_00_99); + public static final SqlVersion V_8_7_1 = fromId(8_07_01_99); + public static final SqlVersion V_8_8_0 = fromId(8_08_00_99); + public static final SqlVersion V_8_8_1 = fromId(8_08_01_99); + public static final SqlVersion V_8_8_2 = fromId(8_08_02_99); + public static final SqlVersion V_8_9_0 = fromId(8_09_00_99); + public static final SqlVersion V_8_9_1 = fromId(8_09_01_99); + public static final SqlVersion V_8_9_2 = fromId(8_09_02_99); + public static final SqlVersion V_8_10_0 = fromId(8_10_00_99); + public static final SqlVersion V_8_10_1 = fromId(8_10_01_99); + public static final SqlVersion V_8_10_2 = fromId(8_10_02_99); + public static final SqlVersion V_8_10_3 = fromId(8_10_03_99); + public static final SqlVersion V_8_10_4 = fromId(8_10_04_99); + public static final SqlVersion V_8_11_0 = fromId(8_11_00_99); + public static final SqlVersion V_8_11_1 = fromId(8_11_01_99); + public static final SqlVersion V_8_11_2 = fromId(8_11_02_99); + public static final SqlVersion V_8_11_3 = fromId(8_11_03_99); + public static final SqlVersion V_8_11_4 = fromId(8_11_04_99); + public static final SqlVersion V_8_12_0 = fromId(8_12_00_99); + public static final SqlVersion V_8_12_1 = fromId(8_12_01_99); + public static final SqlVersion V_8_12_2 = fromId(8_12_02_99); + public 
static final SqlVersion V_8_13_0 = fromId(8_13_00_99); + public static final SqlVersion V_8_13_1 = fromId(8_13_01_99); + public static final SqlVersion V_8_13_2 = fromId(8_13_02_99); + public static final SqlVersion V_8_13_3 = fromId(8_13_03_99); + public static final SqlVersion V_8_13_4 = fromId(8_13_04_99); + public static final SqlVersion V_8_14_0 = fromId(8_14_00_99); + public static final SqlVersion V_8_14_1 = fromId(8_14_01_99); + public static final SqlVersion V_8_14_2 = fromId(8_14_02_99); + public static final SqlVersion V_8_14_3 = fromId(8_14_03_99); + public static final SqlVersion V_8_15_0 = fromId(8_15_00_99); + public static final SqlVersion V_8_15_1 = fromId(8_15_01_99); + public static final SqlVersion V_8_16_0 = fromId(8_16_00_99); + + static final List DECLARED_VERSIONS = getDeclaredVersions(); + + /** + * What's the version of the server that the clients should be compatible with? + */ + public static final SqlVersion SERVER_COMPAT_VERSION = getLatestVersion(); + + public static SqlVersion getFirstVersion() { + return DECLARED_VERSIONS.get(0); + } + + public static SqlVersion getLatestVersion() { + return DECLARED_VERSIONS.get(DECLARED_VERSIONS.size() - 1); + } + + public static SqlVersion getPreviousVersion(SqlVersion version) { + int index = Collections.binarySearch(DECLARED_VERSIONS, version); + if (index < 1) { + throw new IllegalArgumentException("couldn't find any released versions before [" + version + "]"); + } + return DECLARED_VERSIONS.get(index - 1); + } + + public static SqlVersion getNextVersion(SqlVersion version) { + int index = Collections.binarySearch(DECLARED_VERSIONS, version); + if (index >= DECLARED_VERSIONS.size() - 1) { + throw new IllegalArgumentException("couldn't find any released versions before [" + version + "]"); + } + return DECLARED_VERSIONS.get(index + 1); + } + + public static List getAllVersions() { + return DECLARED_VERSIONS; + } + + // lifted from org.elasticsearch.Version#getDeclaredVersions + private static 
List getDeclaredVersions() { + final Field[] fields = SqlVersions.class.getFields(); + final List versions = new ArrayList<>(fields.length); + for (final Field field : fields) { + final int mod = field.getModifiers(); + if (false == (Modifier.isStatic(mod) && Modifier.isFinal(mod) && Modifier.isPublic(mod))) { + continue; + } + if (field.getType() != SqlVersion.class) { + continue; + } + switch (field.getName()) { + case "LATEST": + case "SERVER_COMPAT_VERSION": + continue; + } + assert field.getName().matches("V(_\\d+){3}?") : field.getName(); + try { + if (field.get(null) == null) { + throw new IllegalStateException("field " + field.getName() + " is null"); + } + versions.add(((SqlVersion) field.get(null))); + } catch (final IllegalAccessException e) { + throw new RuntimeException(e); + } + } + Collections.sort(versions); + return versions; + } +} diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/StringUtils.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/StringUtils.java index cf0a189a44b96..75bab66e5b23e 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/StringUtils.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/StringUtils.java @@ -24,7 +24,7 @@ import static java.time.temporal.ChronoField.MINUTE_OF_HOUR; import static java.time.temporal.ChronoField.NANO_OF_SECOND; import static java.time.temporal.ChronoField.SECOND_OF_MINUTE; -import static org.elasticsearch.xpack.sql.proto.SqlVersion.DATE_NANOS_SUPPORT_VERSION; +import static org.elasticsearch.xpack.sql.proto.VersionCompatibility.INTRODUCING_DATE_NANOS; public final class StringUtils { @@ -82,7 +82,7 @@ private StringUtils() {} // This method doesn't support compatibility with older JDBC drivers public static String toString(Object value) { - return toString(value, DATE_NANOS_SUPPORT_VERSION); + return toString(value, INTRODUCING_DATE_NANOS); } public static 
String toString(Object value, SqlVersion sqlVersion) { @@ -91,14 +91,14 @@ public static String toString(Object value, SqlVersion sqlVersion) { } if (value instanceof ZonedDateTime) { - if (SqlVersion.supportsDateNanos(sqlVersion)) { + if (VersionCompatibility.supportsDateNanos(sqlVersion)) { return ((ZonedDateTime) value).format(ISO_DATETIME_WITH_NANOS); } else { return ((ZonedDateTime) value).format(ISO_DATETIME_WITH_MILLIS); } } if (value instanceof OffsetTime) { - if (SqlVersion.supportsDateNanos(sqlVersion)) { + if (VersionCompatibility.supportsDateNanos(sqlVersion)) { return ((OffsetTime) value).format(ISO_TIME_WITH_NANOS); } else { return ((OffsetTime) value).format(ISO_TIME_WITH_MILLIS); diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/VersionCompatibility.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/VersionCompatibility.java new file mode 100644 index 0000000000000..691f140111c8f --- /dev/null +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/VersionCompatibility.java @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.sql.proto; + +public class VersionCompatibility { + + public static final SqlVersion INTRODUCING_VERSION_COMPATIBILITY = SqlVersions.V_7_7_0; + public static final SqlVersion INTRODUCING_DATE_NANOS = SqlVersions.V_7_12_0; + public static final SqlVersion INTRODUCING_UNSIGNED_LONG = SqlVersions.V_8_2_0; + public static final SqlVersion INTRODUCING_WARNING_HANDLING = SqlVersions.V_8_2_0; + public static final SqlVersion INTRODUCING_VERSION_FIELD_TYPE = SqlVersions.V_8_4_0; + public static final SqlVersion INTRODUCING_VERSIONING_INDEPENDENT_FEATURES = SqlVersions.V_8_16_0; + + public static boolean hasVersionCompatibility(SqlVersion version) { + return version.onOrAfter(INTRODUCING_VERSION_COMPATIBILITY); + } + + /** Is the client on or past a version that is SQL-specific, independent of stack versioning? */ + public static boolean hasVersioningIndependentFeatures(SqlVersion version) { + return version.onOrAfter(INTRODUCING_VERSIONING_INDEPENDENT_FEATURES); + } + + // A client is version-compatible with the server if: + // - it is on or past the version-independent feature gating version; OR + // - it supports version compatibility (past or on 7.7.0); AND + // - it's not on a version newer than server's; AND + // - it's major version is at most one unit behind server's. + public static boolean isClientCompatible(SqlVersion server, SqlVersion client) { + if (hasVersioningIndependentFeatures(client)) { + return true; + } + // ES's CURRENT not available (core not a dependency), so it needs to be passed in as a parameter. + return hasVersionCompatibility(client) && server.onOrAfter(client) && server.major - client.major <= 1; + } + + // TODO: move to VersionCompatibilityChecks + public static boolean supportsDateNanos(SqlVersion version) { + return version.onOrAfter(INTRODUCING_DATE_NANOS); + } + + /** + * Does the provided {@code version} support the unsigned_long type (PR#60050)? 
+ */ + public static boolean supportsUnsignedLong(SqlVersion version) { + return version.onOrAfter(INTRODUCING_UNSIGNED_LONG); + } + + /** + * Does the provided {@code version} support the version type (PR#85502)? + */ + public static boolean supportsVersionType(SqlVersion version) { + return version.onOrAfter(INTRODUCING_VERSION_FIELD_TYPE); + } +} diff --git a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/proto/SqlVersionTests.java b/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/proto/SqlVersionTests.java index 65fccf9fc4c44..563802c836b89 100644 --- a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/proto/SqlVersionTests.java +++ b/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/proto/SqlVersionTests.java @@ -13,8 +13,9 @@ import static org.elasticsearch.xpack.sql.proto.SqlVersion.MAJOR_MULTIPLIER; import static org.elasticsearch.xpack.sql.proto.SqlVersion.MINOR_MULTIPLIER; import static org.elasticsearch.xpack.sql.proto.SqlVersion.REVISION_MULTIPLIER; -import static org.elasticsearch.xpack.sql.proto.SqlVersion.V_7_7_0; -import static org.elasticsearch.xpack.sql.proto.SqlVersion.isClientCompatible; +import static org.elasticsearch.xpack.sql.proto.SqlVersions.V_7_7_0; +import static org.elasticsearch.xpack.sql.proto.VersionCompatibility.INTRODUCING_VERSIONING_INDEPENDENT_FEATURES; +import static org.elasticsearch.xpack.sql.proto.VersionCompatibility.isClientCompatible; public class SqlVersionTests extends ESTestCase { public void test123FromString() { @@ -37,6 +38,17 @@ public void test123AlphaFromString() { assertEquals("1.2.3-Alpha", ver.version); } + public void test123AlphaWithIdFromString() { + String version = "1.2.3-Alpha[" + randomIntBetween(0, 100_000_000) + "]"; + SqlVersion ver = SqlVersion.fromString(version); + assertEquals(1, ver.major); + assertEquals(2, ver.minor); + assertEquals(3, ver.revision); + assertEquals(REVISION_MULTIPLIER - 1, ver.build); + 
assertEquals(1 * MAJOR_MULTIPLIER + 2 * MINOR_MULTIPLIER + 3 * REVISION_MULTIPLIER + REVISION_MULTIPLIER - 1, ver.id); + assertEquals(version, ver.version); + } + public void test123AlphaSnapshotFromString() { SqlVersion ver = SqlVersion.fromString("1.2.3-Alpha-SNAPSHOT"); assertEquals(1, ver.major); @@ -84,16 +96,28 @@ public void testVersionCompatibilityClientWithNoCompatibility() { } public void testVersionCompatibilityClientNewer() { - int major = randomIntBetween(7, 99); - SqlVersion server = new SqlVersion(major, randomIntBetween(major > 7 ? 0 : 7, 99), randomIntBetween(0, 98)); + SqlVersion server = randomReleasedVersion(false); SqlVersion client = new SqlVersion(server.major, server.minor, (byte) (server.revision + 1)); assertFalse(isClientCompatible(server, client)); } + public void testVersionCompatibilityClientVersionIndependentFeatures() { + SqlVersion server = new SqlVersion( + randomIntBetween(INTRODUCING_VERSIONING_INDEPENDENT_FEATURES.major, 99), + randomIntBetween(INTRODUCING_VERSIONING_INDEPENDENT_FEATURES.minor, 99), + randomIntBetween(INTRODUCING_VERSIONING_INDEPENDENT_FEATURES.revision, 99) + ); + SqlVersion client = new SqlVersion( + randomIntBetween(INTRODUCING_VERSIONING_INDEPENDENT_FEATURES.major, 99), + randomIntBetween(INTRODUCING_VERSIONING_INDEPENDENT_FEATURES.minor, 99), + randomIntBetween(INTRODUCING_VERSIONING_INDEPENDENT_FEATURES.revision, 99) + ); + assertTrue(server + " vs. 
" + client, isClientCompatible(server, client)); + } + public void testVersionCompatibilityClientTooOld() { - int major = randomIntBetween(9, 99); - SqlVersion server = new SqlVersion(major, randomIntBetween(0, 99), randomIntBetween(0, 99)); - SqlVersion client = new SqlVersion(major - 2, randomIntBetween(0, 99), randomIntBetween(0, 99)); + SqlVersion server = randomReleasedVersion(false); + SqlVersion client = new SqlVersion(server.major - 2, randomIntBetween(0, 99), randomIntBetween(0, 99)); assertFalse(isClientCompatible(server, client)); } @@ -109,4 +133,9 @@ public void testVersionCompatibile() { SqlVersion server = new SqlVersion(serverMajor, serverMinor, serverRevision); assertTrue(isClientCompatible(server, client)); } + + private static SqlVersion randomReleasedVersion(boolean includeVersioningIndependent) { + var allVersions = SqlVersions.getAllVersions(); + return allVersions.get(randomIntBetween(0, allVersions.size() - 1 - (includeVersioningIndependent ? 0 : 1))); + } } diff --git a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlActionIT.java b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlActionIT.java index 3b7da36eebe9e..3f7c74f09f6da 100644 --- a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlActionIT.java +++ b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlActionIT.java @@ -6,11 +6,11 @@ */ package org.elasticsearch.xpack.sql.action; -import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.xpack.sql.proto.ColumnInfo; import org.elasticsearch.xpack.sql.proto.Mode; +import org.elasticsearch.xpack.sql.proto.SqlVersions; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; @@ -31,7 +31,7 @@ public void testSqlAction() { 
String columns = dataBeforeCount ? "data, count" : "count, data"; SqlQueryResponse response = new SqlQueryRequestBuilder(client()).query("SELECT " + columns + " FROM test ORDER BY count") .mode(Mode.JDBC) - .version(Version.CURRENT.toString()) + .version(SqlVersions.SERVER_COMPAT_VERSION.toString()) .get(); assertThat(response.size(), equalTo(2L)); assertThat(response.columns(), hasSize(2)); @@ -50,7 +50,7 @@ public void testSqlAction() { public void testSqlActionCurrentVersion() { SqlQueryResponse response = new SqlQueryRequestBuilder(client()).query("SELECT true") .mode(randomFrom(Mode.CLI, Mode.JDBC)) - .version(Version.CURRENT.toString()) + .version(SqlVersions.SERVER_COMPAT_VERSION.toString()) .get(); assertThat(response.size(), equalTo(1L)); assertEquals(true, response.rows().get(0).get(0)); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java index 4c6d06738e16f..3625733227da5 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.sql.analysis.analyzer; -import org.elasticsearch.Version; import org.elasticsearch.core.Tuple; import org.elasticsearch.xpack.ql.capabilities.Unresolvable; import org.elasticsearch.xpack.ql.common.Failure; @@ -77,11 +76,11 @@ import static java.util.stream.Collectors.toMap; import static org.elasticsearch.xpack.ql.analyzer.VerifierChecks.checkFilterConditionType; import static org.elasticsearch.xpack.ql.common.Failure.fail; -import static org.elasticsearch.xpack.ql.index.VersionCompatibilityChecks.isTypeSupportedInVersion; -import static org.elasticsearch.xpack.ql.index.VersionCompatibilityChecks.versionIntroducingType; import static org.elasticsearch.xpack.ql.type.DataTypes.BINARY; import 
static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.util.CollectionUtils.combine; +import static org.elasticsearch.xpack.sql.index.VersionCompatibilityChecks.isTypeSupportedInVersion; +import static org.elasticsearch.xpack.sql.index.VersionCompatibilityChecks.versionIntroducingType; import static org.elasticsearch.xpack.sql.stats.FeatureMetric.COMMAND; import static org.elasticsearch.xpack.sql.stats.FeatureMetric.GROUPBY; import static org.elasticsearch.xpack.sql.stats.FeatureMetric.HAVING; @@ -1000,9 +999,8 @@ private static void checkCastOnInexact(LogicalPlan p, Set localFailures } private static void checkClientSupportsDataTypes(LogicalPlan p, Set localFailures, SqlVersion version) { - Version ver = Version.fromId(version.id); p.output().forEach(e -> { - if (e.resolved() && isTypeSupportedInVersion(e.dataType(), ver) == false) { + if (e.resolved() && isTypeSupportedInVersion(e.dataType(), version) == false) { localFailures.add( fail( e, diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index 9cf60ec3bb2e4..b2ce91140de76 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.search.TotalHits; import org.apache.lucene.util.PriorityQueue; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DelegatingActionListener; import org.elasticsearch.action.search.ClosePointInTimeRequest; @@ -100,7 +101,7 @@ import static org.elasticsearch.action.ActionListener.wrap; import static org.elasticsearch.xpack.ql.execution.search.extractor.AbstractFieldHitExtractor.MultiValueSupport.LENIENT; import 
static org.elasticsearch.xpack.ql.execution.search.extractor.AbstractFieldHitExtractor.MultiValueSupport.NONE; -import static org.elasticsearch.xpack.ql.index.VersionCompatibilityChecks.INTRODUCING_UNSIGNED_LONG; +import static org.elasticsearch.xpack.sql.proto.VersionCompatibility.INTRODUCING_UNSIGNED_LONG; // TODO: add retry/back-off public class Querier { @@ -201,7 +202,7 @@ public static void closePointInTime(Client client, BytesReference pointInTimeId, public static SearchRequest prepareRequest(SearchSourceBuilder source, SqlConfiguration cfg, boolean includeFrozen, String... indices) { source.timeout(cfg.requestTimeout()); - SearchRequest searchRequest = new SearchRequest(INTRODUCING_UNSIGNED_LONG); + SearchRequest searchRequest = new SearchRequest(Version.fromId(INTRODUCING_UNSIGNED_LONG.id)); if (source.pointInTimeBuilder() == null) { searchRequest.indices(indices); searchRequest.indicesOptions( diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractor.java index 45bb7623f815a..bf54dca279dd0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractor.java @@ -22,11 +22,11 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.xpack.ql.index.VersionCompatibilityChecks.INTRODUCING_UNSIGNED_LONG_TRANSPORT; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.toUnsignedLong; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; import static org.elasticsearch.xpack.ql.type.DataTypes.NULL; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; +import static 
org.elasticsearch.xpack.sql.index.VersionCompatibilityChecks.INTRODUCING_UNSIGNED_LONG_TRANSPORT; import static org.elasticsearch.xpack.sql.type.SqlDataTypes.isDateBased; public class CompositeKeyExtractor implements BucketExtractor { diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/IndexCompatibility.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/index/IndexCompatibility.java similarity index 78% rename from x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/IndexCompatibility.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/index/IndexCompatibility.java index 2f99854679d8f..dbdedd5b49cdb 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/IndexCompatibility.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/index/IndexCompatibility.java @@ -5,22 +5,24 @@ * 2.0. */ -package org.elasticsearch.xpack.ql.index; +package org.elasticsearch.xpack.sql.index; -import org.elasticsearch.Version; +import org.elasticsearch.xpack.ql.index.EsIndex; +import org.elasticsearch.xpack.ql.index.IndexResolution; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.EsField; import org.elasticsearch.xpack.ql.type.UnsupportedEsField; +import org.elasticsearch.xpack.sql.proto.SqlVersion; import java.util.Map; -import static org.elasticsearch.xpack.ql.index.VersionCompatibilityChecks.isTypeSupportedInVersion; import static org.elasticsearch.xpack.ql.type.DataTypes.isPrimitive; import static org.elasticsearch.xpack.ql.type.Types.propagateUnsupportedType; +import static org.elasticsearch.xpack.sql.index.VersionCompatibilityChecks.isTypeSupportedInVersion; public final class IndexCompatibility { - public static Map compatible(Map mapping, Version version) { + public static Map compatible(Map mapping, SqlVersion version) { for (Map.Entry entry : mapping.entrySet()) { EsField esField = entry.getValue(); DataType dataType = 
esField.getDataType(); @@ -35,12 +37,12 @@ public static Map compatible(Map mapping, Vers return mapping; } - public static EsIndex compatible(EsIndex esIndex, Version version) { + public static EsIndex compatible(EsIndex esIndex, SqlVersion version) { compatible(esIndex.mapping(), version); return esIndex; } - public static IndexResolution compatible(IndexResolution indexResolution, Version version) { + public static IndexResolution compatible(IndexResolution indexResolution, SqlVersion version) { if (indexResolution.isValid()) { compatible(indexResolution.get(), version); } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/VersionCompatibilityChecks.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/index/VersionCompatibilityChecks.java similarity index 58% rename from x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/VersionCompatibilityChecks.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/index/VersionCompatibilityChecks.java index e194f385d1606..bd98c40ac1674 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/VersionCompatibilityChecks.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/index/VersionCompatibilityChecks.java @@ -5,28 +5,28 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.ql.index; +package org.elasticsearch.xpack.sql.index; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; -import org.elasticsearch.Version; import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.sql.proto.SqlVersion; -import static org.elasticsearch.Version.V_8_2_0; -import static org.elasticsearch.Version.V_8_4_0; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.VERSION; +import static org.elasticsearch.xpack.sql.proto.VersionCompatibility.INTRODUCING_UNSIGNED_LONG; +import static org.elasticsearch.xpack.sql.proto.VersionCompatibility.INTRODUCING_VERSION_FIELD_TYPE; +import static org.elasticsearch.xpack.sql.proto.VersionCompatibility.supportsUnsignedLong; +import static org.elasticsearch.xpack.sql.proto.VersionCompatibility.supportsVersionType; public final class VersionCompatibilityChecks { - public static final Version INTRODUCING_UNSIGNED_LONG = V_8_2_0; public static final TransportVersion INTRODUCING_UNSIGNED_LONG_TRANSPORT = TransportVersions.V_8_2_0; - public static final Version INTRODUCING_VERSION_FIELD_TYPE = V_8_4_0; private VersionCompatibilityChecks() {} - public static boolean isTypeSupportedInVersion(DataType dataType, Version version) { + public static boolean isTypeSupportedInVersion(DataType dataType, SqlVersion version) { if (dataType == UNSIGNED_LONG) { return supportsUnsignedLong(version); } @@ -36,21 +36,7 @@ public static boolean isTypeSupportedInVersion(DataType dataType, Version versio return true; } - /** - * Does the provided {@code version} support the unsigned_long type (PR#60050)? - */ - public static boolean supportsUnsignedLong(Version version) { - return INTRODUCING_UNSIGNED_LONG.compareTo(version) <= 0; - } - - /** - * Does the provided {@code version} support the version type (PR#85502)? 
- */ - public static boolean supportsVersionType(Version version) { - return INTRODUCING_VERSION_FIELD_TYPE.compareTo(version) <= 0; - } - - public static @Nullable Version versionIntroducingType(DataType dataType) { + public static @Nullable SqlVersion versionIntroducingType(DataType dataType) { if (dataType == UNSIGNED_LONG) { return INTRODUCING_UNSIGNED_LONG; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java index 516aaa961b5b3..72f9cf2543d53 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java @@ -6,18 +6,17 @@ */ package org.elasticsearch.xpack.sql.plan.logical.command; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.FieldAttribute; import org.elasticsearch.xpack.ql.expression.predicate.regex.LikePattern; -import org.elasticsearch.xpack.ql.index.IndexCompatibility; import org.elasticsearch.xpack.ql.index.IndexResolver; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.EsField; import org.elasticsearch.xpack.ql.type.KeywordEsField; +import org.elasticsearch.xpack.sql.index.IndexCompatibility; import org.elasticsearch.xpack.sql.session.Cursor.Page; import org.elasticsearch.xpack.sql.session.SqlSession; import org.elasticsearch.xpack.sql.type.SqlDataTypes; @@ -91,8 +90,11 @@ public void execute(SqlSession session, ActionListener listener) { List> rows = emptyList(); if (indexResult.isValid()) { rows = new ArrayList<>(); - Version version = 
Version.fromId(session.configuration().version().id); - fillInRows(IndexCompatibility.compatible(indexResult, version).get().mapping(), null, rows); + fillInRows( + IndexCompatibility.compatible(indexResult, session.configuration().version()).get().mapping(), + null, + rows + ); } l.onResponse(of(session, rows)); }) diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumns.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumns.java index 9f57df5190dcc..b78dea5353733 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumns.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumns.java @@ -7,21 +7,21 @@ package org.elasticsearch.xpack.sql.plan.logical.command.sys; import org.apache.lucene.util.Counter; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.Strings; import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.predicate.regex.LikePattern; import org.elasticsearch.xpack.ql.index.EsIndex; -import org.elasticsearch.xpack.ql.index.IndexCompatibility; import org.elasticsearch.xpack.ql.index.IndexResolver; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.EsField; import org.elasticsearch.xpack.ql.util.StringUtils; +import org.elasticsearch.xpack.sql.index.IndexCompatibility; import org.elasticsearch.xpack.sql.plan.logical.command.Command; import org.elasticsearch.xpack.sql.proto.Mode; +import org.elasticsearch.xpack.sql.proto.SqlVersion; import org.elasticsearch.xpack.sql.session.Cursor.Page; import org.elasticsearch.xpack.sql.session.ListCursor; import org.elasticsearch.xpack.sql.session.Rows; @@ -156,7 +156,7 @@ public void 
execute(SqlSession session, ActionListener listener) { tableCat = cluster; } - Version version = Version.fromId(session.configuration().version().id); + SqlVersion version = session.configuration().version(); // special case for '%' (translated to *) if ("*".equals(idx)) { session.indexResolver() diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypes.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypes.java index 66efd1a4ee879..15d1bed5b9a86 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypes.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypes.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.sql.plan.logical.command.sys; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -24,12 +23,12 @@ import static java.util.Arrays.asList; import static java.util.stream.Collectors.toList; -import static org.elasticsearch.xpack.ql.index.VersionCompatibilityChecks.isTypeSupportedInVersion; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; import static org.elasticsearch.xpack.ql.type.DataTypes.SHORT; import static org.elasticsearch.xpack.ql.type.DataTypes.isSigned; import static org.elasticsearch.xpack.ql.type.DataTypes.isString; +import static org.elasticsearch.xpack.sql.index.VersionCompatibilityChecks.isTypeSupportedInVersion; import static org.elasticsearch.xpack.sql.type.SqlDataTypes.metaSqlDataType; import static org.elasticsearch.xpack.sql.type.SqlDataTypes.metaSqlDateTimeSub; import static org.elasticsearch.xpack.sql.type.SqlDataTypes.metaSqlMaximumScale; @@ -85,9 +84,7 @@ public List output() { @Override public final void execute(SqlSession 
session, ActionListener listener) { - Stream values = SqlDataTypes.types() - .stream() - .filter(t -> isTypeSupportedInVersion(t, Version.fromId(session.configuration().version().id))); + Stream values = SqlDataTypes.types().stream().filter(t -> isTypeSupportedInVersion(t, session.configuration().version())); if (type.intValue() != 0) { values = values.filter(t -> type.equals(sqlType(t).getVendorTypeNumber())); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/SqlConfiguration.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/SqlConfiguration.java index 8f66170c0842b..395f12ce3a66a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/SqlConfiguration.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/SqlConfiguration.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.sql.session; -import org.elasticsearch.Version; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilder; @@ -14,6 +13,7 @@ import org.elasticsearch.xpack.sql.action.SqlQueryTask; import org.elasticsearch.xpack.sql.proto.Mode; import org.elasticsearch.xpack.sql.proto.SqlVersion; +import org.elasticsearch.xpack.sql.proto.SqlVersions; import java.time.ZoneId; import java.util.Map; @@ -73,7 +73,7 @@ public SqlConfiguration( this.runtimeMappings = runtimeMappings; this.mode = mode == null ? Mode.PLAIN : mode; this.clientId = clientId; - this.version = version != null ? version : SqlVersion.fromId(Version.CURRENT.id); + this.version = version != null ? 
version : SqlVersions.SERVER_COMPAT_VERSION; this.multiValueFieldLeniency = multiValueFieldLeniency; this.includeFrozenIndices = includeFrozen; this.taskId = taskId; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/SqlSession.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/SqlSession.java index 040807981a389..c32155f6daeb6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/SqlSession.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/SqlSession.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.sql.session; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.ParentTaskAssigningClient; @@ -15,7 +14,6 @@ import org.elasticsearch.xpack.ql.analyzer.PreAnalyzer.PreAnalysis; import org.elasticsearch.xpack.ql.analyzer.TableInfo; import org.elasticsearch.xpack.ql.expression.function.FunctionRegistry; -import org.elasticsearch.xpack.ql.index.IndexCompatibility; import org.elasticsearch.xpack.ql.index.IndexResolution; import org.elasticsearch.xpack.ql.index.IndexResolver; import org.elasticsearch.xpack.ql.index.MappingException; @@ -26,6 +24,7 @@ import org.elasticsearch.xpack.sql.analysis.analyzer.AnalyzerContext; import org.elasticsearch.xpack.sql.analysis.analyzer.Verifier; import org.elasticsearch.xpack.sql.execution.PlanExecutor; +import org.elasticsearch.xpack.sql.index.IndexCompatibility; import org.elasticsearch.xpack.sql.optimizer.Optimizer; import org.elasticsearch.xpack.sql.parser.SqlParser; import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan; @@ -119,7 +118,7 @@ public void analyzedPlan(LogicalPlan parsed, boolean verify, ActionListener plan(sql)); assertThat(ex.getMessage(), containsString("Found 1 problem\nline 1:8: Cannot use field [unsigned_long]")); - for (Version v : List.of(INTRODUCING_UNSIGNED_LONG, 
postUnsignedLong)) { + for (SqlVersion v : List.of(INTRODUCING_UNSIGNED_LONG, POST_UNSIGNED_LONG)) { analyzer = analyzer( - SqlTestUtils.randomConfiguration(SqlVersion.fromId(v.id)), + SqlTestUtils.randomConfiguration(v), functionRegistry, loadCompatibleIndexResolution("mapping-numeric.json", v), verifier @@ -357,23 +358,21 @@ public void testVersionTypeVersionCompatibility() { String queryWithAlias = "SELECT version_number AS version_number FROM test"; String queryWithCast = "SELECT CONCAT(version_number::string, '-SNAPSHOT')::version AS version_number FROM test"; - Version preVersion = Version.fromId(INTRODUCING_VERSION_FIELD_TYPE.id - SqlVersion.MINOR_MULTIPLIER); - Version postVersion = Version.fromId(INTRODUCING_VERSION_FIELD_TYPE.id + SqlVersion.MINOR_MULTIPLIER); - SqlConfiguration sqlConfig = SqlTestUtils.randomConfiguration(SqlVersion.fromId(preVersion.id)); + SqlConfiguration sqlConfig = SqlTestUtils.randomConfiguration(PRE_VERSION_FIELD); for (String sql : List.of(query, queryWithCastLiteral, queryWithAlias, queryWithCast)) { analyzer = analyzer( sqlConfig, functionRegistry, - loadCompatibleIndexResolution("mapping-version.json", preVersion), + loadCompatibleIndexResolution("mapping-version.json", PRE_VERSION_FIELD), new Verifier(new Metrics()) ); VerificationException ex = expectThrows(VerificationException.class, () -> plan(sql)); assertThat(ex.getMessage(), containsString("Cannot use field [version_number]")); - for (Version v : List.of(INTRODUCING_VERSION_FIELD_TYPE, postVersion)) { + for (SqlVersion v : List.of(INTRODUCING_VERSION_FIELD_TYPE, POST_VERSION_FIELD)) { analyzer = analyzer( - SqlTestUtils.randomConfiguration(SqlVersion.fromId(v.id)), + SqlTestUtils.randomConfiguration(v), functionRegistry, loadCompatibleIndexResolution("mapping-version.json", v), verifier @@ -391,12 +390,10 @@ public void testVersionTypeVersionCompatibility() { } public void testNonProjectedUnsignedLongVersionCompatibility() { - Version preUnsignedLong = 
Version.fromId(INTRODUCING_UNSIGNED_LONG.id - SqlVersion.MINOR_MULTIPLIER); - SqlConfiguration sqlConfig = SqlTestUtils.randomConfiguration(SqlVersion.fromId(preUnsignedLong.id)); analyzer = analyzer( - sqlConfig, + SqlTestUtils.randomConfiguration(PRE_UNSIGNED_LONG), functionRegistry, - loadCompatibleIndexResolution("mapping-numeric.json", preUnsignedLong), + loadCompatibleIndexResolution("mapping-numeric.json", PRE_UNSIGNED_LONG), new Verifier(new Metrics()) ); @@ -426,24 +423,17 @@ public void testNestedUnsignedLongVersionCompatibility() { """; String sql = "SELECT container.ul as unsigned_long FROM test"; - Version preUnsignedLong = Version.fromId(INTRODUCING_UNSIGNED_LONG.id - SqlVersion.MINOR_MULTIPLIER); analyzer = analyzer( - SqlTestUtils.randomConfiguration(SqlVersion.fromId(preUnsignedLong.id)), + SqlTestUtils.randomConfiguration(PRE_UNSIGNED_LONG), functionRegistry, - compatibleIndexResolution(props, preUnsignedLong), + compatibleIndexResolution(props, PRE_UNSIGNED_LONG), new Verifier(new Metrics()) ); VerificationException ex = expectThrows(VerificationException.class, () -> plan(sql)); assertThat(ex.getMessage(), containsString("Cannot use field [container.ul] with unsupported type [UNSIGNED_LONG]")); - Version postUnsignedLong = Version.fromId(INTRODUCING_UNSIGNED_LONG.id + SqlVersion.MINOR_MULTIPLIER); - for (Version v : List.of(INTRODUCING_UNSIGNED_LONG, postUnsignedLong)) { - analyzer = analyzer( - SqlTestUtils.randomConfiguration(SqlVersion.fromId(v.id)), - functionRegistry, - compatibleIndexResolution(props, v), - verifier - ); + for (SqlVersion v : List.of(INTRODUCING_UNSIGNED_LONG, POST_UNSIGNED_LONG)) { + analyzer = analyzer(SqlTestUtils.randomConfiguration(v), functionRegistry, compatibleIndexResolution(props, v), verifier); LogicalPlan plan = plan(sql); assertThat(plan, instanceOf(Project.class)); Project p = (Project) plan; @@ -456,17 +446,15 @@ public void testNestedUnsignedLongVersionCompatibility() { } public void 
testUnsignedLongStarExpandedVersionControlled() { - SqlVersion preUnsignedLong = SqlVersion.fromId(INTRODUCING_UNSIGNED_LONG.id - SqlVersion.MINOR_MULTIPLIER); - SqlVersion postUnsignedLong = SqlVersion.fromId(INTRODUCING_UNSIGNED_LONG.id + SqlVersion.MINOR_MULTIPLIER); String query = "SELECT * FROM test"; - for (SqlVersion version : List.of(preUnsignedLong, SqlVersion.fromId(INTRODUCING_UNSIGNED_LONG.id), postUnsignedLong)) { + for (SqlVersion version : List.of(PRE_UNSIGNED_LONG, INTRODUCING_UNSIGNED_LONG, POST_UNSIGNED_LONG)) { SqlConfiguration config = SqlTestUtils.randomConfiguration(version); // the mapping is mutated when making it "compatible", so it needs to be reloaded inside the loop. analyzer = analyzer( config, functionRegistry, - loadCompatibleIndexResolution("mapping-numeric.json", Version.fromId(version.id)), + loadCompatibleIndexResolution("mapping-numeric.json", version), new Verifier(new Metrics()) ); @@ -475,7 +463,7 @@ public void testUnsignedLongStarExpandedVersionControlled() { Project p = (Project) plan; List projectedDataTypes = p.projections().stream().map(Expression::dataType).toList(); - assertEquals(isTypeSupportedInVersion(UNSIGNED_LONG, Version.fromId(version.id)), projectedDataTypes.contains(UNSIGNED_LONG)); + assertEquals(isTypeSupportedInVersion(UNSIGNED_LONG, version), projectedDataTypes.contains(UNSIGNED_LONG)); } } @@ -517,11 +505,11 @@ private static IndexResolution loadIndexResolution(String mappingName) { return IndexResolution.valid(index); } - private static IndexResolution loadCompatibleIndexResolution(String mappingName, Version version) { + private static IndexResolution loadCompatibleIndexResolution(String mappingName, SqlVersion version) { return IndexCompatibility.compatible(loadIndexResolution(mappingName), version); } - private static IndexResolution compatibleIndexResolution(String properties, Version version) { + private static IndexResolution compatibleIndexResolution(String properties, SqlVersion version) { Map 
mapping = Types.fromEs( DefaultDataTypeRegistry.INSTANCE, XContentHelper.convertToMap(JsonXContent.jsonXContent, properties, randomBoolean()) diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java index 6d7822e5619cc..8388dabe23592 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java @@ -150,7 +150,7 @@ public void testMetaFieldsAreIgnored() throws Exception { addFieldCaps(fieldCaps, "text", "keyword", true, true); String wildcard = "*"; - IndexResolution resolution = mergedMappings(wildcard, new String[] { "index" }, fieldCaps); + IndexResolution resolution = mergedMappings(wildcard, new String[] { "index" }, fieldCaps); assertTrue(resolution.isValid()); EsIndex esIndex = resolution.get(); @@ -160,7 +160,7 @@ public void testMetaFieldsAreIgnored() throws Exception { assertNull(esIndex.mapping().get("_doc_count")); assertEquals(INTEGER, esIndex.mapping().get("_not_meta_field").getDataType()); assertEquals(KEYWORD, esIndex.mapping().get("text").getDataType()); - assertEquals(Set.of("index"), resolution.get().concreteIndices()); + assertEquals(Set.of("index"), resolution.get().concreteIndices()); } public void testFlattenedHiddenSubfield() throws Exception { @@ -175,12 +175,12 @@ public void testFlattenedHiddenSubfield() throws Exception { addFieldCaps(fieldCaps, "text", "keyword", true, true); String wildcard = "*"; - IndexResolution resolution = mergedMappings(wildcard, new String[] { "index" }, fieldCaps); + IndexResolution resolution = mergedMappings(wildcard, new String[] { "index" }, fieldCaps); assertTrue(resolution.isValid()); EsIndex esIndex =
resolution.get(); assertEquals(wildcard, esIndex.name()); - assertEquals(Set.of("index"), resolution.get().concreteIndices()); + assertEquals(Set.of("index"), resolution.get().concreteIndices()); assertEquals(UNSUPPORTED, esIndex.mapping().get("some_field").getDataType()); assertEquals(UNSUPPORTED, esIndex.mapping().get("some_field").getProperties().get("_keyed").getDataType()); assertEquals(OBJECT, esIndex.mapping().get("nested_field").getDataType()); @@ -205,12 +205,12 @@ public void testPropagateUnsupportedTypeToSubFields() throws Exception { addFieldCaps(fieldCaps, "a.b.c.e", "foo", true, true); String wildcard = "*"; - IndexResolution resolution = mergedMappings(wildcard, new String[] { "index" }, fieldCaps); + IndexResolution resolution = mergedMappings(wildcard, new String[] { "index" }, fieldCaps); assertTrue(resolution.isValid()); EsIndex esIndex = resolution.get(); assertEquals(wildcard, esIndex.name()); - assertEquals(Set.of("index"), resolution.get().concreteIndices()); + assertEquals(Set.of("index"), resolution.get().concreteIndices()); assertEquals(TEXT, esIndex.mapping().get("a").getDataType()); assertEquals(UNSUPPORTED, esIndex.mapping().get("a").getProperties().get("b").getDataType()); assertEquals(UNSUPPORTED, esIndex.mapping().get("a").getProperties().get("b").getProperties().get("c").getDataType()); @@ -241,12 +241,12 @@ public void testRandomMappingFieldTypeMappedAsUnsupported() throws Exception { addFieldCaps(fieldCaps, "text", "keyword", true, true); String wildcard = "*"; - IndexResolution resolution = mergedMappings(wildcard, new String[] { "index" }, fieldCaps); + IndexResolution resolution = mergedMappings(wildcard, new String[] { "index" }, fieldCaps); assertTrue(resolution.isValid()); EsIndex esIndex = resolution.get(); assertEquals(wildcard, esIndex.name()); - assertEquals(Set.of("index"), resolution.get().concreteIndices());
+ assertEquals(Set.of("index"), resolution.get().concreteIndices()); assertEquals(UNSUPPORTED, esIndex.mapping().get("some_field").getDataType()); assertEquals(OBJECT, esIndex.mapping().get("nested_field").getDataType()); assertEquals(UNSUPPORTED, esIndex.mapping().get("nested_field").getProperties().get("sub_field1").getDataType()); @@ -363,7 +363,7 @@ public void testMultipleCompatibleIndicesWithDifferentFields() { Map mapping = Maps.newMapWithExpectedSize(1); String fieldName = "field" + (i + 1); mapping.put(fieldName, new KeywordEsField(fieldName)); - expectedIndices[i] = new EsIndex("index" + (i + 1), mapping); + expectedIndices[i] = new EsIndex("index" + (i + 1), mapping); } Arrays.sort(expectedIndices, Comparator.comparing(EsIndex::name)); @@ -385,7 +385,7 @@ public void testMergeConcreteIndices() { Map mapping = Maps.newMapWithExpectedSize(1); String fieldName = "field" + (i + 1); mapping.put(fieldName, new KeywordEsField(fieldName)); - String indexName = "index" + (i + 1); + String indexName = "index" + (i + 1); expectedIndices[i] = new EsIndex(indexName, mapping, Set.of(indexName)); indexNames.add(indexName); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumnsTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumnsTests.java index 8e0edc54e6fbf..48fce8dfbdb0f 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumnsTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumnsTests.java @@ -7,10 +7,9 @@ package org.elasticsearch.xpack.sql.plan.logical.command; -import org.elasticsearch.Version; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.ql.index.IndexCompatibility; import org.elasticsearch.xpack.ql.type.EsField; +import
org.elasticsearch.xpack.sql.index.IndexCompatibility; import org.elasticsearch.xpack.sql.proto.SqlVersion; import java.sql.JDBCType; @@ -19,8 +18,6 @@ import java.util.Map; import static java.util.Arrays.asList; -import static org.elasticsearch.xpack.ql.index.VersionCompatibilityChecks.supportsUnsignedLong; -import static org.elasticsearch.xpack.ql.index.VersionCompatibilityChecks.supportsVersionType; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; import static org.elasticsearch.xpack.ql.type.DataTypes.FLOAT; @@ -32,11 +29,13 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSUPPORTED; import static org.elasticsearch.xpack.ql.type.DataTypes.VERSION; -import static org.elasticsearch.xpack.sql.plan.logical.command.sys.SysColumnsTests.UNSIGNED_LONG_TEST_VERSIONS; -import static org.elasticsearch.xpack.sql.plan.logical.command.sys.SysColumnsTests.VERSION_FIELD_TEST_VERSIONS; +import static org.elasticsearch.xpack.sql.proto.VersionCompatibility.supportsUnsignedLong; +import static org.elasticsearch.xpack.sql.proto.VersionCompatibility.supportsVersionType; import static org.elasticsearch.xpack.sql.type.SqlDataTypes.GEO_POINT; import static org.elasticsearch.xpack.sql.type.SqlDataTypes.GEO_SHAPE; import static org.elasticsearch.xpack.sql.types.SqlTypesTests.loadMapping; +import static org.elasticsearch.xpack.sql.util.SqlVersionUtils.UNSIGNED_LONG_TEST_VERSIONS; +import static org.elasticsearch.xpack.sql.util.SqlVersionUtils.VERSION_FIELD_TEST_VERSIONS; public class ShowColumnsTests extends ESTestCase { @@ -94,8 +93,8 @@ public void testUnsignedLongFiltering() { List> rows = new ArrayList<>(); // mapping's mutated by IndexCompatibility.compatible, needs to stay in the loop Map mapping = loadMapping("mapping-multi-field-variation.json", true); - ShowColumns.fillInRows(IndexCompatibility.compatible(mapping, 
Version.fromId(version.id)), null, rows); - assertTrue((supportsUnsignedLong(Version.fromId(version.id)) && rows.contains(rowSupported)) || rows.contains(rowUnsupported)); + ShowColumns.fillInRows(IndexCompatibility.compatible(mapping, version), null, rows); + assertTrue((supportsUnsignedLong(version) && rows.contains(rowSupported)) || rows.contains(rowUnsupported)); } } @@ -106,8 +105,8 @@ public void testVersionFieldFiltering() { List> rows = new ArrayList<>(); // mapping's mutated by IndexCompatibility.compatible, needs to stay in the loop Map mapping = loadMapping("mapping-multi-field-variation.json", true); - ShowColumns.fillInRows(IndexCompatibility.compatible(mapping, Version.fromId(version.id)), null, rows); - assertTrue((supportsVersionType(Version.fromId(version.id)) && rows.contains(rowSupported)) || rows.contains(rowUnsupported)); + ShowColumns.fillInRows(IndexCompatibility.compatible(mapping, version), null, rows); + assertTrue((supportsVersionType(version) && rows.contains(rowSupported)) || rows.contains(rowUnsupported)); } } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumnsTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumnsTests.java index e0af40bbc4ce5..a44041ce04610 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumnsTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumnsTests.java @@ -6,23 +6,23 @@ */ package org.elasticsearch.xpack.sql.plan.logical.command.sys; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.ql.index.EsIndex; -import org.elasticsearch.xpack.ql.index.IndexCompatibility; import 
org.elasticsearch.xpack.ql.index.IndexResolution; import org.elasticsearch.xpack.ql.index.IndexResolver; import org.elasticsearch.xpack.ql.type.EsField; import org.elasticsearch.xpack.sql.action.Protocol; import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer; +import org.elasticsearch.xpack.sql.index.IndexCompatibility; import org.elasticsearch.xpack.sql.parser.SqlParser; import org.elasticsearch.xpack.sql.plan.logical.command.Command; import org.elasticsearch.xpack.sql.proto.Mode; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import org.elasticsearch.xpack.sql.proto.SqlVersion; +import org.elasticsearch.xpack.sql.proto.SqlVersions; import org.elasticsearch.xpack.sql.session.Cursor; import org.elasticsearch.xpack.sql.session.SchemaRowSet; import org.elasticsearch.xpack.sql.session.SqlConfiguration; @@ -42,14 +42,14 @@ import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.ql.TestUtils.UTC; -import static org.elasticsearch.xpack.ql.index.VersionCompatibilityChecks.INTRODUCING_UNSIGNED_LONG; -import static org.elasticsearch.xpack.ql.index.VersionCompatibilityChecks.INTRODUCING_VERSION_FIELD_TYPE; -import static org.elasticsearch.xpack.ql.index.VersionCompatibilityChecks.isTypeSupportedInVersion; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.VERSION; import static org.elasticsearch.xpack.sql.analysis.analyzer.AnalyzerTestUtils.analyzer; +import static org.elasticsearch.xpack.sql.index.VersionCompatibilityChecks.isTypeSupportedInVersion; import static org.elasticsearch.xpack.sql.proto.Mode.isDriver; import static org.elasticsearch.xpack.sql.types.SqlTypesTests.loadMapping; +import static org.elasticsearch.xpack.sql.util.SqlVersionUtils.UNSIGNED_LONG_TEST_VERSIONS; +import static org.elasticsearch.xpack.sql.util.SqlVersionUtils.VERSION_FIELD_TEST_VERSIONS; import static 
org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.eq; @@ -59,20 +59,6 @@ public class SysColumnsTests extends ESTestCase { - public static List UNSIGNED_LONG_TEST_VERSIONS = List.of( - SqlVersion.fromId(INTRODUCING_UNSIGNED_LONG.id - SqlVersion.MINOR_MULTIPLIER), - SqlVersion.fromId(INTRODUCING_UNSIGNED_LONG.id), - SqlVersion.fromId(INTRODUCING_UNSIGNED_LONG.id + SqlVersion.MINOR_MULTIPLIER), - SqlVersion.fromId(Version.CURRENT.id) - ); - - public static List VERSION_FIELD_TEST_VERSIONS = List.of( - SqlVersion.fromId(INTRODUCING_VERSION_FIELD_TYPE.id - SqlVersion.MINOR_MULTIPLIER), - SqlVersion.fromId(INTRODUCING_VERSION_FIELD_TYPE.id), - SqlVersion.fromId(INTRODUCING_VERSION_FIELD_TYPE.id + SqlVersion.MINOR_MULTIPLIER), - SqlVersion.fromId(Version.CURRENT.id) - ); - private static final String CLUSTER_NAME = "cluster"; private static final Map MAPPING1 = loadMapping("mapping-multi-field-with-nested.json", true); private static final Map MAPPING2 = loadMapping("mapping-multi-field-variation.json", true); @@ -84,7 +70,7 @@ public class SysColumnsTests extends ESTestCase { private void sysColumnsInMode(Mode mode) { Class typeClass = mode == Mode.ODBC ? 
Short.class : Integer.class; List> rows = new ArrayList<>(); - SysColumns.fillInRows("test", "index", MAPPING2, null, rows, null, mode); + SysColumns.fillInRows("test", "org/elasticsearch/xpack/sql/index", MAPPING2, null, rows, null, mode); assertEquals(FIELD_COUNT2, rows.size()); assertEquals(24, rows.get(0).size()); @@ -162,8 +148,8 @@ public void testUnsignedLongFiltering() { Map mapping = loadMapping("mapping-multi-field-variation.json", true); SysColumns.fillInRows( "test", - "index", - IndexCompatibility.compatible(mapping, Version.fromId(version.id)), + "org/elasticsearch/xpack/sql/index", + IndexCompatibility.compatible(mapping, version), null, rows, null, @@ -171,7 +157,7 @@ public void testUnsignedLongFiltering() { ); List types = rows.stream().map(row -> name(row).toString()).collect(Collectors.toList()); assertEquals( - isTypeSupportedInVersion(UNSIGNED_LONG, Version.fromId(version.id)), + isTypeSupportedInVersion(UNSIGNED_LONG, version), types.contains(UNSIGNED_LONG.toString().toLowerCase(Locale.ROOT)) ); } @@ -186,18 +172,15 @@ public void testVersionTypeFiltering() { Map mapping = loadMapping("mapping-multi-field-variation.json", true); SysColumns.fillInRows( "test", - "index", - IndexCompatibility.compatible(mapping, Version.fromId(version.id)), + "org/elasticsearch/xpack/sql/index", + IndexCompatibility.compatible(mapping, version), null, rows, null, mode ); List types = rows.stream().map(row -> name(row).toString()).collect(Collectors.toList()); - assertEquals( - isTypeSupportedInVersion(VERSION, Version.fromId(version.id)), - types.contains(VERSION.toString().toLowerCase(Locale.ROOT)) - ); + assertEquals(isTypeSupportedInVersion(VERSION, version), types.contains(VERSION.toString().toLowerCase(Locale.ROOT))); } } } @@ -313,7 +296,7 @@ private int executeCommandInOdbcModeAndCountRows(String sql) { null, Mode.ODBC, null, - SqlVersion.fromId(Version.CURRENT.id), + SqlVersions.SERVER_COMPAT_VERSION, null, null, false, @@ -360,7 +343,7 @@ private void 
executeCommand( null, mode, null, - SqlVersion.fromId(Version.CURRENT.id), + SqlVersions.SERVER_COMPAT_VERSION, null, null, false, diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java index d2b2e99d566c8..789d03efbc3f9 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java @@ -6,8 +6,8 @@ */ package org.elasticsearch.xpack.sql.plan.logical.command.sys; -import org.elasticsearch.Version; import org.elasticsearch.action.support.ActionTestUtils; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.ql.index.EsIndex; @@ -34,19 +34,20 @@ import java.util.Set; import static java.util.Arrays.asList; -import static org.elasticsearch.xpack.ql.index.VersionCompatibilityChecks.isTypeSupportedInVersion; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.VERSION; import static org.elasticsearch.xpack.sql.analysis.analyzer.AnalyzerTestUtils.analyzer; -import static org.elasticsearch.xpack.sql.plan.logical.command.sys.SysColumnsTests.UNSIGNED_LONG_TEST_VERSIONS; -import static org.elasticsearch.xpack.sql.plan.logical.command.sys.SysColumnsTests.VERSION_FIELD_TEST_VERSIONS; +import static org.elasticsearch.xpack.sql.index.VersionCompatibilityChecks.isTypeSupportedInVersion; +import static org.elasticsearch.xpack.sql.proto.SqlVersions.SERVER_COMPAT_VERSION; +import static org.elasticsearch.xpack.sql.util.SqlVersionUtils.UNSIGNED_LONG_TEST_VERSIONS; +import static org.elasticsearch.xpack.sql.util.SqlVersionUtils.VERSION_FIELD_TEST_VERSIONS; import static 
org.mockito.Mockito.mock; public class SysTypesTests extends ESTestCase { private final SqlParser parser = new SqlParser(); - private Tuple sql(String sql, Mode mode, SqlVersion version) { + private Tuple sql(String sql, Mode mode, @Nullable SqlVersion version) { SqlConfiguration configuration = new SqlConfiguration( DateUtils.UTC, null, @@ -76,7 +77,7 @@ private Tuple sql(String sql, Mode mode, SqlVersion version } private Tuple sql(String sql) { - return sql(sql, randomFrom(Mode.values()), randomBoolean() ? null : SqlVersion.fromId(Version.CURRENT.id)); + return sql(sql, randomFrom(Mode.values()), randomBoolean() ? null : SERVER_COMPAT_VERSION); } public void testSysTypes() { @@ -154,7 +155,7 @@ public void testUnsignedLongFiltering() { List types = new ArrayList<>(); r.forEachRow(rv -> types.add((String) rv.column(0))); assertEquals( - isTypeSupportedInVersion(UNSIGNED_LONG, Version.fromId(cmd.v2().configuration().version().id)), + isTypeSupportedInVersion(UNSIGNED_LONG, cmd.v2().configuration().version()), types.contains(UNSIGNED_LONG.toString()) ); })); @@ -173,10 +174,7 @@ public void testVersionTypeFiltering() { SchemaRowSet r = (SchemaRowSet) p.rowSet(); List types = new ArrayList<>(); r.forEachRow(rv -> types.add((String) rv.column(0))); - assertEquals( - isTypeSupportedInVersion(VERSION, Version.fromId(cmd.v2().configuration().version().id)), - types.contains(VERSION.toString()) - ); + assertEquals(isTypeSupportedInVersion(VERSION, cmd.v2().configuration().version()), types.contains(VERSION.toString())); })); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/TextFormatTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/TextFormatTests.java index 7224b357fbd7d..10e8496b12304 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/TextFormatTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/TextFormatTests.java @@ -28,7 +28,7 @@ import 
static org.elasticsearch.xpack.sql.plugin.TextFormat.CSV; import static org.elasticsearch.xpack.sql.plugin.TextFormat.PLAIN_TEXT; import static org.elasticsearch.xpack.sql.plugin.TextFormat.TSV; -import static org.elasticsearch.xpack.sql.proto.SqlVersion.DATE_NANOS_SUPPORT_VERSION; +import static org.elasticsearch.xpack.sql.proto.VersionCompatibility.INTRODUCING_DATE_NANOS; import static org.elasticsearch.xpack.sql.proto.formatter.SimpleFormatter.FormatOption.TEXT; public class TextFormatTests extends ESTestCase { @@ -179,7 +179,7 @@ public void testPlainTextEmptyCursorWithoutColumns() { PLAIN_TEXT.format( req(), new BasicFormatter(emptyList(), emptyList(), TEXT), - new SqlQueryResponse(StringUtils.EMPTY, Mode.JDBC, DATE_NANOS_SUPPORT_VERSION, false, null, emptyList()) + new SqlQueryResponse(StringUtils.EMPTY, Mode.JDBC, INTRODUCING_DATE_NANOS, false, null, emptyList()) ).v1() ); } @@ -188,9 +188,9 @@ private static SqlQueryResponse emptyData() { return new SqlQueryResponse( StringUtils.EMPTY, Mode.JDBC, - DATE_NANOS_SUPPORT_VERSION, + INTRODUCING_DATE_NANOS, false, - singletonList(new ColumnInfo("index", "name", "keyword")), + singletonList(new ColumnInfo("org/elasticsearch/xpack/sql/index", "name", "keyword")), emptyList() ); } @@ -198,29 +198,29 @@ private static SqlQueryResponse emptyData() { private static SqlQueryResponse regularData() { // headers List headers = new ArrayList<>(); - headers.add(new ColumnInfo("index", "string", "keyword")); - headers.add(new ColumnInfo("index", "number", "integer")); + headers.add(new ColumnInfo("org/elasticsearch/xpack/sql/index", "string", "keyword")); + headers.add(new ColumnInfo("org/elasticsearch/xpack/sql/index", "number", "integer")); // values List> values = new ArrayList<>(); values.add(asList("Along The River Bank", 11 * 60 + 48)); values.add(asList("Mind Train", 4 * 60 + 40)); - return new SqlQueryResponse(null, Mode.JDBC, DATE_NANOS_SUPPORT_VERSION, false, headers, values); + return new SqlQueryResponse(null, 
Mode.JDBC, INTRODUCING_DATE_NANOS, false, headers, values); } private static SqlQueryResponse escapedData() { // headers List headers = new ArrayList<>(); - headers.add(new ColumnInfo("index", "first", "keyword")); - headers.add(new ColumnInfo("index", "\"special\"", "keyword")); + headers.add(new ColumnInfo("org/elasticsearch/xpack/sql/index", "first", "keyword")); + headers.add(new ColumnInfo("org/elasticsearch/xpack/sql/index", "\"special\"", "keyword")); // values List> values = new ArrayList<>(); values.add(asList("normal", "\"quo\"ted\",\n")); values.add(asList("commas", "a,b,c,\n,d,e,\t\n")); - return new SqlQueryResponse(null, Mode.JDBC, DATE_NANOS_SUPPORT_VERSION, false, headers, values); + return new SqlQueryResponse(null, Mode.JDBC, INTRODUCING_DATE_NANOS, false, headers, values); } private static RestRequest req() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/util/SqlVersionUtils.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/util/SqlVersionUtils.java new file mode 100644 index 0000000000000..016ee661c5054 --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/util/SqlVersionUtils.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.sql.util; + +import org.elasticsearch.xpack.sql.proto.SqlVersion; +import org.elasticsearch.xpack.sql.proto.SqlVersions; + +import java.util.List; + +import static org.elasticsearch.xpack.sql.proto.SqlVersions.SERVER_COMPAT_VERSION; +import static org.elasticsearch.xpack.sql.proto.VersionCompatibility.INTRODUCING_UNSIGNED_LONG; +import static org.elasticsearch.xpack.sql.proto.VersionCompatibility.INTRODUCING_VERSION_FIELD_TYPE; + +public final class SqlVersionUtils { + + public static final SqlVersion PRE_UNSIGNED_LONG = SqlVersions.getPreviousVersion(INTRODUCING_UNSIGNED_LONG); + public static final SqlVersion POST_UNSIGNED_LONG = SqlVersions.getNextVersion(INTRODUCING_UNSIGNED_LONG); + public static final SqlVersion PRE_VERSION_FIELD = SqlVersions.getPreviousVersion(INTRODUCING_VERSION_FIELD_TYPE); + public static final SqlVersion POST_VERSION_FIELD = SqlVersions.getNextVersion(INTRODUCING_VERSION_FIELD_TYPE); + + public static List UNSIGNED_LONG_TEST_VERSIONS = List.of( + PRE_UNSIGNED_LONG, + INTRODUCING_UNSIGNED_LONG, + POST_UNSIGNED_LONG, + SERVER_COMPAT_VERSION + ); + + public static List VERSION_FIELD_TEST_VERSIONS = List.of( + PRE_VERSION_FIELD, + INTRODUCING_VERSION_FIELD_TYPE, + POST_VERSION_FIELD, + SERVER_COMPAT_VERSION + ); +} From f77611c109b470759f50ccff3630d4b3d118cfa0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Iv=C3=A1n=20Cea=20Fontenla?= Date: Fri, 11 Oct 2024 19:08:34 +0200 Subject: [PATCH 010/449] Replace "::" casts to explicit casting functions (#114639) Fixes https://github.com/elastic/elasticsearch/issues/114613 Those tests were added with `::` casts, which don't work in older versions. As they aren't testing anything around those casts, I'm replacing them with `TO_()` functions to let them work everywhere. 
--- .../src/main/resources/date.csv-spec | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 1fdb6150a0e81..36035c48f182c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -369,7 +369,7 @@ date1:date | dd_ms:integer evalDateDiffMonthAsWhole0Months -ROW from="2023-12-31T23:59:59.999Z"::DATETIME, to="2024-01-01T00:00:00"::DATETIME +ROW from=TO_DATETIME("2023-12-31T23:59:59.999Z"), to=TO_DATETIME("2024-01-01T00:00:00") | EVAL msecs=DATE_DIFF("milliseconds", from, to), months=DATE_DIFF("month", from, to) ; @@ -380,7 +380,7 @@ ROW from="2023-12-31T23:59:59.999Z"::DATETIME, to="2024-01-01T00:00:00"::DATETIM evalDateDiffMonthAsWhole1Month -ROW from="2023-12-31T23:59:59.999Z"::DATETIME, to="2024-02-01T00:00:00"::DATETIME +ROW from=TO_DATETIME("2023-12-31T23:59:59.999Z"), to=TO_DATETIME("2024-02-01T00:00:00") | EVAL secs=DATE_DIFF("seconds", from, to), months=DATE_DIFF("month", from, to) ; @@ -392,7 +392,7 @@ ROW from="2023-12-31T23:59:59.999Z"::DATETIME, to="2024-02-01T00:00:00"::DATETIM evalDateDiffYearAsWhole0Years required_capability: date_diff_year_calendarial -ROW from="2023-12-31T23:59:59.999Z"::DATETIME, to="2024-01-01T00:00:00"::DATETIME +ROW from=TO_DATETIME("2023-12-31T23:59:59.999Z"), to=TO_DATETIME("2024-01-01T00:00:00") | EVAL msecs=DATE_DIFF("milliseconds", from, to), years=DATE_DIFF("year", from, to) ; @@ -403,7 +403,7 @@ ROW from="2023-12-31T23:59:59.999Z"::DATETIME, to="2024-01-01T00:00:00"::DATETIM evalDateDiffYearAsWhole1Year required_capability: date_diff_year_calendarial -ROW from="2023-12-31T23:59:59.999Z"::DATETIME, to="2025-01-01T00:00:00"::DATETIME +ROW from=TO_DATETIME("2023-12-31T23:59:59.999Z"), to=TO_DATETIME("2025-01-01T00:00:00") | EVAL secs=DATE_DIFF("seconds", 
from, to), years=DATE_DIFF("year", from, to) ; @@ -414,7 +414,7 @@ ROW from="2023-12-31T23:59:59.999Z"::DATETIME, to="2025-01-01T00:00:00"::DATETIM evalDateDiffYearAsWhole1Year required_capability: date_diff_year_calendarial -ROW from="2024-01-01T00:00:00Z"::DATETIME, to="2025-01-01T00:00:00"::DATETIME +ROW from=TO_DATETIME("2024-01-01T00:00:00Z"), to=TO_DATETIME("2025-01-01T00:00:00") | EVAL secs=DATE_DIFF("seconds", from, to), years=DATE_DIFF("year", from, to) ; @@ -426,9 +426,9 @@ evalDateDiffYearForDocs required_capability: date_diff_year_calendarial // tag::evalDateDiffYearForDocs[] -ROW end_23="2023-12-31T23:59:59.999Z"::DATETIME, - start_24="2024-01-01T00:00:00.000Z"::DATETIME, - end_24="2024-12-31T23:59:59.999"::DATETIME +ROW end_23=TO_DATETIME("2023-12-31T23:59:59.999Z"), + start_24=TO_DATETIME("2024-01-01T00:00:00.000Z"), + end_24=TO_DATETIME("2024-12-31T23:59:59.999") | EVAL end23_to_start24=DATE_DIFF("year", end_23, start_24) | EVAL end23_to_end24=DATE_DIFF("year", end_23, end_24) | EVAL start_to_end_24=DATE_DIFF("year", start_24, end_24) From f3cd890f3ea08e40d4cb49482bdea0f6e4afb13a Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Fri, 11 Oct 2024 20:58:53 +0300 Subject: [PATCH 011/449] Second parsing pass tracks array scopes properly (#114621) --- muted-tests.yml | 9 ------ .../index/mapper/DocumentParser.java | 9 ++++-- .../mapper/IgnoredSourceFieldMapperTests.java | 31 +++++++++++++++++++ 3 files changed, 38 insertions(+), 11 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 5d14cabdd46ce..e5fa7966dcd88 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -372,15 +372,6 @@ tests: - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT method: test {p0=synonyms/60_synonym_rule_get/Synonym rule not found} issue: https://github.com/elastic/elasticsearch/issues/114444 -- class: 
org.elasticsearch.datastreams.logsdb.qa.LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT - method: testTermsAggregation - issue: https://github.com/elastic/elasticsearch/issues/114554 -- class: org.elasticsearch.datastreams.logsdb.qa.LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT - method: testTermsQuery - issue: https://github.com/elastic/elasticsearch/issues/114563 -- class: org.elasticsearch.datastreams.logsdb.qa.LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT - method: testMatchAllQuery - issue: https://github.com/elastic/elasticsearch/issues/114607 - class: org.elasticsearch.xpack.esql.optimizer.PhysicalPlanOptimizerTests method: testPushSpatialIntersectsEvalToSource {default} issue: https://github.com/elastic/elasticsearch/issues/114627 diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 0ff754d953934..bac987a3df96d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -208,6 +208,7 @@ private static List parseDocForMissingValues XContentParser parser = context.parser(); XContentParser.Token currentToken = parser.nextToken(); List path = new ArrayList<>(); + List isObjectInPath = new ArrayList<>(); // Tracks if path components correspond to an object or an array. 
String fieldName = null; while (currentToken != null) { while (currentToken != XContentParser.Token.FIELD_NAME) { @@ -218,11 +219,16 @@ private static List parseDocForMissingValues parser.skipChildren(); } else { path.add(fieldName); + isObjectInPath.add(currentToken == XContentParser.Token.START_OBJECT); } fieldName = null; } else if (currentToken == XContentParser.Token.END_OBJECT || currentToken == XContentParser.Token.END_ARRAY) { - if (currentToken == XContentParser.Token.END_OBJECT && path.isEmpty() == false) { + // Remove the path, if the scope type matches the one when the path was added. + if (isObjectInPath.isEmpty() == false + && (isObjectInPath.getLast() && currentToken == XContentParser.Token.END_OBJECT + || isObjectInPath.getLast() == false && currentToken == XContentParser.Token.END_ARRAY)) { path.removeLast(); + isObjectInPath.removeLast(); } fieldName = null; } @@ -237,7 +243,6 @@ private static List parseDocForMissingValues if (leaf != null) { parser.nextToken(); // Advance the parser to the value to be read. result.add(leaf.cloneWithValue(context.encodeFlattenedToken())); - parser.nextToken(); // Skip the token ending the value. 
fieldName = null; } currentToken = parser.nextToken(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java index 205ff08c397b2..5eac5acdca286 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java @@ -901,6 +901,37 @@ public void testNestedArray() throws IOException { ); } + public void testConflictingFieldNameAfterArray() throws IOException { + DocumentMapper documentMapper = createMapperService(syntheticSourceMapping(b -> { + b.startObject("path").startObject("properties"); + { + b.startObject("to").startObject("properties"); + { + b.startObject("id").field("type", "integer").field("synthetic_source_keep", "arrays").endObject(); + } + b.endObject().endObject(); + b.startObject("id").field("type", "float").endObject(); + } + b.endObject().endObject(); + })).documentMapper(); + + var syntheticSource = syntheticSource(documentMapper, b -> { + b.startObject("path"); + { + b.startArray("to"); + { + b.startObject().array("id", 1, 20, 3).endObject(); + b.startObject().field("id", 10).endObject(); + } + b.endArray(); + b.field("id", "0.1"); + } + b.endObject(); + }); + assertEquals(""" + {"path":{"id":0.1,"to":{"id":[1,20,3,10]}}}""", syntheticSource); + } + public void testArrayWithinArray() throws IOException { DocumentMapper documentMapper = createMapperService(syntheticSourceMapping(b -> { b.startObject("path"); From 085ffc3511d9f719fd8bfda5f40e4cb25de5989e Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 11 Oct 2024 20:03:01 +0200 Subject: [PATCH 012/449] Change exception type when timing out waiting for specific seqno in fleet search api. (#114526) Without this change request fails with `ElasticsearchTimeoutException` if waiting for seqno times out. 
This results in a 500 status code. With this change the `SearchTimeoutException` is used which results in a 504 status code. This is a more appropriate response code for time-outs. Closes #114395 --- .../java/org/elasticsearch/search/SearchService.java | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 6fff4318e5b35..be96b4e25d841 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -17,7 +17,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TopDocs; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.ResolvedIndices; @@ -33,6 +32,7 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -112,6 +112,7 @@ import org.elasticsearch.search.query.QuerySearchRequest; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.query.ScrollQuerySearchResult; +import org.elasticsearch.search.query.SearchTimeoutException; import org.elasticsearch.search.rank.feature.RankFeatureResult; import org.elasticsearch.search.rank.feature.RankFeatureShardPhase; import org.elasticsearch.search.rank.feature.RankFeatureShardRequest; @@ -598,9 +599,13 @@ private void ensureAfterSeqNoRefreshed( final TimeValue timeout = request.getWaitForCheckpointsTimeout(); final 
Scheduler.ScheduledCancellable timeoutTask = NO_TIMEOUT.equals(timeout) ? null : threadPool.schedule(() -> { if (isDone.compareAndSet(false, true)) { - listener.onFailure( - new ElasticsearchTimeoutException("Wait for seq_no [{}] refreshed timed out [{}]", waitForCheckpoint, timeout) + var shardTarget = new SearchShardTarget( + shard.routingEntry().currentNodeId(), + shard.shardId(), + request.getClusterAlias() ); + var message = LoggerMessageFormat.format("Wait for seq_no [{}] refreshed timed out [{}]", waitForCheckpoint, timeout); + listener.onFailure(new SearchTimeoutException(shardTarget, message)); } }, timeout, EsExecutors.DIRECT_EXECUTOR_SERVICE); From a62228a74422f24a45e5ab93d572a2a4d2383c32 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Fri, 11 Oct 2024 11:16:55 -0700 Subject: [PATCH 013/449] Allow stored source in logsdb and tsdb (#114454) --- modules/aggregations/build.gradle | 1 + .../test/aggregations/time_series.yml | 17 ---- .../logsdb/LogsIndexModeCustomSettingsIT.java | 96 +++++++++++++++++-- rest-api-spec/build.gradle | 4 + .../test/logsdb/20_source_mapping.yml | 59 +++--------- .../rest-api-spec/test/tsdb/20_mapping.yml | 26 +++-- .../org/elasticsearch/index/IndexMode.java | 8 +- .../index/mapper/MapperFeatures.java | 6 +- .../index/mapper/MappingParser.java | 5 - .../index/mapper/SourceFieldMapper.java | 6 +- 10 files changed, 135 insertions(+), 93 deletions(-) diff --git a/modules/aggregations/build.gradle b/modules/aggregations/build.gradle index 1b3aac13b3608..f558ce8b9cfdb 100644 --- a/modules/aggregations/build.gradle +++ b/modules/aggregations/build.gradle @@ -48,4 +48,5 @@ dependencies { tasks.named("yamlRestCompatTestTransform").configure({ task -> task.skipTest("aggregations/date_agg_per_day_of_week/Date aggregartion per day of week", "week-date behaviour has changed") + task.skipTest("aggregations/time_series/Configure with no synthetic source", "temporary until backport") }) diff --git 
a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/time_series.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/time_series.yml index 1703d4908a753..acab855e17df6 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/time_series.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/time_series.yml @@ -291,23 +291,6 @@ setup: sum: sum: field: val ---- -"Configure with no synthetic source": - - requires: - cluster_features: ["gte_v8.15.0"] - reason: "Error message changed in 8.15.0" - - - do: - catch: '/Indices with with index mode \[time_series\] only support synthetic source/' - indices.create: - index: tsdb_error - body: - settings: - mode: time_series - routing_path: [key] - mappings: - _source: - enabled: false --- "Number for keyword routing field": diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeCustomSettingsIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeCustomSettingsIT.java index db6c12c8bc565..ab78f48b6cddf 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeCustomSettingsIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeCustomSettingsIT.java @@ -15,7 +15,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; -import org.hamcrest.Matchers; import org.junit.Before; import org.junit.ClassRule; @@ -115,18 +114,62 @@ public void testConfigureStoredSourceBeforeIndexCreation() throws IOException { } }"""; + assertOK(putComponentTemplate(client, "logs@custom", storedSourceMapping)); + assertOK(createDataStream(client, "logs-custom-dev")); + + var mapping = 
getMapping(client, getDataStreamBackingIndex(client, "logs-custom-dev", 0)); + String sourceMode = (String) subObject("_source").apply(mapping).get("mode"); + assertThat(sourceMode, equalTo("stored")); + } + + public void testConfigureDisabledSourceBeforeIndexCreation() { + var storedSourceMapping = """ + { + "template": { + "settings": { + "index": { + "mode": "logsdb" + } + }, + "mappings": { + "_source": { + "enabled": false + } + } + } + }"""; + Exception e = assertThrows(ResponseException.class, () -> putComponentTemplate(client, "logs@custom", storedSourceMapping)); assertThat( e.getMessage(), - containsString("Failed to parse mapping: Indices with with index mode [logsdb] only support synthetic source") + containsString("Failed to parse mapping: _source can not be disabled in index using [logsdb] index mode") ); assertThat(e.getMessage(), containsString("mapper_parsing_exception")); + } - assertOK(createDataStream(client, "logs-custom-dev")); + public void testConfigureDisabledSourceModeBeforeIndexCreation() { + var storedSourceMapping = """ + { + "template": { + "settings": { + "index": { + "mode": "logsdb" + } + }, + "mappings": { + "_source": { + "mode": "disabled" + } + } + } + }"""; - var mapping = getMapping(client, getDataStreamBackingIndex(client, "logs-custom-dev", 0)); - String sourceMode = (String) subObject("_source").apply(mapping).get("mode"); - assertThat(sourceMode, equalTo("synthetic")); + Exception e = assertThrows(ResponseException.class, () -> putComponentTemplate(client, "logs@custom", storedSourceMapping)); + assertThat( + e.getMessage(), + containsString("Failed to parse mapping: _source can not be disabled in index using [logsdb] index mode") + ); + assertThat(e.getMessage(), containsString("mapper_parsing_exception")); } public void testConfigureStoredSourceWhenIndexIsCreated() throws IOException { @@ -142,8 +185,45 @@ public void testConfigureStoredSourceWhenIndexIsCreated() throws IOException { }"""; 
assertOK(putComponentTemplate(client, "logs@custom", storedSourceMapping)); + assertOK(createDataStream(client, "logs-custom-dev")); + + var mapping = getMapping(client, getDataStreamBackingIndex(client, "logs-custom-dev", 0)); + String sourceMode = (String) subObject("_source").apply(mapping).get("mode"); + assertThat(sourceMode, equalTo("stored")); + } + + public void testConfigureDisabledSourceWhenIndexIsCreated() throws IOException { + var disabledModeMapping = """ + { + "template": { + "mappings": { + "_source": { + "enabled": false + } + } + } + }"""; + + assertOK(putComponentTemplate(client, "logs@custom", disabledModeMapping)); + ResponseException e = expectThrows(ResponseException.class, () -> createDataStream(client, "logs-custom-dev")); + assertThat(e.getMessage(), containsString("_source can not be disabled in index using [logsdb] index mode")); + } + + public void testConfigureDisabledSourceModeWhenIndexIsCreated() throws IOException { + var disabledModeMapping = """ + { + "template": { + "mappings": { + "_source": { + "mode": "disabled" + } + } + } + }"""; + + assertOK(putComponentTemplate(client, "logs@custom", disabledModeMapping)); ResponseException e = expectThrows(ResponseException.class, () -> createDataStream(client, "logs-custom-dev")); - assertThat(e.getMessage(), containsString("Indices with with index mode [logsdb] only support synthetic source")); + assertThat(e.getMessage(), containsString("_source can not be disabled in index using [logsdb] index mode")); } public void testOverrideIndexCodec() throws IOException { @@ -377,7 +457,7 @@ public void testIgnoreAboveSetting() throws IOException { ); assertThat( ex.getMessage(), - Matchers.containsString("Failed to parse value [" + newValue + "] for setting [index.mapping.ignore_above]") + containsString("Failed to parse value [" + newValue + "] for setting [index.mapping.ignore_above]") ); } } diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 
a742e83255bbb..27ae0c7f99db1 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -57,4 +57,8 @@ tasks.named("precommit").configure { tasks.named("yamlRestCompatTestTransform").configure({task -> task.skipTest("indices.sort/10_basic/Index Sort", "warning does not exist for compatibility") task.skipTest("search/330_fetch_fields/Test search rewrite", "warning does not exist for compatibility") + task.skipTest("tsdb/20_mapping/disabled source", "temporary until backported") + task.skipTest("logsdb/20_source_mapping/disabled _source is not supported", "temporary until backported") + task.skipTest("tsdb/20_mapping/regular source", "temporary until backported") + task.skipTest("logsdb/20_source_mapping/stored _source mode is not supported", "temporary until backported") }) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml index d209c839d904b..03c8def9f558c 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml @@ -1,21 +1,10 @@ --- -stored _source mode is not supported: +stored _source mode is supported: - requires: - test_runner_features: [capabilities] - capabilities: - - method: PUT - path: /{index} - capabilities: [logsdb_index_mode] - reason: "Support for 'logsdb' index mode capability required" - - - skip: - known_issues: - - cluster_feature: "gte_v8.15.0" - fixed_by: "gte_v8.16.0" - reason: "Development of logs index mode spans 8.15 and 8.16" + cluster_features: ["mapper.source.remove_synthetic_source_only_validation"] + reason: requires new validation logic - do: - catch: bad_request indices.create: index: test-stored-source body: @@ -25,31 +14,17 @@ stored _source mode is not supported: mappings: _source: mode: stored - properties: - "@timestamp": - type: date 
- host.name: - type: keyword + - do: + indices.get: + index: test-stored-source - - match: { error.type: "mapper_parsing_exception" } - - match: { error.root_cause.0.type: "mapper_parsing_exception" } - - match: { error.reason: "Failed to parse mapping: Indices with with index mode [logsdb] only support synthetic source" } + - match: { test-stored-source.mappings._source.mode: "stored" } --- disabled _source is not supported: - requires: - test_runner_features: [capabilities] - capabilities: - - method: PUT - path: /{index} - capabilities: [logsdb_index_mode] - reason: "Support for 'logsdb' index mode capability required" - - - skip: - known_issues: - - cluster_feature: "gte_v8.15.0" - fixed_by: "gte_v8.16.0" - reason: "Development of logs index mode spans 8.15 and 8.16" + cluster_features: ["mapper.source.remove_synthetic_source_only_validation"] + reason: requires new error message - do: catch: bad_request @@ -62,20 +37,15 @@ disabled _source is not supported: mappings: _source: enabled: false - properties: - "@timestamp": - type: date - host.name: - type: keyword - match: { error.type: "mapper_parsing_exception" } - match: { error.root_cause.0.type: "mapper_parsing_exception" } - - match: { error.reason: "Failed to parse mapping: Indices with with index mode [logsdb] only support synthetic source" } + - match: { error.reason: "Failed to parse mapping: _source can not be disabled in index using [logsdb] index mode" } - do: catch: bad_request indices.create: - index: test-disabled-source + index: test-disabled-mode-source body: settings: index: @@ -83,12 +53,7 @@ disabled _source is not supported: mappings: _source: mode: disabled - properties: - "@timestamp": - type: date - host.name: - type: keyword - match: { error.type: "mapper_parsing_exception" } - match: { error.root_cause.0.type: "mapper_parsing_exception" } - - match: { error.reason: "Failed to parse mapping: Indices with with index mode [logsdb] only support synthetic source" } + - match: { error.reason: 
"Failed to parse mapping: _source can not be disabled in index using [logsdb] index mode" } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml index ade153d284548..6a59c7bf75cbf 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml @@ -456,13 +456,12 @@ nested fields: - match: {tsdb-synthetic.mappings._source.mode: synthetic} --- -regular source: +stored source is supported: - requires: - cluster_features: ["gte_v8.7.0"] - reason: synthetic source + cluster_features: ["mapper.source.remove_synthetic_source_only_validation"] + reason: requires new validation logic - do: - catch: '/time series indices only support synthetic source/' indices.create: index: tsdb_index body: @@ -486,14 +485,21 @@ regular source: uid: type: keyword time_series_dimension: true + + - do: + indices.get: + index: tsdb_index + + - match: { tsdb_index.mappings._source.mode: "stored" } + --- -disabled source: +disabled source is not supported: - requires: - cluster_features: ["gte_v8.7.0"] - reason: synthetic source + cluster_features: ["mapper.source.remove_synthetic_source_only_validation"] + reason: requires new error message - do: - catch: '/time series indices only support synthetic source/' + catch: bad_request indices.create: index: tsdb_index body: @@ -518,6 +524,10 @@ disabled source: type: keyword time_series_dimension: true + - match: { error.type: "mapper_parsing_exception" } + - match: { error.root_cause.0.type: "mapper_parsing_exception" } + - match: { error.reason: "Failed to parse mapping: _source can not be disabled in index using [time_series] index mode" } + --- source include/exclude: - requires: diff --git a/server/src/main/java/org/elasticsearch/index/IndexMode.java b/server/src/main/java/org/elasticsearch/index/IndexMode.java 
index 5dfd698b2bb20..2d9e89223d7a6 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexMode.java +++ b/server/src/main/java/org/elasticsearch/index/IndexMode.java @@ -217,8 +217,8 @@ public boolean shouldValidateTimestamp() { @Override public void validateSourceFieldMapper(SourceFieldMapper sourceFieldMapper) { - if (sourceFieldMapper.isSynthetic() == false) { - throw new IllegalArgumentException("time series indices only support synthetic source"); + if (sourceFieldMapper.enabled() == false) { + throw new IllegalArgumentException("_source can not be disabled in index using [" + IndexMode.TIME_SERIES + "] index mode"); } } @@ -292,8 +292,8 @@ public boolean shouldValidateTimestamp() { @Override public void validateSourceFieldMapper(SourceFieldMapper sourceFieldMapper) { - if (sourceFieldMapper.isSynthetic() == false) { - throw new IllegalArgumentException("Indices with with index mode [" + IndexMode.LOGSDB + "] only support synthetic source"); + if (sourceFieldMapper.enabled() == false) { + throw new IllegalArgumentException("_source can not be disabled in index using [" + IndexMode.LOGSDB + "] index mode"); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java index 4f90bd6e6f2c9..f3744c974e9e3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java @@ -58,6 +58,10 @@ public Set getFeatures() { @Override public Set getTestFeatures() { - return Set.of(RangeFieldMapper.DATE_RANGE_INDEXING_FIX, IgnoredSourceFieldMapper.DONT_EXPAND_DOTS_IN_IGNORED_SOURCE); + return Set.of( + RangeFieldMapper.DATE_RANGE_INDEXING_FIX, + IgnoredSourceFieldMapper.DONT_EXPAND_DOTS_IN_IGNORED_SOURCE, + SourceFieldMapper.REMOVE_SYNTHETIC_SOURCE_ONLY_VALIDATION + ); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java 
b/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java index 6737f08b1ac5b..9afa77161bef1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; -import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.xcontent.XContentType; @@ -147,10 +146,6 @@ Mapping parse(@Nullable String type, MergeReason reason, Map map assert fieldNodeMap.isEmpty(); if (metadataFieldMapper instanceof SourceFieldMapper sfm) { - // Validation in other places should have failed first - assert sfm.isSynthetic() - || (sfm.isSynthetic() == false && mappingParserContext.getIndexSettings().getMode() != IndexMode.TIME_SERIES) - : "synthetic source can't be disabled in a time series index"; isSourceSynthetic = sfm.isSynthetic(); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index 118cdbffc5db9..0f4549c679d42 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -51,6 +51,9 @@ public class SourceFieldMapper extends MetadataFieldMapper { public static final NodeFeature SYNTHETIC_SOURCE_COPY_TO_INSIDE_OBJECTS_FIX = new NodeFeature( "mapper.source.synthetic_source_copy_to_inside_objects_fix" ); + public static final NodeFeature REMOVE_SYNTHETIC_SOURCE_ONLY_VALIDATION = new NodeFeature( + "mapper.source.remove_synthetic_source_only_validation" + ); public static final String NAME = "_source"; public static final String RECOVERY_SOURCE_NAME = "_recovery_source"; @@ -235,9 +238,6 @@ private boolean 
isDefault() { @Override public SourceFieldMapper build() { if (enabled.getValue().explicit()) { - if (indexMode != null && indexMode.isSyntheticSourceEnabled()) { - throw new MapperParsingException("Indices with with index mode [" + indexMode + "] only support synthetic source"); - } if (mode.get() != null) { throw new MapperParsingException("Cannot set both [mode] and [enabled] parameters"); } From 4c48aa346ae33e4406e89219762a2de903bbf14f Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 11 Oct 2024 14:55:01 -0400 Subject: [PATCH 014/449] ESQL: Retry test on 403 (#114450) Retry the async test when you get a 403 - that could be because security has not yet booted. We should have permission to fetch everything. --- .../xpack/esql/heap_attack/HeapAttackIT.java | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index 809bcd5a9bc12..008a056e87901 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -129,6 +129,15 @@ public void testSortByManyLongsTooMuchMemoryAsync() throws IOException { try { resp = client().performRequest(fetch); } catch (ResponseException e) { + if (e.getResponse().getStatusLine().getStatusCode() == 403) { + /* + * There's a bug when loading from the translog with security + * enabled. If we retry a few times we'll load from the index + * itself and should succeed. 
+ */ + logger.error("polled for results got 403"); + continue; + } if (e.getResponse().getStatusLine().getStatusCode() == 404) { logger.error("polled for results got 404"); continue; From fd9d7335c8addb34ecc7b6b2b72a882ec63721f4 Mon Sep 17 00:00:00 2001 From: Michael Peterson Date: Fri, 11 Oct 2024 15:03:26 -0400 Subject: [PATCH 015/449] CCS metadata is opt-in in ESQL JSON responses (#114437) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Since Kibana only needs CCS metadata in ESQL responses from certain well-defined locations, we are making CCS metadata opt-in. This feature is patterned after ESQL profiling, where you specify "profile": true in the ESQL body and if you asked for it will be present in the response always (it will be written to the .async-search index and you can’t turn it off in later async-search requests against this particular query ID) and if you didn’t ask for it at the beginning it will never be present (it will NOT be written to the .async-search index when it is persisted). The new option is "include_ccs_metadata": true/false. 
--- .../esql/esql-across-clusters.asciidoc | 20 +-- docs/reference/esql/esql-query-api.asciidoc | 13 ++ .../org/elasticsearch/TransportVersions.java | 1 + .../xpack/esql/ccq/MultiClustersIT.java | 131 +++++++++------ .../xpack/esql/qa/single_node/RestEsqlIT.java | 1 + .../xpack/esql/qa/rest/RestEsqlTestCase.java | 9 + .../esql/action/CrossClustersEnrichIT.java | 71 ++++++-- .../esql/action/CrossClustersQueryIT.java | 157 ++++++++++++++---- .../xpack/esql/action/EsqlExecutionInfo.java | 26 ++- .../xpack/esql/action/EsqlQueryRequest.java | 9 + .../xpack/esql/action/EsqlQueryResponse.java | 2 +- .../xpack/esql/action/RequestXContent.java | 2 + .../xpack/esql/plugin/ComputeService.java | 2 +- .../esql/plugin/EsqlMediaTypeParser.java | 20 ++- .../esql/plugin/TransportEsqlQueryAction.java | 5 +- .../elasticsearch/xpack/esql/CsvTests.java | 2 +- .../esql/action/EsqlQueryResponseTests.java | 6 +- .../esql/formatter/TextFormatterTests.java | 6 +- .../esql/plugin/ComputeListenerTests.java | 14 +- .../esql/plugin/EsqlMediaTypeParserTests.java | 38 +++++ .../xpack/esql/session/EsqlSessionTests.java | 12 +- .../esql/stats/PlanExecutorMetricsTests.java | 4 +- 22 files changed, 414 insertions(+), 137 deletions(-) diff --git a/docs/reference/esql/esql-across-clusters.asciidoc b/docs/reference/esql/esql-across-clusters.asciidoc index cfcb5de73602c..db266fafde9d6 100644 --- a/docs/reference/esql/esql-across-clusters.asciidoc +++ b/docs/reference/esql/esql-across-clusters.asciidoc @@ -188,9 +188,10 @@ FROM *:my-index-000001 [[ccq-cluster-details]] ==== Cross-cluster metadata -ES|QL {ccs} responses include metadata about the search on each cluster when the response format is JSON. +Using the `"include_ccs_metadata": true` option, users can request that +ES|QL {ccs} responses include metadata about the search on each cluster (when the response format is JSON). Here we show an example using the async search endpoint. 
{ccs-cap} metadata is also present in the synchronous -search endpoint. +search endpoint response when requested. [source,console] ---- @@ -200,7 +201,8 @@ POST /_query/async?format=json FROM my-index-000001,cluster_one:my-index-000001,cluster_two:my-index* | STATS COUNT(http.response.status_code) BY user.id | LIMIT 2 - """ + """, + "include_ccs_metadata": true } ---- // TEST[setup:my_index] @@ -238,7 +240,7 @@ Which returns: "(local)": { <4> "status": "successful", "indices": "blogs", - "took": 36, <5> + "took": 41, <5> "_shards": { <6> "total": 13, "successful": 13, @@ -260,7 +262,7 @@ Which returns: "cluster_two": { "status": "successful", "indices": "cluster_two:my-index*", - "took": 41, + "took": 40, "_shards": { "total": 18, "successful": 18, @@ -286,7 +288,7 @@ it is identified as "(local)". <5> How long (in milliseconds) the search took on each cluster. This can be useful to determine which clusters have slower response times than others. <6> The shard details for the search on that cluster, including a count of shards that were -skipped due to the can-match phase. Shards are skipped when they cannot have any matching data +skipped due to the can-match phase results. Shards are skipped when they cannot have any matching data and therefore are not included in the full ES|QL query. @@ -294,9 +296,6 @@ The cross-cluster metadata can be used to determine whether any data came back f For instance, in the query below, the wildcard expression for `cluster-two` did not resolve to a concrete index (or indices). The cluster is, therefore, marked as 'skipped' and the total number of shards searched is set to zero. -Since the other cluster did have a matching index, the search did not return an error, but -instead returned all the matching data it could find. 
- [source,console] ---- @@ -306,7 +305,8 @@ POST /_query/async?format=json FROM cluster_one:my-index*,cluster_two:logs* | STATS COUNT(http.response.status_code) BY user.id | LIMIT 2 - """ + """, + "include_ccs_metadata": true } ---- // TEST[continued] diff --git a/docs/reference/esql/esql-query-api.asciidoc b/docs/reference/esql/esql-query-api.asciidoc index d1db21043a5b5..b1582721ad0e0 100644 --- a/docs/reference/esql/esql-query-api.asciidoc +++ b/docs/reference/esql/esql-query-api.asciidoc @@ -67,6 +67,11 @@ precedence. `false`. The API only supports this parameter for CBOR, JSON, SMILE, and YAML responses. See <>. +`include_ccs_metadata`:: +(Optional, boolean) If `true`, cross-cluster searches will include metadata about the query +on each cluster. Defaults to `false`. The API only supports this parameter for CBOR, JSON, SMILE, +and YAML responses. See <>. + `locale`:: (Optional, string) Returns results (especially dates) formatted per the conventions of the locale. For syntax, refer to <>. @@ -85,6 +90,7 @@ https://en.wikipedia.org/wiki/Query_plan[EXPLAIN PLAN]. `query`:: (Required, string) {esql} query to run. For syntax, refer to <>. + ifeval::["{release-state}"=="unreleased"] `table`:: (Optional, object) Named "table" parameters that can be referenced by the <> command. @@ -108,6 +114,13 @@ returned if `drop_null_columns` is sent with the request. (array of arrays) Values for the search results. +`_clusters`:: +(object) +Metadata about clusters involved in the execution of a cross-cluster query. Only returned (1) for +cross-cluster searches and (2) when `include_ccs_metadata` is sent in the body and set to `true` +and (3) when `format` of the response is set to JSON (the default), CBOR, SMILE, or YAML. +See <> for more information. + `profile`:: (object) Profile describing the execution of the query. Only returned if `profile` was sent in the body. 
diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 0f9c27a7877b8..03186e63240e5 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -241,6 +241,7 @@ static TransportVersion def(int id) { public static final TransportVersion RETRIEVERS_TELEMETRY_ADDED = def(8_765_00_0); public static final TransportVersion ESQL_CACHED_STRING_SERIALIZATION = def(8_766_00_0); public static final TransportVersion CHUNK_SENTENCE_OVERLAP_SETTING_ADDED = def(8_767_00_0); + public static final TransportVersion OPT_IN_ESQL_CCS_EXECUTION_INFO = def(8_768_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java index 454f3962c07ea..1f72827057c5b 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java @@ -127,8 +127,18 @@ void indexDocs(RestClient client, String index, List docs) throws IOExcepti refresh(client, index); } - private Map run(String query) throws IOException { - Map resp = runEsql(new RestEsqlTestCase.RequestObjectBuilder().query(query).build()); + private Map run(String query, boolean includeCCSMetadata) throws IOException { + Map resp = runEsql( + new RestEsqlTestCase.RequestObjectBuilder().query(query).includeCCSMetadata(includeCCSMetadata).build() + ); + logger.info("--> query {} response {}", query, resp); + return resp; + } + + private Map runWithColumnarAndIncludeCCSMetadata(String query) throws IOException { + Map resp = runEsql( + new 
RestEsqlTestCase.RequestObjectBuilder().query(query).includeCCSMetadata(true).columnar(true).build() + ); logger.info("--> query {} response {}", query, resp); return resp; } @@ -147,62 +157,77 @@ private Map runEsql(RestEsqlTestCase.RequestObjectBuilder reques public void testCount() throws Exception { { - Map result = run("FROM test-local-index,*:test-remote-index | STATS c = COUNT(*)"); + boolean includeCCSMetadata = randomBoolean(); + Map result = run("FROM test-local-index,*:test-remote-index | STATS c = COUNT(*)", includeCCSMetadata); var columns = List.of(Map.of("name", "c", "type", "long")); var values = List.of(List.of(localDocs.size() + remoteDocs.size())); MapMatcher mapMatcher = matchesMap(); - assertMap( - result, - mapMatcher.entry("columns", columns) - .entry("values", values) - .entry("took", greaterThanOrEqualTo(0)) - .entry("_clusters", any(Map.class)) - ); - assertClusterDetailsMap(result, false); + if (includeCCSMetadata) { + mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); + } + assertMap(result, mapMatcher.entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))); + if (includeCCSMetadata) { + assertClusterDetailsMap(result, false); + } } { - Map result = run("FROM *:test-remote-index | STATS c = COUNT(*)"); + boolean includeCCSMetadata = randomBoolean(); + Map result = run("FROM *:test-remote-index | STATS c = COUNT(*)", includeCCSMetadata); var columns = List.of(Map.of("name", "c", "type", "long")); var values = List.of(List.of(remoteDocs.size())); MapMatcher mapMatcher = matchesMap(); - assertMap( - result, - mapMatcher.entry("columns", columns) - .entry("values", values) - .entry("took", greaterThanOrEqualTo(0)) - .entry("_clusters", any(Map.class)) - ); - assertClusterDetailsMap(result, true); + if (includeCCSMetadata) { + mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); + } + assertMap(result, mapMatcher.entry("columns", columns).entry("values", values).entry("took", 
greaterThanOrEqualTo(0))); + if (includeCCSMetadata) { + assertClusterDetailsMap(result, true); + } } } public void testUngroupedAggs() throws Exception { { - Map result = run("FROM test-local-index,*:test-remote-index | STATS total = SUM(data)"); + boolean includeCCSMetadata = randomBoolean(); + Map result = run("FROM test-local-index,*:test-remote-index | STATS total = SUM(data)", includeCCSMetadata); var columns = List.of(Map.of("name", "total", "type", "long")); long sum = Stream.concat(localDocs.stream(), remoteDocs.stream()).mapToLong(d -> d.data).sum(); var values = List.of(List.of(Math.toIntExact(sum))); // check all sections of map except _cluster/details MapMatcher mapMatcher = matchesMap(); - assertMap( - result, - mapMatcher.entry("columns", columns) - .entry("values", values) - .entry("took", greaterThanOrEqualTo(0)) - .entry("_clusters", any(Map.class)) - ); - assertClusterDetailsMap(result, false); + if (includeCCSMetadata) { + mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); + } + assertMap(result, mapMatcher.entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))); + if (includeCCSMetadata) { + assertClusterDetailsMap(result, false); + } } { - Map result = run("FROM *:test-remote-index | STATS total = SUM(data)"); + boolean includeCCSMetadata = randomBoolean(); + Map result = run("FROM *:test-remote-index | STATS total = SUM(data)", includeCCSMetadata); + var columns = List.of(Map.of("name", "total", "type", "long")); + long sum = remoteDocs.stream().mapToLong(d -> d.data).sum(); + var values = List.of(List.of(Math.toIntExact(sum))); + + MapMatcher mapMatcher = matchesMap(); + if (includeCCSMetadata) { + mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); + } + assertMap(result, mapMatcher.entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))); + if (includeCCSMetadata) { + assertClusterDetailsMap(result, true); + } + } + { + Map result = 
runWithColumnarAndIncludeCCSMetadata("FROM *:test-remote-index | STATS total = SUM(data)"); var columns = List.of(Map.of("name", "total", "type", "long")); long sum = remoteDocs.stream().mapToLong(d -> d.data).sum(); var values = List.of(List.of(Math.toIntExact(sum))); - // check all sections of map except _cluster/details MapMatcher mapMatcher = matchesMap(); assertMap( result, @@ -269,7 +294,11 @@ private void assertClusterDetailsMap(Map result, boolean remoteO public void testGroupedAggs() throws Exception { { - Map result = run("FROM test-local-index,*:test-remote-index | STATS total = SUM(data) BY color | SORT color"); + boolean includeCCSMetadata = randomBoolean(); + Map result = run( + "FROM test-local-index,*:test-remote-index | STATS total = SUM(data) BY color | SORT color", + includeCCSMetadata + ); var columns = List.of(Map.of("name", "total", "type", "long"), Map.of("name", "color", "type", "keyword")); var values = Stream.concat(localDocs.stream(), remoteDocs.stream()) .collect(Collectors.toMap(d -> d.color, Doc::data, Long::sum)) @@ -280,17 +309,20 @@ public void testGroupedAggs() throws Exception { .toList(); MapMatcher mapMatcher = matchesMap(); - assertMap( - result, - mapMatcher.entry("columns", columns) - .entry("values", values) - .entry("took", greaterThanOrEqualTo(0)) - .entry("_clusters", any(Map.class)) - ); - assertClusterDetailsMap(result, false); + if (includeCCSMetadata) { + mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); + } + assertMap(result, mapMatcher.entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))); + if (includeCCSMetadata) { + assertClusterDetailsMap(result, false); + } } { - Map result = run("FROM *:test-remote-index | STATS total = SUM(data) by color | SORT color"); + boolean includeCCSMetadata = randomBoolean(); + Map result = run( + "FROM *:test-remote-index | STATS total = SUM(data) by color | SORT color", + includeCCSMetadata + ); var columns = List.of(Map.of("name", 
"total", "type", "long"), Map.of("name", "color", "type", "keyword")); var values = remoteDocs.stream() .collect(Collectors.toMap(d -> d.color, Doc::data, Long::sum)) @@ -300,16 +332,15 @@ public void testGroupedAggs() throws Exception { .map(e -> List.of(Math.toIntExact(e.getValue()), e.getKey())) .toList(); - // check all sections of map except _cluster/details + // check all sections of map except _clusters/details MapMatcher mapMatcher = matchesMap(); - assertMap( - result, - mapMatcher.entry("columns", columns) - .entry("values", values) - .entry("took", greaterThanOrEqualTo(0)) - .entry("_clusters", any(Map.class)) - ); - assertClusterDetailsMap(result, true); + if (includeCCSMetadata) { + mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); + } + assertMap(result, mapMatcher.entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))); + if (includeCCSMetadata) { + assertClusterDetailsMap(result, true); + } } } diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java index 3388f6f517bdf..7de4ee4ccae28 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java @@ -76,6 +76,7 @@ public void testBasicEsql() throws IOException { indexTimestampData(1); RequestObjectBuilder builder = requestObjectBuilder().query(fromIndex() + " | stats avg(value)"); + requestObjectBuilder().includeCCSMetadata(randomBoolean()); if (Build.current().isSnapshot()) { builder.pragmas(Settings.builder().put("data_partitioning", "shard").build()); } diff --git 
a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index 8163e73078c71..4fa6ac3009654 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -128,6 +128,7 @@ public static class RequestObjectBuilder { private Boolean keepOnCompletion = null; private Boolean profile = null; + private Boolean includeCCSMetadata = null; private CheckedConsumer filter; @@ -197,6 +198,11 @@ public RequestObjectBuilder profile(boolean profile) { return this; } + public RequestObjectBuilder includeCCSMetadata(boolean includeCCSMetadata) { + this.includeCCSMetadata = includeCCSMetadata; + return this; + } + public RequestObjectBuilder filter(CheckedConsumer filter) { this.filter = filter; return this; @@ -220,6 +226,9 @@ public RequestObjectBuilder build() throws IOException { if (profile != null) { builder.field("profile", profile); } + if (includeCCSMetadata != null) { + builder.field("include_ccs_metadata", includeCCSMetadata); + } if (filter != null) { builder.startObject("filter"); filter.accept(builder); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java index 452d56680e8da..7d8bb738098d3 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java @@ -14,6 +14,7 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.core.Tuple; import org.elasticsearch.ingest.common.IngestCommonPlugin; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.license.LicenseService; @@ -220,7 +221,7 @@ static String enrichVendors(Enrich.Mode mode) { public void testWithHostsPolicy() { for (var mode : Enrich.Mode.values()) { String query = "FROM events | eval ip= TO_STR(host) | " + enrichHosts(mode) + " | stats c = COUNT(*) by os | SORT os"; - try (EsqlQueryResponse resp = runQuery(query)) { + try (EsqlQueryResponse resp = runQuery(query, null)) { List> rows = getValuesList(resp); assertThat( rows, @@ -237,9 +238,14 @@ public void testWithHostsPolicy() { assertFalse(resp.getExecutionInfo().isCrossClusterSearch()); } } + + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + boolean responseExpectMeta = includeCCSMetadata.v2(); + for (var mode : Enrich.Mode.values()) { String query = "FROM *:events | eval ip= TO_STR(host) | " + enrichHosts(mode) + " | stats c = COUNT(*) by os | SORT os"; - try (EsqlQueryResponse resp = runQuery(query)) { + try (EsqlQueryResponse resp = runQuery(query, requestIncludeMeta)) { List> rows = getValuesList(resp); assertThat( rows, @@ -255,6 +261,7 @@ public void testWithHostsPolicy() { ) ); EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of("c1", "c2"))); assertCCSExecutionInfoDetails(executionInfo); } @@ -262,7 +269,7 @@ public void testWithHostsPolicy() { for (var mode : Enrich.Mode.values()) { String query = "FROM *:events,events | eval ip= TO_STR(host) | " + enrichHosts(mode) + " | stats c = COUNT(*) by os | SORT os"; - try (EsqlQueryResponse resp = runQuery(query)) { + try (EsqlQueryResponse resp = runQuery(query, requestIncludeMeta)) { List> rows = getValuesList(resp); 
assertThat( rows, @@ -278,6 +285,7 @@ public void testWithHostsPolicy() { ) ); EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of("", "c1", "c2"))); assertCCSExecutionInfoDetails(executionInfo); } @@ -285,6 +293,10 @@ public void testWithHostsPolicy() { } public void testEnrichHostsAggThenEnrichVendorCoordinator() { + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + boolean responseExpectMeta = includeCCSMetadata.v2(); + for (var hostMode : Enrich.Mode.values()) { String query = String.format(Locale.ROOT, """ FROM *:events,events @@ -295,7 +307,7 @@ public void testEnrichHostsAggThenEnrichVendorCoordinator() { | stats c = SUM(c) by vendor | sort vendor """, enrichHosts(hostMode), enrichVendors(Enrich.Mode.COORDINATOR)); - try (EsqlQueryResponse resp = runQuery(query)) { + try (EsqlQueryResponse resp = runQuery(query, requestIncludeMeta)) { assertThat( getValuesList(resp), equalTo( @@ -309,6 +321,7 @@ public void testEnrichHostsAggThenEnrichVendorCoordinator() { ) ); EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of("", "c1", "c2"))); assertCCSExecutionInfoDetails(executionInfo); } @@ -316,6 +329,10 @@ public void testEnrichHostsAggThenEnrichVendorCoordinator() { } public void testEnrichTwiceThenAggs() { + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + boolean responseExpectMeta = includeCCSMetadata.v2(); + for (var hostMode : Enrich.Mode.values()) { String query = String.format(Locale.ROOT, """ FROM *:events,events @@ -325,7 +342,7 @@ public void testEnrichTwiceThenAggs() { | stats c = COUNT(*) by vendor | sort vendor """, enrichHosts(hostMode), 
enrichVendors(Enrich.Mode.COORDINATOR)); - try (EsqlQueryResponse resp = runQuery(query)) { + try (EsqlQueryResponse resp = runQuery(query, requestIncludeMeta)) { assertThat( getValuesList(resp), equalTo( @@ -339,6 +356,7 @@ public void testEnrichTwiceThenAggs() { ) ); EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of("", "c1", "c2"))); assertCCSExecutionInfoDetails(executionInfo); } @@ -346,6 +364,10 @@ public void testEnrichTwiceThenAggs() { } public void testEnrichCoordinatorThenAny() { + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + boolean responseExpectMeta = includeCCSMetadata.v2(); + String query = String.format(Locale.ROOT, """ FROM *:events,events | eval ip= TO_STR(host) @@ -354,7 +376,7 @@ public void testEnrichCoordinatorThenAny() { | stats c = COUNT(*) by vendor | sort vendor """, enrichHosts(Enrich.Mode.COORDINATOR), enrichVendors(Enrich.Mode.ANY)); - try (EsqlQueryResponse resp = runQuery(query)) { + try (EsqlQueryResponse resp = runQuery(query, requestIncludeMeta)) { assertThat( getValuesList(resp), equalTo( @@ -368,12 +390,17 @@ public void testEnrichCoordinatorThenAny() { ) ); EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of("", "c1", "c2"))); assertCCSExecutionInfoDetails(executionInfo); } } public void testEnrichCoordinatorWithVendor() { + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + boolean responseExpectMeta = includeCCSMetadata.v2(); + for (Enrich.Mode hostMode : Enrich.Mode.values()) { String query = String.format(Locale.ROOT, """ FROM *:events,events @@ -383,7 +410,7 @@ public void testEnrichCoordinatorWithVendor() { | 
stats c = COUNT(*) by vendor | sort vendor """, enrichHosts(hostMode), enrichVendors(Enrich.Mode.COORDINATOR)); - try (EsqlQueryResponse resp = runQuery(query)) { + try (EsqlQueryResponse resp = runQuery(query, requestIncludeMeta)) { assertThat( getValuesList(resp), equalTo( @@ -397,6 +424,7 @@ public void testEnrichCoordinatorWithVendor() { ) ); EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of("", "c1", "c2"))); assertCCSExecutionInfoDetails(executionInfo); } @@ -405,6 +433,10 @@ public void testEnrichCoordinatorWithVendor() { } public void testEnrichRemoteWithVendor() { + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + boolean responseExpectMeta = includeCCSMetadata.v2(); + for (Enrich.Mode hostMode : List.of(Enrich.Mode.ANY, Enrich.Mode.REMOTE)) { var query = String.format(Locale.ROOT, """ FROM *:events,events @@ -414,7 +446,7 @@ public void testEnrichRemoteWithVendor() { | stats c = COUNT(*) by vendor | sort vendor """, enrichHosts(hostMode), enrichVendors(Enrich.Mode.REMOTE)); - try (EsqlQueryResponse resp = runQuery(query)) { + try (EsqlQueryResponse resp = runQuery(query, requestIncludeMeta)) { assertThat( getValuesList(resp), equalTo( @@ -430,6 +462,7 @@ public void testEnrichRemoteWithVendor() { ) ); EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of("", "c1", "c2"))); assertCCSExecutionInfoDetails(executionInfo); } @@ -444,7 +477,7 @@ public void testTopNThenEnrichRemote() { | LIMIT 5 | %s """, enrichHosts(Enrich.Mode.REMOTE)); - var error = expectThrows(VerificationException.class, () -> runQuery(query).close()); + var error = expectThrows(VerificationException.class, () -> runQuery(query, 
randomBoolean()).close()); assertThat(error.getMessage(), containsString("ENRICH with remote policy can't be executed after LIMIT")); } @@ -455,7 +488,7 @@ public void testLimitThenEnrichRemote() { | eval ip= TO_STR(host) | %s """, enrichHosts(Enrich.Mode.REMOTE)); - var error = expectThrows(VerificationException.class, () -> runQuery(query).close()); + var error = expectThrows(VerificationException.class, () -> runQuery(query, randomBoolean()).close()); assertThat(error.getMessage(), containsString("ENRICH with remote policy can't be executed after LIMIT")); } @@ -468,7 +501,7 @@ public void testAggThenEnrichRemote() { | %s | sort vendor """, enrichHosts(Enrich.Mode.ANY), enrichVendors(Enrich.Mode.REMOTE)); - var error = expectThrows(VerificationException.class, () -> runQuery(query).close()); + var error = expectThrows(VerificationException.class, () -> runQuery(query, randomBoolean()).close()); assertThat(error.getMessage(), containsString("ENRICH with remote policy can't be executed after STATS")); } @@ -480,20 +513,23 @@ public void testEnrichCoordinatorThenEnrichRemote() { | %s | sort vendor """, enrichHosts(Enrich.Mode.COORDINATOR), enrichVendors(Enrich.Mode.REMOTE)); - var error = expectThrows(VerificationException.class, () -> runQuery(query).close()); + var error = expectThrows(VerificationException.class, () -> runQuery(query, randomBoolean()).close()); assertThat( error.getMessage(), containsString("ENRICH with remote policy can't be executed after another ENRICH with coordinator policy") ); } - protected EsqlQueryResponse runQuery(String query) { + protected EsqlQueryResponse runQuery(String query, Boolean ccsMetadataInResponse) { EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); request.query(query); request.pragmas(AbstractEsqlIntegTestCase.randomPragmas()); if (randomBoolean()) { request.profile(true); } + if (ccsMetadataInResponse != null) { + request.includeCCSMetadata(ccsMetadataInResponse); + } return 
client(LOCAL_CLUSTER).execute(EsqlQueryAction.INSTANCE, request).actionGet(30, TimeUnit.SECONDS); } @@ -516,6 +552,15 @@ private static void assertCCSExecutionInfoDetails(EsqlExecutionInfo executionInf } } + public static Tuple randomIncludeCCSMetadata() { + return switch (randomIntBetween(1, 3)) { + case 1 -> new Tuple<>(Boolean.TRUE, Boolean.TRUE); + case 2 -> new Tuple<>(Boolean.FALSE, Boolean.FALSE); + case 3 -> new Tuple<>(null, Boolean.FALSE); + default -> throw new AssertionError("should not get here"); + }; + } + public static class LocalStateEnrich extends LocalStateCompositeXPackPlugin { public LocalStateEnrich(final Settings settings, final Path configPath) throws Exception { diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java index 4f4f3d112247e..adfa2fc7273cd 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java @@ -10,7 +10,6 @@ import org.elasticsearch.Build; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Priority; @@ -21,13 +20,16 @@ import org.elasticsearch.compute.operator.DriverProfile; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.Tuple; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.AbstractMultiClustersTestCase; import org.elasticsearch.test.InternalTestCluster; +import 
org.elasticsearch.test.XContentTestUtils; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; +import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; @@ -79,12 +81,15 @@ public List> getSettings() { } } - public void testSimple() { + public void testSuccessfulPathways() { Map testClusterInfo = setupTwoClusters(); int localNumShards = (Integer) testClusterInfo.get("local.num_shards"); int remoteNumShards = (Integer) testClusterInfo.get("remote.num_shards"); - try (EsqlQueryResponse resp = runQuery("from logs-*,*:logs-* | stats sum (v)")) { + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + boolean responseExpectMeta = includeCCSMetadata.v2(); + try (EsqlQueryResponse resp = runQuery("from logs-*,*:logs-* | stats sum (v)", requestIncludeMeta)) { List> values = getValuesList(resp); assertThat(values, hasSize(1)); assertThat(values.get(0), equalTo(List.of(330L))); @@ -93,6 +98,7 @@ public void testSimple() { assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); @@ -113,9 +119,12 @@ public void testSimple() { assertThat(localCluster.getSuccessfulShards(), equalTo(localNumShards)); assertThat(localCluster.getSkippedShards(), equalTo(0)); assertThat(localCluster.getFailedShards(), equalTo(0)); + + // ensure that the _clusters metadata is present only if requested + assertClusterMetadataInResponse(resp, responseExpectMeta); } - try (EsqlQueryResponse resp = runQuery("from logs-*,*:logs-* | stats count(*) by tag | sort tag | keep tag")) { + try (EsqlQueryResponse resp 
= runQuery("from logs-*,*:logs-* | stats count(*) by tag | sort tag | keep tag", requestIncludeMeta)) { List> values = getValuesList(resp); assertThat(values, hasSize(2)); assertThat(values.get(0), equalTo(List.of("local"))); @@ -125,6 +134,7 @@ public void testSimple() { assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); @@ -145,6 +155,9 @@ public void testSimple() { assertThat(localCluster.getSuccessfulShards(), equalTo(localNumShards)); assertThat(localCluster.getSkippedShards(), equalTo(0)); assertThat(localCluster.getFailedShards(), equalTo(0)); + + // ensure that the _clusters metadata is present only if requested + assertClusterMetadataInResponse(resp, responseExpectMeta); } } @@ -153,9 +166,13 @@ public void testSearchesWhereMissingIndicesAreSpecified() { int localNumShards = (Integer) testClusterInfo.get("local.num_shards"); int remoteNumShards = (Integer) testClusterInfo.get("remote.num_shards"); + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + boolean responseExpectMeta = includeCCSMetadata.v2(); + // since a valid local index was specified, the invalid index on cluster-a does not throw an exception, // but instead is simply ignored - ensure this is captured in the EsqlExecutionInfo - try (EsqlQueryResponse resp = runQuery("from logs-*,cluster-a:no_such_index | stats sum (v)")) { + try (EsqlQueryResponse resp = runQuery("from logs-*,cluster-a:no_such_index | stats sum (v)", requestIncludeMeta)) { EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); List> values = getValuesList(resp); assertThat(values, hasSize(1)); @@ -164,6 +181,7 @@ public void testSearchesWhereMissingIndicesAreSpecified() { 
assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); @@ -188,7 +206,12 @@ public void testSearchesWhereMissingIndicesAreSpecified() { // since the remote cluster has a valid index expression, the missing local index is ignored // make this is captured in the EsqlExecutionInfo - try (EsqlQueryResponse resp = runQuery("from no_such_index,*:logs-* | stats count(*) by tag | sort tag | keep tag")) { + try ( + EsqlQueryResponse resp = runQuery( + "from no_such_index,*:logs-* | stats count(*) by tag | sort tag | keep tag", + requestIncludeMeta + ) + ) { List> values = getValuesList(resp); assertThat(values, hasSize(1)); assertThat(values.get(0), equalTo(List.of("remote"))); @@ -197,6 +220,7 @@ public void testSearchesWhereMissingIndicesAreSpecified() { assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); @@ -223,7 +247,8 @@ public void testSearchesWhereMissingIndicesAreSpecified() { // in the index expression of the EsqlExecutionInfo and with an indication that zero shards were searched try ( EsqlQueryResponse resp = runQuery( - "FROM no_such_index*,*:no_such_index1,*:no_such_index2,logs-1 | STATS COUNT(*) by tag | SORT tag | KEEP tag" + "FROM no_such_index*,*:no_such_index1,*:no_such_index2,logs-1 | STATS COUNT(*) by tag | SORT tag | KEEP tag", + requestIncludeMeta ) ) { List> values = getValuesList(resp); @@ -234,6 +259,7 @@ public void testSearchesWhereMissingIndicesAreSpecified() { assertNotNull(executionInfo); 
assertThat(executionInfo.isCrossClusterSearch(), is(true)); assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); @@ -257,7 +283,7 @@ public void testSearchesWhereMissingIndicesAreSpecified() { } // wildcard on remote cluster that matches nothing - should be present in EsqlExecutionInfo marked as SKIPPED, no shards searched - try (EsqlQueryResponse resp = runQuery("from cluster-a:no_such_index*,logs-* | stats sum (v)")) { + try (EsqlQueryResponse resp = runQuery("from cluster-a:no_such_index*,logs-* | stats sum (v)", requestIncludeMeta)) { EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); List> values = getValuesList(resp); assertThat(values, hasSize(1)); @@ -266,6 +292,7 @@ public void testSearchesWhereMissingIndicesAreSpecified() { assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); @@ -293,8 +320,12 @@ public void testSearchesWhereNonExistentClusterIsSpecifiedWithWildcards() { Map testClusterInfo = setupTwoClusters(); int localNumShards = (Integer) testClusterInfo.get("local.num_shards"); + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + boolean responseExpectMeta = includeCCSMetadata.v2(); + // a query which matches no remote cluster is not a cross cluster search - try (EsqlQueryResponse resp = runQuery("from logs-*,x*:no_such_index* | stats sum (v)")) { + try (EsqlQueryResponse resp = runQuery("from logs-*,x*:no_such_index* | stats sum (v)", requestIncludeMeta)) { EsqlExecutionInfo executionInfo = 
resp.getExecutionInfo(); List> values = getValuesList(resp); assertThat(values, hasSize(1)); @@ -303,12 +334,18 @@ public void testSearchesWhereNonExistentClusterIsSpecifiedWithWildcards() { assertNotNull(executionInfo); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(LOCAL_CLUSTER))); assertThat(executionInfo.isCrossClusterSearch(), is(false)); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); // since this not a CCS, only the overall took time in the EsqlExecutionInfo matters assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); } // cluster-foo* matches nothing and so should not be present in the EsqlExecutionInfo - try (EsqlQueryResponse resp = runQuery("from logs-*,no_such_index*,cluster-a:no_such_index*,cluster-foo*:* | stats sum (v)")) { + try ( + EsqlQueryResponse resp = runQuery( + "from logs-*,no_such_index*,cluster-a:no_such_index*,cluster-foo*:* | stats sum (v)", + requestIncludeMeta + ) + ) { EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); List> values = getValuesList(resp); assertThat(values, hasSize(1)); @@ -317,6 +354,7 @@ public void testSearchesWhereNonExistentClusterIsSpecifiedWithWildcards() { assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); @@ -349,8 +387,12 @@ public void testSearchesWhereNonExistentClusterIsSpecifiedWithWildcards() { public void testCCSExecutionOnSearchesWithLimit0() { setupTwoClusters(); + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + boolean responseExpectMeta = includeCCSMetadata.v2(); + // Ensure non-cross cluster queries have overall took time - try (EsqlQueryResponse resp = runQuery("FROM logs* | 
LIMIT 0")) { + try (EsqlQueryResponse resp = runQuery("FROM logs* | LIMIT 0", requestIncludeMeta)) { EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(false)); @@ -358,12 +400,13 @@ public void testCCSExecutionOnSearchesWithLimit0() { } // ensure cross-cluster searches have overall took time and correct per-cluster details in EsqlExecutionInfo - try (EsqlQueryResponse resp = runQuery("FROM logs*,cluster-a:* | LIMIT 0")) { + try (EsqlQueryResponse resp = runQuery("FROM logs*,cluster-a:* | LIMIT 0", requestIncludeMeta)) { EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); long overallTookMillis = executionInfo.overallTook().millis(); assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER); @@ -387,12 +430,13 @@ public void testCCSExecutionOnSearchesWithLimit0() { assertNull(localCluster.getFailedShards()); } - try (EsqlQueryResponse resp = runQuery("FROM logs*,cluster-a:nomatch* | LIMIT 0")) { + try (EsqlQueryResponse resp = runQuery("FROM logs*,cluster-a:nomatch* | LIMIT 0", requestIncludeMeta)) { EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); long overallTookMillis = executionInfo.overallTook().millis(); assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER); @@ -415,12 +459,13 
@@ public void testCCSExecutionOnSearchesWithLimit0() { assertNull(localCluster.getFailedShards()); } - try (EsqlQueryResponse resp = runQuery("FROM nomatch*,cluster-a:* | LIMIT 0")) { + try (EsqlQueryResponse resp = runQuery("FROM nomatch*,cluster-a:* | LIMIT 0", requestIncludeMeta)) { EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); long overallTookMillis = executionInfo.overallTook().millis(); assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER); @@ -447,7 +492,16 @@ public void testMetadataIndex() { int localNumShards = (Integer) testClusterInfo.get("local.num_shards"); int remoteNumShards = (Integer) testClusterInfo.get("remote.num_shards"); - try (EsqlQueryResponse resp = runQuery("FROM logs*,*:logs* METADATA _index | stats sum(v) by _index | sort _index")) { + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + boolean responseExpectMeta = includeCCSMetadata.v2(); + + try ( + EsqlQueryResponse resp = runQuery( + "FROM logs*,*:logs* METADATA _index | stats sum(v) by _index | sort _index", + requestIncludeMeta + ) + ) { List> values = getValuesList(resp); assertThat(values.get(0), equalTo(List.of(285L, "cluster-a:logs-2"))); assertThat(values.get(1), equalTo(List.of(45L, "logs-1"))); @@ -455,6 +509,7 @@ public void testMetadataIndex() { EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); 
EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER); @@ -477,18 +532,6 @@ public void testMetadataIndex() { } } - void waitForNoInitializingShards(Client client, TimeValue timeout, String... indices) { - ClusterHealthResponse resp = client.admin() - .cluster() - .prepareHealth(TEST_REQUEST_TIMEOUT, indices) - .setWaitForEvents(Priority.LANGUID) - .setWaitForNoRelocatingShards(true) - .setWaitForNoInitializingShards(true) - .setTimeout(timeout) - .get(); - assertFalse(Strings.toString(resp, true, true), resp.isTimedOut()); - } - public void testProfile() { Map testClusterInfo = setupTwoClusters(); int localNumShards = (Integer) testClusterInfo.get("local.num_shards"); @@ -529,6 +572,7 @@ public void testProfile() { EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER); assertNull(remoteCluster); assertThat(executionInfo.isCrossClusterSearch(), is(false)); + assertThat(executionInfo.includeCCSMetadata(), is(false)); // since this not a CCS, only the overall took time in the EsqlExecutionInfo matters assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); } @@ -550,6 +594,7 @@ public void testProfile() { EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); + assertThat(executionInfo.includeCCSMetadata(), is(false)); assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER); @@ -582,6 +627,7 @@ public void testProfile() { EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); + assertThat(executionInfo.includeCCSMetadata(), is(false)); assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER); @@ -608,14 
+654,11 @@ public void testProfile() { public void testWarnings() throws Exception { Map testClusterInfo = setupTwoClusters(); - String localIndex = (String) testClusterInfo.get("local.index"); - String remoteIndex = (String) testClusterInfo.get("remote.index"); int localNumShards = (Integer) testClusterInfo.get("local.num_shards"); int remoteNumShards = (Integer) testClusterInfo.get("remote.num_shards"); EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); request.query("FROM logs*,*:logs* | EVAL ip = to_ip(id) | STATS total = sum(v) by ip | LIMIT 10"); - PlainActionFuture future = new PlainActionFuture<>(); InternalTestCluster cluster = cluster(LOCAL_CLUSTER); String node = randomFrom(cluster.getNodeNames()); CountDownLatch latch = new CountDownLatch(1); @@ -634,6 +677,7 @@ public void testWarnings() throws Exception { EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); + assertThat(executionInfo.includeCCSMetadata(), is(false)); assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER); @@ -662,11 +706,34 @@ public void testWarnings() throws Exception { assertTrue(latch.await(30, TimeUnit.SECONDS)); } - protected EsqlQueryResponse runQuery(String query) { + private static void assertClusterMetadataInResponse(EsqlQueryResponse resp, boolean responseExpectMeta) { + try { + final Map esqlResponseAsMap = XContentTestUtils.convertToMap(resp); + final Object clusters = esqlResponseAsMap.get("_clusters"); + if (responseExpectMeta) { + assertNotNull(clusters); + // test a few entries to ensure it looks correct (other tests do a full analysis of the metadata in the response) + @SuppressWarnings("unchecked") + Map inner = (Map) clusters; + assertTrue(inner.containsKey("total")); + assertTrue(inner.containsKey("details")); + } else { + assertNull(clusters); + } + 
} catch (IOException e) { + fail("Could not convert ESQL response to Map: " + e); + } + } + + protected EsqlQueryResponse runQuery(String query, Boolean ccsMetadataInResponse) { EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); request.query(query); request.pragmas(AbstractEsqlIntegTestCase.randomPragmas()); - request.profile(true); + request.profile(randomInt(5) == 2); + request.columnar(randomBoolean()); + if (ccsMetadataInResponse != null) { + request.includeCCSMetadata(ccsMetadataInResponse); + } return runQuery(request); } @@ -674,6 +741,32 @@ protected EsqlQueryResponse runQuery(EsqlQueryRequest request) { return client(LOCAL_CLUSTER).execute(EsqlQueryAction.INSTANCE, request).actionGet(30, TimeUnit.SECONDS); } + /** + * v1: value to send to runQuery (can be null; null means use default value) + * v2: whether to expect CCS Metadata in the response (cannot be null) + * @return + */ + public static Tuple randomIncludeCCSMetadata() { + return switch (randomIntBetween(1, 3)) { + case 1 -> new Tuple<>(Boolean.TRUE, Boolean.TRUE); + case 2 -> new Tuple<>(Boolean.FALSE, Boolean.FALSE); + case 3 -> new Tuple<>(null, Boolean.FALSE); + default -> throw new AssertionError("should not get here"); + }; + } + + void waitForNoInitializingShards(Client client, TimeValue timeout, String... 
indices) { + ClusterHealthResponse resp = client.admin() + .cluster() + .prepareHealth(TEST_REQUEST_TIMEOUT, indices) + .setWaitForEvents(Priority.LANGUID) + .setWaitForNoRelocatingShards(true) + .setWaitForNoInitializingShards(true) + .setTimeout(timeout) + .get(); + assertFalse(Strings.toString(resp, true, true), resp.isTimedOut()); + } + Map setupTwoClusters() { String localIndex = "logs-1"; int numShardsLocal = randomIntBetween(1, 5); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java index f7966ff5ae9ec..dabccd4ffeb17 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.action; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -63,24 +64,29 @@ public class EsqlExecutionInfo implements ChunkedToXContentObject, Writeable { private final transient Predicate skipUnavailablePredicate; private TimeValue overallTook; - public EsqlExecutionInfo() { - this(Predicates.always()); // default all clusters to skip_unavailable=true + // whether the user has asked for CCS metadata to be in the JSON response (the overall took will always be present) + private final boolean includeCCSMetadata; + + public EsqlExecutionInfo(boolean includeCCSMetadata) { + this(Predicates.always(), includeCCSMetadata); // default all clusters to skip_unavailable=true } /** * @param skipUnavailablePredicate provide lookup for whether a given cluster has skip_unavailable set to true or false */ - public EsqlExecutionInfo(Predicate skipUnavailablePredicate) { + public EsqlExecutionInfo(Predicate 
skipUnavailablePredicate, boolean includeCCSMetadata) { this.clusterInfo = ConcurrentCollections.newConcurrentMap(); this.skipUnavailablePredicate = skipUnavailablePredicate; + this.includeCCSMetadata = includeCCSMetadata; } /** * For testing use with fromXContent parsing only * @param clusterInfo */ - EsqlExecutionInfo(ConcurrentMap clusterInfo) { + EsqlExecutionInfo(ConcurrentMap clusterInfo, boolean includeCCSMetadata) { this.clusterInfo = clusterInfo; + this.includeCCSMetadata = includeCCSMetadata; this.skipUnavailablePredicate = Predicates.always(); } @@ -94,6 +100,11 @@ public EsqlExecutionInfo(StreamInput in) throws IOException { clusterList.forEach(c -> m.put(c.getClusterAlias(), c)); this.clusterInfo = m; } + if (in.getTransportVersion().onOrAfter(TransportVersions.OPT_IN_ESQL_CCS_EXECUTION_INFO)) { + this.includeCCSMetadata = in.readBoolean(); + } else { + this.includeCCSMetadata = false; + } this.skipUnavailablePredicate = Predicates.always(); } @@ -105,6 +116,13 @@ public void writeTo(StreamOutput out) throws IOException { } else { out.writeCollection(Collections.emptyList()); } + if (out.getTransportVersion().onOrAfter(TransportVersions.OPT_IN_ESQL_CCS_EXECUTION_INFO)) { + out.writeBoolean(includeCCSMetadata); + } + } + + public boolean includeCCSMetadata() { + return includeCCSMetadata; } public void overallTook(TimeValue took) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java index 4ab310863c61d..239f9e2696f88 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java @@ -42,6 +42,7 @@ public class EsqlQueryRequest extends org.elasticsearch.xpack.core.esql.action.E private String query; private boolean columnar; private boolean profile; + private boolean 
includeCCSMetadata; private Locale locale; private QueryBuilder filter; private QueryPragmas pragmas = new QueryPragmas(Settings.EMPTY); @@ -128,6 +129,14 @@ public void profile(boolean profile) { this.profile = profile; } + public void includeCCSMetadata(boolean include) { + this.includeCCSMetadata = include; + } + + public boolean includeCCSMetadata() { + return includeCCSMetadata; + } + /** * Is profiling enabled? */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java index 3232f3a9118d4..4e59d5419fe6f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java @@ -206,7 +206,7 @@ public Iterator toXContentChunked(ToXContent.Params params b.append(ResponseXContentUtils.allColumns(columns, "columns")); } b.array("values", ResponseXContentUtils.columnValues(this.columns, this.pages, columnar, nullColumns)); - if (executionInfo != null && executionInfo.isCrossClusterSearch()) { + if (executionInfo != null && executionInfo.isCrossClusterSearch() && executionInfo.includeCCSMetadata()) { b.field("_clusters", executionInfo); } if (profile != null) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java index b930fa5823404..7224aa049093d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java @@ -80,6 +80,7 @@ String fields() { private static final ParseField LOCALE_FIELD = new ParseField("locale"); private static final ParseField PROFILE_FIELD = new ParseField("profile"); private static final ParseField 
ACCEPT_PRAGMA_RISKS = new ParseField("accept_pragma_risks"); + private static final ParseField INCLUDE_CCS_METADATA_FIELD = new ParseField("include_ccs_metadata"); static final ParseField TABLES_FIELD = new ParseField("tables"); static final ParseField WAIT_FOR_COMPLETION_TIMEOUT = new ParseField("wait_for_completion_timeout"); @@ -117,6 +118,7 @@ private static void objectParserCommon(ObjectParser parser) parser.declareBoolean(EsqlQueryRequest::columnar, COLUMNAR_FIELD); parser.declareObject(EsqlQueryRequest::filter, (p, c) -> AbstractQueryBuilder.parseTopLevelQuery(p), FILTER_FIELD); parser.declareBoolean(EsqlQueryRequest::acceptedPragmaRisks, ACCEPT_PRAGMA_RISKS); + parser.declareBoolean(EsqlQueryRequest::includeCCSMetadata, INCLUDE_CCS_METADATA_FIELD); parser.declareObject( EsqlQueryRequest::pragmas, (p, c) -> new QueryPragmas(Settings.builder().loadFromMap(p.map()).build()), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index ce2a1d7a5f660..f714695504a1d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -834,7 +834,7 @@ public void messageReceived(ClusterComputeRequest request, TransportChannel chan * execution metadata for ES|QL processing local to this cluster. The execution info will be copied into the * ComputeResponse that is sent back to the primary coordinating cluster. 
*/ - EsqlExecutionInfo execInfo = new EsqlExecutionInfo(); + EsqlExecutionInfo execInfo = new EsqlExecutionInfo(true); execInfo.swapCluster(clusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(clusterAlias, Arrays.toString(request.indices()))); CancellableTask cancellable = (CancellableTask) task; long start = request.configuration().getQueryStartTimeNanos(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlMediaTypeParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlMediaTypeParser.java index 915efe9302a92..17329ca2e0054 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlMediaTypeParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlMediaTypeParser.java @@ -37,10 +37,15 @@ public class EsqlMediaTypeParser { * format. If there is a {@code format} parameter we use that. If there * isn't but there is a {@code Accept} header then we use that. If there * isn't then we use the {@code Content-Type} header which is required. + * + * Also validates certain parameter combinations and throws IllegalArgumentException if invalid + * combinations are detected. */ public static MediaType getResponseMediaType(RestRequest request, EsqlQueryRequest esqlRequest) { var mediaType = request.hasParam(URL_PARAM_FORMAT) ? 
mediaTypeFromParams(request) : mediaTypeFromHeaders(request); - return validateColumnarRequest(esqlRequest.columnar(), mediaType, request); + validateColumnarRequest(esqlRequest.columnar(), mediaType); + validateIncludeCCSMetadata(esqlRequest.includeCCSMetadata(), mediaType); + return checkNonNullMediaType(mediaType, request); } private static MediaType mediaTypeFromHeaders(RestRequest request) { @@ -53,7 +58,7 @@ private static MediaType mediaTypeFromParams(RestRequest request) { return MEDIA_TYPE_REGISTRY.queryParamToMediaType(request.param(URL_PARAM_FORMAT)); } - private static MediaType validateColumnarRequest(boolean requestIsColumnar, MediaType fromMediaType, RestRequest request) { + private static void validateColumnarRequest(boolean requestIsColumnar, MediaType fromMediaType) { if (requestIsColumnar && fromMediaType instanceof TextFormat) { throw new IllegalArgumentException( "Invalid use of [columnar] argument: cannot be used in combination with " @@ -61,7 +66,16 @@ private static MediaType validateColumnarRequest(boolean requestIsColumnar, Medi + " formats" ); } - return checkNonNullMediaType(fromMediaType, request); + } + + private static void validateIncludeCCSMetadata(boolean includeCCSMetadata, MediaType fromMediaType) { + if (includeCCSMetadata && fromMediaType instanceof TextFormat) { + throw new IllegalArgumentException( + "Invalid use of [include_ccs_metadata] argument: cannot be used in combination with " + + Arrays.stream(TextFormat.values()).map(MediaType::queryParameter).toList() + + " formats" + ); + } } private static MediaType checkNonNullMediaType(MediaType mediaType, RestRequest request) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java index 17c795f2de28c..193930cdf711d 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java @@ -167,7 +167,10 @@ private void innerExecute(Task task, EsqlQueryRequest request, ActionListener remoteClusterService.isSkipUnavailable(clusterAlias)); + EsqlExecutionInfo executionInfo = new EsqlExecutionInfo( + clusterAlias -> remoteClusterService.isSkipUnavailable(clusterAlias), + request.includeCCSMetadata() + ); BiConsumer> runPhase = (physicalPlan, resultListener) -> computeService.execute( sessionId, (CancellableTask) task, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 3eef31e1cc406..965358c0c3f8c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -433,7 +433,7 @@ private ActualResults executePlan(BigArrays bigArrays) throws Exception { session.executeOptimizedPlan( new EsqlQueryRequest(), - new EsqlExecutionInfo(), + new EsqlExecutionInfo(randomBoolean()), runPhase(bigArrays, physicalOperationProviders), session.optimizedPlan(analyzed), listener.delegateFailureAndWrap( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index abf03d4fe06dd..b147cfde21721 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -134,7 +134,7 @@ EsqlQueryResponse randomResponseAsync(boolean columnar, EsqlQueryResponse.Profil } EsqlExecutionInfo createExecutionInfo() { - EsqlExecutionInfo executionInfo = new 
EsqlExecutionInfo(); + EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); executionInfo.overallTook(new TimeValue(5000)); executionInfo.swapCluster( "", @@ -426,9 +426,9 @@ static EsqlExecutionInfo parseClusters(XContentParser parser) throws IOException } } if (clusterInfoMap.isEmpty()) { - return new EsqlExecutionInfo(); + return new EsqlExecutionInfo(true); } else { - return new EsqlExecutionInfo(clusterInfoMap); + return new EsqlExecutionInfo(clusterInfoMap, true); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java index c145d770409da..e735ba83168bb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java @@ -82,7 +82,7 @@ public class TextFormatterTests extends ESTestCase { null, randomBoolean(), randomBoolean(), - new EsqlExecutionInfo() + new EsqlExecutionInfo(randomBoolean()) ); TextFormatter formatter = new TextFormatter(esqlResponse); @@ -157,7 +157,7 @@ public void testFormatWithoutHeader() { null, randomBoolean(), randomBoolean(), - new EsqlExecutionInfo() + new EsqlExecutionInfo(randomBoolean()) ); String[] result = getTextBodyContent(new TextFormatter(response).format(false)).split("\n"); @@ -198,7 +198,7 @@ public void testVeryLongPadding() { null, randomBoolean(), randomBoolean(), - new EsqlExecutionInfo() + new EsqlExecutionInfo(randomBoolean()) ) ).format(false) ) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java index da11a790e6f2f..8cfcb605a19d5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java @@ -125,7 +125,7 @@ private ComputeResponse randomResponse(boolean includeExecutionInfo) { public void testEmpty() { PlainActionFuture results = new PlainActionFuture<>(); - EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(); + EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(randomBoolean()); try ( ComputeListener ignored = ComputeListener.create( RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, @@ -145,7 +145,7 @@ public void testEmpty() { public void testCollectComputeResults() { PlainActionFuture future = new PlainActionFuture<>(); List allProfiles = new ArrayList<>(); - EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(); + EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(randomBoolean()); try ( ComputeListener computeListener = ComputeListener.create( RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, @@ -194,7 +194,7 @@ public void testAcquireComputeCCSListener() { PlainActionFuture future = new PlainActionFuture<>(); List allProfiles = new ArrayList<>(); String remoteAlias = "rc1"; - EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(); + EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); executionInfo.swapCluster(remoteAlias, (k, v) -> new EsqlExecutionInfo.Cluster(remoteAlias, "logs*", false)); try ( ComputeListener computeListener = ComputeListener.create( @@ -248,7 +248,7 @@ public void testAcquireComputeCCSListener() { public void testAcquireComputeRunningOnRemoteClusterFillsInTookTime() { PlainActionFuture future = new PlainActionFuture<>(); List allProfiles = new ArrayList<>(); - EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(); + EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); String remoteAlias = "rc1"; executionInfo.swapCluster( remoteAlias, @@ -318,7 +318,7 @@ public void testAcquireComputeRunningOnRemoteClusterFillsInTookTime() { public void 
testAcquireComputeRunningOnQueryingClusterFillsInTookTime() { PlainActionFuture future = new PlainActionFuture<>(); List allProfiles = new ArrayList<>(); - EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(); + EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); String localCluster = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; // we need a remote cluster in the ExecutionInfo in order to simulate a CCS, since ExecutionInfo is only // fully filled in for cross-cluster searches @@ -372,7 +372,7 @@ public void testCancelOnFailure() throws Exception { int failedTasks = between(1, 100); PlainActionFuture rootListener = new PlainActionFuture<>(); CancellableTask rootTask = newTask(); - EsqlExecutionInfo execInfo = new EsqlExecutionInfo(); + EsqlExecutionInfo execInfo = new EsqlExecutionInfo(randomBoolean()); try ( ComputeListener computeListener = ComputeListener.create( RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, @@ -436,7 +436,7 @@ public void onFailure(Exception e) { } }; CountDownLatch latch = new CountDownLatch(1); - EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(); + EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(randomBoolean()); try ( ComputeListener computeListener = ComputeListener.create( RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/EsqlMediaTypeParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/EsqlMediaTypeParserTests.java index 789d6e5adbfc7..4b9166c621940 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/EsqlMediaTypeParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/EsqlMediaTypeParserTests.java @@ -80,6 +80,18 @@ public void testColumnarWithAcceptText() { assertEquals(e.getMessage(), "Invalid use of [columnar] argument: cannot be used in combination with [txt, csv, tsv] formats"); } + public void 
testIncludeCCSMetadataWithAcceptText() { + var accept = randomFrom("text/plain", "text/csv", "text/tab-separated-values"); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> getResponseMediaType(reqWithAccept(accept), createTestInstance(false, true)) + ); + assertEquals( + "Invalid use of [include_ccs_metadata] argument: cannot be used in combination with [txt, csv, tsv] formats", + e.getMessage() + ); + } + public void testColumnarWithParamText() { IllegalArgumentException e = expectThrows( IllegalArgumentException.class, @@ -88,6 +100,26 @@ public void testColumnarWithParamText() { assertEquals(e.getMessage(), "Invalid use of [columnar] argument: cannot be used in combination with [txt, csv, tsv] formats"); } + public void testIncludeCCSMetadataWithNonJSONMediaTypesInParams() { + { + RestRequest restRequest = reqWithParams(Map.of("format", randomFrom("txt", "csv", "tsv"))); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> getResponseMediaType(restRequest, createTestInstance(false, true)) + ); + assertEquals( + "Invalid use of [include_ccs_metadata] argument: cannot be used in combination with [txt, csv, tsv] formats", + e.getMessage() + ); + } + { + // check that no exception is thrown for the XContent types + RestRequest restRequest = reqWithParams(Map.of("format", randomFrom("SMILE", "YAML", "CBOR", "JSON"))); + MediaType responseMediaType = getResponseMediaType(restRequest, createTestInstance(true, true)); + assertNotNull(responseMediaType); + } + } + public void testNoFormat() { IllegalArgumentException e = expectThrows( IllegalArgumentException.class, @@ -113,4 +145,10 @@ protected EsqlQueryRequest createTestInstance(boolean columnar) { request.columnar(columnar); return request; } + + protected EsqlQueryRequest createTestInstance(boolean columnar, boolean includeCCSMetadata) { + var request = createTestInstance(columnar); + request.includeCCSMetadata(includeCCSMetadata); + return 
request; + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionTests.java index 326756ad0b5f4..7e93213fcee21 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionTests.java @@ -30,7 +30,7 @@ public void testUpdateExecutionInfoWithUnavailableClusters() { final String localClusterAlias = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; final String remote1Alias = "remote1"; final String remote2Alias = "remote2"; - EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(); + EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); executionInfo.swapCluster(localClusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(localClusterAlias, "logs*", false)); executionInfo.swapCluster(remote1Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote1Alias, "*", true)); executionInfo.swapCluster(remote2Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote2Alias, "mylogs1,mylogs2,logs*", true)); @@ -59,7 +59,7 @@ public void testUpdateExecutionInfoWithUnavailableClusters() { final String localClusterAlias = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; final String remote1Alias = "remote1"; final String remote2Alias = "remote2"; - EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(); + EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); executionInfo.swapCluster(localClusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(localClusterAlias, "logs*", false)); executionInfo.swapCluster(remote1Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote1Alias, "*", true)); executionInfo.swapCluster(remote2Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote2Alias, "mylogs1,mylogs2,logs*", false)); @@ -87,7 +87,7 @@ public void testUpdateExecutionInfoWithUnavailableClusters() { final String 
localClusterAlias = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; final String remote1Alias = "remote1"; final String remote2Alias = "remote2"; - EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(); + EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); executionInfo.swapCluster(localClusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(localClusterAlias, "logs*", false)); executionInfo.swapCluster(remote1Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote1Alias, "*", true)); executionInfo.swapCluster(remote2Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote2Alias, "mylogs1,mylogs2,logs*", false)); @@ -117,7 +117,7 @@ public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { final String localClusterAlias = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; final String remote1Alias = "remote1"; final String remote2Alias = "remote2"; - EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(); + EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); executionInfo.swapCluster(localClusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(localClusterAlias, "logs*", false)); executionInfo.swapCluster(remote1Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote1Alias, "*", true)); executionInfo.swapCluster(remote2Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote2Alias, "mylogs1,mylogs2,logs*", false)); @@ -160,7 +160,7 @@ public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { final String localClusterAlias = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; final String remote1Alias = "remote1"; final String remote2Alias = "remote2"; - EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(); + EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); executionInfo.swapCluster(localClusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(localClusterAlias, "logs*", false)); executionInfo.swapCluster(remote1Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote1Alias, "*", true)); executionInfo.swapCluster(remote2Alias, 
(k, v) -> new EsqlExecutionInfo.Cluster(remote2Alias, "mylogs1,mylogs2,logs*", false)); @@ -206,7 +206,7 @@ public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { final String localClusterAlias = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; final String remote1Alias = "remote1"; final String remote2Alias = "remote2"; - EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(); + EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); executionInfo.swapCluster(localClusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(localClusterAlias, "logs*", false)); executionInfo.swapCluster(remote1Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote1Alias, "*", true)); executionInfo.swapCluster(remote2Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote2Alias, "mylogs1,mylogs2,logs*", false)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java index adc449bfc092e..9edc85223e7b3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java @@ -120,7 +120,7 @@ public void testFailedMetric() { randomAlphaOfLength(10), EsqlTestUtils.TEST_CFG, enrichResolver, - new EsqlExecutionInfo(), + new EsqlExecutionInfo(randomBoolean()), groupIndicesByCluster, runPhase, new ActionListener<>() { @@ -149,7 +149,7 @@ public void onFailure(Exception e) { randomAlphaOfLength(10), EsqlTestUtils.TEST_CFG, enrichResolver, - new EsqlExecutionInfo(), + new EsqlExecutionInfo(randomBoolean()), groupIndicesByCluster, runPhase, new ActionListener<>() { From e304c1d5c1dfd20c9b7ea4da3bf0560c0c82c1e9 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 11 Oct 2024 15:13:11 -0400 Subject: [PATCH 016/449] ESQL: Speed up grouping by bytes (#114021) MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This speeds up grouping by bytes valued fields (keyword, text, ip, and wildcard) when the input is an ordinal block: ``` bytes_refs 22.213 ± 0.322 -> 19.848 ± 0.205 ns/op (*maybe* real, maybe noise. still good) ordinal didn't exist -> 2.988 ± 0.011 ns/op ``` I see this as 20ns -> 3ns, an 85% speed up. We never hard the ordinals branch before so I'm expecting the same performance there - about 20ns per op. This also speeds up grouping by a pair of byte valued fields: ``` two_bytes_refs 83.112 ± 42.348 -> 46.521 ± 0.386 ns/op two_ordinals 83.531 ± 23.473 -> 8.617 ± 0.105 ns/op ``` The speed up is much better when the fields are ordinals because hashing bytes is comparatively slow. I believe the ordinals case is quite common. I've run into it in quite a few profiles. --- .../compute/operator/AggregatorBenchmark.java | 49 ++++- docs/changelog/114021.yaml | 5 + .../blockhash/BytesRefBlockHash.java | 40 +++- .../blockhash/DoubleBlockHash.java | 2 + .../aggregation/blockhash/IntBlockHash.java | 2 + .../aggregation/blockhash/LongBlockHash.java | 2 + .../aggregation/blockhash/BlockHash.java | 51 ++++- .../blockhash/BooleanBlockHash.java | 5 +- .../blockhash/BytesRef2BlockHash.java | 196 ++++++++++++++++++ .../blockhash/BytesRef3BlockHash.java | 2 +- .../aggregation/blockhash/X-BlockHash.java.st | 42 +++- .../blockhash/BlockHashRandomizedTests.java | 171 +++++++++++---- .../aggregation/blockhash/BlockHashTests.java | 131 ++++++++++++ 13 files changed, 632 insertions(+), 66 deletions(-) create mode 100644 docs/changelog/114021.yaml create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRef2BlockHash.java diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java index 27f4d68b0bc3f..652defa7b39cd 100644 
--- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java @@ -30,10 +30,13 @@ import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.OrdinalBytesRefVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.AggregationOperator; import org.elasticsearch.compute.operator.DriverContext; @@ -78,7 +81,10 @@ public class AggregatorBenchmark { private static final String DOUBLES = "doubles"; private static final String BOOLEANS = "booleans"; private static final String BYTES_REFS = "bytes_refs"; + private static final String ORDINALS = "ordinals"; private static final String TWO_LONGS = "two_" + LONGS; + private static final String TWO_BYTES_REFS = "two_" + BYTES_REFS; + private static final String TWO_ORDINALS = "two_" + ORDINALS; private static final String LONGS_AND_BYTES_REFS = LONGS + "_and_" + BYTES_REFS; private static final String TWO_LONGS_AND_BYTES_REFS = "two_" + LONGS + "_and_" + BYTES_REFS; @@ -119,7 +125,21 @@ public class AggregatorBenchmark { } } - @Param({ NONE, LONGS, INTS, DOUBLES, BOOLEANS, BYTES_REFS, TWO_LONGS, LONGS_AND_BYTES_REFS, TWO_LONGS_AND_BYTES_REFS }) + @Param( + { + NONE, + LONGS, + INTS, + DOUBLES, + BOOLEANS, + BYTES_REFS, + ORDINALS, + TWO_LONGS, + TWO_BYTES_REFS, + TWO_ORDINALS, + LONGS_AND_BYTES_REFS, + TWO_LONGS_AND_BYTES_REFS } + ) public String grouping; @Param({ COUNT, COUNT_DISTINCT, MIN, MAX, SUM }) @@ -144,8 +164,12 
@@ private static Operator operator(DriverContext driverContext, String grouping, S case INTS -> List.of(new BlockHash.GroupSpec(0, ElementType.INT)); case DOUBLES -> List.of(new BlockHash.GroupSpec(0, ElementType.DOUBLE)); case BOOLEANS -> List.of(new BlockHash.GroupSpec(0, ElementType.BOOLEAN)); - case BYTES_REFS -> List.of(new BlockHash.GroupSpec(0, ElementType.BYTES_REF)); + case BYTES_REFS, ORDINALS -> List.of(new BlockHash.GroupSpec(0, ElementType.BYTES_REF)); case TWO_LONGS -> List.of(new BlockHash.GroupSpec(0, ElementType.LONG), new BlockHash.GroupSpec(1, ElementType.LONG)); + case TWO_BYTES_REFS, TWO_ORDINALS -> List.of( + new BlockHash.GroupSpec(0, ElementType.BYTES_REF), + new BlockHash.GroupSpec(1, ElementType.BYTES_REF) + ); case LONGS_AND_BYTES_REFS -> List.of( new BlockHash.GroupSpec(0, ElementType.LONG), new BlockHash.GroupSpec(1, ElementType.BYTES_REF) @@ -218,6 +242,10 @@ private static void checkGrouped(String prefix, String grouping, String op, Stri checkGroupingBlock(prefix, LONGS, page.getBlock(0)); checkGroupingBlock(prefix, LONGS, page.getBlock(1)); } + case TWO_BYTES_REFS, TWO_ORDINALS -> { + checkGroupingBlock(prefix, BYTES_REFS, page.getBlock(0)); + checkGroupingBlock(prefix, BYTES_REFS, page.getBlock(1)); + } case LONGS_AND_BYTES_REFS -> { checkGroupingBlock(prefix, LONGS, page.getBlock(0)); checkGroupingBlock(prefix, BYTES_REFS, page.getBlock(1)); @@ -379,7 +407,7 @@ private static void checkGroupingBlock(String prefix, String grouping, Block blo throw new AssertionError(prefix + "bad group expected [true] but was [" + groups.getBoolean(1) + "]"); } } - case BYTES_REFS -> { + case BYTES_REFS, ORDINALS -> { BytesRefBlock groups = (BytesRefBlock) block; for (int g = 0; g < GROUPS; g++) { if (false == groups.getBytesRef(g, new BytesRef()).equals(bytesGroup(g))) { @@ -508,6 +536,8 @@ private static Block dataBlock(BlockFactory blockFactory, String blockType) { private static List groupingBlocks(String grouping, String blockType) { return 
switch (grouping) { case TWO_LONGS -> List.of(groupingBlock(LONGS, blockType), groupingBlock(LONGS, blockType)); + case TWO_BYTES_REFS -> List.of(groupingBlock(BYTES_REFS, blockType), groupingBlock(BYTES_REFS, blockType)); + case TWO_ORDINALS -> List.of(groupingBlock(ORDINALS, blockType), groupingBlock(ORDINALS, blockType)); case LONGS_AND_BYTES_REFS -> List.of(groupingBlock(LONGS, blockType), groupingBlock(BYTES_REFS, blockType)); case TWO_LONGS_AND_BYTES_REFS -> List.of( groupingBlock(LONGS, blockType), @@ -570,6 +600,19 @@ private static Block groupingBlock(String grouping, String blockType) { } yield builder.build(); } + case ORDINALS -> { + IntVector.Builder ordinals = blockFactory.newIntVectorBuilder(BLOCK_LENGTH * valuesPerGroup); + for (int i = 0; i < BLOCK_LENGTH; i++) { + for (int v = 0; v < valuesPerGroup; v++) { + ordinals.appendInt(i % GROUPS); + } + } + BytesRefVector.Builder bytes = blockFactory.newBytesRefVectorBuilder(BLOCK_LENGTH * valuesPerGroup); + for (int i = 0; i < GROUPS; i++) { + bytes.appendBytesRef(bytesGroup(i)); + } + yield new OrdinalBytesRefVector(ordinals.build(), bytes.build()).asBlock(); + } default -> throw new UnsupportedOperationException("unsupported grouping [" + grouping + "]"); }; } diff --git a/docs/changelog/114021.yaml b/docs/changelog/114021.yaml new file mode 100644 index 0000000000000..e9dab5dce5685 --- /dev/null +++ b/docs/changelog/114021.yaml @@ -0,0 +1,5 @@ +pr: 114021 +summary: "ESQL: Speed up grouping by bytes" +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java index 7fcb412de5e2b..3c5bf2c18c915 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java +++ 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java @@ -23,15 +23,18 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.OrdinalBytesRefBlock; +import org.elasticsearch.compute.data.OrdinalBytesRefVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.mvdedupe.MultivalueDedupe; import org.elasticsearch.compute.operator.mvdedupe.MultivalueDedupeBytesRef; +import org.elasticsearch.compute.operator.mvdedupe.MultivalueDedupeInt; import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; /** * Maps a {@link BytesRefBlock} column to group ids. + * This class is generated. Do not edit it. */ final class BytesRefBlockHash extends BlockHash { private final int channel; @@ -54,6 +57,7 @@ final class BytesRefBlockHash extends BlockHash { @Override public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { + // TODO track raw counts and which implementation we pick for the profiler - #114008 var block = page.getBlock(channel); if (block.areAllValuesNull()) { seenNull = true; @@ -76,6 +80,10 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { } IntVector add(BytesRefVector vector) { + var ordinals = vector.asOrdinals(); + if (ordinals != null) { + return addOrdinalsVector(ordinals); + } BytesRef scratch = new BytesRef(); int positions = vector.getPositionCount(); try (var builder = blockFactory.newIntVectorFixedBuilder(positions)) { @@ -113,15 +121,29 @@ public ReleasableIterator lookup(Page page, ByteSizeValue targetBlockS return ReleasableIterator.single(lookup(vector)); } - private IntBlock addOrdinalsBlock(OrdinalBytesRefBlock inputBlock) { - var inputOrds = inputBlock.getOrdinalsBlock(); + private IntVector addOrdinalsVector(OrdinalBytesRefVector inputBlock) { + IntVector inputOrds = inputBlock.getOrdinalsVector(); try ( 
- var builder = blockFactory.newIntBlockBuilder(inputOrds.getPositionCount()); + var builder = blockFactory.newIntVectorBuilder(inputOrds.getPositionCount()); var hashOrds = add(inputBlock.getDictionaryVector()) ) { - for (int i = 0; i < inputOrds.getPositionCount(); i++) { - int valueCount = inputOrds.getValueCount(i); - int firstIndex = inputOrds.getFirstValueIndex(i); + for (int p = 0; p < inputOrds.getPositionCount(); p++) { + int ord = hashOrds.getInt(inputOrds.getInt(p)); + builder.appendInt(ord); + } + return builder.build(); + } + } + + private IntBlock addOrdinalsBlock(OrdinalBytesRefBlock inputBlock) { + try ( + IntBlock inputOrds = new MultivalueDedupeInt(inputBlock.getOrdinalsBlock()).dedupeToBlockAdaptive(blockFactory); + IntBlock.Builder builder = blockFactory.newIntBlockBuilder(inputOrds.getPositionCount()); + IntVector hashOrds = add(inputBlock.getDictionaryVector()) + ) { + for (int p = 0; p < inputOrds.getPositionCount(); p++) { + int valueCount = inputOrds.getValueCount(p); + int firstIndex = inputOrds.getFirstValueIndex(p); switch (valueCount) { case 0 -> { builder.appendInt(0); @@ -132,9 +154,11 @@ private IntBlock addOrdinalsBlock(OrdinalBytesRefBlock inputBlock) { builder.appendInt(ord); } default -> { + int start = firstIndex; + int end = firstIndex + valueCount; builder.beginPositionEntry(); - for (int v = 0; v < valueCount; v++) { - int ord = hashOrds.getInt(inputOrds.getInt(firstIndex + i)); + for (int i = start; i < end; i++) { + int ord = hashOrds.getInt(inputOrds.getInt(i)); builder.appendInt(ord); } builder.endPositionEntry(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java index bd9d752302ae3..c9c672112a630 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java @@ -28,6 +28,7 @@ /** * Maps a {@link DoubleBlock} column to group ids. + * This class is generated. Do not edit it. */ final class DoubleBlockHash extends BlockHash { private final int channel; @@ -50,6 +51,7 @@ final class DoubleBlockHash extends BlockHash { @Override public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { + // TODO track raw counts and which implementation we pick for the profiler - #114008 var block = page.getBlock(channel); if (block.areAllValuesNull()) { seenNull = true; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java index 5b1b48bd270ab..13b60c6f1fec5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java @@ -26,6 +26,7 @@ /** * Maps a {@link IntBlock} column to group ids. + * This class is generated. Do not edit it. 
*/ final class IntBlockHash extends BlockHash { private final int channel; @@ -48,6 +49,7 @@ final class IntBlockHash extends BlockHash { @Override public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { + // TODO track raw counts and which implementation we pick for the profiler - #114008 var block = page.getBlock(channel); if (block.areAllValuesNull()) { seenNull = true; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java index 074ccb2f7cd7d..5252bd742ec51 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java @@ -28,6 +28,7 @@ /** * Maps a {@link LongBlock} column to group ids. + * This class is generated. Do not edit it. 
*/ final class LongBlockHash extends BlockHash { private final int channel; @@ -50,6 +51,7 @@ final class LongBlockHash extends BlockHash { @Override public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { + // TODO track raw counts and which implementation we pick for the profiler - #114008 var block = page.getBlock(channel); if (block.areAllValuesNull()) { seenNull = true; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java index fe1a07e8e16a6..919cb92f79260 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.common.util.BytesRefHash; +import org.elasticsearch.common.util.Int3Hash; import org.elasticsearch.common.util.LongHash; import org.elasticsearch.common.util.LongLongHash; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; @@ -28,14 +29,37 @@ import java.util.List; /** - * A specialized hash table implementation maps values of a {@link Block} to ids (in longs). - * This class delegates to {@link LongHash} or {@link BytesRefHash}. - * - * @see LongHash - * @see BytesRefHash + * Specialized hash table implementations that map rows to a set + * of bucket IDs to which they belong to implement {@code GROUP BY} expressions. + *

+ * A row is always in at least one bucket so the results are never {@code null}. + * {@code null} valued key columns will map to some integer bucket id. + * If none of key columns are multivalued then the output is always an + * {@link IntVector}. If any of the key are multivalued then a row is + * in a bucket for each value. If more than one key is multivalued then + * the row is in the combinatorial explosion of all value combinations. + * Luckily for the number of values rows can only be in each bucket once. + * Unluckily, it's the responsibility of {@link BlockHash} to remove those + * duplicates. + *

+ *

+ * These classes typically delegate to some combination of {@link BytesRefHash}, + * {@link LongHash}, {@link LongLongHash}, {@link Int3Hash}. They don't + * technically have to be hash tables, so long as they + * implement the deduplication semantics above and vend integer ids. + *

+ *

+ * The integer ids are assigned to offsets into arrays of aggregation states + * so it's permissible to have gaps in the ints. But large gaps are a bad + * idea because they'll waste space in the aggregations that use these + * positions. For example, {@link BooleanBlockHash} assigns {@code 0} to + * {@code null}, {@code 1} to {@code false}, and {@code 2} to {@code true} + * and that's fine and simple and good because it'll never + * leave a big gap, even if we never see {@code null}. + *

*/ public abstract sealed class BlockHash implements Releasable, SeenGroupIds // - permits BooleanBlockHash, BytesRefBlockHash, DoubleBlockHash, IntBlockHash, LongBlockHash, BytesRef3BlockHash, // + permits BooleanBlockHash, BytesRefBlockHash, DoubleBlockHash, IntBlockHash, LongBlockHash, BytesRef2BlockHash, BytesRef3BlockHash, // NullBlockHash, PackedValuesBlockHash, BytesRefLongBlockHash, LongLongBlockHash, TimeSeriesBlockHash { protected final BlockFactory blockFactory; @@ -98,8 +122,19 @@ public static BlockHash build(List groups, BlockFactory blockFactory, if (groups.size() == 1) { return newForElementType(groups.get(0).channel(), groups.get(0).elementType(), blockFactory); } - if (groups.size() == 3 && groups.stream().allMatch(g -> g.elementType == ElementType.BYTES_REF)) { - return new BytesRef3BlockHash(blockFactory, groups.get(0).channel, groups.get(1).channel, groups.get(2).channel, emitBatchSize); + if (groups.stream().allMatch(g -> g.elementType == ElementType.BYTES_REF)) { + switch (groups.size()) { + case 2: + return new BytesRef2BlockHash(blockFactory, groups.get(0).channel, groups.get(1).channel, emitBatchSize); + case 3: + return new BytesRef3BlockHash( + blockFactory, + groups.get(0).channel, + groups.get(1).channel, + groups.get(2).channel, + emitBatchSize + ); + } } if (allowBrokenOptimizations && groups.size() == 2) { var g1 = groups.get(0); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java index 4c2817588904a..ecbf292b077ea 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java @@ -25,8 +25,9 @@ import static 
org.elasticsearch.compute.operator.mvdedupe.MultivalueDedupeBoolean.TRUE_ORD; /** - * Maps a {@link BooleanBlock} column to group ids. Assigns group - * {@code 0} to {@code false} and group {@code 1} to {@code true}. + * Maps a {@link BooleanBlock} column to group ids. Assigns + * {@code 0} to {@code null}, {@code 1} to {@code false}, and + * {@code 2} to {@code true}. */ final class BooleanBlockHash extends BlockHash { private final int channel; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRef2BlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRef2BlockHash.java new file mode 100644 index 0000000000000..ff25aa1381004 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRef2BlockHash.java @@ -0,0 +1,196 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation.blockhash; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.BitArray; +import org.elasticsearch.common.util.LongHash; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.ReleasableIterator; +import org.elasticsearch.core.Releasables; + +import java.util.Locale; + +/** + * Maps two {@link BytesRefBlock}s to group ids. + */ +final class BytesRef2BlockHash extends BlockHash { + private final int emitBatchSize; + private final int channel1; + private final int channel2; + private final BytesRefBlockHash hash1; + private final BytesRefBlockHash hash2; + private final LongHash finalHash; + + BytesRef2BlockHash(BlockFactory blockFactory, int channel1, int channel2, int emitBatchSize) { + super(blockFactory); + this.emitBatchSize = emitBatchSize; + this.channel1 = channel1; + this.channel2 = channel2; + boolean success = false; + try { + this.hash1 = new BytesRefBlockHash(channel1, blockFactory); + this.hash2 = new BytesRefBlockHash(channel2, blockFactory); + this.finalHash = new LongHash(1, blockFactory.bigArrays()); + success = true; + } finally { + if (success == false) { + close(); + } + } + } + + @Override + public void close() { + Releasables.close(hash1, hash2, finalHash); + } + + @Override + public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { + BytesRefBlock b1 = page.getBlock(channel1); + BytesRefBlock b2 = page.getBlock(channel2); + BytesRefVector v1 = 
b1.asVector(); + BytesRefVector v2 = b2.asVector(); + if (v1 != null && v2 != null) { + addVectors(v1, v2, addInput); + } else { + try (IntBlock k1 = hash1.add(b1); IntBlock k2 = hash2.add(b2)) { + try (AddWork work = new AddWork(k1, k2, addInput)) { + work.add(); + } + } + } + } + + private void addVectors(BytesRefVector v1, BytesRefVector v2, GroupingAggregatorFunction.AddInput addInput) { + final int positionCount = v1.getPositionCount(); + try (IntVector.FixedBuilder ordsBuilder = blockFactory.newIntVectorFixedBuilder(positionCount)) { + try (IntVector k1 = hash1.add(v1); IntVector k2 = hash2.add(v2)) { + for (int p = 0; p < positionCount; p++) { + long ord = ord(k1.getInt(p), k2.getInt(p)); + ordsBuilder.appendInt(p, Math.toIntExact(ord)); + } + } + try (IntVector ords = ordsBuilder.build()) { + addInput.add(0, ords); + } + } + } + + private class AddWork extends AddPage { + final IntBlock b1; + final IntBlock b2; + + AddWork(IntBlock b1, IntBlock b2, GroupingAggregatorFunction.AddInput addInput) { + super(blockFactory, emitBatchSize, addInput); + this.b1 = b1; + this.b2 = b2; + } + + void add() { + int positionCount = b1.getPositionCount(); + for (int i = 0; i < positionCount; i++) { + int v1 = b1.getValueCount(i); + int v2 = b2.getValueCount(i); + int first1 = b1.getFirstValueIndex(i); + int first2 = b2.getFirstValueIndex(i); + if (v1 == 1 && v2 == 1) { + long ord = ord(b1.getInt(first1), b2.getInt(first2)); + appendOrdSv(i, Math.toIntExact(ord)); + continue; + } + for (int i1 = 0; i1 < v1; i1++) { + int k1 = b1.getInt(first1 + i1); + for (int i2 = 0; i2 < v2; i2++) { + int k2 = b2.getInt(first2 + i2); + long ord = ord(k1, k2); + appendOrdInMv(i, Math.toIntExact(ord)); + } + } + finishMv(); + } + flushRemaining(); + } + } + + private long ord(int k1, int k2) { + return hashOrdToGroup(finalHash.add((long) k2 << 32 | k1)); + } + + @Override + public ReleasableIterator lookup(Page page, ByteSizeValue targetBlockSize) { + throw new 
UnsupportedOperationException("TODO"); + } + + @Override + public Block[] getKeys() { + // TODO Build Ordinals blocks #114010 + final int positions = (int) finalHash.size(); + final BytesRef scratch = new BytesRef(); + final BytesRefBlock[] outputBlocks = new BytesRefBlock[2]; + try { + try (BytesRefBlock.Builder b1 = blockFactory.newBytesRefBlockBuilder(positions)) { + for (int i = 0; i < positions; i++) { + int k1 = (int) (finalHash.get(i) & 0xffffffffL); + if (k1 == 0) { + b1.appendNull(); + } else { + b1.appendBytesRef(hash1.hash.get(k1 - 1, scratch)); + } + } + outputBlocks[0] = b1.build(); + } + try (BytesRefBlock.Builder b2 = blockFactory.newBytesRefBlockBuilder(positions)) { + for (int i = 0; i < positions; i++) { + int k2 = (int) (finalHash.get(i) >>> 32); + if (k2 == 0) { + b2.appendNull(); + } else { + b2.appendBytesRef(hash2.hash.get(k2 - 1, scratch)); + } + } + outputBlocks[1] = b2.build(); + } + return outputBlocks; + } finally { + if (outputBlocks[outputBlocks.length - 1] == null) { + Releasables.close(outputBlocks); + } + } + } + + @Override + public BitArray seenGroupIds(BigArrays bigArrays) { + return new Range(0, Math.toIntExact(finalHash.size())).seenGroupIds(bigArrays); + } + + @Override + public IntVector nonEmpty() { + return IntVector.range(0, Math.toIntExact(finalHash.size()), blockFactory); + } + + @Override + public String toString() { + return String.format( + Locale.ROOT, + "BytesRef2BlockHash{keys=[channel1=%d, channel2=%d], entries=%d}", + channel1, + channel2, + finalHash.size() + ); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRef3BlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRef3BlockHash.java index 54bf068b4de33..987a808ed7950 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRef3BlockHash.java +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRef3BlockHash.java @@ -85,7 +85,6 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { private void addVectors(BytesRefVector v1, BytesRefVector v2, BytesRefVector v3, GroupingAggregatorFunction.AddInput addInput) { final int positionCount = v1.getPositionCount(); try (IntVector.FixedBuilder ordsBuilder = blockFactory.newIntVectorFixedBuilder(positionCount)) { - // TODO: enable ordinal vectors in BytesRefBlockHash try (IntVector k1 = hash1.add(v1); IntVector k2 = hash2.add(v2); IntVector k3 = hash3.add(v3)) { for (int p = 0; p < positionCount; p++) { long ord = hashOrdToGroup(finalHash.add(k1.getInt(p), k2.getInt(p), k3.getInt(p))); @@ -148,6 +147,7 @@ public ReleasableIterator lookup(Page page, ByteSizeValue targetBlockS @Override public Block[] getKeys() { + // TODO Build Ordinals blocks #114010 final int positions = (int) finalHash.size(); final BytesRef scratch = new BytesRef(); final BytesRefBlock[] outputBlocks = new BytesRefBlock[3]; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st index b4f700980558e..7c21cff56d7bb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st @@ -28,6 +28,7 @@ import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.OrdinalBytesRefBlock; +import org.elasticsearch.compute.data.OrdinalBytesRefVector; $elseif(double)$ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; @@ -51,6 
+52,9 @@ $endif$ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.mvdedupe.MultivalueDedupe; import org.elasticsearch.compute.operator.mvdedupe.MultivalueDedupe$Type$; +$if(BytesRef)$ +import org.elasticsearch.compute.operator.mvdedupe.MultivalueDedupeInt; +$endif$ import org.elasticsearch.core.ReleasableIterator; $if(BytesRef)$ @@ -62,6 +66,7 @@ import java.util.BitSet; $endif$ /** * Maps a {@link $Type$Block} column to group ids. + * This class is generated. Do not edit it. */ final class $Type$BlockHash extends BlockHash { private final int channel; @@ -84,6 +89,7 @@ final class $Type$BlockHash extends BlockHash { @Override public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { + // TODO track raw counts and which implementation we pick for the profiler - #114008 var block = page.getBlock(channel); if (block.areAllValuesNull()) { seenNull = true; @@ -107,6 +113,10 @@ final class $Type$BlockHash extends BlockHash { IntVector add($Type$Vector vector) { $if(BytesRef)$ + var ordinals = vector.asOrdinals(); + if (ordinals != null) { + return addOrdinalsVector(ordinals); + } BytesRef scratch = new BytesRef(); $endif$ int positions = vector.getPositionCount(); @@ -154,15 +164,29 @@ $endif$ } $if(BytesRef)$ - private IntBlock addOrdinalsBlock(OrdinalBytesRefBlock inputBlock) { - var inputOrds = inputBlock.getOrdinalsBlock(); + private IntVector addOrdinalsVector(OrdinalBytesRefVector inputBlock) { + IntVector inputOrds = inputBlock.getOrdinalsVector(); try ( - var builder = blockFactory.newIntBlockBuilder(inputOrds.getPositionCount()); + var builder = blockFactory.newIntVectorBuilder(inputOrds.getPositionCount()); var hashOrds = add(inputBlock.getDictionaryVector()) ) { - for (int i = 0; i < inputOrds.getPositionCount(); i++) { - int valueCount = inputOrds.getValueCount(i); - int firstIndex = inputOrds.getFirstValueIndex(i); + for (int p = 0; p < inputOrds.getPositionCount(); p++) { + int ord = 
hashOrds.getInt(inputOrds.getInt(p)); + builder.appendInt(ord); + } + return builder.build(); + } + } + + private IntBlock addOrdinalsBlock(OrdinalBytesRefBlock inputBlock) { + try ( + IntBlock inputOrds = new MultivalueDedupeInt(inputBlock.getOrdinalsBlock()).dedupeToBlockAdaptive(blockFactory); + IntBlock.Builder builder = blockFactory.newIntBlockBuilder(inputOrds.getPositionCount()); + IntVector hashOrds = add(inputBlock.getDictionaryVector()) + ) { + for (int p = 0; p < inputOrds.getPositionCount(); p++) { + int valueCount = inputOrds.getValueCount(p); + int firstIndex = inputOrds.getFirstValueIndex(p); switch (valueCount) { case 0 -> { builder.appendInt(0); @@ -173,9 +197,11 @@ $if(BytesRef)$ builder.appendInt(ord); } default -> { + int start = firstIndex; + int end = firstIndex + valueCount; builder.beginPositionEntry(); - for (int v = 0; v < valueCount; v++) { - int ord = hashOrds.getInt(inputOrds.getInt(firstIndex + i)); + for (int i = start; i < end; i++) { + int ord = hashOrds.getInt(inputOrds.getInt(i)); builder.appendInt(ord); } builder.endPositionEntry(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java index 27ec0b979e8ae..76d4caf810eb8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java @@ -21,10 +21,13 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockTestUtils; +import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; import 
org.elasticsearch.compute.data.MockBlockFactory; +import org.elasticsearch.compute.data.OrdinalBytesRefBlock; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.compute.operator.mvdedupe.MultivalueDedupeTests; import org.elasticsearch.core.ReleasableIterator; import org.elasticsearch.core.Releasables; @@ -38,11 +41,13 @@ import java.util.Collections; import java.util.Comparator; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.NavigableSet; import java.util.Set; import java.util.TreeSet; +import java.util.stream.Stream; import static org.elasticsearch.test.ListMatcher.matchesList; import static org.elasticsearch.test.MapMatcher.assertMap; @@ -58,26 +63,40 @@ public class BlockHashRandomizedTests extends ESTestCase { @ParametersFactory public static List params() { - List params = new ArrayList<>(); + List> allowedTypesChoices = List.of( + /* + * Run with only `LONG` elements because we have some + * optimizations that hit if you only have those. + */ + List.of(new Basic(ElementType.LONG)), + /* + * Run with only `BYTES_REF` elements because we have some + * optimizations that hit if you only have those. + */ + List.of(new Basic(ElementType.BYTES_REF)), + /* + * Run with only `BYTES_REF` elements in an OrdinalBytesRefBlock + * because we have a few optimizations that use it. + */ + List.of(new Ordinals(10)), + /* + * Run with only `LONG` and `BYTES_REF` elements because + * we have some optimizations that hit if you only have + * those. + */ + List.of(new Basic(ElementType.LONG), new Basic(ElementType.BYTES_REF)), + /* + * Any random source. 
+ */ + Stream.concat(Stream.of(new Ordinals(10)), MultivalueDedupeTests.supportedTypes().stream().map(Basic::new)).toList() + ); + List params = new ArrayList<>(); for (boolean forcePackedHash : new boolean[] { false, true }) { for (int groups : new int[] { 1, 2, 3, 4, 5, 10 }) { for (int maxValuesPerPosition : new int[] { 1, 3 }) { for (int dups : new int[] { 0, 2 }) { - for (List allowedTypes : List.of( - /* - * Run with only `LONG` elements because we have some - * optimizations that hit if you only have those. - */ - List.of(ElementType.LONG), - /* - * Run with only `LONG` and `BYTES_REF` elements because - * we have some optimizations that hit if you only have - * those. - */ - List.of(ElementType.LONG, ElementType.BYTES_REF), - MultivalueDedupeTests.supportedTypes() - )) { + for (List allowedTypes : allowedTypesChoices) { params.add(new Object[] { forcePackedHash, groups, maxValuesPerPosition, dups, allowedTypes }); } } @@ -87,18 +106,33 @@ public static List params() { return params; } + /** + * The type of {@link Block} being tested. + */ + interface Type { + /** + * The type of the {@link ElementType elements} in the {@link Block}. + */ + ElementType elementType(); + + /** + * Build a random {@link Block}. 
+ */ + BasicBlockTests.RandomBlock randomBlock(int positionCount, int maxValuesPerPosition, int dups); + } + private final boolean forcePackedHash; private final int groups; private final int maxValuesPerPosition; private final int dups; - private final List allowedTypes; + private final List allowedTypes; public BlockHashRandomizedTests( @Name("forcePackedHash") boolean forcePackedHash, @Name("groups") int groups, @Name("maxValuesPerPosition") int maxValuesPerPosition, @Name("dups") int dups, - @Name("allowedTypes") List allowedTypes + @Name("allowedTypes") List allowedTypes ) { this.forcePackedHash = forcePackedHash; this.groups = groups; @@ -127,21 +161,22 @@ public void testWithCranky() { } private void test(MockBlockFactory blockFactory) { - List types = randomList(groups, groups, () -> randomFrom(allowedTypes)); + List types = randomList(groups, groups, () -> randomFrom(allowedTypes)); + List elementTypes = types.stream().map(Type::elementType).toList(); BasicBlockTests.RandomBlock[] randomBlocks = new BasicBlockTests.RandomBlock[types.size()]; Block[] blocks = new Block[types.size()]; - int pageCount = between(1, 10); + int pageCount = between(1, groups < 10 ? 10 : 5); int positionCount = 100; int emitBatchSize = 100; - try (BlockHash blockHash = newBlockHash(blockFactory, emitBatchSize, types)) { + try (BlockHash blockHash = newBlockHash(blockFactory, emitBatchSize, elementTypes)) { /* * Only the long/long, long/bytes_ref, and bytes_ref/long implementations don't collect nulls. 
*/ Oracle oracle = new Oracle( forcePackedHash - || false == (types.equals(List.of(ElementType.LONG, ElementType.LONG)) - || types.equals(List.of(ElementType.LONG, ElementType.BYTES_REF)) - || types.equals(List.of(ElementType.BYTES_REF, ElementType.LONG))) + || false == (elementTypes.equals(List.of(ElementType.LONG, ElementType.LONG)) + || elementTypes.equals(List.of(ElementType.LONG, ElementType.BYTES_REF)) + || elementTypes.equals(List.of(ElementType.BYTES_REF, ElementType.LONG))) ); /* * Expected ordinals for checking lookup. Skipped if we have more than 5 groups because @@ -151,15 +186,7 @@ private void test(MockBlockFactory blockFactory) { for (int p = 0; p < pageCount; p++) { for (int g = 0; g < blocks.length; g++) { - randomBlocks[g] = BasicBlockTests.randomBlock( - types.get(g), - positionCount, - types.get(g) == ElementType.NULL ? true : randomBoolean(), - 1, - maxValuesPerPosition, - 0, - dups - ); + randomBlocks[g] = types.get(g).randomBlock(positionCount, maxValuesPerPosition, dups); blocks[g] = randomBlocks[g].block(); } oracle.add(randomBlocks); @@ -209,6 +236,7 @@ private void test(MockBlockFactory blockFactory) { if (blockHash instanceof LongLongBlockHash == false && blockHash instanceof BytesRefLongBlockHash == false + && blockHash instanceof BytesRef2BlockHash == false && blockHash instanceof BytesRef3BlockHash == false) { assertLookup(blockFactory, expectedOrds, types, blockHash, oracle); } @@ -235,14 +263,14 @@ private BlockHash newBlockHash(BlockFactory blockFactory, int emitBatchSize, Lis private void assertLookup( BlockFactory blockFactory, Map, Set> expectedOrds, - List types, + List types, BlockHash blockHash, Oracle oracle ) { Block.Builder[] builders = new Block.Builder[types.size()]; try { for (int b = 0; b < builders.length; b++) { - builders[b] = types.get(b).newBlockBuilder(LOOKUP_POSITIONS, blockFactory); + builders[b] = types.get(b).elementType().newBlockBuilder(LOOKUP_POSITIONS, blockFactory); } for (int p = 0; p < 
LOOKUP_POSITIONS; p++) { /* @@ -408,8 +436,8 @@ static CircuitBreakerService mockBreakerService(CircuitBreaker breaker) { return breakerService; } - private static List randomKey(List types) { - return types.stream().map(BlockHashRandomizedTests::randomKeyElement).toList(); + private static List randomKey(List types) { + return types.stream().map(t -> randomKeyElement(t.elementType())).toList(); } public static Object randomKeyElement(ElementType type) { @@ -423,4 +451,75 @@ public static Object randomKeyElement(ElementType type) { default -> throw new IllegalArgumentException("unsupported element type [" + type + "]"); }; } + + private record Basic(ElementType elementType) implements Type { + @Override + public BasicBlockTests.RandomBlock randomBlock(int positionCount, int maxValuesPerPosition, int dups) { + return BasicBlockTests.randomBlock( + elementType, + positionCount, + elementType == ElementType.NULL | randomBoolean(), + 1, + maxValuesPerPosition, + 0, + dups + ); + } + } + + private record Ordinals(int dictionarySize) implements Type { + @Override + public ElementType elementType() { + return ElementType.BYTES_REF; + } + + @Override + public BasicBlockTests.RandomBlock randomBlock(int positionCount, int maxValuesPerPosition, int dups) { + List> dictionary = new ArrayList<>(); + List> values = new ArrayList<>(positionCount); + try ( + IntBlock.Builder ordinals = TestBlockFactory.getNonBreakingInstance() + .newIntBlockBuilder(positionCount * maxValuesPerPosition); + BytesRefVector.Builder bytes = TestBlockFactory.getNonBreakingInstance().newBytesRefVectorBuilder(maxValuesPerPosition); + ) { + for (String value : dictionary(maxValuesPerPosition)) { + bytes.appendBytesRef(new BytesRef(value)); + dictionary.add(Map.entry(value, dictionary.size())); + } + for (int p = 0; p < positionCount; p++) { + int valueCount = between(1, maxValuesPerPosition); + int dupCount = between(0, dups); + + List ordsAtPosition = new ArrayList<>(); + List valuesAtPosition = new 
ArrayList<>(); + values.add(valuesAtPosition); + if (valueCount != 1 || dupCount != 0) { + ordinals.beginPositionEntry(); + } + for (int v = 0; v < valueCount; v++) { + Map.Entry value = randomFrom(dictionary); + valuesAtPosition.add(new BytesRef(value.getKey())); + ordinals.appendInt(value.getValue()); + ordsAtPosition.add(value.getValue()); + } + for (int v = 0; v < dupCount; v++) { + ordinals.appendInt(randomFrom(ordsAtPosition)); + } + if (valueCount != 1 || dupCount != 0) { + ordinals.endPositionEntry(); + } + } + return new BasicBlockTests.RandomBlock(values, new OrdinalBytesRefBlock(ordinals.build(), bytes.build())); + } + } + + private Set dictionary(int maxValuesPerPosition) { + int count = Math.max(dictionarySize, maxValuesPerPosition); + Set values = new HashSet<>(); + while (values.size() < count) { + values.add(randomAlphaOfLength(5)); + } + return values; + } + } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java index 800683c696c0f..aeea18e52da0f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java @@ -20,12 +20,15 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.MockBlockFactory; +import org.elasticsearch.compute.data.OrdinalBytesRefBlock; +import 
org.elasticsearch.compute.data.OrdinalBytesRefVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.core.Releasable; @@ -460,6 +463,133 @@ public void testBytesRefHashWithMultiValuedFields() { } } + public void testBasicOrdinals() { + try ( + IntVector.Builder ords = blockFactory.newIntVectorFixedBuilder(8); + BytesRefVector.Builder bytes = blockFactory.newBytesRefVectorBuilder(8) + ) { + ords.appendInt(1); + ords.appendInt(0); + ords.appendInt(3); + ords.appendInt(1); + ords.appendInt(3); + ords.appendInt(0); + ords.appendInt(2); + ords.appendInt(3); + bytes.appendBytesRef(new BytesRef("item-1")); + bytes.appendBytesRef(new BytesRef("item-2")); + bytes.appendBytesRef(new BytesRef("item-3")); + bytes.appendBytesRef(new BytesRef("item-4")); + + hash(ordsAndKeys -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BYTES_REF], entries=4, size=")); + assertThat(ordsAndKeys.description, endsWith("b}")); + assertOrds(ordsAndKeys.ords, 0, 1, 2, 0, 2, 1, 3, 2); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 4))); + assertKeys(ordsAndKeys.keys, "item-2", "item-1", "item-4", "item-3"); + } else { + assertThat(ordsAndKeys.description, startsWith("BytesRefBlockHash{channel=0, entries=4, size=")); + assertThat(ordsAndKeys.description, endsWith("b, seenNull=false}")); + assertOrds(ordsAndKeys.ords, 2, 1, 4, 2, 4, 1, 3, 4); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(1, 5))); + assertKeys(ordsAndKeys.keys, "item-1", "item-2", "item-3", "item-4"); + } + }, new OrdinalBytesRefVector(ords.build(), bytes.build()).asBlock()); + } + } + + public void testOrdinalsWithNulls() { + try ( + IntBlock.Builder ords = blockFactory.newIntBlockBuilder(4); + BytesRefVector.Builder bytes = blockFactory.newBytesRefVectorBuilder(2) + ) { + ords.appendInt(0); + ords.appendNull(); + ords.appendInt(1); + ords.appendNull(); + bytes.appendBytesRef(new 
BytesRef("cat")); + bytes.appendBytesRef(new BytesRef("dog")); + + hash(ordsAndKeys -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BYTES_REF], entries=3, size=")); + assertThat(ordsAndKeys.description, endsWith("b}")); + assertOrds(ordsAndKeys.ords, 0, 1, 2, 1); + assertKeys(ordsAndKeys.keys, "cat", null, "dog"); + } else { + assertThat(ordsAndKeys.description, startsWith("BytesRefBlockHash{channel=0, entries=2, size=")); + assertThat(ordsAndKeys.description, endsWith("b, seenNull=true}")); + assertOrds(ordsAndKeys.ords, 1, 0, 2, 0); + assertKeys(ordsAndKeys.keys, null, "cat", "dog"); + } + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 3))); + }, new OrdinalBytesRefBlock(ords.build(), bytes.build())); + } + } + + public void testOrdinalsWithMultiValuedFields() { + try ( + IntBlock.Builder ords = blockFactory.newIntBlockBuilder(4); + BytesRefVector.Builder bytes = blockFactory.newBytesRefVectorBuilder(2) + ) { + ords.appendInt(0); + ords.beginPositionEntry(); + ords.appendInt(0); + ords.appendInt(1); + ords.endPositionEntry(); + ords.beginPositionEntry(); + ords.appendInt(1); + ords.appendInt(2); + ords.endPositionEntry(); + ords.beginPositionEntry(); + ords.appendInt(2); + ords.appendInt(1); + ords.endPositionEntry(); + ords.appendNull(); + ords.beginPositionEntry(); + ords.appendInt(2); + ords.appendInt(2); + ords.appendInt(1); + ords.endPositionEntry(); + + bytes.appendBytesRef(new BytesRef("foo")); + bytes.appendBytesRef(new BytesRef("bar")); + bytes.appendBytesRef(new BytesRef("bort")); + + hash(ordsAndKeys -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BYTES_REF], entries=4, size=")); + assertThat(ordsAndKeys.description, endsWith("b}")); + assertOrds( + ordsAndKeys.ords, + new int[] { 0 }, + new int[] { 0, 1 }, + new int[] { 1, 2 }, + new int[] { 2, 1 }, + new int[] { 3 }, + new int[] { 2, 1 } + ); + 
assertKeys(ordsAndKeys.keys, "foo", "bar", "bort", null); + } else { + assertThat(ordsAndKeys.description, startsWith("BytesRefBlockHash{channel=0, entries=3, size=")); + assertThat(ordsAndKeys.description, endsWith("b, seenNull=true}")); + assertOrds( + ordsAndKeys.ords, + new int[] { 1 }, + new int[] { 1, 2 }, + new int[] { 2, 3 }, + new int[] { 3, 2 }, + new int[] { 0 }, + new int[] { 3, 2 } + ); + assertKeys(ordsAndKeys.keys, null, "foo", "bar", "bort"); + } + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 4))); + }, new OrdinalBytesRefBlock(ords.build(), bytes.build())); + } + } + public void testBooleanHashFalseFirst() { boolean[] values = new boolean[] { false, true, true, true, true }; hash(ordsAndKeys -> { @@ -1315,6 +1445,7 @@ public void close() { }); if (blockHash instanceof LongLongBlockHash == false && blockHash instanceof BytesRefLongBlockHash == false + && blockHash instanceof BytesRef2BlockHash == false && blockHash instanceof BytesRef3BlockHash == false) { Block[] keys = blockHash.getKeys(); try (ReleasableIterator lookup = blockHash.lookup(new Page(keys), ByteSizeValue.ofKb(between(1, 100)))) { From 71967687cde7f3293abe99eaad8c2f51fbb430db Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 11 Oct 2024 15:13:38 -0400 Subject: [PATCH 017/449] ESQL: Test partially filtered aggs (#114510) Tests for partially filtered aggs. It uses the existing aggs tests and adds junk rows that are filtered away. That way we don't have to add new testing assertions to each class - we just can reuse the existing assertions. 
--- .../AggregatorFunctionTestCase.java | 17 +++ .../FilteredAggregatorFunctionTests.java | 5 + .../GroupingAggregatorFunctionTestCase.java | 27 +++- .../AddGarbageRowsSourceOperator.java | 133 ++++++++++++++++++ 4 files changed, 181 insertions(+), 1 deletion(-) create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AddGarbageRowsSourceOperator.java diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java index a4eb252dbf35c..e2c9c255b67bd 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java @@ -22,6 +22,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.data.TestBlockFactory; +import org.elasticsearch.compute.operator.AddGarbageRowsSourceOperator; import org.elasticsearch.compute.operator.AggregationOperator; import org.elasticsearch.compute.operator.CannedSourceOperator; import org.elasticsearch.compute.operator.Driver; @@ -203,6 +204,22 @@ public void testNoneFiltered() { assertSimpleOutput(origInput, results); } + public void testSomeFiltered() { + Operator.OperatorFactory factory = simpleWithMode( + AggregatorMode.SINGLE, + agg -> new FilteredAggregatorFunctionSupplier(agg, AddGarbageRowsSourceOperator.filterFactory()) + ); + DriverContext driverContext = driverContext(); + // Build the test data + List input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), 10)); + List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); + // Sprinkle garbage into it + input = CannedSourceOperator.collectPages(new 
AddGarbageRowsSourceOperator(new CannedSourceOperator(input.iterator()))); + List results = drive(factory.get(driverContext), input.iterator(), driverContext); + assertThat(results, hasSize(1)); + assertSimpleOutput(origInput, results); + } + // Returns an intermediate state that is equivalent to what the local execution planner will emit // if it determines that certain shards have no relevant data. List nullIntermediateState(BlockFactory blockFactory) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionTests.java index 7e1575fb81726..da2c3502144db 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionTests.java @@ -103,4 +103,9 @@ public void testNoneFiltered() { public void testAllFiltered() { assumeFalse("can't double filter. tests already filter.", true); } + + @Override + public void testSomeFiltered() { + assumeFalse("can't double filter. 
tests already filter.", true); + } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java index 316058e57e089..9414e076a26e6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java @@ -26,6 +26,7 @@ import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.data.TestBlockFactory; +import org.elasticsearch.compute.operator.AddGarbageRowsSourceOperator; import org.elasticsearch.compute.operator.CannedSourceOperator; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.ForkingOperatorTestCase; @@ -53,6 +54,7 @@ import static org.elasticsearch.compute.data.BlockTestUtils.append; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.in; /** * Shared tests for testing grouped aggregations. 
@@ -160,11 +162,17 @@ protected long randomGroupId(int pageSize) { @Override protected final void assertSimpleOutput(List input, List results) { + assertSimpleOutput(input, results, true); + } + + private void assertSimpleOutput(List input, List results, boolean assertGroupCount) { SeenGroups seenGroups = seenGroups(input); assertThat(results, hasSize(1)); assertThat(results.get(0).getBlockCount(), equalTo(2)); - assertThat(results.get(0).getPositionCount(), equalTo(seenGroups.size())); + if (assertGroupCount) { + assertThat(results.get(0).getPositionCount(), equalTo(seenGroups.size())); + } Block groups = results.get(0).getBlock(0); Block result = results.get(0).getBlock(1); @@ -394,6 +402,23 @@ public final void testNoneFiltered() { assertSimpleOutput(origInput, results); } + public void testSomeFiltered() { + Operator.OperatorFactory factory = simpleWithMode( + AggregatorMode.SINGLE, + agg -> new FilteredAggregatorFunctionSupplier(agg, AddGarbageRowsSourceOperator.filterFactory()) + ); + DriverContext driverContext = driverContext(); + // Build the test data + List input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), 10)); + List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); + // Sprinkle garbage into it + input = CannedSourceOperator.collectPages(new AddGarbageRowsSourceOperator(new CannedSourceOperator(input.iterator()))); + List results = drive(factory.get(driverContext), input.iterator(), driverContext); + assertThat(results, hasSize(1)); + + assertSimpleOutput(origInput, results, false); + } + /** * Asserts that the output from an empty input is a {@link Block} containing * only {@code null}. 
Override for {@code count} style aggregations that diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AddGarbageRowsSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AddGarbageRowsSourceOperator.java new file mode 100644 index 0000000000000..079be87161421 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AddGarbageRowsSourceOperator.java @@ -0,0 +1,133 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.FloatBlock; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.test.ESTestCase; + +/** + * A {@link SourceOperator} that inserts random garbage into data from another + * {@link SourceOperator}. It also inserts an extra channel at the end of the page + * containing a {@code boolean} column. If it is {@code true} then the data came + * from the original operator. If it's {@code false} then the data is random + * garbage inserted by this operator. + */ +public class AddGarbageRowsSourceOperator extends SourceOperator { + public static EvalOperator.ExpressionEvaluator.Factory filterFactory() { + /* + * Grabs the filter from the last block. That's where we put it. 
+ */ + return ctx -> new EvalOperator.ExpressionEvaluator() { + @Override + public Block eval(Page page) { + Block block = page.getBlock(page.getBlockCount() - 1); + block.incRef(); + return block; + } + + @Override + public void close() {} + }; + } + + private final SourceOperator next; + + public AddGarbageRowsSourceOperator(SourceOperator next) { + this.next = next; + } + + @Override + public void finish() { + next.finish(); + } + + @Override + public boolean isFinished() { + return next.isFinished(); + } + + @Override + public Page getOutput() { + Page page = next.getOutput(); + if (page == null) { + return null; + } + Block.Builder[] newBlocks = new Block.Builder[page.getBlockCount() + 1]; + try { + for (int b = 0; b < page.getBlockCount(); b++) { + Block block = page.getBlock(b); + newBlocks[b] = block.elementType().newBlockBuilder(page.getPositionCount(), block.blockFactory()); + } + newBlocks[page.getBlockCount()] = page.getBlock(0).blockFactory().newBooleanBlockBuilder(page.getPositionCount()); + + for (int p = 0; p < page.getPositionCount(); p++) { + if (ESTestCase.randomBoolean()) { + insertGarbageRows(newBlocks, page); + } + copyPosition(newBlocks, page, p); + if (ESTestCase.randomBoolean()) { + insertGarbageRows(newBlocks, page); + } + } + + return new Page(Block.Builder.buildAll(newBlocks)); + } finally { + Releasables.close(Releasables.wrap(newBlocks), page::releaseBlocks); + } + } + + private void copyPosition(Block.Builder[] newBlocks, Page page, int p) { + for (int b = 0; b < page.getBlockCount(); b++) { + Block block = page.getBlock(b); + newBlocks[b].copyFrom(block, p, p + 1); + } + signalKeep(newBlocks, true); + } + + private void insertGarbageRows(Block.Builder[] newBlocks, Page page) { + int count = ESTestCase.between(1, 5); + for (int c = 0; c < count; c++) { + insertGarbageRow(newBlocks, page); + } + } + + private void insertGarbageRow(Block.Builder[] newBlocks, Page page) { + for (int b = 0; b < page.getBlockCount(); b++) { + Block block = 
page.getBlock(b); + switch (block.elementType()) { + case BOOLEAN -> ((BooleanBlock.Builder) newBlocks[b]).appendBoolean(ESTestCase.randomBoolean()); + case BYTES_REF -> ((BytesRefBlock.Builder) newBlocks[b]).appendBytesRef(new BytesRef(ESTestCase.randomAlphaOfLength(5))); + case COMPOSITE, DOC, UNKNOWN -> throw new UnsupportedOperationException(); + case INT -> ((IntBlock.Builder) newBlocks[b]).appendInt(ESTestCase.randomInt()); + case LONG -> ((LongBlock.Builder) newBlocks[b]).appendLong(ESTestCase.randomLong()); + case NULL -> newBlocks[b].appendNull(); + case DOUBLE -> ((DoubleBlock.Builder) newBlocks[b]).appendDouble(ESTestCase.randomDouble()); + case FLOAT -> ((FloatBlock.Builder) newBlocks[b]).appendFloat(ESTestCase.randomFloat()); + } + } + signalKeep(newBlocks, false); + } + + private void signalKeep(Block.Builder[] newBlocks, boolean shouldKeep) { + ((BooleanBlock.Builder) newBlocks[newBlocks.length - 1]).appendBoolean(shouldKeep); + } + + @Override + public void close() { + next.close(); + } +} From 0ff423e34d7b68df6f904a14c30753470fdca033 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 12 Oct 2024 06:24:53 +1100 Subject: [PATCH 018/449] Mute org.elasticsearch.xpack.inference.integration.ModelRegistryIT testGetModel #114657 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index e5fa7966dcd88..f25f7cdb64f31 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -378,6 +378,9 @@ tests: - class: org.elasticsearch.xpack.esql.optimizer.PhysicalPlanOptimizerTests method: testPushWhereEvalToSource {default} issue: https://github.com/elastic/elasticsearch/issues/114628 +- class: org.elasticsearch.xpack.inference.integration.ModelRegistryIT + method: testGetModel + issue: https://github.com/elastic/elasticsearch/issues/114657 # Examples: # From 2155f1bed5a2a59623e908c838c7fcd1c801e2f0 Mon Sep 17 00:00:00 2001 From: Larisa Motova Date: 
Fri, 11 Oct 2024 09:33:45 -1000 Subject: [PATCH 019/449] [ES|QL] Add hypot function (#114382) Adds a hypotenuse function --- docs/changelog/114382.yaml | 5 + .../esql/functions/description/hypot.asciidoc | 5 + .../esql/functions/examples/hypot.asciidoc | 13 + .../functions/kibana/definition/hypot.json | 301 ++++++++++++++++++ .../esql/functions/kibana/docs/hypot.md | 12 + .../esql/functions/layout/hypot.asciidoc | 15 + .../esql/functions/math-functions.asciidoc | 2 + .../esql/functions/parameters/hypot.asciidoc | 9 + .../esql/functions/signature/hypot.svg | 1 + .../esql/functions/types/hypot.asciidoc | 24 ++ .../src/main/resources/math.csv-spec | 57 ++++ .../function/scalar/math/HypotEvaluator.java | 132 ++++++++ .../xpack/esql/action/EsqlCapabilities.java | 5 + .../function/EsqlFunctionRegistry.java | 2 + .../function/scalar/EsqlScalarFunction.java | 2 + .../function/scalar/math/Hypot.java | 130 ++++++++ .../scalar/math/HypotSerializationTests.java | 39 +++ .../function/scalar/math/HypotTests.java | 46 +++ 18 files changed, 800 insertions(+) create mode 100644 docs/changelog/114382.yaml create mode 100644 docs/reference/esql/functions/description/hypot.asciidoc create mode 100644 docs/reference/esql/functions/examples/hypot.asciidoc create mode 100644 docs/reference/esql/functions/kibana/definition/hypot.json create mode 100644 docs/reference/esql/functions/kibana/docs/hypot.md create mode 100644 docs/reference/esql/functions/layout/hypot.asciidoc create mode 100644 docs/reference/esql/functions/parameters/hypot.asciidoc create mode 100644 docs/reference/esql/functions/signature/hypot.svg create mode 100644 docs/reference/esql/functions/types/hypot.asciidoc create mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Hypot.java create mode 100644 
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotSerializationTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotTests.java diff --git a/docs/changelog/114382.yaml b/docs/changelog/114382.yaml new file mode 100644 index 0000000000000..9f572e14f4737 --- /dev/null +++ b/docs/changelog/114382.yaml @@ -0,0 +1,5 @@ +pr: 114382 +summary: "[ES|QL] Add hypot function" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/reference/esql/functions/description/hypot.asciidoc b/docs/reference/esql/functions/description/hypot.asciidoc new file mode 100644 index 0000000000000..5162f0d9ef98f --- /dev/null +++ b/docs/reference/esql/functions/description/hypot.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the hypotenuse of two numbers. The input can be any numeric values, the return value is always a double. Hypotenuses of infinities are null. diff --git a/docs/reference/esql/functions/examples/hypot.asciidoc b/docs/reference/esql/functions/examples/hypot.asciidoc new file mode 100644 index 0000000000000..6dbcc62e8755e --- /dev/null +++ b/docs/reference/esql/functions/examples/hypot.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=hypot] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=hypot-result] +|=== + diff --git a/docs/reference/esql/functions/kibana/definition/hypot.json b/docs/reference/esql/functions/kibana/definition/hypot.json new file mode 100644 index 0000000000000..06971f07a3585 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/hypot.json @@ -0,0 +1,301 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "hypot", + "description" : "Returns the hypotenuse of two numbers. The input can be any numeric values, the return value is always a double.\nHypotenuses of infinities are null.", + "signatures" : [ + { + "params" : [ + { + "name" : "number1", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + }, + { + "name" : "number2", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number1", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + }, + { + "name" : "number2", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number1", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + }, + { + "name" : "number2", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number1", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + }, + { + "name" : "number2", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number1", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + }, + { + "name" : "number2", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number1", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + }, + { + "name" : "number2", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number1", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + }, + { + "name" : "number2", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number1", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + }, + { + "name" : "number2", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number1", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + }, + { + "name" : "number2", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number1", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + }, + { + "name" : "number2", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number1", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + }, + { + "name" : "number2", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number1", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + }, + { + "name" : "number2", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number1", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + }, + { + "name" : "number2", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number1", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + }, + { + "name" : "number2", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number1", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + }, + { + "name" : "number2", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number1", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + }, + { + "name" : "number2", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a = 3.0, b = 4.0\n| EVAL c = HYPOT(a, b)" + ], + "preview" : false, + "snapshot_only" : false +} diff --git a/docs/reference/esql/functions/kibana/docs/hypot.md b/docs/reference/esql/functions/kibana/docs/hypot.md new file mode 100644 index 0000000000000..f0cbea6b88e55 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/hypot.md @@ -0,0 +1,12 @@ + + +### HYPOT +Returns the hypotenuse of two numbers. The input can be any numeric values, the return value is always a double. +Hypotenuses of infinities are null. 
+ +``` +ROW a = 3.0, b = 4.0 +| EVAL c = HYPOT(a, b) +``` diff --git a/docs/reference/esql/functions/layout/hypot.asciidoc b/docs/reference/esql/functions/layout/hypot.asciidoc new file mode 100644 index 0000000000000..84376a9f15908 --- /dev/null +++ b/docs/reference/esql/functions/layout/hypot.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-hypot]] +=== `HYPOT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/hypot.svg[Embedded,opts=inline] + +include::../parameters/hypot.asciidoc[] +include::../description/hypot.asciidoc[] +include::../types/hypot.asciidoc[] +include::../examples/hypot.asciidoc[] diff --git a/docs/reference/esql/functions/math-functions.asciidoc b/docs/reference/esql/functions/math-functions.asciidoc index e311208795533..9fedfa57f50c5 100644 --- a/docs/reference/esql/functions/math-functions.asciidoc +++ b/docs/reference/esql/functions/math-functions.asciidoc @@ -20,6 +20,7 @@ * <> * <> * <> +* <> * <> * <> * <> @@ -46,6 +47,7 @@ include::layout/cosh.asciidoc[] include::layout/e.asciidoc[] include::layout/exp.asciidoc[] include::layout/floor.asciidoc[] +include::layout/hypot.asciidoc[] include::layout/log.asciidoc[] include::layout/log10.asciidoc[] include::layout/pi.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/hypot.asciidoc b/docs/reference/esql/functions/parameters/hypot.asciidoc new file mode 100644 index 0000000000000..9d6c7d50c7bec --- /dev/null +++ b/docs/reference/esql/functions/parameters/hypot.asciidoc @@ -0,0 +1,9 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`number1`:: +Numeric expression. If `null`, the function returns `null`. + +`number2`:: +Numeric expression. If `null`, the function returns `null`. 
diff --git a/docs/reference/esql/functions/signature/hypot.svg b/docs/reference/esql/functions/signature/hypot.svg new file mode 100644 index 0000000000000..b849ea42cfd9e --- /dev/null +++ b/docs/reference/esql/functions/signature/hypot.svg @@ -0,0 +1 @@ +HYPOT(number1,number2) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/hypot.asciidoc b/docs/reference/esql/functions/types/hypot.asciidoc new file mode 100644 index 0000000000000..dd06ba96d7f34 --- /dev/null +++ b/docs/reference/esql/functions/types/hypot.asciidoc @@ -0,0 +1,24 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +number1 | number2 | result +double | double | double +double | integer | double +double | long | double +double | unsigned_long | double +integer | double | double +integer | integer | double +integer | long | double +integer | unsigned_long | double +long | double | double +long | integer | double +long | long | double +long | unsigned_long | double +unsigned_long | double | double +unsigned_long | integer | double +unsigned_long | long | double +unsigned_long | unsigned_long | double +|=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec index b00bb5143726c..da069836504d4 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec @@ -1379,6 +1379,63 @@ d:double | c:double -0.0 | -0.0 ; +hypot +required_capability: fn_hypot +// tag::hypot[] +ROW a = 3.0, b = 4.0 +| EVAL c = HYPOT(a, b) +// end::hypot[] +; + +// tag::hypot-result[] +a:double | b:double | c:double +3.0 | 4.0 | 5.0 +// end::hypot-result[] +; + +hypotWithFrom +required_capability: fn_hypot +FROM ul_logs +| WHERE id > 95 +| EVAL bytes_hypot = 
HYPOT(bytes_in, bytes_out) +| SORT id ASC +| LIMIT 5 +| KEEP id, bytes_in, bytes_out, bytes_hypot +; + +id:integer | bytes_in:unsigned_long | bytes_out:unsigned_long | bytes_hypot:double +96 | 9932469097722733505 | 14925592145374204307 | 1.792839209932874E19 +97 | 11620953158540412267 | 3809712277266935082 | 1.2229491401875583E19 +98 | 3448205404634246112 | 5409549730889481641 | 6.415087591258227E18 +99 | 1957665857956635540 | 352442273299370793 | 1.9891382977102218E18 +100 | 16462768484251021236 | 15616395223975497926 | 2.2691287886707827E19 +; + +hypotBothNull +required_capability: fn_hypot +FROM ul_logs +| WHERE bytes_in IS NULL and bytes_out IS NULL +| LIMIT 1 +| EVAL bytes_hypot = HYPOT(bytes_in, bytes_out) +| KEEP bytes_in, bytes_out, bytes_hypot +; + +bytes_in:unsigned_long | bytes_out:unsigned_long | bytes_hypot:double +null | null | null +; + +hypotOneNull +required_capability: fn_hypot +FROM ul_logs +| WHERE id == 41 +| EVAL confused_hypot = HYPOT(id, bytes_in) +| KEEP id, bytes_in, confused_hypot +; + +id:integer | bytes_in:unsigned_long | confused_hypot:double +41 | null | null +; + least // tag::least[] ROW a = 10, b = 20 diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotEvaluator.java new file mode 100644 index 0000000000000..f5684bcb4be18 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotEvaluator.java @@ -0,0 +1,132 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.function.Warnings; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Hypot}. + * This class is generated. Do not edit it. + */ +public final class HypotEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator n1; + + private final EvalOperator.ExpressionEvaluator n2; + + private final DriverContext driverContext; + + public HypotEvaluator(Source source, EvalOperator.ExpressionEvaluator n1, + EvalOperator.ExpressionEvaluator n2, DriverContext driverContext) { + this.n1 = n1; + this.n2 = n2; + this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); + } + + @Override + public Block eval(Page page) { + try (DoubleBlock n1Block = (DoubleBlock) n1.eval(page)) { + try (DoubleBlock n2Block = (DoubleBlock) n2.eval(page)) { + DoubleVector n1Vector = n1Block.asVector(); + if (n1Vector == null) { + return eval(page.getPositionCount(), n1Block, n2Block); + } + DoubleVector n2Vector = n2Block.asVector(); + if (n2Vector == null) { + return eval(page.getPositionCount(), n1Block, n2Block); + } + return eval(page.getPositionCount(), n1Vector, n2Vector).asBlock(); + } + } + } + + public DoubleBlock eval(int positionCount, DoubleBlock n1Block, DoubleBlock n2Block) { + try(DoubleBlock.Builder 
result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (n1Block.isNull(p)) { + result.appendNull(); + continue position; + } + if (n1Block.getValueCount(p) != 1) { + if (n1Block.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (n2Block.isNull(p)) { + result.appendNull(); + continue position; + } + if (n2Block.getValueCount(p) != 1) { + if (n2Block.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendDouble(Hypot.process(n1Block.getDouble(n1Block.getFirstValueIndex(p)), n2Block.getDouble(n2Block.getFirstValueIndex(p)))); + } + return result.build(); + } + } + + public DoubleVector eval(int positionCount, DoubleVector n1Vector, DoubleVector n2Vector) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendDouble(p, Hypot.process(n1Vector.getDouble(p), n2Vector.getDouble(p))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "HypotEvaluator[" + "n1=" + n1 + ", n2=" + n2 + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(n1, n2); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory n1; + + private final EvalOperator.ExpressionEvaluator.Factory n2; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory n1, + EvalOperator.ExpressionEvaluator.Factory n2) { + this.source = source; + this.n1 = n1; + this.n2 = n2; + } + + @Override + public HypotEvaluator get(DriverContext 
context) { + return new HypotEvaluator(source, n1.get(context), n2.get(context), context); + } + + @Override + public String toString() { + return "HypotEvaluator[" + "n1=" + n1 + ", n2=" + n2 + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 9aa4d874c53e2..18ee6b9417e5c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -37,6 +37,11 @@ public enum Cap { */ FN_CBRT, + /** + * Support for function {@code HYPOT}. + */ + FN_HYPOT, + /** * Support for {@code MV_APPEND} function. #107001 */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 8e238f9ed760c..3b1225555b297 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -78,6 +78,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.math.E; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Exp; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Floor; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Hypot; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Log; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Log10; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pi; @@ -285,6 +286,7 @@ private FunctionDefinition[][] functions() { def(Exp.class, Exp::new, "exp"), def(Floor.class, Floor::new, "floor"), 
def(Greatest.class, Greatest::new, "greatest"), + def(Hypot.class, Hypot::new, "hypot"), def(Log.class, Log::new, "log"), def(Log10.class, Log10::new, "log10"), def(Least.class, Least::new, "least"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java index afe9bf6e45eda..e4e1fbb6e5aac 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java @@ -30,6 +30,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.ip.IpPrefix; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan2; import org.elasticsearch.xpack.esql.expression.function.scalar.math.E; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Hypot; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Log; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pi; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pow; @@ -82,6 +83,7 @@ public static List getNamedWriteables() { entries.add(E.ENTRY); entries.add(EndsWith.ENTRY); entries.add(Greatest.ENTRY); + entries.add(Hypot.ENTRY); entries.add(In.ENTRY); entries.add(InsensitiveEquals.ENTRY); entries.add(DateExtract.ENTRY); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Hypot.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Hypot.java new file mode 100644 index 0000000000000..1a644c929f3c3 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Hypot.java @@ -0,0 +1,130 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Expressions; +import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNumeric; + +/** + * Returns the hypotenuse of the numbers given as parameters. + */ +public class Hypot extends EsqlScalarFunction { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Hypot", Hypot::new); + + private final Expression n1; + private final Expression n2; + + @FunctionInfo(returnType = "double", description = """ + Returns the hypotenuse of two numbers. The input can be any numeric values, the return value is always a double. 
+ Hypotenuses of infinities are null.""", examples = @Example(file = "math", tag = "hypot")) + public Hypot( + Source source, + @Param( + name = "number1", + type = { "double", "integer", "long", "unsigned_long" }, + description = "Numeric expression. If `null`, the function returns `null`." + ) Expression n1, + @Param( + name = "number2", + type = { "double", "integer", "long", "unsigned_long" }, + description = "Numeric expression. If `null`, the function returns `null`." + ) Expression n2 + ) { + super(source, List.of(n1, n2)); + this.n1 = n1; + this.n2 = n2; + } + + private Hypot(StreamInput in) throws IOException { + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + source().writeTo(out); + out.writeNamedWriteable(n1); + out.writeNamedWriteable(n2); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + @Override + public Expression replaceChildren(List newChildren) { + return new Hypot(source(), newChildren.get(0), newChildren.get(1)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, Hypot::new, n1, n2); + } + + @Evaluator + static double process(double n1, double n2) { + return Math.hypot(n1, n2); + } + + @Override + public DataType dataType() { + return DataType.DOUBLE; + } + + @Override + protected TypeResolution resolveType() { + if (childrenResolved() == false) { + return new TypeResolution("Unresolved children"); + } + + TypeResolution resolution = isNumeric(n1, sourceText(), TypeResolutions.ParamOrdinal.FIRST); + if (resolution.unresolved()) { + return resolution; + } + return isNumeric(n2, sourceText(), TypeResolutions.ParamOrdinal.SECOND); + } + + @Override + public boolean foldable() { + return Expressions.foldable(children()); + } + + @Override + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { + 
var n1Eval = Cast.cast(source(), n1.dataType(), DataType.DOUBLE, toEvaluator.apply(n1)); + var n2Eval = Cast.cast(source(), n2.dataType(), DataType.DOUBLE, toEvaluator.apply(n2)); + return new HypotEvaluator.Factory(source(), n1Eval, n2Eval); + } + + public Expression n1() { + return n1; + } + + public Expression n2() { + return n2; + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotSerializationTests.java new file mode 100644 index 0000000000000..5c2e84fcba8e0 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotSerializationTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; +import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests; + +import java.io.IOException; + +public class HypotSerializationTests extends AbstractExpressionSerializationTests { + + @Override + protected Hypot createTestInstance() { + Source source = randomSource(); + Expression n1 = randomChild(); + Expression n2 = randomChild(); + return new Hypot(source, n1, n2); + } + + @Override + protected Hypot mutateInstance(Hypot instance) throws IOException { + Source source = instance.source(); + Expression n1 = instance.n1(); + Expression n2 = instance.n2(); + if (randomBoolean()) { + n1 = randomValueOtherThan(n1, AbstractUnaryScalarSerializationTests::randomChild); + } else { + n2 = randomValueOtherThan(n2, AbstractUnaryScalarSerializationTests::randomChild); + } + return new Hypot(source, n1, n2); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotTests.java new file mode 100644 index 0000000000000..0161abc2b9560 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotTests.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; + +import java.util.List; +import java.util.function.Supplier; + +public class HypotTests extends AbstractScalarFunctionTestCase { + public HypotTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = TestCaseSupplier.forBinaryCastingToDouble( + "HypotEvaluator", + "n1", + "n2", + Math::hypot, + Double.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, + List.of() + ); + return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); + } + + @Override + protected Expression build(Source source, List args) { + return new Hypot(source, args.get(0), args.get(1)); + } +} From f1f5ee06a351f7bc38aadcd3c03784fb2e423c51 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Fri, 11 Oct 2024 21:58:15 +0200 Subject: [PATCH 020/449] Replace cloud-ess docker image with wolfi-ess (#114413) * Replace cloud-ess docker image with wolfi-ess We just replaced the existing implementation of cloud-ess with what was wolfi-ess which is a wolfi based ess image. The cloud image itself will be removed in a future commit it was not used anywhere * Switch to test cloud docker image instead of default docker in packaging pr tests. 
This adds way more coverage than the default docker image which is also barely touched --- .../pull-request/packaging-tests-unix.yml | 2 +- .../gradle/internal/DockerBase.java | 9 +--- .../InternalDistributionDownloadPlugin.java | 3 -- ...WolfiEssElasticsearchDistributionType.java | 27 ------------ ...nternalElasticsearchDistributionTypes.java | 4 +- .../internal/test/DistroTestPlugin.java | 2 - distribution/docker/README.md | 9 +--- distribution/docker/build.gradle | 39 ++++++++--------- distribution/docker/src/docker/Dockerfile.ess | 43 ++++++++----------- .../packaging/test/DockerTests.java | 35 ++++++--------- .../test/KeystoreManagementTests.java | 2 +- .../packaging/test/PackagingTestCase.java | 7 +-- .../packaging/util/Distribution.java | 7 +-- .../packaging/util/docker/Docker.java | 6 +-- .../packaging/util/docker/DockerRun.java | 1 - settings.gradle | 2 - 16 files changed, 62 insertions(+), 136 deletions(-) delete mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/DockerWolfiEssElasticsearchDistributionType.java diff --git a/.buildkite/pipelines/pull-request/packaging-tests-unix.yml b/.buildkite/pipelines/pull-request/packaging-tests-unix.yml index e94baac8d9448..04ccc41891b3b 100644 --- a/.buildkite/pipelines/pull-request/packaging-tests-unix.yml +++ b/.buildkite/pipelines/pull-request/packaging-tests-unix.yml @@ -5,7 +5,7 @@ steps: steps: - label: "{{matrix.image}} / docker / packaging-tests-unix" key: "packaging-tests-unix-docker" - command: ./.ci/scripts/packaging-test.sh destructiveDistroTest.docker + command: ./.ci/scripts/packaging-test.sh destructiveDistroTest.docker-cloud-ess timeout_in_minutes: 300 matrix: setup: diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java index 95f279bfa5162..9d78d3229edc1 100644 --- 
a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java @@ -24,10 +24,6 @@ public enum DockerBase { // Base image with extras for Cloud CLOUD("ubuntu:20.04", "-cloud", "apt-get"), - // Based on CLOUD above, with more extras. We don't set a base image because - // we programmatically extend from the Cloud image. - CLOUD_ESS(null, "-cloud-ess", "apt-get"), - // Chainguard based wolfi image with latest jdk // This is usually updated via renovatebot // spotless:off @@ -36,10 +32,9 @@ public enum DockerBase { "apk" ), // spotless:on - // Based on WOLFI above, with more extras. We don't set a base image because - // we programmatically extend from the Wolfi image. - WOLFI_ESS(null, "-wolfi-ess", "apk"); + // we programmatically extend from the wolfi image. + CLOUD_ESS(null, "-cloud-ess", "apk"); private final String image; private final String suffix; diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java index 6b93ea10283ae..19309fe2da8a3 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java @@ -181,9 +181,6 @@ private static String distributionProjectName(ElasticsearchDistribution distribu if (distribution.getType() == InternalElasticsearchDistributionTypes.DOCKER_WOLFI) { return projectName + "wolfi-docker" + archString + "-export"; } - if (distribution.getType() == InternalElasticsearchDistributionTypes.DOCKER_WOLFI_ESS) { - return projectName + "wolfi-ess-docker" + archString + "-export"; - } return projectName + distribution.getType().getName(); } diff --git 
a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/DockerWolfiEssElasticsearchDistributionType.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/DockerWolfiEssElasticsearchDistributionType.java deleted file mode 100644 index 550c43d43a536..0000000000000 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/DockerWolfiEssElasticsearchDistributionType.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.gradle.internal.distribution; - -import org.elasticsearch.gradle.ElasticsearchDistributionType; - -public class DockerWolfiEssElasticsearchDistributionType implements ElasticsearchDistributionType { - - DockerWolfiEssElasticsearchDistributionType() {} - - @Override - public String getName() { - return "dockerWolfiEss"; - } - - @Override - public boolean isDocker() { - return true; - } -} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java index 077a47041861f..ba0e76b3f5b99 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java @@ -22,7 +22,6 @@ public class 
InternalElasticsearchDistributionTypes { public static ElasticsearchDistributionType DOCKER_CLOUD = new DockerCloudElasticsearchDistributionType(); public static ElasticsearchDistributionType DOCKER_CLOUD_ESS = new DockerCloudEssElasticsearchDistributionType(); public static ElasticsearchDistributionType DOCKER_WOLFI = new DockerWolfiElasticsearchDistributionType(); - public static ElasticsearchDistributionType DOCKER_WOLFI_ESS = new DockerWolfiEssElasticsearchDistributionType(); public static List ALL_INTERNAL = List.of( DEB, @@ -32,7 +31,6 @@ public class InternalElasticsearchDistributionTypes { DOCKER_IRONBANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, - DOCKER_WOLFI, - DOCKER_WOLFI_ESS + DOCKER_WOLFI ); } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java index cc852e615726a..77ab9557eac33 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java @@ -54,7 +54,6 @@ import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_IRONBANK; import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_UBI; import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_WOLFI; -import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_WOLFI_ESS; import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.RPM; /** @@ -153,7 +152,6 @@ private static Map> lifecycleTask lifecyleTasks.put(DOCKER_CLOUD, project.getTasks().register(taskPrefix + ".docker-cloud")); lifecyleTasks.put(DOCKER_CLOUD_ESS, project.getTasks().register(taskPrefix + ".docker-cloud-ess")); 
lifecyleTasks.put(DOCKER_WOLFI, project.getTasks().register(taskPrefix + ".docker-wolfi")); - lifecyleTasks.put(DOCKER_WOLFI_ESS, project.getTasks().register(taskPrefix + ".docker-wolfi-ess")); lifecyleTasks.put(ARCHIVE, project.getTasks().register(taskPrefix + ".archives")); lifecyleTasks.put(DEB, project.getTasks().register(taskPrefix + ".packages")); lifecyleTasks.put(RPM, lifecyleTasks.get(DEB)); diff --git a/distribution/docker/README.md b/distribution/docker/README.md index 28e6ff314d91a..49facab461edc 100644 --- a/distribution/docker/README.md +++ b/distribution/docker/README.md @@ -7,7 +7,7 @@ the [DockerBase] enum. * UBI - the same as the default image, but based upon [RedHat's UBI images][ubi], specifically their minimal flavour. * Wolfi - the same as the default image, but based upon [Wolfi](https://github.com/wolfi-dev) - * Wolfi ESS - this directly extends the Wolfi image, and adds all ES plugins + * Cloud ESS - this directly extends the Wolfi image, and adds all ES plugins that the ES build generates in an archive directory. It also sets an environment variable that points at this directory. This allows plugins to be installed from the archive instead of the internet, speeding up @@ -23,7 +23,6 @@ the [DockerBase] enum. software (FOSS) and Commercial off-the-shelf (COTS). In practice, this is another UBI build, this time on the regular UBI image, with extra hardening. See below for more details. - * Cloud - this is mostly the same as the default image, with some notable differences: * `filebeat` and `metricbeat` are included * `wget` is included @@ -31,12 +30,6 @@ the [DockerBase] enum. `/app/elasticsearch.sh`. In normal use this file would be bind-mounted in, but the image ships a stub version of this file so that the image can still be tested. - * Cloud ESS - this directly extends the Cloud image, and adds all ES plugins - that the ES build generates in an archive directory. It also sets an - environment variable that points at this directory. 
This allows plugins to - be installed from the archive instead of the internet, speeding up - deployment times. - The long-term goal is for both Cloud images to be retired in favour of the default image. diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index 99c482d91085a..e40ac68bbacf4 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -381,7 +381,7 @@ private static List generateTags(DockerBase base, Architecture architect String image = "elasticsearch${base.suffix}" String namespace = 'elasticsearch' - if (base == DockerBase.CLOUD || base == DockerBase.CLOUD_ESS || base == DockerBase.WOLFI_ESS) { + if (base == DockerBase.CLOUD || base == DockerBase.CLOUD_ESS) { namespace += '-ci' } @@ -446,7 +446,8 @@ void addBuildDockerImageTask(Architecture architecture, DockerBase base) { } } -void addBuildEssDockerImageTask(Architecture architecture, DockerBase dockerBase) { +void addBuildEssDockerImageTask(Architecture architecture) { + DockerBase dockerBase = DockerBase.CLOUD_ESS String arch = architecture == Architecture.AARCH64 ? '-aarch64' : '' String contextDir = "${project.buildDir}/docker-context/elasticsearch${dockerBase.suffix}-${VersionProperties.elasticsearch}-docker-build-context${arch}" @@ -460,22 +461,20 @@ void addBuildEssDockerImageTask(Architecture architecture, DockerBase dockerBase from configurations.allPlugins } - if (dockerBase == DockerBase.WOLFI_ESS) { - // If we're performing a release build, but `build.id` hasn't been set, we can - // infer that we're not at the Docker building stage of the build, and therefore - // we should skip the beats part of the build. 
- String buildId = providers.systemProperty('build.id').getOrNull() - boolean includeBeats = VersionProperties.isElasticsearchSnapshot() == true || buildId != null || useDra + // If we're performing a release build, but `build.id` hasn't been set, we can + // infer that we're not at the Docker building stage of the build, and therefore + // we should skip the beats part of the build. + String buildId = providers.systemProperty('build.id').getOrNull() + boolean includeBeats = VersionProperties.isElasticsearchSnapshot() == true || buildId != null || useDra - if (includeBeats) { - from configurations.getByName("filebeat_${architecture.classifier}") - from configurations.getByName("metricbeat_${architecture.classifier}") - } - // For some reason, the artifact name can differ depending on what repository we used. - rename ~/((?:file|metric)beat)-.*\.tar\.gz$/, "\$1-${VersionProperties.elasticsearch}.tar.gz" + if (includeBeats) { + from configurations.getByName("filebeat_${architecture.classifier}") + from configurations.getByName("metricbeat_${architecture.classifier}") } + // For some reason, the artifact name can differ depending on what repository we used. + rename ~/((?:file|metric)beat)-.*\.tar\.gz$/, "\$1-${VersionProperties.elasticsearch}.tar.gz" - String baseSuffix = dockerBase == DockerBase.CLOUD_ESS ? DockerBase.CLOUD.suffix : DockerBase.WOLFI.suffix + String baseSuffix = DockerBase.WOLFI.suffix from(projectDir.resolve("src/docker/Dockerfile.ess")) { expand( [ @@ -493,7 +492,7 @@ void addBuildEssDockerImageTask(Architecture architecture, DockerBase dockerBase final TaskProvider buildDockerImageTask = tasks.register(taskName("build", architecture, dockerBase, "DockerImage"), DockerBuildTask) { - DockerBase base = dockerBase == DockerBase.CLOUD_ESS ? 
DockerBase.CLOUD : DockerBase.WOLFI + DockerBase base = DockerBase.WOLFI TaskProvider buildBaseTask = tasks.named(taskName("build", architecture, base, "DockerImage")) inputs.files(buildBaseTask) @@ -519,7 +518,7 @@ void addBuildEssDockerImageTask(Architecture architecture, DockerBase dockerBase for (final Architecture architecture : Architecture.values()) { for (final DockerBase base : DockerBase.values()) { - if (base == DockerBase.CLOUD_ESS || base == DockerBase.WOLFI_ESS) { + if (base == DockerBase.CLOUD_ESS) { continue } addBuildDockerContextTask(architecture, base) @@ -527,8 +526,7 @@ for (final Architecture architecture : Architecture.values()) { addBuildDockerImageTask(architecture, base) } - addBuildEssDockerImageTask(architecture, DockerBase.CLOUD_ESS) - addBuildEssDockerImageTask(architecture, DockerBase.WOLFI_ESS) + addBuildEssDockerImageTask(architecture) } def exportDockerImages = tasks.register("exportDockerImages") @@ -564,8 +562,7 @@ subprojects { Project subProject -> (base == DockerBase.CLOUD ? 'cloud.tar' : (base == DockerBase.CLOUD_ESS ? 'cloud-ess.tar' : (base == DockerBase.WOLFI ? 'wolfi.tar' : - (base == DockerBase.WOLFI_ESS ? 'wolfi-ess.tar' : - 'docker.tar'))))) + 'docker.tar')))) final String artifactName = "elasticsearch${arch}${base.suffix}_test" final String exportTaskName = taskName("export", architecture, base, 'DockerImage') diff --git a/distribution/docker/src/docker/Dockerfile.ess b/distribution/docker/src/docker/Dockerfile.ess index 3ca5e8f2b42a3..197af28b93455 100644 --- a/distribution/docker/src/docker/Dockerfile.ess +++ b/distribution/docker/src/docker/Dockerfile.ess @@ -2,26 +2,24 @@ FROM ${base_image} AS builder USER root -<% if (docker_base == "wolfi_ess") { %> - # Add plugins infrastructure - RUN mkdir -p /opt/plugins/archive - RUN chmod -R 0555 /opt/plugins - - COPY filebeat-${version}.tar.gz metricbeat-${version}.tar.gz /tmp/ - RUN set -eux ; \\ - for beat in filebeat metricbeat ; do \\ - if [ ! 
-s /tmp/\$beat-${version}.tar.gz ]; then \\ - echo "/tmp/\$beat-${version}.tar.gz is empty - cannot uncompress" 2>&1 ; \\ - exit 1 ; \\ - fi ; \\ - if ! tar tf /tmp/\$beat-${version}.tar.gz >/dev/null; then \\ - echo "/tmp/\$beat-${version}.tar.gz is corrupt - cannot uncompress" 2>&1 ; \\ - exit 1 ; \\ - fi ; \\ - mkdir -p /opt/\$beat ; \\ - tar xf /tmp/\$beat-${version}.tar.gz -C /opt/\$beat --strip-components=1 ; \\ - done -<% } %> +# Add plugins infrastructure +RUN mkdir -p /opt/plugins/archive +RUN chmod -R 0555 /opt/plugins + +COPY filebeat-${version}.tar.gz metricbeat-${version}.tar.gz /tmp/ +RUN set -eux ; \\ + for beat in filebeat metricbeat ; do \\ + if [ ! -s /tmp/\$beat-${version}.tar.gz ]; then \\ + echo "/tmp/\$beat-${version}.tar.gz is empty - cannot uncompress" 2>&1 ; \\ + exit 1 ; \\ + fi ; \\ + if ! tar tf /tmp/\$beat-${version}.tar.gz >/dev/null; then \\ + echo "/tmp/\$beat-${version}.tar.gz is corrupt - cannot uncompress" 2>&1 ; \\ + exit 1 ; \\ + fi ; \\ + mkdir -p /opt/\$beat ; \\ + tar xf /tmp/\$beat-${version}.tar.gz -C /opt/\$beat --strip-components=1 ; \\ + done COPY plugins/*.zip /opt/plugins/archive/ @@ -29,7 +27,6 @@ RUN chown 1000:1000 /opt/plugins/archive/* RUN chmod 0444 /opt/plugins/archive/* FROM ${base_image} -<% if (docker_base == "wolfi_ess") { %> USER root RUN <%= retry.loop("apk", "export DEBIAN_FRONTEND=noninteractive && apk update && apk update && apk add --no-cache wget") %> @@ -44,8 +41,4 @@ RUN mkdir /app && \\ COPY --from=builder --chown=0:0 /opt /opt USER 1000:0 -<% } else { %> -COPY --from=builder /opt/plugins /opt/plugins -<% } %> - ENV ES_PLUGIN_ARCHIVE_DIR /opt/plugins/archive diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java index 2a3e0c16fdc2f..4ca97bff42333 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java +++ 
b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java @@ -99,6 +99,7 @@ *
<ul> * <li>The default image with a custom, small base image</li> * <li>A UBI-based image</li> * <li>Another UBI image for Iron Bank</li> + * <li>A WOLFI-based image</li> * <li>Images for Cloud</li> * </ul> */ @@ -170,9 +171,7 @@ public void test012SecurityCanBeDisabled() throws Exception { public void test020PluginsListWithNoPlugins() { assumeTrue( "Only applies to non-Cloud images", - distribution.packaging != Packaging.DOCKER_CLOUD - && distribution().packaging != Packaging.DOCKER_CLOUD_ESS - && distribution().packaging != Packaging.DOCKER_WOLFI_ESS + distribution.packaging != Packaging.DOCKER_CLOUD && distribution().packaging != Packaging.DOCKER_CLOUD_ESS ); final Installation.Executables bin = installation.executables(); @@ -203,15 +202,14 @@ public void test021InstallPlugin() { * Checks that ESS images can install plugins from the local archive. */ public void test022InstallPluginsFromLocalArchive() { - assumeTrue( - "Only ESS images have a local archive", - distribution().packaging == Packaging.DOCKER_CLOUD_ESS || distribution().packaging == Packaging.DOCKER_WOLFI_ESS - ); + assumeTrue("Only ESS images have a local archive", distribution().packaging == Packaging.DOCKER_CLOUD_ESS); final String plugin = "analysis-icu"; final Installation.Executables bin = installation.executables(); + listPluginArchive().forEach(System.out::println); assertThat("Expected " + plugin + " to not be installed", listPlugins(), not(hasItems(plugin))); + assertThat("Expected " + plugin + " available in archive", listPluginArchive(), hasSize(16)); // Stuff the proxy settings with garbage, so any attempt to go out to the internet would fail sh.getEnv() @@ -259,10 +257,7 @@ public void test023InstallPluginUsingConfigFile() { * Checks that ESS images can manage plugins from the local archive by deploying a plugins config file.
*/ public void test024InstallPluginFromArchiveUsingConfigFile() { - assumeTrue( - "Only ESS image has a plugin archive", - distribution().packaging == Packaging.DOCKER_CLOUD_ESS || distribution().packaging == Packaging.DOCKER_WOLFI_ESS - ); + assumeTrue("Only ESS image has a plugin archive", distribution().packaging == Packaging.DOCKER_CLOUD_ESS); final String filename = "elasticsearch-plugins.yml"; append(tempDir.resolve(filename), """ @@ -394,7 +389,7 @@ public void test040JavaUsesTheOsProvidedKeystore() { if (distribution.packaging == Packaging.DOCKER_UBI || distribution.packaging == Packaging.DOCKER_IRON_BANK) { // In these images, the `cacerts` file ought to be a symlink here assertThat(path, equalTo("/etc/pki/ca-trust/extracted/java/cacerts")); - } else if (distribution.packaging == Packaging.DOCKER_WOLFI || distribution.packaging == Packaging.DOCKER_WOLFI_ESS) { + } else if (distribution.packaging == Packaging.DOCKER_WOLFI || distribution.packaging == Packaging.DOCKER_CLOUD_ESS) { // In these images, the `cacerts` file ought to be a symlink here assertThat(path, equalTo("/etc/ssl/certs/java/cacerts")); } else { @@ -1121,10 +1116,8 @@ public void test170DefaultShellIsBash() { */ public void test171AdditionalCliOptionsAreForwarded() throws Exception { assumeTrue( - "Does not apply to Cloud and wolfi ess images, because they don't use the default entrypoint", - distribution.packaging != Packaging.DOCKER_CLOUD - && distribution().packaging != Packaging.DOCKER_CLOUD_ESS - && distribution().packaging != Packaging.DOCKER_WOLFI_ESS + "Does not apply to Cloud and Cloud ESS images, because they don't use the default entrypoint", + distribution.packaging != Packaging.DOCKER_CLOUD && distribution().packaging != Packaging.DOCKER_CLOUD_ESS ); runContainer(distribution(), builder().runArgs("bin/elasticsearch", "-Ecluster.name=kimchy").envVar("ELASTIC_PASSWORD", PASSWORD)); @@ -1211,11 +1204,7 @@ public void test310IronBankImageHasNoAdditionalLabels() throws Exception { * 
Check that the Cloud image contains the required Beats */ public void test400CloudImageBundlesBeats() { - assumeTrue( - distribution.packaging == Packaging.DOCKER_CLOUD - || distribution.packaging == Packaging.DOCKER_CLOUD_ESS - || distribution.packaging == Packaging.DOCKER_WOLFI_ESS - ); + assumeTrue(distribution.packaging == Packaging.DOCKER_CLOUD || distribution.packaging == Packaging.DOCKER_CLOUD_ESS); final List contents = listContents("/opt"); assertThat("Expected beats in /opt", contents, hasItems("filebeat", "metricbeat")); @@ -1233,6 +1222,10 @@ private List listPlugins() { return sh.run(bin.pluginTool + " list").stdout().lines().collect(Collectors.toList()); } + private List listPluginArchive() { + return sh.run("ls -lh /opt/plugins/archive").stdout().lines().collect(Collectors.toList()); + } + /** * Check that readiness listener works */ diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java index 2aff1f258ed65..a988a446f561f 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java @@ -436,7 +436,7 @@ private void verifyKeystorePermissions() { switch (distribution.packaging) { case TAR, ZIP -> assertThat(keystore, file(File, ARCHIVE_OWNER, ARCHIVE_OWNER, p660)); case DEB, RPM -> assertThat(keystore, file(File, "root", "elasticsearch", p660)); - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI, DOCKER_WOLFI_ESS -> assertThat( + case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> assertThat( keystore, DockerFileMatcher.file(p660) ); diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java 
b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java index 487a00bdecac9..644990105f60f 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java @@ -245,7 +245,7 @@ protected static void install() throws Exception { installation = Packages.installPackage(sh, distribution); Packages.verifyPackageInstallation(installation, distribution, sh); } - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI, DOCKER_WOLFI_ESS -> { + case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> { installation = Docker.runContainer(distribution); Docker.verifyContainerInstallation(installation); } @@ -338,7 +338,6 @@ public Shell.Result runElasticsearchStartCommand(String password, boolean daemon case DOCKER_CLOUD: case DOCKER_CLOUD_ESS: case DOCKER_WOLFI: - case DOCKER_WOLFI_ESS: // nothing, "installing" docker image is running it return Shell.NO_OP; default: @@ -362,7 +361,6 @@ public void stopElasticsearch() throws Exception { case DOCKER_CLOUD: case DOCKER_CLOUD_ESS: case DOCKER_WOLFI: - case DOCKER_WOLFI_ESS: // nothing, "installing" docker image is running it break; default: @@ -375,8 +373,7 @@ public void awaitElasticsearchStartup(Shell.Result result) throws Exception { switch (distribution.packaging) { case TAR, ZIP -> Archives.assertElasticsearchStarted(installation); case DEB, RPM -> Packages.assertElasticsearchStarted(sh, installation); - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI, DOCKER_WOLFI_ESS -> Docker - .waitForElasticsearchToStart(); + case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> Docker.waitForElasticsearchToStart(); default -> throw new IllegalStateException("Unknown Elasticsearch packaging type."); } } diff --git 
a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java index d63d956dc5199..05cef4a0818ba 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java @@ -39,8 +39,6 @@ public Distribution(Path path) { this.packaging = Packaging.DOCKER_CLOUD_ESS; } else if (filename.endsWith(".wolfi.tar")) { this.packaging = Packaging.DOCKER_WOLFI; - } else if (filename.endsWith(".wolfi-ess.tar")) { - this.packaging = Packaging.DOCKER_WOLFI_ESS; } else { int lastDot = filename.lastIndexOf('.'); this.packaging = Packaging.valueOf(filename.substring(lastDot + 1).toUpperCase(Locale.ROOT)); @@ -65,7 +63,7 @@ public boolean isPackage() { */ public boolean isDocker() { return switch (packaging) { - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI, DOCKER_WOLFI_ESS -> true; + case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> true; default -> false; }; } @@ -81,8 +79,7 @@ public enum Packaging { DOCKER_IRON_BANK(".ironbank.tar", Platforms.isDocker()), DOCKER_CLOUD(".cloud.tar", Platforms.isDocker()), DOCKER_CLOUD_ESS(".cloud-ess.tar", Platforms.isDocker()), - DOCKER_WOLFI(".wolfi.tar", Platforms.isDocker()), - DOCKER_WOLFI_ESS(".wolfi-ess.tar", Platforms.isDocker()); + DOCKER_WOLFI(".wolfi.tar", Platforms.isDocker()); /** The extension of this distribution's file */ public final String extension; diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java index 6f7827663d46c..c38eaa58f0552 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java @@ -532,9 +532,7 @@ 
public static void verifyContainerInstallation(Installation es) { ) ); - if (es.distribution.packaging == Packaging.DOCKER_CLOUD - || es.distribution.packaging == Packaging.DOCKER_CLOUD_ESS - || es.distribution.packaging == Packaging.DOCKER_WOLFI_ESS) { + if (es.distribution.packaging == Packaging.DOCKER_CLOUD || es.distribution.packaging == Packaging.DOCKER_CLOUD_ESS) { verifyCloudContainerInstallation(es); } } @@ -543,7 +541,7 @@ private static void verifyCloudContainerInstallation(Installation es) { final String pluginArchive = "/opt/plugins/archive"; final List plugins = listContents(pluginArchive); - if (es.distribution.packaging == Packaging.DOCKER_CLOUD_ESS || es.distribution.packaging == Packaging.DOCKER_WOLFI_ESS) { + if (es.distribution.packaging == Packaging.DOCKER_CLOUD_ESS) { assertThat("ESS image should come with plugins in " + pluginArchive, plugins, not(empty())); final List repositoryPlugins = plugins.stream() diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java index a1529de825804..e562e7591564e 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java @@ -168,7 +168,6 @@ public static String getImageName(Distribution distribution) { case DOCKER_CLOUD -> "-cloud"; case DOCKER_CLOUD_ESS -> "-cloud-ess"; case DOCKER_WOLFI -> "-wolfi"; - case DOCKER_WOLFI_ESS -> "-wolfi-ess"; default -> throw new IllegalStateException("Unexpected distribution packaging type: " + distribution.packaging); }; diff --git a/settings.gradle b/settings.gradle index a47751fd499c0..be0844de1164a 100644 --- a/settings.gradle +++ b/settings.gradle @@ -75,8 +75,6 @@ List projects = [ 'distribution:docker:ubi-docker-export', 'distribution:docker:wolfi-docker-aarch64-export', 'distribution:docker:wolfi-docker-export', - 
'distribution:docker:wolfi-ess-docker-aarch64-export', - 'distribution:docker:wolfi-ess-docker-export', 'distribution:packages:aarch64-deb', 'distribution:packages:deb', 'distribution:packages:aarch64-rpm', From 049c4825707247ac7f189558bbd02a3d401a8e54 Mon Sep 17 00:00:00 2001 From: Patrick Doyle <810052+prdoyle@users.noreply.github.com> Date: Fri, 11 Oct 2024 16:01:48 -0400 Subject: [PATCH 021/449] Initial InstrumenterTests (#114422) * Initial InstrumenterTests * Assert on instrumentation method arguments --- .../tools/entitlement-agent/impl/build.gradle | 7 + .../instrumentation/impl/ASMUtils.java | 31 ++++ .../impl/InstrumenterTests.java | 153 ++++++++++++++++++ ...icsearch.entitlement.api.EntitlementChecks | 10 ++ .../agent/EntitlementAgentTests.java | 4 + gradle/verification-metadata.xml | 5 + 6 files changed, 210 insertions(+) create mode 100644 distribution/tools/entitlement-agent/impl/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/ASMUtils.java create mode 100644 distribution/tools/entitlement-agent/impl/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java create mode 100644 distribution/tools/entitlement-agent/impl/src/test/resources/META-INF/services/org.elasticsearch.entitlement.api.EntitlementChecks diff --git a/distribution/tools/entitlement-agent/impl/build.gradle b/distribution/tools/entitlement-agent/impl/build.gradle index f73e21505d483..16f134bf0e693 100644 --- a/distribution/tools/entitlement-agent/impl/build.gradle +++ b/distribution/tools/entitlement-agent/impl/build.gradle @@ -12,6 +12,13 @@ apply plugin: 'elasticsearch.build' dependencies { compileOnly project(':distribution:tools:entitlement-agent') implementation 'org.ow2.asm:asm:9.7' + testImplementation project(":test:framework") + testImplementation project(":distribution:tools:entitlement-bridge") + testImplementation 'org.ow2.asm:asm-util:9.7' +} + +tasks.named('test').configure { + systemProperty "tests.security.manager", "false" } 
tasks.named('forbiddenApisMain').configure { diff --git a/distribution/tools/entitlement-agent/impl/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/ASMUtils.java b/distribution/tools/entitlement-agent/impl/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/ASMUtils.java new file mode 100644 index 0000000000000..d7aaa6d854e9c --- /dev/null +++ b/distribution/tools/entitlement-agent/impl/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/ASMUtils.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.instrumentation.impl; + +import org.objectweb.asm.ClassReader; +import org.objectweb.asm.util.Printer; +import org.objectweb.asm.util.Textifier; +import org.objectweb.asm.util.TraceClassVisitor; + +import java.io.PrintWriter; +import java.io.StringWriter; + +public class ASMUtils { + public static String bytecode2text(byte[] classBytes) { + ClassReader classReader = new ClassReader(classBytes); + StringWriter stringWriter = new StringWriter(); + try (PrintWriter printWriter = new PrintWriter(stringWriter)) { + Printer printer = new Textifier(); // For a textual representation + TraceClassVisitor traceClassVisitor = new TraceClassVisitor(null, printer, printWriter); + classReader.accept(traceClassVisitor, 0); + return stringWriter.toString(); + } + } +} diff --git a/distribution/tools/entitlement-agent/impl/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java 
b/distribution/tools/entitlement-agent/impl/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java new file mode 100644 index 0000000000000..e807ecee4f103 --- /dev/null +++ b/distribution/tools/entitlement-agent/impl/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java @@ -0,0 +1,153 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.instrumentation.impl; + +import org.elasticsearch.entitlement.api.EntitlementChecks; +import org.elasticsearch.entitlement.api.EntitlementProvider; +import org.elasticsearch.entitlement.instrumentation.InstrumentationService; +import org.elasticsearch.entitlement.instrumentation.MethodKey; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.util.Map; + +import static org.elasticsearch.entitlement.instrumentation.impl.ASMUtils.bytecode2text; + +/** + * This tests {@link InstrumenterImpl} in isolation, without a java agent. + * It causes the methods to be instrumented, and verifies that the instrumentation is called as expected. + * Problems with bytecode generation are easier to debug this way than in the context of an agent. 
+ */ +@ESTestCase.WithoutSecurityManager +public class InstrumenterTests extends ESTestCase { + final InstrumentationService instrumentationService = new InstrumentationServiceImpl(); + + private static TestEntitlementManager getTestChecks() { + return (TestEntitlementManager) EntitlementProvider.checks(); + } + + @Before + public void initialize() { + getTestChecks().isActive = false; + } + + /** + * Contains all the virtual methods from {@link ClassToInstrument}, + * allowing this test to call them on the dynamically loaded instrumented class. + */ + public interface Testable {} + + /** + * This is a placeholder for real class library methods. + * Without the java agent, we can't instrument the real methods, so we instrument this instead. + *
<p>
    + * Methods of this class must have the same signature and the same static/virtual condition as the corresponding real method. + * They should assert that the arguments came through correctly. + * They must not throw {@link TestException}. + */ + public static class ClassToInstrument implements Testable { + public static void systemExit(int status) { + assertEquals(123, status); + } + } + + static final class TestException extends RuntimeException {} + + /** + * We're not testing the permission checking logic here. + * This is a trivial implementation of {@link EntitlementChecks} that just always throws, + * just to demonstrate that the injected bytecodes succeed in calling these methods. + */ + public static class TestEntitlementManager implements EntitlementChecks { + /** + * This allows us to test that the instrumentation is correct in both cases: + * if the check throws, and if it doesn't. + */ + volatile boolean isActive; + + @Override + public void checkSystemExit(Class callerClass, int status) { + assertSame(InstrumenterTests.class, callerClass); + assertEquals(123, status); + throwIfActive(); + } + + private void throwIfActive() { + if (isActive) { + throw new TestException(); + } + } + } + + public void test() throws Exception { + // This test doesn't replace ClassToInstrument in-place but instead loads a separate + // class ClassToInstrument_NEW that contains the instrumentation. Because of this, + // we need to configure the Transformer to use a MethodKey and instrumentationMethod + // with slightly different signatures (using the common interface Testable) which + // is not what would happen when it's run by the agent. 
+ + MethodKey k1 = instrumentationService.methodKeyForTarget(ClassToInstrument.class.getMethod("systemExit", int.class)); + Method v1 = EntitlementChecks.class.getMethod("checkSystemExit", Class.class, int.class); + var instrumenter = new InstrumenterImpl("_NEW", Map.of(k1, v1)); + + byte[] newBytecode = instrumenter.instrumentClassFile(ClassToInstrument.class).bytecodes(); + + if (logger.isTraceEnabled()) { + logger.trace("Bytecode after instrumentation:\n{}", bytecode2text(newBytecode)); + } + + Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( + ClassToInstrument.class.getName() + "_NEW", + newBytecode + ); + + // Before checking is active, nothing should throw + callStaticSystemExit(newClass, 123); + + getTestChecks().isActive = true; + + // After checking is activated, everything should throw + assertThrows(TestException.class, () -> callStaticSystemExit(newClass, 123)); + } + + /** + * Calling a static method of a dynamically loaded class is significantly more cumbersome + * than calling a virtual method. + */ + private static void callStaticSystemExit(Class c, int status) throws NoSuchMethodException, IllegalAccessException { + try { + c.getMethod("systemExit", int.class).invoke(null, status); + } catch (InvocationTargetException e) { + Throwable cause = e.getCause(); + if (cause instanceof TestException n) { + // Sometimes we're expecting this one! 
+ throw n; + } else { + throw new AssertionError(cause); + } + } + } + + static class TestLoader extends ClassLoader { + TestLoader(ClassLoader parent) { + super(parent); + } + + public Class defineClassFromBytes(String name, byte[] bytes) { + return defineClass(name, bytes, 0, bytes.length); + } + } + + private static final Logger logger = LogManager.getLogger(InstrumenterTests.class); +} diff --git a/distribution/tools/entitlement-agent/impl/src/test/resources/META-INF/services/org.elasticsearch.entitlement.api.EntitlementChecks b/distribution/tools/entitlement-agent/impl/src/test/resources/META-INF/services/org.elasticsearch.entitlement.api.EntitlementChecks new file mode 100644 index 0000000000000..983585190b35a --- /dev/null +++ b/distribution/tools/entitlement-agent/impl/src/test/resources/META-INF/services/org.elasticsearch.entitlement.api.EntitlementChecks @@ -0,0 +1,10 @@ +# + # Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + # or more contributor license agreements. Licensed under the "Elastic License + # 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + # Public License v 1"; you may not use this file except in compliance with, at + # your election, the "Elastic License 2.0", the "GNU Affero General Public + # License v3.0 only", or the "Server Side Public License, v 1". 
+# + +org.elasticsearch.entitlement.instrumentation.impl.InstrumenterTests$TestEntitlementManager diff --git a/distribution/tools/entitlement-agent/src/test/java/org/elasticsearch/entitlement/agent/EntitlementAgentTests.java b/distribution/tools/entitlement-agent/src/test/java/org/elasticsearch/entitlement/agent/EntitlementAgentTests.java index bb775d302c1d0..cf7991626029a 100644 --- a/distribution/tools/entitlement-agent/src/test/java/org/elasticsearch/entitlement/agent/EntitlementAgentTests.java +++ b/distribution/tools/entitlement-agent/src/test/java/org/elasticsearch/entitlement/agent/EntitlementAgentTests.java @@ -24,6 +24,10 @@ * to make sure it works with the entitlement granted and throws without it. * The only exception is {@link System#exit}, where we can't check that it works without * terminating the JVM. + *

    + * If you're trying to debug the instrumentation code, take a look at {@code InstrumenterTests}. + * That tests the bytecode portion without firing up an agent, which makes everything easier to troubleshoot. + *

    * See {@code build.gradle} for how we set the command line arguments for this test. */ @WithoutSecurityManager diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 443417e6a5b92..0b5c1ae6528f9 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -4222,6 +4222,11 @@ + + + + + From 3b75d5644a9437f81f1e71ad9b8a1e2e4891baf0 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Fri, 11 Oct 2024 13:24:53 -0700 Subject: [PATCH 022/449] Unmute test that does not exist anymore (#114655) Closes #111631. --- muted-tests.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index f25f7cdb64f31..31f99b0ae632a 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -68,9 +68,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/111448 - class: org.elasticsearch.search.SearchServiceTests issue: https://github.com/elastic/elasticsearch/issues/111529 -- class: org.elasticsearch.xpack.test.rest.XPackRestIT - method: test {p0=rollup/security_tests/Index-based access} - issue: https://github.com/elastic/elasticsearch/issues/111631 - class: org.elasticsearch.upgrades.FullClusterRestartIT method: testSnapshotRestore {cluster=OLD} issue: https://github.com/elastic/elasticsearch/issues/111777 From 693fb95866d6bb65730dc5c24a138f446c2f636f Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Fri, 11 Oct 2024 16:31:48 -0400 Subject: [PATCH 023/449] Support IPinfo database configurations (#114548) --- .../geoip/EnterpriseGeoIpDownloader.java | 16 +- .../geoip/GeoIpDownloaderTaskExecutor.java | 16 +- .../ingest/geoip/GeoIpProcessor.java | 3 +- .../ingest/geoip/IngestGeoIpPlugin.java | 11 +- .../geoip/direct/DatabaseConfiguration.java | 83 ++++++++--- ...RestDeleteDatabaseConfigurationAction.java | 2 +- .../RestGetDatabaseConfigurationAction.java | 7 +- .../RestPutDatabaseConfigurationAction.java | 2 +- ...ansportPutDatabaseConfigurationAction.java | 21 ++- 
.../direct/DatabaseConfigurationTests.java | 29 ++-- .../test/ingest_geoip/40_geoip_databases.yml | 12 ++ .../ingest_geoip/50_ip_lookup_processor.yml | 45 ++++++ .../ingest_geoip/60_ip_location_databases.yml | 137 ++++++++++++++++++ .../ingest.delete_ip_location_database.json | 31 ++++ .../api/ingest.get_ip_location_database.json | 37 +++++ .../api/ingest.put_ip_location_database.json | 36 +++++ .../ingest/IngestGeoIpFeatures.java | 8 +- 17 files changed, 452 insertions(+), 44 deletions(-) create mode 100644 modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/50_ip_lookup_processor.yml create mode 100644 modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/60_ip_location_databases.yml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_ip_location_database.json create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_ip_location_database.json create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_ip_location_database.json diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloader.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloader.java index fa46540e29f7a..3bbb0539f193a 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloader.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloader.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.Strings; import org.elasticsearch.common.hash.MessageDigests; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.query.BoolQueryBuilder; @@ -236,7 +237,7 @@ boolean processDatabase(String id, DatabaseConfiguration database) throws IOExce logger.debug("Processing database 
[{}] for configuration [{}]", name, database.id()); try (ProviderDownload downloader = downloaderFor(database)) { - if (downloader.validCredentials()) { + if (downloader != null && downloader.validCredentials()) { // the name that comes from the enterprise downloader cluster state doesn't include the .mmdb extension, // but the downloading and indexing of database code expects it to be there, so we add it on here before continuing final String fileName = name + ".mmdb"; @@ -443,10 +444,17 @@ private void scheduleNextRun(TimeValue time) { } } + @Nullable private ProviderDownload downloaderFor(DatabaseConfiguration database) { - assert database.provider() instanceof DatabaseConfiguration.Maxmind - : "Attempt to use maxmind downloader with a provider of type" + database.provider().getClass(); - return new MaxmindDownload(database.name(), (DatabaseConfiguration.Maxmind) database.provider()); + if (database.provider() instanceof DatabaseConfiguration.Maxmind) { + return new MaxmindDownload(database.name(), (DatabaseConfiguration.Maxmind) database.provider()); + } else if (database.provider() instanceof DatabaseConfiguration.Ipinfo) { + // as a temporary implementation detail, null here means 'not actually supported *just yet*' + return null; + } else { + assert false : "Attempted to use database downloader with unsupported provider type [" + database.provider().getClass() + "]"; + return null; + } } class MaxmindDownload implements ProviderDownload { diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java index a7828a9f3a0b7..eacf2e5a2ee57 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java @@ -55,6 +55,7 @@ import static 
org.elasticsearch.ingest.geoip.GeoIpDownloader.GEOIP_DOWNLOADER; import static org.elasticsearch.ingest.geoip.GeoIpProcessor.Factory.downloadDatabaseOnPipelineCreation; import static org.elasticsearch.ingest.geoip.GeoIpProcessor.GEOIP_TYPE; +import static org.elasticsearch.ingest.geoip.GeoIpProcessor.IP_LOCATION_TYPE; /** * Persistent task executor that is responsible for starting {@link GeoIpDownloader} after task is allocated by master node. @@ -297,9 +298,18 @@ private static boolean hasAtLeastOneGeoipProcessor(Map processor return false; } - final Map processorConfig = (Map) processor.get(GEOIP_TYPE); - if (processorConfig != null) { - return downloadDatabaseOnPipelineCreation(GEOIP_TYPE, processorConfig, null) == downloadDatabaseOnPipelineCreation; + { + final Map processorConfig = (Map) processor.get(GEOIP_TYPE); + if (processorConfig != null) { + return downloadDatabaseOnPipelineCreation(GEOIP_TYPE, processorConfig, null) == downloadDatabaseOnPipelineCreation; + } + } + + { + final Map processorConfig = (Map) processor.get(IP_LOCATION_TYPE); + if (processorConfig != null) { + return downloadDatabaseOnPipelineCreation(IP_LOCATION_TYPE, processorConfig, null) == downloadDatabaseOnPipelineCreation; + } } return isProcessorWithOnFailureGeoIpProcessor(processor, downloadDatabaseOnPipelineCreation) diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index f8ca6d87924a4..6c64cb755bb32 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -42,6 +42,7 @@ public final class GeoIpProcessor extends AbstractProcessor { + "in a future version of Elasticsearch"; // TODO add a message about migration? 
public static final String GEOIP_TYPE = "geoip"; + public static final String IP_LOCATION_TYPE = "ip_location"; private final String type; private final String field; @@ -225,7 +226,7 @@ public Processor create( final Map config ) throws IOException { String ipField = readStringProperty(type, processorTag, config, "field"); - String targetField = readStringProperty(type, processorTag, config, "target_field", "geoip"); + String targetField = readStringProperty(type, processorTag, config, "target_field", type); String databaseFile = readStringProperty(type, processorTag, config, "database_file", "GeoLite2-City.mmdb"); List propertyNames = readOptionalList(type, processorTag, config, "properties"); boolean ignoreMissing = readBooleanProperty(type, processorTag, config, "ignore_missing", false); diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java index 49932f342086e..cc0bec583483e 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java @@ -71,6 +71,7 @@ import java.util.function.Predicate; import java.util.function.Supplier; +import static java.util.Map.entry; import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME; import static org.elasticsearch.ingest.EnterpriseGeoIpTask.ENTERPRISE_GEOIP_DOWNLOADER; import static org.elasticsearch.ingest.IngestService.INGEST_ORIGIN; @@ -129,7 +130,10 @@ public Map getProcessors(Processor.Parameters paramet parameters.ingestService.getClusterService() ); databaseRegistry.set(registry); - return Map.of(GeoIpProcessor.GEOIP_TYPE, new GeoIpProcessor.Factory(GeoIpProcessor.GEOIP_TYPE, registry)); + return Map.ofEntries( + entry(GeoIpProcessor.GEOIP_TYPE, new GeoIpProcessor.Factory(GeoIpProcessor.GEOIP_TYPE, registry)), + 
entry(GeoIpProcessor.IP_LOCATION_TYPE, new GeoIpProcessor.Factory(GeoIpProcessor.IP_LOCATION_TYPE, registry)) + ); } @Override @@ -239,6 +243,11 @@ public List getNamedWriteables() { DatabaseConfiguration.Maxmind.NAME, DatabaseConfiguration.Maxmind::new ), + new NamedWriteableRegistry.Entry( + DatabaseConfiguration.Provider.class, + DatabaseConfiguration.Ipinfo.NAME, + DatabaseConfiguration.Ipinfo::new + ), new NamedWriteableRegistry.Entry( DatabaseConfiguration.Provider.class, DatabaseConfiguration.Local.NAME, diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfiguration.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfiguration.java index 3399b71879e26..a26364f9305e1 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfiguration.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfiguration.java @@ -26,6 +26,7 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; +import java.util.Arrays; import java.util.Objects; import java.util.Set; import java.util.regex.Pattern; @@ -78,8 +79,19 @@ public record DatabaseConfiguration(String id, String name, Provider provider) i // "GeoLite2-Country" ); + public static final Set IPINFO_NAMES = Set.of( + // these file names are from https://ipinfo.io/developers/database-filename-reference + "asn", // "Free IP to ASN" + "country", // "Free IP to Country" + // "country_asn" // "Free IP to Country + IP to ASN", not supported at present + "standard_asn", // commercial "ASN" + "standard_location", // commercial "IP Geolocation" + "standard_privacy" // commercial "Privacy Detection" (sometimes "Anonymous IP") + ); + private static final ParseField NAME = new ParseField("name"); private static final ParseField MAXMIND = new ParseField(Maxmind.NAME); + private static final ParseField IPINFO = new ParseField(Ipinfo.NAME); private static final 
ParseField WEB = new ParseField(Web.NAME); private static final ParseField LOCAL = new ParseField(Local.NAME); @@ -89,12 +101,21 @@ public record DatabaseConfiguration(String id, String name, Provider provider) i (a, id) -> { String name = (String) a[0]; Provider provider; + + // one and only one provider object must be present + final long numNonNulls = Arrays.stream(a, 1, a.length).filter(Objects::nonNull).count(); + if (numNonNulls != 1) { + throw new IllegalArgumentException("Exactly one provider object must be specified, but [" + numNonNulls + "] were found"); + } + if (a[1] != null) { provider = (Maxmind) a[1]; } else if (a[2] != null) { - provider = (Web) a[2]; + provider = (Ipinfo) a[2]; + } else if (a[3] != null) { + provider = (Web) a[3]; } else { - provider = (Local) a[3]; + provider = (Local) a[4]; } return new DatabaseConfiguration(id, name, provider); } @@ -107,6 +128,7 @@ public record DatabaseConfiguration(String id, String name, Provider provider) i (parser, id) -> Maxmind.PARSER.apply(parser, null), MAXMIND ); + PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (parser, id) -> Ipinfo.PARSER.apply(parser, null), IPINFO); PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (parser, id) -> Web.PARSER.apply(parser, null), WEB); PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (parser, id) -> Local.PARSER.apply(parser, null), LOCAL); } @@ -194,8 +216,16 @@ public ActionRequestValidationException validate() { err.addValidationError("invalid name [" + name + "]: cannot be empty"); } - if (MAXMIND_NAMES.contains(name) == false) { - err.addValidationError("invalid name [" + name + "]: must be a supported name ([" + MAXMIND_NAMES + "])"); + // provider-specific name validation + if (provider instanceof Maxmind) { + if (MAXMIND_NAMES.contains(name) == false) { + err.addValidationError("invalid name [" + name + "]: must be a supported name ([" + MAXMIND_NAMES + "])"); + } + } + if (provider 
instanceof Ipinfo) { + if (IPINFO_NAMES.contains(name) == false) { + err.addValidationError("invalid name [" + name + "]: must be a supported name ([" + IPINFO_NAMES + "])"); + } } // important: the name must be unique across all configurations of this same type, @@ -234,7 +264,7 @@ public String getWriteableName() { private static final ParseField ACCOUNT_ID = new ParseField("account_id"); - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("database", false, (a, id) -> { + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("maxmind", false, (a, id) -> { String accountId = (String) a[0]; return new Maxmind(accountId); }); @@ -247,10 +277,6 @@ public Maxmind(StreamInput in) throws IOException { this(in.readString()); } - public static Maxmind parse(XContentParser parser) { - return PARSER.apply(parser, null); - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(accountId); @@ -270,6 +296,37 @@ public boolean isReadOnly() { } } + public record Ipinfo() implements Provider { + public static final String NAME = "ipinfo"; + + // this'll become a ConstructingObjectParser once we accept the token (securely) in the json definition + private static final ObjectParser PARSER = new ObjectParser<>("ipinfo", Ipinfo::new); + + public Ipinfo(StreamInput in) throws IOException { + this(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException {} + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public boolean isReadOnly() { + return false; + } + } + public record Local(String type) implements Provider { public static final String NAME = "local"; @@ -288,10 +345,6 @@ public Local(StreamInput in) throws IOException { this(in.readString()); 
} - public static Local parse(XContentParser parser) { - return PARSER.apply(parser, null); - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(type); @@ -325,10 +378,6 @@ public Web(StreamInput in) throws IOException { this(); } - public static Web parse(XContentParser parser) { - return PARSER.apply(parser, null); - } - @Override public void writeTo(StreamOutput out) throws IOException {} diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/RestDeleteDatabaseConfigurationAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/RestDeleteDatabaseConfigurationAction.java index e836821a3b2f2..78ea73250d632 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/RestDeleteDatabaseConfigurationAction.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/RestDeleteDatabaseConfigurationAction.java @@ -27,7 +27,7 @@ public class RestDeleteDatabaseConfigurationAction extends BaseRestHandler { @Override public List routes() { - return List.of(new Route(DELETE, "/_ingest/geoip/database/{id}")); + return List.of(new Route(DELETE, "/_ingest/ip_location/database/{id}"), new Route(DELETE, "/_ingest/geoip/database/{id}")); } @Override diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/RestGetDatabaseConfigurationAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/RestGetDatabaseConfigurationAction.java index f34f388f22965..af446ee8d2bd9 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/RestGetDatabaseConfigurationAction.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/RestGetDatabaseConfigurationAction.java @@ -26,7 +26,12 @@ public class RestGetDatabaseConfigurationAction extends BaseRestHandler { @Override public List routes() { - return List.of(new Route(GET, 
"/_ingest/geoip/database"), new Route(GET, "/_ingest/geoip/database/{id}")); + return List.of( + new Route(GET, "/_ingest/ip_location/database"), + new Route(GET, "/_ingest/ip_location/database/{id}"), + new Route(GET, "/_ingest/geoip/database"), + new Route(GET, "/_ingest/geoip/database/{id}") + ); } @Override diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/RestPutDatabaseConfigurationAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/RestPutDatabaseConfigurationAction.java index c0b7a3f59f3aa..95b40df12fd1f 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/RestPutDatabaseConfigurationAction.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/RestPutDatabaseConfigurationAction.java @@ -29,7 +29,7 @@ public class RestPutDatabaseConfigurationAction extends BaseRestHandler { @Override public List routes() { - return List.of(new Route(PUT, "/_ingest/geoip/database/{id}")); + return List.of(new Route(PUT, "/_ingest/ip_location/database/{id}"), new Route(PUT, "/_ingest/geoip/database/{id}")); } @Override diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportPutDatabaseConfigurationAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportPutDatabaseConfigurationAction.java index fda0e12bb1b76..dfb8fa78089d2 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportPutDatabaseConfigurationAction.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportPutDatabaseConfigurationAction.java @@ -29,6 +29,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Strings; import org.elasticsearch.core.Tuple; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.ingest.geoip.IngestGeoIpMetadata; import 
org.elasticsearch.ingest.geoip.direct.PutDatabaseConfigurationAction.Request; import org.elasticsearch.injection.guice.Inject; @@ -41,6 +42,8 @@ import java.util.Map; import java.util.Optional; +import static org.elasticsearch.ingest.IngestGeoIpFeatures.PUT_DATABASE_CONFIGURATION_ACTION_IPINFO; + public class TransportPutDatabaseConfigurationAction extends TransportMasterNodeAction { private static final Logger logger = LogManager.getLogger(TransportPutDatabaseConfigurationAction.class); @@ -58,6 +61,7 @@ public void taskSucceeded(UpdateDatabaseConfigurationTask task, Void unused) { } }; + private final FeatureService featureService; private final MasterServiceTaskQueue updateDatabaseConfigurationTaskQueue; @Inject @@ -66,7 +70,8 @@ public TransportPutDatabaseConfigurationAction( ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver + IndexNameExpressionResolver indexNameExpressionResolver, + FeatureService featureService ) { super( PutDatabaseConfigurationAction.NAME, @@ -79,6 +84,7 @@ public TransportPutDatabaseConfigurationAction( AcknowledgedResponse::readFrom, EsExecutors.DIRECT_EXECUTOR_SERVICE ); + this.featureService = featureService; this.updateDatabaseConfigurationTaskQueue = clusterService.createTaskQueue( "update-geoip-database-configuration-state-update", Priority.NORMAL, @@ -89,6 +95,19 @@ public TransportPutDatabaseConfigurationAction( @Override protected void masterOperation(Task task, Request request, ClusterState state, ActionListener listener) { final String id = request.getDatabase().id(); + + // if this is an ipinfo configuration, then make sure the whole cluster supports that feature + if (request.getDatabase().provider() instanceof DatabaseConfiguration.Ipinfo + && featureService.clusterHasFeature(clusterService.state(), PUT_DATABASE_CONFIGURATION_ACTION_IPINFO) == false) { + listener.onFailure( + new IllegalArgumentException( + "Unable to use ipinfo 
database configurations in mixed-clusters with nodes that do not support feature " + + PUT_DATABASE_CONFIGURATION_ACTION_IPINFO.id() + ) + ); + return; + } + updateDatabaseConfigurationTaskQueue.submitTask( Strings.format("update-geoip-database-configuration-[%s]", id), new UpdateDatabaseConfigurationTask(listener, request.getDatabase()), diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationTests.java index 33356ad4235dc..76b2896afe7a0 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.ingest.geoip.IngestGeoIpPlugin; +import org.elasticsearch.ingest.geoip.direct.DatabaseConfiguration.Ipinfo; import org.elasticsearch.ingest.geoip.direct.DatabaseConfiguration.Local; import org.elasticsearch.ingest.geoip.direct.DatabaseConfiguration.Maxmind; import org.elasticsearch.ingest.geoip.direct.DatabaseConfiguration.Web; @@ -21,6 +22,7 @@ import java.io.IOException; import java.util.Set; +import static org.elasticsearch.ingest.geoip.direct.DatabaseConfiguration.IPINFO_NAMES; import static org.elasticsearch.ingest.geoip.direct.DatabaseConfiguration.MAXMIND_NAMES; public class DatabaseConfigurationTests extends AbstractXContentSerializingTestCase { @@ -44,13 +46,14 @@ protected DatabaseConfiguration createTestInstance() { } public static DatabaseConfiguration randomDatabaseConfiguration(String id) { + boolean useIpinfo = randomBoolean(); DatabaseConfiguration.Provider provider = switch (between(0, 2)) { - case 0 -> new Maxmind(randomAlphaOfLength(5)); + case 0 -> useIpinfo ? 
new Ipinfo() : new Maxmind(randomAlphaOfLength(5)); case 1 -> new Web(); case 2 -> new Local(randomAlphaOfLength(10)); default -> throw new AssertionError("failure, got illegal switch case"); }; - return new DatabaseConfiguration(id, randomFrom(MAXMIND_NAMES), provider); + return new DatabaseConfiguration(id, useIpinfo ? randomFrom(IPINFO_NAMES) : randomFrom(MAXMIND_NAMES), provider); } @Override @@ -61,21 +64,21 @@ protected DatabaseConfiguration mutateInstance(DatabaseConfiguration instance) { case 1: return new DatabaseConfiguration( instance.id(), - randomValueOtherThan(instance.name(), () -> randomFrom(MAXMIND_NAMES)), + randomValueOtherThan( + instance.name(), + () -> instance.provider() instanceof Ipinfo ? randomFrom(IPINFO_NAMES) : randomFrom(MAXMIND_NAMES) + ), instance.provider() ); case 2: DatabaseConfiguration.Provider provider = instance.provider(); - DatabaseConfiguration.Provider modifiedProvider; - if (provider instanceof Maxmind maxmind) { - modifiedProvider = new Maxmind(((Maxmind) instance.provider()).accountId() + randomAlphaOfLength(2)); - } else if (provider instanceof Web) { - modifiedProvider = new Maxmind(randomAlphaOfLength(20)); // can't modify a Web - } else if (provider instanceof Local local) { - modifiedProvider = new Local(local.type() + randomAlphaOfLength(2)); - } else { - throw new AssertionError("Unexpected provider type: " + provider.getClass()); - } + DatabaseConfiguration.Provider modifiedProvider = switch (provider) { + case Maxmind maxmind -> new Maxmind(maxmind.accountId() + randomAlphaOfLength(2)); + case Ipinfo ignored -> new Local(randomAlphaOfLength(20)); // can't modify Ipinfo + case Web ignored -> new Local(randomAlphaOfLength(20)); // can't modify a Web + case Local local -> new Local(local.type() + randomAlphaOfLength(2)); + default -> throw new AssertionError("Unexpected provider type: " + provider.getClass()); + }; return new DatabaseConfiguration(instance.id(), instance.name(), modifiedProvider); default: throw 
new AssertionError("failure, got illegal switch case"); diff --git a/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/40_geoip_databases.yml b/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/40_geoip_databases.yml index 04fd2ac6a8189..a1104505bc240 100644 --- a/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/40_geoip_databases.yml +++ b/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/40_geoip_databases.yml @@ -1,8 +1,20 @@ +--- setup: - requires: cluster_features: ["geoip.downloader.database.configuration", "get_database_configuration_action.multi_node"] reason: "geoip downloader database configuration APIs added in 8.15, and updated in 8.16 to return more results" +--- +teardown: + - do: + ingest.delete_ip_location_database: + id: "my_database_1" + ignore: 404 + - do: + ingest.delete_ip_location_database: + id: "my_database_2" + ignore: 404 + --- "Test adding, getting, and removing geoip databases": - do: diff --git a/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/50_ip_lookup_processor.yml b/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/50_ip_lookup_processor.yml new file mode 100644 index 0000000000000..fd73c715a5ac5 --- /dev/null +++ b/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/50_ip_lookup_processor.yml @@ -0,0 +1,45 @@ +setup: + - requires: + cluster_features: + - "put_database_configuration_action.ipinfo" + reason: "ipinfo support added in 8.16" + +--- +"Test ip_location processor with defaults": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "ip_location" : { + "field" : "field1" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: "1" + pipeline: "my_pipeline" + body: {field1: "89.160.20.128"} + + - do: + get: + index: 
test + id: "1" + - match: { _source.field1: "89.160.20.128" } + - length: { _source.ip_location: 7 } + - match: { _source.ip_location.city_name: "Linköping" } + - match: { _source.ip_location.country_iso_code: "SE" } + - match: { _source.ip_location.location.lon: 15.6167 } + - match: { _source.ip_location.location.lat: 58.4167 } + - match: { _source.ip_location.region_iso_code: "SE-E" } + - match: { _source.ip_location.country_name: "Sweden" } + - match: { _source.ip_location.region_name: "Östergötland County" } + - match: { _source.ip_location.continent_name: "Europe" } diff --git a/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/60_ip_location_databases.yml b/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/60_ip_location_databases.yml new file mode 100644 index 0000000000000..e2e9a1fdb5e28 --- /dev/null +++ b/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/60_ip_location_databases.yml @@ -0,0 +1,137 @@ +--- +setup: + - requires: + cluster_features: + - "put_database_configuration_action.ipinfo" + reason: "ip location downloader database configuration APIs added in 8.16 to support more types" + +--- +teardown: + - do: + ingest.delete_ip_location_database: + id: "my_database_1" + ignore: 404 + - do: + ingest.delete_ip_location_database: + id: "my_database_2" + ignore: 404 + - do: + ingest.delete_ip_location_database: + id: "my_database_3" + ignore: 404 + +--- +"Test adding, getting, and removing ip location databases": + - do: + ingest.put_ip_location_database: + id: "my_database_1" + body: > + { + "name": "GeoIP2-City", + "maxmind": { + "account_id": "1234" + } + } + - match: { acknowledged: true } + + - do: + ingest.put_ip_location_database: + id: "my_database_1" + body: > + { + "name": "GeoIP2-Country", + "maxmind": { + "account_id": "4321" + } + } + - match: { acknowledged: true } + + - do: + ingest.put_ip_location_database: + id: "my_database_2" + body: > + { + 
"name": "GeoIP2-City", + "maxmind": { + "account_id": "1234" + } + } + - match: { acknowledged: true } + + - do: + catch: /illegal_argument_exception/ + ingest.put_ip_location_database: + id: "_web_TXlDdXN0b21HZW9MaXRlMi1DaXR5Lm1tZGI=" + body: > + { + "name": "GeoIP2-City", + "web": { + } + } + + - do: + ingest.put_ip_location_database: + id: "my_database_3" + body: > + { + "name": "standard_privacy", + "ipinfo": { + } + } + - match: { acknowledged: true } + + - do: + ingest.get_ip_location_database: + id: "my_database_1" + - length: { databases: 1 } + - match: { databases.0.id: "my_database_1" } + - gte: { databases.0.modified_date_millis: 0 } + - match: { databases.0.database.name: "GeoIP2-Country" } + - match: { databases.0.database.maxmind.account_id: "4321" } + + - do: + ingest.get_ip_location_database: {} + - length: { databases: 7 } + + - do: + ingest.get_ip_location_database: + id: "my_database_1,my_database_2" + - length: { databases: 2 } + + - do: + ingest.get_ip_location_database: + id: "_web_TXlDdXN0b21HZW9MaXRlMi1DaXR5Lm1tZGI=" + - length: { databases: 1 } + - match: { databases.0.id: "_web_TXlDdXN0b21HZW9MaXRlMi1DaXR5Lm1tZGI=" } + - gte: { databases.0.modified_date_millis: -1 } + - match: { databases.0.database.name: "MyCustomGeoLite2-City" } + + - do: + ingest.delete_ip_location_database: + id: "my_database_1" + + - do: + catch: /resource_not_found_exception/ + ingest.delete_ip_location_database: + id: "_web_TXlDdXN0b21HZW9MaXRlMi1DaXR5Lm1tZGI=" + + - do: + ingest.get_ip_location_database: {} + - length: { databases: 6 } + + - do: + ingest.get_ip_location_database: + id: "my_database_2" + - length: { databases: 1 } + - match: { databases.0.id: "my_database_2" } + - gte: { databases.0.modified_date_millis: 0 } + - match: { databases.0.database.name: "GeoIP2-City" } + - match: { databases.0.database.maxmind.account_id: "1234" } + + - do: + ingest.get_ip_location_database: + id: "my_database_3" + - length: { databases: 1 } + - match: { databases.0.id: 
"my_database_3" } + - gte: { databases.0.modified_date_millis: 0 } + - match: { databases.0.database.name: "standard_privacy" } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_ip_location_database.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_ip_location_database.json new file mode 100644 index 0000000000000..e97d1da276906 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_ip_location_database.json @@ -0,0 +1,31 @@ +{ + "ingest.delete_ip_location_database":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-ip-location-database-api.html", + "description":"Deletes an ip location database configuration" + }, + "stability":"stable", + "visibility":"public", + "headers":{ + "accept": [ "application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_ingest/ip_location/database/{id}", + "methods":[ + "DELETE" + ], + "parts":{ + "id":{ + "type":"list", + "description":"A comma-separated list of ip location database configurations to delete" + } + } + } + ] + }, + "params":{ + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_ip_location_database.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_ip_location_database.json new file mode 100644 index 0000000000000..a2e42fe6c8e59 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_ip_location_database.json @@ -0,0 +1,37 @@ +{ + "ingest.get_ip_location_database":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/get-ip-location-database-api.html", + "description":"Returns the specified ip location database configuration" + }, + "stability":"stable", + "visibility":"public", + "headers":{ + "accept": [ "application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_ingest/ip_location/database", + "methods":[ + "GET" + ] + }, + { + 
"path":"/_ingest/ip_location/database/{id}", + "methods":[ + "GET" + ], + "parts":{ + "id":{ + "type":"list", + "description":"A comma-separated list of ip location database configurations to get; use `*` to get all ip location database configurations" + } + } + } + ] + }, + "params":{ + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_ip_location_database.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_ip_location_database.json new file mode 100644 index 0000000000000..18487969b1a90 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_ip_location_database.json @@ -0,0 +1,36 @@ +{ + "ingest.put_ip_location_database":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/put-ip-location-database-api.html", + "description":"Puts the configuration for an ip location database to be downloaded" + }, + "stability":"stable", + "visibility":"public", + "headers":{ + "accept": [ "application/json"], + "content_type": ["application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_ingest/ip_location/database/{id}", + "methods":[ + "PUT" + ], + "parts":{ + "id":{ + "type":"string", + "description":"The id of the database configuration" + } + } + } + ] + }, + "params":{ + }, + "body":{ + "description":"The database configuration definition", + "required":true + } + } +} diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestGeoIpFeatures.java b/server/src/main/java/org/elasticsearch/ingest/IngestGeoIpFeatures.java index 1933d285d7870..77b11357d79b1 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestGeoIpFeatures.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestGeoIpFeatures.java @@ -22,7 +22,13 @@ public class IngestGeoIpFeatures implements FeatureSpecification { "get_database_configuration_action.multi_node" ); + public static final NodeFeature PUT_DATABASE_CONFIGURATION_ACTION_IPINFO = new
NodeFeature("put_database_configuration_action.ipinfo"); + public Set getFeatures() { - return Set.of(GEOIP_DOWNLOADER_DATABASE_CONFIGURATION, GET_DATABASE_CONFIGURATION_ACTION_MULTI_NODE); + return Set.of( + GEOIP_DOWNLOADER_DATABASE_CONFIGURATION, + GET_DATABASE_CONFIGURATION_ACTION_MULTI_NODE, + PUT_DATABASE_CONFIGURATION_ACTION_IPINFO + ); } } From e65ee92b72b2c9ce41b2fdf6d1513136af915c1b Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Fri, 11 Oct 2024 16:04:27 -0500 Subject: [PATCH 024/449] Move tests out of geo ip processor tests (#114656) --- .../ingest/geoip/DatabaseTests.java | 48 ++ .../ingest/geoip/GeoIpProcessorTests.java | 435 +----------------- .../geoip/MaxmindIpDataLookupsTests.java | 303 ++++++++++++ 3 files changed, 353 insertions(+), 433 deletions(-) create mode 100644 modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/DatabaseTests.java create mode 100644 modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookupsTests.java diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/DatabaseTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/DatabaseTests.java new file mode 100644 index 0000000000000..5710a20277527 --- /dev/null +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/DatabaseTests.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.ingest.geoip; + +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.test.ESTestCase; + +import java.util.Set; + +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +public class DatabaseTests extends ESTestCase { + + public void testDatabasePropertyInvariants() { + // the city database is like a specialization of the country database + assertThat(Sets.difference(Database.Country.properties(), Database.City.properties()), is(empty())); + assertThat(Sets.difference(Database.Country.defaultProperties(), Database.City.defaultProperties()), is(empty())); + + // the isp database is like a specialization of the asn database + assertThat(Sets.difference(Database.Asn.properties(), Database.Isp.properties()), is(empty())); + assertThat(Sets.difference(Database.Asn.defaultProperties(), Database.Isp.defaultProperties()), is(empty())); + + // the enterprise database is like these other databases joined together + for (Database type : Set.of( + Database.City, + Database.Country, + Database.Asn, + Database.AnonymousIp, + Database.ConnectionType, + Database.Domain, + Database.Isp + )) { + assertThat(Sets.difference(type.properties(), Database.Enterprise.properties()), is(empty())); + } + // but in terms of the default fields, it's like a drop-in replacement for the city database + // n.b. 
this is just a choice we decided to make here at Elastic + assertThat(Database.Enterprise.defaultProperties(), equalTo(Database.City.defaultProperties())); + } +} diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index 50b59c26749fc..e96bdbd6314b2 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -10,7 +10,6 @@ package org.elasticsearch.ingest.geoip; import org.elasticsearch.common.CheckedSupplier; -import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.IOUtils; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; @@ -24,14 +23,12 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; import static org.elasticsearch.ingest.geoip.GeoIpProcessor.GEOIP_TYPE; import static org.elasticsearch.ingest.geoip.GeoIpTestUtils.copyDatabase; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.is; @@ -57,71 +54,6 @@ public void cleanup() throws IOException { IOUtils.rm(tmpDir); } - public void testDatabasePropertyInvariants() { - // the city database is like a specialization of the country database - assertThat(Sets.difference(Database.Country.properties(), Database.City.properties()), is(empty())); - assertThat(Sets.difference(Database.Country.defaultProperties(), Database.City.defaultProperties()), is(empty())); - - // the isp database is like a specialization of the asn 
database - assertThat(Sets.difference(Database.Asn.properties(), Database.Isp.properties()), is(empty())); - assertThat(Sets.difference(Database.Asn.defaultProperties(), Database.Isp.defaultProperties()), is(empty())); - - // the enterprise database is like these other databases joined together - for (Database type : Set.of( - Database.City, - Database.Country, - Database.Asn, - Database.AnonymousIp, - Database.ConnectionType, - Database.Domain, - Database.Isp - )) { - assertThat(Sets.difference(type.properties(), Database.Enterprise.properties()), is(empty())); - } - // but in terms of the default fields, it's like a drop-in replacement for the city database - // n.b. this is just a choice we decided to make here at Elastic - assertThat(Database.Enterprise.defaultProperties(), equalTo(Database.City.defaultProperties())); - } - - public void testCity() throws Exception { - String ip = "8.8.8.8"; - GeoIpProcessor processor = new GeoIpProcessor( - GEOIP_TYPE, - randomAlphaOfLength(10), - null, - "source_field", - loader("GeoLite2-City.mmdb"), - () -> true, - "target_field", - ipDataLookupAll(Database.City), - false, - false, - "filename" - ); - - Map document = new HashMap<>(); - document.put("source_field", ip); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); - processor.execute(ingestDocument); - - assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo(ip)); - @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); - assertThat(geoData, notNullValue()); - assertThat(geoData.size(), equalTo(12)); - assertThat(geoData.get("ip"), equalTo(ip)); - assertThat(geoData.get("country_in_european_union"), equalTo(false)); - assertThat(geoData.get("country_iso_code"), equalTo("US")); - assertThat(geoData.get("country_name"), equalTo("United States")); - assertThat(geoData.get("continent_code"), equalTo("NA")); - 
assertThat(geoData.get("continent_name"), equalTo("North America")); - assertThat(geoData.get("timezone"), equalTo("America/Chicago")); - assertThat(geoData.get("location"), equalTo(Map.of("lat", 37.751d, "lon", -97.822d))); - assertThat(geoData.get("registered_country_in_european_union"), equalTo(false)); - assertThat(geoData.get("registered_country_iso_code"), equalTo("US")); - assertThat(geoData.get("registered_country_name"), equalTo("United States")); - } - public void testNullValueWithIgnoreMissing() throws Exception { GeoIpProcessor processor = new GeoIpProcessor( GEOIP_TYPE, @@ -208,369 +140,6 @@ public void testNonExistentWithoutIgnoreMissing() { assertThat(exception.getMessage(), equalTo("field [source_field] not present as part of path [source_field]")); } - public void testCity_withIpV6() throws Exception { - String ip = "2602:306:33d3:8000::3257:9652"; - GeoIpProcessor processor = new GeoIpProcessor( - GEOIP_TYPE, - randomAlphaOfLength(10), - null, - "source_field", - loader("GeoLite2-City.mmdb"), - () -> true, - "target_field", - ipDataLookupAll(Database.City), - false, - false, - "filename" - ); - - Map document = new HashMap<>(); - document.put("source_field", ip); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); - processor.execute(ingestDocument); - - assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo(ip)); - @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); - assertThat(geoData, notNullValue()); - assertThat(geoData.size(), equalTo(16)); - assertThat(geoData.get("ip"), equalTo(ip)); - assertThat(geoData.get("country_in_european_union"), equalTo(false)); - assertThat(geoData.get("country_iso_code"), equalTo("US")); - assertThat(geoData.get("country_name"), equalTo("United States")); - assertThat(geoData.get("continent_code"), equalTo("NA")); - assertThat(geoData.get("continent_name"), equalTo("North America")); - 
assertThat(geoData.get("region_iso_code"), equalTo("US-FL")); - assertThat(geoData.get("region_name"), equalTo("Florida")); - assertThat(geoData.get("city_name"), equalTo("Homestead")); - assertThat(geoData.get("timezone"), equalTo("America/New_York")); - assertThat(geoData.get("location"), equalTo(Map.of("lat", 25.4573d, "lon", -80.4572d))); - assertThat(geoData.get("accuracy_radius"), equalTo(50)); - assertThat(geoData.get("postal_code"), equalTo("33035")); - assertThat(geoData.get("registered_country_in_european_union"), equalTo(false)); - assertThat(geoData.get("registered_country_iso_code"), equalTo("US")); - assertThat(geoData.get("registered_country_name"), equalTo("United States")); - } - - public void testCityWithMissingLocation() throws Exception { - String ip = "80.231.5.0"; - GeoIpProcessor processor = new GeoIpProcessor( - GEOIP_TYPE, - randomAlphaOfLength(10), - null, - "source_field", - loader("GeoLite2-City.mmdb"), - () -> true, - "target_field", - ipDataLookupAll(Database.City), - false, - false, - "filename" - ); - - Map document = new HashMap<>(); - document.put("source_field", ip); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); - processor.execute(ingestDocument); - - assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo(ip)); - @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); - assertThat(geoData, notNullValue()); - assertThat(geoData.size(), equalTo(1)); - assertThat(geoData.get("ip"), equalTo(ip)); - } - - public void testCountry() throws Exception { - String ip = "82.170.213.79"; - GeoIpProcessor processor = new GeoIpProcessor( - GEOIP_TYPE, - randomAlphaOfLength(10), - null, - "source_field", - loader("GeoLite2-Country.mmdb"), - () -> true, - "target_field", - ipDataLookupAll(Database.Country), - false, - false, - "filename" - ); - - Map document = new HashMap<>(); - document.put("source_field", ip); - 
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); - processor.execute(ingestDocument); - - assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo(ip)); - @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); - assertThat(geoData, notNullValue()); - assertThat(geoData.size(), equalTo(9)); - assertThat(geoData.get("ip"), equalTo(ip)); - assertThat(geoData.get("country_in_european_union"), equalTo(true)); - assertThat(geoData.get("country_iso_code"), equalTo("NL")); - assertThat(geoData.get("country_name"), equalTo("Netherlands")); - assertThat(geoData.get("continent_code"), equalTo("EU")); - assertThat(geoData.get("continent_name"), equalTo("Europe")); - assertThat(geoData.get("registered_country_in_european_union"), equalTo(true)); - assertThat(geoData.get("registered_country_iso_code"), equalTo("NL")); - assertThat(geoData.get("registered_country_name"), equalTo("Netherlands")); - } - - public void testCountryWithMissingLocation() throws Exception { - String ip = "80.231.5.0"; - GeoIpProcessor processor = new GeoIpProcessor( - GEOIP_TYPE, - randomAlphaOfLength(10), - null, - "source_field", - loader("GeoLite2-Country.mmdb"), - () -> true, - "target_field", - ipDataLookupAll(Database.Country), - false, - false, - "filename" - ); - - Map document = new HashMap<>(); - document.put("source_field", ip); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); - processor.execute(ingestDocument); - - assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo(ip)); - @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); - assertThat(geoData, notNullValue()); - assertThat(geoData.size(), equalTo(1)); - assertThat(geoData.get("ip"), equalTo(ip)); - } - - public void testAsn() throws Exception { - String ip = "82.171.64.0"; - 
GeoIpProcessor processor = new GeoIpProcessor( - GEOIP_TYPE, - randomAlphaOfLength(10), - null, - "source_field", - loader("GeoLite2-ASN.mmdb"), - () -> true, - "target_field", - ipDataLookupAll(Database.Asn), - false, - false, - "filename" - ); - - Map document = new HashMap<>(); - document.put("source_field", ip); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); - processor.execute(ingestDocument); - - assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo(ip)); - @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); - assertThat(geoData, notNullValue()); - assertThat(geoData.size(), equalTo(4)); - assertThat(geoData.get("ip"), equalTo(ip)); - assertThat(geoData.get("asn"), equalTo(1136L)); - assertThat(geoData.get("organization_name"), equalTo("KPN B.V.")); - assertThat(geoData.get("network"), equalTo("82.168.0.0/14")); - } - - public void testAnonymmousIp() throws Exception { - String ip = "81.2.69.1"; - GeoIpProcessor processor = new GeoIpProcessor( - GEOIP_TYPE, - randomAlphaOfLength(10), - null, - "source_field", - loader("GeoIP2-Anonymous-IP-Test.mmdb"), - () -> true, - "target_field", - ipDataLookupAll(Database.AnonymousIp), - false, - false, - "filename" - ); - - Map document = new HashMap<>(); - document.put("source_field", ip); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); - processor.execute(ingestDocument); - - assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo(ip)); - @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); - assertThat(geoData, notNullValue()); - assertThat(geoData.size(), equalTo(7)); - assertThat(geoData.get("ip"), equalTo(ip)); - assertThat(geoData.get("hosting_provider"), equalTo(true)); - assertThat(geoData.get("tor_exit_node"), equalTo(true)); - 
assertThat(geoData.get("anonymous_vpn"), equalTo(true)); - assertThat(geoData.get("anonymous"), equalTo(true)); - assertThat(geoData.get("public_proxy"), equalTo(true)); - assertThat(geoData.get("residential_proxy"), equalTo(true)); - } - - public void testConnectionType() throws Exception { - String ip = "214.78.120.5"; - GeoIpProcessor processor = new GeoIpProcessor( - GEOIP_TYPE, - randomAlphaOfLength(10), - null, - "source_field", - loader("GeoIP2-Connection-Type-Test.mmdb"), - () -> true, - "target_field", - ipDataLookupAll(Database.ConnectionType), - false, - false, - "filename" - ); - - Map document = new HashMap<>(); - document.put("source_field", ip); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); - processor.execute(ingestDocument); - - assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo(ip)); - @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); - assertThat(geoData, notNullValue()); - assertThat(geoData.size(), equalTo(2)); - assertThat(geoData.get("ip"), equalTo(ip)); - assertThat(geoData.get("connection_type"), equalTo("Satellite")); - } - - public void testDomain() throws Exception { - String ip = "69.219.64.2"; - GeoIpProcessor processor = new GeoIpProcessor( - GEOIP_TYPE, - randomAlphaOfLength(10), - null, - "source_field", - loader("GeoIP2-Domain-Test.mmdb"), - () -> true, - "target_field", - ipDataLookupAll(Database.Domain), - false, - false, - "filename" - ); - - Map document = new HashMap<>(); - document.put("source_field", ip); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); - processor.execute(ingestDocument); - - assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo(ip)); - @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); - assertThat(geoData, notNullValue()); - 
assertThat(geoData.size(), equalTo(2)); - assertThat(geoData.get("ip"), equalTo(ip)); - assertThat(geoData.get("domain"), equalTo("ameritech.net")); - } - - public void testEnterprise() throws Exception { - String ip = "74.209.24.4"; - GeoIpProcessor processor = new GeoIpProcessor( - GEOIP_TYPE, - randomAlphaOfLength(10), - null, - "source_field", - loader("GeoIP2-Enterprise-Test.mmdb"), - () -> true, - "target_field", - ipDataLookupAll(Database.Enterprise), - false, - false, - "filename" - ); - - Map document = new HashMap<>(); - document.put("source_field", ip); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); - processor.execute(ingestDocument); - - assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo(ip)); - @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); - assertThat(geoData, notNullValue()); - assertThat(geoData.size(), equalTo(33)); - assertThat(geoData.get("ip"), equalTo(ip)); - assertThat(geoData.get("country_confidence"), equalTo(99)); - assertThat(geoData.get("country_in_european_union"), equalTo(false)); - assertThat(geoData.get("country_iso_code"), equalTo("US")); - assertThat(geoData.get("country_name"), equalTo("United States")); - assertThat(geoData.get("continent_code"), equalTo("NA")); - assertThat(geoData.get("continent_name"), equalTo("North America")); - assertThat(geoData.get("region_iso_code"), equalTo("US-NY")); - assertThat(geoData.get("region_name"), equalTo("New York")); - assertThat(geoData.get("city_confidence"), equalTo(11)); - assertThat(geoData.get("city_name"), equalTo("Chatham")); - assertThat(geoData.get("timezone"), equalTo("America/New_York")); - assertThat(geoData.get("location"), equalTo(Map.of("lat", 42.3478, "lon", -73.5549))); - assertThat(geoData.get("accuracy_radius"), equalTo(27)); - assertThat(geoData.get("postal_code"), equalTo("12037")); - assertThat(geoData.get("city_confidence"), 
equalTo(11)); - assertThat(geoData.get("asn"), equalTo(14671L)); - assertThat(geoData.get("organization_name"), equalTo("FairPoint Communications")); - assertThat(geoData.get("network"), equalTo("74.209.16.0/20")); - assertThat(geoData.get("hosting_provider"), equalTo(false)); - assertThat(geoData.get("tor_exit_node"), equalTo(false)); - assertThat(geoData.get("anonymous_vpn"), equalTo(false)); - assertThat(geoData.get("anonymous"), equalTo(false)); - assertThat(geoData.get("public_proxy"), equalTo(false)); - assertThat(geoData.get("residential_proxy"), equalTo(false)); - assertThat(geoData.get("domain"), equalTo("frpt.net")); - assertThat(geoData.get("isp"), equalTo("Fairpoint Communications")); - assertThat(geoData.get("isp_organization_name"), equalTo("Fairpoint Communications")); - assertThat(geoData.get("user_type"), equalTo("residential")); - assertThat(geoData.get("connection_type"), equalTo("Cable/DSL")); - assertThat(geoData.get("registered_country_in_european_union"), equalTo(false)); - assertThat(geoData.get("registered_country_iso_code"), equalTo("US")); - assertThat(geoData.get("registered_country_name"), equalTo("United States")); - } - - public void testIsp() throws Exception { - String ip = "149.101.100.1"; - GeoIpProcessor processor = new GeoIpProcessor( - GEOIP_TYPE, - randomAlphaOfLength(10), - null, - "source_field", - loader("GeoIP2-ISP-Test.mmdb"), - () -> true, - "target_field", - ipDataLookupAll(Database.Isp), - false, - false, - "filename" - ); - - Map document = new HashMap<>(); - document.put("source_field", ip); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); - processor.execute(ingestDocument); - - assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo(ip)); - @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); - assertThat(geoData, notNullValue()); - assertThat(geoData.size(), equalTo(8)); - 
assertThat(geoData.get("ip"), equalTo(ip)); - assertThat(geoData.get("asn"), equalTo(6167L)); - assertThat(geoData.get("organization_name"), equalTo("CELLCO-PART")); - assertThat(geoData.get("network"), equalTo("149.101.100.0/28")); - assertThat(geoData.get("isp"), equalTo("Verizon Wireless")); - assertThat(geoData.get("isp_organization_name"), equalTo("Verizon Wireless")); - assertThat(geoData.get("mobile_network_code"), equalTo("004")); - assertThat(geoData.get("mobile_country_code"), equalTo("310")); - } - public void testAddressIsNotInTheDatabase() throws Exception { GeoIpProcessor processor = new GeoIpProcessor( GEOIP_TYPE, @@ -594,9 +163,9 @@ public void testAddressIsNotInTheDatabase() throws Exception { } /** - * Don't silently do DNS lookups or anything trappy on bogus data + * Tests that an exception in the IpDataLookup is propagated out of the GeoIpProcessor's execute method */ - public void testInvalid() { + public void testExceptionPropagates() { GeoIpProcessor processor = new GeoIpProcessor( GEOIP_TYPE, randomAlphaOfLength(10), diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookupsTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookupsTests.java new file mode 100644 index 0000000000000..aca6b3564abb3 --- /dev/null +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookupsTests.java @@ -0,0 +1,303 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.ingest.geoip; + +import org.apache.lucene.util.Constants; +import org.elasticsearch.core.IOUtils; +import org.elasticsearch.test.ESTestCase; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.Map; + +import static java.util.Map.entry; +import static org.elasticsearch.ingest.geoip.GeoIpTestUtils.copyDatabase; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class MaxmindIpDataLookupsTests extends ESTestCase { + + // a temporary directory that mmdb files can be copied to and read from + private Path tmpDir; + + @Before + public void setup() { + tmpDir = createTempDir(); + } + + @After + public void cleanup() throws IOException { + IOUtils.rm(tmpDir); + } + + public void testCity() { + assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); + String databaseName = "GeoLite2-City.mmdb"; + String ip = "8.8.8.8"; + assertExpectedLookupResults( + databaseName, + ip, + new MaxmindIpDataLookups.City(Database.City.properties()), + Map.ofEntries( + entry("ip", ip), + entry("country_in_european_union", false), + entry("country_iso_code", "US"), + entry("country_name", "United States"), + entry("continent_code", "NA"), + entry("continent_name", "North America"), + entry("timezone", "America/Chicago"), + entry("location", Map.of("lat", 37.751d, "lon", -97.822d)), + entry("accuracy_radius", 1000), + entry("registered_country_in_european_union", false), + entry("registered_country_iso_code", "US"), + entry("registered_country_name", "United States") + ) + ); + } + + public void testCity_withIpV6() { + assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); + String databaseName = "GeoLite2-City.mmdb"; + String ip = "2602:306:33d3:8000::3257:9652"; + assertExpectedLookupResults( + 
databaseName, + ip, + new MaxmindIpDataLookups.City(Database.City.properties()), + Map.ofEntries( + entry("ip", ip), + entry("country_in_european_union", false), + entry("country_iso_code", "US"), + entry("country_name", "United States"), + entry("continent_code", "NA"), + entry("continent_name", "North America"), + entry("region_iso_code", "US-FL"), + entry("region_name", "Florida"), + entry("city_name", "Homestead"), + entry("postal_code", "33035"), + entry("timezone", "America/New_York"), + entry("location", Map.of("lat", 25.4573d, "lon", -80.4572d)), + entry("accuracy_radius", 50), + entry("registered_country_in_european_union", false), + entry("registered_country_iso_code", "US"), + entry("registered_country_name", "United States") + ) + ); + } + + public void testCityWithMissingLocation() { + assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); + String databaseName = "GeoLite2-City.mmdb"; + String ip = "80.231.5.0"; + assertExpectedLookupResults( + databaseName, + ip, + new MaxmindIpDataLookups.City(Database.City.properties()), + Map.ofEntries(entry("ip", ip)) + ); + } + + public void testCountry() { + assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); + String databaseName = "GeoLite2-Country.mmdb"; + String ip = "82.170.213.79"; + assertExpectedLookupResults( + databaseName, + ip, + new MaxmindIpDataLookups.Country(Database.Country.properties()), + Map.ofEntries( + entry("ip", ip), + entry("country_in_european_union", true), + entry("country_iso_code", "NL"), + entry("country_name", "Netherlands"), + entry("continent_code", "EU"), + entry("continent_name", "Europe"), + entry("registered_country_in_european_union", true), + entry("registered_country_iso_code", "NL"), + entry("registered_country_name", "Netherlands") + ) + ); + } + + /** + * Don't silently do DNS lookups or anything trappy on bogus data + */ + public void testInvalid() throws IOException { + 
assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); + String databaseName = "GeoLite2-Country.mmdb"; + String ip = "www.google.com"; + try (DatabaseReaderLazyLoader loader = loader(databaseName)) { + IpDataLookup lookup = new MaxmindIpDataLookups.Country(Database.Country.properties()); + IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> lookup.getData(loader, ip)); + assertThat(e.getMessage(), containsString("not an IP string literal")); + } + } + + public void testCountryWithMissingLocation() { + assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); + String databaseName = "GeoLite2-Country.mmdb"; + String ip = "80.231.5.0"; + assertExpectedLookupResults( + databaseName, + ip, + new MaxmindIpDataLookups.Country(Database.Country.properties()), + Map.ofEntries(entry("ip", ip)) + ); + } + + public void testAsn() throws IOException { + assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); + String databaseName = "GeoLite2-ASN.mmdb"; + String ip = "82.171.64.0"; + assertExpectedLookupResults( + databaseName, + ip, + new MaxmindIpDataLookups.Asn(Database.Asn.properties()), + Map.ofEntries(entry("ip", ip), entry("organization_name", "KPN B.V."), entry("asn", 1136L), entry("network", "82.168.0.0/14")) + ); + } + + public void testAnonymousIp() { + assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); + String databaseName = "GeoIP2-Anonymous-IP-Test.mmdb"; + String ip = "81.2.69.1"; + assertExpectedLookupResults( + databaseName, + ip, + new MaxmindIpDataLookups.AnonymousIp(Database.AnonymousIp.properties()), + Map.ofEntries( + entry("ip", ip), + entry("hosting_provider", true), + entry("tor_exit_node", true), + entry("anonymous_vpn", true), + entry("anonymous", true), + entry("public_proxy", true), + entry("residential_proxy", true) + ) + ); + } + + public void testConnectionType() { + 
assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); + String databaseName = "GeoIP2-Connection-Type-Test.mmdb"; + String ip = "214.78.120.5"; + assertExpectedLookupResults( + databaseName, + ip, + new MaxmindIpDataLookups.ConnectionType(Database.ConnectionType.properties()), + Map.ofEntries(entry("ip", ip), entry("connection_type", "Satellite")) + ); + } + + public void testDomain() { + String databaseName = "GeoIP2-Domain-Test.mmdb"; + String ip = "69.219.64.2"; + assertExpectedLookupResults( + databaseName, + ip, + new MaxmindIpDataLookups.Domain(Database.Domain.properties()), + Map.ofEntries(entry("ip", ip), entry("domain", "ameritech.net")) + ); + } + + public void testEnterprise() { + assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); + String databaseName = "GeoIP2-Enterprise-Test.mmdb"; + String ip = "74.209.24.4"; + assertExpectedLookupResults( + databaseName, + ip, + new MaxmindIpDataLookups.Enterprise(Database.Enterprise.properties()), + Map.ofEntries( + entry("ip", ip), + entry("country_confidence", 99), + entry("country_in_european_union", false), + entry("country_iso_code", "US"), + entry("country_name", "United States"), + entry("continent_code", "NA"), + entry("continent_name", "North America"), + entry("region_iso_code", "US-NY"), + entry("region_name", "New York"), + entry("city_confidence", 11), + entry("city_name", "Chatham"), + entry("timezone", "America/New_York"), + entry("location", Map.of("lat", 42.3478, "lon", -73.5549)), + entry("accuracy_radius", 27), + entry("postal_code", "12037"), + entry("postal_confidence", 11), + entry("asn", 14671L), + entry("organization_name", "FairPoint Communications"), + entry("network", "74.209.16.0/20"), + entry("hosting_provider", false), + entry("tor_exit_node", false), + entry("anonymous_vpn", false), + entry("anonymous", false), + entry("public_proxy", false), + entry("residential_proxy", false), + entry("domain", "frpt.net"), + 
entry("isp", "Fairpoint Communications"), + entry("isp_organization_name", "Fairpoint Communications"), + entry("user_type", "residential"), + entry("connection_type", "Cable/DSL"), + entry("registered_country_in_european_union", false), + entry("registered_country_iso_code", "US"), + entry("registered_country_name", "United States") + ) + ); + } + + public void testIsp() { + assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); + String databaseName = "GeoIP2-ISP-Test.mmdb"; + String ip = "149.101.100.1"; + assertExpectedLookupResults( + databaseName, + ip, + new MaxmindIpDataLookups.Isp(Database.Isp.properties()), + Map.ofEntries( + entry("ip", ip), + entry("asn", 6167L), + entry("organization_name", "CELLCO-PART"), + entry("network", "149.101.100.0/28"), + entry("isp", "Verizon Wireless"), + entry("isp_organization_name", "Verizon Wireless"), + entry("mobile_network_code", "004"), + entry("mobile_country_code", "310") + ) + ); + } + + private void assertExpectedLookupResults(String databaseName, String ip, IpDataLookup lookup, Map expected) { + try (DatabaseReaderLazyLoader loader = loader(databaseName)) { + Map actual = lookup.getData(loader, ip); + assertThat( + "The set of keys in the result are not the same as the set of expected keys", + actual.keySet(), + containsInAnyOrder(expected.keySet().toArray(new String[0])) + ); + for (Map.Entry entry : expected.entrySet()) { + assertThat("Unexpected value for key [" + entry.getKey() + "]", actual.get(entry.getKey()), equalTo(entry.getValue())); + } + } catch (AssertionError e) { + fail(e, "Assert failed for database [%s] with address [%s]", databaseName, ip); + } catch (Exception e) { + fail(e, "Exception for database [%s] with address [%s]", databaseName, ip); + } + } + + private DatabaseReaderLazyLoader loader(final String databaseName) { + Path path = tmpDir.resolve(databaseName); + copyDatabase(databaseName, path); + final GeoIpCache cache = new GeoIpCache(1000); + return 
new DatabaseReaderLazyLoader(cache, path, null); + } +} From 6b714e28f36852c514f966cda31f3f2a19c6e871 Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Fri, 11 Oct 2024 17:39:01 -0400 Subject: [PATCH 025/449] [Inference API] Introduce Update API to change some aspects of existing inference endpoints (#114457) --- docs/changelog/114457.yaml | 6 + .../inference/EmptySecretSettings.java | 6 + .../inference/EmptyTaskSettings.java | 6 + .../inference/SecretSettings.java | 3 + .../elasticsearch/inference/TaskSettings.java | 5 + .../action/UpdateInferenceModelAction.java | 278 +++++++++++++++ .../xpack/core/ml/job/messages/Messages.java | 3 + .../xpack/core/ml/utils/ExceptionsHelper.java | 4 + .../inference/InferenceBaseRestTest.java | 20 ++ .../xpack/inference/InferenceCrudIT.java | 32 +- .../mock/AbstractTestInferenceService.java | 10 + .../integration/ModelRegistryIT.java | 9 + .../xpack/inference/InferencePlugin.java | 5 + .../TransportPutInferenceModelAction.java | 36 +- .../TransportUpdateInferenceModelAction.java | 328 ++++++++++++++++++ .../inference/registry/ModelRegistry.java | 150 ++++++++ .../xpack/inference/rest/Paths.java | 6 + .../rest/RestUpdateInferenceModelAction.java | 62 ++++ .../inference/services/ServiceUtils.java | 34 ++ ...babaCloudSearchCompletionTaskSettings.java | 8 + ...babaCloudSearchEmbeddingsTaskSettings.java | 7 + .../AlibabaCloudSearchRerankTaskSettings.java | 6 + .../AlibabaCloudSearchSparseTaskSettings.java | 7 + .../AmazonBedrockSecretSettings.java | 6 + ...azonBedrockChatCompletionTaskSettings.java | 9 + .../AnthropicChatCompletionTaskSettings.java | 6 + ...ureAiStudioChatCompletionTaskSettings.java | 22 ++ .../AzureAiStudioEmbeddingsTaskSettings.java | 9 + .../AzureOpenAiSecretSettings.java | 6 + .../AzureOpenAiCompletionTaskSettings.java | 9 + .../AzureOpenAiEmbeddingsTaskSettings.java | 9 + .../CohereEmbeddingsTaskSettings.java | 7 + .../rerank/CohereRerankTaskSettings.java | 6 
+ .../CustomElandRerankTaskSettings.java | 13 +- .../ElasticsearchInternalServiceSettings.java | 12 + .../ElserMlNodeTaskSettings.java | 6 + .../GoogleVertexAiSecretSettings.java | 8 +- .../GoogleVertexAiEmbeddingsTaskSettings.java | 9 + .../GoogleVertexAiRerankTaskSettings.java | 9 + .../OpenAiChatCompletionTaskSettings.java | 9 + .../OpenAiEmbeddingsTaskSettings.java | 7 + .../settings/DefaultSecretSettings.java | 6 + .../inference/EmptySecretSettingsTests.java | 10 + .../inference/EmptyTaskSettingsTests.java | 8 + .../xpack/inference/ModelSecretsTests.java | 6 + ...TransportPutInferenceModelActionTests.java | 20 +- .../xpack/inference/model/TestModel.java | 11 + ...loudSearchCompletionTaskSettingsTests.java | 23 +- ...loudSearchEmbeddingsTaskSettingsTests.java | 24 +- ...abaCloudSearchSparseTaskSettingsTests.java | 30 +- .../AmazonBedrockSecretSettingsTests.java | 11 + ...edrockChatCompletionTaskSettingsTests.java | 62 ++++ ...hropicChatCompletionTaskSettingsTests.java | 19 + ...StudioChatCompletionTaskSettingsTests.java | 25 ++ ...reAiStudioEmbeddingsTaskSettingsTests.java | 18 + .../AzureOpenAiSecretSettingsTests.java | 26 +- ...zureOpenAiCompletionTaskSettingsTests.java | 10 + ...zureOpenAiEmbeddingsTaskSettingsTests.java | 30 +- .../CohereEmbeddingsTaskSettingsTests.java | 26 ++ .../rerank/CohereRerankTaskSettingsTests.java | 154 ++++++++ .../CustomElandRerankTaskSettingsTests.java | 41 +-- .../GoogleVertexAiSecretSettingsTests.java | 9 + ...leVertexAiEmbeddingsTaskSettingsTests.java | 18 + ...GoogleVertexAiRerankTaskSettingsTests.java | 18 + ...OpenAiChatCompletionTaskSettingsTests.java | 18 + .../OpenAiEmbeddingsTaskSettingsTests.java | 21 +- .../settings/DefaultSecretSettingsTests.java | 9 + .../xpack/security/operator/Constants.java | 1 + 68 files changed, 1745 insertions(+), 102 deletions(-) create mode 100644 docs/changelog/114457.yaml create mode 100644 
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/UpdateInferenceModelAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUpdateInferenceModelAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUpdateInferenceModelAction.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankTaskSettingsTests.java diff --git a/docs/changelog/114457.yaml b/docs/changelog/114457.yaml new file mode 100644 index 0000000000000..9558c41852f69 --- /dev/null +++ b/docs/changelog/114457.yaml @@ -0,0 +1,6 @@ +pr: 114457 +summary: "[Inference API] Introduce Update API to change some aspects of existing\ + \ inference endpoints" +area: Machine Learning +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/inference/EmptySecretSettings.java b/server/src/main/java/org/elasticsearch/inference/EmptySecretSettings.java index 0e5b3a555b800..9c666bd4a35f5 100644 --- a/server/src/main/java/org/elasticsearch/inference/EmptySecretSettings.java +++ b/server/src/main/java/org/elasticsearch/inference/EmptySecretSettings.java @@ -16,6 +16,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Map; /** * This class defines an empty secret settings object. This is useful for services that do not have any secret settings. 
@@ -48,4 +49,9 @@ public TransportVersion getMinimalSupportedVersion() { @Override public void writeTo(StreamOutput out) throws IOException {} + + @Override + public SecretSettings newSecretSettings(Map newSecrets) { + return INSTANCE; + } } diff --git a/server/src/main/java/org/elasticsearch/inference/EmptyTaskSettings.java b/server/src/main/java/org/elasticsearch/inference/EmptyTaskSettings.java index 0c863932c6afe..cba0282f7fed8 100644 --- a/server/src/main/java/org/elasticsearch/inference/EmptyTaskSettings.java +++ b/server/src/main/java/org/elasticsearch/inference/EmptyTaskSettings.java @@ -16,6 +16,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Map; /** * This class defines an empty task settings object. This is useful for services that do not have any task settings. @@ -53,4 +54,9 @@ public TransportVersion getMinimalSupportedVersion() { @Override public void writeTo(StreamOutput out) throws IOException {} + + @Override + public TaskSettings updatedTaskSettings(Map newSettings) { + return INSTANCE; + } } diff --git a/server/src/main/java/org/elasticsearch/inference/SecretSettings.java b/server/src/main/java/org/elasticsearch/inference/SecretSettings.java index e2c0c8b58c69b..90ca92bb0e2ef 100644 --- a/server/src/main/java/org/elasticsearch/inference/SecretSettings.java +++ b/server/src/main/java/org/elasticsearch/inference/SecretSettings.java @@ -12,6 +12,9 @@ import org.elasticsearch.common.io.stream.VersionedNamedWriteable; import org.elasticsearch.xcontent.ToXContentObject; +import java.util.Map; + public interface SecretSettings extends ToXContentObject, VersionedNamedWriteable { + SecretSettings newSecretSettings(Map newSecrets); } diff --git a/server/src/main/java/org/elasticsearch/inference/TaskSettings.java b/server/src/main/java/org/elasticsearch/inference/TaskSettings.java index 9862abce2332c..7dd20688245ba 100644 --- a/server/src/main/java/org/elasticsearch/inference/TaskSettings.java +++ 
b/server/src/main/java/org/elasticsearch/inference/TaskSettings.java @@ -12,6 +12,11 @@ import org.elasticsearch.common.io.stream.VersionedNamedWriteable; import org.elasticsearch.xcontent.ToXContentObject; +import java.util.Map; + public interface TaskSettings extends ToXContentObject, VersionedNamedWriteable { + boolean isEmpty(); + + TaskSettings updatedTaskSettings(Map newSettings); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/UpdateInferenceModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/UpdateInferenceModelAction.java new file mode 100644 index 0000000000000..cc59ae890467b --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/UpdateInferenceModelAction.java @@ -0,0 +1,278 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.inference.action; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.ml.job.messages.Messages; +import org.elasticsearch.xpack.core.ml.utils.MlStrings; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.inference.ModelConfigurations.SERVICE_SETTINGS; +import static org.elasticsearch.inference.ModelConfigurations.TASK_SETTINGS; + +public class UpdateInferenceModelAction extends ActionType { + + public static final UpdateInferenceModelAction INSTANCE = new UpdateInferenceModelAction(); + public static final String NAME = "cluster:admin/xpack/inference/update"; + + public UpdateInferenceModelAction() { + super(NAME); + } + + public record Settings( + @Nullable Map serviceSettings, + @Nullable Map taskSettings, + @Nullable TaskType taskType + ) {} + + public static class Request extends AcknowledgedRequest { + + private final String inferenceEntityId; + private final BytesReference content; + private final XContentType contentType; + 
private final TaskType taskType; + private Settings settings; + + public Request(String inferenceEntityId, BytesReference content, XContentType contentType, TaskType taskType, TimeValue timeout) { + super(timeout, DEFAULT_ACK_TIMEOUT); + this.inferenceEntityId = inferenceEntityId; + this.content = content; + this.contentType = contentType; + this.taskType = taskType; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.inferenceEntityId = in.readString(); + this.content = in.readBytesReference(); + this.taskType = TaskType.fromStream(in); + this.contentType = in.readEnum(XContentType.class); + } + + public String getInferenceEntityId() { + return inferenceEntityId; + } + + public TaskType getTaskType() { + return taskType; + } + + /** + * The body of the request. + * For in-cluster models, this is expected to contain some of the following: + * "number_of_allocations": `an integer` + * + * For third-party services, this is expected to contain: + * "service_settings": { + * "api_key": `a string` // service settings can only contain an api key + * } + * "task_settings": { a map of settings } + * + */ + public BytesReference getContent() { + return content; + } + + /** + * The body of the request as a map. + * The map is validated such that only allowed fields are present. + * If any fields in the body are not on the allow list, this function will throw an exception. 
+ */ + public Settings getContentAsSettings() { + if (settings == null) { // settings is deterministic on content, so we only need to compute it once + Map unvalidatedMap = XContentHelper.convertToMap(content, false, contentType).v2(); + Map serviceSettings = new HashMap<>(); + Map taskSettings = new HashMap<>(); + TaskType taskType = null; + + if (unvalidatedMap.isEmpty()) { + throw new ElasticsearchStatusException("Request body is empty", RestStatus.BAD_REQUEST); + } + + if (unvalidatedMap.containsKey("task_type")) { + if (unvalidatedMap.get("task_type") instanceof String taskTypeString) { + taskType = TaskType.fromStringOrStatusException(taskTypeString); + } else { + throw new ElasticsearchStatusException( + "Failed to parse [task_type] in update request [{}]", + RestStatus.INTERNAL_SERVER_ERROR, + unvalidatedMap.toString() + ); + } + unvalidatedMap.remove("task_type"); + } + + if (unvalidatedMap.containsKey(SERVICE_SETTINGS)) { + if (unvalidatedMap.get(SERVICE_SETTINGS) instanceof Map tempMap) { + for (Map.Entry entry : (tempMap).entrySet()) { + if (entry.getKey() instanceof String key && entry.getValue() instanceof Object value) { + serviceSettings.put(key, value); + } else { + throw new ElasticsearchStatusException( + "Failed to parse update request [{}]", + RestStatus.INTERNAL_SERVER_ERROR, + unvalidatedMap.toString() + ); + } + } + unvalidatedMap.remove(SERVICE_SETTINGS); + } else { + throw new ElasticsearchStatusException( + "Unable to parse service settings in the request [{}]", + RestStatus.BAD_REQUEST, + unvalidatedMap.toString() + ); + } + } + + if (unvalidatedMap.containsKey(TASK_SETTINGS)) { + if (unvalidatedMap.get(TASK_SETTINGS) instanceof Map tempMap) { + for (Map.Entry entry : (tempMap).entrySet()) { + if (entry.getKey() instanceof String key && entry.getValue() instanceof Object value) { + taskSettings.put(key, value); + } else { + throw new ElasticsearchStatusException( + "Failed to parse update request [{}]", + 
RestStatus.INTERNAL_SERVER_ERROR, + unvalidatedMap.toString() + ); + } + } + unvalidatedMap.remove(TASK_SETTINGS); + } else { + throw new ElasticsearchStatusException( + "Unable to parse task settings in the request [{}]", + RestStatus.BAD_REQUEST, + unvalidatedMap.toString() + ); + } + } + + if (unvalidatedMap.isEmpty() == false) { + throw new ElasticsearchStatusException( + "Request contained fields which cannot be updated, remove these fields and try again [{}]", + RestStatus.BAD_REQUEST, + unvalidatedMap.toString() + ); + } + + this.settings = new Settings( + serviceSettings.isEmpty() == false ? Collections.unmodifiableMap(serviceSettings) : null, + taskSettings.isEmpty() == false ? Collections.unmodifiableMap(taskSettings) : null, + taskType + ); + } + return this.settings; + } + + public XContentType getContentType() { + return contentType; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(inferenceEntityId); + taskType.writeTo(out); + out.writeBytesReference(content); + XContentHelper.writeTo(out, contentType); + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = new ActionRequestValidationException(); + if (MlStrings.isValidId(this.inferenceEntityId) == false) { + validationException.addValidationError(Messages.getMessage(Messages.INVALID_ID, "inference_id", this.inferenceEntityId)); + } + + if (validationException.validationErrors().isEmpty() == false) { + return validationException; + } else { + return null; + } + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(inferenceEntityId, request.inferenceEntityId) + && Objects.equals(content, request.content) + && contentType == request.contentType + && taskType == request.taskType; + } + + @Override + public int 
hashCode() { + return Objects.hash(inferenceEntityId, content, contentType, taskType); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + private final ModelConfigurations model; + + public Response(ModelConfigurations model) { + this.model = model; + } + + public Response(StreamInput in) throws IOException { + super(in); + model = new ModelConfigurations(in); + } + + public ModelConfigurations getModel() { + return model; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + model.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return model.toFilteredXContent(builder, params); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response response = (Response) o; + return Objects.equals(model, response.model); + } + + @Override + public int hashCode() { + return Objects.hash(model); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java index 6ebed55451ae7..9f9def6a0678d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java @@ -281,6 +281,9 @@ public final class Messages { public static final String FIELD_CANNOT_BE_NULL = "Field [{0}] cannot be null"; public static final String MODEL_ID_MATCHES_EXISTING_MODEL_IDS_BUT_MUST_NOT = "Model IDs must be unique. 
Requested model ID [{}] matches existing model IDs but must not."; + public static final String MODEL_ID_DOES_NOT_MATCH_EXISTING_MODEL_IDS_BUT_MUST_FOR_IN_CLUSTER_SERVICE = + "Requested model ID [{}] does not have a matching trained model and thus cannot be updated."; + public static final String INFERENCE_ENTITY_NON_EXISTANT_NO_UPDATE = "The inference endpoint [{}] does not exist and cannot be updated"; private Messages() {} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java index fb75d95aeed1b..73e3c31297fbf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java @@ -93,6 +93,10 @@ public static ElasticsearchStatusException badRequestException(String msg, Objec return new ElasticsearchStatusException(msg, RestStatus.BAD_REQUEST, args); } + public static ElasticsearchStatusException entityNotFoundException(String msg, Object... 
args) { + return new ElasticsearchStatusException(msg, RestStatus.NOT_FOUND, args); + } + public static ElasticsearchStatusException taskOperationFailureToStatusException(TaskOperationFailure failure) { return new ElasticsearchStatusException(failure.getCause().getMessage(), failure.getStatus(), failure.getCause()); } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java index f82b6f155c0a0..3ca6b45c2948e 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java @@ -81,6 +81,21 @@ static String mockSparseServiceModelConfig(@Nullable TaskType taskTypeInBody) { """, taskType); } + static String updateConfig(@Nullable TaskType taskTypeInBody, String apiKey, int temperature) { + var taskType = taskTypeInBody == null ? "" : "\"task_type\": \"" + taskTypeInBody + "\","; + return Strings.format(""" + { + %s + "service_settings": { + "api_key": "%s" + }, + "task_settings": { + "temperature": %d + } + } + """, taskType, apiKey, temperature); + } + static String mockCompletionServiceModelConfig(@Nullable TaskType taskTypeInBody) { var taskType = taskTypeInBody == null ? 
"" : "\"task_type\": \"" + taskTypeInBody + "\","; return Strings.format(""" @@ -196,6 +211,11 @@ protected Map putModel(String modelId, String modelConfig, TaskT return putRequest(endpoint, modelConfig); } + protected Map updateEndpoint(String inferenceID, String modelConfig, TaskType taskType) throws IOException { + String endpoint = Strings.format("_inference/%s/%s/_update", taskType, inferenceID); + return putRequest(endpoint, modelConfig); + } + protected Map putPipeline(String pipelineId, String modelId) throws IOException { String endpoint = Strings.format("_ingest/pipeline/%s", pipelineId); String body = """ diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index 5a84fd8985504..98c8d43707219 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -16,6 +16,8 @@ import java.io.IOException; import java.util.List; +import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.function.Function; import java.util.stream.IntStream; @@ -29,7 +31,7 @@ public class InferenceCrudIT extends InferenceBaseRestTest { @SuppressWarnings("unchecked") - public void testGet() throws IOException { + public void testCRUD() throws IOException { for (int i = 0; i < 5; i++) { putModel("se_model_" + i, mockSparseServiceModelConfig(), TaskType.SPARSE_EMBEDDING); } @@ -53,11 +55,29 @@ public void testGet() throws IOException { for (var denseModel : getDenseModels) { assertEquals("text_embedding", denseModel.get("task_type")); } - - var singleModel = getModels("se_model_1", TaskType.SPARSE_EMBEDDING); - 
assertThat(singleModel, hasSize(1)); - assertEquals("se_model_1", singleModel.get(0).get("inference_id")); - + String oldApiKey; + { + var singleModel = getModels("se_model_1", TaskType.SPARSE_EMBEDDING); + assertThat(singleModel, hasSize(1)); + assertEquals("se_model_1", singleModel.get(0).get("inference_id")); + oldApiKey = (String) singleModel.get(0).get("api_key"); + } + var newApiKey = randomAlphaOfLength(10); + int temperature = randomIntBetween(1, 10); + Map updatedEndpoint = updateEndpoint( + "se_model_1", + updateConfig(TaskType.SPARSE_EMBEDDING, newApiKey, temperature), + TaskType.SPARSE_EMBEDDING + ); + Map updatedTaskSettings = (Map) updatedEndpoint.get("task_settings"); + assertEquals(temperature, updatedTaskSettings.get("temperature")); + { + var singleModel = getModels("se_model_1", TaskType.SPARSE_EMBEDDING); + assertThat(singleModel, hasSize(1)); + assertEquals("se_model_1", singleModel.get(0).get("inference_id")); + assertNotEquals(oldApiKey, newApiKey); + assertEquals(updatedEndpoint, singleModel.get(0)); + } for (int i = 0; i < 5; i++) { deleteModel("se_model_" + i, TaskType.SPARSE_EMBEDDING); } diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/AbstractTestInferenceService.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/AbstractTestInferenceService.java index 02dfff1b5c2e6..6496bcdd89f21 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/AbstractTestInferenceService.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/AbstractTestInferenceService.java @@ -163,6 +163,11 @@ public String getWriteableName() { public TransportVersion getMinimalSupportedVersion() { return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests } + + @Override + public TaskSettings 
updatedTaskSettings(Map newSettings) { + return fromMap(new HashMap<>(newSettings)); + } } public record TestSecretSettings(String apiKey) implements SecretSettings { @@ -211,5 +216,10 @@ public String getWriteableName() { public TransportVersion getMinimalSupportedVersion() { return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests } + + @Override + public SecretSettings newSecretSettings(Map newSecrets) { + return TestSecretSettings.fromMap(new HashMap<>(newSecrets)); + } } } diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java index 1a2f0fb6a1137..a76c4303268e4 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java @@ -600,6 +600,10 @@ public void writeTo(StreamOutput out) throws IOException { public boolean isEmpty() { return true; } + + public TaskSettings updatedTaskSettings(Map newSettings) { + return this; + } } record TestSecretSettings(String key) implements SecretSettings { @@ -625,6 +629,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); return builder; } + + @Override + public SecretSettings newSecretSettings(Map newSecrets) { + return new TestSecretSettings(newSecrets.get("secret").toString()); + } } TestModelOfAnyKind(String inferenceEntityId, TaskType taskType, String service) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 927fd94809886..d251120980e0b 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -47,12 +47,14 @@ import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.action.PutInferenceModelAction; +import org.elasticsearch.xpack.core.inference.action.UpdateInferenceModelAction; import org.elasticsearch.xpack.inference.action.TransportDeleteInferenceEndpointAction; import org.elasticsearch.xpack.inference.action.TransportGetInferenceDiagnosticsAction; import org.elasticsearch.xpack.inference.action.TransportGetInferenceModelAction; import org.elasticsearch.xpack.inference.action.TransportInferenceAction; import org.elasticsearch.xpack.inference.action.TransportInferenceUsageAction; import org.elasticsearch.xpack.inference.action.TransportPutInferenceModelAction; +import org.elasticsearch.xpack.inference.action.TransportUpdateInferenceModelAction; import org.elasticsearch.xpack.inference.action.filter.ShardBulkInferenceActionFilter; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.external.amazonbedrock.AmazonBedrockRequestSender; @@ -76,6 +78,7 @@ import org.elasticsearch.xpack.inference.rest.RestInferenceAction; import org.elasticsearch.xpack.inference.rest.RestPutInferenceModelAction; import org.elasticsearch.xpack.inference.rest.RestStreamInferenceAction; +import org.elasticsearch.xpack.inference.rest.RestUpdateInferenceModelAction; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.alibabacloudsearch.AlibabaCloudSearchService; import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockService; @@ -149,6 +152,7 @@ public InferencePlugin(Settings settings) { new 
ActionHandler<>(InferenceAction.INSTANCE, TransportInferenceAction.class), new ActionHandler<>(GetInferenceModelAction.INSTANCE, TransportGetInferenceModelAction.class), new ActionHandler<>(PutInferenceModelAction.INSTANCE, TransportPutInferenceModelAction.class), + new ActionHandler<>(UpdateInferenceModelAction.INSTANCE, TransportUpdateInferenceModelAction.class), new ActionHandler<>(DeleteInferenceEndpointAction.INSTANCE, TransportDeleteInferenceEndpointAction.class), new ActionHandler<>(XPackUsageFeatureAction.INFERENCE, TransportInferenceUsageAction.class), new ActionHandler<>(GetInferenceDiagnosticsAction.INSTANCE, TransportGetInferenceDiagnosticsAction.class) @@ -172,6 +176,7 @@ public List getRestHandlers( new RestStreamInferenceAction(), new RestGetInferenceModelAction(), new RestPutInferenceModelAction(), + new RestUpdateInferenceModelAction(), new RestDeleteInferenceEndpointAction(), new RestGetInferenceDiagnosticsAction() ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java index 49d65b6e0dc59..64eeed82ee1b9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java @@ -41,6 +41,7 @@ import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.inference.registry.ModelRegistry; +import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService; import java.io.IOException; @@ -100,7 +101,7 @@ protected void masterOperation( ActionListener listener ) throws Exception { var requestAsMap = 
requestToMap(request); - var resolvedTaskType = resolveTaskType(request.getTaskType(), (String) requestAsMap.remove(TaskType.NAME)); + var resolvedTaskType = ServiceUtils.resolveTaskType(request.getTaskType(), (String) requestAsMap.remove(TaskType.NAME)); String serviceName = (String) requestAsMap.remove(ModelConfigurations.SERVICE); if (serviceName == null) { @@ -227,37 +228,4 @@ protected ClusterBlockException checkBlock(PutInferenceModelAction.Request reque return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); } - /** - * task_type can be specified as either a URL parameter or in the - * request body. Resolve which to use or throw if the settings are - * inconsistent - * @param urlTaskType Taken from the URL parameter. ANY means not specified. - * @param bodyTaskType Taken from the request body. Maybe null - * @return The resolved task type - */ - static TaskType resolveTaskType(TaskType urlTaskType, String bodyTaskType) { - if (bodyTaskType == null) { - if (urlTaskType == TaskType.ANY) { - throw new ElasticsearchStatusException("model is missing required setting [task_type]", RestStatus.BAD_REQUEST); - } else { - return urlTaskType; - } - } - - TaskType parsedBodyTask = TaskType.fromStringOrStatusException(bodyTaskType); - if (parsedBodyTask == TaskType.ANY) { - throw new ElasticsearchStatusException("task_type [any] is not valid type for inference", RestStatus.BAD_REQUEST); - } - - if (parsedBodyTask.isAnyOrSame(urlTaskType) == false) { - throw new ElasticsearchStatusException( - "Cannot resolve conflicting task_type parameter in the request URL [{}] and the request body [{}]", - RestStatus.BAD_REQUEST, - urlTaskType.toString(), - bodyTaskType - ); - } - - return parsedBodyTask; - } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUpdateInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUpdateInferenceModelAction.java 
new file mode 100644 index 0000000000000..03a88e5228fa8 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUpdateInferenceModelAction.java @@ -0,0 +1,328 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.action; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.SubscribableListener; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.InferenceServiceRegistry; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.SecretSettings; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.inference.TaskSettings; +import org.elasticsearch.inference.TaskType; +import 
org.elasticsearch.inference.UnparsedModel; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xpack.core.inference.action.UpdateInferenceModelAction; +import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; +import org.elasticsearch.xpack.core.ml.action.UpdateTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentUtils; +import org.elasticsearch.xpack.core.ml.job.messages.Messages; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; +import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService; +import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalServiceSettings; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicReference; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.resolveTaskType; +import static org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalServiceSettings.NUM_ALLOCATIONS; + +public class TransportUpdateInferenceModelAction extends TransportMasterNodeAction< + UpdateInferenceModelAction.Request, + UpdateInferenceModelAction.Response> { + + private static final Logger logger = LogManager.getLogger(TransportUpdateInferenceModelAction.class); + + private final ModelRegistry modelRegistry; + private final InferenceServiceRegistry serviceRegistry; + private final Client client; + + @Inject + public TransportUpdateInferenceModelAction( + 
TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + ModelRegistry modelRegistry, + InferenceServiceRegistry serviceRegistry, + Client client, + Settings settings + ) { + super( + UpdateInferenceModelAction.NAME, + transportService, + clusterService, + threadPool, + actionFilters, + UpdateInferenceModelAction.Request::new, + indexNameExpressionResolver, + UpdateInferenceModelAction.Response::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.modelRegistry = modelRegistry; + this.serviceRegistry = serviceRegistry; + this.client = client; + } + + @Override + protected void masterOperation( + Task task, + UpdateInferenceModelAction.Request request, + ClusterState state, + ActionListener masterListener + ) { + var bodyTaskType = request.getContentAsSettings().taskType(); + var resolvedTaskType = resolveTaskType(request.getTaskType(), bodyTaskType != null ? bodyTaskType.toString() : null); + + AtomicReference service = new AtomicReference<>(); + + var inferenceEntityId = request.getInferenceEntityId(); + + SubscribableListener.newForked(listener -> { checkEndpointExists(inferenceEntityId, listener); }) + .andThen((listener, unparsedModel) -> { + + Optional optionalService = serviceRegistry.getService(unparsedModel.service()); + if (optionalService.isEmpty()) { + listener.onFailure( + new ElasticsearchStatusException( + "Service [{}] not found", + RestStatus.INTERNAL_SERVER_ERROR, + unparsedModel.service() + ) + ); + } else { + service.set(optionalService.get()); + listener.onResponse(unparsedModel); + } + }) + .andThen((listener, existingUnparsedModel) -> { + + Model existingParsedModel = service.get() + .parsePersistedConfigWithSecrets( + request.getInferenceEntityId(), + existingUnparsedModel.taskType(), + new HashMap<>(existingUnparsedModel.settings()), + new HashMap<>(existingUnparsedModel.secrets()) + ); + + Model newModel = 
combineExistingModelWithNewSettings( + existingParsedModel, + request.getContentAsSettings(), + service.get().name(), + resolvedTaskType + ); + + if (isInClusterService(service.get().name())) { + updateInClusterEndpoint(request, newModel, existingParsedModel, listener); + } else { + modelRegistry.updateModelTransaction(newModel, existingParsedModel, listener); + } + }) + .andThen((listener, didUpdate) -> { + if (didUpdate) { + modelRegistry.getModel(inferenceEntityId, ActionListener.wrap((unparsedModel) -> { + if (unparsedModel == null) { + listener.onFailure( + new ElasticsearchStatusException( + "Failed to update model, updated model not found", + RestStatus.INTERNAL_SERVER_ERROR + ) + ); + } else { + listener.onResponse( + service.get() + .parsePersistedConfig( + request.getInferenceEntityId(), + resolvedTaskType, + new HashMap<>(unparsedModel.settings()) + ) + .getConfigurations() + ); + } + }, listener::onFailure)); + } else { + listener.onFailure(new ElasticsearchStatusException("Failed to update model", RestStatus.INTERNAL_SERVER_ERROR)); + } + + }).andThen((listener, modelConfig) -> { + listener.onResponse(new UpdateInferenceModelAction.Response(modelConfig)); + }) + .addListener(masterListener); + } + + /** + * Combines the existing model with the new settings to create a new model using the + * SecretSettings and TaskSettings implementations for each service, as well as specifically handling NUM_ALLOCATIONS. 
+ * + * @param existingParsedModel the Model representing a third-party service endpoint + * @param settingsToUpdate new settings + * @param serviceName + * @return a new object representing the updated model + */ + private Model combineExistingModelWithNewSettings( + Model existingParsedModel, + UpdateInferenceModelAction.Settings settingsToUpdate, + String serviceName, + TaskType resolvedTaskType + ) { + ModelConfigurations existingConfigs = existingParsedModel.getConfigurations(); + TaskSettings existingTaskSettings = existingConfigs.getTaskSettings(); + SecretSettings existingSecretSettings = existingParsedModel.getSecretSettings(); + + SecretSettings newSecretSettings = existingSecretSettings; + TaskSettings newTaskSettings = existingTaskSettings; + ServiceSettings newServiceSettings = existingConfigs.getServiceSettings(); + + if (settingsToUpdate.serviceSettings() != null && existingSecretSettings != null) { + newSecretSettings = existingSecretSettings.newSecretSettings(settingsToUpdate.serviceSettings()); + } + if (settingsToUpdate.serviceSettings() != null && settingsToUpdate.serviceSettings().containsKey(NUM_ALLOCATIONS)) { + // In cluster services can only have their num_allocations updated, so this is a special case + if (newServiceSettings instanceof ElasticsearchInternalServiceSettings elasticServiceSettings) { + newServiceSettings = new ElasticsearchInternalServiceSettings( + elasticServiceSettings, + (Integer) settingsToUpdate.serviceSettings().get(NUM_ALLOCATIONS) + ); + } + } + if (settingsToUpdate.taskSettings() != null && existingTaskSettings != null) { + newTaskSettings = existingTaskSettings.updatedTaskSettings(settingsToUpdate.taskSettings()); + } + + if (existingParsedModel.getTaskType().equals(resolvedTaskType) == false) { + throw new ElasticsearchStatusException("Task type must match the task type of the existing endpoint", RestStatus.BAD_REQUEST); + } + + ModelConfigurations newModelConfigs = new ModelConfigurations( + 
existingParsedModel.getInferenceEntityId(), + existingParsedModel.getTaskType(), + serviceName, + newServiceSettings, + newTaskSettings + ); + + return new Model(newModelConfigs, new ModelSecrets(newSecretSettings)); + } + + private void updateInClusterEndpoint( + UpdateInferenceModelAction.Request request, + Model newModel, + Model existingParsedModel, + ActionListener listener + ) throws IOException { + // The model we are trying to update must have a trained model associated with it if it is an in-cluster deployment + throwIfTrainedModelDoesntExist(request); + + Map serviceSettings = request.getContentAsSettings().serviceSettings(); + if (serviceSettings != null && serviceSettings.get(NUM_ALLOCATIONS) instanceof Integer numAllocations) { + + UpdateTrainedModelDeploymentAction.Request updateRequest = new UpdateTrainedModelDeploymentAction.Request( + request.getInferenceEntityId() + ); + updateRequest.setNumberOfAllocations(numAllocations); + + var delegate = listener.delegateFailure((l2, response) -> { + modelRegistry.updateModelTransaction(newModel, existingParsedModel, l2); + }); + + logger.info( + "Updating trained model deployment for inference entity [{}] with [{}] num_allocations", + request.getInferenceEntityId(), + numAllocations + ); + client.execute(UpdateTrainedModelDeploymentAction.INSTANCE, updateRequest, delegate); + + } else { + listener.onFailure( + new ElasticsearchStatusException( + "Failed to parse [{}] of update request [{}]", + RestStatus.BAD_REQUEST, + NUM_ALLOCATIONS, + request.getContent().utf8ToString() + ) + ); + } + + } + + private boolean isInClusterService(String name) { + return List.of(ElasticsearchInternalService.NAME, ElasticsearchInternalService.OLD_ELSER_SERVICE_NAME).contains(name); + } + + private void throwIfTrainedModelDoesntExist(UpdateInferenceModelAction.Request request) throws ElasticsearchStatusException { + var assignments = TrainedModelAssignmentUtils.modelAssignments(request.getInferenceEntityId(), 
clusterService.state()); + if ((assignments == null || assignments.isEmpty())) { + throw ExceptionsHelper.entityNotFoundException( + Messages.MODEL_ID_DOES_NOT_MATCH_EXISTING_MODEL_IDS_BUT_MUST_FOR_IN_CLUSTER_SERVICE, + request.getInferenceEntityId() + + ); + } + } + + private void checkEndpointExists(String inferenceEntityId, ActionListener listener) { + modelRegistry.getModelWithSecrets(inferenceEntityId, ActionListener.wrap((model) -> { + if (model == null) { + listener.onFailure( + ExceptionsHelper.entityNotFoundException(Messages.INFERENCE_ENTITY_NON_EXISTANT_NO_UPDATE, inferenceEntityId) + ); + } else { + listener.onResponse(model); + } + }, e -> { + if (e instanceof ResourceNotFoundException) { + listener.onFailure( + // provide a more specific error message if the inference entity does not exist + ExceptionsHelper.entityNotFoundException(Messages.INFERENCE_ENTITY_NON_EXISTANT_NO_UPDATE, inferenceEntityId) + ); + } else { + listener.onFailure(e); + } + })); + } + + private static XContentParser getParser(UpdateInferenceModelAction.Request request) throws IOException { + return XContentHelper.createParser(XContentParserConfiguration.EMPTY, request.getContent(), request.getContentType()); + } + + @Override + protected ClusterBlockException checkBlock(UpdateInferenceModelAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java index d756c0ef26f14..62571c13aebf4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java @@ -3,6 +3,8 @@ * or more contributor license agreements. 
Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. + * + * this file contains code contributed by a generative AI */ package org.elasticsearch.xpack.inference.registry; @@ -21,6 +23,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; @@ -49,10 +52,13 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; import java.util.function.Function; import java.util.stream.Collectors; @@ -83,6 +89,8 @@ public static UnparsedModel unparsedModelFromMap(ModelConfigMap modelConfigMap) private final OriginSettingClient client; private Map defaultConfigs; + private final Set preventDeletionLock = Collections.newSetFromMap(new ConcurrentHashMap<>()); + public ModelRegistry(Client client) { this.client = new OriginSettingClient(client, ClientHelper.INFERENCE_ORIGIN); this.defaultConfigs = new HashMap<>(); @@ -306,7 +314,139 @@ private ModelConfigMap createModelConfigMap(SearchHits hits, String inferenceEnt ); } + public void updateModelTransaction(Model newModel, Model existingModel, ActionListener finalListener) { + + String inferenceEntityId = newModel.getConfigurations().getInferenceEntityId(); + logger.info("Attempting to store update to inference endpoint [{}]", inferenceEntityId); + + if (preventDeletionLock.contains(inferenceEntityId)) { + logger.warn(format("Attempted to update endpoint [{}] that is already being updated", inferenceEntityId)); 
+ finalListener.onFailure( + new ElasticsearchStatusException( + "Endpoint [{}] is currently being updated. Try again once the update completes", + RestStatus.CONFLICT, + inferenceEntityId + ) + ); + return; + } else { + preventDeletionLock.add(inferenceEntityId); + } + + SubscribableListener.newForked((subListener) -> { + // in this block, we try to update the stored model configurations + IndexRequest configRequest = createIndexRequest( + Model.documentId(inferenceEntityId), + InferenceIndex.INDEX_NAME, + newModel.getConfigurations(), + true + ); + + ActionListener storeConfigListener = subListener.delegateResponse((l, e) -> { + // this block will only be called if the bulk unexpectedly throws an exception + preventDeletionLock.remove(inferenceEntityId); + l.onFailure(e); + }); + + client.prepareBulk().add(configRequest).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).execute(storeConfigListener); + + }).andThen((subListener, configResponse) -> { + // in this block, we respond to the success or failure of updating the model configurations, then try to store the new secrets + if (configResponse.hasFailures()) { + // if storing the model configurations failed, it won't throw an exception, we need to check the BulkResponse and handle the + // exceptions ourselves. + logger.error( + format("Failed to update inference endpoint [%s] due to [%s]", inferenceEntityId, configResponse.buildFailureMessage()) + ); + // Since none of our updates succeeded at this point, we can simply return. 
+ finalListener.onFailure( + new ElasticsearchStatusException( + format("Failed to update inference endpoint [%s] due to [%s]", inferenceEntityId), + RestStatus.INTERNAL_SERVER_ERROR, + configResponse.buildFailureMessage() + ) + ); + } else { + // Since the model configurations were successfully updated, we can now try to store the new secrets + IndexRequest secretsRequest = createIndexRequest( + Model.documentId(newModel.getConfigurations().getInferenceEntityId()), + InferenceSecretsIndex.INDEX_NAME, + newModel.getSecrets(), + true + ); + + ActionListener storeSecretsListener = subListener.delegateResponse((l, e) -> { + // this block will only be called if the bulk unexpectedly throws an exception + preventDeletionLock.remove(inferenceEntityId); + l.onFailure(e); + }); + + client.prepareBulk() + .add(secretsRequest) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .execute(storeSecretsListener); + } + }).andThen((subListener, secretsResponse) -> { + // in this block, we respond to the success or failure of updating the model secrets + if (secretsResponse.hasFailures()) { + // since storing the secrets failed, we will try to restore / roll-back-to the previous model configurations + IndexRequest configRequest = createIndexRequest( + Model.documentId(inferenceEntityId), + InferenceIndex.INDEX_NAME, + existingModel.getConfigurations(), + true + ); + logger.error( + "Failed to update inference endpoint secrets [{}], attempting rolling back to previous state", + inferenceEntityId + ); + + ActionListener rollbackConfigListener = subListener.delegateResponse((l, e) -> { + // this block will only be called if the bulk unexpectedly throws an exception + preventDeletionLock.remove(inferenceEntityId); + l.onFailure(e); + }); + client.prepareBulk() + .add(configRequest) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .execute(rollbackConfigListener); + } else { + // since updating the secrets was successful, we can remove the lock and respond to the 
final listener + preventDeletionLock.remove(inferenceEntityId); + finalListener.onResponse(true); + } + }).andThen((subListener, configResponse) -> { + // this block will be called if the secrets response failed, and the rollback didn't throw an exception. + // The rollback still could have failed though, so we need to check for that. + preventDeletionLock.remove(inferenceEntityId); + if (configResponse.hasFailures()) { + logger.error( + format("Failed to update inference endpoint [%s] due to [%s]", inferenceEntityId, configResponse.buildFailureMessage()) + ); + finalListener.onFailure( + new ElasticsearchStatusException( + format( + "Failed to rollback while handling failure to update inference endpoint [%s]. " + + "Endpoint may be in an inconsistent state due to [%s]", + inferenceEntityId + ), + RestStatus.INTERNAL_SERVER_ERROR, + configResponse.buildFailureMessage() + ) + ); + } else { + logger.warn("Failed to update inference endpoint [{}], successfully rolled back to previous state", inferenceEntityId); + finalListener.onResponse(false); + } + }); + + } + + /** + * Note: storeModel does not overwrite existing models and thus does not need to check the lock + */ public void storeModel(Model model, ActionListener listener) { + ActionListener bulkResponseActionListener = getStoreModelListener(model, listener); IndexRequest configRequest = createIndexRequest( @@ -405,6 +545,16 @@ private static BulkItemResponse.Failure getFirstBulkFailure(BulkResponse bulkRes } public void deleteModel(String inferenceEntityId, ActionListener listener) { + if (preventDeletionLock.contains(inferenceEntityId)) { + listener.onFailure( + new ElasticsearchStatusException( + "Model is currently being updated, you may delete the model once the update completes", + RestStatus.CONFLICT + ) + ); + return; + } + DeleteByQueryRequest request = new DeleteByQueryRequest().setAbortOnVersionConflict(false); request.indices(InferenceIndex.INDEX_PATTERN, InferenceSecretsIndex.INDEX_PATTERN); 
request.setQuery(documentIdQuery(inferenceEntityId)); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java index 9f64b58e48b55..2dec72e6692a6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java @@ -14,6 +14,12 @@ public final class Paths { static final String INFERENCE_ID_PATH = "_inference/{" + TASK_TYPE_OR_INFERENCE_ID + "}"; static final String TASK_TYPE_INFERENCE_ID_PATH = "_inference/{" + TASK_TYPE_OR_INFERENCE_ID + "}/{" + INFERENCE_ID + "}"; static final String INFERENCE_DIAGNOSTICS_PATH = "_inference/.diagnostics"; + static final String TASK_TYPE_INFERENCE_ID_UPDATE_PATH = "_inference/{" + + TASK_TYPE_OR_INFERENCE_ID + + "}/{" + + INFERENCE_ID + + "}/_update"; + static final String INFERENCE_ID_UPDATE_PATH = "_inference/{" + TASK_TYPE_OR_INFERENCE_ID + "}/_update"; static final String STREAM_INFERENCE_ID_PATH = "_inference/{" + TASK_TYPE_OR_INFERENCE_ID + "}/_stream"; static final String STREAM_TASK_TYPE_INFERENCE_ID_PATH = "_inference/{" diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUpdateInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUpdateInferenceModelAction.java new file mode 100644 index 0000000000000..9405a6752538c --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUpdateInferenceModelAction.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.rest; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.RestUtils; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.core.inference.action.UpdateInferenceModelAction; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.PUT; +import static org.elasticsearch.xpack.inference.rest.Paths.INFERENCE_ID; +import static org.elasticsearch.xpack.inference.rest.Paths.INFERENCE_ID_UPDATE_PATH; +import static org.elasticsearch.xpack.inference.rest.Paths.TASK_TYPE_INFERENCE_ID_UPDATE_PATH; +import static org.elasticsearch.xpack.inference.rest.Paths.TASK_TYPE_OR_INFERENCE_ID; + +@ServerlessScope(Scope.PUBLIC) +public class RestUpdateInferenceModelAction extends BaseRestHandler { + @Override + public String getName() { + return "update_inference_model_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, INFERENCE_ID_UPDATE_PATH), new Route(PUT, TASK_TYPE_INFERENCE_ID_UPDATE_PATH)); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + String inferenceEntityId; + TaskType taskType; + if (restRequest.hasParam(INFERENCE_ID)) { + inferenceEntityId = restRequest.param(INFERENCE_ID); + taskType = TaskType.fromStringOrStatusException(restRequest.param(TASK_TYPE_OR_INFERENCE_ID)); + } else { + throw new ElasticsearchStatusException("Inference ID must be provided in the path", RestStatus.BAD_REQUEST); + } + + var request = new UpdateInferenceModelAction.Request( + inferenceEntityId, + restRequest.requiredContent(), + 
restRequest.getXContentType(), + taskType, + RestUtils.getMasterNodeTimeout(restRequest) + ); + return channel -> client.execute(UpdateInferenceModelAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java index 32c1d17373e53..c0e3c78b12f13 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java @@ -625,6 +625,40 @@ public static String mustBeAPositiveLongErrorMessage(String settingName, String return format("[%s] Invalid value [%s]. [%s] must be a positive long", scope, value, settingName); } + /** + * task_type can be specified as either a URL parameter or in the + * request body. Resolve which to use or throw if the settings are + * inconsistent + * @param urlTaskType Taken from the URL parameter. ANY means not specified. + * @param bodyTaskType Taken from the request body. 
Maybe null + * @return The resolved task type + */ + public static TaskType resolveTaskType(TaskType urlTaskType, String bodyTaskType) { + if (bodyTaskType == null) { + if (urlTaskType == TaskType.ANY) { + throw new ElasticsearchStatusException("model is missing required setting [task_type]", RestStatus.BAD_REQUEST); + } else { + return urlTaskType; + } + } + + TaskType parsedBodyTask = TaskType.fromStringOrStatusException(bodyTaskType); + if (parsedBodyTask == TaskType.ANY) { + throw new ElasticsearchStatusException("task_type [any] is not valid type for inference", RestStatus.BAD_REQUEST); + } + + if (parsedBodyTask.isAnyOrSame(urlTaskType) == false) { + throw new ElasticsearchStatusException( + "Cannot resolve conflicting task_type parameter in the request URL [{}] and the request body [{}]", + RestStatus.BAD_REQUEST, + urlTaskType.toString(), + bodyTaskType + ); + } + + return parsedBodyTask; + } + /** * Functional interface for creating an enum from a string. * @param diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/completion/AlibabaCloudSearchCompletionTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/completion/AlibabaCloudSearchCompletionTaskSettings.java index 63f82a8eceb98..05b5873a81d8d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/completion/AlibabaCloudSearchCompletionTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/completion/AlibabaCloudSearchCompletionTaskSettings.java @@ -139,4 +139,12 @@ public int hashCode() { public Map getParameters() { return parameters; } + + @Override + public TaskSettings updatedTaskSettings(Map newSettings) { + AlibabaCloudSearchCompletionTaskSettings updatedSettings = AlibabaCloudSearchCompletionTaskSettings.fromMap( + new HashMap<>(newSettings) + ); 
+ return of(this, updatedSettings); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/embeddings/AlibabaCloudSearchEmbeddingsTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/embeddings/AlibabaCloudSearchEmbeddingsTaskSettings.java index c908c219e4053..9a431717d9fb9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/embeddings/AlibabaCloudSearchEmbeddingsTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/embeddings/AlibabaCloudSearchEmbeddingsTaskSettings.java @@ -21,6 +21,7 @@ import java.io.IOException; import java.util.EnumSet; +import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -174,4 +175,10 @@ public int hashCode() { public static String invalidInputTypeMessage(InputType inputType) { return Strings.format("received invalid input type value [%s]", inputType.toString()); } + + @Override + public TaskSettings updatedTaskSettings(Map newSettings) { + AlibabaCloudSearchEmbeddingsTaskSettings newSettingsOnly = fromMap(new HashMap<>(newSettings)); + return of(this, newSettingsOnly, newSettingsOnly.inputType != null ? 
newSettingsOnly.inputType : this.getInputType()); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/rerank/AlibabaCloudSearchRerankTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/rerank/AlibabaCloudSearchRerankTaskSettings.java index 97e7ecd41223d..40c3dee00d6c7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/rerank/AlibabaCloudSearchRerankTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/rerank/AlibabaCloudSearchRerankTaskSettings.java @@ -102,4 +102,10 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(); } + + @Override + public TaskSettings updatedTaskSettings(Map newSettings) { + AlibabaCloudSearchRerankTaskSettings updatedSettings = new AlibabaCloudSearchRerankTaskSettings(); + return of(this, updatedSettings); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/sparse/AlibabaCloudSearchSparseTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/sparse/AlibabaCloudSearchSparseTaskSettings.java index 873cdf31fbe9d..0f4ebce920167 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/sparse/AlibabaCloudSearchSparseTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/sparse/AlibabaCloudSearchSparseTaskSettings.java @@ -21,6 +21,7 @@ import java.io.IOException; import java.util.EnumSet; +import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -188,4 +189,10 @@ public int hashCode() { public static String invalidInputTypeMessage(InputType inputType) { return Strings.format("received 
invalid input type value [%s]", inputType.toString()); } + + @Override + public TaskSettings updatedTaskSettings(Map newSettings) { + AlibabaCloudSearchSparseTaskSettings updatedSettings = fromMap(new HashMap<>(newSettings)); + return of(this, updatedSettings, updatedSettings.getInputType() != null ? updatedSettings.getInputType() : this.inputType); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockSecretSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockSecretSettings.java index 9e6328ce1c358..30a7dc9ad5a2e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockSecretSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockSecretSettings.java @@ -18,6 +18,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -107,4 +108,9 @@ public boolean equals(Object object) { public int hashCode() { return Objects.hash(accessKey, secretKey); } + + @Override + public SecretSettings newSecretSettings(Map newSecrets) { + return fromMap(new HashMap<>(newSecrets)); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionTaskSettings.java index 13787ed8cb6a4..c3db1465863e4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionTaskSettings.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionTaskSettings.java @@ -17,6 +17,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -192,4 +193,12 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(temperature, topP, topK, maxNewTokens); } + + @Override + public TaskSettings updatedTaskSettings(Map newSettings) { + AmazonBedrockChatCompletionRequestTaskSettings requestSettings = AmazonBedrockChatCompletionRequestTaskSettings.fromMap( + new HashMap<>(newSettings) + ); + return of(this, requestSettings); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/completion/AnthropicChatCompletionTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/completion/AnthropicChatCompletionTaskSettings.java index bb2c027127371..e8a6ca638c916 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/completion/AnthropicChatCompletionTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/completion/AnthropicChatCompletionTaskSettings.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import java.io.IOException; +import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -59,6 +60,11 @@ private static AnthropicChatCompletionTaskSettings fromPersistedMap(Map newSettings) { + return fromRequestMap(new HashMap<>(newSettings)); + } + private record CommonFields(int maxTokens, Double temperature, Double topP, Integer topK) {} private static CommonFields fromMap(Map map, ValidationException validationException) { diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/completion/AzureAiStudioChatCompletionTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/completion/AzureAiStudioChatCompletionTaskSettings.java index b8e33bac410fe..544c52f59a3c4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/completion/AzureAiStudioChatCompletionTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/completion/AzureAiStudioChatCompletionTaskSettings.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsTaskSettings; import java.io.IOException; +import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -178,6 +179,20 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + @Override + public String toString() { + return "AzureAiStudioChatCompletionTaskSettings{" + + "temperature=" + + temperature + + ", topP=" + + topP + + ", doSample=" + + doSample + + ", maxNewTokens=" + + maxNewTokens + + '}'; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -194,4 +209,11 @@ public int hashCode() { return Objects.hash(temperature, topP, doSample, maxNewTokens); } + @Override + public TaskSettings updatedTaskSettings(Map newSettings) { + AzureAiStudioChatCompletionRequestTaskSettings requestSettings = AzureAiStudioChatCompletionRequestTaskSettings.fromMap( + new HashMap<>(newSettings) + ); + return of(this, requestSettings); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsTaskSettings.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsTaskSettings.java index bdb6ae74e5ab3..340ee95cd7b0c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsTaskSettings.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsTaskSettings; import java.io.IOException; +import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -111,4 +112,12 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hashCode(user); } + + @Override + public TaskSettings updatedTaskSettings(Map newSettings) { + AzureAiStudioEmbeddingsRequestTaskSettings requestSettings = AzureAiStudioEmbeddingsRequestTaskSettings.fromMap( + new HashMap<>(newSettings) + ); + return AzureAiStudioEmbeddingsTaskSettings.of(this, requestSettings); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java index 06217e8079b06..a2bd4f6175989 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java @@ -19,6 +19,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -125,4 +126,9 @@ public boolean equals(Object object) { public int hashCode() { return Objects.hash(entraId, apiKey); } 
+ + @Override + public SecretSettings newSecretSettings(Map newSecrets) { + return AzureOpenAiSecretSettings.fromMap(new HashMap<>(newSecrets)); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettings.java index de0a0897a93c5..3008a543b8fea 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettings.java @@ -18,6 +18,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -107,4 +108,12 @@ public boolean equals(Object object) { public int hashCode() { return Objects.hash(user); } + + @Override + public TaskSettings updatedTaskSettings(Map newSettings) { + AzureOpenAiCompletionRequestTaskSettings updatedSettings = AzureOpenAiCompletionRequestTaskSettings.fromMap( + new HashMap<>(newSettings) + ); + return of(this, updatedSettings); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsTaskSettings.java index 28ccade0a06b0..4157d7748d789 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsTaskSettings.java @@ -18,6 +18,7 @@ 
import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -116,4 +117,12 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(user); } + + @Override + public TaskSettings updatedTaskSettings(Map newSettings) { + AzureOpenAiEmbeddingsRequestTaskSettings requestSettings = AzureOpenAiEmbeddingsRequestTaskSettings.fromMap( + new HashMap<>(newSettings) + ); + return of(this, requestSettings); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettings.java index 34d37d0003adf..b789d1578290a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettings.java @@ -22,6 +22,7 @@ import java.io.IOException; import java.util.EnumSet; +import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -204,4 +205,10 @@ public int hashCode() { public static String invalidInputTypeMessage(InputType inputType) { return Strings.format("received invalid input type value [%s]", inputType.toString()); } + + @Override + public TaskSettings updatedTaskSettings(Map newSettings) { + CohereEmbeddingsTaskSettings updatedSettings = CohereEmbeddingsTaskSettings.fromMap(new HashMap<>(newSettings)); + return of(this, updatedSettings, updatedSettings.inputType != null ? 
updatedSettings.inputType : this.inputType); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankTaskSettings.java index f5893c825efcf..479000f840502 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankTaskSettings.java @@ -20,6 +20,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -186,4 +187,9 @@ public Integer getMaxChunksPerDoc() { return maxChunksPerDoc; } + @Override + public TaskSettings updatedTaskSettings(Map newSettings) { + CohereRerankTaskSettings updatedSettings = CohereRerankTaskSettings.fromMap(new HashMap<>(newSettings)); + return CohereRerankTaskSettings.of(this, updatedSettings); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandRerankTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandRerankTaskSettings.java index 70d787152121f..a0be1661b860d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandRerankTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandRerankTaskSettings.java @@ -17,6 +17,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -87,7 +88,11 @@ public CustomElandRerankTaskSettings(StreamInput in) throws IOException { } public 
CustomElandRerankTaskSettings(@Nullable Boolean doReturnDocuments) { - this.returnDocuments = doReturnDocuments; + if (doReturnDocuments == null) { + this.returnDocuments = true; + } else { + this.returnDocuments = doReturnDocuments; + } } @Override @@ -136,4 +141,10 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(returnDocuments); } + + @Override + public TaskSettings updatedTaskSettings(Map newSettings) { + CustomElandRerankTaskSettings updatedSettings = CustomElandRerankTaskSettings.fromMap(new HashMap<>(newSettings)); + return of(this, updatedSettings); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java index f8b5837ef387e..37e0f28dfb3fe 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java @@ -122,6 +122,18 @@ protected ElasticsearchInternalServiceSettings(ElasticsearchInternalServiceSetti this.adaptiveAllocationsSettings = other.adaptiveAllocationsSettings; } + /** + * Copy constructor with the ability to set the number of allocations. Used for Update API. 
+ * @param other the existing settings + * @param numAllocations the new number of allocations + */ + public ElasticsearchInternalServiceSettings(ElasticsearchInternalServiceSettings other, int numAllocations) { + this.numAllocations = numAllocations; + this.numThreads = other.numThreads; + this.modelId = other.modelId; + this.adaptiveAllocationsSettings = other.adaptiveAllocationsSettings; + } + public ElasticsearchInternalServiceSettings(StreamInput in) throws IOException { if (in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_ADAPTIVE_ALLOCATIONS)) { this.numAllocations = in.readOptionalVInt(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserMlNodeTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserMlNodeTaskSettings.java index 33696231668a5..3bcaa57827fdb 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserMlNodeTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserMlNodeTaskSettings.java @@ -15,6 +15,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Map; import java.util.Objects; public class ElserMlNodeTaskSettings implements TaskSettings { @@ -65,4 +66,9 @@ public int hashCode() { // Return the hash of NAME to make the serialization tests pass return Objects.hash(NAME); } + + @Override + public TaskSettings updatedTaskSettings(Map newSettings) { + return DEFAULT; + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiSecretSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiSecretSettings.java index 57c8d61f9f9a5..20dbadb9b3eae 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiSecretSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiSecretSettings.java @@ -19,6 +19,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -30,7 +31,7 @@ public class GoogleVertexAiSecretSettings implements SecretSettings { public static final String SERVICE_ACCOUNT_JSON = "service_account_json"; - private final SecureString serviceAccountJson; + final SecureString serviceAccountJson; public static GoogleVertexAiSecretSettings fromMap(@Nullable Map map) { if (map == null) { @@ -101,4 +102,9 @@ public boolean equals(Object object) { public int hashCode() { return Objects.hash(serviceAccountJson); } + + @Override + public SecretSettings newSecretSettings(Map newSecrets) { + return GoogleVertexAiSecretSettings.fromMap(new HashMap<>(newSecrets)); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsTaskSettings.java index 5e0185a7abb36..b7242100178a3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsTaskSettings.java @@ -17,6 +17,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -107,4 +108,12 @@ public boolean equals(Object object) { public int hashCode() { return 
Objects.hash(autoTruncate); } + + @Override + public TaskSettings updatedTaskSettings(Map newSettings) { + GoogleVertexAiEmbeddingsRequestTaskSettings requestSettings = GoogleVertexAiEmbeddingsRequestTaskSettings.fromMap( + new HashMap<>(newSettings) + ); + return of(this, requestSettings); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankTaskSettings.java index 8256eed7a5cba..64bec7e6cfeef 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankTaskSettings.java @@ -18,6 +18,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -107,4 +108,12 @@ public boolean equals(Object object) { public int hashCode() { return Objects.hash(topN); } + + @Override + public TaskSettings updatedTaskSettings(Map newSettings) { + GoogleVertexAiRerankRequestTaskSettings requestSettings = GoogleVertexAiRerankRequestTaskSettings.fromMap( + new HashMap<>(newSettings) + ); + return of(this, requestSettings); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettings.java index 3c2586fb5a264..44064f61f5180 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettings.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettings.java @@ -18,6 +18,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -107,4 +108,12 @@ public boolean equals(Object object) { public int hashCode() { return Objects.hash(user); } + + @Override + public TaskSettings updatedTaskSettings(Map newSettings) { + OpenAiChatCompletionRequestTaskSettings updatedSettings = OpenAiChatCompletionRequestTaskSettings.fromMap( + new HashMap<>(newSettings) + ); + return of(this, updatedSettings); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java index c7cc60043ef47..64f852822703c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import java.io.IOException; +import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -127,4 +128,10 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(user); } + + @Override + public TaskSettings updatedTaskSettings(Map newSettings) { + OpenAiEmbeddingsRequestTaskSettings requestSettings = OpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(newSettings)); + return of(this, requestSettings); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettings.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettings.java index 6affa998c089d..c68d4bc801724 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettings.java @@ -19,6 +19,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -78,4 +79,9 @@ public TransportVersion getMinimalSupportedVersion() { public void writeTo(StreamOutput out) throws IOException { out.writeSecureString(apiKey); } + + @Override + public SecretSettings newSecretSettings(Map newSecrets) { + return fromMap(new HashMap<>(newSecrets)); + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/EmptySecretSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/EmptySecretSettingsTests.java index b50ea9e5ee224..d27a326d5fa1e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/EmptySecretSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/EmptySecretSettingsTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Tuple; import org.elasticsearch.inference.EmptySecretSettings; import org.elasticsearch.test.AbstractWireSerializingTestCase; @@ -32,4 +33,13 @@ protected EmptySecretSettings mutateInstance(EmptySecretSettings instance) { // All instances are the same and have no fields, nothing to mutate return null; } + + public void testNewSecretSettings() { + + EmptySecretSettings newSecretSettings = (EmptySecretSettings) EmptySecretSettings.INSTANCE.newSecretSettings( + randomMap(0, 3, () -> new 
Tuple<>(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))) + ); + + assertSame(EmptySecretSettings.INSTANCE, newSecretSettings); + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/EmptyTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/EmptyTaskSettingsTests.java index 060dc23b935cc..7bc0cc57e31ab 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/EmptyTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/EmptyTaskSettingsTests.java @@ -11,12 +11,20 @@ import org.elasticsearch.inference.EmptyTaskSettings; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import java.util.Map; + public class EmptyTaskSettingsTests extends AbstractWireSerializingTestCase { public static EmptyTaskSettings createRandom() { return EmptyTaskSettings.INSTANCE; // no options to randomise } + public void testUpdatedTaskSettings() { + EmptyTaskSettings initialSettings = createRandom(); + EmptyTaskSettings updatedSettings = (EmptyTaskSettings) initialSettings.updatedTaskSettings(Map.of()); + assertEquals(EmptyTaskSettings.INSTANCE, updatedSettings); + } + @Override protected Writeable.Reader instanceReader() { return EmptyTaskSettings::new; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelSecretsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelSecretsTests.java index d6d139190c12c..ea2f41bf5c6cf 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelSecretsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelSecretsTests.java @@ -20,6 +20,7 @@ import java.io.IOException; import java.util.List; +import java.util.Map; public class ModelSecretsTests extends AbstractWireSerializingTestCase { @@ -83,5 +84,10 @@ public String getWriteableName() { 
public TransportVersion getMinimalSupportedVersion() { return TransportVersions.V_8_11_X; } + + @Override + public SecretSettings newSecretSettings(Map newSecrets) { + return new FakeSecretSettings(newSecrets.get(API_KEY).toString()); + } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelActionTests.java index 27e56c1bd973d..991c5a581eb35 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelActionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.ServiceUtils; import static org.hamcrest.Matchers.containsString; @@ -17,27 +18,18 @@ public class TransportPutInferenceModelActionTests extends ESTestCase { public void testResolveTaskType() { - assertEquals(TaskType.SPARSE_EMBEDDING, TransportPutInferenceModelAction.resolveTaskType(TaskType.SPARSE_EMBEDDING, null)); - assertEquals( - TaskType.SPARSE_EMBEDDING, - TransportPutInferenceModelAction.resolveTaskType(TaskType.ANY, TaskType.SPARSE_EMBEDDING.toString()) - ); + assertEquals(TaskType.SPARSE_EMBEDDING, ServiceUtils.resolveTaskType(TaskType.SPARSE_EMBEDDING, null)); + assertEquals(TaskType.SPARSE_EMBEDDING, ServiceUtils.resolveTaskType(TaskType.ANY, TaskType.SPARSE_EMBEDDING.toString())); - var e = expectThrows( - ElasticsearchStatusException.class, - () -> TransportPutInferenceModelAction.resolveTaskType(TaskType.ANY, null) - ); + var e = expectThrows(ElasticsearchStatusException.class, () -> ServiceUtils.resolveTaskType(TaskType.ANY, null)); 
assertThat(e.getMessage(), containsString("model is missing required setting [task_type]")); - e = expectThrows( - ElasticsearchStatusException.class, - () -> TransportPutInferenceModelAction.resolveTaskType(TaskType.ANY, TaskType.ANY.toString()) - ); + e = expectThrows(ElasticsearchStatusException.class, () -> ServiceUtils.resolveTaskType(TaskType.ANY, TaskType.ANY.toString())); assertThat(e.getMessage(), containsString("task_type [any] is not valid type for inference")); e = expectThrows( ElasticsearchStatusException.class, - () -> TransportPutInferenceModelAction.resolveTaskType(TaskType.SPARSE_EMBEDDING, TaskType.TEXT_EMBEDDING.toString()) + () -> ServiceUtils.resolveTaskType(TaskType.SPARSE_EMBEDDING, TaskType.TEXT_EMBEDDING.toString()) ); assertThat( e.getMessage(), diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java index d8c25fb5a6d88..779a98e023455 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java @@ -25,6 +25,7 @@ import org.elasticsearch.xpack.inference.services.ServiceUtils; import java.io.IOException; +import java.util.HashMap; import java.util.Map; import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength; @@ -217,6 +218,11 @@ public String getWriteableName() { public TransportVersion getMinimalSupportedVersion() { return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests } + + @Override + public TaskSettings updatedTaskSettings(Map newSettings) { + return TestTaskSettings.fromMap(new HashMap<>(newSettings)); + } } public record TestSecretSettings(String apiKey) implements SecretSettings { @@ -265,5 +271,10 @@ public String getWriteableName() { public TransportVersion getMinimalSupportedVersion() { 
return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests } + + @Override + public SecretSettings newSecretSettings(Map newSecrets) { + return new TestSecretSettings(newSecrets.get("api_key").toString()); + } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/completion/AlibabaCloudSearchCompletionTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/completion/AlibabaCloudSearchCompletionTaskSettingsTests.java index c48d57cf3e03b..7acba78b3066b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/completion/AlibabaCloudSearchCompletionTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/completion/AlibabaCloudSearchCompletionTaskSettingsTests.java @@ -7,16 +7,17 @@ package org.elasticsearch.xpack.inference.services.alibabacloudsearch.completion; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.hamcrest.MatcherAssert; import java.io.IOException; +import java.util.Collections; import java.util.HashMap; import java.util.Map; +import static org.elasticsearch.xpack.inference.services.alibabacloudsearch.completion.AlibabaCloudSearchCompletionTaskSettings.PARAMETERS; import static org.hamcrest.Matchers.is; public class AlibabaCloudSearchCompletionTaskSettingsTests extends AbstractWireSerializingTestCase< @@ -34,10 +35,20 @@ public void testFromMap() { ); } - public void testIsEmpty() { - var randomSettings = createRandom(); - var stringRep = Strings.toString(randomSettings); - assertEquals(stringRep, randomSettings.isEmpty(), stringRep.equals("{}")); + public void testUpdatedTaskSettings() { + var 
initialSettings = createRandom(); + var newSettings = createRandom(); + Map newSettingsMap = new HashMap<>(); + if (newSettings.getParameters() != null) { + newSettingsMap.put(PARAMETERS, newSettings.getParameters()); + } + AlibabaCloudSearchCompletionTaskSettings updatedSettings = (AlibabaCloudSearchCompletionTaskSettings) initialSettings + .updatedTaskSettings(Collections.unmodifiableMap(newSettingsMap)); + if (newSettings.getParameters() == null) { + assertEquals(initialSettings.getParameters(), updatedSettings.getParameters()); + } else { + assertEquals(newSettings.getParameters(), updatedSettings.getParameters()); + } } @Override @@ -60,7 +71,7 @@ public static Map getTaskSettingsMap(@Nullable Map(); if (params != null) { - map.put(AlibabaCloudSearchCompletionTaskSettings.PARAMETERS, params); + map.put(PARAMETERS, params); } return map; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/embeddings/AlibabaCloudSearchEmbeddingsTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/embeddings/AlibabaCloudSearchEmbeddingsTaskSettingsTests.java index 9e75a2f475051..4b558949fdc4a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/embeddings/AlibabaCloudSearchEmbeddingsTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/embeddings/AlibabaCloudSearchEmbeddingsTaskSettingsTests.java @@ -15,10 +15,12 @@ import org.hamcrest.MatcherAssert; import java.io.IOException; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import static org.elasticsearch.xpack.inference.InputTypeTests.randomWithIngestAndSearch; +import static org.elasticsearch.xpack.inference.services.alibabacloudsearch.embeddings.AlibabaCloudSearchEmbeddingsTaskSettings.INPUT_TYPE; import static 
org.hamcrest.Matchers.is; public class AlibabaCloudSearchEmbeddingsTaskSettingsTests extends AbstractWireSerializingTestCase< @@ -31,13 +33,27 @@ public static AlibabaCloudSearchEmbeddingsTaskSettings createRandom() { public void testFromMap() { MatcherAssert.assertThat( - AlibabaCloudSearchEmbeddingsTaskSettings.fromMap( - new HashMap<>(Map.of(AlibabaCloudSearchEmbeddingsTaskSettings.INPUT_TYPE, "ingest")) - ), + AlibabaCloudSearchEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(INPUT_TYPE, "ingest"))), is(new AlibabaCloudSearchEmbeddingsTaskSettings(InputType.INGEST)) ); } + public void testUpdatedTaskSettings() { + var initialSettings = createRandom(); + var newSettings = createRandom(); + Map newSettingsMap = new HashMap<>(); + if (newSettings.getInputType() != null) { + newSettingsMap.put(INPUT_TYPE, newSettings.getInputType().toString()); + } + AlibabaCloudSearchEmbeddingsTaskSettings updatedSettings = (AlibabaCloudSearchEmbeddingsTaskSettings) initialSettings + .updatedTaskSettings(Collections.unmodifiableMap(newSettingsMap)); + if (newSettings.getInputType() == null) { + assertEquals(initialSettings.getInputType(), updatedSettings.getInputType()); + } else { + assertEquals(newSettings.getInputType(), updatedSettings.getInputType()); + } + } + public void testFromMap_WhenInputTypeIsNull() { InputType inputType = null; MatcherAssert.assertThat( @@ -72,7 +88,7 @@ public static Map getTaskSettingsMap(@Nullable InputType inputTy var map = new HashMap(); if (inputType != null) { - map.put(AlibabaCloudSearchEmbeddingsTaskSettings.INPUT_TYPE, inputType.toString()); + map.put(INPUT_TYPE, inputType.toString()); } return map; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/sparse/AlibabaCloudSearchSparseTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/sparse/AlibabaCloudSearchSparseTaskSettingsTests.java index 
2c134c6765078..fa78b24d1a4bb 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/sparse/AlibabaCloudSearchSparseTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/sparse/AlibabaCloudSearchSparseTaskSettingsTests.java @@ -19,6 +19,8 @@ import java.util.Map; import static org.elasticsearch.xpack.inference.InputTypeTests.randomWithIngestAndSearch; +import static org.elasticsearch.xpack.inference.services.alibabacloudsearch.sparse.AlibabaCloudSearchSparseTaskSettings.INPUT_TYPE; +import static org.elasticsearch.xpack.inference.services.alibabacloudsearch.sparse.AlibabaCloudSearchSparseTaskSettings.RETURN_TOKEN; import static org.hamcrest.Matchers.is; public class AlibabaCloudSearchSparseTaskSettingsTests extends AbstractWireSerializingTestCase { @@ -31,11 +33,33 @@ public static AlibabaCloudSearchSparseTaskSettings createRandom() { public void testFromMap() { MatcherAssert.assertThat( - AlibabaCloudSearchSparseTaskSettings.fromMap(new HashMap<>(Map.of(AlibabaCloudSearchSparseTaskSettings.INPUT_TYPE, "ingest"))), + AlibabaCloudSearchSparseTaskSettings.fromMap(new HashMap<>(Map.of(INPUT_TYPE, "ingest"))), is(new AlibabaCloudSearchSparseTaskSettings(InputType.INGEST, null)) ); } + public void testUpdatedTaskSettings() { + { + var initialSettings = createRandom(); + var newSettings = createRandom(); + AlibabaCloudSearchSparseTaskSettings updatedSettings = (AlibabaCloudSearchSparseTaskSettings) initialSettings + .updatedTaskSettings(Map.of(RETURN_TOKEN, newSettings.isReturnToken())); + } + { + var initialSettings = createRandom(); + var newSettings = createRandom(); + AlibabaCloudSearchSparseTaskSettings updatedSettings = (AlibabaCloudSearchSparseTaskSettings) initialSettings + .updatedTaskSettings( + Map.of( + INPUT_TYPE, + newSettings.getInputType() == null ? 
InputType.SEARCH.toString() : newSettings.getInputType().toString(), + RETURN_TOKEN, + newSettings.isReturnToken() + ) + ); + } + } + public void testIsEmpty() { var randomSettings = createRandom(); var stringRep = Strings.toString(randomSettings); @@ -69,11 +93,11 @@ public static Map getTaskSettingsMap(@Nullable InputType inputTy var map = new HashMap(); if (inputType != null) { - map.put(AlibabaCloudSearchSparseTaskSettings.INPUT_TYPE, inputType.toString()); + map.put(INPUT_TYPE, inputType.toString()); } if (returnToken != null) { - map.put(AlibabaCloudSearchSparseTaskSettings.RETURN_TOKEN, returnToken); + map.put(RETURN_TOKEN, returnToken); } return map; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockSecretSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockSecretSettingsTests.java index 904851842a6c8..88aebd2d9d42b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockSecretSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockSecretSettingsTests.java @@ -29,6 +29,17 @@ public class AmazonBedrockSecretSettingsTests extends AbstractBWCWireSerializationTestCase { + public void testNewSecretSettings() { + AmazonBedrockSecretSettings initialSettings = createRandom(); + AmazonBedrockSecretSettings newSettings = createRandom(); + + AmazonBedrockSecretSettings finalSettings = (AmazonBedrockSecretSettings) initialSettings.newSecretSettings( + Map.of(ACCESS_KEY_FIELD, newSettings.accessKey.toString(), SECRET_KEY_FIELD, newSettings.secretKey.toString()) + ); + + assertEquals(newSettings, finalSettings); + } + public void testIt_CreatesSettings_ReturnsNullFromMap_null() { var secrets = AmazonBedrockSecretSettings.fromMap(null); assertNull(secrets); diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionTaskSettingsTests.java index 69dd3b1e6257b..adbf2c66ca0e2 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionTaskSettingsTests.java @@ -38,6 +38,68 @@ public void testIsEmpty() { assertEquals(stringRep, randomSettings.isEmpty(), stringRep.equals("{}")); } + public void updatedTaskSettings_WithEmptyMap_ReturnsSameSettings() { + var initialSettings = createRandom(); + AmazonBedrockChatCompletionTaskSettings updatedSettings = (AmazonBedrockChatCompletionTaskSettings) initialSettings + .updatedTaskSettings(Map.of()); + assertEquals(initialSettings, updatedSettings); + } + + public void updatedTaskSettings_WithNewTemperature_ReturnsUpdatedSettings() { + var initialSettings = createRandom(); + Map newSettings = Map.of(TEMPERATURE_FIELD, 0.7); + AmazonBedrockChatCompletionTaskSettings updatedSettings = (AmazonBedrockChatCompletionTaskSettings) initialSettings + .updatedTaskSettings(newSettings); + assertEquals(0.7, (double) updatedSettings.temperature(), 0.001); + assertEquals(initialSettings.topP(), updatedSettings.topP()); + assertEquals(initialSettings.topK(), updatedSettings.topK()); + assertEquals(initialSettings.maxNewTokens(), updatedSettings.maxNewTokens()); + } + + public void updatedTaskSettings_WithNewTopP_ReturnsUpdatedSettings() { + var initialSettings = createRandom(); + Map newSettings = Map.of(TOP_P_FIELD, 0.8); + AmazonBedrockChatCompletionTaskSettings updatedSettings = (AmazonBedrockChatCompletionTaskSettings) 
initialSettings + .updatedTaskSettings(newSettings); + assertEquals(0.8, (double) updatedSettings.topP(), 0.001); + assertEquals(initialSettings.temperature(), updatedSettings.temperature()); + assertEquals(initialSettings.topK(), updatedSettings.topK()); + assertEquals(initialSettings.maxNewTokens(), updatedSettings.maxNewTokens()); + } + + public void updatedTaskSettings_WithNewTopK_ReturnsUpdatedSettings() { + var initialSettings = createRandom(); + Map newSettings = Map.of(TOP_K_FIELD, 0.9); + AmazonBedrockChatCompletionTaskSettings updatedSettings = (AmazonBedrockChatCompletionTaskSettings) initialSettings + .updatedTaskSettings(newSettings); + assertEquals(0.9, (double) updatedSettings.topK(), 0.001); + assertEquals(initialSettings.temperature(), updatedSettings.temperature()); + assertEquals(initialSettings.topP(), updatedSettings.topP()); + assertEquals(initialSettings.maxNewTokens(), updatedSettings.maxNewTokens()); + } + + public void updatedTaskSettings_WithNewMaxNewTokens_ReturnsUpdatedSettings() { + var initialSettings = createRandom(); + Map newSettings = Map.of(MAX_NEW_TOKENS_FIELD, 256); + AmazonBedrockChatCompletionTaskSettings updatedSettings = (AmazonBedrockChatCompletionTaskSettings) initialSettings + .updatedTaskSettings(newSettings); + assertEquals(256, (double) updatedSettings.maxNewTokens(), 0.001); + assertEquals(initialSettings.temperature(), updatedSettings.temperature()); + assertEquals(initialSettings.topP(), updatedSettings.topP()); + assertEquals(initialSettings.topK(), updatedSettings.topK()); + } + + public void updatedTaskSettings_WithMultipleNewValues_ReturnsUpdatedSettings() { + var initialSettings = createRandom(); + Map newSettings = Map.of(TEMPERATURE_FIELD, 0.7, TOP_P_FIELD, 0.8, TOP_K_FIELD, 0.9, MAX_NEW_TOKENS_FIELD, 256); + AmazonBedrockChatCompletionTaskSettings updatedSettings = (AmazonBedrockChatCompletionTaskSettings) initialSettings + .updatedTaskSettings(newSettings); + assertEquals(0.7, (double) 
updatedSettings.temperature(), 0.001); + assertEquals(0.8, (double) updatedSettings.topP(), 0.001); + assertEquals(0.9, (double) updatedSettings.topK(), 0.001); + assertEquals(256, (int) updatedSettings.maxNewTokens(), 0.001); + } + public void testFromMap_AllValues() { var taskMap = getChatCompletionTaskSettingsMap(1.0, 0.5, 0.6, 512); assertEquals( diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/completion/AnthropicChatCompletionTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/completion/AnthropicChatCompletionTaskSettingsTests.java index e00de80e8709e..5f6823770345f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/completion/AnthropicChatCompletionTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/completion/AnthropicChatCompletionTaskSettingsTests.java @@ -24,6 +24,25 @@ public class AnthropicChatCompletionTaskSettingsTests extends AbstractBWCWireSerializationTestCase { + public void testUpdatedTaskSettings() { + var initialSettings = createRandom(); + var newSettings = createRandom(); + AnthropicChatCompletionTaskSettings updatedSettings = (AnthropicChatCompletionTaskSettings) initialSettings.updatedTaskSettings( + Map.of( + AnthropicServiceFields.MAX_TOKENS, + newSettings.maxTokens(), + AnthropicServiceFields.TEMPERATURE_FIELD, + newSettings.temperature(), + AnthropicServiceFields.TOP_P_FIELD, + newSettings.topP(), + AnthropicServiceFields.TOP_K_FIELD, + newSettings.topK() + ) + ); + + assertEquals(newSettings, updatedSettings); + } + public static Map getChatCompletionTaskSettingsMap( @Nullable Integer maxTokens, @Nullable Double temperature, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/completion/AzureAiStudioChatCompletionTaskSettingsTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/completion/AzureAiStudioChatCompletionTaskSettingsTests.java index 8d7dcf1ef5170..21c1a233348fe 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/completion/AzureAiStudioChatCompletionTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/completion/AzureAiStudioChatCompletionTaskSettingsTests.java @@ -19,6 +19,7 @@ import org.hamcrest.MatcherAssert; import java.io.IOException; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -38,6 +39,30 @@ public void testIsEmpty() { assertEquals(stringRep, randomSettings.isEmpty(), stringRep.equals("{}")); } + public void testUpdatedTaskSettings() { + var initialSettings = createRandom(); + var newSettings = createRandom(); + var settingsMap = new HashMap(); + if (newSettings.doSample() != null) settingsMap.put(DO_SAMPLE_FIELD, newSettings.doSample()); + if (newSettings.temperature() != null) settingsMap.put(TEMPERATURE_FIELD, newSettings.temperature()); + if (newSettings.topP() != null) settingsMap.put(TOP_P_FIELD, newSettings.topP()); + if (newSettings.maxNewTokens() != null) settingsMap.put(MAX_NEW_TOKENS_FIELD, newSettings.maxNewTokens()); + + AzureAiStudioChatCompletionTaskSettings updatedSettings = (AzureAiStudioChatCompletionTaskSettings) initialSettings + .updatedTaskSettings(Collections.unmodifiableMap(settingsMap)); + + assertEquals( + newSettings.temperature() == null ? initialSettings.temperature() : newSettings.temperature(), + updatedSettings.temperature() + ); + assertEquals(newSettings.topP() == null ? initialSettings.topP() : newSettings.topP(), updatedSettings.topP()); + assertEquals(newSettings.doSample() == null ? initialSettings.doSample() : newSettings.doSample(), updatedSettings.doSample()); + assertEquals( + newSettings.maxNewTokens() == null ? 
initialSettings.maxNewTokens() : newSettings.maxNewTokens(), + updatedSettings.maxNewTokens() + ); + } + public void testFromMap_AllValues() { var taskMap = getTaskSettingsMap(1.0, 2.0, true, 512); assertEquals( diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsTaskSettingsTests.java index 4b6b38bd15c0d..cdfde5fcb09c9 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsTaskSettingsTests.java @@ -20,6 +20,7 @@ import org.hamcrest.MatcherAssert; import java.io.IOException; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -32,6 +33,23 @@ public void testIsEmpty() { assertEquals(stringRep, randomSettings.isEmpty(), stringRep.equals("{}")); } + public void testUpdatedTaskSettings() { + var initialSettings = createRandom(); + var newSettings = createRandom(); + Map newSettingsMap = new HashMap<>(); + if (newSettings.user() != null) { + newSettingsMap.put(AzureAiStudioConstants.USER_FIELD, newSettings.user()); + } + AzureAiStudioEmbeddingsTaskSettings updatedSettings = (AzureAiStudioEmbeddingsTaskSettings) initialSettings.updatedTaskSettings( + Collections.unmodifiableMap(newSettingsMap) + ); + if (newSettings.user() == null) { + assertEquals(initialSettings.user(), updatedSettings.user()); + } else { + assertEquals(newSettings.user(), updatedSettings.user()); + } + } + public void testFromMap_WithUser() { assertEquals( new AzureAiStudioEmbeddingsTaskSettings("user"), diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettingsTests.java index e08365e7ca3bf..dbbf90054a55b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettingsTests.java @@ -30,7 +30,31 @@ public class AzureOpenAiSecretSettingsTests extends AbstractBWCWireSerializationTestCase { public static AzureOpenAiSecretSettings createRandom() { - return new AzureOpenAiSecretSettings(randomSecureStringOfLength(15), randomSecureStringOfLength(15)); + boolean isApiKeyNotEntraId = randomBoolean(); + return new AzureOpenAiSecretSettings( + isApiKeyNotEntraId ? randomSecureStringOfLength(15) : null, + isApiKeyNotEntraId == false ? 
randomSecureStringOfLength(15) : null + ); + } + + public void testNewSecretSettingsApiKey() { + AzureOpenAiSecretSettings initialSettings = createRandom(); + AzureOpenAiSecretSettings newSettings = new AzureOpenAiSecretSettings(randomSecureStringOfLength(15), null); + AzureOpenAiSecretSettings finalSettings = (AzureOpenAiSecretSettings) initialSettings.newSecretSettings( + Map.of(API_KEY, newSettings.apiKey().toString()) + ); + + assertEquals(newSettings, finalSettings); + } + + public void testNewSecretSettingsEntraId() { + AzureOpenAiSecretSettings initialSettings = createRandom(); + AzureOpenAiSecretSettings newSettings = new AzureOpenAiSecretSettings(null, randomSecureStringOfLength(15)); + AzureOpenAiSecretSettings finalSettings = (AzureOpenAiSecretSettings) initialSettings.newSecretSettings( + Map.of(ENTRA_ID, newSettings.entraId().toString()) + ); + + assertEquals(newSettings, finalSettings); } public void testFromMap_ApiKey_Only() { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java index 8e8d9c4f92800..9d77abfe6d512 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java @@ -38,6 +38,16 @@ public void testIsEmpty() { assertEquals(stringRep, randomSettings.isEmpty(), stringRep.equals("{}")); } + public void testUpdatedTaskSettings() { + var initialSettings = createRandom(); + var newSettings = createRandom(); + AzureOpenAiCompletionTaskSettings updatedSettings = (AzureOpenAiCompletionTaskSettings) initialSettings.updatedTaskSettings( + 
newSettings.user() == null ? Map.of() : Map.of(AzureOpenAiServiceFields.USER, newSettings.user()) + ); + + assertEquals(newSettings.user() == null ? initialSettings.user() : newSettings.user(), updatedSettings.user()); + } + public void testFromMap_WithUser() { var user = "user"; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsTaskSettingsTests.java index 72a063af37b90..4df9f2f6bcce0 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsTaskSettingsTests.java @@ -12,13 +12,13 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; import org.hamcrest.MatcherAssert; import java.io.IOException; import java.util.HashMap; import java.util.Map; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields.USER; import static org.hamcrest.Matchers.is; public class AzureOpenAiEmbeddingsTaskSettingsTests extends AbstractWireSerializingTestCase { @@ -41,17 +41,31 @@ public static AzureOpenAiEmbeddingsTaskSettings createRandom() { return new AzureOpenAiEmbeddingsTaskSettings(user); } + public void testUpdatedTaskSettings() { + var initialSettings = createRandom(); + var newSettings = createRandom(); + AzureOpenAiEmbeddingsTaskSettings updatedSettings = (AzureOpenAiEmbeddingsTaskSettings) initialSettings.updatedTaskSettings( + newSettings.user() == null ? 
Map.of() : Map.of(USER, newSettings.user()) + ); + + if (newSettings.user() == null) { + assertEquals(initialSettings.user(), updatedSettings.user()); + } else { + assertEquals(newSettings.user(), updatedSettings.user()); + } + } + public void testFromMap_WithUser() { assertEquals( new AzureOpenAiEmbeddingsTaskSettings("user"), - AzureOpenAiEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, "user"))) + AzureOpenAiEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(USER, "user"))) ); } public void testFromMap_UserIsEmptyString() { var thrownException = expectThrows( ValidationException.class, - () -> AzureOpenAiEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, ""))) + () -> AzureOpenAiEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(USER, ""))) ); MatcherAssert.assertThat( @@ -66,7 +80,7 @@ public void testFromMap_MissingUser_DoesNotThrowException() { } public void testOverrideWith_KeepsOriginalValuesWithOverridesAreNull() { - var taskSettings = AzureOpenAiEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, "user"))); + var taskSettings = AzureOpenAiEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(USER, "user"))); var overriddenTaskSettings = AzureOpenAiEmbeddingsTaskSettings.of( taskSettings, @@ -76,11 +90,9 @@ public void testOverrideWith_KeepsOriginalValuesWithOverridesAreNull() { } public void testOverrideWith_UsesOverriddenSettings() { - var taskSettings = AzureOpenAiEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, "user"))); + var taskSettings = AzureOpenAiEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(USER, "user"))); - var requestTaskSettings = AzureOpenAiEmbeddingsRequestTaskSettings.fromMap( - new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, "user2")) - ); + var requestTaskSettings = AzureOpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of(USER, "user2"))); var overriddenTaskSettings = 
AzureOpenAiEmbeddingsTaskSettings.of(taskSettings, requestTaskSettings); MatcherAssert.assertThat(overriddenTaskSettings, is(new AzureOpenAiEmbeddingsTaskSettings("user2"))); @@ -105,7 +117,7 @@ public static Map getAzureOpenAiRequestTaskSettingsMap(@Nullable var map = new HashMap(); if (user != null) { - map.put(AzureOpenAiServiceFields.USER, user); + map.put(USER, user); } return map; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java index 90c9b032465c6..3df8fcaf5d6b8 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java @@ -19,6 +19,7 @@ import java.io.IOException; import java.util.Arrays; +import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.Locale; @@ -43,6 +44,31 @@ public void testIsEmpty() { assertEquals(stringRep, randomSettings.isEmpty(), stringRep.equals("{}")); } + public void testUpdatedTaskSettings() { + var initialSettings = createRandom(); + var newSettings = createRandom(); + Map newSettingsMap = new HashMap<>(); + if (newSettings.getInputType() != null) { + newSettingsMap.put(CohereEmbeddingsTaskSettings.INPUT_TYPE, newSettings.getInputType().toString()); + } + if (newSettings.getTruncation() != null) { + newSettingsMap.put(CohereServiceFields.TRUNCATE, newSettings.getTruncation().toString()); + } + CohereEmbeddingsTaskSettings updatedSettings = (CohereEmbeddingsTaskSettings) initialSettings.updatedTaskSettings( + Collections.unmodifiableMap(newSettingsMap) + ); + if (newSettings.getInputType() == null) { + 
assertEquals(initialSettings.getInputType(), updatedSettings.getInputType()); + } else { + assertEquals(newSettings.getInputType(), updatedSettings.getInputType()); + } + if (newSettings.getTruncation() == null) { + assertEquals(initialSettings.getTruncation(), updatedSettings.getTruncation()); + } else { + assertEquals(newSettings.getTruncation(), updatedSettings.getTruncation()); + } + } + public void testFromMap_CreatesEmptySettings_WhenAllFieldsAreNull() { MatcherAssert.assertThat( CohereEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of())), diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankTaskSettingsTests.java new file mode 100644 index 0000000000000..6924ee05ecbb8 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankTaskSettingsTests.java @@ -0,0 +1,154 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.cohere.rerank; + +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; + +public class CohereRerankTaskSettingsTests extends AbstractWireSerializingTestCase { + + public static CohereRerankTaskSettings createRandom() { + var returnDocuments = randomBoolean() ? randomBoolean() : null; + var topNDocsOnly = randomBoolean() ? 
randomIntBetween(1, 10) : null; + var maxChunksPerDoc = randomBoolean() ? randomIntBetween(1, 20) : null; + + return new CohereRerankTaskSettings(topNDocsOnly, returnDocuments, maxChunksPerDoc); + } + + public void testFromMap_WithValidValues_ReturnsSettings() { + Map taskMap = Map.of( + CohereRerankTaskSettings.RETURN_DOCUMENTS, + true, + CohereRerankTaskSettings.TOP_N_DOCS_ONLY, + 5, + CohereRerankTaskSettings.MAX_CHUNKS_PER_DOC, + 10 + ); + var settings = CohereRerankTaskSettings.fromMap(new HashMap<>(taskMap)); + assertTrue(settings.getReturnDocuments()); + assertEquals(5, settings.getTopNDocumentsOnly().intValue()); + assertEquals(10, settings.getMaxChunksPerDoc().intValue()); + } + + public void testFromMap_WithNullValues_ReturnsSettingsWithNulls() { + var settings = CohereRerankTaskSettings.fromMap(Map.of()); + assertNull(settings.getReturnDocuments()); + assertNull(settings.getTopNDocumentsOnly()); + assertNull(settings.getMaxChunksPerDoc()); + } + + public void testFromMap_WithInvalidReturnDocuments_ThrowsValidationException() { + Map taskMap = Map.of( + CohereRerankTaskSettings.RETURN_DOCUMENTS, + "invalid", + CohereRerankTaskSettings.TOP_N_DOCS_ONLY, + 5, + CohereRerankTaskSettings.MAX_CHUNKS_PER_DOC, + 10 + ); + var thrownException = expectThrows(ValidationException.class, () -> CohereRerankTaskSettings.fromMap(new HashMap<>(taskMap))); + assertThat(thrownException.getMessage(), containsString("field [return_documents] is not of the expected type")); + } + + public void testFromMap_WithInvalidTopNDocsOnly_ThrowsValidationException() { + Map taskMap = Map.of( + CohereRerankTaskSettings.RETURN_DOCUMENTS, + true, + CohereRerankTaskSettings.TOP_N_DOCS_ONLY, + "invalid", + CohereRerankTaskSettings.MAX_CHUNKS_PER_DOC, + 10 + ); + var thrownException = expectThrows(ValidationException.class, () -> CohereRerankTaskSettings.fromMap(new HashMap<>(taskMap))); + assertThat(thrownException.getMessage(), containsString("field [top_n] is not of the expected type")); + 
} + + public void testFromMap_WithInvalidMaxChunksPerDoc_ThrowsValidationException() { + Map taskMap = Map.of( + CohereRerankTaskSettings.RETURN_DOCUMENTS, + true, + CohereRerankTaskSettings.TOP_N_DOCS_ONLY, + 5, + CohereRerankTaskSettings.MAX_CHUNKS_PER_DOC, + "invalid" + ); + var thrownException = expectThrows(ValidationException.class, () -> CohereRerankTaskSettings.fromMap(new HashMap<>(taskMap))); + assertThat(thrownException.getMessage(), containsString("field [max_chunks_per_doc] is not of the expected type")); + } + + public void UpdatedTaskSettings_WithEmptyMap_ReturnsSameSettings() { + var initialSettings = new CohereRerankTaskSettings(5, true, 10); + CohereRerankTaskSettings updatedSettings = (CohereRerankTaskSettings) initialSettings.updatedTaskSettings(Map.of()); + assertEquals(initialSettings, updatedSettings); + } + + public void testUpdatedTaskSettings_WithNewReturnDocuments_ReturnsUpdatedSettings() { + var initialSettings = new CohereRerankTaskSettings(5, true, 10); + Map newSettings = Map.of(CohereRerankTaskSettings.RETURN_DOCUMENTS, false); + CohereRerankTaskSettings updatedSettings = (CohereRerankTaskSettings) initialSettings.updatedTaskSettings(newSettings); + assertFalse(updatedSettings.getReturnDocuments()); + assertEquals(initialSettings.getTopNDocumentsOnly(), updatedSettings.getTopNDocumentsOnly()); + assertEquals(initialSettings.getMaxChunksPerDoc(), updatedSettings.getMaxChunksPerDoc()); + } + + public void testUpdatedTaskSettings_WithNewTopNDocsOnly_ReturnsUpdatedSettings() { + var initialSettings = new CohereRerankTaskSettings(5, true, 10); + Map newSettings = Map.of(CohereRerankTaskSettings.TOP_N_DOCS_ONLY, 7); + CohereRerankTaskSettings updatedSettings = (CohereRerankTaskSettings) initialSettings.updatedTaskSettings(newSettings); + assertEquals(7, updatedSettings.getTopNDocumentsOnly().intValue()); + assertEquals(initialSettings.getReturnDocuments(), updatedSettings.getReturnDocuments()); + 
assertEquals(initialSettings.getMaxChunksPerDoc(), updatedSettings.getMaxChunksPerDoc()); + } + + public void testUpdatedTaskSettings_WithNewMaxChunksPerDoc_ReturnsUpdatedSettings() { + var initialSettings = new CohereRerankTaskSettings(5, true, 10); + Map newSettings = Map.of(CohereRerankTaskSettings.MAX_CHUNKS_PER_DOC, 15); + CohereRerankTaskSettings updatedSettings = (CohereRerankTaskSettings) initialSettings.updatedTaskSettings(newSettings); + assertEquals(15, updatedSettings.getMaxChunksPerDoc().intValue()); + assertEquals(initialSettings.getReturnDocuments(), updatedSettings.getReturnDocuments()); + assertEquals(initialSettings.getTopNDocumentsOnly(), updatedSettings.getTopNDocumentsOnly()); + } + + public void testUpdatedTaskSettings_WithMultipleNewValues_ReturnsUpdatedSettings() { + var initialSettings = new CohereRerankTaskSettings(5, true, 10); + Map newSettings = Map.of( + CohereRerankTaskSettings.RETURN_DOCUMENTS, + false, + CohereRerankTaskSettings.TOP_N_DOCS_ONLY, + 7, + CohereRerankTaskSettings.MAX_CHUNKS_PER_DOC, + 15 + ); + CohereRerankTaskSettings updatedSettings = (CohereRerankTaskSettings) initialSettings.updatedTaskSettings(newSettings); + assertFalse(updatedSettings.getReturnDocuments()); + assertEquals(7, updatedSettings.getTopNDocumentsOnly().intValue()); + assertEquals(15, updatedSettings.getMaxChunksPerDoc().intValue()); + } + + @Override + protected Writeable.Reader instanceReader() { + return CohereRerankTaskSettings::new; + } + + @Override + protected CohereRerankTaskSettings createTestInstance() { + return createRandom(); + } + + @Override + protected CohereRerankTaskSettings mutateInstance(CohereRerankTaskSettings instance) throws IOException { + return randomValueOtherThan(instance, CohereRerankTaskSettingsTests::createRandom); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandRerankTaskSettingsTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandRerankTaskSettingsTests.java index 72e6daa911c1d..4207896fc54f3 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandRerankTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandRerankTaskSettingsTests.java @@ -15,7 +15,9 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; +import java.util.Collections; import java.util.HashMap; +import java.util.Map; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.sameInstance; @@ -28,6 +30,23 @@ public void testIsEmpty() { assertEquals(stringRep, randomSettings.isEmpty(), stringRep.equals("{}")); } + public void testUpdatedTaskSettings() { + var initialSettings = createRandom(); + var newSettings = createRandom(); + Map newSettingsMap = new HashMap<>(); + if (newSettings.returnDocuments() != null) { + newSettingsMap.put(CustomElandRerankTaskSettings.RETURN_DOCUMENTS, newSettings.returnDocuments()); + } + CustomElandRerankTaskSettings updatedSettings = (CustomElandRerankTaskSettings) initialSettings.updatedTaskSettings( + Collections.unmodifiableMap(newSettingsMap) + ); + if (newSettings.returnDocuments() == null) { + assertEquals(initialSettings.returnDocuments(), updatedSettings.returnDocuments()); + } else { + assertEquals(newSettings.returnDocuments(), updatedSettings.returnDocuments()); + } + } + public void testDefaultsFromMap_MapIsNull_ReturnsDefaultSettings() { var customElandRerankTaskSettings = CustomElandRerankTaskSettings.defaultsFromMap(null); @@ -69,18 +88,6 @@ public void testToXContent_WritesAllValues() throws IOException { {"return_documents":true}""")); } - public void testToXContent_DoesNotWriteReturnDocuments_IfNull() throws IOException { - Boolean bool = null; - var serviceSettings = new 
CustomElandRerankTaskSettings(bool); - - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); - serviceSettings.toXContent(builder, null); - String xContentResult = Strings.toString(builder); - - assertThat(xContentResult, is(""" - {}""")); - } - public void testOf_PrefersNonNullRequestTaskSettings() { var originalSettings = new CustomElandRerankTaskSettings(Boolean.FALSE); var requestTaskSettings = new CustomElandRerankTaskSettings(Boolean.TRUE); @@ -90,16 +97,6 @@ public void testOf_PrefersNonNullRequestTaskSettings() { assertThat(taskSettings, sameInstance(requestTaskSettings)); } - public void testOf_UseOriginalSettings_IfRequestSettingsValuesAreNull() { - Boolean bool = null; - var originalSettings = new CustomElandRerankTaskSettings(Boolean.TRUE); - var requestTaskSettings = new CustomElandRerankTaskSettings(bool); - - var taskSettings = CustomElandRerankTaskSettings.of(originalSettings, requestTaskSettings); - - assertThat(taskSettings, sameInstance(originalSettings)); - } - private static CustomElandRerankTaskSettings createRandom() { return new CustomElandRerankTaskSettings(randomOptionalBoolean()); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiSecretSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiSecretSettingsTests.java index 95d3522b863a9..90738d43aacb3 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiSecretSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiSecretSettingsTests.java @@ -29,6 +29,15 @@ public static GoogleVertexAiSecretSettings createRandom() { return new GoogleVertexAiSecretSettings(randomSecureStringOfLength(30)); } + public void testNewSecretSettings() { + GoogleVertexAiSecretSettings 
initialSettings = createRandom(); + GoogleVertexAiSecretSettings newSettings = createRandom(); + GoogleVertexAiSecretSettings newGoogleVertexAiSecretSettings = (GoogleVertexAiSecretSettings) initialSettings.newSecretSettings( + Map.of(GoogleVertexAiSecretSettings.SERVICE_ACCOUNT_JSON, newSettings.serviceAccountJson.toString()) + ); + assertEquals(newSettings, newGoogleVertexAiSecretSettings); + } + public void testFromMap_ReturnsNull_WhenMapIsNUll() { assertNull(GoogleVertexAiSecretSettings.fromMap(null)); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsTaskSettingsTests.java index ac7e9348b370b..5b87bbc3c42c8 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsTaskSettingsTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; import java.io.IOException; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -31,6 +32,23 @@ public void testIsEmpty() { assertEquals(stringRep, randomSettings.isEmpty(), stringRep.equals("{}")); } + public void testUpdatedTaskSettings() { + var initialSettings = createRandom(); + var newSettings = createRandom(); + Map newSettingsMap = new HashMap<>(); + if (newSettings.autoTruncate() != null) { + newSettingsMap.put(GoogleVertexAiEmbeddingsTaskSettings.AUTO_TRUNCATE, newSettings.autoTruncate()); + } + GoogleVertexAiEmbeddingsTaskSettings updatedSettings = (GoogleVertexAiEmbeddingsTaskSettings) initialSettings.updatedTaskSettings( + 
Collections.unmodifiableMap(newSettingsMap) + ); + if (newSettings.autoTruncate() == null) { + assertEquals(initialSettings.autoTruncate(), updatedSettings.autoTruncate()); + } else { + assertEquals(newSettings.autoTruncate(), updatedSettings.autoTruncate()); + } + } + public void testFromMap_AutoTruncateIsSet() { var autoTruncate = true; var taskSettingsMap = getTaskSettingsMap(autoTruncate); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankTaskSettingsTests.java index 03f89b6a2c042..957defb54d846 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankTaskSettingsTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; import java.io.IOException; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -33,6 +34,23 @@ public void testIsEmpty() { assertEquals(stringRep, randomSettings.isEmpty(), stringRep.equals("{}")); } + public void testUpdatedTaskSettings() { + var initialSettings = createRandom(); + var newSettings = createRandom(); + Map newSettingsMap = new HashMap<>(); + if (newSettings.topN() != null) { + newSettingsMap.put(GoogleVertexAiRerankTaskSettings.TOP_N, newSettings.topN()); + } + GoogleVertexAiRerankTaskSettings updatedSettings = (GoogleVertexAiRerankTaskSettings) initialSettings.updatedTaskSettings( + Collections.unmodifiableMap(newSettingsMap) + ); + if (newSettings.topN() == null) { + assertEquals(initialSettings.topN(), updatedSettings.topN()); + } else { + assertEquals(newSettings.topN(), 
updatedSettings.topN()); + } + } + public void testFromMap_TopNIsSet() { var topN = 1; var taskSettingsMap = getTaskSettingsMap(topN); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettingsTests.java index 16d7e8f1db9be..9d1170bb23dbb 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettingsTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields; import java.io.IOException; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -31,6 +32,23 @@ public void testIsEmpty() { assertEquals(stringRep, randomSettings.isEmpty(), stringRep.equals("{}")); } + public void testUpdatedTaskSettings() { + var initialSettings = createRandomWithUser(); + var newSettings = createRandomWithUser(); + Map newSettingsMap = new HashMap<>(); + if (newSettings.user() != null) { + newSettingsMap.put(OpenAiServiceFields.USER, newSettings.user()); + } + OpenAiChatCompletionTaskSettings updatedSettings = (OpenAiChatCompletionTaskSettings) initialSettings.updatedTaskSettings( + Collections.unmodifiableMap(newSettingsMap) + ); + if (newSettings.user() == null) { + assertEquals(initialSettings.user(), updatedSettings.user()); + } else { + assertEquals(newSettings.user(), updatedSettings.user()); + } + } + public void testFromMap_WithUser() { assertEquals( new OpenAiChatCompletionTaskSettings("user"), diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java index a5ae2f0a3a44b..0512c36e64de5 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java @@ -14,9 +14,11 @@ import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields; +import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionTaskSettings; import org.hamcrest.MatcherAssert; import java.io.IOException; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -37,11 +39,28 @@ public static OpenAiEmbeddingsTaskSettings createRandom() { } public void testIsEmpty() { - var randomSettings = createRandom(); + var randomSettings = new OpenAiChatCompletionTaskSettings(randomBoolean() ? 
null : "username"); var stringRep = Strings.toString(randomSettings); assertEquals(stringRep, randomSettings.isEmpty(), stringRep.equals("{}")); } + public void testUpdatedTaskSettings() { + var initialSettings = createRandom(); + var newSettings = createRandom(); + Map newSettingsMap = new HashMap<>(); + if (newSettings.user() != null) { + newSettingsMap.put(OpenAiServiceFields.USER, newSettings.user()); + } + OpenAiEmbeddingsTaskSettings updatedSettings = (OpenAiEmbeddingsTaskSettings) initialSettings.updatedTaskSettings( + Collections.unmodifiableMap(newSettingsMap) + ); + if (newSettings.user() == null) { + assertEquals(initialSettings.user(), updatedSettings.user()); + } else { + assertEquals(newSettings.user(), updatedSettings.user()); + } + } + public void testFromMap_WithUser() { assertEquals( new OpenAiEmbeddingsTaskSettings("user"), diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettingsTests.java index 212a867349e5c..118cf25a452a7 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettingsTests.java @@ -26,6 +26,15 @@ public static DefaultSecretSettings createRandom() { return new DefaultSecretSettings(new SecureString(randomAlphaOfLength(15).toCharArray())); } + public void testNewSecretSettings() { + DefaultSecretSettings initialSettings = createRandom(); + DefaultSecretSettings newSettings = createRandom(); + DefaultSecretSettings finalSettings = (DefaultSecretSettings) initialSettings.newSecretSettings( + Map.of(DefaultSecretSettings.API_KEY, newSettings.apiKey().toString()) + ); + assertEquals(newSettings, finalSettings); + } + public void testFromMap() { var apiKey = 
"abc"; var serviceSettings = DefaultSecretSettings.fromMap(new HashMap<>(Map.of(DefaultSecretSettings.API_KEY, apiKey))); diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 853d0fd9318ae..d791873eb3142 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -174,6 +174,7 @@ public class Constants { "cluster:admin/xpack/enrich/reindex", "cluster:admin/xpack/inference/delete", "cluster:admin/xpack/inference/put", + "cluster:admin/xpack/inference/update", "cluster:admin/xpack/license/basic_status", // "cluster:admin/xpack/license/delete", "cluster:admin/xpack/license/feature_usage", From b23984ddc472256a2d4c3b556fdebaf1f42bda99 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Fri, 11 Oct 2024 19:32:18 -0400 Subject: [PATCH 026/449] Refactor IPinfoIpDataLookupsTests tests (and others) (#114667) --- .../ingest/geoip/DatabaseTests.java | 14 + .../geoip/IpinfoIpDataLookupsTests.java | 299 ++++++++---------- .../ingest/geoip/MMDBUtilTests.java | 35 -- .../geoip/MaxmindIpDataLookupsTests.java | 36 +++ 4 files changed, 177 insertions(+), 207 deletions(-) diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/DatabaseTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/DatabaseTests.java index 5710a20277527..39ecf4e70383b 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/DatabaseTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/DatabaseTests.java @@ -45,4 +45,18 @@ public void testDatabasePropertyInvariants() { // 
n.b. this is just a choice we decided to make here at Elastic assertThat(Database.Enterprise.defaultProperties(), equalTo(Database.City.defaultProperties())); } + + public void testDatabaseVariantPropertyInvariants() { + // the second ASN variant database is like a specialization of the ASN database + assertThat(Sets.difference(Database.Asn.properties(), Database.AsnV2.properties()), is(empty())); + assertThat(Database.Asn.defaultProperties(), equalTo(Database.AsnV2.defaultProperties())); + + // the second City variant database is like a version of the ordinary City database but lacking many fields + assertThat(Sets.difference(Database.CityV2.properties(), Database.City.properties()), is(empty())); + assertThat(Sets.difference(Database.CityV2.defaultProperties(), Database.City.defaultProperties()), is(empty())); + + // the second Country variant database is like a version of the ordinary Country database but lacking come fields + assertThat(Sets.difference(Database.CountryV2.properties(), Database.CountryV2.properties()), is(empty())); + assertThat(Database.CountryV2.defaultProperties(), equalTo(Database.Country.defaultProperties())); + } } diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java index 4ecf3056db738..4167170567f52 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java @@ -15,15 +15,9 @@ import org.apache.lucene.util.Constants; import org.elasticsearch.common.network.NetworkAddress; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESTestCase; 
-import org.elasticsearch.threadpool.TestThreadPool; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; import org.junit.After; import org.junit.Before; @@ -41,50 +35,27 @@ import static org.elasticsearch.ingest.geoip.IpinfoIpDataLookups.parseBoolean; import static org.elasticsearch.ingest.geoip.IpinfoIpDataLookups.parseLocationDouble; import static org.hamcrest.Matchers.anyOf; -import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; public class IpinfoIpDataLookupsTests extends ESTestCase { - private ThreadPool threadPool; - private ResourceWatcherService resourceWatcherService; - // a temporary directory that mmdb files can be copied to and read from private Path tmpDir; @Before public void setup() { - threadPool = new TestThreadPool(ConfigDatabases.class.getSimpleName()); - Settings settings = Settings.builder().put("resource.reload.interval.high", TimeValue.timeValueMillis(100)).build(); - resourceWatcherService = new ResourceWatcherService(settings, threadPool); tmpDir = createTempDir(); } @After public void cleanup() throws IOException { - resourceWatcherService.close(); - threadPool.shutdownNow(); IOUtils.rm(tmpDir); } - public void testDatabasePropertyInvariants() { - // the second ASN variant database is like a specialization of the ASN database - assertThat(Sets.difference(Database.Asn.properties(), Database.AsnV2.properties()), is(empty())); - assertThat(Database.Asn.defaultProperties(), equalTo(Database.AsnV2.defaultProperties())); - - // the second City variant database is like a version of the ordinary City database but lacking many fields - assertThat(Sets.difference(Database.CityV2.properties(), Database.City.properties()), 
is(empty())); - assertThat(Sets.difference(Database.CityV2.defaultProperties(), Database.City.defaultProperties()), is(empty())); - - // the second Country variant database is like a version of the ordinary Country database but lacking come fields - assertThat(Sets.difference(Database.CountryV2.properties(), Database.CountryV2.properties()), is(empty())); - assertThat(Database.CountryV2.defaultProperties(), equalTo(Database.Country.defaultProperties())); - } - public void testParseAsn() { // expected case: "AS123" is 123 assertThat(parseAsn("AS123"), equalTo(123L)); @@ -126,53 +97,42 @@ public void testParseLocationDouble() { assertThat(parseLocationDouble("anythingelse"), nullValue()); } - public void testAsn() throws IOException { + public void testAsnFree() { assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); - Path configDir = tmpDir; - copyDatabase("ipinfo/ip_asn_sample.mmdb", configDir.resolve("ip_asn_sample.mmdb")); - copyDatabase("ipinfo/asn_sample.mmdb", configDir.resolve("asn_sample.mmdb")); - - GeoIpCache cache = new GeoIpCache(1000); // real cache to test purging of entries upon a reload - ConfigDatabases configDatabases = new ConfigDatabases(configDir, cache); - configDatabases.initialize(resourceWatcherService); - - // this is the 'free' ASN database (sample) - try (DatabaseReaderLazyLoader loader = configDatabases.getDatabase("ip_asn_sample.mmdb")) { - IpDataLookup lookup = new IpinfoIpDataLookups.Asn(Database.AsnV2.properties()); - Map data = lookup.getData(loader, "5.182.109.0"); - assertThat( - data, - equalTo( - Map.ofEntries( - entry("ip", "5.182.109.0"), - entry("organization_name", "M247 Europe SRL"), - entry("asn", 9009L), - entry("network", "5.182.109.0/24"), - entry("domain", "m247.com") - ) - ) - ); - } + String databaseName = "ip_asn_sample.mmdb"; + String ip = "5.182.109.0"; + assertExpectedLookupResults( + databaseName, + ip, + new IpinfoIpDataLookups.Asn(Database.AsnV2.properties()), + 
Map.ofEntries( + entry("ip", ip), + entry("organization_name", "M247 Europe SRL"), + entry("asn", 9009L), + entry("network", "5.182.109.0/24"), + entry("domain", "m247.com") + ) + ); + } - // this is the non-free or 'standard' ASN database (sample) - try (DatabaseReaderLazyLoader loader = configDatabases.getDatabase("asn_sample.mmdb")) { - IpDataLookup lookup = new IpinfoIpDataLookups.Asn(Database.AsnV2.properties()); - Map data = lookup.getData(loader, "23.53.116.0"); - assertThat( - data, - equalTo( - Map.ofEntries( - entry("ip", "23.53.116.0"), - entry("organization_name", "Akamai Technologies, Inc."), - entry("asn", 32787L), - entry("network", "23.53.116.0/24"), - entry("domain", "akamai.com"), - entry("type", "hosting"), - entry("country_iso_code", "US") - ) - ) - ); - } + public void testAsnStandard() { + assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); + String databaseName = "asn_sample.mmdb"; + String ip = "23.53.116.0"; + assertExpectedLookupResults( + databaseName, + ip, + new IpinfoIpDataLookups.Asn(Database.AsnV2.properties()), + Map.ofEntries( + entry("ip", ip), + entry("organization_name", "Akamai Technologies, Inc."), + entry("asn", 32787L), + entry("network", "23.53.116.0/24"), + entry("domain", "akamai.com"), + entry("type", "hosting"), + entry("country_iso_code", "US") + ) + ); } public void testAsnInvariants() { @@ -212,62 +172,42 @@ public void testAsnInvariants() { } } - public void testCountry() throws IOException { + public void testCountryFree() { assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); - Path configDir = tmpDir; - copyDatabase("ipinfo/ip_country_sample.mmdb", configDir.resolve("ip_country_sample.mmdb")); - - GeoIpCache cache = new GeoIpCache(1000); // real cache to test purging of entries upon a reload - ConfigDatabases configDatabases = new ConfigDatabases(configDir, cache); - configDatabases.initialize(resourceWatcherService); - - // this is the 
'free' Country database (sample) - try (DatabaseReaderLazyLoader loader = configDatabases.getDatabase("ip_country_sample.mmdb")) { - IpDataLookup lookup = new IpinfoIpDataLookups.Country(Database.CountryV2.properties()); - Map data = lookup.getData(loader, "4.221.143.168"); - assertThat( - data, - equalTo( - Map.ofEntries( - entry("ip", "4.221.143.168"), - entry("country_name", "South Africa"), - entry("country_iso_code", "ZA"), - entry("continent_name", "Africa"), - entry("continent_code", "AF") - ) - ) - ); - } + String databaseName = "ip_country_sample.mmdb"; + String ip = "4.221.143.168"; + assertExpectedLookupResults( + databaseName, + ip, + new IpinfoIpDataLookups.Country(Database.CountryV2.properties()), + Map.ofEntries( + entry("ip", ip), + entry("country_name", "South Africa"), + entry("country_iso_code", "ZA"), + entry("continent_name", "Africa"), + entry("continent_code", "AF") + ) + ); } - public void testGeolocation() throws IOException { + public void testGeolocationStandard() { assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); - Path configDir = tmpDir; - copyDatabase("ipinfo/ip_geolocation_sample.mmdb", configDir.resolve("ip_geolocation_sample.mmdb")); - - GeoIpCache cache = new GeoIpCache(1000); // real cache to test purging of entries upon a reload - ConfigDatabases configDatabases = new ConfigDatabases(configDir, cache); - configDatabases.initialize(resourceWatcherService); - - // this is the non-free or 'standard' Geolocation database (sample) - try (DatabaseReaderLazyLoader loader = configDatabases.getDatabase("ip_geolocation_sample.mmdb")) { - IpDataLookup lookup = new IpinfoIpDataLookups.Geolocation(Database.CityV2.properties()); - Map data = lookup.getData(loader, "2.124.90.182"); - assertThat( - data, - equalTo( - Map.ofEntries( - entry("ip", "2.124.90.182"), - entry("country_iso_code", "GB"), - entry("region_name", "England"), - entry("city_name", "London"), - entry("timezone", "Europe/London"), - 
entry("postal_code", "E1W"), - entry("location", Map.of("lat", 51.50853, "lon", -0.12574)) - ) - ) - ); - } + String databaseName = "ip_geolocation_sample.mmdb"; + String ip = "2.124.90.182"; + assertExpectedLookupResults( + databaseName, + ip, + new IpinfoIpDataLookups.Geolocation(Database.CityV2.properties()), + Map.ofEntries( + entry("ip", ip), + entry("country_iso_code", "GB"), + entry("region_name", "England"), + entry("city_name", "London"), + entry("timezone", "Europe/London"), + entry("postal_code", "E1W"), + entry("location", Map.of("lat", 51.50853, "lon", -0.12574)) + ) + ); } public void testGeolocationInvariants() { @@ -308,53 +248,43 @@ public void testGeolocationInvariants() { } } - public void testPrivacyDetection() throws IOException { + public void testPrivacyDetectionStandard() { assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); - Path configDir = tmpDir; - copyDatabase("ipinfo/privacy_detection_sample.mmdb", configDir.resolve("privacy_detection_sample.mmdb")); - - GeoIpCache cache = new GeoIpCache(1000); // real cache to test purging of entries upon a reload - ConfigDatabases configDatabases = new ConfigDatabases(configDir, cache); - configDatabases.initialize(resourceWatcherService); - - // testing the first row in the sample database - try (DatabaseReaderLazyLoader loader = configDatabases.getDatabase("privacy_detection_sample.mmdb")) { - IpDataLookup lookup = new IpinfoIpDataLookups.PrivacyDetection(Database.PrivacyDetection.properties()); - Map data = lookup.getData(loader, "1.53.59.33"); - assertThat( - data, - equalTo( - Map.ofEntries( - entry("ip", "1.53.59.33"), - entry("hosting", false), - entry("proxy", false), - entry("relay", false), - entry("tor", false), - entry("vpn", true) - ) - ) - ); - } + String databaseName = "privacy_detection_sample.mmdb"; + String ip = "1.53.59.33"; + assertExpectedLookupResults( + databaseName, + ip, + new 
IpinfoIpDataLookups.PrivacyDetection(Database.PrivacyDetection.properties()), + Map.ofEntries( + entry("ip", ip), + entry("hosting", false), + entry("proxy", false), + entry("relay", false), + entry("tor", false), + entry("vpn", true) + ) + ); + } - // testing a row with a non-empty service in the sample database - try (DatabaseReaderLazyLoader loader = configDatabases.getDatabase("privacy_detection_sample.mmdb")) { - IpDataLookup lookup = new IpinfoIpDataLookups.PrivacyDetection(Database.PrivacyDetection.properties()); - Map data = lookup.getData(loader, "216.131.74.65"); - assertThat( - data, - equalTo( - Map.ofEntries( - entry("ip", "216.131.74.65"), - entry("hosting", true), - entry("proxy", false), - entry("service", "FastVPN"), - entry("relay", false), - entry("tor", false), - entry("vpn", true) - ) - ) - ); - } + public void testPrivacyDetectionStandardNonEmptyService() { + assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); + String databaseName = "privacy_detection_sample.mmdb"; + String ip = "216.131.74.65"; + assertExpectedLookupResults( + databaseName, + ip, + new IpinfoIpDataLookups.PrivacyDetection(Database.PrivacyDetection.properties()), + Map.ofEntries( + entry("ip", ip), + entry("hosting", true), + entry("proxy", false), + entry("service", "FastVPN"), + entry("relay", false), + entry("tor", false), + entry("vpn", true) + ) + ); } public void testPrivacyDetectionInvariants() { @@ -403,4 +333,29 @@ private static void assertDatabaseInvariants(final Path databasePath, final BiCo private static File pathToFile(Path databasePath) { return databasePath.toFile(); } + + private void assertExpectedLookupResults(String databaseName, String ip, IpDataLookup lookup, Map expected) { + try (DatabaseReaderLazyLoader loader = loader(databaseName)) { + Map actual = lookup.getData(loader, ip); + assertThat( + "The set of keys in the result are not the same as the set of expected keys", + actual.keySet(), + 
containsInAnyOrder(expected.keySet().toArray(new String[0])) + ); + for (Map.Entry entry : expected.entrySet()) { + assertThat("Unexpected value for key [" + entry.getKey() + "]", actual.get(entry.getKey()), equalTo(entry.getValue())); + } + } catch (AssertionError e) { + fail(e, "Assert failed for database [%s] with address [%s]", databaseName, ip); + } catch (Exception e) { + fail(e, "Exception for database [%s] with address [%s]", databaseName, ip); + } + } + + private DatabaseReaderLazyLoader loader(final String databaseName) { + Path path = tmpDir.resolve(databaseName); + copyDatabase("ipinfo/" + databaseName, path); // the ipinfo databases are prefixed on the test classpath + final GeoIpCache cache = new GeoIpCache(1000); + return new DatabaseReaderLazyLoader(cache, path, null); + } } diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MMDBUtilTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MMDBUtilTests.java index 46a34c2cdad56..083e9b5bc32da 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MMDBUtilTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MMDBUtilTests.java @@ -83,39 +83,4 @@ public void testIsGzip() throws IOException { assertThat(MMDBUtil.isGzip(database), is(false)); assertThat(MMDBUtil.isGzip(gzipDatabase), is(true)); } - - public void testDatabaseTypeParsing() throws IOException { - // this test is a little bit overloaded -- it's testing that we're getting the expected sorts of - // database_type strings from these files, *and* it's also testing that we dispatch on those strings - // correctly and associated those files with the correct high-level Elasticsearch Database type. 
- // down the road it would probably make sense to split these out and find a better home for some of the - // logic, but for now it's probably more valuable to have the test *somewhere* than to get especially - // pedantic about where precisely it should be. - - copyDatabase("GeoLite2-City-Test.mmdb", tmpDir); - copyDatabase("GeoLite2-Country-Test.mmdb", tmpDir); - copyDatabase("GeoLite2-ASN-Test.mmdb", tmpDir); - copyDatabase("GeoIP2-Anonymous-IP-Test.mmdb", tmpDir); - copyDatabase("GeoIP2-City-Test.mmdb", tmpDir); - copyDatabase("GeoIP2-Country-Test.mmdb", tmpDir); - copyDatabase("GeoIP2-Connection-Type-Test.mmdb", tmpDir); - copyDatabase("GeoIP2-Domain-Test.mmdb", tmpDir); - copyDatabase("GeoIP2-Enterprise-Test.mmdb", tmpDir); - copyDatabase("GeoIP2-ISP-Test.mmdb", tmpDir); - - assertThat(parseDatabaseFromType("GeoLite2-City-Test.mmdb"), is(Database.City)); - assertThat(parseDatabaseFromType("GeoLite2-Country-Test.mmdb"), is(Database.Country)); - assertThat(parseDatabaseFromType("GeoLite2-ASN-Test.mmdb"), is(Database.Asn)); - assertThat(parseDatabaseFromType("GeoIP2-Anonymous-IP-Test.mmdb"), is(Database.AnonymousIp)); - assertThat(parseDatabaseFromType("GeoIP2-City-Test.mmdb"), is(Database.City)); - assertThat(parseDatabaseFromType("GeoIP2-Country-Test.mmdb"), is(Database.Country)); - assertThat(parseDatabaseFromType("GeoIP2-Connection-Type-Test.mmdb"), is(Database.ConnectionType)); - assertThat(parseDatabaseFromType("GeoIP2-Domain-Test.mmdb"), is(Database.Domain)); - assertThat(parseDatabaseFromType("GeoIP2-Enterprise-Test.mmdb"), is(Database.Enterprise)); - assertThat(parseDatabaseFromType("GeoIP2-ISP-Test.mmdb"), is(Database.Isp)); - } - - private Database parseDatabaseFromType(String databaseFile) throws IOException { - return IpDataLookupFactories.getDatabase(MMDBUtil.getDatabaseType(tmpDir.resolve(databaseFile))); - } } diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookupsTests.java 
b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookupsTests.java index aca6b3564abb3..57ee2191a590d 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookupsTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookupsTests.java @@ -24,6 +24,7 @@ import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; public class MaxmindIpDataLookupsTests extends ESTestCase { @@ -276,6 +277,41 @@ public void testIsp() { ); } + public void testDatabaseTypeParsing() throws IOException { + // this test is a little bit overloaded -- it's testing that we're getting the expected sorts of + // database_type strings from these files, *and* it's also testing that we dispatch on those strings + // correctly and associated those files with the correct high-level Elasticsearch Database type. + // down the road it would probably make sense to split these out and find a better home for some of the + // logic, but for now it's probably more valuable to have the test *somewhere* than to get especially + // pedantic about where precisely it should be. 
+ + copyDatabase("GeoLite2-City-Test.mmdb", tmpDir); + copyDatabase("GeoLite2-Country-Test.mmdb", tmpDir); + copyDatabase("GeoLite2-ASN-Test.mmdb", tmpDir); + copyDatabase("GeoIP2-Anonymous-IP-Test.mmdb", tmpDir); + copyDatabase("GeoIP2-City-Test.mmdb", tmpDir); + copyDatabase("GeoIP2-Country-Test.mmdb", tmpDir); + copyDatabase("GeoIP2-Connection-Type-Test.mmdb", tmpDir); + copyDatabase("GeoIP2-Domain-Test.mmdb", tmpDir); + copyDatabase("GeoIP2-Enterprise-Test.mmdb", tmpDir); + copyDatabase("GeoIP2-ISP-Test.mmdb", tmpDir); + + assertThat(parseDatabaseFromType("GeoLite2-City-Test.mmdb"), is(Database.City)); + assertThat(parseDatabaseFromType("GeoLite2-Country-Test.mmdb"), is(Database.Country)); + assertThat(parseDatabaseFromType("GeoLite2-ASN-Test.mmdb"), is(Database.Asn)); + assertThat(parseDatabaseFromType("GeoIP2-Anonymous-IP-Test.mmdb"), is(Database.AnonymousIp)); + assertThat(parseDatabaseFromType("GeoIP2-City-Test.mmdb"), is(Database.City)); + assertThat(parseDatabaseFromType("GeoIP2-Country-Test.mmdb"), is(Database.Country)); + assertThat(parseDatabaseFromType("GeoIP2-Connection-Type-Test.mmdb"), is(Database.ConnectionType)); + assertThat(parseDatabaseFromType("GeoIP2-Domain-Test.mmdb"), is(Database.Domain)); + assertThat(parseDatabaseFromType("GeoIP2-Enterprise-Test.mmdb"), is(Database.Enterprise)); + assertThat(parseDatabaseFromType("GeoIP2-ISP-Test.mmdb"), is(Database.Isp)); + } + + private Database parseDatabaseFromType(String databaseFile) throws IOException { + return IpDataLookupFactories.getDatabase(MMDBUtil.getDatabaseType(tmpDir.resolve(databaseFile))); + } + private void assertExpectedLookupResults(String databaseName, String ip, IpDataLookup lookup, Map expected) { try (DatabaseReaderLazyLoader loader = loader(databaseName)) { Map actual = lookup.getData(loader, ip); From edcabb80b788c0bf91b7edbaf4c79deb8b7b8553 Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Sat, 12 Oct 2024 18:14:15 
+0200 Subject: [PATCH 027/449] Introduce `index.mapping.source.mode` setting to override `_source.mode` (#114433) * featur : introduce index.mapping.source.mode setting Introduce a new `index.mapper.source.mode` setting which will be used to override the mapping level `_source.mode`. For now the mapping level setting will stay and be deprecated later with another PR. The setting takes precedence always precedence. When not defined the index mode is used and can be overridden by the _source.mode mapping level definition. --- .../common/settings/IndexScopedSettings.java | 2 + .../elasticsearch/index/IndexSettings.java | 7 + .../index/mapper/SourceFieldMapper.java | 114 ++- .../TransportResumeFollowActionTests.java | 2 + .../test/40_source_mode_setting.yml | 847 ++++++++++++++++++ 5 files changed, 950 insertions(+), 22 deletions(-) create mode 100644 x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml diff --git a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index ad3d7d7f1c2ec..884ce38fba391 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -35,6 +35,7 @@ import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.IgnoredSourceFieldMapper; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.store.FsDirectoryFactory; import org.elasticsearch.index.store.Store; @@ -186,6 +187,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings { FieldMapper.SYNTHETIC_SOURCE_KEEP_INDEX_SETTING, IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_WRITE_SETTING, 
IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING, + SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING, // validate that built-in similarities don't get redefined Setting.groupSetting("index.similarity.", (s) -> { diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java index e82f9eff7d5e0..f3f8ce4b8e7e4 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -28,6 +28,7 @@ import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.mapper.IgnoredSourceFieldMapper; import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.ingest.IngestService; import org.elasticsearch.node.Node; @@ -820,6 +821,7 @@ private void setRetentionLeaseMillis(final TimeValue retentionLease) { private volatile long mappingDimensionFieldsLimit; private volatile boolean skipIgnoredSourceWrite; private volatile boolean skipIgnoredSourceRead; + private final SourceFieldMapper.Mode indexMappingSourceMode; /** * The maximum number of refresh listeners allows on this shard. 
@@ -980,6 +982,7 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti es87TSDBCodecEnabled = scopedSettings.get(TIME_SERIES_ES87TSDB_CODEC_ENABLED_SETTING); skipIgnoredSourceWrite = scopedSettings.get(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_WRITE_SETTING); skipIgnoredSourceRead = scopedSettings.get(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING); + indexMappingSourceMode = scopedSettings.get(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING); scopedSettings.addSettingsUpdateConsumer( MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING, @@ -1659,6 +1662,10 @@ private void setSkipIgnoredSourceRead(boolean value) { this.skipIgnoredSourceRead = value; } + public SourceFieldMapper.Mode getIndexMappingSourceMode() { + return indexMappingSourceMode; + } + /** * The bounds for {@code @timestamp} on this index or * {@code null} if there are no bounds. diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index 0f4549c679d42..f9b9de97715ed 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -18,11 +18,13 @@ import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.core.Nullable; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.SearchExecutionContext; @@ -62,8 +64,16 @@ public class SourceFieldMapper extends MetadataFieldMapper 
{ public static final String LOSSY_PARAMETERS_ALLOWED_SETTING_NAME = "index.lossy.source-mapping-parameters"; + public static final Setting INDEX_MAPPER_SOURCE_MODE_SETTING = Setting.enumSetting(SourceFieldMapper.Mode.class, settings -> { + final IndexMode indexMode = IndexSettings.MODE.get(settings); + return switch (indexMode) { + case IndexMode.LOGSDB, IndexMode.TIME_SERIES -> Mode.SYNTHETIC.name(); + default -> Mode.STORED.name(); + }; + }, "index.mapping.source.mode", value -> {}, Setting.Property.Final, Setting.Property.IndexScope); + /** The source mode */ - private enum Mode { + public enum Mode { DISABLED, STORED, SYNTHETIC @@ -96,6 +106,15 @@ private enum Mode { true ); + private static final SourceFieldMapper TSDB_DEFAULT_STORED = new SourceFieldMapper( + Mode.STORED, + Explicit.IMPLICIT_TRUE, + Strings.EMPTY_ARRAY, + Strings.EMPTY_ARRAY, + IndexMode.TIME_SERIES, + true + ); + private static final SourceFieldMapper TSDB_DEFAULT_NO_RECOVERY_SOURCE = new SourceFieldMapper( Mode.SYNTHETIC, Explicit.IMPLICIT_TRUE, @@ -105,6 +124,15 @@ private enum Mode { false ); + private static final SourceFieldMapper TSDB_DEFAULT_NO_RECOVERY_SOURCE_STORED = new SourceFieldMapper( + Mode.STORED, + Explicit.IMPLICIT_TRUE, + Strings.EMPTY_ARRAY, + Strings.EMPTY_ARRAY, + IndexMode.TIME_SERIES, + false + ); + private static final SourceFieldMapper LOGSDB_DEFAULT = new SourceFieldMapper( Mode.SYNTHETIC, Explicit.IMPLICIT_TRUE, @@ -114,6 +142,15 @@ private enum Mode { true ); + private static final SourceFieldMapper LOGSDB_DEFAULT_STORED = new SourceFieldMapper( + Mode.STORED, + Explicit.IMPLICIT_TRUE, + Strings.EMPTY_ARRAY, + Strings.EMPTY_ARRAY, + IndexMode.LOGSDB, + true + ); + private static final SourceFieldMapper LOGSDB_DEFAULT_NO_RECOVERY_SOURCE = new SourceFieldMapper( Mode.SYNTHETIC, Explicit.IMPLICIT_TRUE, @@ -123,6 +160,15 @@ private enum Mode { false ); + private static final SourceFieldMapper LOGSDB_DEFAULT_NO_RECOVERY_SOURCE_STORED = new SourceFieldMapper( + 
Mode.STORED, + Explicit.IMPLICIT_TRUE, + Strings.EMPTY_ARRAY, + Strings.EMPTY_ARRAY, + IndexMode.LOGSDB, + false + ); + /* * Synthetic source was added as the default for TSDB in v.8.7. The legacy field mapper below * is used in bwc tests and mixed clusters containing time series indexes created in an earlier version. @@ -197,6 +243,8 @@ public static class Builder extends MetadataFieldMapper.Builder { m -> Arrays.asList(toType(m).excludes) ); + private final Settings settings; + private final IndexMode indexMode; private final boolean supportsNonDefaultParameterValues; @@ -210,6 +258,7 @@ public Builder( boolean enableRecoverySource ) { super(Defaults.NAME); + this.settings = settings; this.indexMode = indexMode; this.supportsNonDefaultParameterValues = supportsCheckForNonDefaultParams == false || settings.getAsBoolean(LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, true); @@ -226,10 +275,10 @@ protected Parameter[] getParameters() { return new Parameter[] { enabled, mode, includes, excludes }; } - private boolean isDefault() { - Mode m = mode.get(); - if (m != null - && (((indexMode != null && indexMode.isSyntheticSourceEnabled() && m == Mode.SYNTHETIC) == false) || m == Mode.DISABLED)) { + private boolean isDefault(final Mode sourceMode) { + if (sourceMode != null + && (((indexMode != null && indexMode.isSyntheticSourceEnabled() && sourceMode == Mode.SYNTHETIC) == false) + || sourceMode == Mode.DISABLED)) { return false; } return enabled.get().value() && includes.getValue().isEmpty() && excludes.getValue().isEmpty(); @@ -242,12 +291,14 @@ public SourceFieldMapper build() { throw new MapperParsingException("Cannot set both [mode] and [enabled] parameters"); } } - if (isDefault()) { - return switch (indexMode) { - case TIME_SERIES -> enableRecoverySource ? TSDB_DEFAULT : TSDB_DEFAULT_NO_RECOVERY_SOURCE; - case LOGSDB -> enableRecoverySource ? LOGSDB_DEFAULT : LOGSDB_DEFAULT_NO_RECOVERY_SOURCE; - default -> enableRecoverySource ? 
DEFAULT : DEFAULT_NO_RECOVERY_SOURCE; - }; + // NOTE: if the `index.mapper.source.mode` exists it takes precedence to determine the source mode for `_source` + // otherwise the mode is determined according to `index.mode` and `_source.mode`. + final Mode sourceMode = INDEX_MAPPER_SOURCE_MODE_SETTING.exists(settings) + ? INDEX_MAPPER_SOURCE_MODE_SETTING.get(settings) + : mode.get(); + if (isDefault(sourceMode)) { + return resolveSourceMode(indexMode, sourceMode, enableRecoverySource); + } if (supportsNonDefaultParameterValues == false) { List disallowed = new ArrayList<>(); @@ -271,8 +322,9 @@ public SourceFieldMapper build() { ); } } + SourceFieldMapper sourceFieldMapper = new SourceFieldMapper( - mode.get(), + sourceMode, enabled.get(), includes.getValue().toArray(Strings.EMPTY_ARRAY), excludes.getValue().toArray(Strings.EMPTY_ARRAY), @@ -287,21 +339,39 @@ public SourceFieldMapper build() { } + private static SourceFieldMapper resolveSourceMode(final IndexMode indexMode, final Mode sourceMode, boolean enableRecoverySource) { + if (indexMode == IndexMode.STANDARD) { + return enableRecoverySource ? DEFAULT : DEFAULT_NO_RECOVERY_SOURCE; + } + final SourceFieldMapper syntheticWithoutRecoverySource = indexMode == IndexMode.TIME_SERIES + ? TSDB_DEFAULT_NO_RECOVERY_SOURCE + : LOGSDB_DEFAULT_NO_RECOVERY_SOURCE; + final SourceFieldMapper syntheticWithRecoverySource = indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT : LOGSDB_DEFAULT; + final SourceFieldMapper storedWithoutRecoverySource = indexMode == IndexMode.TIME_SERIES + ? TSDB_DEFAULT_NO_RECOVERY_SOURCE_STORED + : LOGSDB_DEFAULT_NO_RECOVERY_SOURCE_STORED; + final SourceFieldMapper storedWithRecoverySource = indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT_STORED : LOGSDB_DEFAULT_STORED; + + return switch (sourceMode) { + case SYNTHETIC -> enableRecoverySource ? syntheticWithRecoverySource : syntheticWithoutRecoverySource; + case STORED -> enableRecoverySource ? 
storedWithRecoverySource : storedWithoutRecoverySource; + case DISABLED -> throw new IllegalArgumentException( + "_source can not be disabled in index using [" + indexMode + "] index mode" + ); + }; + } + public static final TypeParser PARSER = new ConfigurableTypeParser(c -> { - var indexMode = c.getIndexSettings().getMode(); + final IndexMode indexMode = c.getIndexSettings().getMode(); boolean enableRecoverySource = INDICES_RECOVERY_SOURCE_ENABLED_SETTING.get(c.getSettings()); + final Mode settingSourceMode = INDEX_MAPPER_SOURCE_MODE_SETTING.get(c.getSettings()); + if (indexMode.isSyntheticSourceEnabled()) { - if (indexMode == IndexMode.TIME_SERIES) { - if (c.getIndexSettings().getIndexVersionCreated().onOrAfter(IndexVersions.V_8_7_0)) { - return enableRecoverySource ? TSDB_DEFAULT : TSDB_DEFAULT_NO_RECOVERY_SOURCE; - } else { - return enableRecoverySource ? TSDB_LEGACY_DEFAULT : TSDB_LEGACY_DEFAULT_NO_RECOVERY_SOURCE; - } - } else if (indexMode == IndexMode.LOGSDB) { - return enableRecoverySource ? LOGSDB_DEFAULT : LOGSDB_DEFAULT_NO_RECOVERY_SOURCE; + if (indexMode == IndexMode.TIME_SERIES && c.getIndexSettings().getIndexVersionCreated().before(IndexVersions.V_8_7_0)) { + return enableRecoverySource ? TSDB_LEGACY_DEFAULT : TSDB_LEGACY_DEFAULT_NO_RECOVERY_SOURCE; } } - return enableRecoverySource ? 
DEFAULT : DEFAULT_NO_RECOVERY_SOURCE; + return resolveSourceMode(indexMode, settingSourceMode, enableRecoverySource); }, c -> new Builder( c.getIndexSettings().getMode(), diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java index b4be0b33a464e..357e1bca38e8f 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.index.mapper.IgnoredSourceFieldMapper; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.ccr.Ccr; import org.elasticsearch.xpack.ccr.CcrSettings; @@ -334,6 +335,7 @@ public void testDynamicIndexSettingsAreClassified() { replicatedSettings.add(IndexSettings.PREFER_ILM_SETTING); replicatedSettings.add(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING); replicatedSettings.add(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_WRITE_SETTING); + replicatedSettings.add(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING); for (Setting setting : IndexScopedSettings.BUILT_IN_INDEX_SETTINGS) { // removed settings have no effect, they are only there for BWC diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml new file mode 100644 index 0000000000000..33fedce3b59c1 --- /dev/null +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml @@ -0,0 +1,847 @@ +--- +create an index with disabled source mode and standard index 
mode without setting: + - do: + indices.create: + index: test_disabled_standard + body: + settings: + index: + mode: standard + mappings: + _source: + mode: disabled + + - do: + indices.get_mapping: + index: test_disabled_standard + + - match: { test_disabled_standard.mappings._source.mode: disabled } + +--- +create an index with stored source mode and standard index mode without setting: + - do: + indices.create: + index: test_stored_standard + body: + settings: + index: + mode: standard + mappings: + _source: + mode: stored + + - do: + indices.get_mapping: + index: test_stored_standard + + - match: { test_stored_standard.mappings._source.mode: stored } + +--- +create an index with synthetic source mode and standard index mode without setting: + - do: + indices.create: + index: test_synthetic_standard + body: + settings: + index: + mode: standard + mappings: + _source: + mode: synthetic + + - do: + indices.get_mapping: + index: test_synthetic_standard + + - match: { test_synthetic_standard.mappings._source.mode: synthetic } + +--- +create an index with disabled source mode and logsdb index mode without setting: + - do: + catch: bad_request + indices.create: + index: test_disabled_logsdb + body: + settings: + index: + mode: logsdb + mappings: + _source: + mode: disabled + + - match: { error.type: "mapper_parsing_exception" } + - match: { error.reason: "Failed to parse mapping: _source can not be disabled in index using [logsdb] index mode" } + +--- +create an index with stored source mode and logsdb index mode without setting: + - do: + indices.create: + index: test_stored_logsdb + body: + settings: + index: + mode: logsdb + mappings: + _source: + mode: stored + + - do: + indices.get_settings: + index: "test_stored_logsdb" + - match: { test_stored_logsdb.settings.index.mode: logsdb } + + - do: + indices.get_mapping: + index: test_stored_logsdb + + - match: { test_stored_logsdb.mappings._source.mode: stored } + +--- +create an index with synthetic source mode and 
logsdb index mode without setting: + - do: + indices.create: + index: test_synthetic_logsdb + body: + settings: + index: + mode: logsdb + mappings: + _source: + mode: synthetic + + - do: + indices.get_mapping: + index: test_synthetic_logsdb + + - match: { test_synthetic_logsdb.mappings._source.mode: synthetic } + +--- +create an index with disabled source mode and time series index mode without setting: + - do: + catch: bad_request + indices.create: + index: test_disabled_time_series + body: + settings: + index: + mode: time_series + routing_path: [ keyword ] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + _source: + mode: disabled + properties: + keyword: + type: keyword + time_series_dimension: true + + - match: { error.type: "mapper_parsing_exception" } + - match: { error.reason: "Failed to parse mapping: _source can not be disabled in index using [time_series] index mode" } + +--- +create an index with stored source mode and time series index mode without setting: + - do: + indices.create: + index: test_stored_time_series + body: + settings: + index: + mode: time_series + routing_path: [ keyword ] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + _source: + mode: stored + properties: + keyword: + type: keyword + time_series_dimension: true + + - do: + indices.get_settings: + index: "test_stored_time_series" + - match: { test_stored_time_series.settings.index.mode: time_series } + + - do: + indices.get_mapping: + index: test_stored_time_series + + - match: { test_stored_time_series.mappings._source.mode: stored } + + +--- +create an index with synthetic source mode and time series index mode without setting: + - do: + indices.create: + index: test_synthetic_time_series + body: + settings: + index: + mode: time_series + routing_path: [ keyword ] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + _source: + mode: synthetic + 
properties: + keyword: + type: keyword + time_series_dimension: true + + - do: + indices.get_settings: + index: "test_synthetic_time_series" + - match: { test_synthetic_time_series.settings.index.mode: time_series } + + - do: + indices.get_mapping: + index: test_synthetic_time_series + + - match: { test_synthetic_time_series.mappings._source.mode: synthetic } + +--- +create an index with stored source mode: + - do: + indices.create: + index: test_stored_default + body: + mappings: + _source: + mode: stored + + - do: + indices.get_mapping: + index: test_stored_default + + - match: { test_stored_default.mappings._source.mode: stored } + +--- +override stored to synthetic source mode: + - do: + indices.create: + index: test_stored_override + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + _source: + mode: stored + + - do: + indices.get_mapping: + index: test_stored_override + + - match: { test_stored_override.mappings._source.mode: synthetic } + +--- +override stored to disabled source mode: + - do: + indices.create: + index: test_stored_disabled + body: + settings: + index: + mapping.source.mode: disabled + mappings: + _source: + mode: stored + + - do: + indices.get_mapping: + index: test_stored_disabled + + - match: { test_stored_disabled.mappings._source.mode: disabled } + +--- +create an index with disabled source mode: + - do: + indices.create: + index: test_disabled_default + body: + mappings: + _source: + mode: disabled + + - do: + indices.get_mapping: + index: test_disabled_default + + - match: { test_disabled_default.mappings._source.mode: disabled } + +--- +override disabled to synthetic source mode: + - do: + indices.create: + index: test_disabled_synthetic + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + _source: + mode: disabled + + - do: + indices.get_mapping: + index: test_disabled_synthetic + + - match: { test_disabled_synthetic.mappings._source.mode: synthetic } + +--- +override disabled to stored 
source mode: + - do: + indices.create: + index: test_disabled_stored + body: + settings: + index: + mapping.source.mode: stored + mappings: + _source: + mode: disabled + + - do: + indices.get_mapping: + index: test_disabled_stored + + - match: { test_disabled_stored.mappings._source.mode: stored } + +--- +create an index with synthetic source mode: + - do: + indices.create: + index: test_synthetic_default + body: + mappings: + _source: + mode: synthetic + + - do: + indices.get_mapping: + index: test_synthetic_default + + - match: { test_synthetic_default.mappings._source.mode: synthetic } + +--- +override synthetic to stored source mode: + - do: + indices.create: + index: test_synthetic_stored + body: + settings: + index: + mapping.source.mode: stored + mappings: + _source: + mode: synthetic + + - do: + indices.get_mapping: + index: test_synthetic_stored + + - match: { test_synthetic_stored.mappings._source.mode: stored } + +--- +override synthetic to disabled source mode: + - do: + indices.create: + index: test_synthetic_disabled + body: + settings: + index: + mapping.source.mode: disabled + mappings: + _source: + mode: synthetic + + - do: + indices.get_mapping: + index: test_synthetic_disabled + + - match: { test_synthetic_disabled.mappings._source.mode: disabled } + +--- +create an index with unspecified source mode: + - do: + indices.create: + index: test_unset_default + + - do: + indices.get_mapping: + index: test_unset_default + + - match: { test_unset_default.mappings._source.mode: null } + +--- +override unspecified to stored source mode: + - do: + indices.create: + index: test_unset_stored + body: + settings: + index: + mapping.source.mode: stored + + - do: + indices.get_mapping: + index: test_unset_stored + + - match: { test_unset_stored.mappings: { } } + +--- +override unspecified to disabled source mode: + - do: + indices.create: + index: test_unset_disabled + body: + settings: + index: + mapping.source.mode: disabled + + - do: + indices.get_mapping: + 
index: test_unset_disabled + + - match: { test_unset_disabled.mappings: { } } + +--- +override unspecified to synthetic source mode: + - do: + indices.create: + index: test_unset_synthetic + body: + settings: + index: + mapping.source.mode: synthetic + + - do: + indices.get_mapping: + index: test_unset_synthetic + + - match: { test_unset_synthetic.mappings: { } } + +--- +create an index with standard index mode: + - do: + indices.create: + index: test_standard_index_mode + body: + settings: + index: + mode: standard + mappings: + _source: + mode: stored + + - do: + indices.get_mapping: + index: test_standard_index_mode + + - match: { test_standard_index_mode.mappings._source.mode: stored } + +--- +create an index with time_series index mode and synthetic source: + - do: + indices.create: + index: test_time_series_index_mode_synthetic + body: + settings: + index: + mode: time_series + mapping.source.mode: synthetic + routing_path: [ keyword ] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + properties: + keyword: + type: keyword + time_series_dimension: true + + - do: + indices.get_settings: + index: "test_time_series_index_mode_synthetic" + - match: { test_time_series_index_mode_synthetic.settings.index.mode: time_series } + + + - do: + indices.get_mapping: + index: test_time_series_index_mode_synthetic + + - match: { test_time_series_index_mode_synthetic.mappings._source.mode: synthetic } + +--- +create an index with logsdb index mode and synthetic source: + - do: + indices.create: + index: test_logsdb_index_mode_synthetic + body: + settings: + index: + mode: logsdb + mapping.source.mode: synthetic + + - do: + indices.get_settings: + index: "test_logsdb_index_mode_synthetic" + - match: { test_logsdb_index_mode_synthetic.settings.index.mode: logsdb } + + - do: + indices.get_mapping: + index: test_logsdb_index_mode_synthetic + + - match: { test_logsdb_index_mode_synthetic.mappings._source.mode: synthetic } + +--- 
+create an index with time_series index mode and stored source: + - do: + indices.create: + index: test_time_series_index_mode_undefined + body: + settings: + index: + mode: time_series + mapping.source.mode: stored + routing_path: [ keyword ] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + properties: + keyword: + type: keyword + time_series_dimension: true + + - do: + indices.get_settings: + index: "test_time_series_index_mode_undefined" + - match: { test_time_series_index_mode_undefined.settings.index.mode: time_series } + + - do: + indices.get_mapping: + index: test_time_series_index_mode_undefined + + - match: { test_time_series_index_mode_undefined.mappings._source.mode: stored } + +--- +create an index with logsdb index mode and stored source: + - do: + indices.create: + index: test_logsdb_index_mode_undefined + body: + settings: + index: + mode: logsdb + mapping.source.mode: stored + + - do: + indices.get_settings: + index: "test_logsdb_index_mode_undefined" + - match: { test_logsdb_index_mode_undefined.settings.index.mode: logsdb } + + - do: + indices.get_mapping: + index: test_logsdb_index_mode_undefined + + - match: { test_logsdb_index_mode_undefined.mappings._source.mode: stored } + +--- +create an index with time_series index mode and disabled source: + - do: + catch: bad_request + indices.create: + index: test_time_series_index_mode + body: + settings: + index: + mode: time_series + mapping.source.mode: disabled + routing_path: [ keyword ] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + properties: + keyword: + type: keyword + time_series_dimension: true + + - match: { error.type: "mapper_parsing_exception" } + - match: { error.reason: "Failed to parse mapping: _source can not be disabled in index using [time_series] index mode" } + +--- +create an index with logsdb index mode and disabled source: + - do: + catch: bad_request + indices.create: + index: 
test_logsdb_index_mode + body: + settings: + index: + mode: logsdb + mapping.source.mode: disabled + + - match: { error.type: "mapper_parsing_exception" } + - match: { error.reason: "Failed to parse mapping: _source can not be disabled in index using [logsdb] index mode" } + +--- +modify final setting after index creation: + - do: + indices.create: + index: test_modify_setting + body: + settings: + index: + mapping.source.mode: stored + + - do: + catch: /.*Can't update non dynamic setting.*/ + indices.put_settings: + index: test_modify_setting + body: + index: + mapping.source.mode: synthetic + +--- +modify source mapping from stored to disabled after index creation: + - do: + indices.create: + index: test_modify_source_mode_stored_disabled + body: + settings: + index: + mapping.source.mode: stored + + - do: + indices.put_mapping: + index: test_modify_source_mode_stored_disabled + body: + _source: + mode: disabled + - is_true: acknowledged + + - do: + indices.get_mapping: + index: test_modify_source_mode_stored_disabled + - match: { test_modify_source_mode_stored_disabled.mappings._source.mode: stored } + +--- +modify source mapping from stored to synthetic after index creation: + - do: + indices.create: + index: test_modify_source_mode_stored_synthetic + body: + settings: + index: + mapping.source.mode: stored + + - do: + indices.put_mapping: + index: test_modify_source_mode_stored_synthetic + body: + _source: + mode: synthetic + - is_true: acknowledged + + - do: + indices.get_mapping: + index: test_modify_source_mode_stored_synthetic + - match: { test_modify_source_mode_stored_synthetic.mappings._source.mode: stored } + +--- +modify source mapping from disabled to stored after index creation: + - do: + indices.create: + index: test_modify_source_mode_disabled_stored + body: + settings: + index: + mapping.source.mode: disabled + + - do: + indices.put_mapping: + index: test_modify_source_mode_disabled_stored + body: + _source: + mode: stored + - is_true: 
acknowledged + + - do: + indices.get_mapping: + index: test_modify_source_mode_disabled_stored + - match: { test_modify_source_mode_disabled_stored.mappings._source.mode: disabled } + +--- +modify source mapping from disabled to synthetic after index creation: + - do: + indices.create: + index: test_modify_source_mode_disabled_synthetic + body: + settings: + index: + mapping.source.mode: disabled + + - do: + indices.put_mapping: + index: test_modify_source_mode_disabled_synthetic + body: + _source: + mode: synthetic + - is_true: acknowledged + + - do: + indices.get_mapping: + index: test_modify_source_mode_disabled_synthetic + - match: { test_modify_source_mode_disabled_synthetic.mappings._source.mode: disabled } + +--- +modify source mapping from synthetic to stored after index creation: + - do: + indices.create: + index: test_modify_source_mode_synthetic_stored + body: + settings: + index: + mapping.source.mode: synthetic + + - do: + indices.put_mapping: + index: test_modify_source_mode_synthetic_stored + body: + _source: + mode: stored + - is_true: acknowledged + + - do: + indices.get_mapping: + index: test_modify_source_mode_synthetic_stored + - match: { test_modify_source_mode_synthetic_stored.mappings._source.mode: synthetic } + +--- +modify source mapping from synthetic to disabled after index creation: + - do: + indices.create: + index: test_modify_source_mode_synthetic_disabled + body: + settings: + index: + mapping.source.mode: synthetic + + - do: + indices.put_mapping: + index: test_modify_source_mode_synthetic_disabled + body: + _source: + mode: disabled + - is_true: acknowledged + + - do: + indices.get_mapping: + index: test_modify_source_mode_synthetic_disabled + - match: { test_modify_source_mode_synthetic_disabled.mappings._source.mode: synthetic } + +--- +modify logsdb index source mode to disabled after index creation: + - do: + indices.create: + index: test_modify_logsdb_disabled_after_creation + body: + settings: + index: + mode: logsdb + + - 
do: + catch: bad_request + indices.put_mapping: + index: test_modify_logsdb_disabled_after_creation + body: + _source: + mode: disabled + - match: { error.type: "mapper_parsing_exception" } + - match: { error.reason: "Failed to parse mapping: _source can not be disabled in index using [logsdb] index mode" } + +--- +modify logsdb index source mode to stored after index creation: + - do: + indices.create: + index: test_modify_logsdb_stored_after_creation + body: + settings: + index: + mode: logsdb + + - do: + catch: bad_request + indices.put_mapping: + index: test_modify_logsdb_stored_after_creation + body: + _source: + mode: stored + - match: { error.type: "illegal_argument_exception" } + - match: { error.reason: "Mapper for [_source] conflicts with existing mapper:\n\tCannot update parameter [mode] from [synthetic] to [stored]" } + +--- +modify time_series index source mode to disabled after index creation: + - do: + indices.create: + index: test_modify_time_series_disabled_after_creation + body: + settings: + index: + mode: time_series + routing_path: [ keyword ] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + properties: + keyword: + type: keyword + time_series_dimension: true + + - do: + catch: bad_request + indices.put_mapping: + index: test_modify_time_series_disabled_after_creation + body: + _source: + mode: disabled + - match: { error.type: "mapper_parsing_exception" } + - match: { error.reason: "Failed to parse mapping: _source can not be disabled in index using [time_series] index mode" } + +--- +modify time_series index source mode to stored after index creation: + - do: + indices.create: + index: test_modify_time_series_stored_after_creation + body: + settings: + index: + mode: time_series + routing_path: [ keyword ] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + properties: + keyword: + type: keyword + time_series_dimension: true + + - do: + catch: 
bad_request + indices.put_mapping: + index: test_modify_time_series_stored_after_creation + body: + _source: + mode: stored + - match: { error.type: "illegal_argument_exception" } + - match: { error.reason: "Mapper for [_source] conflicts with existing mapper:\n\tCannot update parameter [mode] from [synthetic] to [stored]" } From e833e7b6c4e81284ef4e5bcb29cea9d9dad6b963 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Sat, 12 Oct 2024 18:55:27 +0200 Subject: [PATCH 028/449] Add feature flag for subobjects auto (#114616) --- .../rest/yaml/CcsCommonYamlTestSuiteIT.java | 3 +- .../yaml/RcsCcsCommonYamlTestSuiteIT.java | 1 + ...okeTestMultiNodeClientYamlTestSuiteIT.java | 1 + .../test/rest/ClientYamlTestSuiteIT.java | 1 + .../index/mapper/ObjectMapper.java | 4 +- .../index/mapper/DynamicTemplatesTests.java | 8 +++ .../index/mapper/ObjectMapperTests.java | 50 +++++++++++-------- .../test/cluster/FeatureFlag.java | 1 + .../xpack/test/rest/XPackRestIT.java | 1 + ...CoreWithSecurityClientYamlTestSuiteIT.java | 1 + x-pack/qa/runtime-fields/build.gradle | 1 + 11 files changed, 49 insertions(+), 23 deletions(-) diff --git a/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java b/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java index 8ce1bfdc61f6b..3a24427df24a3 100644 --- a/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java +++ b/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java @@ -89,7 +89,8 @@ public class CcsCommonYamlTestSuiteIT extends ESClientYamlSuiteTestCase { .setting("xpack.security.enabled", "false") // geohex_grid requires gold license .setting("xpack.license.self_generated.type", "trial") - .feature(FeatureFlag.TIME_SERIES_MODE); + .feature(FeatureFlag.TIME_SERIES_MODE) + .feature(FeatureFlag.SUB_OBJECTS_AUTO_ENABLED); private static 
ElasticsearchCluster remoteCluster = ElasticsearchCluster.local() .name(REMOTE_CLUSTER_NAME) diff --git a/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/RcsCcsCommonYamlTestSuiteIT.java b/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/RcsCcsCommonYamlTestSuiteIT.java index acdd540ca7b9d..5ada1e941266a 100644 --- a/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/RcsCcsCommonYamlTestSuiteIT.java +++ b/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/RcsCcsCommonYamlTestSuiteIT.java @@ -91,6 +91,7 @@ public class RcsCcsCommonYamlTestSuiteIT extends ESClientYamlSuiteTestCase { .setting("xpack.security.remote_cluster_server.ssl.enabled", "false") .setting("xpack.security.remote_cluster_client.ssl.enabled", "false") .feature(FeatureFlag.TIME_SERIES_MODE) + .feature(FeatureFlag.SUB_OBJECTS_AUTO_ENABLED) .user("test_admin", "x-pack-test-password"); private static ElasticsearchCluster fulfillingCluster = ElasticsearchCluster.local() diff --git a/qa/smoke-test-multinode/src/yamlRestTest/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java b/qa/smoke-test-multinode/src/yamlRestTest/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java index c68d27b883c53..e53c0564be297 100644 --- a/qa/smoke-test-multinode/src/yamlRestTest/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java +++ b/qa/smoke-test-multinode/src/yamlRestTest/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java @@ -35,6 +35,7 @@ public class SmokeTestMultiNodeClientYamlTestSuiteIT extends ESClientYamlSuiteTe // The first node does not have the ingest role so we're sure ingest requests are forwarded: .node(0, n -> n.setting("node.roles", "[master,data,ml,remote_cluster_client,transform]")) .feature(FeatureFlag.TIME_SERIES_MODE) + .feature(FeatureFlag.SUB_OBJECTS_AUTO_ENABLED) .build(); public 
SmokeTestMultiNodeClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { diff --git a/rest-api-spec/src/yamlRestTest/java/org/elasticsearch/test/rest/ClientYamlTestSuiteIT.java b/rest-api-spec/src/yamlRestTest/java/org/elasticsearch/test/rest/ClientYamlTestSuiteIT.java index 2b20e35019424..084e212a913b2 100644 --- a/rest-api-spec/src/yamlRestTest/java/org/elasticsearch/test/rest/ClientYamlTestSuiteIT.java +++ b/rest-api-spec/src/yamlRestTest/java/org/elasticsearch/test/rest/ClientYamlTestSuiteIT.java @@ -36,6 +36,7 @@ public class ClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { .module("health-shards-availability") .module("data-streams") .feature(FeatureFlag.TIME_SERIES_MODE) + .feature(FeatureFlag.SUB_OBJECTS_AUTO_ENABLED) .build(); public ClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index 5e63fee8c5adc..70c4a3ac213a2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.Explicit; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.util.FeatureFlag; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Nullable; import org.elasticsearch.features.NodeFeature; @@ -41,6 +42,7 @@ public class ObjectMapper extends Mapper { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(ObjectMapper.class); + public static final FeatureFlag SUB_OBJECTS_AUTO_FEATURE_FLAG = new FeatureFlag("sub_objects_auto"); public static final String CONTENT_TYPE = "object"; static final String STORE_ARRAY_SOURCE_PARAM = "store_array_source"; @@ -74,7 
+76,7 @@ public static Subobjects from(Object node) { if (value.equalsIgnoreCase("false")) { return DISABLED; } - if (value.equalsIgnoreCase("auto")) { + if (SUB_OBJECTS_AUTO_FEATURE_FLAG.isEnabled() && value.equalsIgnoreCase("auto")) { return AUTO; } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java index 7f430cf676809..7e9a196faaa26 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java @@ -1377,6 +1377,7 @@ public void testSubobjectsFalseWithInnerNestedFromDynamicTemplate() { } public void testSubobjectsAutoFlatPaths() throws IOException { + assumeTrue("only test when feature flag for subobjects auto is enabled", ObjectMapper.SUB_OBJECTS_AUTO_FEATURE_FLAG.isEnabled()); MapperService mapperService = createDynamicTemplateAutoSubobjects(); ParsedDocument doc = mapperService.documentMapper().parse(source(b -> { b.field("foo.metric.count", 10); @@ -1389,6 +1390,7 @@ public void testSubobjectsAutoFlatPaths() throws IOException { } public void testSubobjectsAutoStructuredPaths() throws IOException { + assumeTrue("only test when feature flag for subobjects auto is enabled", ObjectMapper.SUB_OBJECTS_AUTO_FEATURE_FLAG.isEnabled()); MapperService mapperService = createDynamicTemplateAutoSubobjects(); ParsedDocument doc = mapperService.documentMapper().parse(source(b -> { b.startObject("foo"); @@ -1411,6 +1413,7 @@ public void testSubobjectsAutoStructuredPaths() throws IOException { } public void testSubobjectsAutoArrayOfObjects() throws IOException { + assumeTrue("only test when feature flag for subobjects auto is enabled", ObjectMapper.SUB_OBJECTS_AUTO_FEATURE_FLAG.isEnabled()); MapperService mapperService = createDynamicTemplateAutoSubobjects(); ParsedDocument doc = mapperService.documentMapper().parse(source(b -> { 
b.startObject("foo"); @@ -1444,6 +1447,7 @@ public void testSubobjectsAutoArrayOfObjects() throws IOException { } public void testSubobjectAutoDynamicNested() throws IOException { + assumeTrue("only test when feature flag for subobjects auto is enabled", ObjectMapper.SUB_OBJECTS_AUTO_FEATURE_FLAG.isEnabled()); DocumentMapper mapper = createDocumentMapper(topMapping(b -> { b.startArray("dynamic_templates"); { @@ -1482,6 +1486,7 @@ public void testSubobjectAutoDynamicNested() throws IOException { } public void testRootSubobjectAutoDynamicNested() throws IOException { + assumeTrue("only test when feature flag for subobjects auto is enabled", ObjectMapper.SUB_OBJECTS_AUTO_FEATURE_FLAG.isEnabled()); DocumentMapper mapper = createDocumentMapper(topMapping(b -> { b.startArray("dynamic_templates"); { @@ -1515,6 +1520,7 @@ public void testRootSubobjectAutoDynamicNested() throws IOException { } public void testDynamicSubobjectsAutoDynamicFalse() throws Exception { + assumeTrue("only test when feature flag for subobjects auto is enabled", ObjectMapper.SUB_OBJECTS_AUTO_FEATURE_FLAG.isEnabled()); // verify that we read the dynamic value properly from the parent mapper. DocumentParser#dynamicOrDefault splits the field // name where dots are found, but it does that only for the parent prefix e.g. 
metrics.service and not for the leaf suffix time.max DocumentMapper mapper = createDocumentMapper(topMapping(b -> { @@ -1578,6 +1584,7 @@ public void testDynamicSubobjectsAutoDynamicFalse() throws Exception { } public void testSubobjectsAutoWithInnerNestedFromDynamicTemplate() throws IOException { + assumeTrue("only test when feature flag for subobjects auto is enabled", ObjectMapper.SUB_OBJECTS_AUTO_FEATURE_FLAG.isEnabled()); DocumentMapper mapper = createDocumentMapper(topMapping(b -> { b.startArray("dynamic_templates"); { @@ -2045,6 +2052,7 @@ public void testSubobjectsFalseFlattened() throws IOException { } public void testSubobjectsAutoFlattened() throws IOException { + assumeTrue("only test when feature flag for subobjects auto is enabled", ObjectMapper.SUB_OBJECTS_AUTO_FEATURE_FLAG.isEnabled()); String mapping = """ { "_doc": { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java index 64eee39532c31..3b77015fde415 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java @@ -169,27 +169,29 @@ public void testMergeEnabledForIndexTemplates() throws IOException { assertEquals(ObjectMapper.Subobjects.ENABLED, objectMapper.subobjects()); assertTrue(objectMapper.sourceKeepMode().isEmpty()); - // Setting 'enabled' to true is allowed, and updates the mapping. 
- update = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("object") - .field("type", "object") - .field("enabled", true) - .field("subobjects", "auto") - .field(ObjectMapper.STORE_ARRAY_SOURCE_PARAM, true) - .endObject() - .endObject() - .endObject() - ); - mapper = mapperService.merge("type", new CompressedXContent(update), MergeReason.INDEX_TEMPLATE); - - objectMapper = mapper.mappers().objectMappers().get("object"); - assertNotNull(objectMapper); - assertTrue(objectMapper.isEnabled()); - assertEquals(ObjectMapper.Subobjects.AUTO, objectMapper.subobjects()); - assertEquals(Mapper.SourceKeepMode.ARRAYS, objectMapper.sourceKeepMode().orElse(Mapper.SourceKeepMode.NONE)); + if (ObjectMapper.SUB_OBJECTS_AUTO_FEATURE_FLAG.isEnabled()) { + // Setting 'enabled' to true is allowed, and updates the mapping. + update = Strings.toString( + XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("object") + .field("type", "object") + .field("enabled", true) + .field("subobjects", "auto") + .field(ObjectMapper.STORE_ARRAY_SOURCE_PARAM, true) + .endObject() + .endObject() + .endObject() + ); + mapper = mapperService.merge("type", new CompressedXContent(update), MergeReason.INDEX_TEMPLATE); + + objectMapper = mapper.mappers().objectMappers().get("object"); + assertNotNull(objectMapper); + assertTrue(objectMapper.isEnabled()); + assertEquals(ObjectMapper.Subobjects.AUTO, objectMapper.subobjects()); + assertEquals(Mapper.SourceKeepMode.ARRAYS, objectMapper.sourceKeepMode().orElse(Mapper.SourceKeepMode.NONE)); + } } public void testFieldReplacementForIndexTemplates() throws IOException { @@ -503,6 +505,7 @@ public void testSubobjectsCannotBeUpdatedOnRoot() throws IOException { } public void testSubobjectsAuto() throws Exception { + assumeTrue("only test when feature flag for subobjects auto is enabled", ObjectMapper.SUB_OBJECTS_AUTO_FEATURE_FLAG.isEnabled()); MapperService 
mapperService = createMapperService(mapping(b -> { b.startObject("metrics.service"); { @@ -532,6 +535,7 @@ public void testSubobjectsAuto() throws Exception { } public void testSubobjectsAutoWithInnerObject() throws IOException { + assumeTrue("only test when feature flag for subobjects auto is enabled", ObjectMapper.SUB_OBJECTS_AUTO_FEATURE_FLAG.isEnabled()); MapperService mapperService = createMapperService(mapping(b -> { b.startObject("metrics.service"); { @@ -565,6 +569,7 @@ public void testSubobjectsAutoWithInnerObject() throws IOException { } public void testSubobjectsAutoWithInnerNested() throws IOException { + assumeTrue("only test when feature flag for subobjects auto is enabled", ObjectMapper.SUB_OBJECTS_AUTO_FEATURE_FLAG.isEnabled()); MapperService mapperService = createMapperService(mapping(b -> { b.startObject("metrics.service"); { @@ -586,6 +591,7 @@ public void testSubobjectsAutoWithInnerNested() throws IOException { } public void testSubobjectsAutoRoot() throws Exception { + assumeTrue("only test when feature flag for subobjects auto is enabled", ObjectMapper.SUB_OBJECTS_AUTO_FEATURE_FLAG.isEnabled()); MapperService mapperService = createMapperService(mappingWithSubobjects(b -> { b.startObject("metrics.service.time"); b.field("type", "long"); @@ -606,6 +612,7 @@ public void testSubobjectsAutoRoot() throws Exception { } public void testSubobjectsAutoRootWithInnerObject() throws IOException { + assumeTrue("only test when feature flag for subobjects auto is enabled", ObjectMapper.SUB_OBJECTS_AUTO_FEATURE_FLAG.isEnabled()); MapperService mapperService = createMapperService(mappingWithSubobjects(b -> { b.startObject("metrics.service.time"); { @@ -626,6 +633,7 @@ public void testSubobjectsAutoRootWithInnerObject() throws IOException { } public void testSubobjectsAutoRootWithInnerNested() throws IOException { + assumeTrue("only test when feature flag for subobjects auto is enabled", ObjectMapper.SUB_OBJECTS_AUTO_FEATURE_FLAG.isEnabled()); MapperService 
mapperService = createMapperService(mappingWithSubobjects(b -> { b.startObject("metrics.service"); b.field("type", "nested"); diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index aa72d3248812e..ca2300611b4fd 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -18,6 +18,7 @@ public enum FeatureFlag { TIME_SERIES_MODE("es.index_mode_feature_flag_registered=true", Version.fromString("8.0.0"), null), FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null), + SUB_OBJECTS_AUTO_ENABLED("es.sub_objects_auto_feature_flag_enabled=true", Version.fromString("8.16.0"), null), CHUNKING_SETTINGS_ENABLED("es.inference_chunking_settings_feature_flag_enabled=true", Version.fromString("8.16.0"), null), INFERENCE_DEFAULT_ELSER("es.inference_default_elser_feature_flag_enabled=true", Version.fromString("8.16.0"), null); diff --git a/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java b/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java index 556a417fb5e79..988ee93bda6b4 100644 --- a/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java +++ b/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java @@ -43,6 +43,7 @@ public class XPackRestIT extends AbstractXPackRestTest { .setting("xpack.searchable.snapshot.shared_cache.region_size", "256KB") .user("x_pack_rest_user", "x-pack-test-password") .feature(FeatureFlag.TIME_SERIES_MODE) + .feature(FeatureFlag.SUB_OBJECTS_AUTO_ENABLED) .configFile("testnode.pem", Resource.fromClasspath("org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem")) .configFile("testnode.crt", 
Resource.fromClasspath("org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) .configFile("service_tokens", Resource.fromClasspath("service_tokens")) diff --git a/x-pack/qa/core-rest-tests-with-security/src/yamlRestTest/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java b/x-pack/qa/core-rest-tests-with-security/src/yamlRestTest/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java index fe62d4e2d2639..0b40828b8e86c 100644 --- a/x-pack/qa/core-rest-tests-with-security/src/yamlRestTest/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java +++ b/x-pack/qa/core-rest-tests-with-security/src/yamlRestTest/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java @@ -48,6 +48,7 @@ public class CoreWithSecurityClientYamlTestSuiteIT extends ESClientYamlSuiteTest .setting("xpack.security.autoconfiguration.enabled", "false") .user(USER, PASS) .feature(FeatureFlag.TIME_SERIES_MODE) + .feature(FeatureFlag.SUB_OBJECTS_AUTO_ENABLED) .build(); public CoreWithSecurityClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { diff --git a/x-pack/qa/runtime-fields/build.gradle b/x-pack/qa/runtime-fields/build.gradle index 43d6d9463e0d1..986baf867b501 100644 --- a/x-pack/qa/runtime-fields/build.gradle +++ b/x-pack/qa/runtime-fields/build.gradle @@ -44,6 +44,7 @@ subprojects { setting 'xpack.security.enabled', 'false' requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") + requiresFeature 'es.sub_objects_auto_feature_flag_enabled', Version.fromString("8.16.0") } tasks.named("yamlRestTest").configure { From bc0d1d7f3c12f8b3d26cb264ad1d7d6266b43815 Mon Sep 17 00:00:00 2001 From: Nick Tindall Date: Mon, 14 Oct 2024 09:45:46 +1100 Subject: [PATCH 029/449] Avoid throw exception in SyntheticSourceIndexSettingsProvider (#114479) Co-authored-by: Nhat Nguyen --- docs/reference/index-modules.asciidoc | 2 +- 
muted-tests.yml | 6 ------ .../template/SimpleIndexTemplateIT.java | 4 ++-- .../cluster/metadata/IndexMetadata.java | 2 +- .../SyntheticSourceIndexSettingsProvider.java | 18 ++++++++++-------- 5 files changed, 14 insertions(+), 18 deletions(-) diff --git a/docs/reference/index-modules.asciidoc b/docs/reference/index-modules.asciidoc index ed8cf6c1494e4..1c8f1db216b75 100644 --- a/docs/reference/index-modules.asciidoc +++ b/docs/reference/index-modules.asciidoc @@ -122,7 +122,7 @@ preview:[] The number of shards a custom <> value can go to. Defaults to 1 and can only be set at index creation time. This value must be less - than the `index.number_of_shards` unless the `index.number_of_shards` value is also 1. + than the `index.number_of_routing_shards` unless the `index.number_of_routing_shards` value is also 1. See <> for more details about how this setting is used. [[ccr-index-soft-deletes]] diff --git a/muted-tests.yml b/muted-tests.yml index 31f99b0ae632a..d0fc50de31bd1 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -283,12 +283,6 @@ tests: - class: org.elasticsearch.ingest.geoip.DatabaseNodeServiceIT method: testGzippedDatabase issue: https://github.com/elastic/elasticsearch/issues/113752 -- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT - method: test {p0=indices.split/40_routing_partition_size/more than 1} - issue: https://github.com/elastic/elasticsearch/issues/113841 -- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT - method: test {p0=indices.split/40_routing_partition_size/nested} - issue: https://github.com/elastic/elasticsearch/issues/113842 - class: org.elasticsearch.threadpool.SimpleThreadPoolIT method: testThreadPoolMetrics issue: https://github.com/elastic/elasticsearch/issues/108320 diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java index 
3ca3f20917009..0647a24aa39c8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java @@ -881,7 +881,7 @@ public void testPartitionedTemplate() throws Exception { ); assertThat( eBadSettings.getMessage(), - containsString("partition size [6] should be a positive number less than the number of shards [5]") + containsString("partition size [6] should be a positive number less than the number of routing shards [5]") ); // provide an invalid mapping for a partitioned index @@ -913,7 +913,7 @@ public void testPartitionedTemplate() throws Exception { assertThat( eBadIndex.getMessage(), - containsString("partition size [6] should be a positive number less than the number of shards [5]") + containsString("partition size [6] should be a positive number less than the number of routing shards [5]") ); // finally, create a valid index diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java index 9760d84c67c5b..23e8e49aa16db 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java @@ -2265,7 +2265,7 @@ IndexMetadata build(boolean repair) { "routing partition size [" + routingPartitionSize + "] should be a positive number" - + " less than the number of shards [" + + " less than the number of routing shards [" + getRoutingNumShards() + "] for [" + index diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java index 759fa6af98868..6e139cc3ce9e6 100644 --- 
a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java @@ -79,15 +79,17 @@ boolean newIndexHasSyntheticSourceUsage( return false; } - var tmpIndexMetadata = buildIndexMetadataForMapperService(indexName, isTimeSeries, indexTemplateAndCreateRequestSettings); - try (var mapperService = mapperServiceFactory.apply(tmpIndexMetadata)) { - // combinedTemplateMappings can be null when creating system indices - // combinedTemplateMappings can be empty when creating a normal index that doesn't match any template and without mapping. - if (combinedTemplateMappings == null || combinedTemplateMappings.isEmpty()) { - combinedTemplateMappings = List.of(new CompressedXContent("{}")); + try { + var tmpIndexMetadata = buildIndexMetadataForMapperService(indexName, isTimeSeries, indexTemplateAndCreateRequestSettings); + try (var mapperService = mapperServiceFactory.apply(tmpIndexMetadata)) { + // combinedTemplateMappings can be null when creating system indices + // combinedTemplateMappings can be empty when creating a normal index that doesn't match any template and without mapping. + if (combinedTemplateMappings == null || combinedTemplateMappings.isEmpty()) { + combinedTemplateMappings = List.of(new CompressedXContent("{}")); + } + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, combinedTemplateMappings, MapperService.MergeReason.INDEX_TEMPLATE); + return mapperService.documentMapper().sourceMapper().isSynthetic(); } - mapperService.merge(MapperService.SINGLE_MAPPING_NAME, combinedTemplateMappings, MapperService.MergeReason.INDEX_TEMPLATE); - return mapperService.documentMapper().sourceMapper().isSynthetic(); } catch (AssertionError | Exception e) { // In case invalid mappings or setting are provided, then mapper service creation can fail. // In that case it is ok to return false here. 
The index creation will fail anyway later, so need to fallback to stored source. From 5f3595bba9bc8485be58fbc0f39e440cffd87bf5 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Mon, 14 Oct 2024 15:18:25 +1100 Subject: [PATCH 030/449] Add a callback for onConnectionClosed to MockTransportService (#114564) The callback is added to allow inserting additional behaviour such as delay when handling closed connection. --- .../test/transport/MockTransportService.java | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index 57a6d1e09c52d..c4e1c6c7a0681 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -80,6 +80,8 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.LinkedBlockingDeque; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Supplier; @@ -104,6 +106,7 @@ public class MockTransportService extends TransportService { private final Map> openConnections = new HashMap<>(); private final List onStopListeners = new CopyOnWriteArrayList<>(); + private final AtomicReference> onConnectionClosedCallback = new AtomicReference<>(); public static class TestPlugin extends Plugin { @Override @@ -788,6 +791,19 @@ public void openConnection(DiscoveryNode node, ConnectionProfile connectionProfi })); } + public void setOnConnectionClosedCallback(Consumer callback) { + onConnectionClosedCallback.set(callback); + } + + @Override + public void onConnectionClosed(Transport.Connection connection) { + final Consumer callback = onConnectionClosedCallback.get(); + if (callback != null) { + 
callback.accept(connection); + } + super.onConnectionClosed(connection); + } + public void addOnStopListener(Runnable listener) { onStopListeners.add(listener); } From a262eb6dbd6e5e73a911c07b1bcfdb302858c03f Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Mon, 14 Oct 2024 07:31:55 +0200 Subject: [PATCH 031/449] Add ESQL match function (#113374) --- docs/changelog/113374.yaml | 5 + .../esql/functions/description/match.asciidoc | 5 + .../esql/functions/examples/match.asciidoc | 13 + .../functions/kibana/definition/match.json | 85 + .../esql/functions/kibana/docs/match.md | 14 + .../esql/functions/layout/match.asciidoc | 17 + .../esql/functions/parameters/match.asciidoc | 9 + .../esql/functions/signature/match.svg | 1 + .../esql/functions/types/match.asciidoc | 12 + .../esql/core/expression/TypeResolutions.java | 7 +- .../main/resources/match-function.csv-spec | 199 ++ .../src/main/resources/qstr-function.csv-spec | 73 +- ...ringFunctionIT.java => QueryStringIT.java} | 2 +- .../esql/src/main/antlr/EsqlBaseLexer.g4 | 4 +- .../esql/src/main/antlr/EsqlBaseLexer.tokens | 162 +- .../esql/src/main/antlr/EsqlBaseParser.g4 | 8 +- .../esql/src/main/antlr/EsqlBaseParser.tokens | 162 +- .../xpack/esql/action/EsqlCapabilities.java | 5 + .../xpack/esql/analysis/Verifier.java | 119 +- .../function/EsqlFunctionRegistry.java | 6 +- .../function/fulltext/FullTextFunction.java | 77 +- .../expression/function/fulltext/Match.java | 116 + ...ryStringFunction.java => QueryString.java} | 48 +- .../physical/local/PushFiltersToSource.java | 7 +- .../xpack/esql/parser/EsqlBaseLexer.interp | 9 +- .../xpack/esql/parser/EsqlBaseLexer.java | 2033 ++++++++--------- .../xpack/esql/parser/EsqlBaseParser.interp | 7 +- .../xpack/esql/parser/EsqlBaseParser.java | 1858 ++++++++------- .../parser/EsqlBaseParserBaseListener.java | 12 + .../parser/EsqlBaseParserBaseVisitor.java | 7 + .../esql/parser/EsqlBaseParserListener.java | 10 + 
.../esql/parser/EsqlBaseParserVisitor.java | 6 + .../xpack/esql/parser/EsqlParser.java | 6 +- .../xpack/esql/parser/ExpressionBuilder.java | 25 +- .../planner/EsqlExpressionTranslators.java | 30 +- .../elasticsearch/xpack/esql/CsvTests.java | 4 + .../xpack/esql/analysis/VerifierTests.java | 167 +- .../function/fulltext/MatchTests.java | 107 + ...nctionTests.java => QueryStringTests.java} | 6 +- .../LocalPhysicalPlanOptimizerTests.java | 228 +- .../optimizer/LogicalPlanOptimizerTests.java | 37 + 41 files changed, 3435 insertions(+), 2273 deletions(-) create mode 100644 docs/changelog/113374.yaml create mode 100644 docs/reference/esql/functions/description/match.asciidoc create mode 100644 docs/reference/esql/functions/examples/match.asciidoc create mode 100644 docs/reference/esql/functions/kibana/definition/match.json create mode 100644 docs/reference/esql/functions/kibana/docs/match.md create mode 100644 docs/reference/esql/functions/layout/match.asciidoc create mode 100644 docs/reference/esql/functions/parameters/match.asciidoc create mode 100644 docs/reference/esql/functions/signature/match.svg create mode 100644 docs/reference/esql/functions/types/match.asciidoc create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-function.csv-spec rename x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/{QueryStringFunctionIT.java => QueryStringIT.java} (98%) create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java rename x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/{QueryStringFunction.java => QueryString.java} (66%) create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java rename x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/{QueryStringFunctionTests.java => QueryStringTests.java} (92%) diff --git 
a/docs/changelog/113374.yaml b/docs/changelog/113374.yaml new file mode 100644 index 0000000000000..f1d5750de0f60 --- /dev/null +++ b/docs/changelog/113374.yaml @@ -0,0 +1,5 @@ +pr: 113374 +summary: Add ESQL match function +area: ES|QL +type: feature +issues: [] diff --git a/docs/reference/esql/functions/description/match.asciidoc b/docs/reference/esql/functions/description/match.asciidoc new file mode 100644 index 0000000000000..2a27fe4814395 --- /dev/null +++ b/docs/reference/esql/functions/description/match.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Performs a match query on the specified field. Returns true if the provided query matches the row. diff --git a/docs/reference/esql/functions/examples/match.asciidoc b/docs/reference/esql/functions/examples/match.asciidoc new file mode 100644 index 0000000000000..3f31d68ea9abb --- /dev/null +++ b/docs/reference/esql/functions/examples/match.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/match-function.csv-spec[tag=match-with-field] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/match-function.csv-spec[tag=match-with-field-result] +|=== + diff --git a/docs/reference/esql/functions/kibana/definition/match.json b/docs/reference/esql/functions/kibana/definition/match.json new file mode 100644 index 0000000000000..d2fe0bba53866 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/match.json @@ -0,0 +1,85 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "match", + "description" : "Performs a match query on the specified field. 
Returns true if the provided query matches the row.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "Field that the query will target." + }, + { + "name" : "query", + "type" : "keyword", + "optional" : false, + "description" : "Text you wish to find in the provided field." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "Field that the query will target." + }, + { + "name" : "query", + "type" : "text", + "optional" : false, + "description" : "Text you wish to find in the provided field." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "Field that the query will target." + }, + { + "name" : "query", + "type" : "keyword", + "optional" : false, + "description" : "Text you wish to find in the provided field." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "Field that the query will target." + }, + { + "name" : "query", + "type" : "text", + "optional" : false, + "description" : "Text you wish to find in the provided field." + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ], + "examples" : [ + "from books \n| where match(author, \"Faulkner\")\n| keep book_no, author \n| sort book_no \n| limit 5;" + ], + "preview" : true, + "snapshot_only" : true +} diff --git a/docs/reference/esql/functions/kibana/docs/match.md b/docs/reference/esql/functions/kibana/docs/match.md new file mode 100644 index 0000000000000..3c06662982bbf --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/match.md @@ -0,0 +1,14 @@ + + +### MATCH +Performs a match query on the specified field. Returns true if the provided query matches the row. 
+ +``` +from books +| where match(author, "Faulkner") +| keep book_no, author +| sort book_no +| limit 5; +``` diff --git a/docs/reference/esql/functions/layout/match.asciidoc b/docs/reference/esql/functions/layout/match.asciidoc new file mode 100644 index 0000000000000..e62c81548c2b1 --- /dev/null +++ b/docs/reference/esql/functions/layout/match.asciidoc @@ -0,0 +1,17 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-match]] +=== `MATCH` + +preview::["Do not use on production environments. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."] + +*Syntax* + +[.text-center] +image::esql/functions/signature/match.svg[Embedded,opts=inline] + +include::../parameters/match.asciidoc[] +include::../description/match.asciidoc[] +include::../types/match.asciidoc[] +include::../examples/match.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/match.asciidoc b/docs/reference/esql/functions/parameters/match.asciidoc new file mode 100644 index 0000000000000..f18adb28cd20c --- /dev/null +++ b/docs/reference/esql/functions/parameters/match.asciidoc @@ -0,0 +1,9 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`field`:: +Field that the query will target. + +`query`:: +Text you wish to find in the provided field. 
diff --git a/docs/reference/esql/functions/signature/match.svg b/docs/reference/esql/functions/signature/match.svg new file mode 100644 index 0000000000000..e7bb001247a9d --- /dev/null +++ b/docs/reference/esql/functions/signature/match.svg @@ -0,0 +1 @@ +MATCH(field,query) diff --git a/docs/reference/esql/functions/types/match.asciidoc b/docs/reference/esql/functions/types/match.asciidoc new file mode 100644 index 0000000000000..7523b29c62b1d --- /dev/null +++ b/docs/reference/esql/functions/types/match.asciidoc @@ -0,0 +1,12 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +field | query | result +keyword | keyword | boolean +keyword | text | boolean +text | keyword | boolean +text | text | boolean +|=== diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypeResolutions.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypeResolutions.java index ab05a71b0e1c6..b817ec17c7bda 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypeResolutions.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypeResolutions.java @@ -155,18 +155,19 @@ public static TypeResolution isNotNullAndFoldable(Expression e, String operation return resolution; } - public static TypeResolution isNotFoldable(Expression e, String operationName, ParamOrdinal paramOrd) { - if (e.foldable()) { + public static TypeResolution isNotNull(Expression e, String operationName, ParamOrdinal paramOrd) { + if (e.dataType() == DataType.NULL) { return new TypeResolution( format( null, - "{}argument of [{}] must be a table column, found constant [{}]", + "{}argument of [{}] cannot be null, received [{}]", paramOrd == null || paramOrd == DEFAULT ? 
"" : paramOrd.name().toLowerCase(Locale.ROOT) + " ", operationName, Expressions.name(e) ) ); } + return TypeResolution.TYPE_RESOLVED; } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-function.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-function.csv-spec new file mode 100644 index 0000000000000..b0578aa1a4ed0 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-function.csv-spec @@ -0,0 +1,199 @@ +############################################### +# Tests for Match function +# + +matchWithField +required_capability: match_function + +// tag::match-with-field[] +from books +| where match(author, "Faulkner") +| keep book_no, author +| sort book_no +| limit 5; +// end::match-with-field[] + +// tag::match-with-field-result[] +book_no:keyword | author:text +2378 | [Carol Faulkner, Holly Byers Ochoa, Lucretia Mott] +2713 | William Faulkner +2847 | Colleen Faulkner +2883 | William Faulkner +3293 | Danny Faulkner +; +// end::match-with-field-result[] + +matchWithMultipleFunctions +required_capability: match_function + +from books +| where match(title, "Return") AND match(author, "Tolkien") +| keep book_no, title; +ignoreOrder:true + +book_no:keyword | title:text +2714 | Return of the King Being the Third Part of The Lord of the Rings +7350 | Return of the Shadow +; + +matchWithQueryExpressions +required_capability: match_function + +from books +| where match(title, CONCAT("Return ", " King")) +| keep book_no, title; +ignoreOrder:true + +book_no:keyword | title:text +2714 | Return of the King Being the Third Part of The Lord of the Rings +7350 | Return of the Shadow +; + +matchAfterKeep +required_capability: match_function + +from books +| keep book_no, author +| where match(author, "Faulkner") +| sort book_no +| limit 5; + +book_no:keyword | author:text +2378 | [Carol Faulkner, Holly Byers Ochoa, Lucretia Mott] +2713 | William Faulkner +2847 | Colleen Faulkner +2883 | William Faulkner +3293 | 
Danny Faulkner +; + +matchAfterDrop +required_capability: match_function + +from books +| drop ratings, description, year, publisher, title, author.keyword +| where match(author, "Faulkner") +| keep book_no, author +| sort book_no +| limit 5; + +book_no:keyword | author:text +2378 | [Carol Faulkner, Holly Byers Ochoa, Lucretia Mott] +2713 | William Faulkner +2847 | Colleen Faulkner +2883 | William Faulkner +3293 | Danny Faulkner +; + +matchAfterEval +required_capability: match_function + +from books +| eval stars = to_long(ratings / 2.0) +| where match(author, "Faulkner") +| sort book_no +| keep book_no, author, stars +| limit 5; + +book_no:keyword | author:text | stars:long +2378 | [Carol Faulkner, Holly Byers Ochoa, Lucretia Mott] | 3 +2713 | William Faulkner | 2 +2847 | Colleen Faulkner | 3 +2883 | William Faulkner | 2 +3293 | Danny Faulkner | 2 +; + +matchWithConjunction +required_capability: match_function + +from books +| where match(title, "Rings") and ratings > 4.6 +| keep book_no, title; +ignoreOrder:true + +book_no:keyword | title:text +4023 |A Tolkien Compass: Including J. R. R. Tolkien's Guide to the Names in The Lord of the Rings +7140 |The Lord of the Rings Poster Collection: Six Paintings by Alan Lee (No. 1) +; + +matchWithFunctionPushedToLucene +required_capability: match_function + +from hosts +| where match(host, "beta") and cidr_match(ip1, "127.0.0.2/32", "127.0.0.3/32") +| keep card, host, ip0, ip1; +ignoreOrder:true + +card:keyword |host:keyword |ip0:ip |ip1:ip +eth1 |beta |127.0.0.1 |127.0.0.2 +; + +matchWithNonPushableConjunction +required_capability: match_function + +from books +| where match(title, "Rings") and length(title) > 75 +| keep book_no, title; +ignoreOrder:true + +book_no:keyword | title:text +4023 | A Tolkien Compass: Including J. R. R. 
Tolkien's Guide to the Names in The Lord of the Rings +; + +matchWithMultipleWhereClauses +required_capability: match_function + +from books +| where match(title, "rings") +| where match(title, "lord") +| keep book_no, title; +ignoreOrder:true + +book_no:keyword | title:text +2675 | The Lord of the Rings - Boxed Set +2714 | Return of the King Being the Third Part of The Lord of the Rings +4023 | A Tolkien Compass: Including J. R. R. Tolkien's Guide to the Names in The Lord of the Rings +7140 | The Lord of the Rings Poster Collection: Six Paintings by Alan Lee (No. 1) +; + +matchMultivaluedField +required_capability: match_function + +from employees +| where match(job_positions, "Tech Lead") and match(job_positions, "Reporting Analyst") +| keep emp_no, first_name, last_name; +ignoreOrder:true + +emp_no:integer | first_name:keyword | last_name:keyword +10004 | Chirstian | Koblick +10010 | Duangkaew | Piveteau +10011 | Mary | Sluis +10088 | Jungsoon | Syrzycki +10093 | Sailaja | Desikan +10097 | Remzi | Waschkowski +; + +testMultiValuedFieldWithConjunction +required_capability: match_function + +from employees +| where match(job_positions, "Data Scientist") and match(job_positions, "Support Engineer") +| keep emp_no, first_name, last_name; +ignoreOrder:true + +emp_no:integer | first_name:keyword | last_name:keyword +10043 | Yishay | Tzvieli +; + +testMatchAndQueryStringFunctions +required_capability: match_function +required_capability: qstr_function + +from employees +| where match(job_positions, "Data Scientist") and qstr("job_positions: (Support Engineer) and gender: F") +| keep emp_no, first_name, last_name; +ignoreOrder:true + +emp_no:integer | first_name:keyword | last_name:keyword +10041 | Uri | Lenart +10043 | Yishay | Tzvieli +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/qstr-function.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/qstr-function.csv-spec index 2f6313925032e..6dc03d0debcfa 100644 --- 
a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/qstr-function.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/qstr-function.csv-spec @@ -49,20 +49,6 @@ book_no:keyword | title:text 7350 | Return of the Shadow ; -qstrWithDisjunction -required_capability: qstr_function - -from books -| where qstr("title:Return") or year > 2020 -| keep book_no, title; -ignoreOrder:true - -book_no:keyword | title:text -2714 | Return of the King Being the Third Part of The Lord of the Rings -6818 | Hadji Murad -7350 | Return of the Shadow -; - qstrWithConjunction required_capability: qstr_function @@ -88,17 +74,16 @@ card:keyword |host:keyword |ip0:ip |ip1:ip eth1 |beta |127.0.0.1 |127.0.0.2 ; -qstrWithFunctionNotPushedToLucene +qstrWithNonPushableConjunction required_capability: qstr_function from books -| where qstr("title: rings") and length(description) > 600 +| where qstr("title: Rings") and length(title) > 75 | keep book_no, title; ignoreOrder:true book_no:keyword | title:text -2675 | The Lord of the Rings - Boxed Set -2714 | Return of the King Being the Third Part of The Lord of the Rings +4023 |A Tolkien Compass: Including J. R. R. Tolkien's Guide to the Names in The Lord of the Rings ; qstrWithMultipleWhereClauses @@ -114,3 +99,55 @@ book_no:keyword | title:text 4023 | A Tolkien Compass: Including J. R. R. Tolkien's Guide to the Names in The Lord of the Rings 7140 | The Lord of the Rings Poster Collection: Six Paintings by Alan Lee (No. 
1) ; + + +matchMultivaluedTextField +required_capability: match_function + +from employees +| where qstr("job_positions: (Tech Lead) AND job_positions:(Reporting Analyst)") +| keep emp_no, first_name, last_name; +ignoreOrder:true + +emp_no:integer | first_name:keyword | last_name:keyword +10004 | Chirstian | Koblick +10010 | Duangkaew | Piveteau +10011 | Mary | Sluis +10088 | Jungsoon | Syrzycki +10093 | Sailaja | Desikan +10097 | Remzi | Waschkowski +; + +matchMultivaluedNumericField +required_capability: match_function + +from employees +| where qstr("salary_change: [14 TO *]") +| keep emp_no, first_name, last_name, salary_change; +ignoreOrder:true + +emp_no:integer | first_name:keyword | last_name:keyword | salary_change:double +10003 | Parto | Bamford | [12.82, 14.68] +10015 | Guoxiang | Nooteboom | [12.4, 14.25] +10023 | Bojan | Montemayor | [0.8, 14.63] +10040 | Weiyi | Meriste | [-8.94, 1.92, 6.97, 14.74] +10061 | Tse | Herber | [-2.58, -0.95, 14.39] +10065 | Satosi | Awdeh | [-9.81, -1.47, 14.44] +10099 | Valter | Sullins | [-8.78, -3.98, 10.71, 14.26] +; + +testMultiValuedFieldWithConjunction +required_capability: match_function + +from employees +| where (qstr("job_positions: (Data Scientist) OR job_positions:(Support Engineer)")) and gender == "F" +| keep emp_no, first_name, last_name; +ignoreOrder:true + +emp_no:integer | first_name:keyword | last_name:keyword +10023 | Bojan | Montemayor +10041 | Uri | Lenart +10044 | Mingsen | Casley +10053 | Sanjiv | Zschoche +10069 | Margareta | Bierman +; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/QueryStringFunctionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/QueryStringIT.java similarity index 98% rename from x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/QueryStringFunctionIT.java rename to 
x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/QueryStringIT.java index e6f11ca1f44d2..53b833c7e8a15 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/QueryStringFunctionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/QueryStringIT.java @@ -29,7 +29,7 @@ import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.equalTo; -public class QueryStringFunctionIT extends AbstractEsqlIntegTestCase { +public class QueryStringIT extends AbstractEsqlIntegTestCase { @Before public void setupIndex() { diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index 0d8d3abf77ecc..ce3947875e6c7 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -86,7 +86,6 @@ WHERE : 'where' -> pushMode(EXPRESSION_MODE); // MYCOMMAND : 'mycommand' -> ... DEV_INLINESTATS : {this.isDevVersion()}? 'inlinestats' -> pushMode(EXPRESSION_MODE); DEV_LOOKUP : {this.isDevVersion()}? 'lookup' -> pushMode(LOOKUP_MODE); -DEV_MATCH : {this.isDevVersion()}? 'match' -> pushMode(EXPRESSION_MODE); DEV_METRICS : {this.isDevVersion()}? 'metrics' -> pushMode(METRICS_MODE); // @@ -209,8 +208,7 @@ ASTERISK : '*'; SLASH : '/'; PERCENT : '%'; -// move it in the main section if the feature gets promoted -DEV_MATCH_OP : {this.isDevVersion()}? DEV_MATCH -> type(DEV_MATCH); +DEV_MATCH : {this.isDevVersion()}? 
'match'; NAMED_OR_POSITIONAL_PARAM : PARAM (LETTER | UNDERSCORE) UNQUOTED_ID_BODY* diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens index 4fd37ab9900f2..2fe262a6983f7 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens @@ -16,51 +16,51 @@ STATS=15 WHERE=16 DEV_INLINESTATS=17 DEV_LOOKUP=18 -DEV_MATCH=19 -DEV_METRICS=20 -UNKNOWN_CMD=21 -LINE_COMMENT=22 -MULTILINE_COMMENT=23 -WS=24 -PIPE=25 -QUOTED_STRING=26 -INTEGER_LITERAL=27 -DECIMAL_LITERAL=28 -BY=29 -AND=30 -ASC=31 -ASSIGN=32 -CAST_OP=33 -COMMA=34 -DESC=35 -DOT=36 -FALSE=37 -FIRST=38 -IN=39 -IS=40 -LAST=41 -LIKE=42 -LP=43 -NOT=44 -NULL=45 -NULLS=46 -OR=47 -PARAM=48 -RLIKE=49 -RP=50 -TRUE=51 -EQ=52 -CIEQ=53 -NEQ=54 -LT=55 -LTE=56 -GT=57 -GTE=58 -PLUS=59 -MINUS=60 -ASTERISK=61 -SLASH=62 -PERCENT=63 +DEV_METRICS=19 +UNKNOWN_CMD=20 +LINE_COMMENT=21 +MULTILINE_COMMENT=22 +WS=23 +PIPE=24 +QUOTED_STRING=25 +INTEGER_LITERAL=26 +DECIMAL_LITERAL=27 +BY=28 +AND=29 +ASC=30 +ASSIGN=31 +CAST_OP=32 +COMMA=33 +DESC=34 +DOT=35 +FALSE=36 +FIRST=37 +IN=38 +IS=39 +LAST=40 +LIKE=41 +LP=42 +NOT=43 +NULL=44 +NULLS=45 +OR=46 +PARAM=47 +RLIKE=48 +RP=49 +TRUE=50 +EQ=51 +CIEQ=52 +NEQ=53 +LT=54 +LTE=55 +GT=56 +GTE=57 +PLUS=58 +MINUS=59 +ASTERISK=60 +SLASH=61 +PERCENT=62 +DEV_MATCH=63 NAMED_OR_POSITIONAL_PARAM=64 OPENING_BRACKET=65 CLOSING_BRACKET=66 @@ -134,42 +134,42 @@ CLOSING_METRICS_WS=120 'sort'=14 'stats'=15 'where'=16 -'|'=25 -'by'=29 -'and'=30 -'asc'=31 -'='=32 -'::'=33 -','=34 -'desc'=35 -'.'=36 -'false'=37 -'first'=38 -'in'=39 -'is'=40 -'last'=41 -'like'=42 -'('=43 -'not'=44 -'null'=45 -'nulls'=46 -'or'=47 -'?'=48 -'rlike'=49 -')'=50 -'true'=51 -'=='=52 -'=~'=53 -'!='=54 -'<'=55 -'<='=56 -'>'=57 -'>='=58 -'+'=59 -'-'=60 -'*'=61 -'/'=62 -'%'=63 +'|'=24 +'by'=28 +'and'=29 +'asc'=30 +'='=31 +'::'=32 +','=33 +'desc'=34 +'.'=35 +'false'=36 +'first'=37 +'in'=38 +'is'=39 +'last'=40 
+'like'=41 +'('=42 +'not'=43 +'null'=44 +'nulls'=45 +'or'=46 +'?'=47 +'rlike'=48 +')'=49 +'true'=50 +'=='=51 +'=~'=52 +'!='=53 +'<'=54 +'<='=55 +'>'=56 +'>='=57 +'+'=58 +'-'=59 +'*'=60 +'/'=61 +'%'=62 ']'=66 'metadata'=75 'as'=84 diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index b720ba98babf0..c053824861a96 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -101,7 +101,13 @@ primaryExpression ; functionExpression - : identifierOrParameter LP (ASTERISK | (booleanExpression (COMMA booleanExpression)*))? RP + : functionName LP (ASTERISK | (booleanExpression (COMMA booleanExpression)*))? RP + ; + +functionName + // Additional function identifiers that are already a reserved word in the language + : {this.isDevVersion()}? DEV_MATCH + | identifierOrParameter ; dataType diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens index 4fd37ab9900f2..2fe262a6983f7 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens @@ -16,51 +16,51 @@ STATS=15 WHERE=16 DEV_INLINESTATS=17 DEV_LOOKUP=18 -DEV_MATCH=19 -DEV_METRICS=20 -UNKNOWN_CMD=21 -LINE_COMMENT=22 -MULTILINE_COMMENT=23 -WS=24 -PIPE=25 -QUOTED_STRING=26 -INTEGER_LITERAL=27 -DECIMAL_LITERAL=28 -BY=29 -AND=30 -ASC=31 -ASSIGN=32 -CAST_OP=33 -COMMA=34 -DESC=35 -DOT=36 -FALSE=37 -FIRST=38 -IN=39 -IS=40 -LAST=41 -LIKE=42 -LP=43 -NOT=44 -NULL=45 -NULLS=46 -OR=47 -PARAM=48 -RLIKE=49 -RP=50 -TRUE=51 -EQ=52 -CIEQ=53 -NEQ=54 -LT=55 -LTE=56 -GT=57 -GTE=58 -PLUS=59 -MINUS=60 -ASTERISK=61 -SLASH=62 -PERCENT=63 +DEV_METRICS=19 +UNKNOWN_CMD=20 +LINE_COMMENT=21 +MULTILINE_COMMENT=22 +WS=23 +PIPE=24 +QUOTED_STRING=25 +INTEGER_LITERAL=26 +DECIMAL_LITERAL=27 +BY=28 +AND=29 +ASC=30 +ASSIGN=31 +CAST_OP=32 +COMMA=33 +DESC=34 +DOT=35 +FALSE=36 +FIRST=37 
+IN=38 +IS=39 +LAST=40 +LIKE=41 +LP=42 +NOT=43 +NULL=44 +NULLS=45 +OR=46 +PARAM=47 +RLIKE=48 +RP=49 +TRUE=50 +EQ=51 +CIEQ=52 +NEQ=53 +LT=54 +LTE=55 +GT=56 +GTE=57 +PLUS=58 +MINUS=59 +ASTERISK=60 +SLASH=61 +PERCENT=62 +DEV_MATCH=63 NAMED_OR_POSITIONAL_PARAM=64 OPENING_BRACKET=65 CLOSING_BRACKET=66 @@ -134,42 +134,42 @@ CLOSING_METRICS_WS=120 'sort'=14 'stats'=15 'where'=16 -'|'=25 -'by'=29 -'and'=30 -'asc'=31 -'='=32 -'::'=33 -','=34 -'desc'=35 -'.'=36 -'false'=37 -'first'=38 -'in'=39 -'is'=40 -'last'=41 -'like'=42 -'('=43 -'not'=44 -'null'=45 -'nulls'=46 -'or'=47 -'?'=48 -'rlike'=49 -')'=50 -'true'=51 -'=='=52 -'=~'=53 -'!='=54 -'<'=55 -'<='=56 -'>'=57 -'>='=58 -'+'=59 -'-'=60 -'*'=61 -'/'=62 -'%'=63 +'|'=24 +'by'=28 +'and'=29 +'asc'=30 +'='=31 +'::'=32 +','=33 +'desc'=34 +'.'=35 +'false'=36 +'first'=37 +'in'=38 +'is'=39 +'last'=40 +'like'=41 +'('=42 +'not'=43 +'null'=44 +'nulls'=45 +'or'=46 +'?'=47 +'rlike'=48 +')'=49 +'true'=50 +'=='=51 +'=~'=52 +'!='=53 +'<'=54 +'<='=55 +'>'=56 +'>='=57 +'+'=58 +'-'=59 +'*'=60 +'/'=61 +'%'=62 ']'=66 'metadata'=75 'as'=84 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 18ee6b9417e5c..2e979dcce1758 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -346,6 +346,11 @@ public enum Cap { */ QSTR_FUNCTION(true), + /** + * MATCH function + */ + MATCH_FUNCTION(true), + /** * Don't optimize CASE IS NOT NULL function by not requiring the fields to be not null as well. 
* https://github.com/elastic/elasticsearch/issues/112704 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index 647a29b71c5e1..e45db0c02be7e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -19,8 +19,12 @@ import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; +import org.elasticsearch.xpack.esql.core.expression.function.Function; import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MatchQueryPredicate; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.Holder; @@ -28,6 +32,8 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.Rate; import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextFunction; +import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; +import org.elasticsearch.xpack.esql.expression.function.fulltext.QueryString; import org.elasticsearch.xpack.esql.expression.function.grouping.GroupingFunction; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Neg; import 
org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; @@ -55,6 +61,7 @@ import java.util.List; import java.util.Locale; import java.util.Set; +import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.stream.Stream; @@ -644,27 +651,105 @@ private static void checkFilterMatchConditions(LogicalPlan plan, Set fa private static void checkFullTextQueryFunctions(LogicalPlan plan, Set failures) { if (plan instanceof Filter f) { Expression condition = f.condition(); - if (condition instanceof FullTextFunction ftf) { - // Similar to cases present in org.elasticsearch.xpack.esql.optimizer.rules.PushDownAndCombineFilters - - // we can't check if it can be pushed down as we don't have yet information about the fields present in the - // StringQueryPredicate - plan.forEachDown(LogicalPlan.class, lp -> { - if ((lp instanceof Filter || lp instanceof OrderBy || lp instanceof EsRelation) == false) { - failures.add( - fail( - plan, - "[{}] function cannot be used after {}", - ftf.functionName(), - lp.sourceText().split(" ")[0].toUpperCase(Locale.ROOT) - ) - ); - } - }); - } + checkCommandsBeforeQueryStringFunction(plan, condition, failures); + checkCommandsBeforeMatchFunction(plan, condition, failures); + checkFullTextFunctionsConditions(condition, failures); + checkFullTextFunctionsParents(condition, failures); } else { plan.forEachExpression(FullTextFunction.class, ftf -> { failures.add(fail(ftf, "[{}] function is only supported in WHERE commands", ftf.functionName())); }); } } + + private static void checkCommandsBeforeQueryStringFunction(LogicalPlan plan, Expression condition, Set failures) { + condition.forEachDown(QueryString.class, qsf -> { + plan.forEachDown(LogicalPlan.class, lp -> { + if ((lp instanceof Filter || lp instanceof OrderBy || lp instanceof EsRelation) == false) { + failures.add( + fail( + plan, + "[{}] function cannot be used after {}", + qsf.functionName(), + lp.sourceText().split(" 
")[0].toUpperCase(Locale.ROOT) + ) + ); + } + }); + }); + } + + private static void checkCommandsBeforeMatchFunction(LogicalPlan plan, Expression condition, Set failures) { + condition.forEachDown(Match.class, qsf -> { + plan.forEachDown(LogicalPlan.class, lp -> { + if (lp instanceof Limit) { + failures.add( + fail( + plan, + "[{}] function cannot be used after {}", + qsf.functionName(), + lp.sourceText().split(" ")[0].toUpperCase(Locale.ROOT) + ) + ); + } + }); + }); + } + + private static void checkFullTextFunctionsConditions(Expression condition, Set failures) { + condition.forEachUp(Or.class, or -> { + checkFullTextFunctionInDisjunction(failures, or, or.left()); + checkFullTextFunctionInDisjunction(failures, or, or.right()); + }); + } + + private static void checkFullTextFunctionInDisjunction(Set failures, Or or, Expression left) { + left.forEachDown(FullTextFunction.class, ftf -> { + failures.add( + fail( + or, + "Invalid condition [{}]. Function {} can't be used as part of an or condition", + or.sourceText(), + ftf.functionName() + ) + ); + }); + } + + private static void checkFullTextFunctionsParents(Expression condition, Set failures) { + forEachFullTextFunctionParent(condition, (ftf, parent) -> { + if ((parent instanceof FullTextFunction == false) + && (parent instanceof BinaryLogic == false) + && (parent instanceof Not == false)) { + failures.add( + fail( + condition, + "Invalid condition [{}]. 
Function {} can't be used with {}", + condition.sourceText(), + ftf.functionName(), + ((Function) parent).functionName() + ) + ); + } + }); + } + + /** + * Executes the action on every parent of a FullTextFunction in the condition if it is found + * + * @param action the action to execute for each parent of a FullTextFunction + */ + private static FullTextFunction forEachFullTextFunctionParent(Expression condition, BiConsumer action) { + if (condition instanceof FullTextFunction ftf) { + return ftf; + } + for (Expression child : condition.children()) { + FullTextFunction foundMatchingChild = forEachFullTextFunctionParent(child, action); + if (foundMatchingChild != null) { + action.accept(foundMatchingChild, condition); + return foundMatchingChild; + } + } + return null; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 3b1225555b297..e8921c68b8913 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -32,7 +32,8 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Top; import org.elasticsearch.xpack.esql.expression.function.aggregate.Values; import org.elasticsearch.xpack.esql.expression.function.aggregate.WeightedAvg; -import org.elasticsearch.xpack.esql.expression.function.fulltext.QueryStringFunction; +import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; +import org.elasticsearch.xpack.esql.expression.function.fulltext.QueryString; import org.elasticsearch.xpack.esql.expression.function.grouping.Bucket; import org.elasticsearch.xpack.esql.expression.function.grouping.Categorize; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; 
@@ -395,7 +396,8 @@ private static FunctionDefinition[][] snapshotFunctions() { def(Categorize.class, Categorize::new, "categorize"), def(Rate.class, Rate::withUnresolvedTimestamp, "rate"), // Full text functions - def(QueryStringFunction.class, QueryStringFunction::new, "qstr") } }; + def(QueryString.class, QueryString::new, "qstr"), + def(Match.class, Match::new, "match") } }; } public EsqlFunctionRegistry snapshotRegistry() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java index 54730eec4f317..a39c0d7bc6b50 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java @@ -7,22 +7,20 @@ package org.elasticsearch.xpack.esql.expression.function.fulltext; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Nullability; +import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.expression.function.Function; -import org.elasticsearch.xpack.esql.core.querydsl.query.Query; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import java.io.IOException; import java.util.ArrayList; import java.util.List; -import static java.util.Collections.singletonList; +import static 
org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNotNullAndFoldable; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; @@ -36,45 +34,84 @@ public abstract class FullTextFunction extends Function { public static List getNamedWriteables() { List entries = new ArrayList<>(); if (EsqlCapabilities.Cap.QSTR_FUNCTION.isEnabled()) { - entries.add(QueryStringFunction.ENTRY); + entries.add(QueryString.ENTRY); + } + if (EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()) { + entries.add(Match.ENTRY); } return entries; } private final Expression query; - protected FullTextFunction(Source source, Expression query) { - super(source, singletonList(query)); + protected FullTextFunction(Source source, Expression query, List children) { + super(source, children); this.query = query; } - protected FullTextFunction(StreamInput in) throws IOException { - this(Source.readFrom((StreamInput & PlanStreamInput) in), in.readNamedWriteable(Expression.class)); - } - @Override public DataType dataType() { return DataType.BOOLEAN; } @Override - protected TypeResolution resolveType() { + protected final TypeResolution resolveType() { if (childrenResolved() == false) { return new TypeResolution("Unresolved children"); } - return isString(query(), sourceText(), DEFAULT).and(isNotNullAndFoldable(query(), functionName(), DEFAULT)); + return resolveNonQueryParamTypes().and(resolveQueryParamType()); + } + + /** + * Resolves the type for the query parameter, as part of the type resolution for the function + * + * @return type resolution for query parameter + */ + private TypeResolution resolveQueryParamType() { + return isString(query(), sourceText(), queryParamOrdinal()).and(isNotNullAndFoldable(query(), sourceText(), queryParamOrdinal())); + } + + /** + * Subclasses can override this 
method for custom type resolution for additional function parameters + * + * @return type resolution for non-query parameter types + */ + protected TypeResolution resolveNonQueryParamTypes() { + return TypeResolution.TYPE_RESOLVED; } public Expression query() { return query; } - @Override - public void writeTo(StreamOutput out) throws IOException { - source().writeTo(out); - out.writeNamedWriteable(query); + /** + * Returns the resulting query as a String + * + * @return query expression as a string + */ + public final String queryAsText() { + Object queryAsObject = query().fold(); + if (queryAsObject instanceof BytesRef bytesRef) { + return bytesRef.utf8ToString(); + } + + throw new IllegalArgumentException( + format(null, "{} argument in {} function needs to be resolved to a string", queryParamOrdinal(), functionName()) + ); } - public abstract Query asQuery(); + /** + * Returns the param ordinal for the query parameter so it can be used in error messages + * + * @return Query ordinal for the + */ + protected TypeResolutions.ParamOrdinal queryParamOrdinal() { + return DEFAULT; + } + + @Override + public Nullability nullable() { + return Nullability.FALSE; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java new file mode 100644 index 0000000000000..b4e0f3c743216 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java @@ -0,0 +1,116 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.fulltext; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.capabilities.Validatable; +import org.elasticsearch.xpack.esql.common.Failure; +import org.elasticsearch.xpack.esql.common.Failures; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; +import org.elasticsearch.xpack.esql.core.querydsl.query.QueryStringQuery; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNotNull; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; + +/** + * Full text function that performs a {@link QueryStringQuery} . + */ +public class Match extends FullTextFunction implements Validatable { + + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Match", Match::new); + + private final Expression field; + + @FunctionInfo( + returnType = "boolean", + preview = true, + description = "Performs a match query on the specified field. 
Returns true if the provided query matches the row.", + examples = { @Example(file = "match-function", tag = "match-with-field") } + ) + public Match( + Source source, + @Param(name = "field", type = { "keyword", "text" }, description = "Field that the query will target.") Expression field, + @Param( + name = "query", + type = { "keyword", "text" }, + description = "Text you wish to find in the provided field." + ) Expression matchQuery + ) { + super(source, matchQuery, List.of(field, matchQuery)); + this.field = field; + } + + private Match(StreamInput in) throws IOException { + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + source().writeTo(out); + out.writeNamedWriteable(field); + out.writeNamedWriteable(query()); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + @Override + protected TypeResolution resolveNonQueryParamTypes() { + return isNotNull(field, sourceText(), FIRST).and(isString(field, sourceText(), FIRST)).and(super.resolveNonQueryParamTypes()); + } + + @Override + public void validate(Failures failures) { + if (field instanceof FieldAttribute == false) { + failures.add( + Failure.fail( + field, + "[{}] cannot operate on [{}], which is not a field from an index mapping", + functionName(), + field.sourceText() + ) + ); + } + } + + @Override + public Expression replaceChildren(List newChildren) { + // Query is the first child, field is the second child + return new Match(source(), newChildren.get(0), newChildren.get(1)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, Match::new, field, query()); + } + + protected TypeResolutions.ParamOrdinal queryParamOrdinal() { + return SECOND; + } + + public Expression field() { + return field; + } +} diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryString.java similarity index 66% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringFunction.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryString.java index fa331acd08655..0d7d15a13dd80 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryString.java @@ -7,32 +7,27 @@ package org.elasticsearch.xpack.esql.expression.function.fulltext; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.querydsl.query.Query; import org.elasticsearch.xpack.esql.core.querydsl.query.QueryStringQuery; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import java.io.IOException; import java.util.List; -import java.util.Map; /** * Full text function that performs a {@link QueryStringQuery} . 
*/ -public class QueryStringFunction extends FullTextFunction { +public class QueryString extends FullTextFunction { - public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( - Expression.class, - "QStr", - QueryStringFunction::new - ); + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "QStr", QueryString::new); @FunctionInfo( returnType = "boolean", @@ -40,7 +35,7 @@ public class QueryStringFunction extends FullTextFunction { description = "Performs a query string query. Returns true if the provided query string matches the row.", examples = { @Example(file = "qstr-function", tag = "qstr-with-field") } ) - public QueryStringFunction( + public QueryString( Source source, @Param( name = "query", @@ -48,40 +43,37 @@ public QueryStringFunction( description = "Query string in Lucene query string format." ) Expression queryString ) { - super(source, queryString); + super(source, queryString, List.of(queryString)); } - private QueryStringFunction(StreamInput in) throws IOException { - super(in); + private QueryString(StreamInput in) throws IOException { + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class)); } @Override - public String functionName() { - return "QSTR"; + public void writeTo(StreamOutput out) throws IOException { + source().writeTo(out); + out.writeNamedWriteable(query()); } @Override - public Query asQuery() { - Object queryAsObject = query().fold(); - if (queryAsObject instanceof BytesRef queryAsBytesRef) { - return new QueryStringQuery(source(), queryAsBytesRef.utf8ToString(), Map.of(), null); - } else { - throw new IllegalArgumentException("Query in QSTR needs to be resolved to a string"); - } + public String getWriteableName() { + return ENTRY.name; } @Override - public Expression replaceChildren(List newChildren) { - return new QueryStringFunction(source(), newChildren.get(0)); + public String functionName() { + return 
"QSTR"; } @Override - protected NodeInfo info() { - return NodeInfo.create(this, QueryStringFunction::new, query()); + public Expression replaceChildren(List newChildren) { + return new QueryString(source(), newChildren.get(0)); } @Override - public String getWriteableName() { - return ENTRY.name; + protected NodeInfo info() { + return NodeInfo.create(this, QueryString::new, query()); } + } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java index 1ba966e318219..2209dffe5af06 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java @@ -32,7 +32,8 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.CollectionUtils; import org.elasticsearch.xpack.esql.core.util.Queries; -import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextFunction; +import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; +import org.elasticsearch.xpack.esql.expression.function.fulltext.QueryString; import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.BinarySpatialFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; @@ -250,8 +251,10 @@ public static boolean canPushToSource(Expression exp, Predicate return mqp.field() instanceof FieldAttribute && DataType.isString(mqp.field().dataType()); } else if (exp instanceof StringQueryPredicate) { return true; - } else if (exp instanceof FullTextFunction) { + } else if (exp instanceof QueryString) { return true; + } else if (exp instanceof 
Match mf) { + return mf.field() instanceof FieldAttribute && DataType.isString(mf.field().dataType()); } return false; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index b5ca44826c051..e9e6f45bdc30f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -23,7 +23,6 @@ null null null null -null '|' null null @@ -65,6 +64,7 @@ null '%' null null +null ']' null null @@ -141,7 +141,6 @@ STATS WHERE DEV_INLINESTATS DEV_LOOKUP -DEV_MATCH DEV_METRICS UNKNOWN_CMD LINE_COMMENT @@ -186,6 +185,7 @@ MINUS ASTERISK SLASH PERCENT +DEV_MATCH NAMED_OR_POSITIONAL_PARAM OPENING_BRACKET CLOSING_BRACKET @@ -263,7 +263,6 @@ STATS WHERE DEV_INLINESTATS DEV_LOOKUP -DEV_MATCH DEV_METRICS UNKNOWN_CMD LINE_COMMENT @@ -318,7 +317,7 @@ MINUS ASTERISK SLASH PERCENT -DEV_MATCH_OP +DEV_MATCH NAMED_OR_POSITIONAL_PARAM OPENING_BRACKET CLOSING_BRACKET @@ -466,4 +465,4 @@ METRICS_MODE CLOSING_METRICS_MODE atn: -[4, 0, 120, 1475, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 
7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 
181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 4, 20, 587, 8, 20, 11, 20, 12, 20, 588, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 597, 8, 21, 10, 21, 12, 21, 600, 9, 21, 1, 21, 3, 21, 603, 8, 21, 1, 21, 3, 21, 606, 8, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 615, 8, 22, 10, 22, 12, 22, 618, 9, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 4, 23, 626, 8, 23, 11, 23, 12, 23, 627, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 3, 29, 647, 8, 29, 1, 29, 4, 29, 650, 8, 29, 11, 29, 12, 29, 651, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 3, 
32, 661, 8, 32, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 3, 34, 668, 8, 34, 1, 35, 1, 35, 1, 35, 5, 35, 673, 8, 35, 10, 35, 12, 35, 676, 9, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 684, 8, 35, 10, 35, 12, 35, 687, 9, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 3, 35, 694, 8, 35, 1, 35, 3, 35, 697, 8, 35, 3, 35, 699, 8, 35, 1, 36, 4, 36, 702, 8, 36, 11, 36, 12, 36, 703, 1, 37, 4, 37, 707, 8, 37, 11, 37, 12, 37, 708, 1, 37, 1, 37, 5, 37, 713, 8, 37, 10, 37, 12, 37, 716, 9, 37, 1, 37, 1, 37, 4, 37, 720, 8, 37, 11, 37, 12, 37, 721, 1, 37, 4, 37, 725, 8, 37, 11, 37, 12, 37, 726, 1, 37, 1, 37, 5, 37, 731, 8, 37, 10, 37, 12, 37, 734, 9, 37, 3, 37, 736, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 4, 37, 742, 8, 37, 11, 37, 12, 37, 743, 1, 37, 1, 37, 3, 37, 748, 8, 37, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 3, 74, 875, 8, 74, 1, 74, 5, 74, 878, 8, 74, 10, 74, 12, 74, 881, 9, 74, 1, 74, 1, 74, 4, 74, 885, 8, 74, 11, 74, 12, 74, 886, 3, 74, 889, 8, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 5, 77, 903, 8, 77, 10, 77, 12, 77, 906, 9, 77, 1, 77, 1, 77, 3, 77, 910, 8, 77, 1, 77, 4, 77, 913, 8, 77, 11, 77, 12, 77, 914, 3, 
77, 917, 8, 77, 1, 78, 1, 78, 4, 78, 921, 8, 78, 11, 78, 12, 78, 922, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 3, 95, 1000, 8, 95, 1, 96, 4, 96, 1003, 8, 96, 11, 96, 12, 96, 1004, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 3, 107, 1052, 8, 107, 1, 108, 1, 108, 3, 108, 1056, 8, 108, 1, 108, 5, 108, 1059, 8, 108, 10, 108, 12, 108, 1062, 9, 108, 1, 108, 1, 108, 3, 108, 1066, 8, 108, 1, 108, 4, 108, 1069, 8, 108, 11, 108, 12, 108, 1070, 3, 108, 1073, 8, 108, 1, 109, 1, 109, 4, 109, 1077, 8, 109, 11, 109, 12, 109, 1078, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 129, 4, 129, 1162, 8, 129, 11, 129, 12, 129, 
1163, 1, 129, 1, 129, 3, 129, 1168, 8, 129, 1, 129, 4, 129, 1171, 8, 129, 11, 129, 12, 129, 1172, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 4, 162, 1312, 8, 162, 11, 162, 12, 162, 1313, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 
1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 197, 2, 616, 685, 0, 198, 15, 1, 17, 2, 19, 3, 21, 4, 23, 5, 25, 6, 27, 7, 29, 8, 31, 9, 33, 10, 35, 11, 37, 12, 39, 13, 41, 14, 43, 15, 45, 16, 47, 17, 49, 18, 51, 19, 53, 20, 55, 21, 57, 22, 59, 23, 61, 24, 63, 25, 65, 0, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 0, 81, 0, 83, 0, 85, 26, 87, 27, 89, 28, 91, 29, 93, 30, 95, 31, 97, 32, 99, 33, 101, 34, 103, 35, 105, 36, 107, 37, 109, 38, 111, 39, 113, 40, 115, 41, 117, 42, 119, 43, 121, 44, 123, 45, 125, 46, 127, 47, 129, 48, 131, 49, 133, 50, 135, 51, 137, 52, 139, 53, 141, 54, 143, 55, 145, 56, 147, 57, 149, 58, 151, 59, 153, 60, 155, 61, 157, 62, 159, 63, 161, 0, 163, 64, 165, 65, 167, 66, 169, 67, 171, 0, 173, 68, 175, 69, 177, 70, 179, 71, 181, 0, 183, 0, 185, 72, 187, 73, 189, 74, 191, 0, 193, 0, 195, 0, 197, 0, 199, 0, 201, 0, 203, 75, 205, 0, 207, 76, 209, 0, 211, 0, 213, 77, 215, 78, 217, 79, 219, 0, 221, 0, 223, 0, 225, 0, 227, 0, 229, 0, 231, 0, 233, 80, 235, 81, 237, 82, 239, 83, 241, 0, 243, 0, 245, 0, 247, 0, 249, 0, 251, 0, 253, 84, 255, 0, 257, 85, 259, 86, 261, 87, 263, 0, 265, 0, 267, 88, 269, 89, 271, 0, 273, 90, 275, 0, 277, 91, 279, 92, 281, 93, 283, 0, 285, 0, 287, 0, 289, 0, 291, 0, 293, 0, 295, 0, 297, 0, 299, 0, 301, 94, 303, 95, 305, 96, 307, 0, 309, 0, 311, 0, 313, 0, 315, 0, 317, 0, 319, 97, 321, 98, 323, 99, 325, 0, 327, 100, 329, 101, 331, 102, 333, 103, 335, 0, 337, 104, 339, 105, 341, 106, 343, 107, 345, 108, 347, 0, 349, 0, 351, 0, 353, 0, 355, 0, 357, 0, 359, 0, 361, 109, 
363, 110, 365, 111, 367, 0, 369, 0, 371, 0, 373, 0, 375, 112, 377, 113, 379, 114, 381, 0, 383, 0, 385, 0, 387, 115, 389, 116, 391, 117, 393, 0, 395, 0, 397, 118, 399, 119, 401, 120, 403, 0, 405, 0, 407, 0, 409, 0, 15, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 35, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1503, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 1, 63, 1, 0, 0, 0, 1, 85, 1, 0, 0, 0, 1, 87, 1, 0, 0, 0, 1, 89, 1, 0, 0, 0, 1, 91, 1, 0, 0, 0, 1, 93, 1, 0, 0, 0, 1, 95, 1, 0, 0, 0, 1, 97, 1, 0, 0, 0, 1, 99, 1, 0, 0, 0, 1, 101, 1, 0, 0, 0, 1, 103, 1, 0, 0, 0, 1, 
105, 1, 0, 0, 0, 1, 107, 1, 0, 0, 0, 1, 109, 1, 0, 0, 0, 1, 111, 1, 0, 0, 0, 1, 113, 1, 0, 0, 0, 1, 115, 1, 0, 0, 0, 1, 117, 1, 0, 0, 0, 1, 119, 1, 0, 0, 0, 1, 121, 1, 0, 0, 0, 1, 123, 1, 0, 0, 0, 1, 125, 1, 0, 0, 0, 1, 127, 1, 0, 0, 0, 1, 129, 1, 0, 0, 0, 1, 131, 1, 0, 0, 0, 1, 133, 1, 0, 0, 0, 1, 135, 1, 0, 0, 0, 1, 137, 1, 0, 0, 0, 1, 139, 1, 0, 0, 0, 1, 141, 1, 0, 0, 0, 1, 143, 1, 0, 0, 0, 1, 145, 1, 0, 0, 0, 1, 147, 1, 0, 0, 0, 1, 149, 1, 0, 0, 0, 1, 151, 1, 0, 0, 0, 1, 153, 1, 0, 0, 0, 1, 155, 1, 0, 0, 0, 1, 157, 1, 0, 0, 0, 1, 159, 1, 0, 0, 0, 1, 161, 1, 0, 0, 0, 1, 163, 1, 0, 0, 0, 1, 165, 1, 0, 0, 0, 1, 167, 1, 0, 0, 0, 1, 169, 1, 0, 0, 0, 1, 173, 1, 0, 0, 0, 1, 175, 1, 0, 0, 0, 1, 177, 1, 0, 0, 0, 1, 179, 1, 0, 0, 0, 2, 181, 1, 0, 0, 0, 2, 183, 1, 0, 0, 0, 2, 185, 1, 0, 0, 0, 2, 187, 1, 0, 0, 0, 2, 189, 1, 0, 0, 0, 3, 191, 1, 0, 0, 0, 3, 193, 1, 0, 0, 0, 3, 195, 1, 0, 0, 0, 3, 197, 1, 0, 0, 0, 3, 199, 1, 0, 0, 0, 3, 201, 1, 0, 0, 0, 3, 203, 1, 0, 0, 0, 3, 207, 1, 0, 0, 0, 3, 209, 1, 0, 0, 0, 3, 211, 1, 0, 0, 0, 3, 213, 1, 0, 0, 0, 3, 215, 1, 0, 0, 0, 3, 217, 1, 0, 0, 0, 4, 219, 1, 0, 0, 0, 4, 221, 1, 0, 0, 0, 4, 223, 1, 0, 0, 0, 4, 225, 1, 0, 0, 0, 4, 227, 1, 0, 0, 0, 4, 233, 1, 0, 0, 0, 4, 235, 1, 0, 0, 0, 4, 237, 1, 0, 0, 0, 4, 239, 1, 0, 0, 0, 5, 241, 1, 0, 0, 0, 5, 243, 1, 0, 0, 0, 5, 245, 1, 0, 0, 0, 5, 247, 1, 0, 0, 0, 5, 249, 1, 0, 0, 0, 5, 251, 1, 0, 0, 0, 5, 253, 1, 0, 0, 0, 5, 255, 1, 0, 0, 0, 5, 257, 1, 0, 0, 0, 5, 259, 1, 0, 0, 0, 5, 261, 1, 0, 0, 0, 6, 263, 1, 0, 0, 0, 6, 265, 1, 0, 0, 0, 6, 267, 1, 0, 0, 0, 6, 269, 1, 0, 0, 0, 6, 273, 1, 0, 0, 0, 6, 275, 1, 0, 0, 0, 6, 277, 1, 0, 0, 0, 6, 279, 1, 0, 0, 0, 6, 281, 1, 0, 0, 0, 7, 283, 1, 0, 0, 0, 7, 285, 1, 0, 0, 0, 7, 287, 1, 0, 0, 0, 7, 289, 1, 0, 0, 0, 7, 291, 1, 0, 0, 0, 7, 293, 1, 0, 0, 0, 7, 295, 1, 0, 0, 0, 7, 297, 1, 0, 0, 0, 7, 299, 1, 0, 0, 0, 7, 301, 1, 0, 0, 0, 7, 303, 1, 0, 0, 0, 7, 305, 1, 0, 0, 0, 8, 307, 1, 0, 0, 0, 8, 309, 1, 0, 0, 0, 8, 311, 1, 0, 0, 0, 8, 313, 1, 0, 0, 0, 8, 
315, 1, 0, 0, 0, 8, 317, 1, 0, 0, 0, 8, 319, 1, 0, 0, 0, 8, 321, 1, 0, 0, 0, 8, 323, 1, 0, 0, 0, 9, 325, 1, 0, 0, 0, 9, 327, 1, 0, 0, 0, 9, 329, 1, 0, 0, 0, 9, 331, 1, 0, 0, 0, 9, 333, 1, 0, 0, 0, 10, 335, 1, 0, 0, 0, 10, 337, 1, 0, 0, 0, 10, 339, 1, 0, 0, 0, 10, 341, 1, 0, 0, 0, 10, 343, 1, 0, 0, 0, 10, 345, 1, 0, 0, 0, 11, 347, 1, 0, 0, 0, 11, 349, 1, 0, 0, 0, 11, 351, 1, 0, 0, 0, 11, 353, 1, 0, 0, 0, 11, 355, 1, 0, 0, 0, 11, 357, 1, 0, 0, 0, 11, 359, 1, 0, 0, 0, 11, 361, 1, 0, 0, 0, 11, 363, 1, 0, 0, 0, 11, 365, 1, 0, 0, 0, 12, 367, 1, 0, 0, 0, 12, 369, 1, 0, 0, 0, 12, 371, 1, 0, 0, 0, 12, 373, 1, 0, 0, 0, 12, 375, 1, 0, 0, 0, 12, 377, 1, 0, 0, 0, 12, 379, 1, 0, 0, 0, 13, 381, 1, 0, 0, 0, 13, 383, 1, 0, 0, 0, 13, 385, 1, 0, 0, 0, 13, 387, 1, 0, 0, 0, 13, 389, 1, 0, 0, 0, 13, 391, 1, 0, 0, 0, 14, 393, 1, 0, 0, 0, 14, 395, 1, 0, 0, 0, 14, 397, 1, 0, 0, 0, 14, 399, 1, 0, 0, 0, 14, 401, 1, 0, 0, 0, 14, 403, 1, 0, 0, 0, 14, 405, 1, 0, 0, 0, 14, 407, 1, 0, 0, 0, 14, 409, 1, 0, 0, 0, 15, 411, 1, 0, 0, 0, 17, 421, 1, 0, 0, 0, 19, 428, 1, 0, 0, 0, 21, 437, 1, 0, 0, 0, 23, 444, 1, 0, 0, 0, 25, 454, 1, 0, 0, 0, 27, 461, 1, 0, 0, 0, 29, 468, 1, 0, 0, 0, 31, 475, 1, 0, 0, 0, 33, 483, 1, 0, 0, 0, 35, 495, 1, 0, 0, 0, 37, 504, 1, 0, 0, 0, 39, 510, 1, 0, 0, 0, 41, 517, 1, 0, 0, 0, 43, 524, 1, 0, 0, 0, 45, 532, 1, 0, 0, 0, 47, 540, 1, 0, 0, 0, 49, 555, 1, 0, 0, 0, 51, 565, 1, 0, 0, 0, 53, 574, 1, 0, 0, 0, 55, 586, 1, 0, 0, 0, 57, 592, 1, 0, 0, 0, 59, 609, 1, 0, 0, 0, 61, 625, 1, 0, 0, 0, 63, 631, 1, 0, 0, 0, 65, 635, 1, 0, 0, 0, 67, 637, 1, 0, 0, 0, 69, 639, 1, 0, 0, 0, 71, 642, 1, 0, 0, 0, 73, 644, 1, 0, 0, 0, 75, 653, 1, 0, 0, 0, 77, 655, 1, 0, 0, 0, 79, 660, 1, 0, 0, 0, 81, 662, 1, 0, 0, 0, 83, 667, 1, 0, 0, 0, 85, 698, 1, 0, 0, 0, 87, 701, 1, 0, 0, 0, 89, 747, 1, 0, 0, 0, 91, 749, 1, 0, 0, 0, 93, 752, 1, 0, 0, 0, 95, 756, 1, 0, 0, 0, 97, 760, 1, 0, 0, 0, 99, 762, 1, 0, 0, 0, 101, 765, 1, 0, 0, 0, 103, 767, 1, 0, 0, 0, 105, 772, 1, 0, 0, 0, 107, 774, 1, 0, 0, 0, 109, 780, 1, 
0, 0, 0, 111, 786, 1, 0, 0, 0, 113, 789, 1, 0, 0, 0, 115, 792, 1, 0, 0, 0, 117, 797, 1, 0, 0, 0, 119, 802, 1, 0, 0, 0, 121, 804, 1, 0, 0, 0, 123, 808, 1, 0, 0, 0, 125, 813, 1, 0, 0, 0, 127, 819, 1, 0, 0, 0, 129, 822, 1, 0, 0, 0, 131, 824, 1, 0, 0, 0, 133, 830, 1, 0, 0, 0, 135, 832, 1, 0, 0, 0, 137, 837, 1, 0, 0, 0, 139, 840, 1, 0, 0, 0, 141, 843, 1, 0, 0, 0, 143, 846, 1, 0, 0, 0, 145, 848, 1, 0, 0, 0, 147, 851, 1, 0, 0, 0, 149, 853, 1, 0, 0, 0, 151, 856, 1, 0, 0, 0, 153, 858, 1, 0, 0, 0, 155, 860, 1, 0, 0, 0, 157, 862, 1, 0, 0, 0, 159, 864, 1, 0, 0, 0, 161, 866, 1, 0, 0, 0, 163, 888, 1, 0, 0, 0, 165, 890, 1, 0, 0, 0, 167, 895, 1, 0, 0, 0, 169, 916, 1, 0, 0, 0, 171, 918, 1, 0, 0, 0, 173, 926, 1, 0, 0, 0, 175, 928, 1, 0, 0, 0, 177, 932, 1, 0, 0, 0, 179, 936, 1, 0, 0, 0, 181, 940, 1, 0, 0, 0, 183, 945, 1, 0, 0, 0, 185, 950, 1, 0, 0, 0, 187, 954, 1, 0, 0, 0, 189, 958, 1, 0, 0, 0, 191, 962, 1, 0, 0, 0, 193, 967, 1, 0, 0, 0, 195, 971, 1, 0, 0, 0, 197, 975, 1, 0, 0, 0, 199, 979, 1, 0, 0, 0, 201, 983, 1, 0, 0, 0, 203, 987, 1, 0, 0, 0, 205, 999, 1, 0, 0, 0, 207, 1002, 1, 0, 0, 0, 209, 1006, 1, 0, 0, 0, 211, 1010, 1, 0, 0, 0, 213, 1014, 1, 0, 0, 0, 215, 1018, 1, 0, 0, 0, 217, 1022, 1, 0, 0, 0, 219, 1026, 1, 0, 0, 0, 221, 1031, 1, 0, 0, 0, 223, 1035, 1, 0, 0, 0, 225, 1039, 1, 0, 0, 0, 227, 1043, 1, 0, 0, 0, 229, 1051, 1, 0, 0, 0, 231, 1072, 1, 0, 0, 0, 233, 1076, 1, 0, 0, 0, 235, 1080, 1, 0, 0, 0, 237, 1084, 1, 0, 0, 0, 239, 1088, 1, 0, 0, 0, 241, 1092, 1, 0, 0, 0, 243, 1097, 1, 0, 0, 0, 245, 1101, 1, 0, 0, 0, 247, 1105, 1, 0, 0, 0, 249, 1109, 1, 0, 0, 0, 251, 1113, 1, 0, 0, 0, 253, 1117, 1, 0, 0, 0, 255, 1120, 1, 0, 0, 0, 257, 1124, 1, 0, 0, 0, 259, 1128, 1, 0, 0, 0, 261, 1132, 1, 0, 0, 0, 263, 1136, 1, 0, 0, 0, 265, 1141, 1, 0, 0, 0, 267, 1146, 1, 0, 0, 0, 269, 1151, 1, 0, 0, 0, 271, 1158, 1, 0, 0, 0, 273, 1167, 1, 0, 0, 0, 275, 1174, 1, 0, 0, 0, 277, 1178, 1, 0, 0, 0, 279, 1182, 1, 0, 0, 0, 281, 1186, 1, 0, 0, 0, 283, 1190, 1, 0, 0, 0, 285, 1196, 1, 0, 0, 0, 287, 1200, 1, 
0, 0, 0, 289, 1204, 1, 0, 0, 0, 291, 1208, 1, 0, 0, 0, 293, 1212, 1, 0, 0, 0, 295, 1216, 1, 0, 0, 0, 297, 1220, 1, 0, 0, 0, 299, 1224, 1, 0, 0, 0, 301, 1228, 1, 0, 0, 0, 303, 1232, 1, 0, 0, 0, 305, 1236, 1, 0, 0, 0, 307, 1240, 1, 0, 0, 0, 309, 1245, 1, 0, 0, 0, 311, 1249, 1, 0, 0, 0, 313, 1253, 1, 0, 0, 0, 315, 1257, 1, 0, 0, 0, 317, 1261, 1, 0, 0, 0, 319, 1265, 1, 0, 0, 0, 321, 1269, 1, 0, 0, 0, 323, 1273, 1, 0, 0, 0, 325, 1277, 1, 0, 0, 0, 327, 1282, 1, 0, 0, 0, 329, 1287, 1, 0, 0, 0, 331, 1291, 1, 0, 0, 0, 333, 1295, 1, 0, 0, 0, 335, 1299, 1, 0, 0, 0, 337, 1304, 1, 0, 0, 0, 339, 1311, 1, 0, 0, 0, 341, 1315, 1, 0, 0, 0, 343, 1319, 1, 0, 0, 0, 345, 1323, 1, 0, 0, 0, 347, 1327, 1, 0, 0, 0, 349, 1332, 1, 0, 0, 0, 351, 1336, 1, 0, 0, 0, 353, 1340, 1, 0, 0, 0, 355, 1344, 1, 0, 0, 0, 357, 1349, 1, 0, 0, 0, 359, 1353, 1, 0, 0, 0, 361, 1357, 1, 0, 0, 0, 363, 1361, 1, 0, 0, 0, 365, 1365, 1, 0, 0, 0, 367, 1369, 1, 0, 0, 0, 369, 1375, 1, 0, 0, 0, 371, 1379, 1, 0, 0, 0, 373, 1383, 1, 0, 0, 0, 375, 1387, 1, 0, 0, 0, 377, 1391, 1, 0, 0, 0, 379, 1395, 1, 0, 0, 0, 381, 1399, 1, 0, 0, 0, 383, 1404, 1, 0, 0, 0, 385, 1410, 1, 0, 0, 0, 387, 1416, 1, 0, 0, 0, 389, 1420, 1, 0, 0, 0, 391, 1424, 1, 0, 0, 0, 393, 1428, 1, 0, 0, 0, 395, 1434, 1, 0, 0, 0, 397, 1440, 1, 0, 0, 0, 399, 1444, 1, 0, 0, 0, 401, 1448, 1, 0, 0, 0, 403, 1452, 1, 0, 0, 0, 405, 1458, 1, 0, 0, 0, 407, 1464, 1, 0, 0, 0, 409, 1470, 1, 0, 0, 0, 411, 412, 7, 0, 0, 0, 412, 413, 7, 1, 0, 0, 413, 414, 7, 2, 0, 0, 414, 415, 7, 2, 0, 0, 415, 416, 7, 3, 0, 0, 416, 417, 7, 4, 0, 0, 417, 418, 7, 5, 0, 0, 418, 419, 1, 0, 0, 0, 419, 420, 6, 0, 0, 0, 420, 16, 1, 0, 0, 0, 421, 422, 7, 0, 0, 0, 422, 423, 7, 6, 0, 0, 423, 424, 7, 7, 0, 0, 424, 425, 7, 8, 0, 0, 425, 426, 1, 0, 0, 0, 426, 427, 6, 1, 1, 0, 427, 18, 1, 0, 0, 0, 428, 429, 7, 3, 0, 0, 429, 430, 7, 9, 0, 0, 430, 431, 7, 6, 0, 0, 431, 432, 7, 1, 0, 0, 432, 433, 7, 4, 0, 0, 433, 434, 7, 10, 0, 0, 434, 435, 1, 0, 0, 0, 435, 436, 6, 2, 2, 0, 436, 20, 1, 0, 0, 0, 437, 438, 7, 3, 
0, 0, 438, 439, 7, 11, 0, 0, 439, 440, 7, 12, 0, 0, 440, 441, 7, 13, 0, 0, 441, 442, 1, 0, 0, 0, 442, 443, 6, 3, 0, 0, 443, 22, 1, 0, 0, 0, 444, 445, 7, 3, 0, 0, 445, 446, 7, 14, 0, 0, 446, 447, 7, 8, 0, 0, 447, 448, 7, 13, 0, 0, 448, 449, 7, 12, 0, 0, 449, 450, 7, 1, 0, 0, 450, 451, 7, 9, 0, 0, 451, 452, 1, 0, 0, 0, 452, 453, 6, 4, 3, 0, 453, 24, 1, 0, 0, 0, 454, 455, 7, 15, 0, 0, 455, 456, 7, 6, 0, 0, 456, 457, 7, 7, 0, 0, 457, 458, 7, 16, 0, 0, 458, 459, 1, 0, 0, 0, 459, 460, 6, 5, 4, 0, 460, 26, 1, 0, 0, 0, 461, 462, 7, 17, 0, 0, 462, 463, 7, 6, 0, 0, 463, 464, 7, 7, 0, 0, 464, 465, 7, 18, 0, 0, 465, 466, 1, 0, 0, 0, 466, 467, 6, 6, 0, 0, 467, 28, 1, 0, 0, 0, 468, 469, 7, 18, 0, 0, 469, 470, 7, 3, 0, 0, 470, 471, 7, 3, 0, 0, 471, 472, 7, 8, 0, 0, 472, 473, 1, 0, 0, 0, 473, 474, 6, 7, 1, 0, 474, 30, 1, 0, 0, 0, 475, 476, 7, 13, 0, 0, 476, 477, 7, 1, 0, 0, 477, 478, 7, 16, 0, 0, 478, 479, 7, 1, 0, 0, 479, 480, 7, 5, 0, 0, 480, 481, 1, 0, 0, 0, 481, 482, 6, 8, 0, 0, 482, 32, 1, 0, 0, 0, 483, 484, 7, 16, 0, 0, 484, 485, 7, 11, 0, 0, 485, 486, 5, 95, 0, 0, 486, 487, 7, 3, 0, 0, 487, 488, 7, 14, 0, 0, 488, 489, 7, 8, 0, 0, 489, 490, 7, 12, 0, 0, 490, 491, 7, 9, 0, 0, 491, 492, 7, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 494, 6, 9, 5, 0, 494, 34, 1, 0, 0, 0, 495, 496, 7, 6, 0, 0, 496, 497, 7, 3, 0, 0, 497, 498, 7, 9, 0, 0, 498, 499, 7, 12, 0, 0, 499, 500, 7, 16, 0, 0, 500, 501, 7, 3, 0, 0, 501, 502, 1, 0, 0, 0, 502, 503, 6, 10, 6, 0, 503, 36, 1, 0, 0, 0, 504, 505, 7, 6, 0, 0, 505, 506, 7, 7, 0, 0, 506, 507, 7, 19, 0, 0, 507, 508, 1, 0, 0, 0, 508, 509, 6, 11, 0, 0, 509, 38, 1, 0, 0, 0, 510, 511, 7, 2, 0, 0, 511, 512, 7, 10, 0, 0, 512, 513, 7, 7, 0, 0, 513, 514, 7, 19, 0, 0, 514, 515, 1, 0, 0, 0, 515, 516, 6, 12, 7, 0, 516, 40, 1, 0, 0, 0, 517, 518, 7, 2, 0, 0, 518, 519, 7, 7, 0, 0, 519, 520, 7, 6, 0, 0, 520, 521, 7, 5, 0, 0, 521, 522, 1, 0, 0, 0, 522, 523, 6, 13, 0, 0, 523, 42, 1, 0, 0, 0, 524, 525, 7, 2, 0, 0, 525, 526, 7, 5, 0, 0, 526, 527, 7, 12, 0, 0, 527, 528, 7, 5, 0, 
0, 528, 529, 7, 2, 0, 0, 529, 530, 1, 0, 0, 0, 530, 531, 6, 14, 0, 0, 531, 44, 1, 0, 0, 0, 532, 533, 7, 19, 0, 0, 533, 534, 7, 10, 0, 0, 534, 535, 7, 3, 0, 0, 535, 536, 7, 6, 0, 0, 536, 537, 7, 3, 0, 0, 537, 538, 1, 0, 0, 0, 538, 539, 6, 15, 0, 0, 539, 46, 1, 0, 0, 0, 540, 541, 4, 16, 0, 0, 541, 542, 7, 1, 0, 0, 542, 543, 7, 9, 0, 0, 543, 544, 7, 13, 0, 0, 544, 545, 7, 1, 0, 0, 545, 546, 7, 9, 0, 0, 546, 547, 7, 3, 0, 0, 547, 548, 7, 2, 0, 0, 548, 549, 7, 5, 0, 0, 549, 550, 7, 12, 0, 0, 550, 551, 7, 5, 0, 0, 551, 552, 7, 2, 0, 0, 552, 553, 1, 0, 0, 0, 553, 554, 6, 16, 0, 0, 554, 48, 1, 0, 0, 0, 555, 556, 4, 17, 1, 0, 556, 557, 7, 13, 0, 0, 557, 558, 7, 7, 0, 0, 558, 559, 7, 7, 0, 0, 559, 560, 7, 18, 0, 0, 560, 561, 7, 20, 0, 0, 561, 562, 7, 8, 0, 0, 562, 563, 1, 0, 0, 0, 563, 564, 6, 17, 8, 0, 564, 50, 1, 0, 0, 0, 565, 566, 4, 18, 2, 0, 566, 567, 7, 16, 0, 0, 567, 568, 7, 12, 0, 0, 568, 569, 7, 5, 0, 0, 569, 570, 7, 4, 0, 0, 570, 571, 7, 10, 0, 0, 571, 572, 1, 0, 0, 0, 572, 573, 6, 18, 0, 0, 573, 52, 1, 0, 0, 0, 574, 575, 4, 19, 3, 0, 575, 576, 7, 16, 0, 0, 576, 577, 7, 3, 0, 0, 577, 578, 7, 5, 0, 0, 578, 579, 7, 6, 0, 0, 579, 580, 7, 1, 0, 0, 580, 581, 7, 4, 0, 0, 581, 582, 7, 2, 0, 0, 582, 583, 1, 0, 0, 0, 583, 584, 6, 19, 9, 0, 584, 54, 1, 0, 0, 0, 585, 587, 8, 21, 0, 0, 586, 585, 1, 0, 0, 0, 587, 588, 1, 0, 0, 0, 588, 586, 1, 0, 0, 0, 588, 589, 1, 0, 0, 0, 589, 590, 1, 0, 0, 0, 590, 591, 6, 20, 0, 0, 591, 56, 1, 0, 0, 0, 592, 593, 5, 47, 0, 0, 593, 594, 5, 47, 0, 0, 594, 598, 1, 0, 0, 0, 595, 597, 8, 22, 0, 0, 596, 595, 1, 0, 0, 0, 597, 600, 1, 0, 0, 0, 598, 596, 1, 0, 0, 0, 598, 599, 1, 0, 0, 0, 599, 602, 1, 0, 0, 0, 600, 598, 1, 0, 0, 0, 601, 603, 5, 13, 0, 0, 602, 601, 1, 0, 0, 0, 602, 603, 1, 0, 0, 0, 603, 605, 1, 0, 0, 0, 604, 606, 5, 10, 0, 0, 605, 604, 1, 0, 0, 0, 605, 606, 1, 0, 0, 0, 606, 607, 1, 0, 0, 0, 607, 608, 6, 21, 10, 0, 608, 58, 1, 0, 0, 0, 609, 610, 5, 47, 0, 0, 610, 611, 5, 42, 0, 0, 611, 616, 1, 0, 0, 0, 612, 615, 3, 59, 22, 0, 613, 615, 9, 
0, 0, 0, 614, 612, 1, 0, 0, 0, 614, 613, 1, 0, 0, 0, 615, 618, 1, 0, 0, 0, 616, 617, 1, 0, 0, 0, 616, 614, 1, 0, 0, 0, 617, 619, 1, 0, 0, 0, 618, 616, 1, 0, 0, 0, 619, 620, 5, 42, 0, 0, 620, 621, 5, 47, 0, 0, 621, 622, 1, 0, 0, 0, 622, 623, 6, 22, 10, 0, 623, 60, 1, 0, 0, 0, 624, 626, 7, 23, 0, 0, 625, 624, 1, 0, 0, 0, 626, 627, 1, 0, 0, 0, 627, 625, 1, 0, 0, 0, 627, 628, 1, 0, 0, 0, 628, 629, 1, 0, 0, 0, 629, 630, 6, 23, 10, 0, 630, 62, 1, 0, 0, 0, 631, 632, 5, 124, 0, 0, 632, 633, 1, 0, 0, 0, 633, 634, 6, 24, 11, 0, 634, 64, 1, 0, 0, 0, 635, 636, 7, 24, 0, 0, 636, 66, 1, 0, 0, 0, 637, 638, 7, 25, 0, 0, 638, 68, 1, 0, 0, 0, 639, 640, 5, 92, 0, 0, 640, 641, 7, 26, 0, 0, 641, 70, 1, 0, 0, 0, 642, 643, 8, 27, 0, 0, 643, 72, 1, 0, 0, 0, 644, 646, 7, 3, 0, 0, 645, 647, 7, 28, 0, 0, 646, 645, 1, 0, 0, 0, 646, 647, 1, 0, 0, 0, 647, 649, 1, 0, 0, 0, 648, 650, 3, 65, 25, 0, 649, 648, 1, 0, 0, 0, 650, 651, 1, 0, 0, 0, 651, 649, 1, 0, 0, 0, 651, 652, 1, 0, 0, 0, 652, 74, 1, 0, 0, 0, 653, 654, 5, 64, 0, 0, 654, 76, 1, 0, 0, 0, 655, 656, 5, 96, 0, 0, 656, 78, 1, 0, 0, 0, 657, 661, 8, 29, 0, 0, 658, 659, 5, 96, 0, 0, 659, 661, 5, 96, 0, 0, 660, 657, 1, 0, 0, 0, 660, 658, 1, 0, 0, 0, 661, 80, 1, 0, 0, 0, 662, 663, 5, 95, 0, 0, 663, 82, 1, 0, 0, 0, 664, 668, 3, 67, 26, 0, 665, 668, 3, 65, 25, 0, 666, 668, 3, 81, 33, 0, 667, 664, 1, 0, 0, 0, 667, 665, 1, 0, 0, 0, 667, 666, 1, 0, 0, 0, 668, 84, 1, 0, 0, 0, 669, 674, 5, 34, 0, 0, 670, 673, 3, 69, 27, 0, 671, 673, 3, 71, 28, 0, 672, 670, 1, 0, 0, 0, 672, 671, 1, 0, 0, 0, 673, 676, 1, 0, 0, 0, 674, 672, 1, 0, 0, 0, 674, 675, 1, 0, 0, 0, 675, 677, 1, 0, 0, 0, 676, 674, 1, 0, 0, 0, 677, 699, 5, 34, 0, 0, 678, 679, 5, 34, 0, 0, 679, 680, 5, 34, 0, 0, 680, 681, 5, 34, 0, 0, 681, 685, 1, 0, 0, 0, 682, 684, 8, 22, 0, 0, 683, 682, 1, 0, 0, 0, 684, 687, 1, 0, 0, 0, 685, 686, 1, 0, 0, 0, 685, 683, 1, 0, 0, 0, 686, 688, 1, 0, 0, 0, 687, 685, 1, 0, 0, 0, 688, 689, 5, 34, 0, 0, 689, 690, 5, 34, 0, 0, 690, 691, 5, 34, 0, 0, 691, 693, 1, 0, 0, 0, 
692, 694, 5, 34, 0, 0, 693, 692, 1, 0, 0, 0, 693, 694, 1, 0, 0, 0, 694, 696, 1, 0, 0, 0, 695, 697, 5, 34, 0, 0, 696, 695, 1, 0, 0, 0, 696, 697, 1, 0, 0, 0, 697, 699, 1, 0, 0, 0, 698, 669, 1, 0, 0, 0, 698, 678, 1, 0, 0, 0, 699, 86, 1, 0, 0, 0, 700, 702, 3, 65, 25, 0, 701, 700, 1, 0, 0, 0, 702, 703, 1, 0, 0, 0, 703, 701, 1, 0, 0, 0, 703, 704, 1, 0, 0, 0, 704, 88, 1, 0, 0, 0, 705, 707, 3, 65, 25, 0, 706, 705, 1, 0, 0, 0, 707, 708, 1, 0, 0, 0, 708, 706, 1, 0, 0, 0, 708, 709, 1, 0, 0, 0, 709, 710, 1, 0, 0, 0, 710, 714, 3, 105, 45, 0, 711, 713, 3, 65, 25, 0, 712, 711, 1, 0, 0, 0, 713, 716, 1, 0, 0, 0, 714, 712, 1, 0, 0, 0, 714, 715, 1, 0, 0, 0, 715, 748, 1, 0, 0, 0, 716, 714, 1, 0, 0, 0, 717, 719, 3, 105, 45, 0, 718, 720, 3, 65, 25, 0, 719, 718, 1, 0, 0, 0, 720, 721, 1, 0, 0, 0, 721, 719, 1, 0, 0, 0, 721, 722, 1, 0, 0, 0, 722, 748, 1, 0, 0, 0, 723, 725, 3, 65, 25, 0, 724, 723, 1, 0, 0, 0, 725, 726, 1, 0, 0, 0, 726, 724, 1, 0, 0, 0, 726, 727, 1, 0, 0, 0, 727, 735, 1, 0, 0, 0, 728, 732, 3, 105, 45, 0, 729, 731, 3, 65, 25, 0, 730, 729, 1, 0, 0, 0, 731, 734, 1, 0, 0, 0, 732, 730, 1, 0, 0, 0, 732, 733, 1, 0, 0, 0, 733, 736, 1, 0, 0, 0, 734, 732, 1, 0, 0, 0, 735, 728, 1, 0, 0, 0, 735, 736, 1, 0, 0, 0, 736, 737, 1, 0, 0, 0, 737, 738, 3, 73, 29, 0, 738, 748, 1, 0, 0, 0, 739, 741, 3, 105, 45, 0, 740, 742, 3, 65, 25, 0, 741, 740, 1, 0, 0, 0, 742, 743, 1, 0, 0, 0, 743, 741, 1, 0, 0, 0, 743, 744, 1, 0, 0, 0, 744, 745, 1, 0, 0, 0, 745, 746, 3, 73, 29, 0, 746, 748, 1, 0, 0, 0, 747, 706, 1, 0, 0, 0, 747, 717, 1, 0, 0, 0, 747, 724, 1, 0, 0, 0, 747, 739, 1, 0, 0, 0, 748, 90, 1, 0, 0, 0, 749, 750, 7, 30, 0, 0, 750, 751, 7, 31, 0, 0, 751, 92, 1, 0, 0, 0, 752, 753, 7, 12, 0, 0, 753, 754, 7, 9, 0, 0, 754, 755, 7, 0, 0, 0, 755, 94, 1, 0, 0, 0, 756, 757, 7, 12, 0, 0, 757, 758, 7, 2, 0, 0, 758, 759, 7, 4, 0, 0, 759, 96, 1, 0, 0, 0, 760, 761, 5, 61, 0, 0, 761, 98, 1, 0, 0, 0, 762, 763, 5, 58, 0, 0, 763, 764, 5, 58, 0, 0, 764, 100, 1, 0, 0, 0, 765, 766, 5, 44, 0, 0, 766, 102, 1, 0, 0, 0, 767, 
768, 7, 0, 0, 0, 768, 769, 7, 3, 0, 0, 769, 770, 7, 2, 0, 0, 770, 771, 7, 4, 0, 0, 771, 104, 1, 0, 0, 0, 772, 773, 5, 46, 0, 0, 773, 106, 1, 0, 0, 0, 774, 775, 7, 15, 0, 0, 775, 776, 7, 12, 0, 0, 776, 777, 7, 13, 0, 0, 777, 778, 7, 2, 0, 0, 778, 779, 7, 3, 0, 0, 779, 108, 1, 0, 0, 0, 780, 781, 7, 15, 0, 0, 781, 782, 7, 1, 0, 0, 782, 783, 7, 6, 0, 0, 783, 784, 7, 2, 0, 0, 784, 785, 7, 5, 0, 0, 785, 110, 1, 0, 0, 0, 786, 787, 7, 1, 0, 0, 787, 788, 7, 9, 0, 0, 788, 112, 1, 0, 0, 0, 789, 790, 7, 1, 0, 0, 790, 791, 7, 2, 0, 0, 791, 114, 1, 0, 0, 0, 792, 793, 7, 13, 0, 0, 793, 794, 7, 12, 0, 0, 794, 795, 7, 2, 0, 0, 795, 796, 7, 5, 0, 0, 796, 116, 1, 0, 0, 0, 797, 798, 7, 13, 0, 0, 798, 799, 7, 1, 0, 0, 799, 800, 7, 18, 0, 0, 800, 801, 7, 3, 0, 0, 801, 118, 1, 0, 0, 0, 802, 803, 5, 40, 0, 0, 803, 120, 1, 0, 0, 0, 804, 805, 7, 9, 0, 0, 805, 806, 7, 7, 0, 0, 806, 807, 7, 5, 0, 0, 807, 122, 1, 0, 0, 0, 808, 809, 7, 9, 0, 0, 809, 810, 7, 20, 0, 0, 810, 811, 7, 13, 0, 0, 811, 812, 7, 13, 0, 0, 812, 124, 1, 0, 0, 0, 813, 814, 7, 9, 0, 0, 814, 815, 7, 20, 0, 0, 815, 816, 7, 13, 0, 0, 816, 817, 7, 13, 0, 0, 817, 818, 7, 2, 0, 0, 818, 126, 1, 0, 0, 0, 819, 820, 7, 7, 0, 0, 820, 821, 7, 6, 0, 0, 821, 128, 1, 0, 0, 0, 822, 823, 5, 63, 0, 0, 823, 130, 1, 0, 0, 0, 824, 825, 7, 6, 0, 0, 825, 826, 7, 13, 0, 0, 826, 827, 7, 1, 0, 0, 827, 828, 7, 18, 0, 0, 828, 829, 7, 3, 0, 0, 829, 132, 1, 0, 0, 0, 830, 831, 5, 41, 0, 0, 831, 134, 1, 0, 0, 0, 832, 833, 7, 5, 0, 0, 833, 834, 7, 6, 0, 0, 834, 835, 7, 20, 0, 0, 835, 836, 7, 3, 0, 0, 836, 136, 1, 0, 0, 0, 837, 838, 5, 61, 0, 0, 838, 839, 5, 61, 0, 0, 839, 138, 1, 0, 0, 0, 840, 841, 5, 61, 0, 0, 841, 842, 5, 126, 0, 0, 842, 140, 1, 0, 0, 0, 843, 844, 5, 33, 0, 0, 844, 845, 5, 61, 0, 0, 845, 142, 1, 0, 0, 0, 846, 847, 5, 60, 0, 0, 847, 144, 1, 0, 0, 0, 848, 849, 5, 60, 0, 0, 849, 850, 5, 61, 0, 0, 850, 146, 1, 0, 0, 0, 851, 852, 5, 62, 0, 0, 852, 148, 1, 0, 0, 0, 853, 854, 5, 62, 0, 0, 854, 855, 5, 61, 0, 0, 855, 150, 1, 0, 0, 0, 856, 857, 5, 
43, 0, 0, 857, 152, 1, 0, 0, 0, 858, 859, 5, 45, 0, 0, 859, 154, 1, 0, 0, 0, 860, 861, 5, 42, 0, 0, 861, 156, 1, 0, 0, 0, 862, 863, 5, 47, 0, 0, 863, 158, 1, 0, 0, 0, 864, 865, 5, 37, 0, 0, 865, 160, 1, 0, 0, 0, 866, 867, 4, 73, 4, 0, 867, 868, 3, 51, 18, 0, 868, 869, 1, 0, 0, 0, 869, 870, 6, 73, 12, 0, 870, 162, 1, 0, 0, 0, 871, 874, 3, 129, 57, 0, 872, 875, 3, 67, 26, 0, 873, 875, 3, 81, 33, 0, 874, 872, 1, 0, 0, 0, 874, 873, 1, 0, 0, 0, 875, 879, 1, 0, 0, 0, 876, 878, 3, 83, 34, 0, 877, 876, 1, 0, 0, 0, 878, 881, 1, 0, 0, 0, 879, 877, 1, 0, 0, 0, 879, 880, 1, 0, 0, 0, 880, 889, 1, 0, 0, 0, 881, 879, 1, 0, 0, 0, 882, 884, 3, 129, 57, 0, 883, 885, 3, 65, 25, 0, 884, 883, 1, 0, 0, 0, 885, 886, 1, 0, 0, 0, 886, 884, 1, 0, 0, 0, 886, 887, 1, 0, 0, 0, 887, 889, 1, 0, 0, 0, 888, 871, 1, 0, 0, 0, 888, 882, 1, 0, 0, 0, 889, 164, 1, 0, 0, 0, 890, 891, 5, 91, 0, 0, 891, 892, 1, 0, 0, 0, 892, 893, 6, 75, 0, 0, 893, 894, 6, 75, 0, 0, 894, 166, 1, 0, 0, 0, 895, 896, 5, 93, 0, 0, 896, 897, 1, 0, 0, 0, 897, 898, 6, 76, 11, 0, 898, 899, 6, 76, 11, 0, 899, 168, 1, 0, 0, 0, 900, 904, 3, 67, 26, 0, 901, 903, 3, 83, 34, 0, 902, 901, 1, 0, 0, 0, 903, 906, 1, 0, 0, 0, 904, 902, 1, 0, 0, 0, 904, 905, 1, 0, 0, 0, 905, 917, 1, 0, 0, 0, 906, 904, 1, 0, 0, 0, 907, 910, 3, 81, 33, 0, 908, 910, 3, 75, 30, 0, 909, 907, 1, 0, 0, 0, 909, 908, 1, 0, 0, 0, 910, 912, 1, 0, 0, 0, 911, 913, 3, 83, 34, 0, 912, 911, 1, 0, 0, 0, 913, 914, 1, 0, 0, 0, 914, 912, 1, 0, 0, 0, 914, 915, 1, 0, 0, 0, 915, 917, 1, 0, 0, 0, 916, 900, 1, 0, 0, 0, 916, 909, 1, 0, 0, 0, 917, 170, 1, 0, 0, 0, 918, 920, 3, 77, 31, 0, 919, 921, 3, 79, 32, 0, 920, 919, 1, 0, 0, 0, 921, 922, 1, 0, 0, 0, 922, 920, 1, 0, 0, 0, 922, 923, 1, 0, 0, 0, 923, 924, 1, 0, 0, 0, 924, 925, 3, 77, 31, 0, 925, 172, 1, 0, 0, 0, 926, 927, 3, 171, 78, 0, 927, 174, 1, 0, 0, 0, 928, 929, 3, 57, 21, 0, 929, 930, 1, 0, 0, 0, 930, 931, 6, 80, 10, 0, 931, 176, 1, 0, 0, 0, 932, 933, 3, 59, 22, 0, 933, 934, 1, 0, 0, 0, 934, 935, 6, 81, 10, 0, 935, 178, 1, 0, 
0, 0, 936, 937, 3, 61, 23, 0, 937, 938, 1, 0, 0, 0, 938, 939, 6, 82, 10, 0, 939, 180, 1, 0, 0, 0, 940, 941, 3, 165, 75, 0, 941, 942, 1, 0, 0, 0, 942, 943, 6, 83, 13, 0, 943, 944, 6, 83, 14, 0, 944, 182, 1, 0, 0, 0, 945, 946, 3, 63, 24, 0, 946, 947, 1, 0, 0, 0, 947, 948, 6, 84, 15, 0, 948, 949, 6, 84, 11, 0, 949, 184, 1, 0, 0, 0, 950, 951, 3, 61, 23, 0, 951, 952, 1, 0, 0, 0, 952, 953, 6, 85, 10, 0, 953, 186, 1, 0, 0, 0, 954, 955, 3, 57, 21, 0, 955, 956, 1, 0, 0, 0, 956, 957, 6, 86, 10, 0, 957, 188, 1, 0, 0, 0, 958, 959, 3, 59, 22, 0, 959, 960, 1, 0, 0, 0, 960, 961, 6, 87, 10, 0, 961, 190, 1, 0, 0, 0, 962, 963, 3, 63, 24, 0, 963, 964, 1, 0, 0, 0, 964, 965, 6, 88, 15, 0, 965, 966, 6, 88, 11, 0, 966, 192, 1, 0, 0, 0, 967, 968, 3, 165, 75, 0, 968, 969, 1, 0, 0, 0, 969, 970, 6, 89, 13, 0, 970, 194, 1, 0, 0, 0, 971, 972, 3, 167, 76, 0, 972, 973, 1, 0, 0, 0, 973, 974, 6, 90, 16, 0, 974, 196, 1, 0, 0, 0, 975, 976, 3, 337, 161, 0, 976, 977, 1, 0, 0, 0, 977, 978, 6, 91, 17, 0, 978, 198, 1, 0, 0, 0, 979, 980, 3, 101, 43, 0, 980, 981, 1, 0, 0, 0, 981, 982, 6, 92, 18, 0, 982, 200, 1, 0, 0, 0, 983, 984, 3, 97, 41, 0, 984, 985, 1, 0, 0, 0, 985, 986, 6, 93, 19, 0, 986, 202, 1, 0, 0, 0, 987, 988, 7, 16, 0, 0, 988, 989, 7, 3, 0, 0, 989, 990, 7, 5, 0, 0, 990, 991, 7, 12, 0, 0, 991, 992, 7, 0, 0, 0, 992, 993, 7, 12, 0, 0, 993, 994, 7, 5, 0, 0, 994, 995, 7, 12, 0, 0, 995, 204, 1, 0, 0, 0, 996, 1000, 8, 32, 0, 0, 997, 998, 5, 47, 0, 0, 998, 1000, 8, 33, 0, 0, 999, 996, 1, 0, 0, 0, 999, 997, 1, 0, 0, 0, 1000, 206, 1, 0, 0, 0, 1001, 1003, 3, 205, 95, 0, 1002, 1001, 1, 0, 0, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1002, 1, 0, 0, 0, 1004, 1005, 1, 0, 0, 0, 1005, 208, 1, 0, 0, 0, 1006, 1007, 3, 207, 96, 0, 1007, 1008, 1, 0, 0, 0, 1008, 1009, 6, 97, 20, 0, 1009, 210, 1, 0, 0, 0, 1010, 1011, 3, 85, 35, 0, 1011, 1012, 1, 0, 0, 0, 1012, 1013, 6, 98, 21, 0, 1013, 212, 1, 0, 0, 0, 1014, 1015, 3, 57, 21, 0, 1015, 1016, 1, 0, 0, 0, 1016, 1017, 6, 99, 10, 0, 1017, 214, 1, 0, 0, 0, 1018, 1019, 3, 59, 22, 0, 
1019, 1020, 1, 0, 0, 0, 1020, 1021, 6, 100, 10, 0, 1021, 216, 1, 0, 0, 0, 1022, 1023, 3, 61, 23, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1025, 6, 101, 10, 0, 1025, 218, 1, 0, 0, 0, 1026, 1027, 3, 63, 24, 0, 1027, 1028, 1, 0, 0, 0, 1028, 1029, 6, 102, 15, 0, 1029, 1030, 6, 102, 11, 0, 1030, 220, 1, 0, 0, 0, 1031, 1032, 3, 105, 45, 0, 1032, 1033, 1, 0, 0, 0, 1033, 1034, 6, 103, 22, 0, 1034, 222, 1, 0, 0, 0, 1035, 1036, 3, 101, 43, 0, 1036, 1037, 1, 0, 0, 0, 1037, 1038, 6, 104, 18, 0, 1038, 224, 1, 0, 0, 0, 1039, 1040, 3, 129, 57, 0, 1040, 1041, 1, 0, 0, 0, 1041, 1042, 6, 105, 23, 0, 1042, 226, 1, 0, 0, 0, 1043, 1044, 3, 163, 74, 0, 1044, 1045, 1, 0, 0, 0, 1045, 1046, 6, 106, 24, 0, 1046, 228, 1, 0, 0, 0, 1047, 1052, 3, 67, 26, 0, 1048, 1052, 3, 65, 25, 0, 1049, 1052, 3, 81, 33, 0, 1050, 1052, 3, 155, 70, 0, 1051, 1047, 1, 0, 0, 0, 1051, 1048, 1, 0, 0, 0, 1051, 1049, 1, 0, 0, 0, 1051, 1050, 1, 0, 0, 0, 1052, 230, 1, 0, 0, 0, 1053, 1056, 3, 67, 26, 0, 1054, 1056, 3, 155, 70, 0, 1055, 1053, 1, 0, 0, 0, 1055, 1054, 1, 0, 0, 0, 1056, 1060, 1, 0, 0, 0, 1057, 1059, 3, 229, 107, 0, 1058, 1057, 1, 0, 0, 0, 1059, 1062, 1, 0, 0, 0, 1060, 1058, 1, 0, 0, 0, 1060, 1061, 1, 0, 0, 0, 1061, 1073, 1, 0, 0, 0, 1062, 1060, 1, 0, 0, 0, 1063, 1066, 3, 81, 33, 0, 1064, 1066, 3, 75, 30, 0, 1065, 1063, 1, 0, 0, 0, 1065, 1064, 1, 0, 0, 0, 1066, 1068, 1, 0, 0, 0, 1067, 1069, 3, 229, 107, 0, 1068, 1067, 1, 0, 0, 0, 1069, 1070, 1, 0, 0, 0, 1070, 1068, 1, 0, 0, 0, 1070, 1071, 1, 0, 0, 0, 1071, 1073, 1, 0, 0, 0, 1072, 1055, 1, 0, 0, 0, 1072, 1065, 1, 0, 0, 0, 1073, 232, 1, 0, 0, 0, 1074, 1077, 3, 231, 108, 0, 1075, 1077, 3, 171, 78, 0, 1076, 1074, 1, 0, 0, 0, 1076, 1075, 1, 0, 0, 0, 1077, 1078, 1, 0, 0, 0, 1078, 1076, 1, 0, 0, 0, 1078, 1079, 1, 0, 0, 0, 1079, 234, 1, 0, 0, 0, 1080, 1081, 3, 57, 21, 0, 1081, 1082, 1, 0, 0, 0, 1082, 1083, 6, 110, 10, 0, 1083, 236, 1, 0, 0, 0, 1084, 1085, 3, 59, 22, 0, 1085, 1086, 1, 0, 0, 0, 1086, 1087, 6, 111, 10, 0, 1087, 238, 1, 0, 0, 0, 1088, 1089, 3, 61, 23, 0, 1089, 
1090, 1, 0, 0, 0, 1090, 1091, 6, 112, 10, 0, 1091, 240, 1, 0, 0, 0, 1092, 1093, 3, 63, 24, 0, 1093, 1094, 1, 0, 0, 0, 1094, 1095, 6, 113, 15, 0, 1095, 1096, 6, 113, 11, 0, 1096, 242, 1, 0, 0, 0, 1097, 1098, 3, 97, 41, 0, 1098, 1099, 1, 0, 0, 0, 1099, 1100, 6, 114, 19, 0, 1100, 244, 1, 0, 0, 0, 1101, 1102, 3, 101, 43, 0, 1102, 1103, 1, 0, 0, 0, 1103, 1104, 6, 115, 18, 0, 1104, 246, 1, 0, 0, 0, 1105, 1106, 3, 105, 45, 0, 1106, 1107, 1, 0, 0, 0, 1107, 1108, 6, 116, 22, 0, 1108, 248, 1, 0, 0, 0, 1109, 1110, 3, 129, 57, 0, 1110, 1111, 1, 0, 0, 0, 1111, 1112, 6, 117, 23, 0, 1112, 250, 1, 0, 0, 0, 1113, 1114, 3, 163, 74, 0, 1114, 1115, 1, 0, 0, 0, 1115, 1116, 6, 118, 24, 0, 1116, 252, 1, 0, 0, 0, 1117, 1118, 7, 12, 0, 0, 1118, 1119, 7, 2, 0, 0, 1119, 254, 1, 0, 0, 0, 1120, 1121, 3, 233, 109, 0, 1121, 1122, 1, 0, 0, 0, 1122, 1123, 6, 120, 25, 0, 1123, 256, 1, 0, 0, 0, 1124, 1125, 3, 57, 21, 0, 1125, 1126, 1, 0, 0, 0, 1126, 1127, 6, 121, 10, 0, 1127, 258, 1, 0, 0, 0, 1128, 1129, 3, 59, 22, 0, 1129, 1130, 1, 0, 0, 0, 1130, 1131, 6, 122, 10, 0, 1131, 260, 1, 0, 0, 0, 1132, 1133, 3, 61, 23, 0, 1133, 1134, 1, 0, 0, 0, 1134, 1135, 6, 123, 10, 0, 1135, 262, 1, 0, 0, 0, 1136, 1137, 3, 63, 24, 0, 1137, 1138, 1, 0, 0, 0, 1138, 1139, 6, 124, 15, 0, 1139, 1140, 6, 124, 11, 0, 1140, 264, 1, 0, 0, 0, 1141, 1142, 3, 165, 75, 0, 1142, 1143, 1, 0, 0, 0, 1143, 1144, 6, 125, 13, 0, 1144, 1145, 6, 125, 26, 0, 1145, 266, 1, 0, 0, 0, 1146, 1147, 7, 7, 0, 0, 1147, 1148, 7, 9, 0, 0, 1148, 1149, 1, 0, 0, 0, 1149, 1150, 6, 126, 27, 0, 1150, 268, 1, 0, 0, 0, 1151, 1152, 7, 19, 0, 0, 1152, 1153, 7, 1, 0, 0, 1153, 1154, 7, 5, 0, 0, 1154, 1155, 7, 10, 0, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1157, 6, 127, 27, 0, 1157, 270, 1, 0, 0, 0, 1158, 1159, 8, 34, 0, 0, 1159, 272, 1, 0, 0, 0, 1160, 1162, 3, 271, 128, 0, 1161, 1160, 1, 0, 0, 0, 1162, 1163, 1, 0, 0, 0, 1163, 1161, 1, 0, 0, 0, 1163, 1164, 1, 0, 0, 0, 1164, 1165, 1, 0, 0, 0, 1165, 1166, 3, 337, 161, 0, 1166, 1168, 1, 0, 0, 0, 1167, 1161, 1, 0, 0, 0, 1167, 
1168, 1, 0, 0, 0, 1168, 1170, 1, 0, 0, 0, 1169, 1171, 3, 271, 128, 0, 1170, 1169, 1, 0, 0, 0, 1171, 1172, 1, 0, 0, 0, 1172, 1170, 1, 0, 0, 0, 1172, 1173, 1, 0, 0, 0, 1173, 274, 1, 0, 0, 0, 1174, 1175, 3, 273, 129, 0, 1175, 1176, 1, 0, 0, 0, 1176, 1177, 6, 130, 28, 0, 1177, 276, 1, 0, 0, 0, 1178, 1179, 3, 57, 21, 0, 1179, 1180, 1, 0, 0, 0, 1180, 1181, 6, 131, 10, 0, 1181, 278, 1, 0, 0, 0, 1182, 1183, 3, 59, 22, 0, 1183, 1184, 1, 0, 0, 0, 1184, 1185, 6, 132, 10, 0, 1185, 280, 1, 0, 0, 0, 1186, 1187, 3, 61, 23, 0, 1187, 1188, 1, 0, 0, 0, 1188, 1189, 6, 133, 10, 0, 1189, 282, 1, 0, 0, 0, 1190, 1191, 3, 63, 24, 0, 1191, 1192, 1, 0, 0, 0, 1192, 1193, 6, 134, 15, 0, 1193, 1194, 6, 134, 11, 0, 1194, 1195, 6, 134, 11, 0, 1195, 284, 1, 0, 0, 0, 1196, 1197, 3, 97, 41, 0, 1197, 1198, 1, 0, 0, 0, 1198, 1199, 6, 135, 19, 0, 1199, 286, 1, 0, 0, 0, 1200, 1201, 3, 101, 43, 0, 1201, 1202, 1, 0, 0, 0, 1202, 1203, 6, 136, 18, 0, 1203, 288, 1, 0, 0, 0, 1204, 1205, 3, 105, 45, 0, 1205, 1206, 1, 0, 0, 0, 1206, 1207, 6, 137, 22, 0, 1207, 290, 1, 0, 0, 0, 1208, 1209, 3, 269, 127, 0, 1209, 1210, 1, 0, 0, 0, 1210, 1211, 6, 138, 29, 0, 1211, 292, 1, 0, 0, 0, 1212, 1213, 3, 233, 109, 0, 1213, 1214, 1, 0, 0, 0, 1214, 1215, 6, 139, 25, 0, 1215, 294, 1, 0, 0, 0, 1216, 1217, 3, 173, 79, 0, 1217, 1218, 1, 0, 0, 0, 1218, 1219, 6, 140, 30, 0, 1219, 296, 1, 0, 0, 0, 1220, 1221, 3, 129, 57, 0, 1221, 1222, 1, 0, 0, 0, 1222, 1223, 6, 141, 23, 0, 1223, 298, 1, 0, 0, 0, 1224, 1225, 3, 163, 74, 0, 1225, 1226, 1, 0, 0, 0, 1226, 1227, 6, 142, 24, 0, 1227, 300, 1, 0, 0, 0, 1228, 1229, 3, 57, 21, 0, 1229, 1230, 1, 0, 0, 0, 1230, 1231, 6, 143, 10, 0, 1231, 302, 1, 0, 0, 0, 1232, 1233, 3, 59, 22, 0, 1233, 1234, 1, 0, 0, 0, 1234, 1235, 6, 144, 10, 0, 1235, 304, 1, 0, 0, 0, 1236, 1237, 3, 61, 23, 0, 1237, 1238, 1, 0, 0, 0, 1238, 1239, 6, 145, 10, 0, 1239, 306, 1, 0, 0, 0, 1240, 1241, 3, 63, 24, 0, 1241, 1242, 1, 0, 0, 0, 1242, 1243, 6, 146, 15, 0, 1243, 1244, 6, 146, 11, 0, 1244, 308, 1, 0, 0, 0, 1245, 1246, 3, 
105, 45, 0, 1246, 1247, 1, 0, 0, 0, 1247, 1248, 6, 147, 22, 0, 1248, 310, 1, 0, 0, 0, 1249, 1250, 3, 129, 57, 0, 1250, 1251, 1, 0, 0, 0, 1251, 1252, 6, 148, 23, 0, 1252, 312, 1, 0, 0, 0, 1253, 1254, 3, 163, 74, 0, 1254, 1255, 1, 0, 0, 0, 1255, 1256, 6, 149, 24, 0, 1256, 314, 1, 0, 0, 0, 1257, 1258, 3, 173, 79, 0, 1258, 1259, 1, 0, 0, 0, 1259, 1260, 6, 150, 30, 0, 1260, 316, 1, 0, 0, 0, 1261, 1262, 3, 169, 77, 0, 1262, 1263, 1, 0, 0, 0, 1263, 1264, 6, 151, 31, 0, 1264, 318, 1, 0, 0, 0, 1265, 1266, 3, 57, 21, 0, 1266, 1267, 1, 0, 0, 0, 1267, 1268, 6, 152, 10, 0, 1268, 320, 1, 0, 0, 0, 1269, 1270, 3, 59, 22, 0, 1270, 1271, 1, 0, 0, 0, 1271, 1272, 6, 153, 10, 0, 1272, 322, 1, 0, 0, 0, 1273, 1274, 3, 61, 23, 0, 1274, 1275, 1, 0, 0, 0, 1275, 1276, 6, 154, 10, 0, 1276, 324, 1, 0, 0, 0, 1277, 1278, 3, 63, 24, 0, 1278, 1279, 1, 0, 0, 0, 1279, 1280, 6, 155, 15, 0, 1280, 1281, 6, 155, 11, 0, 1281, 326, 1, 0, 0, 0, 1282, 1283, 7, 1, 0, 0, 1283, 1284, 7, 9, 0, 0, 1284, 1285, 7, 15, 0, 0, 1285, 1286, 7, 7, 0, 0, 1286, 328, 1, 0, 0, 0, 1287, 1288, 3, 57, 21, 0, 1288, 1289, 1, 0, 0, 0, 1289, 1290, 6, 157, 10, 0, 1290, 330, 1, 0, 0, 0, 1291, 1292, 3, 59, 22, 0, 1292, 1293, 1, 0, 0, 0, 1293, 1294, 6, 158, 10, 0, 1294, 332, 1, 0, 0, 0, 1295, 1296, 3, 61, 23, 0, 1296, 1297, 1, 0, 0, 0, 1297, 1298, 6, 159, 10, 0, 1298, 334, 1, 0, 0, 0, 1299, 1300, 3, 167, 76, 0, 1300, 1301, 1, 0, 0, 0, 1301, 1302, 6, 160, 16, 0, 1302, 1303, 6, 160, 11, 0, 1303, 336, 1, 0, 0, 0, 1304, 1305, 5, 58, 0, 0, 1305, 338, 1, 0, 0, 0, 1306, 1312, 3, 75, 30, 0, 1307, 1312, 3, 65, 25, 0, 1308, 1312, 3, 105, 45, 0, 1309, 1312, 3, 67, 26, 0, 1310, 1312, 3, 81, 33, 0, 1311, 1306, 1, 0, 0, 0, 1311, 1307, 1, 0, 0, 0, 1311, 1308, 1, 0, 0, 0, 1311, 1309, 1, 0, 0, 0, 1311, 1310, 1, 0, 0, 0, 1312, 1313, 1, 0, 0, 0, 1313, 1311, 1, 0, 0, 0, 1313, 1314, 1, 0, 0, 0, 1314, 340, 1, 0, 0, 0, 1315, 1316, 3, 57, 21, 0, 1316, 1317, 1, 0, 0, 0, 1317, 1318, 6, 163, 10, 0, 1318, 342, 1, 0, 0, 0, 1319, 1320, 3, 59, 22, 0, 1320, 1321, 1, 
0, 0, 0, 1321, 1322, 6, 164, 10, 0, 1322, 344, 1, 0, 0, 0, 1323, 1324, 3, 61, 23, 0, 1324, 1325, 1, 0, 0, 0, 1325, 1326, 6, 165, 10, 0, 1326, 346, 1, 0, 0, 0, 1327, 1328, 3, 63, 24, 0, 1328, 1329, 1, 0, 0, 0, 1329, 1330, 6, 166, 15, 0, 1330, 1331, 6, 166, 11, 0, 1331, 348, 1, 0, 0, 0, 1332, 1333, 3, 337, 161, 0, 1333, 1334, 1, 0, 0, 0, 1334, 1335, 6, 167, 17, 0, 1335, 350, 1, 0, 0, 0, 1336, 1337, 3, 101, 43, 0, 1337, 1338, 1, 0, 0, 0, 1338, 1339, 6, 168, 18, 0, 1339, 352, 1, 0, 0, 0, 1340, 1341, 3, 105, 45, 0, 1341, 1342, 1, 0, 0, 0, 1342, 1343, 6, 169, 22, 0, 1343, 354, 1, 0, 0, 0, 1344, 1345, 3, 267, 126, 0, 1345, 1346, 1, 0, 0, 0, 1346, 1347, 6, 170, 32, 0, 1347, 1348, 6, 170, 33, 0, 1348, 356, 1, 0, 0, 0, 1349, 1350, 3, 207, 96, 0, 1350, 1351, 1, 0, 0, 0, 1351, 1352, 6, 171, 20, 0, 1352, 358, 1, 0, 0, 0, 1353, 1354, 3, 85, 35, 0, 1354, 1355, 1, 0, 0, 0, 1355, 1356, 6, 172, 21, 0, 1356, 360, 1, 0, 0, 0, 1357, 1358, 3, 57, 21, 0, 1358, 1359, 1, 0, 0, 0, 1359, 1360, 6, 173, 10, 0, 1360, 362, 1, 0, 0, 0, 1361, 1362, 3, 59, 22, 0, 1362, 1363, 1, 0, 0, 0, 1363, 1364, 6, 174, 10, 0, 1364, 364, 1, 0, 0, 0, 1365, 1366, 3, 61, 23, 0, 1366, 1367, 1, 0, 0, 0, 1367, 1368, 6, 175, 10, 0, 1368, 366, 1, 0, 0, 0, 1369, 1370, 3, 63, 24, 0, 1370, 1371, 1, 0, 0, 0, 1371, 1372, 6, 176, 15, 0, 1372, 1373, 6, 176, 11, 0, 1373, 1374, 6, 176, 11, 0, 1374, 368, 1, 0, 0, 0, 1375, 1376, 3, 101, 43, 0, 1376, 1377, 1, 0, 0, 0, 1377, 1378, 6, 177, 18, 0, 1378, 370, 1, 0, 0, 0, 1379, 1380, 3, 105, 45, 0, 1380, 1381, 1, 0, 0, 0, 1381, 1382, 6, 178, 22, 0, 1382, 372, 1, 0, 0, 0, 1383, 1384, 3, 233, 109, 0, 1384, 1385, 1, 0, 0, 0, 1385, 1386, 6, 179, 25, 0, 1386, 374, 1, 0, 0, 0, 1387, 1388, 3, 57, 21, 0, 1388, 1389, 1, 0, 0, 0, 1389, 1390, 6, 180, 10, 0, 1390, 376, 1, 0, 0, 0, 1391, 1392, 3, 59, 22, 0, 1392, 1393, 1, 0, 0, 0, 1393, 1394, 6, 181, 10, 0, 1394, 378, 1, 0, 0, 0, 1395, 1396, 3, 61, 23, 0, 1396, 1397, 1, 0, 0, 0, 1397, 1398, 6, 182, 10, 0, 1398, 380, 1, 0, 0, 0, 1399, 1400, 3, 63, 
24, 0, 1400, 1401, 1, 0, 0, 0, 1401, 1402, 6, 183, 15, 0, 1402, 1403, 6, 183, 11, 0, 1403, 382, 1, 0, 0, 0, 1404, 1405, 3, 207, 96, 0, 1405, 1406, 1, 0, 0, 0, 1406, 1407, 6, 184, 20, 0, 1407, 1408, 6, 184, 11, 0, 1408, 1409, 6, 184, 34, 0, 1409, 384, 1, 0, 0, 0, 1410, 1411, 3, 85, 35, 0, 1411, 1412, 1, 0, 0, 0, 1412, 1413, 6, 185, 21, 0, 1413, 1414, 6, 185, 11, 0, 1414, 1415, 6, 185, 34, 0, 1415, 386, 1, 0, 0, 0, 1416, 1417, 3, 57, 21, 0, 1417, 1418, 1, 0, 0, 0, 1418, 1419, 6, 186, 10, 0, 1419, 388, 1, 0, 0, 0, 1420, 1421, 3, 59, 22, 0, 1421, 1422, 1, 0, 0, 0, 1422, 1423, 6, 187, 10, 0, 1423, 390, 1, 0, 0, 0, 1424, 1425, 3, 61, 23, 0, 1425, 1426, 1, 0, 0, 0, 1426, 1427, 6, 188, 10, 0, 1427, 392, 1, 0, 0, 0, 1428, 1429, 3, 337, 161, 0, 1429, 1430, 1, 0, 0, 0, 1430, 1431, 6, 189, 17, 0, 1431, 1432, 6, 189, 11, 0, 1432, 1433, 6, 189, 9, 0, 1433, 394, 1, 0, 0, 0, 1434, 1435, 3, 101, 43, 0, 1435, 1436, 1, 0, 0, 0, 1436, 1437, 6, 190, 18, 0, 1437, 1438, 6, 190, 11, 0, 1438, 1439, 6, 190, 9, 0, 1439, 396, 1, 0, 0, 0, 1440, 1441, 3, 57, 21, 0, 1441, 1442, 1, 0, 0, 0, 1442, 1443, 6, 191, 10, 0, 1443, 398, 1, 0, 0, 0, 1444, 1445, 3, 59, 22, 0, 1445, 1446, 1, 0, 0, 0, 1446, 1447, 6, 192, 10, 0, 1447, 400, 1, 0, 0, 0, 1448, 1449, 3, 61, 23, 0, 1449, 1450, 1, 0, 0, 0, 1450, 1451, 6, 193, 10, 0, 1451, 402, 1, 0, 0, 0, 1452, 1453, 3, 173, 79, 0, 1453, 1454, 1, 0, 0, 0, 1454, 1455, 6, 194, 11, 0, 1455, 1456, 6, 194, 0, 0, 1456, 1457, 6, 194, 30, 0, 1457, 404, 1, 0, 0, 0, 1458, 1459, 3, 169, 77, 0, 1459, 1460, 1, 0, 0, 0, 1460, 1461, 6, 195, 11, 0, 1461, 1462, 6, 195, 0, 0, 1462, 1463, 6, 195, 31, 0, 1463, 406, 1, 0, 0, 0, 1464, 1465, 3, 91, 38, 0, 1465, 1466, 1, 0, 0, 0, 1466, 1467, 6, 196, 11, 0, 1467, 1468, 6, 196, 0, 0, 1468, 1469, 6, 196, 35, 0, 1469, 408, 1, 0, 0, 0, 1470, 1471, 3, 63, 24, 0, 1471, 1472, 1, 0, 0, 0, 1472, 1473, 6, 197, 15, 0, 1473, 1474, 6, 197, 11, 0, 1474, 410, 1, 0, 0, 0, 65, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 588, 598, 602, 605, 614, 616, 
627, 646, 651, 660, 667, 672, 674, 685, 693, 696, 698, 703, 708, 714, 721, 726, 732, 735, 743, 747, 874, 879, 886, 888, 904, 909, 914, 916, 922, 999, 1004, 1051, 1055, 1060, 1065, 1070, 1072, 1076, 1078, 1163, 1167, 1172, 1311, 1313, 36, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 19, 0, 7, 65, 0, 5, 0, 0, 7, 25, 0, 7, 66, 0, 7, 104, 0, 7, 34, 0, 7, 32, 0, 7, 76, 0, 7, 26, 0, 7, 36, 0, 7, 48, 0, 7, 64, 0, 7, 80, 0, 5, 10, 0, 5, 7, 0, 7, 90, 0, 7, 89, 0, 7, 68, 0, 7, 67, 0, 7, 88, 0, 5, 12, 0, 5, 14, 0, 7, 29, 0] \ No newline at end of file +[4, 0, 120, 1466, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 
7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 
6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 4, 19, 576, 8, 19, 11, 19, 12, 19, 577, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 586, 8, 20, 10, 20, 12, 20, 589, 9, 20, 1, 20, 3, 20, 592, 8, 20, 1, 20, 3, 20, 595, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 604, 8, 21, 10, 21, 12, 21, 607, 9, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 4, 22, 615, 8, 22, 11, 22, 12, 22, 616, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 28, 1, 28, 3, 28, 636, 8, 28, 1, 28, 4, 28, 639, 8, 28, 11, 28, 12, 28, 640, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 3, 31, 650, 8, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 657, 8, 33, 1, 34, 1, 34, 1, 34, 5, 34, 662, 8, 34, 10, 34, 12, 34, 665, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 673, 8, 34, 10, 34, 12, 34, 676, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 3, 34, 683, 8, 34, 1, 34, 3, 34, 686, 8, 34, 3, 34, 688, 8, 34, 1, 35, 4, 35, 691, 8, 35, 11, 35, 12, 35, 692, 1, 36, 4, 36, 696, 8, 36, 11, 36, 12, 36, 697, 1, 36, 1, 36, 5, 36, 702, 8, 36, 10, 36, 12, 36, 705, 9, 36, 1, 36, 1, 36, 4, 36, 709, 8, 36, 11, 36, 12, 36, 710, 1, 36, 4, 36, 714, 8, 36, 11, 36, 12, 36, 715, 1, 36, 1, 36, 5, 36, 720, 8, 36, 10, 36, 12, 36, 723, 9, 36, 3, 36, 
725, 8, 36, 1, 36, 1, 36, 1, 36, 1, 36, 4, 36, 731, 8, 36, 11, 36, 12, 36, 732, 1, 36, 1, 36, 3, 36, 737, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 3, 73, 866, 8, 73, 1, 73, 5, 73, 869, 8, 73, 10, 73, 12, 73, 872, 9, 73, 1, 73, 1, 73, 4, 73, 876, 8, 73, 11, 73, 12, 73, 877, 3, 73, 880, 8, 73, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 5, 76, 894, 8, 76, 10, 76, 12, 76, 897, 9, 76, 1, 76, 1, 76, 3, 76, 901, 8, 76, 1, 76, 4, 76, 904, 8, 76, 11, 76, 12, 76, 905, 3, 76, 908, 8, 76, 1, 77, 1, 77, 4, 77, 912, 8, 77, 11, 77, 12, 77, 913, 1, 77, 1, 77, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 3, 94, 991, 8, 94, 1, 95, 4, 95, 994, 8, 
95, 11, 95, 12, 95, 995, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 3, 106, 1043, 8, 106, 1, 107, 1, 107, 3, 107, 1047, 8, 107, 1, 107, 5, 107, 1050, 8, 107, 10, 107, 12, 107, 1053, 9, 107, 1, 107, 1, 107, 3, 107, 1057, 8, 107, 1, 107, 4, 107, 1060, 8, 107, 11, 107, 12, 107, 1061, 3, 107, 1064, 8, 107, 1, 108, 1, 108, 4, 108, 1068, 8, 108, 11, 108, 12, 108, 1069, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 128, 4, 128, 1153, 8, 128, 11, 128, 12, 128, 1154, 1, 128, 1, 128, 3, 128, 1159, 8, 128, 1, 128, 4, 128, 1162, 8, 128, 11, 128, 12, 128, 1163, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 
145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 4, 161, 1303, 8, 161, 11, 161, 12, 161, 1304, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 2, 605, 674, 0, 197, 15, 1, 17, 2, 19, 3, 21, 4, 23, 5, 25, 6, 27, 7, 29, 8, 31, 9, 33, 10, 35, 11, 37, 12, 39, 13, 41, 14, 
43, 15, 45, 16, 47, 17, 49, 18, 51, 19, 53, 20, 55, 21, 57, 22, 59, 23, 61, 24, 63, 0, 65, 0, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 0, 81, 0, 83, 25, 85, 26, 87, 27, 89, 28, 91, 29, 93, 30, 95, 31, 97, 32, 99, 33, 101, 34, 103, 35, 105, 36, 107, 37, 109, 38, 111, 39, 113, 40, 115, 41, 117, 42, 119, 43, 121, 44, 123, 45, 125, 46, 127, 47, 129, 48, 131, 49, 133, 50, 135, 51, 137, 52, 139, 53, 141, 54, 143, 55, 145, 56, 147, 57, 149, 58, 151, 59, 153, 60, 155, 61, 157, 62, 159, 63, 161, 64, 163, 65, 165, 66, 167, 67, 169, 0, 171, 68, 173, 69, 175, 70, 177, 71, 179, 0, 181, 0, 183, 72, 185, 73, 187, 74, 189, 0, 191, 0, 193, 0, 195, 0, 197, 0, 199, 0, 201, 75, 203, 0, 205, 76, 207, 0, 209, 0, 211, 77, 213, 78, 215, 79, 217, 0, 219, 0, 221, 0, 223, 0, 225, 0, 227, 0, 229, 0, 231, 80, 233, 81, 235, 82, 237, 83, 239, 0, 241, 0, 243, 0, 245, 0, 247, 0, 249, 0, 251, 84, 253, 0, 255, 85, 257, 86, 259, 87, 261, 0, 263, 0, 265, 88, 267, 89, 269, 0, 271, 90, 273, 0, 275, 91, 277, 92, 279, 93, 281, 0, 283, 0, 285, 0, 287, 0, 289, 0, 291, 0, 293, 0, 295, 0, 297, 0, 299, 94, 301, 95, 303, 96, 305, 0, 307, 0, 309, 0, 311, 0, 313, 0, 315, 0, 317, 97, 319, 98, 321, 99, 323, 0, 325, 100, 327, 101, 329, 102, 331, 103, 333, 0, 335, 104, 337, 105, 339, 106, 341, 107, 343, 108, 345, 0, 347, 0, 349, 0, 351, 0, 353, 0, 355, 0, 357, 0, 359, 109, 361, 110, 363, 111, 365, 0, 367, 0, 369, 0, 371, 0, 373, 112, 375, 113, 377, 114, 379, 0, 381, 0, 383, 0, 385, 115, 387, 116, 389, 117, 391, 0, 393, 0, 395, 118, 397, 119, 399, 120, 401, 0, 403, 0, 405, 0, 407, 0, 15, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 35, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 
2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1494, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 1, 61, 1, 0, 0, 0, 1, 83, 1, 0, 0, 0, 1, 85, 1, 0, 0, 0, 1, 87, 1, 0, 0, 0, 1, 89, 1, 0, 0, 0, 1, 91, 1, 0, 0, 0, 1, 93, 1, 0, 0, 0, 1, 95, 1, 0, 0, 0, 1, 97, 1, 0, 0, 0, 1, 99, 1, 0, 0, 0, 1, 101, 1, 0, 0, 0, 1, 103, 1, 0, 0, 0, 1, 105, 1, 0, 0, 0, 1, 107, 1, 0, 0, 0, 1, 109, 1, 0, 0, 0, 1, 111, 1, 0, 0, 0, 1, 113, 1, 0, 0, 0, 1, 115, 1, 0, 0, 0, 1, 117, 1, 0, 0, 0, 1, 119, 1, 0, 0, 0, 1, 121, 1, 0, 0, 0, 1, 123, 1, 0, 0, 0, 1, 125, 1, 0, 0, 0, 1, 127, 1, 0, 0, 0, 1, 129, 1, 0, 0, 0, 1, 131, 1, 0, 0, 0, 1, 133, 1, 0, 0, 0, 1, 135, 1, 0, 0, 0, 1, 137, 1, 0, 0, 0, 1, 139, 1, 0, 0, 0, 1, 141, 1, 0, 0, 0, 1, 143, 1, 0, 0, 0, 1, 145, 1, 0, 0, 0, 1, 147, 1, 0, 0, 0, 1, 149, 1, 0, 0, 0, 1, 151, 1, 0, 0, 0, 1, 153, 1, 0, 0, 0, 1, 155, 1, 0, 0, 0, 1, 157, 1, 0, 0, 0, 1, 159, 1, 0, 0, 0, 1, 161, 1, 0, 0, 0, 1, 163, 1, 0, 0, 0, 1, 165, 1, 0, 0, 0, 1, 167, 1, 0, 0, 0, 1, 171, 1, 0, 
0, 0, 1, 173, 1, 0, 0, 0, 1, 175, 1, 0, 0, 0, 1, 177, 1, 0, 0, 0, 2, 179, 1, 0, 0, 0, 2, 181, 1, 0, 0, 0, 2, 183, 1, 0, 0, 0, 2, 185, 1, 0, 0, 0, 2, 187, 1, 0, 0, 0, 3, 189, 1, 0, 0, 0, 3, 191, 1, 0, 0, 0, 3, 193, 1, 0, 0, 0, 3, 195, 1, 0, 0, 0, 3, 197, 1, 0, 0, 0, 3, 199, 1, 0, 0, 0, 3, 201, 1, 0, 0, 0, 3, 205, 1, 0, 0, 0, 3, 207, 1, 0, 0, 0, 3, 209, 1, 0, 0, 0, 3, 211, 1, 0, 0, 0, 3, 213, 1, 0, 0, 0, 3, 215, 1, 0, 0, 0, 4, 217, 1, 0, 0, 0, 4, 219, 1, 0, 0, 0, 4, 221, 1, 0, 0, 0, 4, 223, 1, 0, 0, 0, 4, 225, 1, 0, 0, 0, 4, 231, 1, 0, 0, 0, 4, 233, 1, 0, 0, 0, 4, 235, 1, 0, 0, 0, 4, 237, 1, 0, 0, 0, 5, 239, 1, 0, 0, 0, 5, 241, 1, 0, 0, 0, 5, 243, 1, 0, 0, 0, 5, 245, 1, 0, 0, 0, 5, 247, 1, 0, 0, 0, 5, 249, 1, 0, 0, 0, 5, 251, 1, 0, 0, 0, 5, 253, 1, 0, 0, 0, 5, 255, 1, 0, 0, 0, 5, 257, 1, 0, 0, 0, 5, 259, 1, 0, 0, 0, 6, 261, 1, 0, 0, 0, 6, 263, 1, 0, 0, 0, 6, 265, 1, 0, 0, 0, 6, 267, 1, 0, 0, 0, 6, 271, 1, 0, 0, 0, 6, 273, 1, 0, 0, 0, 6, 275, 1, 0, 0, 0, 6, 277, 1, 0, 0, 0, 6, 279, 1, 0, 0, 0, 7, 281, 1, 0, 0, 0, 7, 283, 1, 0, 0, 0, 7, 285, 1, 0, 0, 0, 7, 287, 1, 0, 0, 0, 7, 289, 1, 0, 0, 0, 7, 291, 1, 0, 0, 0, 7, 293, 1, 0, 0, 0, 7, 295, 1, 0, 0, 0, 7, 297, 1, 0, 0, 0, 7, 299, 1, 0, 0, 0, 7, 301, 1, 0, 0, 0, 7, 303, 1, 0, 0, 0, 8, 305, 1, 0, 0, 0, 8, 307, 1, 0, 0, 0, 8, 309, 1, 0, 0, 0, 8, 311, 1, 0, 0, 0, 8, 313, 1, 0, 0, 0, 8, 315, 1, 0, 0, 0, 8, 317, 1, 0, 0, 0, 8, 319, 1, 0, 0, 0, 8, 321, 1, 0, 0, 0, 9, 323, 1, 0, 0, 0, 9, 325, 1, 0, 0, 0, 9, 327, 1, 0, 0, 0, 9, 329, 1, 0, 0, 0, 9, 331, 1, 0, 0, 0, 10, 333, 1, 0, 0, 0, 10, 335, 1, 0, 0, 0, 10, 337, 1, 0, 0, 0, 10, 339, 1, 0, 0, 0, 10, 341, 1, 0, 0, 0, 10, 343, 1, 0, 0, 0, 11, 345, 1, 0, 0, 0, 11, 347, 1, 0, 0, 0, 11, 349, 1, 0, 0, 0, 11, 351, 1, 0, 0, 0, 11, 353, 1, 0, 0, 0, 11, 355, 1, 0, 0, 0, 11, 357, 1, 0, 0, 0, 11, 359, 1, 0, 0, 0, 11, 361, 1, 0, 0, 0, 11, 363, 1, 0, 0, 0, 12, 365, 1, 0, 0, 0, 12, 367, 1, 0, 0, 0, 12, 369, 1, 0, 0, 0, 12, 371, 1, 0, 0, 0, 12, 373, 1, 0, 0, 0, 12, 375, 1, 0, 0, 0, 12, 377, 1, 
0, 0, 0, 13, 379, 1, 0, 0, 0, 13, 381, 1, 0, 0, 0, 13, 383, 1, 0, 0, 0, 13, 385, 1, 0, 0, 0, 13, 387, 1, 0, 0, 0, 13, 389, 1, 0, 0, 0, 14, 391, 1, 0, 0, 0, 14, 393, 1, 0, 0, 0, 14, 395, 1, 0, 0, 0, 14, 397, 1, 0, 0, 0, 14, 399, 1, 0, 0, 0, 14, 401, 1, 0, 0, 0, 14, 403, 1, 0, 0, 0, 14, 405, 1, 0, 0, 0, 14, 407, 1, 0, 0, 0, 15, 409, 1, 0, 0, 0, 17, 419, 1, 0, 0, 0, 19, 426, 1, 0, 0, 0, 21, 435, 1, 0, 0, 0, 23, 442, 1, 0, 0, 0, 25, 452, 1, 0, 0, 0, 27, 459, 1, 0, 0, 0, 29, 466, 1, 0, 0, 0, 31, 473, 1, 0, 0, 0, 33, 481, 1, 0, 0, 0, 35, 493, 1, 0, 0, 0, 37, 502, 1, 0, 0, 0, 39, 508, 1, 0, 0, 0, 41, 515, 1, 0, 0, 0, 43, 522, 1, 0, 0, 0, 45, 530, 1, 0, 0, 0, 47, 538, 1, 0, 0, 0, 49, 553, 1, 0, 0, 0, 51, 563, 1, 0, 0, 0, 53, 575, 1, 0, 0, 0, 55, 581, 1, 0, 0, 0, 57, 598, 1, 0, 0, 0, 59, 614, 1, 0, 0, 0, 61, 620, 1, 0, 0, 0, 63, 624, 1, 0, 0, 0, 65, 626, 1, 0, 0, 0, 67, 628, 1, 0, 0, 0, 69, 631, 1, 0, 0, 0, 71, 633, 1, 0, 0, 0, 73, 642, 1, 0, 0, 0, 75, 644, 1, 0, 0, 0, 77, 649, 1, 0, 0, 0, 79, 651, 1, 0, 0, 0, 81, 656, 1, 0, 0, 0, 83, 687, 1, 0, 0, 0, 85, 690, 1, 0, 0, 0, 87, 736, 1, 0, 0, 0, 89, 738, 1, 0, 0, 0, 91, 741, 1, 0, 0, 0, 93, 745, 1, 0, 0, 0, 95, 749, 1, 0, 0, 0, 97, 751, 1, 0, 0, 0, 99, 754, 1, 0, 0, 0, 101, 756, 1, 0, 0, 0, 103, 761, 1, 0, 0, 0, 105, 763, 1, 0, 0, 0, 107, 769, 1, 0, 0, 0, 109, 775, 1, 0, 0, 0, 111, 778, 1, 0, 0, 0, 113, 781, 1, 0, 0, 0, 115, 786, 1, 0, 0, 0, 117, 791, 1, 0, 0, 0, 119, 793, 1, 0, 0, 0, 121, 797, 1, 0, 0, 0, 123, 802, 1, 0, 0, 0, 125, 808, 1, 0, 0, 0, 127, 811, 1, 0, 0, 0, 129, 813, 1, 0, 0, 0, 131, 819, 1, 0, 0, 0, 133, 821, 1, 0, 0, 0, 135, 826, 1, 0, 0, 0, 137, 829, 1, 0, 0, 0, 139, 832, 1, 0, 0, 0, 141, 835, 1, 0, 0, 0, 143, 837, 1, 0, 0, 0, 145, 840, 1, 0, 0, 0, 147, 842, 1, 0, 0, 0, 149, 845, 1, 0, 0, 0, 151, 847, 1, 0, 0, 0, 153, 849, 1, 0, 0, 0, 155, 851, 1, 0, 0, 0, 157, 853, 1, 0, 0, 0, 159, 855, 1, 0, 0, 0, 161, 879, 1, 0, 0, 0, 163, 881, 1, 0, 0, 0, 165, 886, 1, 0, 0, 0, 167, 907, 1, 0, 0, 0, 169, 909, 1, 0, 0, 0, 
171, 917, 1, 0, 0, 0, 173, 919, 1, 0, 0, 0, 175, 923, 1, 0, 0, 0, 177, 927, 1, 0, 0, 0, 179, 931, 1, 0, 0, 0, 181, 936, 1, 0, 0, 0, 183, 941, 1, 0, 0, 0, 185, 945, 1, 0, 0, 0, 187, 949, 1, 0, 0, 0, 189, 953, 1, 0, 0, 0, 191, 958, 1, 0, 0, 0, 193, 962, 1, 0, 0, 0, 195, 966, 1, 0, 0, 0, 197, 970, 1, 0, 0, 0, 199, 974, 1, 0, 0, 0, 201, 978, 1, 0, 0, 0, 203, 990, 1, 0, 0, 0, 205, 993, 1, 0, 0, 0, 207, 997, 1, 0, 0, 0, 209, 1001, 1, 0, 0, 0, 211, 1005, 1, 0, 0, 0, 213, 1009, 1, 0, 0, 0, 215, 1013, 1, 0, 0, 0, 217, 1017, 1, 0, 0, 0, 219, 1022, 1, 0, 0, 0, 221, 1026, 1, 0, 0, 0, 223, 1030, 1, 0, 0, 0, 225, 1034, 1, 0, 0, 0, 227, 1042, 1, 0, 0, 0, 229, 1063, 1, 0, 0, 0, 231, 1067, 1, 0, 0, 0, 233, 1071, 1, 0, 0, 0, 235, 1075, 1, 0, 0, 0, 237, 1079, 1, 0, 0, 0, 239, 1083, 1, 0, 0, 0, 241, 1088, 1, 0, 0, 0, 243, 1092, 1, 0, 0, 0, 245, 1096, 1, 0, 0, 0, 247, 1100, 1, 0, 0, 0, 249, 1104, 1, 0, 0, 0, 251, 1108, 1, 0, 0, 0, 253, 1111, 1, 0, 0, 0, 255, 1115, 1, 0, 0, 0, 257, 1119, 1, 0, 0, 0, 259, 1123, 1, 0, 0, 0, 261, 1127, 1, 0, 0, 0, 263, 1132, 1, 0, 0, 0, 265, 1137, 1, 0, 0, 0, 267, 1142, 1, 0, 0, 0, 269, 1149, 1, 0, 0, 0, 271, 1158, 1, 0, 0, 0, 273, 1165, 1, 0, 0, 0, 275, 1169, 1, 0, 0, 0, 277, 1173, 1, 0, 0, 0, 279, 1177, 1, 0, 0, 0, 281, 1181, 1, 0, 0, 0, 283, 1187, 1, 0, 0, 0, 285, 1191, 1, 0, 0, 0, 287, 1195, 1, 0, 0, 0, 289, 1199, 1, 0, 0, 0, 291, 1203, 1, 0, 0, 0, 293, 1207, 1, 0, 0, 0, 295, 1211, 1, 0, 0, 0, 297, 1215, 1, 0, 0, 0, 299, 1219, 1, 0, 0, 0, 301, 1223, 1, 0, 0, 0, 303, 1227, 1, 0, 0, 0, 305, 1231, 1, 0, 0, 0, 307, 1236, 1, 0, 0, 0, 309, 1240, 1, 0, 0, 0, 311, 1244, 1, 0, 0, 0, 313, 1248, 1, 0, 0, 0, 315, 1252, 1, 0, 0, 0, 317, 1256, 1, 0, 0, 0, 319, 1260, 1, 0, 0, 0, 321, 1264, 1, 0, 0, 0, 323, 1268, 1, 0, 0, 0, 325, 1273, 1, 0, 0, 0, 327, 1278, 1, 0, 0, 0, 329, 1282, 1, 0, 0, 0, 331, 1286, 1, 0, 0, 0, 333, 1290, 1, 0, 0, 0, 335, 1295, 1, 0, 0, 0, 337, 1302, 1, 0, 0, 0, 339, 1306, 1, 0, 0, 0, 341, 1310, 1, 0, 0, 0, 343, 1314, 1, 0, 0, 0, 345, 1318, 1, 0, 
0, 0, 347, 1323, 1, 0, 0, 0, 349, 1327, 1, 0, 0, 0, 351, 1331, 1, 0, 0, 0, 353, 1335, 1, 0, 0, 0, 355, 1340, 1, 0, 0, 0, 357, 1344, 1, 0, 0, 0, 359, 1348, 1, 0, 0, 0, 361, 1352, 1, 0, 0, 0, 363, 1356, 1, 0, 0, 0, 365, 1360, 1, 0, 0, 0, 367, 1366, 1, 0, 0, 0, 369, 1370, 1, 0, 0, 0, 371, 1374, 1, 0, 0, 0, 373, 1378, 1, 0, 0, 0, 375, 1382, 1, 0, 0, 0, 377, 1386, 1, 0, 0, 0, 379, 1390, 1, 0, 0, 0, 381, 1395, 1, 0, 0, 0, 383, 1401, 1, 0, 0, 0, 385, 1407, 1, 0, 0, 0, 387, 1411, 1, 0, 0, 0, 389, 1415, 1, 0, 0, 0, 391, 1419, 1, 0, 0, 0, 393, 1425, 1, 0, 0, 0, 395, 1431, 1, 0, 0, 0, 397, 1435, 1, 0, 0, 0, 399, 1439, 1, 0, 0, 0, 401, 1443, 1, 0, 0, 0, 403, 1449, 1, 0, 0, 0, 405, 1455, 1, 0, 0, 0, 407, 1461, 1, 0, 0, 0, 409, 410, 7, 0, 0, 0, 410, 411, 7, 1, 0, 0, 411, 412, 7, 2, 0, 0, 412, 413, 7, 2, 0, 0, 413, 414, 7, 3, 0, 0, 414, 415, 7, 4, 0, 0, 415, 416, 7, 5, 0, 0, 416, 417, 1, 0, 0, 0, 417, 418, 6, 0, 0, 0, 418, 16, 1, 0, 0, 0, 419, 420, 7, 0, 0, 0, 420, 421, 7, 6, 0, 0, 421, 422, 7, 7, 0, 0, 422, 423, 7, 8, 0, 0, 423, 424, 1, 0, 0, 0, 424, 425, 6, 1, 1, 0, 425, 18, 1, 0, 0, 0, 426, 427, 7, 3, 0, 0, 427, 428, 7, 9, 0, 0, 428, 429, 7, 6, 0, 0, 429, 430, 7, 1, 0, 0, 430, 431, 7, 4, 0, 0, 431, 432, 7, 10, 0, 0, 432, 433, 1, 0, 0, 0, 433, 434, 6, 2, 2, 0, 434, 20, 1, 0, 0, 0, 435, 436, 7, 3, 0, 0, 436, 437, 7, 11, 0, 0, 437, 438, 7, 12, 0, 0, 438, 439, 7, 13, 0, 0, 439, 440, 1, 0, 0, 0, 440, 441, 6, 3, 0, 0, 441, 22, 1, 0, 0, 0, 442, 443, 7, 3, 0, 0, 443, 444, 7, 14, 0, 0, 444, 445, 7, 8, 0, 0, 445, 446, 7, 13, 0, 0, 446, 447, 7, 12, 0, 0, 447, 448, 7, 1, 0, 0, 448, 449, 7, 9, 0, 0, 449, 450, 1, 0, 0, 0, 450, 451, 6, 4, 3, 0, 451, 24, 1, 0, 0, 0, 452, 453, 7, 15, 0, 0, 453, 454, 7, 6, 0, 0, 454, 455, 7, 7, 0, 0, 455, 456, 7, 16, 0, 0, 456, 457, 1, 0, 0, 0, 457, 458, 6, 5, 4, 0, 458, 26, 1, 0, 0, 0, 459, 460, 7, 17, 0, 0, 460, 461, 7, 6, 0, 0, 461, 462, 7, 7, 0, 0, 462, 463, 7, 18, 0, 0, 463, 464, 1, 0, 0, 0, 464, 465, 6, 6, 0, 0, 465, 28, 1, 0, 0, 0, 466, 467, 7, 18, 0, 0, 
467, 468, 7, 3, 0, 0, 468, 469, 7, 3, 0, 0, 469, 470, 7, 8, 0, 0, 470, 471, 1, 0, 0, 0, 471, 472, 6, 7, 1, 0, 472, 30, 1, 0, 0, 0, 473, 474, 7, 13, 0, 0, 474, 475, 7, 1, 0, 0, 475, 476, 7, 16, 0, 0, 476, 477, 7, 1, 0, 0, 477, 478, 7, 5, 0, 0, 478, 479, 1, 0, 0, 0, 479, 480, 6, 8, 0, 0, 480, 32, 1, 0, 0, 0, 481, 482, 7, 16, 0, 0, 482, 483, 7, 11, 0, 0, 483, 484, 5, 95, 0, 0, 484, 485, 7, 3, 0, 0, 485, 486, 7, 14, 0, 0, 486, 487, 7, 8, 0, 0, 487, 488, 7, 12, 0, 0, 488, 489, 7, 9, 0, 0, 489, 490, 7, 0, 0, 0, 490, 491, 1, 0, 0, 0, 491, 492, 6, 9, 5, 0, 492, 34, 1, 0, 0, 0, 493, 494, 7, 6, 0, 0, 494, 495, 7, 3, 0, 0, 495, 496, 7, 9, 0, 0, 496, 497, 7, 12, 0, 0, 497, 498, 7, 16, 0, 0, 498, 499, 7, 3, 0, 0, 499, 500, 1, 0, 0, 0, 500, 501, 6, 10, 6, 0, 501, 36, 1, 0, 0, 0, 502, 503, 7, 6, 0, 0, 503, 504, 7, 7, 0, 0, 504, 505, 7, 19, 0, 0, 505, 506, 1, 0, 0, 0, 506, 507, 6, 11, 0, 0, 507, 38, 1, 0, 0, 0, 508, 509, 7, 2, 0, 0, 509, 510, 7, 10, 0, 0, 510, 511, 7, 7, 0, 0, 511, 512, 7, 19, 0, 0, 512, 513, 1, 0, 0, 0, 513, 514, 6, 12, 7, 0, 514, 40, 1, 0, 0, 0, 515, 516, 7, 2, 0, 0, 516, 517, 7, 7, 0, 0, 517, 518, 7, 6, 0, 0, 518, 519, 7, 5, 0, 0, 519, 520, 1, 0, 0, 0, 520, 521, 6, 13, 0, 0, 521, 42, 1, 0, 0, 0, 522, 523, 7, 2, 0, 0, 523, 524, 7, 5, 0, 0, 524, 525, 7, 12, 0, 0, 525, 526, 7, 5, 0, 0, 526, 527, 7, 2, 0, 0, 527, 528, 1, 0, 0, 0, 528, 529, 6, 14, 0, 0, 529, 44, 1, 0, 0, 0, 530, 531, 7, 19, 0, 0, 531, 532, 7, 10, 0, 0, 532, 533, 7, 3, 0, 0, 533, 534, 7, 6, 0, 0, 534, 535, 7, 3, 0, 0, 535, 536, 1, 0, 0, 0, 536, 537, 6, 15, 0, 0, 537, 46, 1, 0, 0, 0, 538, 539, 4, 16, 0, 0, 539, 540, 7, 1, 0, 0, 540, 541, 7, 9, 0, 0, 541, 542, 7, 13, 0, 0, 542, 543, 7, 1, 0, 0, 543, 544, 7, 9, 0, 0, 544, 545, 7, 3, 0, 0, 545, 546, 7, 2, 0, 0, 546, 547, 7, 5, 0, 0, 547, 548, 7, 12, 0, 0, 548, 549, 7, 5, 0, 0, 549, 550, 7, 2, 0, 0, 550, 551, 1, 0, 0, 0, 551, 552, 6, 16, 0, 0, 552, 48, 1, 0, 0, 0, 553, 554, 4, 17, 1, 0, 554, 555, 7, 13, 0, 0, 555, 556, 7, 7, 0, 0, 556, 557, 7, 7, 0, 0, 
557, 558, 7, 18, 0, 0, 558, 559, 7, 20, 0, 0, 559, 560, 7, 8, 0, 0, 560, 561, 1, 0, 0, 0, 561, 562, 6, 17, 8, 0, 562, 50, 1, 0, 0, 0, 563, 564, 4, 18, 2, 0, 564, 565, 7, 16, 0, 0, 565, 566, 7, 3, 0, 0, 566, 567, 7, 5, 0, 0, 567, 568, 7, 6, 0, 0, 568, 569, 7, 1, 0, 0, 569, 570, 7, 4, 0, 0, 570, 571, 7, 2, 0, 0, 571, 572, 1, 0, 0, 0, 572, 573, 6, 18, 9, 0, 573, 52, 1, 0, 0, 0, 574, 576, 8, 21, 0, 0, 575, 574, 1, 0, 0, 0, 576, 577, 1, 0, 0, 0, 577, 575, 1, 0, 0, 0, 577, 578, 1, 0, 0, 0, 578, 579, 1, 0, 0, 0, 579, 580, 6, 19, 0, 0, 580, 54, 1, 0, 0, 0, 581, 582, 5, 47, 0, 0, 582, 583, 5, 47, 0, 0, 583, 587, 1, 0, 0, 0, 584, 586, 8, 22, 0, 0, 585, 584, 1, 0, 0, 0, 586, 589, 1, 0, 0, 0, 587, 585, 1, 0, 0, 0, 587, 588, 1, 0, 0, 0, 588, 591, 1, 0, 0, 0, 589, 587, 1, 0, 0, 0, 590, 592, 5, 13, 0, 0, 591, 590, 1, 0, 0, 0, 591, 592, 1, 0, 0, 0, 592, 594, 1, 0, 0, 0, 593, 595, 5, 10, 0, 0, 594, 593, 1, 0, 0, 0, 594, 595, 1, 0, 0, 0, 595, 596, 1, 0, 0, 0, 596, 597, 6, 20, 10, 0, 597, 56, 1, 0, 0, 0, 598, 599, 5, 47, 0, 0, 599, 600, 5, 42, 0, 0, 600, 605, 1, 0, 0, 0, 601, 604, 3, 57, 21, 0, 602, 604, 9, 0, 0, 0, 603, 601, 1, 0, 0, 0, 603, 602, 1, 0, 0, 0, 604, 607, 1, 0, 0, 0, 605, 606, 1, 0, 0, 0, 605, 603, 1, 0, 0, 0, 606, 608, 1, 0, 0, 0, 607, 605, 1, 0, 0, 0, 608, 609, 5, 42, 0, 0, 609, 610, 5, 47, 0, 0, 610, 611, 1, 0, 0, 0, 611, 612, 6, 21, 10, 0, 612, 58, 1, 0, 0, 0, 613, 615, 7, 23, 0, 0, 614, 613, 1, 0, 0, 0, 615, 616, 1, 0, 0, 0, 616, 614, 1, 0, 0, 0, 616, 617, 1, 0, 0, 0, 617, 618, 1, 0, 0, 0, 618, 619, 6, 22, 10, 0, 619, 60, 1, 0, 0, 0, 620, 621, 5, 124, 0, 0, 621, 622, 1, 0, 0, 0, 622, 623, 6, 23, 11, 0, 623, 62, 1, 0, 0, 0, 624, 625, 7, 24, 0, 0, 625, 64, 1, 0, 0, 0, 626, 627, 7, 25, 0, 0, 627, 66, 1, 0, 0, 0, 628, 629, 5, 92, 0, 0, 629, 630, 7, 26, 0, 0, 630, 68, 1, 0, 0, 0, 631, 632, 8, 27, 0, 0, 632, 70, 1, 0, 0, 0, 633, 635, 7, 3, 0, 0, 634, 636, 7, 28, 0, 0, 635, 634, 1, 0, 0, 0, 635, 636, 1, 0, 0, 0, 636, 638, 1, 0, 0, 0, 637, 639, 3, 63, 24, 0, 638, 637, 1, 
0, 0, 0, 639, 640, 1, 0, 0, 0, 640, 638, 1, 0, 0, 0, 640, 641, 1, 0, 0, 0, 641, 72, 1, 0, 0, 0, 642, 643, 5, 64, 0, 0, 643, 74, 1, 0, 0, 0, 644, 645, 5, 96, 0, 0, 645, 76, 1, 0, 0, 0, 646, 650, 8, 29, 0, 0, 647, 648, 5, 96, 0, 0, 648, 650, 5, 96, 0, 0, 649, 646, 1, 0, 0, 0, 649, 647, 1, 0, 0, 0, 650, 78, 1, 0, 0, 0, 651, 652, 5, 95, 0, 0, 652, 80, 1, 0, 0, 0, 653, 657, 3, 65, 25, 0, 654, 657, 3, 63, 24, 0, 655, 657, 3, 79, 32, 0, 656, 653, 1, 0, 0, 0, 656, 654, 1, 0, 0, 0, 656, 655, 1, 0, 0, 0, 657, 82, 1, 0, 0, 0, 658, 663, 5, 34, 0, 0, 659, 662, 3, 67, 26, 0, 660, 662, 3, 69, 27, 0, 661, 659, 1, 0, 0, 0, 661, 660, 1, 0, 0, 0, 662, 665, 1, 0, 0, 0, 663, 661, 1, 0, 0, 0, 663, 664, 1, 0, 0, 0, 664, 666, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 666, 688, 5, 34, 0, 0, 667, 668, 5, 34, 0, 0, 668, 669, 5, 34, 0, 0, 669, 670, 5, 34, 0, 0, 670, 674, 1, 0, 0, 0, 671, 673, 8, 22, 0, 0, 672, 671, 1, 0, 0, 0, 673, 676, 1, 0, 0, 0, 674, 675, 1, 0, 0, 0, 674, 672, 1, 0, 0, 0, 675, 677, 1, 0, 0, 0, 676, 674, 1, 0, 0, 0, 677, 678, 5, 34, 0, 0, 678, 679, 5, 34, 0, 0, 679, 680, 5, 34, 0, 0, 680, 682, 1, 0, 0, 0, 681, 683, 5, 34, 0, 0, 682, 681, 1, 0, 0, 0, 682, 683, 1, 0, 0, 0, 683, 685, 1, 0, 0, 0, 684, 686, 5, 34, 0, 0, 685, 684, 1, 0, 0, 0, 685, 686, 1, 0, 0, 0, 686, 688, 1, 0, 0, 0, 687, 658, 1, 0, 0, 0, 687, 667, 1, 0, 0, 0, 688, 84, 1, 0, 0, 0, 689, 691, 3, 63, 24, 0, 690, 689, 1, 0, 0, 0, 691, 692, 1, 0, 0, 0, 692, 690, 1, 0, 0, 0, 692, 693, 1, 0, 0, 0, 693, 86, 1, 0, 0, 0, 694, 696, 3, 63, 24, 0, 695, 694, 1, 0, 0, 0, 696, 697, 1, 0, 0, 0, 697, 695, 1, 0, 0, 0, 697, 698, 1, 0, 0, 0, 698, 699, 1, 0, 0, 0, 699, 703, 3, 103, 44, 0, 700, 702, 3, 63, 24, 0, 701, 700, 1, 0, 0, 0, 702, 705, 1, 0, 0, 0, 703, 701, 1, 0, 0, 0, 703, 704, 1, 0, 0, 0, 704, 737, 1, 0, 0, 0, 705, 703, 1, 0, 0, 0, 706, 708, 3, 103, 44, 0, 707, 709, 3, 63, 24, 0, 708, 707, 1, 0, 0, 0, 709, 710, 1, 0, 0, 0, 710, 708, 1, 0, 0, 0, 710, 711, 1, 0, 0, 0, 711, 737, 1, 0, 0, 0, 712, 714, 3, 63, 24, 0, 713, 712, 1, 0, 0, 
0, 714, 715, 1, 0, 0, 0, 715, 713, 1, 0, 0, 0, 715, 716, 1, 0, 0, 0, 716, 724, 1, 0, 0, 0, 717, 721, 3, 103, 44, 0, 718, 720, 3, 63, 24, 0, 719, 718, 1, 0, 0, 0, 720, 723, 1, 0, 0, 0, 721, 719, 1, 0, 0, 0, 721, 722, 1, 0, 0, 0, 722, 725, 1, 0, 0, 0, 723, 721, 1, 0, 0, 0, 724, 717, 1, 0, 0, 0, 724, 725, 1, 0, 0, 0, 725, 726, 1, 0, 0, 0, 726, 727, 3, 71, 28, 0, 727, 737, 1, 0, 0, 0, 728, 730, 3, 103, 44, 0, 729, 731, 3, 63, 24, 0, 730, 729, 1, 0, 0, 0, 731, 732, 1, 0, 0, 0, 732, 730, 1, 0, 0, 0, 732, 733, 1, 0, 0, 0, 733, 734, 1, 0, 0, 0, 734, 735, 3, 71, 28, 0, 735, 737, 1, 0, 0, 0, 736, 695, 1, 0, 0, 0, 736, 706, 1, 0, 0, 0, 736, 713, 1, 0, 0, 0, 736, 728, 1, 0, 0, 0, 737, 88, 1, 0, 0, 0, 738, 739, 7, 30, 0, 0, 739, 740, 7, 31, 0, 0, 740, 90, 1, 0, 0, 0, 741, 742, 7, 12, 0, 0, 742, 743, 7, 9, 0, 0, 743, 744, 7, 0, 0, 0, 744, 92, 1, 0, 0, 0, 745, 746, 7, 12, 0, 0, 746, 747, 7, 2, 0, 0, 747, 748, 7, 4, 0, 0, 748, 94, 1, 0, 0, 0, 749, 750, 5, 61, 0, 0, 750, 96, 1, 0, 0, 0, 751, 752, 5, 58, 0, 0, 752, 753, 5, 58, 0, 0, 753, 98, 1, 0, 0, 0, 754, 755, 5, 44, 0, 0, 755, 100, 1, 0, 0, 0, 756, 757, 7, 0, 0, 0, 757, 758, 7, 3, 0, 0, 758, 759, 7, 2, 0, 0, 759, 760, 7, 4, 0, 0, 760, 102, 1, 0, 0, 0, 761, 762, 5, 46, 0, 0, 762, 104, 1, 0, 0, 0, 763, 764, 7, 15, 0, 0, 764, 765, 7, 12, 0, 0, 765, 766, 7, 13, 0, 0, 766, 767, 7, 2, 0, 0, 767, 768, 7, 3, 0, 0, 768, 106, 1, 0, 0, 0, 769, 770, 7, 15, 0, 0, 770, 771, 7, 1, 0, 0, 771, 772, 7, 6, 0, 0, 772, 773, 7, 2, 0, 0, 773, 774, 7, 5, 0, 0, 774, 108, 1, 0, 0, 0, 775, 776, 7, 1, 0, 0, 776, 777, 7, 9, 0, 0, 777, 110, 1, 0, 0, 0, 778, 779, 7, 1, 0, 0, 779, 780, 7, 2, 0, 0, 780, 112, 1, 0, 0, 0, 781, 782, 7, 13, 0, 0, 782, 783, 7, 12, 0, 0, 783, 784, 7, 2, 0, 0, 784, 785, 7, 5, 0, 0, 785, 114, 1, 0, 0, 0, 786, 787, 7, 13, 0, 0, 787, 788, 7, 1, 0, 0, 788, 789, 7, 18, 0, 0, 789, 790, 7, 3, 0, 0, 790, 116, 1, 0, 0, 0, 791, 792, 5, 40, 0, 0, 792, 118, 1, 0, 0, 0, 793, 794, 7, 9, 0, 0, 794, 795, 7, 7, 0, 0, 795, 796, 7, 5, 0, 0, 796, 120, 1, 
0, 0, 0, 797, 798, 7, 9, 0, 0, 798, 799, 7, 20, 0, 0, 799, 800, 7, 13, 0, 0, 800, 801, 7, 13, 0, 0, 801, 122, 1, 0, 0, 0, 802, 803, 7, 9, 0, 0, 803, 804, 7, 20, 0, 0, 804, 805, 7, 13, 0, 0, 805, 806, 7, 13, 0, 0, 806, 807, 7, 2, 0, 0, 807, 124, 1, 0, 0, 0, 808, 809, 7, 7, 0, 0, 809, 810, 7, 6, 0, 0, 810, 126, 1, 0, 0, 0, 811, 812, 5, 63, 0, 0, 812, 128, 1, 0, 0, 0, 813, 814, 7, 6, 0, 0, 814, 815, 7, 13, 0, 0, 815, 816, 7, 1, 0, 0, 816, 817, 7, 18, 0, 0, 817, 818, 7, 3, 0, 0, 818, 130, 1, 0, 0, 0, 819, 820, 5, 41, 0, 0, 820, 132, 1, 0, 0, 0, 821, 822, 7, 5, 0, 0, 822, 823, 7, 6, 0, 0, 823, 824, 7, 20, 0, 0, 824, 825, 7, 3, 0, 0, 825, 134, 1, 0, 0, 0, 826, 827, 5, 61, 0, 0, 827, 828, 5, 61, 0, 0, 828, 136, 1, 0, 0, 0, 829, 830, 5, 61, 0, 0, 830, 831, 5, 126, 0, 0, 831, 138, 1, 0, 0, 0, 832, 833, 5, 33, 0, 0, 833, 834, 5, 61, 0, 0, 834, 140, 1, 0, 0, 0, 835, 836, 5, 60, 0, 0, 836, 142, 1, 0, 0, 0, 837, 838, 5, 60, 0, 0, 838, 839, 5, 61, 0, 0, 839, 144, 1, 0, 0, 0, 840, 841, 5, 62, 0, 0, 841, 146, 1, 0, 0, 0, 842, 843, 5, 62, 0, 0, 843, 844, 5, 61, 0, 0, 844, 148, 1, 0, 0, 0, 845, 846, 5, 43, 0, 0, 846, 150, 1, 0, 0, 0, 847, 848, 5, 45, 0, 0, 848, 152, 1, 0, 0, 0, 849, 850, 5, 42, 0, 0, 850, 154, 1, 0, 0, 0, 851, 852, 5, 47, 0, 0, 852, 156, 1, 0, 0, 0, 853, 854, 5, 37, 0, 0, 854, 158, 1, 0, 0, 0, 855, 856, 4, 72, 3, 0, 856, 857, 7, 16, 0, 0, 857, 858, 7, 12, 0, 0, 858, 859, 7, 5, 0, 0, 859, 860, 7, 4, 0, 0, 860, 861, 7, 10, 0, 0, 861, 160, 1, 0, 0, 0, 862, 865, 3, 127, 56, 0, 863, 866, 3, 65, 25, 0, 864, 866, 3, 79, 32, 0, 865, 863, 1, 0, 0, 0, 865, 864, 1, 0, 0, 0, 866, 870, 1, 0, 0, 0, 867, 869, 3, 81, 33, 0, 868, 867, 1, 0, 0, 0, 869, 872, 1, 0, 0, 0, 870, 868, 1, 0, 0, 0, 870, 871, 1, 0, 0, 0, 871, 880, 1, 0, 0, 0, 872, 870, 1, 0, 0, 0, 873, 875, 3, 127, 56, 0, 874, 876, 3, 63, 24, 0, 875, 874, 1, 0, 0, 0, 876, 877, 1, 0, 0, 0, 877, 875, 1, 0, 0, 0, 877, 878, 1, 0, 0, 0, 878, 880, 1, 0, 0, 0, 879, 862, 1, 0, 0, 0, 879, 873, 1, 0, 0, 0, 880, 162, 1, 0, 0, 0, 881, 
882, 5, 91, 0, 0, 882, 883, 1, 0, 0, 0, 883, 884, 6, 74, 0, 0, 884, 885, 6, 74, 0, 0, 885, 164, 1, 0, 0, 0, 886, 887, 5, 93, 0, 0, 887, 888, 1, 0, 0, 0, 888, 889, 6, 75, 11, 0, 889, 890, 6, 75, 11, 0, 890, 166, 1, 0, 0, 0, 891, 895, 3, 65, 25, 0, 892, 894, 3, 81, 33, 0, 893, 892, 1, 0, 0, 0, 894, 897, 1, 0, 0, 0, 895, 893, 1, 0, 0, 0, 895, 896, 1, 0, 0, 0, 896, 908, 1, 0, 0, 0, 897, 895, 1, 0, 0, 0, 898, 901, 3, 79, 32, 0, 899, 901, 3, 73, 29, 0, 900, 898, 1, 0, 0, 0, 900, 899, 1, 0, 0, 0, 901, 903, 1, 0, 0, 0, 902, 904, 3, 81, 33, 0, 903, 902, 1, 0, 0, 0, 904, 905, 1, 0, 0, 0, 905, 903, 1, 0, 0, 0, 905, 906, 1, 0, 0, 0, 906, 908, 1, 0, 0, 0, 907, 891, 1, 0, 0, 0, 907, 900, 1, 0, 0, 0, 908, 168, 1, 0, 0, 0, 909, 911, 3, 75, 30, 0, 910, 912, 3, 77, 31, 0, 911, 910, 1, 0, 0, 0, 912, 913, 1, 0, 0, 0, 913, 911, 1, 0, 0, 0, 913, 914, 1, 0, 0, 0, 914, 915, 1, 0, 0, 0, 915, 916, 3, 75, 30, 0, 916, 170, 1, 0, 0, 0, 917, 918, 3, 169, 77, 0, 918, 172, 1, 0, 0, 0, 919, 920, 3, 55, 20, 0, 920, 921, 1, 0, 0, 0, 921, 922, 6, 79, 10, 0, 922, 174, 1, 0, 0, 0, 923, 924, 3, 57, 21, 0, 924, 925, 1, 0, 0, 0, 925, 926, 6, 80, 10, 0, 926, 176, 1, 0, 0, 0, 927, 928, 3, 59, 22, 0, 928, 929, 1, 0, 0, 0, 929, 930, 6, 81, 10, 0, 930, 178, 1, 0, 0, 0, 931, 932, 3, 163, 74, 0, 932, 933, 1, 0, 0, 0, 933, 934, 6, 82, 12, 0, 934, 935, 6, 82, 13, 0, 935, 180, 1, 0, 0, 0, 936, 937, 3, 61, 23, 0, 937, 938, 1, 0, 0, 0, 938, 939, 6, 83, 14, 0, 939, 940, 6, 83, 11, 0, 940, 182, 1, 0, 0, 0, 941, 942, 3, 59, 22, 0, 942, 943, 1, 0, 0, 0, 943, 944, 6, 84, 10, 0, 944, 184, 1, 0, 0, 0, 945, 946, 3, 55, 20, 0, 946, 947, 1, 0, 0, 0, 947, 948, 6, 85, 10, 0, 948, 186, 1, 0, 0, 0, 949, 950, 3, 57, 21, 0, 950, 951, 1, 0, 0, 0, 951, 952, 6, 86, 10, 0, 952, 188, 1, 0, 0, 0, 953, 954, 3, 61, 23, 0, 954, 955, 1, 0, 0, 0, 955, 956, 6, 87, 14, 0, 956, 957, 6, 87, 11, 0, 957, 190, 1, 0, 0, 0, 958, 959, 3, 163, 74, 0, 959, 960, 1, 0, 0, 0, 960, 961, 6, 88, 12, 0, 961, 192, 1, 0, 0, 0, 962, 963, 3, 165, 75, 0, 963, 964, 1, 
0, 0, 0, 964, 965, 6, 89, 15, 0, 965, 194, 1, 0, 0, 0, 966, 967, 3, 335, 160, 0, 967, 968, 1, 0, 0, 0, 968, 969, 6, 90, 16, 0, 969, 196, 1, 0, 0, 0, 970, 971, 3, 99, 42, 0, 971, 972, 1, 0, 0, 0, 972, 973, 6, 91, 17, 0, 973, 198, 1, 0, 0, 0, 974, 975, 3, 95, 40, 0, 975, 976, 1, 0, 0, 0, 976, 977, 6, 92, 18, 0, 977, 200, 1, 0, 0, 0, 978, 979, 7, 16, 0, 0, 979, 980, 7, 3, 0, 0, 980, 981, 7, 5, 0, 0, 981, 982, 7, 12, 0, 0, 982, 983, 7, 0, 0, 0, 983, 984, 7, 12, 0, 0, 984, 985, 7, 5, 0, 0, 985, 986, 7, 12, 0, 0, 986, 202, 1, 0, 0, 0, 987, 991, 8, 32, 0, 0, 988, 989, 5, 47, 0, 0, 989, 991, 8, 33, 0, 0, 990, 987, 1, 0, 0, 0, 990, 988, 1, 0, 0, 0, 991, 204, 1, 0, 0, 0, 992, 994, 3, 203, 94, 0, 993, 992, 1, 0, 0, 0, 994, 995, 1, 0, 0, 0, 995, 993, 1, 0, 0, 0, 995, 996, 1, 0, 0, 0, 996, 206, 1, 0, 0, 0, 997, 998, 3, 205, 95, 0, 998, 999, 1, 0, 0, 0, 999, 1000, 6, 96, 19, 0, 1000, 208, 1, 0, 0, 0, 1001, 1002, 3, 83, 34, 0, 1002, 1003, 1, 0, 0, 0, 1003, 1004, 6, 97, 20, 0, 1004, 210, 1, 0, 0, 0, 1005, 1006, 3, 55, 20, 0, 1006, 1007, 1, 0, 0, 0, 1007, 1008, 6, 98, 10, 0, 1008, 212, 1, 0, 0, 0, 1009, 1010, 3, 57, 21, 0, 1010, 1011, 1, 0, 0, 0, 1011, 1012, 6, 99, 10, 0, 1012, 214, 1, 0, 0, 0, 1013, 1014, 3, 59, 22, 0, 1014, 1015, 1, 0, 0, 0, 1015, 1016, 6, 100, 10, 0, 1016, 216, 1, 0, 0, 0, 1017, 1018, 3, 61, 23, 0, 1018, 1019, 1, 0, 0, 0, 1019, 1020, 6, 101, 14, 0, 1020, 1021, 6, 101, 11, 0, 1021, 218, 1, 0, 0, 0, 1022, 1023, 3, 103, 44, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1025, 6, 102, 21, 0, 1025, 220, 1, 0, 0, 0, 1026, 1027, 3, 99, 42, 0, 1027, 1028, 1, 0, 0, 0, 1028, 1029, 6, 103, 17, 0, 1029, 222, 1, 0, 0, 0, 1030, 1031, 3, 127, 56, 0, 1031, 1032, 1, 0, 0, 0, 1032, 1033, 6, 104, 22, 0, 1033, 224, 1, 0, 0, 0, 1034, 1035, 3, 161, 73, 0, 1035, 1036, 1, 0, 0, 0, 1036, 1037, 6, 105, 23, 0, 1037, 226, 1, 0, 0, 0, 1038, 1043, 3, 65, 25, 0, 1039, 1043, 3, 63, 24, 0, 1040, 1043, 3, 79, 32, 0, 1041, 1043, 3, 153, 69, 0, 1042, 1038, 1, 0, 0, 0, 1042, 1039, 1, 0, 0, 0, 1042, 1040, 1, 0, 
0, 0, 1042, 1041, 1, 0, 0, 0, 1043, 228, 1, 0, 0, 0, 1044, 1047, 3, 65, 25, 0, 1045, 1047, 3, 153, 69, 0, 1046, 1044, 1, 0, 0, 0, 1046, 1045, 1, 0, 0, 0, 1047, 1051, 1, 0, 0, 0, 1048, 1050, 3, 227, 106, 0, 1049, 1048, 1, 0, 0, 0, 1050, 1053, 1, 0, 0, 0, 1051, 1049, 1, 0, 0, 0, 1051, 1052, 1, 0, 0, 0, 1052, 1064, 1, 0, 0, 0, 1053, 1051, 1, 0, 0, 0, 1054, 1057, 3, 79, 32, 0, 1055, 1057, 3, 73, 29, 0, 1056, 1054, 1, 0, 0, 0, 1056, 1055, 1, 0, 0, 0, 1057, 1059, 1, 0, 0, 0, 1058, 1060, 3, 227, 106, 0, 1059, 1058, 1, 0, 0, 0, 1060, 1061, 1, 0, 0, 0, 1061, 1059, 1, 0, 0, 0, 1061, 1062, 1, 0, 0, 0, 1062, 1064, 1, 0, 0, 0, 1063, 1046, 1, 0, 0, 0, 1063, 1056, 1, 0, 0, 0, 1064, 230, 1, 0, 0, 0, 1065, 1068, 3, 229, 107, 0, 1066, 1068, 3, 169, 77, 0, 1067, 1065, 1, 0, 0, 0, 1067, 1066, 1, 0, 0, 0, 1068, 1069, 1, 0, 0, 0, 1069, 1067, 1, 0, 0, 0, 1069, 1070, 1, 0, 0, 0, 1070, 232, 1, 0, 0, 0, 1071, 1072, 3, 55, 20, 0, 1072, 1073, 1, 0, 0, 0, 1073, 1074, 6, 109, 10, 0, 1074, 234, 1, 0, 0, 0, 1075, 1076, 3, 57, 21, 0, 1076, 1077, 1, 0, 0, 0, 1077, 1078, 6, 110, 10, 0, 1078, 236, 1, 0, 0, 0, 1079, 1080, 3, 59, 22, 0, 1080, 1081, 1, 0, 0, 0, 1081, 1082, 6, 111, 10, 0, 1082, 238, 1, 0, 0, 0, 1083, 1084, 3, 61, 23, 0, 1084, 1085, 1, 0, 0, 0, 1085, 1086, 6, 112, 14, 0, 1086, 1087, 6, 112, 11, 0, 1087, 240, 1, 0, 0, 0, 1088, 1089, 3, 95, 40, 0, 1089, 1090, 1, 0, 0, 0, 1090, 1091, 6, 113, 18, 0, 1091, 242, 1, 0, 0, 0, 1092, 1093, 3, 99, 42, 0, 1093, 1094, 1, 0, 0, 0, 1094, 1095, 6, 114, 17, 0, 1095, 244, 1, 0, 0, 0, 1096, 1097, 3, 103, 44, 0, 1097, 1098, 1, 0, 0, 0, 1098, 1099, 6, 115, 21, 0, 1099, 246, 1, 0, 0, 0, 1100, 1101, 3, 127, 56, 0, 1101, 1102, 1, 0, 0, 0, 1102, 1103, 6, 116, 22, 0, 1103, 248, 1, 0, 0, 0, 1104, 1105, 3, 161, 73, 0, 1105, 1106, 1, 0, 0, 0, 1106, 1107, 6, 117, 23, 0, 1107, 250, 1, 0, 0, 0, 1108, 1109, 7, 12, 0, 0, 1109, 1110, 7, 2, 0, 0, 1110, 252, 1, 0, 0, 0, 1111, 1112, 3, 231, 108, 0, 1112, 1113, 1, 0, 0, 0, 1113, 1114, 6, 119, 24, 0, 1114, 254, 1, 0, 0, 0, 
1115, 1116, 3, 55, 20, 0, 1116, 1117, 1, 0, 0, 0, 1117, 1118, 6, 120, 10, 0, 1118, 256, 1, 0, 0, 0, 1119, 1120, 3, 57, 21, 0, 1120, 1121, 1, 0, 0, 0, 1121, 1122, 6, 121, 10, 0, 1122, 258, 1, 0, 0, 0, 1123, 1124, 3, 59, 22, 0, 1124, 1125, 1, 0, 0, 0, 1125, 1126, 6, 122, 10, 0, 1126, 260, 1, 0, 0, 0, 1127, 1128, 3, 61, 23, 0, 1128, 1129, 1, 0, 0, 0, 1129, 1130, 6, 123, 14, 0, 1130, 1131, 6, 123, 11, 0, 1131, 262, 1, 0, 0, 0, 1132, 1133, 3, 163, 74, 0, 1133, 1134, 1, 0, 0, 0, 1134, 1135, 6, 124, 12, 0, 1135, 1136, 6, 124, 25, 0, 1136, 264, 1, 0, 0, 0, 1137, 1138, 7, 7, 0, 0, 1138, 1139, 7, 9, 0, 0, 1139, 1140, 1, 0, 0, 0, 1140, 1141, 6, 125, 26, 0, 1141, 266, 1, 0, 0, 0, 1142, 1143, 7, 19, 0, 0, 1143, 1144, 7, 1, 0, 0, 1144, 1145, 7, 5, 0, 0, 1145, 1146, 7, 10, 0, 0, 1146, 1147, 1, 0, 0, 0, 1147, 1148, 6, 126, 26, 0, 1148, 268, 1, 0, 0, 0, 1149, 1150, 8, 34, 0, 0, 1150, 270, 1, 0, 0, 0, 1151, 1153, 3, 269, 127, 0, 1152, 1151, 1, 0, 0, 0, 1153, 1154, 1, 0, 0, 0, 1154, 1152, 1, 0, 0, 0, 1154, 1155, 1, 0, 0, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1157, 3, 335, 160, 0, 1157, 1159, 1, 0, 0, 0, 1158, 1152, 1, 0, 0, 0, 1158, 1159, 1, 0, 0, 0, 1159, 1161, 1, 0, 0, 0, 1160, 1162, 3, 269, 127, 0, 1161, 1160, 1, 0, 0, 0, 1162, 1163, 1, 0, 0, 0, 1163, 1161, 1, 0, 0, 0, 1163, 1164, 1, 0, 0, 0, 1164, 272, 1, 0, 0, 0, 1165, 1166, 3, 271, 128, 0, 1166, 1167, 1, 0, 0, 0, 1167, 1168, 6, 129, 27, 0, 1168, 274, 1, 0, 0, 0, 1169, 1170, 3, 55, 20, 0, 1170, 1171, 1, 0, 0, 0, 1171, 1172, 6, 130, 10, 0, 1172, 276, 1, 0, 0, 0, 1173, 1174, 3, 57, 21, 0, 1174, 1175, 1, 0, 0, 0, 1175, 1176, 6, 131, 10, 0, 1176, 278, 1, 0, 0, 0, 1177, 1178, 3, 59, 22, 0, 1178, 1179, 1, 0, 0, 0, 1179, 1180, 6, 132, 10, 0, 1180, 280, 1, 0, 0, 0, 1181, 1182, 3, 61, 23, 0, 1182, 1183, 1, 0, 0, 0, 1183, 1184, 6, 133, 14, 0, 1184, 1185, 6, 133, 11, 0, 1185, 1186, 6, 133, 11, 0, 1186, 282, 1, 0, 0, 0, 1187, 1188, 3, 95, 40, 0, 1188, 1189, 1, 0, 0, 0, 1189, 1190, 6, 134, 18, 0, 1190, 284, 1, 0, 0, 0, 1191, 1192, 3, 99, 42, 0, 
1192, 1193, 1, 0, 0, 0, 1193, 1194, 6, 135, 17, 0, 1194, 286, 1, 0, 0, 0, 1195, 1196, 3, 103, 44, 0, 1196, 1197, 1, 0, 0, 0, 1197, 1198, 6, 136, 21, 0, 1198, 288, 1, 0, 0, 0, 1199, 1200, 3, 267, 126, 0, 1200, 1201, 1, 0, 0, 0, 1201, 1202, 6, 137, 28, 0, 1202, 290, 1, 0, 0, 0, 1203, 1204, 3, 231, 108, 0, 1204, 1205, 1, 0, 0, 0, 1205, 1206, 6, 138, 24, 0, 1206, 292, 1, 0, 0, 0, 1207, 1208, 3, 171, 78, 0, 1208, 1209, 1, 0, 0, 0, 1209, 1210, 6, 139, 29, 0, 1210, 294, 1, 0, 0, 0, 1211, 1212, 3, 127, 56, 0, 1212, 1213, 1, 0, 0, 0, 1213, 1214, 6, 140, 22, 0, 1214, 296, 1, 0, 0, 0, 1215, 1216, 3, 161, 73, 0, 1216, 1217, 1, 0, 0, 0, 1217, 1218, 6, 141, 23, 0, 1218, 298, 1, 0, 0, 0, 1219, 1220, 3, 55, 20, 0, 1220, 1221, 1, 0, 0, 0, 1221, 1222, 6, 142, 10, 0, 1222, 300, 1, 0, 0, 0, 1223, 1224, 3, 57, 21, 0, 1224, 1225, 1, 0, 0, 0, 1225, 1226, 6, 143, 10, 0, 1226, 302, 1, 0, 0, 0, 1227, 1228, 3, 59, 22, 0, 1228, 1229, 1, 0, 0, 0, 1229, 1230, 6, 144, 10, 0, 1230, 304, 1, 0, 0, 0, 1231, 1232, 3, 61, 23, 0, 1232, 1233, 1, 0, 0, 0, 1233, 1234, 6, 145, 14, 0, 1234, 1235, 6, 145, 11, 0, 1235, 306, 1, 0, 0, 0, 1236, 1237, 3, 103, 44, 0, 1237, 1238, 1, 0, 0, 0, 1238, 1239, 6, 146, 21, 0, 1239, 308, 1, 0, 0, 0, 1240, 1241, 3, 127, 56, 0, 1241, 1242, 1, 0, 0, 0, 1242, 1243, 6, 147, 22, 0, 1243, 310, 1, 0, 0, 0, 1244, 1245, 3, 161, 73, 0, 1245, 1246, 1, 0, 0, 0, 1246, 1247, 6, 148, 23, 0, 1247, 312, 1, 0, 0, 0, 1248, 1249, 3, 171, 78, 0, 1249, 1250, 1, 0, 0, 0, 1250, 1251, 6, 149, 29, 0, 1251, 314, 1, 0, 0, 0, 1252, 1253, 3, 167, 76, 0, 1253, 1254, 1, 0, 0, 0, 1254, 1255, 6, 150, 30, 0, 1255, 316, 1, 0, 0, 0, 1256, 1257, 3, 55, 20, 0, 1257, 1258, 1, 0, 0, 0, 1258, 1259, 6, 151, 10, 0, 1259, 318, 1, 0, 0, 0, 1260, 1261, 3, 57, 21, 0, 1261, 1262, 1, 0, 0, 0, 1262, 1263, 6, 152, 10, 0, 1263, 320, 1, 0, 0, 0, 1264, 1265, 3, 59, 22, 0, 1265, 1266, 1, 0, 0, 0, 1266, 1267, 6, 153, 10, 0, 1267, 322, 1, 0, 0, 0, 1268, 1269, 3, 61, 23, 0, 1269, 1270, 1, 0, 0, 0, 1270, 1271, 6, 154, 14, 0, 1271, 
1272, 6, 154, 11, 0, 1272, 324, 1, 0, 0, 0, 1273, 1274, 7, 1, 0, 0, 1274, 1275, 7, 9, 0, 0, 1275, 1276, 7, 15, 0, 0, 1276, 1277, 7, 7, 0, 0, 1277, 326, 1, 0, 0, 0, 1278, 1279, 3, 55, 20, 0, 1279, 1280, 1, 0, 0, 0, 1280, 1281, 6, 156, 10, 0, 1281, 328, 1, 0, 0, 0, 1282, 1283, 3, 57, 21, 0, 1283, 1284, 1, 0, 0, 0, 1284, 1285, 6, 157, 10, 0, 1285, 330, 1, 0, 0, 0, 1286, 1287, 3, 59, 22, 0, 1287, 1288, 1, 0, 0, 0, 1288, 1289, 6, 158, 10, 0, 1289, 332, 1, 0, 0, 0, 1290, 1291, 3, 165, 75, 0, 1291, 1292, 1, 0, 0, 0, 1292, 1293, 6, 159, 15, 0, 1293, 1294, 6, 159, 11, 0, 1294, 334, 1, 0, 0, 0, 1295, 1296, 5, 58, 0, 0, 1296, 336, 1, 0, 0, 0, 1297, 1303, 3, 73, 29, 0, 1298, 1303, 3, 63, 24, 0, 1299, 1303, 3, 103, 44, 0, 1300, 1303, 3, 65, 25, 0, 1301, 1303, 3, 79, 32, 0, 1302, 1297, 1, 0, 0, 0, 1302, 1298, 1, 0, 0, 0, 1302, 1299, 1, 0, 0, 0, 1302, 1300, 1, 0, 0, 0, 1302, 1301, 1, 0, 0, 0, 1303, 1304, 1, 0, 0, 0, 1304, 1302, 1, 0, 0, 0, 1304, 1305, 1, 0, 0, 0, 1305, 338, 1, 0, 0, 0, 1306, 1307, 3, 55, 20, 0, 1307, 1308, 1, 0, 0, 0, 1308, 1309, 6, 162, 10, 0, 1309, 340, 1, 0, 0, 0, 1310, 1311, 3, 57, 21, 0, 1311, 1312, 1, 0, 0, 0, 1312, 1313, 6, 163, 10, 0, 1313, 342, 1, 0, 0, 0, 1314, 1315, 3, 59, 22, 0, 1315, 1316, 1, 0, 0, 0, 1316, 1317, 6, 164, 10, 0, 1317, 344, 1, 0, 0, 0, 1318, 1319, 3, 61, 23, 0, 1319, 1320, 1, 0, 0, 0, 1320, 1321, 6, 165, 14, 0, 1321, 1322, 6, 165, 11, 0, 1322, 346, 1, 0, 0, 0, 1323, 1324, 3, 335, 160, 0, 1324, 1325, 1, 0, 0, 0, 1325, 1326, 6, 166, 16, 0, 1326, 348, 1, 0, 0, 0, 1327, 1328, 3, 99, 42, 0, 1328, 1329, 1, 0, 0, 0, 1329, 1330, 6, 167, 17, 0, 1330, 350, 1, 0, 0, 0, 1331, 1332, 3, 103, 44, 0, 1332, 1333, 1, 0, 0, 0, 1333, 1334, 6, 168, 21, 0, 1334, 352, 1, 0, 0, 0, 1335, 1336, 3, 265, 125, 0, 1336, 1337, 1, 0, 0, 0, 1337, 1338, 6, 169, 31, 0, 1338, 1339, 6, 169, 32, 0, 1339, 354, 1, 0, 0, 0, 1340, 1341, 3, 205, 95, 0, 1341, 1342, 1, 0, 0, 0, 1342, 1343, 6, 170, 19, 0, 1343, 356, 1, 0, 0, 0, 1344, 1345, 3, 83, 34, 0, 1345, 1346, 1, 0, 0, 0, 
1346, 1347, 6, 171, 20, 0, 1347, 358, 1, 0, 0, 0, 1348, 1349, 3, 55, 20, 0, 1349, 1350, 1, 0, 0, 0, 1350, 1351, 6, 172, 10, 0, 1351, 360, 1, 0, 0, 0, 1352, 1353, 3, 57, 21, 0, 1353, 1354, 1, 0, 0, 0, 1354, 1355, 6, 173, 10, 0, 1355, 362, 1, 0, 0, 0, 1356, 1357, 3, 59, 22, 0, 1357, 1358, 1, 0, 0, 0, 1358, 1359, 6, 174, 10, 0, 1359, 364, 1, 0, 0, 0, 1360, 1361, 3, 61, 23, 0, 1361, 1362, 1, 0, 0, 0, 1362, 1363, 6, 175, 14, 0, 1363, 1364, 6, 175, 11, 0, 1364, 1365, 6, 175, 11, 0, 1365, 366, 1, 0, 0, 0, 1366, 1367, 3, 99, 42, 0, 1367, 1368, 1, 0, 0, 0, 1368, 1369, 6, 176, 17, 0, 1369, 368, 1, 0, 0, 0, 1370, 1371, 3, 103, 44, 0, 1371, 1372, 1, 0, 0, 0, 1372, 1373, 6, 177, 21, 0, 1373, 370, 1, 0, 0, 0, 1374, 1375, 3, 231, 108, 0, 1375, 1376, 1, 0, 0, 0, 1376, 1377, 6, 178, 24, 0, 1377, 372, 1, 0, 0, 0, 1378, 1379, 3, 55, 20, 0, 1379, 1380, 1, 0, 0, 0, 1380, 1381, 6, 179, 10, 0, 1381, 374, 1, 0, 0, 0, 1382, 1383, 3, 57, 21, 0, 1383, 1384, 1, 0, 0, 0, 1384, 1385, 6, 180, 10, 0, 1385, 376, 1, 0, 0, 0, 1386, 1387, 3, 59, 22, 0, 1387, 1388, 1, 0, 0, 0, 1388, 1389, 6, 181, 10, 0, 1389, 378, 1, 0, 0, 0, 1390, 1391, 3, 61, 23, 0, 1391, 1392, 1, 0, 0, 0, 1392, 1393, 6, 182, 14, 0, 1393, 1394, 6, 182, 11, 0, 1394, 380, 1, 0, 0, 0, 1395, 1396, 3, 205, 95, 0, 1396, 1397, 1, 0, 0, 0, 1397, 1398, 6, 183, 19, 0, 1398, 1399, 6, 183, 11, 0, 1399, 1400, 6, 183, 33, 0, 1400, 382, 1, 0, 0, 0, 1401, 1402, 3, 83, 34, 0, 1402, 1403, 1, 0, 0, 0, 1403, 1404, 6, 184, 20, 0, 1404, 1405, 6, 184, 11, 0, 1405, 1406, 6, 184, 33, 0, 1406, 384, 1, 0, 0, 0, 1407, 1408, 3, 55, 20, 0, 1408, 1409, 1, 0, 0, 0, 1409, 1410, 6, 185, 10, 0, 1410, 386, 1, 0, 0, 0, 1411, 1412, 3, 57, 21, 0, 1412, 1413, 1, 0, 0, 0, 1413, 1414, 6, 186, 10, 0, 1414, 388, 1, 0, 0, 0, 1415, 1416, 3, 59, 22, 0, 1416, 1417, 1, 0, 0, 0, 1417, 1418, 6, 187, 10, 0, 1418, 390, 1, 0, 0, 0, 1419, 1420, 3, 335, 160, 0, 1420, 1421, 1, 0, 0, 0, 1421, 1422, 6, 188, 16, 0, 1422, 1423, 6, 188, 11, 0, 1423, 1424, 6, 188, 9, 0, 1424, 392, 1, 0, 0, 0, 
1425, 1426, 3, 99, 42, 0, 1426, 1427, 1, 0, 0, 0, 1427, 1428, 6, 189, 17, 0, 1428, 1429, 6, 189, 11, 0, 1429, 1430, 6, 189, 9, 0, 1430, 394, 1, 0, 0, 0, 1431, 1432, 3, 55, 20, 0, 1432, 1433, 1, 0, 0, 0, 1433, 1434, 6, 190, 10, 0, 1434, 396, 1, 0, 0, 0, 1435, 1436, 3, 57, 21, 0, 1436, 1437, 1, 0, 0, 0, 1437, 1438, 6, 191, 10, 0, 1438, 398, 1, 0, 0, 0, 1439, 1440, 3, 59, 22, 0, 1440, 1441, 1, 0, 0, 0, 1441, 1442, 6, 192, 10, 0, 1442, 400, 1, 0, 0, 0, 1443, 1444, 3, 171, 78, 0, 1444, 1445, 1, 0, 0, 0, 1445, 1446, 6, 193, 11, 0, 1446, 1447, 6, 193, 0, 0, 1447, 1448, 6, 193, 29, 0, 1448, 402, 1, 0, 0, 0, 1449, 1450, 3, 167, 76, 0, 1450, 1451, 1, 0, 0, 0, 1451, 1452, 6, 194, 11, 0, 1452, 1453, 6, 194, 0, 0, 1453, 1454, 6, 194, 30, 0, 1454, 404, 1, 0, 0, 0, 1455, 1456, 3, 89, 37, 0, 1456, 1457, 1, 0, 0, 0, 1457, 1458, 6, 195, 11, 0, 1458, 1459, 6, 195, 0, 0, 1459, 1460, 6, 195, 34, 0, 1460, 406, 1, 0, 0, 0, 1461, 1462, 3, 61, 23, 0, 1462, 1463, 1, 0, 0, 0, 1463, 1464, 6, 196, 14, 0, 1464, 1465, 6, 196, 11, 0, 1465, 408, 1, 0, 0, 0, 65, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 577, 587, 591, 594, 603, 605, 616, 635, 640, 649, 656, 661, 663, 674, 682, 685, 687, 692, 697, 703, 710, 715, 721, 724, 732, 736, 865, 870, 877, 879, 895, 900, 905, 907, 913, 990, 995, 1042, 1046, 1051, 1056, 1061, 1063, 1067, 1069, 1154, 1158, 1163, 1302, 1304, 35, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 65, 0, 5, 0, 0, 7, 24, 0, 7, 66, 0, 7, 104, 0, 7, 33, 0, 7, 31, 0, 7, 76, 0, 7, 25, 0, 7, 35, 0, 7, 47, 0, 7, 64, 0, 7, 80, 0, 5, 10, 0, 5, 7, 0, 7, 90, 0, 7, 89, 0, 7, 68, 0, 7, 67, 0, 7, 88, 0, 5, 12, 0, 5, 14, 0, 7, 28, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index f67daa29ab059..563e2418e7eff 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -8,14 +8,16 @@ * 2.0. */ -import org.antlr.v4.runtime.Lexer; import org.antlr.v4.runtime.CharStream; -import org.antlr.v4.runtime.Token; -import org.antlr.v4.runtime.TokenStream; -import org.antlr.v4.runtime.*; -import org.antlr.v4.runtime.atn.*; +import org.antlr.v4.runtime.RuleContext; +import org.antlr.v4.runtime.RuntimeMetaData; +import org.antlr.v4.runtime.Vocabulary; +import org.antlr.v4.runtime.VocabularyImpl; +import org.antlr.v4.runtime.atn.ATN; +import org.antlr.v4.runtime.atn.ATNDeserializer; +import org.antlr.v4.runtime.atn.LexerATNSimulator; +import org.antlr.v4.runtime.atn.PredictionContextCache; import org.antlr.v4.runtime.dfa.DFA; -import org.antlr.v4.runtime.misc.*; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast", "CheckReturnValue", "this-escape"}) public class EsqlBaseLexer extends LexerConfig { @@ -25,90 +27,90 @@ public class EsqlBaseLexer extends LexerConfig { protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int - DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, - LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, - WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_MATCH=19, DEV_METRICS=20, - UNKNOWN_CMD=21, LINE_COMMENT=22, MULTILINE_COMMENT=23, WS=24, PIPE=25, - QUOTED_STRING=26, INTEGER_LITERAL=27, DECIMAL_LITERAL=28, BY=29, AND=30, - ASC=31, ASSIGN=32, CAST_OP=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, - IN=39, IS=40, LAST=41, LIKE=42, LP=43, NOT=44, NULL=45, NULLS=46, OR=47, - PARAM=48, RLIKE=49, RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, - GT=57, GTE=58, PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, NAMED_OR_POSITIONAL_PARAM=64, - OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, 
QUOTED_IDENTIFIER=68, - EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, EXPLAIN_WS=72, - EXPLAIN_LINE_COMMENT=73, EXPLAIN_MULTILINE_COMMENT=74, METADATA=75, UNQUOTED_SOURCE=76, - FROM_LINE_COMMENT=77, FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80, - PROJECT_LINE_COMMENT=81, PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83, - AS=84, RENAME_LINE_COMMENT=85, RENAME_MULTILINE_COMMENT=86, RENAME_WS=87, - ON=88, WITH=89, ENRICH_POLICY_NAME=90, ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92, - ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94, ENRICH_FIELD_MULTILINE_COMMENT=95, - ENRICH_FIELD_WS=96, MVEXPAND_LINE_COMMENT=97, MVEXPAND_MULTILINE_COMMENT=98, - MVEXPAND_WS=99, INFO=100, SHOW_LINE_COMMENT=101, SHOW_MULTILINE_COMMENT=102, - SHOW_WS=103, COLON=104, SETTING=105, SETTING_LINE_COMMENT=106, SETTTING_MULTILINE_COMMENT=107, - SETTING_WS=108, LOOKUP_LINE_COMMENT=109, LOOKUP_MULTILINE_COMMENT=110, - LOOKUP_WS=111, LOOKUP_FIELD_LINE_COMMENT=112, LOOKUP_FIELD_MULTILINE_COMMENT=113, - LOOKUP_FIELD_WS=114, METRICS_LINE_COMMENT=115, METRICS_MULTILINE_COMMENT=116, - METRICS_WS=117, CLOSING_METRICS_LINE_COMMENT=118, CLOSING_METRICS_MULTILINE_COMMENT=119, + DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, + LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, + WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, UNKNOWN_CMD=20, + LINE_COMMENT=21, MULTILINE_COMMENT=22, WS=23, PIPE=24, QUOTED_STRING=25, + INTEGER_LITERAL=26, DECIMAL_LITERAL=27, BY=28, AND=29, ASC=30, ASSIGN=31, + CAST_OP=32, COMMA=33, DESC=34, DOT=35, FALSE=36, FIRST=37, IN=38, IS=39, + LAST=40, LIKE=41, LP=42, NOT=43, NULL=44, NULLS=45, OR=46, PARAM=47, RLIKE=48, + RP=49, TRUE=50, EQ=51, CIEQ=52, NEQ=53, LT=54, LTE=55, GT=56, GTE=57, + PLUS=58, MINUS=59, ASTERISK=60, SLASH=61, PERCENT=62, DEV_MATCH=63, NAMED_OR_POSITIONAL_PARAM=64, + OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, + EXPR_LINE_COMMENT=69, 
EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, EXPLAIN_WS=72, + EXPLAIN_LINE_COMMENT=73, EXPLAIN_MULTILINE_COMMENT=74, METADATA=75, UNQUOTED_SOURCE=76, + FROM_LINE_COMMENT=77, FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80, + PROJECT_LINE_COMMENT=81, PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83, + AS=84, RENAME_LINE_COMMENT=85, RENAME_MULTILINE_COMMENT=86, RENAME_WS=87, + ON=88, WITH=89, ENRICH_POLICY_NAME=90, ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92, + ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94, ENRICH_FIELD_MULTILINE_COMMENT=95, + ENRICH_FIELD_WS=96, MVEXPAND_LINE_COMMENT=97, MVEXPAND_MULTILINE_COMMENT=98, + MVEXPAND_WS=99, INFO=100, SHOW_LINE_COMMENT=101, SHOW_MULTILINE_COMMENT=102, + SHOW_WS=103, COLON=104, SETTING=105, SETTING_LINE_COMMENT=106, SETTTING_MULTILINE_COMMENT=107, + SETTING_WS=108, LOOKUP_LINE_COMMENT=109, LOOKUP_MULTILINE_COMMENT=110, + LOOKUP_WS=111, LOOKUP_FIELD_LINE_COMMENT=112, LOOKUP_FIELD_MULTILINE_COMMENT=113, + LOOKUP_FIELD_WS=114, METRICS_LINE_COMMENT=115, METRICS_MULTILINE_COMMENT=116, + METRICS_WS=117, CLOSING_METRICS_LINE_COMMENT=118, CLOSING_METRICS_MULTILINE_COMMENT=119, CLOSING_METRICS_WS=120; public static final int - EXPRESSION_MODE=1, EXPLAIN_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5, - ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, SETTING_MODE=10, + EXPRESSION_MODE=1, EXPLAIN_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5, + ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, SETTING_MODE=10, LOOKUP_MODE=11, LOOKUP_FIELD_MODE=12, METRICS_MODE=13, CLOSING_METRICS_MODE=14; public static String[] channelNames = { "DEFAULT_TOKEN_CHANNEL", "HIDDEN" }; public static String[] modeNames = { - "DEFAULT_MODE", "EXPRESSION_MODE", "EXPLAIN_MODE", "FROM_MODE", "PROJECT_MODE", - "RENAME_MODE", "ENRICH_MODE", "ENRICH_FIELD_MODE", "MVEXPAND_MODE", "SHOW_MODE", + "DEFAULT_MODE", "EXPRESSION_MODE", "EXPLAIN_MODE", "FROM_MODE", "PROJECT_MODE", + "RENAME_MODE", "ENRICH_MODE", 
"ENRICH_FIELD_MODE", "MVEXPAND_MODE", "SHOW_MODE", "SETTING_MODE", "LOOKUP_MODE", "LOOKUP_FIELD_MODE", "METRICS_MODE", "CLOSING_METRICS_MODE" }; private static String[] makeRuleNames() { return new String[] { - "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "KEEP", - "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE", - "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_MATCH", "DEV_METRICS", "UNKNOWN_CMD", - "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "DIGIT", "LETTER", - "ESCAPE_SEQUENCE", "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", - "BACKQUOTE_BLOCK", "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", - "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", - "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", - "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", - "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", - "SLASH", "PERCENT", "DEV_MATCH_OP", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", - "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID", "QUOTED_IDENTIFIER", - "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_OPENING_BRACKET", - "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", "FROM_COLON", - "FROM_COMMA", "FROM_ASSIGN", "METADATA", "UNQUOTED_SOURCE_PART", "UNQUOTED_SOURCE", - "FROM_UNQUOTED_SOURCE", "FROM_QUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", - "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", "PROJECT_PARAM", - "PROJECT_NAMED_OR_POSITIONAL_PARAM", "UNQUOTED_ID_BODY_WITH_PATTERN", - "UNQUOTED_ID_PATTERN", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", - "PROJECT_WS", "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", - "RENAME_PARAM", "RENAME_NAMED_OR_POSITIONAL_PARAM", "AS", "RENAME_ID_PATTERN", - "RENAME_LINE_COMMENT", 
"RENAME_MULTILINE_COMMENT", "RENAME_WS", "ENRICH_PIPE", - "ENRICH_OPENING_BRACKET", "ON", "WITH", "ENRICH_POLICY_NAME_BODY", "ENRICH_POLICY_NAME", - "ENRICH_MODE_UNQUOTED_VALUE", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", - "ENRICH_WS", "ENRICH_FIELD_PIPE", "ENRICH_FIELD_ASSIGN", "ENRICH_FIELD_COMMA", - "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH", "ENRICH_FIELD_ID_PATTERN", "ENRICH_FIELD_QUOTED_IDENTIFIER", - "ENRICH_FIELD_PARAM", "ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM", "ENRICH_FIELD_LINE_COMMENT", - "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_PIPE", - "MVEXPAND_DOT", "MVEXPAND_PARAM", "MVEXPAND_NAMED_OR_POSITIONAL_PARAM", - "MVEXPAND_QUOTED_IDENTIFIER", "MVEXPAND_UNQUOTED_IDENTIFIER", "MVEXPAND_LINE_COMMENT", - "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "SHOW_PIPE", "INFO", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING_CLOSING_BRACKET", "COLON", - "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS", - "LOOKUP_PIPE", "LOOKUP_COLON", "LOOKUP_COMMA", "LOOKUP_DOT", "LOOKUP_ON", - "LOOKUP_UNQUOTED_SOURCE", "LOOKUP_QUOTED_SOURCE", "LOOKUP_LINE_COMMENT", - "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", "LOOKUP_FIELD_PIPE", "LOOKUP_FIELD_COMMA", - "LOOKUP_FIELD_DOT", "LOOKUP_FIELD_ID_PATTERN", "LOOKUP_FIELD_LINE_COMMENT", - "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", "METRICS_PIPE", - "METRICS_UNQUOTED_SOURCE", "METRICS_QUOTED_SOURCE", "METRICS_LINE_COMMENT", - "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_COLON", "CLOSING_METRICS_COMMA", - "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", - "CLOSING_METRICS_WS", "CLOSING_METRICS_QUOTED_IDENTIFIER", "CLOSING_METRICS_UNQUOTED_IDENTIFIER", + "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "KEEP", + "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE", + "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "UNKNOWN_CMD", "LINE_COMMENT", + "MULTILINE_COMMENT", "WS", "PIPE", "DIGIT", 
"LETTER", "ESCAPE_SEQUENCE", + "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK", + "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", "INTEGER_LITERAL", + "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", + "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT", + "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", + "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", + "PERCENT", "DEV_MATCH", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", + "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID", "QUOTED_IDENTIFIER", + "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_OPENING_BRACKET", + "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", + "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", "FROM_COLON", + "FROM_COMMA", "FROM_ASSIGN", "METADATA", "UNQUOTED_SOURCE_PART", "UNQUOTED_SOURCE", + "FROM_UNQUOTED_SOURCE", "FROM_QUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", "PROJECT_PARAM", + "PROJECT_NAMED_OR_POSITIONAL_PARAM", "UNQUOTED_ID_BODY_WITH_PATTERN", + "UNQUOTED_ID_PATTERN", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", + "RENAME_PARAM", "RENAME_NAMED_OR_POSITIONAL_PARAM", "AS", "RENAME_ID_PATTERN", + "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ENRICH_PIPE", + "ENRICH_OPENING_BRACKET", "ON", "WITH", "ENRICH_POLICY_NAME_BODY", "ENRICH_POLICY_NAME", + "ENRICH_MODE_UNQUOTED_VALUE", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", + "ENRICH_WS", "ENRICH_FIELD_PIPE", "ENRICH_FIELD_ASSIGN", "ENRICH_FIELD_COMMA", + "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH", "ENRICH_FIELD_ID_PATTERN", "ENRICH_FIELD_QUOTED_IDENTIFIER", + "ENRICH_FIELD_PARAM", "ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM", "ENRICH_FIELD_LINE_COMMENT", + 
"ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_PIPE", + "MVEXPAND_DOT", "MVEXPAND_PARAM", "MVEXPAND_NAMED_OR_POSITIONAL_PARAM", + "MVEXPAND_QUOTED_IDENTIFIER", "MVEXPAND_UNQUOTED_IDENTIFIER", "MVEXPAND_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "SHOW_PIPE", "INFO", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING_CLOSING_BRACKET", "COLON", + "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS", + "LOOKUP_PIPE", "LOOKUP_COLON", "LOOKUP_COMMA", "LOOKUP_DOT", "LOOKUP_ON", + "LOOKUP_UNQUOTED_SOURCE", "LOOKUP_QUOTED_SOURCE", "LOOKUP_LINE_COMMENT", + "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", "LOOKUP_FIELD_PIPE", "LOOKUP_FIELD_COMMA", + "LOOKUP_FIELD_DOT", "LOOKUP_FIELD_ID_PATTERN", "LOOKUP_FIELD_LINE_COMMENT", + "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", "METRICS_PIPE", + "METRICS_UNQUOTED_SOURCE", "METRICS_QUOTED_SOURCE", "METRICS_LINE_COMMENT", + "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_COLON", "CLOSING_METRICS_COMMA", + "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + "CLOSING_METRICS_WS", "CLOSING_METRICS_QUOTED_IDENTIFIER", "CLOSING_METRICS_UNQUOTED_IDENTIFIER", "CLOSING_METRICS_BY", "CLOSING_METRICS_PIPE" }; } @@ -116,46 +118,46 @@ private static String[] makeRuleNames() { private static String[] makeLiteralNames() { return new String[] { - null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", - "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", - "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, - null, "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "'::'", - "','", "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", - "'like'", "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", - "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", - "'+'", "'-'", "'*'", "'/'", "'%'", null, null, "']'", null, null, null, - null, 
null, null, null, null, "'metadata'", null, null, null, null, null, - null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, - null, null, null, null, null, null, null, null, "'info'", null, null, + null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", + "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", + "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, + "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "'::'", "','", + "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", "'like'", + "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", "')'", + "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", + "'-'", "'*'", "'/'", "'%'", null, null, null, "']'", null, null, null, + null, null, null, null, null, "'metadata'", null, null, null, null, null, + null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, + null, null, null, null, null, null, null, null, "'info'", null, null, null, "':'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); private static String[] makeSymbolicNames() { return new String[] { - null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", - "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", - "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_MATCH", "DEV_METRICS", - "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", - "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", - "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", - "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", - "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", - "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", - "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", - "EXPR_WS", 
"EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", - "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", - "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", - "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", - "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", - "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", - "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", - "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", - "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", - "LOOKUP_FIELD_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", - "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", + "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", + "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "UNKNOWN_CMD", + "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", + "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", + "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT", + "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", + "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", + "PERCENT", "DEV_MATCH", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", + "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", + "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", + "EXPLAIN_MULTILINE_COMMENT", "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", + "FROM_MULTILINE_COMMENT", "FROM_WS", "ID_PATTERN", 
"PROJECT_LINE_COMMENT", + "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", + "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", + "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", + "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", + "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", + "LOOKUP_FIELD_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", + "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS" }; } @@ -226,11 +228,9 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { case 17: return DEV_LOOKUP_sempred((RuleContext)_localctx, predIndex); case 18: - return DEV_MATCH_sempred((RuleContext)_localctx, predIndex); - case 19: return DEV_METRICS_sempred((RuleContext)_localctx, predIndex); - case 73: - return DEV_MATCH_OP_sempred((RuleContext)_localctx, predIndex); + case 72: + return DEV_MATCH_sempred((RuleContext)_localctx, predIndex); } return true; } @@ -248,30 +248,23 @@ private boolean DEV_LOOKUP_sempred(RuleContext _localctx, int predIndex) { } return true; } - private boolean DEV_MATCH_sempred(RuleContext _localctx, int predIndex) { + private boolean DEV_METRICS_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 2: return this.isDevVersion(); } return true; } - private boolean DEV_METRICS_sempred(RuleContext _localctx, int predIndex) { + private boolean DEV_MATCH_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 3: return this.isDevVersion(); } return true; } - private boolean DEV_MATCH_OP_sempred(RuleContext _localctx, int 
predIndex) { - switch (predIndex) { - case 4: - return this.isDevVersion(); - } - return true; - } public static final String _serializedATN = - "\u0004\u0000x\u05c3\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ + "\u0004\u0000x\u05ba\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ @@ -327,908 +320,902 @@ private boolean DEV_MATCH_OP_sempred(RuleContext _localctx, int predIndex) { "\u00ba\u0007\u00ba\u0002\u00bb\u0007\u00bb\u0002\u00bc\u0007\u00bc\u0002"+ "\u00bd\u0007\u00bd\u0002\u00be\u0007\u00be\u0002\u00bf\u0007\u00bf\u0002"+ "\u00c0\u0007\u00c0\u0002\u00c1\u0007\u00c1\u0002\u00c2\u0007\u00c2\u0002"+ - "\u00c3\u0007\u00c3\u0002\u00c4\u0007\u00c4\u0002\u00c5\u0007\u00c5\u0001"+ + "\u00c3\u0007\u00c3\u0002\u00c4\u0007\u00c4\u0001\u0000\u0001\u0000\u0001"+ "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ - "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001"+ + "\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ - "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ - "\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - 
"\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001"+ - "\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b"+ - "\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001"+ - "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001"+ - "\r\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f"+ - "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ + "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ + "\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ + "\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001"+ + "\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ + "\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ + "\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e"+ + "\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f"+ + "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010"+ "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ 
"\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ - "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011"+ - "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ - "\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012"+ - "\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013"+ - "\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013"+ - "\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0014\u0004\u0014\u024b\b\u0014"+ - "\u000b\u0014\f\u0014\u024c\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015"+ - "\u0001\u0015\u0001\u0015\u0005\u0015\u0255\b\u0015\n\u0015\f\u0015\u0258"+ - "\t\u0015\u0001\u0015\u0003\u0015\u025b\b\u0015\u0001\u0015\u0003\u0015"+ - "\u025e\b\u0015\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016"+ - "\u0001\u0016\u0001\u0016\u0005\u0016\u0267\b\u0016\n\u0016\f\u0016\u026a"+ - "\t\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001"+ - "\u0017\u0004\u0017\u0272\b\u0017\u000b\u0017\f\u0017\u0273\u0001\u0017"+ - "\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0019"+ - "\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001\u001b"+ - "\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0003\u001d\u0287\b\u001d"+ - "\u0001\u001d\u0004\u001d\u028a\b\u001d\u000b\u001d\f\u001d\u028b\u0001"+ - "\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001 \u0003"+ - " \u0295\b \u0001!\u0001!\u0001\"\u0001\"\u0001\"\u0003\"\u029c\b\"\u0001"+ - "#\u0001#\u0001#\u0005#\u02a1\b#\n#\f#\u02a4\t#\u0001#\u0001#\u0001#\u0001"+ - "#\u0001#\u0001#\u0005#\u02ac\b#\n#\f#\u02af\t#\u0001#\u0001#\u0001#\u0001"+ - "#\u0001#\u0003#\u02b6\b#\u0001#\u0003#\u02b9\b#\u0003#\u02bb\b#\u0001"+ - "$\u0004$\u02be\b$\u000b$\f$\u02bf\u0001%\u0004%\u02c3\b%\u000b%\f%\u02c4"+ - "\u0001%\u0001%\u0005%\u02c9\b%\n%\f%\u02cc\t%\u0001%\u0001%\u0004%\u02d0"+ - 
"\b%\u000b%\f%\u02d1\u0001%\u0004%\u02d5\b%\u000b%\f%\u02d6\u0001%\u0001"+ - "%\u0005%\u02db\b%\n%\f%\u02de\t%\u0003%\u02e0\b%\u0001%\u0001%\u0001%"+ - "\u0001%\u0004%\u02e6\b%\u000b%\f%\u02e7\u0001%\u0001%\u0003%\u02ec\b%"+ - "\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001\'\u0001\'\u0001(\u0001(\u0001"+ - "(\u0001(\u0001)\u0001)\u0001*\u0001*\u0001*\u0001+\u0001+\u0001,\u0001"+ - ",\u0001,\u0001,\u0001,\u0001-\u0001-\u0001.\u0001.\u0001.\u0001.\u0001"+ - ".\u0001.\u0001/\u0001/\u0001/\u0001/\u0001/\u0001/\u00010\u00010\u0001"+ - "0\u00011\u00011\u00011\u00012\u00012\u00012\u00012\u00012\u00013\u0001"+ - "3\u00013\u00013\u00013\u00014\u00014\u00015\u00015\u00015\u00015\u0001"+ - "6\u00016\u00016\u00016\u00016\u00017\u00017\u00017\u00017\u00017\u0001"+ - "7\u00018\u00018\u00018\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0001"+ - ":\u0001:\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001=\u0001"+ - "=\u0001=\u0001>\u0001>\u0001>\u0001?\u0001?\u0001?\u0001@\u0001@\u0001"+ - "A\u0001A\u0001A\u0001B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001"+ - "E\u0001E\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001I\u0001I\u0001"+ - "I\u0001I\u0001I\u0001J\u0001J\u0001J\u0003J\u036b\bJ\u0001J\u0005J\u036e"+ - "\bJ\nJ\fJ\u0371\tJ\u0001J\u0001J\u0004J\u0375\bJ\u000bJ\fJ\u0376\u0003"+ - "J\u0379\bJ\u0001K\u0001K\u0001K\u0001K\u0001K\u0001L\u0001L\u0001L\u0001"+ - "L\u0001L\u0001M\u0001M\u0005M\u0387\bM\nM\fM\u038a\tM\u0001M\u0001M\u0003"+ - "M\u038e\bM\u0001M\u0004M\u0391\bM\u000bM\fM\u0392\u0003M\u0395\bM\u0001"+ - "N\u0001N\u0004N\u0399\bN\u000bN\fN\u039a\u0001N\u0001N\u0001O\u0001O\u0001"+ - "P\u0001P\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001"+ - "R\u0001R\u0001S\u0001S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001"+ - "T\u0001T\u0001U\u0001U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001"+ - "W\u0001W\u0001W\u0001W\u0001X\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001"+ - "Y\u0001Y\u0001Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001"+ - 
"[\u0001\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001^\u0001"+ - "^\u0001^\u0001^\u0001^\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001"+ - "_\u0003_\u03e8\b_\u0001`\u0004`\u03eb\b`\u000b`\f`\u03ec\u0001a\u0001"+ - "a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001c\u0001c\u0001"+ - "c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001e\u0001e\u0001f\u0001"+ - "f\u0001f\u0001f\u0001f\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001"+ - "h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001"+ - "k\u0001k\u0001k\u0001k\u0003k\u041c\bk\u0001l\u0001l\u0003l\u0420\bl\u0001"+ - "l\u0005l\u0423\bl\nl\fl\u0426\tl\u0001l\u0001l\u0003l\u042a\bl\u0001l"+ - "\u0004l\u042d\bl\u000bl\fl\u042e\u0003l\u0431\bl\u0001m\u0001m\u0004m"+ - "\u0435\bm\u000bm\fm\u0436\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001"+ - "o\u0001o\u0001p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001"+ - "q\u0001r\u0001r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001t\u0001"+ - "t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001"+ - "v\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001"+ - "y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001"+ - "|\u0001|\u0001|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001}\u0001"+ - "~\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f\u0001"+ - "\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080\u0001\u0080\u0001"+ - "\u0081\u0004\u0081\u048a\b\u0081\u000b\u0081\f\u0081\u048b\u0001\u0081"+ - "\u0001\u0081\u0003\u0081\u0490\b\u0081\u0001\u0081\u0004\u0081\u0493\b"+ - "\u0081\u000b\u0081\f\u0081\u0494\u0001\u0082\u0001\u0082\u0001\u0082\u0001"+ - "\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001"+ - "\u0084\u0001\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001"+ - "\u0085\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001"+ - "\u0086\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001"+ - 
"\u0088\u0001\u0088\u0001\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001"+ - "\u0089\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001"+ - "\u008b\u0001\u008b\u0001\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001"+ - "\u008c\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001"+ - "\u008e\u0001\u008e\u0001\u008e\u0001\u008f\u0001\u008f\u0001\u008f\u0001"+ - "\u008f\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001"+ - "\u0091\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001"+ - "\u0092\u0001\u0092\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001"+ - "\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001"+ - "\u0095\u0001\u0095\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001"+ - "\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001"+ - "\u0098\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001"+ - "\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b\u0001"+ - "\u009b\u0001\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001\u009c\u0001"+ - "\u009c\u0001\u009c\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0001"+ - "\u009e\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009f\u0001\u009f\u0001"+ - "\u009f\u0001\u009f\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001"+ - "\u00a0\u0001\u00a1\u0001\u00a1\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001"+ - "\u00a2\u0001\u00a2\u0004\u00a2\u0520\b\u00a2\u000b\u00a2\f\u00a2\u0521"+ - "\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a4\u0001\u00a4"+ - "\u0001\u00a4\u0001\u00a4\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5"+ - "\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a7"+ - "\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a8\u0001\u00a8\u0001\u00a8"+ - "\u0001\u00a8\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00aa"+ - "\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00ab\u0001\u00ab"+ - 
"\u0001\u00ab\u0001\u00ab\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ac"+ - "\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ae\u0001\u00ae"+ - "\u0001\u00ae\u0001\u00ae\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00af"+ - "\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b0"+ - "\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b2\u0001\u00b2"+ - "\u0001\u00b2\u0001\u00b2\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b3"+ - "\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b5\u0001\u00b5"+ - "\u0001\u00b5\u0001\u00b5\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6"+ - "\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b8"+ - "\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b9"+ - "\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00ba"+ - "\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb"+ - "\u0001\u00bb\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd"+ - "\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00be"+ - "\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00bf"+ - "\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00c0\u0001\u00c0\u0001\u00c0"+ - "\u0001\u00c0\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c2"+ - "\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c3"+ - "\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c4"+ - "\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001\u00c5"+ - "\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0002\u0268\u02ad\u0000"+ - "\u00c6\u000f\u0001\u0011\u0002\u0013\u0003\u0015\u0004\u0017\u0005\u0019"+ - "\u0006\u001b\u0007\u001d\b\u001f\t!\n#\u000b%\f\'\r)\u000e+\u000f-\u0010"+ - "/\u00111\u00123\u00135\u00147\u00159\u0016;\u0017=\u0018?\u0019A\u0000"+ - "C\u0000E\u0000G\u0000I\u0000K\u0000M\u0000O\u0000Q\u0000S\u0000U\u001a"+ - "W\u001bY\u001c[\u001d]\u001e_\u001fa 
c!e\"g#i$k%m&o\'q(s)u*w+y,{-}.\u007f"+ - "/\u00810\u00831\u00852\u00873\u00894\u008b5\u008d6\u008f7\u00918\u0093"+ - "9\u0095:\u0097;\u0099<\u009b=\u009d>\u009f?\u00a1\u0000\u00a3@\u00a5A"+ - "\u00a7B\u00a9C\u00ab\u0000\u00adD\u00afE\u00b1F\u00b3G\u00b5\u0000\u00b7"+ - "\u0000\u00b9H\u00bbI\u00bdJ\u00bf\u0000\u00c1\u0000\u00c3\u0000\u00c5"+ - "\u0000\u00c7\u0000\u00c9\u0000\u00cbK\u00cd\u0000\u00cfL\u00d1\u0000\u00d3"+ - "\u0000\u00d5M\u00d7N\u00d9O\u00db\u0000\u00dd\u0000\u00df\u0000\u00e1"+ - "\u0000\u00e3\u0000\u00e5\u0000\u00e7\u0000\u00e9P\u00ebQ\u00edR\u00ef"+ - "S\u00f1\u0000\u00f3\u0000\u00f5\u0000\u00f7\u0000\u00f9\u0000\u00fb\u0000"+ - "\u00fdT\u00ff\u0000\u0101U\u0103V\u0105W\u0107\u0000\u0109\u0000\u010b"+ - "X\u010dY\u010f\u0000\u0111Z\u0113\u0000\u0115[\u0117\\\u0119]\u011b\u0000"+ - "\u011d\u0000\u011f\u0000\u0121\u0000\u0123\u0000\u0125\u0000\u0127\u0000"+ - "\u0129\u0000\u012b\u0000\u012d^\u012f_\u0131`\u0133\u0000\u0135\u0000"+ - "\u0137\u0000\u0139\u0000\u013b\u0000\u013d\u0000\u013fa\u0141b\u0143c"+ - "\u0145\u0000\u0147d\u0149e\u014bf\u014dg\u014f\u0000\u0151h\u0153i\u0155"+ - "j\u0157k\u0159l\u015b\u0000\u015d\u0000\u015f\u0000\u0161\u0000\u0163"+ - "\u0000\u0165\u0000\u0167\u0000\u0169m\u016bn\u016do\u016f\u0000\u0171"+ - "\u0000\u0173\u0000\u0175\u0000\u0177p\u0179q\u017br\u017d\u0000\u017f"+ - "\u0000\u0181\u0000\u0183s\u0185t\u0187u\u0189\u0000\u018b\u0000\u018d"+ - "v\u018fw\u0191x\u0193\u0000\u0195\u0000\u0197\u0000\u0199\u0000\u000f"+ - "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e"+ - "#\u0002\u0000DDdd\u0002\u0000IIii\u0002\u0000SSss\u0002\u0000EEee\u0002"+ - "\u0000CCcc\u0002\u0000TTtt\u0002\u0000RRrr\u0002\u0000OOoo\u0002\u0000"+ - "PPpp\u0002\u0000NNnn\u0002\u0000HHhh\u0002\u0000VVvv\u0002\u0000AAaa\u0002"+ - "\u0000LLll\u0002\u0000XXxx\u0002\u0000FFff\u0002\u0000MMmm\u0002\u0000"+ - "GGgg\u0002\u0000KKkk\u0002\u0000WWww\u0002\u0000UUuu\u0006\u0000\t\n\r"+ - "\r 
//[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002"+ - "\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002"+ - "\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002\u0000YYyy\u000b\u0000\t"+ - "\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u000b\u0000\t\n\r\r \"#,,"+ - "//::<<>?\\\\||\u05df\u0000\u000f\u0001\u0000\u0000\u0000\u0000\u0011\u0001"+ - "\u0000\u0000\u0000\u0000\u0013\u0001\u0000\u0000\u0000\u0000\u0015\u0001"+ - "\u0000\u0000\u0000\u0000\u0017\u0001\u0000\u0000\u0000\u0000\u0019\u0001"+ - "\u0000\u0000\u0000\u0000\u001b\u0001\u0000\u0000\u0000\u0000\u001d\u0001"+ - "\u0000\u0000\u0000\u0000\u001f\u0001\u0000\u0000\u0000\u0000!\u0001\u0000"+ - "\u0000\u0000\u0000#\u0001\u0000\u0000\u0000\u0000%\u0001\u0000\u0000\u0000"+ - "\u0000\'\u0001\u0000\u0000\u0000\u0000)\u0001\u0000\u0000\u0000\u0000"+ - "+\u0001\u0000\u0000\u0000\u0000-\u0001\u0000\u0000\u0000\u0000/\u0001"+ - "\u0000\u0000\u0000\u00001\u0001\u0000\u0000\u0000\u00003\u0001\u0000\u0000"+ - "\u0000\u00005\u0001\u0000\u0000\u0000\u00007\u0001\u0000\u0000\u0000\u0000"+ - "9\u0001\u0000\u0000\u0000\u0000;\u0001\u0000\u0000\u0000\u0000=\u0001"+ - "\u0000\u0000\u0000\u0001?\u0001\u0000\u0000\u0000\u0001U\u0001\u0000\u0000"+ - "\u0000\u0001W\u0001\u0000\u0000\u0000\u0001Y\u0001\u0000\u0000\u0000\u0001"+ - "[\u0001\u0000\u0000\u0000\u0001]\u0001\u0000\u0000\u0000\u0001_\u0001"+ - "\u0000\u0000\u0000\u0001a\u0001\u0000\u0000\u0000\u0001c\u0001\u0000\u0000"+ - "\u0000\u0001e\u0001\u0000\u0000\u0000\u0001g\u0001\u0000\u0000\u0000\u0001"+ - "i\u0001\u0000\u0000\u0000\u0001k\u0001\u0000\u0000\u0000\u0001m\u0001"+ - "\u0000\u0000\u0000\u0001o\u0001\u0000\u0000\u0000\u0001q\u0001\u0000\u0000"+ - "\u0000\u0001s\u0001\u0000\u0000\u0000\u0001u\u0001\u0000\u0000\u0000\u0001"+ - "w\u0001\u0000\u0000\u0000\u0001y\u0001\u0000\u0000\u0000\u0001{\u0001"+ - "\u0000\u0000\u0000\u0001}\u0001\u0000\u0000\u0000\u0001\u007f\u0001\u0000"+ - 
"\u0000\u0000\u0001\u0081\u0001\u0000\u0000\u0000\u0001\u0083\u0001\u0000"+ - "\u0000\u0000\u0001\u0085\u0001\u0000\u0000\u0000\u0001\u0087\u0001\u0000"+ - "\u0000\u0000\u0001\u0089\u0001\u0000\u0000\u0000\u0001\u008b\u0001\u0000"+ - "\u0000\u0000\u0001\u008d\u0001\u0000\u0000\u0000\u0001\u008f\u0001\u0000"+ - "\u0000\u0000\u0001\u0091\u0001\u0000\u0000\u0000\u0001\u0093\u0001\u0000"+ - "\u0000\u0000\u0001\u0095\u0001\u0000\u0000\u0000\u0001\u0097\u0001\u0000"+ - "\u0000\u0000\u0001\u0099\u0001\u0000\u0000\u0000\u0001\u009b\u0001\u0000"+ - "\u0000\u0000\u0001\u009d\u0001\u0000\u0000\u0000\u0001\u009f\u0001\u0000"+ - "\u0000\u0000\u0001\u00a1\u0001\u0000\u0000\u0000\u0001\u00a3\u0001\u0000"+ - "\u0000\u0000\u0001\u00a5\u0001\u0000\u0000\u0000\u0001\u00a7\u0001\u0000"+ - "\u0000\u0000\u0001\u00a9\u0001\u0000\u0000\u0000\u0001\u00ad\u0001\u0000"+ - "\u0000\u0000\u0001\u00af\u0001\u0000\u0000\u0000\u0001\u00b1\u0001\u0000"+ - "\u0000\u0000\u0001\u00b3\u0001\u0000\u0000\u0000\u0002\u00b5\u0001\u0000"+ - "\u0000\u0000\u0002\u00b7\u0001\u0000\u0000\u0000\u0002\u00b9\u0001\u0000"+ - "\u0000\u0000\u0002\u00bb\u0001\u0000\u0000\u0000\u0002\u00bd\u0001\u0000"+ - "\u0000\u0000\u0003\u00bf\u0001\u0000\u0000\u0000\u0003\u00c1\u0001\u0000"+ - "\u0000\u0000\u0003\u00c3\u0001\u0000\u0000\u0000\u0003\u00c5\u0001\u0000"+ - "\u0000\u0000\u0003\u00c7\u0001\u0000\u0000\u0000\u0003\u00c9\u0001\u0000"+ - "\u0000\u0000\u0003\u00cb\u0001\u0000\u0000\u0000\u0003\u00cf\u0001\u0000"+ - "\u0000\u0000\u0003\u00d1\u0001\u0000\u0000\u0000\u0003\u00d3\u0001\u0000"+ - "\u0000\u0000\u0003\u00d5\u0001\u0000\u0000\u0000\u0003\u00d7\u0001\u0000"+ - "\u0000\u0000\u0003\u00d9\u0001\u0000\u0000\u0000\u0004\u00db\u0001\u0000"+ - "\u0000\u0000\u0004\u00dd\u0001\u0000\u0000\u0000\u0004\u00df\u0001\u0000"+ - "\u0000\u0000\u0004\u00e1\u0001\u0000\u0000\u0000\u0004\u00e3\u0001\u0000"+ - "\u0000\u0000\u0004\u00e9\u0001\u0000\u0000\u0000\u0004\u00eb\u0001\u0000"+ - 
"\u0000\u0000\u0004\u00ed\u0001\u0000\u0000\u0000\u0004\u00ef\u0001\u0000"+ - "\u0000\u0000\u0005\u00f1\u0001\u0000\u0000\u0000\u0005\u00f3\u0001\u0000"+ - "\u0000\u0000\u0005\u00f5\u0001\u0000\u0000\u0000\u0005\u00f7\u0001\u0000"+ - "\u0000\u0000\u0005\u00f9\u0001\u0000\u0000\u0000\u0005\u00fb\u0001\u0000"+ - "\u0000\u0000\u0005\u00fd\u0001\u0000\u0000\u0000\u0005\u00ff\u0001\u0000"+ - "\u0000\u0000\u0005\u0101\u0001\u0000\u0000\u0000\u0005\u0103\u0001\u0000"+ - "\u0000\u0000\u0005\u0105\u0001\u0000\u0000\u0000\u0006\u0107\u0001\u0000"+ - "\u0000\u0000\u0006\u0109\u0001\u0000\u0000\u0000\u0006\u010b\u0001\u0000"+ - "\u0000\u0000\u0006\u010d\u0001\u0000\u0000\u0000\u0006\u0111\u0001\u0000"+ - "\u0000\u0000\u0006\u0113\u0001\u0000\u0000\u0000\u0006\u0115\u0001\u0000"+ - "\u0000\u0000\u0006\u0117\u0001\u0000\u0000\u0000\u0006\u0119\u0001\u0000"+ - "\u0000\u0000\u0007\u011b\u0001\u0000\u0000\u0000\u0007\u011d\u0001\u0000"+ - "\u0000\u0000\u0007\u011f\u0001\u0000\u0000\u0000\u0007\u0121\u0001\u0000"+ - "\u0000\u0000\u0007\u0123\u0001\u0000\u0000\u0000\u0007\u0125\u0001\u0000"+ - "\u0000\u0000\u0007\u0127\u0001\u0000\u0000\u0000\u0007\u0129\u0001\u0000"+ - "\u0000\u0000\u0007\u012b\u0001\u0000\u0000\u0000\u0007\u012d\u0001\u0000"+ - "\u0000\u0000\u0007\u012f\u0001\u0000\u0000\u0000\u0007\u0131\u0001\u0000"+ - "\u0000\u0000\b\u0133\u0001\u0000\u0000\u0000\b\u0135\u0001\u0000\u0000"+ - "\u0000\b\u0137\u0001\u0000\u0000\u0000\b\u0139\u0001\u0000\u0000\u0000"+ - "\b\u013b\u0001\u0000\u0000\u0000\b\u013d\u0001\u0000\u0000\u0000\b\u013f"+ - "\u0001\u0000\u0000\u0000\b\u0141\u0001\u0000\u0000\u0000\b\u0143\u0001"+ - "\u0000\u0000\u0000\t\u0145\u0001\u0000\u0000\u0000\t\u0147\u0001\u0000"+ - "\u0000\u0000\t\u0149\u0001\u0000\u0000\u0000\t\u014b\u0001\u0000\u0000"+ - "\u0000\t\u014d\u0001\u0000\u0000\u0000\n\u014f\u0001\u0000\u0000\u0000"+ - "\n\u0151\u0001\u0000\u0000\u0000\n\u0153\u0001\u0000\u0000\u0000\n\u0155"+ - 
"\u0001\u0000\u0000\u0000\n\u0157\u0001\u0000\u0000\u0000\n\u0159\u0001"+ - "\u0000\u0000\u0000\u000b\u015b\u0001\u0000\u0000\u0000\u000b\u015d\u0001"+ - "\u0000\u0000\u0000\u000b\u015f\u0001\u0000\u0000\u0000\u000b\u0161\u0001"+ - "\u0000\u0000\u0000\u000b\u0163\u0001\u0000\u0000\u0000\u000b\u0165\u0001"+ - "\u0000\u0000\u0000\u000b\u0167\u0001\u0000\u0000\u0000\u000b\u0169\u0001"+ - "\u0000\u0000\u0000\u000b\u016b\u0001\u0000\u0000\u0000\u000b\u016d\u0001"+ - "\u0000\u0000\u0000\f\u016f\u0001\u0000\u0000\u0000\f\u0171\u0001\u0000"+ - "\u0000\u0000\f\u0173\u0001\u0000\u0000\u0000\f\u0175\u0001\u0000\u0000"+ - "\u0000\f\u0177\u0001\u0000\u0000\u0000\f\u0179\u0001\u0000\u0000\u0000"+ - "\f\u017b\u0001\u0000\u0000\u0000\r\u017d\u0001\u0000\u0000\u0000\r\u017f"+ - "\u0001\u0000\u0000\u0000\r\u0181\u0001\u0000\u0000\u0000\r\u0183\u0001"+ - "\u0000\u0000\u0000\r\u0185\u0001\u0000\u0000\u0000\r\u0187\u0001\u0000"+ - "\u0000\u0000\u000e\u0189\u0001\u0000\u0000\u0000\u000e\u018b\u0001\u0000"+ - "\u0000\u0000\u000e\u018d\u0001\u0000\u0000\u0000\u000e\u018f\u0001\u0000"+ - "\u0000\u0000\u000e\u0191\u0001\u0000\u0000\u0000\u000e\u0193\u0001\u0000"+ - "\u0000\u0000\u000e\u0195\u0001\u0000\u0000\u0000\u000e\u0197\u0001\u0000"+ - "\u0000\u0000\u000e\u0199\u0001\u0000\u0000\u0000\u000f\u019b\u0001\u0000"+ - "\u0000\u0000\u0011\u01a5\u0001\u0000\u0000\u0000\u0013\u01ac\u0001\u0000"+ - "\u0000\u0000\u0015\u01b5\u0001\u0000\u0000\u0000\u0017\u01bc\u0001\u0000"+ - "\u0000\u0000\u0019\u01c6\u0001\u0000\u0000\u0000\u001b\u01cd\u0001\u0000"+ - "\u0000\u0000\u001d\u01d4\u0001\u0000\u0000\u0000\u001f\u01db\u0001\u0000"+ - "\u0000\u0000!\u01e3\u0001\u0000\u0000\u0000#\u01ef\u0001\u0000\u0000\u0000"+ - "%\u01f8\u0001\u0000\u0000\u0000\'\u01fe\u0001\u0000\u0000\u0000)\u0205"+ - "\u0001\u0000\u0000\u0000+\u020c\u0001\u0000\u0000\u0000-\u0214\u0001\u0000"+ - "\u0000\u0000/\u021c\u0001\u0000\u0000\u00001\u022b\u0001\u0000\u0000\u0000"+ - 
"3\u0235\u0001\u0000\u0000\u00005\u023e\u0001\u0000\u0000\u00007\u024a"+ - "\u0001\u0000\u0000\u00009\u0250\u0001\u0000\u0000\u0000;\u0261\u0001\u0000"+ - "\u0000\u0000=\u0271\u0001\u0000\u0000\u0000?\u0277\u0001\u0000\u0000\u0000"+ - "A\u027b\u0001\u0000\u0000\u0000C\u027d\u0001\u0000\u0000\u0000E\u027f"+ - "\u0001\u0000\u0000\u0000G\u0282\u0001\u0000\u0000\u0000I\u0284\u0001\u0000"+ - "\u0000\u0000K\u028d\u0001\u0000\u0000\u0000M\u028f\u0001\u0000\u0000\u0000"+ - "O\u0294\u0001\u0000\u0000\u0000Q\u0296\u0001\u0000\u0000\u0000S\u029b"+ - "\u0001\u0000\u0000\u0000U\u02ba\u0001\u0000\u0000\u0000W\u02bd\u0001\u0000"+ - "\u0000\u0000Y\u02eb\u0001\u0000\u0000\u0000[\u02ed\u0001\u0000\u0000\u0000"+ - "]\u02f0\u0001\u0000\u0000\u0000_\u02f4\u0001\u0000\u0000\u0000a\u02f8"+ - "\u0001\u0000\u0000\u0000c\u02fa\u0001\u0000\u0000\u0000e\u02fd\u0001\u0000"+ - "\u0000\u0000g\u02ff\u0001\u0000\u0000\u0000i\u0304\u0001\u0000\u0000\u0000"+ - "k\u0306\u0001\u0000\u0000\u0000m\u030c\u0001\u0000\u0000\u0000o\u0312"+ - "\u0001\u0000\u0000\u0000q\u0315\u0001\u0000\u0000\u0000s\u0318\u0001\u0000"+ - "\u0000\u0000u\u031d\u0001\u0000\u0000\u0000w\u0322\u0001\u0000\u0000\u0000"+ - "y\u0324\u0001\u0000\u0000\u0000{\u0328\u0001\u0000\u0000\u0000}\u032d"+ - "\u0001\u0000\u0000\u0000\u007f\u0333\u0001\u0000\u0000\u0000\u0081\u0336"+ - "\u0001\u0000\u0000\u0000\u0083\u0338\u0001\u0000\u0000\u0000\u0085\u033e"+ - "\u0001\u0000\u0000\u0000\u0087\u0340\u0001\u0000\u0000\u0000\u0089\u0345"+ - "\u0001\u0000\u0000\u0000\u008b\u0348\u0001\u0000\u0000\u0000\u008d\u034b"+ - "\u0001\u0000\u0000\u0000\u008f\u034e\u0001\u0000\u0000\u0000\u0091\u0350"+ - "\u0001\u0000\u0000\u0000\u0093\u0353\u0001\u0000\u0000\u0000\u0095\u0355"+ - "\u0001\u0000\u0000\u0000\u0097\u0358\u0001\u0000\u0000\u0000\u0099\u035a"+ - "\u0001\u0000\u0000\u0000\u009b\u035c\u0001\u0000\u0000\u0000\u009d\u035e"+ - "\u0001\u0000\u0000\u0000\u009f\u0360\u0001\u0000\u0000\u0000\u00a1\u0362"+ - 
"\u0001\u0000\u0000\u0000\u00a3\u0378\u0001\u0000\u0000\u0000\u00a5\u037a"+ - "\u0001\u0000\u0000\u0000\u00a7\u037f\u0001\u0000\u0000\u0000\u00a9\u0394"+ - "\u0001\u0000\u0000\u0000\u00ab\u0396\u0001\u0000\u0000\u0000\u00ad\u039e"+ - "\u0001\u0000\u0000\u0000\u00af\u03a0\u0001\u0000\u0000\u0000\u00b1\u03a4"+ - "\u0001\u0000\u0000\u0000\u00b3\u03a8\u0001\u0000\u0000\u0000\u00b5\u03ac"+ - "\u0001\u0000\u0000\u0000\u00b7\u03b1\u0001\u0000\u0000\u0000\u00b9\u03b6"+ - "\u0001\u0000\u0000\u0000\u00bb\u03ba\u0001\u0000\u0000\u0000\u00bd\u03be"+ - "\u0001\u0000\u0000\u0000\u00bf\u03c2\u0001\u0000\u0000\u0000\u00c1\u03c7"+ - "\u0001\u0000\u0000\u0000\u00c3\u03cb\u0001\u0000\u0000\u0000\u00c5\u03cf"+ - "\u0001\u0000\u0000\u0000\u00c7\u03d3\u0001\u0000\u0000\u0000\u00c9\u03d7"+ - "\u0001\u0000\u0000\u0000\u00cb\u03db\u0001\u0000\u0000\u0000\u00cd\u03e7"+ - "\u0001\u0000\u0000\u0000\u00cf\u03ea\u0001\u0000\u0000\u0000\u00d1\u03ee"+ - "\u0001\u0000\u0000\u0000\u00d3\u03f2\u0001\u0000\u0000\u0000\u00d5\u03f6"+ - "\u0001\u0000\u0000\u0000\u00d7\u03fa\u0001\u0000\u0000\u0000\u00d9\u03fe"+ - "\u0001\u0000\u0000\u0000\u00db\u0402\u0001\u0000\u0000\u0000\u00dd\u0407"+ - "\u0001\u0000\u0000\u0000\u00df\u040b\u0001\u0000\u0000\u0000\u00e1\u040f"+ - "\u0001\u0000\u0000\u0000\u00e3\u0413\u0001\u0000\u0000\u0000\u00e5\u041b"+ - "\u0001\u0000\u0000\u0000\u00e7\u0430\u0001\u0000\u0000\u0000\u00e9\u0434"+ - "\u0001\u0000\u0000\u0000\u00eb\u0438\u0001\u0000\u0000\u0000\u00ed\u043c"+ - "\u0001\u0000\u0000\u0000\u00ef\u0440\u0001\u0000\u0000\u0000\u00f1\u0444"+ - "\u0001\u0000\u0000\u0000\u00f3\u0449\u0001\u0000\u0000\u0000\u00f5\u044d"+ - "\u0001\u0000\u0000\u0000\u00f7\u0451\u0001\u0000\u0000\u0000\u00f9\u0455"+ - "\u0001\u0000\u0000\u0000\u00fb\u0459\u0001\u0000\u0000\u0000\u00fd\u045d"+ - "\u0001\u0000\u0000\u0000\u00ff\u0460\u0001\u0000\u0000\u0000\u0101\u0464"+ - "\u0001\u0000\u0000\u0000\u0103\u0468\u0001\u0000\u0000\u0000\u0105\u046c"+ - 
"\u0001\u0000\u0000\u0000\u0107\u0470\u0001\u0000\u0000\u0000\u0109\u0475"+ - "\u0001\u0000\u0000\u0000\u010b\u047a\u0001\u0000\u0000\u0000\u010d\u047f"+ - "\u0001\u0000\u0000\u0000\u010f\u0486\u0001\u0000\u0000\u0000\u0111\u048f"+ - "\u0001\u0000\u0000\u0000\u0113\u0496\u0001\u0000\u0000\u0000\u0115\u049a"+ - "\u0001\u0000\u0000\u0000\u0117\u049e\u0001\u0000\u0000\u0000\u0119\u04a2"+ - "\u0001\u0000\u0000\u0000\u011b\u04a6\u0001\u0000\u0000\u0000\u011d\u04ac"+ - "\u0001\u0000\u0000\u0000\u011f\u04b0\u0001\u0000\u0000\u0000\u0121\u04b4"+ - "\u0001\u0000\u0000\u0000\u0123\u04b8\u0001\u0000\u0000\u0000\u0125\u04bc"+ - "\u0001\u0000\u0000\u0000\u0127\u04c0\u0001\u0000\u0000\u0000\u0129\u04c4"+ - "\u0001\u0000\u0000\u0000\u012b\u04c8\u0001\u0000\u0000\u0000\u012d\u04cc"+ - "\u0001\u0000\u0000\u0000\u012f\u04d0\u0001\u0000\u0000\u0000\u0131\u04d4"+ - "\u0001\u0000\u0000\u0000\u0133\u04d8\u0001\u0000\u0000\u0000\u0135\u04dd"+ - "\u0001\u0000\u0000\u0000\u0137\u04e1\u0001\u0000\u0000\u0000\u0139\u04e5"+ - "\u0001\u0000\u0000\u0000\u013b\u04e9\u0001\u0000\u0000\u0000\u013d\u04ed"+ - "\u0001\u0000\u0000\u0000\u013f\u04f1\u0001\u0000\u0000\u0000\u0141\u04f5"+ - "\u0001\u0000\u0000\u0000\u0143\u04f9\u0001\u0000\u0000\u0000\u0145\u04fd"+ - "\u0001\u0000\u0000\u0000\u0147\u0502\u0001\u0000\u0000\u0000\u0149\u0507"+ - "\u0001\u0000\u0000\u0000\u014b\u050b\u0001\u0000\u0000\u0000\u014d\u050f"+ - "\u0001\u0000\u0000\u0000\u014f\u0513\u0001\u0000\u0000\u0000\u0151\u0518"+ - "\u0001\u0000\u0000\u0000\u0153\u051f\u0001\u0000\u0000\u0000\u0155\u0523"+ - "\u0001\u0000\u0000\u0000\u0157\u0527\u0001\u0000\u0000\u0000\u0159\u052b"+ - "\u0001\u0000\u0000\u0000\u015b\u052f\u0001\u0000\u0000\u0000\u015d\u0534"+ - "\u0001\u0000\u0000\u0000\u015f\u0538\u0001\u0000\u0000\u0000\u0161\u053c"+ - "\u0001\u0000\u0000\u0000\u0163\u0540\u0001\u0000\u0000\u0000\u0165\u0545"+ - "\u0001\u0000\u0000\u0000\u0167\u0549\u0001\u0000\u0000\u0000\u0169\u054d"+ - 
"\u0001\u0000\u0000\u0000\u016b\u0551\u0001\u0000\u0000\u0000\u016d\u0555"+ - "\u0001\u0000\u0000\u0000\u016f\u0559\u0001\u0000\u0000\u0000\u0171\u055f"+ - "\u0001\u0000\u0000\u0000\u0173\u0563\u0001\u0000\u0000\u0000\u0175\u0567"+ - "\u0001\u0000\u0000\u0000\u0177\u056b\u0001\u0000\u0000\u0000\u0179\u056f"+ - "\u0001\u0000\u0000\u0000\u017b\u0573\u0001\u0000\u0000\u0000\u017d\u0577"+ - "\u0001\u0000\u0000\u0000\u017f\u057c\u0001\u0000\u0000\u0000\u0181\u0582"+ - "\u0001\u0000\u0000\u0000\u0183\u0588\u0001\u0000\u0000\u0000\u0185\u058c"+ - "\u0001\u0000\u0000\u0000\u0187\u0590\u0001\u0000\u0000\u0000\u0189\u0594"+ - "\u0001\u0000\u0000\u0000\u018b\u059a\u0001\u0000\u0000\u0000\u018d\u05a0"+ - "\u0001\u0000\u0000\u0000\u018f\u05a4\u0001\u0000\u0000\u0000\u0191\u05a8"+ - "\u0001\u0000\u0000\u0000\u0193\u05ac\u0001\u0000\u0000\u0000\u0195\u05b2"+ - "\u0001\u0000\u0000\u0000\u0197\u05b8\u0001\u0000\u0000\u0000\u0199\u05be"+ - "\u0001\u0000\u0000\u0000\u019b\u019c\u0007\u0000\u0000\u0000\u019c\u019d"+ - "\u0007\u0001\u0000\u0000\u019d\u019e\u0007\u0002\u0000\u0000\u019e\u019f"+ - "\u0007\u0002\u0000\u0000\u019f\u01a0\u0007\u0003\u0000\u0000\u01a0\u01a1"+ - "\u0007\u0004\u0000\u0000\u01a1\u01a2\u0007\u0005\u0000\u0000\u01a2\u01a3"+ - "\u0001\u0000\u0000\u0000\u01a3\u01a4\u0006\u0000\u0000\u0000\u01a4\u0010"+ - "\u0001\u0000\u0000\u0000\u01a5\u01a6\u0007\u0000\u0000\u0000\u01a6\u01a7"+ - "\u0007\u0006\u0000\u0000\u01a7\u01a8\u0007\u0007\u0000\u0000\u01a8\u01a9"+ - "\u0007\b\u0000\u0000\u01a9\u01aa\u0001\u0000\u0000\u0000\u01aa\u01ab\u0006"+ - "\u0001\u0001\u0000\u01ab\u0012\u0001\u0000\u0000\u0000\u01ac\u01ad\u0007"+ - "\u0003\u0000\u0000\u01ad\u01ae\u0007\t\u0000\u0000\u01ae\u01af\u0007\u0006"+ - "\u0000\u0000\u01af\u01b0\u0007\u0001\u0000\u0000\u01b0\u01b1\u0007\u0004"+ - "\u0000\u0000\u01b1\u01b2\u0007\n\u0000\u0000\u01b2\u01b3\u0001\u0000\u0000"+ - "\u0000\u01b3\u01b4\u0006\u0002\u0002\u0000\u01b4\u0014\u0001\u0000\u0000"+ - 
"\u0000\u01b5\u01b6\u0007\u0003\u0000\u0000\u01b6\u01b7\u0007\u000b\u0000"+ - "\u0000\u01b7\u01b8\u0007\f\u0000\u0000\u01b8\u01b9\u0007\r\u0000\u0000"+ - "\u01b9\u01ba\u0001\u0000\u0000\u0000\u01ba\u01bb\u0006\u0003\u0000\u0000"+ - "\u01bb\u0016\u0001\u0000\u0000\u0000\u01bc\u01bd\u0007\u0003\u0000\u0000"+ - "\u01bd\u01be\u0007\u000e\u0000\u0000\u01be\u01bf\u0007\b\u0000\u0000\u01bf"+ - "\u01c0\u0007\r\u0000\u0000\u01c0\u01c1\u0007\f\u0000\u0000\u01c1\u01c2"+ - "\u0007\u0001\u0000\u0000\u01c2\u01c3\u0007\t\u0000\u0000\u01c3\u01c4\u0001"+ - "\u0000\u0000\u0000\u01c4\u01c5\u0006\u0004\u0003\u0000\u01c5\u0018\u0001"+ - "\u0000\u0000\u0000\u01c6\u01c7\u0007\u000f\u0000\u0000\u01c7\u01c8\u0007"+ - "\u0006\u0000\u0000\u01c8\u01c9\u0007\u0007\u0000\u0000\u01c9\u01ca\u0007"+ - "\u0010\u0000\u0000\u01ca\u01cb\u0001\u0000\u0000\u0000\u01cb\u01cc\u0006"+ - "\u0005\u0004\u0000\u01cc\u001a\u0001\u0000\u0000\u0000\u01cd\u01ce\u0007"+ - "\u0011\u0000\u0000\u01ce\u01cf\u0007\u0006\u0000\u0000\u01cf\u01d0\u0007"+ - "\u0007\u0000\u0000\u01d0\u01d1\u0007\u0012\u0000\u0000\u01d1\u01d2\u0001"+ - "\u0000\u0000\u0000\u01d2\u01d3\u0006\u0006\u0000\u0000\u01d3\u001c\u0001"+ - "\u0000\u0000\u0000\u01d4\u01d5\u0007\u0012\u0000\u0000\u01d5\u01d6\u0007"+ - "\u0003\u0000\u0000\u01d6\u01d7\u0007\u0003\u0000\u0000\u01d7\u01d8\u0007"+ - "\b\u0000\u0000\u01d8\u01d9\u0001\u0000\u0000\u0000\u01d9\u01da\u0006\u0007"+ - "\u0001\u0000\u01da\u001e\u0001\u0000\u0000\u0000\u01db\u01dc\u0007\r\u0000"+ - "\u0000\u01dc\u01dd\u0007\u0001\u0000\u0000\u01dd\u01de\u0007\u0010\u0000"+ - "\u0000\u01de\u01df\u0007\u0001\u0000\u0000\u01df\u01e0\u0007\u0005\u0000"+ - "\u0000\u01e0\u01e1\u0001\u0000\u0000\u0000\u01e1\u01e2\u0006\b\u0000\u0000"+ - "\u01e2 \u0001\u0000\u0000\u0000\u01e3\u01e4\u0007\u0010\u0000\u0000\u01e4"+ - "\u01e5\u0007\u000b\u0000\u0000\u01e5\u01e6\u0005_\u0000\u0000\u01e6\u01e7"+ - "\u0007\u0003\u0000\u0000\u01e7\u01e8\u0007\u000e\u0000\u0000\u01e8\u01e9"+ - 
"\u0007\b\u0000\u0000\u01e9\u01ea\u0007\f\u0000\u0000\u01ea\u01eb\u0007"+ - "\t\u0000\u0000\u01eb\u01ec\u0007\u0000\u0000\u0000\u01ec\u01ed\u0001\u0000"+ - "\u0000\u0000\u01ed\u01ee\u0006\t\u0005\u0000\u01ee\"\u0001\u0000\u0000"+ - "\u0000\u01ef\u01f0\u0007\u0006\u0000\u0000\u01f0\u01f1\u0007\u0003\u0000"+ - "\u0000\u01f1\u01f2\u0007\t\u0000\u0000\u01f2\u01f3\u0007\f\u0000\u0000"+ - "\u01f3\u01f4\u0007\u0010\u0000\u0000\u01f4\u01f5\u0007\u0003\u0000\u0000"+ - "\u01f5\u01f6\u0001\u0000\u0000\u0000\u01f6\u01f7\u0006\n\u0006\u0000\u01f7"+ - "$\u0001\u0000\u0000\u0000\u01f8\u01f9\u0007\u0006\u0000\u0000\u01f9\u01fa"+ - "\u0007\u0007\u0000\u0000\u01fa\u01fb\u0007\u0013\u0000\u0000\u01fb\u01fc"+ - "\u0001\u0000\u0000\u0000\u01fc\u01fd\u0006\u000b\u0000\u0000\u01fd&\u0001"+ - "\u0000\u0000\u0000\u01fe\u01ff\u0007\u0002\u0000\u0000\u01ff\u0200\u0007"+ - "\n\u0000\u0000\u0200\u0201\u0007\u0007\u0000\u0000\u0201\u0202\u0007\u0013"+ - "\u0000\u0000\u0202\u0203\u0001\u0000\u0000\u0000\u0203\u0204\u0006\f\u0007"+ - "\u0000\u0204(\u0001\u0000\u0000\u0000\u0205\u0206\u0007\u0002\u0000\u0000"+ - "\u0206\u0207\u0007\u0007\u0000\u0000\u0207\u0208\u0007\u0006\u0000\u0000"+ - "\u0208\u0209\u0007\u0005\u0000\u0000\u0209\u020a\u0001\u0000\u0000\u0000"+ - "\u020a\u020b\u0006\r\u0000\u0000\u020b*\u0001\u0000\u0000\u0000\u020c"+ - "\u020d\u0007\u0002\u0000\u0000\u020d\u020e\u0007\u0005\u0000\u0000\u020e"+ - "\u020f\u0007\f\u0000\u0000\u020f\u0210\u0007\u0005\u0000\u0000\u0210\u0211"+ - "\u0007\u0002\u0000\u0000\u0211\u0212\u0001\u0000\u0000\u0000\u0212\u0213"+ - "\u0006\u000e\u0000\u0000\u0213,\u0001\u0000\u0000\u0000\u0214\u0215\u0007"+ - "\u0013\u0000\u0000\u0215\u0216\u0007\n\u0000\u0000\u0216\u0217\u0007\u0003"+ - "\u0000\u0000\u0217\u0218\u0007\u0006\u0000\u0000\u0218\u0219\u0007\u0003"+ - "\u0000\u0000\u0219\u021a\u0001\u0000\u0000\u0000\u021a\u021b\u0006\u000f"+ - "\u0000\u0000\u021b.\u0001\u0000\u0000\u0000\u021c\u021d\u0004\u0010\u0000"+ - 
"\u0000\u021d\u021e\u0007\u0001\u0000\u0000\u021e\u021f\u0007\t\u0000\u0000"+ - "\u021f\u0220\u0007\r\u0000\u0000\u0220\u0221\u0007\u0001\u0000\u0000\u0221"+ - "\u0222\u0007\t\u0000\u0000\u0222\u0223\u0007\u0003\u0000\u0000\u0223\u0224"+ - "\u0007\u0002\u0000\u0000\u0224\u0225\u0007\u0005\u0000\u0000\u0225\u0226"+ - "\u0007\f\u0000\u0000\u0226\u0227\u0007\u0005\u0000\u0000\u0227\u0228\u0007"+ - "\u0002\u0000\u0000\u0228\u0229\u0001\u0000\u0000\u0000\u0229\u022a\u0006"+ - "\u0010\u0000\u0000\u022a0\u0001\u0000\u0000\u0000\u022b\u022c\u0004\u0011"+ - "\u0001\u0000\u022c\u022d\u0007\r\u0000\u0000\u022d\u022e\u0007\u0007\u0000"+ - "\u0000\u022e\u022f\u0007\u0007\u0000\u0000\u022f\u0230\u0007\u0012\u0000"+ - "\u0000\u0230\u0231\u0007\u0014\u0000\u0000\u0231\u0232\u0007\b\u0000\u0000"+ - "\u0232\u0233\u0001\u0000\u0000\u0000\u0233\u0234\u0006\u0011\b\u0000\u0234"+ - "2\u0001\u0000\u0000\u0000\u0235\u0236\u0004\u0012\u0002\u0000\u0236\u0237"+ - "\u0007\u0010\u0000\u0000\u0237\u0238\u0007\f\u0000\u0000\u0238\u0239\u0007"+ - "\u0005\u0000\u0000\u0239\u023a\u0007\u0004\u0000\u0000\u023a\u023b\u0007"+ - "\n\u0000\u0000\u023b\u023c\u0001\u0000\u0000\u0000\u023c\u023d\u0006\u0012"+ - "\u0000\u0000\u023d4\u0001\u0000\u0000\u0000\u023e\u023f\u0004\u0013\u0003"+ - "\u0000\u023f\u0240\u0007\u0010\u0000\u0000\u0240\u0241\u0007\u0003\u0000"+ - "\u0000\u0241\u0242\u0007\u0005\u0000\u0000\u0242\u0243\u0007\u0006\u0000"+ - "\u0000\u0243\u0244\u0007\u0001\u0000\u0000\u0244\u0245\u0007\u0004\u0000"+ - "\u0000\u0245\u0246\u0007\u0002\u0000\u0000\u0246\u0247\u0001\u0000\u0000"+ - "\u0000\u0247\u0248\u0006\u0013\t\u0000\u02486\u0001\u0000\u0000\u0000"+ - "\u0249\u024b\b\u0015\u0000\u0000\u024a\u0249\u0001\u0000\u0000\u0000\u024b"+ - "\u024c\u0001\u0000\u0000\u0000\u024c\u024a\u0001\u0000\u0000\u0000\u024c"+ - "\u024d\u0001\u0000\u0000\u0000\u024d\u024e\u0001\u0000\u0000\u0000\u024e"+ - "\u024f\u0006\u0014\u0000\u0000\u024f8\u0001\u0000\u0000\u0000\u0250\u0251"+ - 
"\u0005/\u0000\u0000\u0251\u0252\u0005/\u0000\u0000\u0252\u0256\u0001\u0000"+ - "\u0000\u0000\u0253\u0255\b\u0016\u0000\u0000\u0254\u0253\u0001\u0000\u0000"+ - "\u0000\u0255\u0258\u0001\u0000\u0000\u0000\u0256\u0254\u0001\u0000\u0000"+ - "\u0000\u0256\u0257\u0001\u0000\u0000\u0000\u0257\u025a\u0001\u0000\u0000"+ - "\u0000\u0258\u0256\u0001\u0000\u0000\u0000\u0259\u025b\u0005\r\u0000\u0000"+ - "\u025a\u0259\u0001\u0000\u0000\u0000\u025a\u025b\u0001\u0000\u0000\u0000"+ - "\u025b\u025d\u0001\u0000\u0000\u0000\u025c\u025e\u0005\n\u0000\u0000\u025d"+ - "\u025c\u0001\u0000\u0000\u0000\u025d\u025e\u0001\u0000\u0000\u0000\u025e"+ - "\u025f\u0001\u0000\u0000\u0000\u025f\u0260\u0006\u0015\n\u0000\u0260:"+ - "\u0001\u0000\u0000\u0000\u0261\u0262\u0005/\u0000\u0000\u0262\u0263\u0005"+ - "*\u0000\u0000\u0263\u0268\u0001\u0000\u0000\u0000\u0264\u0267\u0003;\u0016"+ - "\u0000\u0265\u0267\t\u0000\u0000\u0000\u0266\u0264\u0001\u0000\u0000\u0000"+ - "\u0266\u0265\u0001\u0000\u0000\u0000\u0267\u026a\u0001\u0000\u0000\u0000"+ - "\u0268\u0269\u0001\u0000\u0000\u0000\u0268\u0266\u0001\u0000\u0000\u0000"+ - "\u0269\u026b\u0001\u0000\u0000\u0000\u026a\u0268\u0001\u0000\u0000\u0000"+ - "\u026b\u026c\u0005*\u0000\u0000\u026c\u026d\u0005/\u0000\u0000\u026d\u026e"+ - "\u0001\u0000\u0000\u0000\u026e\u026f\u0006\u0016\n\u0000\u026f<\u0001"+ - "\u0000\u0000\u0000\u0270\u0272\u0007\u0017\u0000\u0000\u0271\u0270\u0001"+ - "\u0000\u0000\u0000\u0272\u0273\u0001\u0000\u0000\u0000\u0273\u0271\u0001"+ - "\u0000\u0000\u0000\u0273\u0274\u0001\u0000\u0000\u0000\u0274\u0275\u0001"+ - "\u0000\u0000\u0000\u0275\u0276\u0006\u0017\n\u0000\u0276>\u0001\u0000"+ - "\u0000\u0000\u0277\u0278\u0005|\u0000\u0000\u0278\u0279\u0001\u0000\u0000"+ - "\u0000\u0279\u027a\u0006\u0018\u000b\u0000\u027a@\u0001\u0000\u0000\u0000"+ - "\u027b\u027c\u0007\u0018\u0000\u0000\u027cB\u0001\u0000\u0000\u0000\u027d"+ - "\u027e\u0007\u0019\u0000\u0000\u027eD\u0001\u0000\u0000\u0000\u027f\u0280"+ - 
"\u0005\\\u0000\u0000\u0280\u0281\u0007\u001a\u0000\u0000\u0281F\u0001"+ - "\u0000\u0000\u0000\u0282\u0283\b\u001b\u0000\u0000\u0283H\u0001\u0000"+ - "\u0000\u0000\u0284\u0286\u0007\u0003\u0000\u0000\u0285\u0287\u0007\u001c"+ - "\u0000\u0000\u0286\u0285\u0001\u0000\u0000\u0000\u0286\u0287\u0001\u0000"+ - "\u0000\u0000\u0287\u0289\u0001\u0000\u0000\u0000\u0288\u028a\u0003A\u0019"+ - "\u0000\u0289\u0288\u0001\u0000\u0000\u0000\u028a\u028b\u0001\u0000\u0000"+ - "\u0000\u028b\u0289\u0001\u0000\u0000\u0000\u028b\u028c\u0001\u0000\u0000"+ - "\u0000\u028cJ\u0001\u0000\u0000\u0000\u028d\u028e\u0005@\u0000\u0000\u028e"+ - "L\u0001\u0000\u0000\u0000\u028f\u0290\u0005`\u0000\u0000\u0290N\u0001"+ - "\u0000\u0000\u0000\u0291\u0295\b\u001d\u0000\u0000\u0292\u0293\u0005`"+ - "\u0000\u0000\u0293\u0295\u0005`\u0000\u0000\u0294\u0291\u0001\u0000\u0000"+ - "\u0000\u0294\u0292\u0001\u0000\u0000\u0000\u0295P\u0001\u0000\u0000\u0000"+ - "\u0296\u0297\u0005_\u0000\u0000\u0297R\u0001\u0000\u0000\u0000\u0298\u029c"+ - "\u0003C\u001a\u0000\u0299\u029c\u0003A\u0019\u0000\u029a\u029c\u0003Q"+ - "!\u0000\u029b\u0298\u0001\u0000\u0000\u0000\u029b\u0299\u0001\u0000\u0000"+ - "\u0000\u029b\u029a\u0001\u0000\u0000\u0000\u029cT\u0001\u0000\u0000\u0000"+ - "\u029d\u02a2\u0005\"\u0000\u0000\u029e\u02a1\u0003E\u001b\u0000\u029f"+ - "\u02a1\u0003G\u001c\u0000\u02a0\u029e\u0001\u0000\u0000\u0000\u02a0\u029f"+ - "\u0001\u0000\u0000\u0000\u02a1\u02a4\u0001\u0000\u0000\u0000\u02a2\u02a0"+ - "\u0001\u0000\u0000\u0000\u02a2\u02a3\u0001\u0000\u0000\u0000\u02a3\u02a5"+ - "\u0001\u0000\u0000\u0000\u02a4\u02a2\u0001\u0000\u0000\u0000\u02a5\u02bb"+ - "\u0005\"\u0000\u0000\u02a6\u02a7\u0005\"\u0000\u0000\u02a7\u02a8\u0005"+ - "\"\u0000\u0000\u02a8\u02a9\u0005\"\u0000\u0000\u02a9\u02ad\u0001\u0000"+ - "\u0000\u0000\u02aa\u02ac\b\u0016\u0000\u0000\u02ab\u02aa\u0001\u0000\u0000"+ - "\u0000\u02ac\u02af\u0001\u0000\u0000\u0000\u02ad\u02ae\u0001\u0000\u0000"+ - 
"\u0000\u02ad\u02ab\u0001\u0000\u0000\u0000\u02ae\u02b0\u0001\u0000\u0000"+ - "\u0000\u02af\u02ad\u0001\u0000\u0000\u0000\u02b0\u02b1\u0005\"\u0000\u0000"+ - "\u02b1\u02b2\u0005\"\u0000\u0000\u02b2\u02b3\u0005\"\u0000\u0000\u02b3"+ - "\u02b5\u0001\u0000\u0000\u0000\u02b4\u02b6\u0005\"\u0000\u0000\u02b5\u02b4"+ - "\u0001\u0000\u0000\u0000\u02b5\u02b6\u0001\u0000\u0000\u0000\u02b6\u02b8"+ - "\u0001\u0000\u0000\u0000\u02b7\u02b9\u0005\"\u0000\u0000\u02b8\u02b7\u0001"+ - "\u0000\u0000\u0000\u02b8\u02b9\u0001\u0000\u0000\u0000\u02b9\u02bb\u0001"+ - "\u0000\u0000\u0000\u02ba\u029d\u0001\u0000\u0000\u0000\u02ba\u02a6\u0001"+ - "\u0000\u0000\u0000\u02bbV\u0001\u0000\u0000\u0000\u02bc\u02be\u0003A\u0019"+ - "\u0000\u02bd\u02bc\u0001\u0000\u0000\u0000\u02be\u02bf\u0001\u0000\u0000"+ - "\u0000\u02bf\u02bd\u0001\u0000\u0000\u0000\u02bf\u02c0\u0001\u0000\u0000"+ - "\u0000\u02c0X\u0001\u0000\u0000\u0000\u02c1\u02c3\u0003A\u0019\u0000\u02c2"+ - "\u02c1\u0001\u0000\u0000\u0000\u02c3\u02c4\u0001\u0000\u0000\u0000\u02c4"+ - "\u02c2\u0001\u0000\u0000\u0000\u02c4\u02c5\u0001\u0000\u0000\u0000\u02c5"+ - "\u02c6\u0001\u0000\u0000\u0000\u02c6\u02ca\u0003i-\u0000\u02c7\u02c9\u0003"+ - "A\u0019\u0000\u02c8\u02c7\u0001\u0000\u0000\u0000\u02c9\u02cc\u0001\u0000"+ - "\u0000\u0000\u02ca\u02c8\u0001\u0000\u0000\u0000\u02ca\u02cb\u0001\u0000"+ - "\u0000\u0000\u02cb\u02ec\u0001\u0000\u0000\u0000\u02cc\u02ca\u0001\u0000"+ - "\u0000\u0000\u02cd\u02cf\u0003i-\u0000\u02ce\u02d0\u0003A\u0019\u0000"+ - "\u02cf\u02ce\u0001\u0000\u0000\u0000\u02d0\u02d1\u0001\u0000\u0000\u0000"+ - "\u02d1\u02cf\u0001\u0000\u0000\u0000\u02d1\u02d2\u0001\u0000\u0000\u0000"+ - "\u02d2\u02ec\u0001\u0000\u0000\u0000\u02d3\u02d5\u0003A\u0019\u0000\u02d4"+ - "\u02d3\u0001\u0000\u0000\u0000\u02d5\u02d6\u0001\u0000\u0000\u0000\u02d6"+ - "\u02d4\u0001\u0000\u0000\u0000\u02d6\u02d7\u0001\u0000\u0000\u0000\u02d7"+ - "\u02df\u0001\u0000\u0000\u0000\u02d8\u02dc\u0003i-\u0000\u02d9\u02db\u0003"+ - 
"A\u0019\u0000\u02da\u02d9\u0001\u0000\u0000\u0000\u02db\u02de\u0001\u0000"+ - "\u0000\u0000\u02dc\u02da\u0001\u0000\u0000\u0000\u02dc\u02dd\u0001\u0000"+ - "\u0000\u0000\u02dd\u02e0\u0001\u0000\u0000\u0000\u02de\u02dc\u0001\u0000"+ - "\u0000\u0000\u02df\u02d8\u0001\u0000\u0000\u0000\u02df\u02e0\u0001\u0000"+ - "\u0000\u0000\u02e0\u02e1\u0001\u0000\u0000\u0000\u02e1\u02e2\u0003I\u001d"+ - "\u0000\u02e2\u02ec\u0001\u0000\u0000\u0000\u02e3\u02e5\u0003i-\u0000\u02e4"+ - "\u02e6\u0003A\u0019\u0000\u02e5\u02e4\u0001\u0000\u0000\u0000\u02e6\u02e7"+ - "\u0001\u0000\u0000\u0000\u02e7\u02e5\u0001\u0000\u0000\u0000\u02e7\u02e8"+ - "\u0001\u0000\u0000\u0000\u02e8\u02e9\u0001\u0000\u0000\u0000\u02e9\u02ea"+ - "\u0003I\u001d\u0000\u02ea\u02ec\u0001\u0000\u0000\u0000\u02eb\u02c2\u0001"+ - "\u0000\u0000\u0000\u02eb\u02cd\u0001\u0000\u0000\u0000\u02eb\u02d4\u0001"+ - "\u0000\u0000\u0000\u02eb\u02e3\u0001\u0000\u0000\u0000\u02ecZ\u0001\u0000"+ - "\u0000\u0000\u02ed\u02ee\u0007\u001e\u0000\u0000\u02ee\u02ef\u0007\u001f"+ - "\u0000\u0000\u02ef\\\u0001\u0000\u0000\u0000\u02f0\u02f1\u0007\f\u0000"+ - "\u0000\u02f1\u02f2\u0007\t\u0000\u0000\u02f2\u02f3\u0007\u0000\u0000\u0000"+ - "\u02f3^\u0001\u0000\u0000\u0000\u02f4\u02f5\u0007\f\u0000\u0000\u02f5"+ - "\u02f6\u0007\u0002\u0000\u0000\u02f6\u02f7\u0007\u0004\u0000\u0000\u02f7"+ - "`\u0001\u0000\u0000\u0000\u02f8\u02f9\u0005=\u0000\u0000\u02f9b\u0001"+ - "\u0000\u0000\u0000\u02fa\u02fb\u0005:\u0000\u0000\u02fb\u02fc\u0005:\u0000"+ - "\u0000\u02fcd\u0001\u0000\u0000\u0000\u02fd\u02fe\u0005,\u0000\u0000\u02fe"+ - "f\u0001\u0000\u0000\u0000\u02ff\u0300\u0007\u0000\u0000\u0000\u0300\u0301"+ - "\u0007\u0003\u0000\u0000\u0301\u0302\u0007\u0002\u0000\u0000\u0302\u0303"+ - "\u0007\u0004\u0000\u0000\u0303h\u0001\u0000\u0000\u0000\u0304\u0305\u0005"+ - ".\u0000\u0000\u0305j\u0001\u0000\u0000\u0000\u0306\u0307\u0007\u000f\u0000"+ - "\u0000\u0307\u0308\u0007\f\u0000\u0000\u0308\u0309\u0007\r\u0000\u0000"+ - 
"\u0309\u030a\u0007\u0002\u0000\u0000\u030a\u030b\u0007\u0003\u0000\u0000"+ - "\u030bl\u0001\u0000\u0000\u0000\u030c\u030d\u0007\u000f\u0000\u0000\u030d"+ - "\u030e\u0007\u0001\u0000\u0000\u030e\u030f\u0007\u0006\u0000\u0000\u030f"+ - "\u0310\u0007\u0002\u0000\u0000\u0310\u0311\u0007\u0005\u0000\u0000\u0311"+ - "n\u0001\u0000\u0000\u0000\u0312\u0313\u0007\u0001\u0000\u0000\u0313\u0314"+ - "\u0007\t\u0000\u0000\u0314p\u0001\u0000\u0000\u0000\u0315\u0316\u0007"+ - "\u0001\u0000\u0000\u0316\u0317\u0007\u0002\u0000\u0000\u0317r\u0001\u0000"+ - "\u0000\u0000\u0318\u0319\u0007\r\u0000\u0000\u0319\u031a\u0007\f\u0000"+ - "\u0000\u031a\u031b\u0007\u0002\u0000\u0000\u031b\u031c\u0007\u0005\u0000"+ - "\u0000\u031ct\u0001\u0000\u0000\u0000\u031d\u031e\u0007\r\u0000\u0000"+ - "\u031e\u031f\u0007\u0001\u0000\u0000\u031f\u0320\u0007\u0012\u0000\u0000"+ - "\u0320\u0321\u0007\u0003\u0000\u0000\u0321v\u0001\u0000\u0000\u0000\u0322"+ - "\u0323\u0005(\u0000\u0000\u0323x\u0001\u0000\u0000\u0000\u0324\u0325\u0007"+ - "\t\u0000\u0000\u0325\u0326\u0007\u0007\u0000\u0000\u0326\u0327\u0007\u0005"+ - "\u0000\u0000\u0327z\u0001\u0000\u0000\u0000\u0328\u0329\u0007\t\u0000"+ - "\u0000\u0329\u032a\u0007\u0014\u0000\u0000\u032a\u032b\u0007\r\u0000\u0000"+ - "\u032b\u032c\u0007\r\u0000\u0000\u032c|\u0001\u0000\u0000\u0000\u032d"+ - "\u032e\u0007\t\u0000\u0000\u032e\u032f\u0007\u0014\u0000\u0000\u032f\u0330"+ - "\u0007\r\u0000\u0000\u0330\u0331\u0007\r\u0000\u0000\u0331\u0332\u0007"+ - "\u0002\u0000\u0000\u0332~\u0001\u0000\u0000\u0000\u0333\u0334\u0007\u0007"+ - "\u0000\u0000\u0334\u0335\u0007\u0006\u0000\u0000\u0335\u0080\u0001\u0000"+ - "\u0000\u0000\u0336\u0337\u0005?\u0000\u0000\u0337\u0082\u0001\u0000\u0000"+ - "\u0000\u0338\u0339\u0007\u0006\u0000\u0000\u0339\u033a\u0007\r\u0000\u0000"+ - "\u033a\u033b\u0007\u0001\u0000\u0000\u033b\u033c\u0007\u0012\u0000\u0000"+ - "\u033c\u033d\u0007\u0003\u0000\u0000\u033d\u0084\u0001\u0000\u0000\u0000"+ - 
"\u033e\u033f\u0005)\u0000\u0000\u033f\u0086\u0001\u0000\u0000\u0000\u0340"+ - "\u0341\u0007\u0005\u0000\u0000\u0341\u0342\u0007\u0006\u0000\u0000\u0342"+ - "\u0343\u0007\u0014\u0000\u0000\u0343\u0344\u0007\u0003\u0000\u0000\u0344"+ - "\u0088\u0001\u0000\u0000\u0000\u0345\u0346\u0005=\u0000\u0000\u0346\u0347"+ - "\u0005=\u0000\u0000\u0347\u008a\u0001\u0000\u0000\u0000\u0348\u0349\u0005"+ - "=\u0000\u0000\u0349\u034a\u0005~\u0000\u0000\u034a\u008c\u0001\u0000\u0000"+ - "\u0000\u034b\u034c\u0005!\u0000\u0000\u034c\u034d\u0005=\u0000\u0000\u034d"+ - "\u008e\u0001\u0000\u0000\u0000\u034e\u034f\u0005<\u0000\u0000\u034f\u0090"+ - "\u0001\u0000\u0000\u0000\u0350\u0351\u0005<\u0000\u0000\u0351\u0352\u0005"+ - "=\u0000\u0000\u0352\u0092\u0001\u0000\u0000\u0000\u0353\u0354\u0005>\u0000"+ - "\u0000\u0354\u0094\u0001\u0000\u0000\u0000\u0355\u0356\u0005>\u0000\u0000"+ - "\u0356\u0357\u0005=\u0000\u0000\u0357\u0096\u0001\u0000\u0000\u0000\u0358"+ - "\u0359\u0005+\u0000\u0000\u0359\u0098\u0001\u0000\u0000\u0000\u035a\u035b"+ - "\u0005-\u0000\u0000\u035b\u009a\u0001\u0000\u0000\u0000\u035c\u035d\u0005"+ - "*\u0000\u0000\u035d\u009c\u0001\u0000\u0000\u0000\u035e\u035f\u0005/\u0000"+ - "\u0000\u035f\u009e\u0001\u0000\u0000\u0000\u0360\u0361\u0005%\u0000\u0000"+ - "\u0361\u00a0\u0001\u0000\u0000\u0000\u0362\u0363\u0004I\u0004\u0000\u0363"+ - "\u0364\u00033\u0012\u0000\u0364\u0365\u0001\u0000\u0000\u0000\u0365\u0366"+ - "\u0006I\f\u0000\u0366\u00a2\u0001\u0000\u0000\u0000\u0367\u036a\u0003"+ - "\u00819\u0000\u0368\u036b\u0003C\u001a\u0000\u0369\u036b\u0003Q!\u0000"+ - "\u036a\u0368\u0001\u0000\u0000\u0000\u036a\u0369\u0001\u0000\u0000\u0000"+ - "\u036b\u036f\u0001\u0000\u0000\u0000\u036c\u036e\u0003S\"\u0000\u036d"+ - "\u036c\u0001\u0000\u0000\u0000\u036e\u0371\u0001\u0000\u0000\u0000\u036f"+ - "\u036d\u0001\u0000\u0000\u0000\u036f\u0370\u0001\u0000\u0000\u0000\u0370"+ - "\u0379\u0001\u0000\u0000\u0000\u0371\u036f\u0001\u0000\u0000\u0000\u0372"+ - 
"\u0374\u0003\u00819\u0000\u0373\u0375\u0003A\u0019\u0000\u0374\u0373\u0001"+ - "\u0000\u0000\u0000\u0375\u0376\u0001\u0000\u0000\u0000\u0376\u0374\u0001"+ - "\u0000\u0000\u0000\u0376\u0377\u0001\u0000\u0000\u0000\u0377\u0379\u0001"+ - "\u0000\u0000\u0000\u0378\u0367\u0001\u0000\u0000\u0000\u0378\u0372\u0001"+ - "\u0000\u0000\u0000\u0379\u00a4\u0001\u0000\u0000\u0000\u037a\u037b\u0005"+ - "[\u0000\u0000\u037b\u037c\u0001\u0000\u0000\u0000\u037c\u037d\u0006K\u0000"+ - "\u0000\u037d\u037e\u0006K\u0000\u0000\u037e\u00a6\u0001\u0000\u0000\u0000"+ - "\u037f\u0380\u0005]\u0000\u0000\u0380\u0381\u0001\u0000\u0000\u0000\u0381"+ - "\u0382\u0006L\u000b\u0000\u0382\u0383\u0006L\u000b\u0000\u0383\u00a8\u0001"+ - "\u0000\u0000\u0000\u0384\u0388\u0003C\u001a\u0000\u0385\u0387\u0003S\""+ - "\u0000\u0386\u0385\u0001\u0000\u0000\u0000\u0387\u038a\u0001\u0000\u0000"+ - "\u0000\u0388\u0386\u0001\u0000\u0000\u0000\u0388\u0389\u0001\u0000\u0000"+ - "\u0000\u0389\u0395\u0001\u0000\u0000\u0000\u038a\u0388\u0001\u0000\u0000"+ - "\u0000\u038b\u038e\u0003Q!\u0000\u038c\u038e\u0003K\u001e\u0000\u038d"+ - "\u038b\u0001\u0000\u0000\u0000\u038d\u038c\u0001\u0000\u0000\u0000\u038e"+ - "\u0390\u0001\u0000\u0000\u0000\u038f\u0391\u0003S\"\u0000\u0390\u038f"+ - "\u0001\u0000\u0000\u0000\u0391\u0392\u0001\u0000\u0000\u0000\u0392\u0390"+ - "\u0001\u0000\u0000\u0000\u0392\u0393\u0001\u0000\u0000\u0000\u0393\u0395"+ - "\u0001\u0000\u0000\u0000\u0394\u0384\u0001\u0000\u0000\u0000\u0394\u038d"+ - "\u0001\u0000\u0000\u0000\u0395\u00aa\u0001\u0000\u0000\u0000\u0396\u0398"+ - "\u0003M\u001f\u0000\u0397\u0399\u0003O \u0000\u0398\u0397\u0001\u0000"+ - "\u0000\u0000\u0399\u039a\u0001\u0000\u0000\u0000\u039a\u0398\u0001\u0000"+ - "\u0000\u0000\u039a\u039b\u0001\u0000\u0000\u0000\u039b\u039c\u0001\u0000"+ - "\u0000\u0000\u039c\u039d\u0003M\u001f\u0000\u039d\u00ac\u0001\u0000\u0000"+ - "\u0000\u039e\u039f\u0003\u00abN\u0000\u039f\u00ae\u0001\u0000\u0000\u0000"+ - 
"\u03a0\u03a1\u00039\u0015\u0000\u03a1\u03a2\u0001\u0000\u0000\u0000\u03a2"+ - "\u03a3\u0006P\n\u0000\u03a3\u00b0\u0001\u0000\u0000\u0000\u03a4\u03a5"+ - "\u0003;\u0016\u0000\u03a5\u03a6\u0001\u0000\u0000\u0000\u03a6\u03a7\u0006"+ - "Q\n\u0000\u03a7\u00b2\u0001\u0000\u0000\u0000\u03a8\u03a9\u0003=\u0017"+ - "\u0000\u03a9\u03aa\u0001\u0000\u0000\u0000\u03aa\u03ab\u0006R\n\u0000"+ - "\u03ab\u00b4\u0001\u0000\u0000\u0000\u03ac\u03ad\u0003\u00a5K\u0000\u03ad"+ - "\u03ae\u0001\u0000\u0000\u0000\u03ae\u03af\u0006S\r\u0000\u03af\u03b0"+ - "\u0006S\u000e\u0000\u03b0\u00b6\u0001\u0000\u0000\u0000\u03b1\u03b2\u0003"+ - "?\u0018\u0000\u03b2\u03b3\u0001\u0000\u0000\u0000\u03b3\u03b4\u0006T\u000f"+ - "\u0000\u03b4\u03b5\u0006T\u000b\u0000\u03b5\u00b8\u0001\u0000\u0000\u0000"+ - "\u03b6\u03b7\u0003=\u0017\u0000\u03b7\u03b8\u0001\u0000\u0000\u0000\u03b8"+ - "\u03b9\u0006U\n\u0000\u03b9\u00ba\u0001\u0000\u0000\u0000\u03ba\u03bb"+ - "\u00039\u0015\u0000\u03bb\u03bc\u0001\u0000\u0000\u0000\u03bc\u03bd\u0006"+ - "V\n\u0000\u03bd\u00bc\u0001\u0000\u0000\u0000\u03be\u03bf\u0003;\u0016"+ - "\u0000\u03bf\u03c0\u0001\u0000\u0000\u0000\u03c0\u03c1\u0006W\n\u0000"+ - "\u03c1\u00be\u0001\u0000\u0000\u0000\u03c2\u03c3\u0003?\u0018\u0000\u03c3"+ - "\u03c4\u0001\u0000\u0000\u0000\u03c4\u03c5\u0006X\u000f\u0000\u03c5\u03c6"+ - "\u0006X\u000b\u0000\u03c6\u00c0\u0001\u0000\u0000\u0000\u03c7\u03c8\u0003"+ - "\u00a5K\u0000\u03c8\u03c9\u0001\u0000\u0000\u0000\u03c9\u03ca\u0006Y\r"+ - "\u0000\u03ca\u00c2\u0001\u0000\u0000\u0000\u03cb\u03cc\u0003\u00a7L\u0000"+ - "\u03cc\u03cd\u0001\u0000\u0000\u0000\u03cd\u03ce\u0006Z\u0010\u0000\u03ce"+ - "\u00c4\u0001\u0000\u0000\u0000\u03cf\u03d0\u0003\u0151\u00a1\u0000\u03d0"+ - "\u03d1\u0001\u0000\u0000\u0000\u03d1\u03d2\u0006[\u0011\u0000\u03d2\u00c6"+ - "\u0001\u0000\u0000\u0000\u03d3\u03d4\u0003e+\u0000\u03d4\u03d5\u0001\u0000"+ - "\u0000\u0000\u03d5\u03d6\u0006\\\u0012\u0000\u03d6\u00c8\u0001\u0000\u0000"+ - 
"\u0000\u03d7\u03d8\u0003a)\u0000\u03d8\u03d9\u0001\u0000\u0000\u0000\u03d9"+ - "\u03da\u0006]\u0013\u0000\u03da\u00ca\u0001\u0000\u0000\u0000\u03db\u03dc"+ - "\u0007\u0010\u0000\u0000\u03dc\u03dd\u0007\u0003\u0000\u0000\u03dd\u03de"+ - "\u0007\u0005\u0000\u0000\u03de\u03df\u0007\f\u0000\u0000\u03df\u03e0\u0007"+ - "\u0000\u0000\u0000\u03e0\u03e1\u0007\f\u0000\u0000\u03e1\u03e2\u0007\u0005"+ - "\u0000\u0000\u03e2\u03e3\u0007\f\u0000\u0000\u03e3\u00cc\u0001\u0000\u0000"+ - "\u0000\u03e4\u03e8\b \u0000\u0000\u03e5\u03e6\u0005/\u0000\u0000\u03e6"+ - "\u03e8\b!\u0000\u0000\u03e7\u03e4\u0001\u0000\u0000\u0000\u03e7\u03e5"+ - "\u0001\u0000\u0000\u0000\u03e8\u00ce\u0001\u0000\u0000\u0000\u03e9\u03eb"+ - "\u0003\u00cd_\u0000\u03ea\u03e9\u0001\u0000\u0000\u0000\u03eb\u03ec\u0001"+ - "\u0000\u0000\u0000\u03ec\u03ea\u0001\u0000\u0000\u0000\u03ec\u03ed\u0001"+ - "\u0000\u0000\u0000\u03ed\u00d0\u0001\u0000\u0000\u0000\u03ee\u03ef\u0003"+ - "\u00cf`\u0000\u03ef\u03f0\u0001\u0000\u0000\u0000\u03f0\u03f1\u0006a\u0014"+ - "\u0000\u03f1\u00d2\u0001\u0000\u0000\u0000\u03f2\u03f3\u0003U#\u0000\u03f3"+ - "\u03f4\u0001\u0000\u0000\u0000\u03f4\u03f5\u0006b\u0015\u0000\u03f5\u00d4"+ - "\u0001\u0000\u0000\u0000\u03f6\u03f7\u00039\u0015\u0000\u03f7\u03f8\u0001"+ - "\u0000\u0000\u0000\u03f8\u03f9\u0006c\n\u0000\u03f9\u00d6\u0001\u0000"+ - "\u0000\u0000\u03fa\u03fb\u0003;\u0016\u0000\u03fb\u03fc\u0001\u0000\u0000"+ - "\u0000\u03fc\u03fd\u0006d\n\u0000\u03fd\u00d8\u0001\u0000\u0000\u0000"+ - "\u03fe\u03ff\u0003=\u0017\u0000\u03ff\u0400\u0001\u0000\u0000\u0000\u0400"+ - "\u0401\u0006e\n\u0000\u0401\u00da\u0001\u0000\u0000\u0000\u0402\u0403"+ - "\u0003?\u0018\u0000\u0403\u0404\u0001\u0000\u0000\u0000\u0404\u0405\u0006"+ - "f\u000f\u0000\u0405\u0406\u0006f\u000b\u0000\u0406\u00dc\u0001\u0000\u0000"+ - "\u0000\u0407\u0408\u0003i-\u0000\u0408\u0409\u0001\u0000\u0000\u0000\u0409"+ - "\u040a\u0006g\u0016\u0000\u040a\u00de\u0001\u0000\u0000\u0000\u040b\u040c"+ - 
"\u0003e+\u0000\u040c\u040d\u0001\u0000\u0000\u0000\u040d\u040e\u0006h"+ - "\u0012\u0000\u040e\u00e0\u0001\u0000\u0000\u0000\u040f\u0410\u0003\u0081"+ - "9\u0000\u0410\u0411\u0001\u0000\u0000\u0000\u0411\u0412\u0006i\u0017\u0000"+ - "\u0412\u00e2\u0001\u0000\u0000\u0000\u0413\u0414\u0003\u00a3J\u0000\u0414"+ - "\u0415\u0001\u0000\u0000\u0000\u0415\u0416\u0006j\u0018\u0000\u0416\u00e4"+ - "\u0001\u0000\u0000\u0000\u0417\u041c\u0003C\u001a\u0000\u0418\u041c\u0003"+ - "A\u0019\u0000\u0419\u041c\u0003Q!\u0000\u041a\u041c\u0003\u009bF\u0000"+ - "\u041b\u0417\u0001\u0000\u0000\u0000\u041b\u0418\u0001\u0000\u0000\u0000"+ - "\u041b\u0419\u0001\u0000\u0000\u0000\u041b\u041a\u0001\u0000\u0000\u0000"+ - "\u041c\u00e6\u0001\u0000\u0000\u0000\u041d\u0420\u0003C\u001a\u0000\u041e"+ - "\u0420\u0003\u009bF\u0000\u041f\u041d\u0001\u0000\u0000\u0000\u041f\u041e"+ - "\u0001\u0000\u0000\u0000\u0420\u0424\u0001\u0000\u0000\u0000\u0421\u0423"+ - "\u0003\u00e5k\u0000\u0422\u0421\u0001\u0000\u0000\u0000\u0423\u0426\u0001"+ - "\u0000\u0000\u0000\u0424\u0422\u0001\u0000\u0000\u0000\u0424\u0425\u0001"+ - "\u0000\u0000\u0000\u0425\u0431\u0001\u0000\u0000\u0000\u0426\u0424\u0001"+ - "\u0000\u0000\u0000\u0427\u042a\u0003Q!\u0000\u0428\u042a\u0003K\u001e"+ - "\u0000\u0429\u0427\u0001\u0000\u0000\u0000\u0429\u0428\u0001\u0000\u0000"+ - "\u0000\u042a\u042c\u0001\u0000\u0000\u0000\u042b\u042d\u0003\u00e5k\u0000"+ - "\u042c\u042b\u0001\u0000\u0000\u0000\u042d\u042e\u0001\u0000\u0000\u0000"+ - "\u042e\u042c\u0001\u0000\u0000\u0000\u042e\u042f\u0001\u0000\u0000\u0000"+ - "\u042f\u0431\u0001\u0000\u0000\u0000\u0430\u041f\u0001\u0000\u0000\u0000"+ - "\u0430\u0429\u0001\u0000\u0000\u0000\u0431\u00e8\u0001\u0000\u0000\u0000"+ - "\u0432\u0435\u0003\u00e7l\u0000\u0433\u0435\u0003\u00abN\u0000\u0434\u0432"+ - "\u0001\u0000\u0000\u0000\u0434\u0433\u0001\u0000\u0000\u0000\u0435\u0436"+ - "\u0001\u0000\u0000\u0000\u0436\u0434\u0001\u0000\u0000\u0000\u0436\u0437"+ - 
"\u0001\u0000\u0000\u0000\u0437\u00ea\u0001\u0000\u0000\u0000\u0438\u0439"+ - "\u00039\u0015\u0000\u0439\u043a\u0001\u0000\u0000\u0000\u043a\u043b\u0006"+ - "n\n\u0000\u043b\u00ec\u0001\u0000\u0000\u0000\u043c\u043d\u0003;\u0016"+ - "\u0000\u043d\u043e\u0001\u0000\u0000\u0000\u043e\u043f\u0006o\n\u0000"+ - "\u043f\u00ee\u0001\u0000\u0000\u0000\u0440\u0441\u0003=\u0017\u0000\u0441"+ - "\u0442\u0001\u0000\u0000\u0000\u0442\u0443\u0006p\n\u0000\u0443\u00f0"+ - "\u0001\u0000\u0000\u0000\u0444\u0445\u0003?\u0018\u0000\u0445\u0446\u0001"+ - "\u0000\u0000\u0000\u0446\u0447\u0006q\u000f\u0000\u0447\u0448\u0006q\u000b"+ - "\u0000\u0448\u00f2\u0001\u0000\u0000\u0000\u0449\u044a\u0003a)\u0000\u044a"+ - "\u044b\u0001\u0000\u0000\u0000\u044b\u044c\u0006r\u0013\u0000\u044c\u00f4"+ - "\u0001\u0000\u0000\u0000\u044d\u044e\u0003e+\u0000\u044e\u044f\u0001\u0000"+ - "\u0000\u0000\u044f\u0450\u0006s\u0012\u0000\u0450\u00f6\u0001\u0000\u0000"+ - "\u0000\u0451\u0452\u0003i-\u0000\u0452\u0453\u0001\u0000\u0000\u0000\u0453"+ - "\u0454\u0006t\u0016\u0000\u0454\u00f8\u0001\u0000\u0000\u0000\u0455\u0456"+ - "\u0003\u00819\u0000\u0456\u0457\u0001\u0000\u0000\u0000\u0457\u0458\u0006"+ - "u\u0017\u0000\u0458\u00fa\u0001\u0000\u0000\u0000\u0459\u045a\u0003\u00a3"+ - "J\u0000\u045a\u045b\u0001\u0000\u0000\u0000\u045b\u045c\u0006v\u0018\u0000"+ - "\u045c\u00fc\u0001\u0000\u0000\u0000\u045d\u045e\u0007\f\u0000\u0000\u045e"+ - "\u045f\u0007\u0002\u0000\u0000\u045f\u00fe\u0001\u0000\u0000\u0000\u0460"+ - "\u0461\u0003\u00e9m\u0000\u0461\u0462\u0001\u0000\u0000\u0000\u0462\u0463"+ - "\u0006x\u0019\u0000\u0463\u0100\u0001\u0000\u0000\u0000\u0464\u0465\u0003"+ - "9\u0015\u0000\u0465\u0466\u0001\u0000\u0000\u0000\u0466\u0467\u0006y\n"+ - "\u0000\u0467\u0102\u0001\u0000\u0000\u0000\u0468\u0469\u0003;\u0016\u0000"+ - "\u0469\u046a\u0001\u0000\u0000\u0000\u046a\u046b\u0006z\n\u0000\u046b"+ - "\u0104\u0001\u0000\u0000\u0000\u046c\u046d\u0003=\u0017\u0000\u046d\u046e"+ - 
"\u0001\u0000\u0000\u0000\u046e\u046f\u0006{\n\u0000\u046f\u0106\u0001"+ - "\u0000\u0000\u0000\u0470\u0471\u0003?\u0018\u0000\u0471\u0472\u0001\u0000"+ - "\u0000\u0000\u0472\u0473\u0006|\u000f\u0000\u0473\u0474\u0006|\u000b\u0000"+ - "\u0474\u0108\u0001\u0000\u0000\u0000\u0475\u0476\u0003\u00a5K\u0000\u0476"+ - "\u0477\u0001\u0000\u0000\u0000\u0477\u0478\u0006}\r\u0000\u0478\u0479"+ - "\u0006}\u001a\u0000\u0479\u010a\u0001\u0000\u0000\u0000\u047a\u047b\u0007"+ - "\u0007\u0000\u0000\u047b\u047c\u0007\t\u0000\u0000\u047c\u047d\u0001\u0000"+ - "\u0000\u0000\u047d\u047e\u0006~\u001b\u0000\u047e\u010c\u0001\u0000\u0000"+ - "\u0000\u047f\u0480\u0007\u0013\u0000\u0000\u0480\u0481\u0007\u0001\u0000"+ - "\u0000\u0481\u0482\u0007\u0005\u0000\u0000\u0482\u0483\u0007\n\u0000\u0000"+ - "\u0483\u0484\u0001\u0000\u0000\u0000\u0484\u0485\u0006\u007f\u001b\u0000"+ - "\u0485\u010e\u0001\u0000\u0000\u0000\u0486\u0487\b\"\u0000\u0000\u0487"+ - "\u0110\u0001\u0000\u0000\u0000\u0488\u048a\u0003\u010f\u0080\u0000\u0489"+ - "\u0488\u0001\u0000\u0000\u0000\u048a\u048b\u0001\u0000\u0000\u0000\u048b"+ - "\u0489\u0001\u0000\u0000\u0000\u048b\u048c\u0001\u0000\u0000\u0000\u048c"+ - "\u048d\u0001\u0000\u0000\u0000\u048d\u048e\u0003\u0151\u00a1\u0000\u048e"+ - "\u0490\u0001\u0000\u0000\u0000\u048f\u0489\u0001\u0000\u0000\u0000\u048f"+ - "\u0490\u0001\u0000\u0000\u0000\u0490\u0492\u0001\u0000\u0000\u0000\u0491"+ - "\u0493\u0003\u010f\u0080\u0000\u0492\u0491\u0001\u0000\u0000\u0000\u0493"+ - "\u0494\u0001\u0000\u0000\u0000\u0494\u0492\u0001\u0000\u0000\u0000\u0494"+ - "\u0495\u0001\u0000\u0000\u0000\u0495\u0112\u0001\u0000\u0000\u0000\u0496"+ - "\u0497\u0003\u0111\u0081\u0000\u0497\u0498\u0001\u0000\u0000\u0000\u0498"+ - "\u0499\u0006\u0082\u001c\u0000\u0499\u0114\u0001\u0000\u0000\u0000\u049a"+ - "\u049b\u00039\u0015\u0000\u049b\u049c\u0001\u0000\u0000\u0000\u049c\u049d"+ - "\u0006\u0083\n\u0000\u049d\u0116\u0001\u0000\u0000\u0000\u049e\u049f\u0003"+ - 
";\u0016\u0000\u049f\u04a0\u0001\u0000\u0000\u0000\u04a0\u04a1\u0006\u0084"+ - "\n\u0000\u04a1\u0118\u0001\u0000\u0000\u0000\u04a2\u04a3\u0003=\u0017"+ - "\u0000\u04a3\u04a4\u0001\u0000\u0000\u0000\u04a4\u04a5\u0006\u0085\n\u0000"+ - "\u04a5\u011a\u0001\u0000\u0000\u0000\u04a6\u04a7\u0003?\u0018\u0000\u04a7"+ - "\u04a8\u0001\u0000\u0000\u0000\u04a8\u04a9\u0006\u0086\u000f\u0000\u04a9"+ - "\u04aa\u0006\u0086\u000b\u0000\u04aa\u04ab\u0006\u0086\u000b\u0000\u04ab"+ - "\u011c\u0001\u0000\u0000\u0000\u04ac\u04ad\u0003a)\u0000\u04ad\u04ae\u0001"+ - "\u0000\u0000\u0000\u04ae\u04af\u0006\u0087\u0013\u0000\u04af\u011e\u0001"+ - "\u0000\u0000\u0000\u04b0\u04b1\u0003e+\u0000\u04b1\u04b2\u0001\u0000\u0000"+ - "\u0000\u04b2\u04b3\u0006\u0088\u0012\u0000\u04b3\u0120\u0001\u0000\u0000"+ - "\u0000\u04b4\u04b5\u0003i-\u0000\u04b5\u04b6\u0001\u0000\u0000\u0000\u04b6"+ - "\u04b7\u0006\u0089\u0016\u0000\u04b7\u0122\u0001\u0000\u0000\u0000\u04b8"+ - "\u04b9\u0003\u010d\u007f\u0000\u04b9\u04ba\u0001\u0000\u0000\u0000\u04ba"+ - "\u04bb\u0006\u008a\u001d\u0000\u04bb\u0124\u0001\u0000\u0000\u0000\u04bc"+ - "\u04bd\u0003\u00e9m\u0000\u04bd\u04be\u0001\u0000\u0000\u0000\u04be\u04bf"+ - "\u0006\u008b\u0019\u0000\u04bf\u0126\u0001\u0000\u0000\u0000\u04c0\u04c1"+ - "\u0003\u00adO\u0000\u04c1\u04c2\u0001\u0000\u0000\u0000\u04c2\u04c3\u0006"+ - "\u008c\u001e\u0000\u04c3\u0128\u0001\u0000\u0000\u0000\u04c4\u04c5\u0003"+ - "\u00819\u0000\u04c5\u04c6\u0001\u0000\u0000\u0000\u04c6\u04c7\u0006\u008d"+ - "\u0017\u0000\u04c7\u012a\u0001\u0000\u0000\u0000\u04c8\u04c9\u0003\u00a3"+ - "J\u0000\u04c9\u04ca\u0001\u0000\u0000\u0000\u04ca\u04cb\u0006\u008e\u0018"+ - "\u0000\u04cb\u012c\u0001\u0000\u0000\u0000\u04cc\u04cd\u00039\u0015\u0000"+ - "\u04cd\u04ce\u0001\u0000\u0000\u0000\u04ce\u04cf\u0006\u008f\n\u0000\u04cf"+ - "\u012e\u0001\u0000\u0000\u0000\u04d0\u04d1\u0003;\u0016\u0000\u04d1\u04d2"+ - "\u0001\u0000\u0000\u0000\u04d2\u04d3\u0006\u0090\n\u0000\u04d3\u0130\u0001"+ - 
"\u0000\u0000\u0000\u04d4\u04d5\u0003=\u0017\u0000\u04d5\u04d6\u0001\u0000"+ - "\u0000\u0000\u04d6\u04d7\u0006\u0091\n\u0000\u04d7\u0132\u0001\u0000\u0000"+ - "\u0000\u04d8\u04d9\u0003?\u0018\u0000\u04d9\u04da\u0001\u0000\u0000\u0000"+ - "\u04da\u04db\u0006\u0092\u000f\u0000\u04db\u04dc\u0006\u0092\u000b\u0000"+ - "\u04dc\u0134\u0001\u0000\u0000\u0000\u04dd\u04de\u0003i-\u0000\u04de\u04df"+ - "\u0001\u0000\u0000\u0000\u04df\u04e0\u0006\u0093\u0016\u0000\u04e0\u0136"+ - "\u0001\u0000\u0000\u0000\u04e1\u04e2\u0003\u00819\u0000\u04e2\u04e3\u0001"+ - "\u0000\u0000\u0000\u04e3\u04e4\u0006\u0094\u0017\u0000\u04e4\u0138\u0001"+ - "\u0000\u0000\u0000\u04e5\u04e6\u0003\u00a3J\u0000\u04e6\u04e7\u0001\u0000"+ - "\u0000\u0000\u04e7\u04e8\u0006\u0095\u0018\u0000\u04e8\u013a\u0001\u0000"+ - "\u0000\u0000\u04e9\u04ea\u0003\u00adO\u0000\u04ea\u04eb\u0001\u0000\u0000"+ - "\u0000\u04eb\u04ec\u0006\u0096\u001e\u0000\u04ec\u013c\u0001\u0000\u0000"+ - "\u0000\u04ed\u04ee\u0003\u00a9M\u0000\u04ee\u04ef\u0001\u0000\u0000\u0000"+ - "\u04ef\u04f0\u0006\u0097\u001f\u0000\u04f0\u013e\u0001\u0000\u0000\u0000"+ - "\u04f1\u04f2\u00039\u0015\u0000\u04f2\u04f3\u0001\u0000\u0000\u0000\u04f3"+ - "\u04f4\u0006\u0098\n\u0000\u04f4\u0140\u0001\u0000\u0000\u0000\u04f5\u04f6"+ - "\u0003;\u0016\u0000\u04f6\u04f7\u0001\u0000\u0000\u0000\u04f7\u04f8\u0006"+ - "\u0099\n\u0000\u04f8\u0142\u0001\u0000\u0000\u0000\u04f9\u04fa\u0003="+ - "\u0017\u0000\u04fa\u04fb\u0001\u0000\u0000\u0000\u04fb\u04fc\u0006\u009a"+ - "\n\u0000\u04fc\u0144\u0001\u0000\u0000\u0000\u04fd\u04fe\u0003?\u0018"+ - "\u0000\u04fe\u04ff\u0001\u0000\u0000\u0000\u04ff\u0500\u0006\u009b\u000f"+ - "\u0000\u0500\u0501\u0006\u009b\u000b\u0000\u0501\u0146\u0001\u0000\u0000"+ - "\u0000\u0502\u0503\u0007\u0001\u0000\u0000\u0503\u0504\u0007\t\u0000\u0000"+ - "\u0504\u0505\u0007\u000f\u0000\u0000\u0505\u0506\u0007\u0007\u0000\u0000"+ - "\u0506\u0148\u0001\u0000\u0000\u0000\u0507\u0508\u00039\u0015\u0000\u0508"+ - 
"\u0509\u0001\u0000\u0000\u0000\u0509\u050a\u0006\u009d\n\u0000\u050a\u014a"+ - "\u0001\u0000\u0000\u0000\u050b\u050c\u0003;\u0016\u0000\u050c\u050d\u0001"+ - "\u0000\u0000\u0000\u050d\u050e\u0006\u009e\n\u0000\u050e\u014c\u0001\u0000"+ - "\u0000\u0000\u050f\u0510\u0003=\u0017\u0000\u0510\u0511\u0001\u0000\u0000"+ - "\u0000\u0511\u0512\u0006\u009f\n\u0000\u0512\u014e\u0001\u0000\u0000\u0000"+ - "\u0513\u0514\u0003\u00a7L\u0000\u0514\u0515\u0001\u0000\u0000\u0000\u0515"+ - "\u0516\u0006\u00a0\u0010\u0000\u0516\u0517\u0006\u00a0\u000b\u0000\u0517"+ - "\u0150\u0001\u0000\u0000\u0000\u0518\u0519\u0005:\u0000\u0000\u0519\u0152"+ - "\u0001\u0000\u0000\u0000\u051a\u0520\u0003K\u001e\u0000\u051b\u0520\u0003"+ - "A\u0019\u0000\u051c\u0520\u0003i-\u0000\u051d\u0520\u0003C\u001a\u0000"+ - "\u051e\u0520\u0003Q!\u0000\u051f\u051a\u0001\u0000\u0000\u0000\u051f\u051b"+ - "\u0001\u0000\u0000\u0000\u051f\u051c\u0001\u0000\u0000\u0000\u051f\u051d"+ - "\u0001\u0000\u0000\u0000\u051f\u051e\u0001\u0000\u0000\u0000\u0520\u0521"+ - "\u0001\u0000\u0000\u0000\u0521\u051f\u0001\u0000\u0000\u0000\u0521\u0522"+ - "\u0001\u0000\u0000\u0000\u0522\u0154\u0001\u0000\u0000\u0000\u0523\u0524"+ - "\u00039\u0015\u0000\u0524\u0525\u0001\u0000\u0000\u0000\u0525\u0526\u0006"+ - "\u00a3\n\u0000\u0526\u0156\u0001\u0000\u0000\u0000\u0527\u0528\u0003;"+ - "\u0016\u0000\u0528\u0529\u0001\u0000\u0000\u0000\u0529\u052a\u0006\u00a4"+ - "\n\u0000\u052a\u0158\u0001\u0000\u0000\u0000\u052b\u052c\u0003=\u0017"+ - "\u0000\u052c\u052d\u0001\u0000\u0000\u0000\u052d\u052e\u0006\u00a5\n\u0000"+ - "\u052e\u015a\u0001\u0000\u0000\u0000\u052f\u0530\u0003?\u0018\u0000\u0530"+ - "\u0531\u0001\u0000\u0000\u0000\u0531\u0532\u0006\u00a6\u000f\u0000\u0532"+ - "\u0533\u0006\u00a6\u000b\u0000\u0533\u015c\u0001\u0000\u0000\u0000\u0534"+ - "\u0535\u0003\u0151\u00a1\u0000\u0535\u0536\u0001\u0000\u0000\u0000\u0536"+ - "\u0537\u0006\u00a7\u0011\u0000\u0537\u015e\u0001\u0000\u0000\u0000\u0538"+ - 
"\u0539\u0003e+\u0000\u0539\u053a\u0001\u0000\u0000\u0000\u053a\u053b\u0006"+ - "\u00a8\u0012\u0000\u053b\u0160\u0001\u0000\u0000\u0000\u053c\u053d\u0003"+ - "i-\u0000\u053d\u053e\u0001\u0000\u0000\u0000\u053e\u053f\u0006\u00a9\u0016"+ - "\u0000\u053f\u0162\u0001\u0000\u0000\u0000\u0540\u0541\u0003\u010b~\u0000"+ - "\u0541\u0542\u0001\u0000\u0000\u0000\u0542\u0543\u0006\u00aa \u0000\u0543"+ - "\u0544\u0006\u00aa!\u0000\u0544\u0164\u0001\u0000\u0000\u0000\u0545\u0546"+ - "\u0003\u00cf`\u0000\u0546\u0547\u0001\u0000\u0000\u0000\u0547\u0548\u0006"+ - "\u00ab\u0014\u0000\u0548\u0166\u0001\u0000\u0000\u0000\u0549\u054a\u0003"+ - "U#\u0000\u054a\u054b\u0001\u0000\u0000\u0000\u054b\u054c\u0006\u00ac\u0015"+ - "\u0000\u054c\u0168\u0001\u0000\u0000\u0000\u054d\u054e\u00039\u0015\u0000"+ - "\u054e\u054f\u0001\u0000\u0000\u0000\u054f\u0550\u0006\u00ad\n\u0000\u0550"+ - "\u016a\u0001\u0000\u0000\u0000\u0551\u0552\u0003;\u0016\u0000\u0552\u0553"+ - "\u0001\u0000\u0000\u0000\u0553\u0554\u0006\u00ae\n\u0000\u0554\u016c\u0001"+ - "\u0000\u0000\u0000\u0555\u0556\u0003=\u0017\u0000\u0556\u0557\u0001\u0000"+ - "\u0000\u0000\u0557\u0558\u0006\u00af\n\u0000\u0558\u016e\u0001\u0000\u0000"+ - "\u0000\u0559\u055a\u0003?\u0018\u0000\u055a\u055b\u0001\u0000\u0000\u0000"+ - "\u055b\u055c\u0006\u00b0\u000f\u0000\u055c\u055d\u0006\u00b0\u000b\u0000"+ - "\u055d\u055e\u0006\u00b0\u000b\u0000\u055e\u0170\u0001\u0000\u0000\u0000"+ - "\u055f\u0560\u0003e+\u0000\u0560\u0561\u0001\u0000\u0000\u0000\u0561\u0562"+ - "\u0006\u00b1\u0012\u0000\u0562\u0172\u0001\u0000\u0000\u0000\u0563\u0564"+ - "\u0003i-\u0000\u0564\u0565\u0001\u0000\u0000\u0000\u0565\u0566\u0006\u00b2"+ - "\u0016\u0000\u0566\u0174\u0001\u0000\u0000\u0000\u0567\u0568\u0003\u00e9"+ - "m\u0000\u0568\u0569\u0001\u0000\u0000\u0000\u0569\u056a\u0006\u00b3\u0019"+ - "\u0000\u056a\u0176\u0001\u0000\u0000\u0000\u056b\u056c\u00039\u0015\u0000"+ - "\u056c\u056d\u0001\u0000\u0000\u0000\u056d\u056e\u0006\u00b4\n\u0000\u056e"+ - 
"\u0178\u0001\u0000\u0000\u0000\u056f\u0570\u0003;\u0016\u0000\u0570\u0571"+ - "\u0001\u0000\u0000\u0000\u0571\u0572\u0006\u00b5\n\u0000\u0572\u017a\u0001"+ - "\u0000\u0000\u0000\u0573\u0574\u0003=\u0017\u0000\u0574\u0575\u0001\u0000"+ - "\u0000\u0000\u0575\u0576\u0006\u00b6\n\u0000\u0576\u017c\u0001\u0000\u0000"+ - "\u0000\u0577\u0578\u0003?\u0018\u0000\u0578\u0579\u0001\u0000\u0000\u0000"+ - "\u0579\u057a\u0006\u00b7\u000f\u0000\u057a\u057b\u0006\u00b7\u000b\u0000"+ - "\u057b\u017e\u0001\u0000\u0000\u0000\u057c\u057d\u0003\u00cf`\u0000\u057d"+ - "\u057e\u0001\u0000\u0000\u0000\u057e\u057f\u0006\u00b8\u0014\u0000\u057f"+ - "\u0580\u0006\u00b8\u000b\u0000\u0580\u0581\u0006\u00b8\"\u0000\u0581\u0180"+ - "\u0001\u0000\u0000\u0000\u0582\u0583\u0003U#\u0000\u0583\u0584\u0001\u0000"+ - "\u0000\u0000\u0584\u0585\u0006\u00b9\u0015\u0000\u0585\u0586\u0006\u00b9"+ - "\u000b\u0000\u0586\u0587\u0006\u00b9\"\u0000\u0587\u0182\u0001\u0000\u0000"+ - "\u0000\u0588\u0589\u00039\u0015\u0000\u0589\u058a\u0001\u0000\u0000\u0000"+ - "\u058a\u058b\u0006\u00ba\n\u0000\u058b\u0184\u0001\u0000\u0000\u0000\u058c"+ - "\u058d\u0003;\u0016\u0000\u058d\u058e\u0001\u0000\u0000\u0000\u058e\u058f"+ - "\u0006\u00bb\n\u0000\u058f\u0186\u0001\u0000\u0000\u0000\u0590\u0591\u0003"+ - "=\u0017\u0000\u0591\u0592\u0001\u0000\u0000\u0000\u0592\u0593\u0006\u00bc"+ - "\n\u0000\u0593\u0188\u0001\u0000\u0000\u0000\u0594\u0595\u0003\u0151\u00a1"+ - "\u0000\u0595\u0596\u0001\u0000\u0000\u0000\u0596\u0597\u0006\u00bd\u0011"+ - "\u0000\u0597\u0598\u0006\u00bd\u000b\u0000\u0598\u0599\u0006\u00bd\t\u0000"+ - "\u0599\u018a\u0001\u0000\u0000\u0000\u059a\u059b\u0003e+\u0000\u059b\u059c"+ - "\u0001\u0000\u0000\u0000\u059c\u059d\u0006\u00be\u0012\u0000\u059d\u059e"+ - "\u0006\u00be\u000b\u0000\u059e\u059f\u0006\u00be\t\u0000\u059f\u018c\u0001"+ - "\u0000\u0000\u0000\u05a0\u05a1\u00039\u0015\u0000\u05a1\u05a2\u0001\u0000"+ - "\u0000\u0000\u05a2\u05a3\u0006\u00bf\n\u0000\u05a3\u018e\u0001\u0000\u0000"+ - 
"\u0000\u05a4\u05a5\u0003;\u0016\u0000\u05a5\u05a6\u0001\u0000\u0000\u0000"+ - "\u05a6\u05a7\u0006\u00c0\n\u0000\u05a7\u0190\u0001\u0000\u0000\u0000\u05a8"+ - "\u05a9\u0003=\u0017\u0000\u05a9\u05aa\u0001\u0000\u0000\u0000\u05aa\u05ab"+ - "\u0006\u00c1\n\u0000\u05ab\u0192\u0001\u0000\u0000\u0000\u05ac\u05ad\u0003"+ - "\u00adO\u0000\u05ad\u05ae\u0001\u0000\u0000\u0000\u05ae\u05af\u0006\u00c2"+ - "\u000b\u0000\u05af\u05b0\u0006\u00c2\u0000\u0000\u05b0\u05b1\u0006\u00c2"+ - "\u001e\u0000\u05b1\u0194\u0001\u0000\u0000\u0000\u05b2\u05b3\u0003\u00a9"+ - "M\u0000\u05b3\u05b4\u0001\u0000\u0000\u0000\u05b4\u05b5\u0006\u00c3\u000b"+ - "\u0000\u05b5\u05b6\u0006\u00c3\u0000\u0000\u05b6\u05b7\u0006\u00c3\u001f"+ - "\u0000\u05b7\u0196\u0001\u0000\u0000\u0000\u05b8\u05b9\u0003[&\u0000\u05b9"+ - "\u05ba\u0001\u0000\u0000\u0000\u05ba\u05bb\u0006\u00c4\u000b\u0000\u05bb"+ - "\u05bc\u0006\u00c4\u0000\u0000\u05bc\u05bd\u0006\u00c4#\u0000\u05bd\u0198"+ - "\u0001\u0000\u0000\u0000\u05be\u05bf\u0003?\u0018\u0000\u05bf\u05c0\u0001"+ - "\u0000\u0000\u0000\u05c0\u05c1\u0006\u00c5\u000f\u0000\u05c1\u05c2\u0006"+ - "\u00c5\u000b\u0000\u05c2\u019a\u0001\u0000\u0000\u0000A\u0000\u0001\u0002"+ - "\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u024c\u0256\u025a"+ - "\u025d\u0266\u0268\u0273\u0286\u028b\u0294\u029b\u02a0\u02a2\u02ad\u02b5"+ - "\u02b8\u02ba\u02bf\u02c4\u02ca\u02d1\u02d6\u02dc\u02df\u02e7\u02eb\u036a"+ - "\u036f\u0376\u0378\u0388\u038d\u0392\u0394\u039a\u03e7\u03ec\u041b\u041f"+ - "\u0424\u0429\u042e\u0430\u0434\u0436\u048b\u048f\u0494\u051f\u0521$\u0005"+ - "\u0001\u0000\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0002\u0000\u0005"+ - "\u0003\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0005\u000b"+ - "\u0000\u0005\r\u0000\u0000\u0001\u0000\u0004\u0000\u0000\u0007\u0013\u0000"+ - "\u0007A\u0000\u0005\u0000\u0000\u0007\u0019\u0000\u0007B\u0000\u0007h"+ - "\u0000\u0007\"\u0000\u0007 \u0000\u0007L\u0000\u0007\u001a\u0000\u0007"+ - 
"$\u0000\u00070\u0000\u0007@\u0000\u0007P\u0000\u0005\n\u0000\u0005\u0007"+ - "\u0000\u0007Z\u0000\u0007Y\u0000\u0007D\u0000\u0007C\u0000\u0007X\u0000"+ - "\u0005\f\u0000\u0005\u000e\u0000\u0007\u001d\u0000"; + "\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ + "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012"+ + "\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012"+ + "\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0004\u0013"+ + "\u0240\b\u0013\u000b\u0013\f\u0013\u0241\u0001\u0013\u0001\u0013\u0001"+ + "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0005\u0014\u024a\b\u0014\n"+ + "\u0014\f\u0014\u024d\t\u0014\u0001\u0014\u0003\u0014\u0250\b\u0014\u0001"+ + "\u0014\u0003\u0014\u0253\b\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0001"+ + "\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0005\u0015\u025c\b\u0015\n"+ + "\u0015\f\u0015\u025f\t\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001"+ + "\u0015\u0001\u0015\u0001\u0016\u0004\u0016\u0267\b\u0016\u000b\u0016\f"+ + "\u0016\u0268\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001\u0017"+ + "\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u001a"+ + "\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c"+ + "\u0003\u001c\u027c\b\u001c\u0001\u001c\u0004\u001c\u027f\b\u001c\u000b"+ + "\u001c\f\u001c\u0280\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001"+ + "\u001f\u0001\u001f\u0001\u001f\u0003\u001f\u028a\b\u001f\u0001 \u0001"+ + " \u0001!\u0001!\u0001!\u0003!\u0291\b!\u0001\"\u0001\"\u0001\"\u0005\""+ + "\u0296\b\"\n\"\f\"\u0299\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ + "\"\u0005\"\u02a1\b\"\n\"\f\"\u02a4\t\"\u0001\"\u0001\"\u0001\"\u0001\""+ + "\u0001\"\u0003\"\u02ab\b\"\u0001\"\u0003\"\u02ae\b\"\u0003\"\u02b0\b\""+ + "\u0001#\u0004#\u02b3\b#\u000b#\f#\u02b4\u0001$\u0004$\u02b8\b$\u000b$"+ + "\f$\u02b9\u0001$\u0001$\u0005$\u02be\b$\n$\f$\u02c1\t$\u0001$\u0001$\u0004"+ + 
"$\u02c5\b$\u000b$\f$\u02c6\u0001$\u0004$\u02ca\b$\u000b$\f$\u02cb\u0001"+ + "$\u0001$\u0005$\u02d0\b$\n$\f$\u02d3\t$\u0003$\u02d5\b$\u0001$\u0001$"+ + "\u0001$\u0001$\u0004$\u02db\b$\u000b$\f$\u02dc\u0001$\u0001$\u0003$\u02e1"+ + "\b$\u0001%\u0001%\u0001%\u0001&\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001"+ + "\'\u0001\'\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001+\u0001"+ + "+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001-\u0001-\u0001-\u0001-\u0001"+ + "-\u0001-\u0001.\u0001.\u0001.\u0001.\u0001.\u0001.\u0001/\u0001/\u0001"+ + "/\u00010\u00010\u00010\u00011\u00011\u00011\u00011\u00011\u00012\u0001"+ + "2\u00012\u00012\u00012\u00013\u00013\u00014\u00014\u00014\u00014\u0001"+ + "5\u00015\u00015\u00015\u00015\u00016\u00016\u00016\u00016\u00016\u0001"+ + "6\u00017\u00017\u00017\u00018\u00018\u00019\u00019\u00019\u00019\u0001"+ + "9\u00019\u0001:\u0001:\u0001;\u0001;\u0001;\u0001;\u0001;\u0001<\u0001"+ + "<\u0001<\u0001=\u0001=\u0001=\u0001>\u0001>\u0001>\u0001?\u0001?\u0001"+ + "@\u0001@\u0001@\u0001A\u0001A\u0001B\u0001B\u0001B\u0001C\u0001C\u0001"+ + "D\u0001D\u0001E\u0001E\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001"+ + "H\u0001H\u0001H\u0001H\u0001H\u0001I\u0001I\u0001I\u0003I\u0362\bI\u0001"+ + "I\u0005I\u0365\bI\nI\fI\u0368\tI\u0001I\u0001I\u0004I\u036c\bI\u000bI"+ + "\fI\u036d\u0003I\u0370\bI\u0001J\u0001J\u0001J\u0001J\u0001J\u0001K\u0001"+ + "K\u0001K\u0001K\u0001K\u0001L\u0001L\u0005L\u037e\bL\nL\fL\u0381\tL\u0001"+ + "L\u0001L\u0003L\u0385\bL\u0001L\u0004L\u0388\bL\u000bL\fL\u0389\u0003"+ + "L\u038c\bL\u0001M\u0001M\u0004M\u0390\bM\u000bM\fM\u0391\u0001M\u0001"+ + "M\u0001N\u0001N\u0001O\u0001O\u0001O\u0001O\u0001P\u0001P\u0001P\u0001"+ + "P\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001R\u0001"+ + "S\u0001S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001U\u0001"+ + "U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001"+ + "W\u0001W\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001"+ + 
"Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001"+ + "\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001]\u0001]\u0001]\u0001]\u0001"+ + "]\u0001^\u0001^\u0001^\u0003^\u03df\b^\u0001_\u0004_\u03e2\b_\u000b_\f"+ + "_\u03e3\u0001`\u0001`\u0001`\u0001`\u0001a\u0001a\u0001a\u0001a\u0001"+ + "b\u0001b\u0001b\u0001b\u0001c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001"+ + "d\u0001d\u0001e\u0001e\u0001e\u0001e\u0001e\u0001f\u0001f\u0001f\u0001"+ + "f\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001h\u0001i\u0001"+ + "i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0003j\u0413\bj\u0001k\u0001"+ + "k\u0003k\u0417\bk\u0001k\u0005k\u041a\bk\nk\fk\u041d\tk\u0001k\u0001k"+ + "\u0003k\u0421\bk\u0001k\u0004k\u0424\bk\u000bk\fk\u0425\u0003k\u0428\b"+ + "k\u0001l\u0001l\u0004l\u042c\bl\u000bl\fl\u042d\u0001m\u0001m\u0001m\u0001"+ + "m\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001"+ + "p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001"+ + "r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001t\u0001"+ + "u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001"+ + "w\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001y\u0001y\u0001z\u0001"+ + "z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001"+ + "|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001"+ + "~\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u0080\u0004"+ + "\u0080\u0481\b\u0080\u000b\u0080\f\u0080\u0482\u0001\u0080\u0001\u0080"+ + "\u0003\u0080\u0487\b\u0080\u0001\u0080\u0004\u0080\u048a\b\u0080\u000b"+ + "\u0080\f\u0080\u048b\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0081\u0001"+ + "\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0083\u0001\u0083\u0001"+ + "\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001"+ + "\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001"+ + "\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001"+ + 
"\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001"+ + "\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001"+ + "\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001"+ + "\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d\u0001"+ + "\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001"+ + "\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090\u0001"+ + "\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091\u0001"+ + "\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001"+ + "\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001\u0094\u0001"+ + "\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0096\u0001"+ + "\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097\u0001"+ + "\u0097\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0099\u0001"+ + "\u0099\u0001\u0099\u0001\u0099\u0001\u009a\u0001\u009a\u0001\u009a\u0001"+ + "\u009a\u0001\u009a\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009b\u0001"+ + "\u009b\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009d\u0001"+ + "\u009d\u0001\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001\u009e\u0001"+ + "\u009e\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0001"+ + "\u00a0\u0001\u00a0\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001"+ + "\u00a1\u0004\u00a1\u0517\b\u00a1\u000b\u00a1\f\u00a1\u0518\u0001\u00a2"+ + "\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a3\u0001\u00a3\u0001\u00a3"+ + "\u0001\u00a3\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a5"+ + "\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6\u0001\u00a6"+ + "\u0001\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7"+ + "\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001\u00a9"+ + "\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00aa\u0001\u00aa\u0001\u00aa"+ + 
"\u0001\u00aa\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ac"+ + "\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad"+ + "\u0001\u00ad\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00af"+ + "\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00b0"+ + "\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1"+ + "\u0001\u00b1\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b3"+ + "\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b4\u0001\u00b4\u0001\u00b4"+ + "\u0001\u00b4\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b6"+ + "\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7"+ + "\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b8\u0001\u00b8"+ + "\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b9\u0001\u00b9"+ + "\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00ba"+ + "\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bc\u0001\u00bc"+ + "\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd"+ + "\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be"+ + "\u0001\u00be\u0001\u00be\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf"+ + "\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001\u00c1"+ + "\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c2\u0001\u00c2"+ + "\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c3\u0001\u00c3"+ + "\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c4\u0001\u00c4"+ + "\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0002\u025d\u02a2\u0000\u00c5\u000f"+ + "\u0001\u0011\u0002\u0013\u0003\u0015\u0004\u0017\u0005\u0019\u0006\u001b"+ + "\u0007\u001d\b\u001f\t!\n#\u000b%\f\'\r)\u000e+\u000f-\u0010/\u00111\u0012"+ + "3\u00135\u00147\u00159\u0016;\u0017=\u0018?\u0000A\u0000C\u0000E\u0000"+ + "G\u0000I\u0000K\u0000M\u0000O\u0000Q\u0000S\u0019U\u001aW\u001bY\u001c"+ + "[\u001d]\u001e_\u001fa 
c!e\"g#i$k%m&o\'q(s)u*w+y,{-}.\u007f/\u00810\u0083"+ + "1\u00852\u00873\u00894\u008b5\u008d6\u008f7\u00918\u00939\u0095:\u0097"+ + ";\u0099<\u009b=\u009d>\u009f?\u00a1@\u00a3A\u00a5B\u00a7C\u00a9\u0000"+ + "\u00abD\u00adE\u00afF\u00b1G\u00b3\u0000\u00b5\u0000\u00b7H\u00b9I\u00bb"+ + "J\u00bd\u0000\u00bf\u0000\u00c1\u0000\u00c3\u0000\u00c5\u0000\u00c7\u0000"+ + "\u00c9K\u00cb\u0000\u00cdL\u00cf\u0000\u00d1\u0000\u00d3M\u00d5N\u00d7"+ + "O\u00d9\u0000\u00db\u0000\u00dd\u0000\u00df\u0000\u00e1\u0000\u00e3\u0000"+ + "\u00e5\u0000\u00e7P\u00e9Q\u00ebR\u00edS\u00ef\u0000\u00f1\u0000\u00f3"+ + "\u0000\u00f5\u0000\u00f7\u0000\u00f9\u0000\u00fbT\u00fd\u0000\u00ffU\u0101"+ + "V\u0103W\u0105\u0000\u0107\u0000\u0109X\u010bY\u010d\u0000\u010fZ\u0111"+ + "\u0000\u0113[\u0115\\\u0117]\u0119\u0000\u011b\u0000\u011d\u0000\u011f"+ + "\u0000\u0121\u0000\u0123\u0000\u0125\u0000\u0127\u0000\u0129\u0000\u012b"+ + "^\u012d_\u012f`\u0131\u0000\u0133\u0000\u0135\u0000\u0137\u0000\u0139"+ + "\u0000\u013b\u0000\u013da\u013fb\u0141c\u0143\u0000\u0145d\u0147e\u0149"+ + "f\u014bg\u014d\u0000\u014fh\u0151i\u0153j\u0155k\u0157l\u0159\u0000\u015b"+ + "\u0000\u015d\u0000\u015f\u0000\u0161\u0000\u0163\u0000\u0165\u0000\u0167"+ + "m\u0169n\u016bo\u016d\u0000\u016f\u0000\u0171\u0000\u0173\u0000\u0175"+ + "p\u0177q\u0179r\u017b\u0000\u017d\u0000\u017f\u0000\u0181s\u0183t\u0185"+ + "u\u0187\u0000\u0189\u0000\u018bv\u018dw\u018fx\u0191\u0000\u0193\u0000"+ + "\u0195\u0000\u0197\u0000\u000f\u0000\u0001\u0002\u0003\u0004\u0005\u0006"+ + "\u0007\b\t\n\u000b\f\r\u000e#\u0002\u0000DDdd\u0002\u0000IIii\u0002\u0000"+ + "SSss\u0002\u0000EEee\u0002\u0000CCcc\u0002\u0000TTtt\u0002\u0000RRrr\u0002"+ + "\u0000OOoo\u0002\u0000PPpp\u0002\u0000NNnn\u0002\u0000HHhh\u0002\u0000"+ + "VVvv\u0002\u0000AAaa\u0002\u0000LLll\u0002\u0000XXxx\u0002\u0000FFff\u0002"+ + "\u0000MMmm\u0002\u0000GGgg\u0002\u0000KKkk\u0002\u0000WWww\u0002\u0000"+ + "UUuu\u0006\u0000\t\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r"+ + "\r 
\u0001\u000009\u0002\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004\u0000"+ + "\n\n\r\r\"\"\\\\\u0002\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002\u0000"+ + "YYyy\u000b\u0000\t\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u000b\u0000"+ + "\t\n\r\r \"#,,//::<<>?\\\\||\u05d6\u0000\u000f\u0001\u0000\u0000\u0000"+ + "\u0000\u0011\u0001\u0000\u0000\u0000\u0000\u0013\u0001\u0000\u0000\u0000"+ + "\u0000\u0015\u0001\u0000\u0000\u0000\u0000\u0017\u0001\u0000\u0000\u0000"+ + "\u0000\u0019\u0001\u0000\u0000\u0000\u0000\u001b\u0001\u0000\u0000\u0000"+ + "\u0000\u001d\u0001\u0000\u0000\u0000\u0000\u001f\u0001\u0000\u0000\u0000"+ + "\u0000!\u0001\u0000\u0000\u0000\u0000#\u0001\u0000\u0000\u0000\u0000%"+ + "\u0001\u0000\u0000\u0000\u0000\'\u0001\u0000\u0000\u0000\u0000)\u0001"+ + "\u0000\u0000\u0000\u0000+\u0001\u0000\u0000\u0000\u0000-\u0001\u0000\u0000"+ + "\u0000\u0000/\u0001\u0000\u0000\u0000\u00001\u0001\u0000\u0000\u0000\u0000"+ + "3\u0001\u0000\u0000\u0000\u00005\u0001\u0000\u0000\u0000\u00007\u0001"+ + "\u0000\u0000\u0000\u00009\u0001\u0000\u0000\u0000\u0000;\u0001\u0000\u0000"+ + "\u0000\u0001=\u0001\u0000\u0000\u0000\u0001S\u0001\u0000\u0000\u0000\u0001"+ + "U\u0001\u0000\u0000\u0000\u0001W\u0001\u0000\u0000\u0000\u0001Y\u0001"+ + "\u0000\u0000\u0000\u0001[\u0001\u0000\u0000\u0000\u0001]\u0001\u0000\u0000"+ + "\u0000\u0001_\u0001\u0000\u0000\u0000\u0001a\u0001\u0000\u0000\u0000\u0001"+ + "c\u0001\u0000\u0000\u0000\u0001e\u0001\u0000\u0000\u0000\u0001g\u0001"+ + "\u0000\u0000\u0000\u0001i\u0001\u0000\u0000\u0000\u0001k\u0001\u0000\u0000"+ + "\u0000\u0001m\u0001\u0000\u0000\u0000\u0001o\u0001\u0000\u0000\u0000\u0001"+ + "q\u0001\u0000\u0000\u0000\u0001s\u0001\u0000\u0000\u0000\u0001u\u0001"+ + "\u0000\u0000\u0000\u0001w\u0001\u0000\u0000\u0000\u0001y\u0001\u0000\u0000"+ + "\u0000\u0001{\u0001\u0000\u0000\u0000\u0001}\u0001\u0000\u0000\u0000\u0001"+ + "\u007f\u0001\u0000\u0000\u0000\u0001\u0081\u0001\u0000\u0000\u0000\u0001"+ + 
"\u0083\u0001\u0000\u0000\u0000\u0001\u0085\u0001\u0000\u0000\u0000\u0001"+ + "\u0087\u0001\u0000\u0000\u0000\u0001\u0089\u0001\u0000\u0000\u0000\u0001"+ + "\u008b\u0001\u0000\u0000\u0000\u0001\u008d\u0001\u0000\u0000\u0000\u0001"+ + "\u008f\u0001\u0000\u0000\u0000\u0001\u0091\u0001\u0000\u0000\u0000\u0001"+ + "\u0093\u0001\u0000\u0000\u0000\u0001\u0095\u0001\u0000\u0000\u0000\u0001"+ + "\u0097\u0001\u0000\u0000\u0000\u0001\u0099\u0001\u0000\u0000\u0000\u0001"+ + "\u009b\u0001\u0000\u0000\u0000\u0001\u009d\u0001\u0000\u0000\u0000\u0001"+ + "\u009f\u0001\u0000\u0000\u0000\u0001\u00a1\u0001\u0000\u0000\u0000\u0001"+ + "\u00a3\u0001\u0000\u0000\u0000\u0001\u00a5\u0001\u0000\u0000\u0000\u0001"+ + "\u00a7\u0001\u0000\u0000\u0000\u0001\u00ab\u0001\u0000\u0000\u0000\u0001"+ + "\u00ad\u0001\u0000\u0000\u0000\u0001\u00af\u0001\u0000\u0000\u0000\u0001"+ + "\u00b1\u0001\u0000\u0000\u0000\u0002\u00b3\u0001\u0000\u0000\u0000\u0002"+ + "\u00b5\u0001\u0000\u0000\u0000\u0002\u00b7\u0001\u0000\u0000\u0000\u0002"+ + "\u00b9\u0001\u0000\u0000\u0000\u0002\u00bb\u0001\u0000\u0000\u0000\u0003"+ + "\u00bd\u0001\u0000\u0000\u0000\u0003\u00bf\u0001\u0000\u0000\u0000\u0003"+ + "\u00c1\u0001\u0000\u0000\u0000\u0003\u00c3\u0001\u0000\u0000\u0000\u0003"+ + "\u00c5\u0001\u0000\u0000\u0000\u0003\u00c7\u0001\u0000\u0000\u0000\u0003"+ + "\u00c9\u0001\u0000\u0000\u0000\u0003\u00cd\u0001\u0000\u0000\u0000\u0003"+ + "\u00cf\u0001\u0000\u0000\u0000\u0003\u00d1\u0001\u0000\u0000\u0000\u0003"+ + "\u00d3\u0001\u0000\u0000\u0000\u0003\u00d5\u0001\u0000\u0000\u0000\u0003"+ + "\u00d7\u0001\u0000\u0000\u0000\u0004\u00d9\u0001\u0000\u0000\u0000\u0004"+ + "\u00db\u0001\u0000\u0000\u0000\u0004\u00dd\u0001\u0000\u0000\u0000\u0004"+ + "\u00df\u0001\u0000\u0000\u0000\u0004\u00e1\u0001\u0000\u0000\u0000\u0004"+ + "\u00e7\u0001\u0000\u0000\u0000\u0004\u00e9\u0001\u0000\u0000\u0000\u0004"+ + "\u00eb\u0001\u0000\u0000\u0000\u0004\u00ed\u0001\u0000\u0000\u0000\u0005"+ + 
"\u00ef\u0001\u0000\u0000\u0000\u0005\u00f1\u0001\u0000\u0000\u0000\u0005"+ + "\u00f3\u0001\u0000\u0000\u0000\u0005\u00f5\u0001\u0000\u0000\u0000\u0005"+ + "\u00f7\u0001\u0000\u0000\u0000\u0005\u00f9\u0001\u0000\u0000\u0000\u0005"+ + "\u00fb\u0001\u0000\u0000\u0000\u0005\u00fd\u0001\u0000\u0000\u0000\u0005"+ + "\u00ff\u0001\u0000\u0000\u0000\u0005\u0101\u0001\u0000\u0000\u0000\u0005"+ + "\u0103\u0001\u0000\u0000\u0000\u0006\u0105\u0001\u0000\u0000\u0000\u0006"+ + "\u0107\u0001\u0000\u0000\u0000\u0006\u0109\u0001\u0000\u0000\u0000\u0006"+ + "\u010b\u0001\u0000\u0000\u0000\u0006\u010f\u0001\u0000\u0000\u0000\u0006"+ + "\u0111\u0001\u0000\u0000\u0000\u0006\u0113\u0001\u0000\u0000\u0000\u0006"+ + "\u0115\u0001\u0000\u0000\u0000\u0006\u0117\u0001\u0000\u0000\u0000\u0007"+ + "\u0119\u0001\u0000\u0000\u0000\u0007\u011b\u0001\u0000\u0000\u0000\u0007"+ + "\u011d\u0001\u0000\u0000\u0000\u0007\u011f\u0001\u0000\u0000\u0000\u0007"+ + "\u0121\u0001\u0000\u0000\u0000\u0007\u0123\u0001\u0000\u0000\u0000\u0007"+ + "\u0125\u0001\u0000\u0000\u0000\u0007\u0127\u0001\u0000\u0000\u0000\u0007"+ + "\u0129\u0001\u0000\u0000\u0000\u0007\u012b\u0001\u0000\u0000\u0000\u0007"+ + "\u012d\u0001\u0000\u0000\u0000\u0007\u012f\u0001\u0000\u0000\u0000\b\u0131"+ + "\u0001\u0000\u0000\u0000\b\u0133\u0001\u0000\u0000\u0000\b\u0135\u0001"+ + "\u0000\u0000\u0000\b\u0137\u0001\u0000\u0000\u0000\b\u0139\u0001\u0000"+ + "\u0000\u0000\b\u013b\u0001\u0000\u0000\u0000\b\u013d\u0001\u0000\u0000"+ + "\u0000\b\u013f\u0001\u0000\u0000\u0000\b\u0141\u0001\u0000\u0000\u0000"+ + "\t\u0143\u0001\u0000\u0000\u0000\t\u0145\u0001\u0000\u0000\u0000\t\u0147"+ + "\u0001\u0000\u0000\u0000\t\u0149\u0001\u0000\u0000\u0000\t\u014b\u0001"+ + "\u0000\u0000\u0000\n\u014d\u0001\u0000\u0000\u0000\n\u014f\u0001\u0000"+ + "\u0000\u0000\n\u0151\u0001\u0000\u0000\u0000\n\u0153\u0001\u0000\u0000"+ + "\u0000\n\u0155\u0001\u0000\u0000\u0000\n\u0157\u0001\u0000\u0000\u0000"+ + 
"\u000b\u0159\u0001\u0000\u0000\u0000\u000b\u015b\u0001\u0000\u0000\u0000"+ + "\u000b\u015d\u0001\u0000\u0000\u0000\u000b\u015f\u0001\u0000\u0000\u0000"+ + "\u000b\u0161\u0001\u0000\u0000\u0000\u000b\u0163\u0001\u0000\u0000\u0000"+ + "\u000b\u0165\u0001\u0000\u0000\u0000\u000b\u0167\u0001\u0000\u0000\u0000"+ + "\u000b\u0169\u0001\u0000\u0000\u0000\u000b\u016b\u0001\u0000\u0000\u0000"+ + "\f\u016d\u0001\u0000\u0000\u0000\f\u016f\u0001\u0000\u0000\u0000\f\u0171"+ + "\u0001\u0000\u0000\u0000\f\u0173\u0001\u0000\u0000\u0000\f\u0175\u0001"+ + "\u0000\u0000\u0000\f\u0177\u0001\u0000\u0000\u0000\f\u0179\u0001\u0000"+ + "\u0000\u0000\r\u017b\u0001\u0000\u0000\u0000\r\u017d\u0001\u0000\u0000"+ + "\u0000\r\u017f\u0001\u0000\u0000\u0000\r\u0181\u0001\u0000\u0000\u0000"+ + "\r\u0183\u0001\u0000\u0000\u0000\r\u0185\u0001\u0000\u0000\u0000\u000e"+ + "\u0187\u0001\u0000\u0000\u0000\u000e\u0189\u0001\u0000\u0000\u0000\u000e"+ + "\u018b\u0001\u0000\u0000\u0000\u000e\u018d\u0001\u0000\u0000\u0000\u000e"+ + "\u018f\u0001\u0000\u0000\u0000\u000e\u0191\u0001\u0000\u0000\u0000\u000e"+ + "\u0193\u0001\u0000\u0000\u0000\u000e\u0195\u0001\u0000\u0000\u0000\u000e"+ + "\u0197\u0001\u0000\u0000\u0000\u000f\u0199\u0001\u0000\u0000\u0000\u0011"+ + "\u01a3\u0001\u0000\u0000\u0000\u0013\u01aa\u0001\u0000\u0000\u0000\u0015"+ + "\u01b3\u0001\u0000\u0000\u0000\u0017\u01ba\u0001\u0000\u0000\u0000\u0019"+ + "\u01c4\u0001\u0000\u0000\u0000\u001b\u01cb\u0001\u0000\u0000\u0000\u001d"+ + "\u01d2\u0001\u0000\u0000\u0000\u001f\u01d9\u0001\u0000\u0000\u0000!\u01e1"+ + "\u0001\u0000\u0000\u0000#\u01ed\u0001\u0000\u0000\u0000%\u01f6\u0001\u0000"+ + "\u0000\u0000\'\u01fc\u0001\u0000\u0000\u0000)\u0203\u0001\u0000\u0000"+ + "\u0000+\u020a\u0001\u0000\u0000\u0000-\u0212\u0001\u0000\u0000\u0000/"+ + "\u021a\u0001\u0000\u0000\u00001\u0229\u0001\u0000\u0000\u00003\u0233\u0001"+ + "\u0000\u0000\u00005\u023f\u0001\u0000\u0000\u00007\u0245\u0001\u0000\u0000"+ + 
"\u00009\u0256\u0001\u0000\u0000\u0000;\u0266\u0001\u0000\u0000\u0000="+ + "\u026c\u0001\u0000\u0000\u0000?\u0270\u0001\u0000\u0000\u0000A\u0272\u0001"+ + "\u0000\u0000\u0000C\u0274\u0001\u0000\u0000\u0000E\u0277\u0001\u0000\u0000"+ + "\u0000G\u0279\u0001\u0000\u0000\u0000I\u0282\u0001\u0000\u0000\u0000K"+ + "\u0284\u0001\u0000\u0000\u0000M\u0289\u0001\u0000\u0000\u0000O\u028b\u0001"+ + "\u0000\u0000\u0000Q\u0290\u0001\u0000\u0000\u0000S\u02af\u0001\u0000\u0000"+ + "\u0000U\u02b2\u0001\u0000\u0000\u0000W\u02e0\u0001\u0000\u0000\u0000Y"+ + "\u02e2\u0001\u0000\u0000\u0000[\u02e5\u0001\u0000\u0000\u0000]\u02e9\u0001"+ + "\u0000\u0000\u0000_\u02ed\u0001\u0000\u0000\u0000a\u02ef\u0001\u0000\u0000"+ + "\u0000c\u02f2\u0001\u0000\u0000\u0000e\u02f4\u0001\u0000\u0000\u0000g"+ + "\u02f9\u0001\u0000\u0000\u0000i\u02fb\u0001\u0000\u0000\u0000k\u0301\u0001"+ + "\u0000\u0000\u0000m\u0307\u0001\u0000\u0000\u0000o\u030a\u0001\u0000\u0000"+ + "\u0000q\u030d\u0001\u0000\u0000\u0000s\u0312\u0001\u0000\u0000\u0000u"+ + "\u0317\u0001\u0000\u0000\u0000w\u0319\u0001\u0000\u0000\u0000y\u031d\u0001"+ + "\u0000\u0000\u0000{\u0322\u0001\u0000\u0000\u0000}\u0328\u0001\u0000\u0000"+ + "\u0000\u007f\u032b\u0001\u0000\u0000\u0000\u0081\u032d\u0001\u0000\u0000"+ + "\u0000\u0083\u0333\u0001\u0000\u0000\u0000\u0085\u0335\u0001\u0000\u0000"+ + "\u0000\u0087\u033a\u0001\u0000\u0000\u0000\u0089\u033d\u0001\u0000\u0000"+ + "\u0000\u008b\u0340\u0001\u0000\u0000\u0000\u008d\u0343\u0001\u0000\u0000"+ + "\u0000\u008f\u0345\u0001\u0000\u0000\u0000\u0091\u0348\u0001\u0000\u0000"+ + "\u0000\u0093\u034a\u0001\u0000\u0000\u0000\u0095\u034d\u0001\u0000\u0000"+ + "\u0000\u0097\u034f\u0001\u0000\u0000\u0000\u0099\u0351\u0001\u0000\u0000"+ + "\u0000\u009b\u0353\u0001\u0000\u0000\u0000\u009d\u0355\u0001\u0000\u0000"+ + "\u0000\u009f\u0357\u0001\u0000\u0000\u0000\u00a1\u036f\u0001\u0000\u0000"+ + "\u0000\u00a3\u0371\u0001\u0000\u0000\u0000\u00a5\u0376\u0001\u0000\u0000"+ + 
"\u0000\u00a7\u038b\u0001\u0000\u0000\u0000\u00a9\u038d\u0001\u0000\u0000"+ + "\u0000\u00ab\u0395\u0001\u0000\u0000\u0000\u00ad\u0397\u0001\u0000\u0000"+ + "\u0000\u00af\u039b\u0001\u0000\u0000\u0000\u00b1\u039f\u0001\u0000\u0000"+ + "\u0000\u00b3\u03a3\u0001\u0000\u0000\u0000\u00b5\u03a8\u0001\u0000\u0000"+ + "\u0000\u00b7\u03ad\u0001\u0000\u0000\u0000\u00b9\u03b1\u0001\u0000\u0000"+ + "\u0000\u00bb\u03b5\u0001\u0000\u0000\u0000\u00bd\u03b9\u0001\u0000\u0000"+ + "\u0000\u00bf\u03be\u0001\u0000\u0000\u0000\u00c1\u03c2\u0001\u0000\u0000"+ + "\u0000\u00c3\u03c6\u0001\u0000\u0000\u0000\u00c5\u03ca\u0001\u0000\u0000"+ + "\u0000\u00c7\u03ce\u0001\u0000\u0000\u0000\u00c9\u03d2\u0001\u0000\u0000"+ + "\u0000\u00cb\u03de\u0001\u0000\u0000\u0000\u00cd\u03e1\u0001\u0000\u0000"+ + "\u0000\u00cf\u03e5\u0001\u0000\u0000\u0000\u00d1\u03e9\u0001\u0000\u0000"+ + "\u0000\u00d3\u03ed\u0001\u0000\u0000\u0000\u00d5\u03f1\u0001\u0000\u0000"+ + "\u0000\u00d7\u03f5\u0001\u0000\u0000\u0000\u00d9\u03f9\u0001\u0000\u0000"+ + "\u0000\u00db\u03fe\u0001\u0000\u0000\u0000\u00dd\u0402\u0001\u0000\u0000"+ + "\u0000\u00df\u0406\u0001\u0000\u0000\u0000\u00e1\u040a\u0001\u0000\u0000"+ + "\u0000\u00e3\u0412\u0001\u0000\u0000\u0000\u00e5\u0427\u0001\u0000\u0000"+ + "\u0000\u00e7\u042b\u0001\u0000\u0000\u0000\u00e9\u042f\u0001\u0000\u0000"+ + "\u0000\u00eb\u0433\u0001\u0000\u0000\u0000\u00ed\u0437\u0001\u0000\u0000"+ + "\u0000\u00ef\u043b\u0001\u0000\u0000\u0000\u00f1\u0440\u0001\u0000\u0000"+ + "\u0000\u00f3\u0444\u0001\u0000\u0000\u0000\u00f5\u0448\u0001\u0000\u0000"+ + "\u0000\u00f7\u044c\u0001\u0000\u0000\u0000\u00f9\u0450\u0001\u0000\u0000"+ + "\u0000\u00fb\u0454\u0001\u0000\u0000\u0000\u00fd\u0457\u0001\u0000\u0000"+ + "\u0000\u00ff\u045b\u0001\u0000\u0000\u0000\u0101\u045f\u0001\u0000\u0000"+ + "\u0000\u0103\u0463\u0001\u0000\u0000\u0000\u0105\u0467\u0001\u0000\u0000"+ + "\u0000\u0107\u046c\u0001\u0000\u0000\u0000\u0109\u0471\u0001\u0000\u0000"+ + 
"\u0000\u010b\u0476\u0001\u0000\u0000\u0000\u010d\u047d\u0001\u0000\u0000"+ + "\u0000\u010f\u0486\u0001\u0000\u0000\u0000\u0111\u048d\u0001\u0000\u0000"+ + "\u0000\u0113\u0491\u0001\u0000\u0000\u0000\u0115\u0495\u0001\u0000\u0000"+ + "\u0000\u0117\u0499\u0001\u0000\u0000\u0000\u0119\u049d\u0001\u0000\u0000"+ + "\u0000\u011b\u04a3\u0001\u0000\u0000\u0000\u011d\u04a7\u0001\u0000\u0000"+ + "\u0000\u011f\u04ab\u0001\u0000\u0000\u0000\u0121\u04af\u0001\u0000\u0000"+ + "\u0000\u0123\u04b3\u0001\u0000\u0000\u0000\u0125\u04b7\u0001\u0000\u0000"+ + "\u0000\u0127\u04bb\u0001\u0000\u0000\u0000\u0129\u04bf\u0001\u0000\u0000"+ + "\u0000\u012b\u04c3\u0001\u0000\u0000\u0000\u012d\u04c7\u0001\u0000\u0000"+ + "\u0000\u012f\u04cb\u0001\u0000\u0000\u0000\u0131\u04cf\u0001\u0000\u0000"+ + "\u0000\u0133\u04d4\u0001\u0000\u0000\u0000\u0135\u04d8\u0001\u0000\u0000"+ + "\u0000\u0137\u04dc\u0001\u0000\u0000\u0000\u0139\u04e0\u0001\u0000\u0000"+ + "\u0000\u013b\u04e4\u0001\u0000\u0000\u0000\u013d\u04e8\u0001\u0000\u0000"+ + "\u0000\u013f\u04ec\u0001\u0000\u0000\u0000\u0141\u04f0\u0001\u0000\u0000"+ + "\u0000\u0143\u04f4\u0001\u0000\u0000\u0000\u0145\u04f9\u0001\u0000\u0000"+ + "\u0000\u0147\u04fe\u0001\u0000\u0000\u0000\u0149\u0502\u0001\u0000\u0000"+ + "\u0000\u014b\u0506\u0001\u0000\u0000\u0000\u014d\u050a\u0001\u0000\u0000"+ + "\u0000\u014f\u050f\u0001\u0000\u0000\u0000\u0151\u0516\u0001\u0000\u0000"+ + "\u0000\u0153\u051a\u0001\u0000\u0000\u0000\u0155\u051e\u0001\u0000\u0000"+ + "\u0000\u0157\u0522\u0001\u0000\u0000\u0000\u0159\u0526\u0001\u0000\u0000"+ + "\u0000\u015b\u052b\u0001\u0000\u0000\u0000\u015d\u052f\u0001\u0000\u0000"+ + "\u0000\u015f\u0533\u0001\u0000\u0000\u0000\u0161\u0537\u0001\u0000\u0000"+ + "\u0000\u0163\u053c\u0001\u0000\u0000\u0000\u0165\u0540\u0001\u0000\u0000"+ + "\u0000\u0167\u0544\u0001\u0000\u0000\u0000\u0169\u0548\u0001\u0000\u0000"+ + "\u0000\u016b\u054c\u0001\u0000\u0000\u0000\u016d\u0550\u0001\u0000\u0000"+ + 
"\u0000\u016f\u0556\u0001\u0000\u0000\u0000\u0171\u055a\u0001\u0000\u0000"+ + "\u0000\u0173\u055e\u0001\u0000\u0000\u0000\u0175\u0562\u0001\u0000\u0000"+ + "\u0000\u0177\u0566\u0001\u0000\u0000\u0000\u0179\u056a\u0001\u0000\u0000"+ + "\u0000\u017b\u056e\u0001\u0000\u0000\u0000\u017d\u0573\u0001\u0000\u0000"+ + "\u0000\u017f\u0579\u0001\u0000\u0000\u0000\u0181\u057f\u0001\u0000\u0000"+ + "\u0000\u0183\u0583\u0001\u0000\u0000\u0000\u0185\u0587\u0001\u0000\u0000"+ + "\u0000\u0187\u058b\u0001\u0000\u0000\u0000\u0189\u0591\u0001\u0000\u0000"+ + "\u0000\u018b\u0597\u0001\u0000\u0000\u0000\u018d\u059b\u0001\u0000\u0000"+ + "\u0000\u018f\u059f\u0001\u0000\u0000\u0000\u0191\u05a3\u0001\u0000\u0000"+ + "\u0000\u0193\u05a9\u0001\u0000\u0000\u0000\u0195\u05af\u0001\u0000\u0000"+ + "\u0000\u0197\u05b5\u0001\u0000\u0000\u0000\u0199\u019a\u0007\u0000\u0000"+ + "\u0000\u019a\u019b\u0007\u0001\u0000\u0000\u019b\u019c\u0007\u0002\u0000"+ + "\u0000\u019c\u019d\u0007\u0002\u0000\u0000\u019d\u019e\u0007\u0003\u0000"+ + "\u0000\u019e\u019f\u0007\u0004\u0000\u0000\u019f\u01a0\u0007\u0005\u0000"+ + "\u0000\u01a0\u01a1\u0001\u0000\u0000\u0000\u01a1\u01a2\u0006\u0000\u0000"+ + "\u0000\u01a2\u0010\u0001\u0000\u0000\u0000\u01a3\u01a4\u0007\u0000\u0000"+ + "\u0000\u01a4\u01a5\u0007\u0006\u0000\u0000\u01a5\u01a6\u0007\u0007\u0000"+ + "\u0000\u01a6\u01a7\u0007\b\u0000\u0000\u01a7\u01a8\u0001\u0000\u0000\u0000"+ + "\u01a8\u01a9\u0006\u0001\u0001\u0000\u01a9\u0012\u0001\u0000\u0000\u0000"+ + "\u01aa\u01ab\u0007\u0003\u0000\u0000\u01ab\u01ac\u0007\t\u0000\u0000\u01ac"+ + "\u01ad\u0007\u0006\u0000\u0000\u01ad\u01ae\u0007\u0001\u0000\u0000\u01ae"+ + "\u01af\u0007\u0004\u0000\u0000\u01af\u01b0\u0007\n\u0000\u0000\u01b0\u01b1"+ + "\u0001\u0000\u0000\u0000\u01b1\u01b2\u0006\u0002\u0002\u0000\u01b2\u0014"+ + "\u0001\u0000\u0000\u0000\u01b3\u01b4\u0007\u0003\u0000\u0000\u01b4\u01b5"+ + "\u0007\u000b\u0000\u0000\u01b5\u01b6\u0007\f\u0000\u0000\u01b6\u01b7\u0007"+ + 
"\r\u0000\u0000\u01b7\u01b8\u0001\u0000\u0000\u0000\u01b8\u01b9\u0006\u0003"+ + "\u0000\u0000\u01b9\u0016\u0001\u0000\u0000\u0000\u01ba\u01bb\u0007\u0003"+ + "\u0000\u0000\u01bb\u01bc\u0007\u000e\u0000\u0000\u01bc\u01bd\u0007\b\u0000"+ + "\u0000\u01bd\u01be\u0007\r\u0000\u0000\u01be\u01bf\u0007\f\u0000\u0000"+ + "\u01bf\u01c0\u0007\u0001\u0000\u0000\u01c0\u01c1\u0007\t\u0000\u0000\u01c1"+ + "\u01c2\u0001\u0000\u0000\u0000\u01c2\u01c3\u0006\u0004\u0003\u0000\u01c3"+ + "\u0018\u0001\u0000\u0000\u0000\u01c4\u01c5\u0007\u000f\u0000\u0000\u01c5"+ + "\u01c6\u0007\u0006\u0000\u0000\u01c6\u01c7\u0007\u0007\u0000\u0000\u01c7"+ + "\u01c8\u0007\u0010\u0000\u0000\u01c8\u01c9\u0001\u0000\u0000\u0000\u01c9"+ + "\u01ca\u0006\u0005\u0004\u0000\u01ca\u001a\u0001\u0000\u0000\u0000\u01cb"+ + "\u01cc\u0007\u0011\u0000\u0000\u01cc\u01cd\u0007\u0006\u0000\u0000\u01cd"+ + "\u01ce\u0007\u0007\u0000\u0000\u01ce\u01cf\u0007\u0012\u0000\u0000\u01cf"+ + "\u01d0\u0001\u0000\u0000\u0000\u01d0\u01d1\u0006\u0006\u0000\u0000\u01d1"+ + "\u001c\u0001\u0000\u0000\u0000\u01d2\u01d3\u0007\u0012\u0000\u0000\u01d3"+ + "\u01d4\u0007\u0003\u0000\u0000\u01d4\u01d5\u0007\u0003\u0000\u0000\u01d5"+ + "\u01d6\u0007\b\u0000\u0000\u01d6\u01d7\u0001\u0000\u0000\u0000\u01d7\u01d8"+ + "\u0006\u0007\u0001\u0000\u01d8\u001e\u0001\u0000\u0000\u0000\u01d9\u01da"+ + "\u0007\r\u0000\u0000\u01da\u01db\u0007\u0001\u0000\u0000\u01db\u01dc\u0007"+ + "\u0010\u0000\u0000\u01dc\u01dd\u0007\u0001\u0000\u0000\u01dd\u01de\u0007"+ + "\u0005\u0000\u0000\u01de\u01df\u0001\u0000\u0000\u0000\u01df\u01e0\u0006"+ + "\b\u0000\u0000\u01e0 \u0001\u0000\u0000\u0000\u01e1\u01e2\u0007\u0010"+ + "\u0000\u0000\u01e2\u01e3\u0007\u000b\u0000\u0000\u01e3\u01e4\u0005_\u0000"+ + "\u0000\u01e4\u01e5\u0007\u0003\u0000\u0000\u01e5\u01e6\u0007\u000e\u0000"+ + "\u0000\u01e6\u01e7\u0007\b\u0000\u0000\u01e7\u01e8\u0007\f\u0000\u0000"+ + "\u01e8\u01e9\u0007\t\u0000\u0000\u01e9\u01ea\u0007\u0000\u0000\u0000\u01ea"+ + 
"\u01eb\u0001\u0000\u0000\u0000\u01eb\u01ec\u0006\t\u0005\u0000\u01ec\""+ + "\u0001\u0000\u0000\u0000\u01ed\u01ee\u0007\u0006\u0000\u0000\u01ee\u01ef"+ + "\u0007\u0003\u0000\u0000\u01ef\u01f0\u0007\t\u0000\u0000\u01f0\u01f1\u0007"+ + "\f\u0000\u0000\u01f1\u01f2\u0007\u0010\u0000\u0000\u01f2\u01f3\u0007\u0003"+ + "\u0000\u0000\u01f3\u01f4\u0001\u0000\u0000\u0000\u01f4\u01f5\u0006\n\u0006"+ + "\u0000\u01f5$\u0001\u0000\u0000\u0000\u01f6\u01f7\u0007\u0006\u0000\u0000"+ + "\u01f7\u01f8\u0007\u0007\u0000\u0000\u01f8\u01f9\u0007\u0013\u0000\u0000"+ + "\u01f9\u01fa\u0001\u0000\u0000\u0000\u01fa\u01fb\u0006\u000b\u0000\u0000"+ + "\u01fb&\u0001\u0000\u0000\u0000\u01fc\u01fd\u0007\u0002\u0000\u0000\u01fd"+ + "\u01fe\u0007\n\u0000\u0000\u01fe\u01ff\u0007\u0007\u0000\u0000\u01ff\u0200"+ + "\u0007\u0013\u0000\u0000\u0200\u0201\u0001\u0000\u0000\u0000\u0201\u0202"+ + "\u0006\f\u0007\u0000\u0202(\u0001\u0000\u0000\u0000\u0203\u0204\u0007"+ + "\u0002\u0000\u0000\u0204\u0205\u0007\u0007\u0000\u0000\u0205\u0206\u0007"+ + "\u0006\u0000\u0000\u0206\u0207\u0007\u0005\u0000\u0000\u0207\u0208\u0001"+ + "\u0000\u0000\u0000\u0208\u0209\u0006\r\u0000\u0000\u0209*\u0001\u0000"+ + "\u0000\u0000\u020a\u020b\u0007\u0002\u0000\u0000\u020b\u020c\u0007\u0005"+ + "\u0000\u0000\u020c\u020d\u0007\f\u0000\u0000\u020d\u020e\u0007\u0005\u0000"+ + "\u0000\u020e\u020f\u0007\u0002\u0000\u0000\u020f\u0210\u0001\u0000\u0000"+ + "\u0000\u0210\u0211\u0006\u000e\u0000\u0000\u0211,\u0001\u0000\u0000\u0000"+ + "\u0212\u0213\u0007\u0013\u0000\u0000\u0213\u0214\u0007\n\u0000\u0000\u0214"+ + "\u0215\u0007\u0003\u0000\u0000\u0215\u0216\u0007\u0006\u0000\u0000\u0216"+ + "\u0217\u0007\u0003\u0000\u0000\u0217\u0218\u0001\u0000\u0000\u0000\u0218"+ + "\u0219\u0006\u000f\u0000\u0000\u0219.\u0001\u0000\u0000\u0000\u021a\u021b"+ + "\u0004\u0010\u0000\u0000\u021b\u021c\u0007\u0001\u0000\u0000\u021c\u021d"+ + "\u0007\t\u0000\u0000\u021d\u021e\u0007\r\u0000\u0000\u021e\u021f\u0007"+ + 
"\u0001\u0000\u0000\u021f\u0220\u0007\t\u0000\u0000\u0220\u0221\u0007\u0003"+ + "\u0000\u0000\u0221\u0222\u0007\u0002\u0000\u0000\u0222\u0223\u0007\u0005"+ + "\u0000\u0000\u0223\u0224\u0007\f\u0000\u0000\u0224\u0225\u0007\u0005\u0000"+ + "\u0000\u0225\u0226\u0007\u0002\u0000\u0000\u0226\u0227\u0001\u0000\u0000"+ + "\u0000\u0227\u0228\u0006\u0010\u0000\u0000\u02280\u0001\u0000\u0000\u0000"+ + "\u0229\u022a\u0004\u0011\u0001\u0000\u022a\u022b\u0007\r\u0000\u0000\u022b"+ + "\u022c\u0007\u0007\u0000\u0000\u022c\u022d\u0007\u0007\u0000\u0000\u022d"+ + "\u022e\u0007\u0012\u0000\u0000\u022e\u022f\u0007\u0014\u0000\u0000\u022f"+ + "\u0230\u0007\b\u0000\u0000\u0230\u0231\u0001\u0000\u0000\u0000\u0231\u0232"+ + "\u0006\u0011\b\u0000\u02322\u0001\u0000\u0000\u0000\u0233\u0234\u0004"+ + "\u0012\u0002\u0000\u0234\u0235\u0007\u0010\u0000\u0000\u0235\u0236\u0007"+ + "\u0003\u0000\u0000\u0236\u0237\u0007\u0005\u0000\u0000\u0237\u0238\u0007"+ + "\u0006\u0000\u0000\u0238\u0239\u0007\u0001\u0000\u0000\u0239\u023a\u0007"+ + "\u0004\u0000\u0000\u023a\u023b\u0007\u0002\u0000\u0000\u023b\u023c\u0001"+ + "\u0000\u0000\u0000\u023c\u023d\u0006\u0012\t\u0000\u023d4\u0001\u0000"+ + "\u0000\u0000\u023e\u0240\b\u0015\u0000\u0000\u023f\u023e\u0001\u0000\u0000"+ + "\u0000\u0240\u0241\u0001\u0000\u0000\u0000\u0241\u023f\u0001\u0000\u0000"+ + "\u0000\u0241\u0242\u0001\u0000\u0000\u0000\u0242\u0243\u0001\u0000\u0000"+ + "\u0000\u0243\u0244\u0006\u0013\u0000\u0000\u02446\u0001\u0000\u0000\u0000"+ + "\u0245\u0246\u0005/\u0000\u0000\u0246\u0247\u0005/\u0000\u0000\u0247\u024b"+ + "\u0001\u0000\u0000\u0000\u0248\u024a\b\u0016\u0000\u0000\u0249\u0248\u0001"+ + "\u0000\u0000\u0000\u024a\u024d\u0001\u0000\u0000\u0000\u024b\u0249\u0001"+ + "\u0000\u0000\u0000\u024b\u024c\u0001\u0000\u0000\u0000\u024c\u024f\u0001"+ + "\u0000\u0000\u0000\u024d\u024b\u0001\u0000\u0000\u0000\u024e\u0250\u0005"+ + "\r\u0000\u0000\u024f\u024e\u0001\u0000\u0000\u0000\u024f\u0250\u0001\u0000"+ + 
"\u0000\u0000\u0250\u0252\u0001\u0000\u0000\u0000\u0251\u0253\u0005\n\u0000"+ + "\u0000\u0252\u0251\u0001\u0000\u0000\u0000\u0252\u0253\u0001\u0000\u0000"+ + "\u0000\u0253\u0254\u0001\u0000\u0000\u0000\u0254\u0255\u0006\u0014\n\u0000"+ + "\u02558\u0001\u0000\u0000\u0000\u0256\u0257\u0005/\u0000\u0000\u0257\u0258"+ + "\u0005*\u0000\u0000\u0258\u025d\u0001\u0000\u0000\u0000\u0259\u025c\u0003"+ + "9\u0015\u0000\u025a\u025c\t\u0000\u0000\u0000\u025b\u0259\u0001\u0000"+ + "\u0000\u0000\u025b\u025a\u0001\u0000\u0000\u0000\u025c\u025f\u0001\u0000"+ + "\u0000\u0000\u025d\u025e\u0001\u0000\u0000\u0000\u025d\u025b\u0001\u0000"+ + "\u0000\u0000\u025e\u0260\u0001\u0000\u0000\u0000\u025f\u025d\u0001\u0000"+ + "\u0000\u0000\u0260\u0261\u0005*\u0000\u0000\u0261\u0262\u0005/\u0000\u0000"+ + "\u0262\u0263\u0001\u0000\u0000\u0000\u0263\u0264\u0006\u0015\n\u0000\u0264"+ + ":\u0001\u0000\u0000\u0000\u0265\u0267\u0007\u0017\u0000\u0000\u0266\u0265"+ + "\u0001\u0000\u0000\u0000\u0267\u0268\u0001\u0000\u0000\u0000\u0268\u0266"+ + "\u0001\u0000\u0000\u0000\u0268\u0269\u0001\u0000\u0000\u0000\u0269\u026a"+ + "\u0001\u0000\u0000\u0000\u026a\u026b\u0006\u0016\n\u0000\u026b<\u0001"+ + "\u0000\u0000\u0000\u026c\u026d\u0005|\u0000\u0000\u026d\u026e\u0001\u0000"+ + "\u0000\u0000\u026e\u026f\u0006\u0017\u000b\u0000\u026f>\u0001\u0000\u0000"+ + "\u0000\u0270\u0271\u0007\u0018\u0000\u0000\u0271@\u0001\u0000\u0000\u0000"+ + "\u0272\u0273\u0007\u0019\u0000\u0000\u0273B\u0001\u0000\u0000\u0000\u0274"+ + "\u0275\u0005\\\u0000\u0000\u0275\u0276\u0007\u001a\u0000\u0000\u0276D"+ + "\u0001\u0000\u0000\u0000\u0277\u0278\b\u001b\u0000\u0000\u0278F\u0001"+ + "\u0000\u0000\u0000\u0279\u027b\u0007\u0003\u0000\u0000\u027a\u027c\u0007"+ + "\u001c\u0000\u0000\u027b\u027a\u0001\u0000\u0000\u0000\u027b\u027c\u0001"+ + "\u0000\u0000\u0000\u027c\u027e\u0001\u0000\u0000\u0000\u027d\u027f\u0003"+ + "?\u0018\u0000\u027e\u027d\u0001\u0000\u0000\u0000\u027f\u0280\u0001\u0000"+ + 
"\u0000\u0000\u0280\u027e\u0001\u0000\u0000\u0000\u0280\u0281\u0001\u0000"+ + "\u0000\u0000\u0281H\u0001\u0000\u0000\u0000\u0282\u0283\u0005@\u0000\u0000"+ + "\u0283J\u0001\u0000\u0000\u0000\u0284\u0285\u0005`\u0000\u0000\u0285L"+ + "\u0001\u0000\u0000\u0000\u0286\u028a\b\u001d\u0000\u0000\u0287\u0288\u0005"+ + "`\u0000\u0000\u0288\u028a\u0005`\u0000\u0000\u0289\u0286\u0001\u0000\u0000"+ + "\u0000\u0289\u0287\u0001\u0000\u0000\u0000\u028aN\u0001\u0000\u0000\u0000"+ + "\u028b\u028c\u0005_\u0000\u0000\u028cP\u0001\u0000\u0000\u0000\u028d\u0291"+ + "\u0003A\u0019\u0000\u028e\u0291\u0003?\u0018\u0000\u028f\u0291\u0003O"+ + " \u0000\u0290\u028d\u0001\u0000\u0000\u0000\u0290\u028e\u0001\u0000\u0000"+ + "\u0000\u0290\u028f\u0001\u0000\u0000\u0000\u0291R\u0001\u0000\u0000\u0000"+ + "\u0292\u0297\u0005\"\u0000\u0000\u0293\u0296\u0003C\u001a\u0000\u0294"+ + "\u0296\u0003E\u001b\u0000\u0295\u0293\u0001\u0000\u0000\u0000\u0295\u0294"+ + "\u0001\u0000\u0000\u0000\u0296\u0299\u0001\u0000\u0000\u0000\u0297\u0295"+ + "\u0001\u0000\u0000\u0000\u0297\u0298\u0001\u0000\u0000\u0000\u0298\u029a"+ + "\u0001\u0000\u0000\u0000\u0299\u0297\u0001\u0000\u0000\u0000\u029a\u02b0"+ + "\u0005\"\u0000\u0000\u029b\u029c\u0005\"\u0000\u0000\u029c\u029d\u0005"+ + "\"\u0000\u0000\u029d\u029e\u0005\"\u0000\u0000\u029e\u02a2\u0001\u0000"+ + "\u0000\u0000\u029f\u02a1\b\u0016\u0000\u0000\u02a0\u029f\u0001\u0000\u0000"+ + "\u0000\u02a1\u02a4\u0001\u0000\u0000\u0000\u02a2\u02a3\u0001\u0000\u0000"+ + "\u0000\u02a2\u02a0\u0001\u0000\u0000\u0000\u02a3\u02a5\u0001\u0000\u0000"+ + "\u0000\u02a4\u02a2\u0001\u0000\u0000\u0000\u02a5\u02a6\u0005\"\u0000\u0000"+ + "\u02a6\u02a7\u0005\"\u0000\u0000\u02a7\u02a8\u0005\"\u0000\u0000\u02a8"+ + "\u02aa\u0001\u0000\u0000\u0000\u02a9\u02ab\u0005\"\u0000\u0000\u02aa\u02a9"+ + "\u0001\u0000\u0000\u0000\u02aa\u02ab\u0001\u0000\u0000\u0000\u02ab\u02ad"+ + "\u0001\u0000\u0000\u0000\u02ac\u02ae\u0005\"\u0000\u0000\u02ad\u02ac\u0001"+ + 
"\u0000\u0000\u0000\u02ad\u02ae\u0001\u0000\u0000\u0000\u02ae\u02b0\u0001"+ + "\u0000\u0000\u0000\u02af\u0292\u0001\u0000\u0000\u0000\u02af\u029b\u0001"+ + "\u0000\u0000\u0000\u02b0T\u0001\u0000\u0000\u0000\u02b1\u02b3\u0003?\u0018"+ + "\u0000\u02b2\u02b1\u0001\u0000\u0000\u0000\u02b3\u02b4\u0001\u0000\u0000"+ + "\u0000\u02b4\u02b2\u0001\u0000\u0000\u0000\u02b4\u02b5\u0001\u0000\u0000"+ + "\u0000\u02b5V\u0001\u0000\u0000\u0000\u02b6\u02b8\u0003?\u0018\u0000\u02b7"+ + "\u02b6\u0001\u0000\u0000\u0000\u02b8\u02b9\u0001\u0000\u0000\u0000\u02b9"+ + "\u02b7\u0001\u0000\u0000\u0000\u02b9\u02ba\u0001\u0000\u0000\u0000\u02ba"+ + "\u02bb\u0001\u0000\u0000\u0000\u02bb\u02bf\u0003g,\u0000\u02bc\u02be\u0003"+ + "?\u0018\u0000\u02bd\u02bc\u0001\u0000\u0000\u0000\u02be\u02c1\u0001\u0000"+ + "\u0000\u0000\u02bf\u02bd\u0001\u0000\u0000\u0000\u02bf\u02c0\u0001\u0000"+ + "\u0000\u0000\u02c0\u02e1\u0001\u0000\u0000\u0000\u02c1\u02bf\u0001\u0000"+ + "\u0000\u0000\u02c2\u02c4\u0003g,\u0000\u02c3\u02c5\u0003?\u0018\u0000"+ + "\u02c4\u02c3\u0001\u0000\u0000\u0000\u02c5\u02c6\u0001\u0000\u0000\u0000"+ + "\u02c6\u02c4\u0001\u0000\u0000\u0000\u02c6\u02c7\u0001\u0000\u0000\u0000"+ + "\u02c7\u02e1\u0001\u0000\u0000\u0000\u02c8\u02ca\u0003?\u0018\u0000\u02c9"+ + "\u02c8\u0001\u0000\u0000\u0000\u02ca\u02cb\u0001\u0000\u0000\u0000\u02cb"+ + "\u02c9\u0001\u0000\u0000\u0000\u02cb\u02cc\u0001\u0000\u0000\u0000\u02cc"+ + "\u02d4\u0001\u0000\u0000\u0000\u02cd\u02d1\u0003g,\u0000\u02ce\u02d0\u0003"+ + "?\u0018\u0000\u02cf\u02ce\u0001\u0000\u0000\u0000\u02d0\u02d3\u0001\u0000"+ + "\u0000\u0000\u02d1\u02cf\u0001\u0000\u0000\u0000\u02d1\u02d2\u0001\u0000"+ + "\u0000\u0000\u02d2\u02d5\u0001\u0000\u0000\u0000\u02d3\u02d1\u0001\u0000"+ + "\u0000\u0000\u02d4\u02cd\u0001\u0000\u0000\u0000\u02d4\u02d5\u0001\u0000"+ + "\u0000\u0000\u02d5\u02d6\u0001\u0000\u0000\u0000\u02d6\u02d7\u0003G\u001c"+ + "\u0000\u02d7\u02e1\u0001\u0000\u0000\u0000\u02d8\u02da\u0003g,\u0000\u02d9"+ + 
"\u02db\u0003?\u0018\u0000\u02da\u02d9\u0001\u0000\u0000\u0000\u02db\u02dc"+ + "\u0001\u0000\u0000\u0000\u02dc\u02da\u0001\u0000\u0000\u0000\u02dc\u02dd"+ + "\u0001\u0000\u0000\u0000\u02dd\u02de\u0001\u0000\u0000\u0000\u02de\u02df"+ + "\u0003G\u001c\u0000\u02df\u02e1\u0001\u0000\u0000\u0000\u02e0\u02b7\u0001"+ + "\u0000\u0000\u0000\u02e0\u02c2\u0001\u0000\u0000\u0000\u02e0\u02c9\u0001"+ + "\u0000\u0000\u0000\u02e0\u02d8\u0001\u0000\u0000\u0000\u02e1X\u0001\u0000"+ + "\u0000\u0000\u02e2\u02e3\u0007\u001e\u0000\u0000\u02e3\u02e4\u0007\u001f"+ + "\u0000\u0000\u02e4Z\u0001\u0000\u0000\u0000\u02e5\u02e6\u0007\f\u0000"+ + "\u0000\u02e6\u02e7\u0007\t\u0000\u0000\u02e7\u02e8\u0007\u0000\u0000\u0000"+ + "\u02e8\\\u0001\u0000\u0000\u0000\u02e9\u02ea\u0007\f\u0000\u0000\u02ea"+ + "\u02eb\u0007\u0002\u0000\u0000\u02eb\u02ec\u0007\u0004\u0000\u0000\u02ec"+ + "^\u0001\u0000\u0000\u0000\u02ed\u02ee\u0005=\u0000\u0000\u02ee`\u0001"+ + "\u0000\u0000\u0000\u02ef\u02f0\u0005:\u0000\u0000\u02f0\u02f1\u0005:\u0000"+ + "\u0000\u02f1b\u0001\u0000\u0000\u0000\u02f2\u02f3\u0005,\u0000\u0000\u02f3"+ + "d\u0001\u0000\u0000\u0000\u02f4\u02f5\u0007\u0000\u0000\u0000\u02f5\u02f6"+ + "\u0007\u0003\u0000\u0000\u02f6\u02f7\u0007\u0002\u0000\u0000\u02f7\u02f8"+ + "\u0007\u0004\u0000\u0000\u02f8f\u0001\u0000\u0000\u0000\u02f9\u02fa\u0005"+ + ".\u0000\u0000\u02fah\u0001\u0000\u0000\u0000\u02fb\u02fc\u0007\u000f\u0000"+ + "\u0000\u02fc\u02fd\u0007\f\u0000\u0000\u02fd\u02fe\u0007\r\u0000\u0000"+ + "\u02fe\u02ff\u0007\u0002\u0000\u0000\u02ff\u0300\u0007\u0003\u0000\u0000"+ + "\u0300j\u0001\u0000\u0000\u0000\u0301\u0302\u0007\u000f\u0000\u0000\u0302"+ + "\u0303\u0007\u0001\u0000\u0000\u0303\u0304\u0007\u0006\u0000\u0000\u0304"+ + "\u0305\u0007\u0002\u0000\u0000\u0305\u0306\u0007\u0005\u0000\u0000\u0306"+ + "l\u0001\u0000\u0000\u0000\u0307\u0308\u0007\u0001\u0000\u0000\u0308\u0309"+ + "\u0007\t\u0000\u0000\u0309n\u0001\u0000\u0000\u0000\u030a\u030b\u0007"+ + 
"\u0001\u0000\u0000\u030b\u030c\u0007\u0002\u0000\u0000\u030cp\u0001\u0000"+ + "\u0000\u0000\u030d\u030e\u0007\r\u0000\u0000\u030e\u030f\u0007\f\u0000"+ + "\u0000\u030f\u0310\u0007\u0002\u0000\u0000\u0310\u0311\u0007\u0005\u0000"+ + "\u0000\u0311r\u0001\u0000\u0000\u0000\u0312\u0313\u0007\r\u0000\u0000"+ + "\u0313\u0314\u0007\u0001\u0000\u0000\u0314\u0315\u0007\u0012\u0000\u0000"+ + "\u0315\u0316\u0007\u0003\u0000\u0000\u0316t\u0001\u0000\u0000\u0000\u0317"+ + "\u0318\u0005(\u0000\u0000\u0318v\u0001\u0000\u0000\u0000\u0319\u031a\u0007"+ + "\t\u0000\u0000\u031a\u031b\u0007\u0007\u0000\u0000\u031b\u031c\u0007\u0005"+ + "\u0000\u0000\u031cx\u0001\u0000\u0000\u0000\u031d\u031e\u0007\t\u0000"+ + "\u0000\u031e\u031f\u0007\u0014\u0000\u0000\u031f\u0320\u0007\r\u0000\u0000"+ + "\u0320\u0321\u0007\r\u0000\u0000\u0321z\u0001\u0000\u0000\u0000\u0322"+ + "\u0323\u0007\t\u0000\u0000\u0323\u0324\u0007\u0014\u0000\u0000\u0324\u0325"+ + "\u0007\r\u0000\u0000\u0325\u0326\u0007\r\u0000\u0000\u0326\u0327\u0007"+ + "\u0002\u0000\u0000\u0327|\u0001\u0000\u0000\u0000\u0328\u0329\u0007\u0007"+ + "\u0000\u0000\u0329\u032a\u0007\u0006\u0000\u0000\u032a~\u0001\u0000\u0000"+ + "\u0000\u032b\u032c\u0005?\u0000\u0000\u032c\u0080\u0001\u0000\u0000\u0000"+ + "\u032d\u032e\u0007\u0006\u0000\u0000\u032e\u032f\u0007\r\u0000\u0000\u032f"+ + "\u0330\u0007\u0001\u0000\u0000\u0330\u0331\u0007\u0012\u0000\u0000\u0331"+ + "\u0332\u0007\u0003\u0000\u0000\u0332\u0082\u0001\u0000\u0000\u0000\u0333"+ + "\u0334\u0005)\u0000\u0000\u0334\u0084\u0001\u0000\u0000\u0000\u0335\u0336"+ + "\u0007\u0005\u0000\u0000\u0336\u0337\u0007\u0006\u0000\u0000\u0337\u0338"+ + "\u0007\u0014\u0000\u0000\u0338\u0339\u0007\u0003\u0000\u0000\u0339\u0086"+ + "\u0001\u0000\u0000\u0000\u033a\u033b\u0005=\u0000\u0000\u033b\u033c\u0005"+ + "=\u0000\u0000\u033c\u0088\u0001\u0000\u0000\u0000\u033d\u033e\u0005=\u0000"+ + "\u0000\u033e\u033f\u0005~\u0000\u0000\u033f\u008a\u0001\u0000\u0000\u0000"+ + 
"\u0340\u0341\u0005!\u0000\u0000\u0341\u0342\u0005=\u0000\u0000\u0342\u008c"+ + "\u0001\u0000\u0000\u0000\u0343\u0344\u0005<\u0000\u0000\u0344\u008e\u0001"+ + "\u0000\u0000\u0000\u0345\u0346\u0005<\u0000\u0000\u0346\u0347\u0005=\u0000"+ + "\u0000\u0347\u0090\u0001\u0000\u0000\u0000\u0348\u0349\u0005>\u0000\u0000"+ + "\u0349\u0092\u0001\u0000\u0000\u0000\u034a\u034b\u0005>\u0000\u0000\u034b"+ + "\u034c\u0005=\u0000\u0000\u034c\u0094\u0001\u0000\u0000\u0000\u034d\u034e"+ + "\u0005+\u0000\u0000\u034e\u0096\u0001\u0000\u0000\u0000\u034f\u0350\u0005"+ + "-\u0000\u0000\u0350\u0098\u0001\u0000\u0000\u0000\u0351\u0352\u0005*\u0000"+ + "\u0000\u0352\u009a\u0001\u0000\u0000\u0000\u0353\u0354\u0005/\u0000\u0000"+ + "\u0354\u009c\u0001\u0000\u0000\u0000\u0355\u0356\u0005%\u0000\u0000\u0356"+ + "\u009e\u0001\u0000\u0000\u0000\u0357\u0358\u0004H\u0003\u0000\u0358\u0359"+ + "\u0007\u0010\u0000\u0000\u0359\u035a\u0007\f\u0000\u0000\u035a\u035b\u0007"+ + "\u0005\u0000\u0000\u035b\u035c\u0007\u0004\u0000\u0000\u035c\u035d\u0007"+ + "\n\u0000\u0000\u035d\u00a0\u0001\u0000\u0000\u0000\u035e\u0361\u0003\u007f"+ + "8\u0000\u035f\u0362\u0003A\u0019\u0000\u0360\u0362\u0003O \u0000\u0361"+ + "\u035f\u0001\u0000\u0000\u0000\u0361\u0360\u0001\u0000\u0000\u0000\u0362"+ + "\u0366\u0001\u0000\u0000\u0000\u0363\u0365\u0003Q!\u0000\u0364\u0363\u0001"+ + "\u0000\u0000\u0000\u0365\u0368\u0001\u0000\u0000\u0000\u0366\u0364\u0001"+ + "\u0000\u0000\u0000\u0366\u0367\u0001\u0000\u0000\u0000\u0367\u0370\u0001"+ + "\u0000\u0000\u0000\u0368\u0366\u0001\u0000\u0000\u0000\u0369\u036b\u0003"+ + "\u007f8\u0000\u036a\u036c\u0003?\u0018\u0000\u036b\u036a\u0001\u0000\u0000"+ + "\u0000\u036c\u036d\u0001\u0000\u0000\u0000\u036d\u036b\u0001\u0000\u0000"+ + "\u0000\u036d\u036e\u0001\u0000\u0000\u0000\u036e\u0370\u0001\u0000\u0000"+ + "\u0000\u036f\u035e\u0001\u0000\u0000\u0000\u036f\u0369\u0001\u0000\u0000"+ + "\u0000\u0370\u00a2\u0001\u0000\u0000\u0000\u0371\u0372\u0005[\u0000\u0000"+ + 
"\u0372\u0373\u0001\u0000\u0000\u0000\u0373\u0374\u0006J\u0000\u0000\u0374"+ + "\u0375\u0006J\u0000\u0000\u0375\u00a4\u0001\u0000\u0000\u0000\u0376\u0377"+ + "\u0005]\u0000\u0000\u0377\u0378\u0001\u0000\u0000\u0000\u0378\u0379\u0006"+ + "K\u000b\u0000\u0379\u037a\u0006K\u000b\u0000\u037a\u00a6\u0001\u0000\u0000"+ + "\u0000\u037b\u037f\u0003A\u0019\u0000\u037c\u037e\u0003Q!\u0000\u037d"+ + "\u037c\u0001\u0000\u0000\u0000\u037e\u0381\u0001\u0000\u0000\u0000\u037f"+ + "\u037d\u0001\u0000\u0000\u0000\u037f\u0380\u0001\u0000\u0000\u0000\u0380"+ + "\u038c\u0001\u0000\u0000\u0000\u0381\u037f\u0001\u0000\u0000\u0000\u0382"+ + "\u0385\u0003O \u0000\u0383\u0385\u0003I\u001d\u0000\u0384\u0382\u0001"+ + "\u0000\u0000\u0000\u0384\u0383\u0001\u0000\u0000\u0000\u0385\u0387\u0001"+ + "\u0000\u0000\u0000\u0386\u0388\u0003Q!\u0000\u0387\u0386\u0001\u0000\u0000"+ + "\u0000\u0388\u0389\u0001\u0000\u0000\u0000\u0389\u0387\u0001\u0000\u0000"+ + "\u0000\u0389\u038a\u0001\u0000\u0000\u0000\u038a\u038c\u0001\u0000\u0000"+ + "\u0000\u038b\u037b\u0001\u0000\u0000\u0000\u038b\u0384\u0001\u0000\u0000"+ + "\u0000\u038c\u00a8\u0001\u0000\u0000\u0000\u038d\u038f\u0003K\u001e\u0000"+ + "\u038e\u0390\u0003M\u001f\u0000\u038f\u038e\u0001\u0000\u0000\u0000\u0390"+ + "\u0391\u0001\u0000\u0000\u0000\u0391\u038f\u0001\u0000\u0000\u0000\u0391"+ + "\u0392\u0001\u0000\u0000\u0000\u0392\u0393\u0001\u0000\u0000\u0000\u0393"+ + "\u0394\u0003K\u001e\u0000\u0394\u00aa\u0001\u0000\u0000\u0000\u0395\u0396"+ + "\u0003\u00a9M\u0000\u0396\u00ac\u0001\u0000\u0000\u0000\u0397\u0398\u0003"+ + "7\u0014\u0000\u0398\u0399\u0001\u0000\u0000\u0000\u0399\u039a\u0006O\n"+ + "\u0000\u039a\u00ae\u0001\u0000\u0000\u0000\u039b\u039c\u00039\u0015\u0000"+ + "\u039c\u039d\u0001\u0000\u0000\u0000\u039d\u039e\u0006P\n\u0000\u039e"+ + "\u00b0\u0001\u0000\u0000\u0000\u039f\u03a0\u0003;\u0016\u0000\u03a0\u03a1"+ + "\u0001\u0000\u0000\u0000\u03a1\u03a2\u0006Q\n\u0000\u03a2\u00b2\u0001"+ + 
"\u0000\u0000\u0000\u03a3\u03a4\u0003\u00a3J\u0000\u03a4\u03a5\u0001\u0000"+ + "\u0000\u0000\u03a5\u03a6\u0006R\f\u0000\u03a6\u03a7\u0006R\r\u0000\u03a7"+ + "\u00b4\u0001\u0000\u0000\u0000\u03a8\u03a9\u0003=\u0017\u0000\u03a9\u03aa"+ + "\u0001\u0000\u0000\u0000\u03aa\u03ab\u0006S\u000e\u0000\u03ab\u03ac\u0006"+ + "S\u000b\u0000\u03ac\u00b6\u0001\u0000\u0000\u0000\u03ad\u03ae\u0003;\u0016"+ + "\u0000\u03ae\u03af\u0001\u0000\u0000\u0000\u03af\u03b0\u0006T\n\u0000"+ + "\u03b0\u00b8\u0001\u0000\u0000\u0000\u03b1\u03b2\u00037\u0014\u0000\u03b2"+ + "\u03b3\u0001\u0000\u0000\u0000\u03b3\u03b4\u0006U\n\u0000\u03b4\u00ba"+ + "\u0001\u0000\u0000\u0000\u03b5\u03b6\u00039\u0015\u0000\u03b6\u03b7\u0001"+ + "\u0000\u0000\u0000\u03b7\u03b8\u0006V\n\u0000\u03b8\u00bc\u0001\u0000"+ + "\u0000\u0000\u03b9\u03ba\u0003=\u0017\u0000\u03ba\u03bb\u0001\u0000\u0000"+ + "\u0000\u03bb\u03bc\u0006W\u000e\u0000\u03bc\u03bd\u0006W\u000b\u0000\u03bd"+ + "\u00be\u0001\u0000\u0000\u0000\u03be\u03bf\u0003\u00a3J\u0000\u03bf\u03c0"+ + "\u0001\u0000\u0000\u0000\u03c0\u03c1\u0006X\f\u0000\u03c1\u00c0\u0001"+ + "\u0000\u0000\u0000\u03c2\u03c3\u0003\u00a5K\u0000\u03c3\u03c4\u0001\u0000"+ + "\u0000\u0000\u03c4\u03c5\u0006Y\u000f\u0000\u03c5\u00c2\u0001\u0000\u0000"+ + "\u0000\u03c6\u03c7\u0003\u014f\u00a0\u0000\u03c7\u03c8\u0001\u0000\u0000"+ + "\u0000\u03c8\u03c9\u0006Z\u0010\u0000\u03c9\u00c4\u0001\u0000\u0000\u0000"+ + "\u03ca\u03cb\u0003c*\u0000\u03cb\u03cc\u0001\u0000\u0000\u0000\u03cc\u03cd"+ + "\u0006[\u0011\u0000\u03cd\u00c6\u0001\u0000\u0000\u0000\u03ce\u03cf\u0003"+ + "_(\u0000\u03cf\u03d0\u0001\u0000\u0000\u0000\u03d0\u03d1\u0006\\\u0012"+ + "\u0000\u03d1\u00c8\u0001\u0000\u0000\u0000\u03d2\u03d3\u0007\u0010\u0000"+ + "\u0000\u03d3\u03d4\u0007\u0003\u0000\u0000\u03d4\u03d5\u0007\u0005\u0000"+ + "\u0000\u03d5\u03d6\u0007\f\u0000\u0000\u03d6\u03d7\u0007\u0000\u0000\u0000"+ + "\u03d7\u03d8\u0007\f\u0000\u0000\u03d8\u03d9\u0007\u0005\u0000\u0000\u03d9"+ + 
"\u03da\u0007\f\u0000\u0000\u03da\u00ca\u0001\u0000\u0000\u0000\u03db\u03df"+ + "\b \u0000\u0000\u03dc\u03dd\u0005/\u0000\u0000\u03dd\u03df\b!\u0000\u0000"+ + "\u03de\u03db\u0001\u0000\u0000\u0000\u03de\u03dc\u0001\u0000\u0000\u0000"+ + "\u03df\u00cc\u0001\u0000\u0000\u0000\u03e0\u03e2\u0003\u00cb^\u0000\u03e1"+ + "\u03e0\u0001\u0000\u0000\u0000\u03e2\u03e3\u0001\u0000\u0000\u0000\u03e3"+ + "\u03e1\u0001\u0000\u0000\u0000\u03e3\u03e4\u0001\u0000\u0000\u0000\u03e4"+ + "\u00ce\u0001\u0000\u0000\u0000\u03e5\u03e6\u0003\u00cd_\u0000\u03e6\u03e7"+ + "\u0001\u0000\u0000\u0000\u03e7\u03e8\u0006`\u0013\u0000\u03e8\u00d0\u0001"+ + "\u0000\u0000\u0000\u03e9\u03ea\u0003S\"\u0000\u03ea\u03eb\u0001\u0000"+ + "\u0000\u0000\u03eb\u03ec\u0006a\u0014\u0000\u03ec\u00d2\u0001\u0000\u0000"+ + "\u0000\u03ed\u03ee\u00037\u0014\u0000\u03ee\u03ef\u0001\u0000\u0000\u0000"+ + "\u03ef\u03f0\u0006b\n\u0000\u03f0\u00d4\u0001\u0000\u0000\u0000\u03f1"+ + "\u03f2\u00039\u0015\u0000\u03f2\u03f3\u0001\u0000\u0000\u0000\u03f3\u03f4"+ + "\u0006c\n\u0000\u03f4\u00d6\u0001\u0000\u0000\u0000\u03f5\u03f6\u0003"+ + ";\u0016\u0000\u03f6\u03f7\u0001\u0000\u0000\u0000\u03f7\u03f8\u0006d\n"+ + "\u0000\u03f8\u00d8\u0001\u0000\u0000\u0000\u03f9\u03fa\u0003=\u0017\u0000"+ + "\u03fa\u03fb\u0001\u0000\u0000\u0000\u03fb\u03fc\u0006e\u000e\u0000\u03fc"+ + "\u03fd\u0006e\u000b\u0000\u03fd\u00da\u0001\u0000\u0000\u0000\u03fe\u03ff"+ + "\u0003g,\u0000\u03ff\u0400\u0001\u0000\u0000\u0000\u0400\u0401\u0006f"+ + "\u0015\u0000\u0401\u00dc\u0001\u0000\u0000\u0000\u0402\u0403\u0003c*\u0000"+ + "\u0403\u0404\u0001\u0000\u0000\u0000\u0404\u0405\u0006g\u0011\u0000\u0405"+ + "\u00de\u0001\u0000\u0000\u0000\u0406\u0407\u0003\u007f8\u0000\u0407\u0408"+ + "\u0001\u0000\u0000\u0000\u0408\u0409\u0006h\u0016\u0000\u0409\u00e0\u0001"+ + "\u0000\u0000\u0000\u040a\u040b\u0003\u00a1I\u0000\u040b\u040c\u0001\u0000"+ + "\u0000\u0000\u040c\u040d\u0006i\u0017\u0000\u040d\u00e2\u0001\u0000\u0000"+ + 
"\u0000\u040e\u0413\u0003A\u0019\u0000\u040f\u0413\u0003?\u0018\u0000\u0410"+ + "\u0413\u0003O \u0000\u0411\u0413\u0003\u0099E\u0000\u0412\u040e\u0001"+ + "\u0000\u0000\u0000\u0412\u040f\u0001\u0000\u0000\u0000\u0412\u0410\u0001"+ + "\u0000\u0000\u0000\u0412\u0411\u0001\u0000\u0000\u0000\u0413\u00e4\u0001"+ + "\u0000\u0000\u0000\u0414\u0417\u0003A\u0019\u0000\u0415\u0417\u0003\u0099"+ + "E\u0000\u0416\u0414\u0001\u0000\u0000\u0000\u0416\u0415\u0001\u0000\u0000"+ + "\u0000\u0417\u041b\u0001\u0000\u0000\u0000\u0418\u041a\u0003\u00e3j\u0000"+ + "\u0419\u0418\u0001\u0000\u0000\u0000\u041a\u041d\u0001\u0000\u0000\u0000"+ + "\u041b\u0419\u0001\u0000\u0000\u0000\u041b\u041c\u0001\u0000\u0000\u0000"+ + "\u041c\u0428\u0001\u0000\u0000\u0000\u041d\u041b\u0001\u0000\u0000\u0000"+ + "\u041e\u0421\u0003O \u0000\u041f\u0421\u0003I\u001d\u0000\u0420\u041e"+ + "\u0001\u0000\u0000\u0000\u0420\u041f\u0001\u0000\u0000\u0000\u0421\u0423"+ + "\u0001\u0000\u0000\u0000\u0422\u0424\u0003\u00e3j\u0000\u0423\u0422\u0001"+ + "\u0000\u0000\u0000\u0424\u0425\u0001\u0000\u0000\u0000\u0425\u0423\u0001"+ + "\u0000\u0000\u0000\u0425\u0426\u0001\u0000\u0000\u0000\u0426\u0428\u0001"+ + "\u0000\u0000\u0000\u0427\u0416\u0001\u0000\u0000\u0000\u0427\u0420\u0001"+ + "\u0000\u0000\u0000\u0428\u00e6\u0001\u0000\u0000\u0000\u0429\u042c\u0003"+ + "\u00e5k\u0000\u042a\u042c\u0003\u00a9M\u0000\u042b\u0429\u0001\u0000\u0000"+ + "\u0000\u042b\u042a\u0001\u0000\u0000\u0000\u042c\u042d\u0001\u0000\u0000"+ + "\u0000\u042d\u042b\u0001\u0000\u0000\u0000\u042d\u042e\u0001\u0000\u0000"+ + "\u0000\u042e\u00e8\u0001\u0000\u0000\u0000\u042f\u0430\u00037\u0014\u0000"+ + "\u0430\u0431\u0001\u0000\u0000\u0000\u0431\u0432\u0006m\n\u0000\u0432"+ + "\u00ea\u0001\u0000\u0000\u0000\u0433\u0434\u00039\u0015\u0000\u0434\u0435"+ + "\u0001\u0000\u0000\u0000\u0435\u0436\u0006n\n\u0000\u0436\u00ec\u0001"+ + "\u0000\u0000\u0000\u0437\u0438\u0003;\u0016\u0000\u0438\u0439\u0001\u0000"+ + 
"\u0000\u0000\u0439\u043a\u0006o\n\u0000\u043a\u00ee\u0001\u0000\u0000"+ + "\u0000\u043b\u043c\u0003=\u0017\u0000\u043c\u043d\u0001\u0000\u0000\u0000"+ + "\u043d\u043e\u0006p\u000e\u0000\u043e\u043f\u0006p\u000b\u0000\u043f\u00f0"+ + "\u0001\u0000\u0000\u0000\u0440\u0441\u0003_(\u0000\u0441\u0442\u0001\u0000"+ + "\u0000\u0000\u0442\u0443\u0006q\u0012\u0000\u0443\u00f2\u0001\u0000\u0000"+ + "\u0000\u0444\u0445\u0003c*\u0000\u0445\u0446\u0001\u0000\u0000\u0000\u0446"+ + "\u0447\u0006r\u0011\u0000\u0447\u00f4\u0001\u0000\u0000\u0000\u0448\u0449"+ + "\u0003g,\u0000\u0449\u044a\u0001\u0000\u0000\u0000\u044a\u044b\u0006s"+ + "\u0015\u0000\u044b\u00f6\u0001\u0000\u0000\u0000\u044c\u044d\u0003\u007f"+ + "8\u0000\u044d\u044e\u0001\u0000\u0000\u0000\u044e\u044f\u0006t\u0016\u0000"+ + "\u044f\u00f8\u0001\u0000\u0000\u0000\u0450\u0451\u0003\u00a1I\u0000\u0451"+ + "\u0452\u0001\u0000\u0000\u0000\u0452\u0453\u0006u\u0017\u0000\u0453\u00fa"+ + "\u0001\u0000\u0000\u0000\u0454\u0455\u0007\f\u0000\u0000\u0455\u0456\u0007"+ + "\u0002\u0000\u0000\u0456\u00fc\u0001\u0000\u0000\u0000\u0457\u0458\u0003"+ + "\u00e7l\u0000\u0458\u0459\u0001\u0000\u0000\u0000\u0459\u045a\u0006w\u0018"+ + "\u0000\u045a\u00fe\u0001\u0000\u0000\u0000\u045b\u045c\u00037\u0014\u0000"+ + "\u045c\u045d\u0001\u0000\u0000\u0000\u045d\u045e\u0006x\n\u0000\u045e"+ + "\u0100\u0001\u0000\u0000\u0000\u045f\u0460\u00039\u0015\u0000\u0460\u0461"+ + "\u0001\u0000\u0000\u0000\u0461\u0462\u0006y\n\u0000\u0462\u0102\u0001"+ + "\u0000\u0000\u0000\u0463\u0464\u0003;\u0016\u0000\u0464\u0465\u0001\u0000"+ + "\u0000\u0000\u0465\u0466\u0006z\n\u0000\u0466\u0104\u0001\u0000\u0000"+ + "\u0000\u0467\u0468\u0003=\u0017\u0000\u0468\u0469\u0001\u0000\u0000\u0000"+ + "\u0469\u046a\u0006{\u000e\u0000\u046a\u046b\u0006{\u000b\u0000\u046b\u0106"+ + "\u0001\u0000\u0000\u0000\u046c\u046d\u0003\u00a3J\u0000\u046d\u046e\u0001"+ + "\u0000\u0000\u0000\u046e\u046f\u0006|\f\u0000\u046f\u0470\u0006|\u0019"+ + 
"\u0000\u0470\u0108\u0001\u0000\u0000\u0000\u0471\u0472\u0007\u0007\u0000"+ + "\u0000\u0472\u0473\u0007\t\u0000\u0000\u0473\u0474\u0001\u0000\u0000\u0000"+ + "\u0474\u0475\u0006}\u001a\u0000\u0475\u010a\u0001\u0000\u0000\u0000\u0476"+ + "\u0477\u0007\u0013\u0000\u0000\u0477\u0478\u0007\u0001\u0000\u0000\u0478"+ + "\u0479\u0007\u0005\u0000\u0000\u0479\u047a\u0007\n\u0000\u0000\u047a\u047b"+ + "\u0001\u0000\u0000\u0000\u047b\u047c\u0006~\u001a\u0000\u047c\u010c\u0001"+ + "\u0000\u0000\u0000\u047d\u047e\b\"\u0000\u0000\u047e\u010e\u0001\u0000"+ + "\u0000\u0000\u047f\u0481\u0003\u010d\u007f\u0000\u0480\u047f\u0001\u0000"+ + "\u0000\u0000\u0481\u0482\u0001\u0000\u0000\u0000\u0482\u0480\u0001\u0000"+ + "\u0000\u0000\u0482\u0483\u0001\u0000\u0000\u0000\u0483\u0484\u0001\u0000"+ + "\u0000\u0000\u0484\u0485\u0003\u014f\u00a0\u0000\u0485\u0487\u0001\u0000"+ + "\u0000\u0000\u0486\u0480\u0001\u0000\u0000\u0000\u0486\u0487\u0001\u0000"+ + "\u0000\u0000\u0487\u0489\u0001\u0000\u0000\u0000\u0488\u048a\u0003\u010d"+ + "\u007f\u0000\u0489\u0488\u0001\u0000\u0000\u0000\u048a\u048b\u0001\u0000"+ + "\u0000\u0000\u048b\u0489\u0001\u0000\u0000\u0000\u048b\u048c\u0001\u0000"+ + "\u0000\u0000\u048c\u0110\u0001\u0000\u0000\u0000\u048d\u048e\u0003\u010f"+ + "\u0080\u0000\u048e\u048f\u0001\u0000\u0000\u0000\u048f\u0490\u0006\u0081"+ + "\u001b\u0000\u0490\u0112\u0001\u0000\u0000\u0000\u0491\u0492\u00037\u0014"+ + "\u0000\u0492\u0493\u0001\u0000\u0000\u0000\u0493\u0494\u0006\u0082\n\u0000"+ + "\u0494\u0114\u0001\u0000\u0000\u0000\u0495\u0496\u00039\u0015\u0000\u0496"+ + "\u0497\u0001\u0000\u0000\u0000\u0497\u0498\u0006\u0083\n\u0000\u0498\u0116"+ + "\u0001\u0000\u0000\u0000\u0499\u049a\u0003;\u0016\u0000\u049a\u049b\u0001"+ + "\u0000\u0000\u0000\u049b\u049c\u0006\u0084\n\u0000\u049c\u0118\u0001\u0000"+ + "\u0000\u0000\u049d\u049e\u0003=\u0017\u0000\u049e\u049f\u0001\u0000\u0000"+ + "\u0000\u049f\u04a0\u0006\u0085\u000e\u0000\u04a0\u04a1\u0006\u0085\u000b"+ + 
"\u0000\u04a1\u04a2\u0006\u0085\u000b\u0000\u04a2\u011a\u0001\u0000\u0000"+ + "\u0000\u04a3\u04a4\u0003_(\u0000\u04a4\u04a5\u0001\u0000\u0000\u0000\u04a5"+ + "\u04a6\u0006\u0086\u0012\u0000\u04a6\u011c\u0001\u0000\u0000\u0000\u04a7"+ + "\u04a8\u0003c*\u0000\u04a8\u04a9\u0001\u0000\u0000\u0000\u04a9\u04aa\u0006"+ + "\u0087\u0011\u0000\u04aa\u011e\u0001\u0000\u0000\u0000\u04ab\u04ac\u0003"+ + "g,\u0000\u04ac\u04ad\u0001\u0000\u0000\u0000\u04ad\u04ae\u0006\u0088\u0015"+ + "\u0000\u04ae\u0120\u0001\u0000\u0000\u0000\u04af\u04b0\u0003\u010b~\u0000"+ + "\u04b0\u04b1\u0001\u0000\u0000\u0000\u04b1\u04b2\u0006\u0089\u001c\u0000"+ + "\u04b2\u0122\u0001\u0000\u0000\u0000\u04b3\u04b4\u0003\u00e7l\u0000\u04b4"+ + "\u04b5\u0001\u0000\u0000\u0000\u04b5\u04b6\u0006\u008a\u0018\u0000\u04b6"+ + "\u0124\u0001\u0000\u0000\u0000\u04b7\u04b8\u0003\u00abN\u0000\u04b8\u04b9"+ + "\u0001\u0000\u0000\u0000\u04b9\u04ba\u0006\u008b\u001d\u0000\u04ba\u0126"+ + "\u0001\u0000\u0000\u0000\u04bb\u04bc\u0003\u007f8\u0000\u04bc\u04bd\u0001"+ + "\u0000\u0000\u0000\u04bd\u04be\u0006\u008c\u0016\u0000\u04be\u0128\u0001"+ + "\u0000\u0000\u0000\u04bf\u04c0\u0003\u00a1I\u0000\u04c0\u04c1\u0001\u0000"+ + "\u0000\u0000\u04c1\u04c2\u0006\u008d\u0017\u0000\u04c2\u012a\u0001\u0000"+ + "\u0000\u0000\u04c3\u04c4\u00037\u0014\u0000\u04c4\u04c5\u0001\u0000\u0000"+ + "\u0000\u04c5\u04c6\u0006\u008e\n\u0000\u04c6\u012c\u0001\u0000\u0000\u0000"+ + "\u04c7\u04c8\u00039\u0015\u0000\u04c8\u04c9\u0001\u0000\u0000\u0000\u04c9"+ + "\u04ca\u0006\u008f\n\u0000\u04ca\u012e\u0001\u0000\u0000\u0000\u04cb\u04cc"+ + "\u0003;\u0016\u0000\u04cc\u04cd\u0001\u0000\u0000\u0000\u04cd\u04ce\u0006"+ + "\u0090\n\u0000\u04ce\u0130\u0001\u0000\u0000\u0000\u04cf\u04d0\u0003="+ + "\u0017\u0000\u04d0\u04d1\u0001\u0000\u0000\u0000\u04d1\u04d2\u0006\u0091"+ + "\u000e\u0000\u04d2\u04d3\u0006\u0091\u000b\u0000\u04d3\u0132\u0001\u0000"+ + "\u0000\u0000\u04d4\u04d5\u0003g,\u0000\u04d5\u04d6\u0001\u0000\u0000\u0000"+ + 
"\u04d6\u04d7\u0006\u0092\u0015\u0000\u04d7\u0134\u0001\u0000\u0000\u0000"+ + "\u04d8\u04d9\u0003\u007f8\u0000\u04d9\u04da\u0001\u0000\u0000\u0000\u04da"+ + "\u04db\u0006\u0093\u0016\u0000\u04db\u0136\u0001\u0000\u0000\u0000\u04dc"+ + "\u04dd\u0003\u00a1I\u0000\u04dd\u04de\u0001\u0000\u0000\u0000\u04de\u04df"+ + "\u0006\u0094\u0017\u0000\u04df\u0138\u0001\u0000\u0000\u0000\u04e0\u04e1"+ + "\u0003\u00abN\u0000\u04e1\u04e2\u0001\u0000\u0000\u0000\u04e2\u04e3\u0006"+ + "\u0095\u001d\u0000\u04e3\u013a\u0001\u0000\u0000\u0000\u04e4\u04e5\u0003"+ + "\u00a7L\u0000\u04e5\u04e6\u0001\u0000\u0000\u0000\u04e6\u04e7\u0006\u0096"+ + "\u001e\u0000\u04e7\u013c\u0001\u0000\u0000\u0000\u04e8\u04e9\u00037\u0014"+ + "\u0000\u04e9\u04ea\u0001\u0000\u0000\u0000\u04ea\u04eb\u0006\u0097\n\u0000"+ + "\u04eb\u013e\u0001\u0000\u0000\u0000\u04ec\u04ed\u00039\u0015\u0000\u04ed"+ + "\u04ee\u0001\u0000\u0000\u0000\u04ee\u04ef\u0006\u0098\n\u0000\u04ef\u0140"+ + "\u0001\u0000\u0000\u0000\u04f0\u04f1\u0003;\u0016\u0000\u04f1\u04f2\u0001"+ + "\u0000\u0000\u0000\u04f2\u04f3\u0006\u0099\n\u0000\u04f3\u0142\u0001\u0000"+ + "\u0000\u0000\u04f4\u04f5\u0003=\u0017\u0000\u04f5\u04f6\u0001\u0000\u0000"+ + "\u0000\u04f6\u04f7\u0006\u009a\u000e\u0000\u04f7\u04f8\u0006\u009a\u000b"+ + "\u0000\u04f8\u0144\u0001\u0000\u0000\u0000\u04f9\u04fa\u0007\u0001\u0000"+ + "\u0000\u04fa\u04fb\u0007\t\u0000\u0000\u04fb\u04fc\u0007\u000f\u0000\u0000"+ + "\u04fc\u04fd\u0007\u0007\u0000\u0000\u04fd\u0146\u0001\u0000\u0000\u0000"+ + "\u04fe\u04ff\u00037\u0014\u0000\u04ff\u0500\u0001\u0000\u0000\u0000\u0500"+ + "\u0501\u0006\u009c\n\u0000\u0501\u0148\u0001\u0000\u0000\u0000\u0502\u0503"+ + "\u00039\u0015\u0000\u0503\u0504\u0001\u0000\u0000\u0000\u0504\u0505\u0006"+ + "\u009d\n\u0000\u0505\u014a\u0001\u0000\u0000\u0000\u0506\u0507\u0003;"+ + "\u0016\u0000\u0507\u0508\u0001\u0000\u0000\u0000\u0508\u0509\u0006\u009e"+ + "\n\u0000\u0509\u014c\u0001\u0000\u0000\u0000\u050a\u050b\u0003\u00a5K"+ + 
"\u0000\u050b\u050c\u0001\u0000\u0000\u0000\u050c\u050d\u0006\u009f\u000f"+ + "\u0000\u050d\u050e\u0006\u009f\u000b\u0000\u050e\u014e\u0001\u0000\u0000"+ + "\u0000\u050f\u0510\u0005:\u0000\u0000\u0510\u0150\u0001\u0000\u0000\u0000"+ + "\u0511\u0517\u0003I\u001d\u0000\u0512\u0517\u0003?\u0018\u0000\u0513\u0517"+ + "\u0003g,\u0000\u0514\u0517\u0003A\u0019\u0000\u0515\u0517\u0003O \u0000"+ + "\u0516\u0511\u0001\u0000\u0000\u0000\u0516\u0512\u0001\u0000\u0000\u0000"+ + "\u0516\u0513\u0001\u0000\u0000\u0000\u0516\u0514\u0001\u0000\u0000\u0000"+ + "\u0516\u0515\u0001\u0000\u0000\u0000\u0517\u0518\u0001\u0000\u0000\u0000"+ + "\u0518\u0516\u0001\u0000\u0000\u0000\u0518\u0519\u0001\u0000\u0000\u0000"+ + "\u0519\u0152\u0001\u0000\u0000\u0000\u051a\u051b\u00037\u0014\u0000\u051b"+ + "\u051c\u0001\u0000\u0000\u0000\u051c\u051d\u0006\u00a2\n\u0000\u051d\u0154"+ + "\u0001\u0000\u0000\u0000\u051e\u051f\u00039\u0015\u0000\u051f\u0520\u0001"+ + "\u0000\u0000\u0000\u0520\u0521\u0006\u00a3\n\u0000\u0521\u0156\u0001\u0000"+ + "\u0000\u0000\u0522\u0523\u0003;\u0016\u0000\u0523\u0524\u0001\u0000\u0000"+ + "\u0000\u0524\u0525\u0006\u00a4\n\u0000\u0525\u0158\u0001\u0000\u0000\u0000"+ + "\u0526\u0527\u0003=\u0017\u0000\u0527\u0528\u0001\u0000\u0000\u0000\u0528"+ + "\u0529\u0006\u00a5\u000e\u0000\u0529\u052a\u0006\u00a5\u000b\u0000\u052a"+ + "\u015a\u0001\u0000\u0000\u0000\u052b\u052c\u0003\u014f\u00a0\u0000\u052c"+ + "\u052d\u0001\u0000\u0000\u0000\u052d\u052e\u0006\u00a6\u0010\u0000\u052e"+ + "\u015c\u0001\u0000\u0000\u0000\u052f\u0530\u0003c*\u0000\u0530\u0531\u0001"+ + "\u0000\u0000\u0000\u0531\u0532\u0006\u00a7\u0011\u0000\u0532\u015e\u0001"+ + "\u0000\u0000\u0000\u0533\u0534\u0003g,\u0000\u0534\u0535\u0001\u0000\u0000"+ + "\u0000\u0535\u0536\u0006\u00a8\u0015\u0000\u0536\u0160\u0001\u0000\u0000"+ + "\u0000\u0537\u0538\u0003\u0109}\u0000\u0538\u0539\u0001\u0000\u0000\u0000"+ + "\u0539\u053a\u0006\u00a9\u001f\u0000\u053a\u053b\u0006\u00a9 \u0000\u053b"+ + 
"\u0162\u0001\u0000\u0000\u0000\u053c\u053d\u0003\u00cd_\u0000\u053d\u053e"+ + "\u0001\u0000\u0000\u0000\u053e\u053f\u0006\u00aa\u0013\u0000\u053f\u0164"+ + "\u0001\u0000\u0000\u0000\u0540\u0541\u0003S\"\u0000\u0541\u0542\u0001"+ + "\u0000\u0000\u0000\u0542\u0543\u0006\u00ab\u0014\u0000\u0543\u0166\u0001"+ + "\u0000\u0000\u0000\u0544\u0545\u00037\u0014\u0000\u0545\u0546\u0001\u0000"+ + "\u0000\u0000\u0546\u0547\u0006\u00ac\n\u0000\u0547\u0168\u0001\u0000\u0000"+ + "\u0000\u0548\u0549\u00039\u0015\u0000\u0549\u054a\u0001\u0000\u0000\u0000"+ + "\u054a\u054b\u0006\u00ad\n\u0000\u054b\u016a\u0001\u0000\u0000\u0000\u054c"+ + "\u054d\u0003;\u0016\u0000\u054d\u054e\u0001\u0000\u0000\u0000\u054e\u054f"+ + "\u0006\u00ae\n\u0000\u054f\u016c\u0001\u0000\u0000\u0000\u0550\u0551\u0003"+ + "=\u0017\u0000\u0551\u0552\u0001\u0000\u0000\u0000\u0552\u0553\u0006\u00af"+ + "\u000e\u0000\u0553\u0554\u0006\u00af\u000b\u0000\u0554\u0555\u0006\u00af"+ + "\u000b\u0000\u0555\u016e\u0001\u0000\u0000\u0000\u0556\u0557\u0003c*\u0000"+ + "\u0557\u0558\u0001\u0000\u0000\u0000\u0558\u0559\u0006\u00b0\u0011\u0000"+ + "\u0559\u0170\u0001\u0000\u0000\u0000\u055a\u055b\u0003g,\u0000\u055b\u055c"+ + "\u0001\u0000\u0000\u0000\u055c\u055d\u0006\u00b1\u0015\u0000\u055d\u0172"+ + "\u0001\u0000\u0000\u0000\u055e\u055f\u0003\u00e7l\u0000\u055f\u0560\u0001"+ + "\u0000\u0000\u0000\u0560\u0561\u0006\u00b2\u0018\u0000\u0561\u0174\u0001"+ + "\u0000\u0000\u0000\u0562\u0563\u00037\u0014\u0000\u0563\u0564\u0001\u0000"+ + "\u0000\u0000\u0564\u0565\u0006\u00b3\n\u0000\u0565\u0176\u0001\u0000\u0000"+ + "\u0000\u0566\u0567\u00039\u0015\u0000\u0567\u0568\u0001\u0000\u0000\u0000"+ + "\u0568\u0569\u0006\u00b4\n\u0000\u0569\u0178\u0001\u0000\u0000\u0000\u056a"+ + "\u056b\u0003;\u0016\u0000\u056b\u056c\u0001\u0000\u0000\u0000\u056c\u056d"+ + "\u0006\u00b5\n\u0000\u056d\u017a\u0001\u0000\u0000\u0000\u056e\u056f\u0003"+ + "=\u0017\u0000\u056f\u0570\u0001\u0000\u0000\u0000\u0570\u0571\u0006\u00b6"+ + 
"\u000e\u0000\u0571\u0572\u0006\u00b6\u000b\u0000\u0572\u017c\u0001\u0000"+ + "\u0000\u0000\u0573\u0574\u0003\u00cd_\u0000\u0574\u0575\u0001\u0000\u0000"+ + "\u0000\u0575\u0576\u0006\u00b7\u0013\u0000\u0576\u0577\u0006\u00b7\u000b"+ + "\u0000\u0577\u0578\u0006\u00b7!\u0000\u0578\u017e\u0001\u0000\u0000\u0000"+ + "\u0579\u057a\u0003S\"\u0000\u057a\u057b\u0001\u0000\u0000\u0000\u057b"+ + "\u057c\u0006\u00b8\u0014\u0000\u057c\u057d\u0006\u00b8\u000b\u0000\u057d"+ + "\u057e\u0006\u00b8!\u0000\u057e\u0180\u0001\u0000\u0000\u0000\u057f\u0580"+ + "\u00037\u0014\u0000\u0580\u0581\u0001\u0000\u0000\u0000\u0581\u0582\u0006"+ + "\u00b9\n\u0000\u0582\u0182\u0001\u0000\u0000\u0000\u0583\u0584\u00039"+ + "\u0015\u0000\u0584\u0585\u0001\u0000\u0000\u0000\u0585\u0586\u0006\u00ba"+ + "\n\u0000\u0586\u0184\u0001\u0000\u0000\u0000\u0587\u0588\u0003;\u0016"+ + "\u0000\u0588\u0589\u0001\u0000\u0000\u0000\u0589\u058a\u0006\u00bb\n\u0000"+ + "\u058a\u0186\u0001\u0000\u0000\u0000\u058b\u058c\u0003\u014f\u00a0\u0000"+ + "\u058c\u058d\u0001\u0000\u0000\u0000\u058d\u058e\u0006\u00bc\u0010\u0000"+ + "\u058e\u058f\u0006\u00bc\u000b\u0000\u058f\u0590\u0006\u00bc\t\u0000\u0590"+ + "\u0188\u0001\u0000\u0000\u0000\u0591\u0592\u0003c*\u0000\u0592\u0593\u0001"+ + "\u0000\u0000\u0000\u0593\u0594\u0006\u00bd\u0011\u0000\u0594\u0595\u0006"+ + "\u00bd\u000b\u0000\u0595\u0596\u0006\u00bd\t\u0000\u0596\u018a\u0001\u0000"+ + "\u0000\u0000\u0597\u0598\u00037\u0014\u0000\u0598\u0599\u0001\u0000\u0000"+ + "\u0000\u0599\u059a\u0006\u00be\n\u0000\u059a\u018c\u0001\u0000\u0000\u0000"+ + "\u059b\u059c\u00039\u0015\u0000\u059c\u059d\u0001\u0000\u0000\u0000\u059d"+ + "\u059e\u0006\u00bf\n\u0000\u059e\u018e\u0001\u0000\u0000\u0000\u059f\u05a0"+ + "\u0003;\u0016\u0000\u05a0\u05a1\u0001\u0000\u0000\u0000\u05a1\u05a2\u0006"+ + "\u00c0\n\u0000\u05a2\u0190\u0001\u0000\u0000\u0000\u05a3\u05a4\u0003\u00ab"+ + "N\u0000\u05a4\u05a5\u0001\u0000\u0000\u0000\u05a5\u05a6\u0006\u00c1\u000b"+ + 
"\u0000\u05a6\u05a7\u0006\u00c1\u0000\u0000\u05a7\u05a8\u0006\u00c1\u001d"+ + "\u0000\u05a8\u0192\u0001\u0000\u0000\u0000\u05a9\u05aa\u0003\u00a7L\u0000"+ + "\u05aa\u05ab\u0001\u0000\u0000\u0000\u05ab\u05ac\u0006\u00c2\u000b\u0000"+ + "\u05ac\u05ad\u0006\u00c2\u0000\u0000\u05ad\u05ae\u0006\u00c2\u001e\u0000"+ + "\u05ae\u0194\u0001\u0000\u0000\u0000\u05af\u05b0\u0003Y%\u0000\u05b0\u05b1"+ + "\u0001\u0000\u0000\u0000\u05b1\u05b2\u0006\u00c3\u000b\u0000\u05b2\u05b3"+ + "\u0006\u00c3\u0000\u0000\u05b3\u05b4\u0006\u00c3\"\u0000\u05b4\u0196\u0001"+ + "\u0000\u0000\u0000\u05b5\u05b6\u0003=\u0017\u0000\u05b6\u05b7\u0001\u0000"+ + "\u0000\u0000\u05b7\u05b8\u0006\u00c4\u000e\u0000\u05b8\u05b9\u0006\u00c4"+ + "\u000b\u0000\u05b9\u0198\u0001\u0000\u0000\u0000A\u0000\u0001\u0002\u0003"+ + "\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u0241\u024b\u024f\u0252"+ + "\u025b\u025d\u0268\u027b\u0280\u0289\u0290\u0295\u0297\u02a2\u02aa\u02ad"+ + "\u02af\u02b4\u02b9\u02bf\u02c6\u02cb\u02d1\u02d4\u02dc\u02e0\u0361\u0366"+ + "\u036d\u036f\u037f\u0384\u0389\u038b\u0391\u03de\u03e3\u0412\u0416\u041b"+ + "\u0420\u0425\u0427\u042b\u042d\u0482\u0486\u048b\u0516\u0518#\u0005\u0001"+ + "\u0000\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0002\u0000\u0005\u0003"+ + "\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0005\u000b\u0000"+ + "\u0005\r\u0000\u0000\u0001\u0000\u0004\u0000\u0000\u0007A\u0000\u0005"+ + "\u0000\u0000\u0007\u0018\u0000\u0007B\u0000\u0007h\u0000\u0007!\u0000"+ + "\u0007\u001f\u0000\u0007L\u0000\u0007\u0019\u0000\u0007#\u0000\u0007/"+ + "\u0000\u0007@\u0000\u0007P\u0000\u0005\n\u0000\u0005\u0007\u0000\u0007"+ + "Z\u0000\u0007Y\u0000\u0007D\u0000\u0007C\u0000\u0007X\u0000\u0005\f\u0000"+ + "\u0005\u000e\u0000\u0007\u001c\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index eb3c70385d628..5fdf80f24d9b0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -23,7 +23,6 @@ null null null null -null '|' null null @@ -65,6 +64,7 @@ null '%' null null +null ']' null null @@ -141,7 +141,6 @@ STATS WHERE DEV_INLINESTATS DEV_LOOKUP -DEV_MATCH DEV_METRICS UNKNOWN_CMD LINE_COMMENT @@ -186,6 +185,7 @@ MINUS ASTERISK SLASH PERCENT +DEV_MATCH NAMED_OR_POSITIONAL_PARAM OPENING_BRACKET CLOSING_BRACKET @@ -257,6 +257,7 @@ valueExpression operatorExpression primaryExpression functionExpression +functionName dataType rowCommand fields @@ -307,4 +308,4 @@ inlinestatsCommand atn: -[4, 1, 120, 580, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 128, 8, 1, 10, 1, 12, 1, 131, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 139, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 157, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 
169, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 176, 8, 5, 10, 5, 12, 5, 179, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 186, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 192, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 200, 8, 5, 10, 5, 12, 5, 203, 9, 5, 1, 6, 1, 6, 3, 6, 207, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 214, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 219, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 230, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 236, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 244, 8, 9, 10, 9, 12, 9, 247, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 257, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 262, 8, 10, 10, 10, 12, 10, 265, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 273, 8, 11, 10, 11, 12, 11, 276, 9, 11, 3, 11, 278, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 5, 14, 290, 8, 14, 10, 14, 12, 14, 293, 9, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 300, 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 5, 16, 306, 8, 16, 10, 16, 12, 16, 309, 9, 16, 1, 16, 3, 16, 312, 8, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 319, 8, 17, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 3, 20, 327, 8, 20, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 333, 8, 21, 10, 21, 12, 21, 336, 9, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 346, 8, 23, 10, 23, 12, 23, 349, 9, 23, 1, 23, 3, 23, 352, 8, 23, 1, 23, 1, 23, 3, 23, 356, 8, 23, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 3, 25, 363, 8, 25, 1, 25, 1, 25, 3, 25, 367, 8, 25, 1, 26, 1, 26, 1, 26, 5, 26, 372, 8, 26, 10, 26, 12, 26, 375, 9, 26, 1, 27, 1, 27, 1, 27, 5, 27, 380, 8, 27, 10, 27, 12, 27, 383, 9, 27, 1, 28, 1, 28, 1, 28, 5, 28, 388, 8, 28, 10, 28, 12, 28, 391, 9, 28, 1, 29, 1, 29, 1, 30, 1, 30, 3, 30, 397, 8, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 412, 8, 31, 10, 31, 12, 31, 415, 9, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 423, 8, 31, 10, 31, 12, 31, 426, 9, 31, 1, 31, 1, 
31, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 434, 8, 31, 10, 31, 12, 31, 437, 9, 31, 1, 31, 1, 31, 3, 31, 441, 8, 31, 1, 32, 1, 32, 3, 32, 445, 8, 32, 1, 33, 1, 33, 3, 33, 449, 8, 33, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 458, 8, 35, 10, 35, 12, 35, 461, 9, 35, 1, 36, 1, 36, 3, 36, 465, 8, 36, 1, 36, 1, 36, 3, 36, 469, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 39, 5, 39, 481, 8, 39, 10, 39, 12, 39, 484, 9, 39, 1, 40, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 41, 3, 41, 494, 8, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 5, 44, 506, 8, 44, 10, 44, 12, 44, 509, 9, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 3, 47, 519, 8, 47, 1, 48, 3, 48, 522, 8, 48, 1, 48, 1, 48, 1, 49, 3, 49, 527, 8, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 3, 55, 549, 8, 55, 1, 55, 1, 55, 1, 55, 1, 55, 5, 55, 555, 8, 55, 10, 55, 12, 55, 558, 9, 55, 3, 55, 560, 8, 55, 1, 56, 1, 56, 1, 56, 3, 56, 565, 8, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 578, 8, 58, 1, 58, 0, 4, 2, 10, 18, 20, 59, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 0, 8, 1, 0, 59, 60, 1, 0, 61, 63, 2, 0, 26, 26, 76, 76, 1, 0, 67, 68, 2, 0, 31, 31, 35, 35, 2, 0, 38, 38, 41, 41, 2, 0, 37, 37, 51, 51, 2, 0, 52, 52, 54, 58, 606, 0, 118, 1, 0, 0, 0, 2, 121, 1, 0, 0, 0, 4, 138, 1, 0, 0, 0, 6, 156, 1, 0, 0, 0, 8, 158, 1, 0, 0, 0, 10, 191, 1, 0, 0, 0, 12, 218, 1, 0, 0, 0, 14, 220, 1, 0, 0, 0, 16, 229, 1, 0, 0, 0, 18, 235, 1, 0, 0, 0, 20, 256, 1, 0, 0, 0, 22, 266, 1, 0, 0, 0, 24, 281, 1, 0, 0, 0, 26, 283, 1, 0, 0, 0, 28, 286, 1, 0, 0, 0, 30, 299, 1, 0, 0, 0, 32, 301, 1, 0, 0, 0, 34, 318, 1, 0, 0, 0, 36, 320, 1, 
0, 0, 0, 38, 322, 1, 0, 0, 0, 40, 326, 1, 0, 0, 0, 42, 328, 1, 0, 0, 0, 44, 337, 1, 0, 0, 0, 46, 341, 1, 0, 0, 0, 48, 357, 1, 0, 0, 0, 50, 360, 1, 0, 0, 0, 52, 368, 1, 0, 0, 0, 54, 376, 1, 0, 0, 0, 56, 384, 1, 0, 0, 0, 58, 392, 1, 0, 0, 0, 60, 396, 1, 0, 0, 0, 62, 440, 1, 0, 0, 0, 64, 444, 1, 0, 0, 0, 66, 448, 1, 0, 0, 0, 68, 450, 1, 0, 0, 0, 70, 453, 1, 0, 0, 0, 72, 462, 1, 0, 0, 0, 74, 470, 1, 0, 0, 0, 76, 473, 1, 0, 0, 0, 78, 476, 1, 0, 0, 0, 80, 485, 1, 0, 0, 0, 82, 489, 1, 0, 0, 0, 84, 495, 1, 0, 0, 0, 86, 499, 1, 0, 0, 0, 88, 502, 1, 0, 0, 0, 90, 510, 1, 0, 0, 0, 92, 514, 1, 0, 0, 0, 94, 518, 1, 0, 0, 0, 96, 521, 1, 0, 0, 0, 98, 526, 1, 0, 0, 0, 100, 530, 1, 0, 0, 0, 102, 532, 1, 0, 0, 0, 104, 534, 1, 0, 0, 0, 106, 537, 1, 0, 0, 0, 108, 541, 1, 0, 0, 0, 110, 544, 1, 0, 0, 0, 112, 564, 1, 0, 0, 0, 114, 568, 1, 0, 0, 0, 116, 573, 1, 0, 0, 0, 118, 119, 3, 2, 1, 0, 119, 120, 5, 0, 0, 1, 120, 1, 1, 0, 0, 0, 121, 122, 6, 1, -1, 0, 122, 123, 3, 4, 2, 0, 123, 129, 1, 0, 0, 0, 124, 125, 10, 1, 0, 0, 125, 126, 5, 25, 0, 0, 126, 128, 3, 6, 3, 0, 127, 124, 1, 0, 0, 0, 128, 131, 1, 0, 0, 0, 129, 127, 1, 0, 0, 0, 129, 130, 1, 0, 0, 0, 130, 3, 1, 0, 0, 0, 131, 129, 1, 0, 0, 0, 132, 139, 3, 104, 52, 0, 133, 139, 3, 32, 16, 0, 134, 139, 3, 26, 13, 0, 135, 139, 3, 108, 54, 0, 136, 137, 4, 2, 1, 0, 137, 139, 3, 46, 23, 0, 138, 132, 1, 0, 0, 0, 138, 133, 1, 0, 0, 0, 138, 134, 1, 0, 0, 0, 138, 135, 1, 0, 0, 0, 138, 136, 1, 0, 0, 0, 139, 5, 1, 0, 0, 0, 140, 157, 3, 48, 24, 0, 141, 157, 3, 8, 4, 0, 142, 157, 3, 74, 37, 0, 143, 157, 3, 68, 34, 0, 144, 157, 3, 50, 25, 0, 145, 157, 3, 70, 35, 0, 146, 157, 3, 76, 38, 0, 147, 157, 3, 78, 39, 0, 148, 157, 3, 82, 41, 0, 149, 157, 3, 84, 42, 0, 150, 157, 3, 110, 55, 0, 151, 157, 3, 86, 43, 0, 152, 153, 4, 3, 2, 0, 153, 157, 3, 116, 58, 0, 154, 155, 4, 3, 3, 0, 155, 157, 3, 114, 57, 0, 156, 140, 1, 0, 0, 0, 156, 141, 1, 0, 0, 0, 156, 142, 1, 0, 0, 0, 156, 143, 1, 0, 0, 0, 156, 144, 1, 0, 0, 0, 156, 145, 1, 0, 0, 0, 156, 146, 1, 0, 0, 0, 
156, 147, 1, 0, 0, 0, 156, 148, 1, 0, 0, 0, 156, 149, 1, 0, 0, 0, 156, 150, 1, 0, 0, 0, 156, 151, 1, 0, 0, 0, 156, 152, 1, 0, 0, 0, 156, 154, 1, 0, 0, 0, 157, 7, 1, 0, 0, 0, 158, 159, 5, 16, 0, 0, 159, 160, 3, 10, 5, 0, 160, 9, 1, 0, 0, 0, 161, 162, 6, 5, -1, 0, 162, 163, 5, 44, 0, 0, 163, 192, 3, 10, 5, 8, 164, 192, 3, 16, 8, 0, 165, 192, 3, 12, 6, 0, 166, 168, 3, 16, 8, 0, 167, 169, 5, 44, 0, 0, 168, 167, 1, 0, 0, 0, 168, 169, 1, 0, 0, 0, 169, 170, 1, 0, 0, 0, 170, 171, 5, 39, 0, 0, 171, 172, 5, 43, 0, 0, 172, 177, 3, 16, 8, 0, 173, 174, 5, 34, 0, 0, 174, 176, 3, 16, 8, 0, 175, 173, 1, 0, 0, 0, 176, 179, 1, 0, 0, 0, 177, 175, 1, 0, 0, 0, 177, 178, 1, 0, 0, 0, 178, 180, 1, 0, 0, 0, 179, 177, 1, 0, 0, 0, 180, 181, 5, 50, 0, 0, 181, 192, 1, 0, 0, 0, 182, 183, 3, 16, 8, 0, 183, 185, 5, 40, 0, 0, 184, 186, 5, 44, 0, 0, 185, 184, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 187, 1, 0, 0, 0, 187, 188, 5, 45, 0, 0, 188, 192, 1, 0, 0, 0, 189, 190, 4, 5, 4, 0, 190, 192, 3, 14, 7, 0, 191, 161, 1, 0, 0, 0, 191, 164, 1, 0, 0, 0, 191, 165, 1, 0, 0, 0, 191, 166, 1, 0, 0, 0, 191, 182, 1, 0, 0, 0, 191, 189, 1, 0, 0, 0, 192, 201, 1, 0, 0, 0, 193, 194, 10, 5, 0, 0, 194, 195, 5, 30, 0, 0, 195, 200, 3, 10, 5, 6, 196, 197, 10, 4, 0, 0, 197, 198, 5, 47, 0, 0, 198, 200, 3, 10, 5, 5, 199, 193, 1, 0, 0, 0, 199, 196, 1, 0, 0, 0, 200, 203, 1, 0, 0, 0, 201, 199, 1, 0, 0, 0, 201, 202, 1, 0, 0, 0, 202, 11, 1, 0, 0, 0, 203, 201, 1, 0, 0, 0, 204, 206, 3, 16, 8, 0, 205, 207, 5, 44, 0, 0, 206, 205, 1, 0, 0, 0, 206, 207, 1, 0, 0, 0, 207, 208, 1, 0, 0, 0, 208, 209, 5, 42, 0, 0, 209, 210, 3, 100, 50, 0, 210, 219, 1, 0, 0, 0, 211, 213, 3, 16, 8, 0, 212, 214, 5, 44, 0, 0, 213, 212, 1, 0, 0, 0, 213, 214, 1, 0, 0, 0, 214, 215, 1, 0, 0, 0, 215, 216, 5, 49, 0, 0, 216, 217, 3, 100, 50, 0, 217, 219, 1, 0, 0, 0, 218, 204, 1, 0, 0, 0, 218, 211, 1, 0, 0, 0, 219, 13, 1, 0, 0, 0, 220, 221, 3, 16, 8, 0, 221, 222, 5, 19, 0, 0, 222, 223, 3, 100, 50, 0, 223, 15, 1, 0, 0, 0, 224, 230, 3, 18, 9, 0, 225, 226, 3, 18, 9, 0, 
226, 227, 3, 102, 51, 0, 227, 228, 3, 18, 9, 0, 228, 230, 1, 0, 0, 0, 229, 224, 1, 0, 0, 0, 229, 225, 1, 0, 0, 0, 230, 17, 1, 0, 0, 0, 231, 232, 6, 9, -1, 0, 232, 236, 3, 20, 10, 0, 233, 234, 7, 0, 0, 0, 234, 236, 3, 18, 9, 3, 235, 231, 1, 0, 0, 0, 235, 233, 1, 0, 0, 0, 236, 245, 1, 0, 0, 0, 237, 238, 10, 2, 0, 0, 238, 239, 7, 1, 0, 0, 239, 244, 3, 18, 9, 3, 240, 241, 10, 1, 0, 0, 241, 242, 7, 0, 0, 0, 242, 244, 3, 18, 9, 2, 243, 237, 1, 0, 0, 0, 243, 240, 1, 0, 0, 0, 244, 247, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 245, 246, 1, 0, 0, 0, 246, 19, 1, 0, 0, 0, 247, 245, 1, 0, 0, 0, 248, 249, 6, 10, -1, 0, 249, 257, 3, 62, 31, 0, 250, 257, 3, 52, 26, 0, 251, 257, 3, 22, 11, 0, 252, 253, 5, 43, 0, 0, 253, 254, 3, 10, 5, 0, 254, 255, 5, 50, 0, 0, 255, 257, 1, 0, 0, 0, 256, 248, 1, 0, 0, 0, 256, 250, 1, 0, 0, 0, 256, 251, 1, 0, 0, 0, 256, 252, 1, 0, 0, 0, 257, 263, 1, 0, 0, 0, 258, 259, 10, 1, 0, 0, 259, 260, 5, 33, 0, 0, 260, 262, 3, 24, 12, 0, 261, 258, 1, 0, 0, 0, 262, 265, 1, 0, 0, 0, 263, 261, 1, 0, 0, 0, 263, 264, 1, 0, 0, 0, 264, 21, 1, 0, 0, 0, 265, 263, 1, 0, 0, 0, 266, 267, 3, 66, 33, 0, 267, 277, 5, 43, 0, 0, 268, 278, 5, 61, 0, 0, 269, 274, 3, 10, 5, 0, 270, 271, 5, 34, 0, 0, 271, 273, 3, 10, 5, 0, 272, 270, 1, 0, 0, 0, 273, 276, 1, 0, 0, 0, 274, 272, 1, 0, 0, 0, 274, 275, 1, 0, 0, 0, 275, 278, 1, 0, 0, 0, 276, 274, 1, 0, 0, 0, 277, 268, 1, 0, 0, 0, 277, 269, 1, 0, 0, 0, 277, 278, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 280, 5, 50, 0, 0, 280, 23, 1, 0, 0, 0, 281, 282, 3, 58, 29, 0, 282, 25, 1, 0, 0, 0, 283, 284, 5, 12, 0, 0, 284, 285, 3, 28, 14, 0, 285, 27, 1, 0, 0, 0, 286, 291, 3, 30, 15, 0, 287, 288, 5, 34, 0, 0, 288, 290, 3, 30, 15, 0, 289, 287, 1, 0, 0, 0, 290, 293, 1, 0, 0, 0, 291, 289, 1, 0, 0, 0, 291, 292, 1, 0, 0, 0, 292, 29, 1, 0, 0, 0, 293, 291, 1, 0, 0, 0, 294, 300, 3, 10, 5, 0, 295, 296, 3, 52, 26, 0, 296, 297, 5, 32, 0, 0, 297, 298, 3, 10, 5, 0, 298, 300, 1, 0, 0, 0, 299, 294, 1, 0, 0, 0, 299, 295, 1, 0, 0, 0, 300, 31, 1, 0, 0, 0, 301, 302, 5, 6, 0, 0, 
302, 307, 3, 34, 17, 0, 303, 304, 5, 34, 0, 0, 304, 306, 3, 34, 17, 0, 305, 303, 1, 0, 0, 0, 306, 309, 1, 0, 0, 0, 307, 305, 1, 0, 0, 0, 307, 308, 1, 0, 0, 0, 308, 311, 1, 0, 0, 0, 309, 307, 1, 0, 0, 0, 310, 312, 3, 40, 20, 0, 311, 310, 1, 0, 0, 0, 311, 312, 1, 0, 0, 0, 312, 33, 1, 0, 0, 0, 313, 314, 3, 36, 18, 0, 314, 315, 5, 104, 0, 0, 315, 316, 3, 38, 19, 0, 316, 319, 1, 0, 0, 0, 317, 319, 3, 38, 19, 0, 318, 313, 1, 0, 0, 0, 318, 317, 1, 0, 0, 0, 319, 35, 1, 0, 0, 0, 320, 321, 5, 76, 0, 0, 321, 37, 1, 0, 0, 0, 322, 323, 7, 2, 0, 0, 323, 39, 1, 0, 0, 0, 324, 327, 3, 42, 21, 0, 325, 327, 3, 44, 22, 0, 326, 324, 1, 0, 0, 0, 326, 325, 1, 0, 0, 0, 327, 41, 1, 0, 0, 0, 328, 329, 5, 75, 0, 0, 329, 334, 5, 76, 0, 0, 330, 331, 5, 34, 0, 0, 331, 333, 5, 76, 0, 0, 332, 330, 1, 0, 0, 0, 333, 336, 1, 0, 0, 0, 334, 332, 1, 0, 0, 0, 334, 335, 1, 0, 0, 0, 335, 43, 1, 0, 0, 0, 336, 334, 1, 0, 0, 0, 337, 338, 5, 65, 0, 0, 338, 339, 3, 42, 21, 0, 339, 340, 5, 66, 0, 0, 340, 45, 1, 0, 0, 0, 341, 342, 5, 20, 0, 0, 342, 347, 3, 34, 17, 0, 343, 344, 5, 34, 0, 0, 344, 346, 3, 34, 17, 0, 345, 343, 1, 0, 0, 0, 346, 349, 1, 0, 0, 0, 347, 345, 1, 0, 0, 0, 347, 348, 1, 0, 0, 0, 348, 351, 1, 0, 0, 0, 349, 347, 1, 0, 0, 0, 350, 352, 3, 28, 14, 0, 351, 350, 1, 0, 0, 0, 351, 352, 1, 0, 0, 0, 352, 355, 1, 0, 0, 0, 353, 354, 5, 29, 0, 0, 354, 356, 3, 28, 14, 0, 355, 353, 1, 0, 0, 0, 355, 356, 1, 0, 0, 0, 356, 47, 1, 0, 0, 0, 357, 358, 5, 4, 0, 0, 358, 359, 3, 28, 14, 0, 359, 49, 1, 0, 0, 0, 360, 362, 5, 15, 0, 0, 361, 363, 3, 28, 14, 0, 362, 361, 1, 0, 0, 0, 362, 363, 1, 0, 0, 0, 363, 366, 1, 0, 0, 0, 364, 365, 5, 29, 0, 0, 365, 367, 3, 28, 14, 0, 366, 364, 1, 0, 0, 0, 366, 367, 1, 0, 0, 0, 367, 51, 1, 0, 0, 0, 368, 373, 3, 66, 33, 0, 369, 370, 5, 36, 0, 0, 370, 372, 3, 66, 33, 0, 371, 369, 1, 0, 0, 0, 372, 375, 1, 0, 0, 0, 373, 371, 1, 0, 0, 0, 373, 374, 1, 0, 0, 0, 374, 53, 1, 0, 0, 0, 375, 373, 1, 0, 0, 0, 376, 381, 3, 60, 30, 0, 377, 378, 5, 36, 0, 0, 378, 380, 3, 60, 30, 0, 379, 377, 1, 0, 
0, 0, 380, 383, 1, 0, 0, 0, 381, 379, 1, 0, 0, 0, 381, 382, 1, 0, 0, 0, 382, 55, 1, 0, 0, 0, 383, 381, 1, 0, 0, 0, 384, 389, 3, 54, 27, 0, 385, 386, 5, 34, 0, 0, 386, 388, 3, 54, 27, 0, 387, 385, 1, 0, 0, 0, 388, 391, 1, 0, 0, 0, 389, 387, 1, 0, 0, 0, 389, 390, 1, 0, 0, 0, 390, 57, 1, 0, 0, 0, 391, 389, 1, 0, 0, 0, 392, 393, 7, 3, 0, 0, 393, 59, 1, 0, 0, 0, 394, 397, 5, 80, 0, 0, 395, 397, 3, 64, 32, 0, 396, 394, 1, 0, 0, 0, 396, 395, 1, 0, 0, 0, 397, 61, 1, 0, 0, 0, 398, 441, 5, 45, 0, 0, 399, 400, 3, 98, 49, 0, 400, 401, 5, 67, 0, 0, 401, 441, 1, 0, 0, 0, 402, 441, 3, 96, 48, 0, 403, 441, 3, 98, 49, 0, 404, 441, 3, 92, 46, 0, 405, 441, 3, 64, 32, 0, 406, 441, 3, 100, 50, 0, 407, 408, 5, 65, 0, 0, 408, 413, 3, 94, 47, 0, 409, 410, 5, 34, 0, 0, 410, 412, 3, 94, 47, 0, 411, 409, 1, 0, 0, 0, 412, 415, 1, 0, 0, 0, 413, 411, 1, 0, 0, 0, 413, 414, 1, 0, 0, 0, 414, 416, 1, 0, 0, 0, 415, 413, 1, 0, 0, 0, 416, 417, 5, 66, 0, 0, 417, 441, 1, 0, 0, 0, 418, 419, 5, 65, 0, 0, 419, 424, 3, 92, 46, 0, 420, 421, 5, 34, 0, 0, 421, 423, 3, 92, 46, 0, 422, 420, 1, 0, 0, 0, 423, 426, 1, 0, 0, 0, 424, 422, 1, 0, 0, 0, 424, 425, 1, 0, 0, 0, 425, 427, 1, 0, 0, 0, 426, 424, 1, 0, 0, 0, 427, 428, 5, 66, 0, 0, 428, 441, 1, 0, 0, 0, 429, 430, 5, 65, 0, 0, 430, 435, 3, 100, 50, 0, 431, 432, 5, 34, 0, 0, 432, 434, 3, 100, 50, 0, 433, 431, 1, 0, 0, 0, 434, 437, 1, 0, 0, 0, 435, 433, 1, 0, 0, 0, 435, 436, 1, 0, 0, 0, 436, 438, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 438, 439, 5, 66, 0, 0, 439, 441, 1, 0, 0, 0, 440, 398, 1, 0, 0, 0, 440, 399, 1, 0, 0, 0, 440, 402, 1, 0, 0, 0, 440, 403, 1, 0, 0, 0, 440, 404, 1, 0, 0, 0, 440, 405, 1, 0, 0, 0, 440, 406, 1, 0, 0, 0, 440, 407, 1, 0, 0, 0, 440, 418, 1, 0, 0, 0, 440, 429, 1, 0, 0, 0, 441, 63, 1, 0, 0, 0, 442, 445, 5, 48, 0, 0, 443, 445, 5, 64, 0, 0, 444, 442, 1, 0, 0, 0, 444, 443, 1, 0, 0, 0, 445, 65, 1, 0, 0, 0, 446, 449, 3, 58, 29, 0, 447, 449, 3, 64, 32, 0, 448, 446, 1, 0, 0, 0, 448, 447, 1, 0, 0, 0, 449, 67, 1, 0, 0, 0, 450, 451, 5, 9, 0, 0, 451, 452, 5, 
27, 0, 0, 452, 69, 1, 0, 0, 0, 453, 454, 5, 14, 0, 0, 454, 459, 3, 72, 36, 0, 455, 456, 5, 34, 0, 0, 456, 458, 3, 72, 36, 0, 457, 455, 1, 0, 0, 0, 458, 461, 1, 0, 0, 0, 459, 457, 1, 0, 0, 0, 459, 460, 1, 0, 0, 0, 460, 71, 1, 0, 0, 0, 461, 459, 1, 0, 0, 0, 462, 464, 3, 10, 5, 0, 463, 465, 7, 4, 0, 0, 464, 463, 1, 0, 0, 0, 464, 465, 1, 0, 0, 0, 465, 468, 1, 0, 0, 0, 466, 467, 5, 46, 0, 0, 467, 469, 7, 5, 0, 0, 468, 466, 1, 0, 0, 0, 468, 469, 1, 0, 0, 0, 469, 73, 1, 0, 0, 0, 470, 471, 5, 8, 0, 0, 471, 472, 3, 56, 28, 0, 472, 75, 1, 0, 0, 0, 473, 474, 5, 2, 0, 0, 474, 475, 3, 56, 28, 0, 475, 77, 1, 0, 0, 0, 476, 477, 5, 11, 0, 0, 477, 482, 3, 80, 40, 0, 478, 479, 5, 34, 0, 0, 479, 481, 3, 80, 40, 0, 480, 478, 1, 0, 0, 0, 481, 484, 1, 0, 0, 0, 482, 480, 1, 0, 0, 0, 482, 483, 1, 0, 0, 0, 483, 79, 1, 0, 0, 0, 484, 482, 1, 0, 0, 0, 485, 486, 3, 54, 27, 0, 486, 487, 5, 84, 0, 0, 487, 488, 3, 54, 27, 0, 488, 81, 1, 0, 0, 0, 489, 490, 5, 1, 0, 0, 490, 491, 3, 20, 10, 0, 491, 493, 3, 100, 50, 0, 492, 494, 3, 88, 44, 0, 493, 492, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 83, 1, 0, 0, 0, 495, 496, 5, 7, 0, 0, 496, 497, 3, 20, 10, 0, 497, 498, 3, 100, 50, 0, 498, 85, 1, 0, 0, 0, 499, 500, 5, 10, 0, 0, 500, 501, 3, 52, 26, 0, 501, 87, 1, 0, 0, 0, 502, 507, 3, 90, 45, 0, 503, 504, 5, 34, 0, 0, 504, 506, 3, 90, 45, 0, 505, 503, 1, 0, 0, 0, 506, 509, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 507, 508, 1, 0, 0, 0, 508, 89, 1, 0, 0, 0, 509, 507, 1, 0, 0, 0, 510, 511, 3, 58, 29, 0, 511, 512, 5, 32, 0, 0, 512, 513, 3, 62, 31, 0, 513, 91, 1, 0, 0, 0, 514, 515, 7, 6, 0, 0, 515, 93, 1, 0, 0, 0, 516, 519, 3, 96, 48, 0, 517, 519, 3, 98, 49, 0, 518, 516, 1, 0, 0, 0, 518, 517, 1, 0, 0, 0, 519, 95, 1, 0, 0, 0, 520, 522, 7, 0, 0, 0, 521, 520, 1, 0, 0, 0, 521, 522, 1, 0, 0, 0, 522, 523, 1, 0, 0, 0, 523, 524, 5, 28, 0, 0, 524, 97, 1, 0, 0, 0, 525, 527, 7, 0, 0, 0, 526, 525, 1, 0, 0, 0, 526, 527, 1, 0, 0, 0, 527, 528, 1, 0, 0, 0, 528, 529, 5, 27, 0, 0, 529, 99, 1, 0, 0, 0, 530, 531, 5, 26, 0, 0, 531, 101, 1, 
0, 0, 0, 532, 533, 7, 7, 0, 0, 533, 103, 1, 0, 0, 0, 534, 535, 5, 5, 0, 0, 535, 536, 3, 106, 53, 0, 536, 105, 1, 0, 0, 0, 537, 538, 5, 65, 0, 0, 538, 539, 3, 2, 1, 0, 539, 540, 5, 66, 0, 0, 540, 107, 1, 0, 0, 0, 541, 542, 5, 13, 0, 0, 542, 543, 5, 100, 0, 0, 543, 109, 1, 0, 0, 0, 544, 545, 5, 3, 0, 0, 545, 548, 5, 90, 0, 0, 546, 547, 5, 88, 0, 0, 547, 549, 3, 54, 27, 0, 548, 546, 1, 0, 0, 0, 548, 549, 1, 0, 0, 0, 549, 559, 1, 0, 0, 0, 550, 551, 5, 89, 0, 0, 551, 556, 3, 112, 56, 0, 552, 553, 5, 34, 0, 0, 553, 555, 3, 112, 56, 0, 554, 552, 1, 0, 0, 0, 555, 558, 1, 0, 0, 0, 556, 554, 1, 0, 0, 0, 556, 557, 1, 0, 0, 0, 557, 560, 1, 0, 0, 0, 558, 556, 1, 0, 0, 0, 559, 550, 1, 0, 0, 0, 559, 560, 1, 0, 0, 0, 560, 111, 1, 0, 0, 0, 561, 562, 3, 54, 27, 0, 562, 563, 5, 32, 0, 0, 563, 565, 1, 0, 0, 0, 564, 561, 1, 0, 0, 0, 564, 565, 1, 0, 0, 0, 565, 566, 1, 0, 0, 0, 566, 567, 3, 54, 27, 0, 567, 113, 1, 0, 0, 0, 568, 569, 5, 18, 0, 0, 569, 570, 3, 34, 17, 0, 570, 571, 5, 88, 0, 0, 571, 572, 3, 56, 28, 0, 572, 115, 1, 0, 0, 0, 573, 574, 5, 17, 0, 0, 574, 577, 3, 28, 14, 0, 575, 576, 5, 29, 0, 0, 576, 578, 3, 28, 14, 0, 577, 575, 1, 0, 0, 0, 577, 578, 1, 0, 0, 0, 578, 117, 1, 0, 0, 0, 56, 129, 138, 156, 168, 177, 185, 191, 199, 201, 206, 213, 218, 229, 235, 243, 245, 256, 263, 274, 277, 291, 299, 307, 311, 318, 326, 334, 347, 351, 355, 362, 366, 373, 381, 389, 396, 413, 424, 435, 440, 444, 448, 459, 464, 468, 482, 493, 507, 518, 521, 526, 548, 556, 559, 564, 577] \ No newline at end of file +[4, 1, 120, 587, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 
35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 130, 8, 1, 10, 1, 12, 1, 133, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 141, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 159, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 171, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 178, 8, 5, 10, 5, 12, 5, 181, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 188, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 194, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 202, 8, 5, 10, 5, 12, 5, 205, 9, 5, 1, 6, 1, 6, 3, 6, 209, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 216, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 221, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 232, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 238, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 246, 8, 9, 10, 9, 12, 9, 249, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 259, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 264, 8, 10, 10, 10, 12, 10, 267, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 275, 8, 11, 10, 11, 12, 11, 278, 9, 11, 3, 11, 280, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 3, 12, 287, 8, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 5, 15, 297, 8, 15, 10, 15, 12, 15, 300, 9, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 3, 16, 307, 8, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 313, 8, 17, 10, 17, 12, 17, 316, 9, 17, 1, 17, 3, 17, 319, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 326, 8, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 334, 8, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 340, 8, 22, 10, 22, 12, 22, 343, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 
1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 353, 8, 24, 10, 24, 12, 24, 356, 9, 24, 1, 24, 3, 24, 359, 8, 24, 1, 24, 1, 24, 3, 24, 363, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 370, 8, 26, 1, 26, 1, 26, 3, 26, 374, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 379, 8, 27, 10, 27, 12, 27, 382, 9, 27, 1, 28, 1, 28, 1, 28, 5, 28, 387, 8, 28, 10, 28, 12, 28, 390, 9, 28, 1, 29, 1, 29, 1, 29, 5, 29, 395, 8, 29, 10, 29, 12, 29, 398, 9, 29, 1, 30, 1, 30, 1, 31, 1, 31, 3, 31, 404, 8, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 419, 8, 32, 10, 32, 12, 32, 422, 9, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 430, 8, 32, 10, 32, 12, 32, 433, 9, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 441, 8, 32, 10, 32, 12, 32, 444, 9, 32, 1, 32, 1, 32, 3, 32, 448, 8, 32, 1, 33, 1, 33, 3, 33, 452, 8, 33, 1, 34, 1, 34, 3, 34, 456, 8, 34, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 5, 36, 465, 8, 36, 10, 36, 12, 36, 468, 9, 36, 1, 37, 1, 37, 3, 37, 472, 8, 37, 1, 37, 1, 37, 3, 37, 476, 8, 37, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 5, 40, 488, 8, 40, 10, 40, 12, 40, 491, 9, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 3, 42, 501, 8, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 5, 45, 513, 8, 45, 10, 45, 12, 45, 516, 9, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 3, 48, 526, 8, 48, 1, 49, 3, 49, 529, 8, 49, 1, 49, 1, 49, 1, 50, 3, 50, 534, 8, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 3, 56, 556, 8, 56, 1, 56, 1, 56, 1, 56, 1, 56, 5, 56, 562, 8, 56, 10, 56, 12, 56, 565, 9, 56, 3, 56, 567, 8, 56, 1, 57, 1, 57, 1, 57, 3, 57, 572, 8, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 3, 59, 585, 8, 59, 1, 59, 0, 4, 2, 10, 18, 20, 60, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 
42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 0, 8, 1, 0, 58, 59, 1, 0, 60, 62, 2, 0, 25, 25, 76, 76, 1, 0, 67, 68, 2, 0, 30, 30, 34, 34, 2, 0, 37, 37, 40, 40, 2, 0, 36, 36, 50, 50, 2, 0, 51, 51, 53, 57, 613, 0, 120, 1, 0, 0, 0, 2, 123, 1, 0, 0, 0, 4, 140, 1, 0, 0, 0, 6, 158, 1, 0, 0, 0, 8, 160, 1, 0, 0, 0, 10, 193, 1, 0, 0, 0, 12, 220, 1, 0, 0, 0, 14, 222, 1, 0, 0, 0, 16, 231, 1, 0, 0, 0, 18, 237, 1, 0, 0, 0, 20, 258, 1, 0, 0, 0, 22, 268, 1, 0, 0, 0, 24, 286, 1, 0, 0, 0, 26, 288, 1, 0, 0, 0, 28, 290, 1, 0, 0, 0, 30, 293, 1, 0, 0, 0, 32, 306, 1, 0, 0, 0, 34, 308, 1, 0, 0, 0, 36, 325, 1, 0, 0, 0, 38, 327, 1, 0, 0, 0, 40, 329, 1, 0, 0, 0, 42, 333, 1, 0, 0, 0, 44, 335, 1, 0, 0, 0, 46, 344, 1, 0, 0, 0, 48, 348, 1, 0, 0, 0, 50, 364, 1, 0, 0, 0, 52, 367, 1, 0, 0, 0, 54, 375, 1, 0, 0, 0, 56, 383, 1, 0, 0, 0, 58, 391, 1, 0, 0, 0, 60, 399, 1, 0, 0, 0, 62, 403, 1, 0, 0, 0, 64, 447, 1, 0, 0, 0, 66, 451, 1, 0, 0, 0, 68, 455, 1, 0, 0, 0, 70, 457, 1, 0, 0, 0, 72, 460, 1, 0, 0, 0, 74, 469, 1, 0, 0, 0, 76, 477, 1, 0, 0, 0, 78, 480, 1, 0, 0, 0, 80, 483, 1, 0, 0, 0, 82, 492, 1, 0, 0, 0, 84, 496, 1, 0, 0, 0, 86, 502, 1, 0, 0, 0, 88, 506, 1, 0, 0, 0, 90, 509, 1, 0, 0, 0, 92, 517, 1, 0, 0, 0, 94, 521, 1, 0, 0, 0, 96, 525, 1, 0, 0, 0, 98, 528, 1, 0, 0, 0, 100, 533, 1, 0, 0, 0, 102, 537, 1, 0, 0, 0, 104, 539, 1, 0, 0, 0, 106, 541, 1, 0, 0, 0, 108, 544, 1, 0, 0, 0, 110, 548, 1, 0, 0, 0, 112, 551, 1, 0, 0, 0, 114, 571, 1, 0, 0, 0, 116, 575, 1, 0, 0, 0, 118, 580, 1, 0, 0, 0, 120, 121, 3, 2, 1, 0, 121, 122, 5, 0, 0, 1, 122, 1, 1, 0, 0, 0, 123, 124, 6, 1, -1, 0, 124, 125, 3, 4, 2, 0, 125, 131, 1, 0, 0, 0, 126, 127, 10, 1, 0, 0, 127, 128, 5, 24, 0, 0, 128, 130, 3, 6, 3, 0, 129, 126, 1, 0, 0, 0, 130, 133, 1, 0, 0, 0, 131, 129, 1, 0, 0, 0, 131, 132, 1, 0, 0, 0, 132, 3, 1, 0, 0, 0, 133, 131, 1, 0, 0, 0, 134, 141, 3, 106, 53, 0, 135, 141, 3, 34, 17, 0, 136, 141, 3, 28, 14, 0, 
137, 141, 3, 110, 55, 0, 138, 139, 4, 2, 1, 0, 139, 141, 3, 48, 24, 0, 140, 134, 1, 0, 0, 0, 140, 135, 1, 0, 0, 0, 140, 136, 1, 0, 0, 0, 140, 137, 1, 0, 0, 0, 140, 138, 1, 0, 0, 0, 141, 5, 1, 0, 0, 0, 142, 159, 3, 50, 25, 0, 143, 159, 3, 8, 4, 0, 144, 159, 3, 76, 38, 0, 145, 159, 3, 70, 35, 0, 146, 159, 3, 52, 26, 0, 147, 159, 3, 72, 36, 0, 148, 159, 3, 78, 39, 0, 149, 159, 3, 80, 40, 0, 150, 159, 3, 84, 42, 0, 151, 159, 3, 86, 43, 0, 152, 159, 3, 112, 56, 0, 153, 159, 3, 88, 44, 0, 154, 155, 4, 3, 2, 0, 155, 159, 3, 118, 59, 0, 156, 157, 4, 3, 3, 0, 157, 159, 3, 116, 58, 0, 158, 142, 1, 0, 0, 0, 158, 143, 1, 0, 0, 0, 158, 144, 1, 0, 0, 0, 158, 145, 1, 0, 0, 0, 158, 146, 1, 0, 0, 0, 158, 147, 1, 0, 0, 0, 158, 148, 1, 0, 0, 0, 158, 149, 1, 0, 0, 0, 158, 150, 1, 0, 0, 0, 158, 151, 1, 0, 0, 0, 158, 152, 1, 0, 0, 0, 158, 153, 1, 0, 0, 0, 158, 154, 1, 0, 0, 0, 158, 156, 1, 0, 0, 0, 159, 7, 1, 0, 0, 0, 160, 161, 5, 16, 0, 0, 161, 162, 3, 10, 5, 0, 162, 9, 1, 0, 0, 0, 163, 164, 6, 5, -1, 0, 164, 165, 5, 43, 0, 0, 165, 194, 3, 10, 5, 8, 166, 194, 3, 16, 8, 0, 167, 194, 3, 12, 6, 0, 168, 170, 3, 16, 8, 0, 169, 171, 5, 43, 0, 0, 170, 169, 1, 0, 0, 0, 170, 171, 1, 0, 0, 0, 171, 172, 1, 0, 0, 0, 172, 173, 5, 38, 0, 0, 173, 174, 5, 42, 0, 0, 174, 179, 3, 16, 8, 0, 175, 176, 5, 33, 0, 0, 176, 178, 3, 16, 8, 0, 177, 175, 1, 0, 0, 0, 178, 181, 1, 0, 0, 0, 179, 177, 1, 0, 0, 0, 179, 180, 1, 0, 0, 0, 180, 182, 1, 0, 0, 0, 181, 179, 1, 0, 0, 0, 182, 183, 5, 49, 0, 0, 183, 194, 1, 0, 0, 0, 184, 185, 3, 16, 8, 0, 185, 187, 5, 39, 0, 0, 186, 188, 5, 43, 0, 0, 187, 186, 1, 0, 0, 0, 187, 188, 1, 0, 0, 0, 188, 189, 1, 0, 0, 0, 189, 190, 5, 44, 0, 0, 190, 194, 1, 0, 0, 0, 191, 192, 4, 5, 4, 0, 192, 194, 3, 14, 7, 0, 193, 163, 1, 0, 0, 0, 193, 166, 1, 0, 0, 0, 193, 167, 1, 0, 0, 0, 193, 168, 1, 0, 0, 0, 193, 184, 1, 0, 0, 0, 193, 191, 1, 0, 0, 0, 194, 203, 1, 0, 0, 0, 195, 196, 10, 5, 0, 0, 196, 197, 5, 29, 0, 0, 197, 202, 3, 10, 5, 6, 198, 199, 10, 4, 0, 0, 199, 200, 5, 46, 0, 0, 200, 202, 
3, 10, 5, 5, 201, 195, 1, 0, 0, 0, 201, 198, 1, 0, 0, 0, 202, 205, 1, 0, 0, 0, 203, 201, 1, 0, 0, 0, 203, 204, 1, 0, 0, 0, 204, 11, 1, 0, 0, 0, 205, 203, 1, 0, 0, 0, 206, 208, 3, 16, 8, 0, 207, 209, 5, 43, 0, 0, 208, 207, 1, 0, 0, 0, 208, 209, 1, 0, 0, 0, 209, 210, 1, 0, 0, 0, 210, 211, 5, 41, 0, 0, 211, 212, 3, 102, 51, 0, 212, 221, 1, 0, 0, 0, 213, 215, 3, 16, 8, 0, 214, 216, 5, 43, 0, 0, 215, 214, 1, 0, 0, 0, 215, 216, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 218, 5, 48, 0, 0, 218, 219, 3, 102, 51, 0, 219, 221, 1, 0, 0, 0, 220, 206, 1, 0, 0, 0, 220, 213, 1, 0, 0, 0, 221, 13, 1, 0, 0, 0, 222, 223, 3, 16, 8, 0, 223, 224, 5, 63, 0, 0, 224, 225, 3, 102, 51, 0, 225, 15, 1, 0, 0, 0, 226, 232, 3, 18, 9, 0, 227, 228, 3, 18, 9, 0, 228, 229, 3, 104, 52, 0, 229, 230, 3, 18, 9, 0, 230, 232, 1, 0, 0, 0, 231, 226, 1, 0, 0, 0, 231, 227, 1, 0, 0, 0, 232, 17, 1, 0, 0, 0, 233, 234, 6, 9, -1, 0, 234, 238, 3, 20, 10, 0, 235, 236, 7, 0, 0, 0, 236, 238, 3, 18, 9, 3, 237, 233, 1, 0, 0, 0, 237, 235, 1, 0, 0, 0, 238, 247, 1, 0, 0, 0, 239, 240, 10, 2, 0, 0, 240, 241, 7, 1, 0, 0, 241, 246, 3, 18, 9, 3, 242, 243, 10, 1, 0, 0, 243, 244, 7, 0, 0, 0, 244, 246, 3, 18, 9, 2, 245, 239, 1, 0, 0, 0, 245, 242, 1, 0, 0, 0, 246, 249, 1, 0, 0, 0, 247, 245, 1, 0, 0, 0, 247, 248, 1, 0, 0, 0, 248, 19, 1, 0, 0, 0, 249, 247, 1, 0, 0, 0, 250, 251, 6, 10, -1, 0, 251, 259, 3, 64, 32, 0, 252, 259, 3, 54, 27, 0, 253, 259, 3, 22, 11, 0, 254, 255, 5, 42, 0, 0, 255, 256, 3, 10, 5, 0, 256, 257, 5, 49, 0, 0, 257, 259, 1, 0, 0, 0, 258, 250, 1, 0, 0, 0, 258, 252, 1, 0, 0, 0, 258, 253, 1, 0, 0, 0, 258, 254, 1, 0, 0, 0, 259, 265, 1, 0, 0, 0, 260, 261, 10, 1, 0, 0, 261, 262, 5, 32, 0, 0, 262, 264, 3, 26, 13, 0, 263, 260, 1, 0, 0, 0, 264, 267, 1, 0, 0, 0, 265, 263, 1, 0, 0, 0, 265, 266, 1, 0, 0, 0, 266, 21, 1, 0, 0, 0, 267, 265, 1, 0, 0, 0, 268, 269, 3, 24, 12, 0, 269, 279, 5, 42, 0, 0, 270, 280, 5, 60, 0, 0, 271, 276, 3, 10, 5, 0, 272, 273, 5, 33, 0, 0, 273, 275, 3, 10, 5, 0, 274, 272, 1, 0, 0, 0, 275, 278, 1, 0, 0, 0, 
276, 274, 1, 0, 0, 0, 276, 277, 1, 0, 0, 0, 277, 280, 1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 279, 270, 1, 0, 0, 0, 279, 271, 1, 0, 0, 0, 279, 280, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 282, 5, 49, 0, 0, 282, 23, 1, 0, 0, 0, 283, 284, 4, 12, 10, 0, 284, 287, 5, 63, 0, 0, 285, 287, 3, 68, 34, 0, 286, 283, 1, 0, 0, 0, 286, 285, 1, 0, 0, 0, 287, 25, 1, 0, 0, 0, 288, 289, 3, 60, 30, 0, 289, 27, 1, 0, 0, 0, 290, 291, 5, 12, 0, 0, 291, 292, 3, 30, 15, 0, 292, 29, 1, 0, 0, 0, 293, 298, 3, 32, 16, 0, 294, 295, 5, 33, 0, 0, 295, 297, 3, 32, 16, 0, 296, 294, 1, 0, 0, 0, 297, 300, 1, 0, 0, 0, 298, 296, 1, 0, 0, 0, 298, 299, 1, 0, 0, 0, 299, 31, 1, 0, 0, 0, 300, 298, 1, 0, 0, 0, 301, 307, 3, 10, 5, 0, 302, 303, 3, 54, 27, 0, 303, 304, 5, 31, 0, 0, 304, 305, 3, 10, 5, 0, 305, 307, 1, 0, 0, 0, 306, 301, 1, 0, 0, 0, 306, 302, 1, 0, 0, 0, 307, 33, 1, 0, 0, 0, 308, 309, 5, 6, 0, 0, 309, 314, 3, 36, 18, 0, 310, 311, 5, 33, 0, 0, 311, 313, 3, 36, 18, 0, 312, 310, 1, 0, 0, 0, 313, 316, 1, 0, 0, 0, 314, 312, 1, 0, 0, 0, 314, 315, 1, 0, 0, 0, 315, 318, 1, 0, 0, 0, 316, 314, 1, 0, 0, 0, 317, 319, 3, 42, 21, 0, 318, 317, 1, 0, 0, 0, 318, 319, 1, 0, 0, 0, 319, 35, 1, 0, 0, 0, 320, 321, 3, 38, 19, 0, 321, 322, 5, 104, 0, 0, 322, 323, 3, 40, 20, 0, 323, 326, 1, 0, 0, 0, 324, 326, 3, 40, 20, 0, 325, 320, 1, 0, 0, 0, 325, 324, 1, 0, 0, 0, 326, 37, 1, 0, 0, 0, 327, 328, 5, 76, 0, 0, 328, 39, 1, 0, 0, 0, 329, 330, 7, 2, 0, 0, 330, 41, 1, 0, 0, 0, 331, 334, 3, 44, 22, 0, 332, 334, 3, 46, 23, 0, 333, 331, 1, 0, 0, 0, 333, 332, 1, 0, 0, 0, 334, 43, 1, 0, 0, 0, 335, 336, 5, 75, 0, 0, 336, 341, 5, 76, 0, 0, 337, 338, 5, 33, 0, 0, 338, 340, 5, 76, 0, 0, 339, 337, 1, 0, 0, 0, 340, 343, 1, 0, 0, 0, 341, 339, 1, 0, 0, 0, 341, 342, 1, 0, 0, 0, 342, 45, 1, 0, 0, 0, 343, 341, 1, 0, 0, 0, 344, 345, 5, 65, 0, 0, 345, 346, 3, 44, 22, 0, 346, 347, 5, 66, 0, 0, 347, 47, 1, 0, 0, 0, 348, 349, 5, 19, 0, 0, 349, 354, 3, 36, 18, 0, 350, 351, 5, 33, 0, 0, 351, 353, 3, 36, 18, 0, 352, 350, 1, 0, 0, 0, 353, 356, 1, 0, 0, 0, 
354, 352, 1, 0, 0, 0, 354, 355, 1, 0, 0, 0, 355, 358, 1, 0, 0, 0, 356, 354, 1, 0, 0, 0, 357, 359, 3, 30, 15, 0, 358, 357, 1, 0, 0, 0, 358, 359, 1, 0, 0, 0, 359, 362, 1, 0, 0, 0, 360, 361, 5, 28, 0, 0, 361, 363, 3, 30, 15, 0, 362, 360, 1, 0, 0, 0, 362, 363, 1, 0, 0, 0, 363, 49, 1, 0, 0, 0, 364, 365, 5, 4, 0, 0, 365, 366, 3, 30, 15, 0, 366, 51, 1, 0, 0, 0, 367, 369, 5, 15, 0, 0, 368, 370, 3, 30, 15, 0, 369, 368, 1, 0, 0, 0, 369, 370, 1, 0, 0, 0, 370, 373, 1, 0, 0, 0, 371, 372, 5, 28, 0, 0, 372, 374, 3, 30, 15, 0, 373, 371, 1, 0, 0, 0, 373, 374, 1, 0, 0, 0, 374, 53, 1, 0, 0, 0, 375, 380, 3, 68, 34, 0, 376, 377, 5, 35, 0, 0, 377, 379, 3, 68, 34, 0, 378, 376, 1, 0, 0, 0, 379, 382, 1, 0, 0, 0, 380, 378, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 55, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 383, 388, 3, 62, 31, 0, 384, 385, 5, 35, 0, 0, 385, 387, 3, 62, 31, 0, 386, 384, 1, 0, 0, 0, 387, 390, 1, 0, 0, 0, 388, 386, 1, 0, 0, 0, 388, 389, 1, 0, 0, 0, 389, 57, 1, 0, 0, 0, 390, 388, 1, 0, 0, 0, 391, 396, 3, 56, 28, 0, 392, 393, 5, 33, 0, 0, 393, 395, 3, 56, 28, 0, 394, 392, 1, 0, 0, 0, 395, 398, 1, 0, 0, 0, 396, 394, 1, 0, 0, 0, 396, 397, 1, 0, 0, 0, 397, 59, 1, 0, 0, 0, 398, 396, 1, 0, 0, 0, 399, 400, 7, 3, 0, 0, 400, 61, 1, 0, 0, 0, 401, 404, 5, 80, 0, 0, 402, 404, 3, 66, 33, 0, 403, 401, 1, 0, 0, 0, 403, 402, 1, 0, 0, 0, 404, 63, 1, 0, 0, 0, 405, 448, 5, 44, 0, 0, 406, 407, 3, 100, 50, 0, 407, 408, 5, 67, 0, 0, 408, 448, 1, 0, 0, 0, 409, 448, 3, 98, 49, 0, 410, 448, 3, 100, 50, 0, 411, 448, 3, 94, 47, 0, 412, 448, 3, 66, 33, 0, 413, 448, 3, 102, 51, 0, 414, 415, 5, 65, 0, 0, 415, 420, 3, 96, 48, 0, 416, 417, 5, 33, 0, 0, 417, 419, 3, 96, 48, 0, 418, 416, 1, 0, 0, 0, 419, 422, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 420, 421, 1, 0, 0, 0, 421, 423, 1, 0, 0, 0, 422, 420, 1, 0, 0, 0, 423, 424, 5, 66, 0, 0, 424, 448, 1, 0, 0, 0, 425, 426, 5, 65, 0, 0, 426, 431, 3, 94, 47, 0, 427, 428, 5, 33, 0, 0, 428, 430, 3, 94, 47, 0, 429, 427, 1, 0, 0, 0, 430, 433, 1, 0, 0, 0, 431, 429, 1, 0, 0, 0, 431, 432, 
1, 0, 0, 0, 432, 434, 1, 0, 0, 0, 433, 431, 1, 0, 0, 0, 434, 435, 5, 66, 0, 0, 435, 448, 1, 0, 0, 0, 436, 437, 5, 65, 0, 0, 437, 442, 3, 102, 51, 0, 438, 439, 5, 33, 0, 0, 439, 441, 3, 102, 51, 0, 440, 438, 1, 0, 0, 0, 441, 444, 1, 0, 0, 0, 442, 440, 1, 0, 0, 0, 442, 443, 1, 0, 0, 0, 443, 445, 1, 0, 0, 0, 444, 442, 1, 0, 0, 0, 445, 446, 5, 66, 0, 0, 446, 448, 1, 0, 0, 0, 447, 405, 1, 0, 0, 0, 447, 406, 1, 0, 0, 0, 447, 409, 1, 0, 0, 0, 447, 410, 1, 0, 0, 0, 447, 411, 1, 0, 0, 0, 447, 412, 1, 0, 0, 0, 447, 413, 1, 0, 0, 0, 447, 414, 1, 0, 0, 0, 447, 425, 1, 0, 0, 0, 447, 436, 1, 0, 0, 0, 448, 65, 1, 0, 0, 0, 449, 452, 5, 47, 0, 0, 450, 452, 5, 64, 0, 0, 451, 449, 1, 0, 0, 0, 451, 450, 1, 0, 0, 0, 452, 67, 1, 0, 0, 0, 453, 456, 3, 60, 30, 0, 454, 456, 3, 66, 33, 0, 455, 453, 1, 0, 0, 0, 455, 454, 1, 0, 0, 0, 456, 69, 1, 0, 0, 0, 457, 458, 5, 9, 0, 0, 458, 459, 5, 26, 0, 0, 459, 71, 1, 0, 0, 0, 460, 461, 5, 14, 0, 0, 461, 466, 3, 74, 37, 0, 462, 463, 5, 33, 0, 0, 463, 465, 3, 74, 37, 0, 464, 462, 1, 0, 0, 0, 465, 468, 1, 0, 0, 0, 466, 464, 1, 0, 0, 0, 466, 467, 1, 0, 0, 0, 467, 73, 1, 0, 0, 0, 468, 466, 1, 0, 0, 0, 469, 471, 3, 10, 5, 0, 470, 472, 7, 4, 0, 0, 471, 470, 1, 0, 0, 0, 471, 472, 1, 0, 0, 0, 472, 475, 1, 0, 0, 0, 473, 474, 5, 45, 0, 0, 474, 476, 7, 5, 0, 0, 475, 473, 1, 0, 0, 0, 475, 476, 1, 0, 0, 0, 476, 75, 1, 0, 0, 0, 477, 478, 5, 8, 0, 0, 478, 479, 3, 58, 29, 0, 479, 77, 1, 0, 0, 0, 480, 481, 5, 2, 0, 0, 481, 482, 3, 58, 29, 0, 482, 79, 1, 0, 0, 0, 483, 484, 5, 11, 0, 0, 484, 489, 3, 82, 41, 0, 485, 486, 5, 33, 0, 0, 486, 488, 3, 82, 41, 0, 487, 485, 1, 0, 0, 0, 488, 491, 1, 0, 0, 0, 489, 487, 1, 0, 0, 0, 489, 490, 1, 0, 0, 0, 490, 81, 1, 0, 0, 0, 491, 489, 1, 0, 0, 0, 492, 493, 3, 56, 28, 0, 493, 494, 5, 84, 0, 0, 494, 495, 3, 56, 28, 0, 495, 83, 1, 0, 0, 0, 496, 497, 5, 1, 0, 0, 497, 498, 3, 20, 10, 0, 498, 500, 3, 102, 51, 0, 499, 501, 3, 90, 45, 0, 500, 499, 1, 0, 0, 0, 500, 501, 1, 0, 0, 0, 501, 85, 1, 0, 0, 0, 502, 503, 5, 7, 0, 0, 503, 504, 3, 
20, 10, 0, 504, 505, 3, 102, 51, 0, 505, 87, 1, 0, 0, 0, 506, 507, 5, 10, 0, 0, 507, 508, 3, 54, 27, 0, 508, 89, 1, 0, 0, 0, 509, 514, 3, 92, 46, 0, 510, 511, 5, 33, 0, 0, 511, 513, 3, 92, 46, 0, 512, 510, 1, 0, 0, 0, 513, 516, 1, 0, 0, 0, 514, 512, 1, 0, 0, 0, 514, 515, 1, 0, 0, 0, 515, 91, 1, 0, 0, 0, 516, 514, 1, 0, 0, 0, 517, 518, 3, 60, 30, 0, 518, 519, 5, 31, 0, 0, 519, 520, 3, 64, 32, 0, 520, 93, 1, 0, 0, 0, 521, 522, 7, 6, 0, 0, 522, 95, 1, 0, 0, 0, 523, 526, 3, 98, 49, 0, 524, 526, 3, 100, 50, 0, 525, 523, 1, 0, 0, 0, 525, 524, 1, 0, 0, 0, 526, 97, 1, 0, 0, 0, 527, 529, 7, 0, 0, 0, 528, 527, 1, 0, 0, 0, 528, 529, 1, 0, 0, 0, 529, 530, 1, 0, 0, 0, 530, 531, 5, 27, 0, 0, 531, 99, 1, 0, 0, 0, 532, 534, 7, 0, 0, 0, 533, 532, 1, 0, 0, 0, 533, 534, 1, 0, 0, 0, 534, 535, 1, 0, 0, 0, 535, 536, 5, 26, 0, 0, 536, 101, 1, 0, 0, 0, 537, 538, 5, 25, 0, 0, 538, 103, 1, 0, 0, 0, 539, 540, 7, 7, 0, 0, 540, 105, 1, 0, 0, 0, 541, 542, 5, 5, 0, 0, 542, 543, 3, 108, 54, 0, 543, 107, 1, 0, 0, 0, 544, 545, 5, 65, 0, 0, 545, 546, 3, 2, 1, 0, 546, 547, 5, 66, 0, 0, 547, 109, 1, 0, 0, 0, 548, 549, 5, 13, 0, 0, 549, 550, 5, 100, 0, 0, 550, 111, 1, 0, 0, 0, 551, 552, 5, 3, 0, 0, 552, 555, 5, 90, 0, 0, 553, 554, 5, 88, 0, 0, 554, 556, 3, 56, 28, 0, 555, 553, 1, 0, 0, 0, 555, 556, 1, 0, 0, 0, 556, 566, 1, 0, 0, 0, 557, 558, 5, 89, 0, 0, 558, 563, 3, 114, 57, 0, 559, 560, 5, 33, 0, 0, 560, 562, 3, 114, 57, 0, 561, 559, 1, 0, 0, 0, 562, 565, 1, 0, 0, 0, 563, 561, 1, 0, 0, 0, 563, 564, 1, 0, 0, 0, 564, 567, 1, 0, 0, 0, 565, 563, 1, 0, 0, 0, 566, 557, 1, 0, 0, 0, 566, 567, 1, 0, 0, 0, 567, 113, 1, 0, 0, 0, 568, 569, 3, 56, 28, 0, 569, 570, 5, 31, 0, 0, 570, 572, 1, 0, 0, 0, 571, 568, 1, 0, 0, 0, 571, 572, 1, 0, 0, 0, 572, 573, 1, 0, 0, 0, 573, 574, 3, 56, 28, 0, 574, 115, 1, 0, 0, 0, 575, 576, 5, 18, 0, 0, 576, 577, 3, 36, 18, 0, 577, 578, 5, 88, 0, 0, 578, 579, 3, 58, 29, 0, 579, 117, 1, 0, 0, 0, 580, 581, 5, 17, 0, 0, 581, 584, 3, 30, 15, 0, 582, 583, 5, 28, 0, 0, 583, 585, 3, 30, 15, 
0, 584, 582, 1, 0, 0, 0, 584, 585, 1, 0, 0, 0, 585, 119, 1, 0, 0, 0, 57, 131, 140, 158, 170, 179, 187, 193, 201, 203, 208, 215, 220, 231, 237, 245, 247, 258, 265, 276, 279, 286, 298, 306, 314, 318, 325, 333, 341, 354, 358, 362, 369, 373, 380, 388, 396, 403, 420, 431, 442, 447, 451, 455, 466, 471, 475, 489, 500, 514, 525, 528, 533, 555, 563, 566, 571, 584] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index 14913849d1b51..522393fb42c4b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -8,14 +8,26 @@ * 2.0. */ -import org.antlr.v4.runtime.atn.*; +import org.antlr.v4.runtime.FailedPredicateException; +import org.antlr.v4.runtime.NoViableAltException; +import org.antlr.v4.runtime.ParserRuleContext; +import org.antlr.v4.runtime.RecognitionException; +import org.antlr.v4.runtime.RuleContext; +import org.antlr.v4.runtime.RuntimeMetaData; +import org.antlr.v4.runtime.Token; +import org.antlr.v4.runtime.TokenStream; +import org.antlr.v4.runtime.Vocabulary; +import org.antlr.v4.runtime.VocabularyImpl; +import org.antlr.v4.runtime.atn.ATN; +import org.antlr.v4.runtime.atn.ATNDeserializer; +import org.antlr.v4.runtime.atn.ParserATNSimulator; +import org.antlr.v4.runtime.atn.PredictionContextCache; import org.antlr.v4.runtime.dfa.DFA; -import org.antlr.v4.runtime.*; -import org.antlr.v4.runtime.misc.*; -import org.antlr.v4.runtime.tree.*; +import org.antlr.v4.runtime.tree.ParseTreeListener; +import org.antlr.v4.runtime.tree.ParseTreeVisitor; +import org.antlr.v4.runtime.tree.TerminalNode; + import java.util.List; -import java.util.Iterator; -import java.util.ArrayList; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast", "CheckReturnValue"}) 
public class EsqlBaseParser extends ParserConfig { @@ -25,66 +37,66 @@ public class EsqlBaseParser extends ParserConfig { protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int - DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, - LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, - WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_MATCH=19, DEV_METRICS=20, - UNKNOWN_CMD=21, LINE_COMMENT=22, MULTILINE_COMMENT=23, WS=24, PIPE=25, - QUOTED_STRING=26, INTEGER_LITERAL=27, DECIMAL_LITERAL=28, BY=29, AND=30, - ASC=31, ASSIGN=32, CAST_OP=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, - IN=39, IS=40, LAST=41, LIKE=42, LP=43, NOT=44, NULL=45, NULLS=46, OR=47, - PARAM=48, RLIKE=49, RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, - GT=57, GTE=58, PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, NAMED_OR_POSITIONAL_PARAM=64, - OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, - EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, EXPLAIN_WS=72, - EXPLAIN_LINE_COMMENT=73, EXPLAIN_MULTILINE_COMMENT=74, METADATA=75, UNQUOTED_SOURCE=76, - FROM_LINE_COMMENT=77, FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80, - PROJECT_LINE_COMMENT=81, PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83, - AS=84, RENAME_LINE_COMMENT=85, RENAME_MULTILINE_COMMENT=86, RENAME_WS=87, - ON=88, WITH=89, ENRICH_POLICY_NAME=90, ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92, - ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94, ENRICH_FIELD_MULTILINE_COMMENT=95, - ENRICH_FIELD_WS=96, MVEXPAND_LINE_COMMENT=97, MVEXPAND_MULTILINE_COMMENT=98, - MVEXPAND_WS=99, INFO=100, SHOW_LINE_COMMENT=101, SHOW_MULTILINE_COMMENT=102, - SHOW_WS=103, COLON=104, SETTING=105, SETTING_LINE_COMMENT=106, SETTTING_MULTILINE_COMMENT=107, - SETTING_WS=108, LOOKUP_LINE_COMMENT=109, LOOKUP_MULTILINE_COMMENT=110, - LOOKUP_WS=111, LOOKUP_FIELD_LINE_COMMENT=112, 
LOOKUP_FIELD_MULTILINE_COMMENT=113, - LOOKUP_FIELD_WS=114, METRICS_LINE_COMMENT=115, METRICS_MULTILINE_COMMENT=116, - METRICS_WS=117, CLOSING_METRICS_LINE_COMMENT=118, CLOSING_METRICS_MULTILINE_COMMENT=119, + DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, + LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, + WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, UNKNOWN_CMD=20, + LINE_COMMENT=21, MULTILINE_COMMENT=22, WS=23, PIPE=24, QUOTED_STRING=25, + INTEGER_LITERAL=26, DECIMAL_LITERAL=27, BY=28, AND=29, ASC=30, ASSIGN=31, + CAST_OP=32, COMMA=33, DESC=34, DOT=35, FALSE=36, FIRST=37, IN=38, IS=39, + LAST=40, LIKE=41, LP=42, NOT=43, NULL=44, NULLS=45, OR=46, PARAM=47, RLIKE=48, + RP=49, TRUE=50, EQ=51, CIEQ=52, NEQ=53, LT=54, LTE=55, GT=56, GTE=57, + PLUS=58, MINUS=59, ASTERISK=60, SLASH=61, PERCENT=62, DEV_MATCH=63, NAMED_OR_POSITIONAL_PARAM=64, + OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, + EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, EXPLAIN_WS=72, + EXPLAIN_LINE_COMMENT=73, EXPLAIN_MULTILINE_COMMENT=74, METADATA=75, UNQUOTED_SOURCE=76, + FROM_LINE_COMMENT=77, FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80, + PROJECT_LINE_COMMENT=81, PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83, + AS=84, RENAME_LINE_COMMENT=85, RENAME_MULTILINE_COMMENT=86, RENAME_WS=87, + ON=88, WITH=89, ENRICH_POLICY_NAME=90, ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92, + ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94, ENRICH_FIELD_MULTILINE_COMMENT=95, + ENRICH_FIELD_WS=96, MVEXPAND_LINE_COMMENT=97, MVEXPAND_MULTILINE_COMMENT=98, + MVEXPAND_WS=99, INFO=100, SHOW_LINE_COMMENT=101, SHOW_MULTILINE_COMMENT=102, + SHOW_WS=103, COLON=104, SETTING=105, SETTING_LINE_COMMENT=106, SETTTING_MULTILINE_COMMENT=107, + SETTING_WS=108, LOOKUP_LINE_COMMENT=109, LOOKUP_MULTILINE_COMMENT=110, + LOOKUP_WS=111, LOOKUP_FIELD_LINE_COMMENT=112, LOOKUP_FIELD_MULTILINE_COMMENT=113, + 
LOOKUP_FIELD_WS=114, METRICS_LINE_COMMENT=115, METRICS_MULTILINE_COMMENT=116, + METRICS_WS=117, CLOSING_METRICS_LINE_COMMENT=118, CLOSING_METRICS_MULTILINE_COMMENT=119, CLOSING_METRICS_WS=120; public static final int - RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, - RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, - RULE_matchBooleanExpression = 7, RULE_valueExpression = 8, RULE_operatorExpression = 9, - RULE_primaryExpression = 10, RULE_functionExpression = 11, RULE_dataType = 12, - RULE_rowCommand = 13, RULE_fields = 14, RULE_field = 15, RULE_fromCommand = 16, - RULE_indexPattern = 17, RULE_clusterString = 18, RULE_indexString = 19, - RULE_metadata = 20, RULE_metadataOption = 21, RULE_deprecated_metadata = 22, - RULE_metricsCommand = 23, RULE_evalCommand = 24, RULE_statsCommand = 25, - RULE_qualifiedName = 26, RULE_qualifiedNamePattern = 27, RULE_qualifiedNamePatterns = 28, - RULE_identifier = 29, RULE_identifierPattern = 30, RULE_constant = 31, - RULE_parameter = 32, RULE_identifierOrParameter = 33, RULE_limitCommand = 34, - RULE_sortCommand = 35, RULE_orderExpression = 36, RULE_keepCommand = 37, - RULE_dropCommand = 38, RULE_renameCommand = 39, RULE_renameClause = 40, - RULE_dissectCommand = 41, RULE_grokCommand = 42, RULE_mvExpandCommand = 43, - RULE_commandOptions = 44, RULE_commandOption = 45, RULE_booleanValue = 46, - RULE_numericValue = 47, RULE_decimalValue = 48, RULE_integerValue = 49, - RULE_string = 50, RULE_comparisonOperator = 51, RULE_explainCommand = 52, - RULE_subqueryExpression = 53, RULE_showCommand = 54, RULE_enrichCommand = 55, - RULE_enrichWithClause = 56, RULE_lookupCommand = 57, RULE_inlinestatsCommand = 58; + RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, + RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, + RULE_matchBooleanExpression = 7, RULE_valueExpression = 8, 
RULE_operatorExpression = 9, + RULE_primaryExpression = 10, RULE_functionExpression = 11, RULE_functionName = 12, + RULE_dataType = 13, RULE_rowCommand = 14, RULE_fields = 15, RULE_field = 16, + RULE_fromCommand = 17, RULE_indexPattern = 18, RULE_clusterString = 19, + RULE_indexString = 20, RULE_metadata = 21, RULE_metadataOption = 22, RULE_deprecated_metadata = 23, + RULE_metricsCommand = 24, RULE_evalCommand = 25, RULE_statsCommand = 26, + RULE_qualifiedName = 27, RULE_qualifiedNamePattern = 28, RULE_qualifiedNamePatterns = 29, + RULE_identifier = 30, RULE_identifierPattern = 31, RULE_constant = 32, + RULE_parameter = 33, RULE_identifierOrParameter = 34, RULE_limitCommand = 35, + RULE_sortCommand = 36, RULE_orderExpression = 37, RULE_keepCommand = 38, + RULE_dropCommand = 39, RULE_renameCommand = 40, RULE_renameClause = 41, + RULE_dissectCommand = 42, RULE_grokCommand = 43, RULE_mvExpandCommand = 44, + RULE_commandOptions = 45, RULE_commandOption = 46, RULE_booleanValue = 47, + RULE_numericValue = 48, RULE_decimalValue = 49, RULE_integerValue = 50, + RULE_string = 51, RULE_comparisonOperator = 52, RULE_explainCommand = 53, + RULE_subqueryExpression = 54, RULE_showCommand = 55, RULE_enrichCommand = 56, + RULE_enrichWithClause = 57, RULE_lookupCommand = 58, RULE_inlinestatsCommand = 59; private static String[] makeRuleNames() { return new String[] { - "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", - "booleanExpression", "regexBooleanExpression", "matchBooleanExpression", - "valueExpression", "operatorExpression", "primaryExpression", "functionExpression", - "dataType", "rowCommand", "fields", "field", "fromCommand", "indexPattern", - "clusterString", "indexString", "metadata", "metadataOption", "deprecated_metadata", - "metricsCommand", "evalCommand", "statsCommand", "qualifiedName", "qualifiedNamePattern", - "qualifiedNamePatterns", "identifier", "identifierPattern", "constant", - "parameter", "identifierOrParameter", 
"limitCommand", "sortCommand", - "orderExpression", "keepCommand", "dropCommand", "renameCommand", "renameClause", - "dissectCommand", "grokCommand", "mvExpandCommand", "commandOptions", - "commandOption", "booleanValue", "numericValue", "decimalValue", "integerValue", - "string", "comparisonOperator", "explainCommand", "subqueryExpression", - "showCommand", "enrichCommand", "enrichWithClause", "lookupCommand", + "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", + "booleanExpression", "regexBooleanExpression", "matchBooleanExpression", + "valueExpression", "operatorExpression", "primaryExpression", "functionExpression", + "functionName", "dataType", "rowCommand", "fields", "field", "fromCommand", + "indexPattern", "clusterString", "indexString", "metadata", "metadataOption", + "deprecated_metadata", "metricsCommand", "evalCommand", "statsCommand", + "qualifiedName", "qualifiedNamePattern", "qualifiedNamePatterns", "identifier", + "identifierPattern", "constant", "parameter", "identifierOrParameter", + "limitCommand", "sortCommand", "orderExpression", "keepCommand", "dropCommand", + "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand", + "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue", + "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression", + "showCommand", "enrichCommand", "enrichWithClause", "lookupCommand", "inlinestatsCommand" }; } @@ -92,46 +104,46 @@ private static String[] makeRuleNames() { private static String[] makeLiteralNames() { return new String[] { - null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", - "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", - "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, - null, "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "'::'", - "','", "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", - 
"'like'", "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", - "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", - "'+'", "'-'", "'*'", "'/'", "'%'", null, null, "']'", null, null, null, - null, null, null, null, null, "'metadata'", null, null, null, null, null, - null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, - null, null, null, null, null, null, null, null, "'info'", null, null, + null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", + "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", + "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, + "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "'::'", "','", + "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", "'like'", + "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", "')'", + "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", + "'-'", "'*'", "'/'", "'%'", null, null, null, "']'", null, null, null, + null, null, null, null, null, "'metadata'", null, null, null, null, null, + null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, + null, null, null, null, null, null, null, null, "'info'", null, null, null, "':'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); private static String[] makeSymbolicNames() { return new String[] { - null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", - "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", - "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_MATCH", "DEV_METRICS", - "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", - "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", - "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", - "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", - "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", 
"PLUS", "MINUS", "ASTERISK", - "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", - "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", - "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", - "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", - "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", - "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", - "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", - "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", - "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", - "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", - "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", - "LOOKUP_FIELD_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", - "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", + "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", + "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "UNKNOWN_CMD", + "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", + "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", + "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT", + "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", + "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", + "PERCENT", "DEV_MATCH", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", + "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", 
"EXPR_LINE_COMMENT", + "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", + "EXPLAIN_MULTILINE_COMMENT", "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", + "FROM_MULTILINE_COMMENT", "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", + "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", + "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", + "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", + "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", + "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", + "LOOKUP_FIELD_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", + "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS" }; } @@ -219,9 +231,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(118); + setState(120); query(0); - setState(119); + setState(121); match(EOF); } } @@ -243,7 +255,7 @@ public QueryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_query; } - + @SuppressWarnings("this-escape") public QueryContext() { } public void copyFrom(QueryContext ctx) { @@ -317,11 +329,11 @@ private QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(122); + setState(124); sourceCommand(); } _ctx.stop = _input.LT(-1); - setState(129); + setState(131); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && 
_alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -332,16 +344,16 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(124); + setState(126); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(125); + setState(127); match(PIPE); - setState(126); + setState(128); processingCommand(); } - } + } } - setState(131); + setState(133); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); } @@ -399,43 +411,43 @@ public final SourceCommandContext sourceCommand() throws RecognitionException { SourceCommandContext _localctx = new SourceCommandContext(_ctx, getState()); enterRule(_localctx, 4, RULE_sourceCommand); try { - setState(138); + setState(140); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(132); + setState(134); explainCommand(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(133); + setState(135); fromCommand(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(134); + setState(136); rowCommand(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(135); + setState(137); showCommand(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(136); + setState(138); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(137); + setState(139); metricsCommand(); } break; @@ -520,108 +532,108 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(156); + setState(158); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: 
enterOuterAlt(_localctx, 1); { - setState(140); + setState(142); evalCommand(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(141); + setState(143); whereCommand(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(142); + setState(144); keepCommand(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(143); + setState(145); limitCommand(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(144); + setState(146); statsCommand(); } break; case 6: enterOuterAlt(_localctx, 6); { - setState(145); + setState(147); sortCommand(); } break; case 7: enterOuterAlt(_localctx, 7); { - setState(146); + setState(148); dropCommand(); } break; case 8: enterOuterAlt(_localctx, 8); { - setState(147); + setState(149); renameCommand(); } break; case 9: enterOuterAlt(_localctx, 9); { - setState(148); + setState(150); dissectCommand(); } break; case 10: enterOuterAlt(_localctx, 10); { - setState(149); + setState(151); grokCommand(); } break; case 11: enterOuterAlt(_localctx, 11); { - setState(150); + setState(152); enrichCommand(); } break; case 12: enterOuterAlt(_localctx, 12); { - setState(151); + setState(153); mvExpandCommand(); } break; case 13: enterOuterAlt(_localctx, 13); { - setState(152); + setState(154); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(153); + setState(155); inlinestatsCommand(); } break; case 14: enterOuterAlt(_localctx, 14); { - setState(154); + setState(156); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(155); + setState(157); lookupCommand(); } break; @@ -670,9 +682,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(158); + setState(160); match(WHERE); - setState(159); + setState(161); booleanExpression(0); } } @@ -694,7 +706,7 @@ public BooleanExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, 
invokingState); } @Override public int getRuleIndex() { return RULE_booleanExpression; } - + @SuppressWarnings("this-escape") public BooleanExpressionContext() { } public void copyFrom(BooleanExpressionContext ctx) { @@ -888,7 +900,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(191); + setState(193); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -897,9 +909,9 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(162); + setState(164); match(NOT); - setState(163); + setState(165); booleanExpression(8); } break; @@ -908,7 +920,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(164); + setState(166); valueExpression(); } break; @@ -917,7 +929,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new RegexExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(165); + setState(167); regexBooleanExpression(); } break; @@ -926,41 +938,41 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(166); - valueExpression(); setState(168); + valueExpression(); + setState(170); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(167); + setState(169); match(NOT); } } - setState(170); + setState(172); match(IN); - setState(171); + setState(173); match(LP); - setState(172); + setState(174); valueExpression(); - setState(177); + setState(179); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(173); + setState(175); match(COMMA); - setState(174); + setState(176); valueExpression(); } } - 
setState(179); + setState(181); _errHandler.sync(this); _la = _input.LA(1); } - setState(180); + setState(182); match(RP); } break; @@ -969,21 +981,21 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(182); + setState(184); valueExpression(); - setState(183); - match(IS); setState(185); + match(IS); + setState(187); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(184); + setState(186); match(NOT); } } - setState(187); + setState(189); match(NULL); } break; @@ -992,15 +1004,15 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new MatchExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(189); + setState(191); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(190); + setState(192); matchBooleanExpression(); } break; } _ctx.stop = _input.LT(-1); - setState(201); + setState(203); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1008,7 +1020,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(199); + setState(201); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -1016,11 +1028,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(193); + setState(195); if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); - setState(194); + 
setState(196); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(195); + setState(197); ((LogicalBinaryContext)_localctx).right = booleanExpression(6); } break; @@ -1029,18 +1041,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(196); + setState(198); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(197); + setState(199); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(198); + setState(200); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; } - } + } } - setState(203); + setState(205); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1095,48 +1107,48 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(218); + setState(220); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(204); - valueExpression(); setState(206); + valueExpression(); + setState(208); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(205); + setState(207); match(NOT); } } - setState(208); + setState(210); ((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(209); + setState(211); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(211); - valueExpression(); setState(213); + valueExpression(); + setState(215); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(212); + setState(214); match(NOT); } } - setState(215); + setState(217); 
((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(216); + setState(218); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1189,11 +1201,11 @@ public final MatchBooleanExpressionContext matchBooleanExpression() throws Recog try { enterOuterAlt(_localctx, 1); { - setState(220); + setState(222); valueExpression(); - setState(221); + setState(223); match(DEV_MATCH); - setState(222); + setState(224); ((MatchBooleanExpressionContext)_localctx).queryString = string(); } } @@ -1215,7 +1227,7 @@ public ValueExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_valueExpression; } - + @SuppressWarnings("this-escape") public ValueExpressionContext() { } public void copyFrom(ValueExpressionContext ctx) { @@ -1277,14 +1289,14 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 16, RULE_valueExpression); try { - setState(229); + setState(231); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(224); + setState(226); operatorExpression(0); } break; @@ -1292,11 +1304,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(225); + setState(227); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(226); + setState(228); comparisonOperator(); - setState(227); + setState(229); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1320,7 +1332,7 @@ public OperatorExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return 
RULE_operatorExpression; } - + @SuppressWarnings("this-escape") public OperatorExpressionContext() { } public void copyFrom(OperatorExpressionContext ctx) { @@ -1421,7 +1433,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _alt; enterOuterAlt(_localctx, 1); { - setState(235); + setState(237); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: @@ -1430,7 +1442,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - setState(232); + setState(234); primaryExpression(0); } break; @@ -1439,7 +1451,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(233); + setState(235); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1450,13 +1462,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(234); + setState(236); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(245); + setState(247); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1464,7 +1476,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(243); + setState(245); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1472,12 +1484,12 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; 
pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(237); + setState(239); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(238); + setState(240); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & -2305843009213693952L) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 8070450532247928832L) != 0)) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { @@ -1485,7 +1497,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(239); + setState(241); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1494,9 +1506,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(240); + setState(242); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(241); + setState(243); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1507,14 +1519,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(242); + setState(244); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } - } + } } - setState(247); + setState(249); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); } @@ -1538,7 +1550,7 @@ public PrimaryExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { 
return RULE_primaryExpression; } - + @SuppressWarnings("this-escape") public PrimaryExpressionContext() { } public void copyFrom(PrimaryExpressionContext ctx) { @@ -1672,7 +1684,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(256); + setState(258); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) { case 1: @@ -1681,7 +1693,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(249); + setState(251); constant(); } break; @@ -1690,7 +1702,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new DereferenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(250); + setState(252); qualifiedName(); } break; @@ -1699,7 +1711,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new FunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(251); + setState(253); functionExpression(); } break; @@ -1708,17 +1720,17 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new ParenthesizedExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(252); + setState(254); match(LP); - setState(253); + setState(255); booleanExpression(0); - setState(254); + setState(256); match(RP); } break; } _ctx.stop = _input.LT(-1); - setState(263); + setState(265); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1729,16 +1741,16 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc { _localctx = new InlineCastContext(new PrimaryExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); - 
setState(258); + setState(260); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(259); + setState(261); match(CAST_OP); - setState(260); + setState(262); dataType(); } - } + } } - setState(265); + setState(267); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); } @@ -1757,8 +1769,8 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc @SuppressWarnings("CheckReturnValue") public static class FunctionExpressionContext extends ParserRuleContext { - public IdentifierOrParameterContext identifierOrParameter() { - return getRuleContext(IdentifierOrParameterContext.class,0); + public FunctionNameContext functionName() { + return getRuleContext(FunctionNameContext.class,0); } public TerminalNode LP() { return getToken(EsqlBaseParser.LP, 0); } public TerminalNode RP() { return getToken(EsqlBaseParser.RP, 0); } @@ -1800,37 +1812,37 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(266); - identifierOrParameter(); - setState(267); + setState(268); + functionName(); + setState(269); match(LP); - setState(277); + setState(279); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,19,_ctx) ) { case 1: { - setState(268); + setState(270); match(ASTERISK); } break; case 2: { { - setState(269); + setState(271); booleanExpression(0); - setState(274); + setState(276); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(270); + setState(272); match(COMMA); - setState(271); + setState(273); booleanExpression(0); } } - setState(276); + setState(278); _errHandler.sync(this); _la = _input.LA(1); } @@ -1838,7 +1850,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx } break; } - setState(279); + setState(281); match(RP); } } @@ -1853,6 +1865,68 @@ public final FunctionExpressionContext functionExpression() throws 
RecognitionEx return _localctx; } + @SuppressWarnings("CheckReturnValue") + public static class FunctionNameContext extends ParserRuleContext { + public TerminalNode DEV_MATCH() { return getToken(EsqlBaseParser.DEV_MATCH, 0); } + public IdentifierOrParameterContext identifierOrParameter() { + return getRuleContext(IdentifierOrParameterContext.class,0); + } + @SuppressWarnings("this-escape") + public FunctionNameContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_functionName; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterFunctionName(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitFunctionName(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitFunctionName(this); + else return visitor.visitChildren(this); + } + } + + public final FunctionNameContext functionName() throws RecognitionException { + FunctionNameContext _localctx = new FunctionNameContext(_ctx, getState()); + enterRule(_localctx, 24, RULE_functionName); + try { + setState(286); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,20,_ctx) ) { + case 1: + enterOuterAlt(_localctx, 1); + { + setState(283); + if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); + setState(284); + match(DEV_MATCH); + } + break; + case 2: + enterOuterAlt(_localctx, 2); + { + setState(285); + identifierOrParameter(); + } + break; + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + 
@SuppressWarnings("CheckReturnValue") public static class DataTypeContext extends ParserRuleContext { @SuppressWarnings("this-escape") @@ -1860,7 +1934,7 @@ public DataTypeContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_dataType; } - + @SuppressWarnings("this-escape") public DataTypeContext() { } public void copyFrom(DataTypeContext ctx) { @@ -1891,12 +1965,12 @@ public T accept(ParseTreeVisitor visitor) { public final DataTypeContext dataType() throws RecognitionException { DataTypeContext _localctx = new DataTypeContext(_ctx, getState()); - enterRule(_localctx, 24, RULE_dataType); + enterRule(_localctx, 26, RULE_dataType); try { _localctx = new ToDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(281); + setState(288); identifier(); } } @@ -1939,13 +2013,13 @@ public T accept(ParseTreeVisitor visitor) { public final RowCommandContext rowCommand() throws RecognitionException { RowCommandContext _localctx = new RowCommandContext(_ctx, getState()); - enterRule(_localctx, 26, RULE_rowCommand); + enterRule(_localctx, 28, RULE_rowCommand); try { enterOuterAlt(_localctx, 1); { - setState(283); + setState(290); match(ROW); - setState(284); + setState(291); fields(); } } @@ -1994,30 +2068,30 @@ public T accept(ParseTreeVisitor visitor) { public final FieldsContext fields() throws RecognitionException { FieldsContext _localctx = new FieldsContext(_ctx, getState()); - enterRule(_localctx, 28, RULE_fields); + enterRule(_localctx, 30, RULE_fields); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(286); + setState(293); field(); - setState(291); + setState(298); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,20,_ctx); + _alt = getInterpreter().adaptivePredict(_input,21,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(287); + setState(294); match(COMMA); - setState(288); 
+ setState(295); field(); } - } + } } - setState(293); + setState(300); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,20,_ctx); + _alt = getInterpreter().adaptivePredict(_input,21,_ctx); } } } @@ -2063,26 +2137,26 @@ public T accept(ParseTreeVisitor visitor) { public final FieldContext field() throws RecognitionException { FieldContext _localctx = new FieldContext(_ctx, getState()); - enterRule(_localctx, 30, RULE_field); + enterRule(_localctx, 32, RULE_field); try { - setState(299); + setState(306); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,21,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,22,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(294); + setState(301); booleanExpression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(295); + setState(302); qualifiedName(); - setState(296); + setState(303); match(ASSIGN); - setState(297); + setState(304); booleanExpression(0); } break; @@ -2137,39 +2211,39 @@ public T accept(ParseTreeVisitor visitor) { public final FromCommandContext fromCommand() throws RecognitionException { FromCommandContext _localctx = new FromCommandContext(_ctx, getState()); - enterRule(_localctx, 32, RULE_fromCommand); + enterRule(_localctx, 34, RULE_fromCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(301); + setState(308); match(FROM); - setState(302); + setState(309); indexPattern(); - setState(307); + setState(314); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,22,_ctx); + _alt = getInterpreter().adaptivePredict(_input,23,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(303); + setState(310); match(COMMA); - setState(304); + setState(311); indexPattern(); } - } + } } - setState(309); + setState(316); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,22,_ctx); + _alt = 
getInterpreter().adaptivePredict(_input,23,_ctx); } - setState(311); + setState(318); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { - setState(310); + setState(317); metadata(); } break; @@ -2218,26 +2292,26 @@ public T accept(ParseTreeVisitor visitor) { public final IndexPatternContext indexPattern() throws RecognitionException { IndexPatternContext _localctx = new IndexPatternContext(_ctx, getState()); - enterRule(_localctx, 34, RULE_indexPattern); + enterRule(_localctx, 36, RULE_indexPattern); try { - setState(318); + setState(325); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,25,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(313); + setState(320); clusterString(); - setState(314); + setState(321); match(COLON); - setState(315); + setState(322); indexString(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(317); + setState(324); indexString(); } break; @@ -2279,11 +2353,11 @@ public T accept(ParseTreeVisitor visitor) { public final ClusterStringContext clusterString() throws RecognitionException { ClusterStringContext _localctx = new ClusterStringContext(_ctx, getState()); - enterRule(_localctx, 36, RULE_clusterString); + enterRule(_localctx, 38, RULE_clusterString); try { enterOuterAlt(_localctx, 1); { - setState(320); + setState(327); match(UNQUOTED_SOURCE); } } @@ -2324,12 +2398,12 @@ public T accept(ParseTreeVisitor visitor) { public final IndexStringContext indexString() throws RecognitionException { IndexStringContext _localctx = new IndexStringContext(_ctx, getState()); - enterRule(_localctx, 38, RULE_indexString); + enterRule(_localctx, 40, RULE_indexString); int _la; try { enterOuterAlt(_localctx, 1); { - setState(322); + setState(329); _la = _input.LA(1); if ( !(_la==QUOTED_STRING || _la==UNQUOTED_SOURCE) ) { 
_errHandler.recoverInline(this); @@ -2382,22 +2456,22 @@ public T accept(ParseTreeVisitor visitor) { public final MetadataContext metadata() throws RecognitionException { MetadataContext _localctx = new MetadataContext(_ctx, getState()); - enterRule(_localctx, 40, RULE_metadata); + enterRule(_localctx, 42, RULE_metadata); try { - setState(326); + setState(333); _errHandler.sync(this); switch (_input.LA(1)) { case METADATA: enterOuterAlt(_localctx, 1); { - setState(324); + setState(331); metadataOption(); } break; case OPENING_BRACKET: enterOuterAlt(_localctx, 2); { - setState(325); + setState(332); deprecated_metadata(); } break; @@ -2449,32 +2523,32 @@ public T accept(ParseTreeVisitor visitor) { public final MetadataOptionContext metadataOption() throws RecognitionException { MetadataOptionContext _localctx = new MetadataOptionContext(_ctx, getState()); - enterRule(_localctx, 42, RULE_metadataOption); + enterRule(_localctx, 44, RULE_metadataOption); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(328); + setState(335); match(METADATA); - setState(329); + setState(336); match(UNQUOTED_SOURCE); - setState(334); + setState(341); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,26,_ctx); + _alt = getInterpreter().adaptivePredict(_input,27,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(330); + setState(337); match(COMMA); - setState(331); + setState(338); match(UNQUOTED_SOURCE); } - } + } } - setState(336); + setState(343); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,26,_ctx); + _alt = getInterpreter().adaptivePredict(_input,27,_ctx); } } } @@ -2517,15 +2591,15 @@ public T accept(ParseTreeVisitor visitor) { public final Deprecated_metadataContext deprecated_metadata() throws RecognitionException { Deprecated_metadataContext _localctx = new Deprecated_metadataContext(_ctx, getState()); - enterRule(_localctx, 44, RULE_deprecated_metadata); + 
enterRule(_localctx, 46, RULE_deprecated_metadata); try { enterOuterAlt(_localctx, 1); { - setState(337); + setState(344); match(OPENING_BRACKET); - setState(338); + setState(345); metadataOption(); - setState(339); + setState(346); match(CLOSING_BRACKET); } } @@ -2584,51 +2658,51 @@ public T accept(ParseTreeVisitor visitor) { public final MetricsCommandContext metricsCommand() throws RecognitionException { MetricsCommandContext _localctx = new MetricsCommandContext(_ctx, getState()); - enterRule(_localctx, 46, RULE_metricsCommand); + enterRule(_localctx, 48, RULE_metricsCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(341); + setState(348); match(DEV_METRICS); - setState(342); + setState(349); indexPattern(); - setState(347); + setState(354); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,27,_ctx); + _alt = getInterpreter().adaptivePredict(_input,28,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(343); + setState(350); match(COMMA); - setState(344); + setState(351); indexPattern(); } - } + } } - setState(349); + setState(356); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,27,_ctx); + _alt = getInterpreter().adaptivePredict(_input,28,_ctx); } - setState(351); + setState(358); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { case 1: { - setState(350); + setState(357); ((MetricsCommandContext)_localctx).aggregates = fields(); } break; } - setState(355); + setState(362); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { case 1: { - setState(353); + setState(360); match(BY); - setState(354); + setState(361); ((MetricsCommandContext)_localctx).grouping = fields(); } break; @@ -2674,13 +2748,13 @@ public T accept(ParseTreeVisitor 
visitor) { public final EvalCommandContext evalCommand() throws RecognitionException { EvalCommandContext _localctx = new EvalCommandContext(_ctx, getState()); - enterRule(_localctx, 48, RULE_evalCommand); + enterRule(_localctx, 50, RULE_evalCommand); try { enterOuterAlt(_localctx, 1); { - setState(357); + setState(364); match(EVAL); - setState(358); + setState(365); fields(); } } @@ -2729,30 +2803,30 @@ public T accept(ParseTreeVisitor visitor) { public final StatsCommandContext statsCommand() throws RecognitionException { StatsCommandContext _localctx = new StatsCommandContext(_ctx, getState()); - enterRule(_localctx, 50, RULE_statsCommand); + enterRule(_localctx, 52, RULE_statsCommand); try { enterOuterAlt(_localctx, 1); { - setState(360); + setState(367); match(STATS); - setState(362); + setState(369); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(361); + setState(368); ((StatsCommandContext)_localctx).stats = fields(); } break; } - setState(366); + setState(373); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { case 1: { - setState(364); + setState(371); match(BY); - setState(365); + setState(372); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2804,30 +2878,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNameContext qualifiedName() throws RecognitionException { QualifiedNameContext _localctx = new QualifiedNameContext(_ctx, getState()); - enterRule(_localctx, 52, RULE_qualifiedName); + enterRule(_localctx, 54, RULE_qualifiedName); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(368); + setState(375); identifierOrParameter(); - setState(373); + setState(380); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,32,_ctx); + _alt = 
getInterpreter().adaptivePredict(_input,33,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(369); + setState(376); match(DOT); - setState(370); + setState(377); identifierOrParameter(); } - } + } } - setState(375); + setState(382); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,32,_ctx); + _alt = getInterpreter().adaptivePredict(_input,33,_ctx); } } } @@ -2876,30 +2950,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNamePatternContext qualifiedNamePattern() throws RecognitionException { QualifiedNamePatternContext _localctx = new QualifiedNamePatternContext(_ctx, getState()); - enterRule(_localctx, 54, RULE_qualifiedNamePattern); + enterRule(_localctx, 56, RULE_qualifiedNamePattern); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(376); + setState(383); identifierPattern(); - setState(381); + setState(388); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,33,_ctx); + _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(377); + setState(384); match(DOT); - setState(378); + setState(385); identifierPattern(); } - } + } } - setState(383); + setState(390); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,33,_ctx); + _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } } } @@ -2948,30 +3022,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNamePatternsContext qualifiedNamePatterns() throws RecognitionException { QualifiedNamePatternsContext _localctx = new QualifiedNamePatternsContext(_ctx, getState()); - enterRule(_localctx, 56, RULE_qualifiedNamePatterns); + enterRule(_localctx, 58, RULE_qualifiedNamePatterns); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(384); + setState(391); qualifiedNamePattern(); - setState(389); + setState(396); 
_errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,34,_ctx); + _alt = getInterpreter().adaptivePredict(_input,35,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(385); + setState(392); match(COMMA); - setState(386); + setState(393); qualifiedNamePattern(); } - } + } } - setState(391); + setState(398); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,34,_ctx); + _alt = getInterpreter().adaptivePredict(_input,35,_ctx); } } } @@ -3012,12 +3086,12 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierContext identifier() throws RecognitionException { IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); - enterRule(_localctx, 58, RULE_identifier); + enterRule(_localctx, 60, RULE_identifier); int _la; try { enterOuterAlt(_localctx, 1); { - setState(392); + setState(399); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -3068,15 +3142,15 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierPatternContext identifierPattern() throws RecognitionException { IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); - enterRule(_localctx, 60, RULE_identifierPattern); + enterRule(_localctx, 62, RULE_identifierPattern); try { - setState(396); + setState(403); _errHandler.sync(this); switch (_input.LA(1)) { case ID_PATTERN: enterOuterAlt(_localctx, 1); { - setState(394); + setState(401); match(ID_PATTERN); } break; @@ -3084,7 +3158,7 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce case NAMED_OR_POSITIONAL_PARAM: enterOuterAlt(_localctx, 2); { - setState(395); + setState(402); parameter(); } break; @@ -3110,7 +3184,7 @@ public ConstantContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_constant; } - + 
@SuppressWarnings("this-escape") public ConstantContext() { } public void copyFrom(ConstantContext ctx) { @@ -3356,17 +3430,17 @@ public T accept(ParseTreeVisitor visitor) { public final ConstantContext constant() throws RecognitionException { ConstantContext _localctx = new ConstantContext(_ctx, getState()); - enterRule(_localctx, 62, RULE_constant); + enterRule(_localctx, 64, RULE_constant); int _la; try { - setState(440); + setState(447); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,39,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,40,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(398); + setState(405); match(NULL); } break; @@ -3374,9 +3448,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(399); + setState(406); integerValue(); - setState(400); + setState(407); match(UNQUOTED_IDENTIFIER); } break; @@ -3384,7 +3458,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(402); + setState(409); decimalValue(); } break; @@ -3392,7 +3466,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(403); + setState(410); integerValue(); } break; @@ -3400,7 +3474,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(404); + setState(411); booleanValue(); } break; @@ -3408,7 +3482,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParameterContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(405); + setState(412); parameter(); } break; @@ -3416,7 +3490,7 @@ public 
final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(406); + setState(413); string(); } break; @@ -3424,27 +3498,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(407); + setState(414); match(OPENING_BRACKET); - setState(408); + setState(415); numericValue(); - setState(413); + setState(420); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(409); + setState(416); match(COMMA); - setState(410); + setState(417); numericValue(); } } - setState(415); + setState(422); _errHandler.sync(this); _la = _input.LA(1); } - setState(416); + setState(423); match(CLOSING_BRACKET); } break; @@ -3452,27 +3526,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(418); + setState(425); match(OPENING_BRACKET); - setState(419); + setState(426); booleanValue(); - setState(424); + setState(431); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(420); + setState(427); match(COMMA); - setState(421); + setState(428); booleanValue(); } } - setState(426); + setState(433); _errHandler.sync(this); _la = _input.LA(1); } - setState(427); + setState(434); match(CLOSING_BRACKET); } break; @@ -3480,27 +3554,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(429); + setState(436); match(OPENING_BRACKET); - setState(430); + setState(437); string(); - setState(435); + setState(442); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(431); + setState(438); match(COMMA); - setState(432); + setState(439); string(); } } - setState(437); + setState(444); 
_errHandler.sync(this); _la = _input.LA(1); } - setState(438); + setState(445); match(CLOSING_BRACKET); } break; @@ -3524,7 +3598,7 @@ public ParameterContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_parameter; } - + @SuppressWarnings("this-escape") public ParameterContext() { } public void copyFrom(ParameterContext ctx) { @@ -3572,16 +3646,16 @@ public T accept(ParseTreeVisitor visitor) { public final ParameterContext parameter() throws RecognitionException { ParameterContext _localctx = new ParameterContext(_ctx, getState()); - enterRule(_localctx, 64, RULE_parameter); + enterRule(_localctx, 66, RULE_parameter); try { - setState(444); + setState(451); _errHandler.sync(this); switch (_input.LA(1)) { case PARAM: _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(442); + setState(449); match(PARAM); } break; @@ -3589,7 +3663,7 @@ public final ParameterContext parameter() throws RecognitionException { _localctx = new InputNamedOrPositionalParamContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(443); + setState(450); match(NAMED_OR_POSITIONAL_PARAM); } break; @@ -3638,16 +3712,16 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierOrParameterContext identifierOrParameter() throws RecognitionException { IdentifierOrParameterContext _localctx = new IdentifierOrParameterContext(_ctx, getState()); - enterRule(_localctx, 66, RULE_identifierOrParameter); + enterRule(_localctx, 68, RULE_identifierOrParameter); try { - setState(448); + setState(455); _errHandler.sync(this); switch (_input.LA(1)) { case UNQUOTED_IDENTIFIER: case QUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(446); + setState(453); identifier(); } break; @@ -3655,7 +3729,7 @@ public final IdentifierOrParameterContext identifierOrParameter() throws Recogni case NAMED_OR_POSITIONAL_PARAM: enterOuterAlt(_localctx, 2); { - setState(447); + 
setState(454); parameter(); } break; @@ -3700,13 +3774,13 @@ public T accept(ParseTreeVisitor visitor) { public final LimitCommandContext limitCommand() throws RecognitionException { LimitCommandContext _localctx = new LimitCommandContext(_ctx, getState()); - enterRule(_localctx, 68, RULE_limitCommand); + enterRule(_localctx, 70, RULE_limitCommand); try { enterOuterAlt(_localctx, 1); { - setState(450); + setState(457); match(LIMIT); - setState(451); + setState(458); match(INTEGER_LITERAL); } } @@ -3756,32 +3830,32 @@ public T accept(ParseTreeVisitor visitor) { public final SortCommandContext sortCommand() throws RecognitionException { SortCommandContext _localctx = new SortCommandContext(_ctx, getState()); - enterRule(_localctx, 70, RULE_sortCommand); + enterRule(_localctx, 72, RULE_sortCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(453); + setState(460); match(SORT); - setState(454); + setState(461); orderExpression(); - setState(459); + setState(466); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,42,_ctx); + _alt = getInterpreter().adaptivePredict(_input,43,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(455); + setState(462); match(COMMA); - setState(456); + setState(463); orderExpression(); } - } + } } - setState(461); + setState(468); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,42,_ctx); + _alt = getInterpreter().adaptivePredict(_input,43,_ctx); } } } @@ -3830,19 +3904,19 @@ public T accept(ParseTreeVisitor visitor) { public final OrderExpressionContext orderExpression() throws RecognitionException { OrderExpressionContext _localctx = new OrderExpressionContext(_ctx, getState()); - enterRule(_localctx, 72, RULE_orderExpression); + enterRule(_localctx, 74, RULE_orderExpression); int _la; try { enterOuterAlt(_localctx, 1); { - setState(462); + setState(469); booleanExpression(0); - setState(464); + setState(471); 
_errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,44,_ctx) ) { case 1: { - setState(463); + setState(470); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -3856,14 +3930,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(468); + setState(475); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,44,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { case 1: { - setState(466); + setState(473); match(NULLS); - setState(467); + setState(474); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -3918,13 +3992,13 @@ public T accept(ParseTreeVisitor visitor) { public final KeepCommandContext keepCommand() throws RecognitionException { KeepCommandContext _localctx = new KeepCommandContext(_ctx, getState()); - enterRule(_localctx, 74, RULE_keepCommand); + enterRule(_localctx, 76, RULE_keepCommand); try { enterOuterAlt(_localctx, 1); { - setState(470); + setState(477); match(KEEP); - setState(471); + setState(478); qualifiedNamePatterns(); } } @@ -3967,13 +4041,13 @@ public T accept(ParseTreeVisitor visitor) { public final DropCommandContext dropCommand() throws RecognitionException { DropCommandContext _localctx = new DropCommandContext(_ctx, getState()); - enterRule(_localctx, 76, RULE_dropCommand); + enterRule(_localctx, 78, RULE_dropCommand); try { enterOuterAlt(_localctx, 1); { - setState(473); + setState(480); match(DROP); - setState(474); + setState(481); qualifiedNamePatterns(); } } @@ -4023,32 +4097,32 @@ public T accept(ParseTreeVisitor visitor) { public final RenameCommandContext renameCommand() throws RecognitionException { RenameCommandContext _localctx = new RenameCommandContext(_ctx, getState()); - enterRule(_localctx, 78, 
RULE_renameCommand); + enterRule(_localctx, 80, RULE_renameCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(476); + setState(483); match(RENAME); - setState(477); + setState(484); renameClause(); - setState(482); + setState(489); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,45,_ctx); + _alt = getInterpreter().adaptivePredict(_input,46,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(478); + setState(485); match(COMMA); - setState(479); + setState(486); renameClause(); } - } + } } - setState(484); + setState(491); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,45,_ctx); + _alt = getInterpreter().adaptivePredict(_input,46,_ctx); } } } @@ -4096,15 +4170,15 @@ public T accept(ParseTreeVisitor visitor) { public final RenameClauseContext renameClause() throws RecognitionException { RenameClauseContext _localctx = new RenameClauseContext(_ctx, getState()); - enterRule(_localctx, 80, RULE_renameClause); + enterRule(_localctx, 82, RULE_renameClause); try { enterOuterAlt(_localctx, 1); { - setState(485); + setState(492); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(486); + setState(493); match(AS); - setState(487); + setState(494); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -4153,22 +4227,22 @@ public T accept(ParseTreeVisitor visitor) { public final DissectCommandContext dissectCommand() throws RecognitionException { DissectCommandContext _localctx = new DissectCommandContext(_ctx, getState()); - enterRule(_localctx, 82, RULE_dissectCommand); + enterRule(_localctx, 84, RULE_dissectCommand); try { enterOuterAlt(_localctx, 1); { - setState(489); + setState(496); match(DISSECT); - setState(490); + setState(497); primaryExpression(0); - setState(491); + setState(498); string(); - setState(493); + setState(500); _errHandler.sync(this); - switch ( 
getInterpreter().adaptivePredict(_input,46,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { case 1: { - setState(492); + setState(499); commandOptions(); } break; @@ -4217,15 +4291,15 @@ public T accept(ParseTreeVisitor visitor) { public final GrokCommandContext grokCommand() throws RecognitionException { GrokCommandContext _localctx = new GrokCommandContext(_ctx, getState()); - enterRule(_localctx, 84, RULE_grokCommand); + enterRule(_localctx, 86, RULE_grokCommand); try { enterOuterAlt(_localctx, 1); { - setState(495); + setState(502); match(GROK); - setState(496); + setState(503); primaryExpression(0); - setState(497); + setState(504); string(); } } @@ -4268,13 +4342,13 @@ public T accept(ParseTreeVisitor visitor) { public final MvExpandCommandContext mvExpandCommand() throws RecognitionException { MvExpandCommandContext _localctx = new MvExpandCommandContext(_ctx, getState()); - enterRule(_localctx, 86, RULE_mvExpandCommand); + enterRule(_localctx, 88, RULE_mvExpandCommand); try { enterOuterAlt(_localctx, 1); { - setState(499); + setState(506); match(MV_EXPAND); - setState(500); + setState(507); qualifiedName(); } } @@ -4323,30 +4397,30 @@ public T accept(ParseTreeVisitor visitor) { public final CommandOptionsContext commandOptions() throws RecognitionException { CommandOptionsContext _localctx = new CommandOptionsContext(_ctx, getState()); - enterRule(_localctx, 88, RULE_commandOptions); + enterRule(_localctx, 90, RULE_commandOptions); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(502); + setState(509); commandOption(); - setState(507); + setState(514); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,47,_ctx); + _alt = getInterpreter().adaptivePredict(_input,48,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(503); + setState(510); match(COMMA); - setState(504); + setState(511); commandOption(); } - } + } } - setState(509); + 
setState(516); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,47,_ctx); + _alt = getInterpreter().adaptivePredict(_input,48,_ctx); } } } @@ -4392,15 +4466,15 @@ public T accept(ParseTreeVisitor visitor) { public final CommandOptionContext commandOption() throws RecognitionException { CommandOptionContext _localctx = new CommandOptionContext(_ctx, getState()); - enterRule(_localctx, 90, RULE_commandOption); + enterRule(_localctx, 92, RULE_commandOption); try { enterOuterAlt(_localctx, 1); { - setState(510); + setState(517); identifier(); - setState(511); + setState(518); match(ASSIGN); - setState(512); + setState(519); constant(); } } @@ -4441,12 +4515,12 @@ public T accept(ParseTreeVisitor visitor) { public final BooleanValueContext booleanValue() throws RecognitionException { BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState()); - enterRule(_localctx, 92, RULE_booleanValue); + enterRule(_localctx, 94, RULE_booleanValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(514); + setState(521); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4499,22 +4573,22 @@ public T accept(ParseTreeVisitor visitor) { public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); - enterRule(_localctx, 94, RULE_numericValue); + enterRule(_localctx, 96, RULE_numericValue); try { - setState(518); + setState(525); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,49,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(516); + setState(523); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(517); + setState(524); integerValue(); } break; @@ -4558,17 +4632,17 @@ public T accept(ParseTreeVisitor visitor) { public final DecimalValueContext decimalValue() throws RecognitionException 
{ DecimalValueContext _localctx = new DecimalValueContext(_ctx, getState()); - enterRule(_localctx, 96, RULE_decimalValue); + enterRule(_localctx, 98, RULE_decimalValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(521); + setState(528); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(520); + setState(527); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4581,7 +4655,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(523); + setState(530); match(DECIMAL_LITERAL); } } @@ -4623,17 +4697,17 @@ public T accept(ParseTreeVisitor visitor) { public final IntegerValueContext integerValue() throws RecognitionException { IntegerValueContext _localctx = new IntegerValueContext(_ctx, getState()); - enterRule(_localctx, 98, RULE_integerValue); + enterRule(_localctx, 100, RULE_integerValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(526); + setState(533); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(525); + setState(532); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4646,7 +4720,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(528); + setState(535); match(INTEGER_LITERAL); } } @@ -4686,11 +4760,11 @@ public T accept(ParseTreeVisitor visitor) { public final StringContext string() throws RecognitionException { StringContext _localctx = new StringContext(_ctx, getState()); - enterRule(_localctx, 100, RULE_string); + enterRule(_localctx, 102, RULE_string); try { enterOuterAlt(_localctx, 1); { - setState(530); + setState(537); match(QUOTED_STRING); } } @@ -4735,14 +4809,14 @@ public T accept(ParseTreeVisitor visitor) { public final ComparisonOperatorContext comparisonOperator() throws RecognitionException { ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, 
getState()); - enterRule(_localctx, 102, RULE_comparisonOperator); + enterRule(_localctx, 104, RULE_comparisonOperator); int _la; try { enterOuterAlt(_localctx, 1); { - setState(532); + setState(539); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 562949953421312000L) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 281474976710656000L) != 0)) ) { _errHandler.recoverInline(this); } else { @@ -4791,13 +4865,13 @@ public T accept(ParseTreeVisitor visitor) { public final ExplainCommandContext explainCommand() throws RecognitionException { ExplainCommandContext _localctx = new ExplainCommandContext(_ctx, getState()); - enterRule(_localctx, 104, RULE_explainCommand); + enterRule(_localctx, 106, RULE_explainCommand); try { enterOuterAlt(_localctx, 1); { - setState(534); + setState(541); match(EXPLAIN); - setState(535); + setState(542); subqueryExpression(); } } @@ -4841,15 +4915,15 @@ public T accept(ParseTreeVisitor visitor) { public final SubqueryExpressionContext subqueryExpression() throws RecognitionException { SubqueryExpressionContext _localctx = new SubqueryExpressionContext(_ctx, getState()); - enterRule(_localctx, 106, RULE_subqueryExpression); + enterRule(_localctx, 108, RULE_subqueryExpression); try { enterOuterAlt(_localctx, 1); { - setState(537); + setState(544); match(OPENING_BRACKET); - setState(538); + setState(545); query(0); - setState(539); + setState(546); match(CLOSING_BRACKET); } } @@ -4871,7 +4945,7 @@ public ShowCommandContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_showCommand; } - + @SuppressWarnings("this-escape") public ShowCommandContext() { } public void copyFrom(ShowCommandContext ctx) { @@ -4901,14 +4975,14 @@ public T accept(ParseTreeVisitor visitor) { public final ShowCommandContext showCommand() throws RecognitionException { ShowCommandContext _localctx = new ShowCommandContext(_ctx, getState()); - 
enterRule(_localctx, 108, RULE_showCommand); + enterRule(_localctx, 110, RULE_showCommand); try { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(541); + setState(548); match(SHOW); - setState(542); + setState(549); match(INFO); } } @@ -4966,53 +5040,53 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichCommandContext enrichCommand() throws RecognitionException { EnrichCommandContext _localctx = new EnrichCommandContext(_ctx, getState()); - enterRule(_localctx, 110, RULE_enrichCommand); + enterRule(_localctx, 112, RULE_enrichCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(544); + setState(551); match(ENRICH); - setState(545); + setState(552); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(548); + setState(555); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,51,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,52,_ctx) ) { case 1: { - setState(546); + setState(553); match(ON); - setState(547); + setState(554); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(559); + setState(566); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,53,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,54,_ctx) ) { case 1: { - setState(550); + setState(557); match(WITH); - setState(551); + setState(558); enrichWithClause(); - setState(556); + setState(563); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,52,_ctx); + _alt = getInterpreter().adaptivePredict(_input,53,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(552); + setState(559); match(COMMA); - setState(553); + setState(560); enrichWithClause(); } - } + } } - setState(558); + setState(565); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,52,_ctx); + _alt = 
getInterpreter().adaptivePredict(_input,53,_ctx); } } break; @@ -5063,23 +5137,23 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichWithClauseContext enrichWithClause() throws RecognitionException { EnrichWithClauseContext _localctx = new EnrichWithClauseContext(_ctx, getState()); - enterRule(_localctx, 112, RULE_enrichWithClause); + enterRule(_localctx, 114, RULE_enrichWithClause); try { enterOuterAlt(_localctx, 1); { - setState(564); + setState(571); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,54,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,55,_ctx) ) { case 1: { - setState(561); + setState(568); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(562); + setState(569); match(ASSIGN); } break; } - setState(566); + setState(573); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -5128,17 +5202,17 @@ public T accept(ParseTreeVisitor visitor) { public final LookupCommandContext lookupCommand() throws RecognitionException { LookupCommandContext _localctx = new LookupCommandContext(_ctx, getState()); - enterRule(_localctx, 114, RULE_lookupCommand); + enterRule(_localctx, 116, RULE_lookupCommand); try { enterOuterAlt(_localctx, 1); { - setState(568); + setState(575); match(DEV_LOOKUP); - setState(569); + setState(576); ((LookupCommandContext)_localctx).tableName = indexPattern(); - setState(570); + setState(577); match(ON); - setState(571); + setState(578); ((LookupCommandContext)_localctx).matchFields = qualifiedNamePatterns(); } } @@ -5187,22 +5261,22 @@ public T accept(ParseTreeVisitor visitor) { public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionException { InlinestatsCommandContext _localctx = new InlinestatsCommandContext(_ctx, getState()); - enterRule(_localctx, 116, RULE_inlinestatsCommand); + enterRule(_localctx, 118, RULE_inlinestatsCommand); try { enterOuterAlt(_localctx, 1); { - setState(573); + 
setState(580); match(DEV_INLINESTATS); - setState(574); + setState(581); ((InlinestatsCommandContext)_localctx).stats = fields(); - setState(577); + setState(584); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,55,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,56,_ctx) ) { case 1: { - setState(575); + setState(582); match(BY); - setState(576); + setState(583); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -5234,6 +5308,8 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return operatorExpression_sempred((OperatorExpressionContext)_localctx, predIndex); case 10: return primaryExpression_sempred((PrimaryExpressionContext)_localctx, predIndex); + case 12: + return functionName_sempred((FunctionNameContext)_localctx, predIndex); } return true; } @@ -5287,9 +5363,16 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in } return true; } + private boolean functionName_sempred(FunctionNameContext _localctx, int predIndex) { + switch (predIndex) { + case 10: + return this.isDevVersion(); + } + return true; + } public static final String _serializedATN = - "\u0004\u0001x\u0244\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0004\u0001x\u024b\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ @@ -5304,360 +5387,361 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0002"+ "2\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u0002"+ - "7\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0001\u0000\u0001\u0000"+ - 
"\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0005\u0001\u0080\b\u0001\n\u0001\f\u0001\u0083\t\u0001\u0001"+ - "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003"+ - "\u0002\u008b\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003"+ - "\u0003\u009d\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003"+ - "\u0005\u00a9\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0005\u0005\u00b0\b\u0005\n\u0005\f\u0005\u00b3\t\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00ba\b\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00c0\b\u0005"+ + "7\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007;\u0001"+ + "\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0005\u0001\u0082\b\u0001\n\u0001\f\u0001"+ + "\u0085\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ + "\u0001\u0002\u0003\u0002\u008d\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0003\u0003\u009f\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004"+ "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0005\u0005\u00c8\b\u0005\n\u0005\f\u0005\u00cb\t\u0005\u0001\u0006\u0001"+ - "\u0006\u0003\u0006\u00cf\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0001\u0006\u0003\u0006\u00d6\b\u0006\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0003\u0006\u00db\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - 
"\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0003\b\u00e6\b\b\u0001"+ - "\t\u0001\t\u0001\t\u0001\t\u0003\t\u00ec\b\t\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0001\t\u0001\t\u0005\t\u00f4\b\t\n\t\f\t\u00f7\t\t\u0001\n\u0001\n"+ - "\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0003\n\u0101\b\n\u0001"+ - "\n\u0001\n\u0001\n\u0005\n\u0106\b\n\n\n\f\n\u0109\t\n\u0001\u000b\u0001"+ - "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0005\u000b\u0111"+ - "\b\u000b\n\u000b\f\u000b\u0114\t\u000b\u0003\u000b\u0116\b\u000b\u0001"+ - "\u000b\u0001\u000b\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0005\u000e\u0122\b\u000e\n\u000e\f\u000e\u0125"+ - "\t\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0003"+ - "\u000f\u012c\b\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0005"+ - "\u0010\u0132\b\u0010\n\u0010\f\u0010\u0135\t\u0010\u0001\u0010\u0003\u0010"+ - "\u0138\b\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ - "\u0003\u0011\u013f\b\u0011\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013"+ - "\u0001\u0014\u0001\u0014\u0003\u0014\u0147\b\u0014\u0001\u0015\u0001\u0015"+ - "\u0001\u0015\u0001\u0015\u0005\u0015\u014d\b\u0015\n\u0015\f\u0015\u0150"+ - "\t\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001"+ - "\u0017\u0001\u0017\u0001\u0017\u0005\u0017\u015a\b\u0017\n\u0017\f\u0017"+ - "\u015d\t\u0017\u0001\u0017\u0003\u0017\u0160\b\u0017\u0001\u0017\u0001"+ - "\u0017\u0003\u0017\u0164\b\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001"+ - "\u0019\u0001\u0019\u0003\u0019\u016b\b\u0019\u0001\u0019\u0001\u0019\u0003"+ - "\u0019\u016f\b\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u0174"+ - "\b\u001a\n\u001a\f\u001a\u0177\t\u001a\u0001\u001b\u0001\u001b\u0001\u001b"+ - "\u0005\u001b\u017c\b\u001b\n\u001b\f\u001b\u017f\t\u001b\u0001\u001c\u0001"+ - "\u001c\u0001\u001c\u0005\u001c\u0184\b\u001c\n\u001c\f\u001c\u0187\t\u001c"+ - 
"\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0003\u001e\u018d\b\u001e"+ - "\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f"+ - "\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f"+ - "\u0001\u001f\u0005\u001f\u019c\b\u001f\n\u001f\f\u001f\u019f\t\u001f\u0001"+ - "\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0005"+ - "\u001f\u01a7\b\u001f\n\u001f\f\u001f\u01aa\t\u001f\u0001\u001f\u0001\u001f"+ - "\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01b2\b\u001f"+ - "\n\u001f\f\u001f\u01b5\t\u001f\u0001\u001f\u0001\u001f\u0003\u001f\u01b9"+ - "\b\u001f\u0001 \u0001 \u0003 \u01bd\b \u0001!\u0001!\u0003!\u01c1\b!\u0001"+ - "\"\u0001\"\u0001\"\u0001#\u0001#\u0001#\u0001#\u0005#\u01ca\b#\n#\f#\u01cd"+ - "\t#\u0001$\u0001$\u0003$\u01d1\b$\u0001$\u0001$\u0003$\u01d5\b$\u0001"+ - "%\u0001%\u0001%\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001\'\u0001\'\u0005"+ - "\'\u01e1\b\'\n\'\f\'\u01e4\t\'\u0001(\u0001(\u0001(\u0001(\u0001)\u0001"+ - ")\u0001)\u0001)\u0003)\u01ee\b)\u0001*\u0001*\u0001*\u0001*\u0001+\u0001"+ - "+\u0001+\u0001,\u0001,\u0001,\u0005,\u01fa\b,\n,\f,\u01fd\t,\u0001-\u0001"+ - "-\u0001-\u0001-\u0001.\u0001.\u0001/\u0001/\u0003/\u0207\b/\u00010\u0003"+ - "0\u020a\b0\u00010\u00010\u00011\u00031\u020f\b1\u00011\u00011\u00012\u0001"+ - "2\u00013\u00013\u00014\u00014\u00014\u00015\u00015\u00015\u00015\u0001"+ - "6\u00016\u00016\u00017\u00017\u00017\u00017\u00037\u0225\b7\u00017\u0001"+ - "7\u00017\u00017\u00057\u022b\b7\n7\f7\u022e\t7\u00037\u0230\b7\u00018"+ - "\u00018\u00018\u00038\u0235\b8\u00018\u00018\u00019\u00019\u00019\u0001"+ - "9\u00019\u0001:\u0001:\u0001:\u0001:\u0003:\u0242\b:\u0001:\u0000\u0004"+ - "\u0002\n\u0012\u0014;\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012"+ - "\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\"+ - "^`bdfhjlnprt\u0000\b\u0001\u0000;<\u0001\u0000=?\u0002\u0000\u001a\u001a"+ - 
"LL\u0001\u0000CD\u0002\u0000\u001f\u001f##\u0002\u0000&&))\u0002\u0000"+ - "%%33\u0002\u0000446:\u025e\u0000v\u0001\u0000\u0000\u0000\u0002y\u0001"+ - "\u0000\u0000\u0000\u0004\u008a\u0001\u0000\u0000\u0000\u0006\u009c\u0001"+ - "\u0000\u0000\u0000\b\u009e\u0001\u0000\u0000\u0000\n\u00bf\u0001\u0000"+ - "\u0000\u0000\f\u00da\u0001\u0000\u0000\u0000\u000e\u00dc\u0001\u0000\u0000"+ - "\u0000\u0010\u00e5\u0001\u0000\u0000\u0000\u0012\u00eb\u0001\u0000\u0000"+ - "\u0000\u0014\u0100\u0001\u0000\u0000\u0000\u0016\u010a\u0001\u0000\u0000"+ - "\u0000\u0018\u0119\u0001\u0000\u0000\u0000\u001a\u011b\u0001\u0000\u0000"+ - "\u0000\u001c\u011e\u0001\u0000\u0000\u0000\u001e\u012b\u0001\u0000\u0000"+ - "\u0000 \u012d\u0001\u0000\u0000\u0000\"\u013e\u0001\u0000\u0000\u0000"+ - "$\u0140\u0001\u0000\u0000\u0000&\u0142\u0001\u0000\u0000\u0000(\u0146"+ - "\u0001\u0000\u0000\u0000*\u0148\u0001\u0000\u0000\u0000,\u0151\u0001\u0000"+ - "\u0000\u0000.\u0155\u0001\u0000\u0000\u00000\u0165\u0001\u0000\u0000\u0000"+ - "2\u0168\u0001\u0000\u0000\u00004\u0170\u0001\u0000\u0000\u00006\u0178"+ - "\u0001\u0000\u0000\u00008\u0180\u0001\u0000\u0000\u0000:\u0188\u0001\u0000"+ - "\u0000\u0000<\u018c\u0001\u0000\u0000\u0000>\u01b8\u0001\u0000\u0000\u0000"+ - "@\u01bc\u0001\u0000\u0000\u0000B\u01c0\u0001\u0000\u0000\u0000D\u01c2"+ - "\u0001\u0000\u0000\u0000F\u01c5\u0001\u0000\u0000\u0000H\u01ce\u0001\u0000"+ - "\u0000\u0000J\u01d6\u0001\u0000\u0000\u0000L\u01d9\u0001\u0000\u0000\u0000"+ - "N\u01dc\u0001\u0000\u0000\u0000P\u01e5\u0001\u0000\u0000\u0000R\u01e9"+ - "\u0001\u0000\u0000\u0000T\u01ef\u0001\u0000\u0000\u0000V\u01f3\u0001\u0000"+ - "\u0000\u0000X\u01f6\u0001\u0000\u0000\u0000Z\u01fe\u0001\u0000\u0000\u0000"+ - "\\\u0202\u0001\u0000\u0000\u0000^\u0206\u0001\u0000\u0000\u0000`\u0209"+ - "\u0001\u0000\u0000\u0000b\u020e\u0001\u0000\u0000\u0000d\u0212\u0001\u0000"+ - "\u0000\u0000f\u0214\u0001\u0000\u0000\u0000h\u0216\u0001\u0000\u0000\u0000"+ - 
"j\u0219\u0001\u0000\u0000\u0000l\u021d\u0001\u0000\u0000\u0000n\u0220"+ - "\u0001\u0000\u0000\u0000p\u0234\u0001\u0000\u0000\u0000r\u0238\u0001\u0000"+ - "\u0000\u0000t\u023d\u0001\u0000\u0000\u0000vw\u0003\u0002\u0001\u0000"+ - "wx\u0005\u0000\u0000\u0001x\u0001\u0001\u0000\u0000\u0000yz\u0006\u0001"+ - "\uffff\uffff\u0000z{\u0003\u0004\u0002\u0000{\u0081\u0001\u0000\u0000"+ - "\u0000|}\n\u0001\u0000\u0000}~\u0005\u0019\u0000\u0000~\u0080\u0003\u0006"+ - "\u0003\u0000\u007f|\u0001\u0000\u0000\u0000\u0080\u0083\u0001\u0000\u0000"+ - "\u0000\u0081\u007f\u0001\u0000\u0000\u0000\u0081\u0082\u0001\u0000\u0000"+ - "\u0000\u0082\u0003\u0001\u0000\u0000\u0000\u0083\u0081\u0001\u0000\u0000"+ - "\u0000\u0084\u008b\u0003h4\u0000\u0085\u008b\u0003 \u0010\u0000\u0086"+ - "\u008b\u0003\u001a\r\u0000\u0087\u008b\u0003l6\u0000\u0088\u0089\u0004"+ - "\u0002\u0001\u0000\u0089\u008b\u0003.\u0017\u0000\u008a\u0084\u0001\u0000"+ - "\u0000\u0000\u008a\u0085\u0001\u0000\u0000\u0000\u008a\u0086\u0001\u0000"+ - "\u0000\u0000\u008a\u0087\u0001\u0000\u0000\u0000\u008a\u0088\u0001\u0000"+ - "\u0000\u0000\u008b\u0005\u0001\u0000\u0000\u0000\u008c\u009d\u00030\u0018"+ - "\u0000\u008d\u009d\u0003\b\u0004\u0000\u008e\u009d\u0003J%\u0000\u008f"+ - "\u009d\u0003D\"\u0000\u0090\u009d\u00032\u0019\u0000\u0091\u009d\u0003"+ - "F#\u0000\u0092\u009d\u0003L&\u0000\u0093\u009d\u0003N\'\u0000\u0094\u009d"+ - "\u0003R)\u0000\u0095\u009d\u0003T*\u0000\u0096\u009d\u0003n7\u0000\u0097"+ - "\u009d\u0003V+\u0000\u0098\u0099\u0004\u0003\u0002\u0000\u0099\u009d\u0003"+ - "t:\u0000\u009a\u009b\u0004\u0003\u0003\u0000\u009b\u009d\u0003r9\u0000"+ - "\u009c\u008c\u0001\u0000\u0000\u0000\u009c\u008d\u0001\u0000\u0000\u0000"+ - "\u009c\u008e\u0001\u0000\u0000\u0000\u009c\u008f\u0001\u0000\u0000\u0000"+ - "\u009c\u0090\u0001\u0000\u0000\u0000\u009c\u0091\u0001\u0000\u0000\u0000"+ - "\u009c\u0092\u0001\u0000\u0000\u0000\u009c\u0093\u0001\u0000\u0000\u0000"+ - 
"\u009c\u0094\u0001\u0000\u0000\u0000\u009c\u0095\u0001\u0000\u0000\u0000"+ - "\u009c\u0096\u0001\u0000\u0000\u0000\u009c\u0097\u0001\u0000\u0000\u0000"+ - "\u009c\u0098\u0001\u0000\u0000\u0000\u009c\u009a\u0001\u0000\u0000\u0000"+ - "\u009d\u0007\u0001\u0000\u0000\u0000\u009e\u009f\u0005\u0010\u0000\u0000"+ - "\u009f\u00a0\u0003\n\u0005\u0000\u00a0\t\u0001\u0000\u0000\u0000\u00a1"+ - "\u00a2\u0006\u0005\uffff\uffff\u0000\u00a2\u00a3\u0005,\u0000\u0000\u00a3"+ - "\u00c0\u0003\n\u0005\b\u00a4\u00c0\u0003\u0010\b\u0000\u00a5\u00c0\u0003"+ - "\f\u0006\u0000\u00a6\u00a8\u0003\u0010\b\u0000\u00a7\u00a9\u0005,\u0000"+ - "\u0000\u00a8\u00a7\u0001\u0000\u0000\u0000\u00a8\u00a9\u0001\u0000\u0000"+ - "\u0000\u00a9\u00aa\u0001\u0000\u0000\u0000\u00aa\u00ab\u0005\'\u0000\u0000"+ - "\u00ab\u00ac\u0005+\u0000\u0000\u00ac\u00b1\u0003\u0010\b\u0000\u00ad"+ - "\u00ae\u0005\"\u0000\u0000\u00ae\u00b0\u0003\u0010\b\u0000\u00af\u00ad"+ - "\u0001\u0000\u0000\u0000\u00b0\u00b3\u0001\u0000\u0000\u0000\u00b1\u00af"+ - "\u0001\u0000\u0000\u0000\u00b1\u00b2\u0001\u0000\u0000\u0000\u00b2\u00b4"+ - "\u0001\u0000\u0000\u0000\u00b3\u00b1\u0001\u0000\u0000\u0000\u00b4\u00b5"+ - "\u00052\u0000\u0000\u00b5\u00c0\u0001\u0000\u0000\u0000\u00b6\u00b7\u0003"+ - "\u0010\b\u0000\u00b7\u00b9\u0005(\u0000\u0000\u00b8\u00ba\u0005,\u0000"+ - "\u0000\u00b9\u00b8\u0001\u0000\u0000\u0000\u00b9\u00ba\u0001\u0000\u0000"+ - "\u0000\u00ba\u00bb\u0001\u0000\u0000\u0000\u00bb\u00bc\u0005-\u0000\u0000"+ - "\u00bc\u00c0\u0001\u0000\u0000\u0000\u00bd\u00be\u0004\u0005\u0004\u0000"+ - "\u00be\u00c0\u0003\u000e\u0007\u0000\u00bf\u00a1\u0001\u0000\u0000\u0000"+ - "\u00bf\u00a4\u0001\u0000\u0000\u0000\u00bf\u00a5\u0001\u0000\u0000\u0000"+ - "\u00bf\u00a6\u0001\u0000\u0000\u0000\u00bf\u00b6\u0001\u0000\u0000\u0000"+ - "\u00bf\u00bd\u0001\u0000\u0000\u0000\u00c0\u00c9\u0001\u0000\u0000\u0000"+ - "\u00c1\u00c2\n\u0005\u0000\u0000\u00c2\u00c3\u0005\u001e\u0000\u0000\u00c3"+ - 
"\u00c8\u0003\n\u0005\u0006\u00c4\u00c5\n\u0004\u0000\u0000\u00c5\u00c6"+ - "\u0005/\u0000\u0000\u00c6\u00c8\u0003\n\u0005\u0005\u00c7\u00c1\u0001"+ - "\u0000\u0000\u0000\u00c7\u00c4\u0001\u0000\u0000\u0000\u00c8\u00cb\u0001"+ - "\u0000\u0000\u0000\u00c9\u00c7\u0001\u0000\u0000\u0000\u00c9\u00ca\u0001"+ - "\u0000\u0000\u0000\u00ca\u000b\u0001\u0000\u0000\u0000\u00cb\u00c9\u0001"+ - "\u0000\u0000\u0000\u00cc\u00ce\u0003\u0010\b\u0000\u00cd\u00cf\u0005,"+ - "\u0000\u0000\u00ce\u00cd\u0001\u0000\u0000\u0000\u00ce\u00cf\u0001\u0000"+ - "\u0000\u0000\u00cf\u00d0\u0001\u0000\u0000\u0000\u00d0\u00d1\u0005*\u0000"+ - "\u0000\u00d1\u00d2\u0003d2\u0000\u00d2\u00db\u0001\u0000\u0000\u0000\u00d3"+ - "\u00d5\u0003\u0010\b\u0000\u00d4\u00d6\u0005,\u0000\u0000\u00d5\u00d4"+ - "\u0001\u0000\u0000\u0000\u00d5\u00d6\u0001\u0000\u0000\u0000\u00d6\u00d7"+ - "\u0001\u0000\u0000\u0000\u00d7\u00d8\u00051\u0000\u0000\u00d8\u00d9\u0003"+ - "d2\u0000\u00d9\u00db\u0001\u0000\u0000\u0000\u00da\u00cc\u0001\u0000\u0000"+ - "\u0000\u00da\u00d3\u0001\u0000\u0000\u0000\u00db\r\u0001\u0000\u0000\u0000"+ - "\u00dc\u00dd\u0003\u0010\b\u0000\u00dd\u00de\u0005\u0013\u0000\u0000\u00de"+ - "\u00df\u0003d2\u0000\u00df\u000f\u0001\u0000\u0000\u0000\u00e0\u00e6\u0003"+ - "\u0012\t\u0000\u00e1\u00e2\u0003\u0012\t\u0000\u00e2\u00e3\u0003f3\u0000"+ - "\u00e3\u00e4\u0003\u0012\t\u0000\u00e4\u00e6\u0001\u0000\u0000\u0000\u00e5"+ - "\u00e0\u0001\u0000\u0000\u0000\u00e5\u00e1\u0001\u0000\u0000\u0000\u00e6"+ - "\u0011\u0001\u0000\u0000\u0000\u00e7\u00e8\u0006\t\uffff\uffff\u0000\u00e8"+ - "\u00ec\u0003\u0014\n\u0000\u00e9\u00ea\u0007\u0000\u0000\u0000\u00ea\u00ec"+ - "\u0003\u0012\t\u0003\u00eb\u00e7\u0001\u0000\u0000\u0000\u00eb\u00e9\u0001"+ - "\u0000\u0000\u0000\u00ec\u00f5\u0001\u0000\u0000\u0000\u00ed\u00ee\n\u0002"+ - "\u0000\u0000\u00ee\u00ef\u0007\u0001\u0000\u0000\u00ef\u00f4\u0003\u0012"+ - "\t\u0003\u00f0\u00f1\n\u0001\u0000\u0000\u00f1\u00f2\u0007\u0000\u0000"+ - 
"\u0000\u00f2\u00f4\u0003\u0012\t\u0002\u00f3\u00ed\u0001\u0000\u0000\u0000"+ - "\u00f3\u00f0\u0001\u0000\u0000\u0000\u00f4\u00f7\u0001\u0000\u0000\u0000"+ - "\u00f5\u00f3\u0001\u0000\u0000\u0000\u00f5\u00f6\u0001\u0000\u0000\u0000"+ - "\u00f6\u0013\u0001\u0000\u0000\u0000\u00f7\u00f5\u0001\u0000\u0000\u0000"+ - "\u00f8\u00f9\u0006\n\uffff\uffff\u0000\u00f9\u0101\u0003>\u001f\u0000"+ - "\u00fa\u0101\u00034\u001a\u0000\u00fb\u0101\u0003\u0016\u000b\u0000\u00fc"+ - "\u00fd\u0005+\u0000\u0000\u00fd\u00fe\u0003\n\u0005\u0000\u00fe\u00ff"+ - "\u00052\u0000\u0000\u00ff\u0101\u0001\u0000\u0000\u0000\u0100\u00f8\u0001"+ - "\u0000\u0000\u0000\u0100\u00fa\u0001\u0000\u0000\u0000\u0100\u00fb\u0001"+ - "\u0000\u0000\u0000\u0100\u00fc\u0001\u0000\u0000\u0000\u0101\u0107\u0001"+ - "\u0000\u0000\u0000\u0102\u0103\n\u0001\u0000\u0000\u0103\u0104\u0005!"+ - "\u0000\u0000\u0104\u0106\u0003\u0018\f\u0000\u0105\u0102\u0001\u0000\u0000"+ - "\u0000\u0106\u0109\u0001\u0000\u0000\u0000\u0107\u0105\u0001\u0000\u0000"+ - "\u0000\u0107\u0108\u0001\u0000\u0000\u0000\u0108\u0015\u0001\u0000\u0000"+ - "\u0000\u0109\u0107\u0001\u0000\u0000\u0000\u010a\u010b\u0003B!\u0000\u010b"+ - "\u0115\u0005+\u0000\u0000\u010c\u0116\u0005=\u0000\u0000\u010d\u0112\u0003"+ - "\n\u0005\u0000\u010e\u010f\u0005\"\u0000\u0000\u010f\u0111\u0003\n\u0005"+ - "\u0000\u0110\u010e\u0001\u0000\u0000\u0000\u0111\u0114\u0001\u0000\u0000"+ - "\u0000\u0112\u0110\u0001\u0000\u0000\u0000\u0112\u0113\u0001\u0000\u0000"+ - "\u0000\u0113\u0116\u0001\u0000\u0000\u0000\u0114\u0112\u0001\u0000\u0000"+ - "\u0000\u0115\u010c\u0001\u0000\u0000\u0000\u0115\u010d\u0001\u0000\u0000"+ - "\u0000\u0115\u0116\u0001\u0000\u0000\u0000\u0116\u0117\u0001\u0000\u0000"+ - "\u0000\u0117\u0118\u00052\u0000\u0000\u0118\u0017\u0001\u0000\u0000\u0000"+ - "\u0119\u011a\u0003:\u001d\u0000\u011a\u0019\u0001\u0000\u0000\u0000\u011b"+ - "\u011c\u0005\f\u0000\u0000\u011c\u011d\u0003\u001c\u000e\u0000\u011d\u001b"+ - 
"\u0001\u0000\u0000\u0000\u011e\u0123\u0003\u001e\u000f\u0000\u011f\u0120"+ - "\u0005\"\u0000\u0000\u0120\u0122\u0003\u001e\u000f\u0000\u0121\u011f\u0001"+ - "\u0000\u0000\u0000\u0122\u0125\u0001\u0000\u0000\u0000\u0123\u0121\u0001"+ - "\u0000\u0000\u0000\u0123\u0124\u0001\u0000\u0000\u0000\u0124\u001d\u0001"+ - "\u0000\u0000\u0000\u0125\u0123\u0001\u0000\u0000\u0000\u0126\u012c\u0003"+ - "\n\u0005\u0000\u0127\u0128\u00034\u001a\u0000\u0128\u0129\u0005 \u0000"+ - "\u0000\u0129\u012a\u0003\n\u0005\u0000\u012a\u012c\u0001\u0000\u0000\u0000"+ - "\u012b\u0126\u0001\u0000\u0000\u0000\u012b\u0127\u0001\u0000\u0000\u0000"+ - "\u012c\u001f\u0001\u0000\u0000\u0000\u012d\u012e\u0005\u0006\u0000\u0000"+ - "\u012e\u0133\u0003\"\u0011\u0000\u012f\u0130\u0005\"\u0000\u0000\u0130"+ - "\u0132\u0003\"\u0011\u0000\u0131\u012f\u0001\u0000\u0000\u0000\u0132\u0135"+ - "\u0001\u0000\u0000\u0000\u0133\u0131\u0001\u0000\u0000\u0000\u0133\u0134"+ - "\u0001\u0000\u0000\u0000\u0134\u0137\u0001\u0000\u0000\u0000\u0135\u0133"+ - "\u0001\u0000\u0000\u0000\u0136\u0138\u0003(\u0014\u0000\u0137\u0136\u0001"+ - "\u0000\u0000\u0000\u0137\u0138\u0001\u0000\u0000\u0000\u0138!\u0001\u0000"+ - "\u0000\u0000\u0139\u013a\u0003$\u0012\u0000\u013a\u013b\u0005h\u0000\u0000"+ - "\u013b\u013c\u0003&\u0013\u0000\u013c\u013f\u0001\u0000\u0000\u0000\u013d"+ - "\u013f\u0003&\u0013\u0000\u013e\u0139\u0001\u0000\u0000\u0000\u013e\u013d"+ - "\u0001\u0000\u0000\u0000\u013f#\u0001\u0000\u0000\u0000\u0140\u0141\u0005"+ - "L\u0000\u0000\u0141%\u0001\u0000\u0000\u0000\u0142\u0143\u0007\u0002\u0000"+ - "\u0000\u0143\'\u0001\u0000\u0000\u0000\u0144\u0147\u0003*\u0015\u0000"+ - "\u0145\u0147\u0003,\u0016\u0000\u0146\u0144\u0001\u0000\u0000\u0000\u0146"+ - "\u0145\u0001\u0000\u0000\u0000\u0147)\u0001\u0000\u0000\u0000\u0148\u0149"+ - "\u0005K\u0000\u0000\u0149\u014e\u0005L\u0000\u0000\u014a\u014b\u0005\""+ - "\u0000\u0000\u014b\u014d\u0005L\u0000\u0000\u014c\u014a\u0001\u0000\u0000"+ - 
"\u0000\u014d\u0150\u0001\u0000\u0000\u0000\u014e\u014c\u0001\u0000\u0000"+ - "\u0000\u014e\u014f\u0001\u0000\u0000\u0000\u014f+\u0001\u0000\u0000\u0000"+ - "\u0150\u014e\u0001\u0000\u0000\u0000\u0151\u0152\u0005A\u0000\u0000\u0152"+ - "\u0153\u0003*\u0015\u0000\u0153\u0154\u0005B\u0000\u0000\u0154-\u0001"+ - "\u0000\u0000\u0000\u0155\u0156\u0005\u0014\u0000\u0000\u0156\u015b\u0003"+ - "\"\u0011\u0000\u0157\u0158\u0005\"\u0000\u0000\u0158\u015a\u0003\"\u0011"+ - "\u0000\u0159\u0157\u0001\u0000\u0000\u0000\u015a\u015d\u0001\u0000\u0000"+ - "\u0000\u015b\u0159\u0001\u0000\u0000\u0000\u015b\u015c\u0001\u0000\u0000"+ - "\u0000\u015c\u015f\u0001\u0000\u0000\u0000\u015d\u015b\u0001\u0000\u0000"+ - "\u0000\u015e\u0160\u0003\u001c\u000e\u0000\u015f\u015e\u0001\u0000\u0000"+ - "\u0000\u015f\u0160\u0001\u0000\u0000\u0000\u0160\u0163\u0001\u0000\u0000"+ - "\u0000\u0161\u0162\u0005\u001d\u0000\u0000\u0162\u0164\u0003\u001c\u000e"+ - "\u0000\u0163\u0161\u0001\u0000\u0000\u0000\u0163\u0164\u0001\u0000\u0000"+ - "\u0000\u0164/\u0001\u0000\u0000\u0000\u0165\u0166\u0005\u0004\u0000\u0000"+ - "\u0166\u0167\u0003\u001c\u000e\u0000\u01671\u0001\u0000\u0000\u0000\u0168"+ - "\u016a\u0005\u000f\u0000\u0000\u0169\u016b\u0003\u001c\u000e\u0000\u016a"+ - "\u0169\u0001\u0000\u0000\u0000\u016a\u016b\u0001\u0000\u0000\u0000\u016b"+ - "\u016e\u0001\u0000\u0000\u0000\u016c\u016d\u0005\u001d\u0000\u0000\u016d"+ - "\u016f\u0003\u001c\u000e\u0000\u016e\u016c\u0001\u0000\u0000\u0000\u016e"+ - "\u016f\u0001\u0000\u0000\u0000\u016f3\u0001\u0000\u0000\u0000\u0170\u0175"+ - "\u0003B!\u0000\u0171\u0172\u0005$\u0000\u0000\u0172\u0174\u0003B!\u0000"+ - "\u0173\u0171\u0001\u0000\u0000\u0000\u0174\u0177\u0001\u0000\u0000\u0000"+ - "\u0175\u0173\u0001\u0000\u0000\u0000\u0175\u0176\u0001\u0000\u0000\u0000"+ - "\u01765\u0001\u0000\u0000\u0000\u0177\u0175\u0001\u0000\u0000\u0000\u0178"+ - "\u017d\u0003<\u001e\u0000\u0179\u017a\u0005$\u0000\u0000\u017a\u017c\u0003"+ - 
"<\u001e\u0000\u017b\u0179\u0001\u0000\u0000\u0000\u017c\u017f\u0001\u0000"+ - "\u0000\u0000\u017d\u017b\u0001\u0000\u0000\u0000\u017d\u017e\u0001\u0000"+ - "\u0000\u0000\u017e7\u0001\u0000\u0000\u0000\u017f\u017d\u0001\u0000\u0000"+ - "\u0000\u0180\u0185\u00036\u001b\u0000\u0181\u0182\u0005\"\u0000\u0000"+ - "\u0182\u0184\u00036\u001b\u0000\u0183\u0181\u0001\u0000\u0000\u0000\u0184"+ - "\u0187\u0001\u0000\u0000\u0000\u0185\u0183\u0001\u0000\u0000\u0000\u0185"+ - "\u0186\u0001\u0000\u0000\u0000\u01869\u0001\u0000\u0000\u0000\u0187\u0185"+ - "\u0001\u0000\u0000\u0000\u0188\u0189\u0007\u0003\u0000\u0000\u0189;\u0001"+ - "\u0000\u0000\u0000\u018a\u018d\u0005P\u0000\u0000\u018b\u018d\u0003@ "+ - "\u0000\u018c\u018a\u0001\u0000\u0000\u0000\u018c\u018b\u0001\u0000\u0000"+ - "\u0000\u018d=\u0001\u0000\u0000\u0000\u018e\u01b9\u0005-\u0000\u0000\u018f"+ - "\u0190\u0003b1\u0000\u0190\u0191\u0005C\u0000\u0000\u0191\u01b9\u0001"+ - "\u0000\u0000\u0000\u0192\u01b9\u0003`0\u0000\u0193\u01b9\u0003b1\u0000"+ - "\u0194\u01b9\u0003\\.\u0000\u0195\u01b9\u0003@ \u0000\u0196\u01b9\u0003"+ - "d2\u0000\u0197\u0198\u0005A\u0000\u0000\u0198\u019d\u0003^/\u0000\u0199"+ - "\u019a\u0005\"\u0000\u0000\u019a\u019c\u0003^/\u0000\u019b\u0199\u0001"+ - "\u0000\u0000\u0000\u019c\u019f\u0001\u0000\u0000\u0000\u019d\u019b\u0001"+ - "\u0000\u0000\u0000\u019d\u019e\u0001\u0000\u0000\u0000\u019e\u01a0\u0001"+ - "\u0000\u0000\u0000\u019f\u019d\u0001\u0000\u0000\u0000\u01a0\u01a1\u0005"+ - "B\u0000\u0000\u01a1\u01b9\u0001\u0000\u0000\u0000\u01a2\u01a3\u0005A\u0000"+ - "\u0000\u01a3\u01a8\u0003\\.\u0000\u01a4\u01a5\u0005\"\u0000\u0000\u01a5"+ - "\u01a7\u0003\\.\u0000\u01a6\u01a4\u0001\u0000\u0000\u0000\u01a7\u01aa"+ - "\u0001\u0000\u0000\u0000\u01a8\u01a6\u0001\u0000\u0000\u0000\u01a8\u01a9"+ - "\u0001\u0000\u0000\u0000\u01a9\u01ab\u0001\u0000\u0000\u0000\u01aa\u01a8"+ - "\u0001\u0000\u0000\u0000\u01ab\u01ac\u0005B\u0000\u0000\u01ac\u01b9\u0001"+ - 
"\u0000\u0000\u0000\u01ad\u01ae\u0005A\u0000\u0000\u01ae\u01b3\u0003d2"+ - "\u0000\u01af\u01b0\u0005\"\u0000\u0000\u01b0\u01b2\u0003d2\u0000\u01b1"+ - "\u01af\u0001\u0000\u0000\u0000\u01b2\u01b5\u0001\u0000\u0000\u0000\u01b3"+ - "\u01b1\u0001\u0000\u0000\u0000\u01b3\u01b4\u0001\u0000\u0000\u0000\u01b4"+ - "\u01b6\u0001\u0000\u0000\u0000\u01b5\u01b3\u0001\u0000\u0000\u0000\u01b6"+ - "\u01b7\u0005B\u0000\u0000\u01b7\u01b9\u0001\u0000\u0000\u0000\u01b8\u018e"+ - "\u0001\u0000\u0000\u0000\u01b8\u018f\u0001\u0000\u0000\u0000\u01b8\u0192"+ - "\u0001\u0000\u0000\u0000\u01b8\u0193\u0001\u0000\u0000\u0000\u01b8\u0194"+ - "\u0001\u0000\u0000\u0000\u01b8\u0195\u0001\u0000\u0000\u0000\u01b8\u0196"+ - "\u0001\u0000\u0000\u0000\u01b8\u0197\u0001\u0000\u0000\u0000\u01b8\u01a2"+ - "\u0001\u0000\u0000\u0000\u01b8\u01ad\u0001\u0000\u0000\u0000\u01b9?\u0001"+ - "\u0000\u0000\u0000\u01ba\u01bd\u00050\u0000\u0000\u01bb\u01bd\u0005@\u0000"+ - "\u0000\u01bc\u01ba\u0001\u0000\u0000\u0000\u01bc\u01bb\u0001\u0000\u0000"+ - "\u0000\u01bdA\u0001\u0000\u0000\u0000\u01be\u01c1\u0003:\u001d\u0000\u01bf"+ - "\u01c1\u0003@ \u0000\u01c0\u01be\u0001\u0000\u0000\u0000\u01c0\u01bf\u0001"+ - "\u0000\u0000\u0000\u01c1C\u0001\u0000\u0000\u0000\u01c2\u01c3\u0005\t"+ - "\u0000\u0000\u01c3\u01c4\u0005\u001b\u0000\u0000\u01c4E\u0001\u0000\u0000"+ - "\u0000\u01c5\u01c6\u0005\u000e\u0000\u0000\u01c6\u01cb\u0003H$\u0000\u01c7"+ - "\u01c8\u0005\"\u0000\u0000\u01c8\u01ca\u0003H$\u0000\u01c9\u01c7\u0001"+ - "\u0000\u0000\u0000\u01ca\u01cd\u0001\u0000\u0000\u0000\u01cb\u01c9\u0001"+ - "\u0000\u0000\u0000\u01cb\u01cc\u0001\u0000\u0000\u0000\u01ccG\u0001\u0000"+ - "\u0000\u0000\u01cd\u01cb\u0001\u0000\u0000\u0000\u01ce\u01d0\u0003\n\u0005"+ - "\u0000\u01cf\u01d1\u0007\u0004\u0000\u0000\u01d0\u01cf\u0001\u0000\u0000"+ - "\u0000\u01d0\u01d1\u0001\u0000\u0000\u0000\u01d1\u01d4\u0001\u0000\u0000"+ - "\u0000\u01d2\u01d3\u0005.\u0000\u0000\u01d3\u01d5\u0007\u0005\u0000\u0000"+ - 
"\u01d4\u01d2\u0001\u0000\u0000\u0000\u01d4\u01d5\u0001\u0000\u0000\u0000"+ - "\u01d5I\u0001\u0000\u0000\u0000\u01d6\u01d7\u0005\b\u0000\u0000\u01d7"+ - "\u01d8\u00038\u001c\u0000\u01d8K\u0001\u0000\u0000\u0000\u01d9\u01da\u0005"+ - "\u0002\u0000\u0000\u01da\u01db\u00038\u001c\u0000\u01dbM\u0001\u0000\u0000"+ - "\u0000\u01dc\u01dd\u0005\u000b\u0000\u0000\u01dd\u01e2\u0003P(\u0000\u01de"+ - "\u01df\u0005\"\u0000\u0000\u01df\u01e1\u0003P(\u0000\u01e0\u01de\u0001"+ - "\u0000\u0000\u0000\u01e1\u01e4\u0001\u0000\u0000\u0000\u01e2\u01e0\u0001"+ - "\u0000\u0000\u0000\u01e2\u01e3\u0001\u0000\u0000\u0000\u01e3O\u0001\u0000"+ - "\u0000\u0000\u01e4\u01e2\u0001\u0000\u0000\u0000\u01e5\u01e6\u00036\u001b"+ - "\u0000\u01e6\u01e7\u0005T\u0000\u0000\u01e7\u01e8\u00036\u001b\u0000\u01e8"+ - "Q\u0001\u0000\u0000\u0000\u01e9\u01ea\u0005\u0001\u0000\u0000\u01ea\u01eb"+ - "\u0003\u0014\n\u0000\u01eb\u01ed\u0003d2\u0000\u01ec\u01ee\u0003X,\u0000"+ - "\u01ed\u01ec\u0001\u0000\u0000\u0000\u01ed\u01ee\u0001\u0000\u0000\u0000"+ - "\u01eeS\u0001\u0000\u0000\u0000\u01ef\u01f0\u0005\u0007\u0000\u0000\u01f0"+ - "\u01f1\u0003\u0014\n\u0000\u01f1\u01f2\u0003d2\u0000\u01f2U\u0001\u0000"+ - "\u0000\u0000\u01f3\u01f4\u0005\n\u0000\u0000\u01f4\u01f5\u00034\u001a"+ - "\u0000\u01f5W\u0001\u0000\u0000\u0000\u01f6\u01fb\u0003Z-\u0000\u01f7"+ - "\u01f8\u0005\"\u0000\u0000\u01f8\u01fa\u0003Z-\u0000\u01f9\u01f7\u0001"+ - "\u0000\u0000\u0000\u01fa\u01fd\u0001\u0000\u0000\u0000\u01fb\u01f9\u0001"+ - "\u0000\u0000\u0000\u01fb\u01fc\u0001\u0000\u0000\u0000\u01fcY\u0001\u0000"+ - "\u0000\u0000\u01fd\u01fb\u0001\u0000\u0000\u0000\u01fe\u01ff\u0003:\u001d"+ - "\u0000\u01ff\u0200\u0005 \u0000\u0000\u0200\u0201\u0003>\u001f\u0000\u0201"+ - "[\u0001\u0000\u0000\u0000\u0202\u0203\u0007\u0006\u0000\u0000\u0203]\u0001"+ - "\u0000\u0000\u0000\u0204\u0207\u0003`0\u0000\u0205\u0207\u0003b1\u0000"+ - "\u0206\u0204\u0001\u0000\u0000\u0000\u0206\u0205\u0001\u0000\u0000\u0000"+ - 
"\u0207_\u0001\u0000\u0000\u0000\u0208\u020a\u0007\u0000\u0000\u0000\u0209"+ - "\u0208\u0001\u0000\u0000\u0000\u0209\u020a\u0001\u0000\u0000\u0000\u020a"+ - "\u020b\u0001\u0000\u0000\u0000\u020b\u020c\u0005\u001c\u0000\u0000\u020c"+ - "a\u0001\u0000\u0000\u0000\u020d\u020f\u0007\u0000\u0000\u0000\u020e\u020d"+ - "\u0001\u0000\u0000\u0000\u020e\u020f\u0001\u0000\u0000\u0000\u020f\u0210"+ - "\u0001\u0000\u0000\u0000\u0210\u0211\u0005\u001b\u0000\u0000\u0211c\u0001"+ - "\u0000\u0000\u0000\u0212\u0213\u0005\u001a\u0000\u0000\u0213e\u0001\u0000"+ - "\u0000\u0000\u0214\u0215\u0007\u0007\u0000\u0000\u0215g\u0001\u0000\u0000"+ - "\u0000\u0216\u0217\u0005\u0005\u0000\u0000\u0217\u0218\u0003j5\u0000\u0218"+ - "i\u0001\u0000\u0000\u0000\u0219\u021a\u0005A\u0000\u0000\u021a\u021b\u0003"+ - "\u0002\u0001\u0000\u021b\u021c\u0005B\u0000\u0000\u021ck\u0001\u0000\u0000"+ - "\u0000\u021d\u021e\u0005\r\u0000\u0000\u021e\u021f\u0005d\u0000\u0000"+ - "\u021fm\u0001\u0000\u0000\u0000\u0220\u0221\u0005\u0003\u0000\u0000\u0221"+ - "\u0224\u0005Z\u0000\u0000\u0222\u0223\u0005X\u0000\u0000\u0223\u0225\u0003"+ - "6\u001b\u0000\u0224\u0222\u0001\u0000\u0000\u0000\u0224\u0225\u0001\u0000"+ - "\u0000\u0000\u0225\u022f\u0001\u0000\u0000\u0000\u0226\u0227\u0005Y\u0000"+ - "\u0000\u0227\u022c\u0003p8\u0000\u0228\u0229\u0005\"\u0000\u0000\u0229"+ - "\u022b\u0003p8\u0000\u022a\u0228\u0001\u0000\u0000\u0000\u022b\u022e\u0001"+ - "\u0000\u0000\u0000\u022c\u022a\u0001\u0000\u0000\u0000\u022c\u022d\u0001"+ - "\u0000\u0000\u0000\u022d\u0230\u0001\u0000\u0000\u0000\u022e\u022c\u0001"+ - "\u0000\u0000\u0000\u022f\u0226\u0001\u0000\u0000\u0000\u022f\u0230\u0001"+ - "\u0000\u0000\u0000\u0230o\u0001\u0000\u0000\u0000\u0231\u0232\u00036\u001b"+ - "\u0000\u0232\u0233\u0005 \u0000\u0000\u0233\u0235\u0001\u0000\u0000\u0000"+ - "\u0234\u0231\u0001\u0000\u0000\u0000\u0234\u0235\u0001\u0000\u0000\u0000"+ - "\u0235\u0236\u0001\u0000\u0000\u0000\u0236\u0237\u00036\u001b\u0000\u0237"+ - 
"q\u0001\u0000\u0000\u0000\u0238\u0239\u0005\u0012\u0000\u0000\u0239\u023a"+ - "\u0003\"\u0011\u0000\u023a\u023b\u0005X\u0000\u0000\u023b\u023c\u0003"+ - "8\u001c\u0000\u023cs\u0001\u0000\u0000\u0000\u023d\u023e\u0005\u0011\u0000"+ - "\u0000\u023e\u0241\u0003\u001c\u000e\u0000\u023f\u0240\u0005\u001d\u0000"+ - "\u0000\u0240\u0242\u0003\u001c\u000e\u0000\u0241\u023f\u0001\u0000\u0000"+ - "\u0000\u0241\u0242\u0001\u0000\u0000\u0000\u0242u\u0001\u0000\u0000\u0000"+ - "8\u0081\u008a\u009c\u00a8\u00b1\u00b9\u00bf\u00c7\u00c9\u00ce\u00d5\u00da"+ - "\u00e5\u00eb\u00f3\u00f5\u0100\u0107\u0112\u0115\u0123\u012b\u0133\u0137"+ - "\u013e\u0146\u014e\u015b\u015f\u0163\u016a\u016e\u0175\u017d\u0185\u018c"+ - "\u019d\u01a8\u01b3\u01b8\u01bc\u01c0\u01cb\u01d0\u01d4\u01e2\u01ed\u01fb"+ - "\u0206\u0209\u020e\u0224\u022c\u022f\u0234\u0241"; + "\u0001\u0005\u0003\u0005\u00ab\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0005\u0005\u00b2\b\u0005\n\u0005\f\u0005\u00b5"+ + "\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003"+ + "\u0005\u00bc\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003"+ + "\u0005\u00c2\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0005\u0005\u00ca\b\u0005\n\u0005\f\u0005\u00cd\t\u0005"+ + "\u0001\u0006\u0001\u0006\u0003\u0006\u00d1\b\u0006\u0001\u0006\u0001\u0006"+ + "\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00d8\b\u0006\u0001\u0006"+ + "\u0001\u0006\u0001\u0006\u0003\u0006\u00dd\b\u0006\u0001\u0007\u0001\u0007"+ + "\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0003"+ + "\b\u00e8\b\b\u0001\t\u0001\t\u0001\t\u0001\t\u0003\t\u00ee\b\t\u0001\t"+ + "\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0005\t\u00f6\b\t\n\t\f\t\u00f9"+ + "\t\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0003"+ + "\n\u0103\b\n\u0001\n\u0001\n\u0001\n\u0005\n\u0108\b\n\n\n\f\n\u010b\t"+ + 
"\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0005\u000b\u0113\b\u000b\n\u000b\f\u000b\u0116\t\u000b\u0003\u000b\u0118"+ + "\b\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0003\f\u011f"+ + "\b\f\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001"+ + "\u000f\u0001\u000f\u0005\u000f\u0129\b\u000f\n\u000f\f\u000f\u012c\t\u000f"+ + "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0003\u0010"+ + "\u0133\b\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0005\u0011"+ + "\u0139\b\u0011\n\u0011\f\u0011\u013c\t\u0011\u0001\u0011\u0003\u0011\u013f"+ + "\b\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0003"+ + "\u0012\u0146\b\u0012\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001"+ + "\u0015\u0001\u0015\u0003\u0015\u014e\b\u0015\u0001\u0016\u0001\u0016\u0001"+ + "\u0016\u0001\u0016\u0005\u0016\u0154\b\u0016\n\u0016\f\u0016\u0157\t\u0016"+ + "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018"+ + "\u0001\u0018\u0001\u0018\u0005\u0018\u0161\b\u0018\n\u0018\f\u0018\u0164"+ + "\t\u0018\u0001\u0018\u0003\u0018\u0167\b\u0018\u0001\u0018\u0001\u0018"+ + "\u0003\u0018\u016b\b\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u001a"+ + "\u0001\u001a\u0003\u001a\u0172\b\u001a\u0001\u001a\u0001\u001a\u0003\u001a"+ + "\u0176\b\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0005\u001b\u017b\b"+ + "\u001b\n\u001b\f\u001b\u017e\t\u001b\u0001\u001c\u0001\u001c\u0001\u001c"+ + "\u0005\u001c\u0183\b\u001c\n\u001c\f\u001c\u0186\t\u001c\u0001\u001d\u0001"+ + "\u001d\u0001\u001d\u0005\u001d\u018b\b\u001d\n\u001d\f\u001d\u018e\t\u001d"+ + "\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0003\u001f\u0194\b\u001f"+ + "\u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0001"+ + " \u0001 \u0001 \u0001 \u0005 \u01a3\b \n \f \u01a6\t \u0001 \u0001 \u0001"+ + " \u0001 \u0001 \u0001 \u0005 \u01ae\b \n \f \u01b1\t \u0001 \u0001 \u0001"+ + " \u0001 \u0001 \u0001 
\u0005 \u01b9\b \n \f \u01bc\t \u0001 \u0001 \u0003"+ + " \u01c0\b \u0001!\u0001!\u0003!\u01c4\b!\u0001\"\u0001\"\u0003\"\u01c8"+ + "\b\"\u0001#\u0001#\u0001#\u0001$\u0001$\u0001$\u0001$\u0005$\u01d1\b$"+ + "\n$\f$\u01d4\t$\u0001%\u0001%\u0003%\u01d8\b%\u0001%\u0001%\u0003%\u01dc"+ + "\b%\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001\'\u0001(\u0001(\u0001("+ + "\u0001(\u0005(\u01e8\b(\n(\f(\u01eb\t(\u0001)\u0001)\u0001)\u0001)\u0001"+ + "*\u0001*\u0001*\u0001*\u0003*\u01f5\b*\u0001+\u0001+\u0001+\u0001+\u0001"+ + ",\u0001,\u0001,\u0001-\u0001-\u0001-\u0005-\u0201\b-\n-\f-\u0204\t-\u0001"+ + ".\u0001.\u0001.\u0001.\u0001/\u0001/\u00010\u00010\u00030\u020e\b0\u0001"+ + "1\u00031\u0211\b1\u00011\u00011\u00012\u00032\u0216\b2\u00012\u00012\u0001"+ + "3\u00013\u00014\u00014\u00015\u00015\u00015\u00016\u00016\u00016\u0001"+ + "6\u00017\u00017\u00017\u00018\u00018\u00018\u00018\u00038\u022c\b8\u0001"+ + "8\u00018\u00018\u00018\u00058\u0232\b8\n8\f8\u0235\t8\u00038\u0237\b8"+ + "\u00019\u00019\u00019\u00039\u023c\b9\u00019\u00019\u0001:\u0001:\u0001"+ + ":\u0001:\u0001:\u0001;\u0001;\u0001;\u0001;\u0003;\u0249\b;\u0001;\u0000"+ + "\u0004\u0002\n\u0012\u0014<\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ + "\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPR"+ + "TVXZ\\^`bdfhjlnprtv\u0000\b\u0001\u0000:;\u0001\u0000<>\u0002\u0000\u0019"+ + "\u0019LL\u0001\u0000CD\u0002\u0000\u001e\u001e\"\"\u0002\u0000%%((\u0002"+ + "\u0000$$22\u0002\u00003359\u0265\u0000x\u0001\u0000\u0000\u0000\u0002"+ + "{\u0001\u0000\u0000\u0000\u0004\u008c\u0001\u0000\u0000\u0000\u0006\u009e"+ + "\u0001\u0000\u0000\u0000\b\u00a0\u0001\u0000\u0000\u0000\n\u00c1\u0001"+ + "\u0000\u0000\u0000\f\u00dc\u0001\u0000\u0000\u0000\u000e\u00de\u0001\u0000"+ + "\u0000\u0000\u0010\u00e7\u0001\u0000\u0000\u0000\u0012\u00ed\u0001\u0000"+ + "\u0000\u0000\u0014\u0102\u0001\u0000\u0000\u0000\u0016\u010c\u0001\u0000"+ + "\u0000\u0000\u0018\u011e\u0001\u0000\u0000\u0000\u001a\u0120\u0001\u0000"+ + 
"\u0000\u0000\u001c\u0122\u0001\u0000\u0000\u0000\u001e\u0125\u0001\u0000"+ + "\u0000\u0000 \u0132\u0001\u0000\u0000\u0000\"\u0134\u0001\u0000\u0000"+ + "\u0000$\u0145\u0001\u0000\u0000\u0000&\u0147\u0001\u0000\u0000\u0000("+ + "\u0149\u0001\u0000\u0000\u0000*\u014d\u0001\u0000\u0000\u0000,\u014f\u0001"+ + "\u0000\u0000\u0000.\u0158\u0001\u0000\u0000\u00000\u015c\u0001\u0000\u0000"+ + "\u00002\u016c\u0001\u0000\u0000\u00004\u016f\u0001\u0000\u0000\u00006"+ + "\u0177\u0001\u0000\u0000\u00008\u017f\u0001\u0000\u0000\u0000:\u0187\u0001"+ + "\u0000\u0000\u0000<\u018f\u0001\u0000\u0000\u0000>\u0193\u0001\u0000\u0000"+ + "\u0000@\u01bf\u0001\u0000\u0000\u0000B\u01c3\u0001\u0000\u0000\u0000D"+ + "\u01c7\u0001\u0000\u0000\u0000F\u01c9\u0001\u0000\u0000\u0000H\u01cc\u0001"+ + "\u0000\u0000\u0000J\u01d5\u0001\u0000\u0000\u0000L\u01dd\u0001\u0000\u0000"+ + "\u0000N\u01e0\u0001\u0000\u0000\u0000P\u01e3\u0001\u0000\u0000\u0000R"+ + "\u01ec\u0001\u0000\u0000\u0000T\u01f0\u0001\u0000\u0000\u0000V\u01f6\u0001"+ + "\u0000\u0000\u0000X\u01fa\u0001\u0000\u0000\u0000Z\u01fd\u0001\u0000\u0000"+ + "\u0000\\\u0205\u0001\u0000\u0000\u0000^\u0209\u0001\u0000\u0000\u0000"+ + "`\u020d\u0001\u0000\u0000\u0000b\u0210\u0001\u0000\u0000\u0000d\u0215"+ + "\u0001\u0000\u0000\u0000f\u0219\u0001\u0000\u0000\u0000h\u021b\u0001\u0000"+ + "\u0000\u0000j\u021d\u0001\u0000\u0000\u0000l\u0220\u0001\u0000\u0000\u0000"+ + "n\u0224\u0001\u0000\u0000\u0000p\u0227\u0001\u0000\u0000\u0000r\u023b"+ + "\u0001\u0000\u0000\u0000t\u023f\u0001\u0000\u0000\u0000v\u0244\u0001\u0000"+ + "\u0000\u0000xy\u0003\u0002\u0001\u0000yz\u0005\u0000\u0000\u0001z\u0001"+ + "\u0001\u0000\u0000\u0000{|\u0006\u0001\uffff\uffff\u0000|}\u0003\u0004"+ + "\u0002\u0000}\u0083\u0001\u0000\u0000\u0000~\u007f\n\u0001\u0000\u0000"+ + "\u007f\u0080\u0005\u0018\u0000\u0000\u0080\u0082\u0003\u0006\u0003\u0000"+ + "\u0081~\u0001\u0000\u0000\u0000\u0082\u0085\u0001\u0000\u0000\u0000\u0083"+ + 
"\u0081\u0001\u0000\u0000\u0000\u0083\u0084\u0001\u0000\u0000\u0000\u0084"+ + "\u0003\u0001\u0000\u0000\u0000\u0085\u0083\u0001\u0000\u0000\u0000\u0086"+ + "\u008d\u0003j5\u0000\u0087\u008d\u0003\"\u0011\u0000\u0088\u008d\u0003"+ + "\u001c\u000e\u0000\u0089\u008d\u0003n7\u0000\u008a\u008b\u0004\u0002\u0001"+ + "\u0000\u008b\u008d\u00030\u0018\u0000\u008c\u0086\u0001\u0000\u0000\u0000"+ + "\u008c\u0087\u0001\u0000\u0000\u0000\u008c\u0088\u0001\u0000\u0000\u0000"+ + "\u008c\u0089\u0001\u0000\u0000\u0000\u008c\u008a\u0001\u0000\u0000\u0000"+ + "\u008d\u0005\u0001\u0000\u0000\u0000\u008e\u009f\u00032\u0019\u0000\u008f"+ + "\u009f\u0003\b\u0004\u0000\u0090\u009f\u0003L&\u0000\u0091\u009f\u0003"+ + "F#\u0000\u0092\u009f\u00034\u001a\u0000\u0093\u009f\u0003H$\u0000\u0094"+ + "\u009f\u0003N\'\u0000\u0095\u009f\u0003P(\u0000\u0096\u009f\u0003T*\u0000"+ + "\u0097\u009f\u0003V+\u0000\u0098\u009f\u0003p8\u0000\u0099\u009f\u0003"+ + "X,\u0000\u009a\u009b\u0004\u0003\u0002\u0000\u009b\u009f\u0003v;\u0000"+ + "\u009c\u009d\u0004\u0003\u0003\u0000\u009d\u009f\u0003t:\u0000\u009e\u008e"+ + "\u0001\u0000\u0000\u0000\u009e\u008f\u0001\u0000\u0000\u0000\u009e\u0090"+ + "\u0001\u0000\u0000\u0000\u009e\u0091\u0001\u0000\u0000\u0000\u009e\u0092"+ + "\u0001\u0000\u0000\u0000\u009e\u0093\u0001\u0000\u0000\u0000\u009e\u0094"+ + "\u0001\u0000\u0000\u0000\u009e\u0095\u0001\u0000\u0000\u0000\u009e\u0096"+ + "\u0001\u0000\u0000\u0000\u009e\u0097\u0001\u0000\u0000\u0000\u009e\u0098"+ + "\u0001\u0000\u0000\u0000\u009e\u0099\u0001\u0000\u0000\u0000\u009e\u009a"+ + "\u0001\u0000\u0000\u0000\u009e\u009c\u0001\u0000\u0000\u0000\u009f\u0007"+ + "\u0001\u0000\u0000\u0000\u00a0\u00a1\u0005\u0010\u0000\u0000\u00a1\u00a2"+ + "\u0003\n\u0005\u0000\u00a2\t\u0001\u0000\u0000\u0000\u00a3\u00a4\u0006"+ + "\u0005\uffff\uffff\u0000\u00a4\u00a5\u0005+\u0000\u0000\u00a5\u00c2\u0003"+ + "\n\u0005\b\u00a6\u00c2\u0003\u0010\b\u0000\u00a7\u00c2\u0003\f\u0006\u0000"+ + 
"\u00a8\u00aa\u0003\u0010\b\u0000\u00a9\u00ab\u0005+\u0000\u0000\u00aa"+ + "\u00a9\u0001\u0000\u0000\u0000\u00aa\u00ab\u0001\u0000\u0000\u0000\u00ab"+ + "\u00ac\u0001\u0000\u0000\u0000\u00ac\u00ad\u0005&\u0000\u0000\u00ad\u00ae"+ + "\u0005*\u0000\u0000\u00ae\u00b3\u0003\u0010\b\u0000\u00af\u00b0\u0005"+ + "!\u0000\u0000\u00b0\u00b2\u0003\u0010\b\u0000\u00b1\u00af\u0001\u0000"+ + "\u0000\u0000\u00b2\u00b5\u0001\u0000\u0000\u0000\u00b3\u00b1\u0001\u0000"+ + "\u0000\u0000\u00b3\u00b4\u0001\u0000\u0000\u0000\u00b4\u00b6\u0001\u0000"+ + "\u0000\u0000\u00b5\u00b3\u0001\u0000\u0000\u0000\u00b6\u00b7\u00051\u0000"+ + "\u0000\u00b7\u00c2\u0001\u0000\u0000\u0000\u00b8\u00b9\u0003\u0010\b\u0000"+ + "\u00b9\u00bb\u0005\'\u0000\u0000\u00ba\u00bc\u0005+\u0000\u0000\u00bb"+ + "\u00ba\u0001\u0000\u0000\u0000\u00bb\u00bc\u0001\u0000\u0000\u0000\u00bc"+ + "\u00bd\u0001\u0000\u0000\u0000\u00bd\u00be\u0005,\u0000\u0000\u00be\u00c2"+ + "\u0001\u0000\u0000\u0000\u00bf\u00c0\u0004\u0005\u0004\u0000\u00c0\u00c2"+ + "\u0003\u000e\u0007\u0000\u00c1\u00a3\u0001\u0000\u0000\u0000\u00c1\u00a6"+ + "\u0001\u0000\u0000\u0000\u00c1\u00a7\u0001\u0000\u0000\u0000\u00c1\u00a8"+ + "\u0001\u0000\u0000\u0000\u00c1\u00b8\u0001\u0000\u0000\u0000\u00c1\u00bf"+ + "\u0001\u0000\u0000\u0000\u00c2\u00cb\u0001\u0000\u0000\u0000\u00c3\u00c4"+ + "\n\u0005\u0000\u0000\u00c4\u00c5\u0005\u001d\u0000\u0000\u00c5\u00ca\u0003"+ + "\n\u0005\u0006\u00c6\u00c7\n\u0004\u0000\u0000\u00c7\u00c8\u0005.\u0000"+ + "\u0000\u00c8\u00ca\u0003\n\u0005\u0005\u00c9\u00c3\u0001\u0000\u0000\u0000"+ + "\u00c9\u00c6\u0001\u0000\u0000\u0000\u00ca\u00cd\u0001\u0000\u0000\u0000"+ + "\u00cb\u00c9\u0001\u0000\u0000\u0000\u00cb\u00cc\u0001\u0000\u0000\u0000"+ + "\u00cc\u000b\u0001\u0000\u0000\u0000\u00cd\u00cb\u0001\u0000\u0000\u0000"+ + "\u00ce\u00d0\u0003\u0010\b\u0000\u00cf\u00d1\u0005+\u0000\u0000\u00d0"+ + "\u00cf\u0001\u0000\u0000\u0000\u00d0\u00d1\u0001\u0000\u0000\u0000\u00d1"+ + 
"\u00d2\u0001\u0000\u0000\u0000\u00d2\u00d3\u0005)\u0000\u0000\u00d3\u00d4"+ + "\u0003f3\u0000\u00d4\u00dd\u0001\u0000\u0000\u0000\u00d5\u00d7\u0003\u0010"+ + "\b\u0000\u00d6\u00d8\u0005+\u0000\u0000\u00d7\u00d6\u0001\u0000\u0000"+ + "\u0000\u00d7\u00d8\u0001\u0000\u0000\u0000\u00d8\u00d9\u0001\u0000\u0000"+ + "\u0000\u00d9\u00da\u00050\u0000\u0000\u00da\u00db\u0003f3\u0000\u00db"+ + "\u00dd\u0001\u0000\u0000\u0000\u00dc\u00ce\u0001\u0000\u0000\u0000\u00dc"+ + "\u00d5\u0001\u0000\u0000\u0000\u00dd\r\u0001\u0000\u0000\u0000\u00de\u00df"+ + "\u0003\u0010\b\u0000\u00df\u00e0\u0005?\u0000\u0000\u00e0\u00e1\u0003"+ + "f3\u0000\u00e1\u000f\u0001\u0000\u0000\u0000\u00e2\u00e8\u0003\u0012\t"+ + "\u0000\u00e3\u00e4\u0003\u0012\t\u0000\u00e4\u00e5\u0003h4\u0000\u00e5"+ + "\u00e6\u0003\u0012\t\u0000\u00e6\u00e8\u0001\u0000\u0000\u0000\u00e7\u00e2"+ + "\u0001\u0000\u0000\u0000\u00e7\u00e3\u0001\u0000\u0000\u0000\u00e8\u0011"+ + "\u0001\u0000\u0000\u0000\u00e9\u00ea\u0006\t\uffff\uffff\u0000\u00ea\u00ee"+ + "\u0003\u0014\n\u0000\u00eb\u00ec\u0007\u0000\u0000\u0000\u00ec\u00ee\u0003"+ + "\u0012\t\u0003\u00ed\u00e9\u0001\u0000\u0000\u0000\u00ed\u00eb\u0001\u0000"+ + "\u0000\u0000\u00ee\u00f7\u0001\u0000\u0000\u0000\u00ef\u00f0\n\u0002\u0000"+ + "\u0000\u00f0\u00f1\u0007\u0001\u0000\u0000\u00f1\u00f6\u0003\u0012\t\u0003"+ + "\u00f2\u00f3\n\u0001\u0000\u0000\u00f3\u00f4\u0007\u0000\u0000\u0000\u00f4"+ + "\u00f6\u0003\u0012\t\u0002\u00f5\u00ef\u0001\u0000\u0000\u0000\u00f5\u00f2"+ + "\u0001\u0000\u0000\u0000\u00f6\u00f9\u0001\u0000\u0000\u0000\u00f7\u00f5"+ + "\u0001\u0000\u0000\u0000\u00f7\u00f8\u0001\u0000\u0000\u0000\u00f8\u0013"+ + "\u0001\u0000\u0000\u0000\u00f9\u00f7\u0001\u0000\u0000\u0000\u00fa\u00fb"+ + "\u0006\n\uffff\uffff\u0000\u00fb\u0103\u0003@ \u0000\u00fc\u0103\u0003"+ + "6\u001b\u0000\u00fd\u0103\u0003\u0016\u000b\u0000\u00fe\u00ff\u0005*\u0000"+ + "\u0000\u00ff\u0100\u0003\n\u0005\u0000\u0100\u0101\u00051\u0000\u0000"+ + 
"\u0101\u0103\u0001\u0000\u0000\u0000\u0102\u00fa\u0001\u0000\u0000\u0000"+ + "\u0102\u00fc\u0001\u0000\u0000\u0000\u0102\u00fd\u0001\u0000\u0000\u0000"+ + "\u0102\u00fe\u0001\u0000\u0000\u0000\u0103\u0109\u0001\u0000\u0000\u0000"+ + "\u0104\u0105\n\u0001\u0000\u0000\u0105\u0106\u0005 \u0000\u0000\u0106"+ + "\u0108\u0003\u001a\r\u0000\u0107\u0104\u0001\u0000\u0000\u0000\u0108\u010b"+ + "\u0001\u0000\u0000\u0000\u0109\u0107\u0001\u0000\u0000\u0000\u0109\u010a"+ + "\u0001\u0000\u0000\u0000\u010a\u0015\u0001\u0000\u0000\u0000\u010b\u0109"+ + "\u0001\u0000\u0000\u0000\u010c\u010d\u0003\u0018\f\u0000\u010d\u0117\u0005"+ + "*\u0000\u0000\u010e\u0118\u0005<\u0000\u0000\u010f\u0114\u0003\n\u0005"+ + "\u0000\u0110\u0111\u0005!\u0000\u0000\u0111\u0113\u0003\n\u0005\u0000"+ + "\u0112\u0110\u0001\u0000\u0000\u0000\u0113\u0116\u0001\u0000\u0000\u0000"+ + "\u0114\u0112\u0001\u0000\u0000\u0000\u0114\u0115\u0001\u0000\u0000\u0000"+ + "\u0115\u0118\u0001\u0000\u0000\u0000\u0116\u0114\u0001\u0000\u0000\u0000"+ + "\u0117\u010e\u0001\u0000\u0000\u0000\u0117\u010f\u0001\u0000\u0000\u0000"+ + "\u0117\u0118\u0001\u0000\u0000\u0000\u0118\u0119\u0001\u0000\u0000\u0000"+ + "\u0119\u011a\u00051\u0000\u0000\u011a\u0017\u0001\u0000\u0000\u0000\u011b"+ + "\u011c\u0004\f\n\u0000\u011c\u011f\u0005?\u0000\u0000\u011d\u011f\u0003"+ + "D\"\u0000\u011e\u011b\u0001\u0000\u0000\u0000\u011e\u011d\u0001\u0000"+ + "\u0000\u0000\u011f\u0019\u0001\u0000\u0000\u0000\u0120\u0121\u0003<\u001e"+ + "\u0000\u0121\u001b\u0001\u0000\u0000\u0000\u0122\u0123\u0005\f\u0000\u0000"+ + "\u0123\u0124\u0003\u001e\u000f\u0000\u0124\u001d\u0001\u0000\u0000\u0000"+ + "\u0125\u012a\u0003 \u0010\u0000\u0126\u0127\u0005!\u0000\u0000\u0127\u0129"+ + "\u0003 \u0010\u0000\u0128\u0126\u0001\u0000\u0000\u0000\u0129\u012c\u0001"+ + "\u0000\u0000\u0000\u012a\u0128\u0001\u0000\u0000\u0000\u012a\u012b\u0001"+ + "\u0000\u0000\u0000\u012b\u001f\u0001\u0000\u0000\u0000\u012c\u012a\u0001"+ + 
"\u0000\u0000\u0000\u012d\u0133\u0003\n\u0005\u0000\u012e\u012f\u00036"+ + "\u001b\u0000\u012f\u0130\u0005\u001f\u0000\u0000\u0130\u0131\u0003\n\u0005"+ + "\u0000\u0131\u0133\u0001\u0000\u0000\u0000\u0132\u012d\u0001\u0000\u0000"+ + "\u0000\u0132\u012e\u0001\u0000\u0000\u0000\u0133!\u0001\u0000\u0000\u0000"+ + "\u0134\u0135\u0005\u0006\u0000\u0000\u0135\u013a\u0003$\u0012\u0000\u0136"+ + "\u0137\u0005!\u0000\u0000\u0137\u0139\u0003$\u0012\u0000\u0138\u0136\u0001"+ + "\u0000\u0000\u0000\u0139\u013c\u0001\u0000\u0000\u0000\u013a\u0138\u0001"+ + "\u0000\u0000\u0000\u013a\u013b\u0001\u0000\u0000\u0000\u013b\u013e\u0001"+ + "\u0000\u0000\u0000\u013c\u013a\u0001\u0000\u0000\u0000\u013d\u013f\u0003"+ + "*\u0015\u0000\u013e\u013d\u0001\u0000\u0000\u0000\u013e\u013f\u0001\u0000"+ + "\u0000\u0000\u013f#\u0001\u0000\u0000\u0000\u0140\u0141\u0003&\u0013\u0000"+ + "\u0141\u0142\u0005h\u0000\u0000\u0142\u0143\u0003(\u0014\u0000\u0143\u0146"+ + "\u0001\u0000\u0000\u0000\u0144\u0146\u0003(\u0014\u0000\u0145\u0140\u0001"+ + "\u0000\u0000\u0000\u0145\u0144\u0001\u0000\u0000\u0000\u0146%\u0001\u0000"+ + "\u0000\u0000\u0147\u0148\u0005L\u0000\u0000\u0148\'\u0001\u0000\u0000"+ + "\u0000\u0149\u014a\u0007\u0002\u0000\u0000\u014a)\u0001\u0000\u0000\u0000"+ + "\u014b\u014e\u0003,\u0016\u0000\u014c\u014e\u0003.\u0017\u0000\u014d\u014b"+ + "\u0001\u0000\u0000\u0000\u014d\u014c\u0001\u0000\u0000\u0000\u014e+\u0001"+ + "\u0000\u0000\u0000\u014f\u0150\u0005K\u0000\u0000\u0150\u0155\u0005L\u0000"+ + "\u0000\u0151\u0152\u0005!\u0000\u0000\u0152\u0154\u0005L\u0000\u0000\u0153"+ + "\u0151\u0001\u0000\u0000\u0000\u0154\u0157\u0001\u0000\u0000\u0000\u0155"+ + "\u0153\u0001\u0000\u0000\u0000\u0155\u0156\u0001\u0000\u0000\u0000\u0156"+ + "-\u0001\u0000\u0000\u0000\u0157\u0155\u0001\u0000\u0000\u0000\u0158\u0159"+ + "\u0005A\u0000\u0000\u0159\u015a\u0003,\u0016\u0000\u015a\u015b\u0005B"+ + "\u0000\u0000\u015b/\u0001\u0000\u0000\u0000\u015c\u015d\u0005\u0013\u0000"+ + 
"\u0000\u015d\u0162\u0003$\u0012\u0000\u015e\u015f\u0005!\u0000\u0000\u015f"+ + "\u0161\u0003$\u0012\u0000\u0160\u015e\u0001\u0000\u0000\u0000\u0161\u0164"+ + "\u0001\u0000\u0000\u0000\u0162\u0160\u0001\u0000\u0000\u0000\u0162\u0163"+ + "\u0001\u0000\u0000\u0000\u0163\u0166\u0001\u0000\u0000\u0000\u0164\u0162"+ + "\u0001\u0000\u0000\u0000\u0165\u0167\u0003\u001e\u000f\u0000\u0166\u0165"+ + "\u0001\u0000\u0000\u0000\u0166\u0167\u0001\u0000\u0000\u0000\u0167\u016a"+ + "\u0001\u0000\u0000\u0000\u0168\u0169\u0005\u001c\u0000\u0000\u0169\u016b"+ + "\u0003\u001e\u000f\u0000\u016a\u0168\u0001\u0000\u0000\u0000\u016a\u016b"+ + "\u0001\u0000\u0000\u0000\u016b1\u0001\u0000\u0000\u0000\u016c\u016d\u0005"+ + "\u0004\u0000\u0000\u016d\u016e\u0003\u001e\u000f\u0000\u016e3\u0001\u0000"+ + "\u0000\u0000\u016f\u0171\u0005\u000f\u0000\u0000\u0170\u0172\u0003\u001e"+ + "\u000f\u0000\u0171\u0170\u0001\u0000\u0000\u0000\u0171\u0172\u0001\u0000"+ + "\u0000\u0000\u0172\u0175\u0001\u0000\u0000\u0000\u0173\u0174\u0005\u001c"+ + "\u0000\u0000\u0174\u0176\u0003\u001e\u000f\u0000\u0175\u0173\u0001\u0000"+ + "\u0000\u0000\u0175\u0176\u0001\u0000\u0000\u0000\u01765\u0001\u0000\u0000"+ + "\u0000\u0177\u017c\u0003D\"\u0000\u0178\u0179\u0005#\u0000\u0000\u0179"+ + "\u017b\u0003D\"\u0000\u017a\u0178\u0001\u0000\u0000\u0000\u017b\u017e"+ + "\u0001\u0000\u0000\u0000\u017c\u017a\u0001\u0000\u0000\u0000\u017c\u017d"+ + "\u0001\u0000\u0000\u0000\u017d7\u0001\u0000\u0000\u0000\u017e\u017c\u0001"+ + "\u0000\u0000\u0000\u017f\u0184\u0003>\u001f\u0000\u0180\u0181\u0005#\u0000"+ + "\u0000\u0181\u0183\u0003>\u001f\u0000\u0182\u0180\u0001\u0000\u0000\u0000"+ + "\u0183\u0186\u0001\u0000\u0000\u0000\u0184\u0182\u0001\u0000\u0000\u0000"+ + "\u0184\u0185\u0001\u0000\u0000\u0000\u01859\u0001\u0000\u0000\u0000\u0186"+ + "\u0184\u0001\u0000\u0000\u0000\u0187\u018c\u00038\u001c\u0000\u0188\u0189"+ + "\u0005!\u0000\u0000\u0189\u018b\u00038\u001c\u0000\u018a\u0188\u0001\u0000"+ + 
"\u0000\u0000\u018b\u018e\u0001\u0000\u0000\u0000\u018c\u018a\u0001\u0000"+ + "\u0000\u0000\u018c\u018d\u0001\u0000\u0000\u0000\u018d;\u0001\u0000\u0000"+ + "\u0000\u018e\u018c\u0001\u0000\u0000\u0000\u018f\u0190\u0007\u0003\u0000"+ + "\u0000\u0190=\u0001\u0000\u0000\u0000\u0191\u0194\u0005P\u0000\u0000\u0192"+ + "\u0194\u0003B!\u0000\u0193\u0191\u0001\u0000\u0000\u0000\u0193\u0192\u0001"+ + "\u0000\u0000\u0000\u0194?\u0001\u0000\u0000\u0000\u0195\u01c0\u0005,\u0000"+ + "\u0000\u0196\u0197\u0003d2\u0000\u0197\u0198\u0005C\u0000\u0000\u0198"+ + "\u01c0\u0001\u0000\u0000\u0000\u0199\u01c0\u0003b1\u0000\u019a\u01c0\u0003"+ + "d2\u0000\u019b\u01c0\u0003^/\u0000\u019c\u01c0\u0003B!\u0000\u019d\u01c0"+ + "\u0003f3\u0000\u019e\u019f\u0005A\u0000\u0000\u019f\u01a4\u0003`0\u0000"+ + "\u01a0\u01a1\u0005!\u0000\u0000\u01a1\u01a3\u0003`0\u0000\u01a2\u01a0"+ + "\u0001\u0000\u0000\u0000\u01a3\u01a6\u0001\u0000\u0000\u0000\u01a4\u01a2"+ + "\u0001\u0000\u0000\u0000\u01a4\u01a5\u0001\u0000\u0000\u0000\u01a5\u01a7"+ + "\u0001\u0000\u0000\u0000\u01a6\u01a4\u0001\u0000\u0000\u0000\u01a7\u01a8"+ + "\u0005B\u0000\u0000\u01a8\u01c0\u0001\u0000\u0000\u0000\u01a9\u01aa\u0005"+ + "A\u0000\u0000\u01aa\u01af\u0003^/\u0000\u01ab\u01ac\u0005!\u0000\u0000"+ + "\u01ac\u01ae\u0003^/\u0000\u01ad\u01ab\u0001\u0000\u0000\u0000\u01ae\u01b1"+ + "\u0001\u0000\u0000\u0000\u01af\u01ad\u0001\u0000\u0000\u0000\u01af\u01b0"+ + "\u0001\u0000\u0000\u0000\u01b0\u01b2\u0001\u0000\u0000\u0000\u01b1\u01af"+ + "\u0001\u0000\u0000\u0000\u01b2\u01b3\u0005B\u0000\u0000\u01b3\u01c0\u0001"+ + "\u0000\u0000\u0000\u01b4\u01b5\u0005A\u0000\u0000\u01b5\u01ba\u0003f3"+ + "\u0000\u01b6\u01b7\u0005!\u0000\u0000\u01b7\u01b9\u0003f3\u0000\u01b8"+ + "\u01b6\u0001\u0000\u0000\u0000\u01b9\u01bc\u0001\u0000\u0000\u0000\u01ba"+ + "\u01b8\u0001\u0000\u0000\u0000\u01ba\u01bb\u0001\u0000\u0000\u0000\u01bb"+ + "\u01bd\u0001\u0000\u0000\u0000\u01bc\u01ba\u0001\u0000\u0000\u0000\u01bd"+ + 
"\u01be\u0005B\u0000\u0000\u01be\u01c0\u0001\u0000\u0000\u0000\u01bf\u0195"+ + "\u0001\u0000\u0000\u0000\u01bf\u0196\u0001\u0000\u0000\u0000\u01bf\u0199"+ + "\u0001\u0000\u0000\u0000\u01bf\u019a\u0001\u0000\u0000\u0000\u01bf\u019b"+ + "\u0001\u0000\u0000\u0000\u01bf\u019c\u0001\u0000\u0000\u0000\u01bf\u019d"+ + "\u0001\u0000\u0000\u0000\u01bf\u019e\u0001\u0000\u0000\u0000\u01bf\u01a9"+ + "\u0001\u0000\u0000\u0000\u01bf\u01b4\u0001\u0000\u0000\u0000\u01c0A\u0001"+ + "\u0000\u0000\u0000\u01c1\u01c4\u0005/\u0000\u0000\u01c2\u01c4\u0005@\u0000"+ + "\u0000\u01c3\u01c1\u0001\u0000\u0000\u0000\u01c3\u01c2\u0001\u0000\u0000"+ + "\u0000\u01c4C\u0001\u0000\u0000\u0000\u01c5\u01c8\u0003<\u001e\u0000\u01c6"+ + "\u01c8\u0003B!\u0000\u01c7\u01c5\u0001\u0000\u0000\u0000\u01c7\u01c6\u0001"+ + "\u0000\u0000\u0000\u01c8E\u0001\u0000\u0000\u0000\u01c9\u01ca\u0005\t"+ + "\u0000\u0000\u01ca\u01cb\u0005\u001a\u0000\u0000\u01cbG\u0001\u0000\u0000"+ + "\u0000\u01cc\u01cd\u0005\u000e\u0000\u0000\u01cd\u01d2\u0003J%\u0000\u01ce"+ + "\u01cf\u0005!\u0000\u0000\u01cf\u01d1\u0003J%\u0000\u01d0\u01ce\u0001"+ + "\u0000\u0000\u0000\u01d1\u01d4\u0001\u0000\u0000\u0000\u01d2\u01d0\u0001"+ + "\u0000\u0000\u0000\u01d2\u01d3\u0001\u0000\u0000\u0000\u01d3I\u0001\u0000"+ + "\u0000\u0000\u01d4\u01d2\u0001\u0000\u0000\u0000\u01d5\u01d7\u0003\n\u0005"+ + "\u0000\u01d6\u01d8\u0007\u0004\u0000\u0000\u01d7\u01d6\u0001\u0000\u0000"+ + "\u0000\u01d7\u01d8\u0001\u0000\u0000\u0000\u01d8\u01db\u0001\u0000\u0000"+ + "\u0000\u01d9\u01da\u0005-\u0000\u0000\u01da\u01dc\u0007\u0005\u0000\u0000"+ + "\u01db\u01d9\u0001\u0000\u0000\u0000\u01db\u01dc\u0001\u0000\u0000\u0000"+ + "\u01dcK\u0001\u0000\u0000\u0000\u01dd\u01de\u0005\b\u0000\u0000\u01de"+ + "\u01df\u0003:\u001d\u0000\u01dfM\u0001\u0000\u0000\u0000\u01e0\u01e1\u0005"+ + "\u0002\u0000\u0000\u01e1\u01e2\u0003:\u001d\u0000\u01e2O\u0001\u0000\u0000"+ + "\u0000\u01e3\u01e4\u0005\u000b\u0000\u0000\u01e4\u01e9\u0003R)\u0000\u01e5"+ + 
"\u01e6\u0005!\u0000\u0000\u01e6\u01e8\u0003R)\u0000\u01e7\u01e5\u0001"+ + "\u0000\u0000\u0000\u01e8\u01eb\u0001\u0000\u0000\u0000\u01e9\u01e7\u0001"+ + "\u0000\u0000\u0000\u01e9\u01ea\u0001\u0000\u0000\u0000\u01eaQ\u0001\u0000"+ + "\u0000\u0000\u01eb\u01e9\u0001\u0000\u0000\u0000\u01ec\u01ed\u00038\u001c"+ + "\u0000\u01ed\u01ee\u0005T\u0000\u0000\u01ee\u01ef\u00038\u001c\u0000\u01ef"+ + "S\u0001\u0000\u0000\u0000\u01f0\u01f1\u0005\u0001\u0000\u0000\u01f1\u01f2"+ + "\u0003\u0014\n\u0000\u01f2\u01f4\u0003f3\u0000\u01f3\u01f5\u0003Z-\u0000"+ + "\u01f4\u01f3\u0001\u0000\u0000\u0000\u01f4\u01f5\u0001\u0000\u0000\u0000"+ + "\u01f5U\u0001\u0000\u0000\u0000\u01f6\u01f7\u0005\u0007\u0000\u0000\u01f7"+ + "\u01f8\u0003\u0014\n\u0000\u01f8\u01f9\u0003f3\u0000\u01f9W\u0001\u0000"+ + "\u0000\u0000\u01fa\u01fb\u0005\n\u0000\u0000\u01fb\u01fc\u00036\u001b"+ + "\u0000\u01fcY\u0001\u0000\u0000\u0000\u01fd\u0202\u0003\\.\u0000\u01fe"+ + "\u01ff\u0005!\u0000\u0000\u01ff\u0201\u0003\\.\u0000\u0200\u01fe\u0001"+ + "\u0000\u0000\u0000\u0201\u0204\u0001\u0000\u0000\u0000\u0202\u0200\u0001"+ + "\u0000\u0000\u0000\u0202\u0203\u0001\u0000\u0000\u0000\u0203[\u0001\u0000"+ + "\u0000\u0000\u0204\u0202\u0001\u0000\u0000\u0000\u0205\u0206\u0003<\u001e"+ + "\u0000\u0206\u0207\u0005\u001f\u0000\u0000\u0207\u0208\u0003@ \u0000\u0208"+ + "]\u0001\u0000\u0000\u0000\u0209\u020a\u0007\u0006\u0000\u0000\u020a_\u0001"+ + "\u0000\u0000\u0000\u020b\u020e\u0003b1\u0000\u020c\u020e\u0003d2\u0000"+ + "\u020d\u020b\u0001\u0000\u0000\u0000\u020d\u020c\u0001\u0000\u0000\u0000"+ + "\u020ea\u0001\u0000\u0000\u0000\u020f\u0211\u0007\u0000\u0000\u0000\u0210"+ + "\u020f\u0001\u0000\u0000\u0000\u0210\u0211\u0001\u0000\u0000\u0000\u0211"+ + "\u0212\u0001\u0000\u0000\u0000\u0212\u0213\u0005\u001b\u0000\u0000\u0213"+ + "c\u0001\u0000\u0000\u0000\u0214\u0216\u0007\u0000\u0000\u0000\u0215\u0214"+ + "\u0001\u0000\u0000\u0000\u0215\u0216\u0001\u0000\u0000\u0000\u0216\u0217"+ + 
"\u0001\u0000\u0000\u0000\u0217\u0218\u0005\u001a\u0000\u0000\u0218e\u0001"+ + "\u0000\u0000\u0000\u0219\u021a\u0005\u0019\u0000\u0000\u021ag\u0001\u0000"+ + "\u0000\u0000\u021b\u021c\u0007\u0007\u0000\u0000\u021ci\u0001\u0000\u0000"+ + "\u0000\u021d\u021e\u0005\u0005\u0000\u0000\u021e\u021f\u0003l6\u0000\u021f"+ + "k\u0001\u0000\u0000\u0000\u0220\u0221\u0005A\u0000\u0000\u0221\u0222\u0003"+ + "\u0002\u0001\u0000\u0222\u0223\u0005B\u0000\u0000\u0223m\u0001\u0000\u0000"+ + "\u0000\u0224\u0225\u0005\r\u0000\u0000\u0225\u0226\u0005d\u0000\u0000"+ + "\u0226o\u0001\u0000\u0000\u0000\u0227\u0228\u0005\u0003\u0000\u0000\u0228"+ + "\u022b\u0005Z\u0000\u0000\u0229\u022a\u0005X\u0000\u0000\u022a\u022c\u0003"+ + "8\u001c\u0000\u022b\u0229\u0001\u0000\u0000\u0000\u022b\u022c\u0001\u0000"+ + "\u0000\u0000\u022c\u0236\u0001\u0000\u0000\u0000\u022d\u022e\u0005Y\u0000"+ + "\u0000\u022e\u0233\u0003r9\u0000\u022f\u0230\u0005!\u0000\u0000\u0230"+ + "\u0232\u0003r9\u0000\u0231\u022f\u0001\u0000\u0000\u0000\u0232\u0235\u0001"+ + "\u0000\u0000\u0000\u0233\u0231\u0001\u0000\u0000\u0000\u0233\u0234\u0001"+ + "\u0000\u0000\u0000\u0234\u0237\u0001\u0000\u0000\u0000\u0235\u0233\u0001"+ + "\u0000\u0000\u0000\u0236\u022d\u0001\u0000\u0000\u0000\u0236\u0237\u0001"+ + "\u0000\u0000\u0000\u0237q\u0001\u0000\u0000\u0000\u0238\u0239\u00038\u001c"+ + "\u0000\u0239\u023a\u0005\u001f\u0000\u0000\u023a\u023c\u0001\u0000\u0000"+ + "\u0000\u023b\u0238\u0001\u0000\u0000\u0000\u023b\u023c\u0001\u0000\u0000"+ + "\u0000\u023c\u023d\u0001\u0000\u0000\u0000\u023d\u023e\u00038\u001c\u0000"+ + "\u023es\u0001\u0000\u0000\u0000\u023f\u0240\u0005\u0012\u0000\u0000\u0240"+ + "\u0241\u0003$\u0012\u0000\u0241\u0242\u0005X\u0000\u0000\u0242\u0243\u0003"+ + ":\u001d\u0000\u0243u\u0001\u0000\u0000\u0000\u0244\u0245\u0005\u0011\u0000"+ + "\u0000\u0245\u0248\u0003\u001e\u000f\u0000\u0246\u0247\u0005\u001c\u0000"+ + "\u0000\u0247\u0249\u0003\u001e\u000f\u0000\u0248\u0246\u0001\u0000\u0000"+ + 
"\u0000\u0248\u0249\u0001\u0000\u0000\u0000\u0249w\u0001\u0000\u0000\u0000"+ + "9\u0083\u008c\u009e\u00aa\u00b3\u00bb\u00c1\u00c9\u00cb\u00d0\u00d7\u00dc"+ + "\u00e7\u00ed\u00f5\u00f7\u0102\u0109\u0114\u0117\u011e\u012a\u0132\u013a"+ + "\u013e\u0145\u014d\u0155\u0162\u0166\u016a\u0171\u0175\u017c\u0184\u018c"+ + "\u0193\u01a4\u01af\u01ba\u01bf\u01c3\u01c7\u01d2\u01d7\u01db\u01e9\u01f4"+ + "\u0202\u020d\u0210\u0215\u022b\u0233\u0236\u023b\u0248"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index 027281d44b2dc..e2340df954674 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java @@ -332,6 +332,18 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *

    The default implementation does nothing.

    */ @Override public void exitFunctionExpression(EsqlBaseParser.FunctionExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterFunctionName(EsqlBaseParser.FunctionNameContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitFunctionName(EsqlBaseParser.FunctionNameContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java index 463414ab67ea2..99f038b14b5e0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java @@ -202,6 +202,13 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im * {@link #visitChildren} on {@code ctx}.

    */ @Override public T visitFunctionExpression(EsqlBaseParser.FunctionExpressionContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitFunctionName(EsqlBaseParser.FunctionNameContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java index 1747ff001e162..c6dcaca736e1f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java @@ -313,6 +313,16 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitFunctionExpression(EsqlBaseParser.FunctionExpressionContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#functionName}. + * @param ctx the parse tree + */ + void enterFunctionName(EsqlBaseParser.FunctionNameContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#functionName}. + * @param ctx the parse tree + */ + void exitFunctionName(EsqlBaseParser.FunctionNameContext ctx); /** * Enter a parse tree produced by the {@code toDataType} * labeled alternative in {@link EsqlBaseParser#dataType}. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java index b7411d0f99c09..310d3dc76dd6d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java @@ -193,6 +193,12 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitFunctionExpression(EsqlBaseParser.FunctionExpressionContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#functionName}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitFunctionName(EsqlBaseParser.FunctionNameContext ctx); /** * Visit a parse tree produced by the {@code toDataType} * labeled alternative in {@link EsqlBaseParser#dataType}. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java index 5696ccea188b1..620a25e0170ea 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java @@ -98,10 +98,8 @@ private class PostProcessor extends EsqlBaseParserBaseListener { @Override public void exitFunctionExpression(EsqlBaseParser.FunctionExpressionContext ctx) { // TODO remove this at some point - EsqlBaseParser.IdentifierOrParameterContext identifierOrParameter = ctx.identifierOrParameter(); - EsqlBaseParser.IdentifierContext idCtx = identifierOrParameter.identifier(); - String functionName = idCtx != null ? 
idCtx.getText() : identifierOrParameter.parameter().getText(); - if ("is_null".equalsIgnoreCase(functionName)) { + EsqlBaseParser.FunctionNameContext identifier = ctx.functionName(); + if (identifier.getText().equalsIgnoreCase("is_null")) { throw new ParsingException( source(ctx), "is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index 50cf4bc998679..39f1758b78733 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -593,13 +593,7 @@ public UnresolvedAttribute visitDereference(EsqlBaseParser.DereferenceContext ct @Override public Expression visitFunctionExpression(EsqlBaseParser.FunctionExpressionContext ctx) { - EsqlBaseParser.IdentifierOrParameterContext identifierOrParameter = ctx.identifierOrParameter(); - String name; - if (identifierOrParameter.identifier() != null) { - name = visitIdentifier(identifierOrParameter.identifier()); - } else { - name = unresolvedAttributeNameInParam(identifierOrParameter.parameter(), expression(identifierOrParameter.parameter())); - } + String name = visitFunctionName(ctx.functionName()); List args = expressions(ctx.booleanExpression()); if ("is_null".equals(EsqlFunctionRegistry.normalizeName(name))) { throw new ParsingException( @@ -616,6 +610,23 @@ public Expression visitFunctionExpression(EsqlBaseParser.FunctionExpressionConte return new UnresolvedFunction(source(ctx), name, FunctionResolutionStrategy.DEFAULT, args); } + @Override + public String visitFunctionName(EsqlBaseParser.FunctionNameContext ctx) { + if (ctx.DEV_MATCH() != null) { + return ctx.DEV_MATCH().getText(); + } + return 
visitIdentifierOrParameter(ctx.identifierOrParameter()); + } + + @Override + public String visitIdentifierOrParameter(EsqlBaseParser.IdentifierOrParameterContext ctx) { + if (ctx.identifier() != null) { + return visitIdentifier(ctx.identifier()); + } + + return unresolvedAttributeNameInParam(ctx.parameter(), expression(ctx.parameter())); + } + @Override public Expression visitInlineCast(EsqlBaseParser.InlineCastContext ctx) { Source source = source(ctx); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java index 18aa2628fdc7c..2c8604a7c4a80 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java @@ -24,15 +24,18 @@ import org.elasticsearch.xpack.esql.core.planner.ExpressionTranslators; import org.elasticsearch.xpack.esql.core.planner.TranslatorHandler; import org.elasticsearch.xpack.esql.core.querydsl.query.MatchAll; +import org.elasticsearch.xpack.esql.core.querydsl.query.MatchQuery; import org.elasticsearch.xpack.esql.core.querydsl.query.NotQuery; import org.elasticsearch.xpack.esql.core.querydsl.query.Query; +import org.elasticsearch.xpack.esql.core.querydsl.query.QueryStringQuery; import org.elasticsearch.xpack.esql.core.querydsl.query.RangeQuery; import org.elasticsearch.xpack.esql.core.querydsl.query.TermQuery; import org.elasticsearch.xpack.esql.core.querydsl.query.TermsQuery; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.Check; -import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextFunction; +import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; +import 
org.elasticsearch.xpack.esql.expression.function.fulltext.QueryString; import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils; @@ -55,6 +58,7 @@ import java.util.ArrayList; import java.util.LinkedHashSet; import java.util.List; +import java.util.Map; import java.util.Set; import static org.elasticsearch.xpack.esql.core.expression.Foldables.valueOf; @@ -85,17 +89,11 @@ public final class EsqlExpressionTranslators { new ExpressionTranslators.StringQueries(), new ExpressionTranslators.Matches(), new ExpressionTranslators.MultiMatches(), - new FullTextFunctions(), + new MatchFunctionTranslator(), + new QueryStringFunctionTranslator(), new Scalars() ); - public static class FullTextFunctions extends ExpressionTranslator { - @Override - protected Query asQuery(FullTextFunction fullTextFunction, TranslatorHandler handler) { - return fullTextFunction.asQuery(); - } - } - public static Query toQuery(Expression e, TranslatorHandler handler) { Query translation = null; for (ExpressionTranslator translator : QUERY_TRANSLATORS) { @@ -528,4 +526,18 @@ private static RangeQuery translate(Range r, TranslatorHandler handler) { ); } } + + public static class MatchFunctionTranslator extends ExpressionTranslator { + @Override + protected Query asQuery(Match match, TranslatorHandler handler) { + return new MatchQuery(match.source(), ((FieldAttribute) match.field()).name(), match.queryAsText()); + } + } + + public static class QueryStringFunctionTranslator extends ExpressionTranslator { + @Override + protected Query asQuery(QueryString queryString, TranslatorHandler handler) { + return new QueryStringQuery(queryString.source(), queryString.queryAsText(), Map.of(), null); + } + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 965358c0c3f8c..f881c0e1a9bba 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -252,6 +252,10 @@ public final void test() throws Throwable { "can't use QSTR function in csv tests", testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.QSTR_FUNCTION.capabilityName()) ); + assumeFalse( + "can't use MATCH function in csv tests", + testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.MATCH_FUNCTION.capabilityName()) + ); if (Build.current().isSnapshot()) { assertThat( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 83b3e88c57964..612f2870fe8bc 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.analysis; import org.elasticsearch.Build; +import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.EsqlCapabilities; @@ -1086,6 +1087,15 @@ public void testMatchFilter() throws Exception { ); } + public void testMatchFunctionNotAllowedAfterCommands() throws Exception { + assumeTrue("skipping because MATCH is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); + + assertEquals( + "1:24: [MATCH] function cannot be used after LIMIT", + error("from test | limit 10 | where match(first_name, \"Anna\")") + ); + } + public void testQueryStringFunctionsNotAllowedAfterCommands() throws Exception { assumeTrue("skipping because QSTR is not enabled", 
EsqlCapabilities.Cap.QSTR_FUNCTION.isEnabled()); @@ -1146,26 +1156,167 @@ public void testQueryStringFunctionsNotAllowedAfterCommands() throws Exception { ); } - public void testQueryStringFunctionsOnlyAllowedInWhere() throws Exception { + public void testQueryStringFunctionOnlyAllowedInWhere() throws Exception { assumeTrue("skipping because QSTR is not enabled", EsqlCapabilities.Cap.QSTR_FUNCTION.isEnabled()); - assertEquals("1:22: [QSTR] function is only supported in WHERE commands", error("from test | eval y = qstr(\"Anna\")")); - assertEquals("1:18: [QSTR] function is only supported in WHERE commands", error("from test | sort qstr(\"Connection\") asc")); - assertEquals("1:5: [QSTR] function is only supported in WHERE commands", error("row qstr(\"Connection\")")); + assertEquals("1:9: [QSTR] function is only supported in WHERE commands", error("row a = qstr(\"Anna\")")); + checkFullTextFunctionsOnlyAllowedInWhere("QSTR", "qstr(\"Anna\")"); + } + + public void testMatchFunctionOnlyAllowedInWhere() throws Exception { + assumeTrue("skipping because MATCH is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); + + checkFullTextFunctionsOnlyAllowedInWhere("MATCH", "match(first_name, \"Anna\")"); + } + + private void checkFullTextFunctionsOnlyAllowedInWhere(String functionName, String functionInvocation) throws Exception { + assertEquals( + "1:22: [" + functionName + "] function is only supported in WHERE commands", + error("from test | eval y = " + functionInvocation) + ); + assertEquals( + "1:18: [" + functionName + "] function is only supported in WHERE commands", + error("from test | sort " + functionInvocation + " asc") + ); assertEquals( - "1:23: [QSTR] function is only supported in WHERE commands", - error("from test | STATS c = qstr(\"foo\") BY languages") + "1:23: [" + functionName + "] function is only supported in WHERE commands", + error("from test | STATS c = " + functionInvocation + " BY first_name") ); } public void 
testQueryStringFunctionArgNotNullOrConstant() throws Exception { assumeTrue("skipping because QSTR is not enabled", EsqlCapabilities.Cap.QSTR_FUNCTION.isEnabled()); - assertEquals("1:19: argument of [QSTR] must be a constant, received [first_name]", error("from test | where qstr(first_name)")); - assertEquals("1:19: argument of [QSTR] cannot be null, received [null]", error("from test | where qstr(null)")); + assertEquals( + "1:19: argument of [qstr(first_name)] must be a constant, received [first_name]", + error("from test | where qstr(first_name)") + ); + assertEquals("1:19: argument of [qstr(null)] cannot be null, received [null]", error("from test | where qstr(null)")); // Other value types are tested in QueryStringFunctionTests } + public void testQueryStringWithDisjunctions() { + assumeTrue("skipping because QSTR is not enabled", EsqlCapabilities.Cap.QSTR_FUNCTION.isEnabled()); + + checkWithDisjunctions("QSTR", "qstr(\"first_name: Anna\")"); + } + + public void testMatchWithDisjunctions() { + assumeTrue("skipping because MATCH is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); + + checkWithDisjunctions("MATCH", "match(first_name, \"Anna\")"); + } + + private void checkWithDisjunctions(String functionName, String functionInvocation) { + assertEquals( + LoggerMessageFormat.format( + null, + "1:19: Invalid condition [{} or length(first_name) > 12]. " + "Function {} can't be used as part of an or condition", + functionInvocation, + functionName + ), + error("from test | where " + functionInvocation + " or length(first_name) > 12") + ); + assertEquals( + LoggerMessageFormat.format( + null, + "1:19: Invalid condition [({} and first_name is not null) or (length(first_name) > 12 and first_name is null)]. 
" + + "Function {} can't be used as part of an or condition", + functionInvocation, + functionName + ), + error( + "from test | where (" + + functionInvocation + + " and first_name is not null) or (length(first_name) > 12 and first_name is null)" + ) + ); + assertEquals( + LoggerMessageFormat.format( + null, + "1:19: Invalid condition [({} and first_name is not null) or first_name is null]. " + + "Function {} can't be used as part of an or condition", + functionInvocation, + functionName + ), + error("from test | where (" + functionInvocation + " and first_name is not null) or first_name is null") + ); + } + + public void testQueryStringFunctionWithNonBooleanFunctions() { + assumeTrue("skipping because QSTR is not enabled", EsqlCapabilities.Cap.QSTR_FUNCTION.isEnabled()); + + checkFullTextFunctionsWithNonBooleanFunctions("QSTR", "qstr(\"first_name: Anna\")"); + } + + public void testMatchFunctionWithNonBooleanFunctions() { + assumeTrue("skipping because MATCH is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); + + checkFullTextFunctionsWithNonBooleanFunctions("MATCH", "match(first_name, \"Anna\")"); + } + + private void checkFullTextFunctionsWithNonBooleanFunctions(String functionName, String functionInvocation) { + assertEquals( + "1:19: Invalid condition [" + functionInvocation + " is not null]. Function " + functionName + " can't be used with ISNOTNULL", + error("from test | where " + functionInvocation + " is not null") + ); + assertEquals( + "1:19: Invalid condition [" + functionInvocation + " is null]. Function " + functionName + " can't be used with ISNULL", + error("from test | where " + functionInvocation + " is null") + ); + assertEquals( + "1:19: Invalid condition [" + + functionInvocation + + " in (\"hello\", \"world\")]. 
Function " + + functionName + + " can't be used with IN", + error("from test | where " + functionInvocation + " in (\"hello\", \"world\")") + ); + } + + public void testMatchFunctionArgNotConstant() throws Exception { + assumeTrue("skipping because MATCH is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); + + assertEquals( + "1:19: second argument of [match(first_name, first_name)] must be a constant, received [first_name]", + error("from test | where match(first_name, first_name)") + ); + assertEquals( + "1:59: second argument of [match(first_name, query)] must be a constant, received [query]", + error("from test | eval query = concat(\"first\", \" name\") | where match(first_name, query)") + ); + // Other value types are tested in QueryStringFunctionTests + } + + // These should pass eventually once we lift some restrictions on match function + public void testMatchFunctionCurrentlyUnsupportedBehaviour() throws Exception { + assumeTrue("skipping because MATCH is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); + + assertEquals( + "1:68: Unknown column [first_name]", + error("from test | stats max_salary = max(salary) by emp_no | where match(first_name, \"Anna\")") + ); + } + + public void testMatchFunctionNullArgs() throws Exception { + assumeTrue("skipping because MATCH is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); + + assertEquals( + "1:19: first argument of [match(null, \"query\")] cannot be null, received [null]", + error("from test | where match(null, \"query\")") + ); + assertEquals( + "1:19: second argument of [match(first_name, null)] cannot be null, received [null]", + error("from test | where match(first_name, null)") + ); + } + + public void testMatchFunctionTargetsExistingField() throws Exception { + assumeTrue("skipping because MATCH is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); + + assertEquals("1:39: Unknown column [first_name]", error("from test | keep emp_no | where 
match(first_name, \"Anna\")")); + } + public void testCoalesceWithMixedNumericTypes() { assertEquals( "1:22: second argument of [coalesce(languages, height)] must be [integer], found value [height] type [double]", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java new file mode 100644 index 0000000000000..f04e9bd495a49 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.fulltext; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.FieldExpression; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.FunctionName; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; +import org.junit.BeforeClass; + +import java.util.Arrays; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.MATCH_FUNCTION; +import static org.hamcrest.Matchers.equalTo; + 
+@FunctionName("match") +public class MatchTests extends AbstractFunctionTestCase { + + @BeforeClass + public static void checkFunctionEnabled() { + assumeTrue("MATCH function should be enabled ", MATCH_FUNCTION.isEnabled()); + } + + public MatchTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + Set supported = Set.of(DataType.KEYWORD, DataType.TEXT); + List> supportedPerPosition = List.of(supported, supported); + List suppliers = new LinkedList<>(); + for (DataType fieldType : validStringDataTypes()) { + for (DataType queryType : validStringDataTypes()) { + suppliers.add( + new TestCaseSupplier( + "<" + fieldType + "-ES field, " + queryType + ">", + List.of(fieldType, queryType), + () -> testCase(fieldType, randomIdentifier(), queryType, randomAlphaOfLengthBetween(1, 10), equalTo(true)) + ) + ); + suppliers.add( + new TestCaseSupplier( + "<" + fieldType + "-non ES field, " + queryType + ">", + List.of(fieldType, queryType), + typeErrorSupplier(true, supportedPerPosition, List.of(fieldType, queryType), MatchTests::matchTypeErrorSupplier) + ) + ); + } + } + List errorsSuppliers = errorsForCasesWithoutExamples(suppliers, (v, p) -> "string"); + // Don't test null, as it is not allowed but the expected message is not a type error - so we check it separately in VerifierTests + return parameterSuppliersFromTypedData(errorsSuppliers.stream().filter(s -> s.types().contains(DataType.NULL) == false).toList()); + } + + private static String matchTypeErrorSupplier(boolean includeOrdinal, List> validPerPosition, List types) { + return "[] cannot operate on [" + types.getFirst().typeName() + "], which is not a field from an index mapping"; + } + + private static List validStringDataTypes() { + return Arrays.stream(DataType.values()).filter(DataType::isString).toList(); + } + + private static TestCaseSupplier.TestCase testCase( + DataType fieldType, + String field, + 
DataType queryType, + String query, + Matcher matcher + ) { + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData( + new FieldExpression(field, List.of(new FieldExpression.FieldValue(field))), + fieldType, + "field" + ), + new TestCaseSupplier.TypedData(new BytesRef(query), queryType, "query") + ), + "EndsWithEvaluator[str=Attribute[channel=0], suffix=Attribute[channel=1]]", + DataType.BOOLEAN, + matcher + ); + } + + @Override + protected Expression build(Source source, List args) { + return new Match(source, args.get(0), args.get(1)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringFunctionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringTests.java similarity index 92% rename from x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringFunctionTests.java rename to x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringTests.java index 37e16a2499cd9..8b0e4f10b8d54 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringFunctionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringTests.java @@ -29,14 +29,14 @@ import static org.hamcrest.Matchers.equalTo; @FunctionName("qstr") -public class QueryStringFunctionTests extends AbstractFunctionTestCase { +public class QueryStringTests extends AbstractFunctionTestCase { @BeforeClass public static void checkFunctionEnabled() { assumeTrue("QSTR capability should be enabled ", QSTR_FUNCTION.isEnabled()); } - public QueryStringFunctionTests(@Name("TestCase") Supplier testCaseSupplier) { + public QueryStringTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @@ -77,6 +77,6 @@ private static 
TestCaseSupplier.TestCase testCase(DataType strType, String str, @Override protected Expression build(Source source, List args) { - return new QueryStringFunction(source, args.get(0)); + return new QueryString(source, args.get(0)); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index c2779b7dbc46d..3dd0828b82eed 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -50,7 +50,6 @@ import org.elasticsearch.xpack.esql.plan.physical.EvalExec; import org.elasticsearch.xpack.esql.plan.physical.ExchangeExec; import org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec; -import org.elasticsearch.xpack.esql.plan.physical.FilterExec; import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; @@ -438,22 +437,24 @@ public void testQueryStringFunctionConjunctionWhereOperands() { /** * Expecting * LimitExec[1000[INTEGER]] - * \_ExchangeExec[[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gender{f}#5, job{f}#10, job.raw{f}#11, languages{f}#6, last_n - * ame{f}#7, long_noidx{f}#12, salary{f}#8],false] - * \_ProjectExec[[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gender{f}#5, job{f}#10, job.raw{f}#11, languages{f}#6, last_n - * ame{f}#7, long_noidx{f}#12, salary{f}#8]] - * \_FieldExtractExec[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gen] - * \_EsQueryExec[test], indexMode[standard], query[{"bool":{"should":[{"query_string":{"query":"last_name: Smith","fields":[]}}, - * 
{"esql_single_value":{"field":"emp_no","next":{"range":{"emp_no":{"gt":10010,"boost":1.0}}},"source":"emp_no > 10010@2:37"}}], - * "boost":1.0}}][_doc{f}#13], limit[1000], sort[] estimatedRowSize[324] + * \_ExchangeExec[[!alias_integer, boolean{f}#4, byte{f}#5, constant_keyword-foo{f}#6, date{f}#7, double{f}#8, float{f}#9, half_ + * float{f}#10, integer{f}#12, ip{f}#13, keyword{f}#14, long{f}#15, scaled_float{f}#11, short{f}#17, text{f}#18, unsigned_long{f}#16], + * false] + * \_ProjectExec[[!alias_integer, boolean{f}#4, byte{f}#5, constant_keyword-foo{f}#6, date{f}#7, double{f}#8, float{f}#9, half_ + * float{f}#10, integer{f}#12, ip{f}#13, keyword{f}#14, long{f}#15, scaled_float{f}#11, short{f}#17, text{f}#18, unsigned_long{f}#16] + * \_FieldExtractExec[!alias_integer, boolean{f}#4, byte{f}#5, constant_k..] + * \_EsQueryExec[test], indexMode[standard], query[{"bool":{"must":[{"query_string":{"query":"last_name: Smith","fields":[]}},{ + * "esql_single_value":{"field":"ip","next":{"terms":{"ip":["127.0.0.1/32"],"boost":1.0}}, + * "source":"cidr_match(ip, \"127.0.0.1/32\")@2:38"}}],"boost":1.0}}][_doc{f}#21], limit[1000], sort[] estimatedRowSize[354] */ - public void testQueryStringFunctionDisjunctionWhereClauses() { + public void testQueryStringFunctionWithFunctionsPushedToLucene() { assumeTrue("skipping because QSTR_FUNCTION is not enabled", EsqlCapabilities.Cap.QSTR_FUNCTION.isEnabled()); String queryText = """ from test - | where qstr("last_name: Smith") or emp_no > 10010 + | where qstr("last_name: Smith") and cidr_match(ip, "127.0.0.1/32") """; - var plan = plannerOptimizer.plan(queryText, IS_SV_STATS); + var analyzer = makeAnalyzer("mapping-all-types.json", new EnrichResolution()); + var plan = plannerOptimizer.plan(queryText, IS_SV_STATS, analyzer); var limit = as(plan, LimitExec.class); var exchange = as(limit.child(), ExchangeExec.class); @@ -462,34 +463,34 @@ public void testQueryStringFunctionDisjunctionWhereClauses() { var query = as(field.child(), 
EsQueryExec.class); assertThat(query.limit().fold(), is(1000)); - Source filterSource = new Source(2, 36, "emp_no > 10000"); - var range = wrapWithSingleQuery(queryText, QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no", filterSource); + Source filterSource = new Source(2, 37, "cidr_match(ip, \"127.0.0.1/32\")"); + var terms = wrapWithSingleQuery(queryText, QueryBuilders.termsQuery("ip", "127.0.0.1/32"), "ip", filterSource); var queryString = QueryBuilders.queryStringQuery("last_name: Smith"); - var expected = QueryBuilders.boolQuery().should(queryString).should(range); + var expected = QueryBuilders.boolQuery().must(queryString).must(terms); assertThat(query.query().toString(), is(expected.toString())); } /** * Expecting * LimitExec[1000[INTEGER]] - * \_ExchangeExec[[!alias_integer, boolean{f}#4, byte{f}#5, constant_keyword-foo{f}#6, date{f}#7, double{f}#8, float{f}#9, half_ - * float{f}#10, integer{f}#12, ip{f}#13, keyword{f}#14, long{f}#15, scaled_float{f}#11, short{f}#17, text{f}#18, unsigned_long{f}#16], - * false] - * \_ProjectExec[[!alias_integer, boolean{f}#4, byte{f}#5, constant_keyword-foo{f}#6, date{f}#7, double{f}#8, float{f}#9, half_ - * float{f}#10, integer{f}#12, ip{f}#13, keyword{f}#14, long{f}#15, scaled_float{f}#11, short{f}#17, text{f}#18, unsigned_long{f}#16] - * \_FieldExtractExec[!alias_integer, boolean{f}#4, byte{f}#5, constant_k..] 
- * \_EsQueryExec[test], indexMode[standard], query[{"bool":{"must":[{"query_string":{"query":"last_name: Smith","fields":[]}},{ - * "esql_single_value":{"field":"ip","next":{"terms":{"ip":["127.0.0.1/32"],"boost":1.0}}, - * "source":"cidr_match(ip, \"127.0.0.1/32\")@2:38"}}],"boost":1.0}}][_doc{f}#21], limit[1000], sort[] estimatedRowSize[354] + * \_ExchangeExec[[_meta_field{f}#1163, emp_no{f}#1157, first_name{f}#1158, gender{f}#1159, job{f}#1164, job.raw{f}#1165, langua + * ges{f}#1160, last_name{f}#1161, long_noidx{f}#1166, salary{f}#1162],false] + * \_ProjectExec[[_meta_field{f}#1163, emp_no{f}#1157, first_name{f}#1158, gender{f}#1159, job{f}#1164, job.raw{f}#1165, langua + * ges{f}#1160, last_name{f}#1161, long_noidx{f}#1166, salary{f}#1162]] + * \_FieldExtractExec[_meta_field{f}#1163, emp_no{f}#1157, first_name{f}#] + * \_EsQueryExec[test], indexMode[standard], + * query[{"bool":{"must":[{"query_string":{"query":"last_name: Smith","fields":[]}}, + * {"esql_single_value":{"field":"emp_no","next":{"range":{"emp_no":{"gt":10010,"boost":1.0}}},"source":"emp_no > 10010@3:9"}}], + * "boost":1.0}}][_doc{f}#1167], limit[1000], sort[] estimatedRowSize[324] */ - public void testQueryStringFunctionWithFunctionsPushedToLucene() { + public void testQueryStringFunctionMultipleWhereClauses() { assumeTrue("skipping because QSTR_FUNCTION is not enabled", EsqlCapabilities.Cap.QSTR_FUNCTION.isEnabled()); String queryText = """ from test - | where qstr("last_name: Smith") and cidr_match(ip, "127.0.0.1/32") + | where qstr("last_name: Smith") + | where emp_no > 10010 """; - var analyzer = makeAnalyzer("mapping-all-types.json", new EnrichResolution()); - var plan = plannerOptimizer.plan(queryText, IS_SV_STATS, analyzer); + var plan = plannerOptimizer.plan(queryText, IS_SV_STATS); var limit = as(plan, LimitExec.class); var exchange = as(limit.child(), ExchangeExec.class); @@ -498,45 +499,140 @@ public void testQueryStringFunctionWithFunctionsPushedToLucene() { var query = 
as(field.child(), EsQueryExec.class); assertThat(query.limit().fold(), is(1000)); - Source filterSource = new Source(2, 37, "cidr_match(ip, \"127.0.0.1/32\")"); - var terms = wrapWithSingleQuery(queryText, QueryBuilders.termsQuery("ip", "127.0.0.1/32"), "ip", filterSource); + Source filterSource = new Source(3, 8, "emp_no > 10000"); + var range = wrapWithSingleQuery(queryText, QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no", filterSource); var queryString = QueryBuilders.queryStringQuery("last_name: Smith"); - var expected = QueryBuilders.boolQuery().must(queryString).must(terms); + var expected = QueryBuilders.boolQuery().must(queryString).must(range); assertThat(query.query().toString(), is(expected.toString())); } /** * Expecting - *LimitExec[1000[INTEGER]] - * \_ExchangeExec[[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gender{f}#5, job{f}#10, job.raw{f}#11, languages{f}#6, last_n - * ame{f}#7, long_noidx{f}#12, salary{f}#8],false] - * \_ProjectExec[[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gender{f}#5, job{f}#10, job.raw{f}#11, languages{f}#6, last_n - * ame{f}#7, long_noidx{f}#12, salary{f}#8]] - * \_FieldExtractExec[_meta_field{f}#9, emp_no{f}#3, gender{f}#5, job{f}#] - * \_LimitExec[1000[INTEGER]] - * \_FilterExec[LENGTH(first_name{f}#4) > 10[INTEGER]] - * \_FieldExtractExec[first_name{f}#4] - * \_EsQueryExec[test], indexMode[standard], - * query[{"query_string":{"query":"last_name: Smith","fields":[]}}][_doc{f}#13], limit[], sort[] estimatedRowSize[324] + * LimitExec[1000[INTEGER]] + * \_ExchangeExec[[_meta_field{f}#8, emp_no{f}#2, first_name{f}#3, gender{f}#4, job{f}#9, job.raw{f}#10, languages{f}#5, last_na + * me{f}#6, long_noidx{f}#11, salary{f}#7],false] + * \_ProjectExec[[_meta_field{f}#8, emp_no{f}#2, first_name{f}#3, gender{f}#4, job{f}#9, job.raw{f}#10, languages{f}#5, last_na + * me{f}#6, long_noidx{f}#11, salary{f}#7]] + * \_FieldExtractExec[_meta_field{f}#8, emp_no{f}#2, first_name{f}#3, gen] + * \_EsQueryExec[test], 
indexMode[standard], query[{"bool": + * {"must":[{"query_string":{"query":"last_name: Smith","fields":[]}}, + * {"query_string":{"query":"emp_no: [10010 TO *]","fields":[]}}],"boost":1.0}}] */ - public void testQueryStringFunctionWithFunctionNotPushedDown() { + public void testQueryStringFunctionMultipleQstrClauses() { assumeTrue("skipping because QSTR_FUNCTION is not enabled", EsqlCapabilities.Cap.QSTR_FUNCTION.isEnabled()); String queryText = """ from test - | where qstr("last_name: Smith") and length(first_name) > 10 + | where qstr("last_name: Smith") and qstr("emp_no: [10010 TO *]") """; var plan = plannerOptimizer.plan(queryText, IS_SV_STATS); - var firstLimit = as(plan, LimitExec.class); - var exchange = as(firstLimit.child(), ExchangeExec.class); + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); var project = as(exchange.child(), ProjectExec.class); var field = as(project.child(), FieldExtractExec.class); - var secondLimit = as(field.child(), LimitExec.class); - var filter = as(secondLimit.child(), FilterExec.class); - var fieldExtract = as(filter.child(), FieldExtractExec.class); - var query = as(fieldExtract.child(), EsQueryExec.class); + var query = as(field.child(), EsQueryExec.class); + assertThat(query.limit().fold(), is(1000)); - var expected = QueryBuilders.queryStringQuery("last_name: Smith"); + var queryStringLeft = QueryBuilders.queryStringQuery("last_name: Smith"); + var queryStringRight = QueryBuilders.queryStringQuery("emp_no: [10010 TO *]"); + var expected = QueryBuilders.boolQuery().must(queryStringLeft).must(queryStringRight); + assertThat(query.query().toString(), is(expected.toString())); + } + + /** + * Expecting + * LimitExec[1000[INTEGER]] + * \_ExchangeExec[[_meta_field{f}#8, emp_no{f}#2, first_name{f}#3, gender{f}#4, job{f}#9, job.raw{f}#10, languages{f}#5, last_na + * me{f}#6, long_noidx{f}#11, salary{f}#7],false] + * \_ProjectExec[[_meta_field{f}#8, emp_no{f}#2, first_name{f}#3, 
gender{f}#4, job{f}#9, job.raw{f}#10, languages{f}#5, last_na + * me{f}#6, long_noidx{f}#11, salary{f}#7]] + * \_FieldExtractExec[_meta_field{f}#8, emp_no{f}#2, first_name{f}#3, gen] + * \_EsQueryExec[test], indexMode[standard], query[{"match":{"last_name":{"query":"Smith"}}}] + */ + public void testMatchFunction() { + assumeTrue("skipping because MATCH function is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); + var plan = plannerOptimizer.plan(""" + from test + | where match(last_name, "Smith") + """, IS_SV_STATS); + + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var field = as(project.child(), FieldExtractExec.class); + var query = as(field.child(), EsQueryExec.class); + assertThat(query.limit().fold(), is(1000)); + var expected = QueryBuilders.matchQuery("last_name", "Smith"); + assertThat(query.query().toString(), is(expected.toString())); + } + + /** + * Expecting + * LimitExec[1000[INTEGER]] + * \_ExchangeExec[[_meta_field{f}#1419, emp_no{f}#1413, first_name{f}#1414, gender{f}#1415, job{f}#1420, job.raw{f}#1421, langua + * ges{f}#1416, last_name{f}#1417, long_noidx{f}#1422, salary{f}#1418],false] + * \_ProjectExec[[_meta_field{f}#1419, emp_no{f}#1413, first_name{f}#1414, gender{f}#1415, job{f}#1420, job.raw{f}#1421, langua + * ges{f}#1416, last_name{f}#1417, long_noidx{f}#1422, salary{f}#1418]] + * \_FieldExtractExec[_meta_field{f}#1419, emp_no{f}#1413, first_name{f}#] + * \EsQueryExec[test], indexMode[standard], query[{"bool":{"must":[{"match":{"last_name":{"query":"Smith"}}}, + * {"esql_single_value":{"field":"emp_no","next":{"range":{"emp_no":{"gt":10010,"boost":1.0}}}, + * "source":"emp_no > 10010@2:39"}}],"boost":1.0}}][_doc{f}#14], limit[1000], sort[] estimatedRowSize[324] + */ + public void testMatchFunctionConjunctionWhereOperands() { + assumeTrue("skipping because MATCH function is not enabled", 
EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); + String queryText = """ + from test + | where match(last_name, "Smith") and emp_no > 10010 + """; + var plan = plannerOptimizer.plan(queryText, IS_SV_STATS); + + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var field = as(project.child(), FieldExtractExec.class); + var query = as(field.child(), EsQueryExec.class); + assertThat(query.limit().fold(), is(1000)); + + Source filterSource = new Source(2, 38, "emp_no > 10000"); + var range = wrapWithSingleQuery(queryText, QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no", filterSource); + var queryString = QueryBuilders.matchQuery("last_name", "Smith"); + var expected = QueryBuilders.boolQuery().must(queryString).must(range); + assertThat(query.query().toString(), is(expected.toString())); + } + + /** + * Expecting + * LimitExec[1000[INTEGER]] + * \_ExchangeExec[[!alias_integer, boolean{f}#4, byte{f}#5, constant_keyword-foo{f}#6, date{f}#7, double{f}#8, float{f}#9, half_ + * float{f}#10, integer{f}#12, ip{f}#13, keyword{f}#14, long{f}#15, scaled_float{f}#11, short{f}#17, text{f}#18, unsigned_long{f}#16], + * false] + * \_ProjectExec[[!alias_integer, boolean{f}#4, byte{f}#5, constant_keyword-foo{f}#6, date{f}#7, double{f}#8, float{f}#9, half_ + * float{f}#10, integer{f}#12, ip{f}#13, keyword{f}#14, long{f}#15, scaled_float{f}#11, short{f}#17, text{f}#18, unsigned_long{f}#16] + * \_FieldExtractExec[!alias_integer, boolean{f}#4, byte{f}#5, constant_k..] 
+ * \_EsQueryExec[test], indexMode[standard], query[{"bool":{"must":[{"match":{"text":{"query":"beta"}}}, + * {"esql_single_value":{"field":"ip","next":{"terms":{"ip":["127.0.0.1/32"],"boost":1.0}}, + * "source":"cidr_match(ip, \"127.0.0.1/32\")@2:33"}}],"boost":1.0}}][_doc{f}#22], limit[1000], sort[] estimatedRowSize[354] + */ + public void testMatchFunctionWithFunctionsPushedToLucene() { + assumeTrue("skipping because MATCH function is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); + String queryText = """ + from test + | where match(text, "beta") and cidr_match(ip, "127.0.0.1/32") + """; + var analyzer = makeAnalyzer("mapping-all-types.json", new EnrichResolution()); + var plan = plannerOptimizer.plan(queryText, IS_SV_STATS, analyzer); + + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var field = as(project.child(), FieldExtractExec.class); + var query = as(field.child(), EsQueryExec.class); + assertThat(query.limit().fold(), is(1000)); + + Source filterSource = new Source(2, 32, "cidr_match(ip, \"127.0.0.1/32\")"); + var terms = wrapWithSingleQuery(queryText, QueryBuilders.termsQuery("ip", "127.0.0.1/32"), "ip", filterSource); + var queryString = QueryBuilders.matchQuery("text", "beta"); + var expected = QueryBuilders.boolQuery().must(queryString).must(terms); assertThat(query.query().toString(), is(expected.toString())); } @@ -548,16 +644,15 @@ public void testQueryStringFunctionWithFunctionNotPushedDown() { * \_ProjectExec[[_meta_field{f}#1163, emp_no{f}#1157, first_name{f}#1158, gender{f}#1159, job{f}#1164, job.raw{f}#1165, langua * ges{f}#1160, last_name{f}#1161, long_noidx{f}#1166, salary{f}#1162]] * \_FieldExtractExec[_meta_field{f}#1163, emp_no{f}#1157, first_name{f}#] - * \_EsQueryExec[test], indexMode[standard], - * query[{"bool":{"must":[{"query_string":{"query":"last_name: Smith","fields":[]}}, - * 
{"esql_single_value":{"field":"emp_no","next":{"range":{"emp_no":{"gt":10010,"boost":1.0}}},"source":"emp_no > 10010@3:9"}}], - * "boost":1.0}}][_doc{f}#1167], limit[1000], sort[] estimatedRowSize[324] + * \_EsQueryExec[test], indexMode[standard], query[{"bool":{"must":[{"match":{"last_name":{"query":"Smith"}}}, + * {"esql_single_value":{"field":"emp_no","next":{"range":{"emp_no":{"gt":10010,"boost":1.0}}}, + * "source":"emp_no > 10010@3:9"}}],"boost":1.0}}][_doc{f}#14], limit[1000], sort[] estimatedRowSize[324] */ - public void testQueryStringFunctionMultipleWhereClauses() { - assumeTrue("skipping because QSTR_FUNCTION is not enabled", EsqlCapabilities.Cap.QSTR_FUNCTION.isEnabled()); + public void testMatchFunctionMultipleWhereClauses() { + assumeTrue("skipping because MATCH function is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); String queryText = """ from test - | where qstr("last_name: Smith") + | where match(last_name, "Smith") | where emp_no > 10010 """; var plan = plannerOptimizer.plan(queryText, IS_SV_STATS); @@ -571,7 +666,7 @@ public void testQueryStringFunctionMultipleWhereClauses() { Source filterSource = new Source(3, 8, "emp_no > 10000"); var range = wrapWithSingleQuery(queryText, QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no", filterSource); - var queryString = QueryBuilders.queryStringQuery("last_name: Smith"); + var queryString = QueryBuilders.matchQuery("last_name", "Smith"); var expected = QueryBuilders.boolQuery().must(queryString).must(range); assertThat(query.query().toString(), is(expected.toString())); } @@ -584,15 +679,14 @@ public void testQueryStringFunctionMultipleWhereClauses() { * \_ProjectExec[[_meta_field{f}#8, emp_no{f}#2, first_name{f}#3, gender{f}#4, job{f}#9, job.raw{f}#10, languages{f}#5, last_na * me{f}#6, long_noidx{f}#11, salary{f}#7]] * \_FieldExtractExec[_meta_field{f}#8, emp_no{f}#2, first_name{f}#3, gen] - * \_EsQueryExec[test], indexMode[standard], query[{"bool": - * 
{"must":[{"query_string":{"query":"last_name: Smith","fields":[]}}, - * {"query_string":{"query":"emp_no: [10010 TO *]","fields":[]}}],"boost":1.0}}] + * \_EsQueryExec[test], indexMode[standard], query[{"bool":{"must":[{"match":{"last_name":{"query":"Smith"}}}, + * {"match":{"first_name":{"query":"John"}}}],"boost":1.0}}][_doc{f}#14], limit[1000], sort[] estimatedRowSize[324] */ - public void testQueryStringFunctionMultipleQstrClauses() { - assumeTrue("skipping because QSTR_FUNCTION is not enabled", EsqlCapabilities.Cap.QSTR_FUNCTION.isEnabled()); + public void testMatchFunctionMultipleQstrClauses() { + assumeTrue("skipping because MATCH function is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); String queryText = """ from test - | where qstr("last_name: Smith") and qstr("emp_no: [10010 TO *]") + | where match(last_name, "Smith") and match(first_name, "John") """; var plan = plannerOptimizer.plan(queryText, IS_SV_STATS); @@ -603,8 +697,8 @@ public void testQueryStringFunctionMultipleQstrClauses() { var query = as(field.child(), EsQueryExec.class); assertThat(query.limit().fold(), is(1000)); - var queryStringLeft = QueryBuilders.queryStringQuery("last_name: Smith"); - var queryStringRight = QueryBuilders.queryStringQuery("emp_no: [10010 TO *]"); + var queryStringLeft = QueryBuilders.matchQuery("last_name", "Smith"); + var queryStringRight = QueryBuilders.matchQuery("first_name", "John"); var expected = QueryBuilders.boolQuery().must(queryStringLeft).must(queryStringRight); assertThat(query.query().toString(), is(expected.toString())); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 6ff541de1854a..e8980c99a61f9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.VerificationException; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils; @@ -5565,6 +5566,42 @@ public void testToDatePeriodToTimeDurationWithField() { assertEquals("1:60: argument of [to_timeduration(x)] must be a constant, received [x]", e.getMessage().substring(header.length())); } + // These should pass eventually once we lift some restrictions on match function + public void testMatchWithNonIndexedColumnCurrentlyUnsupported() { + assumeTrue("skipping because MATCH function is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); + + final String header = "Found 1 problem\nline "; + VerificationException e = expectThrows(VerificationException.class, () -> plan(""" + from test | eval initial = substring(first_name, 1) | where match(initial, "A")""")); + assertTrue(e.getMessage().startsWith("Found ")); + assertEquals( + "1:67: [MATCH] cannot operate on [initial], which is not a field from an index mapping", + e.getMessage().substring(header.length()) + ); + + e = expectThrows(VerificationException.class, () -> plan(""" + from test | eval text=concat(first_name, last_name) | where match(text, "cat")""")); + assertTrue(e.getMessage().startsWith("Found ")); + assertEquals( + "1:67: [MATCH] cannot operate on [text], which is not a field from an index mapping", + e.getMessage().substring(header.length()) + ); + } + + public void testMatchFunctionIsNotNullable() { + assumeTrue("skipping because MATCH function is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); + + String queryText = """ + row n = 
null | eval text = n + 5 | where match(text::keyword, "Anna") + """; + + VerificationException ve = expectThrows(VerificationException.class, () -> plan(queryText)); + assertThat( + ve.getMessage(), + containsString("[MATCH] cannot operate on [text::keyword], which is not a field from an index mapping") + ); + } + private Literal nullOf(DataType dataType) { return new Literal(Source.EMPTY, null, dataType); } From d94fbcb01a0004445787f356aeecce5338be02c3 Mon Sep 17 00:00:00 2001 From: Gergely Kalapos Date: Mon, 14 Oct 2024 10:33:36 +0200 Subject: [PATCH 032/449] Add alias event.dataset -> data_stream.dataset (#114642) --- .../component-templates/otel@mappings.yaml | 3 +++ .../rest-api-spec/test/20_logs_tests.yml | 18 ++++++++++++++++++ 2 files changed, 21 insertions(+) diff --git a/x-pack/plugin/otel-data/src/main/resources/component-templates/otel@mappings.yaml b/x-pack/plugin/otel-data/src/main/resources/component-templates/otel@mappings.yaml index 513e1a857787e..4a039886ecc4e 100644 --- a/x-pack/plugin/otel-data/src/main/resources/component-templates/otel@mappings.yaml +++ b/x-pack/plugin/otel-data/src/main/resources/component-templates/otel@mappings.yaml @@ -17,6 +17,9 @@ template: type: constant_keyword data_stream.namespace: type: constant_keyword + event.dataset: + type: alias + path: data_stream.dataset attributes: type: passthrough dynamic: true diff --git a/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_logs_tests.yml b/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_logs_tests.yml index 0957a79552ad3..be4de6dca6c76 100644 --- a/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_logs_tests.yml +++ b/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_logs_tests.yml @@ -145,3 +145,21 @@ Structured log body: index: $datastream-backing-index - is_true: $datastream-backing-index - match: { .$datastream-backing-index.mappings.properties.body.properties.flattened.type: 
"flattened" } +--- +"event.dataset alias must point to data_stream.dataset": + - do: + bulk: + index: logs-generic.otel-default + refresh: true + body: + - create: {} + - '{"@timestamp":"2024-07-18T14:49:33.467654000Z","data_stream":{"dataset":"generic.otel","namespace":"default"}, "body_text":"error1"}' + - is_false: errors + - is_false: errors + - do: + search: + index: logs-generic.otel-default + body: + fields: ["event.dataset"] + - length: { hits.hits: 1 } + - match: { hits.hits.0.fields.event\.dataset: ["generic.otel"] } From bdce88a190efe7c3586119a476458896bd87ffd6 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Mon, 14 Oct 2024 09:34:35 +0100 Subject: [PATCH 033/449] [ML] Feature flag default configs (#114660) --- .../org/elasticsearch/xpack/inference/InferencePlugin.java | 6 ++++-- .../rest-api-spec/test/inference/inference_crud.yml | 4 ++++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index d251120980e0b..d361ce0837b93 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -217,8 +217,10 @@ public Collection createComponents(PluginServices services) { // reference correctly var registry = new InferenceServiceRegistry(inferenceServices, factoryContext); registry.init(services.client()); - for (var service : registry.getServices().values()) { - service.defaultConfigs().forEach(modelRegistry::addDefaultConfiguration); + if (DefaultElserFeatureFlag.isEnabled()) { + for (var service : registry.getServices().values()) { + service.defaultConfigs().forEach(modelRegistry::addDefaultConfiguration); + } } inferenceServiceRegistry.set(registry); diff --git 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml index 11be68cc764e2..b1f640a40b34e 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml @@ -41,6 +41,10 @@ --- "Test get all": + - requires: + cluster_features: "semantic_text.default_elser_2" + reason: semantic_text default ELSER 2 inference ID introduced in 8.16.0 + - do: inference.get: inference_id: "*" From 1d037811fd9e0d407b34bf96635cf118a5f11d46 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Mon, 14 Oct 2024 10:47:24 +0200 Subject: [PATCH 034/449] Renovate Bot PRs should run ci checks (#114699) --- .buildkite/pull-requests.json | 1 + 1 file changed, 1 insertion(+) diff --git a/.buildkite/pull-requests.json b/.buildkite/pull-requests.json index 235a4b2dbb4ad..ea4f34bcbe11e 100644 --- a/.buildkite/pull-requests.json +++ b/.buildkite/pull-requests.json @@ -8,6 +8,7 @@ "admin", "write" ], + "allowed_list": ["elastic-renovate-prod[bot]"], "set_commit_status": false, "build_on_commit": true, "build_on_comment": true, From 08bad488cfabeb5c158eda8f385da9563792fb40 Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Mon, 14 Oct 2024 10:56:30 +0200 Subject: [PATCH 035/449] Simplify NodeShutdownShardsIT (#114583) We no longer need to manually reroute after registering node shutdown in test since https://github.com/elastic/elasticsearch/pull/103251 --- .../xpack/shutdown/NodeShutdownShardsIT.java | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java index d12d093dd5b8d..0e162238e96c8 100644 --- 
a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java +++ b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java @@ -327,11 +327,8 @@ public void testAutoExpandDuringRestart() throws Exception { ensureGreen("myindex"); putNodeShutdown(primaryNodeId, SingleNodeShutdownMetadata.Type.RESTART, null); - // registering node shutdown entry does not perform reroute, neither should it. - // we provoke it here in the test to ensure that auto-expansion has run. - updateIndexSettings(Settings.builder().put("index.routing.allocation.exclude.name", "non-existent"), "myindex"); - assertBusy(() -> assertIndexSetting("myindex", "index.number_of_replicas", "1")); + assertIndexSetting("myindex", "index.number_of_replicas", "1"); indexRandomData("myindex"); internalCluster().restartNode(primaryNode, new InternalTestCluster.RestartCallback() { @@ -361,9 +358,6 @@ public void testAutoExpandDuringReplace() throws Exception { var replacementNodeName = "node_t2"; putNodeShutdown(nodeIdToReplace, SingleNodeShutdownMetadata.Type.REPLACE, replacementNodeName); - // registering node shutdown entry does not perform reroute, neither should it. - // we provoke it here in the test to ensure that auto-expansion has run. 
- updateIndexSettings(Settings.builder().put("index.routing.allocation.exclude.name", "non-existent"), "index"); ensureGreen("index"); assertIndexSetting("index", "index.number_of_replicas", "1"); From 30ff4741c674532c832c53a3bd31aa307dce8a95 Mon Sep 17 00:00:00 2001 From: Ioana Tagirta Date: Mon, 14 Oct 2024 11:05:35 +0200 Subject: [PATCH 036/449] Add generated code changes for HypotEvaluator (#114697) --- .../function/scalar/math/HypotEvaluator.java | 24 +++++++++++++++---- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotEvaluator.java index f5684bcb4be18..22094f7e623e6 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotEvaluator.java @@ -13,16 +13,16 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; import org.elasticsearch.core.Releasables; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.expression.function.Warnings; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Hypot}. * This class is generated. Do not edit it. 
*/ public final class HypotEvaluator implements EvalOperator.ExpressionEvaluator { - private final Warnings warnings; + private final Source source; private final EvalOperator.ExpressionEvaluator n1; @@ -30,12 +30,14 @@ public final class HypotEvaluator implements EvalOperator.ExpressionEvaluator { private final DriverContext driverContext; + private Warnings warnings; + public HypotEvaluator(Source source, EvalOperator.ExpressionEvaluator n1, EvalOperator.ExpressionEvaluator n2, DriverContext driverContext) { + this.source = source; this.n1 = n1; this.n2 = n2; this.driverContext = driverContext; - this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -64,7 +66,7 @@ public DoubleBlock eval(int positionCount, DoubleBlock n1Block, DoubleBlock n2Bl } if (n1Block.getValueCount(p) != 1) { if (n1Block.getValueCount(p) > 1) { - warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); } result.appendNull(); continue position; @@ -75,7 +77,7 @@ public DoubleBlock eval(int positionCount, DoubleBlock n1Block, DoubleBlock n2Bl } if (n2Block.getValueCount(p) != 1) { if (n2Block.getValueCount(p) > 1) { - warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); } result.appendNull(); continue position; @@ -105,6 +107,18 @@ public void close() { Releasables.closeExpectNoException(n1, n2); } + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private 
final Source source; From 2af19d87b08565b2b3960bee8b7e797b4ef27190 Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Mon, 14 Oct 2024 12:24:40 +0300 Subject: [PATCH 037/449] ES|QL: Restrict sorting for _source and counter field types (#114638) --- docs/changelog/114638.yaml | 7 ++++++ .../xpack/esql/core/type/DataType.java | 8 +++++++ .../src/main/resources/tsdb-mapping.json | 4 ++++ .../xpack/esql/action/EsqlCapabilities.java | 7 +++++- .../xpack/esql/analysis/Verifier.java | 8 +++---- .../expression/function/aggregate/Rate.java | 2 +- .../xpack/esql/analysis/AnalyzerTests.java | 2 +- .../xpack/esql/analysis/VerifierTests.java | 23 +++++++++++++++++++ .../rest-api-spec/test/esql/140_metadata.yml | 16 +++++++++++++ .../rest-api-spec/test/esql/40_tsdb.yml | 12 ++++++++-- 10 files changed, 80 insertions(+), 9 deletions(-) create mode 100644 docs/changelog/114638.yaml diff --git a/docs/changelog/114638.yaml b/docs/changelog/114638.yaml new file mode 100644 index 0000000000000..0386aacfe3e18 --- /dev/null +++ b/docs/changelog/114638.yaml @@ -0,0 +1,7 @@ +pr: 114638 +summary: "ES|QL: Restrict sorting for `_source` and counter field types" +area: ES|QL +type: bug +issues: + - 114423 + - 111976 diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java index c0092caeb9d5d..b23703c6d8b66 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java @@ -425,6 +425,10 @@ public static boolean isRepresentable(DataType t) { && t.isCounter() == false; } + public static boolean isCounter(DataType t) { + return t == COUNTER_DOUBLE || t == COUNTER_INTEGER || t == COUNTER_LONG; + } + public static boolean isSpatialPoint(DataType t) { return t == GEO_POINT || t == CARTESIAN_POINT; } @@ -437,6 +441,10 @@ public 
static boolean isSpatial(DataType t) { return t == GEO_POINT || t == CARTESIAN_POINT || t == GEO_SHAPE || t == CARTESIAN_SHAPE; } + public static boolean isSortable(DataType t) { + return false == (t == SOURCE || isCounter(t) || isSpatial(t)); + } + public String nameUpper() { return name; } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/tsdb-mapping.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/tsdb-mapping.json index dd4073d5dc7cf..39b1b10edd916 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/tsdb-mapping.json +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/tsdb-mapping.json @@ -27,6 +27,10 @@ "message_in": { "type": "float", "time_series_metric": "counter" + }, + "message_out": { + "type": "integer", + "time_series_metric": "counter" } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 2e979dcce1758..1d6d81077b9be 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -365,7 +365,12 @@ public enum Cap { /** * Support named parameters for field names. */ - NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES; + NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES, + + /** + * Fix sorting not allowed on _source and counters. 
+ */ + SORTING_ON_SOURCE_AND_COUNTERS_FORBIDDEN; private final boolean snapshotOnly; private final FeatureFlag featureFlag; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index e45db0c02be7e..dd2b72b4d35d9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -190,7 +190,7 @@ else if (p instanceof Lookup lookup) { checkOperationsOnUnsignedLong(p, failures); checkBinaryComparison(p, failures); - checkForSortOnSpatialTypes(p, failures); + checkForSortableDataTypes(p, failures); checkFilterMatchConditions(p, failures); checkFullTextQueryFunctions(p, failures); @@ -555,12 +555,12 @@ private static Failure validateUnsignedLongNegation(Neg neg) { } /** - * Makes sure that spatial types do not appear in sorting contexts. + * Some datatypes are not sortable */ - private static void checkForSortOnSpatialTypes(LogicalPlan p, Set localFailures) { + private static void checkForSortableDataTypes(LogicalPlan p, Set localFailures) { if (p instanceof OrderBy ob) { ob.order().forEach(order -> { - if (DataType.isSpatial(order.dataType())) { + if (DataType.isSortable(order.dataType()) == false) { localFailures.add(fail(order, "cannot sort on " + order.dataType().typeName())); } }); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java index f5597b7d64e81..135264c448f10 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java @@ -115,7 +115,7 @@ public DataType dataType() { protected 
TypeResolution resolveType() { TypeResolution resolution = isType( field(), - dt -> dt == DataType.COUNTER_LONG || dt == DataType.COUNTER_INTEGER || dt == DataType.COUNTER_DOUBLE, + dt -> DataType.isCounter(dt), sourceText(), FIRST, "counter_long", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 5d75549893512..6644f9b17055e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -1638,7 +1638,7 @@ public void testCounterTypes() { var attributes = limit.output().stream().collect(Collectors.toMap(NamedExpression::name, a -> a)); assertThat( attributes.keySet(), - equalTo(Set.of("network.connections", "network.bytes_in", "network.bytes_out", "network.message_in")) + equalTo(Set.of("network.connections", "network.bytes_in", "network.bytes_out", "network.message_in", "network.message_out")) ); assertThat(attributes.get("network.connections").dataType(), equalTo(DataType.LONG)); assertThat(attributes.get("network.bytes_in").dataType(), equalTo(DataType.COUNTER_LONG)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 612f2870fe8bc..01c020b16ecad 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -26,12 +26,16 @@ import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Set; import static org.elasticsearch.xpack.esql.EsqlTestUtils.paramAsConstant; import static 
org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.loadMapping; +import static org.elasticsearch.xpack.esql.core.type.DataType.COUNTER_DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.COUNTER_INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.COUNTER_LONG; import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -909,6 +913,25 @@ public void testSpatialSort() { assertEquals("1:42: cannot sort on cartesian_shape", error("FROM countries_bbox_web | LIMIT 5 | sort shape", countriesBboxWeb)); } + public void testSourceSorting() { + assertEquals("1:35: cannot sort on _source", error("from test metadata _source | sort _source")); + } + + public void testCountersSorting() { + Map counterDataTypes = Map.of( + COUNTER_DOUBLE, + "network.message_in", + COUNTER_INTEGER, + "network.message_out", + COUNTER_LONG, + "network.bytes_out" + ); + for (DataType counterDT : counterDataTypes.keySet()) { + var fieldName = counterDataTypes.get(counterDT); + assertEquals("1:18: cannot sort on " + counterDT.name().toLowerCase(Locale.ROOT), error("from test | sort " + fieldName, tsdb)); + } + } + public void testInlineImpossibleConvert() { assertEquals("1:5: argument of [false::ip] must be [ip or string], found value [false] type [boolean]", error("ROW false::ip")); } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml index 33c9cc7558672..83234901ae8f2 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml @@ -155,3 +155,19 @@ setup: esql.query: body: query: 'FROM test [metadata _source] | STATS 
COUNT_DISTINCT(_source)' + +--- +"sort on _source not allowed": + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [sorting_on_source_and_counters_forbidden] + reason: "Sorting on _source shouldn't have been possible" + - do: + catch: /cannot sort on _source/ + esql.query: + body: + query: 'FROM test metadata _source | sort _source' diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml index 642407ac6d45b..ebf464ba667db 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml @@ -178,11 +178,19 @@ cast counter then filter: --- sort on counter without cast: + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [sorting_on_source_and_counters_forbidden] + reason: "Sorting on counters shouldn't have been possible" - do: - catch: bad_request + catch: /cannot sort on counter_long/ esql.query: body: - query: 'from test | KEEP k8s.pod.network.tx | sort @k8s.pod.network.tx | limit 1' + query: 'from test | KEEP k8s.pod.network.tx | sort k8s.pod.network.tx | limit 1' --- cast then sort on counter: From 2f09fb66e95b327048156d2ef970ebd6c0e2fe23 Mon Sep 17 00:00:00 2001 From: Pooya Salehi Date: Mon, 14 Oct 2024 11:59:44 +0200 Subject: [PATCH 038/449] Preserve thread context when waiting for segment generation in RTG (#114623) Closes ES-9778 --- docs/changelog/114623.yaml | 5 +++++ .../org/elasticsearch/action/get/TransportGetAction.java | 9 +++++---- 2 files changed, 10 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/114623.yaml diff --git a/docs/changelog/114623.yaml b/docs/changelog/114623.yaml new file mode 100644 index 0000000000000..817a8e874bcc0 --- /dev/null +++ 
b/docs/changelog/114623.yaml @@ -0,0 +1,5 @@ +pr: 114623 +summary: Preserve thread context when waiting for segment generation in RTG +area: CRUD +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java index 99eac250641ae..fb4b3907d2bfd 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java @@ -20,6 +20,7 @@ import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.admin.indices.refresh.TransportShardRefreshAction; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.replication.BasicReplicationRequest; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.client.internal.node.NodeClient; @@ -284,11 +285,11 @@ private void tryGetFromTranslog(GetRequest request, IndexShard indexShard, Disco } else { assert r.segmentGeneration() > -1L; assert r.primaryTerm() > Engine.UNKNOWN_PRIMARY_TERM; - indexShard.waitForPrimaryTermAndGeneration( - r.primaryTerm(), - r.segmentGeneration(), - listener.delegateFailureAndWrap((ll, aLong) -> super.asyncShardOperation(request, shardId, ll)) + final ActionListener termAndGenerationListener = ContextPreservingActionListener.wrapPreservingContext( + listener.delegateFailureAndWrap((ll, aLong) -> super.asyncShardOperation(request, shardId, ll)), + threadPool.getThreadContext() ); + indexShard.waitForPrimaryTermAndGeneration(r.primaryTerm(), r.segmentGeneration(), termAndGenerationListener); } } }), TransportGetFromTranslogAction.Response::new, getExecutor(request, shardId)) From 4ab2e6157db6ea02c2abd824ce4c9c03b560cd88 Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Mon, 14 Oct 2024 12:01:53 
+0200 Subject: [PATCH 039/449] Fix failing tests after PR clash (#114625) Two PRs conflicted without github or CI noticing. The first added these tests, and the second modified their behaviour. Both went green in CI and both were merged within an hour of each other. * PR that added the tests: * https://github.com/elastic/elasticsearch/pull/112938 * merged 14:13CET * PR that changed the behaviour of these tests: * https://github.com/elastic/elasticsearch/pull/114411 * merged 14:48CET --- muted-tests.yml | 6 -- .../optimizer/PhysicalPlanOptimizerTests.java | 58 +++++++++---------- 2 files changed, 28 insertions(+), 36 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index d0fc50de31bd1..975eb0c434054 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -363,12 +363,6 @@ tests: - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT method: test {p0=synonyms/60_synonym_rule_get/Synonym rule not found} issue: https://github.com/elastic/elasticsearch/issues/114444 -- class: org.elasticsearch.xpack.esql.optimizer.PhysicalPlanOptimizerTests - method: testPushSpatialIntersectsEvalToSource {default} - issue: https://github.com/elastic/elasticsearch/issues/114627 -- class: org.elasticsearch.xpack.esql.optimizer.PhysicalPlanOptimizerTests - method: testPushWhereEvalToSource {default} - issue: https://github.com/elastic/elasticsearch/issues/114628 - class: org.elasticsearch.xpack.inference.integration.ModelRegistryIT method: testGetModel issue: https://github.com/elastic/elasticsearch/issues/114657 diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 6746b8ff61268..114aed68761fe 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -3211,28 +3211,28 @@ public void testPushSpatialIntersectsStringToSource() { /** * Plan: * - * LimitExec[1000[INTEGER]] - * \_ExchangeExec[[],false] - * \_FragmentExec[filter=null, estimatedRowSize=0, reducer=[], fragment=[ + * EvalExec[[scalerank{f}#8 AS rank]] + * \_LimitExec[1000[INTEGER]] + * \_ExchangeExec[[],false] + * \_FragmentExec[filter=null, estimatedRowSize=0, reducer=[], fragment=[ * Limit[1000[INTEGER]] - * \_Filter[rank{r}#4 lt 4[INTEGER]] - * \_Eval[[scalerank{f}#8 AS rank]] - * \_EsRelation[airports][abbrev{f}#6, city{f}#12, city_location{f}#13, count..]]] + * \_Filter[scalerank{f}#8 < 4[INTEGER]] + * \_EsRelation[airports][abbrev{f}#6, city{f}#12, city_location{f}#13, count..]]] * * Optimized: * - * LimitExec[1000[INTEGER]] - * \_ExchangeExec[[abbrev{f}#6, city{f}#12, city_location{f}#13, country{f}#11, location{f}#10, name{f}#7, scalerank{f}#8, - * type{f}#9, rank{r}#4],false] - * \_ProjectExec[[abbrev{f}#6, city{f}#12, city_location{f}#13, country{f}#11, location{f}#10, name{f}#7, scalerank{f}#8, - * type{f}#9, rank{r}#4]] - * \_FieldExtractExec[abbrev{f}#6, city{f}#12, city_location{f}#13, count..][] - * \_LimitExec[1000[INTEGER]] - * \_EvalExec[[scalerank{f}#8 AS rank]] - * \_FieldExtractExec[scalerank{f}#8][] - * \_EsQueryExec[airports], indexMode[standard], query[{" - * esql_single_value":{"field":"scalerank","next":{"range":{"scalerank":{"lt":4,"boost":1.0}}},"source":"rank < 4@3:9"} - * }][_doc{f}#23], limit[], sort[] estimatedRowSize[304] + * EvalExec[[scalerank{f}#8 AS rank]] + * \_LimitExec[1000[INTEGER]] + * \_ExchangeExec[[abbrev{f}#6, city{f}#12, city_location{f}#13, country{f}#11, location{f}#10, name{f}#7, scalerank{f}#8, + * type{f}#9],false + * ] + * \_ProjectExec[[abbrev{f}#6, city{f}#12, city_location{f}#13, country{f}#11, location{f}#10, name{f}#7, scalerank{f}#8, + * type{f}#9] + * ] + * \_FieldExtractExec[abbrev{f}#6, 
city{f}#12, city_location{f}#13, count..][] + * \_EsQueryExec[airports], indexMode[standard], query[{ + * "esql_single_value":{"field":"scalerank","next":{"range":{"scalerank":{"lt":4,"boost":1.0}}},"source":"rank < 4@3:9"} + * ][_doc{f}#23], limit[1000], sort[] estimatedRowSize[304] * */ public void testPushWhereEvalToSource() { @@ -3243,7 +3243,8 @@ public void testPushWhereEvalToSource() { """; var plan = this.physicalPlan(query, airports); - var limit = as(plan, LimitExec.class); + var eval = as(plan, EvalExec.class); + var limit = as(eval.child(), LimitExec.class); var exchange = as(limit.child(), ExchangeExec.class); var fragment = as(exchange.child(), FragmentExec.class); var limit2 = as(fragment.fragment(), Limit.class); @@ -3251,16 +3252,14 @@ public void testPushWhereEvalToSource() { assertThat("filter contains LessThan", filter.condition(), instanceOf(LessThan.class)); var optimized = optimizedPlan(plan); - var topLimit = as(optimized, LimitExec.class); + eval = as(optimized, EvalExec.class); + var topLimit = as(eval.child(), LimitExec.class); exchange = as(topLimit.child(), ExchangeExec.class); var project = as(exchange.child(), ProjectExec.class); var fieldExtract = as(project.child(), FieldExtractExec.class); assertThat(fieldExtract.attributesToExtract().size(), greaterThan(5)); - limit = as(fieldExtract.child(), LimitExec.class); - var eval = as(limit.child(), EvalExec.class); - fieldExtract = as(eval.child(), FieldExtractExec.class); - assertThat(fieldExtract.attributesToExtract().stream().map(Attribute::name).collect(Collectors.toList()), contains("scalerank")); var source = source(fieldExtract.child()); + assertThat(source.limit(), is(topLimit.limit())); var condition = as(source.query(), SingleValueQuery.Builder.class); assertThat("Expected predicate to be passed to Lucene query", condition.source().text(), equalTo("rank < 4")); assertThat("Expected field to be passed to Lucene query", condition.field(), equalTo("scalerank")); @@ -3281,7 +3280,8 
@@ public void testPushSpatialIntersectsEvalToSource() { """ }) { var plan = this.physicalPlan(query, airports); - var limit = as(plan, LimitExec.class); + var eval = as(plan, EvalExec.class); + var limit = as(eval.child(), LimitExec.class); var exchange = as(limit.child(), ExchangeExec.class); var fragment = as(exchange.child(), FragmentExec.class); var limit2 = as(fragment.fragment(), Limit.class); @@ -3289,16 +3289,14 @@ public void testPushSpatialIntersectsEvalToSource() { assertThat("filter contains ST_INTERSECTS", filter.condition(), instanceOf(SpatialIntersects.class)); var optimized = optimizedPlan(plan); - var topLimit = as(optimized, LimitExec.class); + eval = as(optimized, EvalExec.class); + var topLimit = as(eval.child(), LimitExec.class); exchange = as(topLimit.child(), ExchangeExec.class); var project = as(exchange.child(), ProjectExec.class); var fieldExtract = as(project.child(), FieldExtractExec.class); assertThat(fieldExtract.attributesToExtract().size(), greaterThan(5)); - limit = as(fieldExtract.child(), LimitExec.class); - var eval = as(limit.child(), EvalExec.class); - fieldExtract = as(eval.child(), FieldExtractExec.class); - assertThat(fieldExtract.attributesToExtract().stream().map(Attribute::name).collect(Collectors.toList()), contains("location")); var source = source(fieldExtract.child()); + assertThat(source.limit(), is(topLimit.limit())); var condition = as(source.query(), SpatialRelatesQuery.ShapeQueryBuilder.class); assertThat("Geometry field name", condition.fieldName(), equalTo("location")); assertThat("Spatial relationship", condition.relation(), equalTo(ShapeRelation.INTERSECTS)); From 98e0a4e953d339402c87ed426a70e6cc8320c17f Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Mon, 14 Oct 2024 13:03:02 +0300 Subject: [PATCH 040/449] Guard second doc parsing pass with index setting (#114649) * Guard second doc parsing pass with index setting * add test * updates * updates * merge --- 
.../21_synthetic_source_stored.yml | 49 +++++++++++++++++++ .../common/settings/IndexScopedSettings.java | 1 + .../elasticsearch/index/IndexSettings.java | 21 ++++++++ .../index/mapper/DocumentParserContext.java | 7 ++- .../TransportResumeFollowActionTests.java | 1 + 5 files changed, 78 insertions(+), 1 deletion(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml index dfe6c9820a16a..eab51427876aa 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml @@ -411,6 +411,55 @@ index param - nested array within array: - match: { hits.hits.0._source.path.to.some.3.id: [ 1000, 2000 ] } +--- +index param - nested array within array - disabled second pass: + - requires: + cluster_features: ["mapper.synthetic_source_keep", "mapper.bwc_workaround_9_0"] + reason: requires tracking ignored source + + - do: + indices.create: + index: test + body: + settings: + index: + synthetic_source: + enable_second_doc_parsing_pass: false + mappings: + _source: + mode: synthetic + properties: + name: + type: keyword + path: + properties: + to: + properties: + some: + synthetic_source_keep: arrays + properties: + id: + type: integer + + - do: + bulk: + index: test + refresh: true + body: + - '{ "create": { } }' + - '{ "name": "A", "path": [ { "to": [ { "some" : [ { "id": 10 }, { "id": [1, 3, 2] } ] }, { "some": { "id": 100 } } ] }, { "to": { "some": { "id": [1000, 2000] } } } ] }' + - match: { errors: false } + + - do: + search: + index: test + sort: name + - match: { hits.hits.0._source.name: A } + - length: { hits.hits.0._source.path.to.some: 2} + - match: { hits.hits.0._source.path.to.some.0.id: 10 } + - match: { 
hits.hits.0._source.path.to.some.1.id: [ 1, 3, 2] } + + --- # 112156 stored field under object with store_array_source: diff --git a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index 884ce38fba391..f5276bbe49b63 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -187,6 +187,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings { FieldMapper.SYNTHETIC_SOURCE_KEEP_INDEX_SETTING, IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_WRITE_SETTING, IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING, + IndexSettings.SYNTHETIC_SOURCE_SECOND_DOC_PARSING_PASS_SETTING, SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING, // validate that built-in similarities don't get redefined diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java index f3f8ce4b8e7e4..347b44a22e7c0 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -652,6 +652,13 @@ public Iterator> settings() { Property.Final ); + public static final Setting SYNTHETIC_SOURCE_SECOND_DOC_PARSING_PASS_SETTING = Setting.boolSetting( + "index.synthetic_source.enable_second_doc_parsing_pass", + true, + Property.IndexScope, + Property.Dynamic + ); + /** * Returns true if TSDB encoding is enabled. 
The default is true */ @@ -821,6 +828,7 @@ private void setRetentionLeaseMillis(final TimeValue retentionLease) { private volatile long mappingDimensionFieldsLimit; private volatile boolean skipIgnoredSourceWrite; private volatile boolean skipIgnoredSourceRead; + private volatile boolean syntheticSourceSecondDocParsingPassEnabled; private final SourceFieldMapper.Mode indexMappingSourceMode; /** @@ -982,6 +990,7 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti es87TSDBCodecEnabled = scopedSettings.get(TIME_SERIES_ES87TSDB_CODEC_ENABLED_SETTING); skipIgnoredSourceWrite = scopedSettings.get(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_WRITE_SETTING); skipIgnoredSourceRead = scopedSettings.get(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING); + syntheticSourceSecondDocParsingPassEnabled = scopedSettings.get(SYNTHETIC_SOURCE_SECOND_DOC_PARSING_PASS_SETTING); indexMappingSourceMode = scopedSettings.get(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING); scopedSettings.addSettingsUpdateConsumer( @@ -1070,6 +1079,10 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti this::setSkipIgnoredSourceWrite ); scopedSettings.addSettingsUpdateConsumer(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING, this::setSkipIgnoredSourceRead); + scopedSettings.addSettingsUpdateConsumer( + SYNTHETIC_SOURCE_SECOND_DOC_PARSING_PASS_SETTING, + this::setSyntheticSourceSecondDocParsingPassEnabled + ); } private void setSearchIdleAfter(TimeValue searchIdleAfter) { @@ -1662,6 +1675,14 @@ private void setSkipIgnoredSourceRead(boolean value) { this.skipIgnoredSourceRead = value; } + private void setSyntheticSourceSecondDocParsingPassEnabled(boolean syntheticSourceSecondDocParsingPassEnabled) { + this.syntheticSourceSecondDocParsingPassEnabled = syntheticSourceSecondDocParsingPassEnabled; + } + + public boolean isSyntheticSourceSecondDocParsingPassEnabled() { + return syntheticSourceSecondDocParsingPassEnabled; + } + 
public SourceFieldMapper.Mode getIndexMappingSourceMode() { return indexMappingSourceMode; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java index ac236e5a7e5fd..2eec14bd1a8d6 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java @@ -111,6 +111,7 @@ public int get() { private final Set ignoredFields; private final List ignoredFieldValues; private final List ignoredFieldsMissingValues; + private final boolean inArrayScopeEnabled; private boolean inArrayScope; private final Map> dynamicMappers; @@ -143,6 +144,7 @@ private DocumentParserContext( Set ignoreFields, List ignoredFieldValues, List ignoredFieldsWithNoSource, + boolean inArrayScopeEnabled, boolean inArrayScope, Map> dynamicMappers, Map dynamicObjectMappers, @@ -164,6 +166,7 @@ private DocumentParserContext( this.ignoredFields = ignoreFields; this.ignoredFieldValues = ignoredFieldValues; this.ignoredFieldsMissingValues = ignoredFieldsWithNoSource; + this.inArrayScopeEnabled = inArrayScopeEnabled; this.inArrayScope = inArrayScope; this.dynamicMappers = dynamicMappers; this.dynamicObjectMappers = dynamicObjectMappers; @@ -188,6 +191,7 @@ private DocumentParserContext(ObjectMapper parent, ObjectMapper.Dynamic dynamic, in.ignoredFields, in.ignoredFieldValues, in.ignoredFieldsMissingValues, + in.inArrayScopeEnabled, in.inArrayScope, in.dynamicMappers, in.dynamicObjectMappers, @@ -219,6 +223,7 @@ protected DocumentParserContext( new HashSet<>(), new ArrayList<>(), new ArrayList<>(), + mappingParserContext.getIndexSettings().isSyntheticSourceSecondDocParsingPassEnabled(), false, new HashMap<>(), new HashMap<>(), @@ -371,7 +376,7 @@ public final Collection getIgnoredFieldsMiss * Applies to synthetic source only. 
*/ public final DocumentParserContext maybeCloneForArray(Mapper mapper) throws IOException { - if (canAddIgnoredField() && mapper instanceof ObjectMapper) { + if (canAddIgnoredField() && mapper instanceof ObjectMapper && inArrayScopeEnabled) { boolean isNested = mapper instanceof NestedObjectMapper; if ((inArrayScope == false && isNested == false) || (inArrayScope && isNested)) { DocumentParserContext subcontext = switchParser(parser()); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java index 357e1bca38e8f..ef03fd0ba6f0e 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java @@ -333,6 +333,7 @@ public void testDynamicIndexSettingsAreClassified() { replicatedSettings.add(IndexSettings.MAX_SHINGLE_DIFF_SETTING); replicatedSettings.add(IndexSettings.TIME_SERIES_END_TIME); replicatedSettings.add(IndexSettings.PREFER_ILM_SETTING); + replicatedSettings.add(IndexSettings.SYNTHETIC_SOURCE_SECOND_DOC_PARSING_PASS_SETTING); replicatedSettings.add(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING); replicatedSettings.add(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_WRITE_SETTING); replicatedSettings.add(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING); From c4118c639f11ae111cea6992376e1b046883d15f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aur=C3=A9lien=20FOUCRET?= Date: Mon, 14 Oct 2024 12:50:00 +0200 Subject: [PATCH 041/449] Fix termStats posting usage (#114644) --- .../elasticsearch/script/ScriptTermStats.java | 59 +++++++++---------- .../script/ScriptTermStatsTests.java | 18 +++--- 2 files changed, 36 insertions(+), 41 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/script/ScriptTermStats.java 
b/server/src/main/java/org/elasticsearch/script/ScriptTermStats.java index 9dde32cc75e6a..b27019765e33b 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptTermStats.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptTermStats.java @@ -12,9 +12,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.Term; -import org.apache.lucene.index.TermState; import org.apache.lucene.index.TermStates; -import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.elasticsearch.common.util.CachedSupplier; import org.elasticsearch.features.NodeFeature; @@ -71,17 +70,15 @@ public int uniqueTermsCount() { public int matchedTermsCount() { final int docId = docIdSupplier.getAsInt(); int matchedTerms = 0; + advancePostings(docId); - try { - for (PostingsEnum postingsEnum : postingsSupplier.get()) { - if (postingsEnum != null && postingsEnum.advance(docId) == docId && postingsEnum.freq() > 0) { - matchedTerms++; - } + for (PostingsEnum postingsEnum : postingsSupplier.get()) { + if (postingsEnum != null && postingsEnum.docID() == docId) { + matchedTerms++; } - return matchedTerms; - } catch (IOException e) { - throw new UncheckedIOException(e); } + + return matchedTerms; } /** @@ -150,8 +147,9 @@ public StatsSummary termFreq() { final int docId = docIdSupplier.getAsInt(); try { + advancePostings(docId); for (PostingsEnum postingsEnum : postingsSupplier.get()) { - if (postingsEnum == null || postingsEnum.advance(docId) != docId) { + if (postingsEnum == null || postingsEnum.docID() != docId) { statsSummary.accept(0); } else { statsSummary.accept(postingsEnum.freq()); @@ -170,12 +168,13 @@ public StatsSummary termFreq() { * @return statistics on termPositions for the terms of the query in the current dac */ public StatsSummary termPositions() { - try { - statsSummary.reset(); - int docId = 
docIdSupplier.getAsInt(); + statsSummary.reset(); + int docId = docIdSupplier.getAsInt(); + try { + advancePostings(docId); for (PostingsEnum postingsEnum : postingsSupplier.get()) { - if (postingsEnum == null || postingsEnum.advance(docId) != docId) { + if (postingsEnum == null || postingsEnum.docID() != docId) { continue; } for (int i = 0; i < postingsEnum.freq(); i++) { @@ -206,25 +205,9 @@ private TermStates[] loadTermContexts() { private PostingsEnum[] loadPostings() { try { PostingsEnum[] postings = new PostingsEnum[terms.length]; - TermStates[] contexts = termContextsSupplier.get(); for (int i = 0; i < terms.length; i++) { - TermStates termStates = contexts[i]; - if (termStates.docFreq() == 0) { - postings[i] = null; - continue; - } - - TermState state = termStates.get(leafReaderContext); - if (state == null) { - postings[i] = null; - continue; - } - - TermsEnum termsEnum = leafReaderContext.reader().terms(terms[i].field()).iterator(); - termsEnum.seekExact(terms[i].bytes(), state); - - postings[i] = termsEnum.postings(null, PostingsEnum.ALL); + postings[i] = leafReaderContext.reader().postings(terms[i], PostingsEnum.POSITIONS); } return postings; @@ -232,4 +215,16 @@ private PostingsEnum[] loadPostings() { throw new UncheckedIOException(e); } } + + private void advancePostings(int targetDocId) { + try { + for (PostingsEnum posting : postingsSupplier.get()) { + if (posting != null && posting.docID() < targetDocId && posting.docID() != DocIdSetIterator.NO_MORE_DOCS) { + posting.advance(targetDocId); + } + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } } diff --git a/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java b/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java index b1b6a11764120..239c90bdee2fd 100644 --- a/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java +++ b/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java @@ -48,9 +48,9 @@ public 
void testMatchedTermsCount() throws IOException { // Partial match assertAllDocs( - Set.of(new Term("field", "foo"), new Term("field", "baz")), + Set.of(new Term("field", "foo"), new Term("field", "qux"), new Term("field", "baz")), ScriptTermStats::matchedTermsCount, - Map.of("doc-1", equalTo(1), "doc-2", equalTo(1), "doc-3", equalTo(0)) + Map.of("doc-1", equalTo(2), "doc-2", equalTo(1), "doc-3", equalTo(0)) ); // Always returns 0 when no term is provided. @@ -211,12 +211,12 @@ public void testTermFreq() throws IOException { // With missing terms { assertAllDocs( - Set.of(new Term("field", "foo"), new Term("field", "baz")), + Set.of(new Term("field", "foo"), new Term("field", "qux"), new Term("field", "baz")), ScriptTermStats::termFreq, Map.ofEntries( - Map.entry("doc-1", equalTo(new StatsSummary(2, 1, 0, 1))), - Map.entry("doc-2", equalTo(new StatsSummary(2, 2, 0, 2))), - Map.entry("doc-3", equalTo(new StatsSummary(2, 0, 0, 0))) + Map.entry("doc-1", equalTo(new StatsSummary(3, 2, 0, 1))), + Map.entry("doc-2", equalTo(new StatsSummary(3, 2, 0, 2))), + Map.entry("doc-3", equalTo(new StatsSummary(3, 0, 0, 0))) ) ); } @@ -274,10 +274,10 @@ public void testTermPositions() throws IOException { // With missing terms { assertAllDocs( - Set.of(new Term("field", "foo"), new Term("field", "baz")), + Set.of(new Term("field", "foo"), new Term("field", "qux"), new Term("field", "baz")), ScriptTermStats::termPositions, Map.ofEntries( - Map.entry("doc-1", equalTo(new StatsSummary(1, 1, 1, 1))), + Map.entry("doc-1", equalTo(new StatsSummary(2, 4, 1, 3))), Map.entry("doc-2", equalTo(new StatsSummary(2, 3, 1, 2))), Map.entry("doc-3", equalTo(new StatsSummary())) ) @@ -311,7 +311,7 @@ private void withIndexSearcher(CheckedConsumer consu Document doc = new Document(); doc.add(new TextField("id", "doc-1", Field.Store.YES)); - doc.add(new TextField("field", "foo bar", Field.Store.YES)); + doc.add(new TextField("field", "foo bar qux", Field.Store.YES)); w.addDocument(doc); doc = new 
Document(); From 8c31d80ae5f73df3d989ffbb4c29e73c44a526a2 Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Mon, 14 Oct 2024 13:03:47 +0200 Subject: [PATCH 042/449] Node shutdown test integration test (#114582) This change adds a test case that verifies that the node can be shutdown while hosting an index with 0-1 or 0-all auto-expand configuration. --- .../xpack/shutdown/NodeShutdownShardsIT.java | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java index 0e162238e96c8..ee7438dfca428 100644 --- a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java +++ b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java @@ -375,6 +375,32 @@ public void testAutoExpandDuringReplace() throws Exception { assertIndexSetting("index", "index.number_of_replicas", "1"); } + public void testAutoExpandDuringShutdown() throws Exception { + + var node1 = internalCluster().startNode(); + var node2 = internalCluster().startNode(); + + createIndex("index", indexSettings(1, 0).put("index.auto_expand_replicas", randomFrom("0-all", "0-1")).build()); + indexRandomData("index"); + + ensureGreen("index"); + assertIndexSetting("index", "index.number_of_replicas", "1"); + + var nodeNameToShutdown = randomFrom(node1, node2); + var nodeIdToShutdown = getNodeId(nodeNameToShutdown); + + putNodeShutdown(nodeIdToShutdown, SingleNodeShutdownMetadata.Type.REMOVE, null); + + ensureGreen("index"); + assertIndexSetting("index", "index.number_of_replicas", "0"); + + assertBusy(() -> assertNodeShutdownStatus(nodeIdToShutdown, COMPLETE)); + internalCluster().stopNode(nodeIdToShutdown); + + ensureGreen("index"); + assertIndexSetting("index", 
"index.number_of_replicas", "0"); + } + public void testNodeShutdownWithUnassignedShards() throws Exception { final String nodeA = internalCluster().startNode(); final String nodeAId = getNodeId(nodeA); From 7157c0a4c4a3638c2d264b42b234491d27ca7557 Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Mon, 14 Oct 2024 14:01:59 +0200 Subject: [PATCH 043/449] Update docker.elastic.co/wolfi/chainguard-base:latest Docker digest to 277ebb4 (main) (#114409) * Update docker.elastic.co/wolfi/chainguard-base:latest Docker digest to 277ebb4 * Tweak renovate replace pattern --------- Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Co-authored-by: Rene Groeschke --- .../main/java/org/elasticsearch/gradle/internal/DockerBase.java | 2 +- renovate.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java index 9d78d3229edc1..d80256ee36a17 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java @@ -27,7 +27,7 @@ public enum DockerBase { // Chainguard based wolfi image with latest jdk // This is usually updated via renovatebot // spotless:off - WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:90888b190da54062f67f3fef1372eb0ae7d81ea55f5a1f56d748b13e4853d984", + WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:277ebb42c458ef39cb4028f9204f0b3d51d8cd628ea737a65696a1143c3e42fe", "-wolfi", "apk" ), diff --git a/renovate.json b/renovate.json index 0a1d588e6332c..293a2bb262375 100644 --- a/renovate.json +++ b/renovate.json @@ -30,7 +30,7 @@ "\\s*\"?(?[^\\s:@\"]+)(?::(?[-a-zA-Z0-9.]+))?(?:@(?sha256:[a-zA-Z0-9]+))?\"?" 
], "currentValueTemplate": "{{#if currentValue}}{{{currentValue}}}{{else}}latest{{/if}}", - "autoReplaceStringTemplate": "\"{{{depName}}}{{#if newValue}}:{{{newValue}}}{{/if}}{{#if newDigest}}@{{{newDigest}}}{{/if}}\"", + "autoReplaceStringTemplate": "{{{depName}}}{{#if newValue}}:{{{newValue}}}{{/if}}{{#if newDigest}}@{{{newDigest}}}{{/if}}\"", "datasourceTemplate": "docker" } ] From af35cada9251eb81aff7563c8012cfb93103c18d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Mon, 14 Oct 2024 23:53:26 +1100 Subject: [PATCH 044/449] Mute org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT test {yaml=reference/rest-api/usage/line_38} #113694 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 975eb0c434054..2e6fd10c6ef65 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -366,6 +366,9 @@ tests: - class: org.elasticsearch.xpack.inference.integration.ModelRegistryIT method: testGetModel issue: https://github.com/elastic/elasticsearch/issues/114657 +- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT + method: test {yaml=reference/rest-api/usage/line_38} + issue: https://github.com/elastic/elasticsearch/issues/113694 # Examples: # From 51ea024eda1336a300e2836d1659a42691880b00 Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Mon, 14 Oct 2024 16:02:24 +0300 Subject: [PATCH 045/449] Introduce CRUD APIs for data stream options (#113945) In this PR we introduce two endpoint PUT and GET to manage the data stream options and consequently the failure store configuration on the data stream level. This means that we can manage the failure store of existing data streams. 
The APIs look like: ``` # Enable/disable PUT _data_stream/my-data-stream/_options { "failure_store": { "enabled": true } } # Remove existing configuration DELETE _data_stream/my-data-stream/_options # Retrieve GET _data_stream/my-data-stream/_options { "failure_store": { "enabled": true } } ``` Future work: - Document the new APIs - Convert `DataStreamOptionsIT.java` to a yaml test. --- .../datastreams/DataStreamOptionsIT.java | 144 +++++++++++ .../src/main/java/module-info.java | 1 + .../datastreams/DataStreamsPlugin.java | 20 ++ .../action/DeleteDataStreamOptionsAction.java | 108 +++++++++ .../action/GetDataStreamOptionsAction.java | 223 ++++++++++++++++++ .../action/PutDataStreamOptionsAction.java | 165 +++++++++++++ ...ransportDeleteDataStreamOptionsAction.java | 86 +++++++ .../TransportGetDataStreamOptionsAction.java | 104 ++++++++ .../TransportPutDataStreamOptionsAction.java | 92 ++++++++ .../RestDeleteDataStreamOptionsAction.java | 54 +++++ .../rest/RestGetDataStreamOptionsAction.java | 58 +++++ .../rest/RestPutDataStreamOptionsAction.java | 58 +++++ .../metadata/MetadataDataStreamsService.java | 94 ++++++++ .../metadata/DataStreamOptionsTests.java | 11 +- .../MetadataDataStreamsServiceTests.java | 33 +++ .../xpack/security/operator/Constants.java | 3 + 16 files changed, 1253 insertions(+), 1 deletion(-) create mode 100644 modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DataStreamOptionsIT.java create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/DeleteDataStreamOptionsAction.java create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/GetDataStreamOptionsAction.java create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/PutDataStreamOptionsAction.java create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/TransportDeleteDataStreamOptionsAction.java create 
mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/TransportGetDataStreamOptionsAction.java create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/TransportPutDataStreamOptionsAction.java create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestDeleteDataStreamOptionsAction.java create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestGetDataStreamOptionsAction.java create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestPutDataStreamOptionsAction.java diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DataStreamOptionsIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DataStreamOptionsIT.java new file mode 100644 index 0000000000000..980cc32a12c68 --- /dev/null +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DataStreamOptionsIT.java @@ -0,0 +1,144 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.datastreams; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.junit.Before; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + +/** + * This should be a yaml test, but in order to write one we would need to expose the new APIs in the rest-api-spec. + * We do not want to do that until the feature flag is removed. For this reason, we temporarily, test the new APIs here. + * Please convert this to a yaml test when the feature flag is removed. + */ +public class DataStreamOptionsIT extends DisabledSecurityDataStreamTestCase { + + private static final String DATA_STREAM_NAME = "failure-data-stream"; + + @SuppressWarnings("unchecked") + @Before + public void setup() throws IOException { + Request putComposableIndexTemplateRequest = new Request("POST", "/_index_template/ds-template"); + putComposableIndexTemplateRequest.setJsonEntity(""" + { + "index_patterns": ["failure-data-stream"], + "template": { + "settings": { + "number_of_replicas": 0 + } + }, + "data_stream": { + "failure_store": true + } + } + """); + assertOK(client().performRequest(putComposableIndexTemplateRequest)); + + assertOK(client().performRequest(new Request("PUT", "/_data_stream/" + DATA_STREAM_NAME))); + // Initialize the failure store. 
+ assertOK(client().performRequest(new Request("POST", DATA_STREAM_NAME + "/_rollover?target_failure_store"))); + ensureGreen(DATA_STREAM_NAME); + + final Response dataStreamResponse = client().performRequest(new Request("GET", "/_data_stream/" + DATA_STREAM_NAME)); + List dataStreams = (List) entityAsMap(dataStreamResponse).get("data_streams"); + assertThat(dataStreams.size(), is(1)); + Map dataStream = (Map) dataStreams.get(0); + assertThat(dataStream.get("name"), equalTo(DATA_STREAM_NAME)); + List backingIndices = getIndices(dataStream); + assertThat(backingIndices.size(), is(1)); + List failureStore = getFailureStore(dataStream); + assertThat(failureStore.size(), is(1)); + } + + public void testEnableDisableFailureStore() throws IOException { + { + assertAcknowledged(client().performRequest(new Request("DELETE", "/_data_stream/" + DATA_STREAM_NAME + "/_options"))); + assertFailureStore(false, 1); + assertDataStreamOptions(null); + } + { + Request enableRequest = new Request("PUT", "/_data_stream/" + DATA_STREAM_NAME + "/_options"); + enableRequest.setJsonEntity(""" + { + "failure_store": { + "enabled": true + } + }"""); + assertAcknowledged(client().performRequest(enableRequest)); + assertFailureStore(true, 1); + assertDataStreamOptions(true); + } + + { + Request disableRequest = new Request("PUT", "/_data_stream/" + DATA_STREAM_NAME + "/_options"); + disableRequest.setJsonEntity(""" + { + "failure_store": { + "enabled": false + } + }"""); + assertAcknowledged(client().performRequest(disableRequest)); + assertFailureStore(false, 1); + assertDataStreamOptions(false); + } + } + + @SuppressWarnings("unchecked") + private void assertFailureStore(boolean failureStoreEnabled, int failureStoreSize) throws IOException { + final Response dataStreamResponse = client().performRequest(new Request("GET", "/_data_stream/" + DATA_STREAM_NAME)); + List dataStreams = (List) entityAsMap(dataStreamResponse).get("data_streams"); + assertThat(dataStreams.size(), is(1)); + Map 
dataStream = (Map) dataStreams.get(0); + assertThat(dataStream.get("name"), equalTo(DATA_STREAM_NAME)); + assertThat(dataStream.containsKey("failure_store"), is(true)); + // Ensure the failure store is set to the provided value + assertThat(((Map) dataStream.get("failure_store")).get("enabled"), equalTo(failureStoreEnabled)); + // And the failure indices preserved + List failureStore = getFailureStore(dataStream); + assertThat(failureStore.size(), is(failureStoreSize)); + } + + @SuppressWarnings("unchecked") + private void assertDataStreamOptions(Boolean failureStoreEnabled) throws IOException { + final Response dataStreamResponse = client().performRequest(new Request("GET", "/_data_stream/" + DATA_STREAM_NAME + "/_options")); + List dataStreams = (List) entityAsMap(dataStreamResponse).get("data_streams"); + assertThat(dataStreams.size(), is(1)); + Map dataStream = (Map) dataStreams.get(0); + assertThat(dataStream.get("name"), equalTo(DATA_STREAM_NAME)); + Map> options = (Map>) dataStream.get("options"); + if (failureStoreEnabled == null) { + assertThat(options, nullValue()); + } else { + assertThat(options.containsKey("failure_store"), is(true)); + assertThat(options.get("failure_store").get("enabled"), equalTo(failureStoreEnabled)); + } + } + + @SuppressWarnings("unchecked") + private List getFailureStore(Map response) { + var failureStore = (Map) response.get("failure_store"); + return getIndices(failureStore); + + } + + @SuppressWarnings("unchecked") + private List getIndices(Map response) { + List> indices = (List>) response.get("indices"); + return indices.stream().map(index -> index.get("index_name")).toList(); + } +} diff --git a/modules/data-streams/src/main/java/module-info.java b/modules/data-streams/src/main/java/module-info.java index 16229f9eb2394..2d49029c1023c 100644 --- a/modules/data-streams/src/main/java/module-info.java +++ b/modules/data-streams/src/main/java/module-info.java @@ -17,6 +17,7 @@ exports org.elasticsearch.datastreams.action to 
org.elasticsearch.server; exports org.elasticsearch.datastreams.lifecycle.action to org.elasticsearch.server; exports org.elasticsearch.datastreams.lifecycle; + exports org.elasticsearch.datastreams.options.action to org.elasticsearch.server; provides org.elasticsearch.features.FeatureSpecification with org.elasticsearch.datastreams.DataStreamFeatures; } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java index 1a6465a251021..cb7445705537a 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.datastreams.lifecycle.GetDataStreamLifecycleAction; import org.elasticsearch.action.datastreams.lifecycle.PutDataStreamLifecycleAction; import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -56,6 +57,15 @@ import org.elasticsearch.datastreams.lifecycle.rest.RestExplainDataStreamLifecycleAction; import org.elasticsearch.datastreams.lifecycle.rest.RestGetDataStreamLifecycleAction; import org.elasticsearch.datastreams.lifecycle.rest.RestPutDataStreamLifecycleAction; +import org.elasticsearch.datastreams.options.action.DeleteDataStreamOptionsAction; +import org.elasticsearch.datastreams.options.action.GetDataStreamOptionsAction; +import org.elasticsearch.datastreams.options.action.PutDataStreamOptionsAction; +import org.elasticsearch.datastreams.options.action.TransportDeleteDataStreamOptionsAction; +import org.elasticsearch.datastreams.options.action.TransportGetDataStreamOptionsAction; +import 
org.elasticsearch.datastreams.options.action.TransportPutDataStreamOptionsAction; +import org.elasticsearch.datastreams.options.rest.RestDeleteDataStreamOptionsAction; +import org.elasticsearch.datastreams.options.rest.RestGetDataStreamOptionsAction; +import org.elasticsearch.datastreams.options.rest.RestPutDataStreamOptionsAction; import org.elasticsearch.datastreams.rest.RestCreateDataStreamAction; import org.elasticsearch.datastreams.rest.RestDataStreamsStatsAction; import org.elasticsearch.datastreams.rest.RestDeleteDataStreamAction; @@ -229,6 +239,11 @@ public Collection createComponents(PluginServices services) { actions.add(new ActionHandler<>(DeleteDataStreamLifecycleAction.INSTANCE, TransportDeleteDataStreamLifecycleAction.class)); actions.add(new ActionHandler<>(ExplainDataStreamLifecycleAction.INSTANCE, TransportExplainDataStreamLifecycleAction.class)); actions.add(new ActionHandler<>(GetDataStreamLifecycleStatsAction.INSTANCE, TransportGetDataStreamLifecycleStatsAction.class)); + if (DataStream.isFailureStoreFeatureFlagEnabled()) { + actions.add(new ActionHandler<>(GetDataStreamOptionsAction.INSTANCE, TransportGetDataStreamOptionsAction.class)); + actions.add(new ActionHandler<>(PutDataStreamOptionsAction.INSTANCE, TransportPutDataStreamOptionsAction.class)); + actions.add(new ActionHandler<>(DeleteDataStreamOptionsAction.INSTANCE, TransportDeleteDataStreamOptionsAction.class)); + } return actions; } @@ -261,6 +276,11 @@ public List getRestHandlers( handlers.add(new RestDeleteDataStreamLifecycleAction()); handlers.add(new RestExplainDataStreamLifecycleAction()); handlers.add(new RestDataStreamLifecycleStatsAction()); + if (DataStream.isFailureStoreFeatureFlagEnabled()) { + handlers.add(new RestGetDataStreamOptionsAction()); + handlers.add(new RestPutDataStreamOptionsAction()); + handlers.add(new RestDeleteDataStreamOptionsAction()); + } return handlers; } diff --git 
a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/DeleteDataStreamOptionsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/DeleteDataStreamOptionsAction.java new file mode 100644 index 0000000000000..98a29dd636ddf --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/DeleteDataStreamOptionsAction.java @@ -0,0 +1,108 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.datastreams.options.action; + +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.TimeValue; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Objects; + +/** + * Removes the data stream options configuration from the requested data streams. 
+ */ +public class DeleteDataStreamOptionsAction { + + public static final ActionType INSTANCE = new ActionType<>("indices:admin/data_stream/options/delete"); + + private DeleteDataStreamOptionsAction() {/* no instances */} + + public static final class Request extends AcknowledgedRequest implements IndicesRequest.Replaceable { + + private String[] names; + private IndicesOptions indicesOptions = IndicesOptions.builder() + .concreteTargetOptions(IndicesOptions.ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) + .wildcardOptions( + IndicesOptions.WildcardOptions.builder().matchOpen(true).matchClosed(true).allowEmptyExpressions(true).resolveAliases(false) + ) + .gatekeeperOptions(IndicesOptions.GatekeeperOptions.builder().allowAliasToMultipleIndices(false).allowClosedIndices(true)) + .build(); + + public Request(StreamInput in) throws IOException { + super(in); + this.names = in.readOptionalStringArray(); + this.indicesOptions = IndicesOptions.readIndicesOptions(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalStringArray(names); + indicesOptions.writeIndicesOptions(out); + } + + public Request(TimeValue masterNodeTimeout, TimeValue ackTimeout, String[] names) { + super(masterNodeTimeout, ackTimeout); + this.names = names; + } + + public String[] getNames() { + return names; + } + + @Override + public String[] indices() { + return names; + } + + @Override + public IndicesOptions indicesOptions() { + return indicesOptions; + } + + public Request indicesOptions(IndicesOptions indicesOptions) { + this.indicesOptions = indicesOptions; + return this; + } + + @Override + public boolean includeDataStreams() { + return true; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Arrays.equals(names, request.names) && Objects.equals(indicesOptions, 
request.indicesOptions); + } + + @Override + public int hashCode() { + int result = Objects.hash(indicesOptions); + result = 31 * result + Arrays.hashCode(names); + return result; + } + + @Override + public IndicesRequest indices(String... indices) { + this.names = indices; + return this; + } + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/GetDataStreamOptionsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/GetDataStreamOptionsAction.java new file mode 100644 index 0000000000000..c1354da1129ca --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/GetDataStreamOptionsAction.java @@ -0,0 +1,223 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ +package org.elasticsearch.datastreams.options.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; +import org.elasticsearch.cluster.metadata.DataStreamOptions; +import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ChunkedToXContentObject; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.Objects; + +/** + * This action retrieves the data stream options from every data stream. Currently, data stream options only support + * failure store. 
+ */ +public class GetDataStreamOptionsAction { + + public static final ActionType INSTANCE = new ActionType<>("indices:admin/data_stream/options/get"); + + private GetDataStreamOptionsAction() {/* no instances */} + + public static class Request extends MasterNodeReadRequest implements IndicesRequest.Replaceable { + + private String[] names; + private IndicesOptions indicesOptions = IndicesOptions.builder() + .concreteTargetOptions(IndicesOptions.ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) + .wildcardOptions( + IndicesOptions.WildcardOptions.builder().matchOpen(true).matchClosed(true).allowEmptyExpressions(true).resolveAliases(false) + ) + .gatekeeperOptions(IndicesOptions.GatekeeperOptions.builder().allowAliasToMultipleIndices(false).allowClosedIndices(true)) + .build(); + private boolean includeDefaults = false; + + public Request(TimeValue masterNodeTimeout, String[] names) { + super(masterNodeTimeout); + this.names = names; + } + + public Request(TimeValue masterNodeTimeout, String[] names, boolean includeDefaults) { + super(masterNodeTimeout); + this.names = names; + this.includeDefaults = includeDefaults; + } + + public String[] getNames() { + return names; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.names = in.readOptionalStringArray(); + this.indicesOptions = IndicesOptions.readIndicesOptions(in); + this.includeDefaults = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalStringArray(names); + indicesOptions.writeIndicesOptions(out); + out.writeBoolean(includeDefaults); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Arrays.equals(names, request.names) + && indicesOptions.equals(request.indicesOptions) 
+ && includeDefaults == request.includeDefaults; + } + + @Override + public int hashCode() { + int result = Objects.hash(indicesOptions, includeDefaults); + result = 31 * result + Arrays.hashCode(names); + return result; + } + + @Override + public String[] indices() { + return names; + } + + @Override + public IndicesOptions indicesOptions() { + return indicesOptions; + } + + public boolean includeDefaults() { + return includeDefaults; + } + + public Request indicesOptions(IndicesOptions indicesOptions) { + this.indicesOptions = indicesOptions; + return this; + } + + @Override + public boolean includeDataStreams() { + return true; + } + + @Override + public IndicesRequest indices(String... indices) { + this.names = indices; + return this; + } + + public Request includeDefaults(boolean includeDefaults) { + this.includeDefaults = includeDefaults; + return this; + } + } + + public static class Response extends ActionResponse implements ChunkedToXContentObject { + public static final ParseField DATA_STREAMS_FIELD = new ParseField("data_streams"); + + public record DataStreamEntry(String dataStreamName, DataStreamOptions dataStreamOptions) implements Writeable, ToXContentObject { + + public static final ParseField NAME_FIELD = new ParseField("name"); + public static final ParseField OPTIONS_FIELD = new ParseField("options"); + + DataStreamEntry(StreamInput in) throws IOException { + this(in.readString(), in.readOptionalWriteable(DataStreamOptions::read)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(dataStreamName); + out.writeOptionalWriteable(dataStreamOptions); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(NAME_FIELD.getPreferredName(), dataStreamName); + if (dataStreamOptions != null && dataStreamOptions.isEmpty() == false) { + builder.field(OPTIONS_FIELD.getPreferredName(), dataStreamOptions); + } + 
builder.endObject(); + return builder; + } + } + + private final List dataStreams; + + public Response(List dataStreams) { + this.dataStreams = dataStreams; + } + + public Response(StreamInput in) throws IOException { + this(in.readCollectionAsList(DataStreamEntry::new)); + } + + public List getDataStreams() { + return dataStreams; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(dataStreams); + } + + @Override + public Iterator toXContentChunked(ToXContent.Params outerParams) { + return Iterators.concat(Iterators.single((builder, params) -> { + builder.startObject(); + builder.startArray(DATA_STREAMS_FIELD.getPreferredName()); + return builder; + }), + Iterators.map(dataStreams.iterator(), entry -> (builder, params) -> entry.toXContent(builder, outerParams)), + Iterators.single((builder, params) -> { + builder.endArray(); + builder.endObject(); + return builder; + }) + ); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response response = (Response) o; + return dataStreams.equals(response.dataStreams); + } + + @Override + public int hashCode() { + return Objects.hash(dataStreams); + } + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/PutDataStreamOptionsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/PutDataStreamOptionsAction.java new file mode 100644 index 0000000000000..d055a6972312a --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/PutDataStreamOptionsAction.java @@ -0,0 +1,165 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.datastreams.options.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.cluster.metadata.DataStreamFailureStore; +import org.elasticsearch.cluster.metadata.DataStreamOptions; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +/** + * Sets the data stream options that was provided in the request to the requested data streams. 
+ */ +public class PutDataStreamOptionsAction { + + public static final ActionType INSTANCE = new ActionType<>("indices:admin/data_stream/options/put"); + + private PutDataStreamOptionsAction() {/* no instances */} + + public static final class Request extends AcknowledgedRequest implements IndicesRequest.Replaceable { + + public interface Factory { + Request create(@Nullable DataStreamFailureStore dataStreamFailureStore); + } + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "put_data_stream_options_request", + false, + (args, factory) -> factory.create((DataStreamFailureStore) args[0]) + ); + + static { + PARSER.declareObjectOrNull( + ConstructingObjectParser.optionalConstructorArg(), + (p, c) -> DataStreamFailureStore.PARSER.parse(p, null), + null, + new ParseField("failure_store") + ); + } + + public static Request parseRequest(XContentParser parser, Factory factory) { + return PARSER.apply(parser, factory); + } + + private String[] names; + private IndicesOptions indicesOptions = IndicesOptions.builder() + .concreteTargetOptions(IndicesOptions.ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) + .wildcardOptions( + IndicesOptions.WildcardOptions.builder().matchOpen(true).matchClosed(true).allowEmptyExpressions(true).resolveAliases(false) + ) + .gatekeeperOptions(IndicesOptions.GatekeeperOptions.builder().allowAliasToMultipleIndices(false).allowClosedIndices(true)) + .build(); + private final DataStreamOptions options; + + public Request(StreamInput in) throws IOException { + super(in); + this.names = in.readStringArray(); + this.indicesOptions = IndicesOptions.readIndicesOptions(in); + options = DataStreamOptions.read(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeStringArray(names); + indicesOptions.writeIndicesOptions(out); + out.writeWriteable(options); + } + + public Request(TimeValue masterNodeTimeout, TimeValue ackTimeout, String[] names, 
DataStreamOptions options) { + super(masterNodeTimeout, ackTimeout); + this.names = names; + this.options = options; + } + + public Request(TimeValue masterNodeTimeout, TimeValue ackTimeout, String[] names, @Nullable DataStreamFailureStore failureStore) { + super(masterNodeTimeout, ackTimeout); + this.names = names; + this.options = new DataStreamOptions(failureStore); + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (options.failureStore() == null) { + validationException = addValidationError("At least one option needs to be provided", validationException); + } + return validationException; + } + + public String[] getNames() { + return names; + } + + public DataStreamOptions getOptions() { + return options; + } + + @Override + public String[] indices() { + return names; + } + + @Override + public IndicesOptions indicesOptions() { + return indicesOptions; + } + + public Request indicesOptions(IndicesOptions indicesOptions) { + this.indicesOptions = indicesOptions; + return this; + } + + @Override + public boolean includeDataStreams() { + return true; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Arrays.equals(names, request.names) + && Objects.equals(indicesOptions, request.indicesOptions) + && options.equals(request.options); + } + + @Override + public int hashCode() { + int result = Objects.hash(indicesOptions, options); + result = 31 * result + Arrays.hashCode(names); + return result; + } + + @Override + public IndicesRequest indices(String... 
names) { + this.names = names; + return this; + } + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/TransportDeleteDataStreamOptionsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/TransportDeleteDataStreamOptionsAction.java new file mode 100644 index 0000000000000..ead23ed78222b --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/TransportDeleteDataStreamOptionsAction.java @@ -0,0 +1,86 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ +package org.elasticsearch.datastreams.options.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.datastreams.DataStreamsActionUtil; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MetadataDataStreamsService; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.indices.SystemIndices; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.util.List; + +/** + * Transport action that resolves the data stream names from the request and removes any configured data stream options from them. 
+ */ +public class TransportDeleteDataStreamOptionsAction extends AcknowledgedTransportMasterNodeAction { + + private final MetadataDataStreamsService metadataDataStreamsService; + private final SystemIndices systemIndices; + + @Inject + public TransportDeleteDataStreamOptionsAction( + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + MetadataDataStreamsService metadataDataStreamsService, + SystemIndices systemIndices + ) { + super( + DeleteDataStreamOptionsAction.INSTANCE.name(), + transportService, + clusterService, + threadPool, + actionFilters, + DeleteDataStreamOptionsAction.Request::new, + indexNameExpressionResolver, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.metadataDataStreamsService = metadataDataStreamsService; + this.systemIndices = systemIndices; + } + + @Override + protected void masterOperation( + Task task, + DeleteDataStreamOptionsAction.Request request, + ClusterState state, + ActionListener listener + ) { + List dataStreamNames = DataStreamsActionUtil.getDataStreamNames( + indexNameExpressionResolver, + state, + request.getNames(), + request.indicesOptions() + ); + for (String name : dataStreamNames) { + systemIndices.validateDataStreamAccess(name, threadPool.getThreadContext()); + } + metadataDataStreamsService.removeDataStreamOptions(dataStreamNames, request.ackTimeout(), request.masterNodeTimeout(), listener); + } + + @Override + protected ClusterBlockException checkBlock(DeleteDataStreamOptionsAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/TransportGetDataStreamOptionsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/TransportGetDataStreamOptionsAction.java new file mode 100644 
index 0000000000000..b032b35c943c0 --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/TransportGetDataStreamOptionsAction.java @@ -0,0 +1,104 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ +package org.elasticsearch.datastreams.options.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.datastreams.DataStreamsActionUtil; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.indices.SystemIndices; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Collects the data streams from the cluster state and then returns for each data stream its name and its + * data stream options. Currently, data stream options include only the failure store configuration. 
+ */ +public class TransportGetDataStreamOptionsAction extends TransportMasterNodeReadAction< + GetDataStreamOptionsAction.Request, + GetDataStreamOptionsAction.Response> { + + private final SystemIndices systemIndices; + + @Inject + public TransportGetDataStreamOptionsAction( + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + SystemIndices systemIndices + ) { + super( + GetDataStreamOptionsAction.INSTANCE.name(), + transportService, + clusterService, + threadPool, + actionFilters, + GetDataStreamOptionsAction.Request::new, + indexNameExpressionResolver, + GetDataStreamOptionsAction.Response::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.systemIndices = systemIndices; + } + + @Override + protected void masterOperation( + Task task, + GetDataStreamOptionsAction.Request request, + ClusterState state, + ActionListener listener + ) { + List requestedDataStreams = DataStreamsActionUtil.getDataStreamNames( + indexNameExpressionResolver, + state, + request.getNames(), + request.indicesOptions() + ); + Map dataStreams = state.metadata().dataStreams(); + for (String name : requestedDataStreams) { + systemIndices.validateDataStreamAccess(name, threadPool.getThreadContext()); + } + listener.onResponse( + new GetDataStreamOptionsAction.Response( + requestedDataStreams.stream() + .map(dataStreams::get) + .filter(Objects::nonNull) + .map( + dataStream -> new GetDataStreamOptionsAction.Response.DataStreamEntry( + dataStream.getName(), + dataStream.getDataStreamOptions() + ) + ) + .sorted(Comparator.comparing(GetDataStreamOptionsAction.Response.DataStreamEntry::dataStreamName)) + .toList() + ) + ); + } + + @Override + protected ClusterBlockException checkBlock(GetDataStreamOptionsAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); + } +} diff --git 
a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/TransportPutDataStreamOptionsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/TransportPutDataStreamOptionsAction.java new file mode 100644 index 0000000000000..b1386232c44f9 --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/TransportPutDataStreamOptionsAction.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ +package org.elasticsearch.datastreams.options.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.datastreams.DataStreamsActionUtil; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MetadataDataStreamsService; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.indices.SystemIndices; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + 
+import java.util.List; + +/** + * Transport action that resolves the data stream names from the request and sets the data stream options provided in the request. + */ +public class TransportPutDataStreamOptionsAction extends AcknowledgedTransportMasterNodeAction { + + private final MetadataDataStreamsService metadataDataStreamsService; + private final SystemIndices systemIndices; + + @Inject + public TransportPutDataStreamOptionsAction( + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + MetadataDataStreamsService metadataDataStreamsService, + SystemIndices systemIndices + ) { + super( + PutDataStreamOptionsAction.INSTANCE.name(), + transportService, + clusterService, + threadPool, + actionFilters, + PutDataStreamOptionsAction.Request::new, + indexNameExpressionResolver, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.metadataDataStreamsService = metadataDataStreamsService; + this.systemIndices = systemIndices; + } + + @Override + protected void masterOperation( + Task task, + PutDataStreamOptionsAction.Request request, + ClusterState state, + ActionListener listener + ) { + List dataStreamNames = DataStreamsActionUtil.getDataStreamNames( + indexNameExpressionResolver, + state, + request.getNames(), + request.indicesOptions() + ); + for (String name : dataStreamNames) { + systemIndices.validateDataStreamAccess(name, threadPool.getThreadContext()); + } + metadataDataStreamsService.setDataStreamOptions( + dataStreamNames, + request.getOptions(), + request.ackTimeout(), + request.masterNodeTimeout(), + listener + ); + } + + @Override + protected ClusterBlockException checkBlock(PutDataStreamOptionsAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } +} diff --git 
a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestDeleteDataStreamOptionsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestDeleteDataStreamOptionsAction.java new file mode 100644 index 0000000000000..96460632ff443 --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestDeleteDataStreamOptionsAction.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ +package org.elasticsearch.datastreams.options.rest; + +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.datastreams.options.action.DeleteDataStreamOptionsAction; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestToXContentListener; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.DELETE; +import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; + +@ServerlessScope(Scope.INTERNAL) +public class RestDeleteDataStreamOptionsAction extends BaseRestHandler { + + @Override + public String getName() { + return "delete_data_stream_options_action"; + } + + @Override + public List routes() { + return List.of(new Route(DELETE, 
"/_data_stream/{name}/_options")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { + final var deleteDataOptionsRequest = new DeleteDataStreamOptionsAction.Request( + getMasterNodeTimeout(request), + request.paramAsTime("timeout", AcknowledgedRequest.DEFAULT_ACK_TIMEOUT), + Strings.splitStringByCommaToArray(request.param("name")) + ); + deleteDataOptionsRequest.indicesOptions(IndicesOptions.fromRequest(request, deleteDataOptionsRequest.indicesOptions())); + return channel -> client.execute( + DeleteDataStreamOptionsAction.INSTANCE, + deleteDataOptionsRequest, + new RestToXContentListener<>(channel) + ); + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestGetDataStreamOptionsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestGetDataStreamOptionsAction.java new file mode 100644 index 0000000000000..6d6530efce1b9 --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestGetDataStreamOptionsAction.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ +package org.elasticsearch.datastreams.options.rest; + +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.datastreams.options.action.GetDataStreamOptionsAction; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestUtils; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestRefCountedChunkedToXContentListener; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.GET; + +@ServerlessScope(Scope.PUBLIC) +public class RestGetDataStreamOptionsAction extends BaseRestHandler { + + @Override + public String getName() { + return "get_data_stream_options_action"; + } + + @Override + public List routes() { + return List.of(new Route(GET, "/_data_stream/{name}/_options")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { + GetDataStreamOptionsAction.Request getDataStreamOptionsRequest = new GetDataStreamOptionsAction.Request( + RestUtils.getMasterNodeTimeout(request), + Strings.splitStringByCommaToArray(request.param("name")) + ); + getDataStreamOptionsRequest.includeDefaults(request.paramAsBoolean("include_defaults", false)); + getDataStreamOptionsRequest.indicesOptions(IndicesOptions.fromRequest(request, getDataStreamOptionsRequest.indicesOptions())); + return channel -> client.execute( + GetDataStreamOptionsAction.INSTANCE, + getDataStreamOptionsRequest, + new RestRefCountedChunkedToXContentListener<>(channel) + ); + } + + @Override + public boolean allowSystemIndexAccessByDefault() { + return true; + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestPutDataStreamOptionsAction.java 
b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestPutDataStreamOptionsAction.java new file mode 100644 index 0000000000000..9191b96b6039e --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestPutDataStreamOptionsAction.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ +package org.elasticsearch.datastreams.options.rest; + +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.datastreams.options.action.PutDataStreamOptionsAction; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.PUT; +import static org.elasticsearch.rest.RestUtils.getAckTimeout; +import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; + +@ServerlessScope(Scope.PUBLIC) +public class RestPutDataStreamOptionsAction extends BaseRestHandler { + + @Override + public String getName() { + return "put_data_stream_options_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, "/_data_stream/{name}/_options")); + } + + @Override + protected RestChannelConsumer 
prepareRequest(RestRequest request, NodeClient client) throws IOException { + try (XContentParser parser = request.contentParser()) { + PutDataStreamOptionsAction.Request putOptionsRequest = PutDataStreamOptionsAction.Request.parseRequest( + parser, + (failureStore) -> new PutDataStreamOptionsAction.Request( + getMasterNodeTimeout(request), + getAckTimeout(request), + Strings.splitStringByCommaToArray(request.param("name")), + failureStore + ) + ); + putOptionsRequest.indicesOptions(IndicesOptions.fromRequest(request, putOptionsRequest.indicesOptions())); + return channel -> client.execute(PutDataStreamOptionsAction.INSTANCE, putOptionsRequest, new RestToXContentListener<>(channel)); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java index 8a46550f8a689..db3973c1a15a8 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java @@ -45,6 +45,7 @@ public class MetadataDataStreamsService { private final DataStreamGlobalRetentionSettings globalRetentionSettings; private final MasterServiceTaskQueue updateLifecycleTaskQueue; private final MasterServiceTaskQueue setRolloverOnWriteTaskQueue; + private final MasterServiceTaskQueue updateOptionsTaskQueue; public MetadataDataStreamsService( ClusterService clusterService, @@ -93,6 +94,20 @@ public Tuple executeTask( Priority.NORMAL, rolloverOnWriteExecutor ); + ClusterStateTaskExecutor updateOptionsExecutor = new SimpleBatchedAckListenerTaskExecutor<>() { + + @Override + public Tuple executeTask( + UpdateOptionsTask modifyOptionsTask, + ClusterState clusterState + ) { + return new Tuple<>( + updateDataStreamOptions(clusterState, modifyOptionsTask.getDataStreamNames(), modifyOptionsTask.getOptions()), + modifyOptionsTask + ); + } + }; + 
this.updateOptionsTaskQueue = clusterService.createTaskQueue("modify-data-stream-options", Priority.NORMAL, updateOptionsExecutor); } public void modifyDataStream(final ModifyDataStreamsAction.Request request, final ActionListener listener) { @@ -147,6 +162,39 @@ public void removeLifecycle( ); } + /** + * Submits the task to set the provided data stream options to the requested data streams. + */ + public void setDataStreamOptions( + final List dataStreamNames, + DataStreamOptions options, + TimeValue ackTimeout, + TimeValue masterTimeout, + final ActionListener listener + ) { + updateOptionsTaskQueue.submitTask( + "set-data-stream-options", + new UpdateOptionsTask(dataStreamNames, options, ackTimeout, listener), + masterTimeout + ); + } + + /** + * Submits the task to remove the data stream options from the requested data streams. + */ + public void removeDataStreamOptions( + List dataStreamNames, + TimeValue ackTimeout, + TimeValue masterTimeout, + ActionListener listener + ) { + updateOptionsTaskQueue.submitTask( + "delete-data-stream-options", + new UpdateOptionsTask(dataStreamNames, null, ackTimeout, listener), + masterTimeout + ); + } + @SuppressForbidden(reason = "legacy usage of unbatched task") // TODO add support for batching here private void submitUnbatchedTask(@SuppressWarnings("SameParameterValue") String source, ClusterStateUpdateTask task) { clusterService.submitUnbatchedStateUpdateTask(source, task); @@ -228,6 +276,24 @@ ClusterState updateDataLifecycle(ClusterState currentState, List dataStr return ClusterState.builder(currentState).metadata(builder.build()).build(); } + /** + * Creates an updated cluster state in which the requested data streams have the data stream options provided. + * Visible for testing. 
+ */ + ClusterState updateDataStreamOptions( + ClusterState currentState, + List dataStreamNames, + @Nullable DataStreamOptions dataStreamOptions + ) { + Metadata metadata = currentState.metadata(); + Metadata.Builder builder = Metadata.builder(metadata); + for (var dataStreamName : dataStreamNames) { + var dataStream = validateDataStream(metadata, dataStreamName); + builder.put(dataStream.copy().setDataStreamOptions(dataStreamOptions).build()); + } + return ClusterState.builder(currentState).metadata(builder.build()).build(); + } + /** * Creates an updated cluster state in which the requested data stream has the flag {@link DataStream#rolloverOnWrite()} * set to the value of the parameter rolloverOnWrite @@ -372,6 +438,34 @@ public DataStreamLifecycle getDataLifecycle() { } } + /** + * A cluster state update task that consists of the cluster state request and the listeners that need to be notified upon completion. + */ + static class UpdateOptionsTask extends AckedBatchedClusterStateUpdateTask { + + private final List dataStreamNames; + private final DataStreamOptions options; + + UpdateOptionsTask( + List dataStreamNames, + @Nullable DataStreamOptions options, + TimeValue ackTimeout, + ActionListener listener + ) { + super(ackTimeout, listener); + this.dataStreamNames = dataStreamNames; + this.options = options; + } + + public List getDataStreamNames() { + return dataStreamNames; + } + + public DataStreamOptions getOptions() { + return options; + } + } + /** * A cluster state update task that consists of the cluster state request and the listeners that need to be notified upon completion. 
*/ diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamOptionsTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamOptionsTests.java index 020955d226a0f..9b0eb93b496a4 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamOptionsTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamOptionsTests.java @@ -24,7 +24,16 @@ protected Writeable.Reader instanceReader() { @Override protected DataStreamOptions createTestInstance() { - return new DataStreamOptions(randomBoolean() ? null : DataStreamFailureStoreTests.randomFailureStore()); + return randomDataStreamOptions(); + } + + public static DataStreamOptions randomDataStreamOptions() { + return switch (randomIntBetween(0, 2)) { + case 0 -> DataStreamOptions.EMPTY; + case 1 -> DataStreamOptions.FAILURE_STORE_DISABLED; + case 2 -> DataStreamOptions.FAILURE_STORE_ENABLED; + default -> throw new IllegalArgumentException("Illegal randomisation branch"); + }; } @Override diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java index 92c1103c950c0..276c20d2d1322 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java @@ -422,6 +422,39 @@ public void testUpdateLifecycle() { } } + public void testUpdateDataStreamOptions() { + String dataStream = randomAlphaOfLength(5); + // we want the data stream options to be non-empty, so we can see the removal in action + DataStreamOptions dataStreamOptions = randomValueOtherThan( + DataStreamOptions.EMPTY, + DataStreamOptionsTests::randomDataStreamOptions + ); + ClusterState before = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>(dataStream, 2)), List.of()); + 
MetadataDataStreamsService service = new MetadataDataStreamsService( + mock(ClusterService.class), + mock(IndicesService.class), + DataStreamGlobalRetentionSettings.create(ClusterSettings.createBuiltInClusterSettings()) + ); + + // Ensure no data stream options are stored + DataStream updatedDataStream = before.metadata().dataStreams().get(dataStream); + assertNotNull(updatedDataStream); + assertThat(updatedDataStream.getDataStreamOptions(), equalTo(DataStreamOptions.EMPTY)); + + // Set non-empty data stream options + ClusterState after = service.updateDataStreamOptions(before, List.of(dataStream), dataStreamOptions); + updatedDataStream = after.metadata().dataStreams().get(dataStream); + assertNotNull(updatedDataStream); + assertThat(updatedDataStream.getDataStreamOptions(), equalTo(dataStreamOptions)); + before = after; + + // Remove data stream options + after = service.updateDataStreamOptions(before, List.of(dataStream), null); + updatedDataStream = after.metadata().dataStreams().get(dataStream); + assertNotNull(updatedDataStream); + assertThat(updatedDataStream.getDataStreamOptions(), equalTo(DataStreamOptions.EMPTY)); + } + private MapperService getMapperService(IndexMetadata im) { try { String mapping = im.mapping().source().toString(); diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index d791873eb3142..b29dc0fa410b6 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -502,6 +502,9 @@ public class Constants { "indices:admin/data_stream/lifecycle/get", "indices:admin/data_stream/lifecycle/put", 
"indices:admin/data_stream/lifecycle/explain", + "indices:admin/data_stream/options/delete", + "indices:admin/data_stream/options/get", + "indices:admin/data_stream/options/put", "indices:admin/delete", "indices:admin/flush", "indices:admin/flush[s]", From 7bd6f2ce6a708364a41b1d1620a08df3f8816258 Mon Sep 17 00:00:00 2001 From: kosabogi <105062005+kosabogi@users.noreply.github.com> Date: Mon, 14 Oct 2024 15:57:00 +0200 Subject: [PATCH 046/449] Expands semantic_text tutorial with hybrid search (#114398) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Creates a new page for the hybrid search tutorial * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Adds search response example * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search 
Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó --------- Co-authored-by: István Zoltán Szabó --- .../search-your-data/semantic-search.asciidoc | 1 + .../semantic-text-hybrid-search | 254 ++++++++++++++++++ 2 files changed, 255 insertions(+) create mode 100644 docs/reference/search/search-your-data/semantic-text-hybrid-search diff --git a/docs/reference/search/search-your-data/semantic-search.asciidoc b/docs/reference/search/search-your-data/semantic-search.asciidoc index 62e41b3eef3de..0ef8591e42b5d 100644 --- a/docs/reference/search/search-your-data/semantic-search.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search.asciidoc @@ -104,6 +104,7 @@ IMPORTANT: For the easiest way to perform semantic search in the {stack}, refer include::semantic-search-semantic-text.asciidoc[] +include::semantic-text-hybrid-search[] include::semantic-search-inference.asciidoc[] include::semantic-search-elser.asciidoc[] include::cohere-es.asciidoc[] diff --git a/docs/reference/search/search-your-data/semantic-text-hybrid-search b/docs/reference/search/search-your-data/semantic-text-hybrid-search new file mode 100644 index 0000000000000..c56b283434df5 --- /dev/null +++ b/docs/reference/search/search-your-data/semantic-text-hybrid-search @@ -0,0 +1,254 @@ +[[semantic-text-hybrid-search]] +=== Tutorial: hybrid search with 
`semantic_text` +++++ +Hybrid search with `semantic_text` +++++ + +This tutorial demonstrates how to perform hybrid search, combining semantic search with traditional full-text search. + +In hybrid search, semantic search retrieves results based on the meaning of the text, while full-text search focuses on exact word matches. By combining both methods, hybrid search delivers more relevant results, particularly in cases where relying on a single approach may not be sufficient. + +The recommended way to use hybrid search in the {stack} is following the `semantic_text` workflow. This tutorial uses the <> for demonstration, but you can use any service and its supported models offered by the {infer-cap} API. + +[discrete] +[[semantic-text-hybrid-infer-endpoint]] +==== Create the {infer} endpoint + +Create an inference endpoint by using the <>: + +[source,console] +------------------------------------------------------------ +PUT _inference/sparse_embedding/my-elser-endpoint <1> +{ + "service": "elser", <2> + "service_settings": { + "adaptive_allocations": { <3> + "enabled": true, + "min_number_of_allocations": 3, + "max_number_of_allocations": 10 + }, + "num_threads": 1 + } +} +------------------------------------------------------------ +// TEST[skip:TBD] +<1> The task type is `sparse_embedding` in the path as the `elser` service will +be used and ELSER creates sparse vectors. The `inference_id` is +`my-elser-endpoint`. +<2> The `elser` service is used in this example. +<3> This setting enables and configures adaptive allocations. +Adaptive allocations make it possible for ELSER to automatically scale up or down resources based on the current load on the process. + +[NOTE] +==== +You might see a 502 bad gateway error in the response when using the {kib} Console. +This error usually just reflects a timeout, while the model downloads in the background. +You can check the download progress in the {ml-app} UI. 
+==== + +[discrete] +[[hybrid-search-create-index-mapping]] +==== Create an index mapping for hybrid search + +The destination index will contain both the embeddings for semantic search and the original text field for full-text search. This structure enables the combination of semantic search and full-text search. + +[source,console] +------------------------------------------------------------ +PUT semantic-embeddings +{ + "mappings": { + "properties": { + "semantic_text": { <1> + "type": "semantic_text", + "inference_id": "my-elser-endpoint" <2> + }, + "content": { <3> + "type": "text", + "copy_to": "semantic_text" <4> + } + } + } +} +------------------------------------------------------------ +// TEST[skip:TBD] +<1> The name of the field to contain the generated embeddings for semantic search. +<2> The identifier of the inference endpoint that generates the embeddings based on the input text. +<3> The name of the field to contain the original text for lexical search. +<4> The textual data stored in the `content` field will be copied to `semantic_text` and processed by the {infer} endpoint. + +[NOTE] +==== +If you want to run a search on indices that were populated by web crawlers or connectors, you have to +<> for these indices to +include the `semantic_text` field. Once the mapping is updated, you'll need to run a full web crawl or a full connector sync. This ensures that all existing +documents are reprocessed and updated with the new semantic embeddings, enabling hybrid search on the updated data. +==== + +[discrete] +[[semantic-text-hybrid-load-data]] +==== Load data + +In this step, you load the data that you later use to create embeddings from. + +Use the `msmarco-passagetest2019-top1000` data set, which is a subset of the MS MARCO Passage Ranking data set. It consists of 200 queries, each accompanied by a list of relevant text passages. 
All unique passages, along with their IDs, have been extracted from that data set and compiled into a https://github.com/elastic/stack-docs/blob/main/docs/en/stack/ml/nlp/data/msmarco-passagetest2019-unique.tsv[tsv file]. + +Download the file and upload it to your cluster using the {kibana-ref}/connect-to-elasticsearch.html#upload-data-kibana[Data Visualizer] in the {ml-app} UI. After your data is analyzed, click **Override settings**. Under **Edit field names**, assign `id` to the first column and `content` to the second. Click **Apply**, then **Import**. Name the index `test-data`, and click **Import**. After the upload is complete, you will see an index named `test-data` with 182,469 documents. + +[discrete] +[[hybrid-search-reindex-data]] +==== Reindex the data for hybrid search + +Reindex the data from the `test-data` index into the `semantic-embeddings` index. +The data in the `content` field of the source index is copied into the `content` field of the destination index. +The `copy_to` parameter set in the index mapping creation ensures that the content is copied into the `semantic_text` field. The data is processed by the {infer} endpoint at ingest time to generate embeddings. + +[NOTE] +==== +This step uses the reindex API to simulate data ingestion. If you are working with data that has already been indexed, +rather than using the `test-data` set, reindexing is still required to ensure that the data is processed by the {infer} endpoint +and the necessary embeddings are generated. +==== + +[source,console] +------------------------------------------------------------ +POST _reindex?wait_for_completion=false +{ + "source": { + "index": "test-data", + "size": 10 <1> + }, + "dest": { + "index": "semantic-embeddings" + } +} +------------------------------------------------------------ +// TEST[skip:TBD] +<1> The default batch size for reindexing is 1000. 
Reducing size to a smaller +number makes the update of the reindexing process quicker which enables you to +follow the progress closely and detect errors early. + +The call returns a task ID to monitor the progress: + +[source,console] +------------------------------------------------------------ +GET _tasks/ +------------------------------------------------------------ +// TEST[skip:TBD] + +Reindexing large datasets can take a long time. You can test this workflow using only a subset of the dataset. + +To cancel the reindexing process and generate embeddings for the subset that was reindexed: + +[source,console] +------------------------------------------------------------ +POST _tasks//_cancel +------------------------------------------------------------ +// TEST[skip:TBD] + +[discrete] +[[hybrid-search-perform-search]] +==== Perform hybrid search + +After reindexing the data into the `semantic-embeddings` index, you can perform hybrid search by using <>. RRF is a technique that merges the rankings from both semantic and lexical queries, giving more weight to results that rank high in either search. This ensures that the final results are balanced and relevant. + +[source,console] +------------------------------------------------------------ +GET semantic-embeddings/_search +{ + "retriever": { + "rrf": { + "retrievers": [ + { + "standard": { <1> + "query": { + "match": { + "content": "How to avoid muscle soreness while running?" <2> + } + } + } + }, + { + "standard": { <3> + "query": { + "semantic": { + "field": "semantic_text", <4> + "query": "How to avoid muscle soreness while running?" + } + } + } + } + ] + } + } +} +------------------------------------------------------------ +// TEST[skip:TBD] +<1> The first `standard` retriever represents the traditional lexical search. +<2> Lexical search is performed on the `content` field using the specified phrase. +<3> The second `standard` retriever refers to the semantic search. 
+<4> The `semantic_text` field is used to perform the semantic search. + + +After performing the hybrid search, the query will return the top 10 documents that match both semantic and lexical search criteria. The results include detailed information about each document: + +[source,console-result] +------------------------------------------------------------ +{ + "took": 107, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 473, + "relation": "eq" + }, + "max_score": null, + "hits": [ + { + "_index": "semantic-embeddings", + "_id": "wv65epIBEMBRnhfTsOFM", + "_score": 0.032786883, + "_rank": 1, + "_source": { + "semantic_text": { + "inference": { + "inference_id": "my-elser-endpoint", + "model_settings": { + "task_type": "sparse_embedding" + }, + "chunks": [ + { + "text": "What so many out there do not realize is the importance of what you do after you work out. You may have done the majority of the work, but how you treat your body in the minutes and hours after you exercise has a direct effect on muscle soreness, muscle strength and growth, and staying hydrated. Cool Down. After your last exercise, your workout is not over. The first thing you need to do is cool down. Even if running was all that you did, you still should do light cardio for a few minutes. This brings your heart rate down at a slow and steady pace, which helps you avoid feeling sick after a workout.", + "embeddings": { + "exercise": 1.571044, + "after": 1.3603843, + "sick": 1.3281639, + "cool": 1.3227621, + "muscle": 1.2645415, + "sore": 1.2561599, + "cooling": 1.2335974, + "running": 1.1750668, + "hours": 1.1104802, + "out": 1.0991782, + "##io": 1.0794281, + "last": 1.0474665, + (...) 
+ } + } + ] + } + }, + "id": 8408852, + "content": "What so many out there do not realize is the importance of (...)" + } + } + ] + } +} +------------------------------------------------------------ +// NOTCONSOLE From 4c15cc077887d00ecf0e02c39b42cf01874ab6c4 Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Mon, 14 Oct 2024 17:08:23 +0300 Subject: [PATCH 047/449] Add ResolvedExpression wrapper (#114592) **Introduction** > In order to make adoption of failure stores simpler for all users, we are introducing a new syntactical feature to index expression resolution: The selector. > > Selectors, denoted with a :: followed by a recognized suffix will allow users to specify which component of an index abstraction they would like to operate on within an API call. In this case, an index abstraction is a concrete index, data stream, or alias; Any abstraction that can be resolved to a set of indices/shards. We define a component of an index abstraction to be some searchable unit of the index abstraction. > > To start, we will support two components: data and failures. Concrete indices are their own data components, while the data component for index aliases are all of the indices contained therein. For data streams, the data component corresponds to their backing indices. Data stream aliases mirror this, treating all backing indices of the data streams they correspond to as their data component. > > The failure component is only supported by data streams and data stream aliases. The failure component of these abstractions refer to the data streams' failure stores. Indices and index aliases do not have a failure component. For more details and examples see https://github.com/elastic/elasticsearch/pull/113144. All this work has been cherry picked from there. **Purpose of this PR** This PR is introducing a wrapper around the resolved expression that used to be a `String` to create the base on which the selectors are going to be added. 
The current PR is just a refactoring and does not and should not change any existing behaviour. --- .../TransportClusterSearchShardsAction.java | 3 +- .../indices/resolve/ResolveIndexAction.java | 9 +- .../query/TransportValidateQueryAction.java | 3 +- .../explain/TransportExplainAction.java | 3 +- .../action/search/TransportSearchAction.java | 24 +- .../search/TransportSearchShardsAction.java | 6 +- .../metadata/IndexNameExpressionResolver.java | 196 +++++++----- .../elasticsearch/indices/IndicesService.java | 3 +- .../elasticsearch/search/SearchService.java | 3 +- .../indices/resolve/ResolveIndexTests.java | 15 +- .../DateMathExpressionResolverTests.java | 89 +++--- .../cluster/metadata/ExpressionListTests.java | 108 ++++--- .../IndexNameExpressionResolverTests.java | 65 ++-- .../WildcardExpressionResolverTests.java | 299 ++++++++++-------- .../indices/IndicesServiceTests.java | 34 +- 15 files changed, 504 insertions(+), 356 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java index 9ffef1f178f44..b855f2cee7613 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; @@ -84,7 +85,7 @@ protected void masterOperation( String[] concreteIndices = 
indexNameExpressionResolver.concreteIndexNames(clusterState, request); Map> routingMap = indexNameExpressionResolver.resolveSearchRouting(state, request.routing(), request.indices()); Map indicesAndFilters = new HashMap<>(); - Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); + Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); for (String index : concreteIndices) { final AliasFilter aliasFilter = indicesService.buildAliasFilter(clusterState, index, indicesAndAliases); final String[] aliases = indexNameExpressionResolver.indexAliases( diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java index 5c5c71bc002b3..f5c100b7884bb 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java @@ -25,6 +25,7 @@ import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; @@ -565,8 +566,8 @@ static void resolveIndices( if (names.length == 1 && (Metadata.ALL.equals(names[0]) || Regex.isMatchAllPattern(names[0]))) { names = new String[] { "**" }; } - Set resolvedIndexAbstractions = resolver.resolveExpressions(clusterState, indicesOptions, true, names); - for (String s : resolvedIndexAbstractions) { + Set resolvedIndexAbstractions = resolver.resolveExpressions(clusterState, indicesOptions, true, names); + for (ResolvedExpression s 
: resolvedIndexAbstractions) { enrichIndexAbstraction(clusterState, s, indices, aliases, dataStreams); } indices.sort(Comparator.comparing(ResolvedIndexAbstraction::getName)); @@ -597,12 +598,12 @@ private static void mergeResults( private static void enrichIndexAbstraction( ClusterState clusterState, - String indexAbstraction, + ResolvedExpression indexAbstraction, List indices, List aliases, List dataStreams ) { - IndexAbstraction ia = clusterState.metadata().getIndicesLookup().get(indexAbstraction); + IndexAbstraction ia = clusterState.metadata().getIndicesLookup().get(indexAbstraction.resource()); if (ia != null) { switch (ia.getType()) { case CONCRETE_INDEX -> { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java index 4e9830fe0d14e..e01f364712676 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; @@ -133,7 +134,7 @@ protected void doExecute(Task task, ValidateQueryRequest request, ActionListener @Override protected ShardValidateQueryRequest newShardRequest(int numShards, ShardRouting shard, ValidateQueryRequest request) { final ClusterState clusterState = clusterService.state(); - final Set indicesAndAliases = 
indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); + final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); final AliasFilter aliasFilter = searchService.buildAliasFilter(clusterState, shard.getIndexName(), indicesAndAliases); return new ShardValidateQueryRequest(shard.shardId(), aliasFilter, request); } diff --git a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java index 9c82d032014f2..84c6df7b8a66f 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java +++ b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.Writeable; @@ -109,7 +110,7 @@ protected boolean resolveIndex(ExplainRequest request) { @Override protected void resolveRequest(ClusterState state, InternalRequest request) { - final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(state, request.request().index()); + final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(state, request.request().index()); final AliasFilter aliasFilter = searchService.buildAliasFilter(state, request.concreteIndex(), indicesAndAliases); request.request().filteringAlias(aliasFilter); } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 
1645a378446a4..b5864f64a7824 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -37,6 +37,7 @@ import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.GroupShardsIterator; @@ -110,6 +111,7 @@ import java.util.function.BiFunction; import java.util.function.Function; import java.util.function.LongSupplier; +import java.util.stream.Collectors; import static org.elasticsearch.action.search.SearchType.DFS_QUERY_THEN_FETCH; import static org.elasticsearch.action.search.SearchType.QUERY_THEN_FETCH; @@ -203,7 +205,7 @@ public TransportSearchAction( private Map buildPerIndexOriginalIndices( ClusterState clusterState, - Set indicesAndAliases, + Set indicesAndAliases, String[] indices, IndicesOptions indicesOptions ) { @@ -211,6 +213,9 @@ private Map buildPerIndexOriginalIndices( var blocks = clusterState.blocks(); // optimization: mostly we do not have any blocks so there's no point in the expensive per-index checking boolean hasBlocks = blocks.global().isEmpty() == false || blocks.indices().isEmpty() == false; + // Get a distinct set of index abstraction names present from the resolved expressions to help with the reverse resolution from + // concrete index to the expression that produced it. 
+ Set indicesAndAliasesResources = indicesAndAliases.stream().map(ResolvedExpression::resource).collect(Collectors.toSet()); for (String index : indices) { if (hasBlocks) { blocks.indexBlockedRaiseException(ClusterBlockLevel.READ, index); @@ -227,8 +232,8 @@ private Map buildPerIndexOriginalIndices( String[] finalIndices = Strings.EMPTY_ARRAY; if (aliases == null || aliases.length == 0 - || indicesAndAliases.contains(index) - || hasDataStreamRef(clusterState, indicesAndAliases, index)) { + || indicesAndAliasesResources.contains(index) + || hasDataStreamRef(clusterState, indicesAndAliasesResources, index)) { finalIndices = new String[] { index }; } if (aliases != null) { @@ -247,7 +252,11 @@ private static boolean hasDataStreamRef(ClusterState clusterState, Set i return indicesAndAliases.contains(ret.getParentDataStream().getName()); } - Map buildIndexAliasFilters(ClusterState clusterState, Set indicesAndAliases, Index[] concreteIndices) { + Map buildIndexAliasFilters( + ClusterState clusterState, + Set indicesAndAliases, + Index[] concreteIndices + ) { final Map aliasFilterMap = new HashMap<>(); for (Index index : concreteIndices) { clusterState.blocks().indexBlockedRaiseException(ClusterBlockLevel.READ, index.getName()); @@ -1237,7 +1246,10 @@ private void executeSearch( } else { final Index[] indices = resolvedIndices.getConcreteLocalIndices(); concreteLocalIndices = Arrays.stream(indices).map(Index::getName).toArray(String[]::new); - final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, searchRequest.indices()); + final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions( + clusterState, + searchRequest.indices() + ); aliasFilter = buildIndexAliasFilters(clusterState, indicesAndAliases, indices); aliasFilter.putAll(remoteAliasMap); localShardIterators = getLocalShardsIterator( @@ -1810,7 +1822,7 @@ List getLocalShardsIterator( ClusterState clusterState, SearchRequest searchRequest, String clusterAlias, - 
Set indicesAndAliases, + Set indicesAndAliases, String[] concreteIndices ) { var routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, searchRequest.routing(), searchRequest.indices()); diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java index f418b5617b2a1..b94bd95c93d8a 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.index.Index; @@ -127,7 +128,10 @@ public void searchShards(Task task, SearchShardsRequest searchShardsRequest, Act searchService.getRewriteContext(timeProvider::absoluteStartMillis, resolvedIndices, null), listener.delegateFailureAndWrap((delegate, searchRequest) -> { Index[] concreteIndices = resolvedIndices.getConcreteLocalIndices(); - final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, searchRequest.indices()); + final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions( + clusterState, + searchRequest.indices() + ); final Map aliasFilters = transportSearchAction.buildIndexAliasFilters( clusterState, indicesAndAliases, diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index 2229166a2d779..eaf54034b22e0 100644 --- 
a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -74,6 +74,15 @@ public IndexNameExpressionResolver(ThreadContext threadContext, SystemIndices sy this.systemIndices = Objects.requireNonNull(systemIndices, "System Indices must not be null"); } + /** + * This contains the resolved expression in the form of the resource. + * Soon it will facilitate the index component selector. + * @param resource the resolved resolvedExpression + */ + public record ResolvedExpression(String resource) { + + } + /** * Same as {@link #concreteIndexNames(ClusterState, IndicesOptions, String...)}, but the index expressions and options * are encapsulated in the specified request. @@ -191,8 +200,9 @@ public List dataStreamNames(ClusterState state, IndicesOptions options, getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - final Collection expressions = resolveExpressions(context, indexExpressions); + final Collection expressions = resolveExpressions(context, indexExpressions); return expressions.stream() + .map(ResolvedExpression::resource) .map(x -> state.metadata().getIndicesLookup().get(x)) .filter(Objects::nonNull) .filter(ia -> ia.getType() == Type.DATA_STREAM) @@ -221,10 +231,11 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit getNetNewSystemIndexPredicate() ); - final Collection expressions = resolveExpressions(context, request.index()); + final Collection expressions = resolveExpressions(context, request.index()); if (expressions.size() == 1) { - IndexAbstraction ia = state.metadata().getIndicesLookup().get(expressions.iterator().next()); + ResolvedExpression resolvedExpression = expressions.iterator().next(); + IndexAbstraction ia = state.metadata().getIndicesLookup().get(resolvedExpression.resource()); if (ia.getType() == Type.ALIAS) { Index writeIndex = ia.getWriteIndex(); if (writeIndex == 
null) { @@ -246,14 +257,14 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit } } - protected static Collection resolveExpressions(Context context, String... expressions) { + protected static Collection resolveExpressions(Context context, String... expressions) { if (context.getOptions().expandWildcardExpressions() == false) { if (expressions == null || expressions.length == 0 || expressions.length == 1 && Metadata.ALL.equals(expressions[0])) { return List.of(); } else { return ExplicitResourceNameFilter.filterUnavailable( context, - DateMathExpressionResolver.resolve(context, List.of(expressions)) + DateMathExpressionResolver.resolve(context, Arrays.stream(expressions).map(ResolvedExpression::new).toList()) ); } } else { @@ -264,7 +275,10 @@ protected static Collection resolveExpressions(Context context, String.. } else { return WildcardExpressionResolver.resolve( context, - ExplicitResourceNameFilter.filterUnavailable(context, DateMathExpressionResolver.resolve(context, List.of(expressions))) + ExplicitResourceNameFilter.filterUnavailable( + context, + DateMathExpressionResolver.resolve(context, Arrays.stream(expressions).map(ResolvedExpression::new).toList()) + ) ); } } @@ -339,12 +353,12 @@ String[] concreteIndexNames(Context context, String... indexExpressions) { } Index[] concreteIndices(Context context, String... 
indexExpressions) { - final Collection expressions = resolveExpressions(context, indexExpressions); + final Collection expressions = resolveExpressions(context, indexExpressions); final Set concreteIndicesResult = Sets.newLinkedHashSetWithExpectedSize(expressions.size()); final Map indicesLookup = context.getState().metadata().getIndicesLookup(); - for (String expression : expressions) { - final IndexAbstraction indexAbstraction = indicesLookup.get(expression); + for (ResolvedExpression resolvedExpression : expressions) { + final IndexAbstraction indexAbstraction = indicesLookup.get(resolvedExpression.resource()); assert indexAbstraction != null; if (indexAbstraction.getType() == Type.ALIAS && context.isResolveToWriteIndex()) { Index writeIndex = indexAbstraction.getWriteIndex(); @@ -378,7 +392,7 @@ Index[] concreteIndices(Context context, String... indexExpressions) { throw new IllegalArgumentException( indexAbstraction.getType().getDisplayName() + " [" - + expression + + resolvedExpression.resource() + "] has more than one index associated with it " + Arrays.toString(indexNames) + ", can't execute a single index op" @@ -642,7 +656,7 @@ public Index concreteSingleIndex(ClusterState state, IndicesRequest request) { * Utility method that allows to resolve an index expression to its corresponding single write index. * * @param state the cluster state containing all the data to resolve to expression to a concrete index - * @param request The request that defines how the an alias or an index need to be resolved to a concrete index + * @param request The request that defines how an alias or an index need to be resolved to a concrete index * and the expression that can be resolved to an alias or an index name. 
* @throws IllegalArgumentException if the index resolution does not lead to an index, or leads to more than one index * @return the write index obtained as a result of the index resolution @@ -734,7 +748,7 @@ public static String resolveDateMathExpression(String dateExpression, long time) /** * Resolve an array of expressions to the set of indices and aliases that these expressions match. */ - public Set resolveExpressions(ClusterState state, String... expressions) { + public Set resolveExpressions(ClusterState state, String... expressions) { return resolveExpressions(state, IndicesOptions.lenientExpandOpen(), false, expressions); } @@ -743,7 +757,7 @@ public Set resolveExpressions(ClusterState state, String... expressions) * If {@param preserveDataStreams} is {@code true}, datastreams that are covered by the wildcards from the * {@param expressions} are returned as-is, without expanding them further to their respective backing indices. */ - public Set resolveExpressions( + public Set resolveExpressions( ClusterState state, IndicesOptions indicesOptions, boolean preserveDataStreams, @@ -760,10 +774,10 @@ public Set resolveExpressions( getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - Collection resolved = resolveExpressions(context, expressions); - if (resolved instanceof Set) { + Collection resolved = resolveExpressions(context, expressions); + if (resolved instanceof Set) { // unmodifiable without creating a new collection as it might contain many items - return Collections.unmodifiableSet((Set) resolved); + return Collections.unmodifiableSet((Set) resolved); } else { return Set.copyOf(resolved); } @@ -776,7 +790,7 @@ public Set resolveExpressions( * the index itself - null is returned. Returns {@code null} if no filtering is required. * NOTE: The provided expressions must have been resolved already via {@link #resolveExpressions}. 
*/ - public String[] filteringAliases(ClusterState state, String index, Set resolvedExpressions) { + public String[] filteringAliases(ClusterState state, String index, Set resolvedExpressions) { return indexAliases(state, index, AliasMetadata::filteringRequired, DataStreamAlias::filteringRequired, false, resolvedExpressions); } @@ -802,39 +816,39 @@ public String[] indexAliases( Predicate requiredAlias, Predicate requiredDataStreamAlias, boolean skipIdentity, - Set resolvedExpressions + Set resolvedExpressions ) { - if (isAllIndices(resolvedExpressions)) { + if (isAllIndicesExpression(resolvedExpressions)) { return null; } - + Set resources = resolvedExpressions.stream().map(ResolvedExpression::resource).collect(Collectors.toSet()); final IndexMetadata indexMetadata = state.metadata().getIndices().get(index); if (indexMetadata == null) { // Shouldn't happen throw new IndexNotFoundException(index); } - if (skipIdentity == false && resolvedExpressions.contains(index)) { + if (skipIdentity == false && resources.contains(index)) { return null; } IndexAbstraction ia = state.metadata().getIndicesLookup().get(index); DataStream dataStream = ia.getParentDataStream(); if (dataStream != null) { - if (skipIdentity == false && resolvedExpressions.contains(dataStream.getName())) { + if (skipIdentity == false && resources.contains(dataStream.getName())) { // skip the filters when the request targets the data stream name return null; } Map dataStreamAliases = state.metadata().dataStreamAliases(); List aliasesForDataStream; - if (iterateIndexAliases(dataStreamAliases.size(), resolvedExpressions.size())) { + if (iterateIndexAliases(dataStreamAliases.size(), resources.size())) { aliasesForDataStream = dataStreamAliases.values() .stream() - .filter(dataStreamAlias -> resolvedExpressions.contains(dataStreamAlias.getName())) + .filter(dataStreamAlias -> resources.contains(dataStreamAlias.getName())) .filter(dataStreamAlias -> 
dataStreamAlias.getDataStreams().contains(dataStream.getName())) .toList(); } else { - aliasesForDataStream = resolvedExpressions.stream() + aliasesForDataStream = resources.stream() .map(dataStreamAliases::get) .filter(dataStreamAlias -> dataStreamAlias != null && dataStreamAlias.getDataStreams().contains(dataStream.getName())) .toList(); @@ -859,18 +873,15 @@ public String[] indexAliases( } else { final Map indexAliases = indexMetadata.getAliases(); final AliasMetadata[] aliasCandidates; - if (iterateIndexAliases(indexAliases.size(), resolvedExpressions.size())) { + if (iterateIndexAliases(indexAliases.size(), resources.size())) { // faster to iterate indexAliases aliasCandidates = indexAliases.values() .stream() - .filter(aliasMetadata -> resolvedExpressions.contains(aliasMetadata.alias())) + .filter(aliasMetadata -> resources.contains(aliasMetadata.alias())) .toArray(AliasMetadata[]::new); } else { // faster to iterate resolvedExpressions - aliasCandidates = resolvedExpressions.stream() - .map(indexAliases::get) - .filter(Objects::nonNull) - .toArray(AliasMetadata[]::new); + aliasCandidates = resources.stream().map(indexAliases::get).filter(Objects::nonNull).toArray(AliasMetadata[]::new); } List aliases = null; for (AliasMetadata aliasMetadata : aliasCandidates) { @@ -909,12 +920,7 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - final Collection resolvedExpressions = resolveExpressions(context, expressions); - - // TODO: it appears that this can never be true? 
- if (isAllIndices(resolvedExpressions)) { - return resolveSearchRoutingAllIndices(state.metadata(), routing); - } + final Collection resolvedExpressions = resolveExpressions(context, expressions); Map> routings = null; Set paramRouting = null; @@ -924,8 +930,8 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab paramRouting = Sets.newHashSet(Strings.splitStringByCommaToArray(routing)); } - for (String expression : resolvedExpressions) { - IndexAbstraction indexAbstraction = state.metadata().getIndicesLookup().get(expression); + for (ResolvedExpression resolvedExpression : resolvedExpressions) { + IndexAbstraction indexAbstraction = state.metadata().getIndicesLookup().get(resolvedExpression.resource); if (indexAbstraction != null && indexAbstraction.getType() == Type.ALIAS) { for (Index index : indexAbstraction.getIndices()) { String concreteIndex = index.getName(); @@ -963,7 +969,7 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab } } else { // Index - routings = collectRoutings(routings, paramRouting, norouting, expression); + routings = collectRoutings(routings, paramRouting, norouting, resolvedExpression.resource()); } } @@ -1009,6 +1015,17 @@ public static Map> resolveSearchRoutingAllIndices(Metadata m return null; } + /** + * Identifies whether the array containing index names given as argument refers to all indices + * The empty or null array identifies all indices + * + * @param aliasesOrIndices the array containing index names + * @return true if the provided array maps to all indices, false otherwise + */ + public static boolean isAllIndicesExpression(Collection aliasesOrIndices) { + return isAllIndices(aliasesOrIndices.stream().map(ResolvedExpression::resource).toList()); + } + /** * Identifies whether the array containing index names given as argument refers to all indices * The empty or null array identifies all indices @@ -1249,8 +1266,8 @@ private WildcardExpressionResolver() { * Returns all the indices, datastreams, and 
aliases, considering the open/closed, system, and hidden context parameters. * Depending on the context, returns the names of the datastreams themselves or their backing indices. */ - public static Collection resolveAll(Context context) { - List concreteIndices = resolveEmptyOrTrivialWildcard(context); + public static Collection resolveAll(Context context) { + List concreteIndices = resolveEmptyOrTrivialWildcard(context); if (context.includeDataStreams() == false && context.getOptions().ignoreAliases()) { return concreteIndices; @@ -1265,7 +1282,7 @@ public static Collection resolveAll(Context context) { .filter(ia -> shouldIncludeIfDataStream(ia, context) || shouldIncludeIfAlias(ia, context)) .filter(ia -> ia.isSystem() == false || context.systemIndexAccessPredicate.test(ia.getName())); - Set resolved = expandToOpenClosed(context, ias).collect(Collectors.toSet()); + Set resolved = expandToOpenClosed(context, ias).collect(Collectors.toSet()); resolved.addAll(concreteIndices); return resolved; } @@ -1293,17 +1310,17 @@ private static boolean shouldIncludeIfAlias(IndexAbstraction ia, IndexNameExpres * ultimately returned, instead of the alias or datastream name * */ - public static Collection resolve(Context context, List expressions) { + public static Collection resolve(Context context, List expressions) { ExpressionList expressionList = new ExpressionList(context, expressions); // fast exit if there are no wildcards to evaluate if (expressionList.hasWildcard() == false) { return expressions; } - Set result = new HashSet<>(); + Set result = new HashSet<>(); for (ExpressionList.Expression expression : expressionList) { if (expression.isWildcard()) { Stream matchingResources = matchResourcesToWildcard(context, expression.get()); - Stream matchingOpenClosedNames = expandToOpenClosed(context, matchingResources); + Stream matchingOpenClosedNames = expandToOpenClosed(context, matchingResources); AtomicBoolean emptyWildcardExpansion = new AtomicBoolean(false); if 
(context.getOptions().allowNoIndices() == false) { emptyWildcardExpansion.set(true); @@ -1319,9 +1336,9 @@ public static Collection resolve(Context context, List expressio } } else { if (expression.isExclusion()) { - result.remove(expression.get()); + result.remove(new ResolvedExpression(expression.get())); } else { - result.add(expression.get()); + result.add(expression.resolvedExpression()); } } } @@ -1412,13 +1429,13 @@ private static Map filterIndicesLookupForSuffixWildcar * Data streams and aliases are interpreted to refer to multiple indices, * then all index resources are filtered by their open/closed status. */ - private static Stream expandToOpenClosed(Context context, Stream resources) { + private static Stream expandToOpenClosed(Context context, Stream resources) { final IndexMetadata.State excludeState = excludeState(context.getOptions()); return resources.flatMap(indexAbstraction -> { if (context.isPreserveAliases() && indexAbstraction.getType() == Type.ALIAS) { - return Stream.of(indexAbstraction.getName()); + return Stream.of(new ResolvedExpression(indexAbstraction.getName())); } else if (context.isPreserveDataStreams() && indexAbstraction.getType() == Type.DATA_STREAM) { - return Stream.of(indexAbstraction.getName()); + return Stream.of(new ResolvedExpression(indexAbstraction.getName())); } else { Stream indicesStateStream = Stream.of(); if (shouldIncludeRegularIndices(context.getOptions())) { @@ -1434,18 +1451,20 @@ private static Stream expandToOpenClosed(Context context, Stream indexMeta.getState() != excludeState); } - return indicesStateStream.map(indexMeta -> indexMeta.getIndex().getName()); + return indicesStateStream.map(indexMeta -> new ResolvedExpression(indexMeta.getIndex().getName())); } }); } - private static List resolveEmptyOrTrivialWildcard(Context context) { + private static List resolveEmptyOrTrivialWildcard(Context context) { final String[] allIndices = resolveEmptyOrTrivialWildcardToAllIndices(context.getOptions(), 
context.getState().metadata()); + Stream result; if (context.systemIndexAccessLevel == SystemIndexAccessLevel.ALL) { - return List.of(allIndices); + result = Arrays.stream(allIndices); } else { - return resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(context, allIndices); + result = resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(context, allIndices).stream(); } + return result.map(ResolvedExpression::new).toList(); } private static List resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(Context context, String[] allIndices) { @@ -1507,8 +1526,8 @@ private DateMathExpressionResolver() { // utility class } - public static List resolve(Context context, List expressions) { - List result = new ArrayList<>(expressions.size()); + public static List resolve(Context context, List expressions) { + List result = new ArrayList<>(expressions.size()); for (ExpressionList.Expression expression : new ExpressionList(context, expressions)) { result.add(resolveExpression(expression, context::getStartTime)); } @@ -1519,13 +1538,15 @@ static String resolveExpression(String expression) { return resolveExpression(expression, System::currentTimeMillis); } - static String resolveExpression(ExpressionList.Expression expression, LongSupplier getTime) { + static ResolvedExpression resolveExpression(ExpressionList.Expression expression, LongSupplier getTime) { + String result; if (expression.isExclusion()) { // accepts date-math exclusions that are of the form "-<...{}>", i.e. 
the "-" is outside the "<>" date-math template - return "-" + resolveExpression(expression.get(), getTime); + result = "-" + resolveExpression(expression.get(), getTime); } else { - return resolveExpression(expression.get(), getTime); + result = resolveExpression(expression.get(), getTime); } + return new ResolvedExpression(result); } static String resolveExpression(String expression, LongSupplier getTime) { @@ -1687,25 +1708,26 @@ private ExplicitResourceNameFilter() { * Returns an expression list with "unavailable" (missing or not acceptable) resource names filtered out. * Only explicit resource names are considered for filtering. Wildcard and exclusion expressions are kept in. */ - public static List filterUnavailable(Context context, List expressions) { + public static List filterUnavailable(Context context, List expressions) { ensureRemoteIndicesRequireIgnoreUnavailable(context.getOptions(), expressions); - List result = new ArrayList<>(expressions.size()); + List result = new ArrayList<>(expressions.size()); for (ExpressionList.Expression expression : new ExpressionList(context, expressions)) { validateAliasOrIndex(expression); - if (expression.isWildcard() || expression.isExclusion() || ensureAliasOrIndexExists(context, expression.get())) { - result.add(expression.expression()); + if (expression.isWildcard() || expression.isExclusion() || ensureAliasOrIndexExists(context, expression)) { + result.add(expression.resolvedExpression()); } } return result; } /** - * This returns `true` if the given {@param name} is of a resource that exists. - * Otherwise, it returns `false` if the `ignore_unvailable` option is `true`, or, if `false`, it throws a "not found" type of + * This returns `true` if the given {@param resolvedExpression} is of a resource that exists. + * Otherwise, it returns `false` if the `ignore_unavailable` option is `true`, or, if `false`, it throws a "not found" type of * exception. 
*/ @Nullable - private static boolean ensureAliasOrIndexExists(Context context, String name) { + private static boolean ensureAliasOrIndexExists(Context context, ExpressionList.Expression expression) { + String name = expression.get(); boolean ignoreUnavailable = context.getOptions().ignoreUnavailable(); IndexAbstraction indexAbstraction = context.getState().getMetadata().getIndicesLookup().get(name); if (indexAbstraction == null) { @@ -1737,32 +1759,37 @@ private static boolean ensureAliasOrIndexExists(Context context, String name) { } private static void validateAliasOrIndex(ExpressionList.Expression expression) { - if (Strings.isEmpty(expression.expression())) { - throw notFoundException(expression.expression()); + if (Strings.isEmpty(expression.resolvedExpression().resource())) { + throw notFoundException(expression.get()); } // Expressions can not start with an underscore. This is reserved for APIs. If the check gets here, the API // does not exist and the path is interpreted as an expression. If the expression begins with an underscore, // throw a specific error that is different from the [[IndexNotFoundException]], which is typically thrown // if the expression can't be found. 
- if (expression.expression().charAt(0) == '_') { - throw new InvalidIndexNameException(expression.expression(), "must not start with '_'."); + if (expression.resolvedExpression().resource().charAt(0) == '_') { + throw new InvalidIndexNameException(expression.get(), "must not start with '_'."); } } - private static void ensureRemoteIndicesRequireIgnoreUnavailable(IndicesOptions options, List indexExpressions) { + private static void ensureRemoteIndicesRequireIgnoreUnavailable( + IndicesOptions options, + List resolvedExpressions + ) { if (options.ignoreUnavailable()) { return; } - for (String index : indexExpressions) { + for (ResolvedExpression resolvedExpression : resolvedExpressions) { + var index = resolvedExpression.resource(); if (RemoteClusterAware.isRemoteIndexName(index)) { - failOnRemoteIndicesNotIgnoringUnavailable(indexExpressions); + failOnRemoteIndicesNotIgnoringUnavailable(resolvedExpressions); } } } - private static void failOnRemoteIndicesNotIgnoringUnavailable(List indexExpressions) { + private static void failOnRemoteIndicesNotIgnoringUnavailable(List resolvedExpressions) { List crossClusterIndices = new ArrayList<>(); - for (String index : indexExpressions) { + for (ResolvedExpression resolvedExpression : resolvedExpressions) { + String index = resolvedExpression.resource(); if (RemoteClusterAware.isRemoteIndexName(index)) { crossClusterIndices.add(index); } @@ -1780,13 +1807,13 @@ public static final class ExpressionList implements Iterable expressionsList; private final boolean hasWildcard; - public record Expression(String expression, boolean isWildcard, boolean isExclusion) { + public record Expression(ResolvedExpression resolvedExpression, boolean isWildcard, boolean isExclusion) { public String get() { if (isExclusion()) { // drop the leading "-" if exclusion because it is easier for callers to handle it like this - return expression().substring(1); + return resolvedExpression().resource().substring(1); } else { - return expression(); + 
return resolvedExpression().resource(); } } } @@ -1795,16 +1822,17 @@ public String get() { * Creates the expression iterable that can be used to easily check which expression item is a wildcard or an exclusion (or both). * The {@param context} is used to check if wildcards ought to be considered or not. */ - public ExpressionList(Context context, List expressionStrings) { - List expressionsList = new ArrayList<>(expressionStrings.size()); + public ExpressionList(Context context, List resolvedExpressions) { + List expressionsList = new ArrayList<>(resolvedExpressions.size()); boolean wildcardSeen = false; - for (String expressionString : expressionStrings) { + for (ResolvedExpression resolvedExpression : resolvedExpressions) { + var expressionString = resolvedExpression.resource(); boolean isExclusion = expressionString.startsWith("-") && wildcardSeen; if (context.getOptions().expandWildcardExpressions() && isWildcard(expressionString)) { wildcardSeen = true; - expressionsList.add(new Expression(expressionString, true, isExclusion)); + expressionsList.add(new Expression(resolvedExpression, true, isExclusion)); } else { - expressionsList.add(new Expression(expressionString, false, isExclusion)); + expressionsList.add(new Expression(resolvedExpression, false, isExclusion)); } } this.expressionsList = expressionsList; diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 706f788e8a310..2dc5e7c28ad0b 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -38,6 +38,7 @@ import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import 
org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RecoverySource; @@ -1713,7 +1714,7 @@ interface IndexDeletionAllowedPredicate { IndexSettings indexSettings) -> canDeleteIndexContents(index); private final IndexDeletionAllowedPredicate ALWAYS_TRUE = (Index index, IndexSettings indexSettings) -> true; - public AliasFilter buildAliasFilter(ClusterState state, String index, Set resolvedExpressions) { + public AliasFilter buildAliasFilter(ClusterState state, String index, Set resolvedExpressions) { /* Being static, parseAliasFilter doesn't have access to whatever guts it needs to parse a query. Instead of passing in a bunch * of dependencies we pass in a function that can perform the parsing. */ CheckedFunction filterParser = bytes -> { diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index be96b4e25d841..3a900a8a9b8a6 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -26,6 +26,7 @@ import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.CheckedSupplier; @@ -1618,7 +1619,7 @@ public boolean isForceExecution() { } } - public AliasFilter buildAliasFilter(ClusterState state, String index, Set resolvedExpressions) { + public AliasFilter buildAliasFilter(ClusterState state, String index, Set resolvedExpressions) { return indicesService.buildAliasFilter(state, index, resolvedExpressions); } diff --git 
a/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexTests.java index 834bacd9e6a04..1faeabb6acbf7 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.cluster.metadata.DataStreamTestHelper; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; @@ -229,9 +230,19 @@ public void testResolveHiddenProperlyWithDateMath() { .metadata(buildMetadata(new Object[][] {}, indices)) .build(); String[] requestedIndex = new String[] { "" }; - Set resolvedIndices = resolver.resolveExpressions(clusterState, IndicesOptions.LENIENT_EXPAND_OPEN, true, requestedIndex); + Set resolvedIndices = resolver.resolveExpressions( + clusterState, + IndicesOptions.LENIENT_EXPAND_OPEN, + true, + requestedIndex + ); assertThat(resolvedIndices.size(), is(1)); - assertThat(resolvedIndices, contains(oneOf("logs-pgsql-prod-" + todaySuffix, "logs-pgsql-prod-" + tomorrowSuffix))); + assertThat( + resolvedIndices, + contains( + oneOf(new ResolvedExpression("logs-pgsql-prod-" + todaySuffix), new ResolvedExpression("logs-pgsql-prod-" + tomorrowSuffix)) + ) + ); } public void testSystemIndexAccess() { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java index 6be5b48f9d723..fe0b7926229cb 100644 --- 
a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.Context; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.DateMathExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.indices.SystemIndices.SystemIndexAccessLevel; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; @@ -26,7 +27,6 @@ import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.Locale; @@ -52,11 +52,11 @@ private static String formatDate(String pattern, ZonedDateTime zonedDateTime) { public void testNormal() throws Exception { int numIndexExpressions = randomIntBetween(1, 9); - List indexExpressions = new ArrayList<>(numIndexExpressions); + List indexExpressions = new ArrayList<>(numIndexExpressions); for (int i = 0; i < numIndexExpressions; i++) { - indexExpressions.add(randomAlphaOfLength(10)); + indexExpressions.add(new ResolvedExpression(randomAlphaOfLength(10))); } - List result = DateMathExpressionResolver.resolve(context, indexExpressions); + List result = DateMathExpressionResolver.resolve(context, indexExpressions); assertThat(result.size(), equalTo(indexExpressions.size())); for (int i = 0; i < indexExpressions.size(); i++) { assertThat(result.get(i), equalTo(indexExpressions.get(i))); @@ -64,25 +64,25 @@ public void testNormal() throws Exception { } public void testExpression() throws Exception { - List indexExpressions = Arrays.asList("<.marvel-{now}>", "<.watch_history-{now}>", ""); - List result = DateMathExpressionResolver.resolve(context, indexExpressions); + List indexExpressions = 
resolvedExpressions("<.marvel-{now}>", "<.watch_history-{now}>", ""); + List result = DateMathExpressionResolver.resolve(context, indexExpressions); assertThat(result.size(), equalTo(3)); - assertThat(result.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - assertThat(result.get(1), equalTo(".watch_history-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - assertThat(result.get(2), equalTo("logstash-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + assertThat(result.get(0).resource(), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + assertThat(result.get(1).resource(), equalTo(".watch_history-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + assertThat(result.get(2).resource(), equalTo("logstash-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); } public void testExpressionWithWildcardAndExclusions() { - List indexExpressions = Arrays.asList( + List indexExpressions = resolvedExpressions( "<-before-inner-{now}>", "-", "", "<-after-inner-{now}>", "-" ); - List result = DateMathExpressionResolver.resolve(context, indexExpressions); + List result = DateMathExpressionResolver.resolve(context, indexExpressions); assertThat( - result, + result.stream().map(ResolvedExpression::resource).toList(), Matchers.contains( equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))), equalTo("-"), // doesn't evaluate because it doesn't start with "<" and it is not an exclusion @@ -98,7 +98,7 @@ public void testExpressionWithWildcardAndExclusions() { ); result = DateMathExpressionResolver.resolve(noWildcardExpandContext, indexExpressions); assertThat( - result, + result.stream().map(ResolvedExpression::resource).toList(), Matchers.contains( equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))), // doesn't evaluate because it 
doesn't start with "<" and there can't be exclusions without wildcard expansion @@ -112,21 +112,24 @@ public void testExpressionWithWildcardAndExclusions() { } public void testEmpty() throws Exception { - List result = DateMathExpressionResolver.resolve(context, Collections.emptyList()); + List result = DateMathExpressionResolver.resolve(context, List.of()); assertThat(result.size(), equalTo(0)); } public void testExpression_Static() throws Exception { - List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-test>")); + List result = DateMathExpressionResolver.resolve(context, resolvedExpressions("<.marvel-test>")); assertThat(result.size(), equalTo(1)); - assertThat(result.get(0), equalTo(".marvel-test")); + assertThat(result.get(0).resource(), equalTo(".marvel-test")); } public void testExpression_MultiParts() throws Exception { - List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.text1-{now/d}-text2-{now/M}>")); + List result = DateMathExpressionResolver.resolve( + context, + resolvedExpressions("<.text1-{now/d}-text2-{now/M}>") + ); assertThat(result.size(), equalTo(1)); assertThat( - result.get(0), + result.get(0).resource(), equalTo( ".text1-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())) @@ -137,33 +140,42 @@ public void testExpression_MultiParts() throws Exception { } public void testExpression_CustomFormat() throws Exception { - List results = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{yyyy.MM.dd}}>")); + List results = DateMathExpressionResolver.resolve( + context, + resolvedExpressions("<.marvel-{now/d{yyyy.MM.dd}}>") + ); assertThat(results.size(), equalTo(1)); - assertThat(results.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + assertThat(results.get(0).resource(), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); } public void testExpression_EscapeStatic() 
throws Exception { - List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.mar\\{v\\}el-{now/d}>")); + List result = DateMathExpressionResolver.resolve(context, resolvedExpressions("<.mar\\{v\\}el-{now/d}>")); assertThat(result.size(), equalTo(1)); - assertThat(result.get(0), equalTo(".mar{v}el-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + assertThat(result.get(0).resource(), equalTo(".mar{v}el-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); } public void testExpression_EscapeDateFormat() throws Exception { - List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{'\\{year\\}'yyyy}}>")); + List result = DateMathExpressionResolver.resolve( + context, + resolvedExpressions("<.marvel-{now/d{'\\{year\\}'yyyy}}>") + ); assertThat(result.size(), equalTo(1)); - assertThat(result.get(0), equalTo(".marvel-" + formatDate("'{year}'yyyy", dateFromMillis(context.getStartTime())))); + assertThat(result.get(0).resource(), equalTo(".marvel-" + formatDate("'{year}'yyyy", dateFromMillis(context.getStartTime())))); } public void testExpression_MixedArray() throws Exception { - List result = DateMathExpressionResolver.resolve( + List result = DateMathExpressionResolver.resolve( context, - Arrays.asList("name1", "<.marvel-{now/d}>", "name2", "<.logstash-{now/M{uuuu.MM}}>") + resolvedExpressions("name1", "<.marvel-{now/d}>", "name2", "<.logstash-{now/M{uuuu.MM}}>") ); assertThat(result.size(), equalTo(4)); - assertThat(result.get(0), equalTo("name1")); - assertThat(result.get(1), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - assertThat(result.get(2), equalTo("name2")); - assertThat(result.get(3), equalTo(".logstash-" + formatDate("uuuu.MM", dateFromMillis(context.getStartTime()).withDayOfMonth(1)))); + assertThat(result.get(0).resource(), equalTo("name1")); + assertThat(result.get(1).resource(), equalTo(".marvel-" + 
formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + assertThat(result.get(2).resource(), equalTo("name2")); + assertThat( + result.get(3).resource(), + equalTo(".logstash-" + formatDate("uuuu.MM", dateFromMillis(context.getStartTime()).withDayOfMonth(1))) + ); } public void testExpression_CustomTimeZoneInIndexName() throws Exception { @@ -202,19 +214,19 @@ public void testExpression_CustomTimeZoneInIndexName() throws Exception { name -> false, name -> false ); - List results = DateMathExpressionResolver.resolve( + List results = DateMathExpressionResolver.resolve( context, - Arrays.asList("<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getId() + "}}>") + resolvedExpressions("<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getId() + "}}>") ); assertThat(results.size(), equalTo(1)); logger.info("timezone: [{}], now [{}], name: [{}]", timeZone, now, results.get(0)); - assertThat(results.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", now.withZoneSameInstant(timeZone)))); + assertThat(results.get(0).resource(), equalTo(".marvel-" + formatDate("uuuu.MM.dd", now.withZoneSameInstant(timeZone)))); } public void testExpressionInvalidUnescaped() throws Exception { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.mar}vel-{now/d}>")) + () -> DateMathExpressionResolver.resolve(context, resolvedExpressions("<.mar}vel-{now/d}>")) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("invalid character at position [")); @@ -223,7 +235,7 @@ public void testExpressionInvalidUnescaped() throws Exception { public void testExpressionInvalidDateMathFormat() throws Exception { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}>")) + () -> DateMathExpressionResolver.resolve(context, resolvedExpressions("<.marvel-{now/d{}>")) ); 
assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("date math placeholder is open ended")); @@ -232,7 +244,7 @@ public void testExpressionInvalidDateMathFormat() throws Exception { public void testExpressionInvalidEmptyDateMathFormat() throws Exception { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}}>")) + () -> DateMathExpressionResolver.resolve(context, resolvedExpressions("<.marvel-{now/d{}}>")) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("missing date format")); @@ -241,10 +253,13 @@ public void testExpressionInvalidEmptyDateMathFormat() throws Exception { public void testExpressionInvalidOpenEnded() throws Exception { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d>")) + () -> DateMathExpressionResolver.resolve(context, resolvedExpressions("<.marvel-{now/d>")) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("date math placeholder is open ended")); } + private List resolvedExpressions(String... 
expressions) { + return Arrays.stream(expressions).map(ResolvedExpression::new).toList(); + } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ExpressionListTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ExpressionListTests.java index 1ca59ff402bd8..1df3bf4132b60 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/ExpressionListTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ExpressionListTests.java @@ -13,10 +13,12 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.Context; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ExpressionList; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ExpressionList.Expression; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; +import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.function.Supplier; @@ -39,10 +41,13 @@ public void testEmpty() { public void testExplicitSingleNameExpression() { for (IndicesOptions indicesOptions : List.of(getExpandWildcardsIndicesOptions(), getNoExpandWildcardsIndicesOptions())) { for (String expressionString : List.of("non_wildcard", "-non_exclusion")) { - ExpressionList expressionList = new ExpressionList(getContextWithOptions(indicesOptions), List.of(expressionString)); + ExpressionList expressionList = new ExpressionList( + getContextWithOptions(indicesOptions), + resolvedExpressions(expressionString) + ); assertThat(expressionList.hasWildcard(), is(false)); if (randomBoolean()) { - expressionList = new ExpressionList(getContextWithOptions(indicesOptions), List.of(expressionString)); + expressionList = new ExpressionList(getContextWithOptions(indicesOptions), resolvedExpressions((expressionString))); } Iterator expressionIterator = expressionList.iterator(); 
assertThat(expressionIterator.hasNext(), is(true)); @@ -62,11 +67,14 @@ public void testWildcardSingleExpression() { for (String wildcardTest : List.of("*", "a*", "*b", "a*b", "a-*b", "a*-b", "-*", "-a*", "-*b", "**", "*-*")) { ExpressionList expressionList = new ExpressionList( getContextWithOptions(getExpandWildcardsIndicesOptions()), - List.of(wildcardTest) + resolvedExpressions(wildcardTest) ); assertThat(expressionList.hasWildcard(), is(true)); if (randomBoolean()) { - expressionList = new ExpressionList(getContextWithOptions(getExpandWildcardsIndicesOptions()), List.of(wildcardTest)); + expressionList = new ExpressionList( + getContextWithOptions(getExpandWildcardsIndicesOptions()), + resolvedExpressions(wildcardTest) + ); } Iterator expressionIterator = expressionList.iterator(); assertThat(expressionIterator.hasNext(), is(true)); @@ -82,13 +90,13 @@ public void testWildcardSingleExpression() { } public void testWildcardLongerExpression() { - List onlyExplicits = randomList(7, () -> randomAlphaOfLengthBetween(0, 5)); - String wildcard = randomFrom("*", "*b", "-*", "*-", "c*", "a*b", "**"); - List expressionList = new ArrayList<>(onlyExplicits.size() + 1); + List onlyExplicits = randomList(7, () -> new ResolvedExpression(randomAlphaOfLengthBetween(0, 5))); + ResolvedExpression wildcard = new ResolvedExpression(randomFrom("*", "*b", "-*", "*-", "c*", "a*b", "**")); + List expressionList = new ArrayList<>(onlyExplicits.size() + 1); expressionList.addAll(randomSubsetOf(onlyExplicits)); int wildcardPos = expressionList.size(); expressionList.add(wildcard); - for (String item : onlyExplicits) { + for (ResolvedExpression item : onlyExplicits) { if (expressionList.contains(item) == false) { expressionList.add(item); } @@ -106,18 +114,18 @@ public void testWildcardLongerExpression() { } else { assertThat(expression.isWildcard(), is(true)); } - assertThat(expression.get(), is(expressionList.get(i++))); + assertThat(expression.get(), 
is(expressionList.get(i++).resource())); } } public void testWildcardsNoExclusionExpressions() { - for (List wildcardExpression : List.of( - List.of("*"), - List.of("a", "*"), - List.of("-b", "*c"), - List.of("-", "a", "c*"), - List.of("*", "a*", "*b"), - List.of("-*", "a", "b*") + for (List wildcardExpression : List.of( + resolvedExpressions("*"), + resolvedExpressions("a", "*"), + resolvedExpressions("-b", "*c"), + resolvedExpressions("-", "a", "c*"), + resolvedExpressions("*", "a*", "*b"), + resolvedExpressions("-*", "a", "b*") )) { ExpressionList expressionList = new ExpressionList( getContextWithOptions(getExpandWildcardsIndicesOptions()), @@ -130,25 +138,25 @@ public void testWildcardsNoExclusionExpressions() { int i = 0; for (Expression expression : expressionList) { assertThat(expression.isExclusion(), is(false)); - if (wildcardExpression.get(i).contains("*")) { + if (wildcardExpression.get(i).resource().contains("*")) { assertThat(expression.isWildcard(), is(true)); } else { assertThat(expression.isWildcard(), is(false)); } - assertThat(expression.get(), is(wildcardExpression.get(i++))); + assertThat(expression.get(), is(wildcardExpression.get(i++).resource())); } } } public void testWildcardExpressionNoExpandOptions() { - for (List wildcardExpression : List.of( - List.of("*"), - List.of("a", "*"), - List.of("-b", "*c"), - List.of("*d", "-"), - List.of("*", "-*"), - List.of("-", "a", "c*"), - List.of("*", "a*", "*b") + for (List wildcardExpression : List.of( + resolvedExpressions("*"), + resolvedExpressions("a", "*"), + resolvedExpressions("-b", "*c"), + resolvedExpressions("*d", "-"), + resolvedExpressions("*", "-*"), + resolvedExpressions("-", "a", "c*"), + resolvedExpressions("*", "a*", "*b") )) { ExpressionList expressionList = new ExpressionList( getContextWithOptions(getNoExpandWildcardsIndicesOptions()), @@ -162,7 +170,7 @@ public void testWildcardExpressionNoExpandOptions() { for (Expression expression : expressionList) { 
assertThat(expression.isWildcard(), is(false)); assertThat(expression.isExclusion(), is(false)); - assertThat(expression.get(), is(wildcardExpression.get(i++))); + assertThat(expression.get(), is(wildcardExpression.get(i++).resource())); } } } @@ -172,17 +180,17 @@ public void testSingleExclusionExpression() { int wildcardPos = randomIntBetween(0, 3); String exclusion = randomFrom("-*", "-", "-c*", "-ab", "--"); int exclusionPos = randomIntBetween(wildcardPos + 1, 7); - List exclusionExpression = new ArrayList<>(); + List exclusionExpression = new ArrayList<>(); for (int i = 0; i < wildcardPos; i++) { - exclusionExpression.add(randomAlphaOfLengthBetween(0, 5)); + exclusionExpression.add(new ResolvedExpression(randomAlphaOfLengthBetween(0, 5))); } - exclusionExpression.add(wildcard); + exclusionExpression.add(new ResolvedExpression(wildcard)); for (int i = wildcardPos + 1; i < exclusionPos; i++) { - exclusionExpression.add(randomAlphaOfLengthBetween(0, 5)); + exclusionExpression.add(new ResolvedExpression(randomAlphaOfLengthBetween(0, 5))); } - exclusionExpression.add(exclusion); + exclusionExpression.add(new ResolvedExpression(exclusion)); for (int i = 0; i < randomIntBetween(0, 3); i++) { - exclusionExpression.add(randomAlphaOfLengthBetween(0, 5)); + exclusionExpression.add(new ResolvedExpression(randomAlphaOfLengthBetween(0, 5))); } ExpressionList expressionList = new ExpressionList(getContextWithOptions(getExpandWildcardsIndicesOptions()), exclusionExpression); if (randomBoolean()) { @@ -193,28 +201,28 @@ public void testSingleExclusionExpression() { if (i == wildcardPos) { assertThat(expression.isWildcard(), is(true)); assertThat(expression.isExclusion(), is(false)); - assertThat(expression.get(), is(exclusionExpression.get(i++))); + assertThat(expression.get(), is(exclusionExpression.get(i++).resource())); } else if (i == exclusionPos) { assertThat(expression.isExclusion(), is(true)); - assertThat(expression.isWildcard(), 
is(exclusionExpression.get(i).contains("*"))); - assertThat(expression.get(), is(exclusionExpression.get(i++).substring(1))); + assertThat(expression.isWildcard(), is(exclusionExpression.get(i).resource().contains("*"))); + assertThat(expression.get(), is(exclusionExpression.get(i++).resource().substring(1))); } else { assertThat(expression.isWildcard(), is(false)); assertThat(expression.isExclusion(), is(false)); - assertThat(expression.get(), is(exclusionExpression.get(i++))); + assertThat(expression.get(), is(exclusionExpression.get(i++).resource())); } } } public void testExclusionsExpression() { - for (Tuple, List> exclusionExpression : List.of( - new Tuple<>(List.of("-a", "*", "-a"), List.of(false, false, true)), - new Tuple<>(List.of("-b*", "c", "-a"), List.of(false, false, true)), - new Tuple<>(List.of("*d", "-", "*b"), List.of(false, true, false)), - new Tuple<>(List.of("-", "--", "-*", "", "-*"), List.of(false, false, false, false, true)), - new Tuple<>(List.of("*-", "-*", "a", "-b"), List.of(false, true, false, true)), - new Tuple<>(List.of("a", "-b", "-*", "-b", "*", "-b"), List.of(false, false, false, true, false, true)), - new Tuple<>(List.of("-a", "*d", "-a", "-*b", "-b", "--"), List.of(false, false, true, true, true, true)) + for (Tuple, List> exclusionExpression : List.of( + new Tuple<>(resolvedExpressions("-a", "*", "-a"), List.of(false, false, true)), + new Tuple<>(resolvedExpressions("-b*", "c", "-a"), List.of(false, false, true)), + new Tuple<>(resolvedExpressions("*d", "-", "*b"), List.of(false, true, false)), + new Tuple<>(resolvedExpressions("-", "--", "-*", "", "-*"), List.of(false, false, false, false, true)), + new Tuple<>(resolvedExpressions("*-", "-*", "a", "-b"), List.of(false, true, false, true)), + new Tuple<>(resolvedExpressions("a", "-b", "-*", "-b", "*", "-b"), List.of(false, false, false, true, false, true)), + new Tuple<>(resolvedExpressions("-a", "*d", "-a", "-*b", "-b", "--"), List.of(false, false, true, true, true, true)) )) 
{ ExpressionList expressionList = new ExpressionList( getContextWithOptions(getExpandWildcardsIndicesOptions()), @@ -227,11 +235,11 @@ public void testExclusionsExpression() { for (Expression expression : expressionList) { boolean isExclusion = exclusionExpression.v2().get(i); assertThat(expression.isExclusion(), is(isExclusion)); - assertThat(expression.isWildcard(), is(exclusionExpression.v1().get(i).contains("*"))); + assertThat(expression.isWildcard(), is(exclusionExpression.v1().get(i).resource().contains("*"))); if (isExclusion) { - assertThat(expression.get(), is(exclusionExpression.v1().get(i++).substring(1))); + assertThat(expression.get(), is(exclusionExpression.v1().get(i++).resource().substring(1))); } else { - assertThat(expression.get(), is(exclusionExpression.v1().get(i++))); + assertThat(expression.get(), is(exclusionExpression.v1().get(i++).resource())); } } } @@ -306,4 +314,8 @@ private Context getContextWithOptions(IndicesOptions indicesOptions) { when(context.getOptions()).thenReturn(indicesOptions); return context; } + + private List resolvedExpressions(String... 
expressions) { + return Arrays.stream(expressions).map(ResolvedExpression::new).toList(); + } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index 5f55d203e00e4..bddbe259e0ef3 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata.State; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -1580,16 +1581,27 @@ public void testResolveExpressions() { .put(indexBuilder("test-1").state(State.OPEN).putAlias(AliasMetadata.builder("alias-1"))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - assertEquals(new HashSet<>(Arrays.asList("alias-0", "alias-1")), indexNameExpressionResolver.resolveExpressions(state, "alias-*")); assertEquals( - new HashSet<>(Arrays.asList("test-0", "alias-0", "alias-1")), + Set.of(new ResolvedExpression("alias-0"), new ResolvedExpression("alias-1")), + indexNameExpressionResolver.resolveExpressions(state, "alias-*") + ); + assertEquals( + Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("alias-0"), new ResolvedExpression("alias-1")), indexNameExpressionResolver.resolveExpressions(state, "test-0", "alias-*") ); assertEquals( - new HashSet<>(Arrays.asList("test-0", "test-1", "alias-0", "alias-1")), + Set.of( + new ResolvedExpression("test-0"), + new ResolvedExpression("test-1"), + new ResolvedExpression("alias-0"), + new 
ResolvedExpression("alias-1") + ), indexNameExpressionResolver.resolveExpressions(state, "test-*", "alias-*") ); - assertEquals(new HashSet<>(Arrays.asList("test-1", "alias-1")), indexNameExpressionResolver.resolveExpressions(state, "*-1")); + assertEquals( + Set.of(new ResolvedExpression("test-1"), new ResolvedExpression("alias-1")), + indexNameExpressionResolver.resolveExpressions(state, "*-1") + ); } public void testFilteringAliases() { @@ -1598,16 +1610,25 @@ public void testFilteringAliases() { .put(indexBuilder("test-1").state(State.OPEN).putAlias(AliasMetadata.builder("alias-1"))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - Set resolvedExpressions = new HashSet<>(Arrays.asList("alias-0", "alias-1")); + Set resolvedExpressions = Set.of(new ResolvedExpression("alias-0"), new ResolvedExpression("alias-1")); String[] strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions); assertArrayEquals(new String[] { "alias-0" }, strings); // concrete index supersedes filtering alias - resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "alias-0", "alias-1")); + resolvedExpressions = Set.of( + new ResolvedExpression("test-0"), + new ResolvedExpression("alias-0"), + new ResolvedExpression("alias-1") + ); strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions); assertNull(strings); - resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "test-1", "alias-0", "alias-1")); + resolvedExpressions = Set.of( + new ResolvedExpression("test-0"), + new ResolvedExpression("test-1"), + new ResolvedExpression("alias-0"), + new ResolvedExpression("alias-1") + ); strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions); assertNull(strings); } @@ -1621,7 +1642,7 @@ public void testIndexAliases() { .putAlias(AliasMetadata.builder("test-alias-non-filtering")) ); ClusterState state = ClusterState.builder(new 
ClusterName("_name")).metadata(mdBuilder).build(); - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "test-*"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "test-*"); String[] strings = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, true, resolvedExpressions); Arrays.sort(strings); @@ -1656,28 +1677,28 @@ public void testIndexAliasesDataStreamAliases() { ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); { // Only resolve aliases with with that refer to dataStreamName1 - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); String index = backingIndex1.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases(state, index, x -> true, x -> true, true, resolvedExpressions); assertThat(result, arrayContainingInAnyOrder("logs_foo", "logs", "logs_bar")); } { // Only resolve aliases with with that refer to dataStreamName2 - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); String index = backingIndex2.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases(state, index, x -> true, x -> true, true, resolvedExpressions); assertThat(result, arrayContainingInAnyOrder("logs_baz", "logs_baz2")); } { // Null is returned, because skipping identity check and resolvedExpressions contains the backing index name - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); String index = backingIndex2.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases(state, index, x -> true, x -> true, false, 
resolvedExpressions); assertThat(result, nullValue()); } { // Null is returned, because the wildcard expands to a list of aliases containing an unfiltered alias for dataStreamName1 - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); String index = backingIndex1.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases( state, @@ -1691,7 +1712,7 @@ public void testIndexAliasesDataStreamAliases() { } { // Null is returned, because an unfiltered alias is targeting the same data stream - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "logs_bar", "logs"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "logs_bar", "logs"); String index = backingIndex1.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases( state, @@ -1705,7 +1726,7 @@ public void testIndexAliasesDataStreamAliases() { } { // The filtered alias is returned because although we target the data stream name, skipIdentity is true - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, dataStreamName1, "logs"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, dataStreamName1, "logs"); String index = backingIndex1.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases( state, @@ -1719,7 +1740,7 @@ public void testIndexAliasesDataStreamAliases() { } { // Null is returned because we target the data stream name and skipIdentity is false - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, dataStreamName1, "logs"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, dataStreamName1, "logs"); String index = backingIndex1.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases( state, @@ -1742,13 +1763,13 @@ public 
void testIndexAliasesSkipIdentity() { ); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - Set resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "test-alias")); + Set resolvedExpressions = Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("test-alias")); String[] aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, false, resolvedExpressions); assertNull(aliases); aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, true, resolvedExpressions); assertArrayEquals(new String[] { "test-alias" }, aliases); - resolvedExpressions = Collections.singleton("other-alias"); + resolvedExpressions = Collections.singleton(new ResolvedExpression("other-alias")); aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, false, resolvedExpressions); assertArrayEquals(new String[] { "other-alias" }, aliases); aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, true, resolvedExpressions); @@ -1769,7 +1790,7 @@ public void testConcreteWriteIndexSuccessful() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-alias")) + Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("test-alias")) ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1851,7 +1872,7 @@ public void testConcreteWriteIndexWithWildcardExpansion() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias")) + Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("test-1"), new ResolvedExpression("test-alias")) ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1889,7 +1910,7 @@ public void testConcreteWriteIndexWithNoWriteIndexWithSingleIndex() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-alias")) + Set.of(new 
ResolvedExpression("test-0"), new ResolvedExpression("test-alias")) ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1925,7 +1946,7 @@ public void testConcreteWriteIndexWithNoWriteIndexWithMultipleIndices() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias")) + Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("test-1"), new ResolvedExpression("test-alias")) ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1966,7 +1987,7 @@ public void testAliasResolutionNotAllowingMultipleIndices() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias")) + Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("test-1"), new ResolvedExpression("test-alias")) ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java index 982394ca31b1c..25ed5fb2bdab2 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata.State; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.indices.SystemIndices.SystemIndexAccessLevel; @@ -20,13 +21,13 @@ import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.List; +import java.util.Set; import java.util.function.Predicate; +import 
java.util.stream.Collectors; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createBackingIndex; import static org.elasticsearch.common.util.set.Sets.newHashSet; -import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -50,50 +51,52 @@ public void testConvertWildcardsJustIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testXXX"))), - equalTo(newHashSet("testXXX")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testXXX"))), + equalTo(resolvedExpressionsSet("testXXX")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "testYYY"))), - equalTo(newHashSet("testXXX", "testYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testXXX", "testYYY"))), + equalTo(resolvedExpressionsSet("testXXX", "testYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "ku*"))), - equalTo(newHashSet("testXXX", "kuku")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testXXX", "ku*"))), + equalTo(resolvedExpressionsSet("testXXX", "kuku")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("test*"))), + equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) ); assertThat( - 
newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), - equalTo(newHashSet("testXXX", "testXYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testX*"))), + equalTo(resolvedExpressionsSet("testXXX", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testX*", "kuku"))), - equalTo(newHashSet("testXXX", "testXYY", "kuku")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testX*", "kuku"))), + equalTo(resolvedExpressionsSet("testXXX", "testXYY", "kuku")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY", "kuku")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("*"))), + equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY", "kuku")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("*", "-kuku"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("*", "-kuku"))), + equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) ); assertThat( newHashSet( IndexNameExpressionResolver.WildcardExpressionResolver.resolve( context, - Arrays.asList("testX*", "-doe", "-testXXX", "-testYYY") + resolvedExpressions("testX*", "-doe", "-testXXX", "-testYYY") ) ), - equalTo(newHashSet("testXYY")) + equalTo(resolvedExpressionsSet("testXYY")) ); if (indicesOptions == IndicesOptions.lenientExpandOpen()) { assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "-testXXX"))), - 
equalTo(newHashSet("testXXX", "-testXXX")) + newHashSet( + IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testXXX", "-testXXX")) + ), + equalTo(resolvedExpressionsSet("testXXX", "-testXXX")) ); } else if (indicesOptions == IndicesOptions.strictExpandOpen()) { IndexNotFoundException infe = expectThrows( @@ -103,8 +106,8 @@ public void testConvertWildcardsJustIndicesTests() { assertEquals("-testXXX", infe.getIndex().getName()); } assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "-testX*"))), - equalTo(newHashSet("testXXX")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testXXX", "-testX*"))), + equalTo(resolvedExpressionsSet("testXXX")) ); } @@ -122,24 +125,24 @@ public void testConvertWildcardsTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testYY*", "alias*"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testYY*", "alias*"))), + equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("-kuku"))), - equalTo(newHashSet("-kuku")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("-kuku"))), + equalTo(resolvedExpressionsSet("-kuku")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("test*", "-testYYY"))), - equalTo(newHashSet("testXXX", "testXYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("test*", "-testYYY"))), + equalTo(resolvedExpressionsSet("testXXX", 
"testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testX*", "testYYY"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testX*", "testYYY"))), + equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testYYY", "testX*"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testYYY", "testX*"))), + equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) ); } @@ -159,8 +162,8 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), - equalTo(newHashSet("testXXX", "testXXY", "testXYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testX*"))), + equalTo(resolvedExpressionsSet("testXXX", "testXXY", "testXYY")) ); context = new IndexNameExpressionResolver.Context( state, @@ -168,8 +171,8 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), - equalTo(newHashSet("testXYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testX*"))), + equalTo(resolvedExpressionsSet("testXYY")) ); context = new IndexNameExpressionResolver.Context( state, @@ -177,8 +180,8 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - 
newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), - equalTo(newHashSet("testXXX", "testXXY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testX*"))), + equalTo(resolvedExpressionsSet("testXXX", "testXXY")) ); context = new IndexNameExpressionResolver.Context( state, @@ -217,28 +220,27 @@ public void testMultipleWildcards() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*X*"))), - equalTo(newHashSet("testXXX", "testXXY", "testXYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("test*X*"))), + equalTo(resolvedExpressionsSet("testXXX", "testXXY", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*X*Y"))), - equalTo(newHashSet("testXXY", "testXYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("test*X*Y"))), + equalTo(resolvedExpressionsSet("testXXY", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("kuku*Y*"))), - equalTo(newHashSet("kukuYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("kuku*Y*"))), + equalTo(resolvedExpressionsSet("kukuYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*Y*"))), - equalTo(newHashSet("testXXY", "testXYY", "testYYY", "kukuYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("*Y*"))), + equalTo(resolvedExpressionsSet("testXXY", "testXYY", "testYYY", "kukuYYY")) ); assertThat( - 
newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*Y*X"))) - .size(), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("test*Y*X"))).size(), equalTo(0) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*Y*X"))).size(), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("*Y*X"))).size(), equalTo(0) ); } @@ -257,11 +259,11 @@ public void testAll() { ); assertThat( newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) + equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) ); assertThat( newHashSet(IndexNameExpressionResolver.resolveExpressions(context, "_all")), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) + equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) ); IndicesOptions noExpandOptions = IndicesOptions.fromOptions( randomBoolean(), @@ -298,7 +300,7 @@ public void testAllAliases() { IndicesOptions.lenientExpandOpen(), // don't include hidden SystemIndexAccessLevel.NONE ); - assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(newHashSet())); + assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(Set.of())); } { @@ -319,7 +321,7 @@ public void testAllAliases() { ); assertThat( newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), - equalTo(newHashSet("index-visible-alias")) + equalTo(resolvedExpressionsSet("index-visible-alias")) ); } } @@ -362,7 +364,7 @@ public void testAllDataStreams() { assertThat( newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), - 
equalTo(newHashSet(DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis))) + equalTo(resolvedExpressionsSet(DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis))) ); } @@ -385,7 +387,7 @@ public void testAllDataStreams() { NONE ); - assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(newHashSet())); + assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(Set.of())); } } @@ -506,16 +508,16 @@ public void testResolveAliases() { ); { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAndAliasesContext, - Collections.singletonList("foo_a*") + resolvedExpressions("foo_a*") ); - assertThat(indices, containsInAnyOrder("foo_index", "bar_index")); + assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_index", "bar_index"))); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( skipAliasesLenientContext, - Collections.singletonList("foo_a*") + resolvedExpressions("foo_a*") ); assertEquals(0, indices.size()); } @@ -524,45 +526,45 @@ public void testResolveAliases() { IndexNotFoundException.class, () -> IndexNameExpressionResolver.WildcardExpressionResolver.resolve( skipAliasesStrictContext, - Collections.singletonList("foo_a*") + resolvedExpressions("foo_a*") ) ); assertEquals("foo_a*", infe.getIndex().getName()); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAndAliasesContext, - Collections.singletonList("foo*") + resolvedExpressions("foo*") ); - assertThat(indices, containsInAnyOrder("foo_foo", "foo_index", "bar_index")); + 
assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_foo", "foo_index", "bar_index"))); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( skipAliasesLenientContext, - Collections.singletonList("foo*") + resolvedExpressions("foo*") ); - assertThat(indices, containsInAnyOrder("foo_foo", "foo_index")); + assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_foo", "foo_index"))); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( skipAliasesStrictContext, - Collections.singletonList("foo*") + resolvedExpressions("foo*") ); - assertThat(indices, containsInAnyOrder("foo_foo", "foo_index")); + assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_foo", "foo_index"))); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAndAliasesContext, - Collections.singletonList("foo_alias") + resolvedExpressions("foo_alias") ); - assertThat(indices, containsInAnyOrder("foo_alias")); + assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_alias"))); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( skipAliasesLenientContext, - Collections.singletonList("foo_alias") + resolvedExpressions("foo_alias") ); - assertThat(indices, containsInAnyOrder("foo_alias")); + assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_alias"))); } { IllegalArgumentException iae = expectThrows( @@ -581,11 +583,11 @@ public void testResolveAliases() { SystemIndexAccessLevel.NONE ); { - Collection indices = 
IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( noExpandNoAliasesContext, - List.of("foo_alias") + resolvedExpressions("foo_alias") ); - assertThat(indices, containsInAnyOrder("foo_alias")); + assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_alias"))); } IndicesOptions strictNoExpandNoAliasesIndicesOptions = IndicesOptions.fromOptions( false, @@ -654,18 +656,18 @@ public void testResolveDataStreams() { ); // data streams are not included but expression matches the data stream - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAndAliasesContext, - Collections.singletonList("foo_*") + resolvedExpressions("foo_*") ); - assertThat(indices, containsInAnyOrder("foo_index", "foo_foo", "bar_index")); + assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_index", "foo_foo", "bar_index"))); // data streams are not included and expression doesn't match the data steram indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAndAliasesContext, - Collections.singletonList("bar_*") + resolvedExpressions("bar_*") ); - assertThat(indices, containsInAnyOrder("bar_bar", "bar_index")); + assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("bar_bar", "bar_index"))); } { @@ -691,35 +693,39 @@ public void testResolveDataStreams() { ); // data stream's corresponding backing indices are resolved - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAliasesAndDataStreamsContext, - Collections.singletonList("foo_*") + resolvedExpressions("foo_*") ); assertThat( - indices, - containsInAnyOrder( - "foo_index", - "bar_index", - "foo_foo", - 
DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), - DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) + newHashSet(indices), + equalTo( + resolvedExpressionsSet( + "foo_index", + "bar_index", + "foo_foo", + DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), + DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) + ) ) ); // include all wildcard adds the data stream's backing indices indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAliasesAndDataStreamsContext, - Collections.singletonList("*") + resolvedExpressions("*") ); assertThat( - indices, - containsInAnyOrder( - "foo_index", - "bar_index", - "foo_foo", - "bar_bar", - DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), - DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) + newHashSet(indices), + equalTo( + resolvedExpressionsSet( + "foo_index", + "bar_index", + "foo_foo", + "bar_bar", + DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), + DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) + ) ) ); } @@ -748,35 +754,39 @@ public void testResolveDataStreams() { ); // data stream's corresponding backing indices are resolved - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAliasesDataStreamsAndHiddenIndices, - Collections.singletonList("foo_*") + resolvedExpressions("foo_*") ); assertThat( - indices, - containsInAnyOrder( - "foo_index", - "bar_index", - "foo_foo", - DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), - DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) + newHashSet(indices), + equalTo( + resolvedExpressionsSet( + "foo_index", + "bar_index", + "foo_foo", + DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), + DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) 
+ ) ) ); // include all wildcard adds the data stream's backing indices indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAliasesDataStreamsAndHiddenIndices, - Collections.singletonList("*") + resolvedExpressions("*") ); assertThat( - indices, - containsInAnyOrder( - "foo_index", - "bar_index", - "foo_foo", - "bar_bar", - DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), - DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) + newHashSet(indices), + equalTo( + resolvedExpressionsSet( + "foo_index", + "bar_index", + "foo_foo", + "bar_bar", + DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), + DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) + ) ) ); } @@ -808,16 +818,28 @@ public void testMatchesConcreteIndicesWildcardAndAliases() { SystemIndexAccessLevel.NONE ); - Collection matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("*")); - assertThat(matches, containsInAnyOrder("bar_bar", "foo_foo", "foo_index", "bar_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(onlyIndicesContext, List.of("*")); - assertThat(matches, containsInAnyOrder("bar_bar", "foo_foo", "foo_index", "bar_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("foo*")); - assertThat(matches, containsInAnyOrder("foo_foo", "foo_index", "bar_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(onlyIndicesContext, List.of("foo*")); - assertThat(matches, containsInAnyOrder("foo_foo", "foo_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("foo_alias")); - assertThat(matches, containsInAnyOrder("foo_alias")); + Collection matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + indicesAndAliasesContext, + List.of(new ResolvedExpression("*")) 
+ ); + assertThat(newHashSet(matches), equalTo(resolvedExpressionsSet("bar_bar", "foo_foo", "foo_index", "bar_index"))); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(onlyIndicesContext, List.of(new ResolvedExpression("*"))); + assertThat(newHashSet(matches), equalTo(resolvedExpressionsSet("bar_bar", "foo_foo", "foo_index", "bar_index"))); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + indicesAndAliasesContext, + List.of(new ResolvedExpression("foo*")) + ); + assertThat(newHashSet(matches), equalTo(resolvedExpressionsSet("foo_foo", "foo_index", "bar_index"))); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + onlyIndicesContext, + List.of(new ResolvedExpression("foo*")) + ); + assertThat(newHashSet(matches), equalTo(resolvedExpressionsSet("foo_foo", "foo_index"))); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + indicesAndAliasesContext, + List.of(new ResolvedExpression("foo_alias")) + ); + assertThat(newHashSet(matches), equalTo(resolvedExpressionsSet("foo_alias"))); IllegalArgumentException iae = expectThrows( IllegalArgumentException.class, () -> IndexNameExpressionResolver.resolveExpressions(onlyIndicesContext, "foo_alias") @@ -840,8 +862,19 @@ private static IndexMetadata.Builder indexBuilder(String index) { private static void assertWildcardResolvesToEmpty(IndexNameExpressionResolver.Context context, String wildcardExpression) { IndexNotFoundException infe = expectThrows( IndexNotFoundException.class, - () -> IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, List.of(wildcardExpression)) + () -> IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + context, + List.of(new ResolvedExpression(wildcardExpression)) + ) ); assertEquals(wildcardExpression, infe.getIndex().getName()); } + + private List resolvedExpressions(String... 
expressions) { + return Arrays.stream(expressions).map(ResolvedExpression::new).toList(); + } + + private Set resolvedExpressionsSet(String... expressions) { + return Arrays.stream(expressions).map(ResolvedExpression::new).collect(Collectors.toSet()); + } } diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java index 36f7355a541c1..17975b7d18dd8 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.metadata.DataStreamTestHelper; import org.elasticsearch.cluster.metadata.IndexGraveyard; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; @@ -77,6 +78,7 @@ import java.util.Optional; import java.util.Set; import java.util.concurrent.CountDownLatch; +import java.util.stream.Collectors; import java.util.stream.Stream; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; @@ -677,27 +679,27 @@ public void testBuildAliasFilter() { ); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); { - AliasFilter result = indicesService.buildAliasFilter(state, "test-0", Set.of("test-alias-0")); + AliasFilter result = indicesService.buildAliasFilter(state, "test-0", resolvedExpressions("test-alias-0")); assertThat(result.getAliases(), arrayContainingInAnyOrder("test-alias-0")); assertThat(result.getQueryBuilder(), equalTo(QueryBuilders.termQuery("foo", "bar"))); } { - AliasFilter result = indicesService.buildAliasFilter(state, "test-1", Set.of("test-alias-0")); + AliasFilter result = 
indicesService.buildAliasFilter(state, "test-1", resolvedExpressions("test-alias-0")); assertThat(result.getAliases(), arrayContainingInAnyOrder("test-alias-0")); assertThat(result.getQueryBuilder(), equalTo(QueryBuilders.termQuery("foo", "bar"))); } { - AliasFilter result = indicesService.buildAliasFilter(state, "test-0", Set.of("test-alias-1")); + AliasFilter result = indicesService.buildAliasFilter(state, "test-0", resolvedExpressions("test-alias-1")); assertThat(result.getAliases(), arrayContainingInAnyOrder("test-alias-1")); assertThat(result.getQueryBuilder(), equalTo(QueryBuilders.termQuery("foo", "baz"))); } { - AliasFilter result = indicesService.buildAliasFilter(state, "test-1", Set.of("test-alias-1")); + AliasFilter result = indicesService.buildAliasFilter(state, "test-1", resolvedExpressions("test-alias-1")); assertThat(result.getAliases(), arrayContainingInAnyOrder("test-alias-1")); assertThat(result.getQueryBuilder(), equalTo(QueryBuilders.termQuery("foo", "bax"))); } { - AliasFilter result = indicesService.buildAliasFilter(state, "test-0", Set.of("test-alias-0", "test-alias-1")); + AliasFilter result = indicesService.buildAliasFilter(state, "test-0", resolvedExpressions("test-alias-0", "test-alias-1")); assertThat(result.getAliases(), arrayContainingInAnyOrder("test-alias-0", "test-alias-1")); BoolQueryBuilder filter = (BoolQueryBuilder) result.getQueryBuilder(); assertThat(filter.filter(), empty()); @@ -706,7 +708,7 @@ public void testBuildAliasFilter() { assertThat(filter.should(), containsInAnyOrder(QueryBuilders.termQuery("foo", "baz"), QueryBuilders.termQuery("foo", "bar"))); } { - AliasFilter result = indicesService.buildAliasFilter(state, "test-1", Set.of("test-alias-0", "test-alias-1")); + AliasFilter result = indicesService.buildAliasFilter(state, "test-1", resolvedExpressions("test-alias-0", "test-alias-1")); assertThat(result.getAliases(), arrayContainingInAnyOrder("test-alias-0", "test-alias-1")); BoolQueryBuilder filter = 
(BoolQueryBuilder) result.getQueryBuilder(); assertThat(filter.filter(), empty()); @@ -718,7 +720,7 @@ public void testBuildAliasFilter() { AliasFilter result = indicesService.buildAliasFilter( state, "test-0", - Set.of("test-alias-0", "test-alias-1", "test-alias-non-filtering") + resolvedExpressions("test-alias-0", "test-alias-1", "test-alias-non-filtering") ); assertThat(result.getAliases(), emptyArray()); assertThat(result.getQueryBuilder(), nullValue()); @@ -727,7 +729,7 @@ public void testBuildAliasFilter() { AliasFilter result = indicesService.buildAliasFilter( state, "test-1", - Set.of("test-alias-0", "test-alias-1", "test-alias-non-filtering") + resolvedExpressions("test-alias-0", "test-alias-1", "test-alias-non-filtering") ); assertThat(result.getAliases(), emptyArray()); assertThat(result.getQueryBuilder(), nullValue()); @@ -754,19 +756,19 @@ public void testBuildAliasFilterDataStreamAliases() { ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); { String index = backingIndex1.getIndex().getName(); - AliasFilter result = indicesService.buildAliasFilter(state, index, Set.of("logs_foo")); + AliasFilter result = indicesService.buildAliasFilter(state, index, resolvedExpressions("logs_foo")); assertThat(result.getAliases(), arrayContainingInAnyOrder("logs_foo")); assertThat(result.getQueryBuilder(), equalTo(QueryBuilders.termQuery("foo", "bar"))); } { String index = backingIndex2.getIndex().getName(); - AliasFilter result = indicesService.buildAliasFilter(state, index, Set.of("logs_foo")); + AliasFilter result = indicesService.buildAliasFilter(state, index, resolvedExpressions("logs_foo")); assertThat(result.getAliases(), arrayContainingInAnyOrder("logs_foo")); assertThat(result.getQueryBuilder(), equalTo(QueryBuilders.termQuery("foo", "baz"))); } { String index = backingIndex1.getIndex().getName(); - AliasFilter result = indicesService.buildAliasFilter(state, index, Set.of("logs_foo", "logs")); + AliasFilter result 
= indicesService.buildAliasFilter(state, index, resolvedExpressions("logs_foo", "logs")); assertThat(result.getAliases(), arrayContainingInAnyOrder("logs_foo", "logs")); BoolQueryBuilder filter = (BoolQueryBuilder) result.getQueryBuilder(); assertThat(filter.filter(), empty()); @@ -776,7 +778,7 @@ public void testBuildAliasFilterDataStreamAliases() { } { String index = backingIndex2.getIndex().getName(); - AliasFilter result = indicesService.buildAliasFilter(state, index, Set.of("logs_foo", "logs")); + AliasFilter result = indicesService.buildAliasFilter(state, index, resolvedExpressions("logs_foo", "logs")); assertThat(result.getAliases(), arrayContainingInAnyOrder("logs_foo", "logs")); BoolQueryBuilder filter = (BoolQueryBuilder) result.getQueryBuilder(); assertThat(filter.filter(), empty()); @@ -787,13 +789,13 @@ public void testBuildAliasFilterDataStreamAliases() { { // querying an unfiltered and a filtered alias for the same data stream should drop the filters String index = backingIndex1.getIndex().getName(); - AliasFilter result = indicesService.buildAliasFilter(state, index, Set.of("logs_foo", "logs", "logs_bar")); + AliasFilter result = indicesService.buildAliasFilter(state, index, resolvedExpressions("logs_foo", "logs", "logs_bar")); assertThat(result, is(AliasFilter.EMPTY)); } { // similarly, querying the data stream name and a filtered alias should drop the filter String index = backingIndex1.getIndex().getName(); - AliasFilter result = indicesService.buildAliasFilter(state, index, Set.of("logs", dataStreamName1)); + AliasFilter result = indicesService.buildAliasFilter(state, index, resolvedExpressions("logs", dataStreamName1)); assertThat(result, is(AliasFilter.EMPTY)); } } @@ -846,4 +848,8 @@ public void testWithTempIndexServiceHandlesExistingIndex() throws Exception { return null; }); } + + private Set resolvedExpressions(String... 
expressions) { + return Arrays.stream(expressions).map(ResolvedExpression::new).collect(Collectors.toSet()); + } } From 8c3d19badc104e5de565548ee53aa6a7745faadf Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 14 Oct 2024 16:27:37 +0200 Subject: [PATCH 048/449] Update IndexSettingProvider#getAdditionalIndexSettings() signature (#114150) With logsdb another index mode is available, the isTimeSeries parameter is limiting. Instead, we should just push down the index mode from template to index settings provider. Follow up from #113451 Relates to #113583 --- .../DataStreamIndexSettingsProvider.java | 9 +-- .../DataStreamIndexSettingsProviderTests.java | 24 +++---- .../TransportSimulateIndexTemplateAction.java | 2 +- .../cluster/metadata/Metadata.java | 17 ----- .../metadata/MetadataCreateIndexService.java | 8 +-- .../MetadataIndexTemplateService.java | 2 +- .../cluster/routing/allocation/DataTier.java | 3 +- .../index/IndexSettingProvider.java | 21 +++--- ...sportSimulateIndexTemplateActionTests.java | 3 +- .../cluster/metadata/MetadataTests.java | 70 +++++++++++++++++-- .../index/IndexSettingProviderTests.java | 2 +- .../LogsdbIndexModeSettingsProvider.java | 2 +- .../SyntheticSourceIndexSettingsProvider.java | 12 ++-- .../LogsdbIndexModeSettingsProviderTests.java | 30 ++++---- ...heticSourceIndexSettingsProviderTests.java | 26 +++---- 15 files changed, 138 insertions(+), 93 deletions(-) diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java index a3d0347c3d192..d6a0fd86265e5 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java @@ -59,7 +59,7 @@ public class DataStreamIndexSettingsProvider implements IndexSettingProvider { 
public Settings getAdditionalIndexSettings( String indexName, @Nullable String dataStreamName, - boolean isTimeSeries, + @Nullable IndexMode templateIndexMode, Metadata metadata, Instant resolvedAt, Settings indexTemplateAndCreateRequestSettings, @@ -70,15 +70,16 @@ public Settings getAdditionalIndexSettings( // First backing index is created and then data stream is rolled over (in a single cluster state update). // So at this point we can't check index_mode==time_series, // so checking that index_mode==null|standard and templateIndexMode == TIME_SERIES + boolean isMigratingToTimeSeries = templateIndexMode == IndexMode.TIME_SERIES; boolean migrating = dataStream != null && (dataStream.getIndexMode() == null || dataStream.getIndexMode() == IndexMode.STANDARD) - && isTimeSeries; + && isMigratingToTimeSeries; IndexMode indexMode; if (migrating) { indexMode = IndexMode.TIME_SERIES; } else if (dataStream != null) { - indexMode = isTimeSeries ? dataStream.getIndexMode() : null; - } else if (isTimeSeries) { + indexMode = isMigratingToTimeSeries ? 
dataStream.getIndexMode() : null; + } else if (isMigratingToTimeSeries) { indexMode = IndexMode.TIME_SERIES; } else { indexMode = null; diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java index d8d4a9c03933a..015752724cb5d 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java @@ -78,7 +78,7 @@ public void testGetAdditionalIndexSettings() throws Exception { Settings result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 1), dataStreamName, - true, + IndexMode.TIME_SERIES, metadata, now, settings, @@ -123,7 +123,7 @@ public void testGetAdditionalIndexSettingsIndexRoutingPathAlreadyDefined() throw Settings result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 1), dataStreamName, - true, + IndexMode.TIME_SERIES, metadata, now, settings, @@ -193,7 +193,7 @@ public void testGetAdditionalIndexSettingsMappingsMerging() throws Exception { Settings result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 1), dataStreamName, - true, + IndexMode.TIME_SERIES, metadata, now, settings, @@ -218,7 +218,7 @@ public void testGetAdditionalIndexSettingsNoMappings() { Settings result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 1), dataStreamName, - true, + IndexMode.TIME_SERIES, metadata, now, settings, @@ -243,7 +243,7 @@ public void testGetAdditionalIndexSettingsLookAheadTime() throws Exception { Settings result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 1), dataStreamName, - true, + IndexMode.TIME_SERIES, 
metadata, now, settings, @@ -268,7 +268,7 @@ public void testGetAdditionalIndexSettingsLookBackTime() throws Exception { Settings result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 1), dataStreamName, - true, + IndexMode.TIME_SERIES, metadata, now, settings, @@ -299,7 +299,7 @@ public void testGetAdditionalIndexSettingsDataStreamAlreadyCreated() throws Exce var result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 1), dataStreamName, - true, + IndexMode.TIME_SERIES, metadata, now, settings, @@ -336,7 +336,7 @@ public void testGetAdditionalIndexSettingsDataStreamAlreadyCreatedTimeSettingsMi () -> provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 1), dataStreamName, - true, + IndexMode.TIME_SERIES, metadata, now, settings, @@ -362,7 +362,7 @@ public void testGetAdditionalIndexSettingsNonTsdbTemplate() { Settings result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 1), dataStreamName, - false, + null, metadata, Instant.ofEpochMilli(1L), settings, @@ -382,7 +382,7 @@ public void testGetAdditionalIndexSettingsMigrateToTsdb() { Settings result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 2), dataStreamName, - true, + IndexMode.TIME_SERIES, metadata, now, settings, @@ -415,7 +415,7 @@ public void testGetAdditionalIndexSettingsDowngradeFromTsdb() { Settings result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 2), dataStreamName, - false, + null, metadata, Instant.ofEpochMilli(1L), settings, @@ -694,7 +694,7 @@ private Settings generateTsdbSettings(String mapping, Instant now) throws IOExce var result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 1), dataStreamName, - true, + IndexMode.TIME_SERIES, metadata, now, settings, diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java index ec8eb4babfdac..5e3799cd14518 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java @@ -274,7 +274,7 @@ public static Template resolveTemplate( Settings result = provider.getAdditionalIndexSettings( indexName, template.getDataStreamTemplate() != null ? indexName : null, - template.getDataStreamTemplate() != null && metadata.isTimeSeriesTemplate(template), + metadata.retrieveIndexModeFromTemplate(template), simulatedState.getMetadata(), now, templateSettings, diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java index 0756080c16d00..b7777eca86179 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java @@ -1316,23 +1316,6 @@ public Map templatesV2() { .orElse(Collections.emptyMap()); } - // TODO: remove this method: - public boolean isTimeSeriesTemplate(ComposableIndexTemplate indexTemplate) { - var indexModeFromTemplate = retrieveIndexModeFromTemplate(indexTemplate); - if (indexModeFromTemplate == IndexMode.TIME_SERIES) { - // No need to check for the existence of index.routing_path here, because index.mode=time_series can't be specified without it. - // Setting validation takes care of this. - // Also no need to validate that the fields defined in index.routing_path are keyword fields with time_series_dimension - // attribute enabled. This is validated elsewhere (DocumentMapper). 
- return true; - } - - // in a followup change: check the existence of keyword fields of type keyword and time_series_dimension attribute enabled in - // the template. In this case the index.routing_path setting can be generated from the mapping. - - return false; - } - public IndexMode retrieveIndexModeFromTemplate(ComposableIndexTemplate indexTemplate) { if (indexTemplate.getDataStreamTemplate() == null) { return null; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java index 1cebbabde0769..7f2c076281735 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java @@ -982,10 +982,10 @@ static Settings aggregateIndexSettings( if (sourceMetadata == null) { final Settings templateAndRequestSettings = Settings.builder().put(combinedTemplateSettings).put(request.settings()).build(); - final boolean timeSeriesTemplate = Optional.of(request) + final IndexMode templateIndexMode = Optional.of(request) .map(CreateIndexClusterStateUpdateRequest::matchingTemplate) - .map(metadata::isTimeSeriesTemplate) - .orElse(false); + .map(metadata::retrieveIndexModeFromTemplate) + .orElse(null); // Loop through all the explicit index setting providers, adding them to the // additionalIndexSettings map @@ -995,7 +995,7 @@ static Settings aggregateIndexSettings( var newAdditionalSettings = provider.getAdditionalIndexSettings( request.index(), request.dataStreamName(), - timeSeriesTemplate, + templateIndexMode, currentState.getMetadata(), resolvedAt, templateAndRequestSettings, diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java index 57194ded9422e..ccdfaa5518aee 100644 --- 
a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java @@ -705,7 +705,7 @@ private void validateIndexTemplateV2(String name, ComposableIndexTemplate indexT var newAdditionalSettings = provider.getAdditionalIndexSettings( "validate-index-name", indexTemplate.getDataStreamTemplate() != null ? "validate-data-stream-name" : null, - indexTemplate.getDataStreamTemplate() != null && metadata.isTimeSeriesTemplate(indexTemplate), + metadata.retrieveIndexModeFromTemplate(indexTemplate), currentState.getMetadata(), now, combinedSettings, diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DataTier.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DataTier.java index 3c559f9421a38..4c2f0cbaaf729 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DataTier.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DataTier.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettingProvider; import org.elasticsearch.snapshots.SearchableSnapshotsSettings; @@ -226,7 +227,7 @@ public static class DefaultHotAllocationSettingProvider implements IndexSettingP public Settings getAdditionalIndexSettings( String indexName, @Nullable String dataStreamName, - boolean isTimeSeries, + IndexMode templateIndexMode, Metadata metadata, Instant resolvedAt, Settings indexTemplateAndCreateRequestSettings, diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java index aaa4c738c0e13..0180d2c8df119 100644 --- 
a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java @@ -30,20 +30,21 @@ public interface IndexSettingProvider { * Returns explicitly set default index {@link Settings} for the given index. This should not * return null. * - * @param indexName The name of the new index being created - * @param dataStreamName The name of the data stream if the index being created is part of a data stream otherwise - * null - * @param isTimeSeries Whether the template is in time series mode. - * @param metadata The current metadata instance that doesn't yet contain the index to be created - * @param resolvedAt The time the request to create this new index was accepted. - * @param indexTemplateAndCreateRequestSettings All the settings resolved from the template that matches and any settings - * defined on the create index request - * @param combinedTemplateMappings All the mappings resolved from the template that matches + * @param indexName The name of the new index being created + * @param dataStreamName The name of the data stream if the index being created is part of a data stream + * otherwise null + * @param templateIndexMode The index mode defined in template if template creates data streams, + * otherwise null is returned. + * @param metadata The current metadata instance that doesn't yet contain the index to be created + * @param resolvedAt The time the request to create this new index was accepted. 
+ * @param indexTemplateAndCreateRequestSettings All the settings resolved from the template that matches and any settings + * defined on the create index request + * @param combinedTemplateMappings All the mappings resolved from the template that matches */ Settings getAdditionalIndexSettings( String indexName, @Nullable String dataStreamName, - boolean isTimeSeries, + @Nullable IndexMode templateIndexMode, Metadata metadata, Instant resolvedAt, Settings indexTemplateAndCreateRequestSettings, diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateActionTests.java index 8f0ff82beab4b..74408b99e92ce 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateActionTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettingProvider; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.SystemIndices; @@ -69,7 +70,7 @@ public void testSettingsProviderIsOverridden() throws Exception { public Settings getAdditionalIndexSettings( String indexName, String dataStreamName, - boolean timeSeries, + IndexMode templateIndexMode, Metadata metadata, Instant resolvedAt, Settings allSettings, diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java index 00e21603ec8b4..ba1f9f01f49d2 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java 
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; @@ -2412,30 +2413,87 @@ public void testEnsureMetadataFieldCheckedForGlobalStateChanges() { assertThat(unclassifiedFields, empty()); } - public void testIsTimeSeriesTemplate() throws IOException { - var template = new Template(Settings.builder().put("index.mode", "time_series").build(), new CompressedXContent("{}"), null); + public void testRetrieveIndexModeFromTemplateTsdb() throws IOException { + // tsdb: + var tsdbTemplate = new Template(Settings.builder().put("index.mode", "time_series").build(), new CompressedXContent("{}"), null); // Settings in component template: { - var componentTemplate = new ComponentTemplate(template, null, null); + var componentTemplate = new ComponentTemplate(tsdbTemplate, null, null); var indexTemplate = ComposableIndexTemplate.builder() .indexPatterns(List.of("test-*")) .componentTemplates(List.of("component_template_1")) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) .build(); Metadata m = Metadata.builder().put("component_template_1", componentTemplate).put("index_template_1", indexTemplate).build(); - assertThat(m.isTimeSeriesTemplate(indexTemplate), is(true)); + assertThat(m.retrieveIndexModeFromTemplate(indexTemplate), is(IndexMode.TIME_SERIES)); } // Settings in composable index template: { var componentTemplate = new ComponentTemplate(new Template(null, null, null), null, null); var indexTemplate = ComposableIndexTemplate.builder() .indexPatterns(List.of("test-*")) - .template(template) + .template(tsdbTemplate) .componentTemplates(List.of("component_template_1")) .dataStreamTemplate(new 
ComposableIndexTemplate.DataStreamTemplate()) .build(); Metadata m = Metadata.builder().put("component_template_1", componentTemplate).put("index_template_1", indexTemplate).build(); - assertThat(m.isTimeSeriesTemplate(indexTemplate), is(true)); + assertThat(m.retrieveIndexModeFromTemplate(indexTemplate), is(IndexMode.TIME_SERIES)); + } + } + + public void testRetrieveIndexModeFromTemplateLogsdb() throws IOException { + // logsdb: + var logsdbTemplate = new Template(Settings.builder().put("index.mode", "logsdb").build(), new CompressedXContent("{}"), null); + // Settings in component template: + { + var componentTemplate = new ComponentTemplate(logsdbTemplate, null, null); + var indexTemplate = ComposableIndexTemplate.builder() + .indexPatterns(List.of("test-*")) + .componentTemplates(List.of("component_template_1")) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build(); + Metadata m = Metadata.builder().put("component_template_1", componentTemplate).put("index_template_1", indexTemplate).build(); + assertThat(m.retrieveIndexModeFromTemplate(indexTemplate), is(IndexMode.LOGSDB)); + } + // Settings in composable index template: + { + var componentTemplate = new ComponentTemplate(new Template(null, null, null), null, null); + var indexTemplate = ComposableIndexTemplate.builder() + .indexPatterns(List.of("test-*")) + .template(logsdbTemplate) + .componentTemplates(List.of("component_template_1")) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build(); + Metadata m = Metadata.builder().put("component_template_1", componentTemplate).put("index_template_1", indexTemplate).build(); + assertThat(m.retrieveIndexModeFromTemplate(indexTemplate), is(IndexMode.LOGSDB)); + } + } + + public void testRetrieveIndexModeFromTemplateEmpty() throws IOException { + // no index mode: + var emptyTemplate = new Template(Settings.EMPTY, new CompressedXContent("{}"), null); + // Settings in component template: + { + var 
componentTemplate = new ComponentTemplate(emptyTemplate, null, null); + var indexTemplate = ComposableIndexTemplate.builder() + .indexPatterns(List.of("test-*")) + .componentTemplates(List.of("component_template_1")) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build(); + Metadata m = Metadata.builder().put("component_template_1", componentTemplate).put("index_template_1", indexTemplate).build(); + assertThat(m.retrieveIndexModeFromTemplate(indexTemplate), nullValue()); + } + // Settings in composable index template: + { + var componentTemplate = new ComponentTemplate(new Template(null, null, null), null, null); + var indexTemplate = ComposableIndexTemplate.builder() + .indexPatterns(List.of("test-*")) + .template(emptyTemplate) + .componentTemplates(List.of("component_template_1")) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build(); + Metadata m = Metadata.builder().put("component_template_1", componentTemplate).put("index_template_1", indexTemplate).build(); + assertThat(m.retrieveIndexModeFromTemplate(indexTemplate), nullValue()); } } diff --git a/server/src/test/java/org/elasticsearch/index/IndexSettingProviderTests.java b/server/src/test/java/org/elasticsearch/index/IndexSettingProviderTests.java index 387340c0a6f50..628de0b047bf5 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexSettingProviderTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexSettingProviderTests.java @@ -79,7 +79,7 @@ static class TestIndexSettingsProvider implements IndexSettingProvider { public Settings getAdditionalIndexSettings( String indexName, String dataStreamName, - boolean isTimeSeries, + IndexMode templateIndexMode, Metadata metadata, Instant resolvedAt, Settings indexTemplateAndCreateRequestSettings, diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java 
b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java index b463426de0848..ee9d6129dcd54 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java @@ -42,7 +42,7 @@ void updateClusterIndexModeLogsdbEnabled(boolean isLogsdbEnabled) { public Settings getAdditionalIndexSettings( final String indexName, final String dataStreamName, - boolean isTimeSeries, + IndexMode templateIndexMode, final Metadata metadata, final Instant resolvedAt, final Settings settings, diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java index 6e139cc3ce9e6..a190ff72de8df 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java @@ -50,7 +50,7 @@ final class SyntheticSourceIndexSettingsProvider implements IndexSettingProvider public Settings getAdditionalIndexSettings( String indexName, String dataStreamName, - boolean isTimeSeries, + IndexMode templateIndexMode, Metadata metadata, Instant resolvedAt, Settings indexTemplateAndCreateRequestSettings, @@ -59,7 +59,7 @@ public Settings getAdditionalIndexSettings( // This index name is used when validating component and index templates, we should skip this check in that case. // (See MetadataIndexTemplateService#validateIndexTemplateV2(...) 
method) boolean isTemplateValidation = "validate-index-name".equals(indexName); - if (newIndexHasSyntheticSourceUsage(indexName, isTimeSeries, indexTemplateAndCreateRequestSettings, combinedTemplateMappings) + if (newIndexHasSyntheticSourceUsage(indexName, templateIndexMode, indexTemplateAndCreateRequestSettings, combinedTemplateMappings) && syntheticSourceLicenseService.fallbackToStoredSource(isTemplateValidation)) { LOGGER.debug("creation of index [{}] with synthetic source without it being allowed", indexName); // TODO: handle falling back to stored source @@ -69,7 +69,7 @@ public Settings getAdditionalIndexSettings( boolean newIndexHasSyntheticSourceUsage( String indexName, - boolean isTimeSeries, + IndexMode templateIndexMode, Settings indexTemplateAndCreateRequestSettings, List combinedTemplateMappings ) { @@ -80,7 +80,7 @@ boolean newIndexHasSyntheticSourceUsage( } try { - var tmpIndexMetadata = buildIndexMetadataForMapperService(indexName, isTimeSeries, indexTemplateAndCreateRequestSettings); + var tmpIndexMetadata = buildIndexMetadataForMapperService(indexName, templateIndexMode, indexTemplateAndCreateRequestSettings); try (var mapperService = mapperServiceFactory.apply(tmpIndexMetadata)) { // combinedTemplateMappings can be null when creating system indices // combinedTemplateMappings can be empty when creating a normal index that doesn't match any template and without mapping. 
@@ -101,7 +101,7 @@ boolean newIndexHasSyntheticSourceUsage( // Create a dummy IndexMetadata instance that can be used to create a MapperService in order to check whether synthetic source is used: private IndexMetadata buildIndexMetadataForMapperService( String indexName, - boolean isTimeSeries, + IndexMode templateIndexMode, Settings indexTemplateAndCreateRequestSettings ) { var tmpIndexMetadata = IndexMetadata.builder(indexName); @@ -119,7 +119,7 @@ private IndexMetadata buildIndexMetadataForMapperService( .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, shardReplicas) .put(IndexMetadata.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()); - if (isTimeSeries) { + if (templateIndexMode == IndexMode.TIME_SERIES) { finalResolvedSettings.put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES); // Avoid failing because index.routing_path is missing (in case fields are marked as dimension) finalResolvedSettings.putList(INDEX_ROUTING_PATH.getKey(), List.of("path")); diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java index 04e89af254f64..5f23dbdca1143 100644 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java @@ -51,7 +51,7 @@ public void testLogsDbDisabled() throws IOException { final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( null, "logs-apache-production", - false, + null, Metadata.EMPTY_METADATA, Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, @@ -69,7 +69,7 @@ public void testOnIndexCreation() throws IOException { final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( "logs-apache-production", null, - false, + null, Metadata.EMPTY_METADATA, 
Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, @@ -87,7 +87,7 @@ public void testOnExplicitStandardIndex() throws IOException { final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( null, "logs-apache-production", - false, + null, Metadata.EMPTY_METADATA, Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.builder().put(IndexSettings.MODE.getKey(), IndexMode.STANDARD.getName()).build(), @@ -105,7 +105,7 @@ public void testOnExplicitTimeSeriesIndex() throws IOException { final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( null, "logs-apache-production", - false, + null, Metadata.EMPTY_METADATA, Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.builder().put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES.getName()).build(), @@ -123,7 +123,7 @@ public void testNonLogsDataStream() throws IOException { final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( null, "logs", - false, + null, Metadata.EMPTY_METADATA, Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, @@ -141,7 +141,7 @@ public void testWithoutLogsComponentTemplate() throws IOException { final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( null, "logs-apache-production", - false, + null, buildMetadata(List.of("*"), List.of()), Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, @@ -159,7 +159,7 @@ public void testWithLogsComponentTemplate() throws IOException { final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( null, "logs-apache-production", - false, + null, buildMetadata(List.of("*"), List.of("logs@settings")), Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, @@ -177,7 +177,7 @@ public void testWithMultipleComponentTemplates() throws IOException { final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( null, "logs-apache-production", - false, + null, buildMetadata(List.of("*"), 
List.of("logs@settings", "logs@custom")), Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, @@ -195,7 +195,7 @@ public void testWithCustomComponentTemplatesOnly() throws IOException { final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( null, "logs-apache-production", - false, + null, buildMetadata(List.of("*"), List.of("logs@custom", "custom-component-template")), Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, @@ -213,7 +213,7 @@ public void testNonMatchingTemplateIndexPattern() throws IOException { final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( null, "logs-apache-production", - false, + null, buildMetadata(List.of("standard-apache-production"), List.of("logs@settings")), Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, @@ -231,7 +231,7 @@ public void testCaseSensitivity() throws IOException { final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( null, "LOGS-apache-production", - false, + null, Metadata.EMPTY_METADATA, Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, @@ -249,7 +249,7 @@ public void testMultipleHyphensInDataStreamName() throws IOException { final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( null, "logs-apache-production-eu", - false, + null, Metadata.EMPTY_METADATA, Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, @@ -267,7 +267,7 @@ public void testBeforeAndAFterSettingUpdate() throws IOException { final Settings beforeSettings = provider.getAdditionalIndexSettings( null, "logs-apache-production", - false, + null, buildMetadata(List.of("*"), List.of("logs@settings")), Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, @@ -281,7 +281,7 @@ public void testBeforeAndAFterSettingUpdate() throws IOException { final Settings afterSettings = provider.getAdditionalIndexSettings( null, "logs-apache-production", - false, + null, buildMetadata(List.of("*"), 
List.of("logs@settings")), Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, @@ -295,7 +295,7 @@ public void testBeforeAndAFterSettingUpdate() throws IOException { final Settings laterSettings = provider.getAdditionalIndexSettings( null, "logs-apache-production", - false, + null, buildMetadata(List.of("*"), List.of("logs@settings")), Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java index c97328da132bd..738487b9365a7 100644 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java @@ -49,7 +49,7 @@ public void testNewIndexHasSyntheticSourceUsage() throws IOException { } } """; - boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, false, settings, List.of(new CompressedXContent(mapping))); + boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of(new CompressedXContent(mapping))); assertTrue(result); } { @@ -82,7 +82,7 @@ public void testNewIndexHasSyntheticSourceUsage() throws IOException { } """; } - boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, false, settings, List.of(new CompressedXContent(mapping))); + boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of(new CompressedXContent(mapping))); assertFalse(result); } } @@ -104,7 +104,7 @@ public void testValidateIndexName() throws IOException { } """; Settings settings = Settings.EMPTY; - boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, false, settings, List.of(new CompressedXContent(mapping))); + boolean result = 
provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of(new CompressedXContent(mapping))); assertFalse(result); } @@ -124,22 +124,22 @@ public void testNewIndexHasSyntheticSourceUsageLogsdbIndex() throws IOException """; { Settings settings = Settings.builder().put("index.mode", "logsdb").build(); - boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, false, settings, List.of(new CompressedXContent(mapping))); + boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of(new CompressedXContent(mapping))); assertTrue(result); } { Settings settings = Settings.builder().put("index.mode", "logsdb").build(); - boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, false, settings, List.of()); + boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of()); assertTrue(result); } { - boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, false, Settings.EMPTY, List.of()); + boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, Settings.EMPTY, List.of()); assertFalse(result); } { boolean result = provider.newIndexHasSyntheticSourceUsage( indexName, - false, + null, Settings.EMPTY, List.of(new CompressedXContent(mapping)) ); @@ -164,22 +164,22 @@ public void testNewIndexHasSyntheticSourceUsageTimeSeries() throws IOException { """; { Settings settings = Settings.builder().put("index.mode", "time_series").put("index.routing_path", "my_field").build(); - boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, false, settings, List.of(new CompressedXContent(mapping))); + boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of(new CompressedXContent(mapping))); assertTrue(result); } { Settings settings = Settings.builder().put("index.mode", "time_series").put("index.routing_path", "my_field").build(); - boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, false, 
settings, List.of()); + boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of()); assertTrue(result); } { - boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, false, Settings.EMPTY, List.of()); + boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, Settings.EMPTY, List.of()); assertFalse(result); } { boolean result = provider.newIndexHasSyntheticSourceUsage( indexName, - false, + null, Settings.EMPTY, List.of(new CompressedXContent(mapping)) ); @@ -206,7 +206,7 @@ public void testNewIndexHasSyntheticSourceUsage_invalidSettings() throws IOExcep } } """; - boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, false, settings, List.of(new CompressedXContent(mapping))); + boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of(new CompressedXContent(mapping))); assertFalse(result); } { @@ -221,7 +221,7 @@ public void testNewIndexHasSyntheticSourceUsage_invalidSettings() throws IOExcep } } """; - boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, false, settings, List.of(new CompressedXContent(mapping))); + boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of(new CompressedXContent(mapping))); assertFalse(result); } } From b78cf6cd1e306363243087bf2ddf6ffb8b666ef8 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Mon, 14 Oct 2024 15:28:39 +0100 Subject: [PATCH 049/449] Fix Max Score Propagation in RankDocsQuery (#114716) Fix rank doc query when some segments have no ranked docs --- .../search/retriever/rankdoc/RankDocsQuery.java | 9 ++++----- .../rankdoc/RankDocsQueryBuilderTests.java | 17 +++++++++++++++++ 2 files changed, 21 insertions(+), 5 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java b/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java index fb5015a82dbdb..b78d9e40ba120 100644 --- 
a/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java @@ -107,11 +107,10 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio @Override public Scorer scorer(LeafReaderContext context) { - // Segment starts indicate how many docs are in the segment, - // upper equalling lower indicates no documents for this segment - if (segmentStarts[context.ord] == segmentStarts[context.ord + 1]) { - return null; - } + /** + * We return a scorer even if there are no ranked documents within the segment. + * This ensures the correct propagation of the maximum score. + */ return new Scorer(this) { final int lower = segmentStarts[context.ord]; final int upper = segmentStarts[context.ord + 1]; diff --git a/server/src/test/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQueryBuilderTests.java index ca05c57b7d733..b295b78453f93 100644 --- a/server/src/test/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQueryBuilderTests.java @@ -195,6 +195,23 @@ public void testRankDocsQueryEarlyTerminate() throws IOException { assertThat(col.totalHits.value, equalTo((long) topSize)); assertEqualTopDocs(col.scoreDocs, rankDocs); } + + { + // A single rank doc in the last segment + RankDoc[] singleRankDoc = new RankDoc[1]; + singleRankDoc[0] = rankDocs[rankDocs.length - 1]; + RankDocsQuery q = new RankDocsQuery( + reader, + singleRankDoc, + new Query[] { NumericDocValuesField.newSlowExactQuery("active", 1) }, + new String[1], + false + ); + var topDocsManager = new TopScoreDocCollectorManager(1, null, 0); + var col = searcher.search(q, topDocsManager); + assertThat(col.totalHits.value, lessThanOrEqualTo((long) (2 + rankDocs.length))); + 
assertEqualTopDocs(col.scoreDocs, singleRankDoc); + } } } } From e1451997b1bbbf3f22b4b552352e65dc62e6a947 Mon Sep 17 00:00:00 2001 From: Dan Rubinstein Date: Mon, 14 Oct 2024 10:51:50 -0400 Subject: [PATCH 050/449] [ML] Switch default chunking strategy to sentence (#114453) --- docs/changelog/114453.yaml | 5 +++++ .../xpack/inference/chunking/ChunkingSettingsBuilder.java | 2 +- .../inference/chunking/ChunkingSettingsBuilderTests.java | 2 +- 3 files changed, 7 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/114453.yaml diff --git a/docs/changelog/114453.yaml b/docs/changelog/114453.yaml new file mode 100644 index 0000000000000..0d5345ad9d2a6 --- /dev/null +++ b/docs/changelog/114453.yaml @@ -0,0 +1,5 @@ +pr: 114453 +summary: Switch default chunking strategy to sentence +area: Machine Learning +type: enhancement +issues: [] diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilder.java index 477c3ea6352f5..20520ca829297 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilder.java @@ -13,7 +13,7 @@ import java.util.Map; public class ChunkingSettingsBuilder { - public static final WordBoundaryChunkingSettings DEFAULT_SETTINGS = new WordBoundaryChunkingSettings(250, 100); + public static final SentenceBoundaryChunkingSettings DEFAULT_SETTINGS = new SentenceBoundaryChunkingSettings(250, 1); public static ChunkingSettings fromMap(Map settings) { if (settings.isEmpty()) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilderTests.java 
index 3c09984ac0162..5b9625073e6c6 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilderTests.java @@ -17,7 +17,7 @@ public class ChunkingSettingsBuilderTests extends ESTestCase { - public static final WordBoundaryChunkingSettings DEFAULT_SETTINGS = new WordBoundaryChunkingSettings(250, 100); + public static final SentenceBoundaryChunkingSettings DEFAULT_SETTINGS = new SentenceBoundaryChunkingSettings(250, 1); public void testEmptyChunkingSettingsMap() { ChunkingSettings chunkingSettings = ChunkingSettingsBuilder.fromMap(Collections.emptyMap()); From 0c02c2b66367713bc29cc38a983669c76865c426 Mon Sep 17 00:00:00 2001 From: Jan Kuipers <148754765+jan-elastic@users.noreply.github.com> Date: Mon, 14 Oct 2024 17:16:12 +0200 Subject: [PATCH 051/449] Don't close/recreate adaptive allocations metrics (#114721) --- .../AdaptiveAllocationsScalerService.java | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java index 193fa9e7e07f9..8f43044a465c2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java @@ -171,17 +171,6 @@ Collection observeDouble(Function Date: Mon, 14 Oct 2024 16:32:29 +0100 Subject: [PATCH 052/449] Simplify `XContent` output of epoch times (#114491) Today the overloads of `XContentBuilder#timeField` do two rather different things: one formats an object as a `String` representation of a time 
(where the object is either an unambiguous time object or else a `long`) and the other formats only a `long` as one or two fields depending on the `?human` flag. This is trappy in a number of ways: - `long` means an absolute (epoch) time, but sometimes folks will mistakenly use this for time intervals too. - `long` means only milliseconds, there is no facility to specify a different unit. - the dependence on the `?human` flag in exactly one of the overloads is kinda weird. This commit removes the confusion by dropping support for considering a `Long` as a valid representation of a time at all, and instead requiring callers to either convert it into a proper time object or else call a method that is explicitly expecting an epoch time in milliseconds. --- .../xcontent/XContentBuilder.java | 69 +++++++----- .../xcontent/XContentBuilderExtension.java | 5 + .../pipeline/DateDerivativeIT.java | 8 +- .../direct/DatabaseConfigurationMetadata.java | 2 +- .../GetDatabaseConfigurationAction.java | 6 +- .../aggregations/bucket/DateHistogramIT.java | 34 +++--- .../bucket/DateHistogramOffsetIT.java | 2 +- .../aggregations/bucket/DateRangeIT.java | 10 +- .../list/ListDanglingIndicesResponse.java | 2 +- .../stats/FieldUsageShardResponse.java | 2 +- .../bulk/FailureStoreDocumentConverter.java | 4 +- .../ExplainIndexDataStreamLifecycle.java | 8 +- .../cluster/ClusterSnapshotStats.java | 2 +- .../cluster/SnapshotDeletionsInProgress.java | 2 +- .../cluster/SnapshotsInProgress.java | 2 +- .../cluster/metadata/IndexGraveyard.java | 2 +- .../metadata/SingleNodeShutdownMetadata.java | 6 +- .../XContentElasticsearchExtension.java | 21 ++-- .../indices/recovery/RecoveryState.java | 4 +- .../elasticsearch/monitor/jvm/JvmInfo.java | 2 +- .../org/elasticsearch/tasks/TaskInfo.java | 2 +- .../common/xcontent/BaseXContentTestCase.java | 103 ++++++++++-------- .../builder/XContentBuilderTests.java | 2 +- .../org/elasticsearch/license/License.java | 6 +- .../protocol/xpack/XPackInfoResponse.java | 
2 +- .../ilm/IndexLifecycleExplainResponse.java | 26 ++++- .../xpack/core/ilm/PhaseExecutionInfo.java | 2 +- .../xpack/core/ml/action/FlushJobAction.java | 2 +- .../xpack/core/ml/annotations/Annotation.java | 24 +++- .../core/ml/calendars/ScheduledEvent.java | 12 +- .../core/ml/datafeed/SearchInterval.java | 4 +- .../dataframe/DataFrameAnalyticsConfig.java | 6 +- .../DataFrameAnalyticsTaskState.java | 2 +- .../classification/ClassificationStats.java | 6 +- .../dataframe/stats/common/MemoryUsage.java | 2 +- .../OutlierDetectionStats.java | 6 +- .../stats/regression/RegressionStats.java | 6 +- .../core/ml/inference/TrainedModelConfig.java | 6 +- .../inference/assignment/AssignmentStats.java | 6 +- .../assignment/TrainedModelAssignment.java | 2 +- .../trainedmodel/InferenceStats.java | 6 +- .../trainedmodel/ModelPackageConfig.java | 6 +- .../xpack/core/ml/job/config/Job.java | 8 +- .../core/ml/job/config/JobTaskState.java | 2 +- .../output/FlushAcknowledgement.java | 2 +- .../autodetect/state/CategorizerStats.java | 12 +- .../process/autodetect/state/DataCounts.java | 16 ++- .../autodetect/state/ModelSizeStats.java | 12 +- .../autodetect/state/ModelSnapshot.java | 10 +- .../core/ml/job/results/AnomalyRecord.java | 6 +- .../xpack/core/ml/job/results/Bucket.java | 6 +- .../core/ml/job/results/BucketInfluencer.java | 6 +- .../xpack/core/ml/job/results/Forecast.java | 6 +- .../xpack/core/ml/job/results/Influencer.java | 6 +- .../xpack/core/ml/job/results/ModelPlot.java | 6 +- .../core/ml/job/results/OverallBucket.java | 6 +- .../ExponentialAverageCalculationContext.java | 2 +- .../search/action/AsyncSearchResponse.java | 10 +- .../search/action/AsyncStatusResponse.java | 6 +- .../core/slm/SnapshotInvocationRecord.java | 4 +- .../core/slm/SnapshotLifecyclePolicyItem.java | 6 +- .../slm/SnapshotLifecyclePolicyMetadata.java | 2 +- .../xpack/core/ssl/cert/CertificateInfo.java | 2 +- .../transforms/TransformCheckpointStats.java | 4 +- .../TransformCheckpointingInfo.java | 8 +- 
.../transform/transforms/TransformConfig.java | 2 +- .../transforms/TransformHealthIssue.java | 2 +- .../core/watcher/execution/QueuedWatch.java | 4 +- .../execution/WatchExecutionSnapshot.java | 4 +- .../compute/operator/DriverProfile.java | 4 +- .../compute/operator/DriverSleeps.java | 4 +- .../xpack/ql/async/QlStatusResponse.java | 4 +- .../shutdown/SingleNodeShutdownStatus.java | 4 +- .../slm/history/SnapshotHistoryItem.java | 2 +- ...epositoryVerifyIntegrityResponseChunk.java | 2 +- .../xpack/sql/qa/jdbc/ResultSetTestCase.java | 4 +- .../watcher/notification/email/Email.java | 2 +- 77 files changed, 398 insertions(+), 220 deletions(-) diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentBuilder.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentBuilder.java index aa869b1af4f5e..6f0b473b5ba1f 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentBuilder.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentBuilder.java @@ -40,6 +40,7 @@ import java.util.ServiceLoader; import java.util.Set; import java.util.function.Function; +import java.util.function.LongFunction; /** * A utility to build XContent (ie json). 
@@ -107,13 +108,15 @@ public static XContentBuilder builder(XContentType xContentType, Set inc private static final Map, Writer> WRITERS; private static final Map, HumanReadableTransformer> HUMAN_READABLE_TRANSFORMERS; private static final Map, Function> DATE_TRANSFORMERS; + private static final LongFunction UNIX_EPOCH_MILLIS_FORMATTER; + static { Map, Writer> writers = new HashMap<>(); writers.put(Boolean.class, (b, v) -> b.value((Boolean) v)); writers.put(boolean[].class, (b, v) -> b.values((boolean[]) v)); writers.put(Byte.class, (b, v) -> b.value((Byte) v)); writers.put(byte[].class, (b, v) -> b.value((byte[]) v)); - writers.put(Date.class, XContentBuilder::timeValue); + writers.put(Date.class, XContentBuilder::timestampValue); writers.put(Double.class, (b, v) -> b.value((Double) v)); writers.put(double[].class, (b, v) -> b.values((double[]) v)); writers.put(Float.class, (b, v) -> b.value((Float) v)); @@ -129,8 +132,8 @@ public static XContentBuilder builder(XContentType xContentType, Set inc writers.put(Locale.class, (b, v) -> b.value(v.toString())); writers.put(Class.class, (b, v) -> b.value(v.toString())); writers.put(ZonedDateTime.class, (b, v) -> b.value(v.toString())); - writers.put(Calendar.class, XContentBuilder::timeValue); - writers.put(GregorianCalendar.class, XContentBuilder::timeValue); + writers.put(Calendar.class, XContentBuilder::timestampValue); + writers.put(GregorianCalendar.class, XContentBuilder::timestampValue); writers.put(BigInteger.class, (b, v) -> b.value((BigInteger) v)); writers.put(BigDecimal.class, (b, v) -> b.value((BigDecimal) v)); @@ -140,6 +143,8 @@ public static XContentBuilder builder(XContentType xContentType, Set inc // treat strings as already converted dateTransformers.put(String.class, Function.identity()); + LongFunction unixEpochMillisFormatter = Long::toString; + // Load pluggable extensions for (XContentBuilderExtension service : ServiceLoader.load(XContentBuilderExtension.class)) { Map, Writer> addlWriters = 
service.getXContentWriters(); @@ -157,11 +162,14 @@ public static XContentBuilder builder(XContentType xContentType, Set inc writers.putAll(addlWriters); humanReadableTransformer.putAll(addlTransformers); dateTransformers.putAll(addlDateTransformers); + + unixEpochMillisFormatter = service::formatUnixEpochMillis; } WRITERS = Map.copyOf(writers); HUMAN_READABLE_TRANSFORMERS = Map.copyOf(humanReadableTransformer); DATE_TRANSFORMERS = Map.copyOf(dateTransformers); + UNIX_EPOCH_MILLIS_FORMATTER = unixEpochMillisFormatter; } @FunctionalInterface @@ -797,52 +805,53 @@ public XContentBuilder utf8Value(byte[] bytes, int offset, int length) throws IO } //////////////////////////////////////////////////////////////////////////// - // Date + // Timestamps ////////////////////////////////// /** - * Write a time-based field and value, if the passed timeValue is null a - * null value is written, otherwise a date transformers lookup is performed. - - * @throws IllegalArgumentException if there is no transformers for the type of object + * Write a field with a timestamp value: if the passed timestamp is null then writes null, otherwise looks up the date transformer + * for the type of {@code timestamp} and uses it to format the value. + * + * @throws IllegalArgumentException if there is no transformer for the given value type */ - public XContentBuilder timeField(String name, Object timeValue) throws IOException { - return field(name).timeValue(timeValue); + public XContentBuilder timestampField(String name, Object timestamp) throws IOException { + return field(name).timestampValue(timestamp); } /** - * If the {@code humanReadable} flag is set, writes both a formatted and - * unformatted version of the time value using the date transformer for the - * {@link Long} class. 
+ * Writes a field containing the raw number of milliseconds since the unix epoch, and also if the {@code humanReadable} flag is set, + * writes a formatted representation of this value using the UNIX_EPOCH_MILLIS_FORMATTER. */ - public XContentBuilder timeField(String name, String readableName, long value) throws IOException { - assert name.equals(readableName) == false : "expected raw and readable field names to differ, but they were both: " + name; + public XContentBuilder timestampFieldsFromUnixEpochMillis(String rawFieldName, String humanReadableFieldName, long unixEpochMillis) + throws IOException { + assert rawFieldName.equals(humanReadableFieldName) == false + : "expected raw and readable field names to differ, but they were both: " + rawFieldName; if (humanReadable) { - Function longTransformer = DATE_TRANSFORMERS.get(Long.class); - if (longTransformer == null) { - throw new IllegalArgumentException("cannot write time value xcontent for unknown value of type Long"); - } - field(readableName).value(longTransformer.apply(value)); + field(humanReadableFieldName, UNIX_EPOCH_MILLIS_FORMATTER.apply(unixEpochMillis)); } - field(name, value); + field(rawFieldName, unixEpochMillis); return this; } /** - * Write a time-based value, if the value is null a null value is written, - * otherwise a date transformers lookup is performed. - - * @throws IllegalArgumentException if there is no transformers for the type of object + * Write a timestamp value: if the passed timestamp is null then writes null, otherwise looks up the date transformer for the type of + * {@code timestamp} and uses it to format the value. 
+ * + * @throws IllegalArgumentException if there is no transformer for the given value type */ - public XContentBuilder timeValue(Object timeValue) throws IOException { - if (timeValue == null) { + public XContentBuilder timestampValue(Object timestamp) throws IOException { + if (timestamp == null) { return nullValue(); } else { - Function transformer = DATE_TRANSFORMERS.get(timeValue.getClass()); + Function transformer = DATE_TRANSFORMERS.get(timestamp.getClass()); if (transformer == null) { - throw new IllegalArgumentException("cannot write time value xcontent for unknown value of type " + timeValue.getClass()); + final var exception = new IllegalArgumentException( + "cannot write timestamp value xcontent for value of unknown type " + timestamp.getClass() + ); + assert false : exception; + throw exception; } - return value(transformer.apply(timeValue)); + return value(transformer.apply(timestamp)); } } diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentBuilderExtension.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentBuilderExtension.java index 1e48667079cfc..4e3b442e7d473 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentBuilderExtension.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentBuilderExtension.java @@ -68,4 +68,9 @@ public interface XContentBuilderExtension { * */ Map, Function> getDateTransformers(); + + /** + * Used to format a {@code long} representing the number of milliseconds since the Unix Epoch. 
+ */ + String formatUnixEpochMillis(long unixEpochMillis); } diff --git a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/pipeline/DateDerivativeIT.java b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/pipeline/DateDerivativeIT.java index 3e66bf0edf394..e911bf1a41198 100644 --- a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/pipeline/DateDerivativeIT.java +++ b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/pipeline/DateDerivativeIT.java @@ -65,17 +65,17 @@ protected Collection> nodePlugins() { } private static IndexRequestBuilder indexDoc(String idx, ZonedDateTime date, int value) throws Exception { - return prepareIndex(idx).setSource(jsonBuilder().startObject().timeField("date", date).field("value", value).endObject()); + return prepareIndex(idx).setSource(jsonBuilder().startObject().timestampField("date", date).field("value", value).endObject()); } private IndexRequestBuilder indexDoc(int month, int day, int value) throws Exception { return prepareIndex("idx").setSource( jsonBuilder().startObject() .field("value", value) - .timeField("date", date(month, day)) + .timestampField("date", date(month, day)) .startArray("dates") - .timeValue(date(month, day)) - .timeValue(date(month + 1, day + 1)) + .timestampValue(date(month, day)) + .timestampValue(date(month + 1, day + 1)) .endArray() .endObject() ); diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationMetadata.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationMetadata.java index 82888fa39c857..fcfd8e51aabb5 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationMetadata.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationMetadata.java @@ -66,7 +66,7 @@ public 
XContentBuilder toXContent(XContentBuilder builder, Params params) throws // (we'll be a in a json map where the id is the key) builder.startObject(); builder.field(VERSION.getPreferredName(), version); - builder.timeField(MODIFIED_DATE_MILLIS.getPreferredName(), MODIFIED_DATE.getPreferredName(), modifiedDate); + builder.timestampFieldsFromUnixEpochMillis(MODIFIED_DATE_MILLIS.getPreferredName(), MODIFIED_DATE.getPreferredName(), modifiedDate); builder.field(DATABASE.getPreferredName(), database); builder.endObject(); return builder; diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/GetDatabaseConfigurationAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/GetDatabaseConfigurationAction.java index 89bc3d1ce5d37..1970883e91b3e 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/GetDatabaseConfigurationAction.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/GetDatabaseConfigurationAction.java @@ -110,7 +110,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); builder.field("id", database.id()); // serialize including the id -- this is get response serialization builder.field(VERSION.getPreferredName(), item.version()); - builder.timeField(MODIFIED_DATE_MILLIS.getPreferredName(), MODIFIED_DATE.getPreferredName(), item.modifiedDate()); + builder.timestampFieldsFromUnixEpochMillis( + MODIFIED_DATE_MILLIS.getPreferredName(), + MODIFIED_DATE.getPreferredName(), + item.modifiedDate() + ); builder.field(DATABASE.getPreferredName(), database); builder.endObject(); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index 1787b4f784574..a8e2ca818d3f4 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -88,11 +88,11 @@ private static String format(ZonedDateTime date, String pattern) { private IndexRequestBuilder indexDoc(String idx, ZonedDateTime date, int value) throws Exception { return prepareIndex(idx).setSource( jsonBuilder().startObject() - .timeField("date", date) + .timestampField("date", date) .field("value", value) .startArray("dates") - .timeValue(date) - .timeValue(date.plusMonths(1).plusDays(1)) + .timestampValue(date) + .timestampValue(date.plusMonths(1).plusDays(1)) .endArray() .endObject() ); @@ -103,10 +103,10 @@ private IndexRequestBuilder indexDoc(int month, int day, int value) throws Excep jsonBuilder().startObject() .field("value", value) .field("constant", 1) - .timeField("date", date(month, day)) + .timestampField("date", date(month, day)) .startArray("dates") - .timeValue(date(month, day)) - .timeValue(date(month + 1, day + 1)) + .timestampValue(date(month, day)) + .timestampValue(date(month + 1, day + 1)) .endArray() .endObject() ); @@ -162,53 +162,53 @@ private void getMultiSortDocs(List builders) throws IOExcep for (int i = 1; i <= 3; i++) { builders.add( prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().timeField("date", date(1, 1)).field("l", 1).field("d", i).endObject() + jsonBuilder().startObject().timestampField("date", date(1, 1)).field("l", 1).field("d", i).endObject() ) ); builders.add( prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().timeField("date", date(1, 2)).field("l", 2).field("d", i).endObject() + jsonBuilder().startObject().timestampField("date", date(1, 2)).field("l", 2).field("d", i).endObject() ) ); } builders.add( prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().timeField("date", date(1, 3)).field("l", 3).field("d", 1).endObject() + 
jsonBuilder().startObject().timestampField("date", date(1, 3)).field("l", 3).field("d", 1).endObject() ) ); builders.add( prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().timeField("date", date(1, 3).plusHours(1)).field("l", 3).field("d", 2).endObject() + jsonBuilder().startObject().timestampField("date", date(1, 3).plusHours(1)).field("l", 3).field("d", 2).endObject() ) ); builders.add( prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().timeField("date", date(1, 4)).field("l", 3).field("d", 1).endObject() + jsonBuilder().startObject().timestampField("date", date(1, 4)).field("l", 3).field("d", 1).endObject() ) ); builders.add( prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().timeField("date", date(1, 4).plusHours(2)).field("l", 3).field("d", 3).endObject() + jsonBuilder().startObject().timestampField("date", date(1, 4).plusHours(2)).field("l", 3).field("d", 3).endObject() ) ); builders.add( prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().timeField("date", date(1, 5)).field("l", 5).field("d", 1).endObject() + jsonBuilder().startObject().timestampField("date", date(1, 5)).field("l", 5).field("d", 1).endObject() ) ); builders.add( prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().timeField("date", date(1, 5).plusHours(12)).field("l", 5).field("d", 2).endObject() + jsonBuilder().startObject().timestampField("date", date(1, 5).plusHours(12)).field("l", 5).field("d", 2).endObject() ) ); builders.add( prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().timeField("date", date(1, 6)).field("l", 5).field("d", 1).endObject() + jsonBuilder().startObject().timestampField("date", date(1, 6)).field("l", 5).field("d", 1).endObject() ) ); builders.add( prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().timeField("date", date(1, 7)).field("l", 5).field("d", 1).endObject() + jsonBuilder().startObject().timestampField("date", date(1, 7)).field("l", 5).field("d", 
1).endObject() ) ); } @@ -997,7 +997,7 @@ public void testSingleValueWithTimeZone() throws Exception { IndexRequestBuilder[] reqs = new IndexRequestBuilder[5]; ZonedDateTime date = date("2014-03-11T00:00:00+00:00"); for (int i = 0; i < reqs.length; i++) { - reqs[i] = prepareIndex("idx2").setId("" + i).setSource(jsonBuilder().startObject().timeField("date", date).endObject()); + reqs[i] = prepareIndex("idx2").setId("" + i).setSource(jsonBuilder().startObject().timestampField("date", date).endObject()); date = date.plusHours(1); } indexRandom(true, reqs); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java index 0afc479474814..778be4ee0705f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java @@ -63,7 +63,7 @@ private void prepareIndex(ZonedDateTime date, int numHours, int stepSizeHours, i IndexRequestBuilder[] reqs = new IndexRequestBuilder[numHours]; for (int i = idxIdStart; i < idxIdStart + reqs.length; i++) { reqs[i - idxIdStart] = prepareIndex("idx2").setId("" + i) - .setSource(jsonBuilder().startObject().timeField("date", date).endObject()); + .setSource(jsonBuilder().startObject().timestampField("date", date).endObject()); date = date.plusHours(stepSizeHours); } indexRandom(true, reqs); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java index 6e9a9305eaf4e..afa3ad9d7e737 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java @@ -58,10 +58,10 @@ private static IndexRequestBuilder indexDoc(int month, int day, int value) throw return prepareIndex("idx").setSource( jsonBuilder().startObject() .field("value", value) - .timeField("date", date(month, day)) + .timestampField("date", date(month, day)) .startArray("dates") - .timeValue(date(month, day)) - .timeValue(date(month + 1, day + 1)) + .timestampValue(date(month, day)) + .timestampValue(date(month + 1, day + 1)) .endArray() .endObject() ); @@ -620,8 +620,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - prepareIndex("cache_test_idx").setId("1").setSource(jsonBuilder().startObject().timeField("date", date(1, 1)).endObject()), - prepareIndex("cache_test_idx").setId("2").setSource(jsonBuilder().startObject().timeField("date", date(2, 1)).endObject()) + prepareIndex("cache_test_idx").setId("1").setSource(jsonBuilder().startObject().timestampField("date", date(1, 1)).endObject()), + prepareIndex("cache_test_idx").setId("2").setSource(jsonBuilder().startObject().timestampField("date", date(2, 1)).endObject()) ); // Make sure we are starting with a clear cache diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/list/ListDanglingIndicesResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/list/ListDanglingIndicesResponse.java index 6fe8432c31ccc..d942c4347960a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/list/ListDanglingIndicesResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/list/ListDanglingIndicesResponse.java @@ -79,7 +79,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("index_name", info.indexName); builder.field("index_uuid", info.indexUUID); - builder.timeField("creation_date_millis", "creation_date", 
info.creationDateMillis); + builder.timestampFieldsFromUnixEpochMillis("creation_date_millis", "creation_date", info.creationDateMillis); builder.array("node_ids", info.nodeIds.toArray(new String[0])); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/FieldUsageShardResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/FieldUsageShardResponse.java index 47abda4fabcde..347376a918d4c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/FieldUsageShardResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/FieldUsageShardResponse.java @@ -69,7 +69,7 @@ public FieldUsageStats getStats() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(Fields.TRACKING_ID, trackingId); - builder.timeField(Fields.TRACKING_STARTED_AT_MILLIS, Fields.TRACKING_STARTED_AT, trackingStartTime); + builder.timestampFieldsFromUnixEpochMillis(Fields.TRACKING_STARTED_AT_MILLIS, Fields.TRACKING_STARTED_AT, trackingStartTime); builder.startObject(Fields.ROUTING) .field(Fields.STATE, shardRouting.state()) .field(Fields.PRIMARY, shardRouting.primary()) diff --git a/server/src/main/java/org/elasticsearch/action/bulk/FailureStoreDocumentConverter.java b/server/src/main/java/org/elasticsearch/action/bulk/FailureStoreDocumentConverter.java index f433e937dbe5d..a5a38a288d342 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/FailureStoreDocumentConverter.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/FailureStoreDocumentConverter.java @@ -18,12 +18,14 @@ import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; +import java.time.Instant; import java.util.Collections; import java.util.List; import java.util.Objects; import java.util.Set; import java.util.function.Supplier; +import static 
org.elasticsearch.common.xcontent.XContentElasticsearchExtension.DEFAULT_FORMATTER; import static org.elasticsearch.ingest.CompoundProcessor.PIPELINE_ORIGIN_EXCEPTION_HEADER; import static org.elasticsearch.ingest.CompoundProcessor.PROCESSOR_TAG_EXCEPTION_HEADER; import static org.elasticsearch.ingest.CompoundProcessor.PROCESSOR_TYPE_EXCEPTION_HEADER; @@ -84,7 +86,7 @@ private static XContentBuilder createSource(IndexRequest source, Exception excep XContentBuilder builder = JsonXContent.contentBuilder(); builder.startObject(); { - builder.timeField("@timestamp", timeSupplier.get()); + builder.field("@timestamp", DEFAULT_FORMATTER.format(Instant.ofEpochMilli(timeSupplier.get()))); builder.startObject("document"); { if (source.id() != null) { diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainIndexDataStreamLifecycle.java b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainIndexDataStreamLifecycle.java index 2352628264394..94c294435acd3 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainIndexDataStreamLifecycle.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainIndexDataStreamLifecycle.java @@ -123,7 +123,7 @@ public XContentBuilder toXContent( builder.field(MANAGED_BY_LIFECYCLE_FIELD.getPreferredName(), managedByLifecycle); if (managedByLifecycle) { if (indexCreationDate != null) { - builder.timeField( + builder.timestampFieldsFromUnixEpochMillis( INDEX_CREATION_DATE_MILLIS_FIELD.getPreferredName(), INDEX_CREATION_DATE_FIELD.getPreferredName(), indexCreationDate @@ -134,7 +134,11 @@ public XContentBuilder toXContent( ); } if (rolloverDate != null) { - builder.timeField(ROLLOVER_DATE_MILLIS_FIELD.getPreferredName(), ROLLOVER_DATE_FIELD.getPreferredName(), rolloverDate); + builder.timestampFieldsFromUnixEpochMillis( + ROLLOVER_DATE_MILLIS_FIELD.getPreferredName(), + ROLLOVER_DATE_FIELD.getPreferredName(), + rolloverDate + ); 
builder.field(TIME_SINCE_ROLLOVER_FIELD.getPreferredName(), getTimeSinceRollover(nowSupplier).toHumanReadableString(2)); } if (generationDateMillis != null) { diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterSnapshotStats.java b/server/src/main/java/org/elasticsearch/cluster/ClusterSnapshotStats.java index cb98cd4b2f535..ac96a2d55bc71 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterSnapshotStats.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterSnapshotStats.java @@ -228,7 +228,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); builder.endObject(); - builder.timeField("oldest_start_time_millis", "oldest_start_time", firstStartTimeMillis); + builder.timestampFieldsFromUnixEpochMillis("oldest_start_time_millis", "oldest_start_time", firstStartTimeMillis); return builder.endObject(); } diff --git a/server/src/main/java/org/elasticsearch/cluster/SnapshotDeletionsInProgress.java b/server/src/main/java/org/elasticsearch/cluster/SnapshotDeletionsInProgress.java index c371ff4d37a05..fe144135d42bd 100644 --- a/server/src/main/java/org/elasticsearch/cluster/SnapshotDeletionsInProgress.java +++ b/server/src/main/java/org/elasticsearch/cluster/SnapshotDeletionsInProgress.java @@ -180,7 +180,7 @@ public Iterator toXContentChunked(ToXContent.Params ignore builder.value(snapshot.getName()); } builder.endArray(); - builder.timeField("start_time_millis", "start_time", entry.startTime); + builder.timestampFieldsFromUnixEpochMillis("start_time_millis", "start_time", entry.startTime); builder.field("repository_state_id", entry.repositoryStateId); builder.field("state", entry.state); } diff --git a/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java b/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java index c32175fc9367d..d82a31720d6d4 100644 --- a/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java +++ 
b/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java @@ -1404,7 +1404,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } } builder.endArray(); - builder.timeField("start_time_millis", "start_time", startTime); + builder.timestampFieldsFromUnixEpochMillis("start_time_millis", "start_time", startTime); builder.field("repository_state_id", repositoryStateId); builder.startArray("shards"); { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java index 783145d3618f1..320be8acb0af9 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java @@ -434,7 +434,7 @@ public XContentBuilder toXContent(final XContentBuilder builder, final Params pa builder.startObject(); builder.field(INDEX_KEY); index.toXContent(builder, params); - builder.timeField(DELETE_DATE_IN_MILLIS_KEY, DELETE_DATE_KEY, deleteDateInMillis); + builder.timestampFieldsFromUnixEpochMillis(DELETE_DATE_IN_MILLIS_KEY, DELETE_DATE_KEY, deleteDateInMillis); return builder.endObject(); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadata.java index 4257543498c54..aa8b092ffcca0 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadata.java @@ -266,7 +266,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(NODE_ID_FIELD.getPreferredName(), nodeId); builder.field(TYPE_FIELD.getPreferredName(), type); builder.field(REASON_FIELD.getPreferredName(), reason); - builder.timeField(STARTED_AT_MILLIS_FIELD.getPreferredName(), 
STARTED_AT_READABLE_FIELD, startedAtMillis); + builder.timestampFieldsFromUnixEpochMillis( + STARTED_AT_MILLIS_FIELD.getPreferredName(), + STARTED_AT_READABLE_FIELD, + startedAtMillis + ); builder.field(NODE_SEEN_FIELD.getPreferredName(), nodeSeen); if (allocationDelay != null) { builder.field(ALLOCATION_DELAY_FIELD.getPreferredName(), allocationDelay.getStringRep()); diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/XContentElasticsearchExtension.java b/server/src/main/java/org/elasticsearch/common/xcontent/XContentElasticsearchExtension.java index dea851b1b553a..0298e1a123b58 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/XContentElasticsearchExtension.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/XContentElasticsearchExtension.java @@ -57,13 +57,13 @@ public Map, XContentBuilder.Writer> getXContentWriters() { // Fully-qualified here to reduce ambiguity around our (ES') Version class writers.put(org.apache.lucene.util.Version.class, (b, v) -> b.value(Objects.toString(v))); writers.put(TimeValue.class, (b, v) -> b.value(v.toString())); - writers.put(ZonedDateTime.class, XContentBuilder::timeValue); - writers.put(OffsetDateTime.class, XContentBuilder::timeValue); - writers.put(OffsetTime.class, XContentBuilder::timeValue); - writers.put(java.time.Instant.class, XContentBuilder::timeValue); - writers.put(LocalDateTime.class, XContentBuilder::timeValue); - writers.put(LocalDate.class, XContentBuilder::timeValue); - writers.put(LocalTime.class, XContentBuilder::timeValue); + writers.put(ZonedDateTime.class, XContentBuilder::timestampValue); + writers.put(OffsetDateTime.class, XContentBuilder::timestampValue); + writers.put(OffsetTime.class, XContentBuilder::timestampValue); + writers.put(java.time.Instant.class, XContentBuilder::timestampValue); + writers.put(LocalDateTime.class, XContentBuilder::timestampValue); + writers.put(LocalDate.class, XContentBuilder::timestampValue); + writers.put(LocalTime.class, 
XContentBuilder::timestampValue); writers.put(DayOfWeek.class, (b, v) -> b.value(v.toString())); writers.put(Month.class, (b, v) -> b.value(v.toString())); writers.put(MonthDay.class, (b, v) -> b.value(v.toString())); @@ -103,10 +103,8 @@ public Map, XContentBuilder.HumanReadableTransformer> getXContentHumanR public Map, Function> getDateTransformers() { Map, Function> transformers = new HashMap<>(); transformers.put(Date.class, d -> DEFAULT_FORMATTER.format(((Date) d).toInstant())); - transformers.put(Long.class, d -> DEFAULT_FORMATTER.format(Instant.ofEpochMilli((long) d))); transformers.put(Calendar.class, d -> DEFAULT_FORMATTER.format(((Calendar) d).toInstant())); transformers.put(GregorianCalendar.class, d -> DEFAULT_FORMATTER.format(((Calendar) d).toInstant())); - transformers.put(Instant.class, d -> DEFAULT_FORMATTER.format((Instant) d)); transformers.put(ZonedDateTime.class, d -> DEFAULT_FORMATTER.format((ZonedDateTime) d)); transformers.put(OffsetDateTime.class, d -> DEFAULT_FORMATTER.format((OffsetDateTime) d)); transformers.put(OffsetTime.class, d -> OFFSET_TIME_FORMATTER.format((OffsetTime) d)); @@ -119,4 +117,9 @@ public Map, Function> getDateTransformers() { transformers.put(LocalTime.class, d -> LOCAL_TIME_FORMATTER.format((LocalTime) d)); return transformers; } + + @Override + public String formatUnixEpochMillis(long unixEpochMillis) { + return DEFAULT_FORMATTER.format(Instant.ofEpochMilli(unixEpochMillis)); + } } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java index 6be94ab21a4f7..b0d33a75ba883 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java @@ -293,9 +293,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(Fields.TYPE, recoverySource.getType()); 
builder.field(Fields.STAGE, stage.toString()); builder.field(Fields.PRIMARY, primary); - builder.timeField(Fields.START_TIME_IN_MILLIS, Fields.START_TIME, timer.startTime); + builder.timestampFieldsFromUnixEpochMillis(Fields.START_TIME_IN_MILLIS, Fields.START_TIME, timer.startTime); if (timer.stopTime > 0) { - builder.timeField(Fields.STOP_TIME_IN_MILLIS, Fields.STOP_TIME, timer.stopTime); + builder.timestampFieldsFromUnixEpochMillis(Fields.STOP_TIME_IN_MILLIS, Fields.STOP_TIME, timer.stopTime); } builder.humanReadableField(Fields.TOTAL_TIME_IN_MILLIS, Fields.TOTAL_TIME, new TimeValue(timer.time())); diff --git a/server/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java b/server/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java index 1c68615203d3a..a3639214a1b9d 100644 --- a/server/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java +++ b/server/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java @@ -420,7 +420,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(Fields.VM_VERSION, vmVersion); builder.field(Fields.VM_VENDOR, vmVendor); builder.field(Fields.USING_BUNDLED_JDK, usingBundledJdk); - builder.timeField(Fields.START_TIME_IN_MILLIS, Fields.START_TIME, startTime); + builder.timestampFieldsFromUnixEpochMillis(Fields.START_TIME_IN_MILLIS, Fields.START_TIME, startTime); builder.startObject(Fields.MEM); builder.humanReadableField(Fields.HEAP_INIT_IN_BYTES, Fields.HEAP_INIT, ByteSizeValue.ofBytes(mem.heapInit)); diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskInfo.java b/server/src/main/java/org/elasticsearch/tasks/TaskInfo.java index 6707d77d6a2d0..d49ac1e29bea6 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskInfo.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskInfo.java @@ -115,7 +115,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (description != null) { builder.field("description", description); } - 
builder.timeField("start_time_in_millis", "start_time", startTime); + builder.timestampFieldsFromUnixEpochMillis("start_time_in_millis", "start_time", startTime); if (builder.humanReadable()) { builder.field("running_time", new TimeValue(runningTimeNanos, TimeUnit.NANOSECONDS).toString()); } diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java index 2793e03fc3fa8..b3af430cc43e2 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java @@ -390,28 +390,31 @@ public void testText() throws Exception { } public void testDate() throws Exception { - assertResult("{'date':null}", () -> builder().startObject().timeField("date", (Date) null).endObject()); - assertResult("{'date':null}", () -> builder().startObject().field("date").timeValue((Date) null).endObject()); + assertResult("{'date':null}", () -> builder().startObject().timestampField("date", (Date) null).endObject()); + assertResult("{'date':null}", () -> builder().startObject().field("date").timestampValue((Date) null).endObject()); final Date d1 = Date.from(ZonedDateTime.of(2016, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant()); - assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> builder().startObject().timeField("d1", d1).endObject()); - assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> builder().startObject().field("d1").timeValue(d1).endObject()); + assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> builder().startObject().timestampField("d1", d1).endObject()); + assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> builder().startObject().field("d1").timestampValue(d1).endObject()); final Date d2 = Date.from(ZonedDateTime.of(2016, 12, 25, 7, 59, 42, 213000000, ZoneOffset.UTC).toInstant()); - assertResult("{'d2':'2016-12-25T07:59:42.213Z'}", () -> 
builder().startObject().timeField("d2", d2).endObject()); - assertResult("{'d2':'2016-12-25T07:59:42.213Z'}", () -> builder().startObject().field("d2").timeValue(d2).endObject()); + assertResult("{'d2':'2016-12-25T07:59:42.213Z'}", () -> builder().startObject().timestampField("d2", d2).endObject()); + assertResult("{'d2':'2016-12-25T07:59:42.213Z'}", () -> builder().startObject().field("d2").timestampValue(d2).endObject()); } - public void testDateField() throws Exception { + public void testUnixEpochMillisField() throws Exception { final Date d = Date.from(ZonedDateTime.of(2016, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant()); assertResult( "{'date_in_millis':1451606400000}", - () -> builder().startObject().timeField("date_in_millis", "date", d.getTime()).endObject() + () -> builder().startObject().timestampFieldsFromUnixEpochMillis("date_in_millis", "date", d.getTime()).endObject() ); assertResult( "{'date':'2016-01-01T00:00:00.000Z','date_in_millis':1451606400000}", - () -> builder().humanReadable(true).startObject().timeField("date_in_millis", "date", d.getTime()).endObject() + () -> builder().humanReadable(true) + .startObject() + .timestampFieldsFromUnixEpochMillis("date_in_millis", "date", d.getTime()) + .endObject() ); } @@ -419,7 +422,7 @@ public void testCalendar() throws Exception { Calendar calendar = GregorianCalendar.from(ZonedDateTime.of(2016, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)); assertResult( "{'calendar':'2016-01-01T00:00:00.000Z'}", - () -> builder().startObject().field("calendar").timeValue(calendar).endObject() + () -> builder().startObject().field("calendar").timestampValue(calendar).endObject() ); } @@ -427,83 +430,95 @@ public void testJavaTime() throws Exception { final ZonedDateTime d1 = ZonedDateTime.of(2016, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); // ZonedDateTime - assertResult("{'date':null}", () -> builder().startObject().timeField("date", (ZonedDateTime) null).endObject()); - assertResult("{'date':null}", () -> 
builder().startObject().field("date").timeValue((ZonedDateTime) null).endObject()); + assertResult("{'date':null}", () -> builder().startObject().timestampField("date", (ZonedDateTime) null).endObject()); + assertResult("{'date':null}", () -> builder().startObject().field("date").timestampValue((ZonedDateTime) null).endObject()); assertResult("{'date':null}", () -> builder().startObject().field("date", (ZonedDateTime) null).endObject()); - assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> builder().startObject().timeField("d1", d1).endObject()); - assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> builder().startObject().field("d1").timeValue(d1).endObject()); + assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> builder().startObject().timestampField("d1", d1).endObject()); + assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> builder().startObject().field("d1").timestampValue(d1).endObject()); assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> builder().startObject().field("d1", d1).endObject()); // Instant - assertResult("{'date':null}", () -> builder().startObject().timeField("date", (java.time.Instant) null).endObject()); - assertResult("{'date':null}", () -> builder().startObject().field("date").timeValue((java.time.Instant) null).endObject()); + assertResult("{'date':null}", () -> builder().startObject().timestampField("date", (java.time.Instant) null).endObject()); + assertResult("{'date':null}", () -> builder().startObject().field("date").timestampValue((java.time.Instant) null).endObject()); assertResult("{'date':null}", () -> builder().startObject().field("date", (java.time.Instant) null).endObject()); - assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> builder().startObject().timeField("d1", d1.toInstant()).endObject()); - assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> builder().startObject().field("d1").timeValue(d1.toInstant()).endObject()); + assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> 
builder().startObject().timestampField("d1", d1.toInstant()).endObject()); + assertResult( + "{'d1':'2016-01-01T00:00:00.000Z'}", + () -> builder().startObject().field("d1").timestampValue(d1.toInstant()).endObject() + ); assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> builder().startObject().field("d1", d1.toInstant()).endObject()); // LocalDateTime (no time zone) - assertResult("{'date':null}", () -> builder().startObject().timeField("date", (LocalDateTime) null).endObject()); - assertResult("{'date':null}", () -> builder().startObject().field("date").timeValue((LocalDateTime) null).endObject()); + assertResult("{'date':null}", () -> builder().startObject().timestampField("date", (LocalDateTime) null).endObject()); + assertResult("{'date':null}", () -> builder().startObject().field("date").timestampValue((LocalDateTime) null).endObject()); assertResult("{'date':null}", () -> builder().startObject().field("date", (LocalDateTime) null).endObject()); - assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> builder().startObject().timeField("d1", d1.toLocalDateTime()).endObject()); assertResult( "{'d1':'2016-01-01T00:00:00.000Z'}", - () -> builder().startObject().field("d1").timeValue(d1.toLocalDateTime()).endObject() + () -> builder().startObject().timestampField("d1", d1.toLocalDateTime()).endObject() + ); + assertResult( + "{'d1':'2016-01-01T00:00:00.000Z'}", + () -> builder().startObject().field("d1").timestampValue(d1.toLocalDateTime()).endObject() ); assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> builder().startObject().field("d1", d1.toLocalDateTime()).endObject()); // LocalDate (no time, no time zone) - assertResult("{'date':null}", () -> builder().startObject().timeField("date", (LocalDate) null).endObject()); - assertResult("{'date':null}", () -> builder().startObject().field("date").timeValue((LocalDate) null).endObject()); + assertResult("{'date':null}", () -> builder().startObject().timestampField("date", (LocalDate) null).endObject()); 
+ assertResult("{'date':null}", () -> builder().startObject().field("date").timestampValue((LocalDate) null).endObject()); assertResult("{'date':null}", () -> builder().startObject().field("date", (LocalDate) null).endObject()); - assertResult("{'d1':'2016-01-01'}", () -> builder().startObject().timeField("d1", d1.toLocalDate()).endObject()); - assertResult("{'d1':'2016-01-01'}", () -> builder().startObject().field("d1").timeValue(d1.toLocalDate()).endObject()); + assertResult("{'d1':'2016-01-01'}", () -> builder().startObject().timestampField("d1", d1.toLocalDate()).endObject()); + assertResult("{'d1':'2016-01-01'}", () -> builder().startObject().field("d1").timestampValue(d1.toLocalDate()).endObject()); assertResult("{'d1':'2016-01-01'}", () -> builder().startObject().field("d1", d1.toLocalDate()).endObject()); // LocalTime (no date, no time zone) - assertResult("{'date':null}", () -> builder().startObject().timeField("date", (LocalTime) null).endObject()); - assertResult("{'date':null}", () -> builder().startObject().field("date").timeValue((LocalTime) null).endObject()); + assertResult("{'date':null}", () -> builder().startObject().timestampField("date", (LocalTime) null).endObject()); + assertResult("{'date':null}", () -> builder().startObject().field("date").timestampValue((LocalTime) null).endObject()); assertResult("{'date':null}", () -> builder().startObject().field("date", (LocalTime) null).endObject()); - assertResult("{'d1':'00:00:00.000'}", () -> builder().startObject().timeField("d1", d1.toLocalTime()).endObject()); - assertResult("{'d1':'00:00:00.000'}", () -> builder().startObject().field("d1").timeValue(d1.toLocalTime()).endObject()); + assertResult("{'d1':'00:00:00.000'}", () -> builder().startObject().timestampField("d1", d1.toLocalTime()).endObject()); + assertResult("{'d1':'00:00:00.000'}", () -> builder().startObject().field("d1").timestampValue(d1.toLocalTime()).endObject()); assertResult("{'d1':'00:00:00.000'}", () -> 
builder().startObject().field("d1", d1.toLocalTime()).endObject()); final ZonedDateTime d2 = ZonedDateTime.of(2016, 1, 1, 7, 59, 23, 123_000_000, ZoneOffset.UTC); - assertResult("{'d1':'07:59:23.123'}", () -> builder().startObject().timeField("d1", d2.toLocalTime()).endObject()); - assertResult("{'d1':'07:59:23.123'}", () -> builder().startObject().field("d1").timeValue(d2.toLocalTime()).endObject()); + assertResult("{'d1':'07:59:23.123'}", () -> builder().startObject().timestampField("d1", d2.toLocalTime()).endObject()); + assertResult("{'d1':'07:59:23.123'}", () -> builder().startObject().field("d1").timestampValue(d2.toLocalTime()).endObject()); assertResult("{'d1':'07:59:23.123'}", () -> builder().startObject().field("d1", d2.toLocalTime()).endObject()); // OffsetDateTime - assertResult("{'date':null}", () -> builder().startObject().timeField("date", (OffsetDateTime) null).endObject()); - assertResult("{'date':null}", () -> builder().startObject().field("date").timeValue((OffsetDateTime) null).endObject()); + assertResult("{'date':null}", () -> builder().startObject().timestampField("date", (OffsetDateTime) null).endObject()); + assertResult("{'date':null}", () -> builder().startObject().field("date").timestampValue((OffsetDateTime) null).endObject()); assertResult("{'date':null}", () -> builder().startObject().field("date", (OffsetDateTime) null).endObject()); assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> builder().startObject().field("d1", d1.toOffsetDateTime()).endObject()); - assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> builder().startObject().timeField("d1", d1.toOffsetDateTime()).endObject()); assertResult( "{'d1':'2016-01-01T00:00:00.000Z'}", - () -> builder().startObject().field("d1").timeValue(d1.toOffsetDateTime()).endObject() + () -> builder().startObject().timestampField("d1", d1.toOffsetDateTime()).endObject() + ); + assertResult( + "{'d1':'2016-01-01T00:00:00.000Z'}", + () -> 
builder().startObject().field("d1").timestampValue(d1.toOffsetDateTime()).endObject() ); // also test with a date that has a real offset OffsetDateTime offsetDateTime = d1.withZoneSameLocal(ZoneOffset.ofHours(5)).toOffsetDateTime(); assertResult("{'d1':'2016-01-01T00:00:00.000+05:00'}", () -> builder().startObject().field("d1", offsetDateTime).endObject()); - assertResult("{'d1':'2016-01-01T00:00:00.000+05:00'}", () -> builder().startObject().timeField("d1", offsetDateTime).endObject()); assertResult( "{'d1':'2016-01-01T00:00:00.000+05:00'}", - () -> builder().startObject().field("d1").timeValue(offsetDateTime).endObject() + () -> builder().startObject().timestampField("d1", offsetDateTime).endObject() + ); + assertResult( + "{'d1':'2016-01-01T00:00:00.000+05:00'}", + () -> builder().startObject().field("d1").timestampValue(offsetDateTime).endObject() ); // OffsetTime - assertResult("{'date':null}", () -> builder().startObject().timeField("date", (OffsetTime) null).endObject()); - assertResult("{'date':null}", () -> builder().startObject().field("date").timeValue((OffsetTime) null).endObject()); + assertResult("{'date':null}", () -> builder().startObject().timestampField("date", (OffsetTime) null).endObject()); + assertResult("{'date':null}", () -> builder().startObject().field("date").timestampValue((OffsetTime) null).endObject()); assertResult("{'date':null}", () -> builder().startObject().field("date", (OffsetTime) null).endObject()); final OffsetTime offsetTime = d2.toOffsetDateTime().toOffsetTime(); - assertResult("{'o':'07:59:23.123Z'}", () -> builder().startObject().timeField("o", offsetTime).endObject()); - assertResult("{'o':'07:59:23.123Z'}", () -> builder().startObject().field("o").timeValue(offsetTime).endObject()); + assertResult("{'o':'07:59:23.123Z'}", () -> builder().startObject().timestampField("o", offsetTime).endObject()); + assertResult("{'o':'07:59:23.123Z'}", () -> builder().startObject().field("o").timestampValue(offsetTime).endObject()); 
assertResult("{'o':'07:59:23.123Z'}", () -> builder().startObject().field("o", offsetTime).endObject()); // also test with a date that has a real offset final OffsetTime zonedOffsetTime = offsetTime.withOffsetSameLocal(ZoneOffset.ofHours(5)); - assertResult("{'o':'07:59:23.123+05:00'}", () -> builder().startObject().timeField("o", zonedOffsetTime).endObject()); - assertResult("{'o':'07:59:23.123+05:00'}", () -> builder().startObject().field("o").timeValue(zonedOffsetTime).endObject()); + assertResult("{'o':'07:59:23.123+05:00'}", () -> builder().startObject().timestampField("o", zonedOffsetTime).endObject()); + assertResult("{'o':'07:59:23.123+05:00'}", () -> builder().startObject().field("o").timestampValue(zonedOffsetTime).endObject()); assertResult("{'o':'07:59:23.123+05:00'}", () -> builder().startObject().field("o", zonedOffsetTime).endObject()); // DayOfWeek enum, not a real time value, but might be used in scripts diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java index a695fb6c45348..575382c7fb441 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java @@ -189,7 +189,7 @@ public void testDateTypesConversion() throws Exception { Calendar calendar = new GregorianCalendar(TimeZone.getTimeZone("UTC"), Locale.ROOT); String expectedCalendar = XContentElasticsearchExtension.DEFAULT_FORMATTER.format(calendar.toInstant()); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); - builder.startObject().timeField("date", date).endObject(); + builder.startObject().timestampField("date", date).endObject(); assertThat(Strings.toString(builder), equalTo("{\"date\":\"" + expectedDate + "\"}")); builder = XContentFactory.contentBuilder(XContentType.JSON); diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java index 2b01f4d7fa2a4..0d1a007db0d39 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java @@ -524,13 +524,13 @@ public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) t if (licenseVersion == VERSION_START) { builder.field(Fields.SUBSCRIPTION_TYPE, subscriptionType); } - builder.timeField(Fields.ISSUE_DATE_IN_MILLIS, Fields.ISSUE_DATE, issueDate); + builder.timestampFieldsFromUnixEpochMillis(Fields.ISSUE_DATE_IN_MILLIS, Fields.ISSUE_DATE, issueDate); if (licenseVersion == VERSION_START) { builder.field(Fields.FEATURE, feature); } if (expiryDate != LicenseSettings.BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS) { - builder.timeField(Fields.EXPIRY_DATE_IN_MILLIS, Fields.EXPIRY_DATE, expiryDate); + builder.timestampFieldsFromUnixEpochMillis(Fields.EXPIRY_DATE_IN_MILLIS, Fields.EXPIRY_DATE, expiryDate); } if (licenseVersion >= VERSION_ENTERPRISE) { @@ -551,7 +551,7 @@ public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) t builder.humanReadable(previouslyHumanReadable); } if (licenseVersion >= VERSION_START_DATE) { - builder.timeField(Fields.START_DATE_IN_MILLIS, Fields.START_DATE, startDate); + builder.timestampFieldsFromUnixEpochMillis(Fields.START_DATE_IN_MILLIS, Fields.START_DATE, startDate); } return builder; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java index 5ba0e584d63bb..2f9b125352e9c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java @@ -226,7 +226,7 @@ public 
XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("status", status.label()); if (expiryDate != BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS) { - builder.timeField("expiry_date_in_millis", "expiry_date", expiryDate); + builder.timestampFieldsFromUnixEpochMillis("expiry_date_in_millis", "expiry_date", expiryDate); } return builder.endObject(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleExplainResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleExplainResponse.java index 9c679cd04c94d..33402671a2236 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleExplainResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleExplainResponse.java @@ -489,7 +489,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (managedByILM) { builder.field(POLICY_NAME_FIELD.getPreferredName(), policyName); if (indexCreationDate != null) { - builder.timeField( + builder.timestampFieldsFromUnixEpochMillis( INDEX_CREATION_DATE_MILLIS_FIELD.getPreferredName(), INDEX_CREATION_DATE_FIELD.getPreferredName(), indexCreationDate @@ -500,26 +500,42 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws ); } if (lifecycleDate != null) { - builder.timeField(LIFECYCLE_DATE_MILLIS_FIELD.getPreferredName(), LIFECYCLE_DATE_FIELD.getPreferredName(), lifecycleDate); + builder.timestampFieldsFromUnixEpochMillis( + LIFECYCLE_DATE_MILLIS_FIELD.getPreferredName(), + LIFECYCLE_DATE_FIELD.getPreferredName(), + lifecycleDate + ); builder.field(AGE_FIELD.getPreferredName(), getAge(nowSupplier).toHumanReadableString(2)); } if (phase != null) { builder.field(PHASE_FIELD.getPreferredName(), phase); } if (phaseTime != null) { - builder.timeField(PHASE_TIME_MILLIS_FIELD.getPreferredName(), PHASE_TIME_FIELD.getPreferredName(), phaseTime); + 
builder.timestampFieldsFromUnixEpochMillis( + PHASE_TIME_MILLIS_FIELD.getPreferredName(), + PHASE_TIME_FIELD.getPreferredName(), + phaseTime + ); } if (action != null) { builder.field(ACTION_FIELD.getPreferredName(), action); } if (actionTime != null) { - builder.timeField(ACTION_TIME_MILLIS_FIELD.getPreferredName(), ACTION_TIME_FIELD.getPreferredName(), actionTime); + builder.timestampFieldsFromUnixEpochMillis( + ACTION_TIME_MILLIS_FIELD.getPreferredName(), + ACTION_TIME_FIELD.getPreferredName(), + actionTime + ); } if (step != null) { builder.field(STEP_FIELD.getPreferredName(), step); } if (stepTime != null) { - builder.timeField(STEP_TIME_MILLIS_FIELD.getPreferredName(), STEP_TIME_FIELD.getPreferredName(), stepTime); + builder.timestampFieldsFromUnixEpochMillis( + STEP_TIME_MILLIS_FIELD.getPreferredName(), + STEP_TIME_FIELD.getPreferredName(), + stepTime + ); } if (Strings.hasLength(failedStep)) { builder.field(FAILED_STEP_FIELD.getPreferredName(), failedStep); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/PhaseExecutionInfo.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/PhaseExecutionInfo.java index 78ff08d5ced5b..2aed198d2e5fe 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/PhaseExecutionInfo.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/PhaseExecutionInfo.java @@ -130,7 +130,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(PHASE_DEFINITION_FIELD.getPreferredName(), phase); } builder.field(VERSION_FIELD.getPreferredName(), version); - builder.timeField(MODIFIED_DATE_IN_MILLIS_FIELD.getPreferredName(), "modified_date", modifiedDate); + builder.timestampFieldsFromUnixEpochMillis(MODIFIED_DATE_IN_MILLIS_FIELD.getPreferredName(), "modified_date", modifiedDate); builder.endObject(); return builder; } diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java index 082f6d7aff899..72f05091c1ccd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java @@ -255,7 +255,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); builder.field("flushed", flushed); if (lastFinalizedBucketEnd != null) { - builder.timeField( + builder.timestampFieldsFromUnixEpochMillis( FlushAcknowledgement.LAST_FINALIZED_BUCKET_END.getPreferredName(), FlushAcknowledgement.LAST_FINALIZED_BUCKET_END.getPreferredName() + "_string", lastFinalizedBucketEnd.toEpochMilli() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/Annotation.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/Annotation.java index d4da74df85ba9..2ea605753ccfc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/Annotation.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/Annotation.java @@ -332,17 +332,33 @@ public String getByFieldValue() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(ANNOTATION.getPreferredName(), annotation); - builder.timeField(CREATE_TIME.getPreferredName(), CREATE_TIME.getPreferredName() + "_string", createTime.getTime()); + builder.timestampFieldsFromUnixEpochMillis( + CREATE_TIME.getPreferredName(), + CREATE_TIME.getPreferredName() + "_string", + createTime.getTime() + ); builder.field(CREATE_USERNAME.getPreferredName(), createUsername); - builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", 
timestamp.getTime()); + builder.timestampFieldsFromUnixEpochMillis( + TIMESTAMP.getPreferredName(), + TIMESTAMP.getPreferredName() + "_string", + timestamp.getTime() + ); if (endTimestamp != null) { - builder.timeField(END_TIMESTAMP.getPreferredName(), END_TIMESTAMP.getPreferredName() + "_string", endTimestamp.getTime()); + builder.timestampFieldsFromUnixEpochMillis( + END_TIMESTAMP.getPreferredName(), + END_TIMESTAMP.getPreferredName() + "_string", + endTimestamp.getTime() + ); } if (jobId != null) { builder.field(Job.ID.getPreferredName(), jobId); } if (modifiedTime != null) { - builder.timeField(MODIFIED_TIME.getPreferredName(), MODIFIED_TIME.getPreferredName() + "_string", modifiedTime.getTime()); + builder.timestampFieldsFromUnixEpochMillis( + MODIFIED_TIME.getPreferredName(), + MODIFIED_TIME.getPreferredName() + "_string", + modifiedTime.getTime() + ); } if (modifiedUsername != null) { builder.field(MODIFIED_USERNAME.getPreferredName(), modifiedUsername); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/calendars/ScheduledEvent.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/calendars/ScheduledEvent.java index c6fa4e052c683..b007c1da451f5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/calendars/ScheduledEvent.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/calendars/ScheduledEvent.java @@ -217,8 +217,16 @@ public void writeTo(StreamOutput out) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(DESCRIPTION.getPreferredName(), description); - builder.timeField(START_TIME.getPreferredName(), START_TIME.getPreferredName() + "_string", startTime.toEpochMilli()); - builder.timeField(END_TIME.getPreferredName(), END_TIME.getPreferredName() + "_string", endTime.toEpochMilli()); + builder.timestampFieldsFromUnixEpochMillis( + START_TIME.getPreferredName(), + 
START_TIME.getPreferredName() + "_string", + startTime.toEpochMilli() + ); + builder.timestampFieldsFromUnixEpochMillis( + END_TIME.getPreferredName(), + END_TIME.getPreferredName() + "_string", + endTime.toEpochMilli() + ); builder.field(SKIP_RESULT.getPreferredName(), skipResult); builder.field(SKIP_MODEL_UPDATE.getPreferredName(), skipModelUpdate); if (forceTimeShift != null) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/SearchInterval.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/SearchInterval.java index 7a3334aad00f1..694d248efc7be 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/SearchInterval.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/SearchInterval.java @@ -30,8 +30,8 @@ public SearchInterval(StreamInput in) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.timeField(START_MS.getPreferredName(), START.getPreferredName(), startMs); - builder.timeField(END_MS.getPreferredName(), END.getPreferredName(), endMs); + builder.timestampFieldsFromUnixEpochMillis(START_MS.getPreferredName(), START.getPreferredName(), startMs); + builder.timestampFieldsFromUnixEpochMillis(END_MS.getPreferredName(), END.getPreferredName(), endMs); builder.endObject(); return builder; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsConfig.java index 4c9028f64c2fd..779c6ef263ebe 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsConfig.java @@ -258,7 +258,11 @@ public XContentBuilder 
toXContent(XContentBuilder builder, Params params) throws builder.field(ID.getPreferredName(), id); if (params.paramAsBoolean(EXCLUDE_GENERATED, false) == false) { if (createTime != null) { - builder.timeField(CREATE_TIME.getPreferredName(), CREATE_TIME.getPreferredName() + "_string", createTime.toEpochMilli()); + builder.timestampFieldsFromUnixEpochMillis( + CREATE_TIME.getPreferredName(), + CREATE_TIME.getPreferredName() + "_string", + createTime.toEpochMilli() + ); } if (version != null) { builder.field(VERSION.getPreferredName(), version); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsTaskState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsTaskState.java index e61517569445b..61c18c7c84161 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsTaskState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsTaskState.java @@ -143,7 +143,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(REASON.getPreferredName(), reason); } if (lastStateChangeTime != null) { - builder.timeField( + builder.timestampFieldsFromUnixEpochMillis( LAST_STATE_CHANGE_TIME.getPreferredName(), LAST_STATE_CHANGE_TIME.getPreferredName() + "_string", lastStateChangeTime.toEpochMilli() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/stats/classification/ClassificationStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/stats/classification/ClassificationStats.java index 8b7cff0e80441..0bc191defa6ec 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/stats/classification/ClassificationStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/stats/classification/ClassificationStats.java @@ 
-131,7 +131,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(Fields.TYPE.getPreferredName(), TYPE_VALUE); builder.field(Fields.JOB_ID.getPreferredName(), jobId); } - builder.timeField(Fields.TIMESTAMP.getPreferredName(), Fields.TIMESTAMP.getPreferredName() + "_string", timestamp.toEpochMilli()); + builder.timestampFieldsFromUnixEpochMillis( + Fields.TIMESTAMP.getPreferredName(), + Fields.TIMESTAMP.getPreferredName() + "_string", + timestamp.toEpochMilli() + ); builder.field(ITERATION.getPreferredName(), iteration); builder.field(HYPERPARAMETERS.getPreferredName(), hyperparameters); builder.field(TIMING_STATS.getPreferredName(), timingStats); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/stats/common/MemoryUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/stats/common/MemoryUsage.java index 9e9ff3e759e49..c5941cefb1531 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/stats/common/MemoryUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/stats/common/MemoryUsage.java @@ -126,7 +126,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(Fields.JOB_ID.getPreferredName(), jobId); } if (timestamp != null) { - builder.timeField( + builder.timestampFieldsFromUnixEpochMillis( Fields.TIMESTAMP.getPreferredName(), Fields.TIMESTAMP.getPreferredName() + "_string", timestamp.toEpochMilli() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/stats/outlierdetection/OutlierDetectionStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/stats/outlierdetection/OutlierDetectionStats.java index 6ddc078bef4af..b78b495015ab1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/stats/outlierdetection/OutlierDetectionStats.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/stats/outlierdetection/OutlierDetectionStats.java @@ -101,7 +101,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(Fields.TYPE.getPreferredName(), TYPE_VALUE); builder.field(Fields.JOB_ID.getPreferredName(), jobId); } - builder.timeField(Fields.TIMESTAMP.getPreferredName(), Fields.TIMESTAMP.getPreferredName() + "_string", timestamp.toEpochMilli()); + builder.timestampFieldsFromUnixEpochMillis( + Fields.TIMESTAMP.getPreferredName(), + Fields.TIMESTAMP.getPreferredName() + "_string", + timestamp.toEpochMilli() + ); builder.field(PARAMETERS.getPreferredName(), parameters); builder.field(TIMING_STATS.getPreferredName(), timingStats); builder.endObject(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/stats/regression/RegressionStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/stats/regression/RegressionStats.java index 7fff20bcb68ee..c411f3e5353fa 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/stats/regression/RegressionStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/stats/regression/RegressionStats.java @@ -131,7 +131,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(Fields.TYPE.getPreferredName(), TYPE_VALUE); builder.field(Fields.JOB_ID.getPreferredName(), jobId); } - builder.timeField(Fields.TIMESTAMP.getPreferredName(), Fields.TIMESTAMP.getPreferredName() + "_string", timestamp.toEpochMilli()); + builder.timestampFieldsFromUnixEpochMillis( + Fields.TIMESTAMP.getPreferredName(), + Fields.TIMESTAMP.getPreferredName() + "_string", + timestamp.toEpochMilli() + ); builder.field(ITERATION.getPreferredName(), iteration); builder.field(HYPERPARAMETERS.getPreferredName(), hyperparameters); builder.field(TIMING_STATS.getPreferredName(), 
timingStats); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java index f0909f75d9402..5ae19f6db6bb4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java @@ -509,7 +509,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (params.paramAsBoolean(EXCLUDE_GENERATED, false) == false) { builder.field(CREATED_BY.getPreferredName(), createdBy); builder.field(VERSION.getPreferredName(), version.toString()); - builder.timeField(CREATE_TIME.getPreferredName(), CREATE_TIME.getPreferredName() + "_string", createTime.toEpochMilli()); + builder.timestampFieldsFromUnixEpochMillis( + CREATE_TIME.getPreferredName(), + CREATE_TIME.getPreferredName() + "_string", + createTime.toEpochMilli() + ); // If we are NOT storing the model, we should return the deprecated field name if (params.paramAsBoolean(ToXContentParams.FOR_INTERNAL_STORAGE, false) == false && builder.getRestApiVersion().matches(RestApiVersion.equalTo(RestApiVersion.V_7))) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/AssignmentStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/AssignmentStats.java index aadaa5254ff15..858d97bf6f956 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/AssignmentStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/AssignmentStats.java @@ -297,7 +297,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("inference_cache_hit_count", cacheHitCount); } if (lastAccess != null) { - 
builder.timeField("last_access", "last_access_string", lastAccess.toEpochMilli()); + builder.timestampFieldsFromUnixEpochMillis("last_access", "last_access_string", lastAccess.toEpochMilli()); } if (pendingCount != null) { builder.field("number_of_pending_requests", pendingCount); @@ -312,7 +312,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("timeout_count", timeoutCount); } if (startTime != null) { - builder.timeField("start_time", "start_time_string", startTime.toEpochMilli()); + builder.timestampFieldsFromUnixEpochMillis("start_time", "start_time_string", startTime.toEpochMilli()); } if (threadsPerAllocation != null) { builder.field("threads_per_allocation", threadsPerAllocation); @@ -608,7 +608,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("cache_size", cacheSize); } builder.field("priority", priority); - builder.timeField("start_time", "start_time_string", startTime.toEpochMilli()); + builder.timestampFieldsFromUnixEpochMillis("start_time", "start_time_string", startTime.toEpochMilli()); int totalErrorCount = nodeStats.stream().mapToInt(NodeStats::getErrorCount).sum(); int totalRejectedExecutionCount = nodeStats.stream().mapToInt(NodeStats::getRejectedExecutionCount).sum(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java index 4a87b8e24f481..d9e7693870643 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java @@ -368,7 +368,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (reason != null) { builder.field(REASON.getPreferredName(), 
reason); } - builder.timeField(START_TIME.getPreferredName(), startTime); + builder.timestampField(START_TIME.getPreferredName(), startTime); builder.field(MAX_ASSIGNED_ALLOCATIONS.getPreferredName(), maxAssignedAllocations); builder.field(ADAPTIVE_ALLOCATIONS.getPreferredName(), adaptiveAllocationsSettings); builder.endObject(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceStats.java index 5314702be0688..1721ae0b21349 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceStats.java @@ -162,7 +162,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(INFERENCE_COUNT.getPreferredName(), inferenceCount); builder.field(CACHE_MISS_COUNT.getPreferredName(), cacheMissCount); builder.field(MISSING_ALL_FIELDS_COUNT.getPreferredName(), missingAllFieldsCount); - builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timeStamp.toEpochMilli()); + builder.timestampFieldsFromUnixEpochMillis( + TIMESTAMP.getPreferredName(), + TIMESTAMP.getPreferredName() + "_string", + timeStamp.toEpochMilli() + ); builder.endObject(); return builder; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ModelPackageConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ModelPackageConfig.java index d921bc1d4a158..cfbc6c6701427 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ModelPackageConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ModelPackageConfig.java @@ -260,7 +260,11 
@@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(MINIMUM_VERSION.getPreferredName(), minimumVersion); } if (createTime != null) { - builder.timeField(CREATE_TIME.getPreferredName(), CREATE_TIME.getPreferredName() + "_string", createTime.toEpochMilli()); + builder.timestampFieldsFromUnixEpochMillis( + CREATE_TIME.getPreferredName(), + CREATE_TIME.getPreferredName() + "_string", + createTime.toEpochMilli() + ); } if (size > 0) { builder.field(SIZE.getPreferredName(), size); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java index 8da0209e10293..e663bbd6800bd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java @@ -613,9 +613,13 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th if (jobVersion != null) { builder.field(JOB_VERSION.getPreferredName(), jobVersion); } - builder.timeField(CREATE_TIME.getPreferredName(), CREATE_TIME.getPreferredName() + humanReadableSuffix, createTime.getTime()); + builder.timestampFieldsFromUnixEpochMillis( + CREATE_TIME.getPreferredName(), + CREATE_TIME.getPreferredName() + humanReadableSuffix, + createTime.getTime() + ); if (finishedTime != null) { - builder.timeField( + builder.timestampFieldsFromUnixEpochMillis( FINISHED_TIME.getPreferredName(), FINISHED_TIME.getPreferredName() + humanReadableSuffix, finishedTime.getTime() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobTaskState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobTaskState.java index 2d03d4273013d..64c449020daa8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobTaskState.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobTaskState.java @@ -150,7 +150,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(REASON.getPreferredName(), reason); } if (lastStateChangeTime != null) { - builder.timeField( + builder.timestampFieldsFromUnixEpochMillis( LAST_STATE_CHANGE_TIME.getPreferredName(), LAST_STATE_CHANGE_TIME.getPreferredName() + "_string", lastStateChangeTime.toEpochMilli() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/output/FlushAcknowledgement.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/output/FlushAcknowledgement.java index 2254959242eab..24a6668a0c016 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/output/FlushAcknowledgement.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/output/FlushAcknowledgement.java @@ -99,7 +99,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); builder.field(ID.getPreferredName(), id); if (lastFinalizedBucketEnd != null) { - builder.timeField( + builder.timestampFieldsFromUnixEpochMillis( LAST_FINALIZED_BUCKET_END.getPreferredName(), LAST_FINALIZED_BUCKET_END.getPreferredName() + "_string", lastFinalizedBucketEnd.toEpochMilli() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/CategorizerStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/CategorizerStats.java index 91f09bc8171da..fe8cb390db805 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/CategorizerStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/CategorizerStats.java @@ -195,8 +195,16 
@@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(DEAD_CATEGORY_COUNT_FIELD.getPreferredName(), deadCategoryCount); builder.field(FAILED_CATEGORY_COUNT_FIELD.getPreferredName(), failedCategoryCount); builder.field(CATEGORIZATION_STATUS_FIELD.getPreferredName(), categorizationStatus); - builder.timeField(LOG_TIME_FIELD.getPreferredName(), LOG_TIME_FIELD.getPreferredName() + "_string", logTime.toEpochMilli()); - builder.timeField(TIMESTAMP_FIELD.getPreferredName(), TIMESTAMP_FIELD.getPreferredName() + "_string", timestamp.toEpochMilli()); + builder.timestampFieldsFromUnixEpochMillis( + LOG_TIME_FIELD.getPreferredName(), + LOG_TIME_FIELD.getPreferredName() + "_string", + logTime.toEpochMilli() + ); + builder.timestampFieldsFromUnixEpochMillis( + TIMESTAMP_FIELD.getPreferredName(), + TIMESTAMP_FIELD.getPreferredName() + "_string", + timestamp.toEpochMilli() + ); builder.endObject(); return builder; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCounts.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCounts.java index 775640ac2048f..4c9a3a4b70ecb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCounts.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCounts.java @@ -583,35 +583,35 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.field(SPARSE_BUCKET_COUNT.getPreferredName(), sparseBucketCount); builder.field(BUCKET_COUNT.getPreferredName(), bucketCount); if (earliestRecordTimeStamp != null) { - builder.timeField( + builder.timestampFieldsFromUnixEpochMillis( EARLIEST_RECORD_TIME.getPreferredName(), EARLIEST_RECORD_TIME.getPreferredName() + "_string", earliestRecordTimeStamp.getTime() ); } if (latestRecordTimeStamp != null) { - 
builder.timeField( + builder.timestampFieldsFromUnixEpochMillis( LATEST_RECORD_TIME.getPreferredName(), LATEST_RECORD_TIME.getPreferredName() + "_string", latestRecordTimeStamp.getTime() ); } if (lastDataTimeStamp != null) { - builder.timeField( + builder.timestampFieldsFromUnixEpochMillis( LAST_DATA_TIME.getPreferredName(), LAST_DATA_TIME.getPreferredName() + "_string", lastDataTimeStamp.getTime() ); } if (latestEmptyBucketTimeStamp != null) { - builder.timeField( + builder.timestampFieldsFromUnixEpochMillis( LATEST_EMPTY_BUCKET_TIME.getPreferredName(), LATEST_EMPTY_BUCKET_TIME.getPreferredName() + "_string", latestEmptyBucketTimeStamp.getTime() ); } if (latestSparseBucketTimeStamp != null) { - builder.timeField( + builder.timestampFieldsFromUnixEpochMillis( LATEST_SPARSE_BUCKET_TIME.getPreferredName(), LATEST_SPARSE_BUCKET_TIME.getPreferredName() + "_string", latestSparseBucketTimeStamp.getTime() @@ -619,7 +619,11 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th } builder.field(INPUT_RECORD_COUNT.getPreferredName(), getInputRecordCount()); if (logTime != null) { - builder.timeField(LOG_TIME.getPreferredName(), LOG_TIME.getPreferredName() + "_string", logTime.toEpochMilli()); + builder.timestampFieldsFromUnixEpochMillis( + LOG_TIME.getPreferredName(), + LOG_TIME.getPreferredName() + "_string", + logTime.toEpochMilli() + ); } return builder; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStats.java index 16eceb1e89a95..a95ee13f57913 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStats.java @@ -363,9 +363,17 @@ public XContentBuilder doXContentBody(XContentBuilder 
builder) throws IOExceptio builder.field(DEAD_CATEGORY_COUNT_FIELD.getPreferredName(), deadCategoryCount); builder.field(FAILED_CATEGORY_COUNT_FIELD.getPreferredName(), failedCategoryCount); builder.field(CATEGORIZATION_STATUS_FIELD.getPreferredName(), categorizationStatus); - builder.timeField(LOG_TIME_FIELD.getPreferredName(), LOG_TIME_FIELD.getPreferredName() + "_string", logTime.getTime()); + builder.timestampFieldsFromUnixEpochMillis( + LOG_TIME_FIELD.getPreferredName(), + LOG_TIME_FIELD.getPreferredName() + "_string", + logTime.getTime() + ); if (timestamp != null) { - builder.timeField(TIMESTAMP_FIELD.getPreferredName(), TIMESTAMP_FIELD.getPreferredName() + "_string", timestamp.getTime()); + builder.timestampFieldsFromUnixEpochMillis( + TIMESTAMP_FIELD.getPreferredName(), + TIMESTAMP_FIELD.getPreferredName() + "_string", + timestamp.getTime() + ); } return builder; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java index bf62a8a267f84..3114c03879eb7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java @@ -194,7 +194,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(Job.ID.getPreferredName(), jobId); builder.field(MIN_VERSION.getPreferredName(), minVersion); if (timestamp != null) { - builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); + builder.timestampFieldsFromUnixEpochMillis( + TIMESTAMP.getPreferredName(), + TIMESTAMP.getPreferredName() + "_string", + timestamp.getTime() + ); } if (description != null) { builder.field(DESCRIPTION.getPreferredName(), description); @@ 
-207,14 +211,14 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(ModelSizeStats.RESULT_TYPE_FIELD.getPreferredName(), modelSizeStats); } if (latestRecordTimeStamp != null) { - builder.timeField( + builder.timestampFieldsFromUnixEpochMillis( LATEST_RECORD_TIME.getPreferredName(), LATEST_RECORD_TIME.getPreferredName() + "_string", latestRecordTimeStamp.getTime() ); } if (latestResultTimeStamp != null) { - builder.timeField( + builder.timestampFieldsFromUnixEpochMillis( LATEST_RESULT_TIME.getPreferredName(), LATEST_RESULT_TIME.getPreferredName() + "_string", latestResultTimeStamp.getTime() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecord.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecord.java index ca1fd98b7bfb3..3b4d5b6a72654 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecord.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecord.java @@ -283,7 +283,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); builder.field(Detector.DETECTOR_INDEX.getPreferredName(), detectorIndex); builder.field(Result.IS_INTERIM.getPreferredName(), isInterim); - builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); + builder.timestampFieldsFromUnixEpochMillis( + Result.TIMESTAMP.getPreferredName(), + Result.TIMESTAMP.getPreferredName() + "_string", + timestamp.getTime() + ); if (byFieldName != null) { builder.field(BY_FIELD_NAME.getPreferredName(), byFieldName); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Bucket.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Bucket.java index 
b4798b404a434..f867511d992c6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Bucket.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Bucket.java @@ -173,7 +173,11 @@ public void writeTo(StreamOutput out) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(JOB_ID.getPreferredName(), jobId); - builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); + builder.timestampFieldsFromUnixEpochMillis( + Result.TIMESTAMP.getPreferredName(), + Result.TIMESTAMP.getPreferredName() + "_string", + timestamp.getTime() + ); builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore); builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); builder.field(INITIAL_ANOMALY_SCORE.getPreferredName(), initialAnomalyScore); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/BucketInfluencer.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/BucketInfluencer.java index f659ceced3565..131e0c24b387e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/BucketInfluencer.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/BucketInfluencer.java @@ -132,7 +132,11 @@ XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws I builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore); builder.field(RAW_ANOMALY_SCORE.getPreferredName(), rawAnomalyScore); builder.field(PROBABILITY.getPreferredName(), probability); - builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); + builder.timestampFieldsFromUnixEpochMillis( + Result.TIMESTAMP.getPreferredName(), + Result.TIMESTAMP.getPreferredName() + "_string", + 
timestamp.getTime() + ); builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); builder.field(Result.IS_INTERIM.getPreferredName(), isInterim); return builder; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Forecast.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Forecast.java index 20a2fa95b08f3..37eba1fc081a0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Forecast.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Forecast.java @@ -140,7 +140,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); builder.field(DETECTOR_INDEX.getPreferredName(), detectorIndex); if (timestamp != null) { - builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); + builder.timestampFieldsFromUnixEpochMillis( + Result.TIMESTAMP.getPreferredName(), + Result.TIMESTAMP.getPreferredName() + "_string", + timestamp.getTime() + ); } if (partitionFieldName != null) { builder.field(PARTITION_FIELD_NAME.getPreferredName(), partitionFieldName); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Influencer.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Influencer.java index b544c43295bc5..930c8b6f3ef68 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Influencer.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Influencer.java @@ -132,7 +132,11 @@ XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws I builder.field(PROBABILITY.getPreferredName(), probability); builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); builder.field(Result.IS_INTERIM.getPreferredName(), isInterim); - 
builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); + builder.timestampFieldsFromUnixEpochMillis( + Result.TIMESTAMP.getPreferredName(), + Result.TIMESTAMP.getPreferredName() + "_string", + timestamp.getTime() + ); return builder; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ModelPlot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ModelPlot.java index ba1a03c64e15e..043611f3333f6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ModelPlot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ModelPlot.java @@ -153,7 +153,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(DETECTOR_INDEX.getPreferredName(), detectorIndex); if (timestamp != null) { - builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); + builder.timestampFieldsFromUnixEpochMillis( + Result.TIMESTAMP.getPreferredName(), + Result.TIMESTAMP.getPreferredName() + "_string", + timestamp.getTime() + ); } if (partitionFieldName != null) { builder.field(PARTITION_FIELD_NAME.getPreferredName(), partitionFieldName); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/OverallBucket.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/OverallBucket.java index c04a61951ad99..8cdcaa0205b0f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/OverallBucket.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/OverallBucket.java @@ -71,7 +71,11 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - 
builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); + builder.timestampFieldsFromUnixEpochMillis( + Result.TIMESTAMP.getPreferredName(), + Result.TIMESTAMP.getPreferredName() + "_string", + timestamp.getTime() + ); builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); builder.field(OVERALL_SCORE.getPreferredName(), overallScore); builder.field(JOBS.getPreferredName(), jobs); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExponentialAverageCalculationContext.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExponentialAverageCalculationContext.java index 39d822b843d15..e102f0712b283 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExponentialAverageCalculationContext.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExponentialAverageCalculationContext.java @@ -178,7 +178,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); builder.field(INCREMENTAL_METRIC_VALUE_MS.getPreferredName(), incrementalMetricValueMs); if (latestTimestamp != null) { - builder.timeField( + builder.timestampFieldsFromUnixEpochMillis( LATEST_TIMESTAMP.getPreferredName(), LATEST_TIMESTAMP.getPreferredName() + "_string", latestTimestamp.toEpochMilli() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncSearchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncSearchResponse.java index b632c680260cf..32b401ebfb32d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncSearchResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncSearchResponse.java @@ -238,12 +238,16 @@ public Iterator toXContentChunked(ToXContent.Params params } builder.field("is_partial", 
isPartial); builder.field("is_running", isRunning); - builder.timeField("start_time_in_millis", "start_time", startTimeMillis); - builder.timeField("expiration_time_in_millis", "expiration_time", expirationTimeMillis); + builder.timestampFieldsFromUnixEpochMillis("start_time_in_millis", "start_time", startTimeMillis); + builder.timestampFieldsFromUnixEpochMillis("expiration_time_in_millis", "expiration_time", expirationTimeMillis); if (searchResponse != null) { if (isRunning == false) { TimeValue took = searchResponse.getTook(); - builder.timeField("completion_time_in_millis", "completion_time", startTimeMillis + took.millis()); + builder.timestampFieldsFromUnixEpochMillis( + "completion_time_in_millis", + "completion_time", + startTimeMillis + took.millis() + ); } builder.field("response"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncStatusResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncStatusResponse.java index 10b7730b58c9b..89d4be514adde 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncStatusResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncStatusResponse.java @@ -175,10 +175,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("id", id); builder.field("is_running", isRunning); builder.field("is_partial", isPartial); - builder.timeField("start_time_in_millis", "start_time", startTimeMillis); - builder.timeField("expiration_time_in_millis", "expiration_time", expirationTimeMillis); + builder.timestampFieldsFromUnixEpochMillis("start_time_in_millis", "start_time", startTimeMillis); + builder.timestampFieldsFromUnixEpochMillis("expiration_time_in_millis", "expiration_time", expirationTimeMillis); if (completionTimeMillis != null) { - builder.timeField("completion_time_in_millis", "completion_time", completionTimeMillis); + 
builder.timestampFieldsFromUnixEpochMillis("completion_time_in_millis", "completion_time", completionTimeMillis); } RestActions.buildBroadcastShardsHeader(builder, params, totalShards, successfulShards, skippedShards, failedShards, null); if (clusters != null) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotInvocationRecord.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotInvocationRecord.java index 0ada92bbb1e68..186cd81537909 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotInvocationRecord.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotInvocationRecord.java @@ -106,9 +106,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws { builder.field(SNAPSHOT_NAME.getPreferredName(), snapshotName); if (snapshotStartTimestamp != null) { - builder.timeField(START_TIMESTAMP.getPreferredName(), "start_time_string", snapshotStartTimestamp); + builder.timestampFieldsFromUnixEpochMillis(START_TIMESTAMP.getPreferredName(), "start_time_string", snapshotStartTimestamp); } - builder.timeField(TIMESTAMP.getPreferredName(), "time_string", snapshotFinishTimestamp); + builder.timestampFieldsFromUnixEpochMillis(TIMESTAMP.getPreferredName(), "time_string", snapshotFinishTimestamp); if (Objects.nonNull(details)) { builder.field(DETAILS.getPreferredName(), details); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecyclePolicyItem.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecyclePolicyItem.java index c3c70e595eb75..ea52930f4ae84 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecyclePolicyItem.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecyclePolicyItem.java @@ -157,7 +157,7 @@ public boolean equals(Object obj) { public XContentBuilder 
toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(policy.getId()); builder.field(SnapshotLifecyclePolicyMetadata.VERSION.getPreferredName(), version); - builder.timeField( + builder.timestampFieldsFromUnixEpochMillis( SnapshotLifecyclePolicyMetadata.MODIFIED_DATE_MILLIS.getPreferredName(), SnapshotLifecyclePolicyMetadata.MODIFIED_DATE.getPreferredName(), modifiedDate @@ -169,7 +169,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (lastFailure != null) { builder.field(SnapshotLifecyclePolicyMetadata.LAST_FAILURE.getPreferredName(), lastFailure); } - builder.timeField( + builder.timestampFieldsFromUnixEpochMillis( SnapshotLifecyclePolicyMetadata.NEXT_EXECUTION_MILLIS.getPreferredName(), SnapshotLifecyclePolicyMetadata.NEXT_EXECUTION.getPreferredName(), policy.calculateNextExecution(modifiedDate, Clock.systemUTC()) @@ -249,7 +249,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(NAME.getPreferredName(), snapshotId.getName()); builder.field(UUID.getPreferredName(), snapshotId.getUUID()); builder.field(STATE.getPreferredName(), state); - builder.timeField(START_TIME.getPreferredName(), "start_time", startTime); + builder.timestampFieldsFromUnixEpochMillis(START_TIME.getPreferredName(), "start_time", startTime); if (failure != null) { builder.field(FAILURE.getPreferredName(), failure); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecyclePolicyMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecyclePolicyMetadata.java index 672578787762e..dfaaa48f1e2cb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecyclePolicyMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecyclePolicyMetadata.java @@ -192,7 +192,7 @@ public XContentBuilder toXContent(XContentBuilder builder, 
Params params) throws builder.field(POLICY.getPreferredName(), policy); builder.field(HEADERS.getPreferredName(), headers); builder.field(VERSION.getPreferredName(), version); - builder.timeField(MODIFIED_DATE_MILLIS.getPreferredName(), MODIFIED_DATE.getPreferredName(), modifiedDate); + builder.timestampFieldsFromUnixEpochMillis(MODIFIED_DATE_MILLIS.getPreferredName(), MODIFIED_DATE.getPreferredName(), modifiedDate); if (Objects.nonNull(lastSuccess)) { builder.field(LAST_SUCCESS.getPreferredName(), lastSuccess); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/cert/CertificateInfo.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/cert/CertificateInfo.java index ee077e5140606..06ff971ecf890 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/cert/CertificateInfo.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/cert/CertificateInfo.java @@ -134,7 +134,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws .field("subject_dn", subjectDn) .field("serial_number", serialNumber) .field("has_private_key", hasPrivateKey) - .timeField("expiry", expiry); + .timestampField("expiry", expiry); if (Strings.hasLength(issuer)) { builder.field("issuer", issuer); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformCheckpointStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformCheckpointStats.java index 2828a46a28b8c..aa256940daa9b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformCheckpointStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformCheckpointStats.java @@ -93,14 +93,14 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(TransformField.CHECKPOINT_PROGRESS.getPreferredName(), 
checkpointProgress); } if (timestampMillis > 0) { - builder.timeField( + builder.timestampFieldsFromUnixEpochMillis( TransformField.TIMESTAMP_MILLIS.getPreferredName(), TransformField.TIMESTAMP.getPreferredName(), timestampMillis ); } if (timeUpperBoundMillis > 0) { - builder.timeField( + builder.timestampFieldsFromUnixEpochMillis( TransformField.TIME_UPPER_BOUND_MILLIS.getPreferredName(), TransformField.TIME_UPPER_BOUND.getPreferredName(), timeUpperBoundMillis diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformCheckpointingInfo.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformCheckpointingInfo.java index c4530c535cbcf..a6e365b793d93 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformCheckpointingInfo.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformCheckpointingInfo.java @@ -217,10 +217,14 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(OPERATIONS_BEHIND, operationsBehind); } if (changesLastDetectedAt != null) { - builder.timeField(CHANGES_LAST_DETECTED_AT, CHANGES_LAST_DETECTED_AT_HUMAN, changesLastDetectedAt.toEpochMilli()); + builder.timestampFieldsFromUnixEpochMillis( + CHANGES_LAST_DETECTED_AT, + CHANGES_LAST_DETECTED_AT_HUMAN, + changesLastDetectedAt.toEpochMilli() + ); } if (lastSearchTime != null) { - builder.timeField(LAST_SEARCH_TIME, LAST_SEARCH_TIME_HUMAN, lastSearchTime.toEpochMilli()); + builder.timestampFieldsFromUnixEpochMillis(LAST_SEARCH_TIME, LAST_SEARCH_TIME_HUMAN, lastSearchTime.toEpochMilli()); } builder.endObject(); return builder; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfig.java index fb782bdae0068..d8972dcf6a6be 
100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfig.java @@ -450,7 +450,7 @@ public XContentBuilder toXContent(final XContentBuilder builder, final Params pa builder.field(TransformField.VERSION.getPreferredName(), transformVersion); } if (createTime != null) { - builder.timeField( + builder.timestampFieldsFromUnixEpochMillis( TransformField.CREATE_TIME.getPreferredName(), TransformField.CREATE_TIME.getPreferredName() + "_string", createTime.toEpochMilli() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformHealthIssue.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformHealthIssue.java index 5697e1793f0b0..451cfd89f31af 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformHealthIssue.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformHealthIssue.java @@ -90,7 +90,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } builder.field(COUNT, count); if (firstOccurrence != null) { - builder.timeField(FIRST_OCCURRENCE, FIRST_OCCURRENCE_HUMAN_READABLE, firstOccurrence.toEpochMilli()); + builder.timestampFieldsFromUnixEpochMillis(FIRST_OCCURRENCE, FIRST_OCCURRENCE_HUMAN_READABLE, firstOccurrence.toEpochMilli()); } return builder.endObject(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/QueuedWatch.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/QueuedWatch.java index 4da5d46e82fa6..a7633ed0fa1a1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/QueuedWatch.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/QueuedWatch.java @@ -71,8 +71,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); builder.field("watch_id", watchId); builder.field("watch_record_id", watchRecordId); - builder.timeField("triggered_time", triggeredTime); - builder.timeField("execution_time", executionTime); + builder.timestampField("triggered_time", triggeredTime); + builder.timestampField("execution_time", executionTime); builder.endObject(); return builder; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/WatchExecutionSnapshot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/WatchExecutionSnapshot.java index 2b80c32f3c327..49d0566dffeaa 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/WatchExecutionSnapshot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/WatchExecutionSnapshot.java @@ -108,8 +108,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); builder.field("watch_id", watchId); builder.field("watch_record_id", watchRecordId); - builder.timeField("triggered_time", triggeredTime); - builder.timeField("execution_time", executionTime); + builder.timestampField("triggered_time", triggeredTime); + builder.timestampField("execution_time", executionTime); builder.field("execution_phase", phase); if (executedActions != null) { builder.array("executed_actions", executedActions); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java index e7b16072f4b66..a685687e8bfc6 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java @@ -169,8 +169,8 @@ public DriverSleeps sleeps() { @Override public Iterator toXContentChunked(ToXContent.Params params) { return Iterators.concat(ChunkedToXContentHelper.startObject(), Iterators.single((b, p) -> { - b.timeField("start_millis", "start", startMillis); - b.timeField("stop_millis", "stop", stopMillis); + b.timestampFieldsFromUnixEpochMillis("start_millis", "start", startMillis); + b.timestampFieldsFromUnixEpochMillis("stop_millis", "stop", stopMillis); b.field("took_nanos", tookNanos); if (b.humanReadable()) { b.field("took_time", TimeValue.timeValueNanos(tookNanos)); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverSleeps.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverSleeps.java index 217a0b033bed4..01e9a73c4fb5f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverSleeps.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverSleeps.java @@ -62,9 +62,9 @@ public boolean isStillSleeping() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field("reason", reason); - builder.timeField("sleep_millis", "sleep", sleep); + builder.timestampFieldsFromUnixEpochMillis("sleep_millis", "sleep", sleep); if (wake > 0) { - builder.timeField("wake_millis", "wake", wake); + builder.timestampFieldsFromUnixEpochMillis("wake_millis", "wake", wake); } return builder.endObject(); } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/async/QlStatusResponse.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/async/QlStatusResponse.java index 3943ddd3e207a..73e47a631de96 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/async/QlStatusResponse.java +++ 
b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/async/QlStatusResponse.java @@ -121,9 +121,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("is_running", isRunning); builder.field("is_partial", isPartial); if (startTimeMillis != null) { // start time is available only for a running eql search - builder.timeField("start_time_in_millis", "start_time", startTimeMillis); + builder.timestampFieldsFromUnixEpochMillis("start_time_in_millis", "start_time", startTimeMillis); } - builder.timeField("expiration_time_in_millis", "expiration_time", expirationTimeMillis); + builder.timestampFieldsFromUnixEpochMillis("expiration_time_in_millis", "expiration_time", expirationTimeMillis); if (isRunning == false) { // completion status is available only for a completed eql search builder.field("completion_status", completionStatus.getStatus()); } diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/SingleNodeShutdownStatus.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/SingleNodeShutdownStatus.java index 810bd8f6e9ceb..95fd97cd5931f 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/SingleNodeShutdownStatus.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/SingleNodeShutdownStatus.java @@ -122,7 +122,7 @@ public Iterator toXContentChunked(ToXContent.Params params metadata.getAllocationDelay().getStringRep() ); } - builder.timeField( + builder.timestampFieldsFromUnixEpochMillis( SingleNodeShutdownMetadata.STARTED_AT_MILLIS_FIELD.getPreferredName(), SingleNodeShutdownMetadata.STARTED_AT_READABLE_FIELD, metadata.getStartedAtMillis() @@ -138,7 +138,7 @@ public Iterator toXContentChunked(ToXContent.Params params builder.field(TARGET_NODE_NAME_FIELD.getPreferredName(), metadata.getTargetNodeName()); } if (metadata.getGracePeriod() != null) { - builder.timeField( + builder.timestampField( 
SingleNodeShutdownMetadata.GRACE_PERIOD_FIELD.getPreferredName(), metadata.getGracePeriod().getStringRep() ); diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/history/SnapshotHistoryItem.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/history/SnapshotHistoryItem.java index 60fdba2051041..8426ad491e353 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/history/SnapshotHistoryItem.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/history/SnapshotHistoryItem.java @@ -220,7 +220,7 @@ public final void writeTo(StreamOutput out) throws IOException { public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); { - builder.timeField(TIMESTAMP.getPreferredName(), "timestamp_string", timestamp); + builder.timestampFieldsFromUnixEpochMillis(TIMESTAMP.getPreferredName(), "timestamp_string", timestamp); builder.field(POLICY_ID.getPreferredName(), policyId); builder.field(REPOSITORY.getPreferredName(), repository); builder.field(SNAPSHOT_NAME.getPreferredName(), snapshotName); diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityResponseChunk.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityResponseChunk.java index 90130811c1218..143d2671b9eab 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityResponseChunk.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityResponseChunk.java @@ -158,7 +158,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - 
builder.timeField("timestamp_in_millis", "timestamp", timestampMillis); + builder.timestampFieldsFromUnixEpochMillis("timestamp_in_millis", "timestamp", timestampMillis); if (anomaly() != null) { builder.field("anomaly", anomaly()); diff --git a/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ResultSetTestCase.java b/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ResultSetTestCase.java index d8534b963c2d7..20fb342aa4b38 100644 --- a/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ResultSetTestCase.java +++ b/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ResultSetTestCase.java @@ -1350,8 +1350,8 @@ private void setupDataForDateTimeTests(long randomLongDate, Long randomLongDateN indexSimpleDocumentWithBooleanValues("1", true, randomLongDate, randomLongDateNanos); index("test", "2", builder -> { - builder.timeField("test_date", null); - builder.timeField("test_date_nanos", null); + builder.timestampField("test_date", null); + builder.timestampField("test_date_nanos", null); }); } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/Email.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/Email.java index f1a6d7b07d8e7..79470f967ab3c 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/Email.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/Email.java @@ -141,7 +141,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (priority != null) { builder.field(Field.PRIORITY.getPreferredName(), priority.value()); } - builder.timeField(Field.SENT_DATE.getPreferredName(), sentDate); + builder.timestampField(Field.SENT_DATE.getPreferredName(), sentDate); if (to != null) { builder.field(Field.TO.getPreferredName(), to, params); } From 
9eab11c45ba8c7a910e038dced2bcafa8571ba76 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 14 Oct 2024 16:39:00 +0100 Subject: [PATCH 053/449] Clarify use of special values for publish addresses (#114551) Special values like `0.0.0.0` may resolve to multiple IP addresses just like hostnames, so the same considerations apply when using such values as a publish address. This commit spells this case out in the docs and cleans up the nearby wording a little. --- docs/reference/modules/network.asciidoc | 25 ++++++++++++++++++------- 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/docs/reference/modules/network.asciidoc b/docs/reference/modules/network.asciidoc index 8fdc9f2e4f9cb..1e4c5a21d386c 100644 --- a/docs/reference/modules/network.asciidoc +++ b/docs/reference/modules/network.asciidoc @@ -153,23 +153,34 @@ The only requirements are that each node must be: * Accessible at its transport publish address by all other nodes in its cluster, and by any remote clusters that will discover it using - <>. + <>. Each node must have its own distinct publish address. If you specify the transport publish address using a hostname then {es} will resolve this hostname to an IP address once during startup, and other nodes will use the resulting IP address instead of resolving the name again -themselves. To avoid confusion, use a hostname which resolves to the node's -address in all network locations. +themselves. You must use a hostname such that all of the addresses to which it +resolves are addresses at which the node is accessible from all other nodes. To +avoid confusion, it is simplest to use a hostname which resolves to a single +address. + +If you specify the transport publish address using a +<> then {es} will resolve this value to +a single IP address during startup, and other nodes will use the resulting IP +address instead of resolving the value again themselves. 
You must use a value +such that all of the addresses to which it resolves are addresses at which the +node is accessible from all other nodes. To avoid confusion, it is simplest to +use a value which resolves to a single address. It is usually a mistake to use +`0.0.0.0` as a publish address on hosts with more than one network interface. ===== Using a single address The most common configuration is for {es} to bind to a single address at which -it is accessible to clients and other nodes. In this configuration you should -just set `network.host` to that address. You should not separately set any bind -or publish addresses, nor should you separately configure the addresses for the -HTTP or transport interfaces. +it is accessible to clients and other nodes. To use this configuration, set +only `network.host` to the desired address. Do not separately set any bind or +publish addresses. Do not separately specify the addresses for the HTTP or +transport interfaces. ===== Using multiple addresses From 74522c43d80c7eaf67b12eccb841fef74b85f808 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Mon, 14 Oct 2024 17:28:02 +0100 Subject: [PATCH 054/449] [ML] Pick best model variant for the default elser endpoint (#114690) --- .../inference/InferenceService.java | 18 +- .../inference/InferenceServiceExtension.java | 4 +- .../xpack/core/ml/MachineLearningField.java | 8 + .../integration/ModelRegistryIT.java | 143 +++++++---- .../xpack/inference/InferencePlugin.java | 10 +- .../SentenceBoundaryChunkingSettings.java | 15 +- .../inference/registry/ModelRegistry.java | 223 ++++++++++-------- .../BaseElasticsearchInternalService.java | 49 ++-- .../ElasticsearchInternalService.java | 109 +++++---- .../registry/ModelRegistryTests.java | 87 +++---- .../ElasticsearchInternalServiceTests.java | 62 +++-- .../xpack/ml/integration/AutoscalingIT.java | 5 +- .../xpack/ml/integration/TooManyJobsIT.java | 5 +- .../xpack/ml/MachineLearning.java | 10 +- .../ml/action/TransportMlInfoAction.java | 3 +- 
.../TrainedModelAssignmentClusterService.java | 4 +- .../AbstractJobPersistentTasksExecutor.java | 5 +- .../ml/utils/NativeMemoryCalculator.java | 2 +- ...ortStartDataFrameAnalyticsActionTests.java | 2 +- ...nedModelAssignmentClusterServiceTests.java | 4 +- .../OpenJobPersistentTasksExecutorTests.java | 6 +- .../ml/utils/NativeMemoryCalculatorTests.java | 2 +- .../test/inference/inference_crud.yml | 23 -- 23 files changed, 444 insertions(+), 355 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index cbbfef2cc65fa..190f2d689a58d 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -192,12 +192,22 @@ default boolean canStream(TaskType taskType) { return supportedStreamingTasks().contains(taskType); } + record DefaultConfigId(String inferenceId, TaskType taskType, InferenceService service) {}; + /** - * A service can define default configurations that can be - * used out of the box without creating an endpoint first. 
- * @return Default configurations provided by this service + * Get the Ids and task type of any default configurations provided by this service + * @return Defaults */ - default List defaultConfigs() { + default List defaultConfigIds() { return List.of(); } + + /** + * Call the listener with the default model configurations defined by + * the service + * @param defaultsListener The listener + */ + default void defaultConfigs(ActionListener> defaultsListener) { + defaultsListener.onResponse(List.of()); + } } diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceServiceExtension.java b/server/src/main/java/org/elasticsearch/inference/InferenceServiceExtension.java index 68dc865b4c7db..3274bf571d10a 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceServiceExtension.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceServiceExtension.java @@ -10,6 +10,8 @@ package org.elasticsearch.inference; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import java.util.List; @@ -21,7 +23,7 @@ public interface InferenceServiceExtension { List getInferenceServiceFactories(); - record InferenceServiceFactoryContext(Client client, ThreadPool threadPool) {} + record InferenceServiceFactoryContext(Client client, ThreadPool threadPool, ClusterService clusterService, Settings settings) {} interface Factory { /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningField.java index 3e61f6b4e9258..6c49cadb8d189 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningField.java @@ -37,6 +37,14 @@ public final class 
MachineLearningField { Setting.Property.NodeScope ); + public static final Setting MAX_LAZY_ML_NODES = Setting.intSetting( + "xpack.ml.max_lazy_ml_nodes", + 0, + 0, + Setting.Property.OperatorDynamic, + Setting.Property.NodeScope + ); + /** * This boolean value indicates if `max_machine_memory_percent` should be ignored and an automatic calculation is used instead. * diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java index a76c4303268e4..e62cdcdc7fd2a 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java @@ -11,7 +11,10 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; @@ -47,6 +50,7 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; +import java.util.function.Function; import java.util.stream.Collectors; import static org.hamcrest.CoreMatchers.equalTo; @@ -57,6 +61,8 @@ import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; import static 
org.mockito.Mockito.mock; public class ModelRegistryIT extends ESSingleNodeTestCase { @@ -122,7 +128,12 @@ public void testGetModel() throws Exception { assertEquals(model.getConfigurations().getService(), modelHolder.get().service()); var elserService = new ElasticsearchInternalService( - new InferenceServiceExtension.InferenceServiceFactoryContext(mock(Client.class), mock(ThreadPool.class)) + new InferenceServiceExtension.InferenceServiceFactoryContext( + mock(Client.class), + mock(ThreadPool.class), + mock(ClusterService.class), + Settings.EMPTY + ) ); ElasticsearchInternalModel roundTripModel = (ElasticsearchInternalModel) elserService.parsePersistedConfigWithSecrets( modelHolder.get().inferenceEntityId(), @@ -283,18 +294,30 @@ public void testGetModelWithSecrets() throws InterruptedException { } public void testGetAllModels_WithDefaults() throws Exception { - var service = "foo"; - var secret = "abc"; + var serviceName = "foo"; int configuredModelCount = 10; int defaultModelCount = 2; int totalModelCount = 12; - var defaultConfigs = new HashMap(); + var service = mock(InferenceService.class); + + var defaultConfigs = new ArrayList(); + var defaultIds = new ArrayList(); for (int i = 0; i < defaultModelCount; i++) { var id = "default-" + i; - defaultConfigs.put(id, createUnparsedConfig(id, randomFrom(TaskType.values()), service, secret)); + var taskType = randomFrom(TaskType.values()); + defaultConfigs.add(createModel(id, taskType, serviceName)); + defaultIds.add(new InferenceService.DefaultConfigId(id, taskType, service)); } - defaultConfigs.values().forEach(modelRegistry::addDefaultConfiguration); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + var listener = (ActionListener>) invocation.getArguments()[0]; + listener.onResponse(defaultConfigs); + return Void.TYPE; + }).when(service).defaultConfigs(any()); + + defaultIds.forEach(modelRegistry::addDefaultIds); AtomicReference putModelHolder = new AtomicReference<>(); AtomicReference 
exceptionHolder = new AtomicReference<>(); @@ -302,7 +325,7 @@ public void testGetAllModels_WithDefaults() throws Exception { var createdModels = new HashMap(); for (int i = 0; i < configuredModelCount; i++) { var id = randomAlphaOfLength(5) + i; - var model = createModel(id, randomFrom(TaskType.values()), service); + var model = createModel(id, randomFrom(TaskType.values()), serviceName); createdModels.put(id, model); blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder); assertThat(putModelHolder.get(), is(true)); @@ -316,16 +339,22 @@ public void testGetAllModels_WithDefaults() throws Exception { var getAllModels = modelHolder.get(); assertReturnModelIsModifiable(modelHolder.get().get(0)); + // same result but configs should have been persisted this time + blockingCall(listener -> modelRegistry.getAllModels(listener), modelHolder, exceptionHolder); + assertNull(exceptionHolder.get()); + assertThat(modelHolder.get(), hasSize(totalModelCount)); + // sort in the same order as the returned models - var ids = new ArrayList<>(defaultConfigs.keySet().stream().toList()); + var ids = new ArrayList<>(defaultIds.stream().map(InferenceService.DefaultConfigId::inferenceId).toList()); ids.addAll(createdModels.keySet().stream().toList()); ids.sort(String::compareTo); + var configsById = defaultConfigs.stream().collect(Collectors.toMap(Model::getInferenceEntityId, Function.identity())); for (int i = 0; i < totalModelCount; i++) { var id = ids.get(i); assertEquals(id, getAllModels.get(i).inferenceEntityId()); if (id.startsWith("default")) { - assertEquals(defaultConfigs.get(id).taskType(), getAllModels.get(i).taskType()); - assertEquals(defaultConfigs.get(id).service(), getAllModels.get(i).service()); + assertEquals(configsById.get(id).getTaskType(), getAllModels.get(i).taskType()); + assertEquals(configsById.get(id).getConfigurations().getService(), getAllModels.get(i).service()); } else { 
assertEquals(createdModels.get(id).getTaskType(), getAllModels.get(i).taskType()); assertEquals(createdModels.get(id).getConfigurations().getService(), getAllModels.get(i).service()); @@ -334,16 +363,27 @@ public void testGetAllModels_WithDefaults() throws Exception { } public void testGetAllModels_OnlyDefaults() throws Exception { - var service = "foo"; - var secret = "abc"; int defaultModelCount = 2; + var serviceName = "foo"; + var service = mock(InferenceService.class); - var defaultConfigs = new HashMap(); + var defaultConfigs = new ArrayList(); + var defaultIds = new ArrayList(); for (int i = 0; i < defaultModelCount; i++) { var id = "default-" + i; - defaultConfigs.put(id, createUnparsedConfig(id, randomFrom(TaskType.values()), service, secret)); + var taskType = randomFrom(TaskType.values()); + defaultConfigs.add(createModel(id, taskType, serviceName)); + defaultIds.add(new InferenceService.DefaultConfigId(id, taskType, service)); } - defaultConfigs.values().forEach(modelRegistry::addDefaultConfiguration); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + var listener = (ActionListener>) invocation.getArguments()[0]; + listener.onResponse(defaultConfigs); + return Void.TYPE; + }).when(service).defaultConfigs(any()); + + defaultIds.forEach(modelRegistry::addDefaultIds); AtomicReference exceptionHolder = new AtomicReference<>(); AtomicReference> modelHolder = new AtomicReference<>(); @@ -354,31 +394,42 @@ public void testGetAllModels_OnlyDefaults() throws Exception { assertReturnModelIsModifiable(modelHolder.get().get(0)); // sort in the same order as the returned models - var ids = new ArrayList<>(defaultConfigs.keySet().stream().toList()); + var configsById = defaultConfigs.stream().collect(Collectors.toMap(Model::getInferenceEntityId, Function.identity())); + var ids = new ArrayList<>(configsById.keySet().stream().toList()); ids.sort(String::compareTo); for (int i = 0; i < defaultModelCount; i++) { var id = ids.get(i); assertEquals(id, 
getAllModels.get(i).inferenceEntityId()); - assertEquals(defaultConfigs.get(id).taskType(), getAllModels.get(i).taskType()); - assertEquals(defaultConfigs.get(id).service(), getAllModels.get(i).service()); + assertEquals(configsById.get(id).getTaskType(), getAllModels.get(i).taskType()); + assertEquals(configsById.get(id).getConfigurations().getService(), getAllModels.get(i).service()); } } public void testGet_WithDefaults() throws InterruptedException { - var service = "foo"; - var secret = "abc"; + var serviceName = "foo"; + var service = mock(InferenceService.class); + + var defaultConfigs = new ArrayList(); + var defaultIds = new ArrayList(); - var defaultSparse = createUnparsedConfig("default-sparse", TaskType.SPARSE_EMBEDDING, service, secret); - var defaultText = createUnparsedConfig("default-text", TaskType.TEXT_EMBEDDING, service, secret); + defaultConfigs.add(createModel("default-sparse", TaskType.SPARSE_EMBEDDING, serviceName)); + defaultConfigs.add(createModel("default-text", TaskType.TEXT_EMBEDDING, serviceName)); + defaultIds.add(new InferenceService.DefaultConfigId("default-sparse", TaskType.SPARSE_EMBEDDING, service)); + defaultIds.add(new InferenceService.DefaultConfigId("default-text", TaskType.TEXT_EMBEDDING, service)); - modelRegistry.addDefaultConfiguration(defaultSparse); - modelRegistry.addDefaultConfiguration(defaultText); + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + var listener = (ActionListener>) invocation.getArguments()[0]; + listener.onResponse(defaultConfigs); + return Void.TYPE; + }).when(service).defaultConfigs(any()); + defaultIds.forEach(modelRegistry::addDefaultIds); AtomicReference putModelHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); - var configured1 = createModel(randomAlphaOfLength(5) + 1, randomFrom(TaskType.values()), service); - var configured2 = createModel(randomAlphaOfLength(5) + 1, randomFrom(TaskType.values()), service); + var configured1 = 
createModel(randomAlphaOfLength(5) + 1, randomFrom(TaskType.values()), serviceName); + var configured2 = createModel(randomAlphaOfLength(5) + 1, randomFrom(TaskType.values()), serviceName); blockingCall(listener -> modelRegistry.storeModel(configured1, listener), putModelHolder, exceptionHolder); assertThat(putModelHolder.get(), is(true)); blockingCall(listener -> modelRegistry.storeModel(configured2, listener), putModelHolder, exceptionHolder); @@ -387,6 +438,7 @@ public void testGet_WithDefaults() throws InterruptedException { AtomicReference modelHolder = new AtomicReference<>(); blockingCall(listener -> modelRegistry.getModel("default-sparse", listener), modelHolder, exceptionHolder); + assertNull(exceptionHolder.get()); assertEquals("default-sparse", modelHolder.get().inferenceEntityId()); assertEquals(TaskType.SPARSE_EMBEDDING, modelHolder.get().taskType()); assertReturnModelIsModifiable(modelHolder.get()); @@ -401,23 +453,32 @@ public void testGet_WithDefaults() throws InterruptedException { } public void testGetByTaskType_WithDefaults() throws Exception { - var service = "foo"; - var secret = "abc"; - - var defaultSparse = createUnparsedConfig("default-sparse", TaskType.SPARSE_EMBEDDING, service, secret); - var defaultText = createUnparsedConfig("default-text", TaskType.TEXT_EMBEDDING, service, secret); - var defaultChat = createUnparsedConfig("default-chat", TaskType.COMPLETION, service, secret); - - modelRegistry.addDefaultConfiguration(defaultSparse); - modelRegistry.addDefaultConfiguration(defaultText); - modelRegistry.addDefaultConfiguration(defaultChat); + var serviceName = "foo"; + + var defaultSparse = createModel("default-sparse", TaskType.SPARSE_EMBEDDING, serviceName); + var defaultText = createModel("default-text", TaskType.TEXT_EMBEDDING, serviceName); + var defaultChat = createModel("default-chat", TaskType.COMPLETION, serviceName); + + var service = mock(InferenceService.class); + var defaultIds = new ArrayList(); + defaultIds.add(new 
InferenceService.DefaultConfigId("default-sparse", TaskType.SPARSE_EMBEDDING, service)); + defaultIds.add(new InferenceService.DefaultConfigId("default-text", TaskType.TEXT_EMBEDDING, service)); + defaultIds.add(new InferenceService.DefaultConfigId("default-chat", TaskType.COMPLETION, service)); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + var listener = (ActionListener>) invocation.getArguments()[0]; + listener.onResponse(List.of(defaultSparse, defaultChat, defaultText)); + return Void.TYPE; + }).when(service).defaultConfigs(any()); + defaultIds.forEach(modelRegistry::addDefaultIds); AtomicReference putModelHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); - var configuredSparse = createModel("configured-sparse", TaskType.SPARSE_EMBEDDING, service); - var configuredText = createModel("configured-text", TaskType.TEXT_EMBEDDING, service); - var configuredRerank = createModel("configured-rerank", TaskType.RERANK, service); + var configuredSparse = createModel("configured-sparse", TaskType.SPARSE_EMBEDDING, serviceName); + var configuredText = createModel("configured-text", TaskType.TEXT_EMBEDDING, serviceName); + var configuredRerank = createModel("configured-rerank", TaskType.RERANK, serviceName); blockingCall(listener -> modelRegistry.storeModel(configuredSparse, listener), putModelHolder, exceptionHolder); assertThat(putModelHolder.get(), is(true)); blockingCall(listener -> modelRegistry.storeModel(configuredText, listener), putModelHolder, exceptionHolder); @@ -531,10 +592,6 @@ public static Model createModelWithSecrets(String inferenceEntityId, TaskType ta ); } - public static UnparsedModel createUnparsedConfig(String inferenceEntityId, TaskType taskType, String service, String secret) { - return new UnparsedModel(inferenceEntityId, taskType, service, Map.of("a", "b"), Map.of("secret", secret)); - } - private static class TestModelOfAnyKind extends ModelConfigurations { record 
TestModelServiceSettings() implements ServiceSettings { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index d361ce0837b93..ebbf1e59e8b1f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -212,14 +212,20 @@ public Collection createComponents(PluginServices services) { ); } - var factoryContext = new InferenceServiceExtension.InferenceServiceFactoryContext(services.client(), services.threadPool()); + var factoryContext = new InferenceServiceExtension.InferenceServiceFactoryContext( + services.client(), + services.threadPool(), + services.clusterService(), + settings + ); + // This must be done after the HttpRequestSenderFactory is created so that the services can get the // reference correctly var registry = new InferenceServiceRegistry(inferenceServices, factoryContext); registry.init(services.client()); if (DefaultElserFeatureFlag.isEnabled()) { for (var service : registry.getServices().values()) { - service.defaultConfigs().forEach(modelRegistry::addDefaultConfiguration); + service.defaultConfigIds().forEach(modelRegistry::addDefaultIds); } } inferenceServiceRegistry.set(registry); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettings.java index 758dd5d04e268..04a07eeb984ec 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettings.java @@ -35,7 +35,7 @@ public class 
SentenceBoundaryChunkingSettings implements ChunkingSettings { ChunkingSettingsOptions.SENTENCE_OVERLAP.toString() ); - private static int DEFAULT_OVERLAP = 0; + private static int DEFAULT_OVERLAP = 1; protected final int maxChunkSize; protected int sentenceOverlap = DEFAULT_OVERLAP; @@ -69,17 +69,18 @@ public static SentenceBoundaryChunkingSettings fromMap(Map map) validationException ); - Integer sentenceOverlap = ServiceUtils.extractOptionalPositiveInteger( + Integer sentenceOverlap = ServiceUtils.removeAsType( map, ChunkingSettingsOptions.SENTENCE_OVERLAP.toString(), - ModelConfigurations.CHUNKING_SETTINGS, + Integer.class, validationException ); - - if (sentenceOverlap != null && sentenceOverlap > 1) { + if (sentenceOverlap == null) { + sentenceOverlap = DEFAULT_OVERLAP; + } else if (sentenceOverlap > 1 || sentenceOverlap < 0) { validationException.addValidationError( - ChunkingSettingsOptions.SENTENCE_OVERLAP.toString() + "[" + sentenceOverlap + "] must be either 0 or 1" - ); // todo better + ChunkingSettingsOptions.SENTENCE_OVERLAP + "[" + sentenceOverlap + "] must be either 0 or 1" + ); } if (validationException.validationErrors().isEmpty() == false) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java index 62571c13aebf4..33a97f1e91621 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java @@ -23,15 +23,19 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.action.support.SubscribableListener; import 
org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; +import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskType; @@ -57,6 +61,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.function.Function; @@ -87,29 +92,33 @@ public static UnparsedModel unparsedModelFromMap(ModelConfigMap modelConfigMap) private static final Logger logger = LogManager.getLogger(ModelRegistry.class); private final OriginSettingClient client; - private Map defaultConfigs; + private final List defaultConfigIds; private final Set preventDeletionLock = Collections.newSetFromMap(new ConcurrentHashMap<>()); public ModelRegistry(Client client) { this.client = new OriginSettingClient(client, ClientHelper.INFERENCE_ORIGIN); - this.defaultConfigs = new HashMap<>(); + defaultConfigIds = new ArrayList<>(); } - public void addDefaultConfiguration(UnparsedModel serviceDefaultConfig) { - if (defaultConfigs.containsKey(serviceDefaultConfig.inferenceEntityId())) { + /** + * Set the default inference ids provided by the services + * @param defaultConfigIds The defaults + */ + public void addDefaultIds(InferenceService.DefaultConfigId defaultConfigIds) { + var matched = idMatchedDefault(defaultConfigIds.inferenceId(), 
this.defaultConfigIds); + if (matched.isPresent()) { throw new IllegalStateException( "Cannot add default endpoint to the inference endpoint registry with duplicate inference id [" - + serviceDefaultConfig.inferenceEntityId() + + defaultConfigIds.inferenceId() + "] declared by service [" - + serviceDefaultConfig.service() + + defaultConfigIds.service().name() + "]. The inference Id is already use by [" - + defaultConfigs.get(serviceDefaultConfig.inferenceEntityId()).service() + + matched.get().service().name() + "] service." ); } - - defaultConfigs.put(serviceDefaultConfig.inferenceEntityId(), serviceDefaultConfig); + this.defaultConfigIds.add(defaultConfigIds); } /** @@ -118,15 +127,15 @@ public void addDefaultConfiguration(UnparsedModel serviceDefaultConfig) { * @param listener Model listener */ public void getModelWithSecrets(String inferenceEntityId, ActionListener listener) { - if (defaultConfigs.containsKey(inferenceEntityId)) { - listener.onResponse(deepCopyDefaultConfig(defaultConfigs.get(inferenceEntityId))); - return; - } - ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { - // There should be a hit for the configurations and secrets + // There should be a hit for the configurations if (searchResponse.getHits().getHits().length == 0) { - delegate.onFailure(inferenceNotFoundException(inferenceEntityId)); + var maybeDefault = idMatchedDefault(inferenceEntityId, defaultConfigIds); + if (maybeDefault.isPresent()) { + getDefaultConfig(maybeDefault.get(), listener); + } else { + delegate.onFailure(inferenceNotFoundException(inferenceEntityId)); + } return; } @@ -149,15 +158,15 @@ public void getModelWithSecrets(String inferenceEntityId, ActionListener listener) { - if (defaultConfigs.containsKey(inferenceEntityId)) { - listener.onResponse(deepCopyDefaultConfig(defaultConfigs.get(inferenceEntityId))); - return; - } - ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { - // 
There should be a hit for the configurations and secrets + // There should be a hit for the configurations if (searchResponse.getHits().getHits().length == 0) { - delegate.onFailure(inferenceNotFoundException(inferenceEntityId)); + var maybeDefault = idMatchedDefault(inferenceEntityId, defaultConfigIds); + if (maybeDefault.isPresent()) { + getDefaultConfig(maybeDefault.get(), listener); + } else { + delegate.onFailure(inferenceNotFoundException(inferenceEntityId)); + } return; } @@ -188,29 +197,9 @@ private ResourceNotFoundException inferenceNotFoundException(String inferenceEnt */ public void getModelsByTaskType(TaskType taskType, ActionListener> listener) { ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { - var defaultConfigsForTaskType = defaultConfigs.values() - .stream() - .filter(m -> m.taskType() == taskType) - .map(ModelRegistry::deepCopyDefaultConfig) - .toList(); - - // Not an error if no models of this task_type - if (searchResponse.getHits().getHits().length == 0 && defaultConfigsForTaskType.isEmpty()) { - delegate.onResponse(List.of()); - return; - } - var modelConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(ModelRegistry::unparsedModelFromMap).toList(); - - if (defaultConfigsForTaskType.isEmpty() == false) { - var allConfigs = new ArrayList(); - allConfigs.addAll(modelConfigs); - allConfigs.addAll(defaultConfigsForTaskType); - allConfigs.sort(Comparator.comparing(UnparsedModel::inferenceEntityId)); - delegate.onResponse(allConfigs); - } else { - delegate.onResponse(modelConfigs); - } + var defaultConfigsForTaskType = taskTypeMatchedDefaults(taskType, defaultConfigIds); + addAllDefaultConfigsIfMissing(modelConfigs, defaultConfigsForTaskType, delegate); }); QueryBuilder queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.termsQuery(TASK_TYPE_FIELD, taskType.toString())); @@ -232,19 +221,8 @@ public void getModelsByTaskType(TaskType taskType, ActionListener> listener) { 
ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { - var defaults = defaultConfigs.values().stream().map(ModelRegistry::deepCopyDefaultConfig).toList(); - - if (searchResponse.getHits().getHits().length == 0 && defaults.isEmpty()) { - delegate.onResponse(List.of()); - return; - } - var foundConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(ModelRegistry::unparsedModelFromMap).toList(); - var allConfigs = new ArrayList(); - allConfigs.addAll(foundConfigs); - allConfigs.addAll(defaults); - allConfigs.sort(Comparator.comparing(UnparsedModel::inferenceEntityId)); - delegate.onResponse(allConfigs); + addAllDefaultConfigsIfMissing(foundConfigs, defaultConfigIds, delegate); }); // In theory the index should only contain model config documents @@ -262,6 +240,67 @@ public void getAllModels(ActionListener> listener) { client.search(modelSearch, searchListener); } + private void addAllDefaultConfigsIfMissing( + List foundConfigs, + List matchedDefaults, + ActionListener> listener + ) { + var foundIds = foundConfigs.stream().map(UnparsedModel::inferenceEntityId).collect(Collectors.toSet()); + var missing = matchedDefaults.stream().filter(d -> foundIds.contains(d.inferenceId()) == false).toList(); + + if (missing.isEmpty()) { + listener.onResponse(foundConfigs); + } else { + var groupedListener = new GroupedActionListener( + missing.size(), + listener.delegateFailure((delegate, listOfModels) -> { + var allConfigs = new ArrayList(); + allConfigs.addAll(foundConfigs); + allConfigs.addAll(listOfModels); + allConfigs.sort(Comparator.comparing(UnparsedModel::inferenceEntityId)); + delegate.onResponse(allConfigs); + }) + ); + + for (var required : missing) { + getDefaultConfig(required, groupedListener); + } + } + } + + private void getDefaultConfig(InferenceService.DefaultConfigId defaultConfig, ActionListener listener) { + defaultConfig.service().defaultConfigs(listener.delegateFailureAndWrap((delegate, models) -> { + 
boolean foundModel = false; + for (var m : models) { + if (m.getInferenceEntityId().equals(defaultConfig.inferenceId())) { + foundModel = true; + storeDefaultEndpoint(m, () -> listener.onResponse(modelToUnparsedModel(m))); + break; + } + } + + if (foundModel == false) { + listener.onFailure( + new IllegalStateException("Configuration not found for default inference id [" + defaultConfig.inferenceId() + "]") + ); + } + })); + } + + public void storeDefaultEndpoint(Model preconfigured, Runnable runAfter) { + var responseListener = ActionListener.wrap(success -> { + logger.debug("Added default inference endpoint [{}]", preconfigured.getInferenceEntityId()); + }, exception -> { + if (exception instanceof ResourceAlreadyExistsException) { + logger.debug("Default inference id [{}] already exists", preconfigured.getInferenceEntityId()); + } else { + logger.error("Failed to store default inference id [" + preconfigured.getInferenceEntityId() + "]", exception); + } + }); + + storeModel(preconfigured, ActionListener.runAfter(responseListener, runAfter)); + } + private ArrayList parseHitsAsModels(SearchHits hits) { var modelConfigs = new ArrayList(); for (var hit : hits) { @@ -578,60 +617,36 @@ private static IndexRequest createIndexRequest(String docId, String indexName, T } } - private QueryBuilder documentIdQuery(String inferenceEntityId) { - return QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds(Model.documentId(inferenceEntityId))); - } - - static UnparsedModel deepCopyDefaultConfig(UnparsedModel other) { - // Because the default config uses immutable maps - return new UnparsedModel( - other.inferenceEntityId(), - other.taskType(), - other.service(), - copySettingsMap(other.settings()), - copySecretsMap(other.secrets()) - ); - } - - @SuppressWarnings("unchecked") - static Map copySettingsMap(Map other) { - var result = new HashMap(); - - var serviceSettings = (Map) other.get(ModelConfigurations.SERVICE_SETTINGS); - if (serviceSettings != null) { - var 
copiedServiceSettings = copyMap1LevelDeep(serviceSettings); - result.put(ModelConfigurations.SERVICE_SETTINGS, copiedServiceSettings); - } + private static UnparsedModel modelToUnparsedModel(Model model) { + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { + model.getConfigurations() + .toXContent(builder, new ToXContent.MapParams(Map.of(ModelConfigurations.USE_ID_FOR_INDEX, Boolean.TRUE.toString()))); - var taskSettings = (Map) other.get(ModelConfigurations.TASK_SETTINGS); - if (taskSettings != null) { - var copiedTaskSettings = copyMap1LevelDeep(taskSettings); - result.put(ModelConfigurations.TASK_SETTINGS, copiedTaskSettings); - } + var modelConfigMap = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); + return unparsedModelFromMap(new ModelConfigMap(modelConfigMap, new HashMap<>())); - var chunkSettings = (Map) other.get(ModelConfigurations.CHUNKING_SETTINGS); - if (chunkSettings != null) { - var copiedChunkSettings = copyMap1LevelDeep(chunkSettings); - result.put(ModelConfigurations.CHUNKING_SETTINGS, copiedChunkSettings); + } catch (IOException ex) { + throw new ElasticsearchException("[{}] Error serializing inference endpoint configuration", model.getInferenceEntityId(), ex); } + } - return result; + private QueryBuilder documentIdQuery(String inferenceEntityId) { + return QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds(Model.documentId(inferenceEntityId))); } - static Map copySecretsMap(Map other) { - return copyMap1LevelDeep(other); + static Optional idMatchedDefault( + String inferenceId, + List defaultConfigIds + ) { + return defaultConfigIds.stream().filter(defaultConfigId -> defaultConfigId.inferenceId().equals(inferenceId)).findFirst(); } - @SuppressWarnings("unchecked") - static Map copyMap1LevelDeep(Map other) { - var result = new HashMap(); - for (var entry : other.entrySet()) { - if (entry.getValue() instanceof Map) { - result.put(entry.getKey(), new HashMap<>((Map) 
entry.getValue())); - } else { - result.put(entry.getKey(), entry.getValue()); - } - } - return result; + static List taskTypeMatchedDefaults( + TaskType taskType, + List defaultConfigIds + ) { + return defaultConfigIds.stream() + .filter(defaultConfigId -> defaultConfigId.taskType().equals(taskType)) + .collect(Collectors.toList()); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java index 881e2e82b766a..43d3dff8756fa 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceExtension; @@ -22,6 +23,7 @@ import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAction; @@ -38,7 +40,6 @@ import java.io.IOException; import java.util.EnumSet; import java.util.List; -import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.function.Consumer; @@ -49,14 +50,21 @@ public abstract 
class BaseElasticsearchInternalService implements InferenceServi protected final OriginSettingClient client; protected final ExecutorService inferenceExecutor; - protected final Consumer>> platformArch; + protected final Consumer> preferredModelVariantFn; + private final ClusterService clusterService; + + public enum PreferredModelVariant { + LINUX_X86_OPTIMIZED, + PLATFORM_AGNOSTIC + }; private static final Logger logger = LogManager.getLogger(BaseElasticsearchInternalService.class); public BaseElasticsearchInternalService(InferenceServiceExtension.InferenceServiceFactoryContext context) { this.client = new OriginSettingClient(context.client(), ClientHelper.INFERENCE_ORIGIN); this.inferenceExecutor = context.threadPool().executor(InferencePlugin.UTILITY_THREAD_POOL_NAME); - this.platformArch = this::platformArchitecture; + this.preferredModelVariantFn = this::preferredVariantFromPlatformArchitecture; + this.clusterService = context.clusterService(); } // For testing. @@ -66,11 +74,12 @@ public BaseElasticsearchInternalService(InferenceServiceExtension.InferenceServi // service package. 
public BaseElasticsearchInternalService( InferenceServiceExtension.InferenceServiceFactoryContext context, - Consumer>> platformArchFn + Consumer> preferredModelVariantFn ) { this.client = new OriginSettingClient(context.client(), ClientHelper.INFERENCE_ORIGIN); this.inferenceExecutor = context.threadPool().executor(InferencePlugin.UTILITY_THREAD_POOL_NAME); - this.platformArch = platformArchFn; + this.preferredModelVariantFn = preferredModelVariantFn; + this.clusterService = context.clusterService(); } /** @@ -206,31 +215,31 @@ protected void isBuiltinModelPut(Model model, ActionListener listener) public void close() throws IOException {} public static String selectDefaultModelVariantBasedOnClusterArchitecture( - Set modelArchitectures, - String linuxX86OptimisedModel, + PreferredModelVariant preferredModelVariant, + String linuxX86OptimizedModel, String platformAgnosticModel ) { // choose a default model version based on the cluster architecture - boolean homogenous = modelArchitectures.size() == 1; - if (homogenous && modelArchitectures.iterator().next().equals("linux-x86_64")) { + if (PreferredModelVariant.LINUX_X86_OPTIMIZED.equals(preferredModelVariant)) { // Use the hardware optimized model - return linuxX86OptimisedModel; + return linuxX86OptimizedModel; } else { // default to the platform-agnostic model return platformAgnosticModel; } } - private void platformArchitecture(ActionListener> platformArchitectureListener) { + private void preferredVariantFromPlatformArchitecture(ActionListener preferredVariantListener) { // Find the cluster platform as the service may need that // information when creating the model MlPlatformArchitecturesUtil.getMlNodesArchitecturesSet( - platformArchitectureListener.delegateFailureAndWrap((delegate, architectures) -> { - if (architectures.isEmpty() && clusterIsInElasticCloud()) { - // In Elastic cloud ml nodes run on Linux x86 - delegate.onResponse(Set.of("linux-x86_64")); + 
preferredVariantListener.delegateFailureAndWrap((delegate, architectures) -> { + if (architectures.isEmpty() && isClusterInElasticCloud()) { + // There are no ml nodes to check the current arch. + // However, in Elastic cloud ml nodes run on Linux x86 + delegate.onResponse(PreferredModelVariant.LINUX_X86_OPTIMIZED); } else { - delegate.onResponse(architectures); + delegate.onResponse(PreferredModelVariant.PLATFORM_AGNOSTIC); } }), client, @@ -238,9 +247,11 @@ private void platformArchitecture(ActionListener> platformArchitectu ); } - static boolean clusterIsInElasticCloud() { - // use a heuristic to determine if in Elastic cloud. - return true; // TODO + boolean isClusterInElasticCloud() { + // Use the ml lazy node count as a heuristic to determine if in Elastic cloud. + // A value > 0 means scaling should be available for ml nodes + var maxMlLazyNodes = clusterService.getClusterSettings().get(MachineLearningField.MAX_LAZY_ML_NODES); + return maxMlLazyNodes > 0; } public static InferModelAction.Request buildInferenceRequest( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 9a4201842873e..489f2c6706e5f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -27,7 +27,6 @@ import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import 
org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; @@ -35,6 +34,7 @@ import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; +import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; @@ -62,7 +62,6 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; import static org.elasticsearch.xpack.inference.services.elasticsearch.ElserModels.ELSER_V2_MODEL; import static org.elasticsearch.xpack.inference.services.elasticsearch.ElserModels.ELSER_V2_MODEL_LINUX_X86; -import static org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields.EMBEDDING_MAX_BATCH_SIZE; public class ElasticsearchInternalService extends BaseElasticsearchInternalService { @@ -89,7 +88,7 @@ public ElasticsearchInternalService(InferenceServiceExtension.InferenceServiceFa // for testing ElasticsearchInternalService( InferenceServiceExtension.InferenceServiceFactoryContext context, - Consumer>> platformArch + Consumer> platformArch ) { super(context, platformArch); } @@ -145,13 +144,13 @@ public void parseRequestConfig( "Putting elasticsearch service inference endpoints (including elser service) without a model_id field is" + " deprecated and will be removed in a future release. Please specify a model_id field." 
); - platformArch.accept( + preferredModelVariantFn.accept( modelListener.delegateFailureAndWrap( - (delegate, arch) -> elserCase( + (delegate, preferredModelVariant) -> elserCase( inferenceEntityId, taskType, config, - arch, + preferredModelVariant, serviceSettingsMap, chunkingSettings, modelListener @@ -162,13 +161,13 @@ public void parseRequestConfig( throw new IllegalArgumentException("Error parsing service settings, model_id must be provided"); } } else if (MULTILINGUAL_E5_SMALL_VALID_IDS.contains(modelId)) { - platformArch.accept( + preferredModelVariantFn.accept( modelListener.delegateFailureAndWrap( - (delegate, arch) -> e5Case( + (delegate, preferredModelVariant) -> e5Case( inferenceEntityId, taskType, config, - arch, + preferredModelVariant, serviceSettingsMap, chunkingSettings, modelListener @@ -176,13 +175,13 @@ public void parseRequestConfig( ) ); } else if (ElserModels.isValidModel(modelId)) { - platformArch.accept( + preferredModelVariantFn.accept( modelListener.delegateFailureAndWrap( - (delegate, arch) -> elserCase( + (delegate, preferredModelVariant) -> elserCase( inferenceEntityId, taskType, config, - arch, + preferredModelVariant, serviceSettingsMap, chunkingSettings, modelListener @@ -286,7 +285,7 @@ private void e5Case( String inferenceEntityId, TaskType taskType, Map config, - Set platformArchitectures, + PreferredModelVariant preferredModelVariant, Map serviceSettingsMap, ChunkingSettings chunkingSettings, ActionListener modelListener @@ -296,12 +295,12 @@ private void e5Case( if (esServiceSettingsBuilder.getModelId() == null) { esServiceSettingsBuilder.setModelId( selectDefaultModelVariantBasedOnClusterArchitecture( - platformArchitectures, + preferredModelVariant, MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86, MULTILINGUAL_E5_SMALL_MODEL_ID ) ); - } else if (modelVariantValidForArchitecture(platformArchitectures, esServiceSettingsBuilder.getModelId()) == false) { + } else if (modelVariantValidForArchitecture(preferredModelVariant, 
esServiceSettingsBuilder.getModelId()) == false) { throw new IllegalArgumentException( "Error parsing request config, model id does not match any models available on this platform. Was [" + esServiceSettingsBuilder.getModelId() @@ -323,14 +322,14 @@ private void e5Case( ); } - static boolean modelVariantValidForArchitecture(Set platformArchitectures, String modelId) { + static boolean modelVariantValidForArchitecture(PreferredModelVariant modelVariant, String modelId) { if (modelId.equals(MULTILINGUAL_E5_SMALL_MODEL_ID)) { // platform agnostic model is always compatible return true; } return modelId.equals( selectDefaultModelVariantBasedOnClusterArchitecture( - platformArchitectures, + modelVariant, MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86, MULTILINGUAL_E5_SMALL_MODEL_ID ) @@ -341,14 +340,14 @@ private void elserCase( String inferenceEntityId, TaskType taskType, Map config, - Set platformArchitectures, + PreferredModelVariant preferredModelVariant, Map serviceSettingsMap, ChunkingSettings chunkingSettings, ActionListener modelListener ) { var esServiceSettingsBuilder = ElasticsearchInternalServiceSettings.fromRequestMap(serviceSettingsMap); final String defaultModelId = selectDefaultModelVariantBasedOnClusterArchitecture( - platformArchitectures, + preferredModelVariant, ELSER_V2_MODEL_LINUX_X86, ELSER_V2_MODEL ); @@ -383,7 +382,7 @@ private void elserCase( defaultModelId ); - if (modelVariantDoesNotMatchArchitecturesAndIsNotPlatformAgnostic(platformArchitectures, esServiceSettingsBuilder.getModelId())) { + if (modelVariantDoesNotMatchArchitecturesAndIsNotPlatformAgnostic(preferredModelVariant, esServiceSettingsBuilder.getModelId())) { throw new IllegalArgumentException( "Error parsing request config, model id does not match any models available on this platform. 
Was [" + esServiceSettingsBuilder.getModelId() @@ -407,12 +406,12 @@ private void elserCase( } private static boolean modelVariantDoesNotMatchArchitecturesAndIsNotPlatformAgnostic( - Set platformArchitectures, + PreferredModelVariant preferredModelVariant, String modelId ) { return modelId.equals( selectDefaultModelVariantBasedOnClusterArchitecture( - platformArchitectures, + preferredModelVariant, MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86, MULTILINGUAL_E5_SMALL_MODEL_ID ) @@ -793,37 +792,49 @@ private RankedDocsResults textSimilarityResultsToRankedDocs( return new RankedDocsResults(rankings); } + public List defaultConfigIds() { + return List.of(new DefaultConfigId(DEFAULT_ELSER_ID, TaskType.SPARSE_EMBEDDING, this)); + } + + /** + * Default configurations that can be out of the box without creating an endpoint first. + * @param defaultsListener Config listener + */ @Override - public List defaultConfigs() { - // TODO Chunking settings - Map elserSettings = Map.of( - ModelConfigurations.SERVICE_SETTINGS, - Map.of( - ElasticsearchInternalServiceSettings.MODEL_ID, - ElserModels.ELSER_V2_MODEL, // TODO pick model depending on platform - ElasticsearchInternalServiceSettings.NUM_THREADS, + public void defaultConfigs(ActionListener> defaultsListener) { + preferredModelVariantFn.accept(defaultsListener.delegateFailureAndWrap((delegate, preferredModelVariant) -> { + if (PreferredModelVariant.LINUX_X86_OPTIMIZED.equals(preferredModelVariant)) { + defaultsListener.onResponse(defaultConfigsLinuxOptimized()); + } else { + defaultsListener.onResponse(defaultConfigsPlatfromAgnostic()); + } + })); + } + + private List defaultConfigsLinuxOptimized() { + return defaultConfigs(true); + } + + private List defaultConfigsPlatfromAgnostic() { + return defaultConfigs(false); + } + + private List defaultConfigs(boolean useLinuxOptimizedModel) { + var defaultElser = new ElserInternalModel( + DEFAULT_ELSER_ID, + TaskType.SPARSE_EMBEDDING, + NAME, + new ElserInternalServiceSettings( + null, 
1, - ElasticsearchInternalServiceSettings.ADAPTIVE_ALLOCATIONS, - Map.of( - "enabled", - Boolean.TRUE, - "min_number_of_allocations", - 1, - "max_number_of_allocations", - 8 // no max? - ) - ) + useLinuxOptimizedModel ? ELSER_V2_MODEL_LINUX_X86 : ELSER_V2_MODEL, + new AdaptiveAllocationsSettings(Boolean.TRUE, 1, 8) + ), + ElserMlNodeTaskSettings.DEFAULT, + null // default chunking settings ); - return List.of( - new UnparsedModel( - DEFAULT_ELSER_ID, - TaskType.SPARSE_EMBEDDING, - NAME, - elserSettings, - Map.of() // no secrets - ) - ); + return List.of(defaultElser); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java index 75c370fd4d3fb..409d62426949c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.engine.VersionConflictEngineException; +import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.TaskType; import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.search.SearchHit; @@ -35,16 +36,16 @@ import org.junit.Before; import java.nio.ByteBuffer; +import java.util.ArrayList; import java.util.Map; import java.util.concurrent.TimeUnit; import static org.elasticsearch.core.Strings.format; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.sameInstance; import static 
org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; @@ -292,58 +293,30 @@ public void testStoreModel_ThrowsException_WhenFailureIsNotAVersionConflict() { ); } - @SuppressWarnings("unchecked") - public void testDeepCopyDefaultConfig() { - { - var toCopy = new UnparsedModel("tocopy", randomFrom(TaskType.values()), "service-a", Map.of(), Map.of()); - var copied = ModelRegistry.deepCopyDefaultConfig(toCopy); - assertThat(copied, not(sameInstance(toCopy))); - assertThat(copied.taskType(), is(toCopy.taskType())); - assertThat(copied.service(), is(toCopy.service())); - assertThat(copied.secrets(), not(sameInstance(toCopy.secrets()))); - assertThat(copied.secrets(), is(toCopy.secrets())); - // Test copied is a modifiable map - copied.secrets().put("foo", "bar"); - - assertThat(copied.settings(), not(sameInstance(toCopy.settings()))); - assertThat(copied.settings(), is(toCopy.settings())); - // Test copied is a modifiable map - copied.settings().put("foo", "bar"); - } + public void testIdMatchedDefault() { + var defaultConfigIds = new ArrayList(); + defaultConfigIds.add(new InferenceService.DefaultConfigId("foo", TaskType.SPARSE_EMBEDDING, mock(InferenceService.class))); + defaultConfigIds.add(new InferenceService.DefaultConfigId("bar", TaskType.SPARSE_EMBEDDING, mock(InferenceService.class))); - { - Map secretsMap = Map.of("secret", "value"); - Map chunking = Map.of("strategy", "word"); - Map task = Map.of("user", "name"); - Map service = Map.of("num_threads", 1, "adaptive_allocations", Map.of("enabled", true)); - Map settings = Map.of("chunking_settings", chunking, "service_settings", service, "task_settings", task); - - var toCopy = new UnparsedModel("tocopy", randomFrom(TaskType.values()), "service-a", settings, secretsMap); - var copied = ModelRegistry.deepCopyDefaultConfig(toCopy); - assertThat(copied, not(sameInstance(toCopy))); - - assertThat(copied.secrets(), not(sameInstance(toCopy.secrets()))); - 
assertThat(copied.secrets(), is(toCopy.secrets())); - // Test copied is a modifiable map - copied.secrets().remove("secret"); - - assertThat(copied.settings(), not(sameInstance(toCopy.settings()))); - assertThat(copied.settings(), is(toCopy.settings())); - // Test copied is a modifiable map - var chunkOut = (Map) copied.settings().get("chunking_settings"); - assertThat(chunkOut, is(chunking)); - chunkOut.remove("strategy"); - - var taskOut = (Map) copied.settings().get("task_settings"); - assertThat(taskOut, is(task)); - taskOut.remove("user"); - - var serviceOut = (Map) copied.settings().get("service_settings"); - assertThat(serviceOut, is(service)); - var adaptiveOut = (Map) serviceOut.remove("adaptive_allocations"); - assertThat(adaptiveOut, is(Map.of("enabled", true))); - adaptiveOut.remove("enabled"); - } + var matched = ModelRegistry.idMatchedDefault("bar", defaultConfigIds); + assertEquals(defaultConfigIds.get(1), matched.get()); + matched = ModelRegistry.idMatchedDefault("baz", defaultConfigIds); + assertFalse(matched.isPresent()); + } + + public void testTaskTypeMatchedDefaults() { + var defaultConfigIds = new ArrayList(); + defaultConfigIds.add(new InferenceService.DefaultConfigId("s1", TaskType.SPARSE_EMBEDDING, mock(InferenceService.class))); + defaultConfigIds.add(new InferenceService.DefaultConfigId("s2", TaskType.SPARSE_EMBEDDING, mock(InferenceService.class))); + defaultConfigIds.add(new InferenceService.DefaultConfigId("d1", TaskType.TEXT_EMBEDDING, mock(InferenceService.class))); + defaultConfigIds.add(new InferenceService.DefaultConfigId("c1", TaskType.COMPLETION, mock(InferenceService.class))); + + var matched = ModelRegistry.taskTypeMatchedDefaults(TaskType.SPARSE_EMBEDDING, defaultConfigIds); + assertThat(matched, contains(defaultConfigIds.get(0), defaultConfigIds.get(1))); + matched = ModelRegistry.taskTypeMatchedDefaults(TaskType.TEXT_EMBEDDING, defaultConfigIds); + assertThat(matched, contains(defaultConfigIds.get(2))); + matched = 
ModelRegistry.taskTypeMatchedDefaults(TaskType.RERANK, defaultConfigIds); + assertThat(matched, empty()); } public void testDuplicateDefaultIds() { @@ -351,11 +324,15 @@ public void testDuplicateDefaultIds() { var registry = new ModelRegistry(client); var id = "my-inference"; + var mockServiceA = mock(InferenceService.class); + when(mockServiceA.name()).thenReturn("service-a"); + var mockServiceB = mock(InferenceService.class); + when(mockServiceB.name()).thenReturn("service-b"); - registry.addDefaultConfiguration(new UnparsedModel(id, randomFrom(TaskType.values()), "service-a", Map.of(), Map.of())); + registry.addDefaultIds(new InferenceService.DefaultConfigId(id, randomFrom(TaskType.values()), mockServiceA)); var ise = expectThrows( IllegalStateException.class, - () -> registry.addDefaultConfiguration(new UnparsedModel(id, randomFrom(TaskType.values()), "service-b", Map.of(), Map.of())) + () -> registry.addDefaultIds(new InferenceService.DefaultConfigId(id, randomFrom(TaskType.values()), mockServiceB)) ); assertThat( ise.getMessage(), diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index 61645613b8722..d4462c021dcac 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -14,7 +14,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.logging.DeprecationLogger; +import 
org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; @@ -38,6 +40,7 @@ import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; @@ -171,7 +174,7 @@ public void testParseRequestConfig_Misconfigured() { public void testParseRequestConfig_E5() { { - var service = createService(mock(Client.class), Set.of("Aarch64")); + var service = createService(mock(Client.class), BaseElasticsearchInternalService.PreferredModelVariant.PLATFORM_AGNOSTIC); var settings = new HashMap(); settings.put( ModelConfigurations.SERVICE_SETTINGS, @@ -198,7 +201,7 @@ public void testParseRequestConfig_E5() { } { - var service = createService(mock(Client.class), Set.of("linux-x86_64")); + var service = createService(mock(Client.class), BaseElasticsearchInternalService.PreferredModelVariant.LINUX_X86_OPTIMIZED); var settings = new HashMap(); settings.put( ModelConfigurations.SERVICE_SETTINGS, @@ -231,7 +234,7 @@ public void testParseRequestConfig_E5() { // Invalid service settings { - var service = createService(mock(Client.class), Set.of("Aarch64")); + var service = createService(mock(Client.class), BaseElasticsearchInternalService.PreferredModelVariant.PLATFORM_AGNOSTIC); var settings = new HashMap(); settings.put( ModelConfigurations.SERVICE_SETTINGS, @@ -267,7 +270,7 @@ public void testParseRequestConfig_E5() { } ); - var service = 
createService(mock(Client.class), Set.of("Aarch64")); + var service = createService(mock(Client.class), BaseElasticsearchInternalService.PreferredModelVariant.PLATFORM_AGNOSTIC); var settings = new HashMap(); settings.put( ModelConfigurations.SERVICE_SETTINGS, @@ -289,7 +292,7 @@ public void testParseRequestConfig_E5() { { assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); - var service = createService(mock(Client.class), Set.of("Aarch64")); + var service = createService(mock(Client.class), BaseElasticsearchInternalService.PreferredModelVariant.PLATFORM_AGNOSTIC); var settings = new HashMap(); settings.put( ModelConfigurations.SERVICE_SETTINGS, @@ -318,7 +321,7 @@ public void testParseRequestConfig_E5() { { assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); - var service = createService(mock(Client.class), Set.of("Aarch64")); + var service = createService(mock(Client.class), BaseElasticsearchInternalService.PreferredModelVariant.PLATFORM_AGNOSTIC); var settings = new HashMap(); settings.put( ModelConfigurations.SERVICE_SETTINGS, @@ -1492,26 +1495,33 @@ public void testParseRequestConfigEland_SetsDimensionsToOne() { public void testModelVariantDoesNotMatchArchitecturesAndIsNotPlatformAgnostic() { { - var architectures = Set.of("Aarch64"); assertFalse( - ElasticsearchInternalService.modelVariantValidForArchitecture(architectures, MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86) + ElasticsearchInternalService.modelVariantValidForArchitecture( + BaseElasticsearchInternalService.PreferredModelVariant.PLATFORM_AGNOSTIC, + MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86 + ) ); - assertTrue(ElasticsearchInternalService.modelVariantValidForArchitecture(architectures, MULTILINGUAL_E5_SMALL_MODEL_ID)); - } - { - var architectures = Set.of("linux-x86_64"); assertTrue( - ElasticsearchInternalService.modelVariantValidForArchitecture(architectures, 
MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86) + ElasticsearchInternalService.modelVariantValidForArchitecture( + BaseElasticsearchInternalService.PreferredModelVariant.PLATFORM_AGNOSTIC, + MULTILINGUAL_E5_SMALL_MODEL_ID + ) ); - assertTrue(ElasticsearchInternalService.modelVariantValidForArchitecture(architectures, MULTILINGUAL_E5_SMALL_MODEL_ID)); } { - var architectures = Set.of("linux-x86_64", "Aarch64"); - assertFalse( - ElasticsearchInternalService.modelVariantValidForArchitecture(architectures, MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86) + assertTrue( + ElasticsearchInternalService.modelVariantValidForArchitecture( + BaseElasticsearchInternalService.PreferredModelVariant.LINUX_X86_OPTIMIZED, + MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86 + ) + ); + assertTrue( + ElasticsearchInternalService.modelVariantValidForArchitecture( + BaseElasticsearchInternalService.PreferredModelVariant.LINUX_X86_OPTIMIZED, + MULTILINGUAL_E5_SMALL_MODEL_ID + ) ); - assertTrue(ElasticsearchInternalService.modelVariantValidForArchitecture(architectures, MULTILINGUAL_E5_SMALL_MODEL_ID)); } } @@ -1542,12 +1552,20 @@ public void testEmbeddingTypeFromTaskTypeAndSettings() { } private ElasticsearchInternalService createService(Client client) { - var context = new InferenceServiceExtension.InferenceServiceFactoryContext(client, threadPool); + var cs = mock(ClusterService.class); + var cSettings = new ClusterSettings(Settings.EMPTY, Set.of(MachineLearningField.MAX_LAZY_ML_NODES)); + when(cs.getClusterSettings()).thenReturn(cSettings); + var context = new InferenceServiceExtension.InferenceServiceFactoryContext(client, threadPool, cs, Settings.EMPTY); return new ElasticsearchInternalService(context); } - private ElasticsearchInternalService createService(Client client, Set architectures) { - var context = new InferenceServiceExtension.InferenceServiceFactoryContext(client, threadPool); - return new ElasticsearchInternalService(context, l -> l.onResponse(architectures)); + private 
ElasticsearchInternalService createService(Client client, BaseElasticsearchInternalService.PreferredModelVariant modelVariant) { + var context = new InferenceServiceExtension.InferenceServiceFactoryContext( + client, + threadPool, + mock(ClusterService.class), + Settings.EMPTY + ); + return new ElasticsearchInternalService(context, l -> l.onResponse(modelVariant)); } } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/AutoscalingIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/AutoscalingIT.java index 13fb2f21bbf67..f6eb7206009fa 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/AutoscalingIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/AutoscalingIT.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.autoscaling.action.PutAutoscalingPolicyAction; import org.elasticsearch.xpack.autoscaling.capacity.AutoscalingDeciderResult; import org.elasticsearch.xpack.autoscaling.capacity.AutoscalingDeciderResults; +import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAction; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelDefinitionPartAction; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelVocabularyAction; @@ -62,14 +63,14 @@ public class AutoscalingIT extends MlNativeAutodetectIntegTestCase { @Before public void putSettings() { updateClusterSettings( - Settings.builder().put(MachineLearning.MAX_LAZY_ML_NODES.getKey(), 100).put("logger.org.elasticsearch.xpack.ml", "DEBUG") + Settings.builder().put(MachineLearningField.MAX_LAZY_ML_NODES.getKey(), 100).put("logger.org.elasticsearch.xpack.ml", "DEBUG") ); } @After public void removeSettings() { updateClusterSettings( - 
Settings.builder().putNull(MachineLearning.MAX_LAZY_ML_NODES.getKey()).putNull("logger.org.elasticsearch.xpack.ml") + Settings.builder().putNull(MachineLearningField.MAX_LAZY_ML_NODES.getKey()).putNull("logger.org.elasticsearch.xpack.ml") ); cleanUp(); } diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TooManyJobsIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TooManyJobsIT.java index f6a58002bbac5..083a444fbf14c 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TooManyJobsIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TooManyJobsIT.java @@ -16,6 +16,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.CloseJobAction; import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction; @@ -84,9 +85,9 @@ public void testLazyNodeValidation() throws Exception { logger.info("Started [{}] nodes", numNodes); ensureStableCluster(numNodes); ensureTemplatesArePresent(); - logger.info("[{}] is [{}]", MachineLearning.MAX_LAZY_ML_NODES.getKey(), maxNumberOfLazyNodes); + logger.info("[{}] is [{}]", MachineLearningField.MAX_LAZY_ML_NODES.getKey(), maxNumberOfLazyNodes); // Set our lazy node number - updateClusterSettings(Settings.builder().put(MachineLearning.MAX_LAZY_ML_NODES.getKey(), maxNumberOfLazyNodes)); + updateClusterSettings(Settings.builder().put(MachineLearningField.MAX_LAZY_ML_NODES.getKey(), maxNumberOfLazyNodes)); // create and open first job, which succeeds: Job.Builder job = createJob("lazy-node-validation-job-1", ByteSizeValue.ofMb(2)); PutJobAction.Request putJobRequest = new PutJobAction.Request(job); diff 
--git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index f8a590a23a2c1..6d21654f9e161 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -649,14 +649,6 @@ public void loadExtensions(ExtensionLoader loader) { Property.NodeScope ); - public static final Setting MAX_LAZY_ML_NODES = Setting.intSetting( - "xpack.ml.max_lazy_ml_nodes", - 0, - 0, - Property.OperatorDynamic, - Property.NodeScope - ); - // Before 8.0.0 this needs to match the max allowed value for xpack.ml.max_open_jobs, // as the current node could be running in a cluster where some nodes are still using // that setting. From 8.0.0 onwards we have the flexibility to increase it... @@ -810,7 +802,7 @@ public List> getSettings() { PROCESS_CONNECT_TIMEOUT, CONCURRENT_JOB_ALLOCATIONS, MachineLearningField.MAX_MODEL_MEMORY_LIMIT, - MAX_LAZY_ML_NODES, + MachineLearningField.MAX_LAZY_ML_NODES, MAX_MACHINE_MEMORY_PERCENT, AutodetectBuilder.MAX_ANOMALY_RECORDS_SETTING_DYNAMIC, MAX_OPEN_JOBS_PER_NODE, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlInfoAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlInfoAction.java index bc017915e00aa..1edc02ff44a11 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlInfoAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlInfoAction.java @@ -22,6 +22,7 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.MlInfoAction; import 
org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; @@ -162,7 +163,7 @@ private Map limits() { clusterSettings.get(MachineLearning.ALLOCATED_PROCESSORS_SCALE) ); if (totalMlProcessors.count() > 0) { - int potentialExtraProcessors = Math.max(0, clusterSettings.get(MachineLearning.MAX_LAZY_ML_NODES) - mlNodes.size()) + int potentialExtraProcessors = Math.max(0, clusterSettings.get(MachineLearningField.MAX_LAZY_ML_NODES) - mlNodes.size()) * singleNodeProcessors.roundUp(); limits.put("total_ml_processors", totalMlProcessors.roundUp() + potentialExtraProcessors); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java index 96490716c5c6c..65fa47e1c510d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java @@ -113,7 +113,7 @@ public TrainedModelAssignmentClusterService( this.maxMemoryPercentage = MachineLearning.MAX_MACHINE_MEMORY_PERCENT.get(settings); this.useAuto = MachineLearningField.USE_AUTO_MACHINE_MEMORY_PERCENT.get(settings); this.maxOpenJobs = MachineLearning.MAX_OPEN_JOBS_PER_NODE.get(settings); - this.maxLazyMLNodes = MachineLearning.MAX_LAZY_ML_NODES.get(settings); + this.maxLazyMLNodes = MachineLearningField.MAX_LAZY_ML_NODES.get(settings); this.maxMLNodeSize = MachineLearning.MAX_ML_NODE_SIZE.get(settings).getBytes(); this.allocatedProcessorsScale = MachineLearning.ALLOCATED_PROCESSORS_SCALE.get(settings); this.client = client; @@ -125,7 +125,7 @@ public TrainedModelAssignmentClusterService( clusterService.getClusterSettings() .addSettingsUpdateConsumer(MachineLearningField.USE_AUTO_MACHINE_MEMORY_PERCENT, this::setUseAuto); 
clusterService.getClusterSettings().addSettingsUpdateConsumer(MachineLearning.MAX_OPEN_JOBS_PER_NODE, this::setMaxOpenJobs); - clusterService.getClusterSettings().addSettingsUpdateConsumer(MachineLearning.MAX_LAZY_ML_NODES, this::setMaxLazyMLNodes); + clusterService.getClusterSettings().addSettingsUpdateConsumer(MachineLearningField.MAX_LAZY_ML_NODES, this::setMaxLazyMLNodes); clusterService.getClusterSettings().addSettingsUpdateConsumer(MachineLearning.MAX_ML_NODE_SIZE, this::setMaxMLNodeSize); clusterService.getClusterSettings() .addSettingsUpdateConsumer(MachineLearning.ALLOCATED_PROCESSORS_SCALE, this::setAllocatedProcessorsScale); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/task/AbstractJobPersistentTasksExecutor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/task/AbstractJobPersistentTasksExecutor.java index 32543b45259c2..7e0ff4f029bd4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/task/AbstractJobPersistentTasksExecutor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/task/AbstractJobPersistentTasksExecutor.java @@ -24,6 +24,7 @@ import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.persistent.PersistentTasksExecutor; import org.elasticsearch.xpack.core.common.notifications.AbstractAuditor; +import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.ml.MachineLearning; @@ -103,7 +104,7 @@ protected AbstractJobPersistentTasksExecutor( this.expressionResolver = Objects.requireNonNull(expressionResolver); this.maxConcurrentJobAllocations = MachineLearning.CONCURRENT_JOB_ALLOCATIONS.get(settings); this.maxMachineMemoryPercent = MachineLearning.MAX_MACHINE_MEMORY_PERCENT.get(settings); - this.maxLazyMLNodes = MachineLearning.MAX_LAZY_ML_NODES.get(settings); + this.maxLazyMLNodes = 
MachineLearningField.MAX_LAZY_ML_NODES.get(settings); this.maxOpenJobs = MAX_OPEN_JOBS_PER_NODE.get(settings); this.useAutoMemoryPercentage = USE_AUTO_MACHINE_MEMORY_PERCENT.get(settings); this.maxNodeMemory = MAX_ML_NODE_SIZE.get(settings).getBytes(); @@ -111,7 +112,7 @@ protected AbstractJobPersistentTasksExecutor( .addSettingsUpdateConsumer(MachineLearning.CONCURRENT_JOB_ALLOCATIONS, this::setMaxConcurrentJobAllocations); clusterService.getClusterSettings() .addSettingsUpdateConsumer(MachineLearning.MAX_MACHINE_MEMORY_PERCENT, this::setMaxMachineMemoryPercent); - clusterService.getClusterSettings().addSettingsUpdateConsumer(MachineLearning.MAX_LAZY_ML_NODES, this::setMaxLazyMLNodes); + clusterService.getClusterSettings().addSettingsUpdateConsumer(MachineLearningField.MAX_LAZY_ML_NODES, this::setMaxLazyMLNodes); clusterService.getClusterSettings().addSettingsUpdateConsumer(MAX_OPEN_JOBS_PER_NODE, this::setMaxOpenJobs); clusterService.getClusterSettings().addSettingsUpdateConsumer(USE_AUTO_MACHINE_MEMORY_PERCENT, this::setUseAutoMemoryPercentage); clusterService.getClusterSettings().addSettingsUpdateConsumer(MAX_ML_NODE_SIZE, this::setMaxNodeSize); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/NativeMemoryCalculator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/NativeMemoryCalculator.java index 020f1aae29427..980d7d6b57481 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/NativeMemoryCalculator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/NativeMemoryCalculator.java @@ -22,10 +22,10 @@ import java.util.OptionalLong; +import static org.elasticsearch.xpack.core.ml.MachineLearningField.MAX_LAZY_ML_NODES; import static org.elasticsearch.xpack.core.ml.MachineLearningField.USE_AUTO_MACHINE_MEMORY_PERCENT; import static org.elasticsearch.xpack.ml.MachineLearning.MACHINE_MEMORY_NODE_ATTR; import static 
org.elasticsearch.xpack.ml.MachineLearning.MAX_JVM_SIZE_NODE_ATTR; -import static org.elasticsearch.xpack.ml.MachineLearning.MAX_LAZY_ML_NODES; import static org.elasticsearch.xpack.ml.MachineLearning.MAX_MACHINE_MEMORY_PERCENT; import static org.elasticsearch.xpack.ml.MachineLearning.MAX_ML_NODE_SIZE; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsActionTests.java index 64d1414134f38..33fae40f80db6 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsActionTests.java @@ -130,7 +130,7 @@ private static TaskExecutor createTaskExecutor() { MachineLearning.MAX_MACHINE_MEMORY_PERCENT, MachineLearningField.USE_AUTO_MACHINE_MEMORY_PERCENT, MachineLearning.MAX_ML_NODE_SIZE, - MachineLearning.MAX_LAZY_ML_NODES, + MachineLearningField.MAX_LAZY_ML_NODES, MachineLearning.MAX_OPEN_JOBS_PER_NODE ) ); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java index 1dc44582492aa..7b5c928f1f81a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java @@ -127,7 +127,7 @@ public void setupObjects() throws IllegalAccessException { MachineLearning.MAX_MACHINE_MEMORY_PERCENT, MachineLearningField.USE_AUTO_MACHINE_MEMORY_PERCENT, MachineLearning.MAX_OPEN_JOBS_PER_NODE, - MachineLearning.MAX_LAZY_ML_NODES, + 
MachineLearningField.MAX_LAZY_ML_NODES, MachineLearning.MAX_ML_NODE_SIZE, MachineLearning.ALLOCATED_PROCESSORS_SCALE ) @@ -2079,7 +2079,7 @@ private void assertThatStoppingAssignmentPreventsMutation( private TrainedModelAssignmentClusterService createClusterService(int maxLazyNodes) { return new TrainedModelAssignmentClusterService( - Settings.builder().put(MachineLearning.MAX_LAZY_ML_NODES.getKey(), maxLazyNodes).build(), + Settings.builder().put(MachineLearningField.MAX_LAZY_ML_NODES.getKey(), maxLazyNodes).build(), clusterService, threadPool, nodeLoadDetector, diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutorTests.java index eb2e21d5fda6c..64251c05af7c8 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutorTests.java @@ -105,7 +105,7 @@ public void setUpMocks() { ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING, MachineLearning.CONCURRENT_JOB_ALLOCATIONS, MachineLearning.MAX_MACHINE_MEMORY_PERCENT, - MachineLearning.MAX_LAZY_ML_NODES, + MachineLearningField.MAX_LAZY_ML_NODES, MachineLearning.MAX_ML_NODE_SIZE, MachineLearning.MAX_OPEN_JOBS_PER_NODE, MachineLearningField.USE_AUTO_MACHINE_MEMORY_PERCENT @@ -155,7 +155,7 @@ public void testValidate_givenValidJob() { // An index being unavailable should take precedence over waiting for a lazy node public void testGetAssignment_GivenUnavailableIndicesWithLazyNode() { - Settings settings = Settings.builder().put(MachineLearning.MAX_LAZY_ML_NODES.getKey(), 1).build(); + Settings settings = Settings.builder().put(MachineLearningField.MAX_LAZY_ML_NODES.getKey(), 1).build(); ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); 
Metadata.Builder metadata = Metadata.builder(); @@ -177,7 +177,7 @@ public void testGetAssignment_GivenUnavailableIndicesWithLazyNode() { } public void testGetAssignment_GivenLazyJobAndNoGlobalLazyNodes() { - Settings settings = Settings.builder().put(MachineLearning.MAX_LAZY_ML_NODES.getKey(), 0).build(); + Settings settings = Settings.builder().put(MachineLearningField.MAX_LAZY_ML_NODES.getKey(), 0).build(); ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); Metadata.Builder metadata = Metadata.builder(); RoutingTable.Builder routingTable = RoutingTable.builder(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/NativeMemoryCalculatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/NativeMemoryCalculatorTests.java index 7f7c22594abb8..fdb4458a6ec3f 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/NativeMemoryCalculatorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/NativeMemoryCalculatorTests.java @@ -36,11 +36,11 @@ import java.util.Set; import java.util.function.BiConsumer; +import static org.elasticsearch.xpack.core.ml.MachineLearningField.MAX_LAZY_ML_NODES; import static org.elasticsearch.xpack.core.ml.MachineLearningField.MAX_MODEL_MEMORY_LIMIT; import static org.elasticsearch.xpack.core.ml.MachineLearningField.USE_AUTO_MACHINE_MEMORY_PERCENT; import static org.elasticsearch.xpack.ml.MachineLearning.MACHINE_MEMORY_NODE_ATTR; import static org.elasticsearch.xpack.ml.MachineLearning.MAX_JVM_SIZE_NODE_ATTR; -import static org.elasticsearch.xpack.ml.MachineLearning.MAX_LAZY_ML_NODES; import static org.elasticsearch.xpack.ml.MachineLearning.MAX_MACHINE_MEMORY_PERCENT; import static org.elasticsearch.xpack.ml.MachineLearning.MAX_ML_NODE_SIZE; import static org.elasticsearch.xpack.ml.autoscaling.MlAutoscalingDeciderServiceTests.AUTO_NODE_TIERS_NO_MONITORING; diff --git 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml index b1f640a40b34e..cdc69001d33ef 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml @@ -39,27 +39,4 @@ } - match: { error.reason: "Unknown task_type [bad]" } ---- -"Test get all": - - requires: - cluster_features: "semantic_text.default_elser_2" - reason: semantic_text default ELSER 2 inference ID introduced in 8.16.0 - - - do: - inference.get: - inference_id: "*" - - length: { endpoints: 1} - - match: { endpoints.0.inference_id: ".elser-2" } - - - do: - inference.get: - inference_id: _all - - length: { endpoints: 1} - - match: { endpoints.0.inference_id: ".elser-2" } - - - do: - inference.get: - inference_id: "" - - length: { endpoints: 1} - - match: { endpoints.0.inference_id: ".elser-2" } From 7c2e0752df1833808ed496a865f3f13b75ef150d Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Mon, 14 Oct 2024 12:45:56 -0400 Subject: [PATCH 055/449] [ML] Ignore unrecognized openai sse fields (#114715) Azure / Llama sends back fields we do not expect - rewriting the parser to better handle unknown fields (by dropping them). 
--- docs/changelog/114715.yaml | 5 +++ .../openai/OpenAiStreamingProcessor.java | 32 +++++++++-------- .../openai/OpenAiStreamingProcessorTests.java | 36 +++++++++++++++++++ 3 files changed, 59 insertions(+), 14 deletions(-) create mode 100644 docs/changelog/114715.yaml diff --git a/docs/changelog/114715.yaml b/docs/changelog/114715.yaml new file mode 100644 index 0000000000000..0894cb2fa42ca --- /dev/null +++ b/docs/changelog/114715.yaml @@ -0,0 +1,5 @@ +pr: 114715 +summary: Ignore unrecognized openai sse fields +area: Machine Learning +type: bug +issues: [] diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiStreamingProcessor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiStreamingProcessor.java index 803bae40b33ed..6e006fe255956 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiStreamingProcessor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiStreamingProcessor.java @@ -110,8 +110,6 @@ public class OpenAiStreamingProcessor extends DelegatingProcessor parse(XContentParserConf ensureExpectedToken(XContentParser.Token.START_OBJECT, currentToken, parser); currentToken = parser.nextToken(); - if (currentToken == XContentParser.Token.END_OBJECT) { - consumeUntilObjectEnd(parser); // end choices - return ""; // stopped - } - if (currentToken == XContentParser.Token.FIELD_NAME && parser.currentName().equals(CONTENT_FIELD)) { - parser.nextToken(); - } else { - positionParserAtTokenAfterField(parser, CONTENT_FIELD, FAILED_TO_FIND_FIELD_TEMPLATE); + // continue until the end of delta + while (currentToken != null && currentToken != XContentParser.Token.END_OBJECT) { + if (currentToken == XContentParser.Token.START_OBJECT || currentToken == XContentParser.Token.START_ARRAY) { + parser.skipChildren(); + } + + if (currentToken == 
XContentParser.Token.FIELD_NAME && parser.currentName().equals(CONTENT_FIELD)) { + parser.nextToken(); + ensureExpectedToken(XContentParser.Token.VALUE_STRING, parser.currentToken(), parser); + var content = parser.text(); + consumeUntilObjectEnd(parser); // end delta + consumeUntilObjectEnd(parser); // end choices + return content; + } + + currentToken = parser.nextToken(); } - ensureExpectedToken(XContentParser.Token.VALUE_STRING, parser.currentToken(), parser); - var content = parser.text(); - consumeUntilObjectEnd(parser); // end delta + consumeUntilObjectEnd(parser); // end choices - return content; + return ""; // stopped }).stream() .filter(Objects::nonNull) .filter(Predicate.not(String::isEmpty)) diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiStreamingProcessorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiStreamingProcessorTests.java index a57e7c1b64c07..90d0e8742f733 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiStreamingProcessorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiStreamingProcessorTests.java @@ -149,6 +149,42 @@ public void testDoneMessageIsIgnored() throws Exception { verify(downstream, times(0)).onNext(any()); } + public void testInitialLlamaResponseIsIgnored() throws Exception { + var item = new ArrayDeque(); + item.offer(new ServerSentEvent(ServerSentEventField.DATA, """ + { + "id":"12345", + "object":"chat.completion.chunk", + "created":123456789, + "model":"Llama-2-7b-chat", + "system_fingerprint": "123456789", + "choices":[ + { + "index":0, + "delta":{ + "role":"assistant" + }, + "logprobs":null, + "finish_reason":null + } + ] + } + """)); + + var processor = new OpenAiStreamingProcessor(); + + Flow.Subscriber downstream = mock(); + processor.subscribe(downstream); + + Flow.Subscription upstream 
= mock(); + processor.onSubscribe(upstream); + + processor.next(item); + + verify(upstream, times(1)).request(1); + verify(downstream, times(0)).onNext(any()); + } + private String toJsonString(ChunkedToXContent chunkedToXContent) throws IOException { try (var builder = XContentFactory.jsonBuilder()) { chunkedToXContent.toXContentChunked(EMPTY_PARAMS).forEachRemaining(xContent -> { From 150058ef27df3da931b31e636bd8b335eacb1e66 Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Mon, 14 Oct 2024 12:49:51 -0400 Subject: [PATCH 056/449] [ML] Send mid-stream errors to users (#114549) If apache sends an error mid stream, forward it to the user rather than the now-ignored listener. --- docs/changelog/114549.yaml | 5 + .../inference/external/http/HttpClient.java | 59 ++++------- .../http/StreamingHttpResultPublisher.java | 12 ++- .../StreamingHttpResultPublisherTests.java | 100 +++++++++++++----- 4 files changed, 112 insertions(+), 64 deletions(-) create mode 100644 docs/changelog/114549.yaml diff --git a/docs/changelog/114549.yaml b/docs/changelog/114549.yaml new file mode 100644 index 0000000000000..a6bdbba93876b --- /dev/null +++ b/docs/changelog/114549.yaml @@ -0,0 +1,5 @@ +pr: 114549 +summary: Send mid-stream errors to users +area: Machine Learning +type: bug +issues: [] diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClient.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClient.java index 6b04b66cb7c11..f0102d01b37a1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClient.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClient.java @@ -153,44 +153,31 @@ public void stream(HttpRequest request, HttpContext context, ActionListener client.execute( - request.requestProducer(), - new StreamingHttpResultPublisher(threadPool, settings, callOnceListener), - context, - new 
FutureCallback<>() { - @Override - public void completed(HttpResponse response) { - // StreamingHttpResultPublisher will publish results to the Flow.Publisher returned in the ActionListener - } - - @Override - public void failed(Exception ex) { - throttlerManager.warn( - logger, - format("Request from inference entity id [%s] failed", request.inferenceEntityId()), - ex - ); - failUsingUtilityThread(ex, callOnceListener); - } - - @Override - public void cancelled() { - failUsingUtilityThread( + var streamingProcessor = new StreamingHttpResultPublisher(threadPool, settings, listener); + + SocketAccess.doPrivileged(() -> client.execute(request.requestProducer(), streamingProcessor, context, new FutureCallback<>() { + @Override + public void completed(HttpResponse response) { + streamingProcessor.close(); + } + + @Override + public void failed(Exception ex) { + threadPool.executor(UTILITY_THREAD_POOL_NAME).execute(() -> streamingProcessor.failed(ex)); + } + + @Override + public void cancelled() { + threadPool.executor(UTILITY_THREAD_POOL_NAME) + .execute( + () -> streamingProcessor.failed( new CancellationException( format("Request from inference entity id [%s] was cancelled", request.inferenceEntityId()) - ), - callOnceListener - ); - } - } - ) - ); + ) + ) + ); + } + })); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/StreamingHttpResultPublisher.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/StreamingHttpResultPublisher.java index 3fd5d02ef4679..bf74ca86a969a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/StreamingHttpResultPublisher.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/StreamingHttpResultPublisher.java @@ -65,7 +65,7 @@ class StreamingHttpResultPublisher implements HttpAsyncResponseConsumer> listener) { this.settings = 
Objects.requireNonNull(settings); - this.listener = Objects.requireNonNull(listener); + this.listener = ActionListener.notifyOnce(Objects.requireNonNull(listener)); this.taskRunner = new RequestBasedTaskRunner(new OffloadThread(), threadPool, UTILITY_THREAD_POOL_NAME); } @@ -152,9 +152,13 @@ public void responseCompleted(HttpContext httpContext) {} @Override public void failed(Exception e) { if (this.isDone.compareAndSet(false, true)) { - ex = e; - queue.offer(() -> subscriber.onError(e)); - taskRunner.requestNextRun(); + if (listenerCalled.compareAndSet(false, true)) { + listener.onFailure(e); + } else { + ex = e; + queue.offer(() -> subscriber.onError(e)); + taskRunner.requestNextRun(); + } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/StreamingHttpResultPublisherTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/StreamingHttpResultPublisherTests.java index be47d8806aade..a400b67b3761f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/StreamingHttpResultPublisherTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/StreamingHttpResultPublisherTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.junit.Before; +import org.mockito.ArgumentCaptor; import java.io.IOException; import java.nio.ByteBuffer; @@ -38,6 +39,7 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; @@ -59,7 +61,7 @@ public void setUp() throws Exception { super.setUp(); threadPool = mock(ThreadPool.class); settings = mock(HttpSettings.class); - listener = 
ActionListener.noop(); + listener = spy(ActionListener.noop()); when(threadPool.executor(UTILITY_THREAD_POOL_NAME)).thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE); when(settings.getMaxResponseSize()).thenReturn(ByteSizeValue.ofBytes(maxBytes)); @@ -235,33 +237,13 @@ public void testTotalBytesDecrement() throws IOException { } /** - * Given an error from Apache - * When the subscriber requests the next set of data - * Then the subscriber receives the error from Apache + * When there is an error from Apache before the publisher invokes the listener + * Then the publisher will forward the call to the listener's onFailure */ public void testErrorBeforeRequest() { - var subscriber = subscribe(); var exception = new NullPointerException("test"); - publisher.failed(exception); - assertThat("subscriber receives exception on next request", subscriber.throwable, nullValue()); - - subscriber.requestData(); - assertThat("subscriber receives exception", subscriber.throwable, is(exception)); - } - - /** - * Given the subscriber is waiting for data - * When Apache sends an error - * Then the subscriber immediately receives the error - */ - public void testErrorAfterRequest() { - var subscriber = subscribe(); - var exception = new NullPointerException("test"); - - subscriber.requestData(); - publisher.failed(exception); - assertThat("subscriber receives exception", subscriber.throwable, is(exception)); + verify(listener).onFailure(exception); } /** @@ -375,6 +357,76 @@ public void testCancelAfterRequest() { assertTrue("onComplete should be called", subscriber.completed); } + /** + * When cancel is called + * Then we only send onComplete once + */ + public void testCancelIsIdempotent() throws IOException { + Flow.Subscriber subscriber = mock(); + + var subscription = ArgumentCaptor.forClass(Flow.Subscription.class); + publisher.subscribe(subscriber); + verify(subscriber).onSubscribe(subscription.capture()); + + publisher.responseReceived(mock()); + 
publisher.consumeContent(contentDecoder(message), mock(IOControl.class)); + subscription.getValue().request(1); + + subscription.getValue().request(1); + publisher.cancel(); + verify(subscriber, times(1)).onComplete(); + subscription.getValue().request(1); + publisher.cancel(); + verify(subscriber, times(1)).onComplete(); + } + + /** + * When close is called + * Then we only send onComplete once + */ + public void testCloseIsIdempotent() throws IOException { + Flow.Subscriber subscriber = mock(); + + var subscription = ArgumentCaptor.forClass(Flow.Subscription.class); + publisher.subscribe(subscriber); + verify(subscriber).onSubscribe(subscription.capture()); + + publisher.responseReceived(mock()); + publisher.consumeContent(contentDecoder(message), mock(IOControl.class)); + subscription.getValue().request(1); + + subscription.getValue().request(1); + publisher.close(); + verify(subscriber, times(1)).onComplete(); + subscription.getValue().request(1); + publisher.close(); + verify(subscriber, times(1)).onComplete(); + } + + /** + * When failed is called + * Then we only send onError once + */ + public void testFailedIsIdempotent() throws IOException { + var expectedException = new IllegalStateException("wow"); + Flow.Subscriber subscriber = mock(); + + var subscription = ArgumentCaptor.forClass(Flow.Subscription.class); + publisher.subscribe(subscriber); + verify(subscriber).onSubscribe(subscription.capture()); + + publisher.responseReceived(mock()); + publisher.consumeContent(contentDecoder(message), mock(IOControl.class)); + subscription.getValue().request(1); + + subscription.getValue().request(1); + publisher.failed(expectedException); + verify(subscriber, times(1)).onError(eq(expectedException)); + subscription.getValue().request(1); + publisher.failed(expectedException); + verify(subscriber, times(1)).onError(eq(expectedException)); + } + /** * Given the queue is being processed * When Apache cancels the publisher From 69b4a9f8ff634db52b7768bde25352463f34359e 
Mon Sep 17 00:00:00 2001 From: Kathleen DeRusso Date: Mon, 14 Oct 2024 12:55:11 -0400 Subject: [PATCH 057/449] Add a query rules tester API call (#114168) * Add a query rules tester API call * Update docs/changelog/114168.yaml * Wrap client call in async with origin * Remove unused param * PR feedback * Remove redundant test * CI workaround - add ent-search as ml dependency so it can find node features --- docs/changelog/114168.yaml | 5 + .../reference/query-rules/apis/index.asciidoc | 2 + .../apis/test-query-ruleset.asciidoc | 133 +++++++++ .../rest-api-spec/api/query_rules.test.json | 38 +++ .../org/elasticsearch/TransportVersions.java | 1 + .../entsearch/rules/70_query_rule_test.yml | 252 ++++++++++++++++++ .../xpack/application/EnterpriseSearch.java | 7 +- .../application/EnterpriseSearchFeatures.java | 9 + .../xpack/application/rules/QueryRule.java | 13 +- .../rules/action/GetQueryRulesetAction.java | 3 +- .../action/RestTestQueryRulesetAction.java | 53 ++++ .../rules/action/TestQueryRulesetAction.java | 212 +++++++++++++++ .../TransportTestQueryRulesetAction.java | 64 +++++ .../EnterpriseSearchModuleTestUtils.java | 4 + .../RestTestQueryRulesetActionTests.java | 53 ++++ ...lesetActionRequestBWCSerializingTests.java | 56 ++++ ...esetActionResponseBWCSerializingTests.java | 52 ++++ .../qa/native-multi-node-tests/build.gradle | 1 + .../xpack/security/operator/Constants.java | 1 + 19 files changed, 951 insertions(+), 8 deletions(-) create mode 100644 docs/changelog/114168.yaml create mode 100644 docs/reference/query-rules/apis/test-query-ruleset.asciidoc create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.test.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/70_query_rule_test.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/RestTestQueryRulesetAction.java create mode 100644 
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/TestQueryRulesetAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/TransportTestQueryRulesetAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/RestTestQueryRulesetActionTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/TestQueryRulesetActionRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/TestQueryRulesetActionResponseBWCSerializingTests.java diff --git a/docs/changelog/114168.yaml b/docs/changelog/114168.yaml new file mode 100644 index 0000000000000..58f1ab7110e7d --- /dev/null +++ b/docs/changelog/114168.yaml @@ -0,0 +1,5 @@ +pr: 114168 +summary: Add a query rules tester API call +area: Relevance +type: enhancement +issues: [] diff --git a/docs/reference/query-rules/apis/index.asciidoc b/docs/reference/query-rules/apis/index.asciidoc index 53d5fc3dc4eee..fbeb477acacb5 100644 --- a/docs/reference/query-rules/apis/index.asciidoc +++ b/docs/reference/query-rules/apis/index.asciidoc @@ -23,6 +23,7 @@ Use the following APIs to manage query rulesets: * <> * <> * <> +* preview:[] <> include::put-query-ruleset.asciidoc[] include::get-query-ruleset.asciidoc[] @@ -31,4 +32,5 @@ include::delete-query-ruleset.asciidoc[] include::put-query-rule.asciidoc[] include::get-query-rule.asciidoc[] include::delete-query-rule.asciidoc[] +include::test-query-ruleset.asciidoc[] diff --git a/docs/reference/query-rules/apis/test-query-ruleset.asciidoc b/docs/reference/query-rules/apis/test-query-ruleset.asciidoc new file mode 100644 index 0000000000000..4a670645cea6e --- /dev/null +++ b/docs/reference/query-rules/apis/test-query-ruleset.asciidoc @@ -0,0 +1,133 @@ +[role="xpack"] +[[test-query-ruleset]] +=== Test 
query ruleset + +++++ +Tests query ruleset +++++ + +Evaluates match criteria against a query ruleset to identify the rules that would match that criteria. + +preview::[] + +[[test-query-ruleset-request]] +==== {api-request-title} + +`POST _query_rules//_test` + +[[test-query-ruleset-prereq]] +==== {api-prereq-title} + +Requires the `manage_search_query_rules` privilege. + +[[test-query-ruleset-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) + +[[test-query-rule-request-body]] +==== {api-request-body-title} + +`match_criteria`:: +(Required, object) Defines the match criteria to apply to rules in the given query ruleset. +Match criteria should match the keys defined in the `criteria.metadata` field of the rule. + +[[test-query-ruleset-response-codes]] +==== {api-response-codes-title} + +`400`:: +The `ruleset_id` or `match_criteria` were not provided. + +`404` (Missing resources):: +No query ruleset matching `ruleset_id` could be found. + +[[test-query-ruleset-example]] +==== {api-examples-title} + +To test a ruleset, provide the match criteria that you want to test against: + +//// + +[source,console] +-------------------------------------------------- +PUT _query_rules/my-ruleset +{ + "rules": [ + { + "rule_id": "my-rule1", + "type": "pinned", + "criteria": [ + { + "type": "contains", + "metadata": "query_string", + "values": [ "pugs", "puggles" ] + } + ], + "actions": { + "ids": [ + "id1", + "id2" + ] + } + }, + { + "rule_id": "my-rule2", + "type": "pinned", + "criteria": [ + { + "type": "fuzzy", + "metadata": "query_string", + "values": [ "rescue dogs" ] + } + ], + "actions": { + "docs": [ + { + "_index": "index1", + "_id": "id3" + }, + { + "_index": "index2", + "_id": "id4" + } + ] + } + } + ] +} +-------------------------------------------------- +// TESTSETUP + +[source,console] +-------------------------------------------------- +DELETE _query_rules/my-ruleset +-------------------------------------------------- +// TEARDOWN + +//// + 
+[source,console] +---- +POST _query_rules/my-ruleset/_test +{ + "match_criteria": { + "query_string": "puggles" + } +} +---- + +A sample response: + +[source,console-result] +---- +{ + "total_matched_rules": 1, + "matched_rules": [ + { + "ruleset_id": "my-ruleset", + "rule_id": "my-rule1" + } + ] +} +---- diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.test.json b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.test.json new file mode 100644 index 0000000000000..c82b45771ac7f --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.test.json @@ -0,0 +1,38 @@ +{ + "query_rules.test": { + "documentation": { + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/test-query-ruleset.html", + "description": "Tests a query ruleset to identify the rules that would match input criteria" + }, + "stability": "experimental", + "visibility": "public", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_query_rules/{ruleset_id}/_test", + "methods": [ + "POST" + ], + "parts": { + "ruleset_id": { + "type": "string", + "description": "The unique identifier of the ruleset to test." 
+ } + } + } + ] + }, + "body": { + "description": "The match criteria to test against the ruleset", + "required": true + } + } +} diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 03186e63240e5..ab4321edd3f71 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -242,6 +242,7 @@ static TransportVersion def(int id) { public static final TransportVersion ESQL_CACHED_STRING_SERIALIZATION = def(8_766_00_0); public static final TransportVersion CHUNK_SENTENCE_OVERLAP_SETTING_ADDED = def(8_767_00_0); public static final TransportVersion OPT_IN_ESQL_CCS_EXECUTION_INFO = def(8_768_00_0); + public static final TransportVersion QUERY_RULE_TEST_API = def(8_769_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/70_query_rule_test.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/70_query_rule_test.yml new file mode 100644 index 0000000000000..016d9f10fe77f --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/70_query_rule_test.yml @@ -0,0 +1,252 @@ +setup: + - requires: + cluster_features: [ "query_rules.test" ] + reason: Introduced in 8.16.0 + + - do: + query_rules.put_ruleset: + ruleset_id: test-ruleset + body: + rules: + - rule_id: rule1 + type: pinned + criteria: + - type: exact + metadata: query_string + values: [ search ] + actions: + ids: + - 'doc1' + - rule_id: rule2 + type: pinned + criteria: + - type: exact + metadata: query_string + values: [ ui ] + actions: + docs: + - '_index': 'test-index1' + '_id': 'doc2' + - rule_id: rule3 + type: pinned + criteria: + - type: contains + metadata: query_string + values: [ kibana, logstash ] + actions: + ids: + - 'doc2' + - 'doc3' + 
- rule_id: rule4 + type: pinned + criteria: + - type: exact + metadata: query_string + values: [ ops ] + actions: + ids: + - 'doc7' + - rule_id: rule5 + type: exclude + criteria: + - type: exact + metadata: query_string + values: [ search ] + actions: + ids: + - 'doc8' + +--- +teardown: + - do: + query_rules.delete_ruleset: + ruleset_id: test-ruleset + ignore: 404 + + - do: + query_rules.delete_ruleset: + ruleset_id: combined-ruleset + ignore: 404 + + - do: + query_rules.delete_ruleset: + ruleset_id: double-jeopardy-ruleset + ignore: 404 + +--- +"Test query rules, specifying a ruleset that does not exist": + - do: + catch: /resource_not_found_exception/ + query_rules.test: + ruleset_id: nonexistent-ruleset + body: + match_criteria: + foo: bar + + +--- +"Test query rules with an empty body": + - do: + catch: bad_request + query_rules.test: + ruleset_id: nonexistent-ruleset + body: { } + +--- +"Test query rules with an ID match": + + - do: + query_rules.test: + ruleset_id: test-ruleset + body: + match_criteria: + query_string: search + + - match: { total_matched_rules: 2 } + - match: { matched_rules.0.rule_id: 'rule1' } + - match: { matched_rules.1.rule_id: 'rule5' } + +--- +"As a user, test query rules with an ID match": + - skip: + features: headers + + - do: + catch: forbidden + headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + query_rules.test: + ruleset_id: test-ruleset + body: + match_criteria: + query_string: search + +--- +"Test query rules with a doc match": + + - do: + query_rules.test: + ruleset_id: test-ruleset + body: + match_criteria: + query_string: ui + + - match: { total_matched_rules: 1 } + - match: { matched_rules.0.rule_id: 'rule2' } + +--- +"As a user, test query rules with a doc match": + - skip: + features: headers + + - do: + catch: forbidden + headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + query_rules.test: + ruleset_id: test-ruleset + body: + 
match_criteria: + query_string: ui + +--- +"Test query rules with no matching rules": + + - do: + query_rules.test: + ruleset_id: test-ruleset + body: + match_criteria: + query_string: no-match + + - match: { total_matched_rules: 0 } + +--- +"Test rules where the same ID is both pinned and excluded": + - do: + query_rules.put_ruleset: + ruleset_id: double-jeopardy-ruleset + body: + rules: + - rule_id: rule1 + type: pinned + criteria: + - type: exact + metadata: foo + values: [ bar ] + actions: + ids: + - 'doc8' + - rule_id: rule2 + type: exclude + criteria: + - type: exact + metadata: foo + values: [ bar ] + actions: + ids: + - 'doc8' + + - do: + query_rules.test: + ruleset_id: double-jeopardy-ruleset + body: + match_criteria: + foo: bar + + - match: { total_matched_rules: 2 } + - match: { matched_rules.0.rule_id: 'rule1' } + - match: { matched_rules.1.rule_id: 'rule2' } + +--- +"Perform a rule query over a ruleset with combined numeric and text rule matching": + + - do: + query_rules.put_ruleset: + ruleset_id: combined-ruleset + body: + rules: + - rule_id: rule1 + type: pinned + criteria: + - type: exact + metadata: foo + values: [ bar ] + actions: + ids: + - 'doc1' + - rule_id: rule2 + type: pinned + criteria: + - type: lte + metadata: foo + values: [ 100 ] + actions: + ids: + - 'doc2' + - do: + query_rules.test: + ruleset_id: combined-ruleset + body: + match_criteria: + foo: 100 + + - match: { total_matched_rules: 1 } + - match: { matched_rules.0.rule_id: 'rule2' } + + - do: + query_rules.test: + ruleset_id: combined-ruleset + body: + match_criteria: + foo: bar + + - match: { total_matched_rules: 1 } + - match: { matched_rules.0.rule_id: 'rule1' } + + - do: + query_rules.test: + ruleset_id: combined-ruleset + body: + match_criteria: + foo: baz + + - match: { total_matched_rules: 0 } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index bdd4cae3dda81..d5aef3b8808e8 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -165,6 +165,8 @@ import org.elasticsearch.xpack.application.rules.action.RestListQueryRulesetsAction; import org.elasticsearch.xpack.application.rules.action.RestPutQueryRuleAction; import org.elasticsearch.xpack.application.rules.action.RestPutQueryRulesetAction; +import org.elasticsearch.xpack.application.rules.action.RestTestQueryRulesetAction; +import org.elasticsearch.xpack.application.rules.action.TestQueryRulesetAction; import org.elasticsearch.xpack.application.rules.action.TransportDeleteQueryRuleAction; import org.elasticsearch.xpack.application.rules.action.TransportDeleteQueryRulesetAction; import org.elasticsearch.xpack.application.rules.action.TransportGetQueryRuleAction; @@ -172,6 +174,7 @@ import org.elasticsearch.xpack.application.rules.action.TransportListQueryRulesetsAction; import org.elasticsearch.xpack.application.rules.action.TransportPutQueryRuleAction; import org.elasticsearch.xpack.application.rules.action.TransportPutQueryRulesetAction; +import org.elasticsearch.xpack.application.rules.action.TransportTestQueryRulesetAction; import org.elasticsearch.xpack.application.search.SearchApplicationIndexService; import org.elasticsearch.xpack.application.search.action.DeleteSearchApplicationAction; import org.elasticsearch.xpack.application.search.action.GetSearchApplicationAction; @@ -266,6 +269,7 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(DeleteQueryRuleAction.INSTANCE, TransportDeleteQueryRuleAction.class), new ActionHandler<>(GetQueryRuleAction.INSTANCE, TransportGetQueryRuleAction.class), new ActionHandler<>(PutQueryRuleAction.INSTANCE, 
TransportPutQueryRuleAction.class), + new ActionHandler<>(TestQueryRulesetAction.INSTANCE, TransportTestQueryRulesetAction.class), usageAction, infoAction @@ -373,7 +377,8 @@ public List getRestHandlers( new RestPutQueryRulesetAction(getLicenseState()), new RestDeleteQueryRuleAction(getLicenseState()), new RestGetQueryRuleAction(getLicenseState()), - new RestPutQueryRuleAction(getLicenseState()) + new RestPutQueryRuleAction(getLicenseState()), + new RestTestQueryRulesetAction(getLicenseState()) ) ); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchFeatures.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchFeatures.java index 81e072479d402..174bcbe886dfb 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchFeatures.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchFeatures.java @@ -14,8 +14,17 @@ import org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry; import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.application.rules.action.TestQueryRulesetAction.QUERY_RULES_TEST_API; public class EnterpriseSearchFeatures implements FeatureSpecification { + + @Override + public Set getFeatures() { + return Set.of(QUERY_RULES_TEST_API); + } + @Override public Map getHistoricalFeatures() { return Map.of( diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRule.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRule.java index 0ecb35531ac09..c14bb8e9a4ec9 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRule.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRule.java @@ -331,12 +331,8 @@ public AppliedQueryRules applyRule(AppliedQueryRules 
appliedRules, Map identifyMatchingDocs(Map matchCriteria) { - List matchingDocs = new ArrayList<>(); + public boolean isRuleMatch(Map matchCriteria) { Boolean isRuleMatch = null; - - // All specified criteria in a rule must match for the rule to be applied for (QueryRuleCriteria criterion : criteria) { for (String match : matchCriteria.keySet()) { final Object matchValue = matchCriteria.get(match); @@ -349,8 +345,13 @@ private List identifyMatchingDocs(Map matchCr } } } + return isRuleMatch != null && isRuleMatch; + } - if (isRuleMatch != null && isRuleMatch) { + @SuppressWarnings("unchecked") + private List identifyMatchingDocs(Map matchCriteria) { + List matchingDocs = new ArrayList<>(); + if (isRuleMatch(matchCriteria)) { if (actions.containsKey(IDS_FIELD.getPreferredName())) { matchingDocs.addAll( ((List) actions.get(IDS_FIELD.getPreferredName())).stream().map(id -> new SpecifiedDocument(null, id)).toList() diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/GetQueryRulesetAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/GetQueryRulesetAction.java index f7e6f166cf53f..1d5ba878264f7 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/GetQueryRulesetAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/GetQueryRulesetAction.java @@ -31,7 +31,8 @@ public class GetQueryRulesetAction { - public static final String NAME = "cluster:admin/xpack/query_rules/get"; + public static final ActionType TYPE = new ActionType<>("cluster:admin/xpack/query_rules/get"); + public static final String NAME = TYPE.name(); public static final ActionType INSTANCE = new ActionType<>(NAME); private GetQueryRulesetAction() {/* no instances */} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/RestTestQueryRulesetAction.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/RestTestQueryRulesetAction.java new file mode 100644 index 0000000000000..b6e02b3c37262 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/RestTestQueryRulesetAction.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.rules.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; +import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler; +import org.elasticsearch.xpack.application.utils.LicenseUtils; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.POST; + +@ServerlessScope(Scope.PUBLIC) +public class RestTestQueryRulesetAction extends EnterpriseSearchBaseRestHandler { + public RestTestQueryRulesetAction(XPackLicenseState licenseState) { + super(licenseState, LicenseUtils.Product.QUERY_RULES); + } + + @Override + public String getName() { + return "query_ruleset_test_action"; + } + + @Override + public List routes() { + return List.of(new Route(POST, "/" + EnterpriseSearch.QUERY_RULES_API_ENDPOINT + "/{ruleset_id}" + "/_test")); + } + + @Override + protected RestChannelConsumer innerPrepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + final String rulesetId = restRequest.param("ruleset_id"); + TestQueryRulesetAction.Request 
request = null; + if (restRequest.hasContent()) { + try (var parser = restRequest.contentParser()) { + request = TestQueryRulesetAction.Request.parse(parser, rulesetId); + } + } + final TestQueryRulesetAction.Request finalRequest = request; + return channel -> client.execute(TestQueryRulesetAction.INSTANCE, finalRequest, new RestToXContentListener<>(channel)); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/TestQueryRulesetAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/TestQueryRulesetAction.java new file mode 100644 index 0000000000000..28f4a3b38dd59 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/TestQueryRulesetAction.java @@ -0,0 +1,212 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.rules.action; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.rules.QueryRulesIndexService; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class TestQueryRulesetAction { + + public static final NodeFeature QUERY_RULES_TEST_API = new NodeFeature("query_rules.test"); + + // TODO - We'd like to transition this to require less stringent permissions + public static final ActionType TYPE = new ActionType<>("cluster:admin/xpack/query_rules/test"); + + public static final String NAME = TYPE.name(); + public static final ActionType INSTANCE = new ActionType<>(NAME); + + private TestQueryRulesetAction() {/* no instances */} + + public static class Request extends ActionRequest implements ToXContentObject, IndicesRequest { + private final String rulesetId; + private final Map matchCriteria; + + private static final ParseField RULESET_ID_FIELD = 
new ParseField("ruleset_id"); + private static final ParseField MATCH_CRITERIA_FIELD = new ParseField("match_criteria"); + + public Request(StreamInput in) throws IOException { + super(in); + this.rulesetId = in.readString(); + this.matchCriteria = in.readGenericMap(); + } + + public Request(String rulesetId, Map matchCriteria) { + this.rulesetId = rulesetId; + this.matchCriteria = matchCriteria; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(rulesetId)) { + validationException = addValidationError("ruleset_id missing", validationException); + } + + return validationException; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(rulesetId); + out.writeGenericMap(matchCriteria); + } + + public String rulesetId() { + return rulesetId; + } + + public Map matchCriteria() { + return matchCriteria; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(rulesetId, request.rulesetId) && Objects.equals(matchCriteria, request.matchCriteria); + } + + @Override + public int hashCode() { + return Objects.hash(rulesetId, matchCriteria); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(RULESET_ID_FIELD.getPreferredName(), rulesetId); + builder.startObject(MATCH_CRITERIA_FIELD.getPreferredName()); + builder.mapContents(matchCriteria); + builder.endObject(); + builder.endObject(); + return builder; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "test_query_ruleset_request", + false, + (p, name) -> { + @SuppressWarnings("unchecked") + Map matchCriteria = (Map) p[0]; + return new Request(name, 
matchCriteria); + } + + ); + static { + PARSER.declareObject(constructorArg(), (p, c) -> p.map(), MATCH_CRITERIA_FIELD); + PARSER.declareString(optionalConstructorArg(), RULESET_ID_FIELD); // Required for parsing + } + + public static Request parse(XContentParser parser, String name) { + return PARSER.apply(parser, name); + } + + @Override + public String[] indices() { + return new String[] { QueryRulesIndexService.QUERY_RULES_ALIAS_NAME }; + } + + @Override + public IndicesOptions indicesOptions() { + return IndicesOptions.lenientExpandHidden(); + } + + } + + public static class Response extends ActionResponse implements ToXContentObject { + + private final int totalMatchedRules; + private final List matchedRules; + + private static final ParseField TOTAL_MATCHED_RULES_FIELD = new ParseField("total_matched_rules"); + private static final ParseField MATCHED_RULES_FIELD = new ParseField("matched_rules"); + + public Response(StreamInput in) throws IOException { + super(in); + this.totalMatchedRules = in.readVInt(); + this.matchedRules = in.readCollectionAsList(MatchedRule::new); + } + + public Response(int totalMatchedRules, List matchedRules) { + this.totalMatchedRules = totalMatchedRules; + this.matchedRules = matchedRules; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(totalMatchedRules); + out.writeCollection(matchedRules, (stream, matchedRule) -> matchedRule.writeTo(stream)); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(TOTAL_MATCHED_RULES_FIELD.getPreferredName(), totalMatchedRules); + builder.startArray(MATCHED_RULES_FIELD.getPreferredName()); + for (MatchedRule matchedRule : matchedRules) { + builder.startObject(); + builder.field("ruleset_id", matchedRule.rulesetId()); + builder.field("rule_id", matchedRule.ruleId()); + builder.endObject(); + } + builder.endArray(); + builder.endObject(); + return 
builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response response = (Response) o; + return Objects.equals(totalMatchedRules, response.totalMatchedRules) && Objects.equals(matchedRules, response.matchedRules); + } + + @Override + public int hashCode() { + return Objects.hash(totalMatchedRules, matchedRules); + } + } + + public record MatchedRule(String rulesetId, String ruleId) { + public MatchedRule(StreamInput in) throws IOException { + this(in.readString(), in.readString()); + } + + public void writeTo(StreamOutput out) throws IOException { + out.writeString(rulesetId); + out.writeString(ruleId); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/TransportTestQueryRulesetAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/TransportTestQueryRulesetAction.java new file mode 100644 index 0000000000000..115cdc516831a --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/TransportTestQueryRulesetAction.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.rules.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.rules.QueryRule; + +import java.util.ArrayList; +import java.util.List; + +import static org.elasticsearch.xpack.core.ClientHelper.ENT_SEARCH_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; + +public class TransportTestQueryRulesetAction extends HandledTransportAction< + TestQueryRulesetAction.Request, + TestQueryRulesetAction.Response> { + + private final Client client; + + @Inject + public TransportTestQueryRulesetAction(TransportService transportService, ActionFilters actionFilters, Client client) { + super( + TestQueryRulesetAction.NAME, + transportService, + actionFilters, + TestQueryRulesetAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.client = client; + } + + @Override + protected void doExecute(Task task, TestQueryRulesetAction.Request request, ActionListener listener) { + GetQueryRulesetAction.Request getQueryRulesetRequest = new GetQueryRulesetAction.Request(request.rulesetId()); + executeAsyncWithOrigin( + client, + ENT_SEARCH_ORIGIN, + GetQueryRulesetAction.TYPE, + getQueryRulesetRequest, + ActionListener.wrap(getQueryRulesetResponse -> { + List matchedRules = new ArrayList<>(); + for (QueryRule rule : getQueryRulesetResponse.queryRuleset().rules()) { + if (rule.isRuleMatch(request.matchCriteria())) { + matchedRules.add(new TestQueryRulesetAction.MatchedRule(request.rulesetId(), rule.id())); + } + } + listener.onResponse(new 
TestQueryRulesetAction.Response(matchedRules.size(), matchedRules)); + }, listener::onFailure) + ); + } + +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/EnterpriseSearchModuleTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/EnterpriseSearchModuleTestUtils.java index 06adb29e32691..190b3c3e53169 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/EnterpriseSearchModuleTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/EnterpriseSearchModuleTestUtils.java @@ -115,4 +115,8 @@ public static QueryRuleset randomQueryRuleset() { return new QueryRuleset(id, rules); } + public static Map randomMatchCriteria() { + return randomMap(1, 3, () -> Tuple.tuple(randomIdentifier(), randomAlphaOfLengthBetween(0, 10))); + } + } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/RestTestQueryRulesetActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/RestTestQueryRulesetActionTests.java new file mode 100644 index 0000000000000..dc2869e3ff0be --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/RestTestQueryRulesetActionTests.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.rules.action; + +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.test.rest.FakeRestRequest; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.AbstractRestEnterpriseSearchActionTests; +import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler; +import org.elasticsearch.xpack.application.utils.LicenseUtils; + +import java.util.Map; + +public class RestTestQueryRulesetActionTests extends AbstractRestEnterpriseSearchActionTests { + public void testWithNonCompliantLicense() throws Exception { + checkLicenseForRequest( + new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withMethod(RestRequest.Method.POST) + .withParams(Map.of("ruleset_id", "ruleset-id")) + .withContent(new BytesArray(""" + { + "match_criteria": { + "foo": "bar" + } + } + """), XContentType.JSON) + .build(), + LicenseUtils.Product.QUERY_RULES + ); + } + + public void testInvalidRequestWithNonCompliantLicense() throws Exception { + checkLicenseForRequest( + new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withMethod(RestRequest.Method.POST) + .withParams(Map.of("invalid_param_name", "invalid_value")) + .withContent(new BytesArray("{}"), XContentType.JSON) + .build(), + LicenseUtils.Product.QUERY_RULES + ); + } + + @Override + protected EnterpriseSearchBaseRestHandler getRestAction(XPackLicenseState licenseState) { + return new RestTestQueryRulesetAction(licenseState); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/TestQueryRulesetActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/TestQueryRulesetActionRequestBWCSerializingTests.java new file mode 100644 index 
0000000000000..7041de1106b50 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/TestQueryRulesetActionRequestBWCSerializingTests.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.rules.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractBWCSerializationTestCase; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.EnterpriseSearchModuleTestUtils; + +import java.io.IOException; +import java.util.List; +import java.util.stream.Collectors; + +import static org.elasticsearch.test.BWCVersions.getAllBWCVersions; + +public class TestQueryRulesetActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase { + + private final String RULESET_NAME = "my-ruleset"; + + @Override + protected Writeable.Reader instanceReader() { + return TestQueryRulesetAction.Request::new; + } + + @Override + protected TestQueryRulesetAction.Request createTestInstance() { + return new TestQueryRulesetAction.Request(RULESET_NAME, EnterpriseSearchModuleTestUtils.randomMatchCriteria()); + } + + @Override + protected TestQueryRulesetAction.Request mutateInstance(TestQueryRulesetAction.Request instance) { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected TestQueryRulesetAction.Request doParseInstance(XContentParser parser) throws IOException { + return TestQueryRulesetAction.Request.parse(parser, RULESET_NAME); + } + + @Override + protected TestQueryRulesetAction.Request mutateInstanceForVersion(TestQueryRulesetAction.Request instance, 
TransportVersion version) { + return instance; + } + + @Override + protected List bwcVersions() { + return getAllBWCVersions().stream().filter(v -> v.onOrAfter(TransportVersions.QUERY_RULE_TEST_API)).collect(Collectors.toList()); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/TestQueryRulesetActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/TestQueryRulesetActionResponseBWCSerializingTests.java new file mode 100644 index 0000000000000..a6562fb7b52af --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/TestQueryRulesetActionResponseBWCSerializingTests.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.rules.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static org.elasticsearch.test.BWCVersions.getAllBWCVersions; + +public class TestQueryRulesetActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< + TestQueryRulesetAction.Response> { + + @Override + protected Writeable.Reader instanceReader() { + return TestQueryRulesetAction.Response::new; + } + + @Override + protected TestQueryRulesetAction.Response mutateInstance(TestQueryRulesetAction.Response instance) { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected TestQueryRulesetAction.Response createTestInstance() { + int totalMatchedRules = randomIntBetween(0, 10); + List matchedRules = IntStream.range(0, totalMatchedRules) + .mapToObj(i -> new TestQueryRulesetAction.MatchedRule(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10))) + .toList(); + return new TestQueryRulesetAction.Response(totalMatchedRules, matchedRules); + } + + @Override + protected TestQueryRulesetAction.Response mutateInstanceForVersion(TestQueryRulesetAction.Response instance, TransportVersion version) { + return instance; + } + + @Override + protected List bwcVersions() { + return getAllBWCVersions().stream().filter(v -> v.onOrAfter(TransportVersions.QUERY_RULE_TEST_API)).collect(Collectors.toList()); + } +} diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle index 51e39f144e44c..554cd0489ae8a 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle @@ -18,6 +18,7 
@@ dependencies { javaRestTestImplementation project(path: xpackModule('esql-core')) javaRestTestImplementation project(path: xpackModule('esql')) javaRestTestImplementation project(path: xpackModule('snapshot-repo-test-kit')) + javaRestTestImplementation project(path: xpackModule('ent-search')) } // location for keys and certificates diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index b29dc0fa410b6..4405ef575b24f 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -245,6 +245,7 @@ public class Constants { "cluster:admin/xpack/query_rules/get", "cluster:admin/xpack/query_rules/list", "cluster:admin/xpack/query_rules/put", + "cluster:admin/xpack/query_rules/test", "cluster:admin/xpack/rollup/delete", "cluster:admin/xpack/rollup/put", "cluster:admin/xpack/rollup/start", From c3d53a80eadc64ac1f2b233e125e87279b6aabda Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 15 Oct 2024 04:09:37 +1100 Subject: [PATCH 058/449] Mute org.elasticsearch.xpack.enrich.EnrichRestIT test {p0=enrich/10_basic/Test using the deprecated elasticsearch_version field results in a warning} #114748 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 2e6fd10c6ef65..51d74f12e4925 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -369,6 +369,9 @@ tests: - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/rest-api/usage/line_38} issue: 
https://github.com/elastic/elasticsearch/issues/113694 +- class: org.elasticsearch.xpack.enrich.EnrichRestIT + method: test {p0=enrich/10_basic/Test using the deprecated elasticsearch_version field results in a warning} + issue: https://github.com/elastic/elasticsearch/issues/114748 # Examples: # From 35fd8939890e9d304b2fc6e5d8ab70a227c70199 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 15 Oct 2024 04:37:17 +1100 Subject: [PATCH 059/449] Mute org.elasticsearch.xpack.eql.EqlRestIT testIndexWildcardPatterns #114749 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 51d74f12e4925..f11ffaac2a4e2 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -372,6 +372,9 @@ tests: - class: org.elasticsearch.xpack.enrich.EnrichRestIT method: test {p0=enrich/10_basic/Test using the deprecated elasticsearch_version field results in a warning} issue: https://github.com/elastic/elasticsearch/issues/114748 +- class: org.elasticsearch.xpack.eql.EqlRestIT + method: testIndexWildcardPatterns + issue: https://github.com/elastic/elasticsearch/issues/114749 # Examples: # From a8de5545d82120ce316d12e4372e145f1d5a2fa4 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Mon, 14 Oct 2024 11:59:13 -0600 Subject: [PATCH 060/449] Refactor merge scheduling code to allow overrides (#114547) This code refactors how the merge scheduler is configured to allow different engine implementations to configure different merge schedulers. 
--- ...ElasticsearchConcurrentMergeScheduler.java | 112 +++------------ .../engine/ElasticsearchMergeScheduler.java | 27 ++++ .../index/engine/InternalEngine.java | 68 +++++---- .../index/engine/MergeTracking.java | 135 ++++++++++++++++++ .../index/merge/OnGoingMerge.java | 4 + 5 files changed, 226 insertions(+), 120 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/index/engine/ElasticsearchMergeScheduler.java create mode 100644 server/src/main/java/org/elasticsearch/index/engine/MergeTracking.java diff --git a/server/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java b/server/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java index d321600e03bf9..90f8e6adab73d 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java +++ b/server/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java @@ -15,11 +15,7 @@ import org.apache.lucene.index.MergeScheduler; import org.apache.lucene.util.SameThreadExecutorService; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.metrics.CounterMetric; -import org.elasticsearch.common.metrics.MeanMetric; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexSettings; @@ -29,8 +25,6 @@ import org.elasticsearch.index.shard.ShardId; import java.io.IOException; -import java.util.Collections; -import java.util.Locale; import java.util.Set; import java.util.concurrent.Executor; @@ -38,23 +32,13 @@ * An extension to the {@link ConcurrentMergeScheduler} that provides tracking on merge times, total * and current merges. 
*/ -class ElasticsearchConcurrentMergeScheduler extends ConcurrentMergeScheduler { +public class ElasticsearchConcurrentMergeScheduler extends ConcurrentMergeScheduler implements ElasticsearchMergeScheduler { protected final Logger logger; private final Settings indexSettings; private final ShardId shardId; - private final MeanMetric totalMerges = new MeanMetric(); - private final CounterMetric totalMergesNumDocs = new CounterMetric(); - private final CounterMetric totalMergesSizeInBytes = new CounterMetric(); - private final CounterMetric currentMerges = new CounterMetric(); - private final CounterMetric currentMergesNumDocs = new CounterMetric(); - private final CounterMetric currentMergesSizeInBytes = new CounterMetric(); - private final CounterMetric totalMergeStoppedTime = new CounterMetric(); - private final CounterMetric totalMergeThrottledTime = new CounterMetric(); - - private final Set onGoingMerges = ConcurrentCollections.newConcurrentSet(); - private final Set readOnlyOnGoingMerges = Collections.unmodifiableSet(onGoingMerges); + private final MergeTracking mergeTracking; private final MergeSchedulerConfig config; private final SameThreadExecutorService sameThreadExecutorService = new SameThreadExecutorService(); @@ -63,11 +47,16 @@ class ElasticsearchConcurrentMergeScheduler extends ConcurrentMergeScheduler { this.shardId = shardId; this.indexSettings = indexSettings.getSettings(); this.logger = Loggers.getLogger(getClass(), shardId); + this.mergeTracking = new MergeTracking( + logger, + () -> indexSettings.getMergeSchedulerConfig().isAutoThrottle() ? getIORateLimitMBPerSec() : Double.POSITIVE_INFINITY + ); refreshConfig(); } + @Override public Set onGoingMerges() { - return readOnlyOnGoingMerges; + return mergeTracking.onGoingMerges(); } /** We're currently only interested in messages with this prefix. 
*/ @@ -104,74 +93,21 @@ protected void message(String message) { super.message(message); } - private static String getSegmentName(MergePolicy.OneMerge merge) { - return merge.getMergeInfo() != null ? merge.getMergeInfo().info.name : "_na_"; - } - @Override protected void doMerge(MergeSource mergeSource, MergePolicy.OneMerge merge) throws IOException { - int totalNumDocs = merge.totalNumDocs(); - long totalSizeInBytes = merge.totalBytesSize(); long timeNS = System.nanoTime(); - currentMerges.inc(); - currentMergesNumDocs.inc(totalNumDocs); - currentMergesSizeInBytes.inc(totalSizeInBytes); - OnGoingMerge onGoingMerge = new OnGoingMerge(merge); - onGoingMerges.add(onGoingMerge); - - if (logger.isTraceEnabled()) { - logger.trace( - "merge [{}] starting..., merging [{}] segments, [{}] docs, [{}] size, into [{}] estimated_size", - getSegmentName(merge), - merge.segments.size(), - totalNumDocs, - ByteSizeValue.ofBytes(totalSizeInBytes), - ByteSizeValue.ofBytes(merge.estimatedMergeBytes) - ); - } + mergeTracking.mergeStarted(onGoingMerge); try { beforeMerge(onGoingMerge); super.doMerge(mergeSource, merge); } finally { long tookMS = TimeValue.nsecToMSec(System.nanoTime() - timeNS); + mergeTracking.mergeFinished(merge, onGoingMerge, tookMS); - onGoingMerges.remove(onGoingMerge); afterMerge(onGoingMerge); - - currentMerges.dec(); - currentMergesNumDocs.dec(totalNumDocs); - currentMergesSizeInBytes.dec(totalSizeInBytes); - - totalMergesNumDocs.inc(totalNumDocs); - totalMergesSizeInBytes.inc(totalSizeInBytes); - totalMerges.inc(tookMS); - long stoppedMS = TimeValue.nsecToMSec( - merge.getMergeProgress().getPauseTimes().get(MergePolicy.OneMergeProgress.PauseReason.STOPPED) - ); - long throttledMS = TimeValue.nsecToMSec( - merge.getMergeProgress().getPauseTimes().get(MergePolicy.OneMergeProgress.PauseReason.PAUSED) - ); - totalMergeStoppedTime.inc(stoppedMS); - totalMergeThrottledTime.inc(throttledMS); - - String message = String.format( - Locale.ROOT, - "merge segment [%s] done: 
took [%s], [%,.1f MB], [%,d docs], [%s stopped], [%s throttled]", - getSegmentName(merge), - TimeValue.timeValueMillis(tookMS), - totalSizeInBytes / 1024f / 1024f, - totalNumDocs, - TimeValue.timeValueMillis(stoppedMS), - TimeValue.timeValueMillis(throttledMS) - ); - - if (tookMS > 20000) { // if more than 20 seconds, DEBUG log it - logger.debug("{}", message); - } else if (logger.isTraceEnabled()) { - logger.trace("{}", message); - } } + } /** @@ -206,24 +142,13 @@ protected MergeThread getMergeThread(MergeSource mergeSource, MergePolicy.OneMer return thread; } - MergeStats stats() { - final MergeStats mergeStats = new MergeStats(); - mergeStats.add( - totalMerges.count(), - totalMerges.sum(), - totalMergesNumDocs.count(), - totalMergesSizeInBytes.count(), - currentMerges.count(), - currentMergesNumDocs.count(), - currentMergesSizeInBytes.count(), - totalMergeStoppedTime.count(), - totalMergeThrottledTime.count(), - config.isAutoThrottle() ? getIORateLimitMBPerSec() : Double.POSITIVE_INFINITY - ); - return mergeStats; + @Override + public MergeStats stats() { + return mergeTracking.stats(); } - void refreshConfig() { + @Override + public void refreshConfig() { if (this.getMaxMergeCount() != config.getMaxMergeCount() || this.getMaxThreadCount() != config.getMaxThreadCount()) { this.setMaxMergesAndThreads(config.getMaxMergeCount(), config.getMaxThreadCount()); } @@ -234,4 +159,9 @@ void refreshConfig() { disableAutoIOThrottle(); } } + + @Override + public MergeScheduler getMergeScheduler() { + return this; + } } diff --git a/server/src/main/java/org/elasticsearch/index/engine/ElasticsearchMergeScheduler.java b/server/src/main/java/org/elasticsearch/index/engine/ElasticsearchMergeScheduler.java new file mode 100644 index 0000000000000..ac72c7a21da75 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/engine/ElasticsearchMergeScheduler.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.engine; + +import org.apache.lucene.index.MergeScheduler; +import org.elasticsearch.index.merge.MergeStats; +import org.elasticsearch.index.merge.OnGoingMerge; + +import java.util.Set; + +public interface ElasticsearchMergeScheduler { + + Set onGoingMerges(); + + MergeStats stats(); + + void refreshConfig(); + + MergeScheduler getMergeScheduler(); +} diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index c72f5ce740d94..8d43252d178ee 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -20,6 +20,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LiveIndexWriterConfig; import org.apache.lucene.index.MergePolicy; +import org.apache.lucene.index.MergeScheduler; import org.apache.lucene.index.SegmentCommitInfo; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SoftDeletesRetentionMergePolicy; @@ -139,7 +140,7 @@ public class InternalEngine extends Engine { private volatile long lastDeleteVersionPruneTimeMSec; private final Translog translog; - private final ElasticsearchConcurrentMergeScheduler mergeScheduler; + private final ElasticsearchMergeScheduler mergeScheduler; private final IndexWriter indexWriter; @@ -248,11 +249,12 @@ public InternalEngine(EngineConfig engineConfig) { Translog translog = null; ExternalReaderManager externalReaderManager = null; 
ElasticsearchReaderManager internalReaderManager = null; - EngineMergeScheduler scheduler = null; + MergeScheduler scheduler = null; boolean success = false; try { this.lastDeleteVersionPruneTimeMSec = engineConfig.getThreadPool().relativeTimeInMillis(); - mergeScheduler = scheduler = new EngineMergeScheduler(engineConfig.getShardId(), engineConfig.getIndexSettings()); + mergeScheduler = createMergeScheduler(engineConfig.getShardId(), engineConfig.getIndexSettings()); + scheduler = mergeScheduler.getMergeScheduler(); throttle = new IndexThrottle(); try { store.trimUnsafeCommits(config().getTranslogConfig().getTranslogPath()); @@ -383,7 +385,7 @@ private SoftDeletesPolicy newSoftDeletesPolicy() throws IOException { @Nullable private CombinedDeletionPolicy.CommitsListener newCommitsListener() { - Engine.IndexCommitListener listener = engineConfig.getIndexCommitListener(); + IndexCommitListener listener = engineConfig.getIndexCommitListener(); if (listener != null) { final IndexCommitListener wrappedListener = Assertions.ENABLED ? 
assertingCommitsOrderListener(listener) : listener; return new CombinedDeletionPolicy.CommitsListener() { @@ -824,7 +826,7 @@ private GetResult getFromTranslog( config(), translogInMemorySegmentsCount::incrementAndGet ); - final Engine.Searcher searcher = new Engine.Searcher( + final Searcher searcher = new Searcher( "realtime_get", ElasticsearchDirectoryReader.wrap(inMemoryReader, shardId), config().getSimilarity(), @@ -841,7 +843,7 @@ public GetResult get( Get get, MappingLookup mappingLookup, DocumentParser documentParser, - Function searcherWrapper + Function searcherWrapper ) { try (var ignored = acquireEnsureOpenRef()) { if (get.realtime()) { @@ -875,7 +877,7 @@ protected GetResult realtimeGetUnderLock( Get get, MappingLookup mappingLookup, DocumentParser documentParser, - Function searcherWrapper, + Function searcherWrapper, boolean getFromSearcher ) { assert isDrainedForClose() == false; @@ -1098,7 +1100,7 @@ protected boolean assertPrimaryCanOptimizeAddDocument(final Index index) { return true; } - private boolean assertIncomingSequenceNumber(final Engine.Operation.Origin origin, final long seqNo) { + private boolean assertIncomingSequenceNumber(final Operation.Origin origin, final long seqNo) { if (origin == Operation.Origin.PRIMARY) { assert assertPrimaryIncomingSequenceNumber(origin, seqNo); } else { @@ -1108,7 +1110,7 @@ private boolean assertIncomingSequenceNumber(final Engine.Operation.Origin origi return true; } - protected boolean assertPrimaryIncomingSequenceNumber(final Engine.Operation.Origin origin, final long seqNo) { + protected boolean assertPrimaryIncomingSequenceNumber(final Operation.Origin origin, final long seqNo) { // sequence number should not be set when operation origin is primary assert seqNo == SequenceNumbers.UNASSIGNED_SEQ_NO : "primary operations must never have an assigned sequence number but was [" + seqNo + "]"; @@ -2700,7 +2702,7 @@ private IndexWriterConfig getIndexWriterConfig() { 
iwc.setOpenMode(IndexWriterConfig.OpenMode.APPEND); iwc.setIndexDeletionPolicy(combinedDeletionPolicy); iwc.setInfoStream(TESTS_VERBOSE ? InfoStream.getDefault() : new LoggerInfoStream(logger)); - iwc.setMergeScheduler(mergeScheduler); + iwc.setMergeScheduler(mergeScheduler.getMergeScheduler()); // Give us the opportunity to upgrade old segments while performing // background merges MergePolicy mergePolicy = config().getMergePolicy(); @@ -2753,7 +2755,7 @@ private IndexWriterConfig getIndexWriterConfig() { /** A listener that warms the segments if needed when acquiring a new reader */ static final class RefreshWarmerListener implements BiConsumer { - private final Engine.Warmer warmer; + private final Warmer warmer; private final Logger logger; private final AtomicBoolean isEngineClosed; @@ -2817,6 +2819,10 @@ LiveIndexWriterConfig getCurrentIndexWriterConfig() { return indexWriter.getConfig(); } + protected ElasticsearchMergeScheduler createMergeScheduler(ShardId shardId, IndexSettings indexSettings) { + return new EngineMergeScheduler(shardId, indexSettings); + } + private final class EngineMergeScheduler extends ElasticsearchConcurrentMergeScheduler { private final AtomicInteger numMergesInFlight = new AtomicInteger(0); private final AtomicBoolean isThrottling = new AtomicBoolean(); @@ -2827,7 +2833,7 @@ private final class EngineMergeScheduler extends ElasticsearchConcurrentMergeSch @Override public synchronized void beforeMerge(OnGoingMerge merge) { - int maxNumMerges = mergeScheduler.getMaxMergeCount(); + int maxNumMerges = getMaxMergeCount(); if (numMergesInFlight.incrementAndGet() > maxNumMerges) { if (isThrottling.getAndSet(true) == false) { logger.info("now throttling indexing: numMergesInFlight={}, maxNumMerges={}", numMergesInFlight, maxNumMerges); @@ -2838,7 +2844,7 @@ public synchronized void beforeMerge(OnGoingMerge merge) { @Override public synchronized void afterMerge(OnGoingMerge merge) { - int maxNumMerges = mergeScheduler.getMaxMergeCount(); + 
int maxNumMerges = getMaxMergeCount(); if (numMergesInFlight.decrementAndGet() < maxNumMerges) { if (isThrottling.getAndSet(false)) { logger.info("stop throttling indexing: numMergesInFlight={}, maxNumMerges={}", numMergesInFlight, maxNumMerges); @@ -2876,25 +2882,29 @@ protected void doRun() { @Override protected void handleMergeException(final Throwable exc) { - engineConfig.getThreadPool().generic().execute(new AbstractRunnable() { - @Override - public void onFailure(Exception e) { - logger.debug("merge failure action rejected", e); - } - - @Override - protected void doRun() throws Exception { - /* - * We do this on another thread rather than the merge thread that we are initially called on so that we have complete - * confidence that the call stack does not contain catch statements that would cause the error that might be thrown - * here from being caught and never reaching the uncaught exception handler. - */ - failEngine("merge failed", new MergePolicy.MergeException(exc)); - } - }); + mergeException(exc); } } + protected void mergeException(final Throwable exc) { + engineConfig.getThreadPool().generic().execute(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + logger.debug("merge failure action rejected", e); + } + + @Override + protected void doRun() throws Exception { + /* + * We do this on another thread rather than the merge thread that we are initially called on so that we have complete + * confidence that the call stack does not contain catch statements that would cause the error that might be thrown + * here from being caught and never reaching the uncaught exception handler. + */ + failEngine("merge failed", new MergePolicy.MergeException(exc)); + } + }); + } + /** * Commits the specified index writer. 
* diff --git a/server/src/main/java/org/elasticsearch/index/engine/MergeTracking.java b/server/src/main/java/org/elasticsearch/index/engine/MergeTracking.java new file mode 100644 index 0000000000000..3f52b607cf356 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/engine/MergeTracking.java @@ -0,0 +1,135 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.engine; + +import org.apache.logging.log4j.Logger; +import org.apache.lucene.index.MergePolicy; +import org.elasticsearch.common.metrics.CounterMetric; +import org.elasticsearch.common.metrics.MeanMetric; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.merge.MergeStats; +import org.elasticsearch.index.merge.OnGoingMerge; + +import java.util.Collections; +import java.util.Locale; +import java.util.Set; +import java.util.function.DoubleSupplier; + +public class MergeTracking { + + protected final Logger logger; + private final DoubleSupplier mbPerSecAutoThrottle; + + private final MeanMetric totalMerges = new MeanMetric(); + private final CounterMetric totalMergesNumDocs = new CounterMetric(); + private final CounterMetric totalMergesSizeInBytes = new CounterMetric(); + private final CounterMetric currentMerges = new CounterMetric(); + private final CounterMetric currentMergesNumDocs = new CounterMetric(); + private final CounterMetric currentMergesSizeInBytes = new CounterMetric(); + private final 
CounterMetric totalMergeStoppedTime = new CounterMetric(); + private final CounterMetric totalMergeThrottledTime = new CounterMetric(); + + private final Set onGoingMerges = ConcurrentCollections.newConcurrentSet(); + private final Set readOnlyOnGoingMerges = Collections.unmodifiableSet(onGoingMerges); + + public MergeTracking(Logger logger, DoubleSupplier mbPerSecAutoThrottle) { + this.logger = logger; + this.mbPerSecAutoThrottle = mbPerSecAutoThrottle; + } + + public Set onGoingMerges() { + return readOnlyOnGoingMerges; + } + + public void mergeStarted(OnGoingMerge onGoingMerge) { + MergePolicy.OneMerge merge = onGoingMerge.getMerge(); + int totalNumDocs = merge.totalNumDocs(); + long totalSizeInBytes = merge.totalBytesSize(); + currentMerges.inc(); + currentMergesNumDocs.inc(totalNumDocs); + currentMergesSizeInBytes.inc(totalSizeInBytes); + onGoingMerges.add(onGoingMerge); + + if (logger.isTraceEnabled()) { + logger.trace( + "merge [{}] starting: merging [{}] segments, [{}] docs, [{}] size, into [{}] estimated_size", + onGoingMerge.getId(), + merge.segments.size(), + totalNumDocs, + ByteSizeValue.ofBytes(totalSizeInBytes), + ByteSizeValue.ofBytes(merge.estimatedMergeBytes) + ); + } + } + + public void mergeFinished(final MergePolicy.OneMerge merge, final OnGoingMerge onGoingMerge, long tookMS) { + int totalNumDocs = merge.totalNumDocs(); + long totalSizeInBytes = merge.totalBytesSize(); + + onGoingMerges.remove(onGoingMerge); + + currentMerges.dec(); + currentMergesNumDocs.dec(totalNumDocs); + currentMergesSizeInBytes.dec(totalSizeInBytes); + + totalMergesNumDocs.inc(totalNumDocs); + totalMergesSizeInBytes.inc(totalSizeInBytes); + totalMerges.inc(tookMS); + long stoppedMS = TimeValue.nsecToMSec( + merge.getMergeProgress().getPauseTimes().get(MergePolicy.OneMergeProgress.PauseReason.STOPPED) + ); + long throttledMS = TimeValue.nsecToMSec( + merge.getMergeProgress().getPauseTimes().get(MergePolicy.OneMergeProgress.PauseReason.PAUSED) + ); + 
totalMergeStoppedTime.inc(stoppedMS); + totalMergeThrottledTime.inc(throttledMS); + + String message = String.format( + Locale.ROOT, + "merge [%s] segment [%s] done: took [%s], [%s], [%,d] docs, [%s] stopped, [%s] throttled", + onGoingMerge.getId(), + getSegmentName(merge), + TimeValue.timeValueMillis(tookMS), + ByteSizeValue.ofBytes(totalSizeInBytes), + totalNumDocs, + TimeValue.timeValueMillis(stoppedMS), + TimeValue.timeValueMillis(throttledMS) + ); + + if (tookMS > 20000) { // if more than 20 seconds, DEBUG log it + logger.debug("{}", message); + } else if (logger.isTraceEnabled()) { + logger.trace("{}", message); + } + } + + public MergeStats stats() { + final MergeStats mergeStats = new MergeStats(); + mergeStats.add( + totalMerges.count(), + totalMerges.sum(), + totalMergesNumDocs.count(), + totalMergesSizeInBytes.count(), + currentMerges.count(), + currentMergesNumDocs.count(), + currentMergesSizeInBytes.count(), + totalMergeStoppedTime.count(), + totalMergeThrottledTime.count(), + mbPerSecAutoThrottle.getAsDouble() + ); + return mergeStats; + } + + private static String getSegmentName(MergePolicy.OneMerge merge) { + return merge.getMergeInfo() != null ? 
merge.getMergeInfo().info.name : "_na_"; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/merge/OnGoingMerge.java b/server/src/main/java/org/elasticsearch/index/merge/OnGoingMerge.java index df49e00f8af73..7c40fdc93a48b 100644 --- a/server/src/main/java/org/elasticsearch/index/merge/OnGoingMerge.java +++ b/server/src/main/java/org/elasticsearch/index/merge/OnGoingMerge.java @@ -50,4 +50,8 @@ public long getTotalBytesSize() { public List getMergedSegments() { return oneMerge.segments; } + + public MergePolicy.OneMerge getMerge() { + return oneMerge; + } } From ee74ce564fad4bb476a3c0cafffe0882fdb5c60e Mon Sep 17 00:00:00 2001 From: Kyle Thomas <170446584+KyleOnK8s@users.noreply.github.com> Date: Mon, 14 Oct 2024 13:05:12 -0500 Subject: [PATCH 061/449] [DOCS] ES|QL: Adding a tip to the WHERE documentation (#114050) * Adding a tip to make null field behavior more apparent. * Update docs/reference/esql/processing-commands/where.asciidoc Co-authored-by: Andrei Stefan * Update docs/reference/esql/processing-commands/where.asciidoc Rephrasing for clarity Co-authored-by: Liam Thompson <32779855+leemthompo@users.noreply.github.com> --------- Co-authored-by: Andrei Stefan Co-authored-by: Liam Thompson <32779855+leemthompo@users.noreply.github.com> --- docs/reference/esql/processing-commands/where.asciidoc | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/reference/esql/processing-commands/where.asciidoc b/docs/reference/esql/processing-commands/where.asciidoc index 407df30c57215..1d6fc1e90d595 100644 --- a/docs/reference/esql/processing-commands/where.asciidoc +++ b/docs/reference/esql/processing-commands/where.asciidoc @@ -5,6 +5,13 @@ The `WHERE` processing command produces a table that contains all the rows from the input table for which the provided condition evaluates to `true`. +[TIP] +==== +In case of value exclusions, fields with `null` values will be excluded from search results. 
+In this context a `null` means either there is an explicit `null` value in the document or there is no value at all. +For example: `WHERE field != "value"` will be interpreted as `WHERE field != "value" AND field IS NOT NULL`. +==== + **Syntax** [source,esql] From fad0d2aed66e6aea026c71898d186b9b50f45075 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 15 Oct 2024 05:21:30 +1100 Subject: [PATCH 062/449] Mute org.elasticsearch.xpack.eql.EqlRestIT testBadRequests #114752 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index f11ffaac2a4e2..9f601e51e90c1 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -375,6 +375,9 @@ tests: - class: org.elasticsearch.xpack.eql.EqlRestIT method: testIndexWildcardPatterns issue: https://github.com/elastic/elasticsearch/issues/114749 +- class: org.elasticsearch.xpack.eql.EqlRestIT + method: testBadRequests + issue: https://github.com/elastic/elasticsearch/issues/114752 # Examples: # From a9d5fa6227c4d346b5484c183ca55d865c31b964 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 15 Oct 2024 05:30:34 +1100 Subject: [PATCH 063/449] Mute org.elasticsearch.xpack.enrich.EnrichRestIT test {p0=enrich/20_standard_index/enrich stats REST response structure} #114753 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 9f601e51e90c1..bbb3dcf008dc7 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -378,6 +378,9 @@ tests: - class: org.elasticsearch.xpack.eql.EqlRestIT method: testBadRequests issue: https://github.com/elastic/elasticsearch/issues/114752 +- class: org.elasticsearch.xpack.enrich.EnrichRestIT + method: test {p0=enrich/20_standard_index/enrich stats REST response structure} + issue: https://github.com/elastic/elasticsearch/issues/114753 # Examples: # From 
f5188affb70f4e1e5c2bf628fe082a3d96bfd520 Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Mon, 14 Oct 2024 20:46:04 +0200 Subject: [PATCH 064/449] Remove PushTopNToSource support for ExchangeExec (#114637) This appears to be dead code, so we're removing it. --- .../physical/local/PushTopNToSource.java | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java index 6db35fa0a06d6..855faf9df5ed2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java @@ -24,7 +24,6 @@ import org.elasticsearch.xpack.esql.optimizer.PhysicalOptimizerRules; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EvalExec; -import org.elasticsearch.xpack.esql.plan.physical.ExchangeExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; @@ -82,17 +81,6 @@ public PhysicalPlan rewrite(TopNExec topNExec) { } } - /** - * TODO: Consider deleting this case entirely. We do not know if this is ever hit. 
- */ - record PushableExchangeExec(ExchangeExec exchangeExec, EsQueryExec queryExec) implements Pushable { - public PhysicalPlan rewrite(TopNExec topNExec) { - var sorts = buildFieldSorts(topNExec.order()); - var limit = topNExec.limit(); - return exchangeExec.replaceChild(queryExec.withSorts(sorts).withLimit(limit)); - } - } - record PushableQueryExec(EsQueryExec queryExec) implements Pushable { public PhysicalPlan rewrite(TopNExec topNExec) { var sorts = buildFieldSorts(topNExec.order()); @@ -141,13 +129,6 @@ && canPushDownOrders(topNExec.order(), hasIdenticalDelegate)) { // With the simplest case of `FROM index | SORT ...` we only allow pushing down if the sort is on a field return new PushableQueryExec(queryExec); } - if (child instanceof ExchangeExec exchangeExec - && exchangeExec.child() instanceof EsQueryExec queryExec - && queryExec.canPushSorts() - && canPushDownOrders(topNExec.order(), hasIdenticalDelegate)) { - // When we have an exchange between the FROM and the SORT, we also only allow pushing down if the sort is on a field - return new PushableExchangeExec(exchangeExec, queryExec); - } if (child instanceof EvalExec evalExec && evalExec.child() instanceof EsQueryExec queryExec && queryExec.canPushSorts()) { // When we have an EVAL between the FROM and the SORT, we consider pushing down if the sort is on a field and/or // a distance function defined in the EVAL. We also move the EVAL to after the SORT. 
From 35e79f85f084fe9189dc9d850ba0755841933b95 Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Mon, 14 Oct 2024 20:46:20 +0200 Subject: [PATCH 065/449] Test StDistance multivalue consistency and fixed two CartesianPoint bugs (#114729) --- .../SpatialPushDownCartesianPointIT.java | 6 ++ .../spatial/SpatialPushDownGeoPointIT.java | 5 ++ .../SpatialPushDownPointsTestCase.java | 57 +++++++++++++++++++ .../esql/spatial/SpatialPushDownTestCase.java | 2 +- .../scalar/spatial/BinarySpatialFunction.java | 4 +- .../local/EnableSpatialDistancePushdown.java | 21 +++++-- 6 files changed, 87 insertions(+), 8 deletions(-) diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/spatial/SpatialPushDownCartesianPointIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/spatial/SpatialPushDownCartesianPointIT.java index 93701552b94aa..94fc4030f73a9 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/spatial/SpatialPushDownCartesianPointIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/spatial/SpatialPushDownCartesianPointIT.java @@ -31,4 +31,10 @@ protected Geometry getQueryGeometry() { protected String castingFunction() { return "TO_CARTESIANSHAPE"; } + + @Override + protected double searchDistance() { + // We search much larger distances for Cartesian, to ensure we actually get results from the much wider data range + return 1e12; + } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/spatial/SpatialPushDownGeoPointIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/spatial/SpatialPushDownGeoPointIT.java index 871fb222de3d4..9bc3312fff63e 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/spatial/SpatialPushDownGeoPointIT.java +++ 
b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/spatial/SpatialPushDownGeoPointIT.java @@ -36,4 +36,9 @@ protected Geometry getQueryGeometry() { protected String castingFunction() { return "TO_GEOSHAPE"; } + + @Override + protected double searchDistance() { + return 10000000; + } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/spatial/SpatialPushDownPointsTestCase.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/spatial/SpatialPushDownPointsTestCase.java index 411106f008986..0acbe98022f02 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/spatial/SpatialPushDownPointsTestCase.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/spatial/SpatialPushDownPointsTestCase.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.spatial; +import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.GeometryValidator; import org.elasticsearch.geometry.utils.WellKnownText; @@ -20,6 +21,7 @@ import java.io.IOException; import java.text.ParseException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Locale; import static org.hamcrest.Matchers.closeTo; @@ -108,6 +110,61 @@ protected void assertFunction(String spatialFunction, String wkt, long expected, } } + public void testPushedDownDistanceSingleValue() throws RuntimeException { + assertPushedDownDistance(false); + } + + public void testPushedDownDistanceMultiValue() throws RuntimeException { + assertPushedDownDistance(true); + } + + private void assertPushedDownDistance(boolean multiValue) throws RuntimeException { + initIndexes(); + for (int i = 0; i < random().nextInt(50, 100); i++) { + if (multiValue) { + final String[] values = new String[randomIntBetween(1, 5)]; + for (int j = 0; j < values.length; j++) { + values[j] = "\"" + 
WellKnownText.toWKT(getIndexGeometry()) + "\""; + } + index("indexed", i + "", "{\"location\" : " + Arrays.toString(values) + " }"); + index("not-indexed", i + "", "{\"location\" : " + Arrays.toString(values) + " }"); + } else { + final String value = WellKnownText.toWKT(getIndexGeometry()); + index("indexed", i + "", "{\"location\" : \"" + value + "\" }"); + index("not-indexed", i + "", "{\"location\" : \"" + value + "\" }"); + } + } + + refresh("indexed", "not-indexed"); + + for (int i = 0; i < 10; i++) { + final Geometry geometry = getIndexGeometry(); + final String wkt = WellKnownText.toWKT(geometry); + assertDistanceFunction(wkt); + } + } + + protected abstract double searchDistance(); + + protected void assertDistanceFunction(String wkt) { + String spatialFunction = "ST_DISTANCE"; + String castingFunction = castingFunction().replaceAll("SHAPE", "POINT"); + final String query1 = String.format(Locale.ROOT, """ + FROM indexed | WHERE %s(location, %s("%s")) < %.1f | STATS COUNT(*) + """, spatialFunction, castingFunction, wkt, searchDistance()); + final String query2 = String.format(Locale.ROOT, """ + FROM not-indexed | WHERE %s(location, %s("%s")) < %.1f | STATS COUNT(*) + """, spatialFunction, castingFunction, wkt, searchDistance()); + try ( + EsqlQueryResponse response1 = EsqlQueryRequestBuilder.newRequestBuilder(client()).query(query1).get(); + EsqlQueryResponse response2 = EsqlQueryRequestBuilder.newRequestBuilder(client()).query(query2).get(); + ) { + Object indexedResult = response1.response().column(0).iterator().next(); + Object notIndexedResult = response2.response().column(0).iterator().next(); + assertEquals(spatialFunction, indexedResult, notIndexedResult); + } + } + private String toString(CentroidCalculator centroid) { return "Centroid (x:" + centroid.getX() + ", y:" + centroid.getY() + ")"; } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/spatial/SpatialPushDownTestCase.java 
b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/spatial/SpatialPushDownTestCase.java index 9dff647763b6b..e7e0c785f50e5 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/spatial/SpatialPushDownTestCase.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/spatial/SpatialPushDownTestCase.java @@ -28,7 +28,7 @@ /** * Base class to check that a query than can be pushed down gives the same result * if it is actually pushed down and when it is executed by the compute engine, - * + *

    * For doing that we create two indices, one fully indexed and another with index * and doc values disabled. Then we index the same data in both indices and we check * that the same ES|QL queries produce the same results in both. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java index 5e8d39217fcca..8839244e6c601 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java @@ -28,6 +28,8 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_SHAPE; import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_SHAPE; import static org.elasticsearch.xpack.esql.core.type.DataType.isNull; @@ -203,7 +205,7 @@ public void setCrsType(DataType dataType) { } private static final String[] GEO_TYPE_NAMES = new String[] { GEO_POINT.typeName(), GEO_SHAPE.typeName() }; - private static final String[] CARTESIAN_TYPE_NAMES = new String[] { GEO_POINT.typeName(), GEO_SHAPE.typeName() }; + private static final String[] CARTESIAN_TYPE_NAMES = new String[] { CARTESIAN_POINT.typeName(), CARTESIAN_SHAPE.typeName() }; protected static boolean spatialCRSCompatible(DataType spatialDataType, DataType otherDataType) { return 
DataType.isSpatialGeo(spatialDataType) && DataType.isSpatialGeo(otherDataType) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/EnableSpatialDistancePushdown.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/EnableSpatialDistancePushdown.java index be6e124502ba5..ec25c69deba5c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/EnableSpatialDistancePushdown.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/EnableSpatialDistancePushdown.java @@ -216,31 +216,40 @@ private Expression rewriteDistanceFilter( Number number, ComparisonType comparisonType ) { + DataType shapeDataType = getShapeDataType(spatialExp); Geometry geometry = SpatialRelatesUtils.makeGeometryFromLiteral(literalExp); if (geometry instanceof Point point) { double distance = number.doubleValue(); Source source = comparison.source(); if (comparisonType.lt) { distance = comparisonType.eq ? distance : Math.nextDown(distance); - return new SpatialIntersects(source, spatialExp, makeCircleLiteral(point, distance, literalExp)); + return new SpatialIntersects(source, spatialExp, makeCircleLiteral(point, distance, literalExp, shapeDataType)); } else if (comparisonType.gt) { distance = comparisonType.eq ? 
distance : Math.nextUp(distance); - return new SpatialDisjoint(source, spatialExp, makeCircleLiteral(point, distance, literalExp)); + return new SpatialDisjoint(source, spatialExp, makeCircleLiteral(point, distance, literalExp, shapeDataType)); } else if (comparisonType.eq) { return new And( source, - new SpatialIntersects(source, spatialExp, makeCircleLiteral(point, distance, literalExp)), - new SpatialDisjoint(source, spatialExp, makeCircleLiteral(point, Math.nextDown(distance), literalExp)) + new SpatialIntersects(source, spatialExp, makeCircleLiteral(point, distance, literalExp, shapeDataType)), + new SpatialDisjoint(source, spatialExp, makeCircleLiteral(point, Math.nextDown(distance), literalExp, shapeDataType)) ); } } return comparison; } - private Literal makeCircleLiteral(Point point, double distance, Expression literalExpression) { + private Literal makeCircleLiteral(Point point, double distance, Expression literalExpression, DataType shapeDataType) { var circle = new Circle(point.getX(), point.getY(), distance); var wkb = WellKnownBinary.toWKB(circle, ByteOrder.LITTLE_ENDIAN); - return new Literal(literalExpression.source(), new BytesRef(wkb), DataType.GEO_SHAPE); + return new Literal(literalExpression.source(), new BytesRef(wkb), shapeDataType); + } + + private DataType getShapeDataType(Expression expression) { + return switch (expression.dataType()) { + case GEO_POINT, GEO_SHAPE -> DataType.GEO_SHAPE; + case CARTESIAN_POINT, CARTESIAN_SHAPE -> DataType.CARTESIAN_SHAPE; + default -> throw new IllegalArgumentException("Unsupported spatial data type: " + expression.dataType()); + }; } /** From 465c65c02fdca9f3c3faa01a8cb15f9f58dde58e Mon Sep 17 00:00:00 2001 From: Rassyan Date: Tue, 15 Oct 2024 02:59:56 +0800 Subject: [PATCH 066/449] Fix Synthetic Source Handling for `bit` Type in `dense_vector` Field (#114407) **Description:** This PR addresses the issue described in [#114402](https://github.com/elastic/elasticsearch/issues/114402), where the 
`synthetic_source` feature does not correctly handle the `bit` type in `dense_vector` fields when `index` is set to `false`. The root cause of the issue was that the `bit` type was not properly accounted for, leading to an array that is 8 times the size of the actual `dims` value of docvalue. This mismatch will cause an array out-of-bounds exception when reconstructing the document. **Changes:** - Adjusted the `synthetic_source` logic to correctly handle the `bit` type by ensuring the array size accounts for the 8x difference in dimensions. - Added yaml test to cover the `bit` type scenario in `dense_vector` fields with `index` set to `false`. **Related Issues:** - Closes [#114402](https://github.com/elastic/elasticsearch/issues/114402) - Introduced in [#110059](https://github.com/elastic/elasticsearch/pull/110059) --- docs/changelog/114407.yaml | 6 +++ .../test/search.vectors/45_knn_search_bit.yml | 51 +++++++++++++++++++ .../ES814ScalarQuantizedVectorsFormat.java | 6 +++ .../vectors/ES815BitFlatVectorsFormat.java | 7 +++ .../vectors/DenseVectorFieldMapper.java | 2 +- .../action/search/SearchCapabilities.java | 7 ++- .../vectors/DenseVectorFieldMapperTests.java | 15 +++--- 7 files changed, 86 insertions(+), 8 deletions(-) create mode 100644 docs/changelog/114407.yaml diff --git a/docs/changelog/114407.yaml b/docs/changelog/114407.yaml new file mode 100644 index 0000000000000..4c1134a9d3834 --- /dev/null +++ b/docs/changelog/114407.yaml @@ -0,0 +1,6 @@ +pr: 114407 +summary: Fix synthetic source handling for `bit` type in `dense_vector` field +area: Search +type: bug +issues: + - 114402 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_bit.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_bit.yml index ed469ffd7ff16..02576ad1b2b01 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_bit.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_bit.yml @@ -354,3 +354,54 @@ setup: dims: 40 index: true similarity: max_inner_product + + +--- +"Search with synthetic source": + - requires: + capabilities: + - method: POST + path: /_search + capabilities: [ bit_dense_vector_synthetic_source ] + test_runner_features: capabilities + reason: "Support for bit dense vector synthetic source capability required" + - do: + indices.create: + index: test_synthetic_source + body: + mappings: + properties: + name: + type: keyword + vector1: + type: dense_vector + element_type: bit + dims: 40 + index: false + vector2: + type: dense_vector + element_type: bit + dims: 40 + index: true + similarity: l2_norm + + - do: + index: + index: test_synthetic_source + id: "1" + body: + name: cow.jpg + vector1: [2, -1, 1, 4, -3] + vector2: [2, -1, 1, 4, -3] + + - do: + indices.refresh: {} + + - do: + search: + force_synthetic_source: true + index: test_synthetic_source + + - match: {hits.hits.0._id: "1"} + - match: {hits.hits.0._source.vector1: [2, -1, 1, 4, -3]} + - match: {hits.hits.0._source.vector2: [2, -1, 1, 4, -3]} diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java index 4bf396e8d5ad1..10a20839ab3c5 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java @@ -41,6 +41,7 @@ import java.io.IOException; import static org.apache.lucene.codecs.lucene99.Lucene99ScalarQuantizedVectorsFormat.DYNAMIC_CONFIDENCE_INTERVAL; +import static org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.MAX_DIMS_COUNT; public class ES814ScalarQuantizedVectorsFormat extends FlatVectorsFormat { @@ -291,4 +292,9 @@ public RandomVectorScorer 
getRandomVectorScorer(VectorSimilarityFunction sim, Ra return delegate.getRandomVectorScorer(sim, values, query); } } + + @Override + public int getMaxDimensions(String fieldName) { + return MAX_DIMS_COUNT; + } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java index f0f25bd702749..7e586e210afd3 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java @@ -25,6 +25,8 @@ import java.io.IOException; +import static org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.MAX_DIMS_COUNT; + class ES815BitFlatVectorsFormat extends FlatVectorsFormat { private static final FlatVectorsFormat delegate = new Lucene99FlatVectorsFormat(FlatBitVectorScorer.INSTANCE); @@ -43,6 +45,11 @@ public FlatVectorsReader fieldsReader(SegmentReadState segmentReadState) throws return delegate.fieldsReader(segmentReadState); } + @Override + public int getMaxDimensions(String fieldName) { + return MAX_DIMS_COUNT; + } + static class FlatBitVectorScorer implements FlatVectorsScorer { static final FlatBitVectorScorer INSTANCE = new FlatBitVectorScorer(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index d7353584706d8..c3959bd442a1a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -2270,7 +2270,7 @@ public void write(XContentBuilder b) throws IOException { if (indexCreatedVersion.onOrAfter(LITTLE_ENDIAN_FLOAT_STORED_INDEX_VERSION)) { byteBuffer.order(ByteOrder.LITTLE_ENDIAN); } - int dims = fieldType().dims; + int dims = 
fieldType().elementType == ElementType.BIT ? fieldType().dims / Byte.SIZE : fieldType().dims; for (int dim = 0; dim < dims; dim++) { fieldType().elementType.readAndWriteValue(byteBuffer, b); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java index 45fd6afe4fca6..7828bb956a160 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java @@ -20,6 +20,11 @@ private SearchCapabilities() {} /** Support regex and range match rules in interval queries. */ private static final String RANGE_REGEX_INTERVAL_QUERY_CAPABILITY = "range_regexp_interval_queries"; + /** Support synthetic source with `bit` type in `dense_vector` field when `index` is set to `false`. */ + private static final String BIT_DENSE_VECTOR_SYNTHETIC_SOURCE_CAPABILITY = "bit_dense_vector_synthetic_source"; - public static final Set CAPABILITIES = Set.of(RANGE_REGEX_INTERVAL_QUERY_CAPABILITY); + public static final Set CAPABILITIES = Set.of( + RANGE_REGEX_INTERVAL_QUERY_CAPABILITY, + BIT_DENSE_VECTOR_SYNTHETIC_SOURCE_CAPABILITY + ); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index 04b9b05ecfe3a..492c76924c729 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -2022,24 +2022,27 @@ protected boolean supportsEmptyInputArray() { private static class DenseVectorSyntheticSourceSupport implements SyntheticSourceSupport { private final int dims = between(5, 1000); - private final ElementType elementType = randomFrom(ElementType.BYTE, ElementType.FLOAT); + 
private final ElementType elementType = randomFrom(ElementType.BYTE, ElementType.FLOAT, ElementType.BIT); private final boolean indexed = randomBoolean(); private final boolean indexOptionsSet = indexed && randomBoolean(); @Override public SyntheticSourceExample example(int maxValues) throws IOException { - Object value = elementType == ElementType.BYTE - ? randomList(dims, dims, ESTestCase::randomByte) - : randomList(dims, dims, ESTestCase::randomFloat); + Object value = switch (elementType) { + case BYTE, BIT: + yield randomList(dims, dims, ESTestCase::randomByte); + case FLOAT: + yield randomList(dims, dims, ESTestCase::randomFloat); + }; return new SyntheticSourceExample(value, value, this::mapping); } private void mapping(XContentBuilder b) throws IOException { b.field("type", "dense_vector"); - b.field("dims", dims); - if (elementType == ElementType.BYTE || randomBoolean()) { + if (elementType == ElementType.BYTE || elementType == ElementType.BIT || randomBoolean()) { b.field("element_type", elementType.toString()); } + b.field("dims", elementType == ElementType.BIT ? 
dims * Byte.SIZE : dims); if (indexed) { b.field("index", true); b.field("similarity", "l2_norm"); From 255cbd6f6a3f0ad92433888cd732678765f4bc81 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 15 Oct 2024 06:04:31 +1100 Subject: [PATCH 067/449] Mute org.elasticsearch.xpack.rank.rrf.RRFRankClientYamlTestSuiteIT test {yaml=rrf/800_rrf_with_text_similarity_reranker_retriever/explain using rrf retriever and text-similarity} #114757 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index bbb3dcf008dc7..0fa8f627ba0d1 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -381,6 +381,9 @@ tests: - class: org.elasticsearch.xpack.enrich.EnrichRestIT method: test {p0=enrich/20_standard_index/enrich stats REST response structure} issue: https://github.com/elastic/elasticsearch/issues/114753 +- class: org.elasticsearch.xpack.rank.rrf.RRFRankClientYamlTestSuiteIT + method: test {yaml=rrf/800_rrf_with_text_similarity_reranker_retriever/explain using rrf retriever and text-similarity} + issue: https://github.com/elastic/elasticsearch/issues/114757 # Examples: # From a7752a3d44cf1ffa792b3c8a4de48fd662152d3a Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Mon, 14 Oct 2024 15:31:24 -0400 Subject: [PATCH 068/449] only return deprecation warning for elser service (#114507) Co-authored-by: Elastic Machine --- .../ElasticsearchInternalService.java | 23 +++++++++++-------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 489f2c6706e5f..8b3436a2f4fb7 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -152,6 +152,7 @@ public void parseRequestConfig( config, preferredModelVariant, serviceSettingsMap, + true, chunkingSettings, modelListener ) @@ -183,6 +184,7 @@ public void parseRequestConfig( config, preferredModelVariant, serviceSettingsMap, + OLD_ELSER_SERVICE_NAME.equals(serviceName), chunkingSettings, modelListener ) @@ -342,6 +344,7 @@ private void elserCase( Map config, PreferredModelVariant preferredModelVariant, Map serviceSettingsMap, + boolean isElserService, ChunkingSettings chunkingSettings, ActionListener modelListener ) { @@ -372,15 +375,17 @@ private void elserCase( } } - DEPRECATION_LOGGER.warn( - DeprecationCategory.API, - "inference_api_elser_service", - "The [{}] service is deprecated and will be removed in a future release. Use the [{}] service instead, with" - + " [model_id] set to [{}] in the [service_settings]", - OLD_ELSER_SERVICE_NAME, - ElasticsearchInternalService.NAME, - defaultModelId - ); + if (isElserService) { + DEPRECATION_LOGGER.warn( + DeprecationCategory.API, + "inference_api_elser_service", + "The [{}] service is deprecated and will be removed in a future release. 
Use the [{}] service instead, with" + + " [model_id] set to [{}] in the [service_settings]", + OLD_ELSER_SERVICE_NAME, + ElasticsearchInternalService.NAME, + defaultModelId + ); + } if (modelVariantDoesNotMatchArchitecturesAndIsNotPlatformAgnostic(preferredModelVariant, esServiceSettingsBuilder.getModelId())) { throw new IllegalArgumentException( From 79c5a4f0791ca3ac64053471c8ea589c2a1077f5 Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Mon, 14 Oct 2024 15:34:22 -0400 Subject: [PATCH 069/449] [ML] Stream Google Completion (#114596) Google supports SSE for chat completion and sends the same payload as their non-streaming calls, so we can reuse the SSE parser with our existing parse function. The downside is, google requires a different URI, so we refactored away from the visitor pattern to allow for a different URI creating and set during request time rather than on model instantiation time. --- docs/changelog/114596.yaml | 5 + .../GoogleAiStudioActionCreator.java | 55 ------- .../GoogleAiStudioActionVisitor.java | 21 --- .../GoogleAiStudioResponseHandler.java | 39 +++++ .../GoogleAiStudioStreamingProcessor.java | 57 +++++++ ...oogleAiStudioCompletionRequestManager.java | 11 +- .../GoogleAiStudioCompletionRequest.java | 42 +++-- .../googleaistudio/GoogleAiStudioRequest.java | 19 ++- .../googleaistudio/GoogleAiStudioUtils.java | 2 + ...oogleAiStudioCompletionResponseEntity.java | 39 ++--- .../googleaistudio/GoogleAiStudioModel.java | 6 - .../googleaistudio/GoogleAiStudioService.java | 49 ++++-- .../GoogleAiStudioCompletionModel.java | 44 +----- .../GoogleAiStudioEmbeddingsModel.java | 8 - .../GoogleAiStudioCompletionActionTests.java | 2 +- .../AnthropicStreamingProcessorTests.java | 22 +-- ...GoogleAiStudioStreamingProcessorTests.java | 144 ++++++++++++++++++ .../GoogleAiStudioCompletionRequestTests.java | 11 +- .../StreamingInferenceTestUtils.java | 34 +++++ .../GoogleAiStudioCompletionModelTests.java | 23 ++- 20 files changed, 422 insertions(+), 211 deletions(-) 
create mode 100644 docs/changelog/114596.yaml delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/googleaistudio/GoogleAiStudioActionCreator.java delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/googleaistudio/GoogleAiStudioActionVisitor.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/googleaistudio/GoogleAiStudioStreamingProcessor.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/googleaistudio/GoogleAiStudioStreamingProcessorTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/streaming/StreamingInferenceTestUtils.java diff --git a/docs/changelog/114596.yaml b/docs/changelog/114596.yaml new file mode 100644 index 0000000000000..a36978dcacd8c --- /dev/null +++ b/docs/changelog/114596.yaml @@ -0,0 +1,5 @@ +pr: 114596 +summary: Stream Google Completion +area: Machine Learning +type: enhancement +issues: [] diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/googleaistudio/GoogleAiStudioActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/googleaistudio/GoogleAiStudioActionCreator.java deleted file mode 100644 index 3871b5fb98882..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/googleaistudio/GoogleAiStudioActionCreator.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.external.action.googleaistudio; - -import org.elasticsearch.xpack.inference.external.action.ExecutableAction; -import org.elasticsearch.xpack.inference.external.action.SenderExecutableAction; -import org.elasticsearch.xpack.inference.external.action.SingleInputSenderExecutableAction; -import org.elasticsearch.xpack.inference.external.http.sender.GoogleAiStudioCompletionRequestManager; -import org.elasticsearch.xpack.inference.external.http.sender.GoogleAiStudioEmbeddingsRequestManager; -import org.elasticsearch.xpack.inference.external.http.sender.Sender; -import org.elasticsearch.xpack.inference.services.ServiceComponents; -import org.elasticsearch.xpack.inference.services.googleaistudio.completion.GoogleAiStudioCompletionModel; -import org.elasticsearch.xpack.inference.services.googleaistudio.embeddings.GoogleAiStudioEmbeddingsModel; - -import java.util.Map; -import java.util.Objects; - -import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; - -public class GoogleAiStudioActionCreator implements GoogleAiStudioActionVisitor { - - private static final String COMPLETION_ERROR_MESSAGE = "Google AI Studio completion"; - private final Sender sender; - - private final ServiceComponents serviceComponents; - - public GoogleAiStudioActionCreator(Sender sender, ServiceComponents serviceComponents) { - this.sender = Objects.requireNonNull(sender); - this.serviceComponents = Objects.requireNonNull(serviceComponents); - } - - @Override - public ExecutableAction create(GoogleAiStudioCompletionModel model, Map taskSettings) { - // no overridden model as task settings are always empty for Google AI Studio completion model - var requestManager = new GoogleAiStudioCompletionRequestManager(model, serviceComponents.threadPool()); - var failedToSendRequestErrorMessage = constructFailedToSendRequestMessage(model.uri(), COMPLETION_ERROR_MESSAGE); - return new 
SingleInputSenderExecutableAction(sender, requestManager, failedToSendRequestErrorMessage, COMPLETION_ERROR_MESSAGE); - } - - @Override - public ExecutableAction create(GoogleAiStudioEmbeddingsModel model, Map taskSettings) { - var requestManager = new GoogleAiStudioEmbeddingsRequestManager( - model, - serviceComponents.truncator(), - serviceComponents.threadPool() - ); - var failedToSendRequestErrorMessage = constructFailedToSendRequestMessage(model.uri(), "Google AI Studio embeddings"); - return new SenderExecutableAction(sender, requestManager, failedToSendRequestErrorMessage); - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/googleaistudio/GoogleAiStudioActionVisitor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/googleaistudio/GoogleAiStudioActionVisitor.java deleted file mode 100644 index 2e89200cce53b..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/googleaistudio/GoogleAiStudioActionVisitor.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.external.action.googleaistudio; - -import org.elasticsearch.xpack.inference.external.action.ExecutableAction; -import org.elasticsearch.xpack.inference.services.googleaistudio.completion.GoogleAiStudioCompletionModel; -import org.elasticsearch.xpack.inference.services.googleaistudio.embeddings.GoogleAiStudioEmbeddingsModel; - -import java.util.Map; - -public interface GoogleAiStudioActionVisitor { - - ExecutableAction create(GoogleAiStudioCompletionModel model, Map taskSettings); - - ExecutableAction create(GoogleAiStudioEmbeddingsModel model, Map taskSettings); -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/googleaistudio/GoogleAiStudioResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/googleaistudio/GoogleAiStudioResponseHandler.java index 4ba5b552f802a..0241dcd6142a6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/googleaistudio/GoogleAiStudioResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/googleaistudio/GoogleAiStudioResponseHandler.java @@ -8,23 +8,48 @@ package org.elasticsearch.xpack.inference.external.googleaistudio; import org.apache.logging.log4j.Logger; +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.inference.results.StreamingChatCompletionResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.retry.BaseResponseHandler; import org.elasticsearch.xpack.inference.external.http.retry.ResponseParser; import org.elasticsearch.xpack.inference.external.http.retry.RetryException; import org.elasticsearch.xpack.inference.external.request.Request; import 
org.elasticsearch.xpack.inference.external.response.googleaistudio.GoogleAiStudioErrorResponseEntity; +import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventParser; +import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventProcessor; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import java.io.IOException; +import java.util.concurrent.Flow; + import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.inference.external.http.HttpUtils.checkForEmptyBody; public class GoogleAiStudioResponseHandler extends BaseResponseHandler { static final String GOOGLE_AI_STUDIO_UNAVAILABLE = "The Google AI Studio service may be temporarily overloaded or down"; + private final boolean canHandleStreamingResponses; + private final CheckedFunction content; public GoogleAiStudioResponseHandler(String requestType, ResponseParser parseFunction) { + this(requestType, parseFunction, false, xContentParser -> { + assert false : "do not call this"; + return ""; + }); + } + + public GoogleAiStudioResponseHandler( + String requestType, + ResponseParser parseFunction, + boolean canHandleStreamingResponses, + CheckedFunction content + ) { super(requestType, parseFunction, GoogleAiStudioErrorResponseEntity::fromResponse); + this.canHandleStreamingResponses = canHandleStreamingResponses; + this.content = content; } @Override @@ -72,4 +97,18 @@ private static String resourceNotFoundError(Request request) { return format("Resource not found at [%s]", request.getURI()); } + @Override + public boolean canHandleStreamingResponses() { + return canHandleStreamingResponses; + } + + @Override + public InferenceServiceResults parseResult(Request request, Flow.Publisher flow) { + var serverSentEventProcessor = new ServerSentEventProcessor(new ServerSentEventParser()); + var googleAiProcessor = new GoogleAiStudioStreamingProcessor(content); + flow.subscribe(serverSentEventProcessor); + 
serverSentEventProcessor.subscribe(googleAiProcessor); + return new StreamingChatCompletionResults(googleAiProcessor); + } + } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/googleaistudio/GoogleAiStudioStreamingProcessor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/googleaistudio/GoogleAiStudioStreamingProcessor.java new file mode 100644 index 0000000000000..aa1232f4182e3 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/googleaistudio/GoogleAiStudioStreamingProcessor.java @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.googleaistudio; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.results.StreamingChatCompletionResults; +import org.elasticsearch.xpack.inference.common.DelegatingProcessor; +import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEvent; +import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventField; + +import java.io.IOException; +import java.util.ArrayDeque; +import java.util.Deque; + +class GoogleAiStudioStreamingProcessor extends DelegatingProcessor, StreamingChatCompletionResults.Results> { + private static final Logger log = 
LogManager.getLogger(GoogleAiStudioStreamingProcessor.class); + private final CheckedFunction content; + + GoogleAiStudioStreamingProcessor(CheckedFunction content) { + this.content = content; + } + + @Override + protected void next(Deque item) throws Exception { + var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); + var results = new ArrayDeque(item.size()); + for (ServerSentEvent event : item) { + if (ServerSentEventField.DATA == event.name() && event.hasValue()) { + try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, event.value())) { + var delta = content.apply(jsonParser); + results.offer(new StreamingChatCompletionResults.Result(delta)); + } catch (Exception e) { + log.warn("Failed to parse event from inference provider: {}", event); + throw e; + } + } + } + + if (results.isEmpty()) { + upstream().request(1); + } else { + downstream().onNext(new StreamingChatCompletionResults.Results(results)); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleAiStudioCompletionRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleAiStudioCompletionRequestManager.java index 426102f7f2376..abe50c6fae3f9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleAiStudioCompletionRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleAiStudioCompletionRequestManager.java @@ -19,7 +19,6 @@ import org.elasticsearch.xpack.inference.external.response.googleaistudio.GoogleAiStudioCompletionResponseEntity; import org.elasticsearch.xpack.inference.services.googleaistudio.completion.GoogleAiStudioCompletionModel; -import java.util.List; import java.util.Objects; import java.util.function.Supplier; @@ -32,7 +31,12 @@ 
public class GoogleAiStudioCompletionRequestManager extends GoogleAiStudioReques private final GoogleAiStudioCompletionModel model; private static ResponseHandler createCompletionHandler() { - return new GoogleAiStudioResponseHandler("google ai studio completion", GoogleAiStudioCompletionResponseEntity::fromResponse); + return new GoogleAiStudioResponseHandler( + "google ai studio completion", + GoogleAiStudioCompletionResponseEntity::fromResponse, + true, + GoogleAiStudioCompletionResponseEntity::content + ); } public GoogleAiStudioCompletionRequestManager(GoogleAiStudioCompletionModel model, ThreadPool threadPool) { @@ -47,8 +51,7 @@ public void execute( Supplier hasRequestCompletedFunction, ActionListener listener ) { - List docsInput = DocumentsOnlyInput.of(inferenceInputs).getInputs(); - GoogleAiStudioCompletionRequest request = new GoogleAiStudioCompletionRequest(docsInput, model); + GoogleAiStudioCompletionRequest request = new GoogleAiStudioCompletionRequest(DocumentsOnlyInput.of(inferenceInputs), model); execute(new ExecutableInferenceRequest(requestSender, logger, request, HANDLER, hasRequestCompletedFunction, listener)); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/googleaistudio/GoogleAiStudioCompletionRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/googleaistudio/GoogleAiStudioCompletionRequest.java index f52fe623e7918..80770d63ef139 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/googleaistudio/GoogleAiStudioCompletionRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/googleaistudio/GoogleAiStudioCompletionRequest.java @@ -11,46 +11,63 @@ import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.ByteArrayEntity; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; 
+import org.elasticsearch.common.util.LazyInitializable; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.request.HttpRequest; import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.services.googleaistudio.completion.GoogleAiStudioCompletionModel; import java.net.URI; import java.nio.charset.StandardCharsets; -import java.util.List; import java.util.Objects; public class GoogleAiStudioCompletionRequest implements GoogleAiStudioRequest { + private static final String ALT_PARAM = "alt"; + private static final String SSE_VALUE = "sse"; - private final List input; + private final DocumentsOnlyInput input; - private final URI uri; + private final LazyInitializable uri; private final GoogleAiStudioCompletionModel model; - public GoogleAiStudioCompletionRequest(List input, GoogleAiStudioCompletionModel model) { - this.input = input; + public GoogleAiStudioCompletionRequest(DocumentsOnlyInput input, GoogleAiStudioCompletionModel model) { + this.input = Objects.requireNonNull(input); this.model = Objects.requireNonNull(model); - this.uri = model.uri(); + this.uri = new LazyInitializable<>(() -> model.uri(input.stream())); } @Override public HttpRequest createHttpRequest() { - var httpPost = new HttpPost(uri); - var requestEntity = Strings.toString(new GoogleAiStudioCompletionRequestEntity(input)); + var httpPost = createHttpPost(); + var requestEntity = Strings.toString(new GoogleAiStudioCompletionRequestEntity(input.getInputs())); ByteArrayEntity byteEntity = new ByteArrayEntity(requestEntity.getBytes(StandardCharsets.UTF_8)); httpPost.setEntity(byteEntity); httpPost.setHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType()); - GoogleAiStudioRequest.decorateWithApiKeyParameter(httpPost, model.getSecretSettings()); return new HttpRequest(httpPost, getInferenceEntityId()); } + private 
HttpPost createHttpPost() { + try { + var uriBuilder = GoogleAiStudioRequest.builderWithApiKeyParameter(uri.getOrCompute(), model.getSecretSettings()); + if (isStreaming()) { + uriBuilder.addParameter(ALT_PARAM, SSE_VALUE); + } + return new HttpPost(uriBuilder.build()); + } catch (Exception e) { + ValidationException validationException = new ValidationException(e); + validationException.addValidationError(e.getMessage()); + throw validationException; + } + } + @Override public URI getURI() { - return this.uri; + return uri.getOrCompute(); } @Override @@ -69,4 +86,9 @@ public boolean[] getTruncationInfo() { public String getInferenceEntityId() { return model.getInferenceEntityId(); } + + @Override + public boolean isStreaming() { + return input.stream(); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/googleaistudio/GoogleAiStudioRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/googleaistudio/GoogleAiStudioRequest.java index fb99deabc9c5e..45403fb8e507d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/googleaistudio/GoogleAiStudioRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/googleaistudio/GoogleAiStudioRequest.java @@ -13,20 +13,15 @@ import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; +import java.net.URI; + public interface GoogleAiStudioRequest extends Request { String API_KEY_PARAMETER = "key"; static void decorateWithApiKeyParameter(HttpPost httpPost, DefaultSecretSettings secretSettings) { try { - var uri = httpPost.getURI(); - var uriWithApiKey = new URIBuilder().setScheme(uri.getScheme()) - .setHost(uri.getHost()) - .setPort(uri.getPort()) - .setPath(uri.getPath()) - .addParameter(API_KEY_PARAMETER, secretSettings.apiKey().toString()) 
- .build(); - + var uriWithApiKey = builderWithApiKeyParameter(httpPost.getURI(), secretSettings).build(); httpPost.setURI(uriWithApiKey); } catch (Exception e) { ValidationException validationException = new ValidationException(e); @@ -35,4 +30,12 @@ static void decorateWithApiKeyParameter(HttpPost httpPost, DefaultSecretSettings } } + static URIBuilder builderWithApiKeyParameter(URI uri, DefaultSecretSettings secretSettings) { + return new URIBuilder().setScheme(uri.getScheme()) + .setHost(uri.getHost()) + .setPort(uri.getPort()) + .setPath(uri.getPath()) + .addParameter(API_KEY_PARAMETER, secretSettings.apiKey().toString()); + } + } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/googleaistudio/GoogleAiStudioUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/googleaistudio/GoogleAiStudioUtils.java index 81ad5b6203682..16c9e7254e108 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/googleaistudio/GoogleAiStudioUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/googleaistudio/GoogleAiStudioUtils.java @@ -17,6 +17,8 @@ public class GoogleAiStudioUtils { public static final String GENERATE_CONTENT_ACTION = "generateContent"; + public static final String STREAM_GENERATE_CONTENT_ACTION = "streamGenerateContent"; + public static final String BATCH_EMBED_CONTENTS_ACTION = "batchEmbedContents"; private GoogleAiStudioUtils() {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googleaistudio/GoogleAiStudioCompletionResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googleaistudio/GoogleAiStudioCompletionResponseEntity.java index 852f25705d6ff..11dddc78bc469 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googleaistudio/GoogleAiStudioCompletionResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googleaistudio/GoogleAiStudioCompletionResponseEntity.java @@ -77,33 +77,36 @@ public class GoogleAiStudioCompletionResponseEntity { public static ChatCompletionResults fromResponse(Request request, HttpResult response) throws IOException { var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { - moveToFirstToken(jsonParser); + return new ChatCompletionResults(List.of(new ChatCompletionResults.Result(content(jsonParser)))); + } + } - XContentParser.Token token = jsonParser.currentToken(); - ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + public static String content(XContentParser jsonParser) throws IOException { + moveToFirstToken(jsonParser); - positionParserAtTokenAfterField(jsonParser, "candidates", FAILED_TO_FIND_FIELD_TEMPLATE); + XContentParser.Token token = jsonParser.currentToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); - jsonParser.nextToken(); - ensureExpectedToken(XContentParser.Token.START_OBJECT, jsonParser.currentToken(), jsonParser); + positionParserAtTokenAfterField(jsonParser, "candidates", FAILED_TO_FIND_FIELD_TEMPLATE); - positionParserAtTokenAfterField(jsonParser, "content", FAILED_TO_FIND_FIELD_TEMPLATE); + jsonParser.nextToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, jsonParser.currentToken(), jsonParser); - token = jsonParser.currentToken(); - ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + positionParserAtTokenAfterField(jsonParser, "content", FAILED_TO_FIND_FIELD_TEMPLATE); - 
positionParserAtTokenAfterField(jsonParser, "parts", FAILED_TO_FIND_FIELD_TEMPLATE); + token = jsonParser.currentToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); - jsonParser.nextToken(); - ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + positionParserAtTokenAfterField(jsonParser, "parts", FAILED_TO_FIND_FIELD_TEMPLATE); - positionParserAtTokenAfterField(jsonParser, "text", FAILED_TO_FIND_FIELD_TEMPLATE); + jsonParser.nextToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); - XContentParser.Token contentToken = jsonParser.currentToken(); - ensureExpectedToken(XContentParser.Token.VALUE_STRING, contentToken, jsonParser); - String content = jsonParser.text(); + positionParserAtTokenAfterField(jsonParser, "text", FAILED_TO_FIND_FIELD_TEMPLATE); + + XContentParser.Token contentToken = jsonParser.currentToken(); + ensureExpectedToken(XContentParser.Token.VALUE_STRING, contentToken, jsonParser); + return jsonParser.text(); - return new ChatCompletionResults(List.of(new ChatCompletionResults.Result(content))); - } } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioModel.java index d817a3bbb73ef..d29095be808b9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioModel.java @@ -7,15 +7,11 @@ package org.elasticsearch.xpack.inference.services.googleaistudio; -import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; import 
org.elasticsearch.inference.ServiceSettings; -import org.elasticsearch.xpack.inference.external.action.ExecutableAction; -import org.elasticsearch.xpack.inference.external.action.googleaistudio.GoogleAiStudioActionVisitor; -import java.util.Map; import java.util.Objects; public abstract class GoogleAiStudioModel extends Model { @@ -38,8 +34,6 @@ public GoogleAiStudioModel(GoogleAiStudioModel model, ServiceSettings serviceSet rateLimitServiceSettings = model.rateLimitServiceSettings(); } - public abstract ExecutableAction accept(GoogleAiStudioActionVisitor creator, Map taskSettings, InputType inputType); - public GoogleAiStudioRateLimitServiceSettings rateLimitServiceSettings() { return rateLimitServiceSettings; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java index 798ce4ea5800b..c685441271194 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java @@ -27,8 +27,11 @@ import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; -import org.elasticsearch.xpack.inference.external.action.googleaistudio.GoogleAiStudioActionCreator; +import org.elasticsearch.xpack.inference.external.action.SenderExecutableAction; +import org.elasticsearch.xpack.inference.external.action.SingleInputSenderExecutableAction; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; +import org.elasticsearch.xpack.inference.external.http.sender.GoogleAiStudioCompletionRequestManager; +import 
org.elasticsearch.xpack.inference.external.http.sender.GoogleAiStudioEmbeddingsRequestManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; @@ -42,7 +45,9 @@ import java.util.List; import java.util.Map; +import java.util.Set; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrDefaultEmpty; @@ -211,6 +216,11 @@ public TransportVersion getMinimalSupportedVersion() { return TransportVersions.ML_INFERENCE_GOOGLE_AI_STUDIO_COMPLETION_ADDED; } + @Override + public Set supportedStreamingTasks() { + return COMPLETION_ONLY; + } + @Override public void checkModelConfig(Model model, ActionListener listener) { // TODO: Remove this function once all services have been updated to use the new model validators @@ -247,16 +257,32 @@ protected void doInfer( TimeValue timeout, ActionListener listener ) { - if (model instanceof GoogleAiStudioModel == false) { + if (model instanceof GoogleAiStudioCompletionModel completionModel) { + var requestManager = new GoogleAiStudioCompletionRequestManager(completionModel, getServiceComponents().threadPool()); + var docsOnly = DocumentsOnlyInput.of(inputs); + var failedToSendRequestErrorMessage = constructFailedToSendRequestMessage( + completionModel.uri(docsOnly.stream()), + "Google AI Studio completion" + ); + var action = new SingleInputSenderExecutableAction( + getSender(), + requestManager, + failedToSendRequestErrorMessage, + "Google AI Studio completion" + ); + action.execute(inputs, timeout, listener); + } 
else if (model instanceof GoogleAiStudioEmbeddingsModel embeddingsModel) { + var requestManager = new GoogleAiStudioEmbeddingsRequestManager( + embeddingsModel, + getServiceComponents().truncator(), + getServiceComponents().threadPool() + ); + var failedToSendRequestErrorMessage = constructFailedToSendRequestMessage(embeddingsModel.uri(), "Google AI Studio embeddings"); + var action = new SenderExecutableAction(getSender(), requestManager, failedToSendRequestErrorMessage); + action.execute(inputs, timeout, listener); + } else { listener.onFailure(createInvalidModelException(model)); - return; } - - GoogleAiStudioModel googleAiStudioModel = (GoogleAiStudioModel) model; - var actionCreator = new GoogleAiStudioActionCreator(getSender(), getServiceComponents()); - - var action = googleAiStudioModel.accept(actionCreator, taskSettings, inputType); - action.execute(inputs, timeout, listener); } @Override @@ -270,7 +296,6 @@ protected void doChunkedInfer( ActionListener> listener ) { GoogleAiStudioModel googleAiStudioModel = (GoogleAiStudioModel) model; - var actionCreator = new GoogleAiStudioActionCreator(getSender(), getServiceComponents()); List batchedRequests; if (ChunkingSettingsFeatureFlag.isEnabled()) { @@ -287,10 +312,8 @@ protected void doChunkedInfer( EmbeddingRequestChunker.EmbeddingType.FLOAT ).batchRequestsWithListeners(listener); } - for (var request : batchedRequests) { - var action = googleAiStudioModel.accept(actionCreator, taskSettings, inputType); - action.execute(new DocumentsOnlyInput(request.batch().inputs()), timeout, request.listener()); + doInfer(model, new DocumentsOnlyInput(request.batch().inputs()), taskSettings, inputType, timeout, request.listener()); } } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionModel.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionModel.java index 8fa2ac0148716..7b793ab37d342 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionModel.java @@ -10,13 +10,10 @@ import org.apache.http.client.utils.URIBuilder; import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.EmptyTaskSettings; -import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.xpack.inference.external.action.ExecutableAction; -import org.elasticsearch.xpack.inference.external.action.googleaistudio.GoogleAiStudioActionVisitor; import org.elasticsearch.xpack.inference.external.request.googleaistudio.GoogleAiStudioUtils; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.googleaistudio.GoogleAiStudioModel; @@ -30,8 +27,6 @@ public class GoogleAiStudioCompletionModel extends GoogleAiStudioModel { - private URI uri; - public GoogleAiStudioCompletionModel( String inferenceEntityId, TaskType taskType, @@ -65,39 +60,20 @@ public GoogleAiStudioCompletionModel( new ModelSecrets(secrets), serviceSettings ); - try { - this.uri = buildUri(serviceSettings.modelId()); - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } } - // Should only be used directly for testing - GoogleAiStudioCompletionModel( - String inferenceEntityId, - TaskType taskType, - String service, - String url, - GoogleAiStudioCompletionServiceSettings serviceSettings, - TaskSettings 
taskSettings, - @Nullable DefaultSecretSettings secrets - ) { - super( - new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings), - new ModelSecrets(secrets), - serviceSettings - ); + public URI uri(boolean streaming) { try { - this.uri = new URI(url); + var api = streaming ? GoogleAiStudioUtils.STREAM_GENERATE_CONTENT_ACTION : GoogleAiStudioUtils.GENERATE_CONTENT_ACTION; + return new URIBuilder().setScheme("https") + .setHost(GoogleAiStudioUtils.HOST_SUFFIX) + .setPathSegments(GoogleAiStudioUtils.V1, GoogleAiStudioUtils.MODELS, format("%s:%s", getServiceSettings().modelId(), api)) + .build(); } catch (URISyntaxException e) { throw new RuntimeException(e); } } - public URI uri() { - return uri; - } - @Override public GoogleAiStudioCompletionServiceSettings getServiceSettings() { return (GoogleAiStudioCompletionServiceSettings) super.getServiceSettings(); @@ -108,7 +84,8 @@ public DefaultSecretSettings getSecretSettings() { return (DefaultSecretSettings) super.getSecretSettings(); } - public static URI buildUri(String model) throws URISyntaxException { + // visible for testing + static URI buildUri(String model) throws URISyntaxException { return new URIBuilder().setScheme("https") .setHost(GoogleAiStudioUtils.HOST_SUFFIX) .setPathSegments( @@ -118,9 +95,4 @@ public static URI buildUri(String model) throws URISyntaxException { ) .build(); } - - @Override - public ExecutableAction accept(GoogleAiStudioActionVisitor visitor, Map taskSettings, InputType inputType) { - return visitor.create(this, taskSettings); - } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/embeddings/GoogleAiStudioEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/embeddings/GoogleAiStudioEmbeddingsModel.java index 5d46a8e129dff..a9434fb473599 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/embeddings/GoogleAiStudioEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/embeddings/GoogleAiStudioEmbeddingsModel.java @@ -11,13 +11,10 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptyTaskSettings; -import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.xpack.inference.external.action.ExecutableAction; -import org.elasticsearch.xpack.inference.external.action.googleaistudio.GoogleAiStudioActionVisitor; import org.elasticsearch.xpack.inference.external.request.googleaistudio.GoogleAiStudioUtils; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.googleaistudio.GoogleAiStudioModel; @@ -139,11 +136,6 @@ public URI uri() { return uri; } - @Override - public ExecutableAction accept(GoogleAiStudioActionVisitor visitor, Map taskSettings, InputType inputType) { - return visitor.create(this, taskSettings); - } - public static URI buildUri(String model) throws URISyntaxException { return new URIBuilder().setScheme("https") .setHost(GoogleAiStudioUtils.HOST_SUFFIX) diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/googleaistudio/GoogleAiStudioCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/googleaistudio/GoogleAiStudioCompletionActionTests.java index 6fcd386702497..72b5ffa45a0dd 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/googleaistudio/GoogleAiStudioCompletionActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/googleaistudio/GoogleAiStudioCompletionActionTests.java @@ -272,7 +272,7 @@ public void testExecute_ThrowsException_WhenInputIsGreaterThanOne() throws IOExc private ExecutableAction createAction(String url, String apiKey, String modelName, Sender sender) { var model = GoogleAiStudioCompletionModelTests.createModel(modelName, url, apiKey); var requestManager = new GoogleAiStudioCompletionRequestManager(model, threadPool); - var failedToSendRequestErrorMessage = constructFailedToSendRequestMessage(model.uri(), "Google AI Studio completion"); + var failedToSendRequestErrorMessage = constructFailedToSendRequestMessage(model.uri(false), "Google AI Studio completion"); return new SingleInputSenderExecutableAction( sender, requestManager, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/anthropic/AnthropicStreamingProcessorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/anthropic/AnthropicStreamingProcessorTests.java index 1667dac84d2db..ba6bcf8b57d5b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/anthropic/AnthropicStreamingProcessorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/anthropic/AnthropicStreamingProcessorTests.java @@ -11,20 +11,17 @@ import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.inference.results.StreamingChatCompletionResults; import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEvent; -import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventField; 
-import org.hamcrest.Matcher; -import org.hamcrest.Matchers; import java.util.ArrayDeque; -import java.util.Arrays; import java.util.Deque; import java.util.Map; import java.util.concurrent.Flow; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.xpack.inference.common.DelegatingProcessorTests.onNext; +import static org.elasticsearch.xpack.inference.external.response.streaming.StreamingInferenceTestUtils.containsResults; +import static org.elasticsearch.xpack.inference.external.response.streaming.StreamingInferenceTestUtils.events; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.isA; import static org.hamcrest.Matchers.notNullValue; @@ -133,21 +130,6 @@ public void testDroppedEventsRequestsMoreData() throws Exception { verify(downstream, times(0)).onNext(any()); } - private Deque events(String... data) { - var item = new ArrayDeque(); - Arrays.stream(data).map(datum -> new ServerSentEvent(ServerSentEventField.DATA, datum)).forEach(item::offer); - return item; - } - - @SuppressWarnings("unchecked") - private Matcher> containsResults(String... 
results) { - Matcher[] resultMatcher = Arrays.stream(results) - .map(StreamingChatCompletionResults.Result::new) - .map(Matchers::equalTo) - .toArray(Matcher[]::new); - return Matchers.contains(resultMatcher); - } - private static ElasticsearchStatusException onError(Deque item) { var processor = new AnthropicStreamingProcessor(); var response = new AtomicReference(); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/googleaistudio/GoogleAiStudioStreamingProcessorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/googleaistudio/GoogleAiStudioStreamingProcessorTests.java new file mode 100644 index 0000000000000..f41fe5b765c8c --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/googleaistudio/GoogleAiStudioStreamingProcessorTests.java @@ -0,0 +1,144 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.googleaistudio; + +import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.external.response.googleaistudio.GoogleAiStudioCompletionResponseEntity; +import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEvent; + +import java.util.ArrayDeque; +import java.util.concurrent.Flow; + +import static org.elasticsearch.xpack.inference.common.DelegatingProcessorTests.onError; +import static org.elasticsearch.xpack.inference.common.DelegatingProcessorTests.onNext; +import static org.elasticsearch.xpack.inference.external.response.streaming.StreamingInferenceTestUtils.containsResults; +import static org.elasticsearch.xpack.inference.external.response.streaming.StreamingInferenceTestUtils.events; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.sameInstance; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +public class GoogleAiStudioStreamingProcessorTests extends ESTestCase { + + public void testParseSuccess() { + var item = events(""" + { + "candidates": [ + { + "content": { + "parts": [ + { + "text": "Hello" + } + ], + "role": "model" + }, + "finishReason": "STOP", + "index": 0, + "safetyRatings": [ + { + "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", + "probability": "NEGLIGIBLE" + }, + { + "category": "HARM_CATEGORY_HATE_SPEECH", + "probability": "NEGLIGIBLE" + }, + { + "category": "HARM_CATEGORY_HARASSMENT", + "probability": "NEGLIGIBLE" + }, + { + "category": "HARM_CATEGORY_DANGEROUS_CONTENT", + "probability": "NEGLIGIBLE" + } + ] + } + ], + "usageMetadata": { + "promptTokenCount": 1, + "candidatesTokenCount": 1, + "totalTokenCount": 1 + } + }""", """ + { + "candidates": [ + { + "content": { + "parts": [ + { + "text": ", World" + } + ], + 
"role": "model" + }, + "finishReason": "STOP", + "index": 0, + "safetyRatings": [ + { + "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", + "probability": "NEGLIGIBLE" + }, + { + "category": "HARM_CATEGORY_HATE_SPEECH", + "probability": "NEGLIGIBLE" + }, + { + "category": "HARM_CATEGORY_HARASSMENT", + "probability": "NEGLIGIBLE" + }, + { + "category": "HARM_CATEGORY_DANGEROUS_CONTENT", + "probability": "NEGLIGIBLE" + } + ] + } + ], + "usageMetadata": { + "promptTokenCount": 1, + "candidatesTokenCount": 1, + "totalTokenCount": 1 + } + }"""); + + var response = onNext(new GoogleAiStudioStreamingProcessor(GoogleAiStudioCompletionResponseEntity::content), item); + assertThat(response.results().size(), equalTo(2)); + assertThat(response.results(), containsResults("Hello", ", World")); + } + + public void testEmptyResultsRequestsMoreData() throws Exception { + var emptyDeque = new ArrayDeque(); + + var processor = new GoogleAiStudioStreamingProcessor(noOp -> { + fail("This should not be called"); + return null; + }); + + Flow.Subscriber downstream = mock(); + processor.subscribe(downstream); + + Flow.Subscription upstream = mock(); + processor.onSubscribe(upstream); + + processor.next(emptyDeque); + + verify(upstream, times(1)).request(1); + verify(downstream, times(0)).onNext(any()); + } + + public void testOnError() { + var expectedException = new RuntimeException("hello"); + + var processor = new GoogleAiStudioStreamingProcessor(noOp -> { throw expectedException; }); + + assertThat(onError(processor, events("hi")), sameInstance(expectedException)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/googleaistudio/completion/GoogleAiStudioCompletionRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/googleaistudio/completion/GoogleAiStudioCompletionRequestTests.java index 7d7ee1dcba6c2..7ffa8940ad6be 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/googleaistudio/completion/GoogleAiStudioCompletionRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/googleaistudio/completion/GoogleAiStudioCompletionRequestTests.java @@ -10,6 +10,7 @@ import org.apache.http.client.methods.HttpPost; import org.elasticsearch.common.Strings; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.request.googleaistudio.GoogleAiStudioCompletionRequest; import org.elasticsearch.xpack.inference.services.googleaistudio.completion.GoogleAiStudioCompletionModelTests; @@ -29,7 +30,7 @@ public void testCreateRequest() throws IOException { var apiKey = "api_key"; var input = "input"; - var request = new GoogleAiStudioCompletionRequest(List.of(input), GoogleAiStudioCompletionModelTests.createModel("model", apiKey)); + var request = new GoogleAiStudioCompletionRequest(listOf(input), GoogleAiStudioCompletionModelTests.createModel("model", apiKey)); var httpRequest = request.createHttpRequest(); assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); @@ -54,7 +55,7 @@ public void testCreateRequest() throws IOException { public void testTruncate_ReturnsSameInstance() { var request = new GoogleAiStudioCompletionRequest( - List.of("input"), + listOf("input"), GoogleAiStudioCompletionModelTests.createModel("model", "api key") ); var truncatedRequest = request.truncate(); @@ -64,10 +65,14 @@ public void testTruncate_ReturnsSameInstance() { public void testTruncationInfo_ReturnsNull() { var request = new GoogleAiStudioCompletionRequest( - List.of("input"), + listOf("input"), GoogleAiStudioCompletionModelTests.createModel("model", "api key") ); assertNull(request.getTruncationInfo()); } + + private static DocumentsOnlyInput listOf(String... 
input) { + return new DocumentsOnlyInput(List.of(input)); + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/streaming/StreamingInferenceTestUtils.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/streaming/StreamingInferenceTestUtils.java new file mode 100644 index 0000000000000..e0aef58c4f3b3 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/streaming/StreamingInferenceTestUtils.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.response.streaming; + +import org.elasticsearch.xpack.core.inference.results.StreamingChatCompletionResults; +import org.hamcrest.Matcher; +import org.hamcrest.Matchers; + +import java.util.ArrayDeque; +import java.util.Arrays; +import java.util.Deque; + +public class StreamingInferenceTestUtils { + + public static Deque events(String... data) { + var item = new ArrayDeque(); + Arrays.stream(data).map(datum -> new ServerSentEvent(ServerSentEventField.DATA, datum)).forEach(item::offer); + return item; + } + + @SuppressWarnings("unchecked") + public static Matcher> containsResults(String... 
results) { + Matcher[] resultMatcher = Arrays.stream(results) + .map(StreamingChatCompletionResults.Result::new) + .map(Matchers::equalTo) + .toArray(Matcher[]::new); + return Matchers.contains(resultMatcher); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionModelTests.java index f4c13db78c4bc..3d523d7cab498 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionModelTests.java @@ -14,11 +14,15 @@ import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; +import java.net.URI; import java.net.URISyntaxException; import java.util.HashMap; import java.util.Map; import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; public class GoogleAiStudioCompletionModelTests extends ESTestCase { @@ -55,14 +59,17 @@ public static GoogleAiStudioCompletionModel createModel(String model, String api } public static GoogleAiStudioCompletionModel createModel(String model, String url, String apiKey) { - return new GoogleAiStudioCompletionModel( - "id", - TaskType.COMPLETION, - "service", - url, - new GoogleAiStudioCompletionServiceSettings(model, null), - EmptyTaskSettings.INSTANCE, - new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) + var googleModel = spy( + new GoogleAiStudioCompletionModel( + "id", + TaskType.COMPLETION, + "service", + new 
GoogleAiStudioCompletionServiceSettings(model, null), + EmptyTaskSettings.INSTANCE, + new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) + ) ); + when(googleModel.uri(anyBoolean())).thenReturn(URI.create(url)); + return googleModel; } } From 97d1c413a89864a60c2ca2ebd3c5a44cf9fd3c17 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 15 Oct 2024 07:04:57 +1100 Subject: [PATCH 070/449] Mute org.elasticsearch.xpack.enrich.EnrichRestIT test {p0=enrich/30_tsdb_index/enrich documents over _bulk} #114761 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 0fa8f627ba0d1..650a600745abb 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -384,6 +384,9 @@ tests: - class: org.elasticsearch.xpack.rank.rrf.RRFRankClientYamlTestSuiteIT method: test {yaml=rrf/800_rrf_with_text_similarity_reranker_retriever/explain using rrf retriever and text-similarity} issue: https://github.com/elastic/elasticsearch/issues/114757 +- class: org.elasticsearch.xpack.enrich.EnrichRestIT + method: test {p0=enrich/30_tsdb_index/enrich documents over _bulk} + issue: https://github.com/elastic/elasticsearch/issues/114761 # Examples: # From a74ede750ad84970e3e6ab12a3063048fc588283 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 15 Oct 2024 07:28:22 +1100 Subject: [PATCH 071/449] Mute org.elasticsearch.xpack.enrich.EnrichRestIT test {p0=enrich/20_standard_index/enrich documents over _bulk via an alias} #114763 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 650a600745abb..d78430db035e5 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -387,6 +387,9 @@ tests: - class: org.elasticsearch.xpack.enrich.EnrichRestIT method: test {p0=enrich/30_tsdb_index/enrich documents over _bulk} issue: 
https://github.com/elastic/elasticsearch/issues/114761 +- class: org.elasticsearch.xpack.enrich.EnrichRestIT + method: test {p0=enrich/20_standard_index/enrich documents over _bulk via an alias} + issue: https://github.com/elastic/elasticsearch/issues/114763 # Examples: # From 01bfdf82620c190faa07ce54525b125914440fc7 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 15 Oct 2024 07:36:11 +1100 Subject: [PATCH 072/449] Mute org.elasticsearch.xpack.enrich.EnrichRestIT test {p0=enrich/10_basic/Test enrich crud apis} #114766 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index d78430db035e5..2cd3c8112acca 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -390,6 +390,9 @@ tests: - class: org.elasticsearch.xpack.enrich.EnrichRestIT method: test {p0=enrich/20_standard_index/enrich documents over _bulk via an alias} issue: https://github.com/elastic/elasticsearch/issues/114763 +- class: org.elasticsearch.xpack.enrich.EnrichRestIT + method: test {p0=enrich/10_basic/Test enrich crud apis} + issue: https://github.com/elastic/elasticsearch/issues/114766 # Examples: # From 8040fbb0d05401d40ea856f0a4982e8aaab48340 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Mon, 14 Oct 2024 21:39:57 +0100 Subject: [PATCH 073/449] [ML] Dynamically get of num allocations (#114636) --- docs/changelog/114636.yaml | 5 ++ .../inference/InferenceService.java | 4 ++ .../TransportGetInferenceModelAction.java | 72 ++++++++++++++----- .../ElasticsearchInternalModel.java | 11 ++- .../ElasticsearchInternalService.java | 46 ++++++++++-- .../ElasticsearchInternalServiceSettings.java | 4 ++ .../ElserInternalModelTests.java | 30 ++++++++ 7 files changed, 148 insertions(+), 24 deletions(-) create mode 100644 docs/changelog/114636.yaml create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java 
diff --git a/docs/changelog/114636.yaml b/docs/changelog/114636.yaml new file mode 100644 index 0000000000000..c63876fda67f7 --- /dev/null +++ b/docs/changelog/114636.yaml @@ -0,0 +1,5 @@ +pr: 114636 +summary: Dynamically get of num allocations +area: Machine Learning +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index 190f2d689a58d..835262ff28edc 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -210,4 +210,8 @@ default List defaultConfigIds() { default void defaultConfigs(ActionListener> defaultsListener) { defaultsListener.onResponse(List.of()); } + + default void updateModelsWithDynamicFields(List model, ActionListener> listener) { + listener.onResponse(model); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java index 5ee1e40869dbc..55aad5c55a2ac 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java @@ -9,13 +9,13 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.inference.InferenceServiceRegistry; -import 
org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.injection.guice.Inject; @@ -29,8 +29,11 @@ import org.elasticsearch.xpack.inference.registry.ModelRegistry; import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashMap; import java.util.List; import java.util.concurrent.Executor; +import java.util.stream.Collectors; public class TransportGetInferenceModelAction extends HandledTransportAction< GetInferenceModelAction.Request, @@ -96,38 +99,69 @@ private void getSingleModel( var model = service.get() .parsePersistedConfig(unparsedModel.inferenceEntityId(), unparsedModel.taskType(), unparsedModel.settings()); - delegate.onResponse(new GetInferenceModelAction.Response(List.of(model.getConfigurations()))); + + service.get() + .updateModelsWithDynamicFields( + List.of(model), + delegate.delegateFailureAndWrap( + (l2, updatedModels) -> l2.onResponse( + new GetInferenceModelAction.Response( + updatedModels.stream().map(Model::getConfigurations).collect(Collectors.toList()) + ) + ) + ) + ); })); } private void getAllModels(ActionListener listener) { - modelRegistry.getAllModels( - listener.delegateFailureAndWrap((l, models) -> executor.execute(ActionRunnable.supply(l, () -> parseModels(models)))) - ); + modelRegistry.getAllModels(listener.delegateFailureAndWrap((l, models) -> executor.execute(() -> parseModels(models, listener)))); } private void getModelsByTaskType(TaskType taskType, ActionListener listener) { modelRegistry.getModelsByTaskType( taskType, - listener.delegateFailureAndWrap((l, models) -> executor.execute(ActionRunnable.supply(l, () -> parseModels(models)))) + listener.delegateFailureAndWrap((l, models) -> executor.execute(() -> parseModels(models, listener))) ); } - private GetInferenceModelAction.Response parseModels(List unparsedModels) { - var parsedModels = new 
ArrayList(); - - for (var unparsedModel : unparsedModels) { - var service = serviceRegistry.getService(unparsedModel.service()); - if (service.isEmpty()) { - throw serviceNotFoundException(unparsedModel.service(), unparsedModel.inferenceEntityId()); + private void parseModels(List unparsedModels, ActionListener listener) { + var parsedModelsByService = new HashMap>(); + try { + for (var unparsedModel : unparsedModels) { + var service = serviceRegistry.getService(unparsedModel.service()); + if (service.isEmpty()) { + throw serviceNotFoundException(unparsedModel.service(), unparsedModel.inferenceEntityId()); + } + var list = parsedModelsByService.computeIfAbsent(service.get().name(), s -> new ArrayList<>()); + list.add( + service.get() + .parsePersistedConfig(unparsedModel.inferenceEntityId(), unparsedModel.taskType(), unparsedModel.settings()) + ); } - parsedModels.add( - service.get() - .parsePersistedConfig(unparsedModel.inferenceEntityId(), unparsedModel.taskType(), unparsedModel.settings()) - .getConfigurations() + + var groupedListener = new GroupedActionListener>( + parsedModelsByService.entrySet().size(), + listener.delegateFailureAndWrap((delegate, listOfListOfModels) -> { + var modifiable = new ArrayList(); + for (var l : listOfListOfModels) { + modifiable.addAll(l); + } + modifiable.sort(Comparator.comparing(Model::getInferenceEntityId)); + delegate.onResponse( + new GetInferenceModelAction.Response(modifiable.stream().map(Model::getConfigurations).collect(Collectors.toList())) + ); + }) ); + + for (var entry : parsedModelsByService.entrySet()) { + serviceRegistry.getService(entry.getKey()) + .get() // must be non-null to get this far + .updateModelsWithDynamicFields(entry.getValue(), groupedListener); + } + } catch (Exception e) { + listener.onFailure(e); } - return new GetInferenceModelAction.Response(parsedModels); } private ElasticsearchStatusException serviceNotFoundException(String service, String inferenceId) { diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java index 2aee37312a16e..f312790ded655 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java @@ -21,7 +21,7 @@ public abstract class ElasticsearchInternalModel extends Model { - protected final ElasticsearchInternalServiceSettings internalServiceSettings; + protected ElasticsearchInternalServiceSettings internalServiceSettings; public ElasticsearchInternalModel( String inferenceEntityId, @@ -87,6 +87,15 @@ public ElasticsearchInternalServiceSettings getServiceSettings() { return (ElasticsearchInternalServiceSettings) super.getServiceSettings(); } + public void updateNumAllocation(Integer numAllocations) { + this.internalServiceSettings = new ElasticsearchInternalServiceSettings( + numAllocations, + this.internalServiceSettings.getNumThreads(), + this.internalServiceSettings.modelId(), + this.internalServiceSettings.getAdaptiveAllocationsSettings() + ); + } + @Override public String toString() { return Strings.toString(this.getConfigurations()); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 8b3436a2f4fb7..fa04a9c20c3b3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -32,6 +32,7 @@ import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; +import org.elasticsearch.xpack.core.ml.action.GetDeploymentStatsAction; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; @@ -50,6 +51,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.EnumSet; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -801,11 +803,47 @@ public List defaultConfigIds() { return List.of(new DefaultConfigId(DEFAULT_ELSER_ID, TaskType.SPARSE_EMBEDDING, this)); } - /** - * Default configurations that can be out of the box without creating an endpoint first. 
- * @param defaultsListener Config listener - */ @Override + public void updateModelsWithDynamicFields(List models, ActionListener> listener) { + var modelsByDeploymentIds = new HashMap(); + for (var model : models) { + if (model instanceof ElasticsearchInternalModel esModel) { + modelsByDeploymentIds.put(esModel.internalServiceSettings.deloymentId(), esModel); + } else { + listener.onFailure( + new ElasticsearchStatusException( + "Cannot update model [{}] as it is not an Elasticsearch service model", + RestStatus.INTERNAL_SERVER_ERROR, + model.getInferenceEntityId() + ) + ); + return; + } + } + + if (modelsByDeploymentIds.isEmpty()) { + listener.onResponse(models); + return; + } + + String deploymentIds = String.join(",", modelsByDeploymentIds.keySet()); + client.execute( + GetDeploymentStatsAction.INSTANCE, + new GetDeploymentStatsAction.Request(deploymentIds), + ActionListener.wrap(stats -> { + for (var deploymentStats : stats.getStats().results()) { + var model = modelsByDeploymentIds.get(deploymentStats.getDeploymentId()); + model.updateNumAllocation(deploymentStats.getNumberOfAllocations()); + } + listener.onResponse(new ArrayList<>(modelsByDeploymentIds.values())); + }, e -> { + logger.warn("Get deployment stats failed, cannot update the endpoint's number of allocations", e); + // continue with the original response + listener.onResponse(models); + }) + ); + } + public void defaultConfigs(ActionListener> defaultsListener) { preferredModelVariantFn.accept(defaultsListener.delegateFailureAndWrap((delegate, preferredModelVariant) -> { if (PreferredModelVariant.LINUX_X86_OPTIMIZED.equals(preferredModelVariant)) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java index 37e0f28dfb3fe..68db964e86b10 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java @@ -166,6 +166,10 @@ public String modelId() { return modelId; } + public String deloymentId() { + return modelId; + } + public Integer getNumAllocations() { return numAllocations; } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java new file mode 100644 index 0000000000000..74cdab79fe79b --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.elasticsearch; + +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; + +public class ElserInternalModelTests extends ESTestCase { + public void testUpdateNumAllocation() { + var model = new ElserInternalModel( + "foo", + TaskType.SPARSE_EMBEDDING, + ElasticsearchInternalService.NAME, + new ElserInternalServiceSettings(null, 1, "elser", null), + new ElserMlNodeTaskSettings(), + null + ); + + model.updateNumAllocation(1); + assertEquals(1, model.internalServiceSettings.getNumAllocations().intValue()); + + model.updateNumAllocation(null); + assertNull(model.internalServiceSettings.getNumAllocations()); + } +} From a4c0cef220cecb9df56b2162d8152d091181b406 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 15 Oct 2024 08:06:46 +1100 Subject: [PATCH 074/449] Mute org.elasticsearch.xpack.enrich.EnrichRestIT test {p0=enrich/20_standard_index/enrich documents over _bulk} #114768 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 2cd3c8112acca..9d6b098198e6b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -393,6 +393,9 @@ tests: - class: org.elasticsearch.xpack.enrich.EnrichRestIT method: test {p0=enrich/10_basic/Test enrich crud apis} issue: https://github.com/elastic/elasticsearch/issues/114766 +- class: org.elasticsearch.xpack.enrich.EnrichRestIT + method: test {p0=enrich/20_standard_index/enrich documents over _bulk} + issue: https://github.com/elastic/elasticsearch/issues/114768 # Examples: # From a74dbd3fffa96aa7932241aee8f3dc2e8c6dd7c1 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 15 Oct 2024 08:19:44 +1100 Subject: [PATCH 075/449] Mute org.elasticsearch.xpack.enrich.EnrichRestIT test {p0=enrich/50_data_stream/enrich documents over _bulk via a data stream} #114769 
--- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 9d6b098198e6b..62375572cab60 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -396,6 +396,9 @@ tests: - class: org.elasticsearch.xpack.enrich.EnrichRestIT method: test {p0=enrich/20_standard_index/enrich documents over _bulk} issue: https://github.com/elastic/elasticsearch/issues/114768 +- class: org.elasticsearch.xpack.enrich.EnrichRestIT + method: test {p0=enrich/50_data_stream/enrich documents over _bulk via a data stream} + issue: https://github.com/elastic/elasticsearch/issues/114769 # Examples: # From 45aebb967b0aa1c741394977d9f8046cbc66cd9f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 15 Oct 2024 08:37:21 +1100 Subject: [PATCH 076/449] Mute org.elasticsearch.xpack.eql.EqlRestValidationIT testDefaultIndicesOptions #114771 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 62375572cab60..b44ac48fae102 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -399,6 +399,9 @@ tests: - class: org.elasticsearch.xpack.enrich.EnrichRestIT method: test {p0=enrich/50_data_stream/enrich documents over _bulk via a data stream} issue: https://github.com/elastic/elasticsearch/issues/114769 +- class: org.elasticsearch.xpack.eql.EqlRestValidationIT + method: testDefaultIndicesOptions + issue: https://github.com/elastic/elasticsearch/issues/114771 # Examples: # From c5411444a009861f9d2666c7b6c338760f72dc2b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 15 Oct 2024 08:48:44 +1100 Subject: [PATCH 077/449] Mute org.elasticsearch.xpack.enrich.EnrichIT testEnrichSpecialTypes #114773 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index b44ac48fae102..5aae063ee3cb4 100644 --- a/muted-tests.yml +++ 
b/muted-tests.yml @@ -402,6 +402,9 @@ tests: - class: org.elasticsearch.xpack.eql.EqlRestValidationIT method: testDefaultIndicesOptions issue: https://github.com/elastic/elasticsearch/issues/114771 +- class: org.elasticsearch.xpack.enrich.EnrichIT + method: testEnrichSpecialTypes + issue: https://github.com/elastic/elasticsearch/issues/114773 # Examples: # From 3b9d55dce11ab87f0cd20c918815e9682f44f2e0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 15 Oct 2024 09:15:35 +1100 Subject: [PATCH 078/449] Mute org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT testEveryActionIsEitherOperatorOnlyOrNonOperator #102992 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 5aae063ee3cb4..9e82f68852b83 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -405,6 +405,9 @@ tests: - class: org.elasticsearch.xpack.enrich.EnrichIT method: testEnrichSpecialTypes issue: https://github.com/elastic/elasticsearch/issues/114773 +- class: org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT + method: testEveryActionIsEitherOperatorOnlyOrNonOperator + issue: https://github.com/elastic/elasticsearch/issues/102992 # Examples: # From 1b321dd3d6abc0fac8f9b488dbc971d3ea1fa5ba Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 15 Oct 2024 09:19:08 +1100 Subject: [PATCH 079/449] Mute org.elasticsearch.xpack.enrich.EnrichIT testDeleteExistingPipeline #114775 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 9e82f68852b83..b7faa7d6e0182 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -408,6 +408,9 @@ tests: - class: org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT method: testEveryActionIsEitherOperatorOnlyOrNonOperator issue: https://github.com/elastic/elasticsearch/issues/102992 +- class: 
org.elasticsearch.xpack.enrich.EnrichIT + method: testDeleteExistingPipeline + issue: https://github.com/elastic/elasticsearch/issues/114775 # Examples: # From 50c02f414672e4e052abefb62d4f1b22eaa0c31c Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Mon, 14 Oct 2024 18:25:08 -0400 Subject: [PATCH 080/449] Support IPinfo databases in the ip_location processor (#114735) --- .../src/main/java/module-info.java | 2 + .../ingest/geoip/ConfigDatabases.java | 4 +- .../ingest/geoip/GeoIpProcessor.java | 20 ++- .../ingest/geoip/IpDataLookupFactories.java | 73 +++------ .../ingest/geoip/IpinfoIpDataLookups.java | 79 ++++++++++ .../ingest/geoip/MaxmindIpDataLookups.java | 55 +++++++ .../ingest/geoip/GeoIpProcessorTests.java | 144 +++++++++++++----- .../geoip/IpinfoIpDataLookupsTests.java | 103 +++++++++++++ 8 files changed, 383 insertions(+), 97 deletions(-) diff --git a/modules/ingest-geoip/src/main/java/module-info.java b/modules/ingest-geoip/src/main/java/module-info.java index f64c30c060b08..0703d9fb449aa 100644 --- a/modules/ingest-geoip/src/main/java/module-info.java +++ b/modules/ingest-geoip/src/main/java/module-info.java @@ -18,4 +18,6 @@ exports org.elasticsearch.ingest.geoip.direct to org.elasticsearch.server; exports org.elasticsearch.ingest.geoip.stats to org.elasticsearch.server; + + exports org.elasticsearch.ingest.geoip to com.maxmind.db; } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/ConfigDatabases.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/ConfigDatabases.java index 865d0c5a9eca0..3d2b54b04695f 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/ConfigDatabases.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/ConfigDatabases.java @@ -73,14 +73,14 @@ void updateDatabase(Path file, boolean update) { String databaseFileName = file.getFileName().toString(); try { if (update) { - logger.info("database file changed [{}], reload database...", file); 
+ logger.info("database file changed [{}], reloading database...", file); DatabaseReaderLazyLoader loader = new DatabaseReaderLazyLoader(cache, file, null); DatabaseReaderLazyLoader existing = configDatabases.put(databaseFileName, loader); if (existing != null) { existing.shutdown(); } } else { - logger.info("database file removed [{}], close database...", file); + logger.info("database file removed [{}], closing database...", file); DatabaseReaderLazyLoader existing = configDatabases.remove(databaseFileName); assert existing != null; existing.shutdown(); diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index 6c64cb755bb32..9508bf0346058 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -196,13 +196,19 @@ public IpDatabase get() throws IOException { } if (Assertions.ENABLED) { - // Only check whether the suffix has changed and not the entire database type. - // To sanity check whether a city db isn't overwriting with a country or asn db. - // For example overwriting a geoip lite city db with geoip city db is a valid change, but the db type is slightly different, - // by checking just the suffix this assertion doesn't fail. - String expectedSuffix = databaseType.substring(databaseType.lastIndexOf('-')); - assert loader.getDatabaseType().endsWith(expectedSuffix) - : "database type [" + loader.getDatabaseType() + "] doesn't match with expected suffix [" + expectedSuffix + "]"; + // Note that the expected suffix might be null for providers that aren't amenable to using dashes as separator for + // determining the database type. + int last = databaseType.lastIndexOf('-'); + final String expectedSuffix = last == -1 ? 
null : databaseType.substring(last); + + // If the entire database type matches, then that's a match. Otherwise, if there's a suffix to compare on, then + // check whether the suffix has changed (not the entire database type). + // This is to sanity check, for example, that a city db isn't overwritten with a country or asn db. + // But there are permissible overwrites that make sense, for example overwriting a geolite city db with a geoip city db + // is a valid change, but the db type is slightly different -- by checking just the suffix this assertion won't fail. + final String loaderType = loader.getDatabaseType(); + assert loaderType.equals(databaseType) || expectedSuffix == null || loaderType.endsWith(expectedSuffix) + : "database type [" + loaderType + "] doesn't match with expected suffix [" + expectedSuffix + "]"; } return loader; } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDataLookupFactories.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDataLookupFactories.java index 3379fdff0633a..e879f0e0e3514 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDataLookupFactories.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDataLookupFactories.java @@ -13,9 +13,16 @@ import org.elasticsearch.core.Nullable; import java.util.List; +import java.util.Locale; import java.util.Set; import java.util.function.Function; +import static org.elasticsearch.ingest.geoip.IpinfoIpDataLookups.IPINFO_PREFIX; +import static org.elasticsearch.ingest.geoip.IpinfoIpDataLookups.getIpinfoDatabase; +import static org.elasticsearch.ingest.geoip.IpinfoIpDataLookups.getIpinfoLookup; +import static org.elasticsearch.ingest.geoip.MaxmindIpDataLookups.getMaxmindDatabase; +import static org.elasticsearch.ingest.geoip.MaxmindIpDataLookups.getMaxmindLookup; + final class IpDataLookupFactories { private IpDataLookupFactories() { @@ -26,78 +33,44 @@ interface 
IpDataLookupFactory { IpDataLookup create(List properties); } - private static final String CITY_DB_SUFFIX = "-City"; - private static final String COUNTRY_DB_SUFFIX = "-Country"; - private static final String ASN_DB_SUFFIX = "-ASN"; - private static final String ANONYMOUS_IP_DB_SUFFIX = "-Anonymous-IP"; - private static final String CONNECTION_TYPE_DB_SUFFIX = "-Connection-Type"; - private static final String DOMAIN_DB_SUFFIX = "-Domain"; - private static final String ENTERPRISE_DB_SUFFIX = "-Enterprise"; - private static final String ISP_DB_SUFFIX = "-ISP"; - - @Nullable - private static Database getMaxmindDatabase(final String databaseType) { - if (databaseType.endsWith(CITY_DB_SUFFIX)) { - return Database.City; - } else if (databaseType.endsWith(COUNTRY_DB_SUFFIX)) { - return Database.Country; - } else if (databaseType.endsWith(ASN_DB_SUFFIX)) { - return Database.Asn; - } else if (databaseType.endsWith(ANONYMOUS_IP_DB_SUFFIX)) { - return Database.AnonymousIp; - } else if (databaseType.endsWith(CONNECTION_TYPE_DB_SUFFIX)) { - return Database.ConnectionType; - } else if (databaseType.endsWith(DOMAIN_DB_SUFFIX)) { - return Database.Domain; - } else if (databaseType.endsWith(ENTERPRISE_DB_SUFFIX)) { - return Database.Enterprise; - } else if (databaseType.endsWith(ISP_DB_SUFFIX)) { - return Database.Isp; - } else { - return null; // no match was found - } - } - /** * Parses the passed-in databaseType and return the Database instance that is * associated with that databaseType. 
* * @param databaseType the database type String from the metadata of the database file - * @return the Database instance that is associated with the databaseType + * @return the Database instance that is associated with the databaseType (or null) */ @Nullable static Database getDatabase(final String databaseType) { Database database = null; if (Strings.hasText(databaseType)) { - database = getMaxmindDatabase(databaseType); + final String databaseTypeLowerCase = databaseType.toLowerCase(Locale.ROOT); + if (databaseTypeLowerCase.startsWith(IPINFO_PREFIX)) { + database = getIpinfoDatabase(databaseTypeLowerCase); // all lower case! + } else { + // for historical reasons, fall back to assuming maxmind-like type parsing + database = getMaxmindDatabase(databaseType); + } } return database; } - @Nullable - static Function, IpDataLookup> getMaxmindLookup(final Database database) { - return switch (database) { - case City -> MaxmindIpDataLookups.City::new; - case Country -> MaxmindIpDataLookups.Country::new; - case Asn -> MaxmindIpDataLookups.Asn::new; - case AnonymousIp -> MaxmindIpDataLookups.AnonymousIp::new; - case ConnectionType -> MaxmindIpDataLookups.ConnectionType::new; - case Domain -> MaxmindIpDataLookups.Domain::new; - case Enterprise -> MaxmindIpDataLookups.Enterprise::new; - case Isp -> MaxmindIpDataLookups.Isp::new; - default -> null; - }; - } - static IpDataLookupFactory get(final String databaseType, final String databaseFile) { final Database database = getDatabase(databaseType); if (database == null) { throw new IllegalArgumentException("Unsupported database type [" + databaseType + "] for file [" + databaseFile + "]"); } - final Function, IpDataLookup> factoryMethod = getMaxmindLookup(database); + final Function, IpDataLookup> factoryMethod; + final String databaseTypeLowerCase = databaseType.toLowerCase(Locale.ROOT); + if (databaseTypeLowerCase.startsWith(IPINFO_PREFIX)) { + factoryMethod = getIpinfoLookup(database); + } else { + // for historical 
reasons, fall back to assuming maxmind-like types + factoryMethod = getMaxmindLookup(database); + } if (factoryMethod == null) { throw new IllegalArgumentException("Unsupported database type [" + databaseType + "] for file [" + databaseFile + "]"); diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookups.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookups.java index efc6734b3bd93..19a98fb1b5746 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookups.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookups.java @@ -23,10 +23,14 @@ import java.io.IOException; import java.net.InetAddress; +import java.util.Arrays; import java.util.HashMap; +import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; /** * A collection of {@link IpDataLookup} implementations for IPinfo databases @@ -43,6 +47,81 @@ private IpinfoIpDataLookups() { // prefix dispatch and checks case-insensitive, so that works out nicely static final String IPINFO_PREFIX = "ipinfo"; + private static final Set IPINFO_TYPE_STOP_WORDS = Set.of( + "ipinfo", + "extended", + "free", + "generic", + "ip", + "sample", + "standard", + "mmdb" + ); + + /** + * Cleans up the database_type String from an ipinfo database by splitting on punctuation, removing stop words, and then joining + * with an underscore. + *

    + * e.g. "ipinfo free_foo_sample.mmdb" -> "foo" + * + * @param type the database_type from an ipinfo database + * @return a cleaned up database_type string + */ + // n.b. this is just based on observation of the types from a survey of such databases -- it's like browser user agent sniffing, + // there aren't necessarily any amazing guarantees about this behavior + static String ipinfoTypeCleanup(String type) { + List parts = Arrays.asList(type.split("[ _.]")); + return parts.stream().filter((s) -> IPINFO_TYPE_STOP_WORDS.contains(s) == false).collect(Collectors.joining("_")); + } + + @Nullable + static Database getIpinfoDatabase(final String databaseType) { + // for ipinfo the database selection is more along the lines of user-agent sniffing than + // string-based dispatch. the specific database_type strings could change in the future, + // hence the somewhat loose nature of this checking. + + final String cleanedType = ipinfoTypeCleanup(databaseType); + + // early detection on any of the 'extended' types + if (databaseType.contains("extended")) { + // which are not currently supported + logger.trace("returning null for unsupported database_type [{}]", databaseType); + return null; + } + + // early detection on 'country_asn' so the 'country' and 'asn' checks don't get faked out + if (cleanedType.contains("country_asn")) { + // but it's not currently supported + logger.trace("returning null for unsupported database_type [{}]", databaseType); + return null; + } + + if (cleanedType.contains("asn")) { + return Database.AsnV2; + } else if (cleanedType.contains("country")) { + return Database.CountryV2; + } else if (cleanedType.contains("location")) { // note: catches 'location' and 'geolocation' ;) + return Database.CityV2; + } else if (cleanedType.contains("privacy")) { + return Database.PrivacyDetection; + } else { + // no match was found + logger.trace("returning null for unsupported database_type [{}]", databaseType); + return null; + } + } + + @Nullable + static 
Function, IpDataLookup> getIpinfoLookup(final Database database) { + return switch (database) { + case Database.AsnV2 -> IpinfoIpDataLookups.Asn::new; + case Database.CountryV2 -> IpinfoIpDataLookups.Country::new; + case Database.CityV2 -> IpinfoIpDataLookups.Geolocation::new; + case Database.PrivacyDetection -> IpinfoIpDataLookups.PrivacyDetection::new; + default -> null; + }; + } + /** * Lax-ly parses a string that (ideally) looks like 'AS123' into a Long like 123L (or null, if such parsing isn't possible). * @param asn a potentially empty (or null) ASN string that is expected to contain 'AS' and then a parsable long diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookups.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookups.java index 4297413073e52..8bc74c0e4aac4 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookups.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookups.java @@ -26,6 +26,8 @@ import com.maxmind.geoip2.record.Postal; import com.maxmind.geoip2.record.Subdivision; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.core.Nullable; @@ -37,6 +39,7 @@ import java.util.Locale; import java.util.Map; import java.util.Set; +import java.util.function.Function; /** * A collection of {@link IpDataLookup} implementations for MaxMind databases @@ -47,11 +50,63 @@ private MaxmindIpDataLookups() { // utility class } + private static final Logger logger = LogManager.getLogger(MaxmindIpDataLookups.class); + // the actual prefixes from the metadata are cased like the literal strings, but // prefix dispatch and checks case-insensitive, so the actual constants are lowercase static final String GEOIP2_PREFIX = 
"GeoIP2".toLowerCase(Locale.ROOT); static final String GEOLITE2_PREFIX = "GeoLite2".toLowerCase(Locale.ROOT); + // note: the secondary dispatch on suffix happens to be case sensitive + private static final String CITY_DB_SUFFIX = "-City"; + private static final String COUNTRY_DB_SUFFIX = "-Country"; + private static final String ASN_DB_SUFFIX = "-ASN"; + private static final String ANONYMOUS_IP_DB_SUFFIX = "-Anonymous-IP"; + private static final String CONNECTION_TYPE_DB_SUFFIX = "-Connection-Type"; + private static final String DOMAIN_DB_SUFFIX = "-Domain"; + private static final String ENTERPRISE_DB_SUFFIX = "-Enterprise"; + private static final String ISP_DB_SUFFIX = "-ISP"; + + @Nullable + static Database getMaxmindDatabase(final String databaseType) { + if (databaseType.endsWith(CITY_DB_SUFFIX)) { + return Database.City; + } else if (databaseType.endsWith(COUNTRY_DB_SUFFIX)) { + return Database.Country; + } else if (databaseType.endsWith(ASN_DB_SUFFIX)) { + return Database.Asn; + } else if (databaseType.endsWith(ANONYMOUS_IP_DB_SUFFIX)) { + return Database.AnonymousIp; + } else if (databaseType.endsWith(CONNECTION_TYPE_DB_SUFFIX)) { + return Database.ConnectionType; + } else if (databaseType.endsWith(DOMAIN_DB_SUFFIX)) { + return Database.Domain; + } else if (databaseType.endsWith(ENTERPRISE_DB_SUFFIX)) { + return Database.Enterprise; + } else if (databaseType.endsWith(ISP_DB_SUFFIX)) { + return Database.Isp; + } else { + // no match was found + logger.trace("returning null for unsupported database_type [{}]", databaseType); + return null; + } + } + + @Nullable + static Function, IpDataLookup> getMaxmindLookup(final Database database) { + return switch (database) { + case City -> MaxmindIpDataLookups.City::new; + case Country -> MaxmindIpDataLookups.Country::new; + case Asn -> MaxmindIpDataLookups.Asn::new; + case AnonymousIp -> MaxmindIpDataLookups.AnonymousIp::new; + case ConnectionType -> MaxmindIpDataLookups.ConnectionType::new; + case Domain -> 
MaxmindIpDataLookups.Domain::new; + case Enterprise -> MaxmindIpDataLookups.Enterprise::new; + case Isp -> MaxmindIpDataLookups.Isp::new; + default -> null; + }; + } + static class AnonymousIp extends AbstractBase { AnonymousIp(final Set properties) { super( diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index e96bdbd6314b2..640480ed277c5 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -27,6 +27,7 @@ import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; import static org.elasticsearch.ingest.geoip.GeoIpProcessor.GEOIP_TYPE; +import static org.elasticsearch.ingest.geoip.GeoIpProcessor.IP_LOCATION_TYPE; import static org.elasticsearch.ingest.geoip.GeoIpTestUtils.copyDatabase; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -37,10 +38,6 @@ public class GeoIpProcessorTests extends ESTestCase { - private static IpDataLookup ipDataLookupAll(final Database database) { - return IpDataLookupFactories.getMaxmindLookup(database).apply(database.properties()); - } - // a temporary directory that mmdb files can be copied to and read from private Path tmpDir; @@ -54,6 +51,66 @@ public void cleanup() throws IOException { IOUtils.rm(tmpDir); } + public void testMaxmindCity() throws Exception { + String ip = "2602:306:33d3:8000::3257:9652"; + GeoIpProcessor processor = new GeoIpProcessor( + GEOIP_TYPE, // n.b. 
this is a "geoip" processor + randomAlphaOfLength(10), + null, + "source_field", + loader("GeoLite2-City.mmdb"), + () -> true, + "target_field", + getMaxmindCityLookup(), + false, + false, + "filename" + ); + + Map document = new HashMap<>(); + document.put("source_field", ip); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + processor.execute(ingestDocument); + + assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo(ip)); + @SuppressWarnings("unchecked") + Map data = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); + assertThat(data, notNullValue()); + assertThat(data.get("ip"), equalTo(ip)); + assertThat(data.get("city_name"), equalTo("Homestead")); + // see MaxmindIpDataLookupsTests for more tests of the data lookup behavior + } + + public void testIpinfoGeolocation() throws Exception { + String ip = "13.107.39.238"; + GeoIpProcessor processor = new GeoIpProcessor( + IP_LOCATION_TYPE, // n.b. this is an "ip_location" processor + randomAlphaOfLength(10), + null, + "source_field", + loader("ipinfo/ip_geolocation_sample.mmdb"), + () -> true, + "target_field", + getIpinfoGeolocationLookup(), + false, + false, + "filename" + ); + + Map document = new HashMap<>(); + document.put("source_field", ip); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + processor.execute(ingestDocument); + + assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo(ip)); + @SuppressWarnings("unchecked") + Map data = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); + assertThat(data, notNullValue()); + assertThat(data.get("ip"), equalTo(ip)); + assertThat(data.get("city_name"), equalTo("Des Moines")); + // see IpinfoIpDataLookupsTests for more tests of the data lookup behavior + } + public void testNullValueWithIgnoreMissing() throws Exception { GeoIpProcessor processor = new GeoIpProcessor( GEOIP_TYPE, @@ -63,7 +120,7 
@@ public void testNullValueWithIgnoreMissing() throws Exception { loader("GeoLite2-City.mmdb"), () -> true, "target_field", - ipDataLookupAll(Database.City), + getMaxmindCityLookup(), true, false, "filename" @@ -86,7 +143,7 @@ public void testNonExistentWithIgnoreMissing() throws Exception { loader("GeoLite2-City.mmdb"), () -> true, "target_field", - ipDataLookupAll(Database.City), + getMaxmindCityLookup(), true, false, "filename" @@ -106,7 +163,7 @@ public void testNullWithoutIgnoreMissing() { loader("GeoLite2-City.mmdb"), () -> true, "target_field", - ipDataLookupAll(Database.City), + getMaxmindCityLookup(), false, false, "filename" @@ -129,7 +186,7 @@ public void testNonExistentWithoutIgnoreMissing() { loader("GeoLite2-City.mmdb"), () -> true, "target_field", - ipDataLookupAll(Database.City), + getMaxmindCityLookup(), false, false, "filename" @@ -149,7 +206,7 @@ public void testAddressIsNotInTheDatabase() throws Exception { loader("GeoLite2-City.mmdb"), () -> true, "target_field", - ipDataLookupAll(Database.City), + getMaxmindCityLookup(), false, false, "filename" @@ -174,7 +231,7 @@ public void testExceptionPropagates() { loader("GeoLite2-City.mmdb"), () -> true, "target_field", - ipDataLookupAll(Database.City), + getMaxmindCityLookup(), false, false, "filename" @@ -196,7 +253,7 @@ public void testListAllValid() throws Exception { loader("GeoLite2-City.mmdb"), () -> true, "target_field", - ipDataLookupAll(Database.City), + getMaxmindCityLookup(), false, false, "filename" @@ -208,11 +265,11 @@ public void testListAllValid() throws Exception { processor.execute(ingestDocument); @SuppressWarnings("unchecked") - List> geoData = (List>) ingestDocument.getSourceAndMetadata().get("target_field"); - assertThat(geoData, notNullValue()); - assertThat(geoData.size(), equalTo(2)); - assertThat(geoData.get(0).get("location"), equalTo(Map.of("lat", 37.751d, "lon", -97.822d))); - assertThat(geoData.get(1).get("city_name"), equalTo("Hoensbroek")); + List> data = (List>) 
ingestDocument.getSourceAndMetadata().get("target_field"); + assertThat(data, notNullValue()); + assertThat(data.size(), equalTo(2)); + assertThat(data.get(0).get("location"), equalTo(Map.of("lat", 37.751d, "lon", -97.822d))); + assertThat(data.get(1).get("city_name"), equalTo("Hoensbroek")); } public void testListPartiallyValid() throws Exception { @@ -224,7 +281,7 @@ public void testListPartiallyValid() throws Exception { loader("GeoLite2-City.mmdb"), () -> true, "target_field", - ipDataLookupAll(Database.City), + getMaxmindCityLookup(), false, false, "filename" @@ -236,11 +293,11 @@ public void testListPartiallyValid() throws Exception { processor.execute(ingestDocument); @SuppressWarnings("unchecked") - List> geoData = (List>) ingestDocument.getSourceAndMetadata().get("target_field"); - assertThat(geoData, notNullValue()); - assertThat(geoData.size(), equalTo(2)); - assertThat(geoData.get(0).get("location"), equalTo(Map.of("lat", 37.751d, "lon", -97.822d))); - assertThat(geoData.get(1), nullValue()); + List> data = (List>) ingestDocument.getSourceAndMetadata().get("target_field"); + assertThat(data, notNullValue()); + assertThat(data.size(), equalTo(2)); + assertThat(data.get(0).get("location"), equalTo(Map.of("lat", 37.751d, "lon", -97.822d))); + assertThat(data.get(1), nullValue()); } public void testListNoMatches() throws Exception { @@ -252,7 +309,7 @@ public void testListNoMatches() throws Exception { loader("GeoLite2-City.mmdb"), () -> true, "target_field", - ipDataLookupAll(Database.City), + getMaxmindCityLookup(), false, false, "filename" @@ -272,7 +329,7 @@ public void testListDatabaseReferenceCounting() throws Exception { GeoIpProcessor processor = new GeoIpProcessor(GEOIP_TYPE, randomAlphaOfLength(10), null, "source_field", () -> { loader.preLookup(); return loader; - }, () -> true, "target_field", ipDataLookupAll(Database.City), false, false, "filename"); + }, () -> true, "target_field", getMaxmindCityLookup(), false, false, "filename"); Map 
document = new HashMap<>(); document.put("source_field", List.of("8.8.8.8", "82.171.64.0")); @@ -280,11 +337,11 @@ public void testListDatabaseReferenceCounting() throws Exception { processor.execute(ingestDocument); @SuppressWarnings("unchecked") - List> geoData = (List>) ingestDocument.getSourceAndMetadata().get("target_field"); - assertThat(geoData, notNullValue()); - assertThat(geoData.size(), equalTo(2)); - assertThat(geoData.get(0).get("location"), equalTo(Map.of("lat", 37.751d, "lon", -97.822d))); - assertThat(geoData.get(1).get("city_name"), equalTo("Hoensbroek")); + List> data = (List>) ingestDocument.getSourceAndMetadata().get("target_field"); + assertThat(data, notNullValue()); + assertThat(data.size(), equalTo(2)); + assertThat(data.get(0).get("location"), equalTo(Map.of("lat", 37.751d, "lon", -97.822d))); + assertThat(data.get(1).get("city_name"), equalTo("Hoensbroek")); // Check the loader's reference count and attempt to close assertThat(loader.current(), equalTo(0)); @@ -301,7 +358,7 @@ public void testListFirstOnly() throws Exception { loader("GeoLite2-City.mmdb"), () -> true, "target_field", - ipDataLookupAll(Database.City), + getMaxmindCityLookup(), false, true, "filename" @@ -313,9 +370,9 @@ public void testListFirstOnly() throws Exception { processor.execute(ingestDocument); @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); - assertThat(geoData, notNullValue()); - assertThat(geoData.get("location"), equalTo(Map.of("lat", 37.751d, "lon", -97.822d))); + Map data = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); + assertThat(data, notNullValue()); + assertThat(data.get("location"), equalTo(Map.of("lat", 37.751d, "lon", -97.822d))); } public void testListFirstOnlyNoMatches() throws Exception { @@ -327,7 +384,7 @@ public void testListFirstOnlyNoMatches() throws Exception { loader("GeoLite2-City.mmdb"), () -> true, "target_field", - ipDataLookupAll(Database.City), + 
getMaxmindCityLookup(), false, true, "filename" @@ -350,7 +407,7 @@ public void testInvalidDatabase() throws Exception { loader("GeoLite2-City.mmdb"), () -> false, "target_field", - ipDataLookupAll(Database.City), + getMaxmindCityLookup(), false, true, "filename" @@ -374,7 +431,7 @@ public void testNoDatabase() throws Exception { () -> null, () -> true, "target_field", - ipDataLookupAll(Database.City), + getMaxmindCityLookup(), false, false, "GeoLite2-City" @@ -398,7 +455,7 @@ public void testNoDatabase_ignoreMissing() throws Exception { () -> null, () -> true, "target_field", - ipDataLookupAll(Database.City), + getMaxmindCityLookup(), true, false, "GeoLite2-City" @@ -412,13 +469,24 @@ public void testNoDatabase_ignoreMissing() throws Exception { assertIngestDocument(originalIngestDocument, ingestDocument); } + private static IpDataLookup getMaxmindCityLookup() { + final var database = Database.City; + return MaxmindIpDataLookups.getMaxmindLookup(database).apply(database.properties()); + } + + private static IpDataLookup getIpinfoGeolocationLookup() { + final var database = Database.CityV2; + return IpinfoIpDataLookups.getIpinfoLookup(database).apply(database.properties()); + } + private CheckedSupplier loader(final String path) { var loader = loader(path, null); return () -> loader; } private DatabaseReaderLazyLoader loader(final String databaseName, final AtomicBoolean closed) { - Path path = tmpDir.resolve(databaseName); + int last = databaseName.lastIndexOf("/"); + final Path path = tmpDir.resolve(last == -1 ? 
databaseName : databaseName.substring(last + 1)); copyDatabase(databaseName, path); final GeoIpCache cache = new GeoIpCache(1000); diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java index 4167170567f52..e998748efbcad 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java @@ -31,12 +31,14 @@ import static java.util.Map.entry; import static org.elasticsearch.ingest.geoip.GeoIpTestUtils.copyDatabase; +import static org.elasticsearch.ingest.geoip.IpinfoIpDataLookups.ipinfoTypeCleanup; import static org.elasticsearch.ingest.geoip.IpinfoIpDataLookups.parseAsn; import static org.elasticsearch.ingest.geoip.IpinfoIpDataLookups.parseBoolean; import static org.elasticsearch.ingest.geoip.IpinfoIpDataLookups.parseLocationDouble; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; @@ -308,6 +310,107 @@ public void testPrivacyDetectionInvariants() { } } + public void testIpinfoTypeCleanup() { + Map typesToCleanedTypes = Map.ofEntries( + // database_type strings from upstream: + // abuse.mmdb + entry("ipinfo standard_abuse_mmdb_v4.mmdb", "abuse_v4"), + // asn.mmdb + entry("ipinfo generic_asn_mmdb_v4.mmdb", "asn_v4"), + // carrier.mmdb + entry("ipinfo standard_carrier_mmdb.mmdb", "carrier"), + // location_extended_v2.mmdb + entry("ipinfo extended_location_v2.mmdb", "location_v2"), + // privacy_extended_v2.mmdb + entry("ipinfo extended_privacy_v2.mmdb", "privacy_v2"), + // standard_company.mmdb + 
entry("ipinfo standard_company.mmdb", "company"), + // standard_ip_hosted_domains_sample.mmdb + entry("ipinfo standard_ip_hosted_domains_sample.mmdb", "hosted_domains"), + // standard_location.mmdb + entry("ipinfo standard_location_mmdb_v4.mmdb", "location_v4"), + // standard_privacy.mmdb + entry("ipinfo standard_privacy.mmdb", "privacy"), + + // database_type strings from test files: + // ip_asn_sample.mmdb + entry("ipinfo ip_asn_sample.mmdb", "asn"), + // ip_country_asn_sample.mmdb + entry("ipinfo ip_country_asn_sample.mmdb", "country_asn"), + // ip_geolocation_sample.mmdb + entry("ipinfo ip_geolocation_sample.mmdb", "geolocation"), + // abuse_contact_sample.mmdb + entry("ipinfo abuse_contact_sample.mmdb", "abuse_contact"), + // asn_sample.mmdb + entry("ipinfo asn_sample.mmdb", "asn"), + // hosted_domains_sample.mmdb + entry("ipinfo hosted_domains_sample.mmdb", "hosted_domains"), + // ip_carrier_sample.mmdb + entry("ipinfo ip_carrier_sample.mmdb", "carrier"), + // ip_company_sample.mmdb + entry("ipinfo ip_company_sample.mmdb", "company"), + // ip_country_sample.mmdb + entry("ipinfo ip_country_sample.mmdb", "country"), + // ip_geolocation_extended_ipv4_sample.mmdb + entry("ipinfo ip_geolocation_extended_ipv4_sample.mmdb", "geolocation_ipv4"), + // ip_geolocation_extended_ipv6_sample.mmdb + entry("ipinfo ip_geolocation_extended_ipv6_sample.mmdb", "geolocation_ipv6"), + // ip_geolocation_extended_sample.mmdb + entry("ipinfo ip_geolocation_extended_sample.mmdb", "geolocation"), + // ip_rdns_domains_sample.mmdb + entry("ipinfo ip_rdns_domains_sample.mmdb", "rdns_domains"), + // ip_rdns_hostnames_sample.mmdb + entry("ipinfo ip_rdns_hostnames_sample.mmdb", "rdns_hostnames"), + // privacy_detection_extended_sample.mmdb + entry("ipinfo privacy_detection_extended_sample.mmdb", "privacy_detection"), + // privacy_detection_sample.mmdb + entry("ipinfo privacy_detection_sample.mmdb", "privacy_detection"), + + // database_type strings from downloaded (free) files: + // asn.mmdb 
+ entry("ipinfo generic_asn_free.mmdb", "asn"), + // country.mmdb + entry("ipinfo generic_country_free.mmdb", "country"), + // country_asn.mmdb + entry("ipinfo generic_country_free_country_asn.mmdb", "country_country_asn") + ); + + for (var entry : typesToCleanedTypes.entrySet()) { + String type = entry.getKey(); + String cleanedType = entry.getValue(); + assertThat(ipinfoTypeCleanup(type), equalTo(cleanedType)); + } + } + + public void testDatabaseTypeParsing() throws IOException { + // this test is a little bit overloaded -- it's testing that we're getting the expected sorts of + // database_type strings from these files, *and* it's also testing that we dispatch on those strings + // correctly and associated those files with the correct high-level Elasticsearch Database type. + // down the road it would probably make sense to split these out and find a better home for some of the + // logic, but for now it's probably more valuable to have the test *somewhere* than to get especially + // pedantic about where precisely it should be. 
+ + copyDatabase("ipinfo/ip_asn_sample.mmdb", tmpDir.resolve("ip_asn_sample.mmdb")); + copyDatabase("ipinfo/ip_geolocation_sample.mmdb", tmpDir.resolve("ip_geolocation_sample.mmdb")); + copyDatabase("ipinfo/asn_sample.mmdb", tmpDir.resolve("asn_sample.mmdb")); + copyDatabase("ipinfo/ip_country_sample.mmdb", tmpDir.resolve("ip_country_sample.mmdb")); + copyDatabase("ipinfo/privacy_detection_sample.mmdb", tmpDir.resolve("privacy_detection_sample.mmdb")); + + assertThat(parseDatabaseFromType("ip_asn_sample.mmdb"), is(Database.AsnV2)); + assertThat(parseDatabaseFromType("ip_geolocation_sample.mmdb"), is(Database.CityV2)); + assertThat(parseDatabaseFromType("asn_sample.mmdb"), is(Database.AsnV2)); + assertThat(parseDatabaseFromType("ip_country_sample.mmdb"), is(Database.CountryV2)); + assertThat(parseDatabaseFromType("privacy_detection_sample.mmdb"), is(Database.PrivacyDetection)); + + // additional cases where we're bailing early on types we don't support + assertThat(IpDataLookupFactories.getDatabase("ipinfo ip_country_asn_sample.mmdb"), nullValue()); + assertThat(IpDataLookupFactories.getDatabase("ipinfo privacy_detection_extended_sample.mmdb"), nullValue()); + } + + private Database parseDatabaseFromType(String databaseFile) throws IOException { + return IpDataLookupFactories.getDatabase(MMDBUtil.getDatabaseType(tmpDir.resolve(databaseFile))); + } + private static void assertDatabaseInvariants(final Path databasePath, final BiConsumer> rowConsumer) { try (Reader reader = new Reader(pathToFile(databasePath))) { Networks networks = reader.networks(Map.class); From 5efba5b43d79c15d8d7304d32e4f95a741134ffb Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 15 Oct 2024 00:05:40 +0100 Subject: [PATCH 081/449] [ML] Default inference endpoint for the multilingual-e5-small model (#114683) --- docs/changelog/114683.yaml | 5 ++ docs/reference/rest-api/usage.asciidoc | 7 ++- ...ltElserIT.java => DefaultEndPointsIT.java} | 41 +++++++++++++++-- 
.../xpack/inference/InferenceCrudIT.java | 5 +- .../BaseElasticsearchInternalService.java | 9 +++- .../ElasticsearchInternalService.java | 46 ++++++++----------- .../ElasticsearchInternalServiceTests.java | 7 +++ ...portStartTrainedModelDeploymentAction.java | 4 +- 8 files changed, 88 insertions(+), 36 deletions(-) create mode 100644 docs/changelog/114683.yaml rename x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/{DefaultElserIT.java => DefaultEndPointsIT.java} (57%) diff --git a/docs/changelog/114683.yaml b/docs/changelog/114683.yaml new file mode 100644 index 0000000000000..a677e65a12b0e --- /dev/null +++ b/docs/changelog/114683.yaml @@ -0,0 +1,5 @@ +pr: 114683 +summary: Default inference endpoint for the multilingual-e5-small model +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/reference/rest-api/usage.asciidoc b/docs/reference/rest-api/usage.asciidoc index 957f57ffc9105..5fd2304ff9378 100644 --- a/docs/reference/rest-api/usage.asciidoc +++ b/docs/reference/rest-api/usage.asciidoc @@ -210,7 +210,12 @@ GET /_xpack/usage "service": "elasticsearch", "task_type": "SPARSE_EMBEDDING", "count": 1 - } + }, + { + "service": "elasticsearch", + "task_type": "TEXT_EMBEDDING", + "count": 1 + }, ] }, "logstash" : { diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultElserIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java similarity index 57% rename from x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultElserIT.java rename to x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java index 5d84aad4b7344..083bad2c91613 100644 --- 
a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultElserIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java @@ -22,13 +22,13 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.oneOf; -public class DefaultElserIT extends InferenceBaseRestTest { +public class DefaultEndPointsIT extends InferenceBaseRestTest { private TestThreadPool threadPool; @Before public void createThreadPool() { - threadPool = new TestThreadPool(DefaultElserIT.class.getSimpleName()); + threadPool = new TestThreadPool(DefaultEndPointsIT.class.getSimpleName()); } @After @@ -38,7 +38,7 @@ public void tearDown() throws Exception { } @SuppressWarnings("unchecked") - public void testInferCreatesDefaultElser() throws IOException { + public void testInferDeploysDefaultElser() throws IOException { assumeTrue("Default config requires a feature flag", DefaultElserFeatureFlag.isEnabled()); var model = getModel(ElasticsearchInternalService.DEFAULT_ELSER_ID); assertDefaultElserConfig(model); @@ -67,4 +67,39 @@ private static void assertDefaultElserConfig(Map modelConfig) { Matchers.is(Map.of("enabled", true, "min_number_of_allocations", 1, "max_number_of_allocations", 8)) ); } + + @SuppressWarnings("unchecked") + public void testInferDeploysDefaultE5() throws IOException { + assumeTrue("Default config requires a feature flag", DefaultElserFeatureFlag.isEnabled()); + var model = getModel(ElasticsearchInternalService.DEFAULT_E5_ID); + assertDefaultE5Config(model); + + var inputs = List.of("Hello World", "Goodnight moon"); + var queryParams = Map.of("timeout", "120s"); + var results = infer(ElasticsearchInternalService.DEFAULT_E5_ID, TaskType.TEXT_EMBEDDING, inputs, queryParams); + var embeddings = (List>) results.get("text_embedding"); + assertThat(results.toString(), embeddings, hasSize(2)); + } + + 
@SuppressWarnings("unchecked") + private static void assertDefaultE5Config(Map modelConfig) { + assertEquals(modelConfig.toString(), ElasticsearchInternalService.DEFAULT_E5_ID, modelConfig.get("inference_id")); + assertEquals(modelConfig.toString(), ElasticsearchInternalService.NAME, modelConfig.get("service")); + assertEquals(modelConfig.toString(), TaskType.TEXT_EMBEDDING.toString(), modelConfig.get("task_type")); + + var serviceSettings = (Map) modelConfig.get("service_settings"); + assertThat( + modelConfig.toString(), + serviceSettings.get("model_id"), + is(oneOf(".multilingual-e5-small", ".multilingual-e5-small_linux-x86_64")) + ); + assertEquals(modelConfig.toString(), 1, serviceSettings.get("num_threads")); + + var adaptiveAllocations = (Map) serviceSettings.get("adaptive_allocations"); + assertThat( + modelConfig.toString(), + adaptiveAllocations, + Matchers.is(Map.of("enabled", true, "min_number_of_allocations", 1, "max_number_of_allocations", 8)) + ); + } } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index 98c8d43707219..cbc50c361e3b5 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -40,7 +40,7 @@ public void testCRUD() throws IOException { } var getAllModels = getAllModels(); - int numModels = DefaultElserFeatureFlag.isEnabled() ? 10 : 9; + int numModels = DefaultElserFeatureFlag.isEnabled() ? 
11 : 9; assertThat(getAllModels, hasSize(numModels)); var getSparseModels = getModels("_all", TaskType.SPARSE_EMBEDDING); @@ -51,7 +51,8 @@ public void testCRUD() throws IOException { } var getDenseModels = getModels("_all", TaskType.TEXT_EMBEDDING); - assertThat(getDenseModels, hasSize(4)); + int numDenseModels = DefaultElserFeatureFlag.isEnabled() ? 5 : 4; + assertThat(getDenseModels, hasSize(numDenseModels)); for (var denseModel : getDenseModels) { assertEquals("text_embedding", denseModel.get("task_type")); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java index 43d3dff8756fa..98777e9722242 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java @@ -239,7 +239,12 @@ private void preferredVariantFromPlatformArchitecture(ActionListener defaultConfigIds() { - return List.of(new DefaultConfigId(DEFAULT_ELSER_ID, TaskType.SPARSE_EMBEDDING, this)); + return List.of( + new DefaultConfigId(DEFAULT_ELSER_ID, TaskType.SPARSE_EMBEDDING, this), + new DefaultConfigId(DEFAULT_E5_ID, TaskType.TEXT_EMBEDDING, this) + ); } @Override @@ -876,13 +859,24 @@ private List defaultConfigs(boolean useLinuxOptimizedModel) { ElserMlNodeTaskSettings.DEFAULT, null // default chunking settings ); - - return List.of(defaultElser); + var defaultE5 = new MultilingualE5SmallModel( + DEFAULT_E5_ID, + TaskType.TEXT_EMBEDDING, + NAME, + new MultilingualE5SmallInternalServiceSettings( + null, + 1, + useLinuxOptimizedModel ? 
MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86 : MULTILINGUAL_E5_SMALL_MODEL_ID, + new AdaptiveAllocationsSettings(Boolean.TRUE, 1, 8) + ), + null // default chunking settings + ); + return List.of(defaultElser, defaultE5); } @Override - protected boolean isDefaultId(String inferenceId) { - return DEFAULT_ELSER_ID.equals(inferenceId); + boolean isDefaultId(String inferenceId) { + return DEFAULT_ELSER_ID.equals(inferenceId) || DEFAULT_E5_ID.equals(inferenceId); } static EmbeddingRequestChunker.EmbeddingType embeddingTypeFromTaskTypeAndSettings( diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index d4462c021dcac..860642a23fb2c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -1551,6 +1551,13 @@ public void testEmbeddingTypeFromTaskTypeAndSettings() { assertThat(e.getMessage(), containsString("Chunking is not supported for task type [completion]")); } + public void testIsDefaultId() { + var service = createService(mock(Client.class)); + assertTrue(service.isDefaultId(".elser-2")); + assertTrue(service.isDefaultId(".multi-e5-small")); + assertFalse(service.isDefaultId("foo")); + } + private ElasticsearchInternalService createService(Client client) { var cs = mock(ClusterService.class); var cSettings = new ClusterSettings(Settings.EMPTY, Set.of(MachineLearningField.MAX_LAZY_ML_NODES)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java index e130b13f4ec30..0bda2de2ce9ae 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java @@ -234,9 +234,9 @@ protected void masterOperation( if (getModelResponse.getResources().results().size() > 1) { listener.onFailure( ExceptionsHelper.badRequestException( - "cannot deploy more than one models at the same time; [{}] matches [{}] models]", + "cannot deploy more than one model at the same time; [{}] matches models [{}]", request.getModelId(), - getModelResponse.getResources().results().size() + getModelResponse.getResources().results().stream().map(TrainedModelConfig::getModelId).toList() ) ); return; From 209ee0c361c670a6309ec1e940915f8ac536ab69 Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Mon, 14 Oct 2024 19:15:32 -0400 Subject: [PATCH 082/449] [ML] Stream Bedrock Completion (#114732) Notes: - Adds a new API to the chatCompletionRequest to invoke the Bedrock Stream API - Create a StreamingChatProcessor that subscribes to streaming results from bedrock and handles the parsing on another thread. - There was no good way (that I could see) to extend the Provider-based CompletionRequestEntity, so they have been flattened into one RequestEntity that can be shared between ConverseRequest and ConverseStreamRequest. 
--- docs/changelog/114732.yaml | 5 + .../inference/src/main/java/module-info.java | 1 + .../AmazonBedrockChatCompletionExecutor.java | 18 +- .../amazonbedrock/AmazonBedrockClient.java | 5 + .../AmazonBedrockInferenceClient.java | 24 +- .../AmazonBedrockInferenceClientCache.java | 7 +- .../AmazonBedrockRequestSender.java | 6 +- .../AmazonBedrockStreamingChatProcessor.java | 156 +++++++++++ ...onBedrockChatCompletionRequestManager.java | 7 +- ...edrockAI21LabsCompletionRequestEntity.java | 60 ----- ...drockAnthropicCompletionRequestEntity.java | 67 ----- ...zonBedrockChatCompletionEntityFactory.java | 48 +--- .../AmazonBedrockChatCompletionRequest.java | 43 ++- ...nBedrockCohereCompletionRequestEntity.java | 67 ----- .../AmazonBedrockConverseRequestEntity.java | 23 +- .../AmazonBedrockConverseUtils.java | 33 +++ ...zonBedrockMetaCompletionRequestEntity.java | 60 ----- ...BedrockMistralCompletionRequestEntity.java | 67 ----- ...onBedrockTitanCompletionRequestEntity.java | 60 ----- .../amazonbedrock/AmazonBedrockService.java | 6 + .../AmazonBedrockExecutorTests.java | 10 +- ...mazonBedrockInferenceClientCacheTests.java | 2 +- .../AmazonBedrockMockInferenceClient.java | 12 +- ...zonBedrockStreamingChatProcessorTests.java | 251 ++++++++++++++++++ ...kAI21LabsCompletionRequestEntityTests.java | 70 ----- ...AnthropicCompletionRequestEntityTests.java | 82 ------ ...drockChatCompletionEntityFactoryTests.java | 102 +++++++ ...ockCohereCompletionRequestEntityTests.java | 82 ------ .../AmazonBedrockConverseRequestUtils.java | 12 +- ...drockMetaCompletionRequestEntityTests.java | 70 ----- ...ckMistralCompletionRequestEntityTests.java | 82 ------ ...rockTitanCompletionRequestEntityTests.java | 70 ----- .../AmazonBedrockServiceTests.java | 14 +- 33 files changed, 702 insertions(+), 920 deletions(-) create mode 100644 docs/changelog/114732.yaml create mode 100644 
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockStreamingChatProcessor.java delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAI21LabsCompletionRequestEntity.java delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAnthropicCompletionRequestEntity.java delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockCohereCompletionRequestEntity.java delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMetaCompletionRequestEntity.java delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMistralCompletionRequestEntity.java delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockTitanCompletionRequestEntity.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockStreamingChatProcessorTests.java delete mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAI21LabsCompletionRequestEntityTests.java delete mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAnthropicCompletionRequestEntityTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockChatCompletionEntityFactoryTests.java delete mode 100644 
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockCohereCompletionRequestEntityTests.java delete mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMetaCompletionRequestEntityTests.java delete mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMistralCompletionRequestEntityTests.java delete mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockTitanCompletionRequestEntityTests.java diff --git a/docs/changelog/114732.yaml b/docs/changelog/114732.yaml new file mode 100644 index 0000000000000..42176cdbda443 --- /dev/null +++ b/docs/changelog/114732.yaml @@ -0,0 +1,5 @@ +pr: 114732 +summary: Stream Bedrock Completion +area: Machine Learning +type: enhancement +issues: [] diff --git a/x-pack/plugin/inference/src/main/java/module-info.java b/x-pack/plugin/inference/src/main/java/module-info.java index 53cb6ac154ced..60cb254e0afbe 100644 --- a/x-pack/plugin/inference/src/main/java/module-info.java +++ b/x-pack/plugin/inference/src/main/java/module-info.java @@ -32,6 +32,7 @@ requires software.amazon.awssdk.profiles; requires org.slf4j; requires software.amazon.awssdk.retries.api; + requires org.reactivestreams; exports org.elasticsearch.xpack.inference.action; exports org.elasticsearch.xpack.inference.registry; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockChatCompletionExecutor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockChatCompletionExecutor.java index a4e0c399517c1..2afa91d4dc776 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockChatCompletionExecutor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockChatCompletionExecutor.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.core.inference.results.StreamingChatCompletionResults; import org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockChatCompletionRequest; import org.elasticsearch.xpack.inference.external.response.amazonbedrock.AmazonBedrockResponseHandler; import org.elasticsearch.xpack.inference.external.response.amazonbedrock.completion.AmazonBedrockChatCompletionResponseListener; @@ -33,11 +34,16 @@ protected AmazonBedrockChatCompletionExecutor( @Override protected void executeClientRequest(AmazonBedrockBaseClient awsBedrockClient) { - var chatCompletionResponseListener = new AmazonBedrockChatCompletionResponseListener( - chatCompletionRequest, - responseHandler, - inferenceResultsListener - ); - chatCompletionRequest.executeChatCompletionRequest(awsBedrockClient, chatCompletionResponseListener); + if (chatCompletionRequest.isStreaming()) { + var publisher = chatCompletionRequest.executeStreamChatCompletionRequest(awsBedrockClient); + inferenceResultsListener.onResponse(new StreamingChatCompletionResults(publisher)); + } else { + var chatCompletionResponseListener = new AmazonBedrockChatCompletionResponseListener( + chatCompletionRequest, + responseHandler, + inferenceResultsListener + ); + chatCompletionRequest.executeChatCompletionRequest(awsBedrockClient, chatCompletionResponseListener); + } } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockClient.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockClient.java index 23b6884ddc33a..f1cfc84643b1c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockClient.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockClient.java @@ -9,17 +9,22 @@ import software.amazon.awssdk.services.bedrockruntime.model.ConverseRequest; import software.amazon.awssdk.services.bedrockruntime.model.ConverseResponse; +import software.amazon.awssdk.services.bedrockruntime.model.ConverseStreamRequest; import software.amazon.awssdk.services.bedrockruntime.model.InvokeModelRequest; import software.amazon.awssdk.services.bedrockruntime.model.InvokeModelResponse; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import java.time.Instant; +import java.util.concurrent.Flow; public interface AmazonBedrockClient { void converse(ConverseRequest converseRequest, ActionListener responseListener) throws ElasticsearchException; + Flow.Publisher converseStream(ConverseStreamRequest converseStreamRequest) throws ElasticsearchException; + void invokeModel(InvokeModelRequest invokeModelRequest, ActionListener responseListener) throws ElasticsearchException; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockInferenceClient.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockInferenceClient.java index b1486f4995b84..040aa99d81346 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockInferenceClient.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockInferenceClient.java @@ -17,16 +17,21 @@ import software.amazon.awssdk.services.bedrockruntime.model.BedrockRuntimeException; import software.amazon.awssdk.services.bedrockruntime.model.ConverseRequest; import software.amazon.awssdk.services.bedrockruntime.model.ConverseResponse; +import software.amazon.awssdk.services.bedrockruntime.model.ConverseStreamRequest; +import software.amazon.awssdk.services.bedrockruntime.model.ConverseStreamResponseHandler; import software.amazon.awssdk.services.bedrockruntime.model.InvokeModelRequest; import software.amazon.awssdk.services.bedrockruntime.model.InvokeModelResponse; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.SpecialPermission; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockModel; +import org.reactivestreams.FlowAdapters; import org.slf4j.LoggerFactory; import java.security.AccessController; @@ -36,6 +41,7 @@ import java.util.Objects; import java.util.concurrent.CompletionException; import java.util.concurrent.ExecutionException; +import java.util.concurrent.Flow; /** * Not marking this as "final" so we can subclass it for mocking @@ -53,19 +59,21 @@ public class AmazonBedrockInferenceClient extends AmazonBedrockBaseClient { private static final Duration DEFAULT_CLIENT_TIMEOUT_MS = Duration.ofMillis(10000); private final BedrockRuntimeAsyncClient internalClient; + private final ThreadPool threadPool; private volatile Instant expiryTimestamp; - public static AmazonBedrockBaseClient create(AmazonBedrockModel model, @Nullable TimeValue timeout) { + public static 
AmazonBedrockBaseClient create(AmazonBedrockModel model, @Nullable TimeValue timeout, ThreadPool threadPool) { try { - return new AmazonBedrockInferenceClient(model, timeout); + return new AmazonBedrockInferenceClient(model, timeout, threadPool); } catch (Exception e) { throw new ElasticsearchException("Failed to create Amazon Bedrock Client", e); } } - protected AmazonBedrockInferenceClient(AmazonBedrockModel model, @Nullable TimeValue timeout) { + protected AmazonBedrockInferenceClient(AmazonBedrockModel model, @Nullable TimeValue timeout, ThreadPool threadPool) { super(model, timeout); this.internalClient = createAmazonBedrockClient(model, timeout); + this.threadPool = Objects.requireNonNull(threadPool); setExpiryTimestamp(); } @@ -79,6 +87,16 @@ public void converse(ConverseRequest converseRequest, ActionListener converseStream(ConverseStreamRequest request) throws ElasticsearchException { + var awsResponseProcessor = new AmazonBedrockStreamingChatProcessor(threadPool); + internalClient.converseStream( + request, + ConverseStreamResponseHandler.builder().subscriber(() -> FlowAdapters.toSubscriber(awsResponseProcessor)).build() + ); + return awsResponseProcessor; + } + private void onFailure(ActionListener listener, Throwable t, String method) { var unwrappedException = t; if (t instanceof CompletionException || t instanceof ExecutionException) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockInferenceClientCache.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockInferenceClientCache.java index 21e5cfaf211e5..339673e1302ac 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockInferenceClientCache.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockInferenceClientCache.java @@ -29,12 +29,9 @@ 
public final class AmazonBedrockInferenceClientCache implements AmazonBedrockCli // not final for testing private Clock clock; - public AmazonBedrockInferenceClientCache( - BiFunction creator, - @Nullable Clock clock - ) { + public AmazonBedrockInferenceClientCache(BiFunction creator, Clock clock) { this.creator = Objects.requireNonNull(creator); - this.clock = Objects.requireNonNullElse(clock, Clock.systemUTC()); + this.clock = Objects.requireNonNull(clock); } public AmazonBedrockBaseClient getOrCreateClient(AmazonBedrockModel model, @Nullable TimeValue timeout) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockRequestSender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockRequestSender.java index e23b0274ede26..a8d85d896d684 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockRequestSender.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockRequestSender.java @@ -23,6 +23,7 @@ import org.elasticsearch.xpack.inference.services.ServiceComponents; import java.io.IOException; +import java.time.Clock; import java.util.Objects; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -42,7 +43,10 @@ public Factory(ServiceComponents serviceComponents, ClusterService clusterServic } public Sender createSender() { - var clientCache = new AmazonBedrockInferenceClientCache(AmazonBedrockInferenceClient::create, null); + var clientCache = new AmazonBedrockInferenceClientCache( + (model, timeout) -> AmazonBedrockInferenceClient.create(model, timeout, serviceComponents.threadPool()), + Clock.systemUTC() + ); return createSender(new AmazonBedrockExecuteOnlyRequestSender(clientCache, serviceComponents.throttlerManager())); } diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockStreamingChatProcessor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockStreamingChatProcessor.java new file mode 100644 index 0000000000000..439fc5b65efd5 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockStreamingChatProcessor.java @@ -0,0 +1,156 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.amazonbedrock; + +import software.amazon.awssdk.services.bedrockruntime.model.ContentBlockDeltaEvent; +import software.amazon.awssdk.services.bedrockruntime.model.ConverseStreamOutput; +import software.amazon.awssdk.services.bedrockruntime.model.ConverseStreamResponseHandler; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.Strings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.inference.results.StreamingChatCompletionResults; + +import java.util.ArrayDeque; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.Flow; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; + +import static org.elasticsearch.xpack.inference.InferencePlugin.UTILITY_THREAD_POOL_NAME; + +class AmazonBedrockStreamingChatProcessor implements Flow.Processor { + private final AtomicReference error = new AtomicReference<>(null); + private final AtomicLong demand = new AtomicLong(0); + private final AtomicBoolean isDone = new 
AtomicBoolean(false); + private final AtomicBoolean onCompleteCalled = new AtomicBoolean(false); + private final AtomicBoolean onErrorCalled = new AtomicBoolean(false); + private final ThreadPool threadPool; + private volatile Flow.Subscriber downstream; + private volatile Flow.Subscription upstream; + + AmazonBedrockStreamingChatProcessor(ThreadPool threadPool) { + this.threadPool = threadPool; + } + + @Override + public void subscribe(Flow.Subscriber subscriber) { + if (downstream == null) { + downstream = subscriber; + downstream.onSubscribe(new StreamSubscription()); + } else { + subscriber.onError(new IllegalStateException("Subscriber already set.")); + } + } + + @Override + public void onSubscribe(Flow.Subscription subscription) { + if (upstream == null) { + upstream = subscription; + var currentRequestCount = demand.getAndUpdate(i -> 0); + if (currentRequestCount > 0) { + upstream.request(currentRequestCount); + } + } else { + subscription.cancel(); + } + } + + @Override + public void onNext(ConverseStreamOutput item) { + if (item.sdkEventType() == ConverseStreamOutput.EventType.CONTENT_BLOCK_DELTA) { + demand.set(0); // reset demand before we fork to another thread + item.accept(ConverseStreamResponseHandler.Visitor.builder().onContentBlockDelta(this::sendDownstreamOnAnotherThread).build()); + } else { + upstream.request(1); + } + } + + // this is always called from a netty thread maintained by the AWS SDK, we'll move it to our thread to process the response + private void sendDownstreamOnAnotherThread(ContentBlockDeltaEvent event) { + CompletableFuture.runAsync(() -> { + var text = event.delta().text(); + var result = new ArrayDeque(1); + result.offer(new StreamingChatCompletionResults.Result(text)); + var results = new StreamingChatCompletionResults.Results(result); + downstream.onNext(results); + }, threadPool.executor(UTILITY_THREAD_POOL_NAME)); + } + + @Override + public void onError(Throwable amazonBedrockRuntimeException) { + error.set( + new 
ElasticsearchException( + Strings.format("AmazonBedrock StreamingChatProcessor failure: [%s]", amazonBedrockRuntimeException.getMessage()), + amazonBedrockRuntimeException + ) + ); + if (isDone.compareAndSet(false, true) && checkAndResetDemand() && onErrorCalled.compareAndSet(false, true)) { + downstream.onError(error.get()); + } + } + + private boolean checkAndResetDemand() { + return demand.getAndUpdate(i -> 0L) > 0L; + } + + @Override + public void onComplete() { + if (isDone.compareAndSet(false, true) && checkAndResetDemand() && onCompleteCalled.compareAndSet(false, true)) { + downstream.onComplete(); + } + } + + private class StreamSubscription implements Flow.Subscription { + @Override + public void request(long n) { + if (n > 0L) { + demand.updateAndGet(i -> { + var sum = i + n; + return sum >= 0 ? sum : Long.MAX_VALUE; + }); + if (upstream == null) { + // wait for upstream to subscribe before forwarding request + return; + } + if (upstreamIsRunning()) { + requestOnMlThread(n); + } else if (error.get() != null && onErrorCalled.compareAndSet(false, true)) { + downstream.onError(error.get()); + } else if (onCompleteCalled.compareAndSet(false, true)) { + downstream.onComplete(); + } + } else { + cancel(); + downstream.onError(new IllegalStateException("Cannot request a negative number.")); + } + } + + private boolean upstreamIsRunning() { + return isDone.get() == false && error.get() == null; + } + + private void requestOnMlThread(long n) { + var currentThreadPool = EsExecutors.executorName(Thread.currentThread().getName()); + if (UTILITY_THREAD_POOL_NAME.equalsIgnoreCase(currentThreadPool)) { + upstream.request(n); + } else { + CompletableFuture.runAsync(() -> upstream.request(n), threadPool.executor(UTILITY_THREAD_POOL_NAME)); + } + } + + @Override + public void cancel() { + if (upstream != null && upstreamIsRunning()) { + upstream.cancel(); + } + } + } +} diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AmazonBedrockChatCompletionRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AmazonBedrockChatCompletionRequestManager.java index 1c6bb58717942..69a5c665feb86 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AmazonBedrockChatCompletionRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AmazonBedrockChatCompletionRequestManager.java @@ -22,7 +22,6 @@ import org.elasticsearch.xpack.inference.external.response.amazonbedrock.completion.AmazonBedrockChatCompletionResponseHandler; import org.elasticsearch.xpack.inference.services.amazonbedrock.completion.AmazonBedrockChatCompletionModel; -import java.util.List; import java.util.function.Supplier; public class AmazonBedrockChatCompletionRequestManager extends AmazonBedrockRequestManager { @@ -45,9 +44,11 @@ public void execute( Supplier hasRequestCompletedFunction, ActionListener listener ) { - List docsInput = DocumentsOnlyInput.of(inferenceInputs).getInputs(); + var docsOnly = DocumentsOnlyInput.of(inferenceInputs); + var docsInput = docsOnly.getInputs(); + var stream = docsOnly.stream(); var requestEntity = AmazonBedrockChatCompletionEntityFactory.createEntity(model, docsInput); - var request = new AmazonBedrockChatCompletionRequest(model, requestEntity, timeout); + var request = new AmazonBedrockChatCompletionRequest(model, requestEntity, timeout, stream); var responseHandler = new AmazonBedrockChatCompletionResponseHandler(); try { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAI21LabsCompletionRequestEntity.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAI21LabsCompletionRequestEntity.java deleted file mode 100644 index aff01316838f8..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAI21LabsCompletionRequestEntity.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; - -import software.amazon.awssdk.services.bedrockruntime.model.ConverseRequest; - -import org.elasticsearch.core.Nullable; - -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseUtils.getConverseMessageList; - -public record AmazonBedrockAI21LabsCompletionRequestEntity( - List messages, - @Nullable Double temperature, - @Nullable Double topP, - @Nullable Integer maxTokenCount -) implements AmazonBedrockConverseRequestEntity { - - public AmazonBedrockAI21LabsCompletionRequestEntity { - Objects.requireNonNull(messages); - } - - @Override - public ConverseRequest.Builder addMessages(ConverseRequest.Builder request) { - return request.messages(getConverseMessageList(messages)); - } - - @Override - public ConverseRequest.Builder addInferenceConfig(ConverseRequest.Builder request) { - if (temperature == null && topP == null && maxTokenCount == null) { - return request; - } - - return request.inferenceConfig(config -> { - if (temperature != null) { - config.temperature(temperature.floatValue()); - } - - if (topP != null) { - config.topP(topP.floatValue()); - } - - if (maxTokenCount != null) { - 
config.maxTokens(maxTokenCount); - } - }); - } - - @Override - public ConverseRequest.Builder addAdditionalModelFields(ConverseRequest.Builder request) { - return request; - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAnthropicCompletionRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAnthropicCompletionRequestEntity.java deleted file mode 100644 index 540012c221192..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAnthropicCompletionRequestEntity.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; - -import software.amazon.awssdk.services.bedrockruntime.model.ConverseRequest; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.Strings; - -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseUtils.getConverseMessageList; - -public record AmazonBedrockAnthropicCompletionRequestEntity( - List messages, - @Nullable Double temperature, - @Nullable Double topP, - @Nullable Double topK, - @Nullable Integer maxTokenCount -) implements AmazonBedrockConverseRequestEntity { - - public AmazonBedrockAnthropicCompletionRequestEntity { - Objects.requireNonNull(messages); - } - - @Override - public ConverseRequest.Builder addMessages(ConverseRequest.Builder request) { - return request.messages(getConverseMessageList(messages)); - } - - @Override - public ConverseRequest.Builder addInferenceConfig(ConverseRequest.Builder request) { - if (temperature == null && topP == null && maxTokenCount == null) { - return request; - } - - return request.inferenceConfig(config -> { - if (temperature != null) { - config.temperature(temperature.floatValue()); - } - - if (topP != null) { - config.topP(topP.floatValue()); - } - - if (maxTokenCount != null) { - config.maxTokens(maxTokenCount); - } - }); - } - - @Override - public ConverseRequest.Builder addAdditionalModelFields(ConverseRequest.Builder request) { - if (topK == null) { - return request; - } - - String topKField = Strings.format("{\"top_k\":%f}", topK.floatValue()); - return request.additionalModelResponseFieldPaths(topKField); - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockChatCompletionEntityFactory.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockChatCompletionEntityFactory.java index f86d2229d42ad..db902290ba0be 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockChatCompletionEntityFactory.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockChatCompletionEntityFactory.java @@ -12,6 +12,8 @@ import java.util.List; import java.util.Objects; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseUtils.additionalTopK; + public final class AmazonBedrockChatCompletionEntityFactory { public static AmazonBedrockConverseRequestEntity createEntity(AmazonBedrockChatCompletionModel model, List messages) { Objects.requireNonNull(model); @@ -19,55 +21,21 @@ public static AmazonBedrockConverseRequestEntity createEntity(AmazonBedrockChatC var serviceSettings = model.getServiceSettings(); var taskSettings = model.getTaskSettings(); switch (serviceSettings.provider()) { - case AI21LABS -> { - return new AmazonBedrockAI21LabsCompletionRequestEntity( - messages, - taskSettings.temperature(), - taskSettings.topP(), - taskSettings.maxNewTokens() - ); - } - case AMAZONTITAN -> { - return new AmazonBedrockTitanCompletionRequestEntity( - messages, - taskSettings.temperature(), - taskSettings.topP(), - taskSettings.maxNewTokens() - ); - } - case ANTHROPIC -> { - return new AmazonBedrockAnthropicCompletionRequestEntity( + case AI21LABS, AMAZONTITAN, META -> { + return new AmazonBedrockConverseRequestEntity( messages, taskSettings.temperature(), taskSettings.topP(), - taskSettings.topK(), taskSettings.maxNewTokens() ); } - case COHERE -> { - return new AmazonBedrockCohereCompletionRequestEntity( + case ANTHROPIC, COHERE, MISTRAL -> { + return new 
AmazonBedrockConverseRequestEntity( messages, taskSettings.temperature(), taskSettings.topP(), - taskSettings.topK(), - taskSettings.maxNewTokens() - ); - } - case META -> { - return new AmazonBedrockMetaCompletionRequestEntity( - messages, - taskSettings.temperature(), - taskSettings.topP(), - taskSettings.maxNewTokens() - ); - } - case MISTRAL -> { - return new AmazonBedrockMistralCompletionRequestEntity( - messages, - taskSettings.temperature(), - taskSettings.topP(), - taskSettings.topK(), - taskSettings.maxNewTokens() + taskSettings.maxNewTokens(), + additionalTopK(taskSettings.topK()) ); } default -> { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockChatCompletionRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockChatCompletionRequest.java index 61e0504732462..05d7d90873a71 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockChatCompletionRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockChatCompletionRequest.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; import software.amazon.awssdk.services.bedrockruntime.model.ConverseRequest; +import software.amazon.awssdk.services.bedrockruntime.model.ConverseStreamRequest; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.TaskType; @@ -20,19 +22,26 @@ import java.io.IOException; import java.util.Objects; +import java.util.concurrent.Flow; + +import static 
org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseUtils.getConverseMessageList; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseUtils.inferenceConfig; public class AmazonBedrockChatCompletionRequest extends AmazonBedrockRequest { public static final String USER_ROLE = "user"; private final AmazonBedrockConverseRequestEntity requestEntity; private AmazonBedrockChatCompletionResponseListener listener; + private final boolean stream; public AmazonBedrockChatCompletionRequest( AmazonBedrockChatCompletionModel model, AmazonBedrockConverseRequestEntity requestEntity, - @Nullable TimeValue timeout + @Nullable TimeValue timeout, + boolean stream ) { super(model, timeout); this.requestEntity = Objects.requireNonNull(requestEntity); + this.stream = stream; } @Override @@ -52,10 +61,16 @@ public TaskType taskType() { } private ConverseRequest getConverseRequest() { - var converseRequest = ConverseRequest.builder().modelId(amazonBedrockModel.model()); - converseRequest = requestEntity.addMessages(converseRequest); - converseRequest = requestEntity.addInferenceConfig(converseRequest); - converseRequest = requestEntity.addAdditionalModelFields(converseRequest); + var converseRequest = ConverseRequest.builder() + .modelId(amazonBedrockModel.model()) + .messages(getConverseMessageList(requestEntity.messages())) + .additionalModelResponseFieldPaths(requestEntity.additionalModelFields()); + + inferenceConfig(requestEntity).ifPresent(converseRequest::inferenceConfig); + + if (requestEntity.additionalModelFields() != null) { + converseRequest.additionalModelResponseFieldPaths(requestEntity.additionalModelFields()); + } return converseRequest.build(); } @@ -66,4 +81,22 @@ public void executeChatCompletionRequest( this.listener = chatCompletionResponseListener; this.executeRequest(awsBedrockClient); } + + public Flow.Publisher 
executeStreamChatCompletionRequest(AmazonBedrockBaseClient awsBedrockClient) { + var converseStreamRequest = ConverseStreamRequest.builder() + .modelId(amazonBedrockModel.model()) + .messages(getConverseMessageList(requestEntity.messages())); + + inferenceConfig(requestEntity).ifPresent(converseStreamRequest::inferenceConfig); + + if (requestEntity.additionalModelFields() != null) { + converseStreamRequest.additionalModelResponseFieldPaths(requestEntity.additionalModelFields()); + } + return awsBedrockClient.converseStream(converseStreamRequest.build()); + } + + @Override + public boolean isStreaming() { + return stream; + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockCohereCompletionRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockCohereCompletionRequestEntity.java deleted file mode 100644 index f1ae04ad39516..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockCohereCompletionRequestEntity.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; - -import software.amazon.awssdk.services.bedrockruntime.model.ConverseRequest; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.Strings; - -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseUtils.getConverseMessageList; - -public record AmazonBedrockCohereCompletionRequestEntity( - List messages, - @Nullable Double temperature, - @Nullable Double topP, - @Nullable Double topK, - @Nullable Integer maxTokenCount -) implements AmazonBedrockConverseRequestEntity { - - public AmazonBedrockCohereCompletionRequestEntity { - Objects.requireNonNull(messages); - } - - @Override - public ConverseRequest.Builder addMessages(ConverseRequest.Builder request) { - return request.messages(getConverseMessageList(messages)); - } - - @Override - public ConverseRequest.Builder addInferenceConfig(ConverseRequest.Builder request) { - if (temperature == null && topP == null && maxTokenCount == null) { - return request; - } - - return request.inferenceConfig(config -> { - if (temperature != null) { - config.temperature(temperature.floatValue()); - } - - if (topP != null) { - config.topP(topP.floatValue()); - } - - if (maxTokenCount != null) { - config.maxTokens(maxTokenCount); - } - }); - } - - @Override - public ConverseRequest.Builder addAdditionalModelFields(ConverseRequest.Builder request) { - if (topK == null) { - return request; - } - - String topKField = Strings.format("{\"top_k\":%f}", topK.floatValue()); - return request.additionalModelResponseFieldPaths(topKField); - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockConverseRequestEntity.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockConverseRequestEntity.java index d8e9fa43797cd..203b2820ab16f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockConverseRequestEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockConverseRequestEntity.java @@ -7,12 +7,23 @@ package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; -import software.amazon.awssdk.services.bedrockruntime.model.ConverseRequest; +import org.elasticsearch.core.Nullable; -public interface AmazonBedrockConverseRequestEntity { - ConverseRequest.Builder addMessages(ConverseRequest.Builder request); +import java.util.List; - ConverseRequest.Builder addInferenceConfig(ConverseRequest.Builder request); - - ConverseRequest.Builder addAdditionalModelFields(ConverseRequest.Builder request); +public record AmazonBedrockConverseRequestEntity( + List messages, + @Nullable Double temperature, + @Nullable Double topP, + @Nullable Integer maxTokenCount, + @Nullable List additionalModelFields +) { + public AmazonBedrockConverseRequestEntity( + List messages, + @Nullable Double temperature, + @Nullable Double topP, + @Nullable Integer maxTokenCount + ) { + this(messages, temperature, topP, maxTokenCount, null); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockConverseUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockConverseUtils.java index 22e0d26a315a7..eb1652ff7ff6d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockConverseUtils.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockConverseUtils.java @@ -8,9 +8,14 @@ package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; import software.amazon.awssdk.services.bedrockruntime.model.ContentBlock; +import software.amazon.awssdk.services.bedrockruntime.model.InferenceConfiguration; import software.amazon.awssdk.services.bedrockruntime.model.Message; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Strings; + import java.util.List; +import java.util.Optional; import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockChatCompletionRequest.USER_ROLE; @@ -22,4 +27,32 @@ public static List getConverseMessageList(List texts) { .map(content -> Message.builder().role(USER_ROLE).content(content).build()) .toList(); } + + public static Optional inferenceConfig(AmazonBedrockConverseRequestEntity request) { + if (request.temperature() != null || request.topP() != null || request.maxTokenCount() != null) { + var builder = InferenceConfiguration.builder(); + if (request.temperature() != null) { + builder.temperature(request.temperature().floatValue()); + } + + if (request.topP() != null) { + builder.topP(request.topP().floatValue()); + } + + if (request.maxTokenCount() != null) { + builder.maxTokens(request.maxTokenCount()); + } + return Optional.of(builder.build()); + } + return Optional.empty(); + } + + @Nullable + public static List additionalTopK(@Nullable Double topK) { + if (topK == null) { + return null; + } + + return List.of(Strings.format("{\"top_k\":%f}", topK.floatValue())); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMetaCompletionRequestEntity.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMetaCompletionRequestEntity.java deleted file mode 100644 index c21791ced02cb..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMetaCompletionRequestEntity.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; - -import software.amazon.awssdk.services.bedrockruntime.model.ConverseRequest; - -import org.elasticsearch.core.Nullable; - -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseUtils.getConverseMessageList; - -public record AmazonBedrockMetaCompletionRequestEntity( - List messages, - @Nullable Double temperature, - @Nullable Double topP, - @Nullable Integer maxTokenCount -) implements AmazonBedrockConverseRequestEntity { - - public AmazonBedrockMetaCompletionRequestEntity { - Objects.requireNonNull(messages); - } - - @Override - public ConverseRequest.Builder addMessages(ConverseRequest.Builder request) { - return request.messages(getConverseMessageList(messages)); - } - - @Override - public ConverseRequest.Builder addInferenceConfig(ConverseRequest.Builder request) { - if (temperature == null && topP == null && maxTokenCount == null) { - return request; - } - - return request.inferenceConfig(config -> { - if (temperature != null) { - config.temperature(temperature.floatValue()); - } - - if (topP != null) { - config.topP(topP.floatValue()); - } - - if (maxTokenCount != null) { - 
config.maxTokens(maxTokenCount); - } - }); - } - - @Override - public ConverseRequest.Builder addAdditionalModelFields(ConverseRequest.Builder request) { - return request; - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMistralCompletionRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMistralCompletionRequestEntity.java deleted file mode 100644 index 15931674cbabb..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMistralCompletionRequestEntity.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; - -import software.amazon.awssdk.services.bedrockruntime.model.ConverseRequest; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.Strings; - -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseUtils.getConverseMessageList; - -public record AmazonBedrockMistralCompletionRequestEntity( - List messages, - @Nullable Double temperature, - @Nullable Double topP, - @Nullable Double topK, - @Nullable Integer maxTokenCount -) implements AmazonBedrockConverseRequestEntity { - - public AmazonBedrockMistralCompletionRequestEntity { - Objects.requireNonNull(messages); - } - - @Override - public ConverseRequest.Builder addMessages(ConverseRequest.Builder request) { - return request.messages(getConverseMessageList(messages)); - } - - @Override - public ConverseRequest.Builder addInferenceConfig(ConverseRequest.Builder request) { - if (temperature == null && topP == null && maxTokenCount == null) { - return request; - } - - return request.inferenceConfig(config -> { - if (temperature != null) { - config.temperature(temperature.floatValue()); - } - - if (topP != null) { - config.topP(topP.floatValue()); - } - - if (maxTokenCount != null) { - config.maxTokens(maxTokenCount); - } - }); - } - - @Override - public ConverseRequest.Builder addAdditionalModelFields(ConverseRequest.Builder request) { - if (topK == null) { - return request; - } - - String topKField = Strings.format("{\"top_k\":%f}", topK.floatValue()); - return request.additionalModelResponseFieldPaths(topKField); - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockTitanCompletionRequestEntity.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockTitanCompletionRequestEntity.java deleted file mode 100644 index e267592dfd0ba..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockTitanCompletionRequestEntity.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; - -import software.amazon.awssdk.services.bedrockruntime.model.ConverseRequest; - -import org.elasticsearch.core.Nullable; - -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseUtils.getConverseMessageList; - -public record AmazonBedrockTitanCompletionRequestEntity( - List messages, - @Nullable Double temperature, - @Nullable Double topP, - @Nullable Integer maxTokenCount -) implements AmazonBedrockConverseRequestEntity { - - public AmazonBedrockTitanCompletionRequestEntity { - Objects.requireNonNull(messages); - } - - @Override - public ConverseRequest.Builder addMessages(ConverseRequest.Builder request) { - return request.messages(getConverseMessageList(messages)); - } - - @Override - public ConverseRequest.Builder addInferenceConfig(ConverseRequest.Builder request) { - if (temperature == null && topP == null && maxTokenCount == null) { - return request; - } - - return request.inferenceConfig(config -> { - if (temperature != null) { - config.temperature(temperature.floatValue()); - } - - if (topP != null) { - config.topP(topP.floatValue()); - } - - if (maxTokenCount != null) { - 
config.maxTokens(maxTokenCount); - } - }); - } - - @Override - public ConverseRequest.Builder addAdditionalModelFields(ConverseRequest.Builder request) { - return request; - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java index 9b066f2a1679a..e1ed23a318e6c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java @@ -44,6 +44,7 @@ import java.io.IOException; import java.util.List; import java.util.Map; +import java.util.Set; import static org.elasticsearch.TransportVersions.ML_INFERENCE_AMAZON_BEDROCK_ADDED; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; @@ -267,6 +268,11 @@ public TransportVersion getMinimalSupportedVersion() { return ML_INFERENCE_AMAZON_BEDROCK_ADDED; } + @Override + public Set supportedStreamingTasks() { + return COMPLETION_ONLY; + } + /** * For text embedding models get the embedding size and * update the service settings. 
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockExecutorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockExecutorTests.java index 8f09c53c99366..6d601b4b08c53 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockExecutorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockExecutorTests.java @@ -20,7 +20,7 @@ import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockChatCompletionRequest; -import org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockTitanCompletionRequestEntity; +import org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestEntity; import org.elasticsearch.xpack.inference.external.request.amazonbedrock.embeddings.AmazonBedrockEmbeddingsRequest; import org.elasticsearch.xpack.inference.external.request.amazonbedrock.embeddings.AmazonBedrockTitanEmbeddingsRequestEntity; import org.elasticsearch.xpack.inference.external.response.amazonbedrock.completion.AmazonBedrockChatCompletionResponseHandler; @@ -100,8 +100,8 @@ public void testExecute_ChatCompletionRequest() throws CharacterCodingException "secretkey" ); - var requestEntity = new AmazonBedrockTitanCompletionRequestEntity(List.of("abc"), null, null, 512); - var request = new AmazonBedrockChatCompletionRequest(model, requestEntity, null); + var requestEntity = new AmazonBedrockConverseRequestEntity(List.of("abc"), null, null, 512); + var request = new AmazonBedrockChatCompletionRequest(model, requestEntity, null, false); var responseHandler = new AmazonBedrockChatCompletionResponseHandler(); var 
clientCache = new AmazonBedrockMockClientCache(getTestConverseResult("converse result"), null, null); @@ -124,8 +124,8 @@ public void testExecute_FailsProperly_WithElasticsearchException() { "secretkey" ); - var requestEntity = new AmazonBedrockTitanCompletionRequestEntity(List.of("abc"), null, null, 512); - var request = new AmazonBedrockChatCompletionRequest(model, requestEntity, null); + var requestEntity = new AmazonBedrockConverseRequestEntity(List.of("abc"), null, null, 512); + var request = new AmazonBedrockChatCompletionRequest(model, requestEntity, null, false); var responseHandler = new AmazonBedrockChatCompletionResponseHandler(); var clientCache = new AmazonBedrockMockClientCache(null, null, new ElasticsearchException("test exception")); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockInferenceClientCacheTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockInferenceClientCacheTests.java index 873b2e22497c6..bb7c669cdf09b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockInferenceClientCacheTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockInferenceClientCacheTests.java @@ -25,7 +25,7 @@ public class AmazonBedrockInferenceClientCacheTests extends ESTestCase { public void testCache_ReturnsSameObject() throws IOException { AmazonBedrockInferenceClientCache cacheInstance; - try (var cache = new AmazonBedrockInferenceClientCache(AmazonBedrockMockInferenceClient::create, null)) { + try (var cache = new AmazonBedrockInferenceClientCache(AmazonBedrockMockInferenceClient::create, Clock.systemUTC())) { cacheInstance = cache; var model = AmazonBedrockEmbeddingsModelTests.createModel( "inferenceId", diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockInferenceClient.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockInferenceClient.java index 5584e90b3264d..e6cd667b824b3 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockInferenceClient.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockInferenceClient.java @@ -14,15 +14,19 @@ import software.amazon.awssdk.services.bedrockruntime.model.InvokeModelResponse; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockModel; import java.util.concurrent.CompletableFuture; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class AmazonBedrockMockInferenceClient extends AmazonBedrockInferenceClient { private CompletableFuture converseResponseFuture = CompletableFuture.completedFuture(null); @@ -33,7 +37,13 @@ public static AmazonBedrockMockInferenceClient create(AmazonBedrockModel model, } protected AmazonBedrockMockInferenceClient(AmazonBedrockModel model, @Nullable TimeValue timeout) { - super(model, timeout); + super(model, timeout, mockThreadPool()); + } + + private static ThreadPool mockThreadPool() { + ThreadPool threadPool = mock(); + when(threadPool.executor(anyString())).thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE); + return threadPool; } public void setExceptionToThrow(ElasticsearchException 
exceptionToThrow) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockStreamingChatProcessorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockStreamingChatProcessorTests.java new file mode 100644 index 0000000000000..ba87bdfe16cdd --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockStreamingChatProcessorTests.java @@ -0,0 +1,251 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.amazonbedrock; + +import software.amazon.awssdk.services.bedrockruntime.model.BedrockRuntimeException; +import software.amazon.awssdk.services.bedrockruntime.model.ContentBlockDelta; +import software.amazon.awssdk.services.bedrockruntime.model.ContentBlockDeltaEvent; +import software.amazon.awssdk.services.bedrockruntime.model.ConverseStreamOutput; +import software.amazon.awssdk.services.bedrockruntime.model.ConverseStreamResponseHandler; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.inference.results.StreamingChatCompletionResults; +import org.junit.Before; +import org.mockito.ArgumentCaptor; + +import java.util.Arrays; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Flow; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.elasticsearch.xpack.inference.InferencePlugin.UTILITY_THREAD_POOL_NAME; +import static org.hamcrest.Matchers.equalTo; +import static 
org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.isA; +import static org.hamcrest.Matchers.notNullValue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.assertArg; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +public class AmazonBedrockStreamingChatProcessorTests extends ESTestCase { + private AmazonBedrockStreamingChatProcessor processor; + + @Before + public void setUp() throws Exception { + super.setUp(); + ThreadPool threadPool = mock(); + when(threadPool.executor(UTILITY_THREAD_POOL_NAME)).thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE); + processor = new AmazonBedrockStreamingChatProcessor(threadPool); + } + + /** + * We do not issue requests on subscribe because the downstream will control the pacing. + */ + public void testOnSubscribeBeforeDownstreamDoesNotRequest() { + var upstream = mock(Flow.Subscription.class); + processor.onSubscribe(upstream); + + verify(upstream, never()).request(anyLong()); + } + + /** + * If the downstream requests data before the upstream is set, when the upstream is set, we will forward the pending requests to it. 
+ */ + public void testOnSubscribeAfterDownstreamRequests() { + var expectedRequestCount = randomLongBetween(1, 500); + Flow.Subscriber subscriber = mock(); + doAnswer(ans -> { + Flow.Subscription sub = ans.getArgument(0); + sub.request(expectedRequestCount); + return null; + }).when(subscriber).onSubscribe(any()); + processor.subscribe(subscriber); + + var upstream = mock(Flow.Subscription.class); + processor.onSubscribe(upstream); + + verify(upstream, times(1)).request(anyLong()); + } + + public void testCancelDuplicateSubscriptions() { + processor.onSubscribe(mock()); + + var upstream = mock(Flow.Subscription.class); + processor.onSubscribe(upstream); + + verify(upstream, times(1)).cancel(); + verifyNoMoreInteractions(upstream); + } + + public void testMultiplePublishesCallsOnError() { + processor.subscribe(mock()); + + Flow.Subscriber subscriber = mock(); + processor.subscribe(subscriber); + + verify(subscriber, times(1)).onError(assertArg(e -> { + assertThat(e, isA(IllegalStateException.class)); + assertThat(e.getMessage(), equalTo("Subscriber already set.")); + })); + } + + public void testNonDeltaBlocksAreSkipped() { + var upstream = mock(Flow.Subscription.class); + processor.onSubscribe(upstream); + var counter = new AtomicInteger(); + Arrays.stream(ConverseStreamOutput.EventType.values()) + .filter(type -> type != ConverseStreamOutput.EventType.CONTENT_BLOCK_DELTA) + .forEach(type -> { + ConverseStreamOutput output = mock(); + when(output.sdkEventType()).thenReturn(type); + processor.onNext(output); + verify(upstream, times(counter.incrementAndGet())).request(eq(1L)); + }); + } + + public void testDeltaBlockForwardsDownstream() { + var expectedText = "hello"; + + // mock executorservice so we can make sure we handle the response on another thread + ExecutorService executorService = mock(); + ThreadPool threadPool = mock(); + when(threadPool.executor(UTILITY_THREAD_POOL_NAME)).thenReturn(executorService); + processor = new 
AmazonBedrockStreamingChatProcessor(threadPool); + doAnswer(ans -> { + Runnable command = ans.getArgument(0); + command.run(); + return null; + }).when(executorService).execute(any()); + + Flow.Subscription upstream = mock(); + processor.onSubscribe(upstream); + Flow.Subscriber downstream = mock(); + processor.subscribe(downstream); + + ConverseStreamOutput output = output(expectedText); + + processor.onNext(output); + + verifyText(downstream, expectedText); + verify(executorService, times(1)).execute(any()); + verify(upstream, times(0)).request(anyLong()); + } + + private ConverseStreamOutput output(String text) { + ConverseStreamOutput output = mock(); + when(output.sdkEventType()).thenReturn(ConverseStreamOutput.EventType.CONTENT_BLOCK_DELTA); + doAnswer(ans -> { + ConverseStreamResponseHandler.Visitor visitor = ans.getArgument(0); + ContentBlockDelta delta = ContentBlockDelta.fromText(text); + ContentBlockDeltaEvent event = ContentBlockDeltaEvent.builder().delta(delta).build(); + visitor.visitContentBlockDelta(event); + return null; + }).when(output).accept(any()); + return output; + } + + private void verifyText(Flow.Subscriber downstream, String expectedText) { + verify(downstream, times(1)).onNext(assertArg(results -> { + assertThat(results, notNullValue()); + assertThat(results.results().size(), equalTo(1)); + assertThat(results.results().getFirst().delta(), equalTo(expectedText)); + })); + } + + public void verifyCompleteBeforeRequest() { + processor.onComplete(); + + Flow.Subscriber downstream = mock(); + var sub = ArgumentCaptor.forClass(Flow.Subscription.class); + processor.subscribe(downstream); + verify(downstream).onSubscribe(sub.capture()); + + sub.getValue().request(1); + verify(downstream, times(1)).onComplete(); + } + + public void verifyCompleteAfterRequest() { + + Flow.Subscriber downstream = mock(); + var sub = ArgumentCaptor.forClass(Flow.Subscription.class); + processor.subscribe(downstream); + verify(downstream).onSubscribe(sub.capture()); 
+ + sub.getValue().request(1); + processor.onComplete(); + verify(downstream, times(1)).onComplete(); + } + + public void verifyOnErrorBeforeRequest() { + var expectedError = BedrockRuntimeException.builder().message("ahhhhhh").build(); + processor.onError(expectedError); + + Flow.Subscriber downstream = mock(); + var sub = ArgumentCaptor.forClass(Flow.Subscription.class); + processor.subscribe(downstream); + verify(downstream).onSubscribe(sub.capture()); + + sub.getValue().request(1); + verify(downstream, times(1)).onError(assertArg(e -> { + assertThat(e, isA(ElasticsearchException.class)); + assertThat(e.getCause(), is(expectedError)); + })); + } + + public void verifyOnErrorAfterRequest() { + var expectedError = BedrockRuntimeException.builder().message("ahhhhhh").build(); + + Flow.Subscriber downstream = mock(); + var sub = ArgumentCaptor.forClass(Flow.Subscription.class); + processor.subscribe(downstream); + verify(downstream).onSubscribe(sub.capture()); + + sub.getValue().request(1); + processor.onError(expectedError); + verify(downstream, times(1)).onError(assertArg(e -> { + assertThat(e, isA(ElasticsearchException.class)); + assertThat(e.getCause(), is(expectedError)); + })); + } + + public void verifyAsyncOnCompleteIsStillDeliveredSynchronously() { + mockUpstream(); + + Flow.Subscriber downstream = mock(); + var sub = ArgumentCaptor.forClass(Flow.Subscription.class); + processor.subscribe(downstream); + verify(downstream).onSubscribe(sub.capture()); + + sub.getValue().request(1); + verify(downstream, times(1)).onNext(any()); + processor.onComplete(); + verify(downstream, times(0)).onComplete(); + sub.getValue().request(1); + verify(downstream, times(1)).onComplete(); + } + + private void mockUpstream() { + Flow.Subscription upstream = mock(); + doAnswer(ans -> { + processor.onNext(output(randomIdentifier())); + return null; + }).when(upstream).request(anyLong()); + processor.onSubscribe(upstream); + } +} diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAI21LabsCompletionRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAI21LabsCompletionRequestEntityTests.java deleted file mode 100644 index 10c8943c75f6c..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAI21LabsCompletionRequestEntityTests.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; - -import org.elasticsearch.test.ESTestCase; - -import java.util.List; - -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHasMessage; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyMaxTokensInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTemperatureInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTopKInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTopPInput; -import static 
org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveMaxTokensInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTemperatureInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTopPInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.getConverseRequest; -import static org.hamcrest.Matchers.is; - -public class AmazonBedrockAI21LabsCompletionRequestEntityTests extends ESTestCase { - public void testRequestEntity_CreatesProperRequest() { - var request = new AmazonBedrockAI21LabsCompletionRequestEntity(List.of("test message"), null, null, null); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertThat(builtRequest.modelId(), is("testmodel")); - assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); - } - - public void testRequestEntity_CreatesProperRequest_WithTemperature() { - var request = new AmazonBedrockAI21LabsCompletionRequestEntity(List.of("test message"), 1.0, null, null); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertTrue(doesConverseRequestHaveTemperatureInput(builtRequest, 1.0)); - assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); - 
assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); - } - - public void testRequestEntity_CreatesProperRequest_WithTopP() { - var request = new AmazonBedrockAI21LabsCompletionRequestEntity(List.of("test message"), null, 1.0, null); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); - assertTrue(doesConverseRequestHaveTopPInput(builtRequest, 1.0)); - assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); - } - - public void testRequestEntity_CreatesProperRequest_WithMaxTokens() { - var request = new AmazonBedrockAI21LabsCompletionRequestEntity(List.of("test message"), null, null, 128); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); - assertTrue(doesConverseRequestHaveMaxTokensInput(builtRequest, 128)); - } -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAnthropicCompletionRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAnthropicCompletionRequestEntityTests.java deleted file mode 100644 index e8a3440a37294..0000000000000 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAnthropicCompletionRequestEntityTests.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; - -import org.elasticsearch.test.ESTestCase; - -import java.util.List; - -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHasMessage; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyMaxTokensInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTemperatureInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTopKInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTopPInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveMaxTokensInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTemperatureInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTopKInput; -import static 
org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTopPInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.getConverseRequest; -import static org.hamcrest.Matchers.is; - -public class AmazonBedrockAnthropicCompletionRequestEntityTests extends ESTestCase { - public void testRequestEntity_CreatesProperRequest() { - var request = new AmazonBedrockAnthropicCompletionRequestEntity(List.of("test message"), null, null, null, null); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertThat(builtRequest.modelId(), is("testmodel")); - assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); - } - - public void testRequestEntity_CreatesProperRequest_WithTemperature() { - var request = new AmazonBedrockAnthropicCompletionRequestEntity(List.of("test message"), 1.0, null, null, null); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertTrue(doesConverseRequestHaveTemperatureInput(builtRequest, 1.0)); - assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); - } - - public void testRequestEntity_CreatesProperRequest_WithTopP() { - var request = new AmazonBedrockAnthropicCompletionRequestEntity(List.of("test message"), null, 1.0, null, null); - var 
builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); - assertTrue(doesConverseRequestHaveTopPInput(builtRequest, 1.0)); - assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); - } - - public void testRequestEntity_CreatesProperRequest_WithMaxTokens() { - var request = new AmazonBedrockAnthropicCompletionRequestEntity(List.of("test message"), null, null, null, 128); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); - assertTrue(doesConverseRequestHaveMaxTokensInput(builtRequest, 128)); - } - - public void testRequestEntity_CreatesProperRequest_WithTopK() { - var request = new AmazonBedrockAnthropicCompletionRequestEntity(List.of("test message"), null, null, 1.0, null); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); - assertTrue(doesConverseRequestHaveTopKInput(builtRequest, 1.0)); - assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); - } -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockChatCompletionEntityFactoryTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockChatCompletionEntityFactoryTests.java new file mode 100644 index 0000000000000..32cd21bc3d45a --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockChatCompletionEntityFactoryTests.java @@ -0,0 +1,102 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockProvider; +import org.elasticsearch.xpack.inference.services.amazonbedrock.completion.AmazonBedrockChatCompletionModel; +import org.elasticsearch.xpack.inference.services.amazonbedrock.completion.AmazonBedrockChatCompletionServiceSettings; +import org.elasticsearch.xpack.inference.services.amazonbedrock.completion.AmazonBedrockChatCompletionTaskSettings; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xcontent.XContentParserConfiguration.EMPTY; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockProvider.AI21LABS; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockProvider.AMAZONTITAN; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockProvider.ANTHROPIC; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockProvider.COHERE; +import static 
org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockProvider.META; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockProvider.MISTRAL; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class AmazonBedrockChatCompletionEntityFactoryTests extends ESTestCase { + public void testEntitiesWithoutAdditionalMessages() { + List.of(AI21LABS, AMAZONTITAN, META).forEach(provider -> { + var expectedTemp = randomDoubleBetween(1, 10, true); + var expectedTopP = randomDoubleBetween(1, 10, true); + + var expectedMaxToken = randomIntBetween(1, 10); + var expectedMessage = List.of(randomIdentifier()); + var model = model(provider, expectedTemp, expectedTopP, expectedMaxToken); + + var entity = AmazonBedrockChatCompletionEntityFactory.createEntity(model, expectedMessage); + + assertThat(entity, notNullValue()); + assertThat(entity.temperature(), equalTo(expectedTemp)); + assertThat(entity.topP(), equalTo(expectedTopP)); + assertThat(entity.maxTokenCount(), equalTo(expectedMaxToken)); + assertThat(entity.additionalModelFields(), nullValue()); + assertThat(entity.messages(), equalTo(expectedMessage)); + }); + } + + public void testWithAdditionalMessages() { + List.of(ANTHROPIC, COHERE, MISTRAL).forEach(provider -> { + var expectedTemp = randomDoubleBetween(1, 10, true); + var expectedTopP = randomDoubleBetween(1, 10, true); + var expectedMaxToken = randomIntBetween(1, 10); + var expectedMessage = List.of(randomIdentifier()); + var expectedTopK = randomDoubleBetween(1, 10, true); + var model = model(provider, expectedTemp, expectedTopP, expectedMaxToken, expectedTopK); + + var entity = AmazonBedrockChatCompletionEntityFactory.createEntity(model, expectedMessage); + + assertThat(entity, notNullValue()); + 
assertThat(entity.temperature(), equalTo(expectedTemp)); + assertThat(entity.topP(), equalTo(expectedTopP)); + assertThat(entity.maxTokenCount(), equalTo(expectedMaxToken)); + assertThat(entity.messages(), equalTo(expectedMessage)); + assertThat(entity.additionalModelFields(), notNullValue()); + assertThat(entity.additionalModelFields().size(), equalTo(1)); + try (var parser = XContentFactory.xContent(XContentType.JSON).createParser(EMPTY, entity.additionalModelFields().getFirst())) { + var additionalModelFields = parser.map(); + assertThat((Double) additionalModelFields.get("top_k"), closeTo(expectedTopK, 0.1)); + } catch (IOException e) { + fail(e); + } + }); + } + + AmazonBedrockChatCompletionModel model(AmazonBedrockProvider provider, Double temperature, Double topP, Integer maxTokenCount) { + return model(provider, temperature, topP, maxTokenCount, null); + } + + AmazonBedrockChatCompletionModel model(AmazonBedrockProvider provider, Double temp, Double topP, Integer tokenCount, Double topK) { + var serviceSettings = mock(AmazonBedrockChatCompletionServiceSettings.class); + when(serviceSettings.provider()).thenReturn(provider); + + var taskSettings = mock(AmazonBedrockChatCompletionTaskSettings.class); + when(taskSettings.temperature()).thenReturn(temp); + when(taskSettings.topP()).thenReturn(topP); + when(taskSettings.maxNewTokens()).thenReturn(tokenCount); + when(taskSettings.topK()).thenReturn(topK); + + var model = mock(AmazonBedrockChatCompletionModel.class); + when(model.getServiceSettings()).thenReturn(serviceSettings); + when(model.getTaskSettings()).thenReturn(taskSettings); + return model; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockCohereCompletionRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockCohereCompletionRequestEntityTests.java deleted file 
mode 100644 index c8e844d000240..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockCohereCompletionRequestEntityTests.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; - -import org.elasticsearch.test.ESTestCase; - -import java.util.List; - -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHasMessage; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyMaxTokensInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTemperatureInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTopKInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTopPInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveMaxTokensInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTemperatureInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTopKInput; -import static 
org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTopPInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.getConverseRequest; -import static org.hamcrest.Matchers.is; - -public class AmazonBedrockCohereCompletionRequestEntityTests extends ESTestCase { - public void testRequestEntity_CreatesProperRequest() { - var request = new AmazonBedrockCohereCompletionRequestEntity(List.of("test message"), null, null, null, null); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertThat(builtRequest.modelId(), is("testmodel")); - assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); - } - - public void testRequestEntity_CreatesProperRequest_WithTemperature() { - var request = new AmazonBedrockCohereCompletionRequestEntity(List.of("test message"), 1.0, null, null, null); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertTrue(doesConverseRequestHaveTemperatureInput(builtRequest, 1.0)); - assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); - } - - public void testRequestEntity_CreatesProperRequest_WithTopP() { - var request = new AmazonBedrockCohereCompletionRequestEntity(List.of("test message"), null, 1.0, null, null); - var builtRequest = 
getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); - assertTrue(doesConverseRequestHaveTopPInput(builtRequest, 1.0)); - assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); - } - - public void testRequestEntity_CreatesProperRequest_WithMaxTokens() { - var request = new AmazonBedrockCohereCompletionRequestEntity(List.of("test message"), null, null, null, 128); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); - assertTrue(doesConverseRequestHaveMaxTokensInput(builtRequest, 128)); - } - - public void testRequestEntity_CreatesProperRequest_WithTopK() { - var request = new AmazonBedrockCohereCompletionRequestEntity(List.of("test message"), null, null, 1.0, null); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); - assertTrue(doesConverseRequestHaveTopKInput(builtRequest, 1.0)); - assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); - } -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockConverseRequestUtils.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockConverseRequestUtils.java index 17c3b4488bae4..0e7acd3337e0f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockConverseRequestUtils.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockConverseRequestUtils.java @@ -15,12 +15,16 @@ import java.util.Collection; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseUtils.getConverseMessageList; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseUtils.inferenceConfig; + public final class AmazonBedrockConverseRequestUtils { public static ConverseRequest getConverseRequest(String modelId, AmazonBedrockConverseRequestEntity requestEntity) { - var converseRequest = ConverseRequest.builder().modelId(modelId); - converseRequest = requestEntity.addMessages(converseRequest); - converseRequest = requestEntity.addInferenceConfig(converseRequest); - converseRequest = requestEntity.addAdditionalModelFields(converseRequest); + var converseRequest = ConverseRequest.builder() + .modelId(modelId) + .messages(getConverseMessageList(requestEntity.messages())) + .additionalModelResponseFieldPaths(requestEntity.additionalModelFields()); + inferenceConfig(requestEntity).ifPresent(converseRequest::inferenceConfig); return converseRequest.build(); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMetaCompletionRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMetaCompletionRequestEntityTests.java deleted file mode 100644 index 
25700f7c7aee1..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMetaCompletionRequestEntityTests.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; - -import org.elasticsearch.test.ESTestCase; - -import java.util.List; - -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHasMessage; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyMaxTokensInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTemperatureInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTopKInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTopPInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveMaxTokensInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTemperatureInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTopPInput; -import static 
org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.getConverseRequest; -import static org.hamcrest.Matchers.is; - -public class AmazonBedrockMetaCompletionRequestEntityTests extends ESTestCase { - public void testRequestEntity_CreatesProperRequest() { - var request = new AmazonBedrockMetaCompletionRequestEntity(List.of("test message"), null, null, null); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertThat(builtRequest.modelId(), is("testmodel")); - assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); - } - - public void testRequestEntity_CreatesProperRequest_WithTemperature() { - var request = new AmazonBedrockMetaCompletionRequestEntity(List.of("test message"), 1.0, null, null); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertTrue(doesConverseRequestHaveTemperatureInput(builtRequest, 1.0)); - assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); - } - - public void testRequestEntity_CreatesProperRequest_WithTopP() { - var request = new AmazonBedrockMetaCompletionRequestEntity(List.of("test message"), null, 1.0, null); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - 
assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); - assertTrue(doesConverseRequestHaveTopPInput(builtRequest, 1.0)); - assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); - } - - public void testRequestEntity_CreatesProperRequest_WithMaxTokens() { - var request = new AmazonBedrockMetaCompletionRequestEntity(List.of("test message"), null, null, 128); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); - assertTrue(doesConverseRequestHaveMaxTokensInput(builtRequest, 128)); - } -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMistralCompletionRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMistralCompletionRequestEntityTests.java deleted file mode 100644 index 8e321b0cb33a7..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMistralCompletionRequestEntityTests.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; - -import org.elasticsearch.test.ESTestCase; - -import java.util.List; - -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHasMessage; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyMaxTokensInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTemperatureInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTopKInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTopPInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveMaxTokensInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTemperatureInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTopKInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTopPInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.getConverseRequest; -import static org.hamcrest.Matchers.is; - -public class AmazonBedrockMistralCompletionRequestEntityTests extends ESTestCase { - public void testRequestEntity_CreatesProperRequest() { - var request = new 
AmazonBedrockMistralCompletionRequestEntity(List.of("test message"), null, null, null, null); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertThat(builtRequest.modelId(), is("testmodel")); - assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); - } - - public void testRequestEntity_CreatesProperRequest_WithTemperature() { - var request = new AmazonBedrockMistralCompletionRequestEntity(List.of("test message"), 1.0, null, null, null); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertTrue(doesConverseRequestHaveTemperatureInput(builtRequest, 1.0)); - assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); - } - - public void testRequestEntity_CreatesProperRequest_WithTopP() { - var request = new AmazonBedrockMistralCompletionRequestEntity(List.of("test message"), null, 1.0, null, null); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); - assertTrue(doesConverseRequestHaveTopPInput(builtRequest, 1.0)); - assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); - } - - public void 
testRequestEntity_CreatesProperRequest_WithMaxTokens() { - var request = new AmazonBedrockMistralCompletionRequestEntity(List.of("test message"), null, null, null, 128); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); - assertTrue(doesConverseRequestHaveMaxTokensInput(builtRequest, 128)); - } - - public void testRequestEntity_CreatesProperRequest_WithTopK() { - var request = new AmazonBedrockMistralCompletionRequestEntity(List.of("test message"), null, null, 1.0, null); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); - assertTrue(doesConverseRequestHaveTopKInput(builtRequest, 1.0)); - assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); - } -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockTitanCompletionRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockTitanCompletionRequestEntityTests.java deleted file mode 100644 index 8d1c15499bfb6..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockTitanCompletionRequestEntityTests.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; - -import org.elasticsearch.test.ESTestCase; - -import java.util.List; - -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHasMessage; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyMaxTokensInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTemperatureInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTopKInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTopPInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveMaxTokensInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTemperatureInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTopPInput; -import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.getConverseRequest; -import static org.hamcrest.Matchers.is; - -public class AmazonBedrockTitanCompletionRequestEntityTests extends ESTestCase { - public void 
testRequestEntity_CreatesProperRequest() { - var request = new AmazonBedrockTitanCompletionRequestEntity(List.of("test message"), null, null, null); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertThat(builtRequest.modelId(), is("testmodel")); - assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); - } - - public void testRequestEntity_CreatesProperRequest_WithTemperature() { - var request = new AmazonBedrockTitanCompletionRequestEntity(List.of("test message"), 1.0, null, null); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertTrue(doesConverseRequestHaveTemperatureInput(builtRequest, 1.0)); - assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); - } - - public void testRequestEntity_CreatesProperRequest_WithTopP() { - var request = new AmazonBedrockTitanCompletionRequestEntity(List.of("test message"), null, 1.0, null); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); - assertTrue(doesConverseRequestHaveTopPInput(builtRequest, 1.0)); - assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); - 
assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); - } - - public void testRequestEntity_CreatesProperRequest_WithMaxTokens() { - var request = new AmazonBedrockTitanCompletionRequestEntity(List.of("test message"), null, null, 128); - var builtRequest = getConverseRequest("testmodel", request); - assertThat(builtRequest.modelId(), is("testmodel")); - assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); - assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); - assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); - assertTrue(doesConverseRequestHaveMaxTokensInput(builtRequest, 128)); - } -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java index 9c746e7c2aed9..06c5a68987a9e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.inference.services.amazonbedrock; +import software.amazon.awssdk.services.bedrockruntime.model.BedrockRuntimeException; + import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; @@ -33,7 +35,6 @@ import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.Utils; import org.elasticsearch.xpack.inference.external.amazonbedrock.AmazonBedrockMockRequestSender; -import org.elasticsearch.xpack.inference.external.amazonbedrock.AmazonBedrockRequestSender; import 
org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponentsTests; @@ -1265,12 +1266,19 @@ public void testInfer_UnauthorizedResponse() throws IOException { var factory = mock(HttpRequestSender.Factory.class); when(factory.createSender()).thenReturn(sender); - var amazonBedrockFactory = new AmazonBedrockRequestSender.Factory( + var amazonBedrockFactory = new AmazonBedrockMockRequestSender.Factory( ServiceComponentsTests.createWithSettings(threadPool, Settings.EMPTY), mockClusterServiceEmpty() ); - try (var service = new AmazonBedrockService(factory, amazonBedrockFactory, createWithEmptySettings(threadPool))) { + try ( + var service = new AmazonBedrockService(factory, amazonBedrockFactory, createWithEmptySettings(threadPool)); + var requestSender = (AmazonBedrockMockRequestSender) amazonBedrockFactory.createSender() + ) { + requestSender.enqueue( + BedrockRuntimeException.builder().message("The security token included in the request is invalid").build() + ); + var model = AmazonBedrockEmbeddingsModelTests.createModel( "id", "us-east-1", From 6c752abc231598f852ff47b4cde79bd97b9a1f5f Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Mon, 14 Oct 2024 20:13:27 -0400 Subject: [PATCH 083/449] Adding new bbq index types behind a feature flag (#114439) new index types of bbq_hnsw and bbq_flat which utilize the better binary quantization formats. A 32x reduction in memory, with nice recall properties. 
--- docs/changelog/114439.yaml | 5 + .../mapping/types/dense-vector.asciidoc | 41 ++++- .../search-your-data/knn-search.asciidoc | 92 ++++++++++ .../search.vectors/41_knn_search_bbq_hnsw.yml | 160 +++++++++++++++++ .../search.vectors/42_knn_search_bbq_flat.yml | 165 ++++++++++++++++++ server/src/main/java/module-info.java | 7 +- .../index/codec/vectors/BQVectorUtils.java | 8 + .../vectors/ES816BinaryFlatVectorsScorer.java | 49 ++---- .../vectors/OffHeapBinarizedVectorValues.java | 22 +++ ...RandomAccessBinarizedByteVectorValues.java | 14 ++ .../index/mapper/MapperFeatures.java | 8 +- .../vectors/DenseVectorFieldMapper.java | 160 ++++++++++++++++- .../index/store/LuceneFilesExtensions.java | 4 +- .../ES816BinaryFlatVectorsScorerTests.java | 2 +- .../vectors/DenseVectorFieldMapperTests.java | 66 ++++++- .../vectors/DenseVectorFieldTypeTests.java | 23 ++- 16 files changed, 767 insertions(+), 59 deletions(-) create mode 100644 docs/changelog/114439.yaml create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_bbq_hnsw.yml create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_bbq_flat.yml diff --git a/docs/changelog/114439.yaml b/docs/changelog/114439.yaml new file mode 100644 index 0000000000000..fd097d02f885f --- /dev/null +++ b/docs/changelog/114439.yaml @@ -0,0 +1,5 @@ +pr: 114439 +summary: Adding new bbq index types behind a feature flag +area: Vector Search +type: feature +issues: [] diff --git a/docs/reference/mapping/types/dense-vector.asciidoc b/docs/reference/mapping/types/dense-vector.asciidoc index 0cd9ee0578b70..44f90eded8632 100644 --- a/docs/reference/mapping/types/dense-vector.asciidoc +++ b/docs/reference/mapping/types/dense-vector.asciidoc @@ -115,22 +115,27 @@ that sacrifices result accuracy for improved speed. 
==== Automatically quantize vectors for kNN search The `dense_vector` type supports quantization to reduce the memory footprint required when <> `float` vectors. -The two following quantization strategies are supported: +The three following quantization strategies are supported: + -- -`int8` - Quantizes each dimension of the vector to 1-byte integers. This can reduce the memory footprint by 75% at the cost of some accuracy. -`int4` - Quantizes each dimension of the vector to half-byte integers. This can reduce the memory footprint by 87% at the cost of some accuracy. +`int8` - Quantizes each dimension of the vector to 1-byte integers. This reduces the memory footprint by 75% (or 4x) at the cost of some accuracy. +`int4` - Quantizes each dimension of the vector to half-byte integers. This reduces the memory footprint by 87% (or 8x) at the cost of accuracy. +`bbq` - experimental:[] Better binary quantization which reduces each dimension to a single bit precision. This reduces the memory footprint by 96% (or 32x) at a larger cost of accuracy. Generally, oversampling during query time and reranking can help mitigate the accuracy loss. -- -To use a quantized index, you can set your index type to `int8_hnsw` or `int4_hnsw`. When indexing `float` vectors, the current default +When using a quantized format, you may want to oversample and rescore the results to improve accuracy. See <> for more information. + +To use a quantized index, you can set your index type to `int8_hnsw`, `int4_hnsw`, or `bbq_hnsw`. When indexing `float` vectors, the current default index type is `int8_hnsw`. NOTE: Quantization will continue to keep the raw float vector values on disk for reranking, reindexing, and quantization improvements over the lifetime of the data. -This means disk usage will increase by ~25% for `int8` and ~12.5% for `int4` due to the overhead of storing the quantized and raw vectors. 
+This means disk usage will increase by ~25% for `int8`, ~12.5% for `int4`, and ~3.1% for `bbq` due to the overhead of storing the quantized and raw vectors. NOTE: `int4` quantization requires an even number of vector dimensions. +NOTE: experimental:[] `bbq` quantization only supports vector dimensions that are greater than 64. + Here is an example of how to create a byte-quantized index: [source,console] @@ -173,6 +178,27 @@ PUT my-byte-quantized-index } -------------------------------------------------- +experimental:[] Here is an example of how to create a binary quantized index: + +[source,console] +-------------------------------------------------- +PUT my-byte-quantized-index +{ + "mappings": { + "properties": { + "my_vector": { + "type": "dense_vector", + "dims": 64, + "index": true, + "index_options": { + "type": "bbq_hnsw" + } + } + } + } +} +-------------------------------------------------- + [role="child_attributes"] [[dense-vector-params]] ==== Parameters for dense vector fields @@ -301,11 +327,16 @@ by 4x at the cost of some accuracy. See <>. +* experimental:[] `bbq_hnsw` - This utilizes the https://arxiv.org/abs/1603.09320[HNSW algorithm] in addition to automatically binary +quantization for scalable approximate kNN search with `element_type` of `float`. This can reduce the memory footprint +by 32x at the cost of accuracy. See <>. * `flat` - This utilizes a brute-force search algorithm for exact kNN search. This supports all `element_type` values. * `int8_flat` - This utilizes a brute-force search algorithm in addition to automatically scalar quantization. Only supports `element_type` of `float`. * `int4_flat` - This utilizes a brute-force search algorithm in addition to automatically half-byte scalar quantization. Only supports `element_type` of `float`. +* experimental:[] `bbq_flat` - This utilizes a brute-force search algorithm in addition to automatically binary quantization. Only supports +`element_type` of `float`. 
-- `m`::: (Optional, integer) diff --git a/docs/reference/search/search-your-data/knn-search.asciidoc b/docs/reference/search/search-your-data/knn-search.asciidoc index 70cf9eec121d7..6fb7f1747051f 100644 --- a/docs/reference/search/search-your-data/knn-search.asciidoc +++ b/docs/reference/search/search-your-data/knn-search.asciidoc @@ -1149,3 +1149,95 @@ POST product-index/_search ---- //TEST[continued] +[discrete] +[[dense-vector-knn-search-reranking]] +==== Oversampling and rescoring for quantized vectors + +All forms of quantization will result in some accuracy loss and as the quantization level increases the accuracy loss will also increase. +Generally, we have found that: +- `int8` requires minimal if any rescoring +- `int4` requires some rescoring for higher accuracy and larger recall scenarios. Generally, oversampling by 1.5x-2x recovers most of the accuracy loss. +- `bbq` requires rescoring except on exceptionally large indices or models specifically designed for quantization. We have found that between 3x-5x oversampling is generally sufficient. But for fewer dimensions or vectors that do not quantize well, higher oversampling may be required. + +There are two main ways to oversample and rescore. The first is to utilize the <> in the `_search` request. 
+ +Here is an example using the top level `knn` search with oversampling and using `rescore` to rerank the results: + +[source,console] +-------------------------------------------------- +POST /my-index/_search +{ + "size": 10, <1> + "knn": { + "query_vector": [0.04283529, 0.85670587, -0.51402352, 0], + "field": "my_int4_vector", + "k": 20, <2> + "num_candidates": 50 + }, + "rescore": { + "window_size": 20, <3> + "query": { + "rescore_query": { + "script_score": { + "query": { + "match_all": {} + }, + "script": { + "source": "(dotProduct(params.queryVector, 'my_int4_vector') + 1.0)", <4> + "params": { + "queryVector": [0.04283529, 0.85670587, -0.51402352, 0] + } + } + } + }, + "query_weight": 0, <5> + "rescore_query_weight": 1 <6> + } + } +} +-------------------------------------------------- +// TEST[skip: setup not provided] +<1> The number of results to return, note it's only 10 and we will oversample by 2x, gathering 20 nearest neighbors. +<2> The number of results to return from the KNN search. This will do an approximate KNN search with 50 candidates +per HNSW graph and use the quantized vectors, returning the 20 most similar vectors +according to the quantized score. Additionally, since this is the top-level `knn` object, the global top 20 results +from all shards will be gathered before rescoring. Combining with `rescore`, this is oversampling by `2x`, meaning +gathering 20 nearest neighbors according to quantized scoring and rescoring with higher fidelity float vectors. +<3> The number of results to rescore, if you want to rescore all results, set this to the same value as `k` +<4> The script to rescore the results. Script score will interact directly with the originally provided float32 vector. +<5> The weight of the original query, here we simply throw away the original score +<6> The weight of the rescore query, here we only use the rescore query + +The second way is to score per shard with the <> and <>.
Generally, this means that there will be more rescoring per shard, but this +can increase overall recall at the cost of compute. + +[source,console] +-------------------------------------------------- +POST /my-index/_search +{ + "size": 10, <1> + "query": { + "script_score": { + "query": { + "knn": { <2> + "query_vector": [0.04283529, 0.85670587, -0.51402352, 0], + "field": "my_int4_vector", + "num_candidates": 20 <3> + } + }, + "script": { + "source": "(dotProduct(params.queryVector, 'my_int4_vector') + 1.0)", <4> + "params": { + "queryVector": [0.04283529, 0.85670587, -0.51402352, 0] + } + } + } + } +} +-------------------------------------------------- +// TEST[skip: setup not provided] +<1> The number of results to return +<2> The `knn` query to perform the initial search, this is executed per-shard +<3> The number of candidates to use for the initial approximate `knn` search. This will search using the quantized vectors +and return the top 20 candidates per shard to then be scored +<4> The script to score the results. Script score will interact directly with the originally provided float32 vector. 
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_bbq_hnsw.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_bbq_hnsw.yml new file mode 100644 index 0000000000000..188c155e4a836 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_bbq_hnsw.yml @@ -0,0 +1,160 @@ +setup: + - requires: + cluster_features: "mapper.vectors.bbq" + reason: 'kNN float to better-binary quantization is required' + - do: + indices.create: + index: bbq_hnsw + body: + settings: + index: + number_of_shards: 1 + mappings: + properties: + name: + type: keyword + vector: + type: dense_vector + dims: 64 + index: true + similarity: l2_norm + index_options: + type: bbq_hnsw + another_vector: + type: dense_vector + dims: 64 + index: true + similarity: l2_norm + index_options: + type: bbq_hnsw + + - do: + index: + index: bbq_hnsw + id: "1" + body: + name: cow.jpg + vector: [300.33, -34.8988, 15.555, -200.0, 230.0, 300.33, -34.8988, 15.555, -200.0, 230.0, 300.33, -34.8988, 15.555, -200.0, 230.0, 300.33, -34.8988, 15.555, -200.0, 230.0, 300.33, -34.8988, 15.555, -200.0, 230.0, 300.33, -34.8988, 15.555, -200.0, 230.0, 300.33, -34.8988, 15.555, -200.0, 230.0, 300.33, -34.8988, 15.555, -200.0, 230.0, 300.33, -34.8988, 15.555, -200.0, 230.0, 300.33, -34.8988, 15.555, -200.0, 230.0, 300.33, -34.8988, 15.555, -200.0, 230.0, 300.33, -34.8988, 15.555, -200.0, 230.0, 300.33, -34.8988, 15.555, -200.0] + another_vector: [115.0, -1.02, 15.555, -100.0, 130.0, 115.0, -1.02, 15.555, -100.0, 130.0, 115.0, -1.02, 15.555, -100.0, 130.0, 115.0, -1.02, 15.555, -100.0, 130.0, 115.0, -1.02, 15.555, -100.0, 130.0, 115.0, -1.02, 15.555, -100.0, 130.0, 115.0, -1.02, 15.555, -100.0, 130.0, 115.0, -1.02, 15.555, -100.0, 130.0, 115.0, -1.02, 15.555, -100.0, 130.0, 115.0, -1.02, 15.555, -100.0, 130.0, 115.0, -1.02, 15.555, -100.0, 130.0, 115.0, -1.02, 15.555, -100.0, 130.0, 115.0, 
-1.02, 15.555, -100.0] + # Flush in order to provoke a merge later + - do: + indices.flush: + index: bbq_hnsw + + - do: + index: + index: bbq_hnsw + id: "2" + body: + name: moose.jpg + vector: [100.0, -13, 14.8, -156.0, -0.5, 100.0, -13, 14.8, -156.0, -0.5, 100.0, -13, 14.8, -156.0, -0.5, 100.0, -13, 14.8, -156.0, -0.5, 100.0, -13, 14.8, -156.0, -0.5, 100.0, -13, 14.8, -156.0, -0.5, 100.0, -13, 14.8, -156.0, -0.5, 100.0, -13, 14.8, -156.0, -0.5, 100.0, -13, 14.8, -156.0, -0.5, 100.0, -13, 14.8, -156.0, -0.5, 100.0, -13, 14.8, -156.0, -0.5, 100.0, -13, 14.8, -156.0, -0.5, 100.0, -13, 14.8, -156.0] + another_vector: [50.0, -1, 1, 120, -0.5, 50.0, -1, 1, 120, -0.5, 50.0, -1, 1, 120, -0.5, 50.0, -1, 1, 120, -0.5, 50.0, -1, 1, 120, -0.5, 50.0, -1, 1, 120, -0.5, 50.0, -1, 1, 120, -0.5, 50.0, -1, 1, 120, -0.5, 50.0, -1, 1, 120, -0.5, 50.0, -1, 1, 120, -0.5, 50.0, -1, 1, 120, -0.5, 50.0, -1, 1, 120, -0.5, 50.0, -1, 1, 120] + # Flush in order to provoke a merge later + - do: + indices.flush: + index: bbq_hnsw + + - do: + index: + index: bbq_hnsw + id: "3" + body: + name: rabbit.jpg + vector: [111.3, -13.0, 14.8, -156.0, 0.5, 111.3, -13.0, 14.8, -156.0, 0.5, 111.3, -13.0, 14.8, -156.0, 0.5, 111.3, -13.0, 14.8, -156.0, 0.5, 111.3, -13.0, 14.8, -156.0, 0.5, 111.3, -13.0, 14.8, -156.0, 0.5, 111.3, -13.0, 14.8, -156.0, 0.5, 111.3, -13.0, 14.8, -156.0, 0.5, 111.3, -13.0, 14.8, -156.0, 0.5, 111.3, -13.0, 14.8, -156.0, 0.5, 111.3, -13.0, 14.8, -156.0, 0.5, 111.3, -13.0, 14.8, -156.0, 0.5, 111.3, -13.0, 14.8, -156.0] + another_vector: [11.0, 0, 12, 111.0, -0.5, 11.0, 0, 12, 111.0, -0.5, 11.0, 0, 12, 111.0, -0.5, 11.0, 0, 12, 111.0, -0.5, 11.0, 0, 12, 111.0, -0.5, 11.0, 0, 12, 111.0, -0.5, 11.0, 0, 12, 111.0, -0.5, 11.0, 0, 12, 111.0, -0.5, 11.0, 0, 12, 111.0, -0.5, 11.0, 0, 12, 111.0, -0.5, 11.0, 0, 12, 111.0, -0.5, 11.0, 0, 12, 111.0, -0.5, 11.0, 0, 12, 111.0] + # Flush in order to provoke a merge later + - do: + indices.flush: + index: bbq_hnsw + + - do: + indices.forcemerge: + 
index: bbq_hnsw + max_num_segments: 1 +--- +"Test knn search": + - do: + search: + index: bbq_hnsw + body: + knn: + field: vector + query_vector: [ 90.0, -10, 14.8, -156.0, -0.5, 90.0, -10, 14.8, -156.0, -0.5, 90.0, -10, 14.8, -156.0, -0.5, 90.0, -10, 14.8, -156.0, -0.5, 90.0, -10, 14.8, -156.0, -0.5, 90.0, -10, 14.8, -156.0, -0.5, 90.0, -10, 14.8, -156.0, -0.5, 90.0, -10, 14.8, -156.0, -0.5, 90.0, -10, 14.8, -156.0, -0.5, 90.0, -10, 14.8, -156.0, -0.5, 90.0, -10, 14.8, -156.0, -0.5, 90.0, -10, 14.8, -156.0, -0.5, 90.0, -10, 14.8, -156.0] + k: 3 + num_candidates: 3 + + # Depending on how things are distributed, docs 2 and 3 might be swapped + # here we verify that are last hit is always the worst one + - match: { hits.hits.2._id: "1" } + +--- +"Test bad quantization parameters": + - do: + catch: bad_request + indices.create: + index: bad_bbq_hnsw + body: + mappings: + properties: + vector: + type: dense_vector + dims: 64 + element_type: byte + index: true + index_options: + type: bbq_hnsw + + - do: + catch: bad_request + indices.create: + index: bad_bbq_hnsw + body: + mappings: + properties: + vector: + type: dense_vector + dims: 64 + index: false + index_options: + type: bbq_hnsw +--- +"Test few dimensions fail indexing": + - do: + catch: bad_request + indices.create: + index: bad_bbq_hnsw + body: + mappings: + properties: + vector: + type: dense_vector + dims: 42 + index: true + index_options: + type: bbq_hnsw + + - do: + indices.create: + index: dynamic_dim_bbq_hnsw + body: + mappings: + properties: + vector: + type: dense_vector + index: true + similarity: l2_norm + index_options: + type: bbq_hnsw + + - do: + catch: bad_request + index: + index: dynamic_dim_bbq_hnsw + body: + vector: [1.0, 2.0, 3.0, 4.0, 5.0] + + - do: + index: + index: dynamic_dim_bbq_hnsw + body: + vector: [1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 
2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_bbq_flat.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_bbq_flat.yml new file mode 100644 index 0000000000000..ed7a8dd5df65d --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_bbq_flat.yml @@ -0,0 +1,165 @@ +setup: + - requires: + cluster_features: "mapper.vectors.bbq" + reason: 'kNN float to better-binary quantization is required' + - do: + indices.create: + index: bbq_flat + body: + settings: + index: + number_of_shards: 1 + mappings: + properties: + name: + type: keyword + vector: + type: dense_vector + dims: 64 + index: true + similarity: l2_norm + index_options: + type: bbq_flat + another_vector: + type: dense_vector + dims: 64 + index: true + similarity: l2_norm + index_options: + type: bbq_flat + + - do: + index: + index: bbq_flat + id: "1" + body: + name: cow.jpg + vector: [300.33, -34.8988, 15.555, -200.0, 230.0, 300.33, -34.8988, 15.555, -200.0, 230.0, 300.33, -34.8988, 15.555, -200.0, 230.0, 300.33, -34.8988, 15.555, -200.0, 230.0, 300.33, -34.8988, 15.555, -200.0, 230.0, 300.33, -34.8988, 15.555, -200.0, 230.0, 300.33, -34.8988, 15.555, -200.0, 230.0, 300.33, -34.8988, 15.555, -200.0, 230.0, 300.33, -34.8988, 15.555, -200.0, 230.0, 300.33, -34.8988, 15.555, -200.0, 230.0, 300.33, -34.8988, 15.555, -200.0, 230.0, 300.33, -34.8988, 15.555, -200.0, 230.0, 300.33, -34.8988, 15.555, -200.0] + another_vector: [115.0, -1.02, 15.555, -100.0, 130.0, 115.0, -1.02, 15.555, -100.0, 130.0, 115.0, -1.02, 15.555, -100.0, 130.0, 115.0, -1.02, 15.555, -100.0, 130.0, 115.0, -1.02, 15.555, -100.0, 130.0, 115.0, -1.02, 15.555, -100.0, 130.0, 115.0, -1.02, 15.555, -100.0, 130.0, 115.0, -1.02, 15.555, -100.0, 130.0, 115.0, -1.02, 15.555, 
-100.0, 130.0, 115.0, -1.02, 15.555, -100.0, 130.0, 115.0, -1.02, 15.555, -100.0, 130.0, 115.0, -1.02, 15.555, -100.0, 130.0, 115.0, -1.02, 15.555, -100.0] + # Flush in order to provoke a merge later + - do: + indices.flush: + index: bbq_flat + + - do: + index: + index: bbq_flat + id: "2" + body: + name: moose.jpg + vector: [100.0, -13, 14.8, -156.0, -0.5, 100.0, -13, 14.8, -156.0, -0.5, 100.0, -13, 14.8, -156.0, -0.5, 100.0, -13, 14.8, -156.0, -0.5, 100.0, -13, 14.8, -156.0, -0.5, 100.0, -13, 14.8, -156.0, -0.5, 100.0, -13, 14.8, -156.0, -0.5, 100.0, -13, 14.8, -156.0, -0.5, 100.0, -13, 14.8, -156.0, -0.5, 100.0, -13, 14.8, -156.0, -0.5, 100.0, -13, 14.8, -156.0, -0.5, 100.0, -13, 14.8, -156.0, -0.5, 100.0, -13, 14.8, -156.0] + another_vector: [50.0, -1, 1, 120, -0.5, 50.0, -1, 1, 120, -0.5, 50.0, -1, 1, 120, -0.5, 50.0, -1, 1, 120, -0.5, 50.0, -1, 1, 120, -0.5, 50.0, -1, 1, 120, -0.5, 50.0, -1, 1, 120, -0.5, 50.0, -1, 1, 120, -0.5, 50.0, -1, 1, 120, -0.5, 50.0, -1, 1, 120, -0.5, 50.0, -1, 1, 120, -0.5, 50.0, -1, 1, 120, -0.5, 50.0, -1, 1, 120] + # Flush in order to provoke a merge later + - do: + indices.flush: + index: bbq_flat + + - do: + index: + index: bbq_flat + id: "3" + body: + name: rabbit.jpg + vector: [111.3, -13.0, 14.8, -156.0, 0.5, 111.3, -13.0, 14.8, -156.0, 0.5, 111.3, -13.0, 14.8, -156.0, 0.5, 111.3, -13.0, 14.8, -156.0, 0.5, 111.3, -13.0, 14.8, -156.0, 0.5, 111.3, -13.0, 14.8, -156.0, 0.5, 111.3, -13.0, 14.8, -156.0, 0.5, 111.3, -13.0, 14.8, -156.0, 0.5, 111.3, -13.0, 14.8, -156.0, 0.5, 111.3, -13.0, 14.8, -156.0, 0.5, 111.3, -13.0, 14.8, -156.0, 0.5, 111.3, -13.0, 14.8, -156.0, 0.5, 111.3, -13.0, 14.8, -156.0] + another_vector: [11.0, 0, 12, 111.0, -0.5, 11.0, 0, 12, 111.0, -0.5, 11.0, 0, 12, 111.0, -0.5, 11.0, 0, 12, 111.0, -0.5, 11.0, 0, 12, 111.0, -0.5, 11.0, 0, 12, 111.0, -0.5, 11.0, 0, 12, 111.0, -0.5, 11.0, 0, 12, 111.0, -0.5, 11.0, 0, 12, 111.0, -0.5, 11.0, 0, 12, 111.0, -0.5, 11.0, 0, 12, 111.0, -0.5, 11.0, 0, 12, 111.0, -0.5, 11.0, 0, 
12, 111.0] + # Flush in order to provoke a merge later + - do: + indices.flush: + index: bbq_flat + + - do: + indices.forcemerge: + index: bbq_flat + max_num_segments: 1 +--- +"Test knn search": + - do: + search: + index: bbq_flat + body: + knn: + field: vector + query_vector: [ 90.0, -10, 14.8, -156.0, -0.5, 90.0, -10, 14.8, -156.0, -0.5, 90.0, -10, 14.8, -156.0, -0.5, 90.0, -10, 14.8, -156.0, -0.5, 90.0, -10, 14.8, -156.0, -0.5, 90.0, -10, 14.8, -156.0, -0.5, 90.0, -10, 14.8, -156.0, -0.5, 90.0, -10, 14.8, -156.0, -0.5, 90.0, -10, 14.8, -156.0, -0.5, 90.0, -10, 14.8, -156.0, -0.5, 90.0, -10, 14.8, -156.0, -0.5, 90.0, -10, 14.8, -156.0, -0.5, 90.0, -10, 14.8, -156.0] + k: 3 + num_candidates: 3 + + # Depending on how things are distributed, docs 2 and 3 might be swapped + # here we verify that are last hit is always the worst one + - match: { hits.hits.2._id: "1" } +--- +"Test bad parameters": + - do: + catch: bad_request + indices.create: + index: bad_bbq_flat + body: + mappings: + properties: + vector: + type: dense_vector + dims: 64 + index: true + index_options: + type: bbq_flat + m: 42 + + - do: + catch: bad_request + indices.create: + index: bad_bbq_flat + body: + mappings: + properties: + vector: + type: dense_vector + dims: 64 + element_type: byte + index: true + index_options: + type: bbq_flat +--- +"Test few dimensions fail indexing": + # verify index creation fails + - do: + catch: bad_request + indices.create: + index: bad_bbq_flat + body: + mappings: + properties: + vector: + type: dense_vector + dims: 42 + index: true + similarity: l2_norm + index_options: + type: bbq_flat + + # verify dynamic dimension fails + - do: + indices.create: + index: dynamic_dim_bbq_flat + body: + mappings: + properties: + vector: + type: dense_vector + index: true + similarity: l2_norm + index_options: + type: bbq_flat + + # verify index fails for odd dim vector + - do: + catch: bad_request + index: + index: dynamic_dim_bbq_flat + body: + vector: [1.0, 2.0, 3.0, 4.0, 5.0] + 
+ # verify that we can index an even dim vector after the odd dim vector failure + - do: + index: + index: dynamic_dim_bbq_flat + body: + vector: [1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0] diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 70b748c86ec96..414a6c6ba66a6 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -7,7 +7,6 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -import org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat; import org.elasticsearch.plugins.internal.RestExtension; /** The Elasticsearch Server Module. */ @@ -445,14 +444,16 @@ org.elasticsearch.index.codec.bloomfilter.ES85BloomFilterPostingsFormat, org.elasticsearch.index.codec.bloomfilter.ES87BloomFilterPostingsFormat, org.elasticsearch.index.codec.postings.ES812PostingsFormat; - provides org.apache.lucene.codecs.DocValuesFormat with ES87TSDBDocValuesFormat; + provides org.apache.lucene.codecs.DocValuesFormat with org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat; provides org.apache.lucene.codecs.KnnVectorsFormat with org.elasticsearch.index.codec.vectors.ES813FlatVectorFormat, org.elasticsearch.index.codec.vectors.ES813Int8FlatVectorFormat, org.elasticsearch.index.codec.vectors.ES814HnswScalarQuantizedVectorsFormat, org.elasticsearch.index.codec.vectors.ES815HnswBitVectorsFormat, - org.elasticsearch.index.codec.vectors.ES815BitFlatVectorFormat; + org.elasticsearch.index.codec.vectors.ES815BitFlatVectorFormat, + org.elasticsearch.index.codec.vectors.ES816BinaryQuantizedVectorsFormat, + org.elasticsearch.index.codec.vectors.ES816HnswBinaryQuantizedVectorsFormat; provides 
org.apache.lucene.codecs.Codec with diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/BQVectorUtils.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/BQVectorUtils.java index 3d2acb533e26d..5201e57179cc7 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/BQVectorUtils.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/BQVectorUtils.java @@ -27,6 +27,14 @@ public class BQVectorUtils { private static final float EPSILON = 1e-4f; + public static double sqrtNewtonRaphson(double x, double curr, double prev) { + return (curr == prev) ? curr : sqrtNewtonRaphson(x, 0.5 * (curr + x / curr), curr); + } + + public static double constSqrt(double x) { + return x >= 0 && Double.isInfinite(x) == false ? sqrtNewtonRaphson(x, x, 0) : Double.NaN; + } + public static boolean isUnitVector(float[] v) { double l1norm = VectorUtil.dotProduct(v, v); return Math.abs(l1norm - 1.0d) <= EPSILON; diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java index 78fa282709098..f4d22edc6dfdb 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java @@ -153,6 +153,7 @@ public static class BinarizedRandomVectorScorer extends RandomVectorScorer.Abstr private final VectorSimilarityFunction similarityFunction; private final float sqrtDimensions; + private final float maxX1; public BinarizedRandomVectorScorer( BinaryQueryVector queryVectors, @@ -164,24 +165,12 @@ public BinarizedRandomVectorScorer( this.targetVectors = targetVectors; this.similarityFunction = similarityFunction; // FIXME: precompute this once? 
- this.sqrtDimensions = (float) Utils.constSqrt(targetVectors.dimension()); - } - - // FIXME: utils class; pull this out - private static class Utils { - public static double sqrtNewtonRaphson(double x, double curr, double prev) { - return (curr == prev) ? curr : sqrtNewtonRaphson(x, 0.5 * (curr + x / curr), curr); - } - - public static double constSqrt(double x) { - return x >= 0 && Double.isInfinite(x) == false ? sqrtNewtonRaphson(x, x, 0) : Double.NaN; - } + this.sqrtDimensions = targetVectors.sqrtDimensions(); + this.maxX1 = targetVectors.maxX1(); } @Override public float score(int targetOrd) throws IOException { - // FIXME: implement fastscan in the future? - byte[] quantizedQuery = queryVector.vector(); int quantizedSum = queryVector.factors().quantizedSum(); float lower = queryVector.factors().lower(); @@ -218,17 +207,13 @@ public float score(int targetOrd) throws IOException { } assert Float.isFinite(dist); - // TODO: this is useful for mandatory rescoring by accounting for bias - // However, for just oversampling & rescoring, it isn't strictly useful. - // We should consider utilizing this bias in the future to determine which vectors need to - // be rescored - // float ooqSqr = (float) Math.pow(ooq, 2); - // float errorBound = (float) (normVmC * normOC * (maxX1 * Math.sqrt((1 - ooqSqr) / ooqSqr))); - // float score = dist - errorBound; + float ooqSqr = (float) Math.pow(ooq, 2); + float errorBound = (float) (vmC * normOC * (maxX1 * Math.sqrt((1 - ooqSqr) / ooqSqr))); + float score = Float.isFinite(errorBound) ? 
dist - errorBound : dist; if (similarityFunction == MAXIMUM_INNER_PRODUCT) { - return VectorUtil.scaleMaxInnerProductScore(dist); + return VectorUtil.scaleMaxInnerProductScore(score); } - return Math.max((1f + dist) / 2f, 0); + return Math.max((1f + score) / 2f, 0); } private float euclideanScore( @@ -256,17 +241,13 @@ private float euclideanScore( long qcDist = ESVectorUtil.ipByteBinByte(quantizedQuery, binaryCode); float score = sqrX + distanceToCentroid + factorPPC * lower + (qcDist * 2 - quantizedSum) * factorIP * width; - // TODO: this is useful for mandatory rescoring by accounting for bias - // However, for just oversampling & rescoring, it isn't strictly useful. - // We should consider utilizing this bias in the future to determine which vectors need to - // be rescored - // float projectionDist = (float) Math.sqrt(xX0 * xX0 - targetDistToC * targetDistToC); - // float error = 2.0f * maxX1 * projectionDist; - // float y = (float) Math.sqrt(distanceToCentroid); - // float errorBound = y * error; - // if (Float.isFinite(errorBound)) { - // score = dist + errorBound; - // } + float projectionDist = (float) Math.sqrt(xX0 * xX0 - targetDistToC * targetDistToC); + float error = 2.0f * maxX1 * projectionDist; + float y = (float) Math.sqrt(distanceToCentroid); + float errorBound = y * error; + if (Float.isFinite(errorBound)) { + score = score + errorBound; + } return Math.max(1 / (1f + score), 0); } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java index 2a3c3aca60e54..628480e273b34 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java @@ -34,6 +34,7 @@ import java.nio.ByteBuffer; import static org.apache.lucene.index.VectorSimilarityFunction.EUCLIDEAN; +import 
static org.elasticsearch.index.codec.vectors.BQVectorUtils.constSqrt; /** Binarized vector values loaded from off-heap */ public abstract class OffHeapBinarizedVectorValues extends BinarizedByteVectorValues implements RandomAccessBinarizedByteVectorValues { @@ -53,6 +54,9 @@ public abstract class OffHeapBinarizedVectorValues extends BinarizedByteVectorVa protected final BinaryQuantizer binaryQuantizer; protected final float[] centroid; protected final float centroidDp; + private final int discretizedDimensions; + private final float maxX1; + private final float sqrtDimensions; private final int correctionsCount; OffHeapBinarizedVectorValues( @@ -79,6 +83,9 @@ public abstract class OffHeapBinarizedVectorValues extends BinarizedByteVectorVa this.byteBuffer = ByteBuffer.allocate(numBytes); this.binaryValue = byteBuffer.array(); this.binaryQuantizer = quantizer; + this.discretizedDimensions = BQVectorUtils.discretize(dimension, 64); + this.sqrtDimensions = (float) constSqrt(dimension); + this.maxX1 = (float) (1.9 / constSqrt(discretizedDimensions - 1.0)); } @Override @@ -103,6 +110,21 @@ public byte[] vectorValue(int targetOrd) throws IOException { return binaryValue; } + @Override + public int discretizedDimensions() { + return discretizedDimensions; + } + + @Override + public float sqrtDimensions() { + return sqrtDimensions; + } + + @Override + public float maxX1() { + return maxX1; + } + @Override public float getCentroidDP() { return centroidDp; diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/RandomAccessBinarizedByteVectorValues.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/RandomAccessBinarizedByteVectorValues.java index 2417353373ba5..5163baf617c29 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/RandomAccessBinarizedByteVectorValues.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/RandomAccessBinarizedByteVectorValues.java @@ -24,6 +24,8 @@ import java.io.IOException; 
+import static org.elasticsearch.index.codec.vectors.BQVectorUtils.constSqrt; + /** * Copied from Lucene, replace with Lucene's implementation sometime after Lucene 10 */ @@ -54,6 +56,18 @@ public interface RandomAccessBinarizedByteVectorValues extends RandomAccessVecto */ BinaryQuantizer getQuantizer(); + default int discretizedDimensions() { + return BQVectorUtils.discretize(dimension(), 64); + } + + default float sqrtDimensions() { + return (float) constSqrt(dimension()); + } + + default float maxX1() { + return (float) (1.9 / constSqrt(discretizedDimensions() - 1.0)); + } + /** * @return coarse grained centroids for the vectors */ diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java index f3744c974e9e3..dbaa1f3a04ab9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java @@ -9,6 +9,7 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexSettings; @@ -28,7 +29,7 @@ public class MapperFeatures implements FeatureSpecification { @Override public Set getFeatures() { - return Set.of( + Set features = Set.of( BWC_WORKAROUND_9_0, IgnoredSourceFieldMapper.TRACK_IGNORED_SOURCE, PassThroughObjectMapper.PASS_THROUGH_PRIORITY, @@ -54,6 +55,11 @@ public Set getFeatures() { TimeSeriesRoutingHashFieldMapper.TS_ROUTING_HASH_FIELD_PARSES_BYTES_REF, FlattenedFieldMapper.IGNORE_ABOVE_WITH_ARRAYS_SUPPORT ); + // BBQ is currently behind a feature flag for testing + if (DenseVectorFieldMapper.BBQ_FEATURE_FLAG.isEnabled()) { + return Sets.union(features, Set.of(DenseVectorFieldMapper.BBQ_FORMAT)); + } + return features; } @Override diff --git 
a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index c3959bd442a1a..52ff7a3014d1d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -36,6 +36,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.VectorUtil; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.util.FeatureFlag; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexVersion; @@ -45,6 +46,8 @@ import org.elasticsearch.index.codec.vectors.ES814HnswScalarQuantizedVectorsFormat; import org.elasticsearch.index.codec.vectors.ES815BitFlatVectorFormat; import org.elasticsearch.index.codec.vectors.ES815HnswBitVectorsFormat; +import org.elasticsearch.index.codec.vectors.ES816BinaryQuantizedVectorsFormat; +import org.elasticsearch.index.codec.vectors.ES816HnswBinaryQuantizedVectorsFormat; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.mapper.ArraySourceValueFetcher; @@ -98,6 +101,7 @@ public class DenseVectorFieldMapper extends FieldMapper { public static final String COSINE_MAGNITUDE_FIELD_SUFFIX = "._magnitude"; private static final float EPS = 1e-3f; + static final int BBQ_MIN_DIMS = 64; public static boolean isNotUnitVector(float magnitude) { return Math.abs(magnitude - 1.0f) > EPS; @@ -105,6 +109,8 @@ public static boolean isNotUnitVector(float magnitude) { public static final NodeFeature INT4_QUANTIZATION = new NodeFeature("mapper.vectors.int4_quantization"); public static final NodeFeature BIT_VECTORS = new NodeFeature("mapper.vectors.bit_vectors"); + public static final NodeFeature 
BBQ_FORMAT = new NodeFeature("mapper.vectors.bbq"); + public static final FeatureFlag BBQ_FEATURE_FLAG = new FeatureFlag("bbq_index_format"); public static final IndexVersion MAGNITUDE_STORED_INDEX_VERSION = IndexVersions.V_7_5_0; public static final IndexVersion INDEXED_BY_DEFAULT_INDEX_VERSION = IndexVersions.FIRST_DETACHED_INDEX_VERSION; @@ -1162,7 +1168,7 @@ final void validateElementType(ElementType elementType) { abstract boolean updatableTo(IndexOptions update); - public final void validateDimension(int dim) { + public void validateDimension(int dim) { if (type.supportsDimension(dim)) { return; } @@ -1342,6 +1348,50 @@ public boolean supportsElementType(ElementType elementType) { public boolean supportsDimension(int dims) { return dims % 2 == 0; } + }, + BBQ_HNSW("bbq_hnsw") { + @Override + public IndexOptions parseIndexOptions(String fieldName, Map indexOptionsMap) { + Object mNode = indexOptionsMap.remove("m"); + Object efConstructionNode = indexOptionsMap.remove("ef_construction"); + if (mNode == null) { + mNode = Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN; + } + if (efConstructionNode == null) { + efConstructionNode = Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH; + } + int m = XContentMapValues.nodeIntegerValue(mNode); + int efConstruction = XContentMapValues.nodeIntegerValue(efConstructionNode); + MappingParser.checkNoRemainingFields(fieldName, indexOptionsMap); + return new BBQHnswIndexOptions(m, efConstruction); + } + + @Override + public boolean supportsElementType(ElementType elementType) { + return elementType == ElementType.FLOAT; + } + + @Override + public boolean supportsDimension(int dims) { + return dims >= BBQ_MIN_DIMS; + } + }, + BBQ_FLAT("bbq_flat") { + @Override + public IndexOptions parseIndexOptions(String fieldName, Map indexOptionsMap) { + MappingParser.checkNoRemainingFields(fieldName, indexOptionsMap); + return new BBQFlatIndexOptions(); + } + + @Override + public boolean supportsElementType(ElementType elementType) { + return 
elementType == ElementType.FLOAT; + } + + @Override + public boolean supportsDimension(int dims) { + return dims >= BBQ_MIN_DIMS; + } }; static Optional fromString(String type) { @@ -1707,6 +1757,102 @@ public String toString() { } } + static class BBQHnswIndexOptions extends IndexOptions { + private final int m; + private final int efConstruction; + + BBQHnswIndexOptions(int m, int efConstruction) { + super(VectorIndexType.BBQ_HNSW); + this.m = m; + this.efConstruction = efConstruction; + } + + @Override + KnnVectorsFormat getVectorsFormat(ElementType elementType) { + assert elementType == ElementType.FLOAT; + return new ES816HnswBinaryQuantizedVectorsFormat(m, efConstruction); + } + + @Override + boolean updatableTo(IndexOptions update) { + return update.type.equals(this.type); + } + + @Override + boolean doEquals(IndexOptions other) { + BBQHnswIndexOptions that = (BBQHnswIndexOptions) other; + return m == that.m && efConstruction == that.efConstruction; + } + + @Override + int doHashCode() { + return Objects.hash(m, efConstruction); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("type", type); + builder.field("m", m); + builder.field("ef_construction", efConstruction); + builder.endObject(); + return builder; + } + + @Override + public void validateDimension(int dim) { + if (type.supportsDimension(dim)) { + return; + } + throw new IllegalArgumentException(type.name + " does not support dimensions fewer than " + BBQ_MIN_DIMS + "; provided=" + dim); + } + } + + static class BBQFlatIndexOptions extends IndexOptions { + private final int CLASS_NAME_HASH = this.getClass().getName().hashCode(); + + BBQFlatIndexOptions() { + super(VectorIndexType.BBQ_FLAT); + } + + @Override + KnnVectorsFormat getVectorsFormat(ElementType elementType) { + assert elementType == ElementType.FLOAT; + return new ES816BinaryQuantizedVectorsFormat(); + } + + @Override + boolean 
updatableTo(IndexOptions update) { + return update.type.equals(this.type); + } + + @Override + boolean doEquals(IndexOptions other) { + return other instanceof BBQFlatIndexOptions; + } + + @Override + int doHashCode() { + return CLASS_NAME_HASH; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("type", type); + builder.endObject(); + return builder; + } + + @Override + public void validateDimension(int dim) { + if (type.supportsDimension(dim)) { + return; + } + throw new IllegalArgumentException(type.name + " does not support dimensions fewer than " + BBQ_MIN_DIMS + "; provided=" + dim); + } + } + public static final TypeParser PARSER = new TypeParser( (n, c) -> new Builder(n, c.indexVersionCreated()), notInMultiFields(CONTENT_TYPE) @@ -2108,9 +2254,15 @@ private static IndexOptions parseIndexOptions(String fieldName, Object propNode) throw new MapperParsingException("[index_options] requires field [type] to be configured"); } String type = XContentMapValues.nodeStringValue(typeNode); - return VectorIndexType.fromString(type) - .orElseThrow(() -> new MapperParsingException("Unknown vector index options type [" + type + "] for field [" + fieldName + "]")) - .parseIndexOptions(fieldName, indexOptionsMap); + Optional vectorIndexType = VectorIndexType.fromString(type); + if (vectorIndexType.isEmpty()) { + throw new MapperParsingException("Unknown vector index options type [" + type + "] for field [" + fieldName + "]"); + } + VectorIndexType parsedType = vectorIndexType.get(); + if ((parsedType == VectorIndexType.BBQ_FLAT || parsedType == VectorIndexType.BBQ_HNSW) && BBQ_FEATURE_FLAG.isEnabled() == false) { + throw new MapperParsingException("Unknown vector index options type [" + type + "] for field [" + fieldName + "]"); + } + return parsedType.parseIndexOptions(fieldName, indexOptionsMap); } /** diff --git 
a/server/src/main/java/org/elasticsearch/index/store/LuceneFilesExtensions.java b/server/src/main/java/org/elasticsearch/index/store/LuceneFilesExtensions.java index 186aff230b8d0..387385ea2d6a4 100644 --- a/server/src/main/java/org/elasticsearch/index/store/LuceneFilesExtensions.java +++ b/server/src/main/java/org/elasticsearch/index/store/LuceneFilesExtensions.java @@ -81,7 +81,9 @@ public enum LuceneFilesExtensions { VEM("vem", "Vector Metadata", true, false), VEMF("vemf", "Flat Vector Metadata", true, false), VEMQ("vemq", "Scalar Quantized Vector Metadata", true, false), - VEQ("veq", "Scalar Quantized Vector Data", false, true); + VEQ("veq", "Scalar Quantized Vector Data", false, true), + VEMB("vemb", "Binarized Vector Metadata", true, false), + VEB("veb", "Binarized Vector Data", false, true); /** * Allow plugin developers of custom codecs to opt out of the assertion in {@link #fromExtension} diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java index 4ac66a9f63a3f..04d4ef2079b99 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java @@ -1741,6 +1741,6 @@ public int dimension() { similarityFunction ); - assertEquals(132.30249f, scorer.score(0), 0.0001f); + assertEquals(129.64046f, scorer.score(0), 0.0001f); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index 492c76924c729..cd7ff54ffc938 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ 
-63,6 +63,7 @@ import static org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH; import static org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN; +import static org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.BBQ_FEATURE_FLAG; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -1227,13 +1228,18 @@ public void testInvalidParameters() { e.getMessage(), containsString("Failed to parse mapping: Mapping definition for [field] has unsupported parameters: [foo : {}]") ); - for (String quantizationKind : new String[] { "int4_hnsw", "int8_hnsw", "int8_flat", "int4_flat" }) { + List floatOnlyQuantizations = new ArrayList<>(Arrays.asList("int4_hnsw", "int8_hnsw", "int8_flat", "int4_flat")); + if (BBQ_FEATURE_FLAG.isEnabled()) { + floatOnlyQuantizations.add("bbq_hnsw"); + floatOnlyQuantizations.add("bbq_flat"); + } + for (String quantizationKind : floatOnlyQuantizations) { e = expectThrows( MapperParsingException.class, () -> createDocumentMapper( fieldMapping( b -> b.field("type", "dense_vector") - .field("dims", dims) + .field("dims", 64) .field("element_type", "byte") .field("similarity", "l2_norm") .field("index", true) @@ -1939,6 +1945,62 @@ public void testKnnQuantizedHNSWVectorsFormat() throws IOException { assertEquals(expectedString, knnVectorsFormat.toString()); } + public void testKnnBBQHNSWVectorsFormat() throws IOException { + assumeTrue("BBQ vectors are not supported in the current version", BBQ_FEATURE_FLAG.isEnabled()); + final int m = randomIntBetween(1, DEFAULT_MAX_CONN + 10); + final int efConstruction = randomIntBetween(1, DEFAULT_BEAM_WIDTH + 10); + final int dims = randomIntBetween(64, 4096); + MapperService mapperService = createMapperService(fieldMapping(b -> { + b.field("type", "dense_vector"); + b.field("dims", dims); + b.field("index", true); + b.field("similarity", "dot_product"); + 
b.startObject("index_options"); + b.field("type", "bbq_hnsw"); + b.field("m", m); + b.field("ef_construction", efConstruction); + b.endObject(); + })); + CodecService codecService = new CodecService(mapperService, BigArrays.NON_RECYCLING_INSTANCE); + Codec codec = codecService.codec("default"); + KnnVectorsFormat knnVectorsFormat; + if (CodecService.ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) { + assertThat(codec, instanceOf(PerFieldMapperCodec.class)); + knnVectorsFormat = ((PerFieldMapperCodec) codec).getKnnVectorsFormatForField("field"); + } else { + if (codec instanceof CodecService.DeduplicateFieldInfosCodec deduplicateFieldInfosCodec) { + codec = deduplicateFieldInfosCodec.delegate(); + } + assertThat(codec, instanceOf(LegacyPerFieldMapperCodec.class)); + knnVectorsFormat = ((LegacyPerFieldMapperCodec) codec).getKnnVectorsFormatForField("field"); + } + String expectedString = "ES816HnswBinaryQuantizedVectorsFormat(name=ES816HnswBinaryQuantizedVectorsFormat, maxConn=" + + m + + ", beamWidth=" + + efConstruction + + ", flatVectorFormat=ES816BinaryQuantizedVectorsFormat(" + + "name=ES816BinaryQuantizedVectorsFormat, " + + "flatVectorScorer=ES816BinaryFlatVectorsScorer(nonQuantizedDelegate=DefaultFlatVectorScorer())))"; + assertEquals(expectedString, knnVectorsFormat.toString()); + } + + public void testInvalidVectorDimensionsBBQ() { + assumeTrue("BBQ vectors are not supported in the current version", BBQ_FEATURE_FLAG.isEnabled()); + for (String quantizedFlatFormat : new String[] { "bbq_hnsw", "bbq_flat" }) { + MapperParsingException e = expectThrows(MapperParsingException.class, () -> createDocumentMapper(fieldMapping(b -> { + b.field("type", "dense_vector"); + b.field("dims", randomIntBetween(1, 63)); + b.field("element_type", "float"); + b.field("index", true); + b.field("similarity", "dot_product"); + b.startObject("index_options"); + b.field("type", quantizedFlatFormat); + b.endObject(); + }))); + assertThat(e.getMessage(), containsString("does not support 
dimensions fewer than 64")); + } + } + public void testKnnHalfByteQuantizedHNSWVectorsFormat() throws IOException { final int m = randomIntBetween(1, DEFAULT_MAX_CONN + 10); final int efConstruction = randomIntBetween(1, DEFAULT_BEAM_WIDTH + 10); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java index 23864777db961..6433cf2f1c0d4 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java @@ -29,6 +29,7 @@ import java.util.List; import java.util.Set; +import static org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.BBQ_MIN_DIMS; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; @@ -61,7 +62,9 @@ private DenseVectorFieldMapper.IndexOptions randomIndexOptionsAll() { ), new DenseVectorFieldMapper.FlatIndexOptions(), new DenseVectorFieldMapper.Int8FlatIndexOptions(randomFrom((Float) null, 0f, (float) randomDoubleBetween(0.9, 1.0, true))), - new DenseVectorFieldMapper.Int4FlatIndexOptions(randomFrom((Float) null, 0f, (float) randomDoubleBetween(0.9, 1.0, true))) + new DenseVectorFieldMapper.Int4FlatIndexOptions(randomFrom((Float) null, 0f, (float) randomDoubleBetween(0.9, 1.0, true))), + new DenseVectorFieldMapper.BBQHnswIndexOptions(randomIntBetween(1, 100), randomIntBetween(1, 10_000)), + new DenseVectorFieldMapper.BBQFlatIndexOptions() ); } @@ -70,7 +73,7 @@ private DenseVectorFieldType createFloatFieldType() { "f", IndexVersion.current(), DenseVectorFieldMapper.ElementType.FLOAT, - 6, + BBQ_MIN_DIMS, indexed, VectorSimilarity.COSINE, indexed ? 
randomIndexOptionsAll() : null, @@ -147,7 +150,7 @@ public void testFetchSourceValue() throws IOException { public void testCreateNestedKnnQuery() { BitSetProducer producer = context -> null; - int dims = randomIntBetween(2, 2048); + int dims = randomIntBetween(BBQ_MIN_DIMS, 2048); if (dims % 2 != 0) { dims++; } @@ -197,7 +200,7 @@ public void testCreateNestedKnnQuery() { } public void testExactKnnQuery() { - int dims = randomIntBetween(2, 2048); + int dims = randomIntBetween(BBQ_MIN_DIMS, 2048); if (dims % 2 != 0) { dims++; } @@ -260,15 +263,19 @@ public void testFloatCreateKnnQuery() { "f", IndexVersion.current(), DenseVectorFieldMapper.ElementType.FLOAT, - 4, + BBQ_MIN_DIMS, true, VectorSimilarity.DOT_PRODUCT, randomIndexOptionsAll(), Collections.emptyMap() ); + float[] queryVector = new float[BBQ_MIN_DIMS]; + for (int i = 0; i < BBQ_MIN_DIMS; i++) { + queryVector[i] = i; + } e = expectThrows( IllegalArgumentException.class, - () -> dotProductField.createKnnQuery(VectorData.fromFloats(new float[] { 0.3f, 0.1f, 1.0f, 0.0f }), 10, 10, null, null, null) + () -> dotProductField.createKnnQuery(VectorData.fromFloats(queryVector), 10, 10, null, null, null) ); assertThat(e.getMessage(), containsString("The [dot_product] similarity can only be used with unit-length vectors.")); @@ -276,7 +283,7 @@ public void testFloatCreateKnnQuery() { "f", IndexVersion.current(), DenseVectorFieldMapper.ElementType.FLOAT, - 4, + BBQ_MIN_DIMS, true, VectorSimilarity.COSINE, randomIndexOptionsAll(), @@ -284,7 +291,7 @@ public void testFloatCreateKnnQuery() { ); e = expectThrows( IllegalArgumentException.class, - () -> cosineField.createKnnQuery(VectorData.fromFloats(new float[] { 0.0f, 0.0f, 0.0f, 0.0f }), 10, 10, null, null, null) + () -> cosineField.createKnnQuery(VectorData.fromFloats(new float[BBQ_MIN_DIMS]), 10, 10, null, null, null) ); assertThat(e.getMessage(), containsString("The [cosine] similarity does not support vectors with zero magnitude.")); } From 
025f6bb743460040f371af2b348c248ce62580d8 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Tue, 15 Oct 2024 13:46:27 +1100 Subject: [PATCH 084/449] Add IndicesMetrics instead of IndicesService to toClose (#114782) The same line already exists in [L543](https://github.com/ywangd/elasticsearch/blob/9f4a7927bdc366f8ca98c4652ac7d1102d9430f5/server/src/main/java/org/elasticsearch/node/Node.java#L543). It should have no practical impact since AbstractLifecycleComponent#close short-circuits if its lifecycle is already closed. The original code meant to close IndicesMetrics. This PR adds it. Relates: #113737 --- server/src/main/java/org/elasticsearch/node/Node.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 5024cc5468866..32a65302922a8 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -561,7 +561,7 @@ public synchronized void close() throws IOException { toClose.add(() -> stopWatch.stop().start("transport")); toClose.add(injector.getInstance(TransportService.class)); toClose.add(injector.getInstance(NodeMetrics.class)); - toClose.add(injector.getInstance(IndicesService.class)); + toClose.add(injector.getInstance(IndicesMetrics.class)); if (ReadinessService.enabled(environment)) { toClose.add(injector.getInstance(ReadinessService.class)); } From 9f2fbf3b6fc26c8afe5aa8ae466f6f9bed1285a3 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 15 Oct 2024 15:42:15 +1100 Subject: [PATCH 085/449] Mute org.elasticsearch.test.rest.ClientYamlTestSuiteIT org.elasticsearch.test.rest.ClientYamlTestSuiteIT #114787 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index b7faa7d6e0182..19ae02b0f8d36 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -411,6 
+411,8 @@ tests: - class: org.elasticsearch.xpack.enrich.EnrichIT method: testDeleteExistingPipeline issue: https://github.com/elastic/elasticsearch/issues/114775 +- class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT + issue: https://github.com/elastic/elasticsearch/issues/114787 # Examples: # From dd64342bae879bb43f06fae42121ab7cfcd8e8f0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 15 Oct 2024 16:07:34 +1100 Subject: [PATCH 086/449] Mute org.elasticsearch.xpack.inference.rest.ServerSentEventsRestActionListenerTests testNoStream #114788 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 19ae02b0f8d36..4c7278e4d9ce7 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -413,6 +413,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/114775 - class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT issue: https://github.com/elastic/elasticsearch/issues/114787 +- class: org.elasticsearch.xpack.inference.rest.ServerSentEventsRestActionListenerTests + method: testNoStream + issue: https://github.com/elastic/elasticsearch/issues/114788 # Examples: # From a41e897969b4881e02d9bb4516abb9d77c1296da Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 15 Oct 2024 16:39:20 +1100 Subject: [PATCH 087/449] Mute org.elasticsearch.xpack.eql.EqlRestValidationIT testAllowNoIndicesOption #114789 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 4c7278e4d9ce7..24fe8479f2aa3 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -416,6 +416,9 @@ tests: - class: org.elasticsearch.xpack.inference.rest.ServerSentEventsRestActionListenerTests method: testNoStream issue: https://github.com/elastic/elasticsearch/issues/114788 +- class: org.elasticsearch.xpack.eql.EqlRestValidationIT + method: 
testAllowNoIndicesOption + issue: https://github.com/elastic/elasticsearch/issues/114789 # Examples: # From e943ceb901533694aa2f635679b88a836a9c72bb Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 15 Oct 2024 16:41:40 +1100 Subject: [PATCH 088/449] Mute org.elasticsearch.xpack.eql.EqlStatsIT testEqlRestUsage #114790 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 24fe8479f2aa3..7222255fbd7a4 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -419,6 +419,9 @@ tests: - class: org.elasticsearch.xpack.eql.EqlRestValidationIT method: testAllowNoIndicesOption issue: https://github.com/elastic/elasticsearch/issues/114789 +- class: org.elasticsearch.xpack.eql.EqlStatsIT + method: testEqlRestUsage + issue: https://github.com/elastic/elasticsearch/issues/114790 # Examples: # From 8446cc713bf5f2a9d2d515092878b40b8a1aaae6 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Tue, 15 Oct 2024 08:05:23 +0200 Subject: [PATCH 089/449] ESQL: Add skips to tests that were added retroactively (#114727) Skip some csv tests that cannot be used in bwc tests before 8.13/8.14. 
--- .../esql/qa/testFixtures/src/main/resources/date.csv-spec | 4 ++-- .../esql/qa/testFixtures/src/main/resources/topN.csv-spec | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 36035c48f182c..237c6a9af197f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -367,7 +367,7 @@ date1:date | dd_ms:integer 2023-12-02T11:00:00.000Z | 1000 ; -evalDateDiffMonthAsWhole0Months +evalDateDiffMonthAsWhole0Months#[skip:-8.14.1, reason:omitting millis/timezone not allowed before 8.14] ROW from=TO_DATETIME("2023-12-31T23:59:59.999Z"), to=TO_DATETIME("2024-01-01T00:00:00") | EVAL msecs=DATE_DIFF("milliseconds", from, to), months=DATE_DIFF("month", from, to) @@ -378,7 +378,7 @@ ROW from=TO_DATETIME("2023-12-31T23:59:59.999Z"), to=TO_DATETIME("2024-01-01T00: ; -evalDateDiffMonthAsWhole1Month +evalDateDiffMonthAsWhole1Month#[skip:-8.14.1, reason:omitting millis/timezone not allowed before 8.14] ROW from=TO_DATETIME("2023-12-31T23:59:59.999Z"), to=TO_DATETIME("2024-02-01T00:00:00") | EVAL secs=DATE_DIFF("seconds", from, to), months=DATE_DIFF("month", from, to) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/topN.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/topN.csv-spec index 3d4d890546050..e7bf953f5e08d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/topN.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/topN.csv-spec @@ -135,7 +135,7 @@ null |Swan |-8.46 |-8.46 Sanjiv |Zschoche |[-7.67, -3.25] |[-3.25, -7.67] |[-3, -8] |10053 ; -sortingOnSwappedFields +sortingOnSwappedFields#[skip:-8.13.3, reason:fixed in 8.13] FROM employees | EVAL name = last_name, last_name = first_name, first_name = name | WHERE first_name > "B" AND last_name IS NOT NULL 
@@ -157,7 +157,7 @@ Brattka | Charlene | Brattka Bridgland | Patricio | Bridgland ; -sortingOnSwappedFieldsNoKeep +sortingOnSwappedFieldsNoKeep#[skip:-8.13.3, reason:fixed in 8.13] // Note that this test requires all fields to be returned in order to test a specific code path in physical planning FROM employees | EVAL name = first_name, first_name = last_name, last_name = name From 7ad1a0c39c312d97103589fd42ad5836a72bc666 Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Tue, 15 Oct 2024 08:07:07 +0200 Subject: [PATCH 090/449] Remove snapshot build restriction for match and qstr functions (#114482) --- docs/changelog/114482.yaml | 5 + .../functions/kibana/definition/match.json | 2 +- .../functions/kibana/definition/qstr.json | 2 +- .../xpack/esql/plugin/QueryStringIT.java | 9 - .../esql/src/main/antlr/EsqlBaseLexer.g4 | 2 +- .../esql/src/main/antlr/EsqlBaseLexer.tokens | 3 +- .../esql/src/main/antlr/EsqlBaseParser.g4 | 4 +- .../esql/src/main/antlr/EsqlBaseParser.tokens | 3 +- .../xpack/esql/action/EsqlCapabilities.java | 4 +- .../function/EsqlFunctionRegistry.java | 9 +- .../function/fulltext/FullTextFunction.java | 11 +- .../xpack/esql/parser/EsqlBaseLexer.interp | 8 +- .../xpack/esql/parser/EsqlBaseLexer.java | 977 +++++++++--------- .../xpack/esql/parser/EsqlBaseParser.interp | 6 +- .../xpack/esql/parser/EsqlBaseParser.java | 910 ++++++++-------- .../xpack/esql/parser/ExpressionBuilder.java | 4 +- .../xpack/esql/analysis/VerifierTests.java | 26 - .../function/fulltext/MatchTests.java | 7 - .../function/fulltext/QueryStringTests.java | 7 - .../LocalPhysicalPlanOptimizerTests.java | 11 - .../optimizer/LogicalPlanOptimizerTests.java | 5 - 21 files changed, 966 insertions(+), 1049 deletions(-) create mode 100644 docs/changelog/114482.yaml diff --git a/docs/changelog/114482.yaml b/docs/changelog/114482.yaml new file mode 100644 index 0000000000000..a5e2e981f7adc --- /dev/null +++ b/docs/changelog/114482.yaml @@ -0,0 
+1,5 @@ +pr: 114482 +summary: Remove snapshot build restriction for match and qstr functions +area: ES|QL +type: feature +issues: [] diff --git a/docs/reference/esql/functions/kibana/definition/match.json b/docs/reference/esql/functions/kibana/definition/match.json index d2fe0bba53866..8a355360a790f 100644 --- a/docs/reference/esql/functions/kibana/definition/match.json +++ b/docs/reference/esql/functions/kibana/definition/match.json @@ -81,5 +81,5 @@ "from books \n| where match(author, \"Faulkner\")\n| keep book_no, author \n| sort book_no \n| limit 5;" ], "preview" : true, - "snapshot_only" : true + "snapshot_only" : false } diff --git a/docs/reference/esql/functions/kibana/definition/qstr.json b/docs/reference/esql/functions/kibana/definition/qstr.json index 72be906cbae63..9823c3cff8923 100644 --- a/docs/reference/esql/functions/kibana/definition/qstr.json +++ b/docs/reference/esql/functions/kibana/definition/qstr.json @@ -33,5 +33,5 @@ "from books \n| where qstr(\"author: Faulkner\")\n| keep book_no, author \n| sort book_no \n| limit 5;" ], "preview" : true, - "snapshot_only" : true + "snapshot_only" : false } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/QueryStringIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/QueryStringIT.java index 53b833c7e8a15..e7da83a40fb20 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/QueryStringIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/QueryStringIT.java @@ -14,9 +14,6 @@ import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase; import org.elasticsearch.xpack.esql.action.ColumnInfoImpl; -import org.elasticsearch.xpack.esql.action.EsqlCapabilities; -import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; -import 
org.elasticsearch.xpack.esql.action.EsqlQueryResponse; import org.elasticsearch.xpack.esql.core.type.DataType; import org.junit.Before; @@ -36,12 +33,6 @@ public void setupIndex() { createAndPopulateIndex(); } - @Override - protected EsqlQueryResponse run(EsqlQueryRequest request) { - assumeTrue("qstr function available in snapshot builds only", EsqlCapabilities.Cap.QSTR_FUNCTION.isEnabled()); - return super.run(request); - } - public void testSimpleQueryString() { var query = """ FROM test diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index ce3947875e6c7..d6d45097a1d07 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -208,7 +208,7 @@ ASTERISK : '*'; SLASH : '/'; PERCENT : '%'; -DEV_MATCH : {this.isDevVersion()}? 'match'; +MATCH : 'match'; NAMED_OR_POSITIONAL_PARAM : PARAM (LETTER | UNDERSCORE) UNQUOTED_ID_BODY* diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens index 2fe262a6983f7..4d1f426289149 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens @@ -60,7 +60,7 @@ MINUS=59 ASTERISK=60 SLASH=61 PERCENT=62 -DEV_MATCH=63 +MATCH=63 NAMED_OR_POSITIONAL_PARAM=64 OPENING_BRACKET=65 CLOSING_BRACKET=66 @@ -170,6 +170,7 @@ CLOSING_METRICS_WS=120 '*'=60 '/'=61 '%'=62 +'match'=63 ']'=66 'metadata'=75 'as'=84 diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index c053824861a96..77568d5527cd1 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -77,7 +77,7 @@ regexBooleanExpression ; matchBooleanExpression - : valueExpression DEV_MATCH queryString=string + : valueExpression MATCH queryString=string ; valueExpression @@ -106,7 +106,7 @@ 
functionExpression functionName // Additional function identifiers that are already a reserved word in the language - : {this.isDevVersion()}? DEV_MATCH + : MATCH | identifierOrParameter ; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens index 2fe262a6983f7..4d1f426289149 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens @@ -60,7 +60,7 @@ MINUS=59 ASTERISK=60 SLASH=61 PERCENT=62 -DEV_MATCH=63 +MATCH=63 NAMED_OR_POSITIONAL_PARAM=64 OPENING_BRACKET=65 CLOSING_BRACKET=66 @@ -170,6 +170,7 @@ CLOSING_METRICS_WS=120 '*'=60 '/'=61 '%'=62 +'match'=63 ']'=66 'metadata'=75 'as'=84 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 1d6d81077b9be..9dc17b020e426 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -344,12 +344,12 @@ public enum Cap { /** * QSTR function */ - QSTR_FUNCTION(true), + QSTR_FUNCTION, /** * MATCH function */ - MATCH_FUNCTION(true), + MATCH_FUNCTION, /** * Don't optimize CASE IS NOT NULL function by not requiring the fields to be not null as well. 
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index e8921c68b8913..faf99d6bd65bc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -386,7 +386,9 @@ private FunctionDefinition[][] functions() { def(MvSlice.class, MvSlice::new, "mv_slice"), def(MvZip.class, MvZip::new, "mv_zip"), def(MvSum.class, MvSum::new, "mv_sum"), - def(Split.class, Split::new, "split") } }; + def(Split.class, Split::new, "split") }, + // fulltext functions + new FunctionDefinition[] { def(Match.class, Match::new, "match"), def(QueryString.class, QueryString::new, "qstr") } }; } @@ -394,10 +396,7 @@ private static FunctionDefinition[][] snapshotFunctions() { return new FunctionDefinition[][] { new FunctionDefinition[] { def(Categorize.class, Categorize::new, "categorize"), - def(Rate.class, Rate::withUnresolvedTimestamp, "rate"), - // Full text functions - def(QueryString.class, QueryString::new, "qstr"), - def(Match.class, Match::new, "match") } }; + def(Rate.class, Rate::withUnresolvedTimestamp, "rate") } }; } public EsqlFunctionRegistry snapshotRegistry() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java index a39c0d7bc6b50..2f97de4c64469 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java @@ -9,7 +9,6 @@ import org.apache.lucene.util.BytesRef; 
import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; @@ -17,7 +16,6 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import java.util.ArrayList; import java.util.List; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; @@ -32,14 +30,7 @@ */ public abstract class FullTextFunction extends Function { public static List getNamedWriteables() { - List entries = new ArrayList<>(); - if (EsqlCapabilities.Cap.QSTR_FUNCTION.isEnabled()) { - entries.add(QueryString.ENTRY); - } - if (EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()) { - entries.add(Match.ENTRY); - } - return entries; + return List.of(QueryString.ENTRY, Match.ENTRY); } private final Expression query; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index e9e6f45bdc30f..b8251869c48cd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -62,7 +62,7 @@ null '*' '/' '%' -null +'match' null null ']' @@ -185,7 +185,7 @@ MINUS ASTERISK SLASH PERCENT -DEV_MATCH +MATCH NAMED_OR_POSITIONAL_PARAM OPENING_BRACKET CLOSING_BRACKET @@ -317,7 +317,7 @@ MINUS ASTERISK SLASH PERCENT -DEV_MATCH +MATCH NAMED_OR_POSITIONAL_PARAM OPENING_BRACKET CLOSING_BRACKET @@ -465,4 +465,4 @@ METRICS_MODE CLOSING_METRICS_MODE atn: -[4, 0, 120, 1466, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 
2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 
141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 4, 19, 576, 8, 19, 11, 19, 12, 19, 
577, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 586, 8, 20, 10, 20, 12, 20, 589, 9, 20, 1, 20, 3, 20, 592, 8, 20, 1, 20, 3, 20, 595, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 604, 8, 21, 10, 21, 12, 21, 607, 9, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 4, 22, 615, 8, 22, 11, 22, 12, 22, 616, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 28, 1, 28, 3, 28, 636, 8, 28, 1, 28, 4, 28, 639, 8, 28, 11, 28, 12, 28, 640, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 3, 31, 650, 8, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 657, 8, 33, 1, 34, 1, 34, 1, 34, 5, 34, 662, 8, 34, 10, 34, 12, 34, 665, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 673, 8, 34, 10, 34, 12, 34, 676, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 3, 34, 683, 8, 34, 1, 34, 3, 34, 686, 8, 34, 3, 34, 688, 8, 34, 1, 35, 4, 35, 691, 8, 35, 11, 35, 12, 35, 692, 1, 36, 4, 36, 696, 8, 36, 11, 36, 12, 36, 697, 1, 36, 1, 36, 5, 36, 702, 8, 36, 10, 36, 12, 36, 705, 9, 36, 1, 36, 1, 36, 4, 36, 709, 8, 36, 11, 36, 12, 36, 710, 1, 36, 4, 36, 714, 8, 36, 11, 36, 12, 36, 715, 1, 36, 1, 36, 5, 36, 720, 8, 36, 10, 36, 12, 36, 723, 9, 36, 3, 36, 725, 8, 36, 1, 36, 1, 36, 1, 36, 1, 36, 4, 36, 731, 8, 36, 11, 36, 12, 36, 732, 1, 36, 1, 36, 3, 36, 737, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 
62, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 3, 73, 866, 8, 73, 1, 73, 5, 73, 869, 8, 73, 10, 73, 12, 73, 872, 9, 73, 1, 73, 1, 73, 4, 73, 876, 8, 73, 11, 73, 12, 73, 877, 3, 73, 880, 8, 73, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 5, 76, 894, 8, 76, 10, 76, 12, 76, 897, 9, 76, 1, 76, 1, 76, 3, 76, 901, 8, 76, 1, 76, 4, 76, 904, 8, 76, 11, 76, 12, 76, 905, 3, 76, 908, 8, 76, 1, 77, 1, 77, 4, 77, 912, 8, 77, 11, 77, 12, 77, 913, 1, 77, 1, 77, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 3, 94, 991, 8, 94, 1, 95, 4, 95, 994, 8, 95, 11, 95, 12, 95, 995, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 3, 106, 1043, 8, 106, 1, 107, 1, 107, 3, 107, 1047, 8, 107, 1, 107, 5, 107, 1050, 8, 107, 10, 107, 12, 107, 1053, 9, 107, 1, 107, 1, 107, 3, 107, 1057, 8, 107, 1, 107, 4, 107, 1060, 8, 107, 11, 107, 12, 107, 1061, 3, 107, 1064, 8, 107, 1, 108, 1, 108, 4, 108, 1068, 8, 108, 11, 108, 12, 108, 1069, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 
112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 128, 4, 128, 1153, 8, 128, 11, 128, 12, 128, 1154, 1, 128, 1, 128, 3, 128, 1159, 8, 128, 1, 128, 4, 128, 1162, 8, 128, 11, 128, 12, 128, 1163, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 4, 161, 1303, 8, 161, 11, 161, 12, 161, 1304, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 
167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 2, 605, 674, 0, 197, 15, 1, 17, 2, 19, 3, 21, 4, 23, 5, 25, 6, 27, 7, 29, 8, 31, 9, 33, 10, 35, 11, 37, 12, 39, 13, 41, 14, 43, 15, 45, 16, 47, 17, 49, 18, 51, 19, 53, 20, 55, 21, 57, 22, 59, 23, 61, 24, 63, 0, 65, 0, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 0, 81, 0, 83, 25, 85, 26, 87, 27, 89, 28, 91, 29, 93, 30, 95, 31, 97, 32, 99, 33, 101, 34, 103, 35, 105, 36, 107, 37, 109, 38, 111, 39, 113, 40, 115, 41, 117, 42, 119, 43, 121, 44, 123, 45, 125, 46, 127, 47, 129, 48, 131, 49, 133, 50, 135, 51, 137, 52, 139, 53, 141, 54, 143, 55, 145, 56, 147, 57, 149, 58, 151, 59, 153, 60, 155, 61, 157, 62, 159, 63, 161, 64, 163, 65, 165, 66, 167, 67, 169, 0, 171, 68, 173, 69, 175, 70, 177, 71, 179, 0, 181, 0, 183, 72, 185, 73, 187, 74, 189, 0, 191, 0, 193, 0, 195, 0, 197, 0, 199, 0, 201, 75, 203, 0, 205, 76, 207, 0, 209, 0, 211, 77, 213, 78, 215, 79, 217, 0, 219, 0, 221, 0, 223, 0, 225, 0, 227, 0, 229, 0, 
231, 80, 233, 81, 235, 82, 237, 83, 239, 0, 241, 0, 243, 0, 245, 0, 247, 0, 249, 0, 251, 84, 253, 0, 255, 85, 257, 86, 259, 87, 261, 0, 263, 0, 265, 88, 267, 89, 269, 0, 271, 90, 273, 0, 275, 91, 277, 92, 279, 93, 281, 0, 283, 0, 285, 0, 287, 0, 289, 0, 291, 0, 293, 0, 295, 0, 297, 0, 299, 94, 301, 95, 303, 96, 305, 0, 307, 0, 309, 0, 311, 0, 313, 0, 315, 0, 317, 97, 319, 98, 321, 99, 323, 0, 325, 100, 327, 101, 329, 102, 331, 103, 333, 0, 335, 104, 337, 105, 339, 106, 341, 107, 343, 108, 345, 0, 347, 0, 349, 0, 351, 0, 353, 0, 355, 0, 357, 0, 359, 109, 361, 110, 363, 111, 365, 0, 367, 0, 369, 0, 371, 0, 373, 112, 375, 113, 377, 114, 379, 0, 381, 0, 383, 0, 385, 115, 387, 116, 389, 117, 391, 0, 393, 0, 395, 118, 397, 119, 399, 120, 401, 0, 403, 0, 405, 0, 407, 0, 15, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 35, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1494, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 
0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 1, 61, 1, 0, 0, 0, 1, 83, 1, 0, 0, 0, 1, 85, 1, 0, 0, 0, 1, 87, 1, 0, 0, 0, 1, 89, 1, 0, 0, 0, 1, 91, 1, 0, 0, 0, 1, 93, 1, 0, 0, 0, 1, 95, 1, 0, 0, 0, 1, 97, 1, 0, 0, 0, 1, 99, 1, 0, 0, 0, 1, 101, 1, 0, 0, 0, 1, 103, 1, 0, 0, 0, 1, 105, 1, 0, 0, 0, 1, 107, 1, 0, 0, 0, 1, 109, 1, 0, 0, 0, 1, 111, 1, 0, 0, 0, 1, 113, 1, 0, 0, 0, 1, 115, 1, 0, 0, 0, 1, 117, 1, 0, 0, 0, 1, 119, 1, 0, 0, 0, 1, 121, 1, 0, 0, 0, 1, 123, 1, 0, 0, 0, 1, 125, 1, 0, 0, 0, 1, 127, 1, 0, 0, 0, 1, 129, 1, 0, 0, 0, 1, 131, 1, 0, 0, 0, 1, 133, 1, 0, 0, 0, 1, 135, 1, 0, 0, 0, 1, 137, 1, 0, 0, 0, 1, 139, 1, 0, 0, 0, 1, 141, 1, 0, 0, 0, 1, 143, 1, 0, 0, 0, 1, 145, 1, 0, 0, 0, 1, 147, 1, 0, 0, 0, 1, 149, 1, 0, 0, 0, 1, 151, 1, 0, 0, 0, 1, 153, 1, 0, 0, 0, 1, 155, 1, 0, 0, 0, 1, 157, 1, 0, 0, 0, 1, 159, 1, 0, 0, 0, 1, 161, 1, 0, 0, 0, 1, 163, 1, 0, 0, 0, 1, 165, 1, 0, 0, 0, 1, 167, 1, 0, 0, 0, 1, 171, 1, 0, 0, 0, 1, 173, 1, 0, 0, 0, 1, 175, 1, 0, 0, 0, 1, 177, 1, 0, 0, 0, 2, 179, 1, 0, 0, 0, 2, 181, 1, 0, 0, 0, 2, 183, 1, 0, 0, 0, 2, 185, 1, 0, 0, 0, 2, 187, 1, 0, 0, 0, 3, 189, 1, 0, 0, 0, 3, 191, 1, 0, 0, 0, 3, 193, 1, 0, 0, 0, 3, 195, 1, 0, 0, 0, 3, 197, 1, 0, 0, 0, 3, 199, 1, 0, 0, 0, 3, 201, 1, 0, 0, 0, 3, 205, 1, 0, 0, 0, 3, 207, 1, 0, 0, 0, 3, 209, 1, 0, 0, 0, 3, 211, 1, 0, 0, 0, 3, 213, 1, 0, 0, 0, 3, 215, 1, 0, 0, 0, 4, 217, 1, 0, 0, 0, 4, 219, 1, 0, 0, 0, 4, 221, 1, 0, 0, 0, 4, 223, 1, 0, 0, 0, 4, 225, 1, 0, 0, 0, 4, 231, 1, 0, 0, 0, 4, 233, 1, 0, 0, 0, 4, 235, 1, 0, 0, 0, 4, 237, 1, 0, 0, 0, 5, 239, 1, 0, 0, 0, 5, 241, 1, 0, 0, 0, 5, 243, 1, 0, 0, 0, 5, 245, 1, 0, 0, 0, 5, 247, 1, 0, 0, 0, 5, 249, 1, 0, 0, 0, 5, 251, 1, 0, 0, 0, 5, 253, 1, 0, 0, 0, 5, 255, 1, 0, 0, 0, 5, 
257, 1, 0, 0, 0, 5, 259, 1, 0, 0, 0, 6, 261, 1, 0, 0, 0, 6, 263, 1, 0, 0, 0, 6, 265, 1, 0, 0, 0, 6, 267, 1, 0, 0, 0, 6, 271, 1, 0, 0, 0, 6, 273, 1, 0, 0, 0, 6, 275, 1, 0, 0, 0, 6, 277, 1, 0, 0, 0, 6, 279, 1, 0, 0, 0, 7, 281, 1, 0, 0, 0, 7, 283, 1, 0, 0, 0, 7, 285, 1, 0, 0, 0, 7, 287, 1, 0, 0, 0, 7, 289, 1, 0, 0, 0, 7, 291, 1, 0, 0, 0, 7, 293, 1, 0, 0, 0, 7, 295, 1, 0, 0, 0, 7, 297, 1, 0, 0, 0, 7, 299, 1, 0, 0, 0, 7, 301, 1, 0, 0, 0, 7, 303, 1, 0, 0, 0, 8, 305, 1, 0, 0, 0, 8, 307, 1, 0, 0, 0, 8, 309, 1, 0, 0, 0, 8, 311, 1, 0, 0, 0, 8, 313, 1, 0, 0, 0, 8, 315, 1, 0, 0, 0, 8, 317, 1, 0, 0, 0, 8, 319, 1, 0, 0, 0, 8, 321, 1, 0, 0, 0, 9, 323, 1, 0, 0, 0, 9, 325, 1, 0, 0, 0, 9, 327, 1, 0, 0, 0, 9, 329, 1, 0, 0, 0, 9, 331, 1, 0, 0, 0, 10, 333, 1, 0, 0, 0, 10, 335, 1, 0, 0, 0, 10, 337, 1, 0, 0, 0, 10, 339, 1, 0, 0, 0, 10, 341, 1, 0, 0, 0, 10, 343, 1, 0, 0, 0, 11, 345, 1, 0, 0, 0, 11, 347, 1, 0, 0, 0, 11, 349, 1, 0, 0, 0, 11, 351, 1, 0, 0, 0, 11, 353, 1, 0, 0, 0, 11, 355, 1, 0, 0, 0, 11, 357, 1, 0, 0, 0, 11, 359, 1, 0, 0, 0, 11, 361, 1, 0, 0, 0, 11, 363, 1, 0, 0, 0, 12, 365, 1, 0, 0, 0, 12, 367, 1, 0, 0, 0, 12, 369, 1, 0, 0, 0, 12, 371, 1, 0, 0, 0, 12, 373, 1, 0, 0, 0, 12, 375, 1, 0, 0, 0, 12, 377, 1, 0, 0, 0, 13, 379, 1, 0, 0, 0, 13, 381, 1, 0, 0, 0, 13, 383, 1, 0, 0, 0, 13, 385, 1, 0, 0, 0, 13, 387, 1, 0, 0, 0, 13, 389, 1, 0, 0, 0, 14, 391, 1, 0, 0, 0, 14, 393, 1, 0, 0, 0, 14, 395, 1, 0, 0, 0, 14, 397, 1, 0, 0, 0, 14, 399, 1, 0, 0, 0, 14, 401, 1, 0, 0, 0, 14, 403, 1, 0, 0, 0, 14, 405, 1, 0, 0, 0, 14, 407, 1, 0, 0, 0, 15, 409, 1, 0, 0, 0, 17, 419, 1, 0, 0, 0, 19, 426, 1, 0, 0, 0, 21, 435, 1, 0, 0, 0, 23, 442, 1, 0, 0, 0, 25, 452, 1, 0, 0, 0, 27, 459, 1, 0, 0, 0, 29, 466, 1, 0, 0, 0, 31, 473, 1, 0, 0, 0, 33, 481, 1, 0, 0, 0, 35, 493, 1, 0, 0, 0, 37, 502, 1, 0, 0, 0, 39, 508, 1, 0, 0, 0, 41, 515, 1, 0, 0, 0, 43, 522, 1, 0, 0, 0, 45, 530, 1, 0, 0, 0, 47, 538, 1, 0, 0, 0, 49, 553, 1, 0, 0, 0, 51, 563, 1, 0, 0, 0, 53, 575, 1, 0, 0, 0, 55, 581, 1, 0, 0, 0, 57, 598, 1, 0, 0, 0, 
59, 614, 1, 0, 0, 0, 61, 620, 1, 0, 0, 0, 63, 624, 1, 0, 0, 0, 65, 626, 1, 0, 0, 0, 67, 628, 1, 0, 0, 0, 69, 631, 1, 0, 0, 0, 71, 633, 1, 0, 0, 0, 73, 642, 1, 0, 0, 0, 75, 644, 1, 0, 0, 0, 77, 649, 1, 0, 0, 0, 79, 651, 1, 0, 0, 0, 81, 656, 1, 0, 0, 0, 83, 687, 1, 0, 0, 0, 85, 690, 1, 0, 0, 0, 87, 736, 1, 0, 0, 0, 89, 738, 1, 0, 0, 0, 91, 741, 1, 0, 0, 0, 93, 745, 1, 0, 0, 0, 95, 749, 1, 0, 0, 0, 97, 751, 1, 0, 0, 0, 99, 754, 1, 0, 0, 0, 101, 756, 1, 0, 0, 0, 103, 761, 1, 0, 0, 0, 105, 763, 1, 0, 0, 0, 107, 769, 1, 0, 0, 0, 109, 775, 1, 0, 0, 0, 111, 778, 1, 0, 0, 0, 113, 781, 1, 0, 0, 0, 115, 786, 1, 0, 0, 0, 117, 791, 1, 0, 0, 0, 119, 793, 1, 0, 0, 0, 121, 797, 1, 0, 0, 0, 123, 802, 1, 0, 0, 0, 125, 808, 1, 0, 0, 0, 127, 811, 1, 0, 0, 0, 129, 813, 1, 0, 0, 0, 131, 819, 1, 0, 0, 0, 133, 821, 1, 0, 0, 0, 135, 826, 1, 0, 0, 0, 137, 829, 1, 0, 0, 0, 139, 832, 1, 0, 0, 0, 141, 835, 1, 0, 0, 0, 143, 837, 1, 0, 0, 0, 145, 840, 1, 0, 0, 0, 147, 842, 1, 0, 0, 0, 149, 845, 1, 0, 0, 0, 151, 847, 1, 0, 0, 0, 153, 849, 1, 0, 0, 0, 155, 851, 1, 0, 0, 0, 157, 853, 1, 0, 0, 0, 159, 855, 1, 0, 0, 0, 161, 879, 1, 0, 0, 0, 163, 881, 1, 0, 0, 0, 165, 886, 1, 0, 0, 0, 167, 907, 1, 0, 0, 0, 169, 909, 1, 0, 0, 0, 171, 917, 1, 0, 0, 0, 173, 919, 1, 0, 0, 0, 175, 923, 1, 0, 0, 0, 177, 927, 1, 0, 0, 0, 179, 931, 1, 0, 0, 0, 181, 936, 1, 0, 0, 0, 183, 941, 1, 0, 0, 0, 185, 945, 1, 0, 0, 0, 187, 949, 1, 0, 0, 0, 189, 953, 1, 0, 0, 0, 191, 958, 1, 0, 0, 0, 193, 962, 1, 0, 0, 0, 195, 966, 1, 0, 0, 0, 197, 970, 1, 0, 0, 0, 199, 974, 1, 0, 0, 0, 201, 978, 1, 0, 0, 0, 203, 990, 1, 0, 0, 0, 205, 993, 1, 0, 0, 0, 207, 997, 1, 0, 0, 0, 209, 1001, 1, 0, 0, 0, 211, 1005, 1, 0, 0, 0, 213, 1009, 1, 0, 0, 0, 215, 1013, 1, 0, 0, 0, 217, 1017, 1, 0, 0, 0, 219, 1022, 1, 0, 0, 0, 221, 1026, 1, 0, 0, 0, 223, 1030, 1, 0, 0, 0, 225, 1034, 1, 0, 0, 0, 227, 1042, 1, 0, 0, 0, 229, 1063, 1, 0, 0, 0, 231, 1067, 1, 0, 0, 0, 233, 1071, 1, 0, 0, 0, 235, 1075, 1, 0, 0, 0, 237, 1079, 1, 0, 0, 0, 239, 1083, 1, 0, 0, 0, 
241, 1088, 1, 0, 0, 0, 243, 1092, 1, 0, 0, 0, 245, 1096, 1, 0, 0, 0, 247, 1100, 1, 0, 0, 0, 249, 1104, 1, 0, 0, 0, 251, 1108, 1, 0, 0, 0, 253, 1111, 1, 0, 0, 0, 255, 1115, 1, 0, 0, 0, 257, 1119, 1, 0, 0, 0, 259, 1123, 1, 0, 0, 0, 261, 1127, 1, 0, 0, 0, 263, 1132, 1, 0, 0, 0, 265, 1137, 1, 0, 0, 0, 267, 1142, 1, 0, 0, 0, 269, 1149, 1, 0, 0, 0, 271, 1158, 1, 0, 0, 0, 273, 1165, 1, 0, 0, 0, 275, 1169, 1, 0, 0, 0, 277, 1173, 1, 0, 0, 0, 279, 1177, 1, 0, 0, 0, 281, 1181, 1, 0, 0, 0, 283, 1187, 1, 0, 0, 0, 285, 1191, 1, 0, 0, 0, 287, 1195, 1, 0, 0, 0, 289, 1199, 1, 0, 0, 0, 291, 1203, 1, 0, 0, 0, 293, 1207, 1, 0, 0, 0, 295, 1211, 1, 0, 0, 0, 297, 1215, 1, 0, 0, 0, 299, 1219, 1, 0, 0, 0, 301, 1223, 1, 0, 0, 0, 303, 1227, 1, 0, 0, 0, 305, 1231, 1, 0, 0, 0, 307, 1236, 1, 0, 0, 0, 309, 1240, 1, 0, 0, 0, 311, 1244, 1, 0, 0, 0, 313, 1248, 1, 0, 0, 0, 315, 1252, 1, 0, 0, 0, 317, 1256, 1, 0, 0, 0, 319, 1260, 1, 0, 0, 0, 321, 1264, 1, 0, 0, 0, 323, 1268, 1, 0, 0, 0, 325, 1273, 1, 0, 0, 0, 327, 1278, 1, 0, 0, 0, 329, 1282, 1, 0, 0, 0, 331, 1286, 1, 0, 0, 0, 333, 1290, 1, 0, 0, 0, 335, 1295, 1, 0, 0, 0, 337, 1302, 1, 0, 0, 0, 339, 1306, 1, 0, 0, 0, 341, 1310, 1, 0, 0, 0, 343, 1314, 1, 0, 0, 0, 345, 1318, 1, 0, 0, 0, 347, 1323, 1, 0, 0, 0, 349, 1327, 1, 0, 0, 0, 351, 1331, 1, 0, 0, 0, 353, 1335, 1, 0, 0, 0, 355, 1340, 1, 0, 0, 0, 357, 1344, 1, 0, 0, 0, 359, 1348, 1, 0, 0, 0, 361, 1352, 1, 0, 0, 0, 363, 1356, 1, 0, 0, 0, 365, 1360, 1, 0, 0, 0, 367, 1366, 1, 0, 0, 0, 369, 1370, 1, 0, 0, 0, 371, 1374, 1, 0, 0, 0, 373, 1378, 1, 0, 0, 0, 375, 1382, 1, 0, 0, 0, 377, 1386, 1, 0, 0, 0, 379, 1390, 1, 0, 0, 0, 381, 1395, 1, 0, 0, 0, 383, 1401, 1, 0, 0, 0, 385, 1407, 1, 0, 0, 0, 387, 1411, 1, 0, 0, 0, 389, 1415, 1, 0, 0, 0, 391, 1419, 1, 0, 0, 0, 393, 1425, 1, 0, 0, 0, 395, 1431, 1, 0, 0, 0, 397, 1435, 1, 0, 0, 0, 399, 1439, 1, 0, 0, 0, 401, 1443, 1, 0, 0, 0, 403, 1449, 1, 0, 0, 0, 405, 1455, 1, 0, 0, 0, 407, 1461, 1, 0, 0, 0, 409, 410, 7, 0, 0, 0, 410, 411, 7, 1, 0, 0, 411, 412, 7, 2, 0, 0, 
412, 413, 7, 2, 0, 0, 413, 414, 7, 3, 0, 0, 414, 415, 7, 4, 0, 0, 415, 416, 7, 5, 0, 0, 416, 417, 1, 0, 0, 0, 417, 418, 6, 0, 0, 0, 418, 16, 1, 0, 0, 0, 419, 420, 7, 0, 0, 0, 420, 421, 7, 6, 0, 0, 421, 422, 7, 7, 0, 0, 422, 423, 7, 8, 0, 0, 423, 424, 1, 0, 0, 0, 424, 425, 6, 1, 1, 0, 425, 18, 1, 0, 0, 0, 426, 427, 7, 3, 0, 0, 427, 428, 7, 9, 0, 0, 428, 429, 7, 6, 0, 0, 429, 430, 7, 1, 0, 0, 430, 431, 7, 4, 0, 0, 431, 432, 7, 10, 0, 0, 432, 433, 1, 0, 0, 0, 433, 434, 6, 2, 2, 0, 434, 20, 1, 0, 0, 0, 435, 436, 7, 3, 0, 0, 436, 437, 7, 11, 0, 0, 437, 438, 7, 12, 0, 0, 438, 439, 7, 13, 0, 0, 439, 440, 1, 0, 0, 0, 440, 441, 6, 3, 0, 0, 441, 22, 1, 0, 0, 0, 442, 443, 7, 3, 0, 0, 443, 444, 7, 14, 0, 0, 444, 445, 7, 8, 0, 0, 445, 446, 7, 13, 0, 0, 446, 447, 7, 12, 0, 0, 447, 448, 7, 1, 0, 0, 448, 449, 7, 9, 0, 0, 449, 450, 1, 0, 0, 0, 450, 451, 6, 4, 3, 0, 451, 24, 1, 0, 0, 0, 452, 453, 7, 15, 0, 0, 453, 454, 7, 6, 0, 0, 454, 455, 7, 7, 0, 0, 455, 456, 7, 16, 0, 0, 456, 457, 1, 0, 0, 0, 457, 458, 6, 5, 4, 0, 458, 26, 1, 0, 0, 0, 459, 460, 7, 17, 0, 0, 460, 461, 7, 6, 0, 0, 461, 462, 7, 7, 0, 0, 462, 463, 7, 18, 0, 0, 463, 464, 1, 0, 0, 0, 464, 465, 6, 6, 0, 0, 465, 28, 1, 0, 0, 0, 466, 467, 7, 18, 0, 0, 467, 468, 7, 3, 0, 0, 468, 469, 7, 3, 0, 0, 469, 470, 7, 8, 0, 0, 470, 471, 1, 0, 0, 0, 471, 472, 6, 7, 1, 0, 472, 30, 1, 0, 0, 0, 473, 474, 7, 13, 0, 0, 474, 475, 7, 1, 0, 0, 475, 476, 7, 16, 0, 0, 476, 477, 7, 1, 0, 0, 477, 478, 7, 5, 0, 0, 478, 479, 1, 0, 0, 0, 479, 480, 6, 8, 0, 0, 480, 32, 1, 0, 0, 0, 481, 482, 7, 16, 0, 0, 482, 483, 7, 11, 0, 0, 483, 484, 5, 95, 0, 0, 484, 485, 7, 3, 0, 0, 485, 486, 7, 14, 0, 0, 486, 487, 7, 8, 0, 0, 487, 488, 7, 12, 0, 0, 488, 489, 7, 9, 0, 0, 489, 490, 7, 0, 0, 0, 490, 491, 1, 0, 0, 0, 491, 492, 6, 9, 5, 0, 492, 34, 1, 0, 0, 0, 493, 494, 7, 6, 0, 0, 494, 495, 7, 3, 0, 0, 495, 496, 7, 9, 0, 0, 496, 497, 7, 12, 0, 0, 497, 498, 7, 16, 0, 0, 498, 499, 7, 3, 0, 0, 499, 500, 1, 0, 0, 0, 500, 501, 6, 10, 6, 0, 501, 36, 1, 0, 0, 0, 502, 
503, 7, 6, 0, 0, 503, 504, 7, 7, 0, 0, 504, 505, 7, 19, 0, 0, 505, 506, 1, 0, 0, 0, 506, 507, 6, 11, 0, 0, 507, 38, 1, 0, 0, 0, 508, 509, 7, 2, 0, 0, 509, 510, 7, 10, 0, 0, 510, 511, 7, 7, 0, 0, 511, 512, 7, 19, 0, 0, 512, 513, 1, 0, 0, 0, 513, 514, 6, 12, 7, 0, 514, 40, 1, 0, 0, 0, 515, 516, 7, 2, 0, 0, 516, 517, 7, 7, 0, 0, 517, 518, 7, 6, 0, 0, 518, 519, 7, 5, 0, 0, 519, 520, 1, 0, 0, 0, 520, 521, 6, 13, 0, 0, 521, 42, 1, 0, 0, 0, 522, 523, 7, 2, 0, 0, 523, 524, 7, 5, 0, 0, 524, 525, 7, 12, 0, 0, 525, 526, 7, 5, 0, 0, 526, 527, 7, 2, 0, 0, 527, 528, 1, 0, 0, 0, 528, 529, 6, 14, 0, 0, 529, 44, 1, 0, 0, 0, 530, 531, 7, 19, 0, 0, 531, 532, 7, 10, 0, 0, 532, 533, 7, 3, 0, 0, 533, 534, 7, 6, 0, 0, 534, 535, 7, 3, 0, 0, 535, 536, 1, 0, 0, 0, 536, 537, 6, 15, 0, 0, 537, 46, 1, 0, 0, 0, 538, 539, 4, 16, 0, 0, 539, 540, 7, 1, 0, 0, 540, 541, 7, 9, 0, 0, 541, 542, 7, 13, 0, 0, 542, 543, 7, 1, 0, 0, 543, 544, 7, 9, 0, 0, 544, 545, 7, 3, 0, 0, 545, 546, 7, 2, 0, 0, 546, 547, 7, 5, 0, 0, 547, 548, 7, 12, 0, 0, 548, 549, 7, 5, 0, 0, 549, 550, 7, 2, 0, 0, 550, 551, 1, 0, 0, 0, 551, 552, 6, 16, 0, 0, 552, 48, 1, 0, 0, 0, 553, 554, 4, 17, 1, 0, 554, 555, 7, 13, 0, 0, 555, 556, 7, 7, 0, 0, 556, 557, 7, 7, 0, 0, 557, 558, 7, 18, 0, 0, 558, 559, 7, 20, 0, 0, 559, 560, 7, 8, 0, 0, 560, 561, 1, 0, 0, 0, 561, 562, 6, 17, 8, 0, 562, 50, 1, 0, 0, 0, 563, 564, 4, 18, 2, 0, 564, 565, 7, 16, 0, 0, 565, 566, 7, 3, 0, 0, 566, 567, 7, 5, 0, 0, 567, 568, 7, 6, 0, 0, 568, 569, 7, 1, 0, 0, 569, 570, 7, 4, 0, 0, 570, 571, 7, 2, 0, 0, 571, 572, 1, 0, 0, 0, 572, 573, 6, 18, 9, 0, 573, 52, 1, 0, 0, 0, 574, 576, 8, 21, 0, 0, 575, 574, 1, 0, 0, 0, 576, 577, 1, 0, 0, 0, 577, 575, 1, 0, 0, 0, 577, 578, 1, 0, 0, 0, 578, 579, 1, 0, 0, 0, 579, 580, 6, 19, 0, 0, 580, 54, 1, 0, 0, 0, 581, 582, 5, 47, 0, 0, 582, 583, 5, 47, 0, 0, 583, 587, 1, 0, 0, 0, 584, 586, 8, 22, 0, 0, 585, 584, 1, 0, 0, 0, 586, 589, 1, 0, 0, 0, 587, 585, 1, 0, 0, 0, 587, 588, 1, 0, 0, 0, 588, 591, 1, 0, 0, 0, 589, 587, 1, 0, 0, 0, 590, 
592, 5, 13, 0, 0, 591, 590, 1, 0, 0, 0, 591, 592, 1, 0, 0, 0, 592, 594, 1, 0, 0, 0, 593, 595, 5, 10, 0, 0, 594, 593, 1, 0, 0, 0, 594, 595, 1, 0, 0, 0, 595, 596, 1, 0, 0, 0, 596, 597, 6, 20, 10, 0, 597, 56, 1, 0, 0, 0, 598, 599, 5, 47, 0, 0, 599, 600, 5, 42, 0, 0, 600, 605, 1, 0, 0, 0, 601, 604, 3, 57, 21, 0, 602, 604, 9, 0, 0, 0, 603, 601, 1, 0, 0, 0, 603, 602, 1, 0, 0, 0, 604, 607, 1, 0, 0, 0, 605, 606, 1, 0, 0, 0, 605, 603, 1, 0, 0, 0, 606, 608, 1, 0, 0, 0, 607, 605, 1, 0, 0, 0, 608, 609, 5, 42, 0, 0, 609, 610, 5, 47, 0, 0, 610, 611, 1, 0, 0, 0, 611, 612, 6, 21, 10, 0, 612, 58, 1, 0, 0, 0, 613, 615, 7, 23, 0, 0, 614, 613, 1, 0, 0, 0, 615, 616, 1, 0, 0, 0, 616, 614, 1, 0, 0, 0, 616, 617, 1, 0, 0, 0, 617, 618, 1, 0, 0, 0, 618, 619, 6, 22, 10, 0, 619, 60, 1, 0, 0, 0, 620, 621, 5, 124, 0, 0, 621, 622, 1, 0, 0, 0, 622, 623, 6, 23, 11, 0, 623, 62, 1, 0, 0, 0, 624, 625, 7, 24, 0, 0, 625, 64, 1, 0, 0, 0, 626, 627, 7, 25, 0, 0, 627, 66, 1, 0, 0, 0, 628, 629, 5, 92, 0, 0, 629, 630, 7, 26, 0, 0, 630, 68, 1, 0, 0, 0, 631, 632, 8, 27, 0, 0, 632, 70, 1, 0, 0, 0, 633, 635, 7, 3, 0, 0, 634, 636, 7, 28, 0, 0, 635, 634, 1, 0, 0, 0, 635, 636, 1, 0, 0, 0, 636, 638, 1, 0, 0, 0, 637, 639, 3, 63, 24, 0, 638, 637, 1, 0, 0, 0, 639, 640, 1, 0, 0, 0, 640, 638, 1, 0, 0, 0, 640, 641, 1, 0, 0, 0, 641, 72, 1, 0, 0, 0, 642, 643, 5, 64, 0, 0, 643, 74, 1, 0, 0, 0, 644, 645, 5, 96, 0, 0, 645, 76, 1, 0, 0, 0, 646, 650, 8, 29, 0, 0, 647, 648, 5, 96, 0, 0, 648, 650, 5, 96, 0, 0, 649, 646, 1, 0, 0, 0, 649, 647, 1, 0, 0, 0, 650, 78, 1, 0, 0, 0, 651, 652, 5, 95, 0, 0, 652, 80, 1, 0, 0, 0, 653, 657, 3, 65, 25, 0, 654, 657, 3, 63, 24, 0, 655, 657, 3, 79, 32, 0, 656, 653, 1, 0, 0, 0, 656, 654, 1, 0, 0, 0, 656, 655, 1, 0, 0, 0, 657, 82, 1, 0, 0, 0, 658, 663, 5, 34, 0, 0, 659, 662, 3, 67, 26, 0, 660, 662, 3, 69, 27, 0, 661, 659, 1, 0, 0, 0, 661, 660, 1, 0, 0, 0, 662, 665, 1, 0, 0, 0, 663, 661, 1, 0, 0, 0, 663, 664, 1, 0, 0, 0, 664, 666, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 666, 688, 5, 34, 0, 0, 667, 668, 5, 
34, 0, 0, 668, 669, 5, 34, 0, 0, 669, 670, 5, 34, 0, 0, 670, 674, 1, 0, 0, 0, 671, 673, 8, 22, 0, 0, 672, 671, 1, 0, 0, 0, 673, 676, 1, 0, 0, 0, 674, 675, 1, 0, 0, 0, 674, 672, 1, 0, 0, 0, 675, 677, 1, 0, 0, 0, 676, 674, 1, 0, 0, 0, 677, 678, 5, 34, 0, 0, 678, 679, 5, 34, 0, 0, 679, 680, 5, 34, 0, 0, 680, 682, 1, 0, 0, 0, 681, 683, 5, 34, 0, 0, 682, 681, 1, 0, 0, 0, 682, 683, 1, 0, 0, 0, 683, 685, 1, 0, 0, 0, 684, 686, 5, 34, 0, 0, 685, 684, 1, 0, 0, 0, 685, 686, 1, 0, 0, 0, 686, 688, 1, 0, 0, 0, 687, 658, 1, 0, 0, 0, 687, 667, 1, 0, 0, 0, 688, 84, 1, 0, 0, 0, 689, 691, 3, 63, 24, 0, 690, 689, 1, 0, 0, 0, 691, 692, 1, 0, 0, 0, 692, 690, 1, 0, 0, 0, 692, 693, 1, 0, 0, 0, 693, 86, 1, 0, 0, 0, 694, 696, 3, 63, 24, 0, 695, 694, 1, 0, 0, 0, 696, 697, 1, 0, 0, 0, 697, 695, 1, 0, 0, 0, 697, 698, 1, 0, 0, 0, 698, 699, 1, 0, 0, 0, 699, 703, 3, 103, 44, 0, 700, 702, 3, 63, 24, 0, 701, 700, 1, 0, 0, 0, 702, 705, 1, 0, 0, 0, 703, 701, 1, 0, 0, 0, 703, 704, 1, 0, 0, 0, 704, 737, 1, 0, 0, 0, 705, 703, 1, 0, 0, 0, 706, 708, 3, 103, 44, 0, 707, 709, 3, 63, 24, 0, 708, 707, 1, 0, 0, 0, 709, 710, 1, 0, 0, 0, 710, 708, 1, 0, 0, 0, 710, 711, 1, 0, 0, 0, 711, 737, 1, 0, 0, 0, 712, 714, 3, 63, 24, 0, 713, 712, 1, 0, 0, 0, 714, 715, 1, 0, 0, 0, 715, 713, 1, 0, 0, 0, 715, 716, 1, 0, 0, 0, 716, 724, 1, 0, 0, 0, 717, 721, 3, 103, 44, 0, 718, 720, 3, 63, 24, 0, 719, 718, 1, 0, 0, 0, 720, 723, 1, 0, 0, 0, 721, 719, 1, 0, 0, 0, 721, 722, 1, 0, 0, 0, 722, 725, 1, 0, 0, 0, 723, 721, 1, 0, 0, 0, 724, 717, 1, 0, 0, 0, 724, 725, 1, 0, 0, 0, 725, 726, 1, 0, 0, 0, 726, 727, 3, 71, 28, 0, 727, 737, 1, 0, 0, 0, 728, 730, 3, 103, 44, 0, 729, 731, 3, 63, 24, 0, 730, 729, 1, 0, 0, 0, 731, 732, 1, 0, 0, 0, 732, 730, 1, 0, 0, 0, 732, 733, 1, 0, 0, 0, 733, 734, 1, 0, 0, 0, 734, 735, 3, 71, 28, 0, 735, 737, 1, 0, 0, 0, 736, 695, 1, 0, 0, 0, 736, 706, 1, 0, 0, 0, 736, 713, 1, 0, 0, 0, 736, 728, 1, 0, 0, 0, 737, 88, 1, 0, 0, 0, 738, 739, 7, 30, 0, 0, 739, 740, 7, 31, 0, 0, 740, 90, 1, 0, 0, 0, 741, 742, 7, 12, 
0, 0, 742, 743, 7, 9, 0, 0, 743, 744, 7, 0, 0, 0, 744, 92, 1, 0, 0, 0, 745, 746, 7, 12, 0, 0, 746, 747, 7, 2, 0, 0, 747, 748, 7, 4, 0, 0, 748, 94, 1, 0, 0, 0, 749, 750, 5, 61, 0, 0, 750, 96, 1, 0, 0, 0, 751, 752, 5, 58, 0, 0, 752, 753, 5, 58, 0, 0, 753, 98, 1, 0, 0, 0, 754, 755, 5, 44, 0, 0, 755, 100, 1, 0, 0, 0, 756, 757, 7, 0, 0, 0, 757, 758, 7, 3, 0, 0, 758, 759, 7, 2, 0, 0, 759, 760, 7, 4, 0, 0, 760, 102, 1, 0, 0, 0, 761, 762, 5, 46, 0, 0, 762, 104, 1, 0, 0, 0, 763, 764, 7, 15, 0, 0, 764, 765, 7, 12, 0, 0, 765, 766, 7, 13, 0, 0, 766, 767, 7, 2, 0, 0, 767, 768, 7, 3, 0, 0, 768, 106, 1, 0, 0, 0, 769, 770, 7, 15, 0, 0, 770, 771, 7, 1, 0, 0, 771, 772, 7, 6, 0, 0, 772, 773, 7, 2, 0, 0, 773, 774, 7, 5, 0, 0, 774, 108, 1, 0, 0, 0, 775, 776, 7, 1, 0, 0, 776, 777, 7, 9, 0, 0, 777, 110, 1, 0, 0, 0, 778, 779, 7, 1, 0, 0, 779, 780, 7, 2, 0, 0, 780, 112, 1, 0, 0, 0, 781, 782, 7, 13, 0, 0, 782, 783, 7, 12, 0, 0, 783, 784, 7, 2, 0, 0, 784, 785, 7, 5, 0, 0, 785, 114, 1, 0, 0, 0, 786, 787, 7, 13, 0, 0, 787, 788, 7, 1, 0, 0, 788, 789, 7, 18, 0, 0, 789, 790, 7, 3, 0, 0, 790, 116, 1, 0, 0, 0, 791, 792, 5, 40, 0, 0, 792, 118, 1, 0, 0, 0, 793, 794, 7, 9, 0, 0, 794, 795, 7, 7, 0, 0, 795, 796, 7, 5, 0, 0, 796, 120, 1, 0, 0, 0, 797, 798, 7, 9, 0, 0, 798, 799, 7, 20, 0, 0, 799, 800, 7, 13, 0, 0, 800, 801, 7, 13, 0, 0, 801, 122, 1, 0, 0, 0, 802, 803, 7, 9, 0, 0, 803, 804, 7, 20, 0, 0, 804, 805, 7, 13, 0, 0, 805, 806, 7, 13, 0, 0, 806, 807, 7, 2, 0, 0, 807, 124, 1, 0, 0, 0, 808, 809, 7, 7, 0, 0, 809, 810, 7, 6, 0, 0, 810, 126, 1, 0, 0, 0, 811, 812, 5, 63, 0, 0, 812, 128, 1, 0, 0, 0, 813, 814, 7, 6, 0, 0, 814, 815, 7, 13, 0, 0, 815, 816, 7, 1, 0, 0, 816, 817, 7, 18, 0, 0, 817, 818, 7, 3, 0, 0, 818, 130, 1, 0, 0, 0, 819, 820, 5, 41, 0, 0, 820, 132, 1, 0, 0, 0, 821, 822, 7, 5, 0, 0, 822, 823, 7, 6, 0, 0, 823, 824, 7, 20, 0, 0, 824, 825, 7, 3, 0, 0, 825, 134, 1, 0, 0, 0, 826, 827, 5, 61, 0, 0, 827, 828, 5, 61, 0, 0, 828, 136, 1, 0, 0, 0, 829, 830, 5, 61, 0, 0, 830, 831, 5, 126, 0, 0, 831, 
138, 1, 0, 0, 0, 832, 833, 5, 33, 0, 0, 833, 834, 5, 61, 0, 0, 834, 140, 1, 0, 0, 0, 835, 836, 5, 60, 0, 0, 836, 142, 1, 0, 0, 0, 837, 838, 5, 60, 0, 0, 838, 839, 5, 61, 0, 0, 839, 144, 1, 0, 0, 0, 840, 841, 5, 62, 0, 0, 841, 146, 1, 0, 0, 0, 842, 843, 5, 62, 0, 0, 843, 844, 5, 61, 0, 0, 844, 148, 1, 0, 0, 0, 845, 846, 5, 43, 0, 0, 846, 150, 1, 0, 0, 0, 847, 848, 5, 45, 0, 0, 848, 152, 1, 0, 0, 0, 849, 850, 5, 42, 0, 0, 850, 154, 1, 0, 0, 0, 851, 852, 5, 47, 0, 0, 852, 156, 1, 0, 0, 0, 853, 854, 5, 37, 0, 0, 854, 158, 1, 0, 0, 0, 855, 856, 4, 72, 3, 0, 856, 857, 7, 16, 0, 0, 857, 858, 7, 12, 0, 0, 858, 859, 7, 5, 0, 0, 859, 860, 7, 4, 0, 0, 860, 861, 7, 10, 0, 0, 861, 160, 1, 0, 0, 0, 862, 865, 3, 127, 56, 0, 863, 866, 3, 65, 25, 0, 864, 866, 3, 79, 32, 0, 865, 863, 1, 0, 0, 0, 865, 864, 1, 0, 0, 0, 866, 870, 1, 0, 0, 0, 867, 869, 3, 81, 33, 0, 868, 867, 1, 0, 0, 0, 869, 872, 1, 0, 0, 0, 870, 868, 1, 0, 0, 0, 870, 871, 1, 0, 0, 0, 871, 880, 1, 0, 0, 0, 872, 870, 1, 0, 0, 0, 873, 875, 3, 127, 56, 0, 874, 876, 3, 63, 24, 0, 875, 874, 1, 0, 0, 0, 876, 877, 1, 0, 0, 0, 877, 875, 1, 0, 0, 0, 877, 878, 1, 0, 0, 0, 878, 880, 1, 0, 0, 0, 879, 862, 1, 0, 0, 0, 879, 873, 1, 0, 0, 0, 880, 162, 1, 0, 0, 0, 881, 882, 5, 91, 0, 0, 882, 883, 1, 0, 0, 0, 883, 884, 6, 74, 0, 0, 884, 885, 6, 74, 0, 0, 885, 164, 1, 0, 0, 0, 886, 887, 5, 93, 0, 0, 887, 888, 1, 0, 0, 0, 888, 889, 6, 75, 11, 0, 889, 890, 6, 75, 11, 0, 890, 166, 1, 0, 0, 0, 891, 895, 3, 65, 25, 0, 892, 894, 3, 81, 33, 0, 893, 892, 1, 0, 0, 0, 894, 897, 1, 0, 0, 0, 895, 893, 1, 0, 0, 0, 895, 896, 1, 0, 0, 0, 896, 908, 1, 0, 0, 0, 897, 895, 1, 0, 0, 0, 898, 901, 3, 79, 32, 0, 899, 901, 3, 73, 29, 0, 900, 898, 1, 0, 0, 0, 900, 899, 1, 0, 0, 0, 901, 903, 1, 0, 0, 0, 902, 904, 3, 81, 33, 0, 903, 902, 1, 0, 0, 0, 904, 905, 1, 0, 0, 0, 905, 903, 1, 0, 0, 0, 905, 906, 1, 0, 0, 0, 906, 908, 1, 0, 0, 0, 907, 891, 1, 0, 0, 0, 907, 900, 1, 0, 0, 0, 908, 168, 1, 0, 0, 0, 909, 911, 3, 75, 30, 0, 910, 912, 3, 77, 31, 0, 911, 910, 1, 0, 
0, 0, 912, 913, 1, 0, 0, 0, 913, 911, 1, 0, 0, 0, 913, 914, 1, 0, 0, 0, 914, 915, 1, 0, 0, 0, 915, 916, 3, 75, 30, 0, 916, 170, 1, 0, 0, 0, 917, 918, 3, 169, 77, 0, 918, 172, 1, 0, 0, 0, 919, 920, 3, 55, 20, 0, 920, 921, 1, 0, 0, 0, 921, 922, 6, 79, 10, 0, 922, 174, 1, 0, 0, 0, 923, 924, 3, 57, 21, 0, 924, 925, 1, 0, 0, 0, 925, 926, 6, 80, 10, 0, 926, 176, 1, 0, 0, 0, 927, 928, 3, 59, 22, 0, 928, 929, 1, 0, 0, 0, 929, 930, 6, 81, 10, 0, 930, 178, 1, 0, 0, 0, 931, 932, 3, 163, 74, 0, 932, 933, 1, 0, 0, 0, 933, 934, 6, 82, 12, 0, 934, 935, 6, 82, 13, 0, 935, 180, 1, 0, 0, 0, 936, 937, 3, 61, 23, 0, 937, 938, 1, 0, 0, 0, 938, 939, 6, 83, 14, 0, 939, 940, 6, 83, 11, 0, 940, 182, 1, 0, 0, 0, 941, 942, 3, 59, 22, 0, 942, 943, 1, 0, 0, 0, 943, 944, 6, 84, 10, 0, 944, 184, 1, 0, 0, 0, 945, 946, 3, 55, 20, 0, 946, 947, 1, 0, 0, 0, 947, 948, 6, 85, 10, 0, 948, 186, 1, 0, 0, 0, 949, 950, 3, 57, 21, 0, 950, 951, 1, 0, 0, 0, 951, 952, 6, 86, 10, 0, 952, 188, 1, 0, 0, 0, 953, 954, 3, 61, 23, 0, 954, 955, 1, 0, 0, 0, 955, 956, 6, 87, 14, 0, 956, 957, 6, 87, 11, 0, 957, 190, 1, 0, 0, 0, 958, 959, 3, 163, 74, 0, 959, 960, 1, 0, 0, 0, 960, 961, 6, 88, 12, 0, 961, 192, 1, 0, 0, 0, 962, 963, 3, 165, 75, 0, 963, 964, 1, 0, 0, 0, 964, 965, 6, 89, 15, 0, 965, 194, 1, 0, 0, 0, 966, 967, 3, 335, 160, 0, 967, 968, 1, 0, 0, 0, 968, 969, 6, 90, 16, 0, 969, 196, 1, 0, 0, 0, 970, 971, 3, 99, 42, 0, 971, 972, 1, 0, 0, 0, 972, 973, 6, 91, 17, 0, 973, 198, 1, 0, 0, 0, 974, 975, 3, 95, 40, 0, 975, 976, 1, 0, 0, 0, 976, 977, 6, 92, 18, 0, 977, 200, 1, 0, 0, 0, 978, 979, 7, 16, 0, 0, 979, 980, 7, 3, 0, 0, 980, 981, 7, 5, 0, 0, 981, 982, 7, 12, 0, 0, 982, 983, 7, 0, 0, 0, 983, 984, 7, 12, 0, 0, 984, 985, 7, 5, 0, 0, 985, 986, 7, 12, 0, 0, 986, 202, 1, 0, 0, 0, 987, 991, 8, 32, 0, 0, 988, 989, 5, 47, 0, 0, 989, 991, 8, 33, 0, 0, 990, 987, 1, 0, 0, 0, 990, 988, 1, 0, 0, 0, 991, 204, 1, 0, 0, 0, 992, 994, 3, 203, 94, 0, 993, 992, 1, 0, 0, 0, 994, 995, 1, 0, 0, 0, 995, 993, 1, 0, 0, 0, 995, 996, 1, 0, 0, 
0, 996, 206, 1, 0, 0, 0, 997, 998, 3, 205, 95, 0, 998, 999, 1, 0, 0, 0, 999, 1000, 6, 96, 19, 0, 1000, 208, 1, 0, 0, 0, 1001, 1002, 3, 83, 34, 0, 1002, 1003, 1, 0, 0, 0, 1003, 1004, 6, 97, 20, 0, 1004, 210, 1, 0, 0, 0, 1005, 1006, 3, 55, 20, 0, 1006, 1007, 1, 0, 0, 0, 1007, 1008, 6, 98, 10, 0, 1008, 212, 1, 0, 0, 0, 1009, 1010, 3, 57, 21, 0, 1010, 1011, 1, 0, 0, 0, 1011, 1012, 6, 99, 10, 0, 1012, 214, 1, 0, 0, 0, 1013, 1014, 3, 59, 22, 0, 1014, 1015, 1, 0, 0, 0, 1015, 1016, 6, 100, 10, 0, 1016, 216, 1, 0, 0, 0, 1017, 1018, 3, 61, 23, 0, 1018, 1019, 1, 0, 0, 0, 1019, 1020, 6, 101, 14, 0, 1020, 1021, 6, 101, 11, 0, 1021, 218, 1, 0, 0, 0, 1022, 1023, 3, 103, 44, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1025, 6, 102, 21, 0, 1025, 220, 1, 0, 0, 0, 1026, 1027, 3, 99, 42, 0, 1027, 1028, 1, 0, 0, 0, 1028, 1029, 6, 103, 17, 0, 1029, 222, 1, 0, 0, 0, 1030, 1031, 3, 127, 56, 0, 1031, 1032, 1, 0, 0, 0, 1032, 1033, 6, 104, 22, 0, 1033, 224, 1, 0, 0, 0, 1034, 1035, 3, 161, 73, 0, 1035, 1036, 1, 0, 0, 0, 1036, 1037, 6, 105, 23, 0, 1037, 226, 1, 0, 0, 0, 1038, 1043, 3, 65, 25, 0, 1039, 1043, 3, 63, 24, 0, 1040, 1043, 3, 79, 32, 0, 1041, 1043, 3, 153, 69, 0, 1042, 1038, 1, 0, 0, 0, 1042, 1039, 1, 0, 0, 0, 1042, 1040, 1, 0, 0, 0, 1042, 1041, 1, 0, 0, 0, 1043, 228, 1, 0, 0, 0, 1044, 1047, 3, 65, 25, 0, 1045, 1047, 3, 153, 69, 0, 1046, 1044, 1, 0, 0, 0, 1046, 1045, 1, 0, 0, 0, 1047, 1051, 1, 0, 0, 0, 1048, 1050, 3, 227, 106, 0, 1049, 1048, 1, 0, 0, 0, 1050, 1053, 1, 0, 0, 0, 1051, 1049, 1, 0, 0, 0, 1051, 1052, 1, 0, 0, 0, 1052, 1064, 1, 0, 0, 0, 1053, 1051, 1, 0, 0, 0, 1054, 1057, 3, 79, 32, 0, 1055, 1057, 3, 73, 29, 0, 1056, 1054, 1, 0, 0, 0, 1056, 1055, 1, 0, 0, 0, 1057, 1059, 1, 0, 0, 0, 1058, 1060, 3, 227, 106, 0, 1059, 1058, 1, 0, 0, 0, 1060, 1061, 1, 0, 0, 0, 1061, 1059, 1, 0, 0, 0, 1061, 1062, 1, 0, 0, 0, 1062, 1064, 1, 0, 0, 0, 1063, 1046, 1, 0, 0, 0, 1063, 1056, 1, 0, 0, 0, 1064, 230, 1, 0, 0, 0, 1065, 1068, 3, 229, 107, 0, 1066, 1068, 3, 169, 77, 0, 1067, 1065, 1, 0, 0, 0, 1067, 
1066, 1, 0, 0, 0, 1068, 1069, 1, 0, 0, 0, 1069, 1067, 1, 0, 0, 0, 1069, 1070, 1, 0, 0, 0, 1070, 232, 1, 0, 0, 0, 1071, 1072, 3, 55, 20, 0, 1072, 1073, 1, 0, 0, 0, 1073, 1074, 6, 109, 10, 0, 1074, 234, 1, 0, 0, 0, 1075, 1076, 3, 57, 21, 0, 1076, 1077, 1, 0, 0, 0, 1077, 1078, 6, 110, 10, 0, 1078, 236, 1, 0, 0, 0, 1079, 1080, 3, 59, 22, 0, 1080, 1081, 1, 0, 0, 0, 1081, 1082, 6, 111, 10, 0, 1082, 238, 1, 0, 0, 0, 1083, 1084, 3, 61, 23, 0, 1084, 1085, 1, 0, 0, 0, 1085, 1086, 6, 112, 14, 0, 1086, 1087, 6, 112, 11, 0, 1087, 240, 1, 0, 0, 0, 1088, 1089, 3, 95, 40, 0, 1089, 1090, 1, 0, 0, 0, 1090, 1091, 6, 113, 18, 0, 1091, 242, 1, 0, 0, 0, 1092, 1093, 3, 99, 42, 0, 1093, 1094, 1, 0, 0, 0, 1094, 1095, 6, 114, 17, 0, 1095, 244, 1, 0, 0, 0, 1096, 1097, 3, 103, 44, 0, 1097, 1098, 1, 0, 0, 0, 1098, 1099, 6, 115, 21, 0, 1099, 246, 1, 0, 0, 0, 1100, 1101, 3, 127, 56, 0, 1101, 1102, 1, 0, 0, 0, 1102, 1103, 6, 116, 22, 0, 1103, 248, 1, 0, 0, 0, 1104, 1105, 3, 161, 73, 0, 1105, 1106, 1, 0, 0, 0, 1106, 1107, 6, 117, 23, 0, 1107, 250, 1, 0, 0, 0, 1108, 1109, 7, 12, 0, 0, 1109, 1110, 7, 2, 0, 0, 1110, 252, 1, 0, 0, 0, 1111, 1112, 3, 231, 108, 0, 1112, 1113, 1, 0, 0, 0, 1113, 1114, 6, 119, 24, 0, 1114, 254, 1, 0, 0, 0, 1115, 1116, 3, 55, 20, 0, 1116, 1117, 1, 0, 0, 0, 1117, 1118, 6, 120, 10, 0, 1118, 256, 1, 0, 0, 0, 1119, 1120, 3, 57, 21, 0, 1120, 1121, 1, 0, 0, 0, 1121, 1122, 6, 121, 10, 0, 1122, 258, 1, 0, 0, 0, 1123, 1124, 3, 59, 22, 0, 1124, 1125, 1, 0, 0, 0, 1125, 1126, 6, 122, 10, 0, 1126, 260, 1, 0, 0, 0, 1127, 1128, 3, 61, 23, 0, 1128, 1129, 1, 0, 0, 0, 1129, 1130, 6, 123, 14, 0, 1130, 1131, 6, 123, 11, 0, 1131, 262, 1, 0, 0, 0, 1132, 1133, 3, 163, 74, 0, 1133, 1134, 1, 0, 0, 0, 1134, 1135, 6, 124, 12, 0, 1135, 1136, 6, 124, 25, 0, 1136, 264, 1, 0, 0, 0, 1137, 1138, 7, 7, 0, 0, 1138, 1139, 7, 9, 0, 0, 1139, 1140, 1, 0, 0, 0, 1140, 1141, 6, 125, 26, 0, 1141, 266, 1, 0, 0, 0, 1142, 1143, 7, 19, 0, 0, 1143, 1144, 7, 1, 0, 0, 1144, 1145, 7, 5, 0, 0, 1145, 1146, 7, 10, 0, 0, 1146, 
1147, 1, 0, 0, 0, 1147, 1148, 6, 126, 26, 0, 1148, 268, 1, 0, 0, 0, 1149, 1150, 8, 34, 0, 0, 1150, 270, 1, 0, 0, 0, 1151, 1153, 3, 269, 127, 0, 1152, 1151, 1, 0, 0, 0, 1153, 1154, 1, 0, 0, 0, 1154, 1152, 1, 0, 0, 0, 1154, 1155, 1, 0, 0, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1157, 3, 335, 160, 0, 1157, 1159, 1, 0, 0, 0, 1158, 1152, 1, 0, 0, 0, 1158, 1159, 1, 0, 0, 0, 1159, 1161, 1, 0, 0, 0, 1160, 1162, 3, 269, 127, 0, 1161, 1160, 1, 0, 0, 0, 1162, 1163, 1, 0, 0, 0, 1163, 1161, 1, 0, 0, 0, 1163, 1164, 1, 0, 0, 0, 1164, 272, 1, 0, 0, 0, 1165, 1166, 3, 271, 128, 0, 1166, 1167, 1, 0, 0, 0, 1167, 1168, 6, 129, 27, 0, 1168, 274, 1, 0, 0, 0, 1169, 1170, 3, 55, 20, 0, 1170, 1171, 1, 0, 0, 0, 1171, 1172, 6, 130, 10, 0, 1172, 276, 1, 0, 0, 0, 1173, 1174, 3, 57, 21, 0, 1174, 1175, 1, 0, 0, 0, 1175, 1176, 6, 131, 10, 0, 1176, 278, 1, 0, 0, 0, 1177, 1178, 3, 59, 22, 0, 1178, 1179, 1, 0, 0, 0, 1179, 1180, 6, 132, 10, 0, 1180, 280, 1, 0, 0, 0, 1181, 1182, 3, 61, 23, 0, 1182, 1183, 1, 0, 0, 0, 1183, 1184, 6, 133, 14, 0, 1184, 1185, 6, 133, 11, 0, 1185, 1186, 6, 133, 11, 0, 1186, 282, 1, 0, 0, 0, 1187, 1188, 3, 95, 40, 0, 1188, 1189, 1, 0, 0, 0, 1189, 1190, 6, 134, 18, 0, 1190, 284, 1, 0, 0, 0, 1191, 1192, 3, 99, 42, 0, 1192, 1193, 1, 0, 0, 0, 1193, 1194, 6, 135, 17, 0, 1194, 286, 1, 0, 0, 0, 1195, 1196, 3, 103, 44, 0, 1196, 1197, 1, 0, 0, 0, 1197, 1198, 6, 136, 21, 0, 1198, 288, 1, 0, 0, 0, 1199, 1200, 3, 267, 126, 0, 1200, 1201, 1, 0, 0, 0, 1201, 1202, 6, 137, 28, 0, 1202, 290, 1, 0, 0, 0, 1203, 1204, 3, 231, 108, 0, 1204, 1205, 1, 0, 0, 0, 1205, 1206, 6, 138, 24, 0, 1206, 292, 1, 0, 0, 0, 1207, 1208, 3, 171, 78, 0, 1208, 1209, 1, 0, 0, 0, 1209, 1210, 6, 139, 29, 0, 1210, 294, 1, 0, 0, 0, 1211, 1212, 3, 127, 56, 0, 1212, 1213, 1, 0, 0, 0, 1213, 1214, 6, 140, 22, 0, 1214, 296, 1, 0, 0, 0, 1215, 1216, 3, 161, 73, 0, 1216, 1217, 1, 0, 0, 0, 1217, 1218, 6, 141, 23, 0, 1218, 298, 1, 0, 0, 0, 1219, 1220, 3, 55, 20, 0, 1220, 1221, 1, 0, 0, 0, 1221, 1222, 6, 142, 10, 0, 1222, 300, 1, 0, 0, 0, 
1223, 1224, 3, 57, 21, 0, 1224, 1225, 1, 0, 0, 0, 1225, 1226, 6, 143, 10, 0, 1226, 302, 1, 0, 0, 0, 1227, 1228, 3, 59, 22, 0, 1228, 1229, 1, 0, 0, 0, 1229, 1230, 6, 144, 10, 0, 1230, 304, 1, 0, 0, 0, 1231, 1232, 3, 61, 23, 0, 1232, 1233, 1, 0, 0, 0, 1233, 1234, 6, 145, 14, 0, 1234, 1235, 6, 145, 11, 0, 1235, 306, 1, 0, 0, 0, 1236, 1237, 3, 103, 44, 0, 1237, 1238, 1, 0, 0, 0, 1238, 1239, 6, 146, 21, 0, 1239, 308, 1, 0, 0, 0, 1240, 1241, 3, 127, 56, 0, 1241, 1242, 1, 0, 0, 0, 1242, 1243, 6, 147, 22, 0, 1243, 310, 1, 0, 0, 0, 1244, 1245, 3, 161, 73, 0, 1245, 1246, 1, 0, 0, 0, 1246, 1247, 6, 148, 23, 0, 1247, 312, 1, 0, 0, 0, 1248, 1249, 3, 171, 78, 0, 1249, 1250, 1, 0, 0, 0, 1250, 1251, 6, 149, 29, 0, 1251, 314, 1, 0, 0, 0, 1252, 1253, 3, 167, 76, 0, 1253, 1254, 1, 0, 0, 0, 1254, 1255, 6, 150, 30, 0, 1255, 316, 1, 0, 0, 0, 1256, 1257, 3, 55, 20, 0, 1257, 1258, 1, 0, 0, 0, 1258, 1259, 6, 151, 10, 0, 1259, 318, 1, 0, 0, 0, 1260, 1261, 3, 57, 21, 0, 1261, 1262, 1, 0, 0, 0, 1262, 1263, 6, 152, 10, 0, 1263, 320, 1, 0, 0, 0, 1264, 1265, 3, 59, 22, 0, 1265, 1266, 1, 0, 0, 0, 1266, 1267, 6, 153, 10, 0, 1267, 322, 1, 0, 0, 0, 1268, 1269, 3, 61, 23, 0, 1269, 1270, 1, 0, 0, 0, 1270, 1271, 6, 154, 14, 0, 1271, 1272, 6, 154, 11, 0, 1272, 324, 1, 0, 0, 0, 1273, 1274, 7, 1, 0, 0, 1274, 1275, 7, 9, 0, 0, 1275, 1276, 7, 15, 0, 0, 1276, 1277, 7, 7, 0, 0, 1277, 326, 1, 0, 0, 0, 1278, 1279, 3, 55, 20, 0, 1279, 1280, 1, 0, 0, 0, 1280, 1281, 6, 156, 10, 0, 1281, 328, 1, 0, 0, 0, 1282, 1283, 3, 57, 21, 0, 1283, 1284, 1, 0, 0, 0, 1284, 1285, 6, 157, 10, 0, 1285, 330, 1, 0, 0, 0, 1286, 1287, 3, 59, 22, 0, 1287, 1288, 1, 0, 0, 0, 1288, 1289, 6, 158, 10, 0, 1289, 332, 1, 0, 0, 0, 1290, 1291, 3, 165, 75, 0, 1291, 1292, 1, 0, 0, 0, 1292, 1293, 6, 159, 15, 0, 1293, 1294, 6, 159, 11, 0, 1294, 334, 1, 0, 0, 0, 1295, 1296, 5, 58, 0, 0, 1296, 336, 1, 0, 0, 0, 1297, 1303, 3, 73, 29, 0, 1298, 1303, 3, 63, 24, 0, 1299, 1303, 3, 103, 44, 0, 1300, 1303, 3, 65, 25, 0, 1301, 1303, 3, 79, 32, 0, 1302, 1297, 
1, 0, 0, 0, 1302, 1298, 1, 0, 0, 0, 1302, 1299, 1, 0, 0, 0, 1302, 1300, 1, 0, 0, 0, 1302, 1301, 1, 0, 0, 0, 1303, 1304, 1, 0, 0, 0, 1304, 1302, 1, 0, 0, 0, 1304, 1305, 1, 0, 0, 0, 1305, 338, 1, 0, 0, 0, 1306, 1307, 3, 55, 20, 0, 1307, 1308, 1, 0, 0, 0, 1308, 1309, 6, 162, 10, 0, 1309, 340, 1, 0, 0, 0, 1310, 1311, 3, 57, 21, 0, 1311, 1312, 1, 0, 0, 0, 1312, 1313, 6, 163, 10, 0, 1313, 342, 1, 0, 0, 0, 1314, 1315, 3, 59, 22, 0, 1315, 1316, 1, 0, 0, 0, 1316, 1317, 6, 164, 10, 0, 1317, 344, 1, 0, 0, 0, 1318, 1319, 3, 61, 23, 0, 1319, 1320, 1, 0, 0, 0, 1320, 1321, 6, 165, 14, 0, 1321, 1322, 6, 165, 11, 0, 1322, 346, 1, 0, 0, 0, 1323, 1324, 3, 335, 160, 0, 1324, 1325, 1, 0, 0, 0, 1325, 1326, 6, 166, 16, 0, 1326, 348, 1, 0, 0, 0, 1327, 1328, 3, 99, 42, 0, 1328, 1329, 1, 0, 0, 0, 1329, 1330, 6, 167, 17, 0, 1330, 350, 1, 0, 0, 0, 1331, 1332, 3, 103, 44, 0, 1332, 1333, 1, 0, 0, 0, 1333, 1334, 6, 168, 21, 0, 1334, 352, 1, 0, 0, 0, 1335, 1336, 3, 265, 125, 0, 1336, 1337, 1, 0, 0, 0, 1337, 1338, 6, 169, 31, 0, 1338, 1339, 6, 169, 32, 0, 1339, 354, 1, 0, 0, 0, 1340, 1341, 3, 205, 95, 0, 1341, 1342, 1, 0, 0, 0, 1342, 1343, 6, 170, 19, 0, 1343, 356, 1, 0, 0, 0, 1344, 1345, 3, 83, 34, 0, 1345, 1346, 1, 0, 0, 0, 1346, 1347, 6, 171, 20, 0, 1347, 358, 1, 0, 0, 0, 1348, 1349, 3, 55, 20, 0, 1349, 1350, 1, 0, 0, 0, 1350, 1351, 6, 172, 10, 0, 1351, 360, 1, 0, 0, 0, 1352, 1353, 3, 57, 21, 0, 1353, 1354, 1, 0, 0, 0, 1354, 1355, 6, 173, 10, 0, 1355, 362, 1, 0, 0, 0, 1356, 1357, 3, 59, 22, 0, 1357, 1358, 1, 0, 0, 0, 1358, 1359, 6, 174, 10, 0, 1359, 364, 1, 0, 0, 0, 1360, 1361, 3, 61, 23, 0, 1361, 1362, 1, 0, 0, 0, 1362, 1363, 6, 175, 14, 0, 1363, 1364, 6, 175, 11, 0, 1364, 1365, 6, 175, 11, 0, 1365, 366, 1, 0, 0, 0, 1366, 1367, 3, 99, 42, 0, 1367, 1368, 1, 0, 0, 0, 1368, 1369, 6, 176, 17, 0, 1369, 368, 1, 0, 0, 0, 1370, 1371, 3, 103, 44, 0, 1371, 1372, 1, 0, 0, 0, 1372, 1373, 6, 177, 21, 0, 1373, 370, 1, 0, 0, 0, 1374, 1375, 3, 231, 108, 0, 1375, 1376, 1, 0, 0, 0, 1376, 1377, 6, 178, 24, 0, 
1377, 372, 1, 0, 0, 0, 1378, 1379, 3, 55, 20, 0, 1379, 1380, 1, 0, 0, 0, 1380, 1381, 6, 179, 10, 0, 1381, 374, 1, 0, 0, 0, 1382, 1383, 3, 57, 21, 0, 1383, 1384, 1, 0, 0, 0, 1384, 1385, 6, 180, 10, 0, 1385, 376, 1, 0, 0, 0, 1386, 1387, 3, 59, 22, 0, 1387, 1388, 1, 0, 0, 0, 1388, 1389, 6, 181, 10, 0, 1389, 378, 1, 0, 0, 0, 1390, 1391, 3, 61, 23, 0, 1391, 1392, 1, 0, 0, 0, 1392, 1393, 6, 182, 14, 0, 1393, 1394, 6, 182, 11, 0, 1394, 380, 1, 0, 0, 0, 1395, 1396, 3, 205, 95, 0, 1396, 1397, 1, 0, 0, 0, 1397, 1398, 6, 183, 19, 0, 1398, 1399, 6, 183, 11, 0, 1399, 1400, 6, 183, 33, 0, 1400, 382, 1, 0, 0, 0, 1401, 1402, 3, 83, 34, 0, 1402, 1403, 1, 0, 0, 0, 1403, 1404, 6, 184, 20, 0, 1404, 1405, 6, 184, 11, 0, 1405, 1406, 6, 184, 33, 0, 1406, 384, 1, 0, 0, 0, 1407, 1408, 3, 55, 20, 0, 1408, 1409, 1, 0, 0, 0, 1409, 1410, 6, 185, 10, 0, 1410, 386, 1, 0, 0, 0, 1411, 1412, 3, 57, 21, 0, 1412, 1413, 1, 0, 0, 0, 1413, 1414, 6, 186, 10, 0, 1414, 388, 1, 0, 0, 0, 1415, 1416, 3, 59, 22, 0, 1416, 1417, 1, 0, 0, 0, 1417, 1418, 6, 187, 10, 0, 1418, 390, 1, 0, 0, 0, 1419, 1420, 3, 335, 160, 0, 1420, 1421, 1, 0, 0, 0, 1421, 1422, 6, 188, 16, 0, 1422, 1423, 6, 188, 11, 0, 1423, 1424, 6, 188, 9, 0, 1424, 392, 1, 0, 0, 0, 1425, 1426, 3, 99, 42, 0, 1426, 1427, 1, 0, 0, 0, 1427, 1428, 6, 189, 17, 0, 1428, 1429, 6, 189, 11, 0, 1429, 1430, 6, 189, 9, 0, 1430, 394, 1, 0, 0, 0, 1431, 1432, 3, 55, 20, 0, 1432, 1433, 1, 0, 0, 0, 1433, 1434, 6, 190, 10, 0, 1434, 396, 1, 0, 0, 0, 1435, 1436, 3, 57, 21, 0, 1436, 1437, 1, 0, 0, 0, 1437, 1438, 6, 191, 10, 0, 1438, 398, 1, 0, 0, 0, 1439, 1440, 3, 59, 22, 0, 1440, 1441, 1, 0, 0, 0, 1441, 1442, 6, 192, 10, 0, 1442, 400, 1, 0, 0, 0, 1443, 1444, 3, 171, 78, 0, 1444, 1445, 1, 0, 0, 0, 1445, 1446, 6, 193, 11, 0, 1446, 1447, 6, 193, 0, 0, 1447, 1448, 6, 193, 29, 0, 1448, 402, 1, 0, 0, 0, 1449, 1450, 3, 167, 76, 0, 1450, 1451, 1, 0, 0, 0, 1451, 1452, 6, 194, 11, 0, 1452, 1453, 6, 194, 0, 0, 1453, 1454, 6, 194, 30, 0, 1454, 404, 1, 0, 0, 0, 1455, 1456, 3, 89, 37, 
0, 1456, 1457, 1, 0, 0, 0, 1457, 1458, 6, 195, 11, 0, 1458, 1459, 6, 195, 0, 0, 1459, 1460, 6, 195, 34, 0, 1460, 406, 1, 0, 0, 0, 1461, 1462, 3, 61, 23, 0, 1462, 1463, 1, 0, 0, 0, 1463, 1464, 6, 196, 14, 0, 1464, 1465, 6, 196, 11, 0, 1465, 408, 1, 0, 0, 0, 65, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 577, 587, 591, 594, 603, 605, 616, 635, 640, 649, 656, 661, 663, 674, 682, 685, 687, 692, 697, 703, 710, 715, 721, 724, 732, 736, 865, 870, 877, 879, 895, 900, 905, 907, 913, 990, 995, 1042, 1046, 1051, 1056, 1061, 1063, 1067, 1069, 1154, 1158, 1163, 1302, 1304, 35, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 65, 0, 5, 0, 0, 7, 24, 0, 7, 66, 0, 7, 104, 0, 7, 33, 0, 7, 31, 0, 7, 76, 0, 7, 25, 0, 7, 35, 0, 7, 47, 0, 7, 64, 0, 7, 80, 0, 5, 10, 0, 5, 7, 0, 7, 90, 0, 7, 89, 0, 7, 68, 0, 7, 67, 0, 7, 88, 0, 5, 12, 0, 5, 14, 0, 7, 28, 0] \ No newline at end of file +[4, 0, 120, 1465, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 
67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 
196, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 4, 19, 576, 8, 19, 11, 19, 12, 19, 577, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 586, 8, 20, 10, 20, 12, 20, 589, 9, 20, 1, 20, 3, 20, 592, 8, 20, 1, 20, 3, 20, 595, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 604, 8, 21, 10, 21, 12, 21, 607, 9, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 4, 22, 615, 8, 22, 11, 22, 12, 22, 616, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 28, 1, 28, 3, 28, 636, 8, 28, 1, 28, 4, 28, 639, 8, 28, 11, 28, 12, 28, 640, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 3, 31, 650, 8, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 657, 8, 33, 1, 34, 1, 34, 1, 34, 5, 34, 662, 8, 34, 10, 34, 12, 34, 665, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 673, 8, 34, 10, 34, 12, 34, 676, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 3, 34, 683, 8, 34, 1, 34, 3, 34, 686, 8, 34, 3, 34, 688, 
8, 34, 1, 35, 4, 35, 691, 8, 35, 11, 35, 12, 35, 692, 1, 36, 4, 36, 696, 8, 36, 11, 36, 12, 36, 697, 1, 36, 1, 36, 5, 36, 702, 8, 36, 10, 36, 12, 36, 705, 9, 36, 1, 36, 1, 36, 4, 36, 709, 8, 36, 11, 36, 12, 36, 710, 1, 36, 4, 36, 714, 8, 36, 11, 36, 12, 36, 715, 1, 36, 1, 36, 5, 36, 720, 8, 36, 10, 36, 12, 36, 723, 9, 36, 3, 36, 725, 8, 36, 1, 36, 1, 36, 1, 36, 1, 36, 4, 36, 731, 8, 36, 11, 36, 12, 36, 732, 1, 36, 1, 36, 3, 36, 737, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 3, 73, 865, 8, 73, 1, 73, 5, 73, 868, 8, 73, 10, 73, 12, 73, 871, 9, 73, 1, 73, 1, 73, 4, 73, 875, 8, 73, 11, 73, 12, 73, 876, 3, 73, 879, 8, 73, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 5, 76, 893, 8, 76, 10, 76, 12, 76, 896, 9, 76, 1, 76, 1, 76, 3, 76, 900, 8, 76, 1, 76, 4, 76, 903, 8, 76, 11, 76, 12, 76, 904, 3, 76, 907, 8, 76, 1, 77, 1, 77, 4, 77, 911, 8, 77, 11, 77, 12, 77, 912, 1, 77, 1, 77, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 
86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 3, 94, 990, 8, 94, 1, 95, 4, 95, 993, 8, 95, 11, 95, 12, 95, 994, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 3, 106, 1042, 8, 106, 1, 107, 1, 107, 3, 107, 1046, 8, 107, 1, 107, 5, 107, 1049, 8, 107, 10, 107, 12, 107, 1052, 9, 107, 1, 107, 1, 107, 3, 107, 1056, 8, 107, 1, 107, 4, 107, 1059, 8, 107, 11, 107, 12, 107, 1060, 3, 107, 1063, 8, 107, 1, 108, 1, 108, 4, 108, 1067, 8, 108, 11, 108, 12, 108, 1068, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 128, 4, 128, 1152, 8, 128, 11, 128, 12, 128, 1153, 1, 128, 1, 128, 3, 128, 1158, 8, 128, 1, 128, 4, 128, 1161, 8, 128, 11, 128, 12, 128, 1162, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 
135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 4, 161, 1302, 8, 161, 11, 161, 12, 161, 1303, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 
192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 2, 605, 674, 0, 197, 15, 1, 17, 2, 19, 3, 21, 4, 23, 5, 25, 6, 27, 7, 29, 8, 31, 9, 33, 10, 35, 11, 37, 12, 39, 13, 41, 14, 43, 15, 45, 16, 47, 17, 49, 18, 51, 19, 53, 20, 55, 21, 57, 22, 59, 23, 61, 24, 63, 0, 65, 0, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 0, 81, 0, 83, 25, 85, 26, 87, 27, 89, 28, 91, 29, 93, 30, 95, 31, 97, 32, 99, 33, 101, 34, 103, 35, 105, 36, 107, 37, 109, 38, 111, 39, 113, 40, 115, 41, 117, 42, 119, 43, 121, 44, 123, 45, 125, 46, 127, 47, 129, 48, 131, 49, 133, 50, 135, 51, 137, 52, 139, 53, 141, 54, 143, 55, 145, 56, 147, 57, 149, 58, 151, 59, 153, 60, 155, 61, 157, 62, 159, 63, 161, 64, 163, 65, 165, 66, 167, 67, 169, 0, 171, 68, 173, 69, 175, 70, 177, 71, 179, 0, 181, 0, 183, 72, 185, 73, 187, 74, 189, 0, 191, 0, 193, 0, 195, 0, 197, 0, 199, 0, 201, 75, 203, 0, 205, 76, 207, 0, 209, 0, 211, 77, 213, 78, 215, 79, 217, 0, 219, 0, 221, 0, 223, 0, 225, 0, 227, 0, 229, 0, 231, 80, 233, 81, 235, 82, 237, 83, 239, 0, 241, 0, 243, 0, 245, 0, 247, 0, 249, 0, 251, 84, 253, 0, 255, 85, 257, 86, 259, 87, 261, 0, 263, 0, 265, 88, 267, 89, 269, 0, 271, 90, 273, 0, 275, 91, 277, 92, 279, 93, 281, 0, 283, 0, 285, 0, 287, 0, 289, 0, 291, 0, 293, 0, 295, 0, 297, 0, 299, 94, 301, 95, 303, 96, 305, 0, 307, 0, 309, 0, 311, 0, 313, 0, 315, 0, 317, 97, 319, 98, 321, 99, 323, 0, 325, 100, 327, 101, 329, 102, 331, 103, 333, 0, 335, 104, 337, 105, 339, 106, 341, 107, 343, 108, 345, 0, 347, 0, 349, 0, 351, 0, 353, 0, 355, 0, 357, 0, 359, 109, 361, 110, 363, 111, 365, 0, 367, 0, 369, 0, 371, 0, 373, 112, 375, 113, 377, 114, 379, 0, 381, 0, 383, 0, 385, 115, 387, 116, 389, 117, 391, 0, 393, 0, 395, 118, 397, 119, 399, 120, 401, 0, 403, 0, 405, 0, 407, 0, 15, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 35, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 
0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1493, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 1, 61, 1, 0, 0, 0, 1, 83, 1, 0, 0, 0, 1, 85, 1, 0, 0, 0, 1, 87, 1, 0, 0, 0, 1, 89, 1, 0, 0, 0, 1, 91, 1, 0, 0, 0, 1, 93, 1, 0, 0, 0, 1, 95, 1, 0, 0, 0, 1, 97, 1, 0, 0, 0, 1, 99, 1, 0, 0, 0, 1, 101, 1, 0, 0, 0, 1, 103, 1, 0, 0, 0, 1, 105, 1, 0, 0, 0, 1, 107, 1, 0, 0, 0, 1, 109, 1, 0, 0, 0, 1, 111, 1, 0, 0, 0, 1, 113, 1, 0, 0, 0, 1, 115, 1, 0, 0, 0, 1, 117, 1, 0, 0, 0, 1, 119, 1, 0, 0, 0, 1, 121, 1, 0, 0, 0, 1, 123, 1, 0, 0, 0, 1, 125, 1, 0, 0, 0, 1, 127, 1, 0, 0, 0, 1, 129, 1, 0, 0, 0, 1, 131, 1, 0, 0, 0, 1, 133, 1, 0, 0, 0, 1, 135, 1, 0, 0, 0, 1, 
137, 1, 0, 0, 0, 1, 139, 1, 0, 0, 0, 1, 141, 1, 0, 0, 0, 1, 143, 1, 0, 0, 0, 1, 145, 1, 0, 0, 0, 1, 147, 1, 0, 0, 0, 1, 149, 1, 0, 0, 0, 1, 151, 1, 0, 0, 0, 1, 153, 1, 0, 0, 0, 1, 155, 1, 0, 0, 0, 1, 157, 1, 0, 0, 0, 1, 159, 1, 0, 0, 0, 1, 161, 1, 0, 0, 0, 1, 163, 1, 0, 0, 0, 1, 165, 1, 0, 0, 0, 1, 167, 1, 0, 0, 0, 1, 171, 1, 0, 0, 0, 1, 173, 1, 0, 0, 0, 1, 175, 1, 0, 0, 0, 1, 177, 1, 0, 0, 0, 2, 179, 1, 0, 0, 0, 2, 181, 1, 0, 0, 0, 2, 183, 1, 0, 0, 0, 2, 185, 1, 0, 0, 0, 2, 187, 1, 0, 0, 0, 3, 189, 1, 0, 0, 0, 3, 191, 1, 0, 0, 0, 3, 193, 1, 0, 0, 0, 3, 195, 1, 0, 0, 0, 3, 197, 1, 0, 0, 0, 3, 199, 1, 0, 0, 0, 3, 201, 1, 0, 0, 0, 3, 205, 1, 0, 0, 0, 3, 207, 1, 0, 0, 0, 3, 209, 1, 0, 0, 0, 3, 211, 1, 0, 0, 0, 3, 213, 1, 0, 0, 0, 3, 215, 1, 0, 0, 0, 4, 217, 1, 0, 0, 0, 4, 219, 1, 0, 0, 0, 4, 221, 1, 0, 0, 0, 4, 223, 1, 0, 0, 0, 4, 225, 1, 0, 0, 0, 4, 231, 1, 0, 0, 0, 4, 233, 1, 0, 0, 0, 4, 235, 1, 0, 0, 0, 4, 237, 1, 0, 0, 0, 5, 239, 1, 0, 0, 0, 5, 241, 1, 0, 0, 0, 5, 243, 1, 0, 0, 0, 5, 245, 1, 0, 0, 0, 5, 247, 1, 0, 0, 0, 5, 249, 1, 0, 0, 0, 5, 251, 1, 0, 0, 0, 5, 253, 1, 0, 0, 0, 5, 255, 1, 0, 0, 0, 5, 257, 1, 0, 0, 0, 5, 259, 1, 0, 0, 0, 6, 261, 1, 0, 0, 0, 6, 263, 1, 0, 0, 0, 6, 265, 1, 0, 0, 0, 6, 267, 1, 0, 0, 0, 6, 271, 1, 0, 0, 0, 6, 273, 1, 0, 0, 0, 6, 275, 1, 0, 0, 0, 6, 277, 1, 0, 0, 0, 6, 279, 1, 0, 0, 0, 7, 281, 1, 0, 0, 0, 7, 283, 1, 0, 0, 0, 7, 285, 1, 0, 0, 0, 7, 287, 1, 0, 0, 0, 7, 289, 1, 0, 0, 0, 7, 291, 1, 0, 0, 0, 7, 293, 1, 0, 0, 0, 7, 295, 1, 0, 0, 0, 7, 297, 1, 0, 0, 0, 7, 299, 1, 0, 0, 0, 7, 301, 1, 0, 0, 0, 7, 303, 1, 0, 0, 0, 8, 305, 1, 0, 0, 0, 8, 307, 1, 0, 0, 0, 8, 309, 1, 0, 0, 0, 8, 311, 1, 0, 0, 0, 8, 313, 1, 0, 0, 0, 8, 315, 1, 0, 0, 0, 8, 317, 1, 0, 0, 0, 8, 319, 1, 0, 0, 0, 8, 321, 1, 0, 0, 0, 9, 323, 1, 0, 0, 0, 9, 325, 1, 0, 0, 0, 9, 327, 1, 0, 0, 0, 9, 329, 1, 0, 0, 0, 9, 331, 1, 0, 0, 0, 10, 333, 1, 0, 0, 0, 10, 335, 1, 0, 0, 0, 10, 337, 1, 0, 0, 0, 10, 339, 1, 0, 0, 0, 10, 341, 1, 0, 0, 0, 10, 343, 1, 0, 0, 0, 11, 345, 1, 0, 
0, 0, 11, 347, 1, 0, 0, 0, 11, 349, 1, 0, 0, 0, 11, 351, 1, 0, 0, 0, 11, 353, 1, 0, 0, 0, 11, 355, 1, 0, 0, 0, 11, 357, 1, 0, 0, 0, 11, 359, 1, 0, 0, 0, 11, 361, 1, 0, 0, 0, 11, 363, 1, 0, 0, 0, 12, 365, 1, 0, 0, 0, 12, 367, 1, 0, 0, 0, 12, 369, 1, 0, 0, 0, 12, 371, 1, 0, 0, 0, 12, 373, 1, 0, 0, 0, 12, 375, 1, 0, 0, 0, 12, 377, 1, 0, 0, 0, 13, 379, 1, 0, 0, 0, 13, 381, 1, 0, 0, 0, 13, 383, 1, 0, 0, 0, 13, 385, 1, 0, 0, 0, 13, 387, 1, 0, 0, 0, 13, 389, 1, 0, 0, 0, 14, 391, 1, 0, 0, 0, 14, 393, 1, 0, 0, 0, 14, 395, 1, 0, 0, 0, 14, 397, 1, 0, 0, 0, 14, 399, 1, 0, 0, 0, 14, 401, 1, 0, 0, 0, 14, 403, 1, 0, 0, 0, 14, 405, 1, 0, 0, 0, 14, 407, 1, 0, 0, 0, 15, 409, 1, 0, 0, 0, 17, 419, 1, 0, 0, 0, 19, 426, 1, 0, 0, 0, 21, 435, 1, 0, 0, 0, 23, 442, 1, 0, 0, 0, 25, 452, 1, 0, 0, 0, 27, 459, 1, 0, 0, 0, 29, 466, 1, 0, 0, 0, 31, 473, 1, 0, 0, 0, 33, 481, 1, 0, 0, 0, 35, 493, 1, 0, 0, 0, 37, 502, 1, 0, 0, 0, 39, 508, 1, 0, 0, 0, 41, 515, 1, 0, 0, 0, 43, 522, 1, 0, 0, 0, 45, 530, 1, 0, 0, 0, 47, 538, 1, 0, 0, 0, 49, 553, 1, 0, 0, 0, 51, 563, 1, 0, 0, 0, 53, 575, 1, 0, 0, 0, 55, 581, 1, 0, 0, 0, 57, 598, 1, 0, 0, 0, 59, 614, 1, 0, 0, 0, 61, 620, 1, 0, 0, 0, 63, 624, 1, 0, 0, 0, 65, 626, 1, 0, 0, 0, 67, 628, 1, 0, 0, 0, 69, 631, 1, 0, 0, 0, 71, 633, 1, 0, 0, 0, 73, 642, 1, 0, 0, 0, 75, 644, 1, 0, 0, 0, 77, 649, 1, 0, 0, 0, 79, 651, 1, 0, 0, 0, 81, 656, 1, 0, 0, 0, 83, 687, 1, 0, 0, 0, 85, 690, 1, 0, 0, 0, 87, 736, 1, 0, 0, 0, 89, 738, 1, 0, 0, 0, 91, 741, 1, 0, 0, 0, 93, 745, 1, 0, 0, 0, 95, 749, 1, 0, 0, 0, 97, 751, 1, 0, 0, 0, 99, 754, 1, 0, 0, 0, 101, 756, 1, 0, 0, 0, 103, 761, 1, 0, 0, 0, 105, 763, 1, 0, 0, 0, 107, 769, 1, 0, 0, 0, 109, 775, 1, 0, 0, 0, 111, 778, 1, 0, 0, 0, 113, 781, 1, 0, 0, 0, 115, 786, 1, 0, 0, 0, 117, 791, 1, 0, 0, 0, 119, 793, 1, 0, 0, 0, 121, 797, 1, 0, 0, 0, 123, 802, 1, 0, 0, 0, 125, 808, 1, 0, 0, 0, 127, 811, 1, 0, 0, 0, 129, 813, 1, 0, 0, 0, 131, 819, 1, 0, 0, 0, 133, 821, 1, 0, 0, 0, 135, 826, 1, 0, 0, 0, 137, 829, 1, 0, 0, 0, 139, 832, 1, 0, 0, 0, 
141, 835, 1, 0, 0, 0, 143, 837, 1, 0, 0, 0, 145, 840, 1, 0, 0, 0, 147, 842, 1, 0, 0, 0, 149, 845, 1, 0, 0, 0, 151, 847, 1, 0, 0, 0, 153, 849, 1, 0, 0, 0, 155, 851, 1, 0, 0, 0, 157, 853, 1, 0, 0, 0, 159, 855, 1, 0, 0, 0, 161, 878, 1, 0, 0, 0, 163, 880, 1, 0, 0, 0, 165, 885, 1, 0, 0, 0, 167, 906, 1, 0, 0, 0, 169, 908, 1, 0, 0, 0, 171, 916, 1, 0, 0, 0, 173, 918, 1, 0, 0, 0, 175, 922, 1, 0, 0, 0, 177, 926, 1, 0, 0, 0, 179, 930, 1, 0, 0, 0, 181, 935, 1, 0, 0, 0, 183, 940, 1, 0, 0, 0, 185, 944, 1, 0, 0, 0, 187, 948, 1, 0, 0, 0, 189, 952, 1, 0, 0, 0, 191, 957, 1, 0, 0, 0, 193, 961, 1, 0, 0, 0, 195, 965, 1, 0, 0, 0, 197, 969, 1, 0, 0, 0, 199, 973, 1, 0, 0, 0, 201, 977, 1, 0, 0, 0, 203, 989, 1, 0, 0, 0, 205, 992, 1, 0, 0, 0, 207, 996, 1, 0, 0, 0, 209, 1000, 1, 0, 0, 0, 211, 1004, 1, 0, 0, 0, 213, 1008, 1, 0, 0, 0, 215, 1012, 1, 0, 0, 0, 217, 1016, 1, 0, 0, 0, 219, 1021, 1, 0, 0, 0, 221, 1025, 1, 0, 0, 0, 223, 1029, 1, 0, 0, 0, 225, 1033, 1, 0, 0, 0, 227, 1041, 1, 0, 0, 0, 229, 1062, 1, 0, 0, 0, 231, 1066, 1, 0, 0, 0, 233, 1070, 1, 0, 0, 0, 235, 1074, 1, 0, 0, 0, 237, 1078, 1, 0, 0, 0, 239, 1082, 1, 0, 0, 0, 241, 1087, 1, 0, 0, 0, 243, 1091, 1, 0, 0, 0, 245, 1095, 1, 0, 0, 0, 247, 1099, 1, 0, 0, 0, 249, 1103, 1, 0, 0, 0, 251, 1107, 1, 0, 0, 0, 253, 1110, 1, 0, 0, 0, 255, 1114, 1, 0, 0, 0, 257, 1118, 1, 0, 0, 0, 259, 1122, 1, 0, 0, 0, 261, 1126, 1, 0, 0, 0, 263, 1131, 1, 0, 0, 0, 265, 1136, 1, 0, 0, 0, 267, 1141, 1, 0, 0, 0, 269, 1148, 1, 0, 0, 0, 271, 1157, 1, 0, 0, 0, 273, 1164, 1, 0, 0, 0, 275, 1168, 1, 0, 0, 0, 277, 1172, 1, 0, 0, 0, 279, 1176, 1, 0, 0, 0, 281, 1180, 1, 0, 0, 0, 283, 1186, 1, 0, 0, 0, 285, 1190, 1, 0, 0, 0, 287, 1194, 1, 0, 0, 0, 289, 1198, 1, 0, 0, 0, 291, 1202, 1, 0, 0, 0, 293, 1206, 1, 0, 0, 0, 295, 1210, 1, 0, 0, 0, 297, 1214, 1, 0, 0, 0, 299, 1218, 1, 0, 0, 0, 301, 1222, 1, 0, 0, 0, 303, 1226, 1, 0, 0, 0, 305, 1230, 1, 0, 0, 0, 307, 1235, 1, 0, 0, 0, 309, 1239, 1, 0, 0, 0, 311, 1243, 1, 0, 0, 0, 313, 1247, 1, 0, 0, 0, 315, 1251, 1, 0, 0, 0, 317, 
1255, 1, 0, 0, 0, 319, 1259, 1, 0, 0, 0, 321, 1263, 1, 0, 0, 0, 323, 1267, 1, 0, 0, 0, 325, 1272, 1, 0, 0, 0, 327, 1277, 1, 0, 0, 0, 329, 1281, 1, 0, 0, 0, 331, 1285, 1, 0, 0, 0, 333, 1289, 1, 0, 0, 0, 335, 1294, 1, 0, 0, 0, 337, 1301, 1, 0, 0, 0, 339, 1305, 1, 0, 0, 0, 341, 1309, 1, 0, 0, 0, 343, 1313, 1, 0, 0, 0, 345, 1317, 1, 0, 0, 0, 347, 1322, 1, 0, 0, 0, 349, 1326, 1, 0, 0, 0, 351, 1330, 1, 0, 0, 0, 353, 1334, 1, 0, 0, 0, 355, 1339, 1, 0, 0, 0, 357, 1343, 1, 0, 0, 0, 359, 1347, 1, 0, 0, 0, 361, 1351, 1, 0, 0, 0, 363, 1355, 1, 0, 0, 0, 365, 1359, 1, 0, 0, 0, 367, 1365, 1, 0, 0, 0, 369, 1369, 1, 0, 0, 0, 371, 1373, 1, 0, 0, 0, 373, 1377, 1, 0, 0, 0, 375, 1381, 1, 0, 0, 0, 377, 1385, 1, 0, 0, 0, 379, 1389, 1, 0, 0, 0, 381, 1394, 1, 0, 0, 0, 383, 1400, 1, 0, 0, 0, 385, 1406, 1, 0, 0, 0, 387, 1410, 1, 0, 0, 0, 389, 1414, 1, 0, 0, 0, 391, 1418, 1, 0, 0, 0, 393, 1424, 1, 0, 0, 0, 395, 1430, 1, 0, 0, 0, 397, 1434, 1, 0, 0, 0, 399, 1438, 1, 0, 0, 0, 401, 1442, 1, 0, 0, 0, 403, 1448, 1, 0, 0, 0, 405, 1454, 1, 0, 0, 0, 407, 1460, 1, 0, 0, 0, 409, 410, 7, 0, 0, 0, 410, 411, 7, 1, 0, 0, 411, 412, 7, 2, 0, 0, 412, 413, 7, 2, 0, 0, 413, 414, 7, 3, 0, 0, 414, 415, 7, 4, 0, 0, 415, 416, 7, 5, 0, 0, 416, 417, 1, 0, 0, 0, 417, 418, 6, 0, 0, 0, 418, 16, 1, 0, 0, 0, 419, 420, 7, 0, 0, 0, 420, 421, 7, 6, 0, 0, 421, 422, 7, 7, 0, 0, 422, 423, 7, 8, 0, 0, 423, 424, 1, 0, 0, 0, 424, 425, 6, 1, 1, 0, 425, 18, 1, 0, 0, 0, 426, 427, 7, 3, 0, 0, 427, 428, 7, 9, 0, 0, 428, 429, 7, 6, 0, 0, 429, 430, 7, 1, 0, 0, 430, 431, 7, 4, 0, 0, 431, 432, 7, 10, 0, 0, 432, 433, 1, 0, 0, 0, 433, 434, 6, 2, 2, 0, 434, 20, 1, 0, 0, 0, 435, 436, 7, 3, 0, 0, 436, 437, 7, 11, 0, 0, 437, 438, 7, 12, 0, 0, 438, 439, 7, 13, 0, 0, 439, 440, 1, 0, 0, 0, 440, 441, 6, 3, 0, 0, 441, 22, 1, 0, 0, 0, 442, 443, 7, 3, 0, 0, 443, 444, 7, 14, 0, 0, 444, 445, 7, 8, 0, 0, 445, 446, 7, 13, 0, 0, 446, 447, 7, 12, 0, 0, 447, 448, 7, 1, 0, 0, 448, 449, 7, 9, 0, 0, 449, 450, 1, 0, 0, 0, 450, 451, 6, 4, 3, 0, 451, 24, 1, 0, 0, 
0, 452, 453, 7, 15, 0, 0, 453, 454, 7, 6, 0, 0, 454, 455, 7, 7, 0, 0, 455, 456, 7, 16, 0, 0, 456, 457, 1, 0, 0, 0, 457, 458, 6, 5, 4, 0, 458, 26, 1, 0, 0, 0, 459, 460, 7, 17, 0, 0, 460, 461, 7, 6, 0, 0, 461, 462, 7, 7, 0, 0, 462, 463, 7, 18, 0, 0, 463, 464, 1, 0, 0, 0, 464, 465, 6, 6, 0, 0, 465, 28, 1, 0, 0, 0, 466, 467, 7, 18, 0, 0, 467, 468, 7, 3, 0, 0, 468, 469, 7, 3, 0, 0, 469, 470, 7, 8, 0, 0, 470, 471, 1, 0, 0, 0, 471, 472, 6, 7, 1, 0, 472, 30, 1, 0, 0, 0, 473, 474, 7, 13, 0, 0, 474, 475, 7, 1, 0, 0, 475, 476, 7, 16, 0, 0, 476, 477, 7, 1, 0, 0, 477, 478, 7, 5, 0, 0, 478, 479, 1, 0, 0, 0, 479, 480, 6, 8, 0, 0, 480, 32, 1, 0, 0, 0, 481, 482, 7, 16, 0, 0, 482, 483, 7, 11, 0, 0, 483, 484, 5, 95, 0, 0, 484, 485, 7, 3, 0, 0, 485, 486, 7, 14, 0, 0, 486, 487, 7, 8, 0, 0, 487, 488, 7, 12, 0, 0, 488, 489, 7, 9, 0, 0, 489, 490, 7, 0, 0, 0, 490, 491, 1, 0, 0, 0, 491, 492, 6, 9, 5, 0, 492, 34, 1, 0, 0, 0, 493, 494, 7, 6, 0, 0, 494, 495, 7, 3, 0, 0, 495, 496, 7, 9, 0, 0, 496, 497, 7, 12, 0, 0, 497, 498, 7, 16, 0, 0, 498, 499, 7, 3, 0, 0, 499, 500, 1, 0, 0, 0, 500, 501, 6, 10, 6, 0, 501, 36, 1, 0, 0, 0, 502, 503, 7, 6, 0, 0, 503, 504, 7, 7, 0, 0, 504, 505, 7, 19, 0, 0, 505, 506, 1, 0, 0, 0, 506, 507, 6, 11, 0, 0, 507, 38, 1, 0, 0, 0, 508, 509, 7, 2, 0, 0, 509, 510, 7, 10, 0, 0, 510, 511, 7, 7, 0, 0, 511, 512, 7, 19, 0, 0, 512, 513, 1, 0, 0, 0, 513, 514, 6, 12, 7, 0, 514, 40, 1, 0, 0, 0, 515, 516, 7, 2, 0, 0, 516, 517, 7, 7, 0, 0, 517, 518, 7, 6, 0, 0, 518, 519, 7, 5, 0, 0, 519, 520, 1, 0, 0, 0, 520, 521, 6, 13, 0, 0, 521, 42, 1, 0, 0, 0, 522, 523, 7, 2, 0, 0, 523, 524, 7, 5, 0, 0, 524, 525, 7, 12, 0, 0, 525, 526, 7, 5, 0, 0, 526, 527, 7, 2, 0, 0, 527, 528, 1, 0, 0, 0, 528, 529, 6, 14, 0, 0, 529, 44, 1, 0, 0, 0, 530, 531, 7, 19, 0, 0, 531, 532, 7, 10, 0, 0, 532, 533, 7, 3, 0, 0, 533, 534, 7, 6, 0, 0, 534, 535, 7, 3, 0, 0, 535, 536, 1, 0, 0, 0, 536, 537, 6, 15, 0, 0, 537, 46, 1, 0, 0, 0, 538, 539, 4, 16, 0, 0, 539, 540, 7, 1, 0, 0, 540, 541, 7, 9, 0, 0, 541, 542, 7, 13, 0, 0, 
542, 543, 7, 1, 0, 0, 543, 544, 7, 9, 0, 0, 544, 545, 7, 3, 0, 0, 545, 546, 7, 2, 0, 0, 546, 547, 7, 5, 0, 0, 547, 548, 7, 12, 0, 0, 548, 549, 7, 5, 0, 0, 549, 550, 7, 2, 0, 0, 550, 551, 1, 0, 0, 0, 551, 552, 6, 16, 0, 0, 552, 48, 1, 0, 0, 0, 553, 554, 4, 17, 1, 0, 554, 555, 7, 13, 0, 0, 555, 556, 7, 7, 0, 0, 556, 557, 7, 7, 0, 0, 557, 558, 7, 18, 0, 0, 558, 559, 7, 20, 0, 0, 559, 560, 7, 8, 0, 0, 560, 561, 1, 0, 0, 0, 561, 562, 6, 17, 8, 0, 562, 50, 1, 0, 0, 0, 563, 564, 4, 18, 2, 0, 564, 565, 7, 16, 0, 0, 565, 566, 7, 3, 0, 0, 566, 567, 7, 5, 0, 0, 567, 568, 7, 6, 0, 0, 568, 569, 7, 1, 0, 0, 569, 570, 7, 4, 0, 0, 570, 571, 7, 2, 0, 0, 571, 572, 1, 0, 0, 0, 572, 573, 6, 18, 9, 0, 573, 52, 1, 0, 0, 0, 574, 576, 8, 21, 0, 0, 575, 574, 1, 0, 0, 0, 576, 577, 1, 0, 0, 0, 577, 575, 1, 0, 0, 0, 577, 578, 1, 0, 0, 0, 578, 579, 1, 0, 0, 0, 579, 580, 6, 19, 0, 0, 580, 54, 1, 0, 0, 0, 581, 582, 5, 47, 0, 0, 582, 583, 5, 47, 0, 0, 583, 587, 1, 0, 0, 0, 584, 586, 8, 22, 0, 0, 585, 584, 1, 0, 0, 0, 586, 589, 1, 0, 0, 0, 587, 585, 1, 0, 0, 0, 587, 588, 1, 0, 0, 0, 588, 591, 1, 0, 0, 0, 589, 587, 1, 0, 0, 0, 590, 592, 5, 13, 0, 0, 591, 590, 1, 0, 0, 0, 591, 592, 1, 0, 0, 0, 592, 594, 1, 0, 0, 0, 593, 595, 5, 10, 0, 0, 594, 593, 1, 0, 0, 0, 594, 595, 1, 0, 0, 0, 595, 596, 1, 0, 0, 0, 596, 597, 6, 20, 10, 0, 597, 56, 1, 0, 0, 0, 598, 599, 5, 47, 0, 0, 599, 600, 5, 42, 0, 0, 600, 605, 1, 0, 0, 0, 601, 604, 3, 57, 21, 0, 602, 604, 9, 0, 0, 0, 603, 601, 1, 0, 0, 0, 603, 602, 1, 0, 0, 0, 604, 607, 1, 0, 0, 0, 605, 606, 1, 0, 0, 0, 605, 603, 1, 0, 0, 0, 606, 608, 1, 0, 0, 0, 607, 605, 1, 0, 0, 0, 608, 609, 5, 42, 0, 0, 609, 610, 5, 47, 0, 0, 610, 611, 1, 0, 0, 0, 611, 612, 6, 21, 10, 0, 612, 58, 1, 0, 0, 0, 613, 615, 7, 23, 0, 0, 614, 613, 1, 0, 0, 0, 615, 616, 1, 0, 0, 0, 616, 614, 1, 0, 0, 0, 616, 617, 1, 0, 0, 0, 617, 618, 1, 0, 0, 0, 618, 619, 6, 22, 10, 0, 619, 60, 1, 0, 0, 0, 620, 621, 5, 124, 0, 0, 621, 622, 1, 0, 0, 0, 622, 623, 6, 23, 11, 0, 623, 62, 1, 0, 0, 0, 624, 625, 7, 
24, 0, 0, 625, 64, 1, 0, 0, 0, 626, 627, 7, 25, 0, 0, 627, 66, 1, 0, 0, 0, 628, 629, 5, 92, 0, 0, 629, 630, 7, 26, 0, 0, 630, 68, 1, 0, 0, 0, 631, 632, 8, 27, 0, 0, 632, 70, 1, 0, 0, 0, 633, 635, 7, 3, 0, 0, 634, 636, 7, 28, 0, 0, 635, 634, 1, 0, 0, 0, 635, 636, 1, 0, 0, 0, 636, 638, 1, 0, 0, 0, 637, 639, 3, 63, 24, 0, 638, 637, 1, 0, 0, 0, 639, 640, 1, 0, 0, 0, 640, 638, 1, 0, 0, 0, 640, 641, 1, 0, 0, 0, 641, 72, 1, 0, 0, 0, 642, 643, 5, 64, 0, 0, 643, 74, 1, 0, 0, 0, 644, 645, 5, 96, 0, 0, 645, 76, 1, 0, 0, 0, 646, 650, 8, 29, 0, 0, 647, 648, 5, 96, 0, 0, 648, 650, 5, 96, 0, 0, 649, 646, 1, 0, 0, 0, 649, 647, 1, 0, 0, 0, 650, 78, 1, 0, 0, 0, 651, 652, 5, 95, 0, 0, 652, 80, 1, 0, 0, 0, 653, 657, 3, 65, 25, 0, 654, 657, 3, 63, 24, 0, 655, 657, 3, 79, 32, 0, 656, 653, 1, 0, 0, 0, 656, 654, 1, 0, 0, 0, 656, 655, 1, 0, 0, 0, 657, 82, 1, 0, 0, 0, 658, 663, 5, 34, 0, 0, 659, 662, 3, 67, 26, 0, 660, 662, 3, 69, 27, 0, 661, 659, 1, 0, 0, 0, 661, 660, 1, 0, 0, 0, 662, 665, 1, 0, 0, 0, 663, 661, 1, 0, 0, 0, 663, 664, 1, 0, 0, 0, 664, 666, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 666, 688, 5, 34, 0, 0, 667, 668, 5, 34, 0, 0, 668, 669, 5, 34, 0, 0, 669, 670, 5, 34, 0, 0, 670, 674, 1, 0, 0, 0, 671, 673, 8, 22, 0, 0, 672, 671, 1, 0, 0, 0, 673, 676, 1, 0, 0, 0, 674, 675, 1, 0, 0, 0, 674, 672, 1, 0, 0, 0, 675, 677, 1, 0, 0, 0, 676, 674, 1, 0, 0, 0, 677, 678, 5, 34, 0, 0, 678, 679, 5, 34, 0, 0, 679, 680, 5, 34, 0, 0, 680, 682, 1, 0, 0, 0, 681, 683, 5, 34, 0, 0, 682, 681, 1, 0, 0, 0, 682, 683, 1, 0, 0, 0, 683, 685, 1, 0, 0, 0, 684, 686, 5, 34, 0, 0, 685, 684, 1, 0, 0, 0, 685, 686, 1, 0, 0, 0, 686, 688, 1, 0, 0, 0, 687, 658, 1, 0, 0, 0, 687, 667, 1, 0, 0, 0, 688, 84, 1, 0, 0, 0, 689, 691, 3, 63, 24, 0, 690, 689, 1, 0, 0, 0, 691, 692, 1, 0, 0, 0, 692, 690, 1, 0, 0, 0, 692, 693, 1, 0, 0, 0, 693, 86, 1, 0, 0, 0, 694, 696, 3, 63, 24, 0, 695, 694, 1, 0, 0, 0, 696, 697, 1, 0, 0, 0, 697, 695, 1, 0, 0, 0, 697, 698, 1, 0, 0, 0, 698, 699, 1, 0, 0, 0, 699, 703, 3, 103, 44, 0, 700, 702, 3, 63, 24, 0, 
701, 700, 1, 0, 0, 0, 702, 705, 1, 0, 0, 0, 703, 701, 1, 0, 0, 0, 703, 704, 1, 0, 0, 0, 704, 737, 1, 0, 0, 0, 705, 703, 1, 0, 0, 0, 706, 708, 3, 103, 44, 0, 707, 709, 3, 63, 24, 0, 708, 707, 1, 0, 0, 0, 709, 710, 1, 0, 0, 0, 710, 708, 1, 0, 0, 0, 710, 711, 1, 0, 0, 0, 711, 737, 1, 0, 0, 0, 712, 714, 3, 63, 24, 0, 713, 712, 1, 0, 0, 0, 714, 715, 1, 0, 0, 0, 715, 713, 1, 0, 0, 0, 715, 716, 1, 0, 0, 0, 716, 724, 1, 0, 0, 0, 717, 721, 3, 103, 44, 0, 718, 720, 3, 63, 24, 0, 719, 718, 1, 0, 0, 0, 720, 723, 1, 0, 0, 0, 721, 719, 1, 0, 0, 0, 721, 722, 1, 0, 0, 0, 722, 725, 1, 0, 0, 0, 723, 721, 1, 0, 0, 0, 724, 717, 1, 0, 0, 0, 724, 725, 1, 0, 0, 0, 725, 726, 1, 0, 0, 0, 726, 727, 3, 71, 28, 0, 727, 737, 1, 0, 0, 0, 728, 730, 3, 103, 44, 0, 729, 731, 3, 63, 24, 0, 730, 729, 1, 0, 0, 0, 731, 732, 1, 0, 0, 0, 732, 730, 1, 0, 0, 0, 732, 733, 1, 0, 0, 0, 733, 734, 1, 0, 0, 0, 734, 735, 3, 71, 28, 0, 735, 737, 1, 0, 0, 0, 736, 695, 1, 0, 0, 0, 736, 706, 1, 0, 0, 0, 736, 713, 1, 0, 0, 0, 736, 728, 1, 0, 0, 0, 737, 88, 1, 0, 0, 0, 738, 739, 7, 30, 0, 0, 739, 740, 7, 31, 0, 0, 740, 90, 1, 0, 0, 0, 741, 742, 7, 12, 0, 0, 742, 743, 7, 9, 0, 0, 743, 744, 7, 0, 0, 0, 744, 92, 1, 0, 0, 0, 745, 746, 7, 12, 0, 0, 746, 747, 7, 2, 0, 0, 747, 748, 7, 4, 0, 0, 748, 94, 1, 0, 0, 0, 749, 750, 5, 61, 0, 0, 750, 96, 1, 0, 0, 0, 751, 752, 5, 58, 0, 0, 752, 753, 5, 58, 0, 0, 753, 98, 1, 0, 0, 0, 754, 755, 5, 44, 0, 0, 755, 100, 1, 0, 0, 0, 756, 757, 7, 0, 0, 0, 757, 758, 7, 3, 0, 0, 758, 759, 7, 2, 0, 0, 759, 760, 7, 4, 0, 0, 760, 102, 1, 0, 0, 0, 761, 762, 5, 46, 0, 0, 762, 104, 1, 0, 0, 0, 763, 764, 7, 15, 0, 0, 764, 765, 7, 12, 0, 0, 765, 766, 7, 13, 0, 0, 766, 767, 7, 2, 0, 0, 767, 768, 7, 3, 0, 0, 768, 106, 1, 0, 0, 0, 769, 770, 7, 15, 0, 0, 770, 771, 7, 1, 0, 0, 771, 772, 7, 6, 0, 0, 772, 773, 7, 2, 0, 0, 773, 774, 7, 5, 0, 0, 774, 108, 1, 0, 0, 0, 775, 776, 7, 1, 0, 0, 776, 777, 7, 9, 0, 0, 777, 110, 1, 0, 0, 0, 778, 779, 7, 1, 0, 0, 779, 780, 7, 2, 0, 0, 780, 112, 1, 0, 0, 0, 781, 782, 7, 
13, 0, 0, 782, 783, 7, 12, 0, 0, 783, 784, 7, 2, 0, 0, 784, 785, 7, 5, 0, 0, 785, 114, 1, 0, 0, 0, 786, 787, 7, 13, 0, 0, 787, 788, 7, 1, 0, 0, 788, 789, 7, 18, 0, 0, 789, 790, 7, 3, 0, 0, 790, 116, 1, 0, 0, 0, 791, 792, 5, 40, 0, 0, 792, 118, 1, 0, 0, 0, 793, 794, 7, 9, 0, 0, 794, 795, 7, 7, 0, 0, 795, 796, 7, 5, 0, 0, 796, 120, 1, 0, 0, 0, 797, 798, 7, 9, 0, 0, 798, 799, 7, 20, 0, 0, 799, 800, 7, 13, 0, 0, 800, 801, 7, 13, 0, 0, 801, 122, 1, 0, 0, 0, 802, 803, 7, 9, 0, 0, 803, 804, 7, 20, 0, 0, 804, 805, 7, 13, 0, 0, 805, 806, 7, 13, 0, 0, 806, 807, 7, 2, 0, 0, 807, 124, 1, 0, 0, 0, 808, 809, 7, 7, 0, 0, 809, 810, 7, 6, 0, 0, 810, 126, 1, 0, 0, 0, 811, 812, 5, 63, 0, 0, 812, 128, 1, 0, 0, 0, 813, 814, 7, 6, 0, 0, 814, 815, 7, 13, 0, 0, 815, 816, 7, 1, 0, 0, 816, 817, 7, 18, 0, 0, 817, 818, 7, 3, 0, 0, 818, 130, 1, 0, 0, 0, 819, 820, 5, 41, 0, 0, 820, 132, 1, 0, 0, 0, 821, 822, 7, 5, 0, 0, 822, 823, 7, 6, 0, 0, 823, 824, 7, 20, 0, 0, 824, 825, 7, 3, 0, 0, 825, 134, 1, 0, 0, 0, 826, 827, 5, 61, 0, 0, 827, 828, 5, 61, 0, 0, 828, 136, 1, 0, 0, 0, 829, 830, 5, 61, 0, 0, 830, 831, 5, 126, 0, 0, 831, 138, 1, 0, 0, 0, 832, 833, 5, 33, 0, 0, 833, 834, 5, 61, 0, 0, 834, 140, 1, 0, 0, 0, 835, 836, 5, 60, 0, 0, 836, 142, 1, 0, 0, 0, 837, 838, 5, 60, 0, 0, 838, 839, 5, 61, 0, 0, 839, 144, 1, 0, 0, 0, 840, 841, 5, 62, 0, 0, 841, 146, 1, 0, 0, 0, 842, 843, 5, 62, 0, 0, 843, 844, 5, 61, 0, 0, 844, 148, 1, 0, 0, 0, 845, 846, 5, 43, 0, 0, 846, 150, 1, 0, 0, 0, 847, 848, 5, 45, 0, 0, 848, 152, 1, 0, 0, 0, 849, 850, 5, 42, 0, 0, 850, 154, 1, 0, 0, 0, 851, 852, 5, 47, 0, 0, 852, 156, 1, 0, 0, 0, 853, 854, 5, 37, 0, 0, 854, 158, 1, 0, 0, 0, 855, 856, 7, 16, 0, 0, 856, 857, 7, 12, 0, 0, 857, 858, 7, 5, 0, 0, 858, 859, 7, 4, 0, 0, 859, 860, 7, 10, 0, 0, 860, 160, 1, 0, 0, 0, 861, 864, 3, 127, 56, 0, 862, 865, 3, 65, 25, 0, 863, 865, 3, 79, 32, 0, 864, 862, 1, 0, 0, 0, 864, 863, 1, 0, 0, 0, 865, 869, 1, 0, 0, 0, 866, 868, 3, 81, 33, 0, 867, 866, 1, 0, 0, 0, 868, 871, 1, 0, 0, 0, 869, 
867, 1, 0, 0, 0, 869, 870, 1, 0, 0, 0, 870, 879, 1, 0, 0, 0, 871, 869, 1, 0, 0, 0, 872, 874, 3, 127, 56, 0, 873, 875, 3, 63, 24, 0, 874, 873, 1, 0, 0, 0, 875, 876, 1, 0, 0, 0, 876, 874, 1, 0, 0, 0, 876, 877, 1, 0, 0, 0, 877, 879, 1, 0, 0, 0, 878, 861, 1, 0, 0, 0, 878, 872, 1, 0, 0, 0, 879, 162, 1, 0, 0, 0, 880, 881, 5, 91, 0, 0, 881, 882, 1, 0, 0, 0, 882, 883, 6, 74, 0, 0, 883, 884, 6, 74, 0, 0, 884, 164, 1, 0, 0, 0, 885, 886, 5, 93, 0, 0, 886, 887, 1, 0, 0, 0, 887, 888, 6, 75, 11, 0, 888, 889, 6, 75, 11, 0, 889, 166, 1, 0, 0, 0, 890, 894, 3, 65, 25, 0, 891, 893, 3, 81, 33, 0, 892, 891, 1, 0, 0, 0, 893, 896, 1, 0, 0, 0, 894, 892, 1, 0, 0, 0, 894, 895, 1, 0, 0, 0, 895, 907, 1, 0, 0, 0, 896, 894, 1, 0, 0, 0, 897, 900, 3, 79, 32, 0, 898, 900, 3, 73, 29, 0, 899, 897, 1, 0, 0, 0, 899, 898, 1, 0, 0, 0, 900, 902, 1, 0, 0, 0, 901, 903, 3, 81, 33, 0, 902, 901, 1, 0, 0, 0, 903, 904, 1, 0, 0, 0, 904, 902, 1, 0, 0, 0, 904, 905, 1, 0, 0, 0, 905, 907, 1, 0, 0, 0, 906, 890, 1, 0, 0, 0, 906, 899, 1, 0, 0, 0, 907, 168, 1, 0, 0, 0, 908, 910, 3, 75, 30, 0, 909, 911, 3, 77, 31, 0, 910, 909, 1, 0, 0, 0, 911, 912, 1, 0, 0, 0, 912, 910, 1, 0, 0, 0, 912, 913, 1, 0, 0, 0, 913, 914, 1, 0, 0, 0, 914, 915, 3, 75, 30, 0, 915, 170, 1, 0, 0, 0, 916, 917, 3, 169, 77, 0, 917, 172, 1, 0, 0, 0, 918, 919, 3, 55, 20, 0, 919, 920, 1, 0, 0, 0, 920, 921, 6, 79, 10, 0, 921, 174, 1, 0, 0, 0, 922, 923, 3, 57, 21, 0, 923, 924, 1, 0, 0, 0, 924, 925, 6, 80, 10, 0, 925, 176, 1, 0, 0, 0, 926, 927, 3, 59, 22, 0, 927, 928, 1, 0, 0, 0, 928, 929, 6, 81, 10, 0, 929, 178, 1, 0, 0, 0, 930, 931, 3, 163, 74, 0, 931, 932, 1, 0, 0, 0, 932, 933, 6, 82, 12, 0, 933, 934, 6, 82, 13, 0, 934, 180, 1, 0, 0, 0, 935, 936, 3, 61, 23, 0, 936, 937, 1, 0, 0, 0, 937, 938, 6, 83, 14, 0, 938, 939, 6, 83, 11, 0, 939, 182, 1, 0, 0, 0, 940, 941, 3, 59, 22, 0, 941, 942, 1, 0, 0, 0, 942, 943, 6, 84, 10, 0, 943, 184, 1, 0, 0, 0, 944, 945, 3, 55, 20, 0, 945, 946, 1, 0, 0, 0, 946, 947, 6, 85, 10, 0, 947, 186, 1, 0, 0, 0, 948, 949, 3, 57, 21, 0, 
949, 950, 1, 0, 0, 0, 950, 951, 6, 86, 10, 0, 951, 188, 1, 0, 0, 0, 952, 953, 3, 61, 23, 0, 953, 954, 1, 0, 0, 0, 954, 955, 6, 87, 14, 0, 955, 956, 6, 87, 11, 0, 956, 190, 1, 0, 0, 0, 957, 958, 3, 163, 74, 0, 958, 959, 1, 0, 0, 0, 959, 960, 6, 88, 12, 0, 960, 192, 1, 0, 0, 0, 961, 962, 3, 165, 75, 0, 962, 963, 1, 0, 0, 0, 963, 964, 6, 89, 15, 0, 964, 194, 1, 0, 0, 0, 965, 966, 3, 335, 160, 0, 966, 967, 1, 0, 0, 0, 967, 968, 6, 90, 16, 0, 968, 196, 1, 0, 0, 0, 969, 970, 3, 99, 42, 0, 970, 971, 1, 0, 0, 0, 971, 972, 6, 91, 17, 0, 972, 198, 1, 0, 0, 0, 973, 974, 3, 95, 40, 0, 974, 975, 1, 0, 0, 0, 975, 976, 6, 92, 18, 0, 976, 200, 1, 0, 0, 0, 977, 978, 7, 16, 0, 0, 978, 979, 7, 3, 0, 0, 979, 980, 7, 5, 0, 0, 980, 981, 7, 12, 0, 0, 981, 982, 7, 0, 0, 0, 982, 983, 7, 12, 0, 0, 983, 984, 7, 5, 0, 0, 984, 985, 7, 12, 0, 0, 985, 202, 1, 0, 0, 0, 986, 990, 8, 32, 0, 0, 987, 988, 5, 47, 0, 0, 988, 990, 8, 33, 0, 0, 989, 986, 1, 0, 0, 0, 989, 987, 1, 0, 0, 0, 990, 204, 1, 0, 0, 0, 991, 993, 3, 203, 94, 0, 992, 991, 1, 0, 0, 0, 993, 994, 1, 0, 0, 0, 994, 992, 1, 0, 0, 0, 994, 995, 1, 0, 0, 0, 995, 206, 1, 0, 0, 0, 996, 997, 3, 205, 95, 0, 997, 998, 1, 0, 0, 0, 998, 999, 6, 96, 19, 0, 999, 208, 1, 0, 0, 0, 1000, 1001, 3, 83, 34, 0, 1001, 1002, 1, 0, 0, 0, 1002, 1003, 6, 97, 20, 0, 1003, 210, 1, 0, 0, 0, 1004, 1005, 3, 55, 20, 0, 1005, 1006, 1, 0, 0, 0, 1006, 1007, 6, 98, 10, 0, 1007, 212, 1, 0, 0, 0, 1008, 1009, 3, 57, 21, 0, 1009, 1010, 1, 0, 0, 0, 1010, 1011, 6, 99, 10, 0, 1011, 214, 1, 0, 0, 0, 1012, 1013, 3, 59, 22, 0, 1013, 1014, 1, 0, 0, 0, 1014, 1015, 6, 100, 10, 0, 1015, 216, 1, 0, 0, 0, 1016, 1017, 3, 61, 23, 0, 1017, 1018, 1, 0, 0, 0, 1018, 1019, 6, 101, 14, 0, 1019, 1020, 6, 101, 11, 0, 1020, 218, 1, 0, 0, 0, 1021, 1022, 3, 103, 44, 0, 1022, 1023, 1, 0, 0, 0, 1023, 1024, 6, 102, 21, 0, 1024, 220, 1, 0, 0, 0, 1025, 1026, 3, 99, 42, 0, 1026, 1027, 1, 0, 0, 0, 1027, 1028, 6, 103, 17, 0, 1028, 222, 1, 0, 0, 0, 1029, 1030, 3, 127, 56, 0, 1030, 1031, 1, 0, 0, 0, 1031, 
1032, 6, 104, 22, 0, 1032, 224, 1, 0, 0, 0, 1033, 1034, 3, 161, 73, 0, 1034, 1035, 1, 0, 0, 0, 1035, 1036, 6, 105, 23, 0, 1036, 226, 1, 0, 0, 0, 1037, 1042, 3, 65, 25, 0, 1038, 1042, 3, 63, 24, 0, 1039, 1042, 3, 79, 32, 0, 1040, 1042, 3, 153, 69, 0, 1041, 1037, 1, 0, 0, 0, 1041, 1038, 1, 0, 0, 0, 1041, 1039, 1, 0, 0, 0, 1041, 1040, 1, 0, 0, 0, 1042, 228, 1, 0, 0, 0, 1043, 1046, 3, 65, 25, 0, 1044, 1046, 3, 153, 69, 0, 1045, 1043, 1, 0, 0, 0, 1045, 1044, 1, 0, 0, 0, 1046, 1050, 1, 0, 0, 0, 1047, 1049, 3, 227, 106, 0, 1048, 1047, 1, 0, 0, 0, 1049, 1052, 1, 0, 0, 0, 1050, 1048, 1, 0, 0, 0, 1050, 1051, 1, 0, 0, 0, 1051, 1063, 1, 0, 0, 0, 1052, 1050, 1, 0, 0, 0, 1053, 1056, 3, 79, 32, 0, 1054, 1056, 3, 73, 29, 0, 1055, 1053, 1, 0, 0, 0, 1055, 1054, 1, 0, 0, 0, 1056, 1058, 1, 0, 0, 0, 1057, 1059, 3, 227, 106, 0, 1058, 1057, 1, 0, 0, 0, 1059, 1060, 1, 0, 0, 0, 1060, 1058, 1, 0, 0, 0, 1060, 1061, 1, 0, 0, 0, 1061, 1063, 1, 0, 0, 0, 1062, 1045, 1, 0, 0, 0, 1062, 1055, 1, 0, 0, 0, 1063, 230, 1, 0, 0, 0, 1064, 1067, 3, 229, 107, 0, 1065, 1067, 3, 169, 77, 0, 1066, 1064, 1, 0, 0, 0, 1066, 1065, 1, 0, 0, 0, 1067, 1068, 1, 0, 0, 0, 1068, 1066, 1, 0, 0, 0, 1068, 1069, 1, 0, 0, 0, 1069, 232, 1, 0, 0, 0, 1070, 1071, 3, 55, 20, 0, 1071, 1072, 1, 0, 0, 0, 1072, 1073, 6, 109, 10, 0, 1073, 234, 1, 0, 0, 0, 1074, 1075, 3, 57, 21, 0, 1075, 1076, 1, 0, 0, 0, 1076, 1077, 6, 110, 10, 0, 1077, 236, 1, 0, 0, 0, 1078, 1079, 3, 59, 22, 0, 1079, 1080, 1, 0, 0, 0, 1080, 1081, 6, 111, 10, 0, 1081, 238, 1, 0, 0, 0, 1082, 1083, 3, 61, 23, 0, 1083, 1084, 1, 0, 0, 0, 1084, 1085, 6, 112, 14, 0, 1085, 1086, 6, 112, 11, 0, 1086, 240, 1, 0, 0, 0, 1087, 1088, 3, 95, 40, 0, 1088, 1089, 1, 0, 0, 0, 1089, 1090, 6, 113, 18, 0, 1090, 242, 1, 0, 0, 0, 1091, 1092, 3, 99, 42, 0, 1092, 1093, 1, 0, 0, 0, 1093, 1094, 6, 114, 17, 0, 1094, 244, 1, 0, 0, 0, 1095, 1096, 3, 103, 44, 0, 1096, 1097, 1, 0, 0, 0, 1097, 1098, 6, 115, 21, 0, 1098, 246, 1, 0, 0, 0, 1099, 1100, 3, 127, 56, 0, 1100, 1101, 1, 0, 0, 0, 1101, 1102, 
6, 116, 22, 0, 1102, 248, 1, 0, 0, 0, 1103, 1104, 3, 161, 73, 0, 1104, 1105, 1, 0, 0, 0, 1105, 1106, 6, 117, 23, 0, 1106, 250, 1, 0, 0, 0, 1107, 1108, 7, 12, 0, 0, 1108, 1109, 7, 2, 0, 0, 1109, 252, 1, 0, 0, 0, 1110, 1111, 3, 231, 108, 0, 1111, 1112, 1, 0, 0, 0, 1112, 1113, 6, 119, 24, 0, 1113, 254, 1, 0, 0, 0, 1114, 1115, 3, 55, 20, 0, 1115, 1116, 1, 0, 0, 0, 1116, 1117, 6, 120, 10, 0, 1117, 256, 1, 0, 0, 0, 1118, 1119, 3, 57, 21, 0, 1119, 1120, 1, 0, 0, 0, 1120, 1121, 6, 121, 10, 0, 1121, 258, 1, 0, 0, 0, 1122, 1123, 3, 59, 22, 0, 1123, 1124, 1, 0, 0, 0, 1124, 1125, 6, 122, 10, 0, 1125, 260, 1, 0, 0, 0, 1126, 1127, 3, 61, 23, 0, 1127, 1128, 1, 0, 0, 0, 1128, 1129, 6, 123, 14, 0, 1129, 1130, 6, 123, 11, 0, 1130, 262, 1, 0, 0, 0, 1131, 1132, 3, 163, 74, 0, 1132, 1133, 1, 0, 0, 0, 1133, 1134, 6, 124, 12, 0, 1134, 1135, 6, 124, 25, 0, 1135, 264, 1, 0, 0, 0, 1136, 1137, 7, 7, 0, 0, 1137, 1138, 7, 9, 0, 0, 1138, 1139, 1, 0, 0, 0, 1139, 1140, 6, 125, 26, 0, 1140, 266, 1, 0, 0, 0, 1141, 1142, 7, 19, 0, 0, 1142, 1143, 7, 1, 0, 0, 1143, 1144, 7, 5, 0, 0, 1144, 1145, 7, 10, 0, 0, 1145, 1146, 1, 0, 0, 0, 1146, 1147, 6, 126, 26, 0, 1147, 268, 1, 0, 0, 0, 1148, 1149, 8, 34, 0, 0, 1149, 270, 1, 0, 0, 0, 1150, 1152, 3, 269, 127, 0, 1151, 1150, 1, 0, 0, 0, 1152, 1153, 1, 0, 0, 0, 1153, 1151, 1, 0, 0, 0, 1153, 1154, 1, 0, 0, 0, 1154, 1155, 1, 0, 0, 0, 1155, 1156, 3, 335, 160, 0, 1156, 1158, 1, 0, 0, 0, 1157, 1151, 1, 0, 0, 0, 1157, 1158, 1, 0, 0, 0, 1158, 1160, 1, 0, 0, 0, 1159, 1161, 3, 269, 127, 0, 1160, 1159, 1, 0, 0, 0, 1161, 1162, 1, 0, 0, 0, 1162, 1160, 1, 0, 0, 0, 1162, 1163, 1, 0, 0, 0, 1163, 272, 1, 0, 0, 0, 1164, 1165, 3, 271, 128, 0, 1165, 1166, 1, 0, 0, 0, 1166, 1167, 6, 129, 27, 0, 1167, 274, 1, 0, 0, 0, 1168, 1169, 3, 55, 20, 0, 1169, 1170, 1, 0, 0, 0, 1170, 1171, 6, 130, 10, 0, 1171, 276, 1, 0, 0, 0, 1172, 1173, 3, 57, 21, 0, 1173, 1174, 1, 0, 0, 0, 1174, 1175, 6, 131, 10, 0, 1175, 278, 1, 0, 0, 0, 1176, 1177, 3, 59, 22, 0, 1177, 1178, 1, 0, 0, 0, 1178, 1179, 6, 
132, 10, 0, 1179, 280, 1, 0, 0, 0, 1180, 1181, 3, 61, 23, 0, 1181, 1182, 1, 0, 0, 0, 1182, 1183, 6, 133, 14, 0, 1183, 1184, 6, 133, 11, 0, 1184, 1185, 6, 133, 11, 0, 1185, 282, 1, 0, 0, 0, 1186, 1187, 3, 95, 40, 0, 1187, 1188, 1, 0, 0, 0, 1188, 1189, 6, 134, 18, 0, 1189, 284, 1, 0, 0, 0, 1190, 1191, 3, 99, 42, 0, 1191, 1192, 1, 0, 0, 0, 1192, 1193, 6, 135, 17, 0, 1193, 286, 1, 0, 0, 0, 1194, 1195, 3, 103, 44, 0, 1195, 1196, 1, 0, 0, 0, 1196, 1197, 6, 136, 21, 0, 1197, 288, 1, 0, 0, 0, 1198, 1199, 3, 267, 126, 0, 1199, 1200, 1, 0, 0, 0, 1200, 1201, 6, 137, 28, 0, 1201, 290, 1, 0, 0, 0, 1202, 1203, 3, 231, 108, 0, 1203, 1204, 1, 0, 0, 0, 1204, 1205, 6, 138, 24, 0, 1205, 292, 1, 0, 0, 0, 1206, 1207, 3, 171, 78, 0, 1207, 1208, 1, 0, 0, 0, 1208, 1209, 6, 139, 29, 0, 1209, 294, 1, 0, 0, 0, 1210, 1211, 3, 127, 56, 0, 1211, 1212, 1, 0, 0, 0, 1212, 1213, 6, 140, 22, 0, 1213, 296, 1, 0, 0, 0, 1214, 1215, 3, 161, 73, 0, 1215, 1216, 1, 0, 0, 0, 1216, 1217, 6, 141, 23, 0, 1217, 298, 1, 0, 0, 0, 1218, 1219, 3, 55, 20, 0, 1219, 1220, 1, 0, 0, 0, 1220, 1221, 6, 142, 10, 0, 1221, 300, 1, 0, 0, 0, 1222, 1223, 3, 57, 21, 0, 1223, 1224, 1, 0, 0, 0, 1224, 1225, 6, 143, 10, 0, 1225, 302, 1, 0, 0, 0, 1226, 1227, 3, 59, 22, 0, 1227, 1228, 1, 0, 0, 0, 1228, 1229, 6, 144, 10, 0, 1229, 304, 1, 0, 0, 0, 1230, 1231, 3, 61, 23, 0, 1231, 1232, 1, 0, 0, 0, 1232, 1233, 6, 145, 14, 0, 1233, 1234, 6, 145, 11, 0, 1234, 306, 1, 0, 0, 0, 1235, 1236, 3, 103, 44, 0, 1236, 1237, 1, 0, 0, 0, 1237, 1238, 6, 146, 21, 0, 1238, 308, 1, 0, 0, 0, 1239, 1240, 3, 127, 56, 0, 1240, 1241, 1, 0, 0, 0, 1241, 1242, 6, 147, 22, 0, 1242, 310, 1, 0, 0, 0, 1243, 1244, 3, 161, 73, 0, 1244, 1245, 1, 0, 0, 0, 1245, 1246, 6, 148, 23, 0, 1246, 312, 1, 0, 0, 0, 1247, 1248, 3, 171, 78, 0, 1248, 1249, 1, 0, 0, 0, 1249, 1250, 6, 149, 29, 0, 1250, 314, 1, 0, 0, 0, 1251, 1252, 3, 167, 76, 0, 1252, 1253, 1, 0, 0, 0, 1253, 1254, 6, 150, 30, 0, 1254, 316, 1, 0, 0, 0, 1255, 1256, 3, 55, 20, 0, 1256, 1257, 1, 0, 0, 0, 1257, 1258, 6, 151, 
10, 0, 1258, 318, 1, 0, 0, 0, 1259, 1260, 3, 57, 21, 0, 1260, 1261, 1, 0, 0, 0, 1261, 1262, 6, 152, 10, 0, 1262, 320, 1, 0, 0, 0, 1263, 1264, 3, 59, 22, 0, 1264, 1265, 1, 0, 0, 0, 1265, 1266, 6, 153, 10, 0, 1266, 322, 1, 0, 0, 0, 1267, 1268, 3, 61, 23, 0, 1268, 1269, 1, 0, 0, 0, 1269, 1270, 6, 154, 14, 0, 1270, 1271, 6, 154, 11, 0, 1271, 324, 1, 0, 0, 0, 1272, 1273, 7, 1, 0, 0, 1273, 1274, 7, 9, 0, 0, 1274, 1275, 7, 15, 0, 0, 1275, 1276, 7, 7, 0, 0, 1276, 326, 1, 0, 0, 0, 1277, 1278, 3, 55, 20, 0, 1278, 1279, 1, 0, 0, 0, 1279, 1280, 6, 156, 10, 0, 1280, 328, 1, 0, 0, 0, 1281, 1282, 3, 57, 21, 0, 1282, 1283, 1, 0, 0, 0, 1283, 1284, 6, 157, 10, 0, 1284, 330, 1, 0, 0, 0, 1285, 1286, 3, 59, 22, 0, 1286, 1287, 1, 0, 0, 0, 1287, 1288, 6, 158, 10, 0, 1288, 332, 1, 0, 0, 0, 1289, 1290, 3, 165, 75, 0, 1290, 1291, 1, 0, 0, 0, 1291, 1292, 6, 159, 15, 0, 1292, 1293, 6, 159, 11, 0, 1293, 334, 1, 0, 0, 0, 1294, 1295, 5, 58, 0, 0, 1295, 336, 1, 0, 0, 0, 1296, 1302, 3, 73, 29, 0, 1297, 1302, 3, 63, 24, 0, 1298, 1302, 3, 103, 44, 0, 1299, 1302, 3, 65, 25, 0, 1300, 1302, 3, 79, 32, 0, 1301, 1296, 1, 0, 0, 0, 1301, 1297, 1, 0, 0, 0, 1301, 1298, 1, 0, 0, 0, 1301, 1299, 1, 0, 0, 0, 1301, 1300, 1, 0, 0, 0, 1302, 1303, 1, 0, 0, 0, 1303, 1301, 1, 0, 0, 0, 1303, 1304, 1, 0, 0, 0, 1304, 338, 1, 0, 0, 0, 1305, 1306, 3, 55, 20, 0, 1306, 1307, 1, 0, 0, 0, 1307, 1308, 6, 162, 10, 0, 1308, 340, 1, 0, 0, 0, 1309, 1310, 3, 57, 21, 0, 1310, 1311, 1, 0, 0, 0, 1311, 1312, 6, 163, 10, 0, 1312, 342, 1, 0, 0, 0, 1313, 1314, 3, 59, 22, 0, 1314, 1315, 1, 0, 0, 0, 1315, 1316, 6, 164, 10, 0, 1316, 344, 1, 0, 0, 0, 1317, 1318, 3, 61, 23, 0, 1318, 1319, 1, 0, 0, 0, 1319, 1320, 6, 165, 14, 0, 1320, 1321, 6, 165, 11, 0, 1321, 346, 1, 0, 0, 0, 1322, 1323, 3, 335, 160, 0, 1323, 1324, 1, 0, 0, 0, 1324, 1325, 6, 166, 16, 0, 1325, 348, 1, 0, 0, 0, 1326, 1327, 3, 99, 42, 0, 1327, 1328, 1, 0, 0, 0, 1328, 1329, 6, 167, 17, 0, 1329, 350, 1, 0, 0, 0, 1330, 1331, 3, 103, 44, 0, 1331, 1332, 1, 0, 0, 0, 1332, 1333, 6, 168, 
21, 0, 1333, 352, 1, 0, 0, 0, 1334, 1335, 3, 265, 125, 0, 1335, 1336, 1, 0, 0, 0, 1336, 1337, 6, 169, 31, 0, 1337, 1338, 6, 169, 32, 0, 1338, 354, 1, 0, 0, 0, 1339, 1340, 3, 205, 95, 0, 1340, 1341, 1, 0, 0, 0, 1341, 1342, 6, 170, 19, 0, 1342, 356, 1, 0, 0, 0, 1343, 1344, 3, 83, 34, 0, 1344, 1345, 1, 0, 0, 0, 1345, 1346, 6, 171, 20, 0, 1346, 358, 1, 0, 0, 0, 1347, 1348, 3, 55, 20, 0, 1348, 1349, 1, 0, 0, 0, 1349, 1350, 6, 172, 10, 0, 1350, 360, 1, 0, 0, 0, 1351, 1352, 3, 57, 21, 0, 1352, 1353, 1, 0, 0, 0, 1353, 1354, 6, 173, 10, 0, 1354, 362, 1, 0, 0, 0, 1355, 1356, 3, 59, 22, 0, 1356, 1357, 1, 0, 0, 0, 1357, 1358, 6, 174, 10, 0, 1358, 364, 1, 0, 0, 0, 1359, 1360, 3, 61, 23, 0, 1360, 1361, 1, 0, 0, 0, 1361, 1362, 6, 175, 14, 0, 1362, 1363, 6, 175, 11, 0, 1363, 1364, 6, 175, 11, 0, 1364, 366, 1, 0, 0, 0, 1365, 1366, 3, 99, 42, 0, 1366, 1367, 1, 0, 0, 0, 1367, 1368, 6, 176, 17, 0, 1368, 368, 1, 0, 0, 0, 1369, 1370, 3, 103, 44, 0, 1370, 1371, 1, 0, 0, 0, 1371, 1372, 6, 177, 21, 0, 1372, 370, 1, 0, 0, 0, 1373, 1374, 3, 231, 108, 0, 1374, 1375, 1, 0, 0, 0, 1375, 1376, 6, 178, 24, 0, 1376, 372, 1, 0, 0, 0, 1377, 1378, 3, 55, 20, 0, 1378, 1379, 1, 0, 0, 0, 1379, 1380, 6, 179, 10, 0, 1380, 374, 1, 0, 0, 0, 1381, 1382, 3, 57, 21, 0, 1382, 1383, 1, 0, 0, 0, 1383, 1384, 6, 180, 10, 0, 1384, 376, 1, 0, 0, 0, 1385, 1386, 3, 59, 22, 0, 1386, 1387, 1, 0, 0, 0, 1387, 1388, 6, 181, 10, 0, 1388, 378, 1, 0, 0, 0, 1389, 1390, 3, 61, 23, 0, 1390, 1391, 1, 0, 0, 0, 1391, 1392, 6, 182, 14, 0, 1392, 1393, 6, 182, 11, 0, 1393, 380, 1, 0, 0, 0, 1394, 1395, 3, 205, 95, 0, 1395, 1396, 1, 0, 0, 0, 1396, 1397, 6, 183, 19, 0, 1397, 1398, 6, 183, 11, 0, 1398, 1399, 6, 183, 33, 0, 1399, 382, 1, 0, 0, 0, 1400, 1401, 3, 83, 34, 0, 1401, 1402, 1, 0, 0, 0, 1402, 1403, 6, 184, 20, 0, 1403, 1404, 6, 184, 11, 0, 1404, 1405, 6, 184, 33, 0, 1405, 384, 1, 0, 0, 0, 1406, 1407, 3, 55, 20, 0, 1407, 1408, 1, 0, 0, 0, 1408, 1409, 6, 185, 10, 0, 1409, 386, 1, 0, 0, 0, 1410, 1411, 3, 57, 21, 0, 1411, 1412, 1, 0, 0, 
0, 1412, 1413, 6, 186, 10, 0, 1413, 388, 1, 0, 0, 0, 1414, 1415, 3, 59, 22, 0, 1415, 1416, 1, 0, 0, 0, 1416, 1417, 6, 187, 10, 0, 1417, 390, 1, 0, 0, 0, 1418, 1419, 3, 335, 160, 0, 1419, 1420, 1, 0, 0, 0, 1420, 1421, 6, 188, 16, 0, 1421, 1422, 6, 188, 11, 0, 1422, 1423, 6, 188, 9, 0, 1423, 392, 1, 0, 0, 0, 1424, 1425, 3, 99, 42, 0, 1425, 1426, 1, 0, 0, 0, 1426, 1427, 6, 189, 17, 0, 1427, 1428, 6, 189, 11, 0, 1428, 1429, 6, 189, 9, 0, 1429, 394, 1, 0, 0, 0, 1430, 1431, 3, 55, 20, 0, 1431, 1432, 1, 0, 0, 0, 1432, 1433, 6, 190, 10, 0, 1433, 396, 1, 0, 0, 0, 1434, 1435, 3, 57, 21, 0, 1435, 1436, 1, 0, 0, 0, 1436, 1437, 6, 191, 10, 0, 1437, 398, 1, 0, 0, 0, 1438, 1439, 3, 59, 22, 0, 1439, 1440, 1, 0, 0, 0, 1440, 1441, 6, 192, 10, 0, 1441, 400, 1, 0, 0, 0, 1442, 1443, 3, 171, 78, 0, 1443, 1444, 1, 0, 0, 0, 1444, 1445, 6, 193, 11, 0, 1445, 1446, 6, 193, 0, 0, 1446, 1447, 6, 193, 29, 0, 1447, 402, 1, 0, 0, 0, 1448, 1449, 3, 167, 76, 0, 1449, 1450, 1, 0, 0, 0, 1450, 1451, 6, 194, 11, 0, 1451, 1452, 6, 194, 0, 0, 1452, 1453, 6, 194, 30, 0, 1453, 404, 1, 0, 0, 0, 1454, 1455, 3, 89, 37, 0, 1455, 1456, 1, 0, 0, 0, 1456, 1457, 6, 195, 11, 0, 1457, 1458, 6, 195, 0, 0, 1458, 1459, 6, 195, 34, 0, 1459, 406, 1, 0, 0, 0, 1460, 1461, 3, 61, 23, 0, 1461, 1462, 1, 0, 0, 0, 1462, 1463, 6, 196, 14, 0, 1463, 1464, 6, 196, 11, 0, 1464, 408, 1, 0, 0, 0, 65, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 577, 587, 591, 594, 603, 605, 616, 635, 640, 649, 656, 661, 663, 674, 682, 685, 687, 692, 697, 703, 710, 715, 721, 724, 732, 736, 864, 869, 876, 878, 894, 899, 904, 906, 912, 989, 994, 1041, 1045, 1050, 1055, 1060, 1062, 1066, 1068, 1153, 1157, 1162, 1301, 1303, 35, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 65, 0, 5, 0, 0, 7, 24, 0, 7, 66, 0, 7, 104, 0, 7, 33, 0, 7, 31, 0, 7, 76, 0, 7, 25, 0, 7, 35, 0, 7, 47, 0, 7, 64, 0, 7, 80, 0, 5, 10, 0, 5, 7, 0, 7, 90, 0, 7, 89, 0, 7, 68, 0, 7, 67, 0, 7, 88, 0, 5, 12, 0, 5, 14, 0, 
7, 28, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index 563e2418e7eff..aa1eab437be5c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -35,7 +35,7 @@ public class EsqlBaseLexer extends LexerConfig { CAST_OP=32, COMMA=33, DESC=34, DOT=35, FALSE=36, FIRST=37, IN=38, IS=39, LAST=40, LIKE=41, LP=42, NOT=43, NULL=44, NULLS=45, OR=46, PARAM=47, RLIKE=48, RP=49, TRUE=50, EQ=51, CIEQ=52, NEQ=53, LT=54, LTE=55, GT=56, GTE=57, - PLUS=58, MINUS=59, ASTERISK=60, SLASH=61, PERCENT=62, DEV_MATCH=63, NAMED_OR_POSITIONAL_PARAM=64, + PLUS=58, MINUS=59, ASTERISK=60, SLASH=61, PERCENT=62, MATCH=63, NAMED_OR_POSITIONAL_PARAM=64, OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, EXPLAIN_WS=72, EXPLAIN_LINE_COMMENT=73, EXPLAIN_MULTILINE_COMMENT=74, METADATA=75, UNQUOTED_SOURCE=76, @@ -78,12 +78,12 @@ private static String[] makeRuleNames() { "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", - "PERCENT", "DEV_MATCH", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", - "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID", "QUOTED_IDENTIFIER", - "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_OPENING_BRACKET", - "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", "FROM_COLON", - "FROM_COMMA", "FROM_ASSIGN", "METADATA", "UNQUOTED_SOURCE_PART", "UNQUOTED_SOURCE", + "PERCENT", "MATCH", 
"NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", + "UNQUOTED_IDENTIFIER", "QUOTED_ID", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", + "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_OPENING_BRACKET", "EXPLAIN_PIPE", + "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "FROM_PIPE", + "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", "FROM_COLON", "FROM_COMMA", + "FROM_ASSIGN", "METADATA", "UNQUOTED_SOURCE_PART", "UNQUOTED_SOURCE", "FROM_UNQUOTED_SOURCE", "FROM_QUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", "PROJECT_PARAM", "PROJECT_NAMED_OR_POSITIONAL_PARAM", "UNQUOTED_ID_BODY_WITH_PATTERN", @@ -125,11 +125,11 @@ private static String[] makeLiteralNames() { "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", - "'-'", "'*'", "'/'", "'%'", null, null, null, "']'", null, null, null, - null, null, null, null, null, "'metadata'", null, null, null, null, null, - null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, - null, null, null, null, null, null, null, null, "'info'", null, null, - null, "':'" + "'-'", "'*'", "'/'", "'%'", "'match'", null, null, "']'", null, null, + null, null, null, null, null, null, "'metadata'", null, null, null, null, + null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, + null, null, null, null, null, null, null, null, null, "'info'", null, + null, null, "':'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); @@ -143,14 +143,14 @@ private static String[] makeSymbolicNames() { "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", - "PERCENT", "DEV_MATCH", 
"NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", - "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", - "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", - "EXPLAIN_MULTILINE_COMMENT", "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", - "FROM_MULTILINE_COMMENT", "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", - "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", - "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", - "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + "PERCENT", "MATCH", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", + "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", + "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", + "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", + "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", "SHOW_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", @@ -229,8 +229,6 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return DEV_LOOKUP_sempred((RuleContext)_localctx, predIndex); case 18: return DEV_METRICS_sempred((RuleContext)_localctx, predIndex); - case 72: - return DEV_MATCH_sempred((RuleContext)_localctx, predIndex); } return true; } @@ -255,16 +253,9 @@ private boolean DEV_METRICS_sempred(RuleContext _localctx, int predIndex) { } return true; } - private boolean DEV_MATCH_sempred(RuleContext _localctx, int 
predIndex) { - switch (predIndex) { - case 3: - return this.isDevVersion(); - } - return true; - } public static final String _serializedATN = - "\u0004\u0000x\u05ba\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ + "\u0004\u0000x\u05b9\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ @@ -380,92 +371,92 @@ private boolean DEV_MATCH_sempred(RuleContext _localctx, int predIndex) { "<\u0001<\u0001=\u0001=\u0001=\u0001>\u0001>\u0001>\u0001?\u0001?\u0001"+ "@\u0001@\u0001@\u0001A\u0001A\u0001B\u0001B\u0001B\u0001C\u0001C\u0001"+ "D\u0001D\u0001E\u0001E\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001"+ - "H\u0001H\u0001H\u0001H\u0001H\u0001I\u0001I\u0001I\u0003I\u0362\bI\u0001"+ - "I\u0005I\u0365\bI\nI\fI\u0368\tI\u0001I\u0001I\u0004I\u036c\bI\u000bI"+ - "\fI\u036d\u0003I\u0370\bI\u0001J\u0001J\u0001J\u0001J\u0001J\u0001K\u0001"+ - "K\u0001K\u0001K\u0001K\u0001L\u0001L\u0005L\u037e\bL\nL\fL\u0381\tL\u0001"+ - "L\u0001L\u0003L\u0385\bL\u0001L\u0004L\u0388\bL\u000bL\fL\u0389\u0003"+ - "L\u038c\bL\u0001M\u0001M\u0004M\u0390\bM\u000bM\fM\u0391\u0001M\u0001"+ - "M\u0001N\u0001N\u0001O\u0001O\u0001O\u0001O\u0001P\u0001P\u0001P\u0001"+ - "P\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001R\u0001"+ - "S\u0001S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001U\u0001"+ - "U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001"+ - "W\u0001W\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001"+ - "Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001"+ - "\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001]\u0001]\u0001]\u0001]\u0001"+ - "]\u0001^\u0001^\u0001^\u0003^\u03df\b^\u0001_\u0004_\u03e2\b_\u000b_\f"+ - "_\u03e3\u0001`\u0001`\u0001`\u0001`\u0001a\u0001a\u0001a\u0001a\u0001"+ - 
"b\u0001b\u0001b\u0001b\u0001c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001"+ - "d\u0001d\u0001e\u0001e\u0001e\u0001e\u0001e\u0001f\u0001f\u0001f\u0001"+ - "f\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001h\u0001i\u0001"+ - "i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0003j\u0413\bj\u0001k\u0001"+ - "k\u0003k\u0417\bk\u0001k\u0005k\u041a\bk\nk\fk\u041d\tk\u0001k\u0001k"+ - "\u0003k\u0421\bk\u0001k\u0004k\u0424\bk\u000bk\fk\u0425\u0003k\u0428\b"+ - "k\u0001l\u0001l\u0004l\u042c\bl\u000bl\fl\u042d\u0001m\u0001m\u0001m\u0001"+ - "m\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001"+ - "p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001"+ - "r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001t\u0001"+ - "u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001"+ - "w\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001y\u0001y\u0001z\u0001"+ - "z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001"+ - "|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001"+ - "~\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u0080\u0004"+ - "\u0080\u0481\b\u0080\u000b\u0080\f\u0080\u0482\u0001\u0080\u0001\u0080"+ - "\u0003\u0080\u0487\b\u0080\u0001\u0080\u0004\u0080\u048a\b\u0080\u000b"+ - "\u0080\f\u0080\u048b\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0081\u0001"+ - "\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0083\u0001\u0083\u0001"+ - "\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001"+ - "\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001"+ - "\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001"+ - "\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001"+ - "\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001"+ - "\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001"+ - "\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d\u0001"+ 
- "\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001"+ - "\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090\u0001"+ - "\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091\u0001"+ - "\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001"+ - "\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001\u0094\u0001"+ - "\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0096\u0001"+ - "\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097\u0001"+ - "\u0097\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0099\u0001"+ - "\u0099\u0001\u0099\u0001\u0099\u0001\u009a\u0001\u009a\u0001\u009a\u0001"+ - "\u009a\u0001\u009a\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009b\u0001"+ - "\u009b\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009d\u0001"+ - "\u009d\u0001\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001\u009e\u0001"+ - "\u009e\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0001"+ - "\u00a0\u0001\u00a0\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001"+ - "\u00a1\u0004\u00a1\u0517\b\u00a1\u000b\u00a1\f\u00a1\u0518\u0001\u00a2"+ - "\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a3\u0001\u00a3\u0001\u00a3"+ - "\u0001\u00a3\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a5"+ - "\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6\u0001\u00a6"+ - "\u0001\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7"+ - "\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001\u00a9"+ - "\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00aa\u0001\u00aa\u0001\u00aa"+ - "\u0001\u00aa\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ac"+ - "\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad"+ - "\u0001\u00ad\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00af"+ - "\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00b0"+ - 
"\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1"+ - "\u0001\u00b1\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b3"+ - "\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b4\u0001\u00b4\u0001\u00b4"+ - "\u0001\u00b4\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b6"+ - "\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7"+ - "\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b8\u0001\u00b8"+ - "\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b9\u0001\u00b9"+ - "\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00ba"+ - "\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bc\u0001\u00bc"+ - "\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd"+ - "\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be"+ - "\u0001\u00be\u0001\u00be\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf"+ - "\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001\u00c1"+ - "\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c2\u0001\u00c2"+ - "\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c3\u0001\u00c3"+ - "\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c4\u0001\u00c4"+ - "\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0002\u025d\u02a2\u0000\u00c5\u000f"+ - "\u0001\u0011\u0002\u0013\u0003\u0015\u0004\u0017\u0005\u0019\u0006\u001b"+ - "\u0007\u001d\b\u001f\t!\n#\u000b%\f\'\r)\u000e+\u000f-\u0010/\u00111\u0012"+ - "3\u00135\u00147\u00159\u0016;\u0017=\u0018?\u0000A\u0000C\u0000E\u0000"+ + "H\u0001H\u0001H\u0001H\u0001I\u0001I\u0001I\u0003I\u0361\bI\u0001I\u0005"+ + "I\u0364\bI\nI\fI\u0367\tI\u0001I\u0001I\u0004I\u036b\bI\u000bI\fI\u036c"+ + "\u0003I\u036f\bI\u0001J\u0001J\u0001J\u0001J\u0001J\u0001K\u0001K\u0001"+ + "K\u0001K\u0001K\u0001L\u0001L\u0005L\u037d\bL\nL\fL\u0380\tL\u0001L\u0001"+ + "L\u0003L\u0384\bL\u0001L\u0004L\u0387\bL\u000bL\fL\u0388\u0003L\u038b"+ + 
"\bL\u0001M\u0001M\u0004M\u038f\bM\u000bM\fM\u0390\u0001M\u0001M\u0001"+ + "N\u0001N\u0001O\u0001O\u0001O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001"+ + "Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001R\u0001S\u0001"+ + "S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001U\u0001U\u0001"+ + "U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001W\u0001"+ + "W\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001"+ + "Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001"+ + "\\\u0001]\u0001]\u0001]\u0001]\u0001]\u0001]\u0001]\u0001]\u0001]\u0001"+ + "^\u0001^\u0001^\u0003^\u03de\b^\u0001_\u0004_\u03e1\b_\u000b_\f_\u03e2"+ + "\u0001`\u0001`\u0001`\u0001`\u0001a\u0001a\u0001a\u0001a\u0001b\u0001"+ + "b\u0001b\u0001b\u0001c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001"+ + "d\u0001e\u0001e\u0001e\u0001e\u0001e\u0001f\u0001f\u0001f\u0001f\u0001"+ + "g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001h\u0001i\u0001i\u0001"+ + "i\u0001i\u0001j\u0001j\u0001j\u0001j\u0003j\u0412\bj\u0001k\u0001k\u0003"+ + "k\u0416\bk\u0001k\u0005k\u0419\bk\nk\fk\u041c\tk\u0001k\u0001k\u0003k"+ + "\u0420\bk\u0001k\u0004k\u0423\bk\u000bk\fk\u0424\u0003k\u0427\bk\u0001"+ + "l\u0001l\u0004l\u042b\bl\u000bl\fl\u042c\u0001m\u0001m\u0001m\u0001m\u0001"+ + "n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001p\u0001"+ + "p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001r\u0001"+ + "r\u0001s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001t\u0001u\u0001"+ + "u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001"+ + "x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001"+ + "z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001"+ + "|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001"+ + "~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u0080\u0004\u0080"+ + "\u0480\b\u0080\u000b\u0080\f\u0080\u0481\u0001\u0080\u0001\u0080\u0003"+ + 
"\u0080\u0486\b\u0080\u0001\u0080\u0004\u0080\u0489\b\u0080\u000b\u0080"+ + "\f\u0080\u048a\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0082"+ + "\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0083"+ + "\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0085"+ + "\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086"+ + "\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001\u0087"+ + "\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0089"+ + "\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001\u008a"+ + "\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008c"+ + "\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d\u0001\u008d"+ + "\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008f"+ + "\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090\u0001\u0090"+ + "\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091"+ + "\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001\u0093"+ + "\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094"+ + "\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0096\u0001\u0096"+ + "\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097"+ + "\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0099\u0001\u0099"+ + "\u0001\u0099\u0001\u0099\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a"+ + "\u0001\u009a\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009b"+ + "\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009d\u0001\u009d"+ + "\u0001\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009e"+ + "\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u00a0"+ + "\u0001\u00a0\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1"+ + "\u0004\u00a1\u0516\b\u00a1\u000b\u00a1\f\u00a1\u0517\u0001\u00a2\u0001"+ + 
"\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001"+ + "\u00a3\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a5\u0001"+ + "\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6\u0001\u00a6\u0001"+ + "\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001"+ + "\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001\u00a9\u0001"+ + "\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001"+ + "\u00aa\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ac\u0001"+ + "\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001"+ + "\u00ad\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00af\u0001"+ + "\u00af\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00b0\u0001"+ + "\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001"+ + "\u00b1\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b3\u0001"+ + "\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001"+ + "\u00b4\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b6\u0001"+ + "\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7\u0001"+ + "\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b8\u0001\u00b8\u0001"+ + "\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b9\u0001\u00b9\u0001"+ + "\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001"+ + "\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bc\u0001\u00bc\u0001"+ + "\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd\u0001"+ + "\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be\u0001"+ + "\u00be\u0001\u00be\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001"+ + "\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001\u00c1\u0001"+ + "\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c2\u0001\u00c2\u0001"+ + "\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c3\u0001\u00c3\u0001"+ + 
"\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c4\u0001\u00c4\u0001"+ + "\u00c4\u0001\u00c4\u0001\u00c4\u0002\u025d\u02a2\u0000\u00c5\u000f\u0001"+ + "\u0011\u0002\u0013\u0003\u0015\u0004\u0017\u0005\u0019\u0006\u001b\u0007"+ + "\u001d\b\u001f\t!\n#\u000b%\f\'\r)\u000e+\u000f-\u0010/\u00111\u00123"+ + "\u00135\u00147\u00159\u0016;\u0017=\u0018?\u0000A\u0000C\u0000E\u0000"+ "G\u0000I\u0000K\u0000M\u0000O\u0000Q\u0000S\u0019U\u001aW\u001bY\u001c"+ "[\u001d]\u001e_\u001fa c!e\"g#i$k%m&o\'q(s)u*w+y,{-}.\u007f/\u00810\u0083"+ "1\u00852\u00873\u00894\u008b5\u008d6\u008f7\u00918\u00939\u0095:\u0097"+ @@ -496,7 +487,7 @@ private boolean DEV_MATCH_sempred(RuleContext _localctx, int predIndex) { "\r \u0001\u000009\u0002\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004\u0000"+ "\n\n\r\r\"\"\\\\\u0002\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002\u0000"+ "YYyy\u000b\u0000\t\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u000b\u0000"+ - "\t\n\r\r \"#,,//::<<>?\\\\||\u05d6\u0000\u000f\u0001\u0000\u0000\u0000"+ + "\t\n\r\r \"#,,//::<<>?\\\\||\u05d5\u0000\u000f\u0001\u0000\u0000\u0000"+ "\u0000\u0011\u0001\u0000\u0000\u0000\u0000\u0013\u0001\u0000\u0000\u0000"+ "\u0000\u0015\u0001\u0000\u0000\u0000\u0000\u0017\u0001\u0000\u0000\u0000"+ "\u0000\u0019\u0001\u0000\u0000\u0000\u0000\u001b\u0001\u0000\u0000\u0000"+ @@ -616,69 +607,69 @@ private boolean DEV_MATCH_sempred(RuleContext _localctx, int predIndex) { "\u0000\u0093\u034a\u0001\u0000\u0000\u0000\u0095\u034d\u0001\u0000\u0000"+ "\u0000\u0097\u034f\u0001\u0000\u0000\u0000\u0099\u0351\u0001\u0000\u0000"+ "\u0000\u009b\u0353\u0001\u0000\u0000\u0000\u009d\u0355\u0001\u0000\u0000"+ - "\u0000\u009f\u0357\u0001\u0000\u0000\u0000\u00a1\u036f\u0001\u0000\u0000"+ - "\u0000\u00a3\u0371\u0001\u0000\u0000\u0000\u00a5\u0376\u0001\u0000\u0000"+ - "\u0000\u00a7\u038b\u0001\u0000\u0000\u0000\u00a9\u038d\u0001\u0000\u0000"+ - "\u0000\u00ab\u0395\u0001\u0000\u0000\u0000\u00ad\u0397\u0001\u0000\u0000"+ - 
"\u0000\u00af\u039b\u0001\u0000\u0000\u0000\u00b1\u039f\u0001\u0000\u0000"+ - "\u0000\u00b3\u03a3\u0001\u0000\u0000\u0000\u00b5\u03a8\u0001\u0000\u0000"+ - "\u0000\u00b7\u03ad\u0001\u0000\u0000\u0000\u00b9\u03b1\u0001\u0000\u0000"+ - "\u0000\u00bb\u03b5\u0001\u0000\u0000\u0000\u00bd\u03b9\u0001\u0000\u0000"+ - "\u0000\u00bf\u03be\u0001\u0000\u0000\u0000\u00c1\u03c2\u0001\u0000\u0000"+ - "\u0000\u00c3\u03c6\u0001\u0000\u0000\u0000\u00c5\u03ca\u0001\u0000\u0000"+ - "\u0000\u00c7\u03ce\u0001\u0000\u0000\u0000\u00c9\u03d2\u0001\u0000\u0000"+ - "\u0000\u00cb\u03de\u0001\u0000\u0000\u0000\u00cd\u03e1\u0001\u0000\u0000"+ - "\u0000\u00cf\u03e5\u0001\u0000\u0000\u0000\u00d1\u03e9\u0001\u0000\u0000"+ - "\u0000\u00d3\u03ed\u0001\u0000\u0000\u0000\u00d5\u03f1\u0001\u0000\u0000"+ - "\u0000\u00d7\u03f5\u0001\u0000\u0000\u0000\u00d9\u03f9\u0001\u0000\u0000"+ - "\u0000\u00db\u03fe\u0001\u0000\u0000\u0000\u00dd\u0402\u0001\u0000\u0000"+ - "\u0000\u00df\u0406\u0001\u0000\u0000\u0000\u00e1\u040a\u0001\u0000\u0000"+ - "\u0000\u00e3\u0412\u0001\u0000\u0000\u0000\u00e5\u0427\u0001\u0000\u0000"+ - "\u0000\u00e7\u042b\u0001\u0000\u0000\u0000\u00e9\u042f\u0001\u0000\u0000"+ - "\u0000\u00eb\u0433\u0001\u0000\u0000\u0000\u00ed\u0437\u0001\u0000\u0000"+ - "\u0000\u00ef\u043b\u0001\u0000\u0000\u0000\u00f1\u0440\u0001\u0000\u0000"+ - "\u0000\u00f3\u0444\u0001\u0000\u0000\u0000\u00f5\u0448\u0001\u0000\u0000"+ - "\u0000\u00f7\u044c\u0001\u0000\u0000\u0000\u00f9\u0450\u0001\u0000\u0000"+ - "\u0000\u00fb\u0454\u0001\u0000\u0000\u0000\u00fd\u0457\u0001\u0000\u0000"+ - "\u0000\u00ff\u045b\u0001\u0000\u0000\u0000\u0101\u045f\u0001\u0000\u0000"+ - "\u0000\u0103\u0463\u0001\u0000\u0000\u0000\u0105\u0467\u0001\u0000\u0000"+ - "\u0000\u0107\u046c\u0001\u0000\u0000\u0000\u0109\u0471\u0001\u0000\u0000"+ - "\u0000\u010b\u0476\u0001\u0000\u0000\u0000\u010d\u047d\u0001\u0000\u0000"+ - "\u0000\u010f\u0486\u0001\u0000\u0000\u0000\u0111\u048d\u0001\u0000\u0000"+ - 
"\u0000\u0113\u0491\u0001\u0000\u0000\u0000\u0115\u0495\u0001\u0000\u0000"+ - "\u0000\u0117\u0499\u0001\u0000\u0000\u0000\u0119\u049d\u0001\u0000\u0000"+ - "\u0000\u011b\u04a3\u0001\u0000\u0000\u0000\u011d\u04a7\u0001\u0000\u0000"+ - "\u0000\u011f\u04ab\u0001\u0000\u0000\u0000\u0121\u04af\u0001\u0000\u0000"+ - "\u0000\u0123\u04b3\u0001\u0000\u0000\u0000\u0125\u04b7\u0001\u0000\u0000"+ - "\u0000\u0127\u04bb\u0001\u0000\u0000\u0000\u0129\u04bf\u0001\u0000\u0000"+ - "\u0000\u012b\u04c3\u0001\u0000\u0000\u0000\u012d\u04c7\u0001\u0000\u0000"+ - "\u0000\u012f\u04cb\u0001\u0000\u0000\u0000\u0131\u04cf\u0001\u0000\u0000"+ - "\u0000\u0133\u04d4\u0001\u0000\u0000\u0000\u0135\u04d8\u0001\u0000\u0000"+ - "\u0000\u0137\u04dc\u0001\u0000\u0000\u0000\u0139\u04e0\u0001\u0000\u0000"+ - "\u0000\u013b\u04e4\u0001\u0000\u0000\u0000\u013d\u04e8\u0001\u0000\u0000"+ - "\u0000\u013f\u04ec\u0001\u0000\u0000\u0000\u0141\u04f0\u0001\u0000\u0000"+ - "\u0000\u0143\u04f4\u0001\u0000\u0000\u0000\u0145\u04f9\u0001\u0000\u0000"+ - "\u0000\u0147\u04fe\u0001\u0000\u0000\u0000\u0149\u0502\u0001\u0000\u0000"+ - "\u0000\u014b\u0506\u0001\u0000\u0000\u0000\u014d\u050a\u0001\u0000\u0000"+ - "\u0000\u014f\u050f\u0001\u0000\u0000\u0000\u0151\u0516\u0001\u0000\u0000"+ - "\u0000\u0153\u051a\u0001\u0000\u0000\u0000\u0155\u051e\u0001\u0000\u0000"+ - "\u0000\u0157\u0522\u0001\u0000\u0000\u0000\u0159\u0526\u0001\u0000\u0000"+ - "\u0000\u015b\u052b\u0001\u0000\u0000\u0000\u015d\u052f\u0001\u0000\u0000"+ - "\u0000\u015f\u0533\u0001\u0000\u0000\u0000\u0161\u0537\u0001\u0000\u0000"+ - "\u0000\u0163\u053c\u0001\u0000\u0000\u0000\u0165\u0540\u0001\u0000\u0000"+ - "\u0000\u0167\u0544\u0001\u0000\u0000\u0000\u0169\u0548\u0001\u0000\u0000"+ - "\u0000\u016b\u054c\u0001\u0000\u0000\u0000\u016d\u0550\u0001\u0000\u0000"+ - "\u0000\u016f\u0556\u0001\u0000\u0000\u0000\u0171\u055a\u0001\u0000\u0000"+ - "\u0000\u0173\u055e\u0001\u0000\u0000\u0000\u0175\u0562\u0001\u0000\u0000"+ - 
"\u0000\u0177\u0566\u0001\u0000\u0000\u0000\u0179\u056a\u0001\u0000\u0000"+ - "\u0000\u017b\u056e\u0001\u0000\u0000\u0000\u017d\u0573\u0001\u0000\u0000"+ - "\u0000\u017f\u0579\u0001\u0000\u0000\u0000\u0181\u057f\u0001\u0000\u0000"+ - "\u0000\u0183\u0583\u0001\u0000\u0000\u0000\u0185\u0587\u0001\u0000\u0000"+ - "\u0000\u0187\u058b\u0001\u0000\u0000\u0000\u0189\u0591\u0001\u0000\u0000"+ - "\u0000\u018b\u0597\u0001\u0000\u0000\u0000\u018d\u059b\u0001\u0000\u0000"+ - "\u0000\u018f\u059f\u0001\u0000\u0000\u0000\u0191\u05a3\u0001\u0000\u0000"+ - "\u0000\u0193\u05a9\u0001\u0000\u0000\u0000\u0195\u05af\u0001\u0000\u0000"+ - "\u0000\u0197\u05b5\u0001\u0000\u0000\u0000\u0199\u019a\u0007\u0000\u0000"+ + "\u0000\u009f\u0357\u0001\u0000\u0000\u0000\u00a1\u036e\u0001\u0000\u0000"+ + "\u0000\u00a3\u0370\u0001\u0000\u0000\u0000\u00a5\u0375\u0001\u0000\u0000"+ + "\u0000\u00a7\u038a\u0001\u0000\u0000\u0000\u00a9\u038c\u0001\u0000\u0000"+ + "\u0000\u00ab\u0394\u0001\u0000\u0000\u0000\u00ad\u0396\u0001\u0000\u0000"+ + "\u0000\u00af\u039a\u0001\u0000\u0000\u0000\u00b1\u039e\u0001\u0000\u0000"+ + "\u0000\u00b3\u03a2\u0001\u0000\u0000\u0000\u00b5\u03a7\u0001\u0000\u0000"+ + "\u0000\u00b7\u03ac\u0001\u0000\u0000\u0000\u00b9\u03b0\u0001\u0000\u0000"+ + "\u0000\u00bb\u03b4\u0001\u0000\u0000\u0000\u00bd\u03b8\u0001\u0000\u0000"+ + "\u0000\u00bf\u03bd\u0001\u0000\u0000\u0000\u00c1\u03c1\u0001\u0000\u0000"+ + "\u0000\u00c3\u03c5\u0001\u0000\u0000\u0000\u00c5\u03c9\u0001\u0000\u0000"+ + "\u0000\u00c7\u03cd\u0001\u0000\u0000\u0000\u00c9\u03d1\u0001\u0000\u0000"+ + "\u0000\u00cb\u03dd\u0001\u0000\u0000\u0000\u00cd\u03e0\u0001\u0000\u0000"+ + "\u0000\u00cf\u03e4\u0001\u0000\u0000\u0000\u00d1\u03e8\u0001\u0000\u0000"+ + "\u0000\u00d3\u03ec\u0001\u0000\u0000\u0000\u00d5\u03f0\u0001\u0000\u0000"+ + "\u0000\u00d7\u03f4\u0001\u0000\u0000\u0000\u00d9\u03f8\u0001\u0000\u0000"+ + "\u0000\u00db\u03fd\u0001\u0000\u0000\u0000\u00dd\u0401\u0001\u0000\u0000"+ + 
"\u0000\u00df\u0405\u0001\u0000\u0000\u0000\u00e1\u0409\u0001\u0000\u0000"+ + "\u0000\u00e3\u0411\u0001\u0000\u0000\u0000\u00e5\u0426\u0001\u0000\u0000"+ + "\u0000\u00e7\u042a\u0001\u0000\u0000\u0000\u00e9\u042e\u0001\u0000\u0000"+ + "\u0000\u00eb\u0432\u0001\u0000\u0000\u0000\u00ed\u0436\u0001\u0000\u0000"+ + "\u0000\u00ef\u043a\u0001\u0000\u0000\u0000\u00f1\u043f\u0001\u0000\u0000"+ + "\u0000\u00f3\u0443\u0001\u0000\u0000\u0000\u00f5\u0447\u0001\u0000\u0000"+ + "\u0000\u00f7\u044b\u0001\u0000\u0000\u0000\u00f9\u044f\u0001\u0000\u0000"+ + "\u0000\u00fb\u0453\u0001\u0000\u0000\u0000\u00fd\u0456\u0001\u0000\u0000"+ + "\u0000\u00ff\u045a\u0001\u0000\u0000\u0000\u0101\u045e\u0001\u0000\u0000"+ + "\u0000\u0103\u0462\u0001\u0000\u0000\u0000\u0105\u0466\u0001\u0000\u0000"+ + "\u0000\u0107\u046b\u0001\u0000\u0000\u0000\u0109\u0470\u0001\u0000\u0000"+ + "\u0000\u010b\u0475\u0001\u0000\u0000\u0000\u010d\u047c\u0001\u0000\u0000"+ + "\u0000\u010f\u0485\u0001\u0000\u0000\u0000\u0111\u048c\u0001\u0000\u0000"+ + "\u0000\u0113\u0490\u0001\u0000\u0000\u0000\u0115\u0494\u0001\u0000\u0000"+ + "\u0000\u0117\u0498\u0001\u0000\u0000\u0000\u0119\u049c\u0001\u0000\u0000"+ + "\u0000\u011b\u04a2\u0001\u0000\u0000\u0000\u011d\u04a6\u0001\u0000\u0000"+ + "\u0000\u011f\u04aa\u0001\u0000\u0000\u0000\u0121\u04ae\u0001\u0000\u0000"+ + "\u0000\u0123\u04b2\u0001\u0000\u0000\u0000\u0125\u04b6\u0001\u0000\u0000"+ + "\u0000\u0127\u04ba\u0001\u0000\u0000\u0000\u0129\u04be\u0001\u0000\u0000"+ + "\u0000\u012b\u04c2\u0001\u0000\u0000\u0000\u012d\u04c6\u0001\u0000\u0000"+ + "\u0000\u012f\u04ca\u0001\u0000\u0000\u0000\u0131\u04ce\u0001\u0000\u0000"+ + "\u0000\u0133\u04d3\u0001\u0000\u0000\u0000\u0135\u04d7\u0001\u0000\u0000"+ + "\u0000\u0137\u04db\u0001\u0000\u0000\u0000\u0139\u04df\u0001\u0000\u0000"+ + "\u0000\u013b\u04e3\u0001\u0000\u0000\u0000\u013d\u04e7\u0001\u0000\u0000"+ + "\u0000\u013f\u04eb\u0001\u0000\u0000\u0000\u0141\u04ef\u0001\u0000\u0000"+ + 
"\u0000\u0143\u04f3\u0001\u0000\u0000\u0000\u0145\u04f8\u0001\u0000\u0000"+ + "\u0000\u0147\u04fd\u0001\u0000\u0000\u0000\u0149\u0501\u0001\u0000\u0000"+ + "\u0000\u014b\u0505\u0001\u0000\u0000\u0000\u014d\u0509\u0001\u0000\u0000"+ + "\u0000\u014f\u050e\u0001\u0000\u0000\u0000\u0151\u0515\u0001\u0000\u0000"+ + "\u0000\u0153\u0519\u0001\u0000\u0000\u0000\u0155\u051d\u0001\u0000\u0000"+ + "\u0000\u0157\u0521\u0001\u0000\u0000\u0000\u0159\u0525\u0001\u0000\u0000"+ + "\u0000\u015b\u052a\u0001\u0000\u0000\u0000\u015d\u052e\u0001\u0000\u0000"+ + "\u0000\u015f\u0532\u0001\u0000\u0000\u0000\u0161\u0536\u0001\u0000\u0000"+ + "\u0000\u0163\u053b\u0001\u0000\u0000\u0000\u0165\u053f\u0001\u0000\u0000"+ + "\u0000\u0167\u0543\u0001\u0000\u0000\u0000\u0169\u0547\u0001\u0000\u0000"+ + "\u0000\u016b\u054b\u0001\u0000\u0000\u0000\u016d\u054f\u0001\u0000\u0000"+ + "\u0000\u016f\u0555\u0001\u0000\u0000\u0000\u0171\u0559\u0001\u0000\u0000"+ + "\u0000\u0173\u055d\u0001\u0000\u0000\u0000\u0175\u0561\u0001\u0000\u0000"+ + "\u0000\u0177\u0565\u0001\u0000\u0000\u0000\u0179\u0569\u0001\u0000\u0000"+ + "\u0000\u017b\u056d\u0001\u0000\u0000\u0000\u017d\u0572\u0001\u0000\u0000"+ + "\u0000\u017f\u0578\u0001\u0000\u0000\u0000\u0181\u057e\u0001\u0000\u0000"+ + "\u0000\u0183\u0582\u0001\u0000\u0000\u0000\u0185\u0586\u0001\u0000\u0000"+ + "\u0000\u0187\u058a\u0001\u0000\u0000\u0000\u0189\u0590\u0001\u0000\u0000"+ + "\u0000\u018b\u0596\u0001\u0000\u0000\u0000\u018d\u059a\u0001\u0000\u0000"+ + "\u0000\u018f\u059e\u0001\u0000\u0000\u0000\u0191\u05a2\u0001\u0000\u0000"+ + "\u0000\u0193\u05a8\u0001\u0000\u0000\u0000\u0195\u05ae\u0001\u0000\u0000"+ + "\u0000\u0197\u05b4\u0001\u0000\u0000\u0000\u0199\u019a\u0007\u0000\u0000"+ "\u0000\u019a\u019b\u0007\u0001\u0000\u0000\u019b\u019c\u0007\u0002\u0000"+ "\u0000\u019c\u019d\u0007\u0002\u0000\u0000\u019d\u019e\u0007\u0003\u0000"+ "\u0000\u019e\u019f\u0007\u0004\u0000\u0000\u019f\u01a0\u0007\u0005\u0000"+ @@ -903,319 +894,319 @@ private boolean 
DEV_MATCH_sempred(RuleContext _localctx, int predIndex) { "-\u0000\u0000\u0350\u0098\u0001\u0000\u0000\u0000\u0351\u0352\u0005*\u0000"+ "\u0000\u0352\u009a\u0001\u0000\u0000\u0000\u0353\u0354\u0005/\u0000\u0000"+ "\u0354\u009c\u0001\u0000\u0000\u0000\u0355\u0356\u0005%\u0000\u0000\u0356"+ - "\u009e\u0001\u0000\u0000\u0000\u0357\u0358\u0004H\u0003\u0000\u0358\u0359"+ - "\u0007\u0010\u0000\u0000\u0359\u035a\u0007\f\u0000\u0000\u035a\u035b\u0007"+ - "\u0005\u0000\u0000\u035b\u035c\u0007\u0004\u0000\u0000\u035c\u035d\u0007"+ - "\n\u0000\u0000\u035d\u00a0\u0001\u0000\u0000\u0000\u035e\u0361\u0003\u007f"+ - "8\u0000\u035f\u0362\u0003A\u0019\u0000\u0360\u0362\u0003O \u0000\u0361"+ - "\u035f\u0001\u0000\u0000\u0000\u0361\u0360\u0001\u0000\u0000\u0000\u0362"+ - "\u0366\u0001\u0000\u0000\u0000\u0363\u0365\u0003Q!\u0000\u0364\u0363\u0001"+ - "\u0000\u0000\u0000\u0365\u0368\u0001\u0000\u0000\u0000\u0366\u0364\u0001"+ - "\u0000\u0000\u0000\u0366\u0367\u0001\u0000\u0000\u0000\u0367\u0370\u0001"+ - "\u0000\u0000\u0000\u0368\u0366\u0001\u0000\u0000\u0000\u0369\u036b\u0003"+ - "\u007f8\u0000\u036a\u036c\u0003?\u0018\u0000\u036b\u036a\u0001\u0000\u0000"+ - "\u0000\u036c\u036d\u0001\u0000\u0000\u0000\u036d\u036b\u0001\u0000\u0000"+ - "\u0000\u036d\u036e\u0001\u0000\u0000\u0000\u036e\u0370\u0001\u0000\u0000"+ - "\u0000\u036f\u035e\u0001\u0000\u0000\u0000\u036f\u0369\u0001\u0000\u0000"+ - "\u0000\u0370\u00a2\u0001\u0000\u0000\u0000\u0371\u0372\u0005[\u0000\u0000"+ - "\u0372\u0373\u0001\u0000\u0000\u0000\u0373\u0374\u0006J\u0000\u0000\u0374"+ - "\u0375\u0006J\u0000\u0000\u0375\u00a4\u0001\u0000\u0000\u0000\u0376\u0377"+ - "\u0005]\u0000\u0000\u0377\u0378\u0001\u0000\u0000\u0000\u0378\u0379\u0006"+ - "K\u000b\u0000\u0379\u037a\u0006K\u000b\u0000\u037a\u00a6\u0001\u0000\u0000"+ - "\u0000\u037b\u037f\u0003A\u0019\u0000\u037c\u037e\u0003Q!\u0000\u037d"+ - "\u037c\u0001\u0000\u0000\u0000\u037e\u0381\u0001\u0000\u0000\u0000\u037f"+ - 
"\u037d\u0001\u0000\u0000\u0000\u037f\u0380\u0001\u0000\u0000\u0000\u0380"+ - "\u038c\u0001\u0000\u0000\u0000\u0381\u037f\u0001\u0000\u0000\u0000\u0382"+ - "\u0385\u0003O \u0000\u0383\u0385\u0003I\u001d\u0000\u0384\u0382\u0001"+ - "\u0000\u0000\u0000\u0384\u0383\u0001\u0000\u0000\u0000\u0385\u0387\u0001"+ - "\u0000\u0000\u0000\u0386\u0388\u0003Q!\u0000\u0387\u0386\u0001\u0000\u0000"+ - "\u0000\u0388\u0389\u0001\u0000\u0000\u0000\u0389\u0387\u0001\u0000\u0000"+ - "\u0000\u0389\u038a\u0001\u0000\u0000\u0000\u038a\u038c\u0001\u0000\u0000"+ - "\u0000\u038b\u037b\u0001\u0000\u0000\u0000\u038b\u0384\u0001\u0000\u0000"+ - "\u0000\u038c\u00a8\u0001\u0000\u0000\u0000\u038d\u038f\u0003K\u001e\u0000"+ - "\u038e\u0390\u0003M\u001f\u0000\u038f\u038e\u0001\u0000\u0000\u0000\u0390"+ - "\u0391\u0001\u0000\u0000\u0000\u0391\u038f\u0001\u0000\u0000\u0000\u0391"+ - "\u0392\u0001\u0000\u0000\u0000\u0392\u0393\u0001\u0000\u0000\u0000\u0393"+ - "\u0394\u0003K\u001e\u0000\u0394\u00aa\u0001\u0000\u0000\u0000\u0395\u0396"+ - "\u0003\u00a9M\u0000\u0396\u00ac\u0001\u0000\u0000\u0000\u0397\u0398\u0003"+ - "7\u0014\u0000\u0398\u0399\u0001\u0000\u0000\u0000\u0399\u039a\u0006O\n"+ - "\u0000\u039a\u00ae\u0001\u0000\u0000\u0000\u039b\u039c\u00039\u0015\u0000"+ - "\u039c\u039d\u0001\u0000\u0000\u0000\u039d\u039e\u0006P\n\u0000\u039e"+ - "\u00b0\u0001\u0000\u0000\u0000\u039f\u03a0\u0003;\u0016\u0000\u03a0\u03a1"+ - "\u0001\u0000\u0000\u0000\u03a1\u03a2\u0006Q\n\u0000\u03a2\u00b2\u0001"+ - "\u0000\u0000\u0000\u03a3\u03a4\u0003\u00a3J\u0000\u03a4\u03a5\u0001\u0000"+ - "\u0000\u0000\u03a5\u03a6\u0006R\f\u0000\u03a6\u03a7\u0006R\r\u0000\u03a7"+ - "\u00b4\u0001\u0000\u0000\u0000\u03a8\u03a9\u0003=\u0017\u0000\u03a9\u03aa"+ - "\u0001\u0000\u0000\u0000\u03aa\u03ab\u0006S\u000e\u0000\u03ab\u03ac\u0006"+ - "S\u000b\u0000\u03ac\u00b6\u0001\u0000\u0000\u0000\u03ad\u03ae\u0003;\u0016"+ - "\u0000\u03ae\u03af\u0001\u0000\u0000\u0000\u03af\u03b0\u0006T\n\u0000"+ - 
"\u03b0\u00b8\u0001\u0000\u0000\u0000\u03b1\u03b2\u00037\u0014\u0000\u03b2"+ - "\u03b3\u0001\u0000\u0000\u0000\u03b3\u03b4\u0006U\n\u0000\u03b4\u00ba"+ - "\u0001\u0000\u0000\u0000\u03b5\u03b6\u00039\u0015\u0000\u03b6\u03b7\u0001"+ - "\u0000\u0000\u0000\u03b7\u03b8\u0006V\n\u0000\u03b8\u00bc\u0001\u0000"+ - "\u0000\u0000\u03b9\u03ba\u0003=\u0017\u0000\u03ba\u03bb\u0001\u0000\u0000"+ - "\u0000\u03bb\u03bc\u0006W\u000e\u0000\u03bc\u03bd\u0006W\u000b\u0000\u03bd"+ - "\u00be\u0001\u0000\u0000\u0000\u03be\u03bf\u0003\u00a3J\u0000\u03bf\u03c0"+ - "\u0001\u0000\u0000\u0000\u03c0\u03c1\u0006X\f\u0000\u03c1\u00c0\u0001"+ - "\u0000\u0000\u0000\u03c2\u03c3\u0003\u00a5K\u0000\u03c3\u03c4\u0001\u0000"+ - "\u0000\u0000\u03c4\u03c5\u0006Y\u000f\u0000\u03c5\u00c2\u0001\u0000\u0000"+ - "\u0000\u03c6\u03c7\u0003\u014f\u00a0\u0000\u03c7\u03c8\u0001\u0000\u0000"+ - "\u0000\u03c8\u03c9\u0006Z\u0010\u0000\u03c9\u00c4\u0001\u0000\u0000\u0000"+ - "\u03ca\u03cb\u0003c*\u0000\u03cb\u03cc\u0001\u0000\u0000\u0000\u03cc\u03cd"+ - "\u0006[\u0011\u0000\u03cd\u00c6\u0001\u0000\u0000\u0000\u03ce\u03cf\u0003"+ - "_(\u0000\u03cf\u03d0\u0001\u0000\u0000\u0000\u03d0\u03d1\u0006\\\u0012"+ - "\u0000\u03d1\u00c8\u0001\u0000\u0000\u0000\u03d2\u03d3\u0007\u0010\u0000"+ - "\u0000\u03d3\u03d4\u0007\u0003\u0000\u0000\u03d4\u03d5\u0007\u0005\u0000"+ - "\u0000\u03d5\u03d6\u0007\f\u0000\u0000\u03d6\u03d7\u0007\u0000\u0000\u0000"+ - "\u03d7\u03d8\u0007\f\u0000\u0000\u03d8\u03d9\u0007\u0005\u0000\u0000\u03d9"+ - "\u03da\u0007\f\u0000\u0000\u03da\u00ca\u0001\u0000\u0000\u0000\u03db\u03df"+ - "\b \u0000\u0000\u03dc\u03dd\u0005/\u0000\u0000\u03dd\u03df\b!\u0000\u0000"+ - "\u03de\u03db\u0001\u0000\u0000\u0000\u03de\u03dc\u0001\u0000\u0000\u0000"+ - "\u03df\u00cc\u0001\u0000\u0000\u0000\u03e0\u03e2\u0003\u00cb^\u0000\u03e1"+ - "\u03e0\u0001\u0000\u0000\u0000\u03e2\u03e3\u0001\u0000\u0000\u0000\u03e3"+ - "\u03e1\u0001\u0000\u0000\u0000\u03e3\u03e4\u0001\u0000\u0000\u0000\u03e4"+ - 
"\u00ce\u0001\u0000\u0000\u0000\u03e5\u03e6\u0003\u00cd_\u0000\u03e6\u03e7"+ - "\u0001\u0000\u0000\u0000\u03e7\u03e8\u0006`\u0013\u0000\u03e8\u00d0\u0001"+ - "\u0000\u0000\u0000\u03e9\u03ea\u0003S\"\u0000\u03ea\u03eb\u0001\u0000"+ - "\u0000\u0000\u03eb\u03ec\u0006a\u0014\u0000\u03ec\u00d2\u0001\u0000\u0000"+ - "\u0000\u03ed\u03ee\u00037\u0014\u0000\u03ee\u03ef\u0001\u0000\u0000\u0000"+ - "\u03ef\u03f0\u0006b\n\u0000\u03f0\u00d4\u0001\u0000\u0000\u0000\u03f1"+ - "\u03f2\u00039\u0015\u0000\u03f2\u03f3\u0001\u0000\u0000\u0000\u03f3\u03f4"+ - "\u0006c\n\u0000\u03f4\u00d6\u0001\u0000\u0000\u0000\u03f5\u03f6\u0003"+ - ";\u0016\u0000\u03f6\u03f7\u0001\u0000\u0000\u0000\u03f7\u03f8\u0006d\n"+ - "\u0000\u03f8\u00d8\u0001\u0000\u0000\u0000\u03f9\u03fa\u0003=\u0017\u0000"+ - "\u03fa\u03fb\u0001\u0000\u0000\u0000\u03fb\u03fc\u0006e\u000e\u0000\u03fc"+ - "\u03fd\u0006e\u000b\u0000\u03fd\u00da\u0001\u0000\u0000\u0000\u03fe\u03ff"+ - "\u0003g,\u0000\u03ff\u0400\u0001\u0000\u0000\u0000\u0400\u0401\u0006f"+ - "\u0015\u0000\u0401\u00dc\u0001\u0000\u0000\u0000\u0402\u0403\u0003c*\u0000"+ - "\u0403\u0404\u0001\u0000\u0000\u0000\u0404\u0405\u0006g\u0011\u0000\u0405"+ - "\u00de\u0001\u0000\u0000\u0000\u0406\u0407\u0003\u007f8\u0000\u0407\u0408"+ - "\u0001\u0000\u0000\u0000\u0408\u0409\u0006h\u0016\u0000\u0409\u00e0\u0001"+ - "\u0000\u0000\u0000\u040a\u040b\u0003\u00a1I\u0000\u040b\u040c\u0001\u0000"+ - "\u0000\u0000\u040c\u040d\u0006i\u0017\u0000\u040d\u00e2\u0001\u0000\u0000"+ - "\u0000\u040e\u0413\u0003A\u0019\u0000\u040f\u0413\u0003?\u0018\u0000\u0410"+ - "\u0413\u0003O \u0000\u0411\u0413\u0003\u0099E\u0000\u0412\u040e\u0001"+ - "\u0000\u0000\u0000\u0412\u040f\u0001\u0000\u0000\u0000\u0412\u0410\u0001"+ - "\u0000\u0000\u0000\u0412\u0411\u0001\u0000\u0000\u0000\u0413\u00e4\u0001"+ - "\u0000\u0000\u0000\u0414\u0417\u0003A\u0019\u0000\u0415\u0417\u0003\u0099"+ - "E\u0000\u0416\u0414\u0001\u0000\u0000\u0000\u0416\u0415\u0001\u0000\u0000"+ - 
"\u0000\u0417\u041b\u0001\u0000\u0000\u0000\u0418\u041a\u0003\u00e3j\u0000"+ - "\u0419\u0418\u0001\u0000\u0000\u0000\u041a\u041d\u0001\u0000\u0000\u0000"+ - "\u041b\u0419\u0001\u0000\u0000\u0000\u041b\u041c\u0001\u0000\u0000\u0000"+ - "\u041c\u0428\u0001\u0000\u0000\u0000\u041d\u041b\u0001\u0000\u0000\u0000"+ - "\u041e\u0421\u0003O \u0000\u041f\u0421\u0003I\u001d\u0000\u0420\u041e"+ - "\u0001\u0000\u0000\u0000\u0420\u041f\u0001\u0000\u0000\u0000\u0421\u0423"+ - "\u0001\u0000\u0000\u0000\u0422\u0424\u0003\u00e3j\u0000\u0423\u0422\u0001"+ - "\u0000\u0000\u0000\u0424\u0425\u0001\u0000\u0000\u0000\u0425\u0423\u0001"+ - "\u0000\u0000\u0000\u0425\u0426\u0001\u0000\u0000\u0000\u0426\u0428\u0001"+ - "\u0000\u0000\u0000\u0427\u0416\u0001\u0000\u0000\u0000\u0427\u0420\u0001"+ - "\u0000\u0000\u0000\u0428\u00e6\u0001\u0000\u0000\u0000\u0429\u042c\u0003"+ - "\u00e5k\u0000\u042a\u042c\u0003\u00a9M\u0000\u042b\u0429\u0001\u0000\u0000"+ - "\u0000\u042b\u042a\u0001\u0000\u0000\u0000\u042c\u042d\u0001\u0000\u0000"+ - "\u0000\u042d\u042b\u0001\u0000\u0000\u0000\u042d\u042e\u0001\u0000\u0000"+ - "\u0000\u042e\u00e8\u0001\u0000\u0000\u0000\u042f\u0430\u00037\u0014\u0000"+ - "\u0430\u0431\u0001\u0000\u0000\u0000\u0431\u0432\u0006m\n\u0000\u0432"+ - "\u00ea\u0001\u0000\u0000\u0000\u0433\u0434\u00039\u0015\u0000\u0434\u0435"+ - "\u0001\u0000\u0000\u0000\u0435\u0436\u0006n\n\u0000\u0436\u00ec\u0001"+ - "\u0000\u0000\u0000\u0437\u0438\u0003;\u0016\u0000\u0438\u0439\u0001\u0000"+ - "\u0000\u0000\u0439\u043a\u0006o\n\u0000\u043a\u00ee\u0001\u0000\u0000"+ - "\u0000\u043b\u043c\u0003=\u0017\u0000\u043c\u043d\u0001\u0000\u0000\u0000"+ - "\u043d\u043e\u0006p\u000e\u0000\u043e\u043f\u0006p\u000b\u0000\u043f\u00f0"+ - "\u0001\u0000\u0000\u0000\u0440\u0441\u0003_(\u0000\u0441\u0442\u0001\u0000"+ - "\u0000\u0000\u0442\u0443\u0006q\u0012\u0000\u0443\u00f2\u0001\u0000\u0000"+ - "\u0000\u0444\u0445\u0003c*\u0000\u0445\u0446\u0001\u0000\u0000\u0000\u0446"+ - 
"\u0447\u0006r\u0011\u0000\u0447\u00f4\u0001\u0000\u0000\u0000\u0448\u0449"+ - "\u0003g,\u0000\u0449\u044a\u0001\u0000\u0000\u0000\u044a\u044b\u0006s"+ - "\u0015\u0000\u044b\u00f6\u0001\u0000\u0000\u0000\u044c\u044d\u0003\u007f"+ - "8\u0000\u044d\u044e\u0001\u0000\u0000\u0000\u044e\u044f\u0006t\u0016\u0000"+ - "\u044f\u00f8\u0001\u0000\u0000\u0000\u0450\u0451\u0003\u00a1I\u0000\u0451"+ - "\u0452\u0001\u0000\u0000\u0000\u0452\u0453\u0006u\u0017\u0000\u0453\u00fa"+ - "\u0001\u0000\u0000\u0000\u0454\u0455\u0007\f\u0000\u0000\u0455\u0456\u0007"+ - "\u0002\u0000\u0000\u0456\u00fc\u0001\u0000\u0000\u0000\u0457\u0458\u0003"+ - "\u00e7l\u0000\u0458\u0459\u0001\u0000\u0000\u0000\u0459\u045a\u0006w\u0018"+ - "\u0000\u045a\u00fe\u0001\u0000\u0000\u0000\u045b\u045c\u00037\u0014\u0000"+ - "\u045c\u045d\u0001\u0000\u0000\u0000\u045d\u045e\u0006x\n\u0000\u045e"+ - "\u0100\u0001\u0000\u0000\u0000\u045f\u0460\u00039\u0015\u0000\u0460\u0461"+ - "\u0001\u0000\u0000\u0000\u0461\u0462\u0006y\n\u0000\u0462\u0102\u0001"+ - "\u0000\u0000\u0000\u0463\u0464\u0003;\u0016\u0000\u0464\u0465\u0001\u0000"+ - "\u0000\u0000\u0465\u0466\u0006z\n\u0000\u0466\u0104\u0001\u0000\u0000"+ - "\u0000\u0467\u0468\u0003=\u0017\u0000\u0468\u0469\u0001\u0000\u0000\u0000"+ - "\u0469\u046a\u0006{\u000e\u0000\u046a\u046b\u0006{\u000b\u0000\u046b\u0106"+ - "\u0001\u0000\u0000\u0000\u046c\u046d\u0003\u00a3J\u0000\u046d\u046e\u0001"+ - "\u0000\u0000\u0000\u046e\u046f\u0006|\f\u0000\u046f\u0470\u0006|\u0019"+ - "\u0000\u0470\u0108\u0001\u0000\u0000\u0000\u0471\u0472\u0007\u0007\u0000"+ - "\u0000\u0472\u0473\u0007\t\u0000\u0000\u0473\u0474\u0001\u0000\u0000\u0000"+ - "\u0474\u0475\u0006}\u001a\u0000\u0475\u010a\u0001\u0000\u0000\u0000\u0476"+ - "\u0477\u0007\u0013\u0000\u0000\u0477\u0478\u0007\u0001\u0000\u0000\u0478"+ - "\u0479\u0007\u0005\u0000\u0000\u0479\u047a\u0007\n\u0000\u0000\u047a\u047b"+ - "\u0001\u0000\u0000\u0000\u047b\u047c\u0006~\u001a\u0000\u047c\u010c\u0001"+ - 
"\u0000\u0000\u0000\u047d\u047e\b\"\u0000\u0000\u047e\u010e\u0001\u0000"+ - "\u0000\u0000\u047f\u0481\u0003\u010d\u007f\u0000\u0480\u047f\u0001\u0000"+ - "\u0000\u0000\u0481\u0482\u0001\u0000\u0000\u0000\u0482\u0480\u0001\u0000"+ - "\u0000\u0000\u0482\u0483\u0001\u0000\u0000\u0000\u0483\u0484\u0001\u0000"+ - "\u0000\u0000\u0484\u0485\u0003\u014f\u00a0\u0000\u0485\u0487\u0001\u0000"+ - "\u0000\u0000\u0486\u0480\u0001\u0000\u0000\u0000\u0486\u0487\u0001\u0000"+ - "\u0000\u0000\u0487\u0489\u0001\u0000\u0000\u0000\u0488\u048a\u0003\u010d"+ - "\u007f\u0000\u0489\u0488\u0001\u0000\u0000\u0000\u048a\u048b\u0001\u0000"+ - "\u0000\u0000\u048b\u0489\u0001\u0000\u0000\u0000\u048b\u048c\u0001\u0000"+ - "\u0000\u0000\u048c\u0110\u0001\u0000\u0000\u0000\u048d\u048e\u0003\u010f"+ - "\u0080\u0000\u048e\u048f\u0001\u0000\u0000\u0000\u048f\u0490\u0006\u0081"+ - "\u001b\u0000\u0490\u0112\u0001\u0000\u0000\u0000\u0491\u0492\u00037\u0014"+ - "\u0000\u0492\u0493\u0001\u0000\u0000\u0000\u0493\u0494\u0006\u0082\n\u0000"+ - "\u0494\u0114\u0001\u0000\u0000\u0000\u0495\u0496\u00039\u0015\u0000\u0496"+ - "\u0497\u0001\u0000\u0000\u0000\u0497\u0498\u0006\u0083\n\u0000\u0498\u0116"+ - "\u0001\u0000\u0000\u0000\u0499\u049a\u0003;\u0016\u0000\u049a\u049b\u0001"+ - "\u0000\u0000\u0000\u049b\u049c\u0006\u0084\n\u0000\u049c\u0118\u0001\u0000"+ - "\u0000\u0000\u049d\u049e\u0003=\u0017\u0000\u049e\u049f\u0001\u0000\u0000"+ - "\u0000\u049f\u04a0\u0006\u0085\u000e\u0000\u04a0\u04a1\u0006\u0085\u000b"+ - "\u0000\u04a1\u04a2\u0006\u0085\u000b\u0000\u04a2\u011a\u0001\u0000\u0000"+ - "\u0000\u04a3\u04a4\u0003_(\u0000\u04a4\u04a5\u0001\u0000\u0000\u0000\u04a5"+ - "\u04a6\u0006\u0086\u0012\u0000\u04a6\u011c\u0001\u0000\u0000\u0000\u04a7"+ - "\u04a8\u0003c*\u0000\u04a8\u04a9\u0001\u0000\u0000\u0000\u04a9\u04aa\u0006"+ - "\u0087\u0011\u0000\u04aa\u011e\u0001\u0000\u0000\u0000\u04ab\u04ac\u0003"+ - "g,\u0000\u04ac\u04ad\u0001\u0000\u0000\u0000\u04ad\u04ae\u0006\u0088\u0015"+ - 
"\u0000\u04ae\u0120\u0001\u0000\u0000\u0000\u04af\u04b0\u0003\u010b~\u0000"+ - "\u04b0\u04b1\u0001\u0000\u0000\u0000\u04b1\u04b2\u0006\u0089\u001c\u0000"+ - "\u04b2\u0122\u0001\u0000\u0000\u0000\u04b3\u04b4\u0003\u00e7l\u0000\u04b4"+ - "\u04b5\u0001\u0000\u0000\u0000\u04b5\u04b6\u0006\u008a\u0018\u0000\u04b6"+ - "\u0124\u0001\u0000\u0000\u0000\u04b7\u04b8\u0003\u00abN\u0000\u04b8\u04b9"+ - "\u0001\u0000\u0000\u0000\u04b9\u04ba\u0006\u008b\u001d\u0000\u04ba\u0126"+ - "\u0001\u0000\u0000\u0000\u04bb\u04bc\u0003\u007f8\u0000\u04bc\u04bd\u0001"+ - "\u0000\u0000\u0000\u04bd\u04be\u0006\u008c\u0016\u0000\u04be\u0128\u0001"+ - "\u0000\u0000\u0000\u04bf\u04c0\u0003\u00a1I\u0000\u04c0\u04c1\u0001\u0000"+ - "\u0000\u0000\u04c1\u04c2\u0006\u008d\u0017\u0000\u04c2\u012a\u0001\u0000"+ - "\u0000\u0000\u04c3\u04c4\u00037\u0014\u0000\u04c4\u04c5\u0001\u0000\u0000"+ - "\u0000\u04c5\u04c6\u0006\u008e\n\u0000\u04c6\u012c\u0001\u0000\u0000\u0000"+ - "\u04c7\u04c8\u00039\u0015\u0000\u04c8\u04c9\u0001\u0000\u0000\u0000\u04c9"+ - "\u04ca\u0006\u008f\n\u0000\u04ca\u012e\u0001\u0000\u0000\u0000\u04cb\u04cc"+ - "\u0003;\u0016\u0000\u04cc\u04cd\u0001\u0000\u0000\u0000\u04cd\u04ce\u0006"+ - "\u0090\n\u0000\u04ce\u0130\u0001\u0000\u0000\u0000\u04cf\u04d0\u0003="+ - "\u0017\u0000\u04d0\u04d1\u0001\u0000\u0000\u0000\u04d1\u04d2\u0006\u0091"+ - "\u000e\u0000\u04d2\u04d3\u0006\u0091\u000b\u0000\u04d3\u0132\u0001\u0000"+ - "\u0000\u0000\u04d4\u04d5\u0003g,\u0000\u04d5\u04d6\u0001\u0000\u0000\u0000"+ - "\u04d6\u04d7\u0006\u0092\u0015\u0000\u04d7\u0134\u0001\u0000\u0000\u0000"+ - "\u04d8\u04d9\u0003\u007f8\u0000\u04d9\u04da\u0001\u0000\u0000\u0000\u04da"+ - "\u04db\u0006\u0093\u0016\u0000\u04db\u0136\u0001\u0000\u0000\u0000\u04dc"+ - "\u04dd\u0003\u00a1I\u0000\u04dd\u04de\u0001\u0000\u0000\u0000\u04de\u04df"+ - "\u0006\u0094\u0017\u0000\u04df\u0138\u0001\u0000\u0000\u0000\u04e0\u04e1"+ - "\u0003\u00abN\u0000\u04e1\u04e2\u0001\u0000\u0000\u0000\u04e2\u04e3\u0006"+ - 
"\u0095\u001d\u0000\u04e3\u013a\u0001\u0000\u0000\u0000\u04e4\u04e5\u0003"+ - "\u00a7L\u0000\u04e5\u04e6\u0001\u0000\u0000\u0000\u04e6\u04e7\u0006\u0096"+ - "\u001e\u0000\u04e7\u013c\u0001\u0000\u0000\u0000\u04e8\u04e9\u00037\u0014"+ - "\u0000\u04e9\u04ea\u0001\u0000\u0000\u0000\u04ea\u04eb\u0006\u0097\n\u0000"+ - "\u04eb\u013e\u0001\u0000\u0000\u0000\u04ec\u04ed\u00039\u0015\u0000\u04ed"+ - "\u04ee\u0001\u0000\u0000\u0000\u04ee\u04ef\u0006\u0098\n\u0000\u04ef\u0140"+ - "\u0001\u0000\u0000\u0000\u04f0\u04f1\u0003;\u0016\u0000\u04f1\u04f2\u0001"+ - "\u0000\u0000\u0000\u04f2\u04f3\u0006\u0099\n\u0000\u04f3\u0142\u0001\u0000"+ - "\u0000\u0000\u04f4\u04f5\u0003=\u0017\u0000\u04f5\u04f6\u0001\u0000\u0000"+ - "\u0000\u04f6\u04f7\u0006\u009a\u000e\u0000\u04f7\u04f8\u0006\u009a\u000b"+ - "\u0000\u04f8\u0144\u0001\u0000\u0000\u0000\u04f9\u04fa\u0007\u0001\u0000"+ - "\u0000\u04fa\u04fb\u0007\t\u0000\u0000\u04fb\u04fc\u0007\u000f\u0000\u0000"+ - "\u04fc\u04fd\u0007\u0007\u0000\u0000\u04fd\u0146\u0001\u0000\u0000\u0000"+ - "\u04fe\u04ff\u00037\u0014\u0000\u04ff\u0500\u0001\u0000\u0000\u0000\u0500"+ - "\u0501\u0006\u009c\n\u0000\u0501\u0148\u0001\u0000\u0000\u0000\u0502\u0503"+ - "\u00039\u0015\u0000\u0503\u0504\u0001\u0000\u0000\u0000\u0504\u0505\u0006"+ - "\u009d\n\u0000\u0505\u014a\u0001\u0000\u0000\u0000\u0506\u0507\u0003;"+ - "\u0016\u0000\u0507\u0508\u0001\u0000\u0000\u0000\u0508\u0509\u0006\u009e"+ - "\n\u0000\u0509\u014c\u0001\u0000\u0000\u0000\u050a\u050b\u0003\u00a5K"+ - "\u0000\u050b\u050c\u0001\u0000\u0000\u0000\u050c\u050d\u0006\u009f\u000f"+ - "\u0000\u050d\u050e\u0006\u009f\u000b\u0000\u050e\u014e\u0001\u0000\u0000"+ - "\u0000\u050f\u0510\u0005:\u0000\u0000\u0510\u0150\u0001\u0000\u0000\u0000"+ - "\u0511\u0517\u0003I\u001d\u0000\u0512\u0517\u0003?\u0018\u0000\u0513\u0517"+ - "\u0003g,\u0000\u0514\u0517\u0003A\u0019\u0000\u0515\u0517\u0003O \u0000"+ - "\u0516\u0511\u0001\u0000\u0000\u0000\u0516\u0512\u0001\u0000\u0000\u0000"+ - 
"\u0516\u0513\u0001\u0000\u0000\u0000\u0516\u0514\u0001\u0000\u0000\u0000"+ - "\u0516\u0515\u0001\u0000\u0000\u0000\u0517\u0518\u0001\u0000\u0000\u0000"+ - "\u0518\u0516\u0001\u0000\u0000\u0000\u0518\u0519\u0001\u0000\u0000\u0000"+ - "\u0519\u0152\u0001\u0000\u0000\u0000\u051a\u051b\u00037\u0014\u0000\u051b"+ - "\u051c\u0001\u0000\u0000\u0000\u051c\u051d\u0006\u00a2\n\u0000\u051d\u0154"+ - "\u0001\u0000\u0000\u0000\u051e\u051f\u00039\u0015\u0000\u051f\u0520\u0001"+ - "\u0000\u0000\u0000\u0520\u0521\u0006\u00a3\n\u0000\u0521\u0156\u0001\u0000"+ - "\u0000\u0000\u0522\u0523\u0003;\u0016\u0000\u0523\u0524\u0001\u0000\u0000"+ - "\u0000\u0524\u0525\u0006\u00a4\n\u0000\u0525\u0158\u0001\u0000\u0000\u0000"+ - "\u0526\u0527\u0003=\u0017\u0000\u0527\u0528\u0001\u0000\u0000\u0000\u0528"+ - "\u0529\u0006\u00a5\u000e\u0000\u0529\u052a\u0006\u00a5\u000b\u0000\u052a"+ - "\u015a\u0001\u0000\u0000\u0000\u052b\u052c\u0003\u014f\u00a0\u0000\u052c"+ - "\u052d\u0001\u0000\u0000\u0000\u052d\u052e\u0006\u00a6\u0010\u0000\u052e"+ - "\u015c\u0001\u0000\u0000\u0000\u052f\u0530\u0003c*\u0000\u0530\u0531\u0001"+ - "\u0000\u0000\u0000\u0531\u0532\u0006\u00a7\u0011\u0000\u0532\u015e\u0001"+ - "\u0000\u0000\u0000\u0533\u0534\u0003g,\u0000\u0534\u0535\u0001\u0000\u0000"+ - "\u0000\u0535\u0536\u0006\u00a8\u0015\u0000\u0536\u0160\u0001\u0000\u0000"+ - "\u0000\u0537\u0538\u0003\u0109}\u0000\u0538\u0539\u0001\u0000\u0000\u0000"+ - "\u0539\u053a\u0006\u00a9\u001f\u0000\u053a\u053b\u0006\u00a9 \u0000\u053b"+ - "\u0162\u0001\u0000\u0000\u0000\u053c\u053d\u0003\u00cd_\u0000\u053d\u053e"+ - "\u0001\u0000\u0000\u0000\u053e\u053f\u0006\u00aa\u0013\u0000\u053f\u0164"+ - "\u0001\u0000\u0000\u0000\u0540\u0541\u0003S\"\u0000\u0541\u0542\u0001"+ - "\u0000\u0000\u0000\u0542\u0543\u0006\u00ab\u0014\u0000\u0543\u0166\u0001"+ - "\u0000\u0000\u0000\u0544\u0545\u00037\u0014\u0000\u0545\u0546\u0001\u0000"+ - "\u0000\u0000\u0546\u0547\u0006\u00ac\n\u0000\u0547\u0168\u0001\u0000\u0000"+ - 
"\u0000\u0548\u0549\u00039\u0015\u0000\u0549\u054a\u0001\u0000\u0000\u0000"+ - "\u054a\u054b\u0006\u00ad\n\u0000\u054b\u016a\u0001\u0000\u0000\u0000\u054c"+ - "\u054d\u0003;\u0016\u0000\u054d\u054e\u0001\u0000\u0000\u0000\u054e\u054f"+ - "\u0006\u00ae\n\u0000\u054f\u016c\u0001\u0000\u0000\u0000\u0550\u0551\u0003"+ - "=\u0017\u0000\u0551\u0552\u0001\u0000\u0000\u0000\u0552\u0553\u0006\u00af"+ - "\u000e\u0000\u0553\u0554\u0006\u00af\u000b\u0000\u0554\u0555\u0006\u00af"+ - "\u000b\u0000\u0555\u016e\u0001\u0000\u0000\u0000\u0556\u0557\u0003c*\u0000"+ - "\u0557\u0558\u0001\u0000\u0000\u0000\u0558\u0559\u0006\u00b0\u0011\u0000"+ - "\u0559\u0170\u0001\u0000\u0000\u0000\u055a\u055b\u0003g,\u0000\u055b\u055c"+ - "\u0001\u0000\u0000\u0000\u055c\u055d\u0006\u00b1\u0015\u0000\u055d\u0172"+ - "\u0001\u0000\u0000\u0000\u055e\u055f\u0003\u00e7l\u0000\u055f\u0560\u0001"+ - "\u0000\u0000\u0000\u0560\u0561\u0006\u00b2\u0018\u0000\u0561\u0174\u0001"+ - "\u0000\u0000\u0000\u0562\u0563\u00037\u0014\u0000\u0563\u0564\u0001\u0000"+ - "\u0000\u0000\u0564\u0565\u0006\u00b3\n\u0000\u0565\u0176\u0001\u0000\u0000"+ - "\u0000\u0566\u0567\u00039\u0015\u0000\u0567\u0568\u0001\u0000\u0000\u0000"+ - "\u0568\u0569\u0006\u00b4\n\u0000\u0569\u0178\u0001\u0000\u0000\u0000\u056a"+ - "\u056b\u0003;\u0016\u0000\u056b\u056c\u0001\u0000\u0000\u0000\u056c\u056d"+ - "\u0006\u00b5\n\u0000\u056d\u017a\u0001\u0000\u0000\u0000\u056e\u056f\u0003"+ - "=\u0017\u0000\u056f\u0570\u0001\u0000\u0000\u0000\u0570\u0571\u0006\u00b6"+ - "\u000e\u0000\u0571\u0572\u0006\u00b6\u000b\u0000\u0572\u017c\u0001\u0000"+ - "\u0000\u0000\u0573\u0574\u0003\u00cd_\u0000\u0574\u0575\u0001\u0000\u0000"+ - "\u0000\u0575\u0576\u0006\u00b7\u0013\u0000\u0576\u0577\u0006\u00b7\u000b"+ - "\u0000\u0577\u0578\u0006\u00b7!\u0000\u0578\u017e\u0001\u0000\u0000\u0000"+ - "\u0579\u057a\u0003S\"\u0000\u057a\u057b\u0001\u0000\u0000\u0000\u057b"+ - "\u057c\u0006\u00b8\u0014\u0000\u057c\u057d\u0006\u00b8\u000b\u0000\u057d"+ - 
"\u057e\u0006\u00b8!\u0000\u057e\u0180\u0001\u0000\u0000\u0000\u057f\u0580"+ - "\u00037\u0014\u0000\u0580\u0581\u0001\u0000\u0000\u0000\u0581\u0582\u0006"+ - "\u00b9\n\u0000\u0582\u0182\u0001\u0000\u0000\u0000\u0583\u0584\u00039"+ - "\u0015\u0000\u0584\u0585\u0001\u0000\u0000\u0000\u0585\u0586\u0006\u00ba"+ - "\n\u0000\u0586\u0184\u0001\u0000\u0000\u0000\u0587\u0588\u0003;\u0016"+ - "\u0000\u0588\u0589\u0001\u0000\u0000\u0000\u0589\u058a\u0006\u00bb\n\u0000"+ - "\u058a\u0186\u0001\u0000\u0000\u0000\u058b\u058c\u0003\u014f\u00a0\u0000"+ - "\u058c\u058d\u0001\u0000\u0000\u0000\u058d\u058e\u0006\u00bc\u0010\u0000"+ - "\u058e\u058f\u0006\u00bc\u000b\u0000\u058f\u0590\u0006\u00bc\t\u0000\u0590"+ - "\u0188\u0001\u0000\u0000\u0000\u0591\u0592\u0003c*\u0000\u0592\u0593\u0001"+ - "\u0000\u0000\u0000\u0593\u0594\u0006\u00bd\u0011\u0000\u0594\u0595\u0006"+ - "\u00bd\u000b\u0000\u0595\u0596\u0006\u00bd\t\u0000\u0596\u018a\u0001\u0000"+ - "\u0000\u0000\u0597\u0598\u00037\u0014\u0000\u0598\u0599\u0001\u0000\u0000"+ - "\u0000\u0599\u059a\u0006\u00be\n\u0000\u059a\u018c\u0001\u0000\u0000\u0000"+ - "\u059b\u059c\u00039\u0015\u0000\u059c\u059d\u0001\u0000\u0000\u0000\u059d"+ - "\u059e\u0006\u00bf\n\u0000\u059e\u018e\u0001\u0000\u0000\u0000\u059f\u05a0"+ - "\u0003;\u0016\u0000\u05a0\u05a1\u0001\u0000\u0000\u0000\u05a1\u05a2\u0006"+ - "\u00c0\n\u0000\u05a2\u0190\u0001\u0000\u0000\u0000\u05a3\u05a4\u0003\u00ab"+ - "N\u0000\u05a4\u05a5\u0001\u0000\u0000\u0000\u05a5\u05a6\u0006\u00c1\u000b"+ - "\u0000\u05a6\u05a7\u0006\u00c1\u0000\u0000\u05a7\u05a8\u0006\u00c1\u001d"+ - "\u0000\u05a8\u0192\u0001\u0000\u0000\u0000\u05a9\u05aa\u0003\u00a7L\u0000"+ - "\u05aa\u05ab\u0001\u0000\u0000\u0000\u05ab\u05ac\u0006\u00c2\u000b\u0000"+ - "\u05ac\u05ad\u0006\u00c2\u0000\u0000\u05ad\u05ae\u0006\u00c2\u001e\u0000"+ - "\u05ae\u0194\u0001\u0000\u0000\u0000\u05af\u05b0\u0003Y%\u0000\u05b0\u05b1"+ - "\u0001\u0000\u0000\u0000\u05b1\u05b2\u0006\u00c3\u000b\u0000\u05b2\u05b3"+ - 
"\u0006\u00c3\u0000\u0000\u05b3\u05b4\u0006\u00c3\"\u0000\u05b4\u0196\u0001"+ - "\u0000\u0000\u0000\u05b5\u05b6\u0003=\u0017\u0000\u05b6\u05b7\u0001\u0000"+ - "\u0000\u0000\u05b7\u05b8\u0006\u00c4\u000e\u0000\u05b8\u05b9\u0006\u00c4"+ - "\u000b\u0000\u05b9\u0198\u0001\u0000\u0000\u0000A\u0000\u0001\u0002\u0003"+ - "\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u0241\u024b\u024f\u0252"+ - "\u025b\u025d\u0268\u027b\u0280\u0289\u0290\u0295\u0297\u02a2\u02aa\u02ad"+ - "\u02af\u02b4\u02b9\u02bf\u02c6\u02cb\u02d1\u02d4\u02dc\u02e0\u0361\u0366"+ - "\u036d\u036f\u037f\u0384\u0389\u038b\u0391\u03de\u03e3\u0412\u0416\u041b"+ - "\u0420\u0425\u0427\u042b\u042d\u0482\u0486\u048b\u0516\u0518#\u0005\u0001"+ - "\u0000\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0002\u0000\u0005\u0003"+ - "\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0005\u000b\u0000"+ - "\u0005\r\u0000\u0000\u0001\u0000\u0004\u0000\u0000\u0007A\u0000\u0005"+ - "\u0000\u0000\u0007\u0018\u0000\u0007B\u0000\u0007h\u0000\u0007!\u0000"+ - "\u0007\u001f\u0000\u0007L\u0000\u0007\u0019\u0000\u0007#\u0000\u0007/"+ - "\u0000\u0007@\u0000\u0007P\u0000\u0005\n\u0000\u0005\u0007\u0000\u0007"+ - "Z\u0000\u0007Y\u0000\u0007D\u0000\u0007C\u0000\u0007X\u0000\u0005\f\u0000"+ - "\u0005\u000e\u0000\u0007\u001c\u0000"; + "\u009e\u0001\u0000\u0000\u0000\u0357\u0358\u0007\u0010\u0000\u0000\u0358"+ + "\u0359\u0007\f\u0000\u0000\u0359\u035a\u0007\u0005\u0000\u0000\u035a\u035b"+ + "\u0007\u0004\u0000\u0000\u035b\u035c\u0007\n\u0000\u0000\u035c\u00a0\u0001"+ + "\u0000\u0000\u0000\u035d\u0360\u0003\u007f8\u0000\u035e\u0361\u0003A\u0019"+ + "\u0000\u035f\u0361\u0003O \u0000\u0360\u035e\u0001\u0000\u0000\u0000\u0360"+ + "\u035f\u0001\u0000\u0000\u0000\u0361\u0365\u0001\u0000\u0000\u0000\u0362"+ + "\u0364\u0003Q!\u0000\u0363\u0362\u0001\u0000\u0000\u0000\u0364\u0367\u0001"+ + "\u0000\u0000\u0000\u0365\u0363\u0001\u0000\u0000\u0000\u0365\u0366\u0001"+ + "\u0000\u0000\u0000\u0366\u036f\u0001\u0000\u0000\u0000\u0367\u0365\u0001"+ + 
"\u0000\u0000\u0000\u0368\u036a\u0003\u007f8\u0000\u0369\u036b\u0003?\u0018"+ + "\u0000\u036a\u0369\u0001\u0000\u0000\u0000\u036b\u036c\u0001\u0000\u0000"+ + "\u0000\u036c\u036a\u0001\u0000\u0000\u0000\u036c\u036d\u0001\u0000\u0000"+ + "\u0000\u036d\u036f\u0001\u0000\u0000\u0000\u036e\u035d\u0001\u0000\u0000"+ + "\u0000\u036e\u0368\u0001\u0000\u0000\u0000\u036f\u00a2\u0001\u0000\u0000"+ + "\u0000\u0370\u0371\u0005[\u0000\u0000\u0371\u0372\u0001\u0000\u0000\u0000"+ + "\u0372\u0373\u0006J\u0000\u0000\u0373\u0374\u0006J\u0000\u0000\u0374\u00a4"+ + "\u0001\u0000\u0000\u0000\u0375\u0376\u0005]\u0000\u0000\u0376\u0377\u0001"+ + "\u0000\u0000\u0000\u0377\u0378\u0006K\u000b\u0000\u0378\u0379\u0006K\u000b"+ + "\u0000\u0379\u00a6\u0001\u0000\u0000\u0000\u037a\u037e\u0003A\u0019\u0000"+ + "\u037b\u037d\u0003Q!\u0000\u037c\u037b\u0001\u0000\u0000\u0000\u037d\u0380"+ + "\u0001\u0000\u0000\u0000\u037e\u037c\u0001\u0000\u0000\u0000\u037e\u037f"+ + "\u0001\u0000\u0000\u0000\u037f\u038b\u0001\u0000\u0000\u0000\u0380\u037e"+ + "\u0001\u0000\u0000\u0000\u0381\u0384\u0003O \u0000\u0382\u0384\u0003I"+ + "\u001d\u0000\u0383\u0381\u0001\u0000\u0000\u0000\u0383\u0382\u0001\u0000"+ + "\u0000\u0000\u0384\u0386\u0001\u0000\u0000\u0000\u0385\u0387\u0003Q!\u0000"+ + "\u0386\u0385\u0001\u0000\u0000\u0000\u0387\u0388\u0001\u0000\u0000\u0000"+ + "\u0388\u0386\u0001\u0000\u0000\u0000\u0388\u0389\u0001\u0000\u0000\u0000"+ + "\u0389\u038b\u0001\u0000\u0000\u0000\u038a\u037a\u0001\u0000\u0000\u0000"+ + "\u038a\u0383\u0001\u0000\u0000\u0000\u038b\u00a8\u0001\u0000\u0000\u0000"+ + "\u038c\u038e\u0003K\u001e\u0000\u038d\u038f\u0003M\u001f\u0000\u038e\u038d"+ + "\u0001\u0000\u0000\u0000\u038f\u0390\u0001\u0000\u0000\u0000\u0390\u038e"+ + "\u0001\u0000\u0000\u0000\u0390\u0391\u0001\u0000\u0000\u0000\u0391\u0392"+ + "\u0001\u0000\u0000\u0000\u0392\u0393\u0003K\u001e\u0000\u0393\u00aa\u0001"+ + "\u0000\u0000\u0000\u0394\u0395\u0003\u00a9M\u0000\u0395\u00ac\u0001\u0000"+ + 
"\u0000\u0000\u0396\u0397\u00037\u0014\u0000\u0397\u0398\u0001\u0000\u0000"+ + "\u0000\u0398\u0399\u0006O\n\u0000\u0399\u00ae\u0001\u0000\u0000\u0000"+ + "\u039a\u039b\u00039\u0015\u0000\u039b\u039c\u0001\u0000\u0000\u0000\u039c"+ + "\u039d\u0006P\n\u0000\u039d\u00b0\u0001\u0000\u0000\u0000\u039e\u039f"+ + "\u0003;\u0016\u0000\u039f\u03a0\u0001\u0000\u0000\u0000\u03a0\u03a1\u0006"+ + "Q\n\u0000\u03a1\u00b2\u0001\u0000\u0000\u0000\u03a2\u03a3\u0003\u00a3"+ + "J\u0000\u03a3\u03a4\u0001\u0000\u0000\u0000\u03a4\u03a5\u0006R\f\u0000"+ + "\u03a5\u03a6\u0006R\r\u0000\u03a6\u00b4\u0001\u0000\u0000\u0000\u03a7"+ + "\u03a8\u0003=\u0017\u0000\u03a8\u03a9\u0001\u0000\u0000\u0000\u03a9\u03aa"+ + "\u0006S\u000e\u0000\u03aa\u03ab\u0006S\u000b\u0000\u03ab\u00b6\u0001\u0000"+ + "\u0000\u0000\u03ac\u03ad\u0003;\u0016\u0000\u03ad\u03ae\u0001\u0000\u0000"+ + "\u0000\u03ae\u03af\u0006T\n\u0000\u03af\u00b8\u0001\u0000\u0000\u0000"+ + "\u03b0\u03b1\u00037\u0014\u0000\u03b1\u03b2\u0001\u0000\u0000\u0000\u03b2"+ + "\u03b3\u0006U\n\u0000\u03b3\u00ba\u0001\u0000\u0000\u0000\u03b4\u03b5"+ + "\u00039\u0015\u0000\u03b5\u03b6\u0001\u0000\u0000\u0000\u03b6\u03b7\u0006"+ + "V\n\u0000\u03b7\u00bc\u0001\u0000\u0000\u0000\u03b8\u03b9\u0003=\u0017"+ + "\u0000\u03b9\u03ba\u0001\u0000\u0000\u0000\u03ba\u03bb\u0006W\u000e\u0000"+ + "\u03bb\u03bc\u0006W\u000b\u0000\u03bc\u00be\u0001\u0000\u0000\u0000\u03bd"+ + "\u03be\u0003\u00a3J\u0000\u03be\u03bf\u0001\u0000\u0000\u0000\u03bf\u03c0"+ + "\u0006X\f\u0000\u03c0\u00c0\u0001\u0000\u0000\u0000\u03c1\u03c2\u0003"+ + "\u00a5K\u0000\u03c2\u03c3\u0001\u0000\u0000\u0000\u03c3\u03c4\u0006Y\u000f"+ + "\u0000\u03c4\u00c2\u0001\u0000\u0000\u0000\u03c5\u03c6\u0003\u014f\u00a0"+ + "\u0000\u03c6\u03c7\u0001\u0000\u0000\u0000\u03c7\u03c8\u0006Z\u0010\u0000"+ + "\u03c8\u00c4\u0001\u0000\u0000\u0000\u03c9\u03ca\u0003c*\u0000\u03ca\u03cb"+ + "\u0001\u0000\u0000\u0000\u03cb\u03cc\u0006[\u0011\u0000\u03cc\u00c6\u0001"+ + 
"\u0000\u0000\u0000\u03cd\u03ce\u0003_(\u0000\u03ce\u03cf\u0001\u0000\u0000"+ + "\u0000\u03cf\u03d0\u0006\\\u0012\u0000\u03d0\u00c8\u0001\u0000\u0000\u0000"+ + "\u03d1\u03d2\u0007\u0010\u0000\u0000\u03d2\u03d3\u0007\u0003\u0000\u0000"+ + "\u03d3\u03d4\u0007\u0005\u0000\u0000\u03d4\u03d5\u0007\f\u0000\u0000\u03d5"+ + "\u03d6\u0007\u0000\u0000\u0000\u03d6\u03d7\u0007\f\u0000\u0000\u03d7\u03d8"+ + "\u0007\u0005\u0000\u0000\u03d8\u03d9\u0007\f\u0000\u0000\u03d9\u00ca\u0001"+ + "\u0000\u0000\u0000\u03da\u03de\b \u0000\u0000\u03db\u03dc\u0005/\u0000"+ + "\u0000\u03dc\u03de\b!\u0000\u0000\u03dd\u03da\u0001\u0000\u0000\u0000"+ + "\u03dd\u03db\u0001\u0000\u0000\u0000\u03de\u00cc\u0001\u0000\u0000\u0000"+ + "\u03df\u03e1\u0003\u00cb^\u0000\u03e0\u03df\u0001\u0000\u0000\u0000\u03e1"+ + "\u03e2\u0001\u0000\u0000\u0000\u03e2\u03e0\u0001\u0000\u0000\u0000\u03e2"+ + "\u03e3\u0001\u0000\u0000\u0000\u03e3\u00ce\u0001\u0000\u0000\u0000\u03e4"+ + "\u03e5\u0003\u00cd_\u0000\u03e5\u03e6\u0001\u0000\u0000\u0000\u03e6\u03e7"+ + "\u0006`\u0013\u0000\u03e7\u00d0\u0001\u0000\u0000\u0000\u03e8\u03e9\u0003"+ + "S\"\u0000\u03e9\u03ea\u0001\u0000\u0000\u0000\u03ea\u03eb\u0006a\u0014"+ + "\u0000\u03eb\u00d2\u0001\u0000\u0000\u0000\u03ec\u03ed\u00037\u0014\u0000"+ + "\u03ed\u03ee\u0001\u0000\u0000\u0000\u03ee\u03ef\u0006b\n\u0000\u03ef"+ + "\u00d4\u0001\u0000\u0000\u0000\u03f0\u03f1\u00039\u0015\u0000\u03f1\u03f2"+ + "\u0001\u0000\u0000\u0000\u03f2\u03f3\u0006c\n\u0000\u03f3\u00d6\u0001"+ + "\u0000\u0000\u0000\u03f4\u03f5\u0003;\u0016\u0000\u03f5\u03f6\u0001\u0000"+ + "\u0000\u0000\u03f6\u03f7\u0006d\n\u0000\u03f7\u00d8\u0001\u0000\u0000"+ + "\u0000\u03f8\u03f9\u0003=\u0017\u0000\u03f9\u03fa\u0001\u0000\u0000\u0000"+ + "\u03fa\u03fb\u0006e\u000e\u0000\u03fb\u03fc\u0006e\u000b\u0000\u03fc\u00da"+ + "\u0001\u0000\u0000\u0000\u03fd\u03fe\u0003g,\u0000\u03fe\u03ff\u0001\u0000"+ + "\u0000\u0000\u03ff\u0400\u0006f\u0015\u0000\u0400\u00dc\u0001\u0000\u0000"+ + 
"\u0000\u0401\u0402\u0003c*\u0000\u0402\u0403\u0001\u0000\u0000\u0000\u0403"+ + "\u0404\u0006g\u0011\u0000\u0404\u00de\u0001\u0000\u0000\u0000\u0405\u0406"+ + "\u0003\u007f8\u0000\u0406\u0407\u0001\u0000\u0000\u0000\u0407\u0408\u0006"+ + "h\u0016\u0000\u0408\u00e0\u0001\u0000\u0000\u0000\u0409\u040a\u0003\u00a1"+ + "I\u0000\u040a\u040b\u0001\u0000\u0000\u0000\u040b\u040c\u0006i\u0017\u0000"+ + "\u040c\u00e2\u0001\u0000\u0000\u0000\u040d\u0412\u0003A\u0019\u0000\u040e"+ + "\u0412\u0003?\u0018\u0000\u040f\u0412\u0003O \u0000\u0410\u0412\u0003"+ + "\u0099E\u0000\u0411\u040d\u0001\u0000\u0000\u0000\u0411\u040e\u0001\u0000"+ + "\u0000\u0000\u0411\u040f\u0001\u0000\u0000\u0000\u0411\u0410\u0001\u0000"+ + "\u0000\u0000\u0412\u00e4\u0001\u0000\u0000\u0000\u0413\u0416\u0003A\u0019"+ + "\u0000\u0414\u0416\u0003\u0099E\u0000\u0415\u0413\u0001\u0000\u0000\u0000"+ + "\u0415\u0414\u0001\u0000\u0000\u0000\u0416\u041a\u0001\u0000\u0000\u0000"+ + "\u0417\u0419\u0003\u00e3j\u0000\u0418\u0417\u0001\u0000\u0000\u0000\u0419"+ + "\u041c\u0001\u0000\u0000\u0000\u041a\u0418\u0001\u0000\u0000\u0000\u041a"+ + "\u041b\u0001\u0000\u0000\u0000\u041b\u0427\u0001\u0000\u0000\u0000\u041c"+ + "\u041a\u0001\u0000\u0000\u0000\u041d\u0420\u0003O \u0000\u041e\u0420\u0003"+ + "I\u001d\u0000\u041f\u041d\u0001\u0000\u0000\u0000\u041f\u041e\u0001\u0000"+ + "\u0000\u0000\u0420\u0422\u0001\u0000\u0000\u0000\u0421\u0423\u0003\u00e3"+ + "j\u0000\u0422\u0421\u0001\u0000\u0000\u0000\u0423\u0424\u0001\u0000\u0000"+ + "\u0000\u0424\u0422\u0001\u0000\u0000\u0000\u0424\u0425\u0001\u0000\u0000"+ + "\u0000\u0425\u0427\u0001\u0000\u0000\u0000\u0426\u0415\u0001\u0000\u0000"+ + "\u0000\u0426\u041f\u0001\u0000\u0000\u0000\u0427\u00e6\u0001\u0000\u0000"+ + "\u0000\u0428\u042b\u0003\u00e5k\u0000\u0429\u042b\u0003\u00a9M\u0000\u042a"+ + "\u0428\u0001\u0000\u0000\u0000\u042a\u0429\u0001\u0000\u0000\u0000\u042b"+ + "\u042c\u0001\u0000\u0000\u0000\u042c\u042a\u0001\u0000\u0000\u0000\u042c"+ + 
"\u042d\u0001\u0000\u0000\u0000\u042d\u00e8\u0001\u0000\u0000\u0000\u042e"+ + "\u042f\u00037\u0014\u0000\u042f\u0430\u0001\u0000\u0000\u0000\u0430\u0431"+ + "\u0006m\n\u0000\u0431\u00ea\u0001\u0000\u0000\u0000\u0432\u0433\u0003"+ + "9\u0015\u0000\u0433\u0434\u0001\u0000\u0000\u0000\u0434\u0435\u0006n\n"+ + "\u0000\u0435\u00ec\u0001\u0000\u0000\u0000\u0436\u0437\u0003;\u0016\u0000"+ + "\u0437\u0438\u0001\u0000\u0000\u0000\u0438\u0439\u0006o\n\u0000\u0439"+ + "\u00ee\u0001\u0000\u0000\u0000\u043a\u043b\u0003=\u0017\u0000\u043b\u043c"+ + "\u0001\u0000\u0000\u0000\u043c\u043d\u0006p\u000e\u0000\u043d\u043e\u0006"+ + "p\u000b\u0000\u043e\u00f0\u0001\u0000\u0000\u0000\u043f\u0440\u0003_("+ + "\u0000\u0440\u0441\u0001\u0000\u0000\u0000\u0441\u0442\u0006q\u0012\u0000"+ + "\u0442\u00f2\u0001\u0000\u0000\u0000\u0443\u0444\u0003c*\u0000\u0444\u0445"+ + "\u0001\u0000\u0000\u0000\u0445\u0446\u0006r\u0011\u0000\u0446\u00f4\u0001"+ + "\u0000\u0000\u0000\u0447\u0448\u0003g,\u0000\u0448\u0449\u0001\u0000\u0000"+ + "\u0000\u0449\u044a\u0006s\u0015\u0000\u044a\u00f6\u0001\u0000\u0000\u0000"+ + "\u044b\u044c\u0003\u007f8\u0000\u044c\u044d\u0001\u0000\u0000\u0000\u044d"+ + "\u044e\u0006t\u0016\u0000\u044e\u00f8\u0001\u0000\u0000\u0000\u044f\u0450"+ + "\u0003\u00a1I\u0000\u0450\u0451\u0001\u0000\u0000\u0000\u0451\u0452\u0006"+ + "u\u0017\u0000\u0452\u00fa\u0001\u0000\u0000\u0000\u0453\u0454\u0007\f"+ + "\u0000\u0000\u0454\u0455\u0007\u0002\u0000\u0000\u0455\u00fc\u0001\u0000"+ + "\u0000\u0000\u0456\u0457\u0003\u00e7l\u0000\u0457\u0458\u0001\u0000\u0000"+ + "\u0000\u0458\u0459\u0006w\u0018\u0000\u0459\u00fe\u0001\u0000\u0000\u0000"+ + "\u045a\u045b\u00037\u0014\u0000\u045b\u045c\u0001\u0000\u0000\u0000\u045c"+ + "\u045d\u0006x\n\u0000\u045d\u0100\u0001\u0000\u0000\u0000\u045e\u045f"+ + "\u00039\u0015\u0000\u045f\u0460\u0001\u0000\u0000\u0000\u0460\u0461\u0006"+ + "y\n\u0000\u0461\u0102\u0001\u0000\u0000\u0000\u0462\u0463\u0003;\u0016"+ + 
"\u0000\u0463\u0464\u0001\u0000\u0000\u0000\u0464\u0465\u0006z\n\u0000"+ + "\u0465\u0104\u0001\u0000\u0000\u0000\u0466\u0467\u0003=\u0017\u0000\u0467"+ + "\u0468\u0001\u0000\u0000\u0000\u0468\u0469\u0006{\u000e\u0000\u0469\u046a"+ + "\u0006{\u000b\u0000\u046a\u0106\u0001\u0000\u0000\u0000\u046b\u046c\u0003"+ + "\u00a3J\u0000\u046c\u046d\u0001\u0000\u0000\u0000\u046d\u046e\u0006|\f"+ + "\u0000\u046e\u046f\u0006|\u0019\u0000\u046f\u0108\u0001\u0000\u0000\u0000"+ + "\u0470\u0471\u0007\u0007\u0000\u0000\u0471\u0472\u0007\t\u0000\u0000\u0472"+ + "\u0473\u0001\u0000\u0000\u0000\u0473\u0474\u0006}\u001a\u0000\u0474\u010a"+ + "\u0001\u0000\u0000\u0000\u0475\u0476\u0007\u0013\u0000\u0000\u0476\u0477"+ + "\u0007\u0001\u0000\u0000\u0477\u0478\u0007\u0005\u0000\u0000\u0478\u0479"+ + "\u0007\n\u0000\u0000\u0479\u047a\u0001\u0000\u0000\u0000\u047a\u047b\u0006"+ + "~\u001a\u0000\u047b\u010c\u0001\u0000\u0000\u0000\u047c\u047d\b\"\u0000"+ + "\u0000\u047d\u010e\u0001\u0000\u0000\u0000\u047e\u0480\u0003\u010d\u007f"+ + "\u0000\u047f\u047e\u0001\u0000\u0000\u0000\u0480\u0481\u0001\u0000\u0000"+ + "\u0000\u0481\u047f\u0001\u0000\u0000\u0000\u0481\u0482\u0001\u0000\u0000"+ + "\u0000\u0482\u0483\u0001\u0000\u0000\u0000\u0483\u0484\u0003\u014f\u00a0"+ + "\u0000\u0484\u0486\u0001\u0000\u0000\u0000\u0485\u047f\u0001\u0000\u0000"+ + "\u0000\u0485\u0486\u0001\u0000\u0000\u0000\u0486\u0488\u0001\u0000\u0000"+ + "\u0000\u0487\u0489\u0003\u010d\u007f\u0000\u0488\u0487\u0001\u0000\u0000"+ + "\u0000\u0489\u048a\u0001\u0000\u0000\u0000\u048a\u0488\u0001\u0000\u0000"+ + "\u0000\u048a\u048b\u0001\u0000\u0000\u0000\u048b\u0110\u0001\u0000\u0000"+ + "\u0000\u048c\u048d\u0003\u010f\u0080\u0000\u048d\u048e\u0001\u0000\u0000"+ + "\u0000\u048e\u048f\u0006\u0081\u001b\u0000\u048f\u0112\u0001\u0000\u0000"+ + "\u0000\u0490\u0491\u00037\u0014\u0000\u0491\u0492\u0001\u0000\u0000\u0000"+ + "\u0492\u0493\u0006\u0082\n\u0000\u0493\u0114\u0001\u0000\u0000\u0000\u0494"+ + 
"\u0495\u00039\u0015\u0000\u0495\u0496\u0001\u0000\u0000\u0000\u0496\u0497"+ + "\u0006\u0083\n\u0000\u0497\u0116\u0001\u0000\u0000\u0000\u0498\u0499\u0003"+ + ";\u0016\u0000\u0499\u049a\u0001\u0000\u0000\u0000\u049a\u049b\u0006\u0084"+ + "\n\u0000\u049b\u0118\u0001\u0000\u0000\u0000\u049c\u049d\u0003=\u0017"+ + "\u0000\u049d\u049e\u0001\u0000\u0000\u0000\u049e\u049f\u0006\u0085\u000e"+ + "\u0000\u049f\u04a0\u0006\u0085\u000b\u0000\u04a0\u04a1\u0006\u0085\u000b"+ + "\u0000\u04a1\u011a\u0001\u0000\u0000\u0000\u04a2\u04a3\u0003_(\u0000\u04a3"+ + "\u04a4\u0001\u0000\u0000\u0000\u04a4\u04a5\u0006\u0086\u0012\u0000\u04a5"+ + "\u011c\u0001\u0000\u0000\u0000\u04a6\u04a7\u0003c*\u0000\u04a7\u04a8\u0001"+ + "\u0000\u0000\u0000\u04a8\u04a9\u0006\u0087\u0011\u0000\u04a9\u011e\u0001"+ + "\u0000\u0000\u0000\u04aa\u04ab\u0003g,\u0000\u04ab\u04ac\u0001\u0000\u0000"+ + "\u0000\u04ac\u04ad\u0006\u0088\u0015\u0000\u04ad\u0120\u0001\u0000\u0000"+ + "\u0000\u04ae\u04af\u0003\u010b~\u0000\u04af\u04b0\u0001\u0000\u0000\u0000"+ + "\u04b0\u04b1\u0006\u0089\u001c\u0000\u04b1\u0122\u0001\u0000\u0000\u0000"+ + "\u04b2\u04b3\u0003\u00e7l\u0000\u04b3\u04b4\u0001\u0000\u0000\u0000\u04b4"+ + "\u04b5\u0006\u008a\u0018\u0000\u04b5\u0124\u0001\u0000\u0000\u0000\u04b6"+ + "\u04b7\u0003\u00abN\u0000\u04b7\u04b8\u0001\u0000\u0000\u0000\u04b8\u04b9"+ + "\u0006\u008b\u001d\u0000\u04b9\u0126\u0001\u0000\u0000\u0000\u04ba\u04bb"+ + "\u0003\u007f8\u0000\u04bb\u04bc\u0001\u0000\u0000\u0000\u04bc\u04bd\u0006"+ + "\u008c\u0016\u0000\u04bd\u0128\u0001\u0000\u0000\u0000\u04be\u04bf\u0003"+ + "\u00a1I\u0000\u04bf\u04c0\u0001\u0000\u0000\u0000\u04c0\u04c1\u0006\u008d"+ + "\u0017\u0000\u04c1\u012a\u0001\u0000\u0000\u0000\u04c2\u04c3\u00037\u0014"+ + "\u0000\u04c3\u04c4\u0001\u0000\u0000\u0000\u04c4\u04c5\u0006\u008e\n\u0000"+ + "\u04c5\u012c\u0001\u0000\u0000\u0000\u04c6\u04c7\u00039\u0015\u0000\u04c7"+ + "\u04c8\u0001\u0000\u0000\u0000\u04c8\u04c9\u0006\u008f\n\u0000\u04c9\u012e"+ + 
"\u0001\u0000\u0000\u0000\u04ca\u04cb\u0003;\u0016\u0000\u04cb\u04cc\u0001"+ + "\u0000\u0000\u0000\u04cc\u04cd\u0006\u0090\n\u0000\u04cd\u0130\u0001\u0000"+ + "\u0000\u0000\u04ce\u04cf\u0003=\u0017\u0000\u04cf\u04d0\u0001\u0000\u0000"+ + "\u0000\u04d0\u04d1\u0006\u0091\u000e\u0000\u04d1\u04d2\u0006\u0091\u000b"+ + "\u0000\u04d2\u0132\u0001\u0000\u0000\u0000\u04d3\u04d4\u0003g,\u0000\u04d4"+ + "\u04d5\u0001\u0000\u0000\u0000\u04d5\u04d6\u0006\u0092\u0015\u0000\u04d6"+ + "\u0134\u0001\u0000\u0000\u0000\u04d7\u04d8\u0003\u007f8\u0000\u04d8\u04d9"+ + "\u0001\u0000\u0000\u0000\u04d9\u04da\u0006\u0093\u0016\u0000\u04da\u0136"+ + "\u0001\u0000\u0000\u0000\u04db\u04dc\u0003\u00a1I\u0000\u04dc\u04dd\u0001"+ + "\u0000\u0000\u0000\u04dd\u04de\u0006\u0094\u0017\u0000\u04de\u0138\u0001"+ + "\u0000\u0000\u0000\u04df\u04e0\u0003\u00abN\u0000\u04e0\u04e1\u0001\u0000"+ + "\u0000\u0000\u04e1\u04e2\u0006\u0095\u001d\u0000\u04e2\u013a\u0001\u0000"+ + "\u0000\u0000\u04e3\u04e4\u0003\u00a7L\u0000\u04e4\u04e5\u0001\u0000\u0000"+ + "\u0000\u04e5\u04e6\u0006\u0096\u001e\u0000\u04e6\u013c\u0001\u0000\u0000"+ + "\u0000\u04e7\u04e8\u00037\u0014\u0000\u04e8\u04e9\u0001\u0000\u0000\u0000"+ + "\u04e9\u04ea\u0006\u0097\n\u0000\u04ea\u013e\u0001\u0000\u0000\u0000\u04eb"+ + "\u04ec\u00039\u0015\u0000\u04ec\u04ed\u0001\u0000\u0000\u0000\u04ed\u04ee"+ + "\u0006\u0098\n\u0000\u04ee\u0140\u0001\u0000\u0000\u0000\u04ef\u04f0\u0003"+ + ";\u0016\u0000\u04f0\u04f1\u0001\u0000\u0000\u0000\u04f1\u04f2\u0006\u0099"+ + "\n\u0000\u04f2\u0142\u0001\u0000\u0000\u0000\u04f3\u04f4\u0003=\u0017"+ + "\u0000\u04f4\u04f5\u0001\u0000\u0000\u0000\u04f5\u04f6\u0006\u009a\u000e"+ + "\u0000\u04f6\u04f7\u0006\u009a\u000b\u0000\u04f7\u0144\u0001\u0000\u0000"+ + "\u0000\u04f8\u04f9\u0007\u0001\u0000\u0000\u04f9\u04fa\u0007\t\u0000\u0000"+ + "\u04fa\u04fb\u0007\u000f\u0000\u0000\u04fb\u04fc\u0007\u0007\u0000\u0000"+ + "\u04fc\u0146\u0001\u0000\u0000\u0000\u04fd\u04fe\u00037\u0014\u0000\u04fe"+ + 
"\u04ff\u0001\u0000\u0000\u0000\u04ff\u0500\u0006\u009c\n\u0000\u0500\u0148"+ + "\u0001\u0000\u0000\u0000\u0501\u0502\u00039\u0015\u0000\u0502\u0503\u0001"+ + "\u0000\u0000\u0000\u0503\u0504\u0006\u009d\n\u0000\u0504\u014a\u0001\u0000"+ + "\u0000\u0000\u0505\u0506\u0003;\u0016\u0000\u0506\u0507\u0001\u0000\u0000"+ + "\u0000\u0507\u0508\u0006\u009e\n\u0000\u0508\u014c\u0001\u0000\u0000\u0000"+ + "\u0509\u050a\u0003\u00a5K\u0000\u050a\u050b\u0001\u0000\u0000\u0000\u050b"+ + "\u050c\u0006\u009f\u000f\u0000\u050c\u050d\u0006\u009f\u000b\u0000\u050d"+ + "\u014e\u0001\u0000\u0000\u0000\u050e\u050f\u0005:\u0000\u0000\u050f\u0150"+ + "\u0001\u0000\u0000\u0000\u0510\u0516\u0003I\u001d\u0000\u0511\u0516\u0003"+ + "?\u0018\u0000\u0512\u0516\u0003g,\u0000\u0513\u0516\u0003A\u0019\u0000"+ + "\u0514\u0516\u0003O \u0000\u0515\u0510\u0001\u0000\u0000\u0000\u0515\u0511"+ + "\u0001\u0000\u0000\u0000\u0515\u0512\u0001\u0000\u0000\u0000\u0515\u0513"+ + "\u0001\u0000\u0000\u0000\u0515\u0514\u0001\u0000\u0000\u0000\u0516\u0517"+ + "\u0001\u0000\u0000\u0000\u0517\u0515\u0001\u0000\u0000\u0000\u0517\u0518"+ + "\u0001\u0000\u0000\u0000\u0518\u0152\u0001\u0000\u0000\u0000\u0519\u051a"+ + "\u00037\u0014\u0000\u051a\u051b\u0001\u0000\u0000\u0000\u051b\u051c\u0006"+ + "\u00a2\n\u0000\u051c\u0154\u0001\u0000\u0000\u0000\u051d\u051e\u00039"+ + "\u0015\u0000\u051e\u051f\u0001\u0000\u0000\u0000\u051f\u0520\u0006\u00a3"+ + "\n\u0000\u0520\u0156\u0001\u0000\u0000\u0000\u0521\u0522\u0003;\u0016"+ + "\u0000\u0522\u0523\u0001\u0000\u0000\u0000\u0523\u0524\u0006\u00a4\n\u0000"+ + "\u0524\u0158\u0001\u0000\u0000\u0000\u0525\u0526\u0003=\u0017\u0000\u0526"+ + "\u0527\u0001\u0000\u0000\u0000\u0527\u0528\u0006\u00a5\u000e\u0000\u0528"+ + "\u0529\u0006\u00a5\u000b\u0000\u0529\u015a\u0001\u0000\u0000\u0000\u052a"+ + "\u052b\u0003\u014f\u00a0\u0000\u052b\u052c\u0001\u0000\u0000\u0000\u052c"+ + "\u052d\u0006\u00a6\u0010\u0000\u052d\u015c\u0001\u0000\u0000\u0000\u052e"+ + 
"\u052f\u0003c*\u0000\u052f\u0530\u0001\u0000\u0000\u0000\u0530\u0531\u0006"+ + "\u00a7\u0011\u0000\u0531\u015e\u0001\u0000\u0000\u0000\u0532\u0533\u0003"+ + "g,\u0000\u0533\u0534\u0001\u0000\u0000\u0000\u0534\u0535\u0006\u00a8\u0015"+ + "\u0000\u0535\u0160\u0001\u0000\u0000\u0000\u0536\u0537\u0003\u0109}\u0000"+ + "\u0537\u0538\u0001\u0000\u0000\u0000\u0538\u0539\u0006\u00a9\u001f\u0000"+ + "\u0539\u053a\u0006\u00a9 \u0000\u053a\u0162\u0001\u0000\u0000\u0000\u053b"+ + "\u053c\u0003\u00cd_\u0000\u053c\u053d\u0001\u0000\u0000\u0000\u053d\u053e"+ + "\u0006\u00aa\u0013\u0000\u053e\u0164\u0001\u0000\u0000\u0000\u053f\u0540"+ + "\u0003S\"\u0000\u0540\u0541\u0001\u0000\u0000\u0000\u0541\u0542\u0006"+ + "\u00ab\u0014\u0000\u0542\u0166\u0001\u0000\u0000\u0000\u0543\u0544\u0003"+ + "7\u0014\u0000\u0544\u0545\u0001\u0000\u0000\u0000\u0545\u0546\u0006\u00ac"+ + "\n\u0000\u0546\u0168\u0001\u0000\u0000\u0000\u0547\u0548\u00039\u0015"+ + "\u0000\u0548\u0549\u0001\u0000\u0000\u0000\u0549\u054a\u0006\u00ad\n\u0000"+ + "\u054a\u016a\u0001\u0000\u0000\u0000\u054b\u054c\u0003;\u0016\u0000\u054c"+ + "\u054d\u0001\u0000\u0000\u0000\u054d\u054e\u0006\u00ae\n\u0000\u054e\u016c"+ + "\u0001\u0000\u0000\u0000\u054f\u0550\u0003=\u0017\u0000\u0550\u0551\u0001"+ + "\u0000\u0000\u0000\u0551\u0552\u0006\u00af\u000e\u0000\u0552\u0553\u0006"+ + "\u00af\u000b\u0000\u0553\u0554\u0006\u00af\u000b\u0000\u0554\u016e\u0001"+ + "\u0000\u0000\u0000\u0555\u0556\u0003c*\u0000\u0556\u0557\u0001\u0000\u0000"+ + "\u0000\u0557\u0558\u0006\u00b0\u0011\u0000\u0558\u0170\u0001\u0000\u0000"+ + "\u0000\u0559\u055a\u0003g,\u0000\u055a\u055b\u0001\u0000\u0000\u0000\u055b"+ + "\u055c\u0006\u00b1\u0015\u0000\u055c\u0172\u0001\u0000\u0000\u0000\u055d"+ + "\u055e\u0003\u00e7l\u0000\u055e\u055f\u0001\u0000\u0000\u0000\u055f\u0560"+ + "\u0006\u00b2\u0018\u0000\u0560\u0174\u0001\u0000\u0000\u0000\u0561\u0562"+ + "\u00037\u0014\u0000\u0562\u0563\u0001\u0000\u0000\u0000\u0563\u0564\u0006"+ + 
"\u00b3\n\u0000\u0564\u0176\u0001\u0000\u0000\u0000\u0565\u0566\u00039"+ + "\u0015\u0000\u0566\u0567\u0001\u0000\u0000\u0000\u0567\u0568\u0006\u00b4"+ + "\n\u0000\u0568\u0178\u0001\u0000\u0000\u0000\u0569\u056a\u0003;\u0016"+ + "\u0000\u056a\u056b\u0001\u0000\u0000\u0000\u056b\u056c\u0006\u00b5\n\u0000"+ + "\u056c\u017a\u0001\u0000\u0000\u0000\u056d\u056e\u0003=\u0017\u0000\u056e"+ + "\u056f\u0001\u0000\u0000\u0000\u056f\u0570\u0006\u00b6\u000e\u0000\u0570"+ + "\u0571\u0006\u00b6\u000b\u0000\u0571\u017c\u0001\u0000\u0000\u0000\u0572"+ + "\u0573\u0003\u00cd_\u0000\u0573\u0574\u0001\u0000\u0000\u0000\u0574\u0575"+ + "\u0006\u00b7\u0013\u0000\u0575\u0576\u0006\u00b7\u000b\u0000\u0576\u0577"+ + "\u0006\u00b7!\u0000\u0577\u017e\u0001\u0000\u0000\u0000\u0578\u0579\u0003"+ + "S\"\u0000\u0579\u057a\u0001\u0000\u0000\u0000\u057a\u057b\u0006\u00b8"+ + "\u0014\u0000\u057b\u057c\u0006\u00b8\u000b\u0000\u057c\u057d\u0006\u00b8"+ + "!\u0000\u057d\u0180\u0001\u0000\u0000\u0000\u057e\u057f\u00037\u0014\u0000"+ + "\u057f\u0580\u0001\u0000\u0000\u0000\u0580\u0581\u0006\u00b9\n\u0000\u0581"+ + "\u0182\u0001\u0000\u0000\u0000\u0582\u0583\u00039\u0015\u0000\u0583\u0584"+ + "\u0001\u0000\u0000\u0000\u0584\u0585\u0006\u00ba\n\u0000\u0585\u0184\u0001"+ + "\u0000\u0000\u0000\u0586\u0587\u0003;\u0016\u0000\u0587\u0588\u0001\u0000"+ + "\u0000\u0000\u0588\u0589\u0006\u00bb\n\u0000\u0589\u0186\u0001\u0000\u0000"+ + "\u0000\u058a\u058b\u0003\u014f\u00a0\u0000\u058b\u058c\u0001\u0000\u0000"+ + "\u0000\u058c\u058d\u0006\u00bc\u0010\u0000\u058d\u058e\u0006\u00bc\u000b"+ + "\u0000\u058e\u058f\u0006\u00bc\t\u0000\u058f\u0188\u0001\u0000\u0000\u0000"+ + "\u0590\u0591\u0003c*\u0000\u0591\u0592\u0001\u0000\u0000\u0000\u0592\u0593"+ + "\u0006\u00bd\u0011\u0000\u0593\u0594\u0006\u00bd\u000b\u0000\u0594\u0595"+ + "\u0006\u00bd\t\u0000\u0595\u018a\u0001\u0000\u0000\u0000\u0596\u0597\u0003"+ + "7\u0014\u0000\u0597\u0598\u0001\u0000\u0000\u0000\u0598\u0599\u0006\u00be"+ + 
"\n\u0000\u0599\u018c\u0001\u0000\u0000\u0000\u059a\u059b\u00039\u0015"+ + "\u0000\u059b\u059c\u0001\u0000\u0000\u0000\u059c\u059d\u0006\u00bf\n\u0000"+ + "\u059d\u018e\u0001\u0000\u0000\u0000\u059e\u059f\u0003;\u0016\u0000\u059f"+ + "\u05a0\u0001\u0000\u0000\u0000\u05a0\u05a1\u0006\u00c0\n\u0000\u05a1\u0190"+ + "\u0001\u0000\u0000\u0000\u05a2\u05a3\u0003\u00abN\u0000\u05a3\u05a4\u0001"+ + "\u0000\u0000\u0000\u05a4\u05a5\u0006\u00c1\u000b\u0000\u05a5\u05a6\u0006"+ + "\u00c1\u0000\u0000\u05a6\u05a7\u0006\u00c1\u001d\u0000\u05a7\u0192\u0001"+ + "\u0000\u0000\u0000\u05a8\u05a9\u0003\u00a7L\u0000\u05a9\u05aa\u0001\u0000"+ + "\u0000\u0000\u05aa\u05ab\u0006\u00c2\u000b\u0000\u05ab\u05ac\u0006\u00c2"+ + "\u0000\u0000\u05ac\u05ad\u0006\u00c2\u001e\u0000\u05ad\u0194\u0001\u0000"+ + "\u0000\u0000\u05ae\u05af\u0003Y%\u0000\u05af\u05b0\u0001\u0000\u0000\u0000"+ + "\u05b0\u05b1\u0006\u00c3\u000b\u0000\u05b1\u05b2\u0006\u00c3\u0000\u0000"+ + "\u05b2\u05b3\u0006\u00c3\"\u0000\u05b3\u0196\u0001\u0000\u0000\u0000\u05b4"+ + "\u05b5\u0003=\u0017\u0000\u05b5\u05b6\u0001\u0000\u0000\u0000\u05b6\u05b7"+ + "\u0006\u00c4\u000e\u0000\u05b7\u05b8\u0006\u00c4\u000b\u0000\u05b8\u0198"+ + "\u0001\u0000\u0000\u0000A\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007"+ + "\b\t\n\u000b\f\r\u000e\u0241\u024b\u024f\u0252\u025b\u025d\u0268\u027b"+ + "\u0280\u0289\u0290\u0295\u0297\u02a2\u02aa\u02ad\u02af\u02b4\u02b9\u02bf"+ + "\u02c6\u02cb\u02d1\u02d4\u02dc\u02e0\u0360\u0365\u036c\u036e\u037e\u0383"+ + "\u0388\u038a\u0390\u03dd\u03e2\u0411\u0415\u041a\u041f\u0424\u0426\u042a"+ + "\u042c\u0481\u0485\u048a\u0515\u0517#\u0005\u0001\u0000\u0005\u0004\u0000"+ + "\u0005\u0006\u0000\u0005\u0002\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005"+ + "\u0005\u0000\u0005\t\u0000\u0005\u000b\u0000\u0005\r\u0000\u0000\u0001"+ + "\u0000\u0004\u0000\u0000\u0007A\u0000\u0005\u0000\u0000\u0007\u0018\u0000"+ + "\u0007B\u0000\u0007h\u0000\u0007!\u0000\u0007\u001f\u0000\u0007L\u0000"+ + 
"\u0007\u0019\u0000\u0007#\u0000\u0007/\u0000\u0007@\u0000\u0007P\u0000"+ + "\u0005\n\u0000\u0005\u0007\u0000\u0007Z\u0000\u0007Y\u0000\u0007D\u0000"+ + "\u0007C\u0000\u0007X\u0000\u0005\f\u0000\u0005\u000e\u0000\u0007\u001c"+ + "\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index 5fdf80f24d9b0..0db5c82878fcf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -62,7 +62,7 @@ null '*' '/' '%' -null +'match' null null ']' @@ -185,7 +185,7 @@ MINUS ASTERISK SLASH PERCENT -DEV_MATCH +MATCH NAMED_OR_POSITIONAL_PARAM OPENING_BRACKET CLOSING_BRACKET @@ -308,4 +308,4 @@ inlinestatsCommand atn: -[4, 1, 120, 587, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 130, 8, 1, 10, 1, 12, 1, 133, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 141, 8, 2, 1, 
3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 159, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 171, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 178, 8, 5, 10, 5, 12, 5, 181, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 188, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 194, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 202, 8, 5, 10, 5, 12, 5, 205, 9, 5, 1, 6, 1, 6, 3, 6, 209, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 216, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 221, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 232, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 238, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 246, 8, 9, 10, 9, 12, 9, 249, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 259, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 264, 8, 10, 10, 10, 12, 10, 267, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 275, 8, 11, 10, 11, 12, 11, 278, 9, 11, 3, 11, 280, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 3, 12, 287, 8, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 5, 15, 297, 8, 15, 10, 15, 12, 15, 300, 9, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 3, 16, 307, 8, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 313, 8, 17, 10, 17, 12, 17, 316, 9, 17, 1, 17, 3, 17, 319, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 326, 8, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 334, 8, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 340, 8, 22, 10, 22, 12, 22, 343, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 353, 8, 24, 10, 24, 12, 24, 356, 9, 24, 1, 24, 3, 24, 359, 8, 24, 1, 24, 1, 24, 3, 24, 363, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 370, 8, 26, 1, 26, 1, 26, 3, 26, 374, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 379, 8, 27, 10, 27, 12, 27, 382, 9, 27, 1, 28, 1, 28, 1, 28, 5, 28, 387, 8, 28, 10, 28, 12, 28, 390, 9, 28, 1, 29, 1, 29, 1, 29, 5, 29, 395, 8, 29, 10, 29, 12, 29, 398, 9, 29, 1, 30, 1, 30, 1, 31, 1, 31, 3, 31, 404, 8, 31, 1, 32, 1, 32, 1, 32, 
1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 419, 8, 32, 10, 32, 12, 32, 422, 9, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 430, 8, 32, 10, 32, 12, 32, 433, 9, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 441, 8, 32, 10, 32, 12, 32, 444, 9, 32, 1, 32, 1, 32, 3, 32, 448, 8, 32, 1, 33, 1, 33, 3, 33, 452, 8, 33, 1, 34, 1, 34, 3, 34, 456, 8, 34, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 5, 36, 465, 8, 36, 10, 36, 12, 36, 468, 9, 36, 1, 37, 1, 37, 3, 37, 472, 8, 37, 1, 37, 1, 37, 3, 37, 476, 8, 37, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 5, 40, 488, 8, 40, 10, 40, 12, 40, 491, 9, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 3, 42, 501, 8, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 5, 45, 513, 8, 45, 10, 45, 12, 45, 516, 9, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 3, 48, 526, 8, 48, 1, 49, 3, 49, 529, 8, 49, 1, 49, 1, 49, 1, 50, 3, 50, 534, 8, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 3, 56, 556, 8, 56, 1, 56, 1, 56, 1, 56, 1, 56, 5, 56, 562, 8, 56, 10, 56, 12, 56, 565, 9, 56, 3, 56, 567, 8, 56, 1, 57, 1, 57, 1, 57, 3, 57, 572, 8, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 3, 59, 585, 8, 59, 1, 59, 0, 4, 2, 10, 18, 20, 60, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 0, 8, 1, 0, 58, 59, 1, 0, 60, 62, 2, 0, 25, 25, 76, 76, 1, 0, 67, 68, 2, 0, 30, 30, 34, 34, 2, 0, 37, 37, 40, 40, 2, 0, 36, 36, 50, 50, 2, 0, 51, 51, 53, 57, 613, 0, 120, 1, 0, 0, 0, 2, 123, 1, 0, 0, 0, 4, 140, 1, 0, 0, 0, 6, 158, 1, 0, 0, 0, 8, 160, 1, 0, 0, 0, 10, 193, 1, 0, 0, 0, 12, 220, 1, 0, 0, 0, 14, 222, 1, 0, 0, 0, 
16, 231, 1, 0, 0, 0, 18, 237, 1, 0, 0, 0, 20, 258, 1, 0, 0, 0, 22, 268, 1, 0, 0, 0, 24, 286, 1, 0, 0, 0, 26, 288, 1, 0, 0, 0, 28, 290, 1, 0, 0, 0, 30, 293, 1, 0, 0, 0, 32, 306, 1, 0, 0, 0, 34, 308, 1, 0, 0, 0, 36, 325, 1, 0, 0, 0, 38, 327, 1, 0, 0, 0, 40, 329, 1, 0, 0, 0, 42, 333, 1, 0, 0, 0, 44, 335, 1, 0, 0, 0, 46, 344, 1, 0, 0, 0, 48, 348, 1, 0, 0, 0, 50, 364, 1, 0, 0, 0, 52, 367, 1, 0, 0, 0, 54, 375, 1, 0, 0, 0, 56, 383, 1, 0, 0, 0, 58, 391, 1, 0, 0, 0, 60, 399, 1, 0, 0, 0, 62, 403, 1, 0, 0, 0, 64, 447, 1, 0, 0, 0, 66, 451, 1, 0, 0, 0, 68, 455, 1, 0, 0, 0, 70, 457, 1, 0, 0, 0, 72, 460, 1, 0, 0, 0, 74, 469, 1, 0, 0, 0, 76, 477, 1, 0, 0, 0, 78, 480, 1, 0, 0, 0, 80, 483, 1, 0, 0, 0, 82, 492, 1, 0, 0, 0, 84, 496, 1, 0, 0, 0, 86, 502, 1, 0, 0, 0, 88, 506, 1, 0, 0, 0, 90, 509, 1, 0, 0, 0, 92, 517, 1, 0, 0, 0, 94, 521, 1, 0, 0, 0, 96, 525, 1, 0, 0, 0, 98, 528, 1, 0, 0, 0, 100, 533, 1, 0, 0, 0, 102, 537, 1, 0, 0, 0, 104, 539, 1, 0, 0, 0, 106, 541, 1, 0, 0, 0, 108, 544, 1, 0, 0, 0, 110, 548, 1, 0, 0, 0, 112, 551, 1, 0, 0, 0, 114, 571, 1, 0, 0, 0, 116, 575, 1, 0, 0, 0, 118, 580, 1, 0, 0, 0, 120, 121, 3, 2, 1, 0, 121, 122, 5, 0, 0, 1, 122, 1, 1, 0, 0, 0, 123, 124, 6, 1, -1, 0, 124, 125, 3, 4, 2, 0, 125, 131, 1, 0, 0, 0, 126, 127, 10, 1, 0, 0, 127, 128, 5, 24, 0, 0, 128, 130, 3, 6, 3, 0, 129, 126, 1, 0, 0, 0, 130, 133, 1, 0, 0, 0, 131, 129, 1, 0, 0, 0, 131, 132, 1, 0, 0, 0, 132, 3, 1, 0, 0, 0, 133, 131, 1, 0, 0, 0, 134, 141, 3, 106, 53, 0, 135, 141, 3, 34, 17, 0, 136, 141, 3, 28, 14, 0, 137, 141, 3, 110, 55, 0, 138, 139, 4, 2, 1, 0, 139, 141, 3, 48, 24, 0, 140, 134, 1, 0, 0, 0, 140, 135, 1, 0, 0, 0, 140, 136, 1, 0, 0, 0, 140, 137, 1, 0, 0, 0, 140, 138, 1, 0, 0, 0, 141, 5, 1, 0, 0, 0, 142, 159, 3, 50, 25, 0, 143, 159, 3, 8, 4, 0, 144, 159, 3, 76, 38, 0, 145, 159, 3, 70, 35, 0, 146, 159, 3, 52, 26, 0, 147, 159, 3, 72, 36, 0, 148, 159, 3, 78, 39, 0, 149, 159, 3, 80, 40, 0, 150, 159, 3, 84, 42, 0, 151, 159, 3, 86, 43, 0, 152, 159, 3, 112, 56, 0, 153, 159, 3, 88, 44, 0, 154, 
155, 4, 3, 2, 0, 155, 159, 3, 118, 59, 0, 156, 157, 4, 3, 3, 0, 157, 159, 3, 116, 58, 0, 158, 142, 1, 0, 0, 0, 158, 143, 1, 0, 0, 0, 158, 144, 1, 0, 0, 0, 158, 145, 1, 0, 0, 0, 158, 146, 1, 0, 0, 0, 158, 147, 1, 0, 0, 0, 158, 148, 1, 0, 0, 0, 158, 149, 1, 0, 0, 0, 158, 150, 1, 0, 0, 0, 158, 151, 1, 0, 0, 0, 158, 152, 1, 0, 0, 0, 158, 153, 1, 0, 0, 0, 158, 154, 1, 0, 0, 0, 158, 156, 1, 0, 0, 0, 159, 7, 1, 0, 0, 0, 160, 161, 5, 16, 0, 0, 161, 162, 3, 10, 5, 0, 162, 9, 1, 0, 0, 0, 163, 164, 6, 5, -1, 0, 164, 165, 5, 43, 0, 0, 165, 194, 3, 10, 5, 8, 166, 194, 3, 16, 8, 0, 167, 194, 3, 12, 6, 0, 168, 170, 3, 16, 8, 0, 169, 171, 5, 43, 0, 0, 170, 169, 1, 0, 0, 0, 170, 171, 1, 0, 0, 0, 171, 172, 1, 0, 0, 0, 172, 173, 5, 38, 0, 0, 173, 174, 5, 42, 0, 0, 174, 179, 3, 16, 8, 0, 175, 176, 5, 33, 0, 0, 176, 178, 3, 16, 8, 0, 177, 175, 1, 0, 0, 0, 178, 181, 1, 0, 0, 0, 179, 177, 1, 0, 0, 0, 179, 180, 1, 0, 0, 0, 180, 182, 1, 0, 0, 0, 181, 179, 1, 0, 0, 0, 182, 183, 5, 49, 0, 0, 183, 194, 1, 0, 0, 0, 184, 185, 3, 16, 8, 0, 185, 187, 5, 39, 0, 0, 186, 188, 5, 43, 0, 0, 187, 186, 1, 0, 0, 0, 187, 188, 1, 0, 0, 0, 188, 189, 1, 0, 0, 0, 189, 190, 5, 44, 0, 0, 190, 194, 1, 0, 0, 0, 191, 192, 4, 5, 4, 0, 192, 194, 3, 14, 7, 0, 193, 163, 1, 0, 0, 0, 193, 166, 1, 0, 0, 0, 193, 167, 1, 0, 0, 0, 193, 168, 1, 0, 0, 0, 193, 184, 1, 0, 0, 0, 193, 191, 1, 0, 0, 0, 194, 203, 1, 0, 0, 0, 195, 196, 10, 5, 0, 0, 196, 197, 5, 29, 0, 0, 197, 202, 3, 10, 5, 6, 198, 199, 10, 4, 0, 0, 199, 200, 5, 46, 0, 0, 200, 202, 3, 10, 5, 5, 201, 195, 1, 0, 0, 0, 201, 198, 1, 0, 0, 0, 202, 205, 1, 0, 0, 0, 203, 201, 1, 0, 0, 0, 203, 204, 1, 0, 0, 0, 204, 11, 1, 0, 0, 0, 205, 203, 1, 0, 0, 0, 206, 208, 3, 16, 8, 0, 207, 209, 5, 43, 0, 0, 208, 207, 1, 0, 0, 0, 208, 209, 1, 0, 0, 0, 209, 210, 1, 0, 0, 0, 210, 211, 5, 41, 0, 0, 211, 212, 3, 102, 51, 0, 212, 221, 1, 0, 0, 0, 213, 215, 3, 16, 8, 0, 214, 216, 5, 43, 0, 0, 215, 214, 1, 0, 0, 0, 215, 216, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 218, 5, 48, 0, 0, 218, 219, 
3, 102, 51, 0, 219, 221, 1, 0, 0, 0, 220, 206, 1, 0, 0, 0, 220, 213, 1, 0, 0, 0, 221, 13, 1, 0, 0, 0, 222, 223, 3, 16, 8, 0, 223, 224, 5, 63, 0, 0, 224, 225, 3, 102, 51, 0, 225, 15, 1, 0, 0, 0, 226, 232, 3, 18, 9, 0, 227, 228, 3, 18, 9, 0, 228, 229, 3, 104, 52, 0, 229, 230, 3, 18, 9, 0, 230, 232, 1, 0, 0, 0, 231, 226, 1, 0, 0, 0, 231, 227, 1, 0, 0, 0, 232, 17, 1, 0, 0, 0, 233, 234, 6, 9, -1, 0, 234, 238, 3, 20, 10, 0, 235, 236, 7, 0, 0, 0, 236, 238, 3, 18, 9, 3, 237, 233, 1, 0, 0, 0, 237, 235, 1, 0, 0, 0, 238, 247, 1, 0, 0, 0, 239, 240, 10, 2, 0, 0, 240, 241, 7, 1, 0, 0, 241, 246, 3, 18, 9, 3, 242, 243, 10, 1, 0, 0, 243, 244, 7, 0, 0, 0, 244, 246, 3, 18, 9, 2, 245, 239, 1, 0, 0, 0, 245, 242, 1, 0, 0, 0, 246, 249, 1, 0, 0, 0, 247, 245, 1, 0, 0, 0, 247, 248, 1, 0, 0, 0, 248, 19, 1, 0, 0, 0, 249, 247, 1, 0, 0, 0, 250, 251, 6, 10, -1, 0, 251, 259, 3, 64, 32, 0, 252, 259, 3, 54, 27, 0, 253, 259, 3, 22, 11, 0, 254, 255, 5, 42, 0, 0, 255, 256, 3, 10, 5, 0, 256, 257, 5, 49, 0, 0, 257, 259, 1, 0, 0, 0, 258, 250, 1, 0, 0, 0, 258, 252, 1, 0, 0, 0, 258, 253, 1, 0, 0, 0, 258, 254, 1, 0, 0, 0, 259, 265, 1, 0, 0, 0, 260, 261, 10, 1, 0, 0, 261, 262, 5, 32, 0, 0, 262, 264, 3, 26, 13, 0, 263, 260, 1, 0, 0, 0, 264, 267, 1, 0, 0, 0, 265, 263, 1, 0, 0, 0, 265, 266, 1, 0, 0, 0, 266, 21, 1, 0, 0, 0, 267, 265, 1, 0, 0, 0, 268, 269, 3, 24, 12, 0, 269, 279, 5, 42, 0, 0, 270, 280, 5, 60, 0, 0, 271, 276, 3, 10, 5, 0, 272, 273, 5, 33, 0, 0, 273, 275, 3, 10, 5, 0, 274, 272, 1, 0, 0, 0, 275, 278, 1, 0, 0, 0, 276, 274, 1, 0, 0, 0, 276, 277, 1, 0, 0, 0, 277, 280, 1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 279, 270, 1, 0, 0, 0, 279, 271, 1, 0, 0, 0, 279, 280, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 282, 5, 49, 0, 0, 282, 23, 1, 0, 0, 0, 283, 284, 4, 12, 10, 0, 284, 287, 5, 63, 0, 0, 285, 287, 3, 68, 34, 0, 286, 283, 1, 0, 0, 0, 286, 285, 1, 0, 0, 0, 287, 25, 1, 0, 0, 0, 288, 289, 3, 60, 30, 0, 289, 27, 1, 0, 0, 0, 290, 291, 5, 12, 0, 0, 291, 292, 3, 30, 15, 0, 292, 29, 1, 0, 0, 0, 293, 298, 3, 32, 16, 0, 
294, 295, 5, 33, 0, 0, 295, 297, 3, 32, 16, 0, 296, 294, 1, 0, 0, 0, 297, 300, 1, 0, 0, 0, 298, 296, 1, 0, 0, 0, 298, 299, 1, 0, 0, 0, 299, 31, 1, 0, 0, 0, 300, 298, 1, 0, 0, 0, 301, 307, 3, 10, 5, 0, 302, 303, 3, 54, 27, 0, 303, 304, 5, 31, 0, 0, 304, 305, 3, 10, 5, 0, 305, 307, 1, 0, 0, 0, 306, 301, 1, 0, 0, 0, 306, 302, 1, 0, 0, 0, 307, 33, 1, 0, 0, 0, 308, 309, 5, 6, 0, 0, 309, 314, 3, 36, 18, 0, 310, 311, 5, 33, 0, 0, 311, 313, 3, 36, 18, 0, 312, 310, 1, 0, 0, 0, 313, 316, 1, 0, 0, 0, 314, 312, 1, 0, 0, 0, 314, 315, 1, 0, 0, 0, 315, 318, 1, 0, 0, 0, 316, 314, 1, 0, 0, 0, 317, 319, 3, 42, 21, 0, 318, 317, 1, 0, 0, 0, 318, 319, 1, 0, 0, 0, 319, 35, 1, 0, 0, 0, 320, 321, 3, 38, 19, 0, 321, 322, 5, 104, 0, 0, 322, 323, 3, 40, 20, 0, 323, 326, 1, 0, 0, 0, 324, 326, 3, 40, 20, 0, 325, 320, 1, 0, 0, 0, 325, 324, 1, 0, 0, 0, 326, 37, 1, 0, 0, 0, 327, 328, 5, 76, 0, 0, 328, 39, 1, 0, 0, 0, 329, 330, 7, 2, 0, 0, 330, 41, 1, 0, 0, 0, 331, 334, 3, 44, 22, 0, 332, 334, 3, 46, 23, 0, 333, 331, 1, 0, 0, 0, 333, 332, 1, 0, 0, 0, 334, 43, 1, 0, 0, 0, 335, 336, 5, 75, 0, 0, 336, 341, 5, 76, 0, 0, 337, 338, 5, 33, 0, 0, 338, 340, 5, 76, 0, 0, 339, 337, 1, 0, 0, 0, 340, 343, 1, 0, 0, 0, 341, 339, 1, 0, 0, 0, 341, 342, 1, 0, 0, 0, 342, 45, 1, 0, 0, 0, 343, 341, 1, 0, 0, 0, 344, 345, 5, 65, 0, 0, 345, 346, 3, 44, 22, 0, 346, 347, 5, 66, 0, 0, 347, 47, 1, 0, 0, 0, 348, 349, 5, 19, 0, 0, 349, 354, 3, 36, 18, 0, 350, 351, 5, 33, 0, 0, 351, 353, 3, 36, 18, 0, 352, 350, 1, 0, 0, 0, 353, 356, 1, 0, 0, 0, 354, 352, 1, 0, 0, 0, 354, 355, 1, 0, 0, 0, 355, 358, 1, 0, 0, 0, 356, 354, 1, 0, 0, 0, 357, 359, 3, 30, 15, 0, 358, 357, 1, 0, 0, 0, 358, 359, 1, 0, 0, 0, 359, 362, 1, 0, 0, 0, 360, 361, 5, 28, 0, 0, 361, 363, 3, 30, 15, 0, 362, 360, 1, 0, 0, 0, 362, 363, 1, 0, 0, 0, 363, 49, 1, 0, 0, 0, 364, 365, 5, 4, 0, 0, 365, 366, 3, 30, 15, 0, 366, 51, 1, 0, 0, 0, 367, 369, 5, 15, 0, 0, 368, 370, 3, 30, 15, 0, 369, 368, 1, 0, 0, 0, 369, 370, 1, 0, 0, 0, 370, 373, 1, 0, 0, 0, 371, 372, 5, 28, 0, 0, 
372, 374, 3, 30, 15, 0, 373, 371, 1, 0, 0, 0, 373, 374, 1, 0, 0, 0, 374, 53, 1, 0, 0, 0, 375, 380, 3, 68, 34, 0, 376, 377, 5, 35, 0, 0, 377, 379, 3, 68, 34, 0, 378, 376, 1, 0, 0, 0, 379, 382, 1, 0, 0, 0, 380, 378, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 55, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 383, 388, 3, 62, 31, 0, 384, 385, 5, 35, 0, 0, 385, 387, 3, 62, 31, 0, 386, 384, 1, 0, 0, 0, 387, 390, 1, 0, 0, 0, 388, 386, 1, 0, 0, 0, 388, 389, 1, 0, 0, 0, 389, 57, 1, 0, 0, 0, 390, 388, 1, 0, 0, 0, 391, 396, 3, 56, 28, 0, 392, 393, 5, 33, 0, 0, 393, 395, 3, 56, 28, 0, 394, 392, 1, 0, 0, 0, 395, 398, 1, 0, 0, 0, 396, 394, 1, 0, 0, 0, 396, 397, 1, 0, 0, 0, 397, 59, 1, 0, 0, 0, 398, 396, 1, 0, 0, 0, 399, 400, 7, 3, 0, 0, 400, 61, 1, 0, 0, 0, 401, 404, 5, 80, 0, 0, 402, 404, 3, 66, 33, 0, 403, 401, 1, 0, 0, 0, 403, 402, 1, 0, 0, 0, 404, 63, 1, 0, 0, 0, 405, 448, 5, 44, 0, 0, 406, 407, 3, 100, 50, 0, 407, 408, 5, 67, 0, 0, 408, 448, 1, 0, 0, 0, 409, 448, 3, 98, 49, 0, 410, 448, 3, 100, 50, 0, 411, 448, 3, 94, 47, 0, 412, 448, 3, 66, 33, 0, 413, 448, 3, 102, 51, 0, 414, 415, 5, 65, 0, 0, 415, 420, 3, 96, 48, 0, 416, 417, 5, 33, 0, 0, 417, 419, 3, 96, 48, 0, 418, 416, 1, 0, 0, 0, 419, 422, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 420, 421, 1, 0, 0, 0, 421, 423, 1, 0, 0, 0, 422, 420, 1, 0, 0, 0, 423, 424, 5, 66, 0, 0, 424, 448, 1, 0, 0, 0, 425, 426, 5, 65, 0, 0, 426, 431, 3, 94, 47, 0, 427, 428, 5, 33, 0, 0, 428, 430, 3, 94, 47, 0, 429, 427, 1, 0, 0, 0, 430, 433, 1, 0, 0, 0, 431, 429, 1, 0, 0, 0, 431, 432, 1, 0, 0, 0, 432, 434, 1, 0, 0, 0, 433, 431, 1, 0, 0, 0, 434, 435, 5, 66, 0, 0, 435, 448, 1, 0, 0, 0, 436, 437, 5, 65, 0, 0, 437, 442, 3, 102, 51, 0, 438, 439, 5, 33, 0, 0, 439, 441, 3, 102, 51, 0, 440, 438, 1, 0, 0, 0, 441, 444, 1, 0, 0, 0, 442, 440, 1, 0, 0, 0, 442, 443, 1, 0, 0, 0, 443, 445, 1, 0, 0, 0, 444, 442, 1, 0, 0, 0, 445, 446, 5, 66, 0, 0, 446, 448, 1, 0, 0, 0, 447, 405, 1, 0, 0, 0, 447, 406, 1, 0, 0, 0, 447, 409, 1, 0, 0, 0, 447, 410, 1, 0, 0, 0, 447, 411, 1, 0, 0, 0, 447, 412, 
1, 0, 0, 0, 447, 413, 1, 0, 0, 0, 447, 414, 1, 0, 0, 0, 447, 425, 1, 0, 0, 0, 447, 436, 1, 0, 0, 0, 448, 65, 1, 0, 0, 0, 449, 452, 5, 47, 0, 0, 450, 452, 5, 64, 0, 0, 451, 449, 1, 0, 0, 0, 451, 450, 1, 0, 0, 0, 452, 67, 1, 0, 0, 0, 453, 456, 3, 60, 30, 0, 454, 456, 3, 66, 33, 0, 455, 453, 1, 0, 0, 0, 455, 454, 1, 0, 0, 0, 456, 69, 1, 0, 0, 0, 457, 458, 5, 9, 0, 0, 458, 459, 5, 26, 0, 0, 459, 71, 1, 0, 0, 0, 460, 461, 5, 14, 0, 0, 461, 466, 3, 74, 37, 0, 462, 463, 5, 33, 0, 0, 463, 465, 3, 74, 37, 0, 464, 462, 1, 0, 0, 0, 465, 468, 1, 0, 0, 0, 466, 464, 1, 0, 0, 0, 466, 467, 1, 0, 0, 0, 467, 73, 1, 0, 0, 0, 468, 466, 1, 0, 0, 0, 469, 471, 3, 10, 5, 0, 470, 472, 7, 4, 0, 0, 471, 470, 1, 0, 0, 0, 471, 472, 1, 0, 0, 0, 472, 475, 1, 0, 0, 0, 473, 474, 5, 45, 0, 0, 474, 476, 7, 5, 0, 0, 475, 473, 1, 0, 0, 0, 475, 476, 1, 0, 0, 0, 476, 75, 1, 0, 0, 0, 477, 478, 5, 8, 0, 0, 478, 479, 3, 58, 29, 0, 479, 77, 1, 0, 0, 0, 480, 481, 5, 2, 0, 0, 481, 482, 3, 58, 29, 0, 482, 79, 1, 0, 0, 0, 483, 484, 5, 11, 0, 0, 484, 489, 3, 82, 41, 0, 485, 486, 5, 33, 0, 0, 486, 488, 3, 82, 41, 0, 487, 485, 1, 0, 0, 0, 488, 491, 1, 0, 0, 0, 489, 487, 1, 0, 0, 0, 489, 490, 1, 0, 0, 0, 490, 81, 1, 0, 0, 0, 491, 489, 1, 0, 0, 0, 492, 493, 3, 56, 28, 0, 493, 494, 5, 84, 0, 0, 494, 495, 3, 56, 28, 0, 495, 83, 1, 0, 0, 0, 496, 497, 5, 1, 0, 0, 497, 498, 3, 20, 10, 0, 498, 500, 3, 102, 51, 0, 499, 501, 3, 90, 45, 0, 500, 499, 1, 0, 0, 0, 500, 501, 1, 0, 0, 0, 501, 85, 1, 0, 0, 0, 502, 503, 5, 7, 0, 0, 503, 504, 3, 20, 10, 0, 504, 505, 3, 102, 51, 0, 505, 87, 1, 0, 0, 0, 506, 507, 5, 10, 0, 0, 507, 508, 3, 54, 27, 0, 508, 89, 1, 0, 0, 0, 509, 514, 3, 92, 46, 0, 510, 511, 5, 33, 0, 0, 511, 513, 3, 92, 46, 0, 512, 510, 1, 0, 0, 0, 513, 516, 1, 0, 0, 0, 514, 512, 1, 0, 0, 0, 514, 515, 1, 0, 0, 0, 515, 91, 1, 0, 0, 0, 516, 514, 1, 0, 0, 0, 517, 518, 3, 60, 30, 0, 518, 519, 5, 31, 0, 0, 519, 520, 3, 64, 32, 0, 520, 93, 1, 0, 0, 0, 521, 522, 7, 6, 0, 0, 522, 95, 1, 0, 0, 0, 523, 526, 3, 98, 49, 0, 524, 526, 
3, 100, 50, 0, 525, 523, 1, 0, 0, 0, 525, 524, 1, 0, 0, 0, 526, 97, 1, 0, 0, 0, 527, 529, 7, 0, 0, 0, 528, 527, 1, 0, 0, 0, 528, 529, 1, 0, 0, 0, 529, 530, 1, 0, 0, 0, 530, 531, 5, 27, 0, 0, 531, 99, 1, 0, 0, 0, 532, 534, 7, 0, 0, 0, 533, 532, 1, 0, 0, 0, 533, 534, 1, 0, 0, 0, 534, 535, 1, 0, 0, 0, 535, 536, 5, 26, 0, 0, 536, 101, 1, 0, 0, 0, 537, 538, 5, 25, 0, 0, 538, 103, 1, 0, 0, 0, 539, 540, 7, 7, 0, 0, 540, 105, 1, 0, 0, 0, 541, 542, 5, 5, 0, 0, 542, 543, 3, 108, 54, 0, 543, 107, 1, 0, 0, 0, 544, 545, 5, 65, 0, 0, 545, 546, 3, 2, 1, 0, 546, 547, 5, 66, 0, 0, 547, 109, 1, 0, 0, 0, 548, 549, 5, 13, 0, 0, 549, 550, 5, 100, 0, 0, 550, 111, 1, 0, 0, 0, 551, 552, 5, 3, 0, 0, 552, 555, 5, 90, 0, 0, 553, 554, 5, 88, 0, 0, 554, 556, 3, 56, 28, 0, 555, 553, 1, 0, 0, 0, 555, 556, 1, 0, 0, 0, 556, 566, 1, 0, 0, 0, 557, 558, 5, 89, 0, 0, 558, 563, 3, 114, 57, 0, 559, 560, 5, 33, 0, 0, 560, 562, 3, 114, 57, 0, 561, 559, 1, 0, 0, 0, 562, 565, 1, 0, 0, 0, 563, 561, 1, 0, 0, 0, 563, 564, 1, 0, 0, 0, 564, 567, 1, 0, 0, 0, 565, 563, 1, 0, 0, 0, 566, 557, 1, 0, 0, 0, 566, 567, 1, 0, 0, 0, 567, 113, 1, 0, 0, 0, 568, 569, 3, 56, 28, 0, 569, 570, 5, 31, 0, 0, 570, 572, 1, 0, 0, 0, 571, 568, 1, 0, 0, 0, 571, 572, 1, 0, 0, 0, 572, 573, 1, 0, 0, 0, 573, 574, 3, 56, 28, 0, 574, 115, 1, 0, 0, 0, 575, 576, 5, 18, 0, 0, 576, 577, 3, 36, 18, 0, 577, 578, 5, 88, 0, 0, 578, 579, 3, 58, 29, 0, 579, 117, 1, 0, 0, 0, 580, 581, 5, 17, 0, 0, 581, 584, 3, 30, 15, 0, 582, 583, 5, 28, 0, 0, 583, 585, 3, 30, 15, 0, 584, 582, 1, 0, 0, 0, 584, 585, 1, 0, 0, 0, 585, 119, 1, 0, 0, 0, 57, 131, 140, 158, 170, 179, 187, 193, 201, 203, 208, 215, 220, 231, 237, 245, 247, 258, 265, 276, 279, 286, 298, 306, 314, 318, 325, 333, 341, 354, 358, 362, 369, 373, 380, 388, 396, 403, 420, 431, 442, 447, 451, 455, 466, 471, 475, 489, 500, 514, 525, 528, 533, 555, 563, 566, 571, 584] \ No newline at end of file +[4, 1, 120, 586, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 
7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 130, 8, 1, 10, 1, 12, 1, 133, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 141, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 159, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 171, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 178, 8, 5, 10, 5, 12, 5, 181, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 188, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 194, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 202, 8, 5, 10, 5, 12, 5, 205, 9, 5, 1, 6, 1, 6, 3, 6, 209, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 216, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 221, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 232, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 238, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 246, 8, 9, 10, 9, 12, 9, 249, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 259, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 264, 8, 10, 10, 10, 12, 10, 267, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 275, 8, 11, 10, 11, 12, 11, 278, 9, 11, 3, 11, 280, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 3, 12, 286, 8, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 5, 15, 296, 8, 15, 10, 15, 12, 15, 299, 
9, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 3, 16, 306, 8, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 312, 8, 17, 10, 17, 12, 17, 315, 9, 17, 1, 17, 3, 17, 318, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 325, 8, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 333, 8, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 339, 8, 22, 10, 22, 12, 22, 342, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 352, 8, 24, 10, 24, 12, 24, 355, 9, 24, 1, 24, 3, 24, 358, 8, 24, 1, 24, 1, 24, 3, 24, 362, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 369, 8, 26, 1, 26, 1, 26, 3, 26, 373, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 378, 8, 27, 10, 27, 12, 27, 381, 9, 27, 1, 28, 1, 28, 1, 28, 5, 28, 386, 8, 28, 10, 28, 12, 28, 389, 9, 28, 1, 29, 1, 29, 1, 29, 5, 29, 394, 8, 29, 10, 29, 12, 29, 397, 9, 29, 1, 30, 1, 30, 1, 31, 1, 31, 3, 31, 403, 8, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 418, 8, 32, 10, 32, 12, 32, 421, 9, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 429, 8, 32, 10, 32, 12, 32, 432, 9, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 440, 8, 32, 10, 32, 12, 32, 443, 9, 32, 1, 32, 1, 32, 3, 32, 447, 8, 32, 1, 33, 1, 33, 3, 33, 451, 8, 33, 1, 34, 1, 34, 3, 34, 455, 8, 34, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 5, 36, 464, 8, 36, 10, 36, 12, 36, 467, 9, 36, 1, 37, 1, 37, 3, 37, 471, 8, 37, 1, 37, 1, 37, 3, 37, 475, 8, 37, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 5, 40, 487, 8, 40, 10, 40, 12, 40, 490, 9, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 3, 42, 500, 8, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 5, 45, 512, 8, 45, 10, 45, 12, 45, 515, 9, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 3, 48, 525, 8, 48, 1, 49, 3, 49, 528, 8, 49, 1, 49, 1, 49, 1, 50, 3, 50, 533, 8, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 
1, 56, 1, 56, 1, 56, 3, 56, 555, 8, 56, 1, 56, 1, 56, 1, 56, 1, 56, 5, 56, 561, 8, 56, 10, 56, 12, 56, 564, 9, 56, 3, 56, 566, 8, 56, 1, 57, 1, 57, 1, 57, 3, 57, 571, 8, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 3, 59, 584, 8, 59, 1, 59, 0, 4, 2, 10, 18, 20, 60, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 0, 8, 1, 0, 58, 59, 1, 0, 60, 62, 2, 0, 25, 25, 76, 76, 1, 0, 67, 68, 2, 0, 30, 30, 34, 34, 2, 0, 37, 37, 40, 40, 2, 0, 36, 36, 50, 50, 2, 0, 51, 51, 53, 57, 612, 0, 120, 1, 0, 0, 0, 2, 123, 1, 0, 0, 0, 4, 140, 1, 0, 0, 0, 6, 158, 1, 0, 0, 0, 8, 160, 1, 0, 0, 0, 10, 193, 1, 0, 0, 0, 12, 220, 1, 0, 0, 0, 14, 222, 1, 0, 0, 0, 16, 231, 1, 0, 0, 0, 18, 237, 1, 0, 0, 0, 20, 258, 1, 0, 0, 0, 22, 268, 1, 0, 0, 0, 24, 285, 1, 0, 0, 0, 26, 287, 1, 0, 0, 0, 28, 289, 1, 0, 0, 0, 30, 292, 1, 0, 0, 0, 32, 305, 1, 0, 0, 0, 34, 307, 1, 0, 0, 0, 36, 324, 1, 0, 0, 0, 38, 326, 1, 0, 0, 0, 40, 328, 1, 0, 0, 0, 42, 332, 1, 0, 0, 0, 44, 334, 1, 0, 0, 0, 46, 343, 1, 0, 0, 0, 48, 347, 1, 0, 0, 0, 50, 363, 1, 0, 0, 0, 52, 366, 1, 0, 0, 0, 54, 374, 1, 0, 0, 0, 56, 382, 1, 0, 0, 0, 58, 390, 1, 0, 0, 0, 60, 398, 1, 0, 0, 0, 62, 402, 1, 0, 0, 0, 64, 446, 1, 0, 0, 0, 66, 450, 1, 0, 0, 0, 68, 454, 1, 0, 0, 0, 70, 456, 1, 0, 0, 0, 72, 459, 1, 0, 0, 0, 74, 468, 1, 0, 0, 0, 76, 476, 1, 0, 0, 0, 78, 479, 1, 0, 0, 0, 80, 482, 1, 0, 0, 0, 82, 491, 1, 0, 0, 0, 84, 495, 1, 0, 0, 0, 86, 501, 1, 0, 0, 0, 88, 505, 1, 0, 0, 0, 90, 508, 1, 0, 0, 0, 92, 516, 1, 0, 0, 0, 94, 520, 1, 0, 0, 0, 96, 524, 1, 0, 0, 0, 98, 527, 1, 0, 0, 0, 100, 532, 1, 0, 0, 0, 102, 536, 1, 0, 0, 0, 104, 538, 1, 0, 0, 0, 106, 540, 1, 0, 0, 0, 108, 543, 1, 0, 0, 0, 110, 547, 1, 0, 0, 0, 112, 550, 1, 0, 0, 0, 114, 570, 1, 0, 0, 0, 116, 574, 1, 0, 0, 0, 118, 579, 1, 0, 0, 0, 120, 121, 3, 2, 1, 0, 
121, 122, 5, 0, 0, 1, 122, 1, 1, 0, 0, 0, 123, 124, 6, 1, -1, 0, 124, 125, 3, 4, 2, 0, 125, 131, 1, 0, 0, 0, 126, 127, 10, 1, 0, 0, 127, 128, 5, 24, 0, 0, 128, 130, 3, 6, 3, 0, 129, 126, 1, 0, 0, 0, 130, 133, 1, 0, 0, 0, 131, 129, 1, 0, 0, 0, 131, 132, 1, 0, 0, 0, 132, 3, 1, 0, 0, 0, 133, 131, 1, 0, 0, 0, 134, 141, 3, 106, 53, 0, 135, 141, 3, 34, 17, 0, 136, 141, 3, 28, 14, 0, 137, 141, 3, 110, 55, 0, 138, 139, 4, 2, 1, 0, 139, 141, 3, 48, 24, 0, 140, 134, 1, 0, 0, 0, 140, 135, 1, 0, 0, 0, 140, 136, 1, 0, 0, 0, 140, 137, 1, 0, 0, 0, 140, 138, 1, 0, 0, 0, 141, 5, 1, 0, 0, 0, 142, 159, 3, 50, 25, 0, 143, 159, 3, 8, 4, 0, 144, 159, 3, 76, 38, 0, 145, 159, 3, 70, 35, 0, 146, 159, 3, 52, 26, 0, 147, 159, 3, 72, 36, 0, 148, 159, 3, 78, 39, 0, 149, 159, 3, 80, 40, 0, 150, 159, 3, 84, 42, 0, 151, 159, 3, 86, 43, 0, 152, 159, 3, 112, 56, 0, 153, 159, 3, 88, 44, 0, 154, 155, 4, 3, 2, 0, 155, 159, 3, 118, 59, 0, 156, 157, 4, 3, 3, 0, 157, 159, 3, 116, 58, 0, 158, 142, 1, 0, 0, 0, 158, 143, 1, 0, 0, 0, 158, 144, 1, 0, 0, 0, 158, 145, 1, 0, 0, 0, 158, 146, 1, 0, 0, 0, 158, 147, 1, 0, 0, 0, 158, 148, 1, 0, 0, 0, 158, 149, 1, 0, 0, 0, 158, 150, 1, 0, 0, 0, 158, 151, 1, 0, 0, 0, 158, 152, 1, 0, 0, 0, 158, 153, 1, 0, 0, 0, 158, 154, 1, 0, 0, 0, 158, 156, 1, 0, 0, 0, 159, 7, 1, 0, 0, 0, 160, 161, 5, 16, 0, 0, 161, 162, 3, 10, 5, 0, 162, 9, 1, 0, 0, 0, 163, 164, 6, 5, -1, 0, 164, 165, 5, 43, 0, 0, 165, 194, 3, 10, 5, 8, 166, 194, 3, 16, 8, 0, 167, 194, 3, 12, 6, 0, 168, 170, 3, 16, 8, 0, 169, 171, 5, 43, 0, 0, 170, 169, 1, 0, 0, 0, 170, 171, 1, 0, 0, 0, 171, 172, 1, 0, 0, 0, 172, 173, 5, 38, 0, 0, 173, 174, 5, 42, 0, 0, 174, 179, 3, 16, 8, 0, 175, 176, 5, 33, 0, 0, 176, 178, 3, 16, 8, 0, 177, 175, 1, 0, 0, 0, 178, 181, 1, 0, 0, 0, 179, 177, 1, 0, 0, 0, 179, 180, 1, 0, 0, 0, 180, 182, 1, 0, 0, 0, 181, 179, 1, 0, 0, 0, 182, 183, 5, 49, 0, 0, 183, 194, 1, 0, 0, 0, 184, 185, 3, 16, 8, 0, 185, 187, 5, 39, 0, 0, 186, 188, 5, 43, 0, 0, 187, 186, 1, 0, 0, 0, 187, 188, 1, 0, 0, 0, 188, 189, 
1, 0, 0, 0, 189, 190, 5, 44, 0, 0, 190, 194, 1, 0, 0, 0, 191, 192, 4, 5, 4, 0, 192, 194, 3, 14, 7, 0, 193, 163, 1, 0, 0, 0, 193, 166, 1, 0, 0, 0, 193, 167, 1, 0, 0, 0, 193, 168, 1, 0, 0, 0, 193, 184, 1, 0, 0, 0, 193, 191, 1, 0, 0, 0, 194, 203, 1, 0, 0, 0, 195, 196, 10, 5, 0, 0, 196, 197, 5, 29, 0, 0, 197, 202, 3, 10, 5, 6, 198, 199, 10, 4, 0, 0, 199, 200, 5, 46, 0, 0, 200, 202, 3, 10, 5, 5, 201, 195, 1, 0, 0, 0, 201, 198, 1, 0, 0, 0, 202, 205, 1, 0, 0, 0, 203, 201, 1, 0, 0, 0, 203, 204, 1, 0, 0, 0, 204, 11, 1, 0, 0, 0, 205, 203, 1, 0, 0, 0, 206, 208, 3, 16, 8, 0, 207, 209, 5, 43, 0, 0, 208, 207, 1, 0, 0, 0, 208, 209, 1, 0, 0, 0, 209, 210, 1, 0, 0, 0, 210, 211, 5, 41, 0, 0, 211, 212, 3, 102, 51, 0, 212, 221, 1, 0, 0, 0, 213, 215, 3, 16, 8, 0, 214, 216, 5, 43, 0, 0, 215, 214, 1, 0, 0, 0, 215, 216, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 218, 5, 48, 0, 0, 218, 219, 3, 102, 51, 0, 219, 221, 1, 0, 0, 0, 220, 206, 1, 0, 0, 0, 220, 213, 1, 0, 0, 0, 221, 13, 1, 0, 0, 0, 222, 223, 3, 16, 8, 0, 223, 224, 5, 63, 0, 0, 224, 225, 3, 102, 51, 0, 225, 15, 1, 0, 0, 0, 226, 232, 3, 18, 9, 0, 227, 228, 3, 18, 9, 0, 228, 229, 3, 104, 52, 0, 229, 230, 3, 18, 9, 0, 230, 232, 1, 0, 0, 0, 231, 226, 1, 0, 0, 0, 231, 227, 1, 0, 0, 0, 232, 17, 1, 0, 0, 0, 233, 234, 6, 9, -1, 0, 234, 238, 3, 20, 10, 0, 235, 236, 7, 0, 0, 0, 236, 238, 3, 18, 9, 3, 237, 233, 1, 0, 0, 0, 237, 235, 1, 0, 0, 0, 238, 247, 1, 0, 0, 0, 239, 240, 10, 2, 0, 0, 240, 241, 7, 1, 0, 0, 241, 246, 3, 18, 9, 3, 242, 243, 10, 1, 0, 0, 243, 244, 7, 0, 0, 0, 244, 246, 3, 18, 9, 2, 245, 239, 1, 0, 0, 0, 245, 242, 1, 0, 0, 0, 246, 249, 1, 0, 0, 0, 247, 245, 1, 0, 0, 0, 247, 248, 1, 0, 0, 0, 248, 19, 1, 0, 0, 0, 249, 247, 1, 0, 0, 0, 250, 251, 6, 10, -1, 0, 251, 259, 3, 64, 32, 0, 252, 259, 3, 54, 27, 0, 253, 259, 3, 22, 11, 0, 254, 255, 5, 42, 0, 0, 255, 256, 3, 10, 5, 0, 256, 257, 5, 49, 0, 0, 257, 259, 1, 0, 0, 0, 258, 250, 1, 0, 0, 0, 258, 252, 1, 0, 0, 0, 258, 253, 1, 0, 0, 0, 258, 254, 1, 0, 0, 0, 259, 265, 1, 0, 0, 0, 260, 
261, 10, 1, 0, 0, 261, 262, 5, 32, 0, 0, 262, 264, 3, 26, 13, 0, 263, 260, 1, 0, 0, 0, 264, 267, 1, 0, 0, 0, 265, 263, 1, 0, 0, 0, 265, 266, 1, 0, 0, 0, 266, 21, 1, 0, 0, 0, 267, 265, 1, 0, 0, 0, 268, 269, 3, 24, 12, 0, 269, 279, 5, 42, 0, 0, 270, 280, 5, 60, 0, 0, 271, 276, 3, 10, 5, 0, 272, 273, 5, 33, 0, 0, 273, 275, 3, 10, 5, 0, 274, 272, 1, 0, 0, 0, 275, 278, 1, 0, 0, 0, 276, 274, 1, 0, 0, 0, 276, 277, 1, 0, 0, 0, 277, 280, 1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 279, 270, 1, 0, 0, 0, 279, 271, 1, 0, 0, 0, 279, 280, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 282, 5, 49, 0, 0, 282, 23, 1, 0, 0, 0, 283, 286, 5, 63, 0, 0, 284, 286, 3, 68, 34, 0, 285, 283, 1, 0, 0, 0, 285, 284, 1, 0, 0, 0, 286, 25, 1, 0, 0, 0, 287, 288, 3, 60, 30, 0, 288, 27, 1, 0, 0, 0, 289, 290, 5, 12, 0, 0, 290, 291, 3, 30, 15, 0, 291, 29, 1, 0, 0, 0, 292, 297, 3, 32, 16, 0, 293, 294, 5, 33, 0, 0, 294, 296, 3, 32, 16, 0, 295, 293, 1, 0, 0, 0, 296, 299, 1, 0, 0, 0, 297, 295, 1, 0, 0, 0, 297, 298, 1, 0, 0, 0, 298, 31, 1, 0, 0, 0, 299, 297, 1, 0, 0, 0, 300, 306, 3, 10, 5, 0, 301, 302, 3, 54, 27, 0, 302, 303, 5, 31, 0, 0, 303, 304, 3, 10, 5, 0, 304, 306, 1, 0, 0, 0, 305, 300, 1, 0, 0, 0, 305, 301, 1, 0, 0, 0, 306, 33, 1, 0, 0, 0, 307, 308, 5, 6, 0, 0, 308, 313, 3, 36, 18, 0, 309, 310, 5, 33, 0, 0, 310, 312, 3, 36, 18, 0, 311, 309, 1, 0, 0, 0, 312, 315, 1, 0, 0, 0, 313, 311, 1, 0, 0, 0, 313, 314, 1, 0, 0, 0, 314, 317, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 316, 318, 3, 42, 21, 0, 317, 316, 1, 0, 0, 0, 317, 318, 1, 0, 0, 0, 318, 35, 1, 0, 0, 0, 319, 320, 3, 38, 19, 0, 320, 321, 5, 104, 0, 0, 321, 322, 3, 40, 20, 0, 322, 325, 1, 0, 0, 0, 323, 325, 3, 40, 20, 0, 324, 319, 1, 0, 0, 0, 324, 323, 1, 0, 0, 0, 325, 37, 1, 0, 0, 0, 326, 327, 5, 76, 0, 0, 327, 39, 1, 0, 0, 0, 328, 329, 7, 2, 0, 0, 329, 41, 1, 0, 0, 0, 330, 333, 3, 44, 22, 0, 331, 333, 3, 46, 23, 0, 332, 330, 1, 0, 0, 0, 332, 331, 1, 0, 0, 0, 333, 43, 1, 0, 0, 0, 334, 335, 5, 75, 0, 0, 335, 340, 5, 76, 0, 0, 336, 337, 5, 33, 0, 0, 337, 339, 5, 76, 0, 0, 338, 
336, 1, 0, 0, 0, 339, 342, 1, 0, 0, 0, 340, 338, 1, 0, 0, 0, 340, 341, 1, 0, 0, 0, 341, 45, 1, 0, 0, 0, 342, 340, 1, 0, 0, 0, 343, 344, 5, 65, 0, 0, 344, 345, 3, 44, 22, 0, 345, 346, 5, 66, 0, 0, 346, 47, 1, 0, 0, 0, 347, 348, 5, 19, 0, 0, 348, 353, 3, 36, 18, 0, 349, 350, 5, 33, 0, 0, 350, 352, 3, 36, 18, 0, 351, 349, 1, 0, 0, 0, 352, 355, 1, 0, 0, 0, 353, 351, 1, 0, 0, 0, 353, 354, 1, 0, 0, 0, 354, 357, 1, 0, 0, 0, 355, 353, 1, 0, 0, 0, 356, 358, 3, 30, 15, 0, 357, 356, 1, 0, 0, 0, 357, 358, 1, 0, 0, 0, 358, 361, 1, 0, 0, 0, 359, 360, 5, 28, 0, 0, 360, 362, 3, 30, 15, 0, 361, 359, 1, 0, 0, 0, 361, 362, 1, 0, 0, 0, 362, 49, 1, 0, 0, 0, 363, 364, 5, 4, 0, 0, 364, 365, 3, 30, 15, 0, 365, 51, 1, 0, 0, 0, 366, 368, 5, 15, 0, 0, 367, 369, 3, 30, 15, 0, 368, 367, 1, 0, 0, 0, 368, 369, 1, 0, 0, 0, 369, 372, 1, 0, 0, 0, 370, 371, 5, 28, 0, 0, 371, 373, 3, 30, 15, 0, 372, 370, 1, 0, 0, 0, 372, 373, 1, 0, 0, 0, 373, 53, 1, 0, 0, 0, 374, 379, 3, 68, 34, 0, 375, 376, 5, 35, 0, 0, 376, 378, 3, 68, 34, 0, 377, 375, 1, 0, 0, 0, 378, 381, 1, 0, 0, 0, 379, 377, 1, 0, 0, 0, 379, 380, 1, 0, 0, 0, 380, 55, 1, 0, 0, 0, 381, 379, 1, 0, 0, 0, 382, 387, 3, 62, 31, 0, 383, 384, 5, 35, 0, 0, 384, 386, 3, 62, 31, 0, 385, 383, 1, 0, 0, 0, 386, 389, 1, 0, 0, 0, 387, 385, 1, 0, 0, 0, 387, 388, 1, 0, 0, 0, 388, 57, 1, 0, 0, 0, 389, 387, 1, 0, 0, 0, 390, 395, 3, 56, 28, 0, 391, 392, 5, 33, 0, 0, 392, 394, 3, 56, 28, 0, 393, 391, 1, 0, 0, 0, 394, 397, 1, 0, 0, 0, 395, 393, 1, 0, 0, 0, 395, 396, 1, 0, 0, 0, 396, 59, 1, 0, 0, 0, 397, 395, 1, 0, 0, 0, 398, 399, 7, 3, 0, 0, 399, 61, 1, 0, 0, 0, 400, 403, 5, 80, 0, 0, 401, 403, 3, 66, 33, 0, 402, 400, 1, 0, 0, 0, 402, 401, 1, 0, 0, 0, 403, 63, 1, 0, 0, 0, 404, 447, 5, 44, 0, 0, 405, 406, 3, 100, 50, 0, 406, 407, 5, 67, 0, 0, 407, 447, 1, 0, 0, 0, 408, 447, 3, 98, 49, 0, 409, 447, 3, 100, 50, 0, 410, 447, 3, 94, 47, 0, 411, 447, 3, 66, 33, 0, 412, 447, 3, 102, 51, 0, 413, 414, 5, 65, 0, 0, 414, 419, 3, 96, 48, 0, 415, 416, 5, 33, 0, 0, 416, 418, 3, 96, 
48, 0, 417, 415, 1, 0, 0, 0, 418, 421, 1, 0, 0, 0, 419, 417, 1, 0, 0, 0, 419, 420, 1, 0, 0, 0, 420, 422, 1, 0, 0, 0, 421, 419, 1, 0, 0, 0, 422, 423, 5, 66, 0, 0, 423, 447, 1, 0, 0, 0, 424, 425, 5, 65, 0, 0, 425, 430, 3, 94, 47, 0, 426, 427, 5, 33, 0, 0, 427, 429, 3, 94, 47, 0, 428, 426, 1, 0, 0, 0, 429, 432, 1, 0, 0, 0, 430, 428, 1, 0, 0, 0, 430, 431, 1, 0, 0, 0, 431, 433, 1, 0, 0, 0, 432, 430, 1, 0, 0, 0, 433, 434, 5, 66, 0, 0, 434, 447, 1, 0, 0, 0, 435, 436, 5, 65, 0, 0, 436, 441, 3, 102, 51, 0, 437, 438, 5, 33, 0, 0, 438, 440, 3, 102, 51, 0, 439, 437, 1, 0, 0, 0, 440, 443, 1, 0, 0, 0, 441, 439, 1, 0, 0, 0, 441, 442, 1, 0, 0, 0, 442, 444, 1, 0, 0, 0, 443, 441, 1, 0, 0, 0, 444, 445, 5, 66, 0, 0, 445, 447, 1, 0, 0, 0, 446, 404, 1, 0, 0, 0, 446, 405, 1, 0, 0, 0, 446, 408, 1, 0, 0, 0, 446, 409, 1, 0, 0, 0, 446, 410, 1, 0, 0, 0, 446, 411, 1, 0, 0, 0, 446, 412, 1, 0, 0, 0, 446, 413, 1, 0, 0, 0, 446, 424, 1, 0, 0, 0, 446, 435, 1, 0, 0, 0, 447, 65, 1, 0, 0, 0, 448, 451, 5, 47, 0, 0, 449, 451, 5, 64, 0, 0, 450, 448, 1, 0, 0, 0, 450, 449, 1, 0, 0, 0, 451, 67, 1, 0, 0, 0, 452, 455, 3, 60, 30, 0, 453, 455, 3, 66, 33, 0, 454, 452, 1, 0, 0, 0, 454, 453, 1, 0, 0, 0, 455, 69, 1, 0, 0, 0, 456, 457, 5, 9, 0, 0, 457, 458, 5, 26, 0, 0, 458, 71, 1, 0, 0, 0, 459, 460, 5, 14, 0, 0, 460, 465, 3, 74, 37, 0, 461, 462, 5, 33, 0, 0, 462, 464, 3, 74, 37, 0, 463, 461, 1, 0, 0, 0, 464, 467, 1, 0, 0, 0, 465, 463, 1, 0, 0, 0, 465, 466, 1, 0, 0, 0, 466, 73, 1, 0, 0, 0, 467, 465, 1, 0, 0, 0, 468, 470, 3, 10, 5, 0, 469, 471, 7, 4, 0, 0, 470, 469, 1, 0, 0, 0, 470, 471, 1, 0, 0, 0, 471, 474, 1, 0, 0, 0, 472, 473, 5, 45, 0, 0, 473, 475, 7, 5, 0, 0, 474, 472, 1, 0, 0, 0, 474, 475, 1, 0, 0, 0, 475, 75, 1, 0, 0, 0, 476, 477, 5, 8, 0, 0, 477, 478, 3, 58, 29, 0, 478, 77, 1, 0, 0, 0, 479, 480, 5, 2, 0, 0, 480, 481, 3, 58, 29, 0, 481, 79, 1, 0, 0, 0, 482, 483, 5, 11, 0, 0, 483, 488, 3, 82, 41, 0, 484, 485, 5, 33, 0, 0, 485, 487, 3, 82, 41, 0, 486, 484, 1, 0, 0, 0, 487, 490, 1, 0, 0, 0, 488, 486, 1, 0, 0, 0, 
488, 489, 1, 0, 0, 0, 489, 81, 1, 0, 0, 0, 490, 488, 1, 0, 0, 0, 491, 492, 3, 56, 28, 0, 492, 493, 5, 84, 0, 0, 493, 494, 3, 56, 28, 0, 494, 83, 1, 0, 0, 0, 495, 496, 5, 1, 0, 0, 496, 497, 3, 20, 10, 0, 497, 499, 3, 102, 51, 0, 498, 500, 3, 90, 45, 0, 499, 498, 1, 0, 0, 0, 499, 500, 1, 0, 0, 0, 500, 85, 1, 0, 0, 0, 501, 502, 5, 7, 0, 0, 502, 503, 3, 20, 10, 0, 503, 504, 3, 102, 51, 0, 504, 87, 1, 0, 0, 0, 505, 506, 5, 10, 0, 0, 506, 507, 3, 54, 27, 0, 507, 89, 1, 0, 0, 0, 508, 513, 3, 92, 46, 0, 509, 510, 5, 33, 0, 0, 510, 512, 3, 92, 46, 0, 511, 509, 1, 0, 0, 0, 512, 515, 1, 0, 0, 0, 513, 511, 1, 0, 0, 0, 513, 514, 1, 0, 0, 0, 514, 91, 1, 0, 0, 0, 515, 513, 1, 0, 0, 0, 516, 517, 3, 60, 30, 0, 517, 518, 5, 31, 0, 0, 518, 519, 3, 64, 32, 0, 519, 93, 1, 0, 0, 0, 520, 521, 7, 6, 0, 0, 521, 95, 1, 0, 0, 0, 522, 525, 3, 98, 49, 0, 523, 525, 3, 100, 50, 0, 524, 522, 1, 0, 0, 0, 524, 523, 1, 0, 0, 0, 525, 97, 1, 0, 0, 0, 526, 528, 7, 0, 0, 0, 527, 526, 1, 0, 0, 0, 527, 528, 1, 0, 0, 0, 528, 529, 1, 0, 0, 0, 529, 530, 5, 27, 0, 0, 530, 99, 1, 0, 0, 0, 531, 533, 7, 0, 0, 0, 532, 531, 1, 0, 0, 0, 532, 533, 1, 0, 0, 0, 533, 534, 1, 0, 0, 0, 534, 535, 5, 26, 0, 0, 535, 101, 1, 0, 0, 0, 536, 537, 5, 25, 0, 0, 537, 103, 1, 0, 0, 0, 538, 539, 7, 7, 0, 0, 539, 105, 1, 0, 0, 0, 540, 541, 5, 5, 0, 0, 541, 542, 3, 108, 54, 0, 542, 107, 1, 0, 0, 0, 543, 544, 5, 65, 0, 0, 544, 545, 3, 2, 1, 0, 545, 546, 5, 66, 0, 0, 546, 109, 1, 0, 0, 0, 547, 548, 5, 13, 0, 0, 548, 549, 5, 100, 0, 0, 549, 111, 1, 0, 0, 0, 550, 551, 5, 3, 0, 0, 551, 554, 5, 90, 0, 0, 552, 553, 5, 88, 0, 0, 553, 555, 3, 56, 28, 0, 554, 552, 1, 0, 0, 0, 554, 555, 1, 0, 0, 0, 555, 565, 1, 0, 0, 0, 556, 557, 5, 89, 0, 0, 557, 562, 3, 114, 57, 0, 558, 559, 5, 33, 0, 0, 559, 561, 3, 114, 57, 0, 560, 558, 1, 0, 0, 0, 561, 564, 1, 0, 0, 0, 562, 560, 1, 0, 0, 0, 562, 563, 1, 0, 0, 0, 563, 566, 1, 0, 0, 0, 564, 562, 1, 0, 0, 0, 565, 556, 1, 0, 0, 0, 565, 566, 1, 0, 0, 0, 566, 113, 1, 0, 0, 0, 567, 568, 3, 56, 28, 0, 568, 569, 5, 
31, 0, 0, 569, 571, 1, 0, 0, 0, 570, 567, 1, 0, 0, 0, 570, 571, 1, 0, 0, 0, 571, 572, 1, 0, 0, 0, 572, 573, 3, 56, 28, 0, 573, 115, 1, 0, 0, 0, 574, 575, 5, 18, 0, 0, 575, 576, 3, 36, 18, 0, 576, 577, 5, 88, 0, 0, 577, 578, 3, 58, 29, 0, 578, 117, 1, 0, 0, 0, 579, 580, 5, 17, 0, 0, 580, 583, 3, 30, 15, 0, 581, 582, 5, 28, 0, 0, 582, 584, 3, 30, 15, 0, 583, 581, 1, 0, 0, 0, 583, 584, 1, 0, 0, 0, 584, 119, 1, 0, 0, 0, 57, 131, 140, 158, 170, 179, 187, 193, 201, 203, 208, 215, 220, 231, 237, 245, 247, 258, 265, 276, 279, 285, 297, 305, 313, 317, 324, 332, 340, 353, 357, 361, 368, 372, 379, 387, 395, 402, 419, 430, 441, 446, 450, 454, 465, 470, 474, 488, 499, 513, 524, 527, 532, 554, 562, 565, 570, 583] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index 522393fb42c4b..e3e8790d205ff 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -45,7 +45,7 @@ public class EsqlBaseParser extends ParserConfig { CAST_OP=32, COMMA=33, DESC=34, DOT=35, FALSE=36, FIRST=37, IN=38, IS=39, LAST=40, LIKE=41, LP=42, NOT=43, NULL=44, NULLS=45, OR=46, PARAM=47, RLIKE=48, RP=49, TRUE=50, EQ=51, CIEQ=52, NEQ=53, LT=54, LTE=55, GT=56, GTE=57, - PLUS=58, MINUS=59, ASTERISK=60, SLASH=61, PERCENT=62, DEV_MATCH=63, NAMED_OR_POSITIONAL_PARAM=64, + PLUS=58, MINUS=59, ASTERISK=60, SLASH=61, PERCENT=62, MATCH=63, NAMED_OR_POSITIONAL_PARAM=64, OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, EXPLAIN_WS=72, EXPLAIN_LINE_COMMENT=73, EXPLAIN_MULTILINE_COMMENT=74, METADATA=75, UNQUOTED_SOURCE=76, @@ -111,11 +111,11 @@ private static String[] makeLiteralNames() { "'desc'", "'.'", "'false'", 
"'first'", "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", - "'-'", "'*'", "'/'", "'%'", null, null, null, "']'", null, null, null, - null, null, null, null, null, "'metadata'", null, null, null, null, null, - null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, - null, null, null, null, null, null, null, null, "'info'", null, null, - null, "':'" + "'-'", "'*'", "'/'", "'%'", "'match'", null, null, "']'", null, null, + null, null, null, null, null, null, "'metadata'", null, null, null, null, + null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, + null, null, null, null, null, null, null, null, null, "'info'", null, + null, null, "':'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); @@ -129,14 +129,14 @@ private static String[] makeSymbolicNames() { "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", - "PERCENT", "DEV_MATCH", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", - "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", - "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", - "EXPLAIN_MULTILINE_COMMENT", "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", - "FROM_MULTILINE_COMMENT", "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", - "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", - "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", - "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + "PERCENT", "MATCH", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", + "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", + 
"EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", + "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", + "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", "SHOW_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", @@ -1171,7 +1171,7 @@ public static class MatchBooleanExpressionContext extends ParserRuleContext { public ValueExpressionContext valueExpression() { return getRuleContext(ValueExpressionContext.class,0); } - public TerminalNode DEV_MATCH() { return getToken(EsqlBaseParser.DEV_MATCH, 0); } + public TerminalNode MATCH() { return getToken(EsqlBaseParser.MATCH, 0); } public StringContext string() { return getRuleContext(StringContext.class,0); } @@ -1204,7 +1204,7 @@ public final MatchBooleanExpressionContext matchBooleanExpression() throws Recog setState(222); valueExpression(); setState(223); - match(DEV_MATCH); + match(MATCH); setState(224); ((MatchBooleanExpressionContext)_localctx).queryString = string(); } @@ -1867,7 +1867,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx @SuppressWarnings("CheckReturnValue") public static class FunctionNameContext extends ParserRuleContext { - public TerminalNode DEV_MATCH() { return getToken(EsqlBaseParser.DEV_MATCH, 0); } + public TerminalNode MATCH() { return getToken(EsqlBaseParser.MATCH, 0); } public IdentifierOrParameterContext identifierOrParameter() { return getRuleContext(IdentifierOrParameterContext.class,0); } @@ -1895,25 +1895,28 @@ public final FunctionNameContext functionName() throws 
RecognitionException { FunctionNameContext _localctx = new FunctionNameContext(_ctx, getState()); enterRule(_localctx, 24, RULE_functionName); try { - setState(286); + setState(285); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,20,_ctx) ) { - case 1: + switch (_input.LA(1)) { + case MATCH: enterOuterAlt(_localctx, 1); { setState(283); - if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(284); - match(DEV_MATCH); + match(MATCH); } break; - case 2: + case PARAM: + case NAMED_OR_POSITIONAL_PARAM: + case UNQUOTED_IDENTIFIER: + case QUOTED_IDENTIFIER: enterOuterAlt(_localctx, 2); { - setState(285); + setState(284); identifierOrParameter(); } break; + default: + throw new NoViableAltException(this); } } catch (RecognitionException re) { @@ -1970,7 +1973,7 @@ public final DataTypeContext dataType() throws RecognitionException { _localctx = new ToDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(288); + setState(287); identifier(); } } @@ -2017,9 +2020,9 @@ public final RowCommandContext rowCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(290); + setState(289); match(ROW); - setState(291); + setState(290); fields(); } } @@ -2073,23 +2076,23 @@ public final FieldsContext fields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(293); + setState(292); field(); - setState(298); + setState(297); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,21,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(294); + setState(293); match(COMMA); - setState(295); + setState(294); field(); } } } - setState(300); + setState(299); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,21,_ctx); } @@ -2139,24 +2142,24 @@ public final FieldContext field() throws RecognitionException { FieldContext _localctx = new 
FieldContext(_ctx, getState()); enterRule(_localctx, 32, RULE_field); try { - setState(306); + setState(305); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,22,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(301); + setState(300); booleanExpression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(302); + setState(301); qualifiedName(); - setState(303); + setState(302); match(ASSIGN); - setState(304); + setState(303); booleanExpression(0); } break; @@ -2216,34 +2219,34 @@ public final FromCommandContext fromCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(308); + setState(307); match(FROM); - setState(309); + setState(308); indexPattern(); - setState(314); + setState(313); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,23,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(310); + setState(309); match(COMMA); - setState(311); + setState(310); indexPattern(); } } } - setState(316); + setState(315); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,23,_ctx); } - setState(318); + setState(317); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { - setState(317); + setState(316); metadata(); } break; @@ -2294,24 +2297,24 @@ public final IndexPatternContext indexPattern() throws RecognitionException { IndexPatternContext _localctx = new IndexPatternContext(_ctx, getState()); enterRule(_localctx, 36, RULE_indexPattern); try { - setState(325); + setState(324); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,25,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(320); + setState(319); clusterString(); - setState(321); + setState(320); match(COLON); - setState(322); + setState(321); indexString(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(324); + setState(323); indexString(); } 
break; @@ -2357,7 +2360,7 @@ public final ClusterStringContext clusterString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(327); + setState(326); match(UNQUOTED_SOURCE); } } @@ -2403,7 +2406,7 @@ public final IndexStringContext indexString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(329); + setState(328); _la = _input.LA(1); if ( !(_la==QUOTED_STRING || _la==UNQUOTED_SOURCE) ) { _errHandler.recoverInline(this); @@ -2458,20 +2461,20 @@ public final MetadataContext metadata() throws RecognitionException { MetadataContext _localctx = new MetadataContext(_ctx, getState()); enterRule(_localctx, 42, RULE_metadata); try { - setState(333); + setState(332); _errHandler.sync(this); switch (_input.LA(1)) { case METADATA: enterOuterAlt(_localctx, 1); { - setState(331); + setState(330); metadataOption(); } break; case OPENING_BRACKET: enterOuterAlt(_localctx, 2); { - setState(332); + setState(331); deprecated_metadata(); } break; @@ -2528,25 +2531,25 @@ public final MetadataOptionContext metadataOption() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(335); + setState(334); match(METADATA); - setState(336); + setState(335); match(UNQUOTED_SOURCE); - setState(341); + setState(340); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(337); + setState(336); match(COMMA); - setState(338); + setState(337); match(UNQUOTED_SOURCE); } } } - setState(343); + setState(342); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); } @@ -2595,11 +2598,11 @@ public final Deprecated_metadataContext deprecated_metadata() throws Recognition try { enterOuterAlt(_localctx, 1); { - setState(344); + setState(343); match(OPENING_BRACKET); - setState(345); + setState(344); metadataOption(); - setState(346); + setState(345); 
match(CLOSING_BRACKET); } } @@ -2663,46 +2666,46 @@ public final MetricsCommandContext metricsCommand() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(348); + setState(347); match(DEV_METRICS); - setState(349); + setState(348); indexPattern(); - setState(354); + setState(353); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(350); + setState(349); match(COMMA); - setState(351); + setState(350); indexPattern(); } } } - setState(356); + setState(355); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); } - setState(358); + setState(357); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { case 1: { - setState(357); + setState(356); ((MetricsCommandContext)_localctx).aggregates = fields(); } break; } - setState(362); + setState(361); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { case 1: { - setState(360); + setState(359); match(BY); - setState(361); + setState(360); ((MetricsCommandContext)_localctx).grouping = fields(); } break; @@ -2752,9 +2755,9 @@ public final EvalCommandContext evalCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(364); + setState(363); match(EVAL); - setState(365); + setState(364); fields(); } } @@ -2807,26 +2810,26 @@ public final StatsCommandContext statsCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(367); + setState(366); match(STATS); - setState(369); + setState(368); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(368); + setState(367); ((StatsCommandContext)_localctx).stats = fields(); } break; } - setState(373); + setState(372); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { case 1: { - 
setState(371); + setState(370); match(BY); - setState(372); + setState(371); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2883,23 +2886,23 @@ public final QualifiedNameContext qualifiedName() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(375); + setState(374); identifierOrParameter(); - setState(380); + setState(379); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,33,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(376); + setState(375); match(DOT); - setState(377); + setState(376); identifierOrParameter(); } } } - setState(382); + setState(381); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,33,_ctx); } @@ -2955,23 +2958,23 @@ public final QualifiedNamePatternContext qualifiedNamePattern() throws Recogniti int _alt; enterOuterAlt(_localctx, 1); { - setState(383); + setState(382); identifierPattern(); - setState(388); + setState(387); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(384); + setState(383); match(DOT); - setState(385); + setState(384); identifierPattern(); } } } - setState(390); + setState(389); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } @@ -3027,23 +3030,23 @@ public final QualifiedNamePatternsContext qualifiedNamePatterns() throws Recogni int _alt; enterOuterAlt(_localctx, 1); { - setState(391); + setState(390); qualifiedNamePattern(); - setState(396); + setState(395); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,35,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(392); + setState(391); match(COMMA); - setState(393); + setState(392); qualifiedNamePattern(); } } } - setState(398); + setState(397); 
_errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,35,_ctx); } @@ -3091,7 +3094,7 @@ public final IdentifierContext identifier() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(399); + setState(398); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -3144,13 +3147,13 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); enterRule(_localctx, 62, RULE_identifierPattern); try { - setState(403); + setState(402); _errHandler.sync(this); switch (_input.LA(1)) { case ID_PATTERN: enterOuterAlt(_localctx, 1); { - setState(401); + setState(400); match(ID_PATTERN); } break; @@ -3158,7 +3161,7 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce case NAMED_OR_POSITIONAL_PARAM: enterOuterAlt(_localctx, 2); { - setState(402); + setState(401); parameter(); } break; @@ -3433,14 +3436,14 @@ public final ConstantContext constant() throws RecognitionException { enterRule(_localctx, 64, RULE_constant); int _la; try { - setState(447); + setState(446); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,40,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(405); + setState(404); match(NULL); } break; @@ -3448,9 +3451,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(406); + setState(405); integerValue(); - setState(407); + setState(406); match(UNQUOTED_IDENTIFIER); } break; @@ -3458,7 +3461,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(409); + setState(408); decimalValue(); } break; @@ -3466,7 +3469,7 @@ 
public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(410); + setState(409); integerValue(); } break; @@ -3474,7 +3477,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(411); + setState(410); booleanValue(); } break; @@ -3482,7 +3485,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParameterContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(412); + setState(411); parameter(); } break; @@ -3490,7 +3493,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(413); + setState(412); string(); } break; @@ -3498,27 +3501,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(414); + setState(413); match(OPENING_BRACKET); - setState(415); + setState(414); numericValue(); - setState(420); + setState(419); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(416); + setState(415); match(COMMA); - setState(417); + setState(416); numericValue(); } } - setState(422); + setState(421); _errHandler.sync(this); _la = _input.LA(1); } - setState(423); + setState(422); match(CLOSING_BRACKET); } break; @@ -3526,27 +3529,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(425); + setState(424); match(OPENING_BRACKET); - setState(426); + setState(425); booleanValue(); - setState(431); + setState(430); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(427); + setState(426); match(COMMA); - setState(428); + 
setState(427); booleanValue(); } } - setState(433); + setState(432); _errHandler.sync(this); _la = _input.LA(1); } - setState(434); + setState(433); match(CLOSING_BRACKET); } break; @@ -3554,27 +3557,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(436); + setState(435); match(OPENING_BRACKET); - setState(437); + setState(436); string(); - setState(442); + setState(441); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(438); + setState(437); match(COMMA); - setState(439); + setState(438); string(); } } - setState(444); + setState(443); _errHandler.sync(this); _la = _input.LA(1); } - setState(445); + setState(444); match(CLOSING_BRACKET); } break; @@ -3648,14 +3651,14 @@ public final ParameterContext parameter() throws RecognitionException { ParameterContext _localctx = new ParameterContext(_ctx, getState()); enterRule(_localctx, 66, RULE_parameter); try { - setState(451); + setState(450); _errHandler.sync(this); switch (_input.LA(1)) { case PARAM: _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(449); + setState(448); match(PARAM); } break; @@ -3663,7 +3666,7 @@ public final ParameterContext parameter() throws RecognitionException { _localctx = new InputNamedOrPositionalParamContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(450); + setState(449); match(NAMED_OR_POSITIONAL_PARAM); } break; @@ -3714,14 +3717,14 @@ public final IdentifierOrParameterContext identifierOrParameter() throws Recogni IdentifierOrParameterContext _localctx = new IdentifierOrParameterContext(_ctx, getState()); enterRule(_localctx, 68, RULE_identifierOrParameter); try { - setState(455); + setState(454); _errHandler.sync(this); switch (_input.LA(1)) { case UNQUOTED_IDENTIFIER: case QUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(453); + setState(452); identifier(); } break; @@ 
-3729,7 +3732,7 @@ public final IdentifierOrParameterContext identifierOrParameter() throws Recogni case NAMED_OR_POSITIONAL_PARAM: enterOuterAlt(_localctx, 2); { - setState(454); + setState(453); parameter(); } break; @@ -3778,9 +3781,9 @@ public final LimitCommandContext limitCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(457); + setState(456); match(LIMIT); - setState(458); + setState(457); match(INTEGER_LITERAL); } } @@ -3835,25 +3838,25 @@ public final SortCommandContext sortCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(460); + setState(459); match(SORT); - setState(461); + setState(460); orderExpression(); - setState(466); + setState(465); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,43,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(462); + setState(461); match(COMMA); - setState(463); + setState(462); orderExpression(); } } } - setState(468); + setState(467); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,43,_ctx); } @@ -3909,14 +3912,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(469); + setState(468); booleanExpression(0); - setState(471); + setState(470); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,44,_ctx) ) { case 1: { - setState(470); + setState(469); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -3930,14 +3933,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(475); + setState(474); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { case 1: { - setState(473); + setState(472); match(NULLS); - setState(474); + setState(473); ((OrderExpressionContext)_localctx).nullOrdering = 
_input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -3996,9 +3999,9 @@ public final KeepCommandContext keepCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(477); + setState(476); match(KEEP); - setState(478); + setState(477); qualifiedNamePatterns(); } } @@ -4045,9 +4048,9 @@ public final DropCommandContext dropCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(480); + setState(479); match(DROP); - setState(481); + setState(480); qualifiedNamePatterns(); } } @@ -4102,25 +4105,25 @@ public final RenameCommandContext renameCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(483); + setState(482); match(RENAME); - setState(484); + setState(483); renameClause(); - setState(489); + setState(488); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,46,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(485); + setState(484); match(COMMA); - setState(486); + setState(485); renameClause(); } } } - setState(491); + setState(490); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,46,_ctx); } @@ -4174,11 +4177,11 @@ public final RenameClauseContext renameClause() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(492); + setState(491); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(493); + setState(492); match(AS); - setState(494); + setState(493); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -4231,18 +4234,18 @@ public final DissectCommandContext dissectCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(496); + setState(495); match(DISSECT); - setState(497); + setState(496); primaryExpression(0); - setState(498); + setState(497); string(); - setState(500); + setState(499); _errHandler.sync(this); switch ( 
getInterpreter().adaptivePredict(_input,47,_ctx) ) { case 1: { - setState(499); + setState(498); commandOptions(); } break; @@ -4295,11 +4298,11 @@ public final GrokCommandContext grokCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(502); + setState(501); match(GROK); - setState(503); + setState(502); primaryExpression(0); - setState(504); + setState(503); string(); } } @@ -4346,9 +4349,9 @@ public final MvExpandCommandContext mvExpandCommand() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(506); + setState(505); match(MV_EXPAND); - setState(507); + setState(506); qualifiedName(); } } @@ -4402,23 +4405,23 @@ public final CommandOptionsContext commandOptions() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(509); + setState(508); commandOption(); - setState(514); + setState(513); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,48,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(510); + setState(509); match(COMMA); - setState(511); + setState(510); commandOption(); } } } - setState(516); + setState(515); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,48,_ctx); } @@ -4470,11 +4473,11 @@ public final CommandOptionContext commandOption() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(517); + setState(516); identifier(); - setState(518); + setState(517); match(ASSIGN); - setState(519); + setState(518); constant(); } } @@ -4520,7 +4523,7 @@ public final BooleanValueContext booleanValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(521); + setState(520); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4575,20 +4578,20 @@ public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new 
NumericValueContext(_ctx, getState()); enterRule(_localctx, 96, RULE_numericValue); try { - setState(525); + setState(524); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,49,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(523); + setState(522); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(524); + setState(523); integerValue(); } break; @@ -4637,12 +4640,12 @@ public final DecimalValueContext decimalValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(528); + setState(527); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(527); + setState(526); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4655,7 +4658,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(530); + setState(529); match(DECIMAL_LITERAL); } } @@ -4702,12 +4705,12 @@ public final IntegerValueContext integerValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(533); + setState(532); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(532); + setState(531); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4720,7 +4723,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(535); + setState(534); match(INTEGER_LITERAL); } } @@ -4764,7 +4767,7 @@ public final StringContext string() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(537); + setState(536); match(QUOTED_STRING); } } @@ -4814,7 +4817,7 @@ public final ComparisonOperatorContext comparisonOperator() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(539); + setState(538); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 281474976710656000L) != 0)) ) { _errHandler.recoverInline(this); @@ -4869,9 +4872,9 @@ 
public final ExplainCommandContext explainCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(541); + setState(540); match(EXPLAIN); - setState(542); + setState(541); subqueryExpression(); } } @@ -4919,11 +4922,11 @@ public final SubqueryExpressionContext subqueryExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(544); + setState(543); match(OPENING_BRACKET); - setState(545); + setState(544); query(0); - setState(546); + setState(545); match(CLOSING_BRACKET); } } @@ -4980,9 +4983,9 @@ public final ShowCommandContext showCommand() throws RecognitionException { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(548); + setState(547); match(SHOW); - setState(549); + setState(548); match(INFO); } } @@ -5045,46 +5048,46 @@ public final EnrichCommandContext enrichCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(551); + setState(550); match(ENRICH); - setState(552); + setState(551); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(555); + setState(554); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,52,_ctx) ) { case 1: { - setState(553); + setState(552); match(ON); - setState(554); + setState(553); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(566); + setState(565); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,54,_ctx) ) { case 1: { - setState(557); + setState(556); match(WITH); - setState(558); + setState(557); enrichWithClause(); - setState(563); + setState(562); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,53,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(559); + setState(558); match(COMMA); - setState(560); + setState(559); enrichWithClause(); } } } - setState(565); + setState(564); 
_errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,53,_ctx); } @@ -5141,19 +5144,19 @@ public final EnrichWithClauseContext enrichWithClause() throws RecognitionExcept try { enterOuterAlt(_localctx, 1); { - setState(571); + setState(570); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,55,_ctx) ) { case 1: { - setState(568); + setState(567); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(569); + setState(568); match(ASSIGN); } break; } - setState(573); + setState(572); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -5206,13 +5209,13 @@ public final LookupCommandContext lookupCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(575); + setState(574); match(DEV_LOOKUP); - setState(576); + setState(575); ((LookupCommandContext)_localctx).tableName = indexPattern(); - setState(577); + setState(576); match(ON); - setState(578); + setState(577); ((LookupCommandContext)_localctx).matchFields = qualifiedNamePatterns(); } } @@ -5265,18 +5268,18 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(580); + setState(579); match(DEV_INLINESTATS); - setState(581); + setState(580); ((InlinestatsCommandContext)_localctx).stats = fields(); - setState(584); + setState(583); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,56,_ctx) ) { case 1: { - setState(582); + setState(581); match(BY); - setState(583); + setState(582); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -5308,8 +5311,6 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return operatorExpression_sempred((OperatorExpressionContext)_localctx, predIndex); case 10: return primaryExpression_sempred((PrimaryExpressionContext)_localctx, predIndex); - case 12: - return functionName_sempred((FunctionNameContext)_localctx, 
predIndex); } return true; } @@ -5363,16 +5364,9 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in } return true; } - private boolean functionName_sempred(FunctionNameContext _localctx, int predIndex) { - switch (predIndex) { - case 10: - return this.isDevVersion(); - } - return true; - } public static final String _serializedATN = - "\u0004\u0001x\u024b\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0004\u0001x\u024a\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ @@ -5412,75 +5406,75 @@ private boolean functionName_sempred(FunctionNameContext _localctx, int predInde "\n\u0103\b\n\u0001\n\u0001\n\u0001\n\u0005\n\u0108\b\n\n\n\f\n\u010b\t"+ "\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ "\u0005\u000b\u0113\b\u000b\n\u000b\f\u000b\u0116\t\u000b\u0003\u000b\u0118"+ - "\b\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0003\f\u011f"+ - "\b\f\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001"+ - "\u000f\u0001\u000f\u0005\u000f\u0129\b\u000f\n\u000f\f\u000f\u012c\t\u000f"+ - "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0003\u0010"+ - "\u0133\b\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0005\u0011"+ - "\u0139\b\u0011\n\u0011\f\u0011\u013c\t\u0011\u0001\u0011\u0003\u0011\u013f"+ - "\b\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0003"+ - "\u0012\u0146\b\u0012\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001"+ - "\u0015\u0001\u0015\u0003\u0015\u014e\b\u0015\u0001\u0016\u0001\u0016\u0001"+ - "\u0016\u0001\u0016\u0005\u0016\u0154\b\u0016\n\u0016\f\u0016\u0157\t\u0016"+ + "\b\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0003\f\u011e\b\f\u0001"+ + 
"\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f"+ + "\u0001\u000f\u0005\u000f\u0128\b\u000f\n\u000f\f\u000f\u012b\t\u000f\u0001"+ + "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0003\u0010\u0132"+ + "\b\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0005\u0011\u0138"+ + "\b\u0011\n\u0011\f\u0011\u013b\t\u0011\u0001\u0011\u0003\u0011\u013e\b"+ + "\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0003"+ + "\u0012\u0145\b\u0012\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001"+ + "\u0015\u0001\u0015\u0003\u0015\u014d\b\u0015\u0001\u0016\u0001\u0016\u0001"+ + "\u0016\u0001\u0016\u0005\u0016\u0153\b\u0016\n\u0016\f\u0016\u0156\t\u0016"+ "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018"+ - "\u0001\u0018\u0001\u0018\u0005\u0018\u0161\b\u0018\n\u0018\f\u0018\u0164"+ - "\t\u0018\u0001\u0018\u0003\u0018\u0167\b\u0018\u0001\u0018\u0001\u0018"+ - "\u0003\u0018\u016b\b\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u001a"+ - "\u0001\u001a\u0003\u001a\u0172\b\u001a\u0001\u001a\u0001\u001a\u0003\u001a"+ - "\u0176\b\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0005\u001b\u017b\b"+ - "\u001b\n\u001b\f\u001b\u017e\t\u001b\u0001\u001c\u0001\u001c\u0001\u001c"+ - "\u0005\u001c\u0183\b\u001c\n\u001c\f\u001c\u0186\t\u001c\u0001\u001d\u0001"+ - "\u001d\u0001\u001d\u0005\u001d\u018b\b\u001d\n\u001d\f\u001d\u018e\t\u001d"+ - "\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0003\u001f\u0194\b\u001f"+ + "\u0001\u0018\u0001\u0018\u0005\u0018\u0160\b\u0018\n\u0018\f\u0018\u0163"+ + "\t\u0018\u0001\u0018\u0003\u0018\u0166\b\u0018\u0001\u0018\u0001\u0018"+ + "\u0003\u0018\u016a\b\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u001a"+ + "\u0001\u001a\u0003\u001a\u0171\b\u001a\u0001\u001a\u0001\u001a\u0003\u001a"+ + "\u0175\b\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0005\u001b\u017a\b"+ + "\u001b\n\u001b\f\u001b\u017d\t\u001b\u0001\u001c\u0001\u001c\u0001\u001c"+ + 
"\u0005\u001c\u0182\b\u001c\n\u001c\f\u001c\u0185\t\u001c\u0001\u001d\u0001"+ + "\u001d\u0001\u001d\u0005\u001d\u018a\b\u001d\n\u001d\f\u001d\u018d\t\u001d"+ + "\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0003\u001f\u0193\b\u001f"+ "\u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0001"+ - " \u0001 \u0001 \u0001 \u0005 \u01a3\b \n \f \u01a6\t \u0001 \u0001 \u0001"+ - " \u0001 \u0001 \u0001 \u0005 \u01ae\b \n \f \u01b1\t \u0001 \u0001 \u0001"+ - " \u0001 \u0001 \u0001 \u0005 \u01b9\b \n \f \u01bc\t \u0001 \u0001 \u0003"+ - " \u01c0\b \u0001!\u0001!\u0003!\u01c4\b!\u0001\"\u0001\"\u0003\"\u01c8"+ - "\b\"\u0001#\u0001#\u0001#\u0001$\u0001$\u0001$\u0001$\u0005$\u01d1\b$"+ - "\n$\f$\u01d4\t$\u0001%\u0001%\u0003%\u01d8\b%\u0001%\u0001%\u0003%\u01dc"+ + " \u0001 \u0001 \u0001 \u0005 \u01a2\b \n \f \u01a5\t \u0001 \u0001 \u0001"+ + " \u0001 \u0001 \u0001 \u0005 \u01ad\b \n \f \u01b0\t \u0001 \u0001 \u0001"+ + " \u0001 \u0001 \u0001 \u0005 \u01b8\b \n \f \u01bb\t \u0001 \u0001 \u0003"+ + " \u01bf\b \u0001!\u0001!\u0003!\u01c3\b!\u0001\"\u0001\"\u0003\"\u01c7"+ + "\b\"\u0001#\u0001#\u0001#\u0001$\u0001$\u0001$\u0001$\u0005$\u01d0\b$"+ + "\n$\f$\u01d3\t$\u0001%\u0001%\u0003%\u01d7\b%\u0001%\u0001%\u0003%\u01db"+ "\b%\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001\'\u0001(\u0001(\u0001("+ - "\u0001(\u0005(\u01e8\b(\n(\f(\u01eb\t(\u0001)\u0001)\u0001)\u0001)\u0001"+ - "*\u0001*\u0001*\u0001*\u0003*\u01f5\b*\u0001+\u0001+\u0001+\u0001+\u0001"+ - ",\u0001,\u0001,\u0001-\u0001-\u0001-\u0005-\u0201\b-\n-\f-\u0204\t-\u0001"+ - ".\u0001.\u0001.\u0001.\u0001/\u0001/\u00010\u00010\u00030\u020e\b0\u0001"+ - "1\u00031\u0211\b1\u00011\u00011\u00012\u00032\u0216\b2\u00012\u00012\u0001"+ + "\u0001(\u0005(\u01e7\b(\n(\f(\u01ea\t(\u0001)\u0001)\u0001)\u0001)\u0001"+ + "*\u0001*\u0001*\u0001*\u0003*\u01f4\b*\u0001+\u0001+\u0001+\u0001+\u0001"+ + ",\u0001,\u0001,\u0001-\u0001-\u0001-\u0005-\u0200\b-\n-\f-\u0203\t-\u0001"+ + 
".\u0001.\u0001.\u0001.\u0001/\u0001/\u00010\u00010\u00030\u020d\b0\u0001"+ + "1\u00031\u0210\b1\u00011\u00011\u00012\u00032\u0215\b2\u00012\u00012\u0001"+ "3\u00013\u00014\u00014\u00015\u00015\u00015\u00016\u00016\u00016\u0001"+ - "6\u00017\u00017\u00017\u00018\u00018\u00018\u00018\u00038\u022c\b8\u0001"+ - "8\u00018\u00018\u00018\u00058\u0232\b8\n8\f8\u0235\t8\u00038\u0237\b8"+ - "\u00019\u00019\u00019\u00039\u023c\b9\u00019\u00019\u0001:\u0001:\u0001"+ - ":\u0001:\u0001:\u0001;\u0001;\u0001;\u0001;\u0003;\u0249\b;\u0001;\u0000"+ + "6\u00017\u00017\u00017\u00018\u00018\u00018\u00018\u00038\u022b\b8\u0001"+ + "8\u00018\u00018\u00018\u00058\u0231\b8\n8\f8\u0234\t8\u00038\u0236\b8"+ + "\u00019\u00019\u00019\u00039\u023b\b9\u00019\u00019\u0001:\u0001:\u0001"+ + ":\u0001:\u0001:\u0001;\u0001;\u0001;\u0001;\u0003;\u0248\b;\u0001;\u0000"+ "\u0004\u0002\n\u0012\u0014<\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ "\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPR"+ "TVXZ\\^`bdfhjlnprtv\u0000\b\u0001\u0000:;\u0001\u0000<>\u0002\u0000\u0019"+ "\u0019LL\u0001\u0000CD\u0002\u0000\u001e\u001e\"\"\u0002\u0000%%((\u0002"+ - "\u0000$$22\u0002\u00003359\u0265\u0000x\u0001\u0000\u0000\u0000\u0002"+ + "\u0000$$22\u0002\u00003359\u0264\u0000x\u0001\u0000\u0000\u0000\u0002"+ "{\u0001\u0000\u0000\u0000\u0004\u008c\u0001\u0000\u0000\u0000\u0006\u009e"+ "\u0001\u0000\u0000\u0000\b\u00a0\u0001\u0000\u0000\u0000\n\u00c1\u0001"+ "\u0000\u0000\u0000\f\u00dc\u0001\u0000\u0000\u0000\u000e\u00de\u0001\u0000"+ "\u0000\u0000\u0010\u00e7\u0001\u0000\u0000\u0000\u0012\u00ed\u0001\u0000"+ "\u0000\u0000\u0014\u0102\u0001\u0000\u0000\u0000\u0016\u010c\u0001\u0000"+ - "\u0000\u0000\u0018\u011e\u0001\u0000\u0000\u0000\u001a\u0120\u0001\u0000"+ - "\u0000\u0000\u001c\u0122\u0001\u0000\u0000\u0000\u001e\u0125\u0001\u0000"+ - "\u0000\u0000 \u0132\u0001\u0000\u0000\u0000\"\u0134\u0001\u0000\u0000"+ - "\u0000$\u0145\u0001\u0000\u0000\u0000&\u0147\u0001\u0000\u0000\u0000("+ - 
"\u0149\u0001\u0000\u0000\u0000*\u014d\u0001\u0000\u0000\u0000,\u014f\u0001"+ - "\u0000\u0000\u0000.\u0158\u0001\u0000\u0000\u00000\u015c\u0001\u0000\u0000"+ - "\u00002\u016c\u0001\u0000\u0000\u00004\u016f\u0001\u0000\u0000\u00006"+ - "\u0177\u0001\u0000\u0000\u00008\u017f\u0001\u0000\u0000\u0000:\u0187\u0001"+ - "\u0000\u0000\u0000<\u018f\u0001\u0000\u0000\u0000>\u0193\u0001\u0000\u0000"+ - "\u0000@\u01bf\u0001\u0000\u0000\u0000B\u01c3\u0001\u0000\u0000\u0000D"+ - "\u01c7\u0001\u0000\u0000\u0000F\u01c9\u0001\u0000\u0000\u0000H\u01cc\u0001"+ - "\u0000\u0000\u0000J\u01d5\u0001\u0000\u0000\u0000L\u01dd\u0001\u0000\u0000"+ - "\u0000N\u01e0\u0001\u0000\u0000\u0000P\u01e3\u0001\u0000\u0000\u0000R"+ - "\u01ec\u0001\u0000\u0000\u0000T\u01f0\u0001\u0000\u0000\u0000V\u01f6\u0001"+ - "\u0000\u0000\u0000X\u01fa\u0001\u0000\u0000\u0000Z\u01fd\u0001\u0000\u0000"+ - "\u0000\\\u0205\u0001\u0000\u0000\u0000^\u0209\u0001\u0000\u0000\u0000"+ - "`\u020d\u0001\u0000\u0000\u0000b\u0210\u0001\u0000\u0000\u0000d\u0215"+ - "\u0001\u0000\u0000\u0000f\u0219\u0001\u0000\u0000\u0000h\u021b\u0001\u0000"+ - "\u0000\u0000j\u021d\u0001\u0000\u0000\u0000l\u0220\u0001\u0000\u0000\u0000"+ - "n\u0224\u0001\u0000\u0000\u0000p\u0227\u0001\u0000\u0000\u0000r\u023b"+ - "\u0001\u0000\u0000\u0000t\u023f\u0001\u0000\u0000\u0000v\u0244\u0001\u0000"+ + "\u0000\u0000\u0018\u011d\u0001\u0000\u0000\u0000\u001a\u011f\u0001\u0000"+ + "\u0000\u0000\u001c\u0121\u0001\u0000\u0000\u0000\u001e\u0124\u0001\u0000"+ + "\u0000\u0000 \u0131\u0001\u0000\u0000\u0000\"\u0133\u0001\u0000\u0000"+ + "\u0000$\u0144\u0001\u0000\u0000\u0000&\u0146\u0001\u0000\u0000\u0000("+ + "\u0148\u0001\u0000\u0000\u0000*\u014c\u0001\u0000\u0000\u0000,\u014e\u0001"+ + "\u0000\u0000\u0000.\u0157\u0001\u0000\u0000\u00000\u015b\u0001\u0000\u0000"+ + "\u00002\u016b\u0001\u0000\u0000\u00004\u016e\u0001\u0000\u0000\u00006"+ + "\u0176\u0001\u0000\u0000\u00008\u017e\u0001\u0000\u0000\u0000:\u0186\u0001"+ + 
"\u0000\u0000\u0000<\u018e\u0001\u0000\u0000\u0000>\u0192\u0001\u0000\u0000"+ + "\u0000@\u01be\u0001\u0000\u0000\u0000B\u01c2\u0001\u0000\u0000\u0000D"+ + "\u01c6\u0001\u0000\u0000\u0000F\u01c8\u0001\u0000\u0000\u0000H\u01cb\u0001"+ + "\u0000\u0000\u0000J\u01d4\u0001\u0000\u0000\u0000L\u01dc\u0001\u0000\u0000"+ + "\u0000N\u01df\u0001\u0000\u0000\u0000P\u01e2\u0001\u0000\u0000\u0000R"+ + "\u01eb\u0001\u0000\u0000\u0000T\u01ef\u0001\u0000\u0000\u0000V\u01f5\u0001"+ + "\u0000\u0000\u0000X\u01f9\u0001\u0000\u0000\u0000Z\u01fc\u0001\u0000\u0000"+ + "\u0000\\\u0204\u0001\u0000\u0000\u0000^\u0208\u0001\u0000\u0000\u0000"+ + "`\u020c\u0001\u0000\u0000\u0000b\u020f\u0001\u0000\u0000\u0000d\u0214"+ + "\u0001\u0000\u0000\u0000f\u0218\u0001\u0000\u0000\u0000h\u021a\u0001\u0000"+ + "\u0000\u0000j\u021c\u0001\u0000\u0000\u0000l\u021f\u0001\u0000\u0000\u0000"+ + "n\u0223\u0001\u0000\u0000\u0000p\u0226\u0001\u0000\u0000\u0000r\u023a"+ + "\u0001\u0000\u0000\u0000t\u023e\u0001\u0000\u0000\u0000v\u0243\u0001\u0000"+ "\u0000\u0000xy\u0003\u0002\u0001\u0000yz\u0005\u0000\u0000\u0001z\u0001"+ "\u0001\u0000\u0000\u0000{|\u0006\u0001\uffff\uffff\u0000|}\u0003\u0004"+ "\u0002\u0000}\u0083\u0001\u0000\u0000\u0000~\u007f\n\u0001\u0000\u0000"+ @@ -5577,171 +5571,171 @@ private boolean functionName_sempred(FunctionNameContext _localctx, int predInde "\u0117\u010e\u0001\u0000\u0000\u0000\u0117\u010f\u0001\u0000\u0000\u0000"+ "\u0117\u0118\u0001\u0000\u0000\u0000\u0118\u0119\u0001\u0000\u0000\u0000"+ "\u0119\u011a\u00051\u0000\u0000\u011a\u0017\u0001\u0000\u0000\u0000\u011b"+ - "\u011c\u0004\f\n\u0000\u011c\u011f\u0005?\u0000\u0000\u011d\u011f\u0003"+ - "D\"\u0000\u011e\u011b\u0001\u0000\u0000\u0000\u011e\u011d\u0001\u0000"+ - "\u0000\u0000\u011f\u0019\u0001\u0000\u0000\u0000\u0120\u0121\u0003<\u001e"+ - "\u0000\u0121\u001b\u0001\u0000\u0000\u0000\u0122\u0123\u0005\f\u0000\u0000"+ - "\u0123\u0124\u0003\u001e\u000f\u0000\u0124\u001d\u0001\u0000\u0000\u0000"+ - "\u0125\u012a\u0003 
\u0010\u0000\u0126\u0127\u0005!\u0000\u0000\u0127\u0129"+ - "\u0003 \u0010\u0000\u0128\u0126\u0001\u0000\u0000\u0000\u0129\u012c\u0001"+ - "\u0000\u0000\u0000\u012a\u0128\u0001\u0000\u0000\u0000\u012a\u012b\u0001"+ - "\u0000\u0000\u0000\u012b\u001f\u0001\u0000\u0000\u0000\u012c\u012a\u0001"+ - "\u0000\u0000\u0000\u012d\u0133\u0003\n\u0005\u0000\u012e\u012f\u00036"+ - "\u001b\u0000\u012f\u0130\u0005\u001f\u0000\u0000\u0130\u0131\u0003\n\u0005"+ - "\u0000\u0131\u0133\u0001\u0000\u0000\u0000\u0132\u012d\u0001\u0000\u0000"+ - "\u0000\u0132\u012e\u0001\u0000\u0000\u0000\u0133!\u0001\u0000\u0000\u0000"+ - "\u0134\u0135\u0005\u0006\u0000\u0000\u0135\u013a\u0003$\u0012\u0000\u0136"+ - "\u0137\u0005!\u0000\u0000\u0137\u0139\u0003$\u0012\u0000\u0138\u0136\u0001"+ - "\u0000\u0000\u0000\u0139\u013c\u0001\u0000\u0000\u0000\u013a\u0138\u0001"+ - "\u0000\u0000\u0000\u013a\u013b\u0001\u0000\u0000\u0000\u013b\u013e\u0001"+ - "\u0000\u0000\u0000\u013c\u013a\u0001\u0000\u0000\u0000\u013d\u013f\u0003"+ - "*\u0015\u0000\u013e\u013d\u0001\u0000\u0000\u0000\u013e\u013f\u0001\u0000"+ - "\u0000\u0000\u013f#\u0001\u0000\u0000\u0000\u0140\u0141\u0003&\u0013\u0000"+ - "\u0141\u0142\u0005h\u0000\u0000\u0142\u0143\u0003(\u0014\u0000\u0143\u0146"+ - "\u0001\u0000\u0000\u0000\u0144\u0146\u0003(\u0014\u0000\u0145\u0140\u0001"+ - "\u0000\u0000\u0000\u0145\u0144\u0001\u0000\u0000\u0000\u0146%\u0001\u0000"+ - "\u0000\u0000\u0147\u0148\u0005L\u0000\u0000\u0148\'\u0001\u0000\u0000"+ - "\u0000\u0149\u014a\u0007\u0002\u0000\u0000\u014a)\u0001\u0000\u0000\u0000"+ - "\u014b\u014e\u0003,\u0016\u0000\u014c\u014e\u0003.\u0017\u0000\u014d\u014b"+ - "\u0001\u0000\u0000\u0000\u014d\u014c\u0001\u0000\u0000\u0000\u014e+\u0001"+ - "\u0000\u0000\u0000\u014f\u0150\u0005K\u0000\u0000\u0150\u0155\u0005L\u0000"+ - "\u0000\u0151\u0152\u0005!\u0000\u0000\u0152\u0154\u0005L\u0000\u0000\u0153"+ - "\u0151\u0001\u0000\u0000\u0000\u0154\u0157\u0001\u0000\u0000\u0000\u0155"+ - 
"\u0153\u0001\u0000\u0000\u0000\u0155\u0156\u0001\u0000\u0000\u0000\u0156"+ - "-\u0001\u0000\u0000\u0000\u0157\u0155\u0001\u0000\u0000\u0000\u0158\u0159"+ - "\u0005A\u0000\u0000\u0159\u015a\u0003,\u0016\u0000\u015a\u015b\u0005B"+ - "\u0000\u0000\u015b/\u0001\u0000\u0000\u0000\u015c\u015d\u0005\u0013\u0000"+ - "\u0000\u015d\u0162\u0003$\u0012\u0000\u015e\u015f\u0005!\u0000\u0000\u015f"+ - "\u0161\u0003$\u0012\u0000\u0160\u015e\u0001\u0000\u0000\u0000\u0161\u0164"+ - "\u0001\u0000\u0000\u0000\u0162\u0160\u0001\u0000\u0000\u0000\u0162\u0163"+ - "\u0001\u0000\u0000\u0000\u0163\u0166\u0001\u0000\u0000\u0000\u0164\u0162"+ - "\u0001\u0000\u0000\u0000\u0165\u0167\u0003\u001e\u000f\u0000\u0166\u0165"+ - "\u0001\u0000\u0000\u0000\u0166\u0167\u0001\u0000\u0000\u0000\u0167\u016a"+ - "\u0001\u0000\u0000\u0000\u0168\u0169\u0005\u001c\u0000\u0000\u0169\u016b"+ - "\u0003\u001e\u000f\u0000\u016a\u0168\u0001\u0000\u0000\u0000\u016a\u016b"+ - "\u0001\u0000\u0000\u0000\u016b1\u0001\u0000\u0000\u0000\u016c\u016d\u0005"+ - "\u0004\u0000\u0000\u016d\u016e\u0003\u001e\u000f\u0000\u016e3\u0001\u0000"+ - "\u0000\u0000\u016f\u0171\u0005\u000f\u0000\u0000\u0170\u0172\u0003\u001e"+ - "\u000f\u0000\u0171\u0170\u0001\u0000\u0000\u0000\u0171\u0172\u0001\u0000"+ - "\u0000\u0000\u0172\u0175\u0001\u0000\u0000\u0000\u0173\u0174\u0005\u001c"+ - "\u0000\u0000\u0174\u0176\u0003\u001e\u000f\u0000\u0175\u0173\u0001\u0000"+ - "\u0000\u0000\u0175\u0176\u0001\u0000\u0000\u0000\u01765\u0001\u0000\u0000"+ - "\u0000\u0177\u017c\u0003D\"\u0000\u0178\u0179\u0005#\u0000\u0000\u0179"+ - "\u017b\u0003D\"\u0000\u017a\u0178\u0001\u0000\u0000\u0000\u017b\u017e"+ - "\u0001\u0000\u0000\u0000\u017c\u017a\u0001\u0000\u0000\u0000\u017c\u017d"+ - "\u0001\u0000\u0000\u0000\u017d7\u0001\u0000\u0000\u0000\u017e\u017c\u0001"+ - "\u0000\u0000\u0000\u017f\u0184\u0003>\u001f\u0000\u0180\u0181\u0005#\u0000"+ - "\u0000\u0181\u0183\u0003>\u001f\u0000\u0182\u0180\u0001\u0000\u0000\u0000"+ - 
"\u0183\u0186\u0001\u0000\u0000\u0000\u0184\u0182\u0001\u0000\u0000\u0000"+ - "\u0184\u0185\u0001\u0000\u0000\u0000\u01859\u0001\u0000\u0000\u0000\u0186"+ - "\u0184\u0001\u0000\u0000\u0000\u0187\u018c\u00038\u001c\u0000\u0188\u0189"+ - "\u0005!\u0000\u0000\u0189\u018b\u00038\u001c\u0000\u018a\u0188\u0001\u0000"+ - "\u0000\u0000\u018b\u018e\u0001\u0000\u0000\u0000\u018c\u018a\u0001\u0000"+ - "\u0000\u0000\u018c\u018d\u0001\u0000\u0000\u0000\u018d;\u0001\u0000\u0000"+ - "\u0000\u018e\u018c\u0001\u0000\u0000\u0000\u018f\u0190\u0007\u0003\u0000"+ - "\u0000\u0190=\u0001\u0000\u0000\u0000\u0191\u0194\u0005P\u0000\u0000\u0192"+ - "\u0194\u0003B!\u0000\u0193\u0191\u0001\u0000\u0000\u0000\u0193\u0192\u0001"+ - "\u0000\u0000\u0000\u0194?\u0001\u0000\u0000\u0000\u0195\u01c0\u0005,\u0000"+ - "\u0000\u0196\u0197\u0003d2\u0000\u0197\u0198\u0005C\u0000\u0000\u0198"+ - "\u01c0\u0001\u0000\u0000\u0000\u0199\u01c0\u0003b1\u0000\u019a\u01c0\u0003"+ - "d2\u0000\u019b\u01c0\u0003^/\u0000\u019c\u01c0\u0003B!\u0000\u019d\u01c0"+ - "\u0003f3\u0000\u019e\u019f\u0005A\u0000\u0000\u019f\u01a4\u0003`0\u0000"+ - "\u01a0\u01a1\u0005!\u0000\u0000\u01a1\u01a3\u0003`0\u0000\u01a2\u01a0"+ - "\u0001\u0000\u0000\u0000\u01a3\u01a6\u0001\u0000\u0000\u0000\u01a4\u01a2"+ - "\u0001\u0000\u0000\u0000\u01a4\u01a5\u0001\u0000\u0000\u0000\u01a5\u01a7"+ - "\u0001\u0000\u0000\u0000\u01a6\u01a4\u0001\u0000\u0000\u0000\u01a7\u01a8"+ - "\u0005B\u0000\u0000\u01a8\u01c0\u0001\u0000\u0000\u0000\u01a9\u01aa\u0005"+ - "A\u0000\u0000\u01aa\u01af\u0003^/\u0000\u01ab\u01ac\u0005!\u0000\u0000"+ - "\u01ac\u01ae\u0003^/\u0000\u01ad\u01ab\u0001\u0000\u0000\u0000\u01ae\u01b1"+ - "\u0001\u0000\u0000\u0000\u01af\u01ad\u0001\u0000\u0000\u0000\u01af\u01b0"+ - "\u0001\u0000\u0000\u0000\u01b0\u01b2\u0001\u0000\u0000\u0000\u01b1\u01af"+ - "\u0001\u0000\u0000\u0000\u01b2\u01b3\u0005B\u0000\u0000\u01b3\u01c0\u0001"+ - "\u0000\u0000\u0000\u01b4\u01b5\u0005A\u0000\u0000\u01b5\u01ba\u0003f3"+ - 
"\u0000\u01b6\u01b7\u0005!\u0000\u0000\u01b7\u01b9\u0003f3\u0000\u01b8"+ - "\u01b6\u0001\u0000\u0000\u0000\u01b9\u01bc\u0001\u0000\u0000\u0000\u01ba"+ - "\u01b8\u0001\u0000\u0000\u0000\u01ba\u01bb\u0001\u0000\u0000\u0000\u01bb"+ - "\u01bd\u0001\u0000\u0000\u0000\u01bc\u01ba\u0001\u0000\u0000\u0000\u01bd"+ - "\u01be\u0005B\u0000\u0000\u01be\u01c0\u0001\u0000\u0000\u0000\u01bf\u0195"+ - "\u0001\u0000\u0000\u0000\u01bf\u0196\u0001\u0000\u0000\u0000\u01bf\u0199"+ - "\u0001\u0000\u0000\u0000\u01bf\u019a\u0001\u0000\u0000\u0000\u01bf\u019b"+ - "\u0001\u0000\u0000\u0000\u01bf\u019c\u0001\u0000\u0000\u0000\u01bf\u019d"+ - "\u0001\u0000\u0000\u0000\u01bf\u019e\u0001\u0000\u0000\u0000\u01bf\u01a9"+ - "\u0001\u0000\u0000\u0000\u01bf\u01b4\u0001\u0000\u0000\u0000\u01c0A\u0001"+ - "\u0000\u0000\u0000\u01c1\u01c4\u0005/\u0000\u0000\u01c2\u01c4\u0005@\u0000"+ - "\u0000\u01c3\u01c1\u0001\u0000\u0000\u0000\u01c3\u01c2\u0001\u0000\u0000"+ - "\u0000\u01c4C\u0001\u0000\u0000\u0000\u01c5\u01c8\u0003<\u001e\u0000\u01c6"+ - "\u01c8\u0003B!\u0000\u01c7\u01c5\u0001\u0000\u0000\u0000\u01c7\u01c6\u0001"+ - "\u0000\u0000\u0000\u01c8E\u0001\u0000\u0000\u0000\u01c9\u01ca\u0005\t"+ - "\u0000\u0000\u01ca\u01cb\u0005\u001a\u0000\u0000\u01cbG\u0001\u0000\u0000"+ - "\u0000\u01cc\u01cd\u0005\u000e\u0000\u0000\u01cd\u01d2\u0003J%\u0000\u01ce"+ - "\u01cf\u0005!\u0000\u0000\u01cf\u01d1\u0003J%\u0000\u01d0\u01ce\u0001"+ - "\u0000\u0000\u0000\u01d1\u01d4\u0001\u0000\u0000\u0000\u01d2\u01d0\u0001"+ - "\u0000\u0000\u0000\u01d2\u01d3\u0001\u0000\u0000\u0000\u01d3I\u0001\u0000"+ - "\u0000\u0000\u01d4\u01d2\u0001\u0000\u0000\u0000\u01d5\u01d7\u0003\n\u0005"+ - "\u0000\u01d6\u01d8\u0007\u0004\u0000\u0000\u01d7\u01d6\u0001\u0000\u0000"+ - "\u0000\u01d7\u01d8\u0001\u0000\u0000\u0000\u01d8\u01db\u0001\u0000\u0000"+ - "\u0000\u01d9\u01da\u0005-\u0000\u0000\u01da\u01dc\u0007\u0005\u0000\u0000"+ - "\u01db\u01d9\u0001\u0000\u0000\u0000\u01db\u01dc\u0001\u0000\u0000\u0000"+ - 
"\u01dcK\u0001\u0000\u0000\u0000\u01dd\u01de\u0005\b\u0000\u0000\u01de"+ - "\u01df\u0003:\u001d\u0000\u01dfM\u0001\u0000\u0000\u0000\u01e0\u01e1\u0005"+ - "\u0002\u0000\u0000\u01e1\u01e2\u0003:\u001d\u0000\u01e2O\u0001\u0000\u0000"+ - "\u0000\u01e3\u01e4\u0005\u000b\u0000\u0000\u01e4\u01e9\u0003R)\u0000\u01e5"+ - "\u01e6\u0005!\u0000\u0000\u01e6\u01e8\u0003R)\u0000\u01e7\u01e5\u0001"+ - "\u0000\u0000\u0000\u01e8\u01eb\u0001\u0000\u0000\u0000\u01e9\u01e7\u0001"+ - "\u0000\u0000\u0000\u01e9\u01ea\u0001\u0000\u0000\u0000\u01eaQ\u0001\u0000"+ - "\u0000\u0000\u01eb\u01e9\u0001\u0000\u0000\u0000\u01ec\u01ed\u00038\u001c"+ - "\u0000\u01ed\u01ee\u0005T\u0000\u0000\u01ee\u01ef\u00038\u001c\u0000\u01ef"+ - "S\u0001\u0000\u0000\u0000\u01f0\u01f1\u0005\u0001\u0000\u0000\u01f1\u01f2"+ - "\u0003\u0014\n\u0000\u01f2\u01f4\u0003f3\u0000\u01f3\u01f5\u0003Z-\u0000"+ - "\u01f4\u01f3\u0001\u0000\u0000\u0000\u01f4\u01f5\u0001\u0000\u0000\u0000"+ - "\u01f5U\u0001\u0000\u0000\u0000\u01f6\u01f7\u0005\u0007\u0000\u0000\u01f7"+ - "\u01f8\u0003\u0014\n\u0000\u01f8\u01f9\u0003f3\u0000\u01f9W\u0001\u0000"+ - "\u0000\u0000\u01fa\u01fb\u0005\n\u0000\u0000\u01fb\u01fc\u00036\u001b"+ - "\u0000\u01fcY\u0001\u0000\u0000\u0000\u01fd\u0202\u0003\\.\u0000\u01fe"+ - "\u01ff\u0005!\u0000\u0000\u01ff\u0201\u0003\\.\u0000\u0200\u01fe\u0001"+ - "\u0000\u0000\u0000\u0201\u0204\u0001\u0000\u0000\u0000\u0202\u0200\u0001"+ - "\u0000\u0000\u0000\u0202\u0203\u0001\u0000\u0000\u0000\u0203[\u0001\u0000"+ - "\u0000\u0000\u0204\u0202\u0001\u0000\u0000\u0000\u0205\u0206\u0003<\u001e"+ - "\u0000\u0206\u0207\u0005\u001f\u0000\u0000\u0207\u0208\u0003@ \u0000\u0208"+ - "]\u0001\u0000\u0000\u0000\u0209\u020a\u0007\u0006\u0000\u0000\u020a_\u0001"+ - "\u0000\u0000\u0000\u020b\u020e\u0003b1\u0000\u020c\u020e\u0003d2\u0000"+ - "\u020d\u020b\u0001\u0000\u0000\u0000\u020d\u020c\u0001\u0000\u0000\u0000"+ - "\u020ea\u0001\u0000\u0000\u0000\u020f\u0211\u0007\u0000\u0000\u0000\u0210"+ - 
"\u020f\u0001\u0000\u0000\u0000\u0210\u0211\u0001\u0000\u0000\u0000\u0211"+ - "\u0212\u0001\u0000\u0000\u0000\u0212\u0213\u0005\u001b\u0000\u0000\u0213"+ - "c\u0001\u0000\u0000\u0000\u0214\u0216\u0007\u0000\u0000\u0000\u0215\u0214"+ - "\u0001\u0000\u0000\u0000\u0215\u0216\u0001\u0000\u0000\u0000\u0216\u0217"+ - "\u0001\u0000\u0000\u0000\u0217\u0218\u0005\u001a\u0000\u0000\u0218e\u0001"+ - "\u0000\u0000\u0000\u0219\u021a\u0005\u0019\u0000\u0000\u021ag\u0001\u0000"+ - "\u0000\u0000\u021b\u021c\u0007\u0007\u0000\u0000\u021ci\u0001\u0000\u0000"+ - "\u0000\u021d\u021e\u0005\u0005\u0000\u0000\u021e\u021f\u0003l6\u0000\u021f"+ - "k\u0001\u0000\u0000\u0000\u0220\u0221\u0005A\u0000\u0000\u0221\u0222\u0003"+ - "\u0002\u0001\u0000\u0222\u0223\u0005B\u0000\u0000\u0223m\u0001\u0000\u0000"+ - "\u0000\u0224\u0225\u0005\r\u0000\u0000\u0225\u0226\u0005d\u0000\u0000"+ - "\u0226o\u0001\u0000\u0000\u0000\u0227\u0228\u0005\u0003\u0000\u0000\u0228"+ - "\u022b\u0005Z\u0000\u0000\u0229\u022a\u0005X\u0000\u0000\u022a\u022c\u0003"+ - "8\u001c\u0000\u022b\u0229\u0001\u0000\u0000\u0000\u022b\u022c\u0001\u0000"+ - "\u0000\u0000\u022c\u0236\u0001\u0000\u0000\u0000\u022d\u022e\u0005Y\u0000"+ - "\u0000\u022e\u0233\u0003r9\u0000\u022f\u0230\u0005!\u0000\u0000\u0230"+ - "\u0232\u0003r9\u0000\u0231\u022f\u0001\u0000\u0000\u0000\u0232\u0235\u0001"+ - "\u0000\u0000\u0000\u0233\u0231\u0001\u0000\u0000\u0000\u0233\u0234\u0001"+ - "\u0000\u0000\u0000\u0234\u0237\u0001\u0000\u0000\u0000\u0235\u0233\u0001"+ - "\u0000\u0000\u0000\u0236\u022d\u0001\u0000\u0000\u0000\u0236\u0237\u0001"+ - "\u0000\u0000\u0000\u0237q\u0001\u0000\u0000\u0000\u0238\u0239\u00038\u001c"+ - "\u0000\u0239\u023a\u0005\u001f\u0000\u0000\u023a\u023c\u0001\u0000\u0000"+ - "\u0000\u023b\u0238\u0001\u0000\u0000\u0000\u023b\u023c\u0001\u0000\u0000"+ - "\u0000\u023c\u023d\u0001\u0000\u0000\u0000\u023d\u023e\u00038\u001c\u0000"+ - "\u023es\u0001\u0000\u0000\u0000\u023f\u0240\u0005\u0012\u0000\u0000\u0240"+ - 
"\u0241\u0003$\u0012\u0000\u0241\u0242\u0005X\u0000\u0000\u0242\u0243\u0003"+ - ":\u001d\u0000\u0243u\u0001\u0000\u0000\u0000\u0244\u0245\u0005\u0011\u0000"+ - "\u0000\u0245\u0248\u0003\u001e\u000f\u0000\u0246\u0247\u0005\u001c\u0000"+ - "\u0000\u0247\u0249\u0003\u001e\u000f\u0000\u0248\u0246\u0001\u0000\u0000"+ - "\u0000\u0248\u0249\u0001\u0000\u0000\u0000\u0249w\u0001\u0000\u0000\u0000"+ + "\u011e\u0005?\u0000\u0000\u011c\u011e\u0003D\"\u0000\u011d\u011b\u0001"+ + "\u0000\u0000\u0000\u011d\u011c\u0001\u0000\u0000\u0000\u011e\u0019\u0001"+ + "\u0000\u0000\u0000\u011f\u0120\u0003<\u001e\u0000\u0120\u001b\u0001\u0000"+ + "\u0000\u0000\u0121\u0122\u0005\f\u0000\u0000\u0122\u0123\u0003\u001e\u000f"+ + "\u0000\u0123\u001d\u0001\u0000\u0000\u0000\u0124\u0129\u0003 \u0010\u0000"+ + "\u0125\u0126\u0005!\u0000\u0000\u0126\u0128\u0003 \u0010\u0000\u0127\u0125"+ + "\u0001\u0000\u0000\u0000\u0128\u012b\u0001\u0000\u0000\u0000\u0129\u0127"+ + "\u0001\u0000\u0000\u0000\u0129\u012a\u0001\u0000\u0000\u0000\u012a\u001f"+ + "\u0001\u0000\u0000\u0000\u012b\u0129\u0001\u0000\u0000\u0000\u012c\u0132"+ + "\u0003\n\u0005\u0000\u012d\u012e\u00036\u001b\u0000\u012e\u012f\u0005"+ + "\u001f\u0000\u0000\u012f\u0130\u0003\n\u0005\u0000\u0130\u0132\u0001\u0000"+ + "\u0000\u0000\u0131\u012c\u0001\u0000\u0000\u0000\u0131\u012d\u0001\u0000"+ + "\u0000\u0000\u0132!\u0001\u0000\u0000\u0000\u0133\u0134\u0005\u0006\u0000"+ + "\u0000\u0134\u0139\u0003$\u0012\u0000\u0135\u0136\u0005!\u0000\u0000\u0136"+ + "\u0138\u0003$\u0012\u0000\u0137\u0135\u0001\u0000\u0000\u0000\u0138\u013b"+ + "\u0001\u0000\u0000\u0000\u0139\u0137\u0001\u0000\u0000\u0000\u0139\u013a"+ + "\u0001\u0000\u0000\u0000\u013a\u013d\u0001\u0000\u0000\u0000\u013b\u0139"+ + "\u0001\u0000\u0000\u0000\u013c\u013e\u0003*\u0015\u0000\u013d\u013c\u0001"+ + "\u0000\u0000\u0000\u013d\u013e\u0001\u0000\u0000\u0000\u013e#\u0001\u0000"+ + "\u0000\u0000\u013f\u0140\u0003&\u0013\u0000\u0140\u0141\u0005h\u0000\u0000"+ + 
"\u0141\u0142\u0003(\u0014\u0000\u0142\u0145\u0001\u0000\u0000\u0000\u0143"+ + "\u0145\u0003(\u0014\u0000\u0144\u013f\u0001\u0000\u0000\u0000\u0144\u0143"+ + "\u0001\u0000\u0000\u0000\u0145%\u0001\u0000\u0000\u0000\u0146\u0147\u0005"+ + "L\u0000\u0000\u0147\'\u0001\u0000\u0000\u0000\u0148\u0149\u0007\u0002"+ + "\u0000\u0000\u0149)\u0001\u0000\u0000\u0000\u014a\u014d\u0003,\u0016\u0000"+ + "\u014b\u014d\u0003.\u0017\u0000\u014c\u014a\u0001\u0000\u0000\u0000\u014c"+ + "\u014b\u0001\u0000\u0000\u0000\u014d+\u0001\u0000\u0000\u0000\u014e\u014f"+ + "\u0005K\u0000\u0000\u014f\u0154\u0005L\u0000\u0000\u0150\u0151\u0005!"+ + "\u0000\u0000\u0151\u0153\u0005L\u0000\u0000\u0152\u0150\u0001\u0000\u0000"+ + "\u0000\u0153\u0156\u0001\u0000\u0000\u0000\u0154\u0152\u0001\u0000\u0000"+ + "\u0000\u0154\u0155\u0001\u0000\u0000\u0000\u0155-\u0001\u0000\u0000\u0000"+ + "\u0156\u0154\u0001\u0000\u0000\u0000\u0157\u0158\u0005A\u0000\u0000\u0158"+ + "\u0159\u0003,\u0016\u0000\u0159\u015a\u0005B\u0000\u0000\u015a/\u0001"+ + "\u0000\u0000\u0000\u015b\u015c\u0005\u0013\u0000\u0000\u015c\u0161\u0003"+ + "$\u0012\u0000\u015d\u015e\u0005!\u0000\u0000\u015e\u0160\u0003$\u0012"+ + "\u0000\u015f\u015d\u0001\u0000\u0000\u0000\u0160\u0163\u0001\u0000\u0000"+ + "\u0000\u0161\u015f\u0001\u0000\u0000\u0000\u0161\u0162\u0001\u0000\u0000"+ + "\u0000\u0162\u0165\u0001\u0000\u0000\u0000\u0163\u0161\u0001\u0000\u0000"+ + "\u0000\u0164\u0166\u0003\u001e\u000f\u0000\u0165\u0164\u0001\u0000\u0000"+ + "\u0000\u0165\u0166\u0001\u0000\u0000\u0000\u0166\u0169\u0001\u0000\u0000"+ + "\u0000\u0167\u0168\u0005\u001c\u0000\u0000\u0168\u016a\u0003\u001e\u000f"+ + "\u0000\u0169\u0167\u0001\u0000\u0000\u0000\u0169\u016a\u0001\u0000\u0000"+ + "\u0000\u016a1\u0001\u0000\u0000\u0000\u016b\u016c\u0005\u0004\u0000\u0000"+ + "\u016c\u016d\u0003\u001e\u000f\u0000\u016d3\u0001\u0000\u0000\u0000\u016e"+ + "\u0170\u0005\u000f\u0000\u0000\u016f\u0171\u0003\u001e\u000f\u0000\u0170"+ + 
"\u016f\u0001\u0000\u0000\u0000\u0170\u0171\u0001\u0000\u0000\u0000\u0171"+ + "\u0174\u0001\u0000\u0000\u0000\u0172\u0173\u0005\u001c\u0000\u0000\u0173"+ + "\u0175\u0003\u001e\u000f\u0000\u0174\u0172\u0001\u0000\u0000\u0000\u0174"+ + "\u0175\u0001\u0000\u0000\u0000\u01755\u0001\u0000\u0000\u0000\u0176\u017b"+ + "\u0003D\"\u0000\u0177\u0178\u0005#\u0000\u0000\u0178\u017a\u0003D\"\u0000"+ + "\u0179\u0177\u0001\u0000\u0000\u0000\u017a\u017d\u0001\u0000\u0000\u0000"+ + "\u017b\u0179\u0001\u0000\u0000\u0000\u017b\u017c\u0001\u0000\u0000\u0000"+ + "\u017c7\u0001\u0000\u0000\u0000\u017d\u017b\u0001\u0000\u0000\u0000\u017e"+ + "\u0183\u0003>\u001f\u0000\u017f\u0180\u0005#\u0000\u0000\u0180\u0182\u0003"+ + ">\u001f\u0000\u0181\u017f\u0001\u0000\u0000\u0000\u0182\u0185\u0001\u0000"+ + "\u0000\u0000\u0183\u0181\u0001\u0000\u0000\u0000\u0183\u0184\u0001\u0000"+ + "\u0000\u0000\u01849\u0001\u0000\u0000\u0000\u0185\u0183\u0001\u0000\u0000"+ + "\u0000\u0186\u018b\u00038\u001c\u0000\u0187\u0188\u0005!\u0000\u0000\u0188"+ + "\u018a\u00038\u001c\u0000\u0189\u0187\u0001\u0000\u0000\u0000\u018a\u018d"+ + "\u0001\u0000\u0000\u0000\u018b\u0189\u0001\u0000\u0000\u0000\u018b\u018c"+ + "\u0001\u0000\u0000\u0000\u018c;\u0001\u0000\u0000\u0000\u018d\u018b\u0001"+ + "\u0000\u0000\u0000\u018e\u018f\u0007\u0003\u0000\u0000\u018f=\u0001\u0000"+ + "\u0000\u0000\u0190\u0193\u0005P\u0000\u0000\u0191\u0193\u0003B!\u0000"+ + "\u0192\u0190\u0001\u0000\u0000\u0000\u0192\u0191\u0001\u0000\u0000\u0000"+ + "\u0193?\u0001\u0000\u0000\u0000\u0194\u01bf\u0005,\u0000\u0000\u0195\u0196"+ + "\u0003d2\u0000\u0196\u0197\u0005C\u0000\u0000\u0197\u01bf\u0001\u0000"+ + "\u0000\u0000\u0198\u01bf\u0003b1\u0000\u0199\u01bf\u0003d2\u0000\u019a"+ + "\u01bf\u0003^/\u0000\u019b\u01bf\u0003B!\u0000\u019c\u01bf\u0003f3\u0000"+ + "\u019d\u019e\u0005A\u0000\u0000\u019e\u01a3\u0003`0\u0000\u019f\u01a0"+ + "\u0005!\u0000\u0000\u01a0\u01a2\u0003`0\u0000\u01a1\u019f\u0001\u0000"+ + 
"\u0000\u0000\u01a2\u01a5\u0001\u0000\u0000\u0000\u01a3\u01a1\u0001\u0000"+ + "\u0000\u0000\u01a3\u01a4\u0001\u0000\u0000\u0000\u01a4\u01a6\u0001\u0000"+ + "\u0000\u0000\u01a5\u01a3\u0001\u0000\u0000\u0000\u01a6\u01a7\u0005B\u0000"+ + "\u0000\u01a7\u01bf\u0001\u0000\u0000\u0000\u01a8\u01a9\u0005A\u0000\u0000"+ + "\u01a9\u01ae\u0003^/\u0000\u01aa\u01ab\u0005!\u0000\u0000\u01ab\u01ad"+ + "\u0003^/\u0000\u01ac\u01aa\u0001\u0000\u0000\u0000\u01ad\u01b0\u0001\u0000"+ + "\u0000\u0000\u01ae\u01ac\u0001\u0000\u0000\u0000\u01ae\u01af\u0001\u0000"+ + "\u0000\u0000\u01af\u01b1\u0001\u0000\u0000\u0000\u01b0\u01ae\u0001\u0000"+ + "\u0000\u0000\u01b1\u01b2\u0005B\u0000\u0000\u01b2\u01bf\u0001\u0000\u0000"+ + "\u0000\u01b3\u01b4\u0005A\u0000\u0000\u01b4\u01b9\u0003f3\u0000\u01b5"+ + "\u01b6\u0005!\u0000\u0000\u01b6\u01b8\u0003f3\u0000\u01b7\u01b5\u0001"+ + "\u0000\u0000\u0000\u01b8\u01bb\u0001\u0000\u0000\u0000\u01b9\u01b7\u0001"+ + "\u0000\u0000\u0000\u01b9\u01ba\u0001\u0000\u0000\u0000\u01ba\u01bc\u0001"+ + "\u0000\u0000\u0000\u01bb\u01b9\u0001\u0000\u0000\u0000\u01bc\u01bd\u0005"+ + "B\u0000\u0000\u01bd\u01bf\u0001\u0000\u0000\u0000\u01be\u0194\u0001\u0000"+ + "\u0000\u0000\u01be\u0195\u0001\u0000\u0000\u0000\u01be\u0198\u0001\u0000"+ + "\u0000\u0000\u01be\u0199\u0001\u0000\u0000\u0000\u01be\u019a\u0001\u0000"+ + "\u0000\u0000\u01be\u019b\u0001\u0000\u0000\u0000\u01be\u019c\u0001\u0000"+ + "\u0000\u0000\u01be\u019d\u0001\u0000\u0000\u0000\u01be\u01a8\u0001\u0000"+ + "\u0000\u0000\u01be\u01b3\u0001\u0000\u0000\u0000\u01bfA\u0001\u0000\u0000"+ + "\u0000\u01c0\u01c3\u0005/\u0000\u0000\u01c1\u01c3\u0005@\u0000\u0000\u01c2"+ + "\u01c0\u0001\u0000\u0000\u0000\u01c2\u01c1\u0001\u0000\u0000\u0000\u01c3"+ + "C\u0001\u0000\u0000\u0000\u01c4\u01c7\u0003<\u001e\u0000\u01c5\u01c7\u0003"+ + "B!\u0000\u01c6\u01c4\u0001\u0000\u0000\u0000\u01c6\u01c5\u0001\u0000\u0000"+ + "\u0000\u01c7E\u0001\u0000\u0000\u0000\u01c8\u01c9\u0005\t\u0000\u0000"+ + 
"\u01c9\u01ca\u0005\u001a\u0000\u0000\u01caG\u0001\u0000\u0000\u0000\u01cb"+ + "\u01cc\u0005\u000e\u0000\u0000\u01cc\u01d1\u0003J%\u0000\u01cd\u01ce\u0005"+ + "!\u0000\u0000\u01ce\u01d0\u0003J%\u0000\u01cf\u01cd\u0001\u0000\u0000"+ + "\u0000\u01d0\u01d3\u0001\u0000\u0000\u0000\u01d1\u01cf\u0001\u0000\u0000"+ + "\u0000\u01d1\u01d2\u0001\u0000\u0000\u0000\u01d2I\u0001\u0000\u0000\u0000"+ + "\u01d3\u01d1\u0001\u0000\u0000\u0000\u01d4\u01d6\u0003\n\u0005\u0000\u01d5"+ + "\u01d7\u0007\u0004\u0000\u0000\u01d6\u01d5\u0001\u0000\u0000\u0000\u01d6"+ + "\u01d7\u0001\u0000\u0000\u0000\u01d7\u01da\u0001\u0000\u0000\u0000\u01d8"+ + "\u01d9\u0005-\u0000\u0000\u01d9\u01db\u0007\u0005\u0000\u0000\u01da\u01d8"+ + "\u0001\u0000\u0000\u0000\u01da\u01db\u0001\u0000\u0000\u0000\u01dbK\u0001"+ + "\u0000\u0000\u0000\u01dc\u01dd\u0005\b\u0000\u0000\u01dd\u01de\u0003:"+ + "\u001d\u0000\u01deM\u0001\u0000\u0000\u0000\u01df\u01e0\u0005\u0002\u0000"+ + "\u0000\u01e0\u01e1\u0003:\u001d\u0000\u01e1O\u0001\u0000\u0000\u0000\u01e2"+ + "\u01e3\u0005\u000b\u0000\u0000\u01e3\u01e8\u0003R)\u0000\u01e4\u01e5\u0005"+ + "!\u0000\u0000\u01e5\u01e7\u0003R)\u0000\u01e6\u01e4\u0001\u0000\u0000"+ + "\u0000\u01e7\u01ea\u0001\u0000\u0000\u0000\u01e8\u01e6\u0001\u0000\u0000"+ + "\u0000\u01e8\u01e9\u0001\u0000\u0000\u0000\u01e9Q\u0001\u0000\u0000\u0000"+ + "\u01ea\u01e8\u0001\u0000\u0000\u0000\u01eb\u01ec\u00038\u001c\u0000\u01ec"+ + "\u01ed\u0005T\u0000\u0000\u01ed\u01ee\u00038\u001c\u0000\u01eeS\u0001"+ + "\u0000\u0000\u0000\u01ef\u01f0\u0005\u0001\u0000\u0000\u01f0\u01f1\u0003"+ + "\u0014\n\u0000\u01f1\u01f3\u0003f3\u0000\u01f2\u01f4\u0003Z-\u0000\u01f3"+ + "\u01f2\u0001\u0000\u0000\u0000\u01f3\u01f4\u0001\u0000\u0000\u0000\u01f4"+ + "U\u0001\u0000\u0000\u0000\u01f5\u01f6\u0005\u0007\u0000\u0000\u01f6\u01f7"+ + "\u0003\u0014\n\u0000\u01f7\u01f8\u0003f3\u0000\u01f8W\u0001\u0000\u0000"+ + "\u0000\u01f9\u01fa\u0005\n\u0000\u0000\u01fa\u01fb\u00036\u001b\u0000"+ + 
"\u01fbY\u0001\u0000\u0000\u0000\u01fc\u0201\u0003\\.\u0000\u01fd\u01fe"+ + "\u0005!\u0000\u0000\u01fe\u0200\u0003\\.\u0000\u01ff\u01fd\u0001\u0000"+ + "\u0000\u0000\u0200\u0203\u0001\u0000\u0000\u0000\u0201\u01ff\u0001\u0000"+ + "\u0000\u0000\u0201\u0202\u0001\u0000\u0000\u0000\u0202[\u0001\u0000\u0000"+ + "\u0000\u0203\u0201\u0001\u0000\u0000\u0000\u0204\u0205\u0003<\u001e\u0000"+ + "\u0205\u0206\u0005\u001f\u0000\u0000\u0206\u0207\u0003@ \u0000\u0207]"+ + "\u0001\u0000\u0000\u0000\u0208\u0209\u0007\u0006\u0000\u0000\u0209_\u0001"+ + "\u0000\u0000\u0000\u020a\u020d\u0003b1\u0000\u020b\u020d\u0003d2\u0000"+ + "\u020c\u020a\u0001\u0000\u0000\u0000\u020c\u020b\u0001\u0000\u0000\u0000"+ + "\u020da\u0001\u0000\u0000\u0000\u020e\u0210\u0007\u0000\u0000\u0000\u020f"+ + "\u020e\u0001\u0000\u0000\u0000\u020f\u0210\u0001\u0000\u0000\u0000\u0210"+ + "\u0211\u0001\u0000\u0000\u0000\u0211\u0212\u0005\u001b\u0000\u0000\u0212"+ + "c\u0001\u0000\u0000\u0000\u0213\u0215\u0007\u0000\u0000\u0000\u0214\u0213"+ + "\u0001\u0000\u0000\u0000\u0214\u0215\u0001\u0000\u0000\u0000\u0215\u0216"+ + "\u0001\u0000\u0000\u0000\u0216\u0217\u0005\u001a\u0000\u0000\u0217e\u0001"+ + "\u0000\u0000\u0000\u0218\u0219\u0005\u0019\u0000\u0000\u0219g\u0001\u0000"+ + "\u0000\u0000\u021a\u021b\u0007\u0007\u0000\u0000\u021bi\u0001\u0000\u0000"+ + "\u0000\u021c\u021d\u0005\u0005\u0000\u0000\u021d\u021e\u0003l6\u0000\u021e"+ + "k\u0001\u0000\u0000\u0000\u021f\u0220\u0005A\u0000\u0000\u0220\u0221\u0003"+ + "\u0002\u0001\u0000\u0221\u0222\u0005B\u0000\u0000\u0222m\u0001\u0000\u0000"+ + "\u0000\u0223\u0224\u0005\r\u0000\u0000\u0224\u0225\u0005d\u0000\u0000"+ + "\u0225o\u0001\u0000\u0000\u0000\u0226\u0227\u0005\u0003\u0000\u0000\u0227"+ + "\u022a\u0005Z\u0000\u0000\u0228\u0229\u0005X\u0000\u0000\u0229\u022b\u0003"+ + "8\u001c\u0000\u022a\u0228\u0001\u0000\u0000\u0000\u022a\u022b\u0001\u0000"+ + "\u0000\u0000\u022b\u0235\u0001\u0000\u0000\u0000\u022c\u022d\u0005Y\u0000"+ + 
"\u0000\u022d\u0232\u0003r9\u0000\u022e\u022f\u0005!\u0000\u0000\u022f"+ + "\u0231\u0003r9\u0000\u0230\u022e\u0001\u0000\u0000\u0000\u0231\u0234\u0001"+ + "\u0000\u0000\u0000\u0232\u0230\u0001\u0000\u0000\u0000\u0232\u0233\u0001"+ + "\u0000\u0000\u0000\u0233\u0236\u0001\u0000\u0000\u0000\u0234\u0232\u0001"+ + "\u0000\u0000\u0000\u0235\u022c\u0001\u0000\u0000\u0000\u0235\u0236\u0001"+ + "\u0000\u0000\u0000\u0236q\u0001\u0000\u0000\u0000\u0237\u0238\u00038\u001c"+ + "\u0000\u0238\u0239\u0005\u001f\u0000\u0000\u0239\u023b\u0001\u0000\u0000"+ + "\u0000\u023a\u0237\u0001\u0000\u0000\u0000\u023a\u023b\u0001\u0000\u0000"+ + "\u0000\u023b\u023c\u0001\u0000\u0000\u0000\u023c\u023d\u00038\u001c\u0000"+ + "\u023ds\u0001\u0000\u0000\u0000\u023e\u023f\u0005\u0012\u0000\u0000\u023f"+ + "\u0240\u0003$\u0012\u0000\u0240\u0241\u0005X\u0000\u0000\u0241\u0242\u0003"+ + ":\u001d\u0000\u0242u\u0001\u0000\u0000\u0000\u0243\u0244\u0005\u0011\u0000"+ + "\u0000\u0244\u0247\u0003\u001e\u000f\u0000\u0245\u0246\u0005\u001c\u0000"+ + "\u0000\u0246\u0248\u0003\u001e\u000f\u0000\u0247\u0245\u0001\u0000\u0000"+ + "\u0000\u0247\u0248\u0001\u0000\u0000\u0000\u0248w\u0001\u0000\u0000\u0000"+ "9\u0083\u008c\u009e\u00aa\u00b3\u00bb\u00c1\u00c9\u00cb\u00d0\u00d7\u00dc"+ - "\u00e7\u00ed\u00f5\u00f7\u0102\u0109\u0114\u0117\u011e\u012a\u0132\u013a"+ - "\u013e\u0145\u014d\u0155\u0162\u0166\u016a\u0171\u0175\u017c\u0184\u018c"+ - "\u0193\u01a4\u01af\u01ba\u01bf\u01c3\u01c7\u01d2\u01d7\u01db\u01e9\u01f4"+ - "\u0202\u020d\u0210\u0215\u022b\u0233\u0236\u023b\u0248"; + "\u00e7\u00ed\u00f5\u00f7\u0102\u0109\u0114\u0117\u011d\u0129\u0131\u0139"+ + "\u013d\u0144\u014c\u0154\u0161\u0165\u0169\u0170\u0174\u017b\u0183\u018b"+ + "\u0192\u01a3\u01ae\u01b9\u01be\u01c2\u01c6\u01d1\u01d6\u01da\u01e8\u01f3"+ + "\u0201\u020c\u020f\u0214\u022a\u0232\u0235\u023a\u0247"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index 39f1758b78733..bcbd28aced939 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -612,8 +612,8 @@ public Expression visitFunctionExpression(EsqlBaseParser.FunctionExpressionConte @Override public String visitFunctionName(EsqlBaseParser.FunctionNameContext ctx) { - if (ctx.DEV_MATCH() != null) { - return ctx.DEV_MATCH().getText(); + if (ctx.MATCH() != null) { + return ctx.MATCH().getText(); } return visitIdentifierOrParameter(ctx.identifierOrParameter()); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 01c020b16ecad..ecf012718eaf8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -1111,8 +1111,6 @@ public void testMatchFilter() throws Exception { } public void testMatchFunctionNotAllowedAfterCommands() throws Exception { - assumeTrue("skipping because MATCH is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); - assertEquals( "1:24: [MATCH] function cannot be used after LIMIT", error("from test | limit 10 | where match(first_name, \"Anna\")") @@ -1120,8 +1118,6 @@ public void testMatchFunctionNotAllowedAfterCommands() throws Exception { } public void testQueryStringFunctionsNotAllowedAfterCommands() throws Exception { - assumeTrue("skipping because QSTR is not enabled", EsqlCapabilities.Cap.QSTR_FUNCTION.isEnabled()); - // Source commands assertEquals("1:13: [QSTR] function cannot be used after SHOW", 
error("show info | where qstr(\"8.16.0\")")); assertEquals("1:17: [QSTR] function cannot be used after ROW", error("row a= \"Anna\" | where qstr(\"Anna\")")); @@ -1180,15 +1176,11 @@ public void testQueryStringFunctionsNotAllowedAfterCommands() throws Exception { } public void testQueryStringFunctionOnlyAllowedInWhere() throws Exception { - assumeTrue("skipping because QSTR is not enabled", EsqlCapabilities.Cap.QSTR_FUNCTION.isEnabled()); - assertEquals("1:9: [QSTR] function is only supported in WHERE commands", error("row a = qstr(\"Anna\")")); checkFullTextFunctionsOnlyAllowedInWhere("QSTR", "qstr(\"Anna\")"); } public void testMatchFunctionOnlyAllowedInWhere() throws Exception { - assumeTrue("skipping because MATCH is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); - checkFullTextFunctionsOnlyAllowedInWhere("MATCH", "match(first_name, \"Anna\")"); } @@ -1208,8 +1200,6 @@ private void checkFullTextFunctionsOnlyAllowedInWhere(String functionName, Strin } public void testQueryStringFunctionArgNotNullOrConstant() throws Exception { - assumeTrue("skipping because QSTR is not enabled", EsqlCapabilities.Cap.QSTR_FUNCTION.isEnabled()); - assertEquals( "1:19: argument of [qstr(first_name)] must be a constant, received [first_name]", error("from test | where qstr(first_name)") @@ -1219,14 +1209,10 @@ public void testQueryStringFunctionArgNotNullOrConstant() throws Exception { } public void testQueryStringWithDisjunctions() { - assumeTrue("skipping because QSTR is not enabled", EsqlCapabilities.Cap.QSTR_FUNCTION.isEnabled()); - checkWithDisjunctions("QSTR", "qstr(\"first_name: Anna\")"); } public void testMatchWithDisjunctions() { - assumeTrue("skipping because MATCH is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); - checkWithDisjunctions("MATCH", "match(first_name, \"Anna\")"); } @@ -1267,14 +1253,10 @@ private void checkWithDisjunctions(String functionName, String functionInvocatio } public void 
testQueryStringFunctionWithNonBooleanFunctions() { - assumeTrue("skipping because QSTR is not enabled", EsqlCapabilities.Cap.QSTR_FUNCTION.isEnabled()); - checkFullTextFunctionsWithNonBooleanFunctions("QSTR", "qstr(\"first_name: Anna\")"); } public void testMatchFunctionWithNonBooleanFunctions() { - assumeTrue("skipping because MATCH is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); - checkFullTextFunctionsWithNonBooleanFunctions("MATCH", "match(first_name, \"Anna\")"); } @@ -1298,8 +1280,6 @@ private void checkFullTextFunctionsWithNonBooleanFunctions(String functionName, } public void testMatchFunctionArgNotConstant() throws Exception { - assumeTrue("skipping because MATCH is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); - assertEquals( "1:19: second argument of [match(first_name, first_name)] must be a constant, received [first_name]", error("from test | where match(first_name, first_name)") @@ -1313,8 +1293,6 @@ public void testMatchFunctionArgNotConstant() throws Exception { // These should pass eventually once we lift some restrictions on match function public void testMatchFunctionCurrentlyUnsupportedBehaviour() throws Exception { - assumeTrue("skipping because MATCH is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); - assertEquals( "1:68: Unknown column [first_name]", error("from test | stats max_salary = max(salary) by emp_no | where match(first_name, \"Anna\")") @@ -1322,8 +1300,6 @@ public void testMatchFunctionCurrentlyUnsupportedBehaviour() throws Exception { } public void testMatchFunctionNullArgs() throws Exception { - assumeTrue("skipping because MATCH is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); - assertEquals( "1:19: first argument of [match(null, \"query\")] cannot be null, received [null]", error("from test | where match(null, \"query\")") @@ -1335,8 +1311,6 @@ public void testMatchFunctionNullArgs() throws Exception { } public void testMatchFunctionTargetsExistingField() 
throws Exception { - assumeTrue("skipping because MATCH is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); - assertEquals("1:39: Unknown column [first_name]", error("from test | keep emp_no | where match(first_name, \"Anna\")")); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java index f04e9bd495a49..d37bc89635c1d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java @@ -19,7 +19,6 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; -import org.junit.BeforeClass; import java.util.Arrays; import java.util.LinkedList; @@ -27,17 +26,11 @@ import java.util.Set; import java.util.function.Supplier; -import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.MATCH_FUNCTION; import static org.hamcrest.Matchers.equalTo; @FunctionName("match") public class MatchTests extends AbstractFunctionTestCase { - @BeforeClass - public static void checkFunctionEnabled() { - assumeTrue("MATCH function should be enabled ", MATCH_FUNCTION.isEnabled()); - } - public MatchTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringTests.java index 8b0e4f10b8d54..2dfdb05ec8ecc 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringTests.java @@ -18,24 +18,17 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; -import org.junit.BeforeClass; import java.util.Arrays; import java.util.LinkedList; import java.util.List; import java.util.function.Supplier; -import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.QSTR_FUNCTION; import static org.hamcrest.Matchers.equalTo; @FunctionName("qstr") public class QueryStringTests extends AbstractFunctionTestCase { - @BeforeClass - public static void checkFunctionEnabled() { - assumeTrue("QSTR capability should be enabled ", QSTR_FUNCTION.isEnabled()); - } - public QueryStringTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 3dd0828b82eed..8501dd6e478df 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.EsqlTestUtils.TestSearchStats; -import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.analysis.EnrichResolution; @@ -384,7 +383,6 @@ public void testMultiCountAllWithFilter() { * \_EsQueryExec[test], indexMode[standard], 
query[{"query_string":{"query":"last_name: Smith","fields":[]}}] */ public void testQueryStringFunction() { - assumeTrue("skipping because QSTR_FUNCTION is not enabled", EsqlCapabilities.Cap.QSTR_FUNCTION.isEnabled()); var plan = plannerOptimizer.plan(""" from test | where qstr("last_name: Smith") @@ -413,7 +411,6 @@ public void testQueryStringFunction() { * "boost":1.0}}][_doc{f}#1423], limit[1000], sort[] estimatedRowSize[324] */ public void testQueryStringFunctionConjunctionWhereOperands() { - assumeTrue("skipping because QSTR_FUNCTION is not enabled", EsqlCapabilities.Cap.QSTR_FUNCTION.isEnabled()); String queryText = """ from test | where qstr("last_name: Smith") and emp_no > 10010 @@ -448,7 +445,6 @@ public void testQueryStringFunctionConjunctionWhereOperands() { * "source":"cidr_match(ip, \"127.0.0.1/32\")@2:38"}}],"boost":1.0}}][_doc{f}#21], limit[1000], sort[] estimatedRowSize[354] */ public void testQueryStringFunctionWithFunctionsPushedToLucene() { - assumeTrue("skipping because QSTR_FUNCTION is not enabled", EsqlCapabilities.Cap.QSTR_FUNCTION.isEnabled()); String queryText = """ from test | where qstr("last_name: Smith") and cidr_match(ip, "127.0.0.1/32") @@ -484,7 +480,6 @@ public void testQueryStringFunctionWithFunctionsPushedToLucene() { * "boost":1.0}}][_doc{f}#1167], limit[1000], sort[] estimatedRowSize[324] */ public void testQueryStringFunctionMultipleWhereClauses() { - assumeTrue("skipping because QSTR_FUNCTION is not enabled", EsqlCapabilities.Cap.QSTR_FUNCTION.isEnabled()); String queryText = """ from test | where qstr("last_name: Smith") @@ -519,7 +514,6 @@ public void testQueryStringFunctionMultipleWhereClauses() { * {"query_string":{"query":"emp_no: [10010 TO *]","fields":[]}}],"boost":1.0}}] */ public void testQueryStringFunctionMultipleQstrClauses() { - assumeTrue("skipping because QSTR_FUNCTION is not enabled", EsqlCapabilities.Cap.QSTR_FUNCTION.isEnabled()); String queryText = """ from test | where qstr("last_name: Smith") and 
qstr("emp_no: [10010 TO *]") @@ -550,7 +544,6 @@ public void testQueryStringFunctionMultipleQstrClauses() { * \_EsQueryExec[test], indexMode[standard], query[{"match":{"last_name":{"query":"Smith"}}}] */ public void testMatchFunction() { - assumeTrue("skipping because MATCH function is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); var plan = plannerOptimizer.plan(""" from test | where match(last_name, "Smith") @@ -579,7 +572,6 @@ public void testMatchFunction() { * "source":"emp_no > 10010@2:39"}}],"boost":1.0}}][_doc{f}#14], limit[1000], sort[] estimatedRowSize[324] */ public void testMatchFunctionConjunctionWhereOperands() { - assumeTrue("skipping because MATCH function is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); String queryText = """ from test | where match(last_name, "Smith") and emp_no > 10010 @@ -614,7 +606,6 @@ public void testMatchFunctionConjunctionWhereOperands() { * "source":"cidr_match(ip, \"127.0.0.1/32\")@2:33"}}],"boost":1.0}}][_doc{f}#22], limit[1000], sort[] estimatedRowSize[354] */ public void testMatchFunctionWithFunctionsPushedToLucene() { - assumeTrue("skipping because MATCH function is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); String queryText = """ from test | where match(text, "beta") and cidr_match(ip, "127.0.0.1/32") @@ -649,7 +640,6 @@ public void testMatchFunctionWithFunctionsPushedToLucene() { * "source":"emp_no > 10010@3:9"}}],"boost":1.0}}][_doc{f}#14], limit[1000], sort[] estimatedRowSize[324] */ public void testMatchFunctionMultipleWhereClauses() { - assumeTrue("skipping because MATCH function is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); String queryText = """ from test | where match(last_name, "Smith") @@ -683,7 +673,6 @@ public void testMatchFunctionMultipleWhereClauses() { * {"match":{"first_name":{"query":"John"}}}],"boost":1.0}}][_doc{f}#14], limit[1000], sort[] estimatedRowSize[324] */ public void testMatchFunctionMultipleQstrClauses() { - 
assumeTrue("skipping because MATCH function is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); String queryText = """ from test | where match(last_name, "Smith") and match(first_name, "John") diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index e8980c99a61f9..c05b5dd165485 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.VerificationException; -import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils; @@ -5568,8 +5567,6 @@ public void testToDatePeriodToTimeDurationWithField() { // These should pass eventually once we lift some restrictions on match function public void testMatchWithNonIndexedColumnCurrentlyUnsupported() { - assumeTrue("skipping because MATCH function is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); - final String header = "Found 1 problem\nline "; VerificationException e = expectThrows(VerificationException.class, () -> plan(""" from test | eval initial = substring(first_name, 1) | where match(initial, "A")""")); @@ -5589,8 +5586,6 @@ public void testMatchWithNonIndexedColumnCurrentlyUnsupported() { } public void testMatchFunctionIsNotNullable() { - assumeTrue("skipping because MATCH function is not enabled", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); - String queryText = """ row n = null | eval text = n + 5 | where 
match(text::keyword, "Anna") """; From d102659dce45a56020423f2802478ebd0c3ef341 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Tue, 15 Oct 2024 00:35:14 -0700 Subject: [PATCH 091/449] ESQL: Introduce per agg filter (#113735) Add support for aggregation scoped filters that work dynamically on the data in each group. | STATS success = COUNT(*) WHERE 200 <= code AND code < 300, redirect = COUNT(*) WHERE 300 <= code AND code < 400, client_err = COUNT(*) WHERE 400 <= code AND code < 500, server_err = COUNT(*) WHERE 500 <= code AND code < 600, total_count = COUNT(*) Implementation-wise, the base AggregateFunction has been extended to allow a filter to be passed on. This is required to incorporate the filter as part of the aggregate equality/identity, which would fail with the filter as an external component. As part of the process, the serialization for the existing aggregations had to be fixed so that AggregateFunction implementations delegate to their parent first. --- docs/changelog/113735.yaml | 28 + .../org/elasticsearch/TransportVersions.java | 1 + .../xpack/esql/core/util/CollectionUtils.java | 15 + .../src/main/resources/stats.csv-spec | 183 ++ .../esql/src/main/antlr/EsqlBaseLexer.g4 | 1 + .../esql/src/main/antlr/EsqlBaseParser.g4 | 20 +- .../xpack/esql/action/EsqlCapabilities.java | 7 +- .../xpack/esql/analysis/Analyzer.java | 1 + .../xpack/esql/analysis/Verifier.java | 26 +- .../function/EsqlFunctionRegistry.java | 74 +- .../function/aggregate/AggregateFunction.java | 66 +- .../expression/function/aggregate/Avg.java | 21 +- .../expression/function/aggregate/Count.java | 19 +- .../function/aggregate/CountDistinct.java | 34 +- .../function/aggregate/EnclosedAgg.java | 13 - .../aggregate/FilteredExpression.java | 95 + .../function/aggregate/FromPartial.java | 32 +- .../expression/function/aggregate/Max.java | 17 +- .../expression/function/aggregate/Median.java | 16 +- .../aggregate/MedianAbsoluteDeviation.java | 18 +- 
.../expression/function/aggregate/Min.java | 17 +- .../function/aggregate/NumericAggregate.java | 4 + .../function/aggregate/Percentile.java | 35 +- .../expression/function/aggregate/Rate.java | 52 +- .../aggregate/SpatialAggregateFunction.java | 6 +- .../function/aggregate/SpatialCentroid.java | 14 +- .../expression/function/aggregate/Sum.java | 16 +- .../function/aggregate/ToPartial.java | 34 +- .../expression/function/aggregate/Top.java | 41 +- .../expression/function/aggregate/Values.java | 17 +- .../function/aggregate/WeightedAvg.java | 36 +- .../esql/optimizer/LogicalPlanOptimizer.java | 4 + .../optimizer/rules/logical/FoldNull.java | 11 + .../ReplaceStatsAggExpressionWithEval.java | 14 +- .../logical/SubstituteFilteredExpression.java | 27 + .../xpack/esql/parser/EsqlBaseLexer.interp | 3 +- .../xpack/esql/parser/EsqlBaseLexer.java | 1991 +++++++++-------- .../xpack/esql/parser/EsqlBaseParser.interp | 4 +- .../xpack/esql/parser/EsqlBaseParser.java | 1950 ++++++++-------- .../parser/EsqlBaseParserBaseListener.java | 24 + .../parser/EsqlBaseParserBaseVisitor.java | 14 + .../esql/parser/EsqlBaseParserListener.java | 20 + .../esql/parser/EsqlBaseParserVisitor.java | 12 + .../xpack/esql/parser/ExpressionBuilder.java | 37 +- .../xpack/esql/parser/LogicalPlanBuilder.java | 13 +- .../xpack/esql/plan/logical/Aggregate.java | 1 + .../AbstractPhysicalOperationProviders.java | 42 +- .../xpack/esql/planner/AggregateMapper.java | 16 +- .../elasticsearch/xpack/esql/CsvTests.java | 4 - .../xpack/esql/analysis/AnalyzerTests.java | 4 +- .../xpack/esql/analysis/VerifierTests.java | 38 + .../aggregate/RateSerializationTests.java | 5 + .../aggregate/TopSerializationTests.java | 5 + .../optimizer/LogicalPlanOptimizerTests.java | 18 + .../xpack/esql/parser/ExpressionTests.java | 4 +- .../esql/parser/StatementParserTests.java | 59 + .../esql/tree/EsqlNodeSubclassTests.java | 15 + 57 files changed, 3181 insertions(+), 2113 deletions(-) create mode 100644 docs/changelog/113735.yaml 
delete mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/EnclosedAgg.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FilteredExpression.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SubstituteFilteredExpression.java diff --git a/docs/changelog/113735.yaml b/docs/changelog/113735.yaml new file mode 100644 index 0000000000000..4f6579c7cb9e0 --- /dev/null +++ b/docs/changelog/113735.yaml @@ -0,0 +1,28 @@ +pr: 113735 +summary: "ESQL: Introduce per agg filter" +area: ES|QL +type: feature +issues: [] +highlight: + title: "ESQL: Introduce per agg filter" + body: |- + Add support for aggregation scoped filters that work dynamically on the + data in each group. + + [source,esql] + ---- + | STATS success = COUNT(*) WHERE 200 <= code AND code < 300, + redirect = COUNT(*) WHERE 300 <= code AND code < 400, + client_err = COUNT(*) WHERE 400 <= code AND code < 500, + server_err = COUNT(*) WHERE 500 <= code AND code < 600, + total_count = COUNT(*) + ---- + + Implementation wise, the base AggregateFunction has been extended to + allow a filter to be passed on. This is required to incorporate the + filter as part of the aggregate equality/identity which would fail with + the filter as an external component. + As part of the process, the serialization for the existing aggregations + had to be fixed so that AggregateFunction implementations + delegate to their parent first. 
+ notable: true diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index ab4321edd3f71..3cb4695e867df 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -243,6 +243,7 @@ static TransportVersion def(int id) { public static final TransportVersion CHUNK_SENTENCE_OVERLAP_SETTING_ADDED = def(8_767_00_0); public static final TransportVersion OPT_IN_ESQL_CCS_EXECUTION_INFO = def(8_768_00_0); public static final TransportVersion QUERY_RULE_TEST_API = def(8_769_00_0); + public static final TransportVersion ESQL_PER_AGGREGATE_FILTER = def(8_770_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/CollectionUtils.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/CollectionUtils.java index 48b5fd1605edf..8bfcf4ca5c405 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/CollectionUtils.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/CollectionUtils.java @@ -79,4 +79,19 @@ public static int mapSize(int size) { } return (int) (size / 0.75f + 1f); } + + @SafeVarargs + @SuppressWarnings("varargs") + public static List nullSafeList(T... 
entries) { + if (entries == null || entries.length == 0) { + return emptyList(); + } + List list = new ArrayList<>(entries.length); + for (T entry : entries) { + if (entry != null) { + list.add(entry); + } + } + return list; + } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 8a2e9b402fbca..496a747fd9c2b 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -2290,3 +2290,186 @@ from employees m:integer |a:double |x:integer 74999 |48249.0 |0 ; + + +statsWithFiltering +required_capability: per_agg_filtering +from employees +| stats max = max(salary), max_f = max(salary) where salary < 50000, max_a = max(salary) where salary > 100, + min = min(salary), min_f = min(salary) where salary > 50000, min_a = min(salary) where salary > 100 +; + +max:integer |max_f:integer |max_a:integer | min:integer | min_f:integer | min_a:integer +74999 |49818 |74999 | 25324 | 50064 | 25324 +; + +statsWithEverythingFiltered +required_capability: per_agg_filtering +from employees +| stats max = max(salary), max_a = max(salary) where salary < 100, + min = min(salary), min_a = min(salary) where salary > 99999 +; + +max:integer |max_a:integer|min:integer | min_a:integer +74999 |null |25324 | null +; + +statsWithNullFilter +required_capability: per_agg_filtering +from employees +| stats max = max(salary), max_a = max(salary) where null, + min = min(salary), min_a = min(salary) where to_string(null) == "abc" +; + +max:integer |max_a:integer|min:integer | min_a:integer +74999 |null |25324 | null +; + +statsWithBasicExpressionFiltered +required_capability: per_agg_filtering +from employees +| stats max = max(salary), max_f = max(salary) where salary < 50000, + min = min(salary), min_f = min(salary) where salary > 50000, + exp_p = max(salary) + 10000 where salary < 50000, + exp_m = 
min(salary) % 10000 where salary > 50000 +; + +max:integer |max_f:integer|min:integer | min_f:integer|exp_p:integer | exp_m:integer +74999 |49818 |25324 | 50064 |59818 | 64 +; + +statsWithExpressionOverFilters +required_capability: per_agg_filtering +from employees +| stats max = max(salary), max_f = max(salary) where salary < 50000, + min = min(salary), min_f = min(salary) where salary > 50000, + exp_gt = max(salary) - min(salary) where salary > 50000, + exp_lt = max(salary) - min(salary) where salary < 50000 + +; + +max:integer |max_f:integer | min:integer | min_f:integer |exp_gt:integer | exp_lt:integer +74999 |49818 | 25324 | 50064 |24935 | 24494 +; + + +statsWithExpressionOfExpressionsOverFilters +required_capability: per_agg_filtering +from employees +| stats max = max(salary + 1), max_f = max(salary + 2) where salary < 50000, + min = min(salary - 1), min_f = min(salary - 2) where salary > 50000, + exp_gt = max(salary + 3) - min(salary - 3) where salary > 50000, + exp_lt = max(salary + 4) - min(salary - 4) where salary < 50000 + +; + +max:integer |max_f:integer | min:integer | min_f:integer |exp_gt:integer | exp_lt:integer +75000 |49820 | 25323 | 50062 |24941 | 24502 +; + +statsWithSubstitutedExpressionOverFilters +required_capability: per_agg_filtering +from employees +| stats sum = sum(salary), s_l = sum(salary) where salary < 50000, s_u = sum(salary) where salary > 50000, + count = count(salary), c_l = count(salary) where salary < 50000, c_u = count(salary) where salary > 50000, + avg = round(avg(salary), 2), a_l = round(avg(salary), 2) where salary < 50000, a_u = round(avg(salary),2) where salary > 50000 +; + +sum:l |s_l:l | s_u:l | count:l |c_l:l |c_u:l |avg:double |a_l:double | a_u:double +4824855 |2220951 | 2603904 | 100 |58 |42 |48248.55 |38292.26 | 61997.71 +; + + +statsWithFilterAndGroupBy +required_capability: per_agg_filtering +from employees +| stats m = max(height), + m_f = max(height + 1) where gender == "M" OR is_rehired is null + BY gender, 
is_rehired +| sort gender, is_rehired +; + +m:d |m_f:d |gender:s|is_rehired:bool +2.1 |null |F |false +2.1 |null |F |true +1.85|2.85 |F |null +2.1 |3.1 |M |false +2.1 |3.1 |M |true +2.01|3.01 |M |null +2.06|null |null |false +1.97|null |null |true +1.99|2.99 |null |null +; + +statsWithFilterOnGroupBy +required_capability: per_agg_filtering +from employees +| stats m_f = max(height) where gender == "M" BY gender +| sort gender +; + +m_f:d |gender:s +null |F +2.1 |M +null |null +; + +statsWithGroupByLiteral +required_capability: per_agg_filtering +from employees +| stats m = max(languages) by salary = 2 +; + +m:i |salary:i +5 |2 +; + + +statsWithFilterOnSameColumn +required_capability: per_agg_filtering +from employees +| stats m = max(languages), m_f = max(languages) where salary > 50000 by salary = 2 +| sort salary +; + +m:i |m_f:i |salary:i +5 |null |2 +; + +# the query is reused below in a multi-stats +statsWithFilteringAndGrouping +required_capability: per_agg_filtering +from employees +| stats c = count(), c_f = count(languages) where l > 1, + m_f = max(height) where salary > 50000 + by l = languages +| sort c +; + +c:l |c_f:l |m_f:d |l:i +10 |0 |2.08 |null +15 |0 |2.06 |1 +17 |17 |2.1 |3 +18 |18 |1.83 |4 +19 |19 |2.03 |2 +21 |21 |2.1 |5 +; + +multiStatsWithFiltering +required_capability: per_agg_filtering +from employees +| stats c = count(), c_f = count(languages) where l > 1, + m_f = max(height) where salary > 50000 + by l = languages +| stats c2 = count(), c2_f = count() where m_f > 2.06 , m2 = max(l), m2_f = max(l) where l > 1 by c +| sort c +; + +c2:l |c2_f:l |m2:i |m2_f:i |c:l +1 |1 |null |null |10 +1 |0 |1 |null |15 +1 |1 |3 |3 |17 +1 |0 |4 |4 |18 +1 |0 |2 |2 |19 +1 |1 |5 |5 |21 +; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index d6d45097a1d07..b13606befd2a4 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ 
-209,6 +209,7 @@ SLASH : '/'; PERCENT : '%'; MATCH : 'match'; +NESTED_WHERE : {this.isDevVersion()}? WHERE -> type(WHERE); NAMED_OR_POSITIONAL_PARAM : PARAM (LETTER | UNDERSCORE) UNQUOTED_ID_BODY* diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index 77568d5527cd1..9a95e0e6726ba 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -123,8 +123,7 @@ fields ; field - : booleanExpression - | qualifiedName ASSIGN booleanExpression + : (qualifiedName ASSIGN)? booleanExpression ; fromCommand @@ -132,8 +131,7 @@ fromCommand ; indexPattern - : clusterString COLON indexString - | indexString + : (clusterString COLON)? indexString ; clusterString @@ -159,7 +157,7 @@ deprecated_metadata ; metricsCommand - : DEV_METRICS indexPattern (COMMA indexPattern)* aggregates=fields? (BY grouping=fields)? + : DEV_METRICS indexPattern (COMMA indexPattern)* aggregates=aggFields? (BY grouping=fields)? ; evalCommand @@ -167,7 +165,15 @@ evalCommand ; statsCommand - : STATS stats=fields? (BY grouping=fields)? + : STATS stats=aggFields? (BY grouping=fields)? + ; + +aggFields + : aggField (COMMA aggField)* + ; + +aggField + : field {this.isDevVersion()}? (WHERE booleanExpression)? ; qualifiedName @@ -316,5 +322,5 @@ lookupCommand ; inlinestatsCommand - : DEV_INLINESTATS stats=fields (BY grouping=fields)? + : DEV_INLINESTATS stats=aggFields (BY grouping=fields)? 
; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 9dc17b020e426..f5baaef4f579d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -370,7 +370,12 @@ public enum Cap { /** * Fix sorting not allowed on _source and counters. */ - SORTING_ON_SOURCE_AND_COUNTERS_FORBIDDEN; + SORTING_ON_SOURCE_AND_COUNTERS_FORBIDDEN, + + /** + * Allow filter per individual aggregation. + */ + PER_AGG_FILTERING; private final boolean snapshotOnly; private final FeatureFlag featureFlag; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 90957f55141b9..fe7b945a9b3c1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -488,6 +488,7 @@ private LogicalPlan resolveStats(Stats stats, List childrenOutput) { newAggregates.add(agg); } + // TODO: remove this when Stats interface is removed stats = changed.get() ? 
stats.with(stats.child(), groupings, newAggregates) : stats; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index dd2b72b4d35d9..ef39220d7ffcc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -30,6 +30,7 @@ import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; +import org.elasticsearch.xpack.esql.expression.function.aggregate.FilteredExpression; import org.elasticsearch.xpack.esql.expression.function.aggregate.Rate; import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextFunction; import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; @@ -308,6 +309,29 @@ private static void checkInvalidNamedExpressionUsage( Set failures, int level ) { + // unwrap filtered expression + if (e instanceof FilteredExpression fe) { + e = fe.delegate(); + // make sure they work on aggregate functions + if (e.anyMatch(AggregateFunction.class::isInstance) == false) { + Expression filter = fe.filter(); + failures.add(fail(filter, "WHERE clause allowed only for aggregate functions, none found in [{}]", fe.sourceText())); + } + // but that the filter doesn't use grouping or aggregate functions + fe.filter().forEachDown(c -> { + if (c instanceof AggregateFunction af) { + failures.add( + fail(af, "cannot use aggregate function [{}] in aggregate WHERE clause [{}]", af.sourceText(), fe.sourceText()) + ); + } + // check the bucketing function against the group + else if (c instanceof GroupingFunction gf) { + if (Expressions.anyMatch(groups, ex -> ex instanceof Alias a && a.child().semanticEquals(gf)) == false) { + 
failures.add(fail(gf, "can only use grouping function [{}] part of the BY clause", gf.sourceText())); + } + } + }); + } // found an aggregate, constant or a group, bail out if (e instanceof AggregateFunction af) { af.field().forEachDown(AggregateFunction.class, f -> { @@ -319,7 +343,7 @@ private static void checkInvalidNamedExpressionUsage( } else if (e instanceof GroupingFunction gf) { // optimizer will later unroll expressions with aggs and non-aggs with a grouping function into an EVAL, but that will no longer // be verified (by check above in checkAggregate()), so do it explicitly here - if (groups.stream().anyMatch(ex -> ex instanceof Alias a && a.child().semanticEquals(gf)) == false) { + if (Expressions.anyMatch(groups, ex -> ex instanceof Alias a && a.child().semanticEquals(gf)) == false) { failures.add(fail(gf, "can only use grouping function [{}] part of the BY clause", gf.sourceText())); } else if (level == 0) { addFailureOnGroupingUsedNakedInAggs(failures, gf, "function"); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index faf99d6bd65bc..66151275fc2e8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -259,19 +259,21 @@ private FunctionDefinition[][] functions() { // grouping functions new FunctionDefinition[] { def(Bucket.class, Bucket::new, "bucket", "bin"), }, // aggregate functions + // since they declare two public constructors - one with filter (for nested where) and one without + // use casting to disambiguate between the two new FunctionDefinition[] { - def(Avg.class, Avg::new, "avg"), - def(Count.class, Count::new, "count"), - def(CountDistinct.class, CountDistinct::new, 
"count_distinct"), - def(Max.class, Max::new, "max"), - def(Median.class, Median::new, "median"), - def(MedianAbsoluteDeviation.class, MedianAbsoluteDeviation::new, "median_absolute_deviation"), - def(Min.class, Min::new, "min"), - def(Percentile.class, Percentile::new, "percentile"), - def(Sum.class, Sum::new, "sum"), - def(Top.class, Top::new, "top"), - def(Values.class, Values::new, "values"), - def(WeightedAvg.class, WeightedAvg::new, "weighted_avg") }, + def(Avg.class, uni(Avg::new), "avg"), + def(Count.class, uni(Count::new), "count"), + def(CountDistinct.class, bi(CountDistinct::new), "count_distinct"), + def(Max.class, uni(Max::new), "max"), + def(Median.class, uni(Median::new), "median"), + def(MedianAbsoluteDeviation.class, uni(MedianAbsoluteDeviation::new), "median_absolute_deviation"), + def(Min.class, uni(Min::new), "min"), + def(Percentile.class, bi(Percentile::new), "percentile"), + def(Sum.class, uni(Sum::new), "sum"), + def(Top.class, tri(Top::new), "top"), + def(Values.class, uni(Values::new), "values"), + def(WeightedAvg.class, bi(WeightedAvg::new), "weighted_avg") }, // math new FunctionDefinition[] { def(Abs.class, Abs::new, "abs"), @@ -482,11 +484,10 @@ public static DataType getTargetType(String[] names) { } public static FunctionDescription description(FunctionDefinition def) { - var constructors = def.clazz().getConstructors(); - if (constructors.length == 0) { + Constructor constructor = constructorFor(def.clazz()); + if (constructor == null) { return new FunctionDescription(def.name(), List.of(), null, null, false, false); } - Constructor constructor = constructors[0]; FunctionInfo functionInfo = functionInfo(def); String functionDescription = functionInfo == null ? "" : functionInfo.description().replace('\n', ' '); String[] returnType = functionInfo == null ? new String[] { "?" 
} : removeUnderConstruction(functionInfo.returnType()); @@ -523,14 +524,29 @@ private static String[] removeUnderConstruction(String[] types) { } public static FunctionInfo functionInfo(FunctionDefinition def) { - var constructors = def.clazz().getConstructors(); - if (constructors.length == 0) { + Constructor constructor = constructorFor(def.clazz()); + if (constructor == null) { return null; } - Constructor constructor = constructors[0]; return constructor.getAnnotation(FunctionInfo.class); } + private static Constructor constructorFor(Class clazz) { + Constructor[] constructors = clazz.getConstructors(); + if (constructors.length == 0) { + return null; + } + // when dealing with multiple, pick the constructor exposing the FunctionInfo annotation + if (constructors.length > 1) { + for (Constructor constructor : constructors) { + if (constructor.getAnnotation(FunctionInfo.class) != null) { + return constructor; + } + } + } + return constructors[0]; + } + private void buildDataTypesForStringLiteralConversion(FunctionDefinition[]... groupFunctions) { for (FunctionDefinition[] group : groupFunctions) { for (FunctionDefinition def : group) { @@ -913,15 +929,19 @@ protected interface TernaryConfigurationAwareBuilder { } // - // Utility method for extra argument extraction. + // Utility functions to help disambiguate the method handle passed in. + // They work by providing additional method information to help the compiler know which method to pick. 
// - protected static Boolean asBool(Object[] extras) { - if (CollectionUtils.isEmpty(extras)) { - return null; - } - if (extras.length != 1 || (extras[0] instanceof Boolean) == false) { - throw new QlIllegalArgumentException("Invalid number and types of arguments given to function definition"); - } - return (Boolean) extras[0]; + private static BiFunction uni(BiFunction function) { + return function; } + + private static BinaryBuilder bi(BinaryBuilder function) { + return function; + } + + private static TernaryBuilder tri(TernaryBuilder function) { + return function; + } + } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java index f0acac0e9744e..f7a74cc2ae93f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java @@ -6,10 +6,12 @@ */ package org.elasticsearch.xpack.esql.expression.function.aggregate; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.expression.function.Function; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -20,8 +22,8 @@ import java.util.List; import java.util.Objects; +import static java.util.Arrays.asList; import static java.util.Collections.emptyList; -import static java.util.Collections.singletonList; import static 
org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; /** @@ -52,25 +54,51 @@ public static List getNamedWriteables() { private final Expression field; private final List parameters; + private final Expression filter; protected AggregateFunction(Source source, Expression field) { - this(source, field, emptyList()); + this(source, field, Literal.TRUE, emptyList()); } protected AggregateFunction(Source source, Expression field, List parameters) { - super(source, CollectionUtils.combine(singletonList(field), parameters)); + this(source, field, Literal.TRUE, parameters); + } + + protected AggregateFunction(Source source, Expression field, Expression filter, List parameters) { + super(source, CollectionUtils.combine(asList(field, filter), parameters)); this.field = field; + this.filter = filter; this.parameters = parameters; } protected AggregateFunction(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class)); + this( + Source.readFrom((PlanStreamInput) in), + in.readNamedWriteable(Expression.class), + in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) + ? in.readNamedWriteable(Expression.class) + : Literal.TRUE, + in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) + ? 
in.readNamedWriteableCollectionAsList(Expression.class) + : emptyList() + ); } @Override - public void writeTo(StreamOutput out) throws IOException { + public final void writeTo(StreamOutput out) throws IOException { Source.EMPTY.writeTo(out); out.writeNamedWriteable(field); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER)) { + out.writeNamedWriteable(filter); + out.writeNamedWriteableCollection(parameters); + } else { + deprecatedWriteParams(out); + } + } + + @Deprecated(since = "8.16", forRemoval = true) + protected void deprecatedWriteParams(StreamOutput out) throws IOException { + // } public Expression field() { @@ -81,12 +109,12 @@ public List parameters() { return parameters; } - /** - * Returns the input expressions used in aggregation. - * Defaults to a list containing the only the input field. - */ - public List inputExpressions() { - return List.of(field); + public boolean hasFilter() { + return filter != null && (filter.foldable() == false || Boolean.TRUE.equals(filter.fold()) == false); + } + + public Expression filter() { + return filter; } @Override @@ -94,6 +122,18 @@ protected TypeResolution resolveType() { return TypeResolutions.isExact(field, sourceText(), DEFAULT); } + /** + * Attach a filter to the aggregate function. 
+ */ + public abstract AggregateFunction withFilter(Expression filter); + + public AggregateFunction withParameters(List parameters) { + if (parameters == this.parameters) { + return this; + } + return (AggregateFunction) replaceChildren(CollectionUtils.combine(asList(field, filter), parameters)); + } + @Override public int hashCode() { // NB: the hashcode is currently used for key generation so @@ -105,7 +145,9 @@ public int hashCode() { public boolean equals(Object obj) { if (super.equals(obj)) { AggregateFunction other = (AggregateFunction) obj; - return Objects.equals(other.field(), field()) && Objects.equals(other.parameters(), parameters()); + return Objects.equals(other.field(), field()) + && Objects.equals(other.filter(), filter()) + && Objects.equals(other.parameters(), parameters()); } return false; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java index b5c0b8e5ffdc8..82c0f9d24899e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -23,6 +24,7 @@ import java.io.IOException; import java.util.List; +import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static 
org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; @@ -44,7 +46,11 @@ public class Avg extends AggregateFunction implements SurrogateExpression { ) } ) public Avg(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) { - super(source, field); + this(source, field, Literal.TRUE); + } + + public Avg(Source source, Expression field, Expression filter) { + super(source, field, filter, emptyList()); } @Override @@ -74,12 +80,17 @@ public DataType dataType() { @Override protected NodeInfo info() { - return NodeInfo.create(this, Avg::new, field()); + return NodeInfo.create(this, Avg::new, field(), filter()); } @Override public Avg replaceChildren(List newChildren) { - return new Avg(source(), newChildren.get(0)); + return new Avg(source(), newChildren.get(0), newChildren.get(1)); + } + + @Override + public Avg withFilter(Expression filter) { + return new Avg(source(), field(), filter); } @Override @@ -87,6 +98,8 @@ public Expression surrogate() { var s = source(); var field = field(); - return field().foldable() ? new MvAvg(s, field) : new Div(s, new Sum(s, field), new Count(s, field), dataType()); + return field().foldable() + ? 
new MvAvg(s, field) + : new Div(s, new Sum(s, field, filter()), new Count(s, field, filter()), dataType()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java index 9b6190408dbd4..fa8a9e7d8c837 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java @@ -30,10 +30,11 @@ import java.io.IOException; import java.util.List; +import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; -public class Count extends AggregateFunction implements EnclosedAgg, ToAggregator, SurrogateExpression { +public class Count extends AggregateFunction implements ToAggregator, SurrogateExpression { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Count", Count::new); @FunctionInfo( @@ -83,7 +84,11 @@ public Count( description = "Expression that outputs values to be counted. If omitted, equivalent to `COUNT(*)` (the number of rows)." 
) Expression field ) { - super(source, field); + this(source, field, Literal.TRUE); + } + + public Count(Source source, Expression field, Expression filter) { + super(source, field, filter, emptyList()); } private Count(StreamInput in) throws IOException { @@ -97,17 +102,17 @@ public String getWriteableName() { @Override protected NodeInfo info() { - return NodeInfo.create(this, Count::new, field()); + return NodeInfo.create(this, Count::new, field(), filter()); } @Override - public Count replaceChildren(List newChildren) { - return new Count(source(), newChildren.get(0)); + public AggregateFunction withFilter(Expression filter) { + return new Count(source(), field(), filter); } @Override - public String innerName() { - return "count"; + public Count replaceChildren(List newChildren) { + return new Count(source(), newChildren.get(0), newChildren.get(1)); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java index 858c6e659449c..2550e5bdcf515 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.expression.function.aggregate; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -43,6 +44,7 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isFoldable; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isWholeNumber; +import static 
org.elasticsearch.xpack.esql.core.util.CollectionUtils.nullSafeList; public class CountDistinct extends AggregateFunction implements OptionalArgument, ToAggregator, SurrogateExpression { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( @@ -112,22 +114,33 @@ public CountDistinct( + "same effect as a threshold of 40000. The default value is 3000." ) Expression precision ) { - super(source, field, precision != null ? List.of(precision) : List.of()); - this.precision = precision; + this(source, field, Literal.TRUE, precision); + } + + public CountDistinct(Source source, Expression field, Expression filter, Expression precision) { + this(source, field, filter, precision != null ? List.of(precision) : List.of()); + } + + private CountDistinct(Source source, Expression field, Expression filter, List params) { + super(source, field, filter, params); + this.precision = params.size() > 0 ? params.get(0) : null; } private CountDistinct(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), - in.readOptionalNamedWriteable(Expression.class) + in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) + ? in.readNamedWriteable(Expression.class) + : Literal.TRUE, + in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) + ? 
in.readNamedWriteableCollectionAsList(Expression.class) + : nullSafeList(in.readOptionalNamedWriteable(Expression.class)) ); } @Override - public void writeTo(StreamOutput out) throws IOException { - Source.EMPTY.writeTo(out); - out.writeNamedWriteable(field()); + protected void deprecatedWriteParams(StreamOutput out) throws IOException { out.writeOptionalNamedWriteable(precision); } @@ -138,12 +151,17 @@ public String getWriteableName() { @Override protected NodeInfo info() { - return NodeInfo.create(this, CountDistinct::new, field(), precision); + return NodeInfo.create(this, CountDistinct::new, field(), filter(), precision); } @Override public CountDistinct replaceChildren(List newChildren) { - return new CountDistinct(source(), newChildren.get(0), newChildren.size() > 1 ? newChildren.get(1) : null); + return new CountDistinct(source(), newChildren.get(0), newChildren.get(1), newChildren.size() > 2 ? newChildren.get(2) : null); + } + + @Override + public CountDistinct withFilter(Expression filter) { + return new CountDistinct(source(), field(), filter, precision); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/EnclosedAgg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/EnclosedAgg.java deleted file mode 100644 index 951a991da376b..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/EnclosedAgg.java +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.expression.function.aggregate; - -// Agg 'enclosed' by another agg. 
Used for agg that return multiple embedded aggs (like MatrixStats) -public interface EnclosedAgg { - - String innerName(); -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FilteredExpression.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FilteredExpression.java new file mode 100644 index 0000000000000..97c6fb6dbd887 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FilteredExpression.java @@ -0,0 +1,95 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Nullability; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; + +import java.io.IOException; +import java.util.List; + +import static java.util.Arrays.asList; + +/** + * Basic wrapper for expressions declared with a nested filter (typically in stats). + * Used during parsing to attach the filter to the nested expression - it is expected the two + * get fused later on. 
+ */ +// TODO: This class should implement SurrogateExpression but it doesn't due to its use on folding aggregates +// see https://github.com/elastic/elasticsearch/issues/100634#issuecomment-2400665066 +public class FilteredExpression extends Expression { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + Expression.class, + "FilteredExpression", + FilteredExpression::new + ); + + private final Expression delegate; + private final Expression filter; + + public FilteredExpression(Source source, Expression delegate, Expression filter) { + super(source, asList(delegate, filter)); + this.delegate = delegate; + this.filter = filter; + } + + public FilteredExpression(StreamInput in) throws IOException { + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)); + } + + public Expression surrogate() { + return delegate.transformUp(AggregateFunction.class, af -> af.withFilter(filter)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + Source.EMPTY.writeTo(out); + out.writeNamedWriteable(delegate); + out.writeNamedWriteable(filter); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + public Expression delegate() { + return delegate; + } + + public Expression filter() { + return filter; + } + + @Override + public DataType dataType() { + return delegate.dataType(); + } + + @Override + public Nullability nullable() { + return delegate.nullable(); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, FilteredExpression::new, delegate, filter); + } + + @Override + public Expression replaceChildren(List newChildren) { + return new FilteredExpression(source(), newChildren.get(0), newChildren.get(1)); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java index 593e6fa463371..0f9037a28d7d7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.expression.function.aggregate; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -21,6 +22,7 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -44,18 +46,29 @@ public class FromPartial extends AggregateFunction implements ToAggregator { private final Expression function; public FromPartial(Source source, Expression field, Expression function) { - super(source, field, List.of(function)); + this(source, field, Literal.TRUE, function); + } + + public FromPartial(Source source, Expression field, Expression filter, Expression function) { + super(source, field, filter, List.of(function)); this.function = function; } private FromPartial(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)); + this( + Source.readFrom((PlanStreamInput) in), + in.readNamedWriteable(Expression.class), + in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) + ? 
in.readNamedWriteable(Expression.class) + : Literal.TRUE, + in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) + ? in.readNamedWriteableCollectionAsList(Expression.class).get(0) + : in.readNamedWriteable(Expression.class) + ); } @Override - public void writeTo(StreamOutput out) throws IOException { - source().writeTo(out); - out.writeNamedWriteable(field()); + protected void deprecatedWriteParams(StreamOutput out) throws IOException { out.writeNamedWriteable(function); } @@ -85,12 +98,17 @@ public AttributeSet references() { @Override public Expression replaceChildren(List newChildren) { - return new FromPartial(source(), newChildren.get(0), newChildren.get(1)); + return new FromPartial(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2)); } @Override protected NodeInfo info() { - return NodeInfo.create(this, FromPartial::new, field(), function); + return NodeInfo.create(this, FromPartial::new, field(), filter(), function); + } + + @Override + public FromPartial withFilter(Expression filter) { + return new FromPartial(source(), field(), filter, function); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java index e7f790f90803a..47d74c71d9cc5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java @@ -18,6 +18,7 @@ import org.elasticsearch.compute.aggregation.MaxLongAggregatorFunctionSupplier; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import 
org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -32,6 +33,7 @@ import java.io.IOException; import java.util.List; +import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; import static org.elasticsearch.xpack.esql.core.type.DataType.isRepresentable; @@ -61,7 +63,11 @@ public Max( type = { "boolean", "double", "integer", "long", "date", "ip", "keyword", "text", "long", "version" } ) Expression field ) { - super(source, field); + this(source, field, Literal.TRUE); + } + + public Max(Source source, Expression field, Expression filter) { + super(source, field, filter, emptyList()); } private Max(StreamInput in) throws IOException { @@ -73,14 +79,19 @@ public String getWriteableName() { return ENTRY.name; } + @Override + public Max withFilter(Expression filter) { + return new Max(source(), field(), filter); + } + @Override protected NodeInfo info() { - return NodeInfo.create(this, Max::new, field()); + return NodeInfo.create(this, Max::new, field(), filter()); } @Override public Max replaceChildren(List newChildren) { - return new Max(source(), newChildren.get(0)); + return new Max(source(), newChildren.get(0), newChildren.get(1)); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java index 348fef577c934..c47fa612c1c49 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java @@ -25,6 +25,7 @@ import java.io.IOException; import java.util.List; +import static java.util.Collections.emptyList; import static 
org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; @@ -55,7 +56,11 @@ public class Median extends AggregateFunction implements SurrogateExpression { ), } ) public Median(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) { - super(source, field); + this(source, field, Literal.TRUE); + } + + public Median(Source source, Expression field, Expression filter) { + super(source, field, filter, emptyList()); } @Override @@ -85,12 +90,17 @@ public DataType dataType() { @Override protected NodeInfo info() { - return NodeInfo.create(this, Median::new, field()); + return NodeInfo.create(this, Median::new, field(), filter()); } @Override public Median replaceChildren(List newChildren) { - return new Median(source(), newChildren.get(0)); + return new Median(source(), newChildren.get(0), newChildren.get(1)); + } + + @Override + public AggregateFunction withFilter(Expression filter) { + return new Median(source(), field(), filter); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java index 23a6b23a35cde..dfcbd6d22abae 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.aggregation.MedianAbsoluteDeviationIntAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MedianAbsoluteDeviationLongAggregatorFunctionSupplier; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; 
import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.SurrogateExpression; @@ -26,6 +27,8 @@ import java.io.IOException; import java.util.List; +import static java.util.Collections.emptyList; + public class MedianAbsoluteDeviation extends NumericAggregate implements SurrogateExpression { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( Expression.class, @@ -64,7 +67,11 @@ public class MedianAbsoluteDeviation extends NumericAggregate implements Surroga ), } ) public MedianAbsoluteDeviation(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) { - super(source, field); + this(source, field, Literal.TRUE); + } + + public MedianAbsoluteDeviation(Source source, Expression field, Expression filter) { + super(source, field, filter, emptyList()); } private MedianAbsoluteDeviation(StreamInput in) throws IOException { @@ -78,12 +85,17 @@ public String getWriteableName() { @Override protected NodeInfo info() { - return NodeInfo.create(this, MedianAbsoluteDeviation::new, field()); + return NodeInfo.create(this, MedianAbsoluteDeviation::new, field(), filter()); } @Override public MedianAbsoluteDeviation replaceChildren(List newChildren) { - return new MedianAbsoluteDeviation(source(), newChildren.get(0)); + return new MedianAbsoluteDeviation(source(), newChildren.get(0), newChildren.get(1)); + } + + @Override + public MedianAbsoluteDeviation withFilter(Expression filter) { + return new MedianAbsoluteDeviation(source(), field(), filter); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java index 6866811995059..ce69decca8e81 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java @@ -18,6 +18,7 @@ import org.elasticsearch.compute.aggregation.MinLongAggregatorFunctionSupplier; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -32,6 +33,7 @@ import java.io.IOException; import java.util.List; +import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; import static org.elasticsearch.xpack.esql.core.type.DataType.isRepresentable; @@ -61,7 +63,11 @@ public Min( type = { "boolean", "double", "integer", "long", "date", "ip", "keyword", "text", "long", "version" } ) Expression field ) { - super(source, field); + this(source, field, Literal.TRUE); + } + + public Min(Source source, Expression field, Expression filter) { + super(source, field, filter, emptyList()); } private Min(StreamInput in) throws IOException { @@ -75,12 +81,17 @@ public String getWriteableName() { @Override protected NodeInfo info() { - return NodeInfo.create(this, Min::new, field()); + return NodeInfo.create(this, Min::new, field(), filter()); } @Override public Min replaceChildren(List newChildren) { - return new Min(source(), newChildren.get(0)); + return new Min(source(), newChildren.get(0), newChildren.get(1)); + } + + @Override + public Min withFilter(Expression filter) { + return new Min(source(), field(), filter); } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/NumericAggregate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/NumericAggregate.java index e7825a1d11704..5c639c465c649 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/NumericAggregate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/NumericAggregate.java @@ -49,6 +49,10 @@ public abstract class NumericAggregate extends AggregateFunction implements ToAg super(source, field, parameters); } + NumericAggregate(Source source, Expression field, Expression filter, List parameters) { + super(source, field, filter, parameters); + } + NumericAggregate(Source source, Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java index 0d5dd4b66501c..febd9f28b2291 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.expression.function.aggregate; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -15,6 +16,7 @@ import org.elasticsearch.compute.aggregation.PercentileIntAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.PercentileLongAggregatorFunctionSupplier; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; import 
org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -29,6 +31,7 @@ import java.io.IOException; import java.util.List; +import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isFoldable; @@ -77,19 +80,30 @@ public Percentile( @Param(name = "number", type = { "double", "integer", "long" }) Expression field, @Param(name = "percentile", type = { "double", "integer", "long" }) Expression percentile ) { - super(source, field, List.of(percentile)); + this(source, field, Literal.TRUE, percentile); + } + + public Percentile(Source source, Expression field, Expression filter, Expression percentile) { + super(source, field, filter, singletonList(percentile)); this.percentile = percentile; } private Percentile(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)); + this( + Source.readFrom((PlanStreamInput) in), + in.readNamedWriteable(Expression.class), + in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) + ? in.readNamedWriteable(Expression.class) + : Literal.TRUE, + in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) + ? 
in.readNamedWriteableCollectionAsList(Expression.class).get(0) + : in.readNamedWriteable(Expression.class) + ); } @Override - public void writeTo(StreamOutput out) throws IOException { - Source.EMPTY.writeTo(out); - out.writeNamedWriteable(children().get(0)); - out.writeNamedWriteable(children().get(1)); + protected void deprecatedWriteParams(StreamOutput out) throws IOException { + out.writeNamedWriteable(percentile); } @Override @@ -99,12 +113,17 @@ public String getWriteableName() { @Override protected NodeInfo info() { - return NodeInfo.create(this, Percentile::new, field(), percentile); + return NodeInfo.create(this, Percentile::new, field(), filter(), percentile); } @Override public Percentile replaceChildren(List newChildren) { - return new Percentile(source(), newChildren.get(0), newChildren.get(1)); + return new Percentile(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2)); + } + + @Override + public Percentile withFilter(Expression filter) { + return new Percentile(source(), field(), filter, percentile); } public Expression percentile() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java index 135264c448f10..b7b04658f8d58 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.expression.function.aggregate; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -17,6 +18,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import 
org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -34,6 +36,7 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; +import static org.elasticsearch.xpack.esql.core.util.CollectionUtils.nullSafeList; public class Rate extends AggregateFunction implements OptionalArgument, ToAggregator { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Rate", Rate::new); @@ -53,7 +56,16 @@ public Rate( Expression timestamp, @Param(optional = true, name = "unit", type = { "time_duration" }, description = "the unit") Expression unit ) { - super(source, field, unit != null ? List.of(timestamp, unit) : List.of(timestamp)); + this(source, field, Literal.TRUE, timestamp, unit); + } + + // compatibility constructor used when reading from the stream + private Rate(Source source, Expression field, Expression filter, List children) { + this(source, field, filter, children.get(0), children.size() > 1 ? children.get(1) : null); + } + + private Rate(Source source, Expression field, Expression filter, Expression timestamp, Expression unit) { + super(source, field, filter, unit != null ? 
List.of(timestamp, unit) : List.of(timestamp)); this.timestamp = timestamp; this.unit = unit; } @@ -62,15 +74,17 @@ public Rate(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), - in.readNamedWriteable(Expression.class), - in.readOptionalNamedWriteable(Expression.class) + in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) + ? in.readNamedWriteable(Expression.class) + : Literal.TRUE, + in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) + ? in.readNamedWriteableCollectionAsList(Expression.class) + : nullSafeList(in.readNamedWriteable(Expression.class), in.readOptionalNamedWriteable(Expression.class)) ); } @Override - public void writeTo(StreamOutput out) throws IOException { - source().writeTo(out); - out.writeNamedWriteable(field()); + protected void deprecatedWriteParams(StreamOutput out) throws IOException { out.writeNamedWriteable(timestamp); out.writeOptionalNamedWriteable(unit); } @@ -92,20 +106,25 @@ protected NodeInfo info() { @Override public Rate replaceChildren(List newChildren) { if (unit != null) { - if (newChildren.size() == 3) { - return new Rate(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2)); + if (newChildren.size() == 4) { + return new Rate(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2), newChildren.get(3)); } - assert false : "expected 3 children for field, @timestamp, and unit; got " + newChildren; - throw new IllegalArgumentException("expected 3 children for field, @timestamp, and unit; got " + newChildren); + assert false : "expected 4 children for field, filter, @timestamp, and unit; got " + newChildren; + throw new IllegalArgumentException("expected 4 children for field, filter, @timestamp, and unit; got " + newChildren); } else { - if (newChildren.size() == 2) { - return new Rate(source(), newChildren.get(0), newChildren.get(1), null); + if (newChildren.size() == 3) { + 
return new Rate(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2), null); } - assert false : "expected 2 children for field and @timestamp; got " + newChildren; - throw new IllegalArgumentException("expected 2 children for field and @timestamp; got " + newChildren); + assert false : "expected 3 children for field, filter and @timestamp; got " + newChildren; + throw new IllegalArgumentException("expected 3 children for field, filter and @timestamp; got " + newChildren); } } + @Override + public Rate withFilter(Expression filter) { + return new Rate(source(), field(), filter, timestamp, unit); + } + @Override public DataType dataType() { return DataType.DOUBLE; @@ -149,11 +168,6 @@ long unitInMillis() { throw new IllegalArgumentException("function [" + sourceText() + "] has invalid unit [" + unit.sourceText() + "]"); } - @Override - public List inputExpressions() { - return List.of(field(), timestamp); - } - @Override public AggregatorFunctionSupplier supplier(List inputChannels) { if (inputChannels.size() != 2 && inputChannels.size() != 3) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialAggregateFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialAggregateFunction.java index 5cb7edf2581d5..87eec540932b1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialAggregateFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialAggregateFunction.java @@ -14,6 +14,8 @@ import java.io.IOException; import java.util.Objects; +import static java.util.Collections.emptyList; + /** * All spatial aggregate functions extend this class to enable the planning of reading from doc values for higher performance. 
* The AggregateMapper class will generate multiple aggregation functions for each combination, allowing the planner to @@ -22,8 +24,8 @@ public abstract class SpatialAggregateFunction extends AggregateFunction { protected final boolean useDocValues; - protected SpatialAggregateFunction(Source source, Expression field, boolean useDocValues) { - super(source, field); + protected SpatialAggregateFunction(Source source, Expression field, Expression filter, boolean useDocValues) { + super(source, field, filter, emptyList()); this.useDocValues = useDocValues; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java index b9cd99f8eb7f0..aad95c07e3492 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java @@ -15,6 +15,7 @@ import org.elasticsearch.compute.aggregation.spatial.SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -46,11 +47,11 @@ public class SpatialCentroid extends SpatialAggregateFunction implements ToAggre examples = @Example(file = "spatial", tag = "st_centroid_agg-airports") ) public SpatialCentroid(Source source, @Param(name = "field", type = { "geo_point", "cartesian_point" }) Expression field) { - super(source, field, false); + this(source, field, Literal.TRUE, false); } - private SpatialCentroid(Source source, 
Expression field, boolean useDocValues) { - super(source, field, useDocValues); + private SpatialCentroid(Source source, Expression field, Expression filter, boolean useDocValues) { + super(source, field, filter, useDocValues); } private SpatialCentroid(StreamInput in) throws IOException { @@ -62,9 +63,14 @@ public String getWriteableName() { return ENTRY.name; } + @Override + public SpatialCentroid withFilter(Expression filter) { + return new SpatialCentroid(source(), field(), filter, useDocValues); + } + @Override public SpatialCentroid withDocValues() { - return new SpatialCentroid(source(), field(), true); + return new SpatialCentroid(source(), field(), filter(), true); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java index 4f85a15732a6f..37c2abaae1e4e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java @@ -28,6 +28,7 @@ import java.io.IOException; import java.util.List; +import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; @@ -53,7 +54,11 @@ public class Sum extends NumericAggregate implements SurrogateExpression { ) } ) public Sum(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) { - super(source, field); + this(source, field, Literal.TRUE); + } + + public Sum(Source source, Expression field, Expression filter) { + super(source, field, filter, emptyList()); } private Sum(StreamInput in) throws IOException { @@ -67,12 +72,17 @@ public String getWriteableName() { 
@Override protected NodeInfo info() { - return NodeInfo.create(this, Sum::new, field()); + return NodeInfo.create(this, Sum::new, field(), filter()); } @Override public Sum replaceChildren(List newChildren) { - return new Sum(source(), newChildren.get(0)); + return new Sum(source(), newChildren.get(0), newChildren.get(1)); + } + + @Override + public Sum withFilter(Expression filter) { + return new Sum(source(), field(), filter); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java index c1da400185944..cffac616b3c8c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.expression.function.aggregate; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -21,6 +22,7 @@ import org.elasticsearch.compute.aggregation.ToPartialGroupingAggregatorFunction; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -66,18 +68,29 @@ public class ToPartial extends AggregateFunction implements ToAggregator { private final Expression function; public ToPartial(Source source, Expression field, Expression function) { - super(source, field, List.of(function)); + this(source, field, Literal.TRUE, function); + } + + 
public ToPartial(Source source, Expression field, Expression filter, Expression function) { + super(source, field, filter, List.of(function)); this.function = function; } private ToPartial(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)); + this( + Source.readFrom((PlanStreamInput) in), + in.readNamedWriteable(Expression.class), + in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) + ? in.readNamedWriteable(Expression.class) + : Literal.TRUE, + in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) + ? in.readNamedWriteableCollectionAsList(Expression.class).get(0) + : in.readNamedWriteable(Expression.class) + ); } @Override - public void writeTo(StreamOutput out) throws IOException { - source().writeTo(out); - out.writeNamedWriteable(field()); + protected void deprecatedWriteParams(StreamOutput out) throws IOException { out.writeNamedWriteable(function); } @@ -102,12 +115,17 @@ protected TypeResolution resolveType() { @Override public Expression replaceChildren(List newChildren) { - return new ToPartial(source(), newChildren.get(0), newChildren.get(1)); + return new ToPartial(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2)); + } + + @Override + public ToPartial withFilter(Expression filter) { + return new ToPartial(source(), field(), filter(), function); } @Override - protected NodeInfo info() { - return NodeInfo.create(this, ToPartial::new, field(), function); + protected NodeInfo info() { + return NodeInfo.create(this, ToPartial::new, field(), filter(), function); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java index cb1b0f0cad895..4f81e0a897f9c 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.expression.function.aggregate; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -20,6 +21,7 @@ import org.elasticsearch.compute.aggregation.TopLongAggregatorFunctionSupplier; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -31,9 +33,9 @@ import org.elasticsearch.xpack.esql.planner.ToAggregator; import java.io.IOException; -import java.util.Arrays; import java.util.List; +import static java.util.Arrays.asList; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -68,26 +70,37 @@ public Top( description = "The order to calculate the top values. Either `asc` or `desc`." 
) Expression order ) { - super(source, field, Arrays.asList(limit, order)); + this(source, field, Literal.TRUE, limit, order); + } + + public Top(Source source, Expression field, Expression filter, Expression limit, Expression order) { + super(source, field, filter, asList(limit, order)); } private Top(StreamInput in) throws IOException { - this( + super( Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), - in.readNamedWriteable(Expression.class), - in.readNamedWriteable(Expression.class) + in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) + ? in.readNamedWriteable(Expression.class) + : Literal.TRUE, + in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) + ? in.readNamedWriteableCollectionAsList(Expression.class) + : asList(in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)) ); } @Override - public void writeTo(StreamOutput out) throws IOException { - source().writeTo(out); - List fields = children(); - assert fields.size() == 3; - out.writeNamedWriteable(fields.get(0)); - out.writeNamedWriteable(fields.get(1)); - out.writeNamedWriteable(fields.get(2)); + protected void deprecatedWriteParams(StreamOutput out) throws IOException { + List params = parameters(); + assert params.size() == 2; + out.writeNamedWriteable(params.get(0)); + out.writeNamedWriteable(params.get(1)); + } + + @Override + public Top withFilter(Expression filter) { + return new Top(source(), field(), filter, limitField(), orderField()); } @Override @@ -167,12 +180,12 @@ public DataType dataType() { @Override protected NodeInfo info() { - return NodeInfo.create(this, Top::new, children().get(0), children().get(1), children().get(2)); + return NodeInfo.create(this, Top::new, field(), filter(), limitField(), orderField()); } @Override public Top replaceChildren(List newChildren) { - return new Top(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2)); + return new 
Top(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2), newChildren.get(3)); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java index 136e1233601f9..a844b981c95d6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java @@ -17,6 +17,7 @@ import org.elasticsearch.compute.aggregation.ValuesLongAggregatorFunctionSupplier; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -29,6 +30,7 @@ import java.io.IOException; import java.util.List; +import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; @@ -56,7 +58,11 @@ public Values( Source source, @Param(name = "field", type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }) Expression v ) { - super(source, v); + this(source, v, Literal.TRUE); + } + + public Values(Source source, Expression field, Expression filter) { + super(source, field, filter, emptyList()); } private Values(StreamInput in) throws IOException { @@ -70,12 +76,17 @@ public String getWriteableName() { @Override protected NodeInfo info() { - return NodeInfo.create(this, Values::new, field()); + return NodeInfo.create(this, Values::new, field(), filter()); } @Override public 
Values replaceChildren(List newChildren) { - return new Values(source(), newChildren.get(0)); + return new Values(source(), newChildren.get(0), newChildren.get(1)); + } + + @Override + public Values withFilter(Expression filter) { + return new Values(source(), field(), filter); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java index 23a20d9897e72..dbcc50cea3b9b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java @@ -7,11 +7,13 @@ package org.elasticsearch.xpack.esql.expression.function.aggregate; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.capabilities.Validatable; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -54,21 +56,30 @@ public WeightedAvg( @Param(name = "number", type = { "double", "integer", "long" }, description = "A numeric value.") Expression field, @Param(name = "weight", type = { "double", "integer", "long" }, description = "A numeric weight.") Expression weight ) { - super(source, field, List.of(weight)); + this(source, field, Literal.TRUE, weight); + } + + public WeightedAvg(Source source, Expression field, Expression filter, Expression weight) { + super(source, field, filter, List.of(weight)); this.weight = weight; } 
private WeightedAvg(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)); + this( + Source.readFrom((PlanStreamInput) in), + in.readNamedWriteable(Expression.class), + in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) + ? in.readNamedWriteable(Expression.class) + : Literal.TRUE, + in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) + ? in.readNamedWriteableCollectionAsList(Expression.class).get(0) + : in.readNamedWriteable(Expression.class) + ); } @Override - public void writeTo(StreamOutput out) throws IOException { - source().writeTo(out); - List fields = children(); - assert fields.size() == 2; - out.writeNamedWriteable(fields.get(0)); - out.writeNamedWriteable(fields.get(1)); + protected void deprecatedWriteParams(StreamOutput out) throws IOException { + out.writeNamedWriteable(weight); } @Override @@ -121,12 +132,17 @@ public DataType dataType() { @Override protected NodeInfo info() { - return NodeInfo.create(this, WeightedAvg::new, field(), weight); + return NodeInfo.create(this, WeightedAvg::new, field(), filter(), weight); } @Override public WeightedAvg replaceChildren(List newChildren) { - return new WeightedAvg(source(), newChildren.get(0), newChildren.get(1)); + return new WeightedAvg(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2)); + } + + @Override + public WeightedAvg withFilter(Expression filter) { + return new WeightedAvg(source(), field(), filter, weight()); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index bfbf5a8f0c66f..a1da269f896da 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -53,6 +53,7 @@ import org.elasticsearch.xpack.esql.optimizer.rules.logical.SkipQueryOnEmptyMappings; import org.elasticsearch.xpack.esql.optimizer.rules.logical.SkipQueryOnLimitZero; import org.elasticsearch.xpack.esql.optimizer.rules.logical.SplitInWithFoldableValue; +import org.elasticsearch.xpack.esql.optimizer.rules.logical.SubstituteFilteredExpression; import org.elasticsearch.xpack.esql.optimizer.rules.logical.SubstituteSpatialSurrogates; import org.elasticsearch.xpack.esql.optimizer.rules.logical.SubstituteSurrogates; import org.elasticsearch.xpack.esql.optimizer.rules.logical.TranslateMetricsAggregate; @@ -122,6 +123,9 @@ protected static Batch substitutions() { "Substitutions", Limiter.ONCE, new ReplaceLookupWithJoin(), + // translate filtered expressions into aggregate with filters - can't use surrogate expressions because it was + // retrofitted for constant folding - this needs to be fixed + new SubstituteFilteredExpression(), new RemoveStatsOverride(), // first extract nested expressions inside aggs new ReplaceStatsNestedExpressionWithEval(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/FoldNull.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/FoldNull.java index 0561865213a1b..0f08cd66444a3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/FoldNull.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/FoldNull.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.Nullability; +import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; import 
org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; public class FoldNull extends OptimizerRules.OptimizerExpressionRule { @@ -23,6 +24,16 @@ public FoldNull() { @Override public Expression rule(Expression e) { Expression result = tryReplaceIsNullIsNotNull(e); + + // convert an aggregate null filter into a false + // perform this early to prevent the rule from converting the null filter into nullifying the whole expression + // P.S. this could be done inside the Aggregate but this place better centralizes the logic + if (e instanceof AggregateFunction agg) { + if (Expressions.isNull(agg.filter())) { + return agg.withFilter(Literal.of(agg.filter(), false)); + } + } + if (result != e) { return result; } else if (e instanceof In in) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceStatsAggExpressionWithEval.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceStatsAggExpressionWithEval.java index d74811518624a..559546d48eb7d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceStatsAggExpressionWithEval.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceStatsAggExpressionWithEval.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.util.CollectionUtils; import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; @@ -25,8 +24,6 @@ import java.util.List; import java.util.Map; -import static java.util.Collections.singleton; - /** * Replace nested expressions over aggregates with synthetic eval post the 
aggregation * stats a = sum(a) + min(b) by x @@ -71,16 +68,13 @@ protected LogicalPlan rule(Aggregate aggregate) { for (NamedExpression agg : aggs) { if (agg instanceof Alias as) { - // if the child a nested expression + // use intermediate variable to mark child as final for lambda use Expression child = as.child(); // common case - handle duplicates if (child instanceof AggregateFunction af) { - AggregateFunction canonical = (AggregateFunction) af.canonical(); - Expression field = canonical.field().transformUp(e -> aliases.resolve(e, e)); - canonical = (AggregateFunction) canonical.replaceChildren( - CollectionUtils.combine(singleton(field), canonical.parameters()) - ); + // canonical representation, with resolved aliases + AggregateFunction canonical = (AggregateFunction) af.canonical().transformUp(e -> aliases.resolve(e, e)); Alias found = rootAggs.get(canonical); // aggregate is new @@ -130,7 +124,7 @@ protected LogicalPlan rule(Aggregate aggregate) { LogicalPlan plan = aggregate; if (changed.get()) { Source source = aggregate.source(); - plan = new Aggregate(source, aggregate.child(), aggregate.aggregateType(), aggregate.groupings(), newAggs); + plan = aggregate.with(aggregate.child(), aggregate.groupings(), newAggs); if (newEvals.size() > 0) { plan = new Eval(source, plan, newEvals); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SubstituteFilteredExpression.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SubstituteFilteredExpression.java new file mode 100644 index 0000000000000..c8369d2b08a34 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SubstituteFilteredExpression.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.optimizer.rules.logical; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.expression.function.aggregate.FilteredExpression; +import org.elasticsearch.xpack.esql.optimizer.rules.logical.OptimizerRules.OptimizerExpressionRule; +import org.elasticsearch.xpack.esql.optimizer.rules.logical.OptimizerRules.TransformDirection; + +/** + * This rule should not be needed - the substitute infrastructure should be enough. + */ +public class SubstituteFilteredExpression extends OptimizerExpressionRule { + public SubstituteFilteredExpression() { + super(TransformDirection.UP); + } + + @Override + protected Expression rule(FilteredExpression filteredExpression) { + return filteredExpression.surrogate(); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index b8251869c48cd..aa6ddfb433d23 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -318,6 +318,7 @@ ASTERISK SLASH PERCENT MATCH +NESTED_WHERE NAMED_OR_POSITIONAL_PARAM OPENING_BRACKET CLOSING_BRACKET @@ -465,4 +466,4 @@ METRICS_MODE CLOSING_METRICS_MODE atn: -[4, 0, 120, 1465, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 
7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 
159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 4, 19, 576, 8, 19, 11, 19, 12, 19, 577, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 586, 8, 20, 10, 20, 12, 20, 589, 9, 20, 1, 20, 3, 20, 592, 8, 20, 1, 20, 3, 20, 595, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 604, 8, 21, 10, 21, 12, 21, 607, 9, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 4, 
22, 615, 8, 22, 11, 22, 12, 22, 616, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 28, 1, 28, 3, 28, 636, 8, 28, 1, 28, 4, 28, 639, 8, 28, 11, 28, 12, 28, 640, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 3, 31, 650, 8, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 657, 8, 33, 1, 34, 1, 34, 1, 34, 5, 34, 662, 8, 34, 10, 34, 12, 34, 665, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 673, 8, 34, 10, 34, 12, 34, 676, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 3, 34, 683, 8, 34, 1, 34, 3, 34, 686, 8, 34, 3, 34, 688, 8, 34, 1, 35, 4, 35, 691, 8, 35, 11, 35, 12, 35, 692, 1, 36, 4, 36, 696, 8, 36, 11, 36, 12, 36, 697, 1, 36, 1, 36, 5, 36, 702, 8, 36, 10, 36, 12, 36, 705, 9, 36, 1, 36, 1, 36, 4, 36, 709, 8, 36, 11, 36, 12, 36, 710, 1, 36, 4, 36, 714, 8, 36, 11, 36, 12, 36, 715, 1, 36, 1, 36, 5, 36, 720, 8, 36, 10, 36, 12, 36, 723, 9, 36, 3, 36, 725, 8, 36, 1, 36, 1, 36, 1, 36, 1, 36, 4, 36, 731, 8, 36, 11, 36, 12, 36, 732, 1, 36, 1, 36, 3, 36, 737, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 3, 73, 865, 8, 73, 1, 73, 5, 73, 868, 8, 73, 10, 73, 12, 73, 871, 9, 73, 1, 73, 
1, 73, 4, 73, 875, 8, 73, 11, 73, 12, 73, 876, 3, 73, 879, 8, 73, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 5, 76, 893, 8, 76, 10, 76, 12, 76, 896, 9, 76, 1, 76, 1, 76, 3, 76, 900, 8, 76, 1, 76, 4, 76, 903, 8, 76, 11, 76, 12, 76, 904, 3, 76, 907, 8, 76, 1, 77, 1, 77, 4, 77, 911, 8, 77, 11, 77, 12, 77, 912, 1, 77, 1, 77, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 3, 94, 990, 8, 94, 1, 95, 4, 95, 993, 8, 95, 11, 95, 12, 95, 994, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 3, 106, 1042, 8, 106, 1, 107, 1, 107, 3, 107, 1046, 8, 107, 1, 107, 5, 107, 1049, 8, 107, 10, 107, 12, 107, 1052, 9, 107, 1, 107, 1, 107, 3, 107, 1056, 8, 107, 1, 107, 4, 107, 1059, 8, 107, 11, 107, 12, 107, 1060, 3, 107, 1063, 8, 107, 1, 108, 1, 108, 4, 108, 1067, 8, 108, 11, 108, 12, 108, 1068, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 
1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 128, 4, 128, 1152, 8, 128, 11, 128, 12, 128, 1153, 1, 128, 1, 128, 3, 128, 1158, 8, 128, 1, 128, 4, 128, 1161, 8, 128, 11, 128, 12, 128, 1162, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 4, 161, 1302, 8, 161, 11, 161, 12, 161, 1303, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 
1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 2, 605, 674, 0, 197, 15, 1, 17, 2, 19, 3, 21, 4, 23, 5, 25, 6, 27, 7, 29, 8, 31, 9, 33, 10, 35, 11, 37, 12, 39, 13, 41, 14, 43, 15, 45, 16, 47, 17, 49, 18, 51, 19, 53, 20, 55, 21, 57, 22, 59, 23, 61, 24, 63, 0, 65, 0, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 0, 81, 0, 83, 25, 85, 26, 87, 27, 89, 28, 91, 29, 93, 30, 95, 31, 97, 32, 99, 33, 101, 34, 103, 35, 105, 36, 107, 37, 109, 38, 111, 39, 113, 40, 115, 41, 117, 42, 119, 43, 121, 44, 123, 45, 125, 46, 127, 47, 129, 48, 131, 49, 133, 50, 135, 51, 137, 52, 139, 53, 141, 54, 143, 55, 145, 56, 147, 57, 149, 58, 151, 59, 153, 60, 155, 61, 157, 62, 159, 63, 161, 64, 163, 65, 165, 66, 167, 67, 169, 0, 171, 68, 173, 69, 175, 70, 177, 71, 179, 0, 181, 0, 183, 72, 185, 73, 187, 74, 189, 0, 191, 0, 193, 0, 195, 0, 197, 0, 199, 0, 201, 75, 203, 0, 205, 76, 207, 0, 209, 0, 211, 77, 213, 78, 215, 79, 217, 0, 219, 0, 221, 0, 223, 0, 225, 0, 227, 0, 229, 0, 231, 80, 233, 81, 235, 82, 237, 83, 239, 0, 241, 0, 243, 0, 245, 0, 247, 0, 249, 0, 251, 84, 253, 0, 255, 85, 257, 86, 259, 87, 261, 0, 263, 0, 265, 88, 267, 89, 269, 0, 271, 90, 273, 0, 275, 91, 277, 92, 279, 93, 281, 0, 283, 0, 285, 0, 287, 0, 289, 0, 291, 0, 293, 0, 295, 0, 297, 0, 299, 94, 
301, 95, 303, 96, 305, 0, 307, 0, 309, 0, 311, 0, 313, 0, 315, 0, 317, 97, 319, 98, 321, 99, 323, 0, 325, 100, 327, 101, 329, 102, 331, 103, 333, 0, 335, 104, 337, 105, 339, 106, 341, 107, 343, 108, 345, 0, 347, 0, 349, 0, 351, 0, 353, 0, 355, 0, 357, 0, 359, 109, 361, 110, 363, 111, 365, 0, 367, 0, 369, 0, 371, 0, 373, 112, 375, 113, 377, 114, 379, 0, 381, 0, 383, 0, 385, 115, 387, 116, 389, 117, 391, 0, 393, 0, 395, 118, 397, 119, 399, 120, 401, 0, 403, 0, 405, 0, 407, 0, 15, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 35, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1493, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 
1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 1, 61, 1, 0, 0, 0, 1, 83, 1, 0, 0, 0, 1, 85, 1, 0, 0, 0, 1, 87, 1, 0, 0, 0, 1, 89, 1, 0, 0, 0, 1, 91, 1, 0, 0, 0, 1, 93, 1, 0, 0, 0, 1, 95, 1, 0, 0, 0, 1, 97, 1, 0, 0, 0, 1, 99, 1, 0, 0, 0, 1, 101, 1, 0, 0, 0, 1, 103, 1, 0, 0, 0, 1, 105, 1, 0, 0, 0, 1, 107, 1, 0, 0, 0, 1, 109, 1, 0, 0, 0, 1, 111, 1, 0, 0, 0, 1, 113, 1, 0, 0, 0, 1, 115, 1, 0, 0, 0, 1, 117, 1, 0, 0, 0, 1, 119, 1, 0, 0, 0, 1, 121, 1, 0, 0, 0, 1, 123, 1, 0, 0, 0, 1, 125, 1, 0, 0, 0, 1, 127, 1, 0, 0, 0, 1, 129, 1, 0, 0, 0, 1, 131, 1, 0, 0, 0, 1, 133, 1, 0, 0, 0, 1, 135, 1, 0, 0, 0, 1, 137, 1, 0, 0, 0, 1, 139, 1, 0, 0, 0, 1, 141, 1, 0, 0, 0, 1, 143, 1, 0, 0, 0, 1, 145, 1, 0, 0, 0, 1, 147, 1, 0, 0, 0, 1, 149, 1, 0, 0, 0, 1, 151, 1, 0, 0, 0, 1, 153, 1, 0, 0, 0, 1, 155, 1, 0, 0, 0, 1, 157, 1, 0, 0, 0, 1, 159, 1, 0, 0, 0, 1, 161, 1, 0, 0, 0, 1, 163, 1, 0, 0, 0, 1, 165, 1, 0, 0, 0, 1, 167, 1, 0, 0, 0, 1, 171, 1, 0, 0, 0, 1, 173, 1, 0, 0, 0, 1, 175, 1, 0, 0, 0, 1, 177, 1, 0, 0, 0, 2, 179, 1, 0, 0, 0, 2, 181, 1, 0, 0, 0, 2, 183, 1, 0, 0, 0, 2, 185, 1, 0, 0, 0, 2, 187, 1, 0, 0, 0, 3, 189, 1, 0, 0, 0, 3, 191, 1, 0, 0, 0, 3, 193, 1, 0, 0, 0, 3, 195, 1, 0, 0, 0, 3, 197, 1, 0, 0, 0, 3, 199, 1, 0, 0, 0, 3, 201, 1, 0, 0, 0, 3, 205, 1, 0, 0, 0, 3, 207, 1, 0, 0, 0, 3, 209, 1, 0, 0, 0, 3, 211, 1, 0, 0, 0, 3, 213, 1, 0, 0, 0, 3, 215, 1, 0, 0, 0, 4, 217, 1, 0, 0, 0, 4, 219, 1, 0, 0, 0, 4, 221, 1, 0, 0, 0, 4, 223, 1, 0, 0, 0, 4, 225, 1, 0, 0, 0, 4, 231, 1, 0, 0, 0, 4, 233, 1, 0, 0, 0, 4, 235, 1, 0, 0, 0, 4, 237, 1, 0, 0, 0, 5, 239, 1, 0, 0, 0, 5, 241, 1, 0, 0, 0, 5, 243, 1, 0, 0, 0, 5, 245, 1, 0, 0, 0, 5, 247, 1, 0, 0, 0, 5, 249, 1, 0, 0, 0, 5, 251, 1, 0, 0, 0, 5, 253, 1, 0, 0, 0, 5, 255, 1, 0, 0, 0, 5, 257, 1, 0, 0, 0, 5, 259, 1, 0, 0, 0, 6, 261, 1, 0, 0, 0, 6, 263, 1, 0, 0, 0, 6, 265, 1, 0, 0, 0, 6, 267, 1, 0, 0, 0, 6, 271, 1, 0, 0, 0, 6, 273, 1, 0, 0, 0, 6, 275, 1, 0, 0, 0, 6, 277, 1, 0, 0, 0, 6, 279, 1, 0, 0, 0, 7, 281, 1, 0, 0, 0, 7, 283, 1, 0, 0, 0, 7, 285, 1, 0, 0, 0, 7, 287, 1, 0, 0, 
0, 7, 289, 1, 0, 0, 0, 7, 291, 1, 0, 0, 0, 7, 293, 1, 0, 0, 0, 7, 295, 1, 0, 0, 0, 7, 297, 1, 0, 0, 0, 7, 299, 1, 0, 0, 0, 7, 301, 1, 0, 0, 0, 7, 303, 1, 0, 0, 0, 8, 305, 1, 0, 0, 0, 8, 307, 1, 0, 0, 0, 8, 309, 1, 0, 0, 0, 8, 311, 1, 0, 0, 0, 8, 313, 1, 0, 0, 0, 8, 315, 1, 0, 0, 0, 8, 317, 1, 0, 0, 0, 8, 319, 1, 0, 0, 0, 8, 321, 1, 0, 0, 0, 9, 323, 1, 0, 0, 0, 9, 325, 1, 0, 0, 0, 9, 327, 1, 0, 0, 0, 9, 329, 1, 0, 0, 0, 9, 331, 1, 0, 0, 0, 10, 333, 1, 0, 0, 0, 10, 335, 1, 0, 0, 0, 10, 337, 1, 0, 0, 0, 10, 339, 1, 0, 0, 0, 10, 341, 1, 0, 0, 0, 10, 343, 1, 0, 0, 0, 11, 345, 1, 0, 0, 0, 11, 347, 1, 0, 0, 0, 11, 349, 1, 0, 0, 0, 11, 351, 1, 0, 0, 0, 11, 353, 1, 0, 0, 0, 11, 355, 1, 0, 0, 0, 11, 357, 1, 0, 0, 0, 11, 359, 1, 0, 0, 0, 11, 361, 1, 0, 0, 0, 11, 363, 1, 0, 0, 0, 12, 365, 1, 0, 0, 0, 12, 367, 1, 0, 0, 0, 12, 369, 1, 0, 0, 0, 12, 371, 1, 0, 0, 0, 12, 373, 1, 0, 0, 0, 12, 375, 1, 0, 0, 0, 12, 377, 1, 0, 0, 0, 13, 379, 1, 0, 0, 0, 13, 381, 1, 0, 0, 0, 13, 383, 1, 0, 0, 0, 13, 385, 1, 0, 0, 0, 13, 387, 1, 0, 0, 0, 13, 389, 1, 0, 0, 0, 14, 391, 1, 0, 0, 0, 14, 393, 1, 0, 0, 0, 14, 395, 1, 0, 0, 0, 14, 397, 1, 0, 0, 0, 14, 399, 1, 0, 0, 0, 14, 401, 1, 0, 0, 0, 14, 403, 1, 0, 0, 0, 14, 405, 1, 0, 0, 0, 14, 407, 1, 0, 0, 0, 15, 409, 1, 0, 0, 0, 17, 419, 1, 0, 0, 0, 19, 426, 1, 0, 0, 0, 21, 435, 1, 0, 0, 0, 23, 442, 1, 0, 0, 0, 25, 452, 1, 0, 0, 0, 27, 459, 1, 0, 0, 0, 29, 466, 1, 0, 0, 0, 31, 473, 1, 0, 0, 0, 33, 481, 1, 0, 0, 0, 35, 493, 1, 0, 0, 0, 37, 502, 1, 0, 0, 0, 39, 508, 1, 0, 0, 0, 41, 515, 1, 0, 0, 0, 43, 522, 1, 0, 0, 0, 45, 530, 1, 0, 0, 0, 47, 538, 1, 0, 0, 0, 49, 553, 1, 0, 0, 0, 51, 563, 1, 0, 0, 0, 53, 575, 1, 0, 0, 0, 55, 581, 1, 0, 0, 0, 57, 598, 1, 0, 0, 0, 59, 614, 1, 0, 0, 0, 61, 620, 1, 0, 0, 0, 63, 624, 1, 0, 0, 0, 65, 626, 1, 0, 0, 0, 67, 628, 1, 0, 0, 0, 69, 631, 1, 0, 0, 0, 71, 633, 1, 0, 0, 0, 73, 642, 1, 0, 0, 0, 75, 644, 1, 0, 0, 0, 77, 649, 1, 0, 0, 0, 79, 651, 1, 0, 0, 0, 81, 656, 1, 0, 0, 0, 83, 687, 1, 0, 0, 0, 85, 690, 1, 0, 0, 0, 
87, 736, 1, 0, 0, 0, 89, 738, 1, 0, 0, 0, 91, 741, 1, 0, 0, 0, 93, 745, 1, 0, 0, 0, 95, 749, 1, 0, 0, 0, 97, 751, 1, 0, 0, 0, 99, 754, 1, 0, 0, 0, 101, 756, 1, 0, 0, 0, 103, 761, 1, 0, 0, 0, 105, 763, 1, 0, 0, 0, 107, 769, 1, 0, 0, 0, 109, 775, 1, 0, 0, 0, 111, 778, 1, 0, 0, 0, 113, 781, 1, 0, 0, 0, 115, 786, 1, 0, 0, 0, 117, 791, 1, 0, 0, 0, 119, 793, 1, 0, 0, 0, 121, 797, 1, 0, 0, 0, 123, 802, 1, 0, 0, 0, 125, 808, 1, 0, 0, 0, 127, 811, 1, 0, 0, 0, 129, 813, 1, 0, 0, 0, 131, 819, 1, 0, 0, 0, 133, 821, 1, 0, 0, 0, 135, 826, 1, 0, 0, 0, 137, 829, 1, 0, 0, 0, 139, 832, 1, 0, 0, 0, 141, 835, 1, 0, 0, 0, 143, 837, 1, 0, 0, 0, 145, 840, 1, 0, 0, 0, 147, 842, 1, 0, 0, 0, 149, 845, 1, 0, 0, 0, 151, 847, 1, 0, 0, 0, 153, 849, 1, 0, 0, 0, 155, 851, 1, 0, 0, 0, 157, 853, 1, 0, 0, 0, 159, 855, 1, 0, 0, 0, 161, 878, 1, 0, 0, 0, 163, 880, 1, 0, 0, 0, 165, 885, 1, 0, 0, 0, 167, 906, 1, 0, 0, 0, 169, 908, 1, 0, 0, 0, 171, 916, 1, 0, 0, 0, 173, 918, 1, 0, 0, 0, 175, 922, 1, 0, 0, 0, 177, 926, 1, 0, 0, 0, 179, 930, 1, 0, 0, 0, 181, 935, 1, 0, 0, 0, 183, 940, 1, 0, 0, 0, 185, 944, 1, 0, 0, 0, 187, 948, 1, 0, 0, 0, 189, 952, 1, 0, 0, 0, 191, 957, 1, 0, 0, 0, 193, 961, 1, 0, 0, 0, 195, 965, 1, 0, 0, 0, 197, 969, 1, 0, 0, 0, 199, 973, 1, 0, 0, 0, 201, 977, 1, 0, 0, 0, 203, 989, 1, 0, 0, 0, 205, 992, 1, 0, 0, 0, 207, 996, 1, 0, 0, 0, 209, 1000, 1, 0, 0, 0, 211, 1004, 1, 0, 0, 0, 213, 1008, 1, 0, 0, 0, 215, 1012, 1, 0, 0, 0, 217, 1016, 1, 0, 0, 0, 219, 1021, 1, 0, 0, 0, 221, 1025, 1, 0, 0, 0, 223, 1029, 1, 0, 0, 0, 225, 1033, 1, 0, 0, 0, 227, 1041, 1, 0, 0, 0, 229, 1062, 1, 0, 0, 0, 231, 1066, 1, 0, 0, 0, 233, 1070, 1, 0, 0, 0, 235, 1074, 1, 0, 0, 0, 237, 1078, 1, 0, 0, 0, 239, 1082, 1, 0, 0, 0, 241, 1087, 1, 0, 0, 0, 243, 1091, 1, 0, 0, 0, 245, 1095, 1, 0, 0, 0, 247, 1099, 1, 0, 0, 0, 249, 1103, 1, 0, 0, 0, 251, 1107, 1, 0, 0, 0, 253, 1110, 1, 0, 0, 0, 255, 1114, 1, 0, 0, 0, 257, 1118, 1, 0, 0, 0, 259, 1122, 1, 0, 0, 0, 261, 1126, 1, 0, 0, 0, 263, 1131, 1, 0, 0, 0, 265, 1136, 1, 0, 0, 
0, 267, 1141, 1, 0, 0, 0, 269, 1148, 1, 0, 0, 0, 271, 1157, 1, 0, 0, 0, 273, 1164, 1, 0, 0, 0, 275, 1168, 1, 0, 0, 0, 277, 1172, 1, 0, 0, 0, 279, 1176, 1, 0, 0, 0, 281, 1180, 1, 0, 0, 0, 283, 1186, 1, 0, 0, 0, 285, 1190, 1, 0, 0, 0, 287, 1194, 1, 0, 0, 0, 289, 1198, 1, 0, 0, 0, 291, 1202, 1, 0, 0, 0, 293, 1206, 1, 0, 0, 0, 295, 1210, 1, 0, 0, 0, 297, 1214, 1, 0, 0, 0, 299, 1218, 1, 0, 0, 0, 301, 1222, 1, 0, 0, 0, 303, 1226, 1, 0, 0, 0, 305, 1230, 1, 0, 0, 0, 307, 1235, 1, 0, 0, 0, 309, 1239, 1, 0, 0, 0, 311, 1243, 1, 0, 0, 0, 313, 1247, 1, 0, 0, 0, 315, 1251, 1, 0, 0, 0, 317, 1255, 1, 0, 0, 0, 319, 1259, 1, 0, 0, 0, 321, 1263, 1, 0, 0, 0, 323, 1267, 1, 0, 0, 0, 325, 1272, 1, 0, 0, 0, 327, 1277, 1, 0, 0, 0, 329, 1281, 1, 0, 0, 0, 331, 1285, 1, 0, 0, 0, 333, 1289, 1, 0, 0, 0, 335, 1294, 1, 0, 0, 0, 337, 1301, 1, 0, 0, 0, 339, 1305, 1, 0, 0, 0, 341, 1309, 1, 0, 0, 0, 343, 1313, 1, 0, 0, 0, 345, 1317, 1, 0, 0, 0, 347, 1322, 1, 0, 0, 0, 349, 1326, 1, 0, 0, 0, 351, 1330, 1, 0, 0, 0, 353, 1334, 1, 0, 0, 0, 355, 1339, 1, 0, 0, 0, 357, 1343, 1, 0, 0, 0, 359, 1347, 1, 0, 0, 0, 361, 1351, 1, 0, 0, 0, 363, 1355, 1, 0, 0, 0, 365, 1359, 1, 0, 0, 0, 367, 1365, 1, 0, 0, 0, 369, 1369, 1, 0, 0, 0, 371, 1373, 1, 0, 0, 0, 373, 1377, 1, 0, 0, 0, 375, 1381, 1, 0, 0, 0, 377, 1385, 1, 0, 0, 0, 379, 1389, 1, 0, 0, 0, 381, 1394, 1, 0, 0, 0, 383, 1400, 1, 0, 0, 0, 385, 1406, 1, 0, 0, 0, 387, 1410, 1, 0, 0, 0, 389, 1414, 1, 0, 0, 0, 391, 1418, 1, 0, 0, 0, 393, 1424, 1, 0, 0, 0, 395, 1430, 1, 0, 0, 0, 397, 1434, 1, 0, 0, 0, 399, 1438, 1, 0, 0, 0, 401, 1442, 1, 0, 0, 0, 403, 1448, 1, 0, 0, 0, 405, 1454, 1, 0, 0, 0, 407, 1460, 1, 0, 0, 0, 409, 410, 7, 0, 0, 0, 410, 411, 7, 1, 0, 0, 411, 412, 7, 2, 0, 0, 412, 413, 7, 2, 0, 0, 413, 414, 7, 3, 0, 0, 414, 415, 7, 4, 0, 0, 415, 416, 7, 5, 0, 0, 416, 417, 1, 0, 0, 0, 417, 418, 6, 0, 0, 0, 418, 16, 1, 0, 0, 0, 419, 420, 7, 0, 0, 0, 420, 421, 7, 6, 0, 0, 421, 422, 7, 7, 0, 0, 422, 423, 7, 8, 0, 0, 423, 424, 1, 0, 0, 0, 424, 425, 6, 1, 1, 0, 425, 18, 1, 
0, 0, 0, 426, 427, 7, 3, 0, 0, 427, 428, 7, 9, 0, 0, 428, 429, 7, 6, 0, 0, 429, 430, 7, 1, 0, 0, 430, 431, 7, 4, 0, 0, 431, 432, 7, 10, 0, 0, 432, 433, 1, 0, 0, 0, 433, 434, 6, 2, 2, 0, 434, 20, 1, 0, 0, 0, 435, 436, 7, 3, 0, 0, 436, 437, 7, 11, 0, 0, 437, 438, 7, 12, 0, 0, 438, 439, 7, 13, 0, 0, 439, 440, 1, 0, 0, 0, 440, 441, 6, 3, 0, 0, 441, 22, 1, 0, 0, 0, 442, 443, 7, 3, 0, 0, 443, 444, 7, 14, 0, 0, 444, 445, 7, 8, 0, 0, 445, 446, 7, 13, 0, 0, 446, 447, 7, 12, 0, 0, 447, 448, 7, 1, 0, 0, 448, 449, 7, 9, 0, 0, 449, 450, 1, 0, 0, 0, 450, 451, 6, 4, 3, 0, 451, 24, 1, 0, 0, 0, 452, 453, 7, 15, 0, 0, 453, 454, 7, 6, 0, 0, 454, 455, 7, 7, 0, 0, 455, 456, 7, 16, 0, 0, 456, 457, 1, 0, 0, 0, 457, 458, 6, 5, 4, 0, 458, 26, 1, 0, 0, 0, 459, 460, 7, 17, 0, 0, 460, 461, 7, 6, 0, 0, 461, 462, 7, 7, 0, 0, 462, 463, 7, 18, 0, 0, 463, 464, 1, 0, 0, 0, 464, 465, 6, 6, 0, 0, 465, 28, 1, 0, 0, 0, 466, 467, 7, 18, 0, 0, 467, 468, 7, 3, 0, 0, 468, 469, 7, 3, 0, 0, 469, 470, 7, 8, 0, 0, 470, 471, 1, 0, 0, 0, 471, 472, 6, 7, 1, 0, 472, 30, 1, 0, 0, 0, 473, 474, 7, 13, 0, 0, 474, 475, 7, 1, 0, 0, 475, 476, 7, 16, 0, 0, 476, 477, 7, 1, 0, 0, 477, 478, 7, 5, 0, 0, 478, 479, 1, 0, 0, 0, 479, 480, 6, 8, 0, 0, 480, 32, 1, 0, 0, 0, 481, 482, 7, 16, 0, 0, 482, 483, 7, 11, 0, 0, 483, 484, 5, 95, 0, 0, 484, 485, 7, 3, 0, 0, 485, 486, 7, 14, 0, 0, 486, 487, 7, 8, 0, 0, 487, 488, 7, 12, 0, 0, 488, 489, 7, 9, 0, 0, 489, 490, 7, 0, 0, 0, 490, 491, 1, 0, 0, 0, 491, 492, 6, 9, 5, 0, 492, 34, 1, 0, 0, 0, 493, 494, 7, 6, 0, 0, 494, 495, 7, 3, 0, 0, 495, 496, 7, 9, 0, 0, 496, 497, 7, 12, 0, 0, 497, 498, 7, 16, 0, 0, 498, 499, 7, 3, 0, 0, 499, 500, 1, 0, 0, 0, 500, 501, 6, 10, 6, 0, 501, 36, 1, 0, 0, 0, 502, 503, 7, 6, 0, 0, 503, 504, 7, 7, 0, 0, 504, 505, 7, 19, 0, 0, 505, 506, 1, 0, 0, 0, 506, 507, 6, 11, 0, 0, 507, 38, 1, 0, 0, 0, 508, 509, 7, 2, 0, 0, 509, 510, 7, 10, 0, 0, 510, 511, 7, 7, 0, 0, 511, 512, 7, 19, 0, 0, 512, 513, 1, 0, 0, 0, 513, 514, 6, 12, 7, 0, 514, 40, 1, 0, 0, 0, 515, 516, 7, 2, 
0, 0, 516, 517, 7, 7, 0, 0, 517, 518, 7, 6, 0, 0, 518, 519, 7, 5, 0, 0, 519, 520, 1, 0, 0, 0, 520, 521, 6, 13, 0, 0, 521, 42, 1, 0, 0, 0, 522, 523, 7, 2, 0, 0, 523, 524, 7, 5, 0, 0, 524, 525, 7, 12, 0, 0, 525, 526, 7, 5, 0, 0, 526, 527, 7, 2, 0, 0, 527, 528, 1, 0, 0, 0, 528, 529, 6, 14, 0, 0, 529, 44, 1, 0, 0, 0, 530, 531, 7, 19, 0, 0, 531, 532, 7, 10, 0, 0, 532, 533, 7, 3, 0, 0, 533, 534, 7, 6, 0, 0, 534, 535, 7, 3, 0, 0, 535, 536, 1, 0, 0, 0, 536, 537, 6, 15, 0, 0, 537, 46, 1, 0, 0, 0, 538, 539, 4, 16, 0, 0, 539, 540, 7, 1, 0, 0, 540, 541, 7, 9, 0, 0, 541, 542, 7, 13, 0, 0, 542, 543, 7, 1, 0, 0, 543, 544, 7, 9, 0, 0, 544, 545, 7, 3, 0, 0, 545, 546, 7, 2, 0, 0, 546, 547, 7, 5, 0, 0, 547, 548, 7, 12, 0, 0, 548, 549, 7, 5, 0, 0, 549, 550, 7, 2, 0, 0, 550, 551, 1, 0, 0, 0, 551, 552, 6, 16, 0, 0, 552, 48, 1, 0, 0, 0, 553, 554, 4, 17, 1, 0, 554, 555, 7, 13, 0, 0, 555, 556, 7, 7, 0, 0, 556, 557, 7, 7, 0, 0, 557, 558, 7, 18, 0, 0, 558, 559, 7, 20, 0, 0, 559, 560, 7, 8, 0, 0, 560, 561, 1, 0, 0, 0, 561, 562, 6, 17, 8, 0, 562, 50, 1, 0, 0, 0, 563, 564, 4, 18, 2, 0, 564, 565, 7, 16, 0, 0, 565, 566, 7, 3, 0, 0, 566, 567, 7, 5, 0, 0, 567, 568, 7, 6, 0, 0, 568, 569, 7, 1, 0, 0, 569, 570, 7, 4, 0, 0, 570, 571, 7, 2, 0, 0, 571, 572, 1, 0, 0, 0, 572, 573, 6, 18, 9, 0, 573, 52, 1, 0, 0, 0, 574, 576, 8, 21, 0, 0, 575, 574, 1, 0, 0, 0, 576, 577, 1, 0, 0, 0, 577, 575, 1, 0, 0, 0, 577, 578, 1, 0, 0, 0, 578, 579, 1, 0, 0, 0, 579, 580, 6, 19, 0, 0, 580, 54, 1, 0, 0, 0, 581, 582, 5, 47, 0, 0, 582, 583, 5, 47, 0, 0, 583, 587, 1, 0, 0, 0, 584, 586, 8, 22, 0, 0, 585, 584, 1, 0, 0, 0, 586, 589, 1, 0, 0, 0, 587, 585, 1, 0, 0, 0, 587, 588, 1, 0, 0, 0, 588, 591, 1, 0, 0, 0, 589, 587, 1, 0, 0, 0, 590, 592, 5, 13, 0, 0, 591, 590, 1, 0, 0, 0, 591, 592, 1, 0, 0, 0, 592, 594, 1, 0, 0, 0, 593, 595, 5, 10, 0, 0, 594, 593, 1, 0, 0, 0, 594, 595, 1, 0, 0, 0, 595, 596, 1, 0, 0, 0, 596, 597, 6, 20, 10, 0, 597, 56, 1, 0, 0, 0, 598, 599, 5, 47, 0, 0, 599, 600, 5, 42, 0, 0, 600, 605, 1, 0, 0, 0, 601, 604, 3, 
57, 21, 0, 602, 604, 9, 0, 0, 0, 603, 601, 1, 0, 0, 0, 603, 602, 1, 0, 0, 0, 604, 607, 1, 0, 0, 0, 605, 606, 1, 0, 0, 0, 605, 603, 1, 0, 0, 0, 606, 608, 1, 0, 0, 0, 607, 605, 1, 0, 0, 0, 608, 609, 5, 42, 0, 0, 609, 610, 5, 47, 0, 0, 610, 611, 1, 0, 0, 0, 611, 612, 6, 21, 10, 0, 612, 58, 1, 0, 0, 0, 613, 615, 7, 23, 0, 0, 614, 613, 1, 0, 0, 0, 615, 616, 1, 0, 0, 0, 616, 614, 1, 0, 0, 0, 616, 617, 1, 0, 0, 0, 617, 618, 1, 0, 0, 0, 618, 619, 6, 22, 10, 0, 619, 60, 1, 0, 0, 0, 620, 621, 5, 124, 0, 0, 621, 622, 1, 0, 0, 0, 622, 623, 6, 23, 11, 0, 623, 62, 1, 0, 0, 0, 624, 625, 7, 24, 0, 0, 625, 64, 1, 0, 0, 0, 626, 627, 7, 25, 0, 0, 627, 66, 1, 0, 0, 0, 628, 629, 5, 92, 0, 0, 629, 630, 7, 26, 0, 0, 630, 68, 1, 0, 0, 0, 631, 632, 8, 27, 0, 0, 632, 70, 1, 0, 0, 0, 633, 635, 7, 3, 0, 0, 634, 636, 7, 28, 0, 0, 635, 634, 1, 0, 0, 0, 635, 636, 1, 0, 0, 0, 636, 638, 1, 0, 0, 0, 637, 639, 3, 63, 24, 0, 638, 637, 1, 0, 0, 0, 639, 640, 1, 0, 0, 0, 640, 638, 1, 0, 0, 0, 640, 641, 1, 0, 0, 0, 641, 72, 1, 0, 0, 0, 642, 643, 5, 64, 0, 0, 643, 74, 1, 0, 0, 0, 644, 645, 5, 96, 0, 0, 645, 76, 1, 0, 0, 0, 646, 650, 8, 29, 0, 0, 647, 648, 5, 96, 0, 0, 648, 650, 5, 96, 0, 0, 649, 646, 1, 0, 0, 0, 649, 647, 1, 0, 0, 0, 650, 78, 1, 0, 0, 0, 651, 652, 5, 95, 0, 0, 652, 80, 1, 0, 0, 0, 653, 657, 3, 65, 25, 0, 654, 657, 3, 63, 24, 0, 655, 657, 3, 79, 32, 0, 656, 653, 1, 0, 0, 0, 656, 654, 1, 0, 0, 0, 656, 655, 1, 0, 0, 0, 657, 82, 1, 0, 0, 0, 658, 663, 5, 34, 0, 0, 659, 662, 3, 67, 26, 0, 660, 662, 3, 69, 27, 0, 661, 659, 1, 0, 0, 0, 661, 660, 1, 0, 0, 0, 662, 665, 1, 0, 0, 0, 663, 661, 1, 0, 0, 0, 663, 664, 1, 0, 0, 0, 664, 666, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 666, 688, 5, 34, 0, 0, 667, 668, 5, 34, 0, 0, 668, 669, 5, 34, 0, 0, 669, 670, 5, 34, 0, 0, 670, 674, 1, 0, 0, 0, 671, 673, 8, 22, 0, 0, 672, 671, 1, 0, 0, 0, 673, 676, 1, 0, 0, 0, 674, 675, 1, 0, 0, 0, 674, 672, 1, 0, 0, 0, 675, 677, 1, 0, 0, 0, 676, 674, 1, 0, 0, 0, 677, 678, 5, 34, 0, 0, 678, 679, 5, 34, 0, 0, 679, 680, 5, 34, 0, 0, 
680, 682, 1, 0, 0, 0, 681, 683, 5, 34, 0, 0, 682, 681, 1, 0, 0, 0, 682, 683, 1, 0, 0, 0, 683, 685, 1, 0, 0, 0, 684, 686, 5, 34, 0, 0, 685, 684, 1, 0, 0, 0, 685, 686, 1, 0, 0, 0, 686, 688, 1, 0, 0, 0, 687, 658, 1, 0, 0, 0, 687, 667, 1, 0, 0, 0, 688, 84, 1, 0, 0, 0, 689, 691, 3, 63, 24, 0, 690, 689, 1, 0, 0, 0, 691, 692, 1, 0, 0, 0, 692, 690, 1, 0, 0, 0, 692, 693, 1, 0, 0, 0, 693, 86, 1, 0, 0, 0, 694, 696, 3, 63, 24, 0, 695, 694, 1, 0, 0, 0, 696, 697, 1, 0, 0, 0, 697, 695, 1, 0, 0, 0, 697, 698, 1, 0, 0, 0, 698, 699, 1, 0, 0, 0, 699, 703, 3, 103, 44, 0, 700, 702, 3, 63, 24, 0, 701, 700, 1, 0, 0, 0, 702, 705, 1, 0, 0, 0, 703, 701, 1, 0, 0, 0, 703, 704, 1, 0, 0, 0, 704, 737, 1, 0, 0, 0, 705, 703, 1, 0, 0, 0, 706, 708, 3, 103, 44, 0, 707, 709, 3, 63, 24, 0, 708, 707, 1, 0, 0, 0, 709, 710, 1, 0, 0, 0, 710, 708, 1, 0, 0, 0, 710, 711, 1, 0, 0, 0, 711, 737, 1, 0, 0, 0, 712, 714, 3, 63, 24, 0, 713, 712, 1, 0, 0, 0, 714, 715, 1, 0, 0, 0, 715, 713, 1, 0, 0, 0, 715, 716, 1, 0, 0, 0, 716, 724, 1, 0, 0, 0, 717, 721, 3, 103, 44, 0, 718, 720, 3, 63, 24, 0, 719, 718, 1, 0, 0, 0, 720, 723, 1, 0, 0, 0, 721, 719, 1, 0, 0, 0, 721, 722, 1, 0, 0, 0, 722, 725, 1, 0, 0, 0, 723, 721, 1, 0, 0, 0, 724, 717, 1, 0, 0, 0, 724, 725, 1, 0, 0, 0, 725, 726, 1, 0, 0, 0, 726, 727, 3, 71, 28, 0, 727, 737, 1, 0, 0, 0, 728, 730, 3, 103, 44, 0, 729, 731, 3, 63, 24, 0, 730, 729, 1, 0, 0, 0, 731, 732, 1, 0, 0, 0, 732, 730, 1, 0, 0, 0, 732, 733, 1, 0, 0, 0, 733, 734, 1, 0, 0, 0, 734, 735, 3, 71, 28, 0, 735, 737, 1, 0, 0, 0, 736, 695, 1, 0, 0, 0, 736, 706, 1, 0, 0, 0, 736, 713, 1, 0, 0, 0, 736, 728, 1, 0, 0, 0, 737, 88, 1, 0, 0, 0, 738, 739, 7, 30, 0, 0, 739, 740, 7, 31, 0, 0, 740, 90, 1, 0, 0, 0, 741, 742, 7, 12, 0, 0, 742, 743, 7, 9, 0, 0, 743, 744, 7, 0, 0, 0, 744, 92, 1, 0, 0, 0, 745, 746, 7, 12, 0, 0, 746, 747, 7, 2, 0, 0, 747, 748, 7, 4, 0, 0, 748, 94, 1, 0, 0, 0, 749, 750, 5, 61, 0, 0, 750, 96, 1, 0, 0, 0, 751, 752, 5, 58, 0, 0, 752, 753, 5, 58, 0, 0, 753, 98, 1, 0, 0, 0, 754, 755, 5, 44, 0, 0, 755, 100, 
1, 0, 0, 0, 756, 757, 7, 0, 0, 0, 757, 758, 7, 3, 0, 0, 758, 759, 7, 2, 0, 0, 759, 760, 7, 4, 0, 0, 760, 102, 1, 0, 0, 0, 761, 762, 5, 46, 0, 0, 762, 104, 1, 0, 0, 0, 763, 764, 7, 15, 0, 0, 764, 765, 7, 12, 0, 0, 765, 766, 7, 13, 0, 0, 766, 767, 7, 2, 0, 0, 767, 768, 7, 3, 0, 0, 768, 106, 1, 0, 0, 0, 769, 770, 7, 15, 0, 0, 770, 771, 7, 1, 0, 0, 771, 772, 7, 6, 0, 0, 772, 773, 7, 2, 0, 0, 773, 774, 7, 5, 0, 0, 774, 108, 1, 0, 0, 0, 775, 776, 7, 1, 0, 0, 776, 777, 7, 9, 0, 0, 777, 110, 1, 0, 0, 0, 778, 779, 7, 1, 0, 0, 779, 780, 7, 2, 0, 0, 780, 112, 1, 0, 0, 0, 781, 782, 7, 13, 0, 0, 782, 783, 7, 12, 0, 0, 783, 784, 7, 2, 0, 0, 784, 785, 7, 5, 0, 0, 785, 114, 1, 0, 0, 0, 786, 787, 7, 13, 0, 0, 787, 788, 7, 1, 0, 0, 788, 789, 7, 18, 0, 0, 789, 790, 7, 3, 0, 0, 790, 116, 1, 0, 0, 0, 791, 792, 5, 40, 0, 0, 792, 118, 1, 0, 0, 0, 793, 794, 7, 9, 0, 0, 794, 795, 7, 7, 0, 0, 795, 796, 7, 5, 0, 0, 796, 120, 1, 0, 0, 0, 797, 798, 7, 9, 0, 0, 798, 799, 7, 20, 0, 0, 799, 800, 7, 13, 0, 0, 800, 801, 7, 13, 0, 0, 801, 122, 1, 0, 0, 0, 802, 803, 7, 9, 0, 0, 803, 804, 7, 20, 0, 0, 804, 805, 7, 13, 0, 0, 805, 806, 7, 13, 0, 0, 806, 807, 7, 2, 0, 0, 807, 124, 1, 0, 0, 0, 808, 809, 7, 7, 0, 0, 809, 810, 7, 6, 0, 0, 810, 126, 1, 0, 0, 0, 811, 812, 5, 63, 0, 0, 812, 128, 1, 0, 0, 0, 813, 814, 7, 6, 0, 0, 814, 815, 7, 13, 0, 0, 815, 816, 7, 1, 0, 0, 816, 817, 7, 18, 0, 0, 817, 818, 7, 3, 0, 0, 818, 130, 1, 0, 0, 0, 819, 820, 5, 41, 0, 0, 820, 132, 1, 0, 0, 0, 821, 822, 7, 5, 0, 0, 822, 823, 7, 6, 0, 0, 823, 824, 7, 20, 0, 0, 824, 825, 7, 3, 0, 0, 825, 134, 1, 0, 0, 0, 826, 827, 5, 61, 0, 0, 827, 828, 5, 61, 0, 0, 828, 136, 1, 0, 0, 0, 829, 830, 5, 61, 0, 0, 830, 831, 5, 126, 0, 0, 831, 138, 1, 0, 0, 0, 832, 833, 5, 33, 0, 0, 833, 834, 5, 61, 0, 0, 834, 140, 1, 0, 0, 0, 835, 836, 5, 60, 0, 0, 836, 142, 1, 0, 0, 0, 837, 838, 5, 60, 0, 0, 838, 839, 5, 61, 0, 0, 839, 144, 1, 0, 0, 0, 840, 841, 5, 62, 0, 0, 841, 146, 1, 0, 0, 0, 842, 843, 5, 62, 0, 0, 843, 844, 5, 61, 0, 0, 844, 148, 1, 0, 
0, 0, 845, 846, 5, 43, 0, 0, 846, 150, 1, 0, 0, 0, 847, 848, 5, 45, 0, 0, 848, 152, 1, 0, 0, 0, 849, 850, 5, 42, 0, 0, 850, 154, 1, 0, 0, 0, 851, 852, 5, 47, 0, 0, 852, 156, 1, 0, 0, 0, 853, 854, 5, 37, 0, 0, 854, 158, 1, 0, 0, 0, 855, 856, 7, 16, 0, 0, 856, 857, 7, 12, 0, 0, 857, 858, 7, 5, 0, 0, 858, 859, 7, 4, 0, 0, 859, 860, 7, 10, 0, 0, 860, 160, 1, 0, 0, 0, 861, 864, 3, 127, 56, 0, 862, 865, 3, 65, 25, 0, 863, 865, 3, 79, 32, 0, 864, 862, 1, 0, 0, 0, 864, 863, 1, 0, 0, 0, 865, 869, 1, 0, 0, 0, 866, 868, 3, 81, 33, 0, 867, 866, 1, 0, 0, 0, 868, 871, 1, 0, 0, 0, 869, 867, 1, 0, 0, 0, 869, 870, 1, 0, 0, 0, 870, 879, 1, 0, 0, 0, 871, 869, 1, 0, 0, 0, 872, 874, 3, 127, 56, 0, 873, 875, 3, 63, 24, 0, 874, 873, 1, 0, 0, 0, 875, 876, 1, 0, 0, 0, 876, 874, 1, 0, 0, 0, 876, 877, 1, 0, 0, 0, 877, 879, 1, 0, 0, 0, 878, 861, 1, 0, 0, 0, 878, 872, 1, 0, 0, 0, 879, 162, 1, 0, 0, 0, 880, 881, 5, 91, 0, 0, 881, 882, 1, 0, 0, 0, 882, 883, 6, 74, 0, 0, 883, 884, 6, 74, 0, 0, 884, 164, 1, 0, 0, 0, 885, 886, 5, 93, 0, 0, 886, 887, 1, 0, 0, 0, 887, 888, 6, 75, 11, 0, 888, 889, 6, 75, 11, 0, 889, 166, 1, 0, 0, 0, 890, 894, 3, 65, 25, 0, 891, 893, 3, 81, 33, 0, 892, 891, 1, 0, 0, 0, 893, 896, 1, 0, 0, 0, 894, 892, 1, 0, 0, 0, 894, 895, 1, 0, 0, 0, 895, 907, 1, 0, 0, 0, 896, 894, 1, 0, 0, 0, 897, 900, 3, 79, 32, 0, 898, 900, 3, 73, 29, 0, 899, 897, 1, 0, 0, 0, 899, 898, 1, 0, 0, 0, 900, 902, 1, 0, 0, 0, 901, 903, 3, 81, 33, 0, 902, 901, 1, 0, 0, 0, 903, 904, 1, 0, 0, 0, 904, 902, 1, 0, 0, 0, 904, 905, 1, 0, 0, 0, 905, 907, 1, 0, 0, 0, 906, 890, 1, 0, 0, 0, 906, 899, 1, 0, 0, 0, 907, 168, 1, 0, 0, 0, 908, 910, 3, 75, 30, 0, 909, 911, 3, 77, 31, 0, 910, 909, 1, 0, 0, 0, 911, 912, 1, 0, 0, 0, 912, 910, 1, 0, 0, 0, 912, 913, 1, 0, 0, 0, 913, 914, 1, 0, 0, 0, 914, 915, 3, 75, 30, 0, 915, 170, 1, 0, 0, 0, 916, 917, 3, 169, 77, 0, 917, 172, 1, 0, 0, 0, 918, 919, 3, 55, 20, 0, 919, 920, 1, 0, 0, 0, 920, 921, 6, 79, 10, 0, 921, 174, 1, 0, 0, 0, 922, 923, 3, 57, 21, 0, 923, 924, 1, 0, 0, 0, 
924, 925, 6, 80, 10, 0, 925, 176, 1, 0, 0, 0, 926, 927, 3, 59, 22, 0, 927, 928, 1, 0, 0, 0, 928, 929, 6, 81, 10, 0, 929, 178, 1, 0, 0, 0, 930, 931, 3, 163, 74, 0, 931, 932, 1, 0, 0, 0, 932, 933, 6, 82, 12, 0, 933, 934, 6, 82, 13, 0, 934, 180, 1, 0, 0, 0, 935, 936, 3, 61, 23, 0, 936, 937, 1, 0, 0, 0, 937, 938, 6, 83, 14, 0, 938, 939, 6, 83, 11, 0, 939, 182, 1, 0, 0, 0, 940, 941, 3, 59, 22, 0, 941, 942, 1, 0, 0, 0, 942, 943, 6, 84, 10, 0, 943, 184, 1, 0, 0, 0, 944, 945, 3, 55, 20, 0, 945, 946, 1, 0, 0, 0, 946, 947, 6, 85, 10, 0, 947, 186, 1, 0, 0, 0, 948, 949, 3, 57, 21, 0, 949, 950, 1, 0, 0, 0, 950, 951, 6, 86, 10, 0, 951, 188, 1, 0, 0, 0, 952, 953, 3, 61, 23, 0, 953, 954, 1, 0, 0, 0, 954, 955, 6, 87, 14, 0, 955, 956, 6, 87, 11, 0, 956, 190, 1, 0, 0, 0, 957, 958, 3, 163, 74, 0, 958, 959, 1, 0, 0, 0, 959, 960, 6, 88, 12, 0, 960, 192, 1, 0, 0, 0, 961, 962, 3, 165, 75, 0, 962, 963, 1, 0, 0, 0, 963, 964, 6, 89, 15, 0, 964, 194, 1, 0, 0, 0, 965, 966, 3, 335, 160, 0, 966, 967, 1, 0, 0, 0, 967, 968, 6, 90, 16, 0, 968, 196, 1, 0, 0, 0, 969, 970, 3, 99, 42, 0, 970, 971, 1, 0, 0, 0, 971, 972, 6, 91, 17, 0, 972, 198, 1, 0, 0, 0, 973, 974, 3, 95, 40, 0, 974, 975, 1, 0, 0, 0, 975, 976, 6, 92, 18, 0, 976, 200, 1, 0, 0, 0, 977, 978, 7, 16, 0, 0, 978, 979, 7, 3, 0, 0, 979, 980, 7, 5, 0, 0, 980, 981, 7, 12, 0, 0, 981, 982, 7, 0, 0, 0, 982, 983, 7, 12, 0, 0, 983, 984, 7, 5, 0, 0, 984, 985, 7, 12, 0, 0, 985, 202, 1, 0, 0, 0, 986, 990, 8, 32, 0, 0, 987, 988, 5, 47, 0, 0, 988, 990, 8, 33, 0, 0, 989, 986, 1, 0, 0, 0, 989, 987, 1, 0, 0, 0, 990, 204, 1, 0, 0, 0, 991, 993, 3, 203, 94, 0, 992, 991, 1, 0, 0, 0, 993, 994, 1, 0, 0, 0, 994, 992, 1, 0, 0, 0, 994, 995, 1, 0, 0, 0, 995, 206, 1, 0, 0, 0, 996, 997, 3, 205, 95, 0, 997, 998, 1, 0, 0, 0, 998, 999, 6, 96, 19, 0, 999, 208, 1, 0, 0, 0, 1000, 1001, 3, 83, 34, 0, 1001, 1002, 1, 0, 0, 0, 1002, 1003, 6, 97, 20, 0, 1003, 210, 1, 0, 0, 0, 1004, 1005, 3, 55, 20, 0, 1005, 1006, 1, 0, 0, 0, 1006, 1007, 6, 98, 10, 0, 1007, 212, 1, 0, 0, 0, 1008, 
1009, 3, 57, 21, 0, 1009, 1010, 1, 0, 0, 0, 1010, 1011, 6, 99, 10, 0, 1011, 214, 1, 0, 0, 0, 1012, 1013, 3, 59, 22, 0, 1013, 1014, 1, 0, 0, 0, 1014, 1015, 6, 100, 10, 0, 1015, 216, 1, 0, 0, 0, 1016, 1017, 3, 61, 23, 0, 1017, 1018, 1, 0, 0, 0, 1018, 1019, 6, 101, 14, 0, 1019, 1020, 6, 101, 11, 0, 1020, 218, 1, 0, 0, 0, 1021, 1022, 3, 103, 44, 0, 1022, 1023, 1, 0, 0, 0, 1023, 1024, 6, 102, 21, 0, 1024, 220, 1, 0, 0, 0, 1025, 1026, 3, 99, 42, 0, 1026, 1027, 1, 0, 0, 0, 1027, 1028, 6, 103, 17, 0, 1028, 222, 1, 0, 0, 0, 1029, 1030, 3, 127, 56, 0, 1030, 1031, 1, 0, 0, 0, 1031, 1032, 6, 104, 22, 0, 1032, 224, 1, 0, 0, 0, 1033, 1034, 3, 161, 73, 0, 1034, 1035, 1, 0, 0, 0, 1035, 1036, 6, 105, 23, 0, 1036, 226, 1, 0, 0, 0, 1037, 1042, 3, 65, 25, 0, 1038, 1042, 3, 63, 24, 0, 1039, 1042, 3, 79, 32, 0, 1040, 1042, 3, 153, 69, 0, 1041, 1037, 1, 0, 0, 0, 1041, 1038, 1, 0, 0, 0, 1041, 1039, 1, 0, 0, 0, 1041, 1040, 1, 0, 0, 0, 1042, 228, 1, 0, 0, 0, 1043, 1046, 3, 65, 25, 0, 1044, 1046, 3, 153, 69, 0, 1045, 1043, 1, 0, 0, 0, 1045, 1044, 1, 0, 0, 0, 1046, 1050, 1, 0, 0, 0, 1047, 1049, 3, 227, 106, 0, 1048, 1047, 1, 0, 0, 0, 1049, 1052, 1, 0, 0, 0, 1050, 1048, 1, 0, 0, 0, 1050, 1051, 1, 0, 0, 0, 1051, 1063, 1, 0, 0, 0, 1052, 1050, 1, 0, 0, 0, 1053, 1056, 3, 79, 32, 0, 1054, 1056, 3, 73, 29, 0, 1055, 1053, 1, 0, 0, 0, 1055, 1054, 1, 0, 0, 0, 1056, 1058, 1, 0, 0, 0, 1057, 1059, 3, 227, 106, 0, 1058, 1057, 1, 0, 0, 0, 1059, 1060, 1, 0, 0, 0, 1060, 1058, 1, 0, 0, 0, 1060, 1061, 1, 0, 0, 0, 1061, 1063, 1, 0, 0, 0, 1062, 1045, 1, 0, 0, 0, 1062, 1055, 1, 0, 0, 0, 1063, 230, 1, 0, 0, 0, 1064, 1067, 3, 229, 107, 0, 1065, 1067, 3, 169, 77, 0, 1066, 1064, 1, 0, 0, 0, 1066, 1065, 1, 0, 0, 0, 1067, 1068, 1, 0, 0, 0, 1068, 1066, 1, 0, 0, 0, 1068, 1069, 1, 0, 0, 0, 1069, 232, 1, 0, 0, 0, 1070, 1071, 3, 55, 20, 0, 1071, 1072, 1, 0, 0, 0, 1072, 1073, 6, 109, 10, 0, 1073, 234, 1, 0, 0, 0, 1074, 1075, 3, 57, 21, 0, 1075, 1076, 1, 0, 0, 0, 1076, 1077, 6, 110, 10, 0, 1077, 236, 1, 0, 0, 0, 1078, 1079, 3, 
59, 22, 0, 1079, 1080, 1, 0, 0, 0, 1080, 1081, 6, 111, 10, 0, 1081, 238, 1, 0, 0, 0, 1082, 1083, 3, 61, 23, 0, 1083, 1084, 1, 0, 0, 0, 1084, 1085, 6, 112, 14, 0, 1085, 1086, 6, 112, 11, 0, 1086, 240, 1, 0, 0, 0, 1087, 1088, 3, 95, 40, 0, 1088, 1089, 1, 0, 0, 0, 1089, 1090, 6, 113, 18, 0, 1090, 242, 1, 0, 0, 0, 1091, 1092, 3, 99, 42, 0, 1092, 1093, 1, 0, 0, 0, 1093, 1094, 6, 114, 17, 0, 1094, 244, 1, 0, 0, 0, 1095, 1096, 3, 103, 44, 0, 1096, 1097, 1, 0, 0, 0, 1097, 1098, 6, 115, 21, 0, 1098, 246, 1, 0, 0, 0, 1099, 1100, 3, 127, 56, 0, 1100, 1101, 1, 0, 0, 0, 1101, 1102, 6, 116, 22, 0, 1102, 248, 1, 0, 0, 0, 1103, 1104, 3, 161, 73, 0, 1104, 1105, 1, 0, 0, 0, 1105, 1106, 6, 117, 23, 0, 1106, 250, 1, 0, 0, 0, 1107, 1108, 7, 12, 0, 0, 1108, 1109, 7, 2, 0, 0, 1109, 252, 1, 0, 0, 0, 1110, 1111, 3, 231, 108, 0, 1111, 1112, 1, 0, 0, 0, 1112, 1113, 6, 119, 24, 0, 1113, 254, 1, 0, 0, 0, 1114, 1115, 3, 55, 20, 0, 1115, 1116, 1, 0, 0, 0, 1116, 1117, 6, 120, 10, 0, 1117, 256, 1, 0, 0, 0, 1118, 1119, 3, 57, 21, 0, 1119, 1120, 1, 0, 0, 0, 1120, 1121, 6, 121, 10, 0, 1121, 258, 1, 0, 0, 0, 1122, 1123, 3, 59, 22, 0, 1123, 1124, 1, 0, 0, 0, 1124, 1125, 6, 122, 10, 0, 1125, 260, 1, 0, 0, 0, 1126, 1127, 3, 61, 23, 0, 1127, 1128, 1, 0, 0, 0, 1128, 1129, 6, 123, 14, 0, 1129, 1130, 6, 123, 11, 0, 1130, 262, 1, 0, 0, 0, 1131, 1132, 3, 163, 74, 0, 1132, 1133, 1, 0, 0, 0, 1133, 1134, 6, 124, 12, 0, 1134, 1135, 6, 124, 25, 0, 1135, 264, 1, 0, 0, 0, 1136, 1137, 7, 7, 0, 0, 1137, 1138, 7, 9, 0, 0, 1138, 1139, 1, 0, 0, 0, 1139, 1140, 6, 125, 26, 0, 1140, 266, 1, 0, 0, 0, 1141, 1142, 7, 19, 0, 0, 1142, 1143, 7, 1, 0, 0, 1143, 1144, 7, 5, 0, 0, 1144, 1145, 7, 10, 0, 0, 1145, 1146, 1, 0, 0, 0, 1146, 1147, 6, 126, 26, 0, 1147, 268, 1, 0, 0, 0, 1148, 1149, 8, 34, 0, 0, 1149, 270, 1, 0, 0, 0, 1150, 1152, 3, 269, 127, 0, 1151, 1150, 1, 0, 0, 0, 1152, 1153, 1, 0, 0, 0, 1153, 1151, 1, 0, 0, 0, 1153, 1154, 1, 0, 0, 0, 1154, 1155, 1, 0, 0, 0, 1155, 1156, 3, 335, 160, 0, 1156, 1158, 1, 0, 0, 0, 1157, 1151, 
1, 0, 0, 0, 1157, 1158, 1, 0, 0, 0, 1158, 1160, 1, 0, 0, 0, 1159, 1161, 3, 269, 127, 0, 1160, 1159, 1, 0, 0, 0, 1161, 1162, 1, 0, 0, 0, 1162, 1160, 1, 0, 0, 0, 1162, 1163, 1, 0, 0, 0, 1163, 272, 1, 0, 0, 0, 1164, 1165, 3, 271, 128, 0, 1165, 1166, 1, 0, 0, 0, 1166, 1167, 6, 129, 27, 0, 1167, 274, 1, 0, 0, 0, 1168, 1169, 3, 55, 20, 0, 1169, 1170, 1, 0, 0, 0, 1170, 1171, 6, 130, 10, 0, 1171, 276, 1, 0, 0, 0, 1172, 1173, 3, 57, 21, 0, 1173, 1174, 1, 0, 0, 0, 1174, 1175, 6, 131, 10, 0, 1175, 278, 1, 0, 0, 0, 1176, 1177, 3, 59, 22, 0, 1177, 1178, 1, 0, 0, 0, 1178, 1179, 6, 132, 10, 0, 1179, 280, 1, 0, 0, 0, 1180, 1181, 3, 61, 23, 0, 1181, 1182, 1, 0, 0, 0, 1182, 1183, 6, 133, 14, 0, 1183, 1184, 6, 133, 11, 0, 1184, 1185, 6, 133, 11, 0, 1185, 282, 1, 0, 0, 0, 1186, 1187, 3, 95, 40, 0, 1187, 1188, 1, 0, 0, 0, 1188, 1189, 6, 134, 18, 0, 1189, 284, 1, 0, 0, 0, 1190, 1191, 3, 99, 42, 0, 1191, 1192, 1, 0, 0, 0, 1192, 1193, 6, 135, 17, 0, 1193, 286, 1, 0, 0, 0, 1194, 1195, 3, 103, 44, 0, 1195, 1196, 1, 0, 0, 0, 1196, 1197, 6, 136, 21, 0, 1197, 288, 1, 0, 0, 0, 1198, 1199, 3, 267, 126, 0, 1199, 1200, 1, 0, 0, 0, 1200, 1201, 6, 137, 28, 0, 1201, 290, 1, 0, 0, 0, 1202, 1203, 3, 231, 108, 0, 1203, 1204, 1, 0, 0, 0, 1204, 1205, 6, 138, 24, 0, 1205, 292, 1, 0, 0, 0, 1206, 1207, 3, 171, 78, 0, 1207, 1208, 1, 0, 0, 0, 1208, 1209, 6, 139, 29, 0, 1209, 294, 1, 0, 0, 0, 1210, 1211, 3, 127, 56, 0, 1211, 1212, 1, 0, 0, 0, 1212, 1213, 6, 140, 22, 0, 1213, 296, 1, 0, 0, 0, 1214, 1215, 3, 161, 73, 0, 1215, 1216, 1, 0, 0, 0, 1216, 1217, 6, 141, 23, 0, 1217, 298, 1, 0, 0, 0, 1218, 1219, 3, 55, 20, 0, 1219, 1220, 1, 0, 0, 0, 1220, 1221, 6, 142, 10, 0, 1221, 300, 1, 0, 0, 0, 1222, 1223, 3, 57, 21, 0, 1223, 1224, 1, 0, 0, 0, 1224, 1225, 6, 143, 10, 0, 1225, 302, 1, 0, 0, 0, 1226, 1227, 3, 59, 22, 0, 1227, 1228, 1, 0, 0, 0, 1228, 1229, 6, 144, 10, 0, 1229, 304, 1, 0, 0, 0, 1230, 1231, 3, 61, 23, 0, 1231, 1232, 1, 0, 0, 0, 1232, 1233, 6, 145, 14, 0, 1233, 1234, 6, 145, 11, 0, 1234, 306, 1, 0, 0, 0, 
1235, 1236, 3, 103, 44, 0, 1236, 1237, 1, 0, 0, 0, 1237, 1238, 6, 146, 21, 0, 1238, 308, 1, 0, 0, 0, 1239, 1240, 3, 127, 56, 0, 1240, 1241, 1, 0, 0, 0, 1241, 1242, 6, 147, 22, 0, 1242, 310, 1, 0, 0, 0, 1243, 1244, 3, 161, 73, 0, 1244, 1245, 1, 0, 0, 0, 1245, 1246, 6, 148, 23, 0, 1246, 312, 1, 0, 0, 0, 1247, 1248, 3, 171, 78, 0, 1248, 1249, 1, 0, 0, 0, 1249, 1250, 6, 149, 29, 0, 1250, 314, 1, 0, 0, 0, 1251, 1252, 3, 167, 76, 0, 1252, 1253, 1, 0, 0, 0, 1253, 1254, 6, 150, 30, 0, 1254, 316, 1, 0, 0, 0, 1255, 1256, 3, 55, 20, 0, 1256, 1257, 1, 0, 0, 0, 1257, 1258, 6, 151, 10, 0, 1258, 318, 1, 0, 0, 0, 1259, 1260, 3, 57, 21, 0, 1260, 1261, 1, 0, 0, 0, 1261, 1262, 6, 152, 10, 0, 1262, 320, 1, 0, 0, 0, 1263, 1264, 3, 59, 22, 0, 1264, 1265, 1, 0, 0, 0, 1265, 1266, 6, 153, 10, 0, 1266, 322, 1, 0, 0, 0, 1267, 1268, 3, 61, 23, 0, 1268, 1269, 1, 0, 0, 0, 1269, 1270, 6, 154, 14, 0, 1270, 1271, 6, 154, 11, 0, 1271, 324, 1, 0, 0, 0, 1272, 1273, 7, 1, 0, 0, 1273, 1274, 7, 9, 0, 0, 1274, 1275, 7, 15, 0, 0, 1275, 1276, 7, 7, 0, 0, 1276, 326, 1, 0, 0, 0, 1277, 1278, 3, 55, 20, 0, 1278, 1279, 1, 0, 0, 0, 1279, 1280, 6, 156, 10, 0, 1280, 328, 1, 0, 0, 0, 1281, 1282, 3, 57, 21, 0, 1282, 1283, 1, 0, 0, 0, 1283, 1284, 6, 157, 10, 0, 1284, 330, 1, 0, 0, 0, 1285, 1286, 3, 59, 22, 0, 1286, 1287, 1, 0, 0, 0, 1287, 1288, 6, 158, 10, 0, 1288, 332, 1, 0, 0, 0, 1289, 1290, 3, 165, 75, 0, 1290, 1291, 1, 0, 0, 0, 1291, 1292, 6, 159, 15, 0, 1292, 1293, 6, 159, 11, 0, 1293, 334, 1, 0, 0, 0, 1294, 1295, 5, 58, 0, 0, 1295, 336, 1, 0, 0, 0, 1296, 1302, 3, 73, 29, 0, 1297, 1302, 3, 63, 24, 0, 1298, 1302, 3, 103, 44, 0, 1299, 1302, 3, 65, 25, 0, 1300, 1302, 3, 79, 32, 0, 1301, 1296, 1, 0, 0, 0, 1301, 1297, 1, 0, 0, 0, 1301, 1298, 1, 0, 0, 0, 1301, 1299, 1, 0, 0, 0, 1301, 1300, 1, 0, 0, 0, 1302, 1303, 1, 0, 0, 0, 1303, 1301, 1, 0, 0, 0, 1303, 1304, 1, 0, 0, 0, 1304, 338, 1, 0, 0, 0, 1305, 1306, 3, 55, 20, 0, 1306, 1307, 1, 0, 0, 0, 1307, 1308, 6, 162, 10, 0, 1308, 340, 1, 0, 0, 0, 1309, 1310, 3, 57, 21, 0, 
1310, 1311, 1, 0, 0, 0, 1311, 1312, 6, 163, 10, 0, 1312, 342, 1, 0, 0, 0, 1313, 1314, 3, 59, 22, 0, 1314, 1315, 1, 0, 0, 0, 1315, 1316, 6, 164, 10, 0, 1316, 344, 1, 0, 0, 0, 1317, 1318, 3, 61, 23, 0, 1318, 1319, 1, 0, 0, 0, 1319, 1320, 6, 165, 14, 0, 1320, 1321, 6, 165, 11, 0, 1321, 346, 1, 0, 0, 0, 1322, 1323, 3, 335, 160, 0, 1323, 1324, 1, 0, 0, 0, 1324, 1325, 6, 166, 16, 0, 1325, 348, 1, 0, 0, 0, 1326, 1327, 3, 99, 42, 0, 1327, 1328, 1, 0, 0, 0, 1328, 1329, 6, 167, 17, 0, 1329, 350, 1, 0, 0, 0, 1330, 1331, 3, 103, 44, 0, 1331, 1332, 1, 0, 0, 0, 1332, 1333, 6, 168, 21, 0, 1333, 352, 1, 0, 0, 0, 1334, 1335, 3, 265, 125, 0, 1335, 1336, 1, 0, 0, 0, 1336, 1337, 6, 169, 31, 0, 1337, 1338, 6, 169, 32, 0, 1338, 354, 1, 0, 0, 0, 1339, 1340, 3, 205, 95, 0, 1340, 1341, 1, 0, 0, 0, 1341, 1342, 6, 170, 19, 0, 1342, 356, 1, 0, 0, 0, 1343, 1344, 3, 83, 34, 0, 1344, 1345, 1, 0, 0, 0, 1345, 1346, 6, 171, 20, 0, 1346, 358, 1, 0, 0, 0, 1347, 1348, 3, 55, 20, 0, 1348, 1349, 1, 0, 0, 0, 1349, 1350, 6, 172, 10, 0, 1350, 360, 1, 0, 0, 0, 1351, 1352, 3, 57, 21, 0, 1352, 1353, 1, 0, 0, 0, 1353, 1354, 6, 173, 10, 0, 1354, 362, 1, 0, 0, 0, 1355, 1356, 3, 59, 22, 0, 1356, 1357, 1, 0, 0, 0, 1357, 1358, 6, 174, 10, 0, 1358, 364, 1, 0, 0, 0, 1359, 1360, 3, 61, 23, 0, 1360, 1361, 1, 0, 0, 0, 1361, 1362, 6, 175, 14, 0, 1362, 1363, 6, 175, 11, 0, 1363, 1364, 6, 175, 11, 0, 1364, 366, 1, 0, 0, 0, 1365, 1366, 3, 99, 42, 0, 1366, 1367, 1, 0, 0, 0, 1367, 1368, 6, 176, 17, 0, 1368, 368, 1, 0, 0, 0, 1369, 1370, 3, 103, 44, 0, 1370, 1371, 1, 0, 0, 0, 1371, 1372, 6, 177, 21, 0, 1372, 370, 1, 0, 0, 0, 1373, 1374, 3, 231, 108, 0, 1374, 1375, 1, 0, 0, 0, 1375, 1376, 6, 178, 24, 0, 1376, 372, 1, 0, 0, 0, 1377, 1378, 3, 55, 20, 0, 1378, 1379, 1, 0, 0, 0, 1379, 1380, 6, 179, 10, 0, 1380, 374, 1, 0, 0, 0, 1381, 1382, 3, 57, 21, 0, 1382, 1383, 1, 0, 0, 0, 1383, 1384, 6, 180, 10, 0, 1384, 376, 1, 0, 0, 0, 1385, 1386, 3, 59, 22, 0, 1386, 1387, 1, 0, 0, 0, 1387, 1388, 6, 181, 10, 0, 1388, 378, 1, 0, 0, 0, 1389, 
1390, 3, 61, 23, 0, 1390, 1391, 1, 0, 0, 0, 1391, 1392, 6, 182, 14, 0, 1392, 1393, 6, 182, 11, 0, 1393, 380, 1, 0, 0, 0, 1394, 1395, 3, 205, 95, 0, 1395, 1396, 1, 0, 0, 0, 1396, 1397, 6, 183, 19, 0, 1397, 1398, 6, 183, 11, 0, 1398, 1399, 6, 183, 33, 0, 1399, 382, 1, 0, 0, 0, 1400, 1401, 3, 83, 34, 0, 1401, 1402, 1, 0, 0, 0, 1402, 1403, 6, 184, 20, 0, 1403, 1404, 6, 184, 11, 0, 1404, 1405, 6, 184, 33, 0, 1405, 384, 1, 0, 0, 0, 1406, 1407, 3, 55, 20, 0, 1407, 1408, 1, 0, 0, 0, 1408, 1409, 6, 185, 10, 0, 1409, 386, 1, 0, 0, 0, 1410, 1411, 3, 57, 21, 0, 1411, 1412, 1, 0, 0, 0, 1412, 1413, 6, 186, 10, 0, 1413, 388, 1, 0, 0, 0, 1414, 1415, 3, 59, 22, 0, 1415, 1416, 1, 0, 0, 0, 1416, 1417, 6, 187, 10, 0, 1417, 390, 1, 0, 0, 0, 1418, 1419, 3, 335, 160, 0, 1419, 1420, 1, 0, 0, 0, 1420, 1421, 6, 188, 16, 0, 1421, 1422, 6, 188, 11, 0, 1422, 1423, 6, 188, 9, 0, 1423, 392, 1, 0, 0, 0, 1424, 1425, 3, 99, 42, 0, 1425, 1426, 1, 0, 0, 0, 1426, 1427, 6, 189, 17, 0, 1427, 1428, 6, 189, 11, 0, 1428, 1429, 6, 189, 9, 0, 1429, 394, 1, 0, 0, 0, 1430, 1431, 3, 55, 20, 0, 1431, 1432, 1, 0, 0, 0, 1432, 1433, 6, 190, 10, 0, 1433, 396, 1, 0, 0, 0, 1434, 1435, 3, 57, 21, 0, 1435, 1436, 1, 0, 0, 0, 1436, 1437, 6, 191, 10, 0, 1437, 398, 1, 0, 0, 0, 1438, 1439, 3, 59, 22, 0, 1439, 1440, 1, 0, 0, 0, 1440, 1441, 6, 192, 10, 0, 1441, 400, 1, 0, 0, 0, 1442, 1443, 3, 171, 78, 0, 1443, 1444, 1, 0, 0, 0, 1444, 1445, 6, 193, 11, 0, 1445, 1446, 6, 193, 0, 0, 1446, 1447, 6, 193, 29, 0, 1447, 402, 1, 0, 0, 0, 1448, 1449, 3, 167, 76, 0, 1449, 1450, 1, 0, 0, 0, 1450, 1451, 6, 194, 11, 0, 1451, 1452, 6, 194, 0, 0, 1452, 1453, 6, 194, 30, 0, 1453, 404, 1, 0, 0, 0, 1454, 1455, 3, 89, 37, 0, 1455, 1456, 1, 0, 0, 0, 1456, 1457, 6, 195, 11, 0, 1457, 1458, 6, 195, 0, 0, 1458, 1459, 6, 195, 34, 0, 1459, 406, 1, 0, 0, 0, 1460, 1461, 3, 61, 23, 0, 1461, 1462, 1, 0, 0, 0, 1462, 1463, 6, 196, 14, 0, 1463, 1464, 6, 196, 11, 0, 1464, 408, 1, 0, 0, 0, 65, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 577, 587, 591, 594, 
603, 605, 616, 635, 640, 649, 656, 661, 663, 674, 682, 685, 687, 692, 697, 703, 710, 715, 721, 724, 732, 736, 864, 869, 876, 878, 894, 899, 904, 906, 912, 989, 994, 1041, 1045, 1050, 1055, 1060, 1062, 1066, 1068, 1153, 1157, 1162, 1301, 1303, 35, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 65, 0, 5, 0, 0, 7, 24, 0, 7, 66, 0, 7, 104, 0, 7, 33, 0, 7, 31, 0, 7, 76, 0, 7, 25, 0, 7, 35, 0, 7, 47, 0, 7, 64, 0, 7, 80, 0, 5, 10, 0, 5, 7, 0, 7, 90, 0, 7, 89, 0, 7, 68, 0, 7, 67, 0, 7, 88, 0, 5, 12, 0, 5, 14, 0, 7, 28, 0] \ No newline at end of file +[4, 0, 120, 1472, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 
7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 
1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 4, 19, 578, 8, 19, 11, 19, 12, 19, 579, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 588, 8, 20, 10, 20, 12, 20, 591, 9, 20, 1, 20, 3, 20, 594, 8, 20, 1, 20, 3, 20, 597, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 606, 8, 21, 10, 21, 12, 21, 609, 9, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 4, 22, 617, 8, 22, 11, 22, 12, 22, 618, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 28, 1, 28, 3, 28, 638, 8, 28, 1, 28, 4, 28, 641, 8, 28, 11, 28, 12, 28, 642, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 3, 31, 652, 8, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 659, 8, 33, 1, 34, 1, 34, 1, 34, 5, 34, 664, 8, 34, 10, 34, 12, 34, 667, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 675, 8, 34, 10, 34, 12, 34, 678, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 3, 34, 685, 8, 34, 1, 34, 3, 34, 688, 8, 34, 3, 34, 690, 8, 34, 1, 35, 4, 35, 693, 8, 35, 11, 35, 12, 35, 694, 1, 36, 4, 36, 698, 8, 36, 11, 36, 12, 36, 699, 1, 36, 1, 36, 5, 36, 704, 8, 36, 10, 36, 12, 36, 707, 9, 36, 1, 36, 1, 36, 4, 36, 711, 8, 36, 11, 36, 12, 36, 712, 1, 36, 4, 36, 716, 8, 36, 11, 36, 12, 36, 717, 1, 36, 1, 36, 5, 36, 722, 8, 36, 10, 36, 12, 36, 725, 
9, 36, 3, 36, 727, 8, 36, 1, 36, 1, 36, 1, 36, 1, 36, 4, 36, 733, 8, 36, 11, 36, 12, 36, 734, 1, 36, 1, 36, 3, 36, 739, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 3, 74, 872, 8, 74, 1, 74, 5, 74, 875, 8, 74, 10, 74, 12, 74, 878, 9, 74, 1, 74, 1, 74, 4, 74, 882, 8, 74, 11, 74, 12, 74, 883, 3, 74, 886, 8, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 5, 77, 900, 8, 77, 10, 77, 12, 77, 903, 9, 77, 1, 77, 1, 77, 3, 77, 907, 8, 77, 1, 77, 4, 77, 910, 8, 77, 11, 77, 12, 77, 911, 3, 77, 914, 8, 77, 1, 78, 1, 78, 4, 78, 918, 8, 78, 11, 78, 12, 78, 919, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 
3, 95, 997, 8, 95, 1, 96, 4, 96, 1000, 8, 96, 11, 96, 12, 96, 1001, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 3, 107, 1049, 8, 107, 1, 108, 1, 108, 3, 108, 1053, 8, 108, 1, 108, 5, 108, 1056, 8, 108, 10, 108, 12, 108, 1059, 9, 108, 1, 108, 1, 108, 3, 108, 1063, 8, 108, 1, 108, 4, 108, 1066, 8, 108, 11, 108, 12, 108, 1067, 3, 108, 1070, 8, 108, 1, 109, 1, 109, 4, 109, 1074, 8, 109, 11, 109, 12, 109, 1075, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 129, 4, 129, 1159, 8, 129, 11, 129, 12, 129, 1160, 1, 129, 1, 129, 3, 129, 1165, 8, 129, 1, 129, 4, 129, 1168, 8, 129, 11, 129, 12, 129, 1169, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 
145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 4, 162, 1309, 8, 162, 11, 162, 12, 162, 1310, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 197, 2, 607, 676, 0, 198, 15, 1, 17, 2, 19, 3, 21, 4, 23, 5, 25, 6, 27, 7, 29, 8, 
31, 9, 33, 10, 35, 11, 37, 12, 39, 13, 41, 14, 43, 15, 45, 16, 47, 17, 49, 18, 51, 19, 53, 20, 55, 21, 57, 22, 59, 23, 61, 24, 63, 0, 65, 0, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 0, 81, 0, 83, 25, 85, 26, 87, 27, 89, 28, 91, 29, 93, 30, 95, 31, 97, 32, 99, 33, 101, 34, 103, 35, 105, 36, 107, 37, 109, 38, 111, 39, 113, 40, 115, 41, 117, 42, 119, 43, 121, 44, 123, 45, 125, 46, 127, 47, 129, 48, 131, 49, 133, 50, 135, 51, 137, 52, 139, 53, 141, 54, 143, 55, 145, 56, 147, 57, 149, 58, 151, 59, 153, 60, 155, 61, 157, 62, 159, 63, 161, 0, 163, 64, 165, 65, 167, 66, 169, 67, 171, 0, 173, 68, 175, 69, 177, 70, 179, 71, 181, 0, 183, 0, 185, 72, 187, 73, 189, 74, 191, 0, 193, 0, 195, 0, 197, 0, 199, 0, 201, 0, 203, 75, 205, 0, 207, 76, 209, 0, 211, 0, 213, 77, 215, 78, 217, 79, 219, 0, 221, 0, 223, 0, 225, 0, 227, 0, 229, 0, 231, 0, 233, 80, 235, 81, 237, 82, 239, 83, 241, 0, 243, 0, 245, 0, 247, 0, 249, 0, 251, 0, 253, 84, 255, 0, 257, 85, 259, 86, 261, 87, 263, 0, 265, 0, 267, 88, 269, 89, 271, 0, 273, 90, 275, 0, 277, 91, 279, 92, 281, 93, 283, 0, 285, 0, 287, 0, 289, 0, 291, 0, 293, 0, 295, 0, 297, 0, 299, 0, 301, 94, 303, 95, 305, 96, 307, 0, 309, 0, 311, 0, 313, 0, 315, 0, 317, 0, 319, 97, 321, 98, 323, 99, 325, 0, 327, 100, 329, 101, 331, 102, 333, 103, 335, 0, 337, 104, 339, 105, 341, 106, 343, 107, 345, 108, 347, 0, 349, 0, 351, 0, 353, 0, 355, 0, 357, 0, 359, 0, 361, 109, 363, 110, 365, 111, 367, 0, 369, 0, 371, 0, 373, 0, 375, 112, 377, 113, 379, 114, 381, 0, 383, 0, 385, 0, 387, 115, 389, 116, 391, 117, 393, 0, 395, 0, 397, 118, 399, 119, 401, 120, 403, 0, 405, 0, 407, 0, 409, 0, 15, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 35, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 
108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1500, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 1, 61, 1, 0, 0, 0, 1, 83, 1, 0, 0, 0, 1, 85, 1, 0, 0, 0, 1, 87, 1, 0, 0, 0, 1, 89, 1, 0, 0, 0, 1, 91, 1, 0, 0, 0, 1, 93, 1, 0, 0, 0, 1, 95, 1, 0, 0, 0, 1, 97, 1, 0, 0, 0, 1, 99, 1, 0, 0, 0, 1, 101, 1, 0, 0, 0, 1, 103, 1, 0, 0, 0, 1, 105, 1, 0, 0, 0, 1, 107, 1, 0, 0, 0, 1, 109, 1, 0, 0, 0, 1, 111, 1, 0, 0, 0, 1, 113, 1, 0, 0, 0, 1, 115, 1, 0, 0, 0, 1, 117, 1, 0, 0, 0, 1, 119, 1, 0, 0, 0, 1, 121, 1, 0, 0, 0, 1, 123, 1, 0, 0, 0, 1, 125, 1, 0, 0, 0, 1, 127, 1, 0, 0, 0, 1, 129, 1, 0, 0, 0, 1, 131, 1, 0, 0, 0, 1, 133, 1, 0, 0, 0, 1, 135, 1, 0, 0, 0, 1, 137, 1, 0, 0, 0, 1, 139, 1, 0, 0, 0, 1, 141, 1, 0, 0, 0, 1, 143, 1, 0, 0, 0, 1, 145, 1, 0, 0, 0, 1, 147, 1, 0, 0, 0, 1, 149, 1, 0, 0, 0, 1, 151, 1, 0, 0, 0, 1, 153, 1, 0, 0, 0, 1, 155, 1, 0, 0, 0, 1, 157, 1, 0, 0, 0, 1, 159, 1, 0, 0, 0, 1, 161, 1, 0, 0, 0, 1, 163, 1, 0, 0, 
0, 1, 165, 1, 0, 0, 0, 1, 167, 1, 0, 0, 0, 1, 169, 1, 0, 0, 0, 1, 173, 1, 0, 0, 0, 1, 175, 1, 0, 0, 0, 1, 177, 1, 0, 0, 0, 1, 179, 1, 0, 0, 0, 2, 181, 1, 0, 0, 0, 2, 183, 1, 0, 0, 0, 2, 185, 1, 0, 0, 0, 2, 187, 1, 0, 0, 0, 2, 189, 1, 0, 0, 0, 3, 191, 1, 0, 0, 0, 3, 193, 1, 0, 0, 0, 3, 195, 1, 0, 0, 0, 3, 197, 1, 0, 0, 0, 3, 199, 1, 0, 0, 0, 3, 201, 1, 0, 0, 0, 3, 203, 1, 0, 0, 0, 3, 207, 1, 0, 0, 0, 3, 209, 1, 0, 0, 0, 3, 211, 1, 0, 0, 0, 3, 213, 1, 0, 0, 0, 3, 215, 1, 0, 0, 0, 3, 217, 1, 0, 0, 0, 4, 219, 1, 0, 0, 0, 4, 221, 1, 0, 0, 0, 4, 223, 1, 0, 0, 0, 4, 225, 1, 0, 0, 0, 4, 227, 1, 0, 0, 0, 4, 233, 1, 0, 0, 0, 4, 235, 1, 0, 0, 0, 4, 237, 1, 0, 0, 0, 4, 239, 1, 0, 0, 0, 5, 241, 1, 0, 0, 0, 5, 243, 1, 0, 0, 0, 5, 245, 1, 0, 0, 0, 5, 247, 1, 0, 0, 0, 5, 249, 1, 0, 0, 0, 5, 251, 1, 0, 0, 0, 5, 253, 1, 0, 0, 0, 5, 255, 1, 0, 0, 0, 5, 257, 1, 0, 0, 0, 5, 259, 1, 0, 0, 0, 5, 261, 1, 0, 0, 0, 6, 263, 1, 0, 0, 0, 6, 265, 1, 0, 0, 0, 6, 267, 1, 0, 0, 0, 6, 269, 1, 0, 0, 0, 6, 273, 1, 0, 0, 0, 6, 275, 1, 0, 0, 0, 6, 277, 1, 0, 0, 0, 6, 279, 1, 0, 0, 0, 6, 281, 1, 0, 0, 0, 7, 283, 1, 0, 0, 0, 7, 285, 1, 0, 0, 0, 7, 287, 1, 0, 0, 0, 7, 289, 1, 0, 0, 0, 7, 291, 1, 0, 0, 0, 7, 293, 1, 0, 0, 0, 7, 295, 1, 0, 0, 0, 7, 297, 1, 0, 0, 0, 7, 299, 1, 0, 0, 0, 7, 301, 1, 0, 0, 0, 7, 303, 1, 0, 0, 0, 7, 305, 1, 0, 0, 0, 8, 307, 1, 0, 0, 0, 8, 309, 1, 0, 0, 0, 8, 311, 1, 0, 0, 0, 8, 313, 1, 0, 0, 0, 8, 315, 1, 0, 0, 0, 8, 317, 1, 0, 0, 0, 8, 319, 1, 0, 0, 0, 8, 321, 1, 0, 0, 0, 8, 323, 1, 0, 0, 0, 9, 325, 1, 0, 0, 0, 9, 327, 1, 0, 0, 0, 9, 329, 1, 0, 0, 0, 9, 331, 1, 0, 0, 0, 9, 333, 1, 0, 0, 0, 10, 335, 1, 0, 0, 0, 10, 337, 1, 0, 0, 0, 10, 339, 1, 0, 0, 0, 10, 341, 1, 0, 0, 0, 10, 343, 1, 0, 0, 0, 10, 345, 1, 0, 0, 0, 11, 347, 1, 0, 0, 0, 11, 349, 1, 0, 0, 0, 11, 351, 1, 0, 0, 0, 11, 353, 1, 0, 0, 0, 11, 355, 1, 0, 0, 0, 11, 357, 1, 0, 0, 0, 11, 359, 1, 0, 0, 0, 11, 361, 1, 0, 0, 0, 11, 363, 1, 0, 0, 0, 11, 365, 1, 0, 0, 0, 12, 367, 1, 0, 0, 0, 12, 369, 1, 0, 0, 0, 12, 371, 1, 0, 0, 
0, 12, 373, 1, 0, 0, 0, 12, 375, 1, 0, 0, 0, 12, 377, 1, 0, 0, 0, 12, 379, 1, 0, 0, 0, 13, 381, 1, 0, 0, 0, 13, 383, 1, 0, 0, 0, 13, 385, 1, 0, 0, 0, 13, 387, 1, 0, 0, 0, 13, 389, 1, 0, 0, 0, 13, 391, 1, 0, 0, 0, 14, 393, 1, 0, 0, 0, 14, 395, 1, 0, 0, 0, 14, 397, 1, 0, 0, 0, 14, 399, 1, 0, 0, 0, 14, 401, 1, 0, 0, 0, 14, 403, 1, 0, 0, 0, 14, 405, 1, 0, 0, 0, 14, 407, 1, 0, 0, 0, 14, 409, 1, 0, 0, 0, 15, 411, 1, 0, 0, 0, 17, 421, 1, 0, 0, 0, 19, 428, 1, 0, 0, 0, 21, 437, 1, 0, 0, 0, 23, 444, 1, 0, 0, 0, 25, 454, 1, 0, 0, 0, 27, 461, 1, 0, 0, 0, 29, 468, 1, 0, 0, 0, 31, 475, 1, 0, 0, 0, 33, 483, 1, 0, 0, 0, 35, 495, 1, 0, 0, 0, 37, 504, 1, 0, 0, 0, 39, 510, 1, 0, 0, 0, 41, 517, 1, 0, 0, 0, 43, 524, 1, 0, 0, 0, 45, 532, 1, 0, 0, 0, 47, 540, 1, 0, 0, 0, 49, 555, 1, 0, 0, 0, 51, 565, 1, 0, 0, 0, 53, 577, 1, 0, 0, 0, 55, 583, 1, 0, 0, 0, 57, 600, 1, 0, 0, 0, 59, 616, 1, 0, 0, 0, 61, 622, 1, 0, 0, 0, 63, 626, 1, 0, 0, 0, 65, 628, 1, 0, 0, 0, 67, 630, 1, 0, 0, 0, 69, 633, 1, 0, 0, 0, 71, 635, 1, 0, 0, 0, 73, 644, 1, 0, 0, 0, 75, 646, 1, 0, 0, 0, 77, 651, 1, 0, 0, 0, 79, 653, 1, 0, 0, 0, 81, 658, 1, 0, 0, 0, 83, 689, 1, 0, 0, 0, 85, 692, 1, 0, 0, 0, 87, 738, 1, 0, 0, 0, 89, 740, 1, 0, 0, 0, 91, 743, 1, 0, 0, 0, 93, 747, 1, 0, 0, 0, 95, 751, 1, 0, 0, 0, 97, 753, 1, 0, 0, 0, 99, 756, 1, 0, 0, 0, 101, 758, 1, 0, 0, 0, 103, 763, 1, 0, 0, 0, 105, 765, 1, 0, 0, 0, 107, 771, 1, 0, 0, 0, 109, 777, 1, 0, 0, 0, 111, 780, 1, 0, 0, 0, 113, 783, 1, 0, 0, 0, 115, 788, 1, 0, 0, 0, 117, 793, 1, 0, 0, 0, 119, 795, 1, 0, 0, 0, 121, 799, 1, 0, 0, 0, 123, 804, 1, 0, 0, 0, 125, 810, 1, 0, 0, 0, 127, 813, 1, 0, 0, 0, 129, 815, 1, 0, 0, 0, 131, 821, 1, 0, 0, 0, 133, 823, 1, 0, 0, 0, 135, 828, 1, 0, 0, 0, 137, 831, 1, 0, 0, 0, 139, 834, 1, 0, 0, 0, 141, 837, 1, 0, 0, 0, 143, 839, 1, 0, 0, 0, 145, 842, 1, 0, 0, 0, 147, 844, 1, 0, 0, 0, 149, 847, 1, 0, 0, 0, 151, 849, 1, 0, 0, 0, 153, 851, 1, 0, 0, 0, 155, 853, 1, 0, 0, 0, 157, 855, 1, 0, 0, 0, 159, 857, 1, 0, 0, 0, 161, 863, 1, 0, 0, 0, 163, 885, 1, 
0, 0, 0, 165, 887, 1, 0, 0, 0, 167, 892, 1, 0, 0, 0, 169, 913, 1, 0, 0, 0, 171, 915, 1, 0, 0, 0, 173, 923, 1, 0, 0, 0, 175, 925, 1, 0, 0, 0, 177, 929, 1, 0, 0, 0, 179, 933, 1, 0, 0, 0, 181, 937, 1, 0, 0, 0, 183, 942, 1, 0, 0, 0, 185, 947, 1, 0, 0, 0, 187, 951, 1, 0, 0, 0, 189, 955, 1, 0, 0, 0, 191, 959, 1, 0, 0, 0, 193, 964, 1, 0, 0, 0, 195, 968, 1, 0, 0, 0, 197, 972, 1, 0, 0, 0, 199, 976, 1, 0, 0, 0, 201, 980, 1, 0, 0, 0, 203, 984, 1, 0, 0, 0, 205, 996, 1, 0, 0, 0, 207, 999, 1, 0, 0, 0, 209, 1003, 1, 0, 0, 0, 211, 1007, 1, 0, 0, 0, 213, 1011, 1, 0, 0, 0, 215, 1015, 1, 0, 0, 0, 217, 1019, 1, 0, 0, 0, 219, 1023, 1, 0, 0, 0, 221, 1028, 1, 0, 0, 0, 223, 1032, 1, 0, 0, 0, 225, 1036, 1, 0, 0, 0, 227, 1040, 1, 0, 0, 0, 229, 1048, 1, 0, 0, 0, 231, 1069, 1, 0, 0, 0, 233, 1073, 1, 0, 0, 0, 235, 1077, 1, 0, 0, 0, 237, 1081, 1, 0, 0, 0, 239, 1085, 1, 0, 0, 0, 241, 1089, 1, 0, 0, 0, 243, 1094, 1, 0, 0, 0, 245, 1098, 1, 0, 0, 0, 247, 1102, 1, 0, 0, 0, 249, 1106, 1, 0, 0, 0, 251, 1110, 1, 0, 0, 0, 253, 1114, 1, 0, 0, 0, 255, 1117, 1, 0, 0, 0, 257, 1121, 1, 0, 0, 0, 259, 1125, 1, 0, 0, 0, 261, 1129, 1, 0, 0, 0, 263, 1133, 1, 0, 0, 0, 265, 1138, 1, 0, 0, 0, 267, 1143, 1, 0, 0, 0, 269, 1148, 1, 0, 0, 0, 271, 1155, 1, 0, 0, 0, 273, 1164, 1, 0, 0, 0, 275, 1171, 1, 0, 0, 0, 277, 1175, 1, 0, 0, 0, 279, 1179, 1, 0, 0, 0, 281, 1183, 1, 0, 0, 0, 283, 1187, 1, 0, 0, 0, 285, 1193, 1, 0, 0, 0, 287, 1197, 1, 0, 0, 0, 289, 1201, 1, 0, 0, 0, 291, 1205, 1, 0, 0, 0, 293, 1209, 1, 0, 0, 0, 295, 1213, 1, 0, 0, 0, 297, 1217, 1, 0, 0, 0, 299, 1221, 1, 0, 0, 0, 301, 1225, 1, 0, 0, 0, 303, 1229, 1, 0, 0, 0, 305, 1233, 1, 0, 0, 0, 307, 1237, 1, 0, 0, 0, 309, 1242, 1, 0, 0, 0, 311, 1246, 1, 0, 0, 0, 313, 1250, 1, 0, 0, 0, 315, 1254, 1, 0, 0, 0, 317, 1258, 1, 0, 0, 0, 319, 1262, 1, 0, 0, 0, 321, 1266, 1, 0, 0, 0, 323, 1270, 1, 0, 0, 0, 325, 1274, 1, 0, 0, 0, 327, 1279, 1, 0, 0, 0, 329, 1284, 1, 0, 0, 0, 331, 1288, 1, 0, 0, 0, 333, 1292, 1, 0, 0, 0, 335, 1296, 1, 0, 0, 0, 337, 1301, 1, 0, 0, 0, 339, 1308, 
1, 0, 0, 0, 341, 1312, 1, 0, 0, 0, 343, 1316, 1, 0, 0, 0, 345, 1320, 1, 0, 0, 0, 347, 1324, 1, 0, 0, 0, 349, 1329, 1, 0, 0, 0, 351, 1333, 1, 0, 0, 0, 353, 1337, 1, 0, 0, 0, 355, 1341, 1, 0, 0, 0, 357, 1346, 1, 0, 0, 0, 359, 1350, 1, 0, 0, 0, 361, 1354, 1, 0, 0, 0, 363, 1358, 1, 0, 0, 0, 365, 1362, 1, 0, 0, 0, 367, 1366, 1, 0, 0, 0, 369, 1372, 1, 0, 0, 0, 371, 1376, 1, 0, 0, 0, 373, 1380, 1, 0, 0, 0, 375, 1384, 1, 0, 0, 0, 377, 1388, 1, 0, 0, 0, 379, 1392, 1, 0, 0, 0, 381, 1396, 1, 0, 0, 0, 383, 1401, 1, 0, 0, 0, 385, 1407, 1, 0, 0, 0, 387, 1413, 1, 0, 0, 0, 389, 1417, 1, 0, 0, 0, 391, 1421, 1, 0, 0, 0, 393, 1425, 1, 0, 0, 0, 395, 1431, 1, 0, 0, 0, 397, 1437, 1, 0, 0, 0, 399, 1441, 1, 0, 0, 0, 401, 1445, 1, 0, 0, 0, 403, 1449, 1, 0, 0, 0, 405, 1455, 1, 0, 0, 0, 407, 1461, 1, 0, 0, 0, 409, 1467, 1, 0, 0, 0, 411, 412, 7, 0, 0, 0, 412, 413, 7, 1, 0, 0, 413, 414, 7, 2, 0, 0, 414, 415, 7, 2, 0, 0, 415, 416, 7, 3, 0, 0, 416, 417, 7, 4, 0, 0, 417, 418, 7, 5, 0, 0, 418, 419, 1, 0, 0, 0, 419, 420, 6, 0, 0, 0, 420, 16, 1, 0, 0, 0, 421, 422, 7, 0, 0, 0, 422, 423, 7, 6, 0, 0, 423, 424, 7, 7, 0, 0, 424, 425, 7, 8, 0, 0, 425, 426, 1, 0, 0, 0, 426, 427, 6, 1, 1, 0, 427, 18, 1, 0, 0, 0, 428, 429, 7, 3, 0, 0, 429, 430, 7, 9, 0, 0, 430, 431, 7, 6, 0, 0, 431, 432, 7, 1, 0, 0, 432, 433, 7, 4, 0, 0, 433, 434, 7, 10, 0, 0, 434, 435, 1, 0, 0, 0, 435, 436, 6, 2, 2, 0, 436, 20, 1, 0, 0, 0, 437, 438, 7, 3, 0, 0, 438, 439, 7, 11, 0, 0, 439, 440, 7, 12, 0, 0, 440, 441, 7, 13, 0, 0, 441, 442, 1, 0, 0, 0, 442, 443, 6, 3, 0, 0, 443, 22, 1, 0, 0, 0, 444, 445, 7, 3, 0, 0, 445, 446, 7, 14, 0, 0, 446, 447, 7, 8, 0, 0, 447, 448, 7, 13, 0, 0, 448, 449, 7, 12, 0, 0, 449, 450, 7, 1, 0, 0, 450, 451, 7, 9, 0, 0, 451, 452, 1, 0, 0, 0, 452, 453, 6, 4, 3, 0, 453, 24, 1, 0, 0, 0, 454, 455, 7, 15, 0, 0, 455, 456, 7, 6, 0, 0, 456, 457, 7, 7, 0, 0, 457, 458, 7, 16, 0, 0, 458, 459, 1, 0, 0, 0, 459, 460, 6, 5, 4, 0, 460, 26, 1, 0, 0, 0, 461, 462, 7, 17, 0, 0, 462, 463, 7, 6, 0, 0, 463, 464, 7, 7, 0, 0, 464, 465, 7, 
18, 0, 0, 465, 466, 1, 0, 0, 0, 466, 467, 6, 6, 0, 0, 467, 28, 1, 0, 0, 0, 468, 469, 7, 18, 0, 0, 469, 470, 7, 3, 0, 0, 470, 471, 7, 3, 0, 0, 471, 472, 7, 8, 0, 0, 472, 473, 1, 0, 0, 0, 473, 474, 6, 7, 1, 0, 474, 30, 1, 0, 0, 0, 475, 476, 7, 13, 0, 0, 476, 477, 7, 1, 0, 0, 477, 478, 7, 16, 0, 0, 478, 479, 7, 1, 0, 0, 479, 480, 7, 5, 0, 0, 480, 481, 1, 0, 0, 0, 481, 482, 6, 8, 0, 0, 482, 32, 1, 0, 0, 0, 483, 484, 7, 16, 0, 0, 484, 485, 7, 11, 0, 0, 485, 486, 5, 95, 0, 0, 486, 487, 7, 3, 0, 0, 487, 488, 7, 14, 0, 0, 488, 489, 7, 8, 0, 0, 489, 490, 7, 12, 0, 0, 490, 491, 7, 9, 0, 0, 491, 492, 7, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 494, 6, 9, 5, 0, 494, 34, 1, 0, 0, 0, 495, 496, 7, 6, 0, 0, 496, 497, 7, 3, 0, 0, 497, 498, 7, 9, 0, 0, 498, 499, 7, 12, 0, 0, 499, 500, 7, 16, 0, 0, 500, 501, 7, 3, 0, 0, 501, 502, 1, 0, 0, 0, 502, 503, 6, 10, 6, 0, 503, 36, 1, 0, 0, 0, 504, 505, 7, 6, 0, 0, 505, 506, 7, 7, 0, 0, 506, 507, 7, 19, 0, 0, 507, 508, 1, 0, 0, 0, 508, 509, 6, 11, 0, 0, 509, 38, 1, 0, 0, 0, 510, 511, 7, 2, 0, 0, 511, 512, 7, 10, 0, 0, 512, 513, 7, 7, 0, 0, 513, 514, 7, 19, 0, 0, 514, 515, 1, 0, 0, 0, 515, 516, 6, 12, 7, 0, 516, 40, 1, 0, 0, 0, 517, 518, 7, 2, 0, 0, 518, 519, 7, 7, 0, 0, 519, 520, 7, 6, 0, 0, 520, 521, 7, 5, 0, 0, 521, 522, 1, 0, 0, 0, 522, 523, 6, 13, 0, 0, 523, 42, 1, 0, 0, 0, 524, 525, 7, 2, 0, 0, 525, 526, 7, 5, 0, 0, 526, 527, 7, 12, 0, 0, 527, 528, 7, 5, 0, 0, 528, 529, 7, 2, 0, 0, 529, 530, 1, 0, 0, 0, 530, 531, 6, 14, 0, 0, 531, 44, 1, 0, 0, 0, 532, 533, 7, 19, 0, 0, 533, 534, 7, 10, 0, 0, 534, 535, 7, 3, 0, 0, 535, 536, 7, 6, 0, 0, 536, 537, 7, 3, 0, 0, 537, 538, 1, 0, 0, 0, 538, 539, 6, 15, 0, 0, 539, 46, 1, 0, 0, 0, 540, 541, 4, 16, 0, 0, 541, 542, 7, 1, 0, 0, 542, 543, 7, 9, 0, 0, 543, 544, 7, 13, 0, 0, 544, 545, 7, 1, 0, 0, 545, 546, 7, 9, 0, 0, 546, 547, 7, 3, 0, 0, 547, 548, 7, 2, 0, 0, 548, 549, 7, 5, 0, 0, 549, 550, 7, 12, 0, 0, 550, 551, 7, 5, 0, 0, 551, 552, 7, 2, 0, 0, 552, 553, 1, 0, 0, 0, 553, 554, 6, 16, 0, 0, 554, 48, 1, 0, 
0, 0, 555, 556, 4, 17, 1, 0, 556, 557, 7, 13, 0, 0, 557, 558, 7, 7, 0, 0, 558, 559, 7, 7, 0, 0, 559, 560, 7, 18, 0, 0, 560, 561, 7, 20, 0, 0, 561, 562, 7, 8, 0, 0, 562, 563, 1, 0, 0, 0, 563, 564, 6, 17, 8, 0, 564, 50, 1, 0, 0, 0, 565, 566, 4, 18, 2, 0, 566, 567, 7, 16, 0, 0, 567, 568, 7, 3, 0, 0, 568, 569, 7, 5, 0, 0, 569, 570, 7, 6, 0, 0, 570, 571, 7, 1, 0, 0, 571, 572, 7, 4, 0, 0, 572, 573, 7, 2, 0, 0, 573, 574, 1, 0, 0, 0, 574, 575, 6, 18, 9, 0, 575, 52, 1, 0, 0, 0, 576, 578, 8, 21, 0, 0, 577, 576, 1, 0, 0, 0, 578, 579, 1, 0, 0, 0, 579, 577, 1, 0, 0, 0, 579, 580, 1, 0, 0, 0, 580, 581, 1, 0, 0, 0, 581, 582, 6, 19, 0, 0, 582, 54, 1, 0, 0, 0, 583, 584, 5, 47, 0, 0, 584, 585, 5, 47, 0, 0, 585, 589, 1, 0, 0, 0, 586, 588, 8, 22, 0, 0, 587, 586, 1, 0, 0, 0, 588, 591, 1, 0, 0, 0, 589, 587, 1, 0, 0, 0, 589, 590, 1, 0, 0, 0, 590, 593, 1, 0, 0, 0, 591, 589, 1, 0, 0, 0, 592, 594, 5, 13, 0, 0, 593, 592, 1, 0, 0, 0, 593, 594, 1, 0, 0, 0, 594, 596, 1, 0, 0, 0, 595, 597, 5, 10, 0, 0, 596, 595, 1, 0, 0, 0, 596, 597, 1, 0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 599, 6, 20, 10, 0, 599, 56, 1, 0, 0, 0, 600, 601, 5, 47, 0, 0, 601, 602, 5, 42, 0, 0, 602, 607, 1, 0, 0, 0, 603, 606, 3, 57, 21, 0, 604, 606, 9, 0, 0, 0, 605, 603, 1, 0, 0, 0, 605, 604, 1, 0, 0, 0, 606, 609, 1, 0, 0, 0, 607, 608, 1, 0, 0, 0, 607, 605, 1, 0, 0, 0, 608, 610, 1, 0, 0, 0, 609, 607, 1, 0, 0, 0, 610, 611, 5, 42, 0, 0, 611, 612, 5, 47, 0, 0, 612, 613, 1, 0, 0, 0, 613, 614, 6, 21, 10, 0, 614, 58, 1, 0, 0, 0, 615, 617, 7, 23, 0, 0, 616, 615, 1, 0, 0, 0, 617, 618, 1, 0, 0, 0, 618, 616, 1, 0, 0, 0, 618, 619, 1, 0, 0, 0, 619, 620, 1, 0, 0, 0, 620, 621, 6, 22, 10, 0, 621, 60, 1, 0, 0, 0, 622, 623, 5, 124, 0, 0, 623, 624, 1, 0, 0, 0, 624, 625, 6, 23, 11, 0, 625, 62, 1, 0, 0, 0, 626, 627, 7, 24, 0, 0, 627, 64, 1, 0, 0, 0, 628, 629, 7, 25, 0, 0, 629, 66, 1, 0, 0, 0, 630, 631, 5, 92, 0, 0, 631, 632, 7, 26, 0, 0, 632, 68, 1, 0, 0, 0, 633, 634, 8, 27, 0, 0, 634, 70, 1, 0, 0, 0, 635, 637, 7, 3, 0, 0, 636, 638, 7, 28, 0, 0, 637, 
636, 1, 0, 0, 0, 637, 638, 1, 0, 0, 0, 638, 640, 1, 0, 0, 0, 639, 641, 3, 63, 24, 0, 640, 639, 1, 0, 0, 0, 641, 642, 1, 0, 0, 0, 642, 640, 1, 0, 0, 0, 642, 643, 1, 0, 0, 0, 643, 72, 1, 0, 0, 0, 644, 645, 5, 64, 0, 0, 645, 74, 1, 0, 0, 0, 646, 647, 5, 96, 0, 0, 647, 76, 1, 0, 0, 0, 648, 652, 8, 29, 0, 0, 649, 650, 5, 96, 0, 0, 650, 652, 5, 96, 0, 0, 651, 648, 1, 0, 0, 0, 651, 649, 1, 0, 0, 0, 652, 78, 1, 0, 0, 0, 653, 654, 5, 95, 0, 0, 654, 80, 1, 0, 0, 0, 655, 659, 3, 65, 25, 0, 656, 659, 3, 63, 24, 0, 657, 659, 3, 79, 32, 0, 658, 655, 1, 0, 0, 0, 658, 656, 1, 0, 0, 0, 658, 657, 1, 0, 0, 0, 659, 82, 1, 0, 0, 0, 660, 665, 5, 34, 0, 0, 661, 664, 3, 67, 26, 0, 662, 664, 3, 69, 27, 0, 663, 661, 1, 0, 0, 0, 663, 662, 1, 0, 0, 0, 664, 667, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 665, 666, 1, 0, 0, 0, 666, 668, 1, 0, 0, 0, 667, 665, 1, 0, 0, 0, 668, 690, 5, 34, 0, 0, 669, 670, 5, 34, 0, 0, 670, 671, 5, 34, 0, 0, 671, 672, 5, 34, 0, 0, 672, 676, 1, 0, 0, 0, 673, 675, 8, 22, 0, 0, 674, 673, 1, 0, 0, 0, 675, 678, 1, 0, 0, 0, 676, 677, 1, 0, 0, 0, 676, 674, 1, 0, 0, 0, 677, 679, 1, 0, 0, 0, 678, 676, 1, 0, 0, 0, 679, 680, 5, 34, 0, 0, 680, 681, 5, 34, 0, 0, 681, 682, 5, 34, 0, 0, 682, 684, 1, 0, 0, 0, 683, 685, 5, 34, 0, 0, 684, 683, 1, 0, 0, 0, 684, 685, 1, 0, 0, 0, 685, 687, 1, 0, 0, 0, 686, 688, 5, 34, 0, 0, 687, 686, 1, 0, 0, 0, 687, 688, 1, 0, 0, 0, 688, 690, 1, 0, 0, 0, 689, 660, 1, 0, 0, 0, 689, 669, 1, 0, 0, 0, 690, 84, 1, 0, 0, 0, 691, 693, 3, 63, 24, 0, 692, 691, 1, 0, 0, 0, 693, 694, 1, 0, 0, 0, 694, 692, 1, 0, 0, 0, 694, 695, 1, 0, 0, 0, 695, 86, 1, 0, 0, 0, 696, 698, 3, 63, 24, 0, 697, 696, 1, 0, 0, 0, 698, 699, 1, 0, 0, 0, 699, 697, 1, 0, 0, 0, 699, 700, 1, 0, 0, 0, 700, 701, 1, 0, 0, 0, 701, 705, 3, 103, 44, 0, 702, 704, 3, 63, 24, 0, 703, 702, 1, 0, 0, 0, 704, 707, 1, 0, 0, 0, 705, 703, 1, 0, 0, 0, 705, 706, 1, 0, 0, 0, 706, 739, 1, 0, 0, 0, 707, 705, 1, 0, 0, 0, 708, 710, 3, 103, 44, 0, 709, 711, 3, 63, 24, 0, 710, 709, 1, 0, 0, 0, 711, 712, 1, 0, 0, 0, 712, 710, 
1, 0, 0, 0, 712, 713, 1, 0, 0, 0, 713, 739, 1, 0, 0, 0, 714, 716, 3, 63, 24, 0, 715, 714, 1, 0, 0, 0, 716, 717, 1, 0, 0, 0, 717, 715, 1, 0, 0, 0, 717, 718, 1, 0, 0, 0, 718, 726, 1, 0, 0, 0, 719, 723, 3, 103, 44, 0, 720, 722, 3, 63, 24, 0, 721, 720, 1, 0, 0, 0, 722, 725, 1, 0, 0, 0, 723, 721, 1, 0, 0, 0, 723, 724, 1, 0, 0, 0, 724, 727, 1, 0, 0, 0, 725, 723, 1, 0, 0, 0, 726, 719, 1, 0, 0, 0, 726, 727, 1, 0, 0, 0, 727, 728, 1, 0, 0, 0, 728, 729, 3, 71, 28, 0, 729, 739, 1, 0, 0, 0, 730, 732, 3, 103, 44, 0, 731, 733, 3, 63, 24, 0, 732, 731, 1, 0, 0, 0, 733, 734, 1, 0, 0, 0, 734, 732, 1, 0, 0, 0, 734, 735, 1, 0, 0, 0, 735, 736, 1, 0, 0, 0, 736, 737, 3, 71, 28, 0, 737, 739, 1, 0, 0, 0, 738, 697, 1, 0, 0, 0, 738, 708, 1, 0, 0, 0, 738, 715, 1, 0, 0, 0, 738, 730, 1, 0, 0, 0, 739, 88, 1, 0, 0, 0, 740, 741, 7, 30, 0, 0, 741, 742, 7, 31, 0, 0, 742, 90, 1, 0, 0, 0, 743, 744, 7, 12, 0, 0, 744, 745, 7, 9, 0, 0, 745, 746, 7, 0, 0, 0, 746, 92, 1, 0, 0, 0, 747, 748, 7, 12, 0, 0, 748, 749, 7, 2, 0, 0, 749, 750, 7, 4, 0, 0, 750, 94, 1, 0, 0, 0, 751, 752, 5, 61, 0, 0, 752, 96, 1, 0, 0, 0, 753, 754, 5, 58, 0, 0, 754, 755, 5, 58, 0, 0, 755, 98, 1, 0, 0, 0, 756, 757, 5, 44, 0, 0, 757, 100, 1, 0, 0, 0, 758, 759, 7, 0, 0, 0, 759, 760, 7, 3, 0, 0, 760, 761, 7, 2, 0, 0, 761, 762, 7, 4, 0, 0, 762, 102, 1, 0, 0, 0, 763, 764, 5, 46, 0, 0, 764, 104, 1, 0, 0, 0, 765, 766, 7, 15, 0, 0, 766, 767, 7, 12, 0, 0, 767, 768, 7, 13, 0, 0, 768, 769, 7, 2, 0, 0, 769, 770, 7, 3, 0, 0, 770, 106, 1, 0, 0, 0, 771, 772, 7, 15, 0, 0, 772, 773, 7, 1, 0, 0, 773, 774, 7, 6, 0, 0, 774, 775, 7, 2, 0, 0, 775, 776, 7, 5, 0, 0, 776, 108, 1, 0, 0, 0, 777, 778, 7, 1, 0, 0, 778, 779, 7, 9, 0, 0, 779, 110, 1, 0, 0, 0, 780, 781, 7, 1, 0, 0, 781, 782, 7, 2, 0, 0, 782, 112, 1, 0, 0, 0, 783, 784, 7, 13, 0, 0, 784, 785, 7, 12, 0, 0, 785, 786, 7, 2, 0, 0, 786, 787, 7, 5, 0, 0, 787, 114, 1, 0, 0, 0, 788, 789, 7, 13, 0, 0, 789, 790, 7, 1, 0, 0, 790, 791, 7, 18, 0, 0, 791, 792, 7, 3, 0, 0, 792, 116, 1, 0, 0, 0, 793, 794, 5, 40, 0, 0, 
794, 118, 1, 0, 0, 0, 795, 796, 7, 9, 0, 0, 796, 797, 7, 7, 0, 0, 797, 798, 7, 5, 0, 0, 798, 120, 1, 0, 0, 0, 799, 800, 7, 9, 0, 0, 800, 801, 7, 20, 0, 0, 801, 802, 7, 13, 0, 0, 802, 803, 7, 13, 0, 0, 803, 122, 1, 0, 0, 0, 804, 805, 7, 9, 0, 0, 805, 806, 7, 20, 0, 0, 806, 807, 7, 13, 0, 0, 807, 808, 7, 13, 0, 0, 808, 809, 7, 2, 0, 0, 809, 124, 1, 0, 0, 0, 810, 811, 7, 7, 0, 0, 811, 812, 7, 6, 0, 0, 812, 126, 1, 0, 0, 0, 813, 814, 5, 63, 0, 0, 814, 128, 1, 0, 0, 0, 815, 816, 7, 6, 0, 0, 816, 817, 7, 13, 0, 0, 817, 818, 7, 1, 0, 0, 818, 819, 7, 18, 0, 0, 819, 820, 7, 3, 0, 0, 820, 130, 1, 0, 0, 0, 821, 822, 5, 41, 0, 0, 822, 132, 1, 0, 0, 0, 823, 824, 7, 5, 0, 0, 824, 825, 7, 6, 0, 0, 825, 826, 7, 20, 0, 0, 826, 827, 7, 3, 0, 0, 827, 134, 1, 0, 0, 0, 828, 829, 5, 61, 0, 0, 829, 830, 5, 61, 0, 0, 830, 136, 1, 0, 0, 0, 831, 832, 5, 61, 0, 0, 832, 833, 5, 126, 0, 0, 833, 138, 1, 0, 0, 0, 834, 835, 5, 33, 0, 0, 835, 836, 5, 61, 0, 0, 836, 140, 1, 0, 0, 0, 837, 838, 5, 60, 0, 0, 838, 142, 1, 0, 0, 0, 839, 840, 5, 60, 0, 0, 840, 841, 5, 61, 0, 0, 841, 144, 1, 0, 0, 0, 842, 843, 5, 62, 0, 0, 843, 146, 1, 0, 0, 0, 844, 845, 5, 62, 0, 0, 845, 846, 5, 61, 0, 0, 846, 148, 1, 0, 0, 0, 847, 848, 5, 43, 0, 0, 848, 150, 1, 0, 0, 0, 849, 850, 5, 45, 0, 0, 850, 152, 1, 0, 0, 0, 851, 852, 5, 42, 0, 0, 852, 154, 1, 0, 0, 0, 853, 854, 5, 47, 0, 0, 854, 156, 1, 0, 0, 0, 855, 856, 5, 37, 0, 0, 856, 158, 1, 0, 0, 0, 857, 858, 7, 16, 0, 0, 858, 859, 7, 12, 0, 0, 859, 860, 7, 5, 0, 0, 860, 861, 7, 4, 0, 0, 861, 862, 7, 10, 0, 0, 862, 160, 1, 0, 0, 0, 863, 864, 4, 73, 3, 0, 864, 865, 3, 45, 15, 0, 865, 866, 1, 0, 0, 0, 866, 867, 6, 73, 12, 0, 867, 162, 1, 0, 0, 0, 868, 871, 3, 127, 56, 0, 869, 872, 3, 65, 25, 0, 870, 872, 3, 79, 32, 0, 871, 869, 1, 0, 0, 0, 871, 870, 1, 0, 0, 0, 872, 876, 1, 0, 0, 0, 873, 875, 3, 81, 33, 0, 874, 873, 1, 0, 0, 0, 875, 878, 1, 0, 0, 0, 876, 874, 1, 0, 0, 0, 876, 877, 1, 0, 0, 0, 877, 886, 1, 0, 0, 0, 878, 876, 1, 0, 0, 0, 879, 881, 3, 127, 56, 0, 880, 882, 3, 
63, 24, 0, 881, 880, 1, 0, 0, 0, 882, 883, 1, 0, 0, 0, 883, 881, 1, 0, 0, 0, 883, 884, 1, 0, 0, 0, 884, 886, 1, 0, 0, 0, 885, 868, 1, 0, 0, 0, 885, 879, 1, 0, 0, 0, 886, 164, 1, 0, 0, 0, 887, 888, 5, 91, 0, 0, 888, 889, 1, 0, 0, 0, 889, 890, 6, 75, 0, 0, 890, 891, 6, 75, 0, 0, 891, 166, 1, 0, 0, 0, 892, 893, 5, 93, 0, 0, 893, 894, 1, 0, 0, 0, 894, 895, 6, 76, 11, 0, 895, 896, 6, 76, 11, 0, 896, 168, 1, 0, 0, 0, 897, 901, 3, 65, 25, 0, 898, 900, 3, 81, 33, 0, 899, 898, 1, 0, 0, 0, 900, 903, 1, 0, 0, 0, 901, 899, 1, 0, 0, 0, 901, 902, 1, 0, 0, 0, 902, 914, 1, 0, 0, 0, 903, 901, 1, 0, 0, 0, 904, 907, 3, 79, 32, 0, 905, 907, 3, 73, 29, 0, 906, 904, 1, 0, 0, 0, 906, 905, 1, 0, 0, 0, 907, 909, 1, 0, 0, 0, 908, 910, 3, 81, 33, 0, 909, 908, 1, 0, 0, 0, 910, 911, 1, 0, 0, 0, 911, 909, 1, 0, 0, 0, 911, 912, 1, 0, 0, 0, 912, 914, 1, 0, 0, 0, 913, 897, 1, 0, 0, 0, 913, 906, 1, 0, 0, 0, 914, 170, 1, 0, 0, 0, 915, 917, 3, 75, 30, 0, 916, 918, 3, 77, 31, 0, 917, 916, 1, 0, 0, 0, 918, 919, 1, 0, 0, 0, 919, 917, 1, 0, 0, 0, 919, 920, 1, 0, 0, 0, 920, 921, 1, 0, 0, 0, 921, 922, 3, 75, 30, 0, 922, 172, 1, 0, 0, 0, 923, 924, 3, 171, 78, 0, 924, 174, 1, 0, 0, 0, 925, 926, 3, 55, 20, 0, 926, 927, 1, 0, 0, 0, 927, 928, 6, 80, 10, 0, 928, 176, 1, 0, 0, 0, 929, 930, 3, 57, 21, 0, 930, 931, 1, 0, 0, 0, 931, 932, 6, 81, 10, 0, 932, 178, 1, 0, 0, 0, 933, 934, 3, 59, 22, 0, 934, 935, 1, 0, 0, 0, 935, 936, 6, 82, 10, 0, 936, 180, 1, 0, 0, 0, 937, 938, 3, 165, 75, 0, 938, 939, 1, 0, 0, 0, 939, 940, 6, 83, 13, 0, 940, 941, 6, 83, 14, 0, 941, 182, 1, 0, 0, 0, 942, 943, 3, 61, 23, 0, 943, 944, 1, 0, 0, 0, 944, 945, 6, 84, 15, 0, 945, 946, 6, 84, 11, 0, 946, 184, 1, 0, 0, 0, 947, 948, 3, 59, 22, 0, 948, 949, 1, 0, 0, 0, 949, 950, 6, 85, 10, 0, 950, 186, 1, 0, 0, 0, 951, 952, 3, 55, 20, 0, 952, 953, 1, 0, 0, 0, 953, 954, 6, 86, 10, 0, 954, 188, 1, 0, 0, 0, 955, 956, 3, 57, 21, 0, 956, 957, 1, 0, 0, 0, 957, 958, 6, 87, 10, 0, 958, 190, 1, 0, 0, 0, 959, 960, 3, 61, 23, 0, 960, 961, 1, 0, 0, 0, 961, 
962, 6, 88, 15, 0, 962, 963, 6, 88, 11, 0, 963, 192, 1, 0, 0, 0, 964, 965, 3, 165, 75, 0, 965, 966, 1, 0, 0, 0, 966, 967, 6, 89, 13, 0, 967, 194, 1, 0, 0, 0, 968, 969, 3, 167, 76, 0, 969, 970, 1, 0, 0, 0, 970, 971, 6, 90, 16, 0, 971, 196, 1, 0, 0, 0, 972, 973, 3, 337, 161, 0, 973, 974, 1, 0, 0, 0, 974, 975, 6, 91, 17, 0, 975, 198, 1, 0, 0, 0, 976, 977, 3, 99, 42, 0, 977, 978, 1, 0, 0, 0, 978, 979, 6, 92, 18, 0, 979, 200, 1, 0, 0, 0, 980, 981, 3, 95, 40, 0, 981, 982, 1, 0, 0, 0, 982, 983, 6, 93, 19, 0, 983, 202, 1, 0, 0, 0, 984, 985, 7, 16, 0, 0, 985, 986, 7, 3, 0, 0, 986, 987, 7, 5, 0, 0, 987, 988, 7, 12, 0, 0, 988, 989, 7, 0, 0, 0, 989, 990, 7, 12, 0, 0, 990, 991, 7, 5, 0, 0, 991, 992, 7, 12, 0, 0, 992, 204, 1, 0, 0, 0, 993, 997, 8, 32, 0, 0, 994, 995, 5, 47, 0, 0, 995, 997, 8, 33, 0, 0, 996, 993, 1, 0, 0, 0, 996, 994, 1, 0, 0, 0, 997, 206, 1, 0, 0, 0, 998, 1000, 3, 205, 95, 0, 999, 998, 1, 0, 0, 0, 1000, 1001, 1, 0, 0, 0, 1001, 999, 1, 0, 0, 0, 1001, 1002, 1, 0, 0, 0, 1002, 208, 1, 0, 0, 0, 1003, 1004, 3, 207, 96, 0, 1004, 1005, 1, 0, 0, 0, 1005, 1006, 6, 97, 20, 0, 1006, 210, 1, 0, 0, 0, 1007, 1008, 3, 83, 34, 0, 1008, 1009, 1, 0, 0, 0, 1009, 1010, 6, 98, 21, 0, 1010, 212, 1, 0, 0, 0, 1011, 1012, 3, 55, 20, 0, 1012, 1013, 1, 0, 0, 0, 1013, 1014, 6, 99, 10, 0, 1014, 214, 1, 0, 0, 0, 1015, 1016, 3, 57, 21, 0, 1016, 1017, 1, 0, 0, 0, 1017, 1018, 6, 100, 10, 0, 1018, 216, 1, 0, 0, 0, 1019, 1020, 3, 59, 22, 0, 1020, 1021, 1, 0, 0, 0, 1021, 1022, 6, 101, 10, 0, 1022, 218, 1, 0, 0, 0, 1023, 1024, 3, 61, 23, 0, 1024, 1025, 1, 0, 0, 0, 1025, 1026, 6, 102, 15, 0, 1026, 1027, 6, 102, 11, 0, 1027, 220, 1, 0, 0, 0, 1028, 1029, 3, 103, 44, 0, 1029, 1030, 1, 0, 0, 0, 1030, 1031, 6, 103, 22, 0, 1031, 222, 1, 0, 0, 0, 1032, 1033, 3, 99, 42, 0, 1033, 1034, 1, 0, 0, 0, 1034, 1035, 6, 104, 18, 0, 1035, 224, 1, 0, 0, 0, 1036, 1037, 3, 127, 56, 0, 1037, 1038, 1, 0, 0, 0, 1038, 1039, 6, 105, 23, 0, 1039, 226, 1, 0, 0, 0, 1040, 1041, 3, 163, 74, 0, 1041, 1042, 1, 0, 0, 0, 1042, 1043, 
6, 106, 24, 0, 1043, 228, 1, 0, 0, 0, 1044, 1049, 3, 65, 25, 0, 1045, 1049, 3, 63, 24, 0, 1046, 1049, 3, 79, 32, 0, 1047, 1049, 3, 153, 69, 0, 1048, 1044, 1, 0, 0, 0, 1048, 1045, 1, 0, 0, 0, 1048, 1046, 1, 0, 0, 0, 1048, 1047, 1, 0, 0, 0, 1049, 230, 1, 0, 0, 0, 1050, 1053, 3, 65, 25, 0, 1051, 1053, 3, 153, 69, 0, 1052, 1050, 1, 0, 0, 0, 1052, 1051, 1, 0, 0, 0, 1053, 1057, 1, 0, 0, 0, 1054, 1056, 3, 229, 107, 0, 1055, 1054, 1, 0, 0, 0, 1056, 1059, 1, 0, 0, 0, 1057, 1055, 1, 0, 0, 0, 1057, 1058, 1, 0, 0, 0, 1058, 1070, 1, 0, 0, 0, 1059, 1057, 1, 0, 0, 0, 1060, 1063, 3, 79, 32, 0, 1061, 1063, 3, 73, 29, 0, 1062, 1060, 1, 0, 0, 0, 1062, 1061, 1, 0, 0, 0, 1063, 1065, 1, 0, 0, 0, 1064, 1066, 3, 229, 107, 0, 1065, 1064, 1, 0, 0, 0, 1066, 1067, 1, 0, 0, 0, 1067, 1065, 1, 0, 0, 0, 1067, 1068, 1, 0, 0, 0, 1068, 1070, 1, 0, 0, 0, 1069, 1052, 1, 0, 0, 0, 1069, 1062, 1, 0, 0, 0, 1070, 232, 1, 0, 0, 0, 1071, 1074, 3, 231, 108, 0, 1072, 1074, 3, 171, 78, 0, 1073, 1071, 1, 0, 0, 0, 1073, 1072, 1, 0, 0, 0, 1074, 1075, 1, 0, 0, 0, 1075, 1073, 1, 0, 0, 0, 1075, 1076, 1, 0, 0, 0, 1076, 234, 1, 0, 0, 0, 1077, 1078, 3, 55, 20, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1080, 6, 110, 10, 0, 1080, 236, 1, 0, 0, 0, 1081, 1082, 3, 57, 21, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1084, 6, 111, 10, 0, 1084, 238, 1, 0, 0, 0, 1085, 1086, 3, 59, 22, 0, 1086, 1087, 1, 0, 0, 0, 1087, 1088, 6, 112, 10, 0, 1088, 240, 1, 0, 0, 0, 1089, 1090, 3, 61, 23, 0, 1090, 1091, 1, 0, 0, 0, 1091, 1092, 6, 113, 15, 0, 1092, 1093, 6, 113, 11, 0, 1093, 242, 1, 0, 0, 0, 1094, 1095, 3, 95, 40, 0, 1095, 1096, 1, 0, 0, 0, 1096, 1097, 6, 114, 19, 0, 1097, 244, 1, 0, 0, 0, 1098, 1099, 3, 99, 42, 0, 1099, 1100, 1, 0, 0, 0, 1100, 1101, 6, 115, 18, 0, 1101, 246, 1, 0, 0, 0, 1102, 1103, 3, 103, 44, 0, 1103, 1104, 1, 0, 0, 0, 1104, 1105, 6, 116, 22, 0, 1105, 248, 1, 0, 0, 0, 1106, 1107, 3, 127, 56, 0, 1107, 1108, 1, 0, 0, 0, 1108, 1109, 6, 117, 23, 0, 1109, 250, 1, 0, 0, 0, 1110, 1111, 3, 163, 74, 0, 1111, 1112, 1, 0, 0, 0, 1112, 1113, 6, 118, 
24, 0, 1113, 252, 1, 0, 0, 0, 1114, 1115, 7, 12, 0, 0, 1115, 1116, 7, 2, 0, 0, 1116, 254, 1, 0, 0, 0, 1117, 1118, 3, 233, 109, 0, 1118, 1119, 1, 0, 0, 0, 1119, 1120, 6, 120, 25, 0, 1120, 256, 1, 0, 0, 0, 1121, 1122, 3, 55, 20, 0, 1122, 1123, 1, 0, 0, 0, 1123, 1124, 6, 121, 10, 0, 1124, 258, 1, 0, 0, 0, 1125, 1126, 3, 57, 21, 0, 1126, 1127, 1, 0, 0, 0, 1127, 1128, 6, 122, 10, 0, 1128, 260, 1, 0, 0, 0, 1129, 1130, 3, 59, 22, 0, 1130, 1131, 1, 0, 0, 0, 1131, 1132, 6, 123, 10, 0, 1132, 262, 1, 0, 0, 0, 1133, 1134, 3, 61, 23, 0, 1134, 1135, 1, 0, 0, 0, 1135, 1136, 6, 124, 15, 0, 1136, 1137, 6, 124, 11, 0, 1137, 264, 1, 0, 0, 0, 1138, 1139, 3, 165, 75, 0, 1139, 1140, 1, 0, 0, 0, 1140, 1141, 6, 125, 13, 0, 1141, 1142, 6, 125, 26, 0, 1142, 266, 1, 0, 0, 0, 1143, 1144, 7, 7, 0, 0, 1144, 1145, 7, 9, 0, 0, 1145, 1146, 1, 0, 0, 0, 1146, 1147, 6, 126, 27, 0, 1147, 268, 1, 0, 0, 0, 1148, 1149, 7, 19, 0, 0, 1149, 1150, 7, 1, 0, 0, 1150, 1151, 7, 5, 0, 0, 1151, 1152, 7, 10, 0, 0, 1152, 1153, 1, 0, 0, 0, 1153, 1154, 6, 127, 27, 0, 1154, 270, 1, 0, 0, 0, 1155, 1156, 8, 34, 0, 0, 1156, 272, 1, 0, 0, 0, 1157, 1159, 3, 271, 128, 0, 1158, 1157, 1, 0, 0, 0, 1159, 1160, 1, 0, 0, 0, 1160, 1158, 1, 0, 0, 0, 1160, 1161, 1, 0, 0, 0, 1161, 1162, 1, 0, 0, 0, 1162, 1163, 3, 337, 161, 0, 1163, 1165, 1, 0, 0, 0, 1164, 1158, 1, 0, 0, 0, 1164, 1165, 1, 0, 0, 0, 1165, 1167, 1, 0, 0, 0, 1166, 1168, 3, 271, 128, 0, 1167, 1166, 1, 0, 0, 0, 1168, 1169, 1, 0, 0, 0, 1169, 1167, 1, 0, 0, 0, 1169, 1170, 1, 0, 0, 0, 1170, 274, 1, 0, 0, 0, 1171, 1172, 3, 273, 129, 0, 1172, 1173, 1, 0, 0, 0, 1173, 1174, 6, 130, 28, 0, 1174, 276, 1, 0, 0, 0, 1175, 1176, 3, 55, 20, 0, 1176, 1177, 1, 0, 0, 0, 1177, 1178, 6, 131, 10, 0, 1178, 278, 1, 0, 0, 0, 1179, 1180, 3, 57, 21, 0, 1180, 1181, 1, 0, 0, 0, 1181, 1182, 6, 132, 10, 0, 1182, 280, 1, 0, 0, 0, 1183, 1184, 3, 59, 22, 0, 1184, 1185, 1, 0, 0, 0, 1185, 1186, 6, 133, 10, 0, 1186, 282, 1, 0, 0, 0, 1187, 1188, 3, 61, 23, 0, 1188, 1189, 1, 0, 0, 0, 1189, 1190, 6, 134, 15, 0, 
1190, 1191, 6, 134, 11, 0, 1191, 1192, 6, 134, 11, 0, 1192, 284, 1, 0, 0, 0, 1193, 1194, 3, 95, 40, 0, 1194, 1195, 1, 0, 0, 0, 1195, 1196, 6, 135, 19, 0, 1196, 286, 1, 0, 0, 0, 1197, 1198, 3, 99, 42, 0, 1198, 1199, 1, 0, 0, 0, 1199, 1200, 6, 136, 18, 0, 1200, 288, 1, 0, 0, 0, 1201, 1202, 3, 103, 44, 0, 1202, 1203, 1, 0, 0, 0, 1203, 1204, 6, 137, 22, 0, 1204, 290, 1, 0, 0, 0, 1205, 1206, 3, 269, 127, 0, 1206, 1207, 1, 0, 0, 0, 1207, 1208, 6, 138, 29, 0, 1208, 292, 1, 0, 0, 0, 1209, 1210, 3, 233, 109, 0, 1210, 1211, 1, 0, 0, 0, 1211, 1212, 6, 139, 25, 0, 1212, 294, 1, 0, 0, 0, 1213, 1214, 3, 173, 79, 0, 1214, 1215, 1, 0, 0, 0, 1215, 1216, 6, 140, 30, 0, 1216, 296, 1, 0, 0, 0, 1217, 1218, 3, 127, 56, 0, 1218, 1219, 1, 0, 0, 0, 1219, 1220, 6, 141, 23, 0, 1220, 298, 1, 0, 0, 0, 1221, 1222, 3, 163, 74, 0, 1222, 1223, 1, 0, 0, 0, 1223, 1224, 6, 142, 24, 0, 1224, 300, 1, 0, 0, 0, 1225, 1226, 3, 55, 20, 0, 1226, 1227, 1, 0, 0, 0, 1227, 1228, 6, 143, 10, 0, 1228, 302, 1, 0, 0, 0, 1229, 1230, 3, 57, 21, 0, 1230, 1231, 1, 0, 0, 0, 1231, 1232, 6, 144, 10, 0, 1232, 304, 1, 0, 0, 0, 1233, 1234, 3, 59, 22, 0, 1234, 1235, 1, 0, 0, 0, 1235, 1236, 6, 145, 10, 0, 1236, 306, 1, 0, 0, 0, 1237, 1238, 3, 61, 23, 0, 1238, 1239, 1, 0, 0, 0, 1239, 1240, 6, 146, 15, 0, 1240, 1241, 6, 146, 11, 0, 1241, 308, 1, 0, 0, 0, 1242, 1243, 3, 103, 44, 0, 1243, 1244, 1, 0, 0, 0, 1244, 1245, 6, 147, 22, 0, 1245, 310, 1, 0, 0, 0, 1246, 1247, 3, 127, 56, 0, 1247, 1248, 1, 0, 0, 0, 1248, 1249, 6, 148, 23, 0, 1249, 312, 1, 0, 0, 0, 1250, 1251, 3, 163, 74, 0, 1251, 1252, 1, 0, 0, 0, 1252, 1253, 6, 149, 24, 0, 1253, 314, 1, 0, 0, 0, 1254, 1255, 3, 173, 79, 0, 1255, 1256, 1, 0, 0, 0, 1256, 1257, 6, 150, 30, 0, 1257, 316, 1, 0, 0, 0, 1258, 1259, 3, 169, 77, 0, 1259, 1260, 1, 0, 0, 0, 1260, 1261, 6, 151, 31, 0, 1261, 318, 1, 0, 0, 0, 1262, 1263, 3, 55, 20, 0, 1263, 1264, 1, 0, 0, 0, 1264, 1265, 6, 152, 10, 0, 1265, 320, 1, 0, 0, 0, 1266, 1267, 3, 57, 21, 0, 1267, 1268, 1, 0, 0, 0, 1268, 1269, 6, 153, 10, 0, 1269, 
322, 1, 0, 0, 0, 1270, 1271, 3, 59, 22, 0, 1271, 1272, 1, 0, 0, 0, 1272, 1273, 6, 154, 10, 0, 1273, 324, 1, 0, 0, 0, 1274, 1275, 3, 61, 23, 0, 1275, 1276, 1, 0, 0, 0, 1276, 1277, 6, 155, 15, 0, 1277, 1278, 6, 155, 11, 0, 1278, 326, 1, 0, 0, 0, 1279, 1280, 7, 1, 0, 0, 1280, 1281, 7, 9, 0, 0, 1281, 1282, 7, 15, 0, 0, 1282, 1283, 7, 7, 0, 0, 1283, 328, 1, 0, 0, 0, 1284, 1285, 3, 55, 20, 0, 1285, 1286, 1, 0, 0, 0, 1286, 1287, 6, 157, 10, 0, 1287, 330, 1, 0, 0, 0, 1288, 1289, 3, 57, 21, 0, 1289, 1290, 1, 0, 0, 0, 1290, 1291, 6, 158, 10, 0, 1291, 332, 1, 0, 0, 0, 1292, 1293, 3, 59, 22, 0, 1293, 1294, 1, 0, 0, 0, 1294, 1295, 6, 159, 10, 0, 1295, 334, 1, 0, 0, 0, 1296, 1297, 3, 167, 76, 0, 1297, 1298, 1, 0, 0, 0, 1298, 1299, 6, 160, 16, 0, 1299, 1300, 6, 160, 11, 0, 1300, 336, 1, 0, 0, 0, 1301, 1302, 5, 58, 0, 0, 1302, 338, 1, 0, 0, 0, 1303, 1309, 3, 73, 29, 0, 1304, 1309, 3, 63, 24, 0, 1305, 1309, 3, 103, 44, 0, 1306, 1309, 3, 65, 25, 0, 1307, 1309, 3, 79, 32, 0, 1308, 1303, 1, 0, 0, 0, 1308, 1304, 1, 0, 0, 0, 1308, 1305, 1, 0, 0, 0, 1308, 1306, 1, 0, 0, 0, 1308, 1307, 1, 0, 0, 0, 1309, 1310, 1, 0, 0, 0, 1310, 1308, 1, 0, 0, 0, 1310, 1311, 1, 0, 0, 0, 1311, 340, 1, 0, 0, 0, 1312, 1313, 3, 55, 20, 0, 1313, 1314, 1, 0, 0, 0, 1314, 1315, 6, 163, 10, 0, 1315, 342, 1, 0, 0, 0, 1316, 1317, 3, 57, 21, 0, 1317, 1318, 1, 0, 0, 0, 1318, 1319, 6, 164, 10, 0, 1319, 344, 1, 0, 0, 0, 1320, 1321, 3, 59, 22, 0, 1321, 1322, 1, 0, 0, 0, 1322, 1323, 6, 165, 10, 0, 1323, 346, 1, 0, 0, 0, 1324, 1325, 3, 61, 23, 0, 1325, 1326, 1, 0, 0, 0, 1326, 1327, 6, 166, 15, 0, 1327, 1328, 6, 166, 11, 0, 1328, 348, 1, 0, 0, 0, 1329, 1330, 3, 337, 161, 0, 1330, 1331, 1, 0, 0, 0, 1331, 1332, 6, 167, 17, 0, 1332, 350, 1, 0, 0, 0, 1333, 1334, 3, 99, 42, 0, 1334, 1335, 1, 0, 0, 0, 1335, 1336, 6, 168, 18, 0, 1336, 352, 1, 0, 0, 0, 1337, 1338, 3, 103, 44, 0, 1338, 1339, 1, 0, 0, 0, 1339, 1340, 6, 169, 22, 0, 1340, 354, 1, 0, 0, 0, 1341, 1342, 3, 267, 126, 0, 1342, 1343, 1, 0, 0, 0, 1343, 1344, 6, 170, 32, 0, 
1344, 1345, 6, 170, 33, 0, 1345, 356, 1, 0, 0, 0, 1346, 1347, 3, 207, 96, 0, 1347, 1348, 1, 0, 0, 0, 1348, 1349, 6, 171, 20, 0, 1349, 358, 1, 0, 0, 0, 1350, 1351, 3, 83, 34, 0, 1351, 1352, 1, 0, 0, 0, 1352, 1353, 6, 172, 21, 0, 1353, 360, 1, 0, 0, 0, 1354, 1355, 3, 55, 20, 0, 1355, 1356, 1, 0, 0, 0, 1356, 1357, 6, 173, 10, 0, 1357, 362, 1, 0, 0, 0, 1358, 1359, 3, 57, 21, 0, 1359, 1360, 1, 0, 0, 0, 1360, 1361, 6, 174, 10, 0, 1361, 364, 1, 0, 0, 0, 1362, 1363, 3, 59, 22, 0, 1363, 1364, 1, 0, 0, 0, 1364, 1365, 6, 175, 10, 0, 1365, 366, 1, 0, 0, 0, 1366, 1367, 3, 61, 23, 0, 1367, 1368, 1, 0, 0, 0, 1368, 1369, 6, 176, 15, 0, 1369, 1370, 6, 176, 11, 0, 1370, 1371, 6, 176, 11, 0, 1371, 368, 1, 0, 0, 0, 1372, 1373, 3, 99, 42, 0, 1373, 1374, 1, 0, 0, 0, 1374, 1375, 6, 177, 18, 0, 1375, 370, 1, 0, 0, 0, 1376, 1377, 3, 103, 44, 0, 1377, 1378, 1, 0, 0, 0, 1378, 1379, 6, 178, 22, 0, 1379, 372, 1, 0, 0, 0, 1380, 1381, 3, 233, 109, 0, 1381, 1382, 1, 0, 0, 0, 1382, 1383, 6, 179, 25, 0, 1383, 374, 1, 0, 0, 0, 1384, 1385, 3, 55, 20, 0, 1385, 1386, 1, 0, 0, 0, 1386, 1387, 6, 180, 10, 0, 1387, 376, 1, 0, 0, 0, 1388, 1389, 3, 57, 21, 0, 1389, 1390, 1, 0, 0, 0, 1390, 1391, 6, 181, 10, 0, 1391, 378, 1, 0, 0, 0, 1392, 1393, 3, 59, 22, 0, 1393, 1394, 1, 0, 0, 0, 1394, 1395, 6, 182, 10, 0, 1395, 380, 1, 0, 0, 0, 1396, 1397, 3, 61, 23, 0, 1397, 1398, 1, 0, 0, 0, 1398, 1399, 6, 183, 15, 0, 1399, 1400, 6, 183, 11, 0, 1400, 382, 1, 0, 0, 0, 1401, 1402, 3, 207, 96, 0, 1402, 1403, 1, 0, 0, 0, 1403, 1404, 6, 184, 20, 0, 1404, 1405, 6, 184, 11, 0, 1405, 1406, 6, 184, 34, 0, 1406, 384, 1, 0, 0, 0, 1407, 1408, 3, 83, 34, 0, 1408, 1409, 1, 0, 0, 0, 1409, 1410, 6, 185, 21, 0, 1410, 1411, 6, 185, 11, 0, 1411, 1412, 6, 185, 34, 0, 1412, 386, 1, 0, 0, 0, 1413, 1414, 3, 55, 20, 0, 1414, 1415, 1, 0, 0, 0, 1415, 1416, 6, 186, 10, 0, 1416, 388, 1, 0, 0, 0, 1417, 1418, 3, 57, 21, 0, 1418, 1419, 1, 0, 0, 0, 1419, 1420, 6, 187, 10, 0, 1420, 390, 1, 0, 0, 0, 1421, 1422, 3, 59, 22, 0, 1422, 1423, 1, 0, 0, 0, 1423, 
1424, 6, 188, 10, 0, 1424, 392, 1, 0, 0, 0, 1425, 1426, 3, 337, 161, 0, 1426, 1427, 1, 0, 0, 0, 1427, 1428, 6, 189, 17, 0, 1428, 1429, 6, 189, 11, 0, 1429, 1430, 6, 189, 9, 0, 1430, 394, 1, 0, 0, 0, 1431, 1432, 3, 99, 42, 0, 1432, 1433, 1, 0, 0, 0, 1433, 1434, 6, 190, 18, 0, 1434, 1435, 6, 190, 11, 0, 1435, 1436, 6, 190, 9, 0, 1436, 396, 1, 0, 0, 0, 1437, 1438, 3, 55, 20, 0, 1438, 1439, 1, 0, 0, 0, 1439, 1440, 6, 191, 10, 0, 1440, 398, 1, 0, 0, 0, 1441, 1442, 3, 57, 21, 0, 1442, 1443, 1, 0, 0, 0, 1443, 1444, 6, 192, 10, 0, 1444, 400, 1, 0, 0, 0, 1445, 1446, 3, 59, 22, 0, 1446, 1447, 1, 0, 0, 0, 1447, 1448, 6, 193, 10, 0, 1448, 402, 1, 0, 0, 0, 1449, 1450, 3, 173, 79, 0, 1450, 1451, 1, 0, 0, 0, 1451, 1452, 6, 194, 11, 0, 1452, 1453, 6, 194, 0, 0, 1453, 1454, 6, 194, 30, 0, 1454, 404, 1, 0, 0, 0, 1455, 1456, 3, 169, 77, 0, 1456, 1457, 1, 0, 0, 0, 1457, 1458, 6, 195, 11, 0, 1458, 1459, 6, 195, 0, 0, 1459, 1460, 6, 195, 31, 0, 1460, 406, 1, 0, 0, 0, 1461, 1462, 3, 89, 37, 0, 1462, 1463, 1, 0, 0, 0, 1463, 1464, 6, 196, 11, 0, 1464, 1465, 6, 196, 0, 0, 1465, 1466, 6, 196, 35, 0, 1466, 408, 1, 0, 0, 0, 1467, 1468, 3, 61, 23, 0, 1468, 1469, 1, 0, 0, 0, 1469, 1470, 6, 197, 15, 0, 1470, 1471, 6, 197, 11, 0, 1471, 410, 1, 0, 0, 0, 65, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 579, 589, 593, 596, 605, 607, 618, 637, 642, 651, 658, 663, 665, 676, 684, 687, 689, 694, 699, 705, 712, 717, 723, 726, 734, 738, 871, 876, 883, 885, 901, 906, 911, 913, 919, 996, 1001, 1048, 1052, 1057, 1062, 1067, 1069, 1073, 1075, 1160, 1164, 1169, 1308, 1310, 36, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 16, 0, 7, 65, 0, 5, 0, 0, 7, 24, 0, 7, 66, 0, 7, 104, 0, 7, 33, 0, 7, 31, 0, 7, 76, 0, 7, 25, 0, 7, 35, 0, 7, 47, 0, 7, 64, 0, 7, 80, 0, 5, 10, 0, 5, 7, 0, 7, 90, 0, 7, 89, 0, 7, 68, 0, 7, 67, 0, 7, 88, 0, 5, 12, 0, 5, 14, 0, 7, 28, 0] \ No newline at end of file diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index aa1eab437be5c..3bef23f4d2751 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -8,16 +8,14 @@ * 2.0. */ +import org.antlr.v4.runtime.Lexer; import org.antlr.v4.runtime.CharStream; -import org.antlr.v4.runtime.RuleContext; -import org.antlr.v4.runtime.RuntimeMetaData; -import org.antlr.v4.runtime.Vocabulary; -import org.antlr.v4.runtime.VocabularyImpl; -import org.antlr.v4.runtime.atn.ATN; -import org.antlr.v4.runtime.atn.ATNDeserializer; -import org.antlr.v4.runtime.atn.LexerATNSimulator; -import org.antlr.v4.runtime.atn.PredictionContextCache; +import org.antlr.v4.runtime.Token; +import org.antlr.v4.runtime.TokenStream; +import org.antlr.v4.runtime.*; +import org.antlr.v4.runtime.atn.*; import org.antlr.v4.runtime.dfa.DFA; +import org.antlr.v4.runtime.misc.*; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast", "CheckReturnValue", "this-escape"}) public class EsqlBaseLexer extends LexerConfig { @@ -27,90 +25,90 @@ public class EsqlBaseLexer extends LexerConfig { protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int - DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, - LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, - WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, UNKNOWN_CMD=20, - LINE_COMMENT=21, MULTILINE_COMMENT=22, WS=23, PIPE=24, QUOTED_STRING=25, - INTEGER_LITERAL=26, DECIMAL_LITERAL=27, BY=28, AND=29, ASC=30, ASSIGN=31, - CAST_OP=32, COMMA=33, DESC=34, DOT=35, FALSE=36, FIRST=37, IN=38, IS=39, - LAST=40, LIKE=41, LP=42, NOT=43, NULL=44, NULLS=45, OR=46, PARAM=47, RLIKE=48, - RP=49, TRUE=50, 
EQ=51, CIEQ=52, NEQ=53, LT=54, LTE=55, GT=56, GTE=57, - PLUS=58, MINUS=59, ASTERISK=60, SLASH=61, PERCENT=62, MATCH=63, NAMED_OR_POSITIONAL_PARAM=64, - OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, - EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, EXPLAIN_WS=72, - EXPLAIN_LINE_COMMENT=73, EXPLAIN_MULTILINE_COMMENT=74, METADATA=75, UNQUOTED_SOURCE=76, - FROM_LINE_COMMENT=77, FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80, - PROJECT_LINE_COMMENT=81, PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83, - AS=84, RENAME_LINE_COMMENT=85, RENAME_MULTILINE_COMMENT=86, RENAME_WS=87, - ON=88, WITH=89, ENRICH_POLICY_NAME=90, ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92, - ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94, ENRICH_FIELD_MULTILINE_COMMENT=95, - ENRICH_FIELD_WS=96, MVEXPAND_LINE_COMMENT=97, MVEXPAND_MULTILINE_COMMENT=98, - MVEXPAND_WS=99, INFO=100, SHOW_LINE_COMMENT=101, SHOW_MULTILINE_COMMENT=102, - SHOW_WS=103, COLON=104, SETTING=105, SETTING_LINE_COMMENT=106, SETTTING_MULTILINE_COMMENT=107, - SETTING_WS=108, LOOKUP_LINE_COMMENT=109, LOOKUP_MULTILINE_COMMENT=110, - LOOKUP_WS=111, LOOKUP_FIELD_LINE_COMMENT=112, LOOKUP_FIELD_MULTILINE_COMMENT=113, - LOOKUP_FIELD_WS=114, METRICS_LINE_COMMENT=115, METRICS_MULTILINE_COMMENT=116, - METRICS_WS=117, CLOSING_METRICS_LINE_COMMENT=118, CLOSING_METRICS_MULTILINE_COMMENT=119, + DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, + LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, + WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, UNKNOWN_CMD=20, + LINE_COMMENT=21, MULTILINE_COMMENT=22, WS=23, PIPE=24, QUOTED_STRING=25, + INTEGER_LITERAL=26, DECIMAL_LITERAL=27, BY=28, AND=29, ASC=30, ASSIGN=31, + CAST_OP=32, COMMA=33, DESC=34, DOT=35, FALSE=36, FIRST=37, IN=38, IS=39, + LAST=40, LIKE=41, LP=42, NOT=43, NULL=44, NULLS=45, OR=46, PARAM=47, RLIKE=48, + RP=49, TRUE=50, EQ=51, CIEQ=52, NEQ=53, LT=54, LTE=55, GT=56, GTE=57, + 
PLUS=58, MINUS=59, ASTERISK=60, SLASH=61, PERCENT=62, MATCH=63, NAMED_OR_POSITIONAL_PARAM=64, + OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, + EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, EXPLAIN_WS=72, + EXPLAIN_LINE_COMMENT=73, EXPLAIN_MULTILINE_COMMENT=74, METADATA=75, UNQUOTED_SOURCE=76, + FROM_LINE_COMMENT=77, FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80, + PROJECT_LINE_COMMENT=81, PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83, + AS=84, RENAME_LINE_COMMENT=85, RENAME_MULTILINE_COMMENT=86, RENAME_WS=87, + ON=88, WITH=89, ENRICH_POLICY_NAME=90, ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92, + ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94, ENRICH_FIELD_MULTILINE_COMMENT=95, + ENRICH_FIELD_WS=96, MVEXPAND_LINE_COMMENT=97, MVEXPAND_MULTILINE_COMMENT=98, + MVEXPAND_WS=99, INFO=100, SHOW_LINE_COMMENT=101, SHOW_MULTILINE_COMMENT=102, + SHOW_WS=103, COLON=104, SETTING=105, SETTING_LINE_COMMENT=106, SETTTING_MULTILINE_COMMENT=107, + SETTING_WS=108, LOOKUP_LINE_COMMENT=109, LOOKUP_MULTILINE_COMMENT=110, + LOOKUP_WS=111, LOOKUP_FIELD_LINE_COMMENT=112, LOOKUP_FIELD_MULTILINE_COMMENT=113, + LOOKUP_FIELD_WS=114, METRICS_LINE_COMMENT=115, METRICS_MULTILINE_COMMENT=116, + METRICS_WS=117, CLOSING_METRICS_LINE_COMMENT=118, CLOSING_METRICS_MULTILINE_COMMENT=119, CLOSING_METRICS_WS=120; public static final int - EXPRESSION_MODE=1, EXPLAIN_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5, - ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, SETTING_MODE=10, + EXPRESSION_MODE=1, EXPLAIN_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5, + ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, SETTING_MODE=10, LOOKUP_MODE=11, LOOKUP_FIELD_MODE=12, METRICS_MODE=13, CLOSING_METRICS_MODE=14; public static String[] channelNames = { "DEFAULT_TOKEN_CHANNEL", "HIDDEN" }; public static String[] modeNames = { - "DEFAULT_MODE", "EXPRESSION_MODE", "EXPLAIN_MODE", "FROM_MODE", "PROJECT_MODE", - 
"RENAME_MODE", "ENRICH_MODE", "ENRICH_FIELD_MODE", "MVEXPAND_MODE", "SHOW_MODE", + "DEFAULT_MODE", "EXPRESSION_MODE", "EXPLAIN_MODE", "FROM_MODE", "PROJECT_MODE", + "RENAME_MODE", "ENRICH_MODE", "ENRICH_FIELD_MODE", "MVEXPAND_MODE", "SHOW_MODE", "SETTING_MODE", "LOOKUP_MODE", "LOOKUP_FIELD_MODE", "METRICS_MODE", "CLOSING_METRICS_MODE" }; private static String[] makeRuleNames() { return new String[] { - "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "KEEP", - "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE", - "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "UNKNOWN_CMD", "LINE_COMMENT", - "MULTILINE_COMMENT", "WS", "PIPE", "DIGIT", "LETTER", "ESCAPE_SEQUENCE", - "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK", - "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", "INTEGER_LITERAL", - "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", - "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT", - "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", - "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", - "PERCENT", "MATCH", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", - "UNQUOTED_IDENTIFIER", "QUOTED_ID", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", - "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_OPENING_BRACKET", "EXPLAIN_PIPE", - "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "FROM_PIPE", - "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", "FROM_COLON", "FROM_COMMA", - "FROM_ASSIGN", "METADATA", "UNQUOTED_SOURCE_PART", "UNQUOTED_SOURCE", - "FROM_UNQUOTED_SOURCE", "FROM_QUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", - "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", "PROJECT_PARAM", - "PROJECT_NAMED_OR_POSITIONAL_PARAM", "UNQUOTED_ID_BODY_WITH_PATTERN", - "UNQUOTED_ID_PATTERN", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", - "PROJECT_WS", "RENAME_PIPE", 
"RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", - "RENAME_PARAM", "RENAME_NAMED_OR_POSITIONAL_PARAM", "AS", "RENAME_ID_PATTERN", - "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ENRICH_PIPE", - "ENRICH_OPENING_BRACKET", "ON", "WITH", "ENRICH_POLICY_NAME_BODY", "ENRICH_POLICY_NAME", - "ENRICH_MODE_UNQUOTED_VALUE", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", - "ENRICH_WS", "ENRICH_FIELD_PIPE", "ENRICH_FIELD_ASSIGN", "ENRICH_FIELD_COMMA", - "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH", "ENRICH_FIELD_ID_PATTERN", "ENRICH_FIELD_QUOTED_IDENTIFIER", - "ENRICH_FIELD_PARAM", "ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM", "ENRICH_FIELD_LINE_COMMENT", - "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_PIPE", - "MVEXPAND_DOT", "MVEXPAND_PARAM", "MVEXPAND_NAMED_OR_POSITIONAL_PARAM", - "MVEXPAND_QUOTED_IDENTIFIER", "MVEXPAND_UNQUOTED_IDENTIFIER", "MVEXPAND_LINE_COMMENT", - "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "SHOW_PIPE", "INFO", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING_CLOSING_BRACKET", "COLON", - "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS", - "LOOKUP_PIPE", "LOOKUP_COLON", "LOOKUP_COMMA", "LOOKUP_DOT", "LOOKUP_ON", - "LOOKUP_UNQUOTED_SOURCE", "LOOKUP_QUOTED_SOURCE", "LOOKUP_LINE_COMMENT", - "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", "LOOKUP_FIELD_PIPE", "LOOKUP_FIELD_COMMA", - "LOOKUP_FIELD_DOT", "LOOKUP_FIELD_ID_PATTERN", "LOOKUP_FIELD_LINE_COMMENT", - "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", "METRICS_PIPE", - "METRICS_UNQUOTED_SOURCE", "METRICS_QUOTED_SOURCE", "METRICS_LINE_COMMENT", - "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_COLON", "CLOSING_METRICS_COMMA", - "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", - "CLOSING_METRICS_WS", "CLOSING_METRICS_QUOTED_IDENTIFIER", "CLOSING_METRICS_UNQUOTED_IDENTIFIER", + "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "KEEP", + "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", 
"SORT", "STATS", "WHERE", + "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "UNKNOWN_CMD", "LINE_COMMENT", + "MULTILINE_COMMENT", "WS", "PIPE", "DIGIT", "LETTER", "ESCAPE_SEQUENCE", + "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK", + "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", "INTEGER_LITERAL", + "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", + "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT", + "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", + "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", + "PERCENT", "MATCH", "NESTED_WHERE", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", + "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID", "QUOTED_IDENTIFIER", + "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_OPENING_BRACKET", + "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", + "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", "FROM_COLON", + "FROM_COMMA", "FROM_ASSIGN", "METADATA", "UNQUOTED_SOURCE_PART", "UNQUOTED_SOURCE", + "FROM_UNQUOTED_SOURCE", "FROM_QUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", "PROJECT_PARAM", + "PROJECT_NAMED_OR_POSITIONAL_PARAM", "UNQUOTED_ID_BODY_WITH_PATTERN", + "UNQUOTED_ID_PATTERN", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", + "RENAME_PARAM", "RENAME_NAMED_OR_POSITIONAL_PARAM", "AS", "RENAME_ID_PATTERN", + "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ENRICH_PIPE", + "ENRICH_OPENING_BRACKET", "ON", "WITH", "ENRICH_POLICY_NAME_BODY", "ENRICH_POLICY_NAME", + "ENRICH_MODE_UNQUOTED_VALUE", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", + "ENRICH_WS", "ENRICH_FIELD_PIPE", "ENRICH_FIELD_ASSIGN", "ENRICH_FIELD_COMMA", + "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH", 
"ENRICH_FIELD_ID_PATTERN", "ENRICH_FIELD_QUOTED_IDENTIFIER", + "ENRICH_FIELD_PARAM", "ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM", "ENRICH_FIELD_LINE_COMMENT", + "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_PIPE", + "MVEXPAND_DOT", "MVEXPAND_PARAM", "MVEXPAND_NAMED_OR_POSITIONAL_PARAM", + "MVEXPAND_QUOTED_IDENTIFIER", "MVEXPAND_UNQUOTED_IDENTIFIER", "MVEXPAND_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "SHOW_PIPE", "INFO", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING_CLOSING_BRACKET", "COLON", + "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS", + "LOOKUP_PIPE", "LOOKUP_COLON", "LOOKUP_COMMA", "LOOKUP_DOT", "LOOKUP_ON", + "LOOKUP_UNQUOTED_SOURCE", "LOOKUP_QUOTED_SOURCE", "LOOKUP_LINE_COMMENT", + "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", "LOOKUP_FIELD_PIPE", "LOOKUP_FIELD_COMMA", + "LOOKUP_FIELD_DOT", "LOOKUP_FIELD_ID_PATTERN", "LOOKUP_FIELD_LINE_COMMENT", + "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", "METRICS_PIPE", + "METRICS_UNQUOTED_SOURCE", "METRICS_QUOTED_SOURCE", "METRICS_LINE_COMMENT", + "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_COLON", "CLOSING_METRICS_COMMA", + "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + "CLOSING_METRICS_WS", "CLOSING_METRICS_QUOTED_IDENTIFIER", "CLOSING_METRICS_UNQUOTED_IDENTIFIER", "CLOSING_METRICS_BY", "CLOSING_METRICS_PIPE" }; } @@ -118,46 +116,46 @@ private static String[] makeRuleNames() { private static String[] makeLiteralNames() { return new String[] { - null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", - "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", - "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, - "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "'::'", "','", - "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", "'like'", - "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", 
"')'", - "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", - "'-'", "'*'", "'/'", "'%'", "'match'", null, null, "']'", null, null, - null, null, null, null, null, null, "'metadata'", null, null, null, null, - null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, - null, null, null, null, null, null, null, null, null, "'info'", null, + null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", + "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", + "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, + "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "'::'", "','", + "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", "'like'", + "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", "')'", + "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", + "'-'", "'*'", "'/'", "'%'", "'match'", null, null, "']'", null, null, + null, null, null, null, null, null, "'metadata'", null, null, null, null, + null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, + null, null, null, null, null, null, null, null, null, "'info'", null, null, null, "':'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); private static String[] makeSymbolicNames() { return new String[] { - null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", - "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", - "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "UNKNOWN_CMD", - "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", - "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", - "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT", - "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", - "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", - "PERCENT", "MATCH", 
"NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", - "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", - "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", - "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", - "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", - "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", - "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", - "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", - "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", - "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", - "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", - "LOOKUP_FIELD_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", - "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", + "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", + "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "UNKNOWN_CMD", + "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", + "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", + "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT", + "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", + "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", + "PERCENT", "MATCH", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", + "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", + "EXPR_WS", 
"EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", + "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", + "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", + "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", + "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", + "LOOKUP_FIELD_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", + "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS" }; } @@ -229,6 +227,8 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return DEV_LOOKUP_sempred((RuleContext)_localctx, predIndex); case 18: return DEV_METRICS_sempred((RuleContext)_localctx, predIndex); + case 73: + return NESTED_WHERE_sempred((RuleContext)_localctx, predIndex); } return true; } @@ -253,9 +253,16 @@ private boolean DEV_METRICS_sempred(RuleContext _localctx, int predIndex) { } return true; } + private boolean NESTED_WHERE_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 3: + return this.isDevVersion(); + } + return true; + } public static final String _serializedATN = - "\u0004\u0000x\u05b9\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ + "\u0004\u0000x\u05c0\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ 
"\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ @@ -311,54 +318,54 @@ private boolean DEV_METRICS_sempred(RuleContext _localctx, int predIndex) { "\u00ba\u0007\u00ba\u0002\u00bb\u0007\u00bb\u0002\u00bc\u0007\u00bc\u0002"+ "\u00bd\u0007\u00bd\u0002\u00be\u0007\u00be\u0002\u00bf\u0007\u00bf\u0002"+ "\u00c0\u0007\u00c0\u0002\u00c1\u0007\u00c1\u0002\u00c2\u0007\u00c2\u0002"+ - "\u00c3\u0007\u00c3\u0002\u00c4\u0007\u00c4\u0001\u0000\u0001\u0000\u0001"+ + "\u00c3\u0007\u00c3\u0002\u00c4\u0007\u00c4\u0002\u00c5\u0007\u00c5\u0001"+ "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ - "\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001"+ "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ - "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - "\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001"+ - "\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b"+ - "\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ - 
"\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ - "\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f"+ - "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010"+ + "\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ + "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ + "\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ + "\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ + "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001"+ + "\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001"+ + "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b"+ + "\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001"+ + "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001"+ + "\r\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e"+ + "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f"+ + "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f"+ "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ - "\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ - "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012"+ + "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011"+ + "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ + 
"\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012"+ "\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012"+ - "\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0004\u0013"+ - "\u0240\b\u0013\u000b\u0013\f\u0013\u0241\u0001\u0013\u0001\u0013\u0001"+ - "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0005\u0014\u024a\b\u0014\n"+ - "\u0014\f\u0014\u024d\t\u0014\u0001\u0014\u0003\u0014\u0250\b\u0014\u0001"+ - "\u0014\u0003\u0014\u0253\b\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0001"+ - "\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0005\u0015\u025c\b\u0015\n"+ - "\u0015\f\u0015\u025f\t\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001"+ - "\u0015\u0001\u0015\u0001\u0016\u0004\u0016\u0267\b\u0016\u000b\u0016\f"+ - "\u0016\u0268\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001\u0017"+ - "\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u001a"+ - "\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c"+ - "\u0003\u001c\u027c\b\u001c\u0001\u001c\u0004\u001c\u027f\b\u001c\u000b"+ - "\u001c\f\u001c\u0280\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001"+ - "\u001f\u0001\u001f\u0001\u001f\u0003\u001f\u028a\b\u001f\u0001 \u0001"+ - " \u0001!\u0001!\u0001!\u0003!\u0291\b!\u0001\"\u0001\"\u0001\"\u0005\""+ - "\u0296\b\"\n\"\f\"\u0299\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ - "\"\u0005\"\u02a1\b\"\n\"\f\"\u02a4\t\"\u0001\"\u0001\"\u0001\"\u0001\""+ - "\u0001\"\u0003\"\u02ab\b\"\u0001\"\u0003\"\u02ae\b\"\u0003\"\u02b0\b\""+ - "\u0001#\u0004#\u02b3\b#\u000b#\f#\u02b4\u0001$\u0004$\u02b8\b$\u000b$"+ - "\f$\u02b9\u0001$\u0001$\u0005$\u02be\b$\n$\f$\u02c1\t$\u0001$\u0001$\u0004"+ - "$\u02c5\b$\u000b$\f$\u02c6\u0001$\u0004$\u02ca\b$\u000b$\f$\u02cb\u0001"+ - "$\u0001$\u0005$\u02d0\b$\n$\f$\u02d3\t$\u0003$\u02d5\b$\u0001$\u0001$"+ - "\u0001$\u0001$\u0004$\u02db\b$\u000b$\f$\u02dc\u0001$\u0001$\u0003$\u02e1"+ + 
"\u0001\u0013\u0004\u0013\u0242\b\u0013\u000b\u0013\f\u0013\u0243\u0001"+ + "\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0005"+ + "\u0014\u024c\b\u0014\n\u0014\f\u0014\u024f\t\u0014\u0001\u0014\u0003\u0014"+ + "\u0252\b\u0014\u0001\u0014\u0003\u0014\u0255\b\u0014\u0001\u0014\u0001"+ + "\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0005"+ + "\u0015\u025e\b\u0015\n\u0015\f\u0015\u0261\t\u0015\u0001\u0015\u0001\u0015"+ + "\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0016\u0004\u0016\u0269\b\u0016"+ + "\u000b\u0016\f\u0016\u026a\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017"+ + "\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019"+ + "\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001\u001c"+ + "\u0001\u001c\u0003\u001c\u027e\b\u001c\u0001\u001c\u0004\u001c\u0281\b"+ + "\u001c\u000b\u001c\f\u001c\u0282\u0001\u001d\u0001\u001d\u0001\u001e\u0001"+ + "\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0003\u001f\u028c\b\u001f\u0001"+ + " \u0001 \u0001!\u0001!\u0001!\u0003!\u0293\b!\u0001\"\u0001\"\u0001\""+ + "\u0005\"\u0298\b\"\n\"\f\"\u029b\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ + "\"\u0001\"\u0005\"\u02a3\b\"\n\"\f\"\u02a6\t\"\u0001\"\u0001\"\u0001\""+ + "\u0001\"\u0001\"\u0003\"\u02ad\b\"\u0001\"\u0003\"\u02b0\b\"\u0003\"\u02b2"+ + "\b\"\u0001#\u0004#\u02b5\b#\u000b#\f#\u02b6\u0001$\u0004$\u02ba\b$\u000b"+ + "$\f$\u02bb\u0001$\u0001$\u0005$\u02c0\b$\n$\f$\u02c3\t$\u0001$\u0001$"+ + "\u0004$\u02c7\b$\u000b$\f$\u02c8\u0001$\u0004$\u02cc\b$\u000b$\f$\u02cd"+ + "\u0001$\u0001$\u0005$\u02d2\b$\n$\f$\u02d5\t$\u0003$\u02d7\b$\u0001$\u0001"+ + "$\u0001$\u0001$\u0004$\u02dd\b$\u000b$\f$\u02de\u0001$\u0001$\u0003$\u02e3"+ "\b$\u0001%\u0001%\u0001%\u0001&\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001"+ "\'\u0001\'\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001+\u0001"+ "+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001-\u0001-\u0001-\u0001-\u0001"+ @@ -371,842 +378,846 @@ private boolean 
DEV_METRICS_sempred(RuleContext _localctx, int predIndex) { "<\u0001<\u0001=\u0001=\u0001=\u0001>\u0001>\u0001>\u0001?\u0001?\u0001"+ "@\u0001@\u0001@\u0001A\u0001A\u0001B\u0001B\u0001B\u0001C\u0001C\u0001"+ "D\u0001D\u0001E\u0001E\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001"+ - "H\u0001H\u0001H\u0001H\u0001I\u0001I\u0001I\u0003I\u0361\bI\u0001I\u0005"+ - "I\u0364\bI\nI\fI\u0367\tI\u0001I\u0001I\u0004I\u036b\bI\u000bI\fI\u036c"+ - "\u0003I\u036f\bI\u0001J\u0001J\u0001J\u0001J\u0001J\u0001K\u0001K\u0001"+ - "K\u0001K\u0001K\u0001L\u0001L\u0005L\u037d\bL\nL\fL\u0380\tL\u0001L\u0001"+ - "L\u0003L\u0384\bL\u0001L\u0004L\u0387\bL\u000bL\fL\u0388\u0003L\u038b"+ - "\bL\u0001M\u0001M\u0004M\u038f\bM\u000bM\fM\u0390\u0001M\u0001M\u0001"+ - "N\u0001N\u0001O\u0001O\u0001O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001"+ - "Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001R\u0001S\u0001"+ - "S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001U\u0001U\u0001"+ - "U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001W\u0001"+ - "W\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001"+ - "Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001"+ - "\\\u0001]\u0001]\u0001]\u0001]\u0001]\u0001]\u0001]\u0001]\u0001]\u0001"+ - "^\u0001^\u0001^\u0003^\u03de\b^\u0001_\u0004_\u03e1\b_\u000b_\f_\u03e2"+ - "\u0001`\u0001`\u0001`\u0001`\u0001a\u0001a\u0001a\u0001a\u0001b\u0001"+ - "b\u0001b\u0001b\u0001c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001"+ - "d\u0001e\u0001e\u0001e\u0001e\u0001e\u0001f\u0001f\u0001f\u0001f\u0001"+ - "g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001h\u0001i\u0001i\u0001"+ - "i\u0001i\u0001j\u0001j\u0001j\u0001j\u0003j\u0412\bj\u0001k\u0001k\u0003"+ - "k\u0416\bk\u0001k\u0005k\u0419\bk\nk\fk\u041c\tk\u0001k\u0001k\u0003k"+ - "\u0420\bk\u0001k\u0004k\u0423\bk\u000bk\fk\u0424\u0003k\u0427\bk\u0001"+ - "l\u0001l\u0004l\u042b\bl\u000bl\fl\u042c\u0001m\u0001m\u0001m\u0001m\u0001"+ - 
"n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001p\u0001"+ - "p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001r\u0001"+ - "r\u0001s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001t\u0001u\u0001"+ - "u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001"+ - "x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001"+ - "z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001"+ - "|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001"+ - "~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u0080\u0004\u0080"+ - "\u0480\b\u0080\u000b\u0080\f\u0080\u0481\u0001\u0080\u0001\u0080\u0003"+ - "\u0080\u0486\b\u0080\u0001\u0080\u0004\u0080\u0489\b\u0080\u000b\u0080"+ - "\f\u0080\u048a\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0082"+ - "\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0083"+ - "\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0085"+ - "\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086"+ - "\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001\u0087"+ - "\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0089"+ - "\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001\u008a"+ - "\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008c"+ - "\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d\u0001\u008d"+ - "\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008f"+ - "\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090\u0001\u0090"+ - "\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091"+ - "\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001\u0093"+ - "\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094"+ - "\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0096\u0001\u0096"+ - 
"\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097"+ - "\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0099\u0001\u0099"+ - "\u0001\u0099\u0001\u0099\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a"+ - "\u0001\u009a\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009b"+ - "\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009d\u0001\u009d"+ - "\u0001\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009e"+ - "\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u00a0"+ - "\u0001\u00a0\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1"+ - "\u0004\u00a1\u0516\b\u00a1\u000b\u00a1\f\u00a1\u0517\u0001\u00a2\u0001"+ - "\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001"+ - "\u00a3\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a5\u0001"+ - "\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6\u0001\u00a6\u0001"+ - "\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001"+ - "\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001\u00a9\u0001"+ - "\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001"+ - "\u00aa\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ac\u0001"+ - "\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001"+ - "\u00ad\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00af\u0001"+ - "\u00af\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00b0\u0001"+ - "\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001"+ - "\u00b1\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b3\u0001"+ - "\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001"+ - "\u00b4\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b6\u0001"+ - "\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7\u0001"+ - "\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b8\u0001\u00b8\u0001"+ - 
"\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b9\u0001\u00b9\u0001"+ - "\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001"+ - "\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bc\u0001\u00bc\u0001"+ - "\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd\u0001"+ - "\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be\u0001"+ - "\u00be\u0001\u00be\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001"+ - "\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001\u00c1\u0001"+ - "\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c2\u0001\u00c2\u0001"+ - "\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c3\u0001\u00c3\u0001"+ - "\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c4\u0001\u00c4\u0001"+ - "\u00c4\u0001\u00c4\u0001\u00c4\u0002\u025d\u02a2\u0000\u00c5\u000f\u0001"+ - "\u0011\u0002\u0013\u0003\u0015\u0004\u0017\u0005\u0019\u0006\u001b\u0007"+ - "\u001d\b\u001f\t!\n#\u000b%\f\'\r)\u000e+\u000f-\u0010/\u00111\u00123"+ - "\u00135\u00147\u00159\u0016;\u0017=\u0018?\u0000A\u0000C\u0000E\u0000"+ + "H\u0001H\u0001H\u0001H\u0001I\u0001I\u0001I\u0001I\u0001I\u0001J\u0001"+ + "J\u0001J\u0003J\u0368\bJ\u0001J\u0005J\u036b\bJ\nJ\fJ\u036e\tJ\u0001J"+ + "\u0001J\u0004J\u0372\bJ\u000bJ\fJ\u0373\u0003J\u0376\bJ\u0001K\u0001K"+ + "\u0001K\u0001K\u0001K\u0001L\u0001L\u0001L\u0001L\u0001L\u0001M\u0001"+ + "M\u0005M\u0384\bM\nM\fM\u0387\tM\u0001M\u0001M\u0003M\u038b\bM\u0001M"+ + "\u0004M\u038e\bM\u000bM\fM\u038f\u0003M\u0392\bM\u0001N\u0001N\u0004N"+ + "\u0396\bN\u000bN\fN\u0397\u0001N\u0001N\u0001O\u0001O\u0001P\u0001P\u0001"+ + "P\u0001P\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001"+ + "S\u0001S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001T\u0001"+ + "U\u0001U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001"+ + "W\u0001W\u0001X\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001"+ + "Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001"+ + 
"\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001^\u0001"+ + "^\u0001^\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001_\u0003_\u03e5"+ + "\b_\u0001`\u0004`\u03e8\b`\u000b`\f`\u03e9\u0001a\u0001a\u0001a\u0001"+ + "a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001c\u0001c\u0001c\u0001d\u0001"+ + "d\u0001d\u0001d\u0001e\u0001e\u0001e\u0001e\u0001f\u0001f\u0001f\u0001"+ + "f\u0001f\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001h\u0001"+ + "i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001"+ + "k\u0001k\u0003k\u0419\bk\u0001l\u0001l\u0003l\u041d\bl\u0001l\u0005l\u0420"+ + "\bl\nl\fl\u0423\tl\u0001l\u0001l\u0003l\u0427\bl\u0001l\u0004l\u042a\b"+ + "l\u000bl\fl\u042b\u0003l\u042e\bl\u0001m\u0001m\u0004m\u0432\bm\u000b"+ + "m\fm\u0433\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001"+ + "p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001q\u0001r\u0001"+ + "r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001"+ + "t\u0001u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001v\u0001w\u0001"+ + "w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001y\u0001y\u0001"+ + "z\u0001z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001"+ + "|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001"+ + "~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001"+ + "\u007f\u0001\u007f\u0001\u007f\u0001\u0080\u0001\u0080\u0001\u0081\u0004"+ + "\u0081\u0487\b\u0081\u000b\u0081\f\u0081\u0488\u0001\u0081\u0001\u0081"+ + "\u0003\u0081\u048d\b\u0081\u0001\u0081\u0004\u0081\u0490\b\u0081\u000b"+ + "\u0081\f\u0081\u0491\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001"+ + "\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001"+ + "\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001"+ + "\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001"+ + "\u0087\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001"+ + 
"\u0088\u0001\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001"+ + "\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001"+ + "\u008b\u0001\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001"+ + "\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001"+ + "\u008e\u0001\u008e\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001"+ + "\u0090\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001"+ + "\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001"+ + "\u0092\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001"+ + "\u0094\u0001\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001"+ + "\u0095\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001"+ + "\u0097\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098\u0001"+ + "\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u009a\u0001"+ + "\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b\u0001\u009b\u0001"+ + "\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001"+ + "\u009c\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009e\u0001"+ + "\u009e\u0001\u009e\u0001\u009e\u0001\u009f\u0001\u009f\u0001\u009f\u0001"+ + "\u009f\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001"+ + "\u00a1\u0001\u00a1\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001"+ + "\u00a2\u0004\u00a2\u051d\b\u00a2\u000b\u00a2\f\u00a2\u051e\u0001\u00a3"+ + "\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a4\u0001\u00a4\u0001\u00a4"+ + "\u0001\u00a4\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6"+ + "\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7"+ + "\u0001\u00a7\u0001\u00a7\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a8"+ + "\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00aa\u0001\u00aa"+ + "\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00ab\u0001\u00ab\u0001\u00ab"+ + 
"\u0001\u00ab\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ad"+ + "\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ae\u0001\u00ae\u0001\u00ae"+ + "\u0001\u00ae\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00b0"+ + "\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b1"+ + "\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b2\u0001\u00b2\u0001\u00b2"+ + "\u0001\u00b2\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b4"+ + "\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b5\u0001\u00b5\u0001\u00b5"+ + "\u0001\u00b5\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b7"+ + "\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b8\u0001\u00b8"+ + "\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b9\u0001\u00b9"+ + "\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba"+ + "\u0001\u00ba\u0001\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb"+ + "\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd"+ + "\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be"+ + "\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00bf\u0001\u00bf"+ + "\u0001\u00bf\u0001\u00bf\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0"+ + "\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c2\u0001\u00c2"+ + "\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c3\u0001\u00c3"+ + "\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c4\u0001\u00c4"+ + "\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001\u00c5\u0001\u00c5"+ + "\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0002\u025f\u02a4\u0000\u00c6\u000f"+ + "\u0001\u0011\u0002\u0013\u0003\u0015\u0004\u0017\u0005\u0019\u0006\u001b"+ + "\u0007\u001d\b\u001f\t!\n#\u000b%\f\'\r)\u000e+\u000f-\u0010/\u00111\u0012"+ + "3\u00135\u00147\u00159\u0016;\u0017=\u0018?\u0000A\u0000C\u0000E\u0000"+ "G\u0000I\u0000K\u0000M\u0000O\u0000Q\u0000S\u0019U\u001aW\u001bY\u001c"+ "[\u001d]\u001e_\u001fa 
c!e\"g#i$k%m&o\'q(s)u*w+y,{-}.\u007f/\u00810\u0083"+ "1\u00852\u00873\u00894\u008b5\u008d6\u008f7\u00918\u00939\u0095:\u0097"+ - ";\u0099<\u009b=\u009d>\u009f?\u00a1@\u00a3A\u00a5B\u00a7C\u00a9\u0000"+ - "\u00abD\u00adE\u00afF\u00b1G\u00b3\u0000\u00b5\u0000\u00b7H\u00b9I\u00bb"+ - "J\u00bd\u0000\u00bf\u0000\u00c1\u0000\u00c3\u0000\u00c5\u0000\u00c7\u0000"+ - "\u00c9K\u00cb\u0000\u00cdL\u00cf\u0000\u00d1\u0000\u00d3M\u00d5N\u00d7"+ - "O\u00d9\u0000\u00db\u0000\u00dd\u0000\u00df\u0000\u00e1\u0000\u00e3\u0000"+ - "\u00e5\u0000\u00e7P\u00e9Q\u00ebR\u00edS\u00ef\u0000\u00f1\u0000\u00f3"+ - "\u0000\u00f5\u0000\u00f7\u0000\u00f9\u0000\u00fbT\u00fd\u0000\u00ffU\u0101"+ - "V\u0103W\u0105\u0000\u0107\u0000\u0109X\u010bY\u010d\u0000\u010fZ\u0111"+ - "\u0000\u0113[\u0115\\\u0117]\u0119\u0000\u011b\u0000\u011d\u0000\u011f"+ - "\u0000\u0121\u0000\u0123\u0000\u0125\u0000\u0127\u0000\u0129\u0000\u012b"+ - "^\u012d_\u012f`\u0131\u0000\u0133\u0000\u0135\u0000\u0137\u0000\u0139"+ - "\u0000\u013b\u0000\u013da\u013fb\u0141c\u0143\u0000\u0145d\u0147e\u0149"+ - "f\u014bg\u014d\u0000\u014fh\u0151i\u0153j\u0155k\u0157l\u0159\u0000\u015b"+ - "\u0000\u015d\u0000\u015f\u0000\u0161\u0000\u0163\u0000\u0165\u0000\u0167"+ - "m\u0169n\u016bo\u016d\u0000\u016f\u0000\u0171\u0000\u0173\u0000\u0175"+ - "p\u0177q\u0179r\u017b\u0000\u017d\u0000\u017f\u0000\u0181s\u0183t\u0185"+ - "u\u0187\u0000\u0189\u0000\u018bv\u018dw\u018fx\u0191\u0000\u0193\u0000"+ - "\u0195\u0000\u0197\u0000\u000f\u0000\u0001\u0002\u0003\u0004\u0005\u0006"+ - "\u0007\b\t\n\u000b\f\r\u000e#\u0002\u0000DDdd\u0002\u0000IIii\u0002\u0000"+ - "SSss\u0002\u0000EEee\u0002\u0000CCcc\u0002\u0000TTtt\u0002\u0000RRrr\u0002"+ - "\u0000OOoo\u0002\u0000PPpp\u0002\u0000NNnn\u0002\u0000HHhh\u0002\u0000"+ - "VVvv\u0002\u0000AAaa\u0002\u0000LLll\u0002\u0000XXxx\u0002\u0000FFff\u0002"+ - "\u0000MMmm\u0002\u0000GGgg\u0002\u0000KKkk\u0002\u0000WWww\u0002\u0000"+ - "UUuu\u0006\u0000\t\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r"+ - "\r 
\u0001\u000009\u0002\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004\u0000"+ - "\n\n\r\r\"\"\\\\\u0002\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002\u0000"+ - "YYyy\u000b\u0000\t\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u000b\u0000"+ - "\t\n\r\r \"#,,//::<<>?\\\\||\u05d5\u0000\u000f\u0001\u0000\u0000\u0000"+ - "\u0000\u0011\u0001\u0000\u0000\u0000\u0000\u0013\u0001\u0000\u0000\u0000"+ - "\u0000\u0015\u0001\u0000\u0000\u0000\u0000\u0017\u0001\u0000\u0000\u0000"+ - "\u0000\u0019\u0001\u0000\u0000\u0000\u0000\u001b\u0001\u0000\u0000\u0000"+ - "\u0000\u001d\u0001\u0000\u0000\u0000\u0000\u001f\u0001\u0000\u0000\u0000"+ - "\u0000!\u0001\u0000\u0000\u0000\u0000#\u0001\u0000\u0000\u0000\u0000%"+ - "\u0001\u0000\u0000\u0000\u0000\'\u0001\u0000\u0000\u0000\u0000)\u0001"+ - "\u0000\u0000\u0000\u0000+\u0001\u0000\u0000\u0000\u0000-\u0001\u0000\u0000"+ - "\u0000\u0000/\u0001\u0000\u0000\u0000\u00001\u0001\u0000\u0000\u0000\u0000"+ - "3\u0001\u0000\u0000\u0000\u00005\u0001\u0000\u0000\u0000\u00007\u0001"+ - "\u0000\u0000\u0000\u00009\u0001\u0000\u0000\u0000\u0000;\u0001\u0000\u0000"+ - "\u0000\u0001=\u0001\u0000\u0000\u0000\u0001S\u0001\u0000\u0000\u0000\u0001"+ - "U\u0001\u0000\u0000\u0000\u0001W\u0001\u0000\u0000\u0000\u0001Y\u0001"+ - "\u0000\u0000\u0000\u0001[\u0001\u0000\u0000\u0000\u0001]\u0001\u0000\u0000"+ - "\u0000\u0001_\u0001\u0000\u0000\u0000\u0001a\u0001\u0000\u0000\u0000\u0001"+ - "c\u0001\u0000\u0000\u0000\u0001e\u0001\u0000\u0000\u0000\u0001g\u0001"+ - "\u0000\u0000\u0000\u0001i\u0001\u0000\u0000\u0000\u0001k\u0001\u0000\u0000"+ - "\u0000\u0001m\u0001\u0000\u0000\u0000\u0001o\u0001\u0000\u0000\u0000\u0001"+ - "q\u0001\u0000\u0000\u0000\u0001s\u0001\u0000\u0000\u0000\u0001u\u0001"+ - "\u0000\u0000\u0000\u0001w\u0001\u0000\u0000\u0000\u0001y\u0001\u0000\u0000"+ - "\u0000\u0001{\u0001\u0000\u0000\u0000\u0001}\u0001\u0000\u0000\u0000\u0001"+ - "\u007f\u0001\u0000\u0000\u0000\u0001\u0081\u0001\u0000\u0000\u0000\u0001"+ - 
"\u0083\u0001\u0000\u0000\u0000\u0001\u0085\u0001\u0000\u0000\u0000\u0001"+ - "\u0087\u0001\u0000\u0000\u0000\u0001\u0089\u0001\u0000\u0000\u0000\u0001"+ - "\u008b\u0001\u0000\u0000\u0000\u0001\u008d\u0001\u0000\u0000\u0000\u0001"+ - "\u008f\u0001\u0000\u0000\u0000\u0001\u0091\u0001\u0000\u0000\u0000\u0001"+ - "\u0093\u0001\u0000\u0000\u0000\u0001\u0095\u0001\u0000\u0000\u0000\u0001"+ - "\u0097\u0001\u0000\u0000\u0000\u0001\u0099\u0001\u0000\u0000\u0000\u0001"+ - "\u009b\u0001\u0000\u0000\u0000\u0001\u009d\u0001\u0000\u0000\u0000\u0001"+ - "\u009f\u0001\u0000\u0000\u0000\u0001\u00a1\u0001\u0000\u0000\u0000\u0001"+ - "\u00a3\u0001\u0000\u0000\u0000\u0001\u00a5\u0001\u0000\u0000\u0000\u0001"+ - "\u00a7\u0001\u0000\u0000\u0000\u0001\u00ab\u0001\u0000\u0000\u0000\u0001"+ - "\u00ad\u0001\u0000\u0000\u0000\u0001\u00af\u0001\u0000\u0000\u0000\u0001"+ - "\u00b1\u0001\u0000\u0000\u0000\u0002\u00b3\u0001\u0000\u0000\u0000\u0002"+ - "\u00b5\u0001\u0000\u0000\u0000\u0002\u00b7\u0001\u0000\u0000\u0000\u0002"+ - "\u00b9\u0001\u0000\u0000\u0000\u0002\u00bb\u0001\u0000\u0000\u0000\u0003"+ - "\u00bd\u0001\u0000\u0000\u0000\u0003\u00bf\u0001\u0000\u0000\u0000\u0003"+ - "\u00c1\u0001\u0000\u0000\u0000\u0003\u00c3\u0001\u0000\u0000\u0000\u0003"+ - "\u00c5\u0001\u0000\u0000\u0000\u0003\u00c7\u0001\u0000\u0000\u0000\u0003"+ - "\u00c9\u0001\u0000\u0000\u0000\u0003\u00cd\u0001\u0000\u0000\u0000\u0003"+ - "\u00cf\u0001\u0000\u0000\u0000\u0003\u00d1\u0001\u0000\u0000\u0000\u0003"+ - "\u00d3\u0001\u0000\u0000\u0000\u0003\u00d5\u0001\u0000\u0000\u0000\u0003"+ - "\u00d7\u0001\u0000\u0000\u0000\u0004\u00d9\u0001\u0000\u0000\u0000\u0004"+ - "\u00db\u0001\u0000\u0000\u0000\u0004\u00dd\u0001\u0000\u0000\u0000\u0004"+ - "\u00df\u0001\u0000\u0000\u0000\u0004\u00e1\u0001\u0000\u0000\u0000\u0004"+ - "\u00e7\u0001\u0000\u0000\u0000\u0004\u00e9\u0001\u0000\u0000\u0000\u0004"+ - "\u00eb\u0001\u0000\u0000\u0000\u0004\u00ed\u0001\u0000\u0000\u0000\u0005"+ - 
"\u00ef\u0001\u0000\u0000\u0000\u0005\u00f1\u0001\u0000\u0000\u0000\u0005"+ - "\u00f3\u0001\u0000\u0000\u0000\u0005\u00f5\u0001\u0000\u0000\u0000\u0005"+ - "\u00f7\u0001\u0000\u0000\u0000\u0005\u00f9\u0001\u0000\u0000\u0000\u0005"+ - "\u00fb\u0001\u0000\u0000\u0000\u0005\u00fd\u0001\u0000\u0000\u0000\u0005"+ - "\u00ff\u0001\u0000\u0000\u0000\u0005\u0101\u0001\u0000\u0000\u0000\u0005"+ - "\u0103\u0001\u0000\u0000\u0000\u0006\u0105\u0001\u0000\u0000\u0000\u0006"+ - "\u0107\u0001\u0000\u0000\u0000\u0006\u0109\u0001\u0000\u0000\u0000\u0006"+ - "\u010b\u0001\u0000\u0000\u0000\u0006\u010f\u0001\u0000\u0000\u0000\u0006"+ - "\u0111\u0001\u0000\u0000\u0000\u0006\u0113\u0001\u0000\u0000\u0000\u0006"+ - "\u0115\u0001\u0000\u0000\u0000\u0006\u0117\u0001\u0000\u0000\u0000\u0007"+ - "\u0119\u0001\u0000\u0000\u0000\u0007\u011b\u0001\u0000\u0000\u0000\u0007"+ - "\u011d\u0001\u0000\u0000\u0000\u0007\u011f\u0001\u0000\u0000\u0000\u0007"+ - "\u0121\u0001\u0000\u0000\u0000\u0007\u0123\u0001\u0000\u0000\u0000\u0007"+ - "\u0125\u0001\u0000\u0000\u0000\u0007\u0127\u0001\u0000\u0000\u0000\u0007"+ - "\u0129\u0001\u0000\u0000\u0000\u0007\u012b\u0001\u0000\u0000\u0000\u0007"+ - "\u012d\u0001\u0000\u0000\u0000\u0007\u012f\u0001\u0000\u0000\u0000\b\u0131"+ - "\u0001\u0000\u0000\u0000\b\u0133\u0001\u0000\u0000\u0000\b\u0135\u0001"+ - "\u0000\u0000\u0000\b\u0137\u0001\u0000\u0000\u0000\b\u0139\u0001\u0000"+ - "\u0000\u0000\b\u013b\u0001\u0000\u0000\u0000\b\u013d\u0001\u0000\u0000"+ - "\u0000\b\u013f\u0001\u0000\u0000\u0000\b\u0141\u0001\u0000\u0000\u0000"+ - "\t\u0143\u0001\u0000\u0000\u0000\t\u0145\u0001\u0000\u0000\u0000\t\u0147"+ - "\u0001\u0000\u0000\u0000\t\u0149\u0001\u0000\u0000\u0000\t\u014b\u0001"+ - "\u0000\u0000\u0000\n\u014d\u0001\u0000\u0000\u0000\n\u014f\u0001\u0000"+ - "\u0000\u0000\n\u0151\u0001\u0000\u0000\u0000\n\u0153\u0001\u0000\u0000"+ - "\u0000\n\u0155\u0001\u0000\u0000\u0000\n\u0157\u0001\u0000\u0000\u0000"+ - 
"\u000b\u0159\u0001\u0000\u0000\u0000\u000b\u015b\u0001\u0000\u0000\u0000"+ - "\u000b\u015d\u0001\u0000\u0000\u0000\u000b\u015f\u0001\u0000\u0000\u0000"+ - "\u000b\u0161\u0001\u0000\u0000\u0000\u000b\u0163\u0001\u0000\u0000\u0000"+ - "\u000b\u0165\u0001\u0000\u0000\u0000\u000b\u0167\u0001\u0000\u0000\u0000"+ - "\u000b\u0169\u0001\u0000\u0000\u0000\u000b\u016b\u0001\u0000\u0000\u0000"+ - "\f\u016d\u0001\u0000\u0000\u0000\f\u016f\u0001\u0000\u0000\u0000\f\u0171"+ - "\u0001\u0000\u0000\u0000\f\u0173\u0001\u0000\u0000\u0000\f\u0175\u0001"+ - "\u0000\u0000\u0000\f\u0177\u0001\u0000\u0000\u0000\f\u0179\u0001\u0000"+ - "\u0000\u0000\r\u017b\u0001\u0000\u0000\u0000\r\u017d\u0001\u0000\u0000"+ - "\u0000\r\u017f\u0001\u0000\u0000\u0000\r\u0181\u0001\u0000\u0000\u0000"+ - "\r\u0183\u0001\u0000\u0000\u0000\r\u0185\u0001\u0000\u0000\u0000\u000e"+ - "\u0187\u0001\u0000\u0000\u0000\u000e\u0189\u0001\u0000\u0000\u0000\u000e"+ - "\u018b\u0001\u0000\u0000\u0000\u000e\u018d\u0001\u0000\u0000\u0000\u000e"+ - "\u018f\u0001\u0000\u0000\u0000\u000e\u0191\u0001\u0000\u0000\u0000\u000e"+ - "\u0193\u0001\u0000\u0000\u0000\u000e\u0195\u0001\u0000\u0000\u0000\u000e"+ - "\u0197\u0001\u0000\u0000\u0000\u000f\u0199\u0001\u0000\u0000\u0000\u0011"+ - "\u01a3\u0001\u0000\u0000\u0000\u0013\u01aa\u0001\u0000\u0000\u0000\u0015"+ - "\u01b3\u0001\u0000\u0000\u0000\u0017\u01ba\u0001\u0000\u0000\u0000\u0019"+ - "\u01c4\u0001\u0000\u0000\u0000\u001b\u01cb\u0001\u0000\u0000\u0000\u001d"+ - "\u01d2\u0001\u0000\u0000\u0000\u001f\u01d9\u0001\u0000\u0000\u0000!\u01e1"+ - "\u0001\u0000\u0000\u0000#\u01ed\u0001\u0000\u0000\u0000%\u01f6\u0001\u0000"+ - "\u0000\u0000\'\u01fc\u0001\u0000\u0000\u0000)\u0203\u0001\u0000\u0000"+ - "\u0000+\u020a\u0001\u0000\u0000\u0000-\u0212\u0001\u0000\u0000\u0000/"+ - "\u021a\u0001\u0000\u0000\u00001\u0229\u0001\u0000\u0000\u00003\u0233\u0001"+ - "\u0000\u0000\u00005\u023f\u0001\u0000\u0000\u00007\u0245\u0001\u0000\u0000"+ - 
"\u00009\u0256\u0001\u0000\u0000\u0000;\u0266\u0001\u0000\u0000\u0000="+ - "\u026c\u0001\u0000\u0000\u0000?\u0270\u0001\u0000\u0000\u0000A\u0272\u0001"+ - "\u0000\u0000\u0000C\u0274\u0001\u0000\u0000\u0000E\u0277\u0001\u0000\u0000"+ - "\u0000G\u0279\u0001\u0000\u0000\u0000I\u0282\u0001\u0000\u0000\u0000K"+ - "\u0284\u0001\u0000\u0000\u0000M\u0289\u0001\u0000\u0000\u0000O\u028b\u0001"+ - "\u0000\u0000\u0000Q\u0290\u0001\u0000\u0000\u0000S\u02af\u0001\u0000\u0000"+ - "\u0000U\u02b2\u0001\u0000\u0000\u0000W\u02e0\u0001\u0000\u0000\u0000Y"+ - "\u02e2\u0001\u0000\u0000\u0000[\u02e5\u0001\u0000\u0000\u0000]\u02e9\u0001"+ - "\u0000\u0000\u0000_\u02ed\u0001\u0000\u0000\u0000a\u02ef\u0001\u0000\u0000"+ - "\u0000c\u02f2\u0001\u0000\u0000\u0000e\u02f4\u0001\u0000\u0000\u0000g"+ - "\u02f9\u0001\u0000\u0000\u0000i\u02fb\u0001\u0000\u0000\u0000k\u0301\u0001"+ - "\u0000\u0000\u0000m\u0307\u0001\u0000\u0000\u0000o\u030a\u0001\u0000\u0000"+ - "\u0000q\u030d\u0001\u0000\u0000\u0000s\u0312\u0001\u0000\u0000\u0000u"+ - "\u0317\u0001\u0000\u0000\u0000w\u0319\u0001\u0000\u0000\u0000y\u031d\u0001"+ - "\u0000\u0000\u0000{\u0322\u0001\u0000\u0000\u0000}\u0328\u0001\u0000\u0000"+ - "\u0000\u007f\u032b\u0001\u0000\u0000\u0000\u0081\u032d\u0001\u0000\u0000"+ - "\u0000\u0083\u0333\u0001\u0000\u0000\u0000\u0085\u0335\u0001\u0000\u0000"+ - "\u0000\u0087\u033a\u0001\u0000\u0000\u0000\u0089\u033d\u0001\u0000\u0000"+ - "\u0000\u008b\u0340\u0001\u0000\u0000\u0000\u008d\u0343\u0001\u0000\u0000"+ - "\u0000\u008f\u0345\u0001\u0000\u0000\u0000\u0091\u0348\u0001\u0000\u0000"+ - "\u0000\u0093\u034a\u0001\u0000\u0000\u0000\u0095\u034d\u0001\u0000\u0000"+ - "\u0000\u0097\u034f\u0001\u0000\u0000\u0000\u0099\u0351\u0001\u0000\u0000"+ - "\u0000\u009b\u0353\u0001\u0000\u0000\u0000\u009d\u0355\u0001\u0000\u0000"+ - "\u0000\u009f\u0357\u0001\u0000\u0000\u0000\u00a1\u036e\u0001\u0000\u0000"+ - "\u0000\u00a3\u0370\u0001\u0000\u0000\u0000\u00a5\u0375\u0001\u0000\u0000"+ - 
"\u0000\u00a7\u038a\u0001\u0000\u0000\u0000\u00a9\u038c\u0001\u0000\u0000"+ - "\u0000\u00ab\u0394\u0001\u0000\u0000\u0000\u00ad\u0396\u0001\u0000\u0000"+ - "\u0000\u00af\u039a\u0001\u0000\u0000\u0000\u00b1\u039e\u0001\u0000\u0000"+ - "\u0000\u00b3\u03a2\u0001\u0000\u0000\u0000\u00b5\u03a7\u0001\u0000\u0000"+ - "\u0000\u00b7\u03ac\u0001\u0000\u0000\u0000\u00b9\u03b0\u0001\u0000\u0000"+ - "\u0000\u00bb\u03b4\u0001\u0000\u0000\u0000\u00bd\u03b8\u0001\u0000\u0000"+ - "\u0000\u00bf\u03bd\u0001\u0000\u0000\u0000\u00c1\u03c1\u0001\u0000\u0000"+ - "\u0000\u00c3\u03c5\u0001\u0000\u0000\u0000\u00c5\u03c9\u0001\u0000\u0000"+ - "\u0000\u00c7\u03cd\u0001\u0000\u0000\u0000\u00c9\u03d1\u0001\u0000\u0000"+ - "\u0000\u00cb\u03dd\u0001\u0000\u0000\u0000\u00cd\u03e0\u0001\u0000\u0000"+ - "\u0000\u00cf\u03e4\u0001\u0000\u0000\u0000\u00d1\u03e8\u0001\u0000\u0000"+ - "\u0000\u00d3\u03ec\u0001\u0000\u0000\u0000\u00d5\u03f0\u0001\u0000\u0000"+ - "\u0000\u00d7\u03f4\u0001\u0000\u0000\u0000\u00d9\u03f8\u0001\u0000\u0000"+ - "\u0000\u00db\u03fd\u0001\u0000\u0000\u0000\u00dd\u0401\u0001\u0000\u0000"+ - "\u0000\u00df\u0405\u0001\u0000\u0000\u0000\u00e1\u0409\u0001\u0000\u0000"+ - "\u0000\u00e3\u0411\u0001\u0000\u0000\u0000\u00e5\u0426\u0001\u0000\u0000"+ - "\u0000\u00e7\u042a\u0001\u0000\u0000\u0000\u00e9\u042e\u0001\u0000\u0000"+ - "\u0000\u00eb\u0432\u0001\u0000\u0000\u0000\u00ed\u0436\u0001\u0000\u0000"+ - "\u0000\u00ef\u043a\u0001\u0000\u0000\u0000\u00f1\u043f\u0001\u0000\u0000"+ - "\u0000\u00f3\u0443\u0001\u0000\u0000\u0000\u00f5\u0447\u0001\u0000\u0000"+ - "\u0000\u00f7\u044b\u0001\u0000\u0000\u0000\u00f9\u044f\u0001\u0000\u0000"+ - "\u0000\u00fb\u0453\u0001\u0000\u0000\u0000\u00fd\u0456\u0001\u0000\u0000"+ - "\u0000\u00ff\u045a\u0001\u0000\u0000\u0000\u0101\u045e\u0001\u0000\u0000"+ - "\u0000\u0103\u0462\u0001\u0000\u0000\u0000\u0105\u0466\u0001\u0000\u0000"+ - "\u0000\u0107\u046b\u0001\u0000\u0000\u0000\u0109\u0470\u0001\u0000\u0000"+ - 
"\u0000\u010b\u0475\u0001\u0000\u0000\u0000\u010d\u047c\u0001\u0000\u0000"+ - "\u0000\u010f\u0485\u0001\u0000\u0000\u0000\u0111\u048c\u0001\u0000\u0000"+ - "\u0000\u0113\u0490\u0001\u0000\u0000\u0000\u0115\u0494\u0001\u0000\u0000"+ - "\u0000\u0117\u0498\u0001\u0000\u0000\u0000\u0119\u049c\u0001\u0000\u0000"+ - "\u0000\u011b\u04a2\u0001\u0000\u0000\u0000\u011d\u04a6\u0001\u0000\u0000"+ - "\u0000\u011f\u04aa\u0001\u0000\u0000\u0000\u0121\u04ae\u0001\u0000\u0000"+ - "\u0000\u0123\u04b2\u0001\u0000\u0000\u0000\u0125\u04b6\u0001\u0000\u0000"+ - "\u0000\u0127\u04ba\u0001\u0000\u0000\u0000\u0129\u04be\u0001\u0000\u0000"+ - "\u0000\u012b\u04c2\u0001\u0000\u0000\u0000\u012d\u04c6\u0001\u0000\u0000"+ - "\u0000\u012f\u04ca\u0001\u0000\u0000\u0000\u0131\u04ce\u0001\u0000\u0000"+ - "\u0000\u0133\u04d3\u0001\u0000\u0000\u0000\u0135\u04d7\u0001\u0000\u0000"+ - "\u0000\u0137\u04db\u0001\u0000\u0000\u0000\u0139\u04df\u0001\u0000\u0000"+ - "\u0000\u013b\u04e3\u0001\u0000\u0000\u0000\u013d\u04e7\u0001\u0000\u0000"+ - "\u0000\u013f\u04eb\u0001\u0000\u0000\u0000\u0141\u04ef\u0001\u0000\u0000"+ - "\u0000\u0143\u04f3\u0001\u0000\u0000\u0000\u0145\u04f8\u0001\u0000\u0000"+ - "\u0000\u0147\u04fd\u0001\u0000\u0000\u0000\u0149\u0501\u0001\u0000\u0000"+ - "\u0000\u014b\u0505\u0001\u0000\u0000\u0000\u014d\u0509\u0001\u0000\u0000"+ - "\u0000\u014f\u050e\u0001\u0000\u0000\u0000\u0151\u0515\u0001\u0000\u0000"+ - "\u0000\u0153\u0519\u0001\u0000\u0000\u0000\u0155\u051d\u0001\u0000\u0000"+ - "\u0000\u0157\u0521\u0001\u0000\u0000\u0000\u0159\u0525\u0001\u0000\u0000"+ - "\u0000\u015b\u052a\u0001\u0000\u0000\u0000\u015d\u052e\u0001\u0000\u0000"+ - "\u0000\u015f\u0532\u0001\u0000\u0000\u0000\u0161\u0536\u0001\u0000\u0000"+ - "\u0000\u0163\u053b\u0001\u0000\u0000\u0000\u0165\u053f\u0001\u0000\u0000"+ - "\u0000\u0167\u0543\u0001\u0000\u0000\u0000\u0169\u0547\u0001\u0000\u0000"+ - "\u0000\u016b\u054b\u0001\u0000\u0000\u0000\u016d\u054f\u0001\u0000\u0000"+ - 
"\u0000\u016f\u0555\u0001\u0000\u0000\u0000\u0171\u0559\u0001\u0000\u0000"+ - "\u0000\u0173\u055d\u0001\u0000\u0000\u0000\u0175\u0561\u0001\u0000\u0000"+ - "\u0000\u0177\u0565\u0001\u0000\u0000\u0000\u0179\u0569\u0001\u0000\u0000"+ - "\u0000\u017b\u056d\u0001\u0000\u0000\u0000\u017d\u0572\u0001\u0000\u0000"+ - "\u0000\u017f\u0578\u0001\u0000\u0000\u0000\u0181\u057e\u0001\u0000\u0000"+ - "\u0000\u0183\u0582\u0001\u0000\u0000\u0000\u0185\u0586\u0001\u0000\u0000"+ - "\u0000\u0187\u058a\u0001\u0000\u0000\u0000\u0189\u0590\u0001\u0000\u0000"+ - "\u0000\u018b\u0596\u0001\u0000\u0000\u0000\u018d\u059a\u0001\u0000\u0000"+ - "\u0000\u018f\u059e\u0001\u0000\u0000\u0000\u0191\u05a2\u0001\u0000\u0000"+ - "\u0000\u0193\u05a8\u0001\u0000\u0000\u0000\u0195\u05ae\u0001\u0000\u0000"+ - "\u0000\u0197\u05b4\u0001\u0000\u0000\u0000\u0199\u019a\u0007\u0000\u0000"+ - "\u0000\u019a\u019b\u0007\u0001\u0000\u0000\u019b\u019c\u0007\u0002\u0000"+ - "\u0000\u019c\u019d\u0007\u0002\u0000\u0000\u019d\u019e\u0007\u0003\u0000"+ - "\u0000\u019e\u019f\u0007\u0004\u0000\u0000\u019f\u01a0\u0007\u0005\u0000"+ - "\u0000\u01a0\u01a1\u0001\u0000\u0000\u0000\u01a1\u01a2\u0006\u0000\u0000"+ - "\u0000\u01a2\u0010\u0001\u0000\u0000\u0000\u01a3\u01a4\u0007\u0000\u0000"+ - "\u0000\u01a4\u01a5\u0007\u0006\u0000\u0000\u01a5\u01a6\u0007\u0007\u0000"+ - "\u0000\u01a6\u01a7\u0007\b\u0000\u0000\u01a7\u01a8\u0001\u0000\u0000\u0000"+ - "\u01a8\u01a9\u0006\u0001\u0001\u0000\u01a9\u0012\u0001\u0000\u0000\u0000"+ - "\u01aa\u01ab\u0007\u0003\u0000\u0000\u01ab\u01ac\u0007\t\u0000\u0000\u01ac"+ - "\u01ad\u0007\u0006\u0000\u0000\u01ad\u01ae\u0007\u0001\u0000\u0000\u01ae"+ - "\u01af\u0007\u0004\u0000\u0000\u01af\u01b0\u0007\n\u0000\u0000\u01b0\u01b1"+ - "\u0001\u0000\u0000\u0000\u01b1\u01b2\u0006\u0002\u0002\u0000\u01b2\u0014"+ - "\u0001\u0000\u0000\u0000\u01b3\u01b4\u0007\u0003\u0000\u0000\u01b4\u01b5"+ - "\u0007\u000b\u0000\u0000\u01b5\u01b6\u0007\f\u0000\u0000\u01b6\u01b7\u0007"+ - 
"\r\u0000\u0000\u01b7\u01b8\u0001\u0000\u0000\u0000\u01b8\u01b9\u0006\u0003"+ - "\u0000\u0000\u01b9\u0016\u0001\u0000\u0000\u0000\u01ba\u01bb\u0007\u0003"+ - "\u0000\u0000\u01bb\u01bc\u0007\u000e\u0000\u0000\u01bc\u01bd\u0007\b\u0000"+ - "\u0000\u01bd\u01be\u0007\r\u0000\u0000\u01be\u01bf\u0007\f\u0000\u0000"+ - "\u01bf\u01c0\u0007\u0001\u0000\u0000\u01c0\u01c1\u0007\t\u0000\u0000\u01c1"+ - "\u01c2\u0001\u0000\u0000\u0000\u01c2\u01c3\u0006\u0004\u0003\u0000\u01c3"+ - "\u0018\u0001\u0000\u0000\u0000\u01c4\u01c5\u0007\u000f\u0000\u0000\u01c5"+ - "\u01c6\u0007\u0006\u0000\u0000\u01c6\u01c7\u0007\u0007\u0000\u0000\u01c7"+ - "\u01c8\u0007\u0010\u0000\u0000\u01c8\u01c9\u0001\u0000\u0000\u0000\u01c9"+ - "\u01ca\u0006\u0005\u0004\u0000\u01ca\u001a\u0001\u0000\u0000\u0000\u01cb"+ - "\u01cc\u0007\u0011\u0000\u0000\u01cc\u01cd\u0007\u0006\u0000\u0000\u01cd"+ - "\u01ce\u0007\u0007\u0000\u0000\u01ce\u01cf\u0007\u0012\u0000\u0000\u01cf"+ - "\u01d0\u0001\u0000\u0000\u0000\u01d0\u01d1\u0006\u0006\u0000\u0000\u01d1"+ - "\u001c\u0001\u0000\u0000\u0000\u01d2\u01d3\u0007\u0012\u0000\u0000\u01d3"+ - "\u01d4\u0007\u0003\u0000\u0000\u01d4\u01d5\u0007\u0003\u0000\u0000\u01d5"+ - "\u01d6\u0007\b\u0000\u0000\u01d6\u01d7\u0001\u0000\u0000\u0000\u01d7\u01d8"+ - "\u0006\u0007\u0001\u0000\u01d8\u001e\u0001\u0000\u0000\u0000\u01d9\u01da"+ - "\u0007\r\u0000\u0000\u01da\u01db\u0007\u0001\u0000\u0000\u01db\u01dc\u0007"+ - "\u0010\u0000\u0000\u01dc\u01dd\u0007\u0001\u0000\u0000\u01dd\u01de\u0007"+ - "\u0005\u0000\u0000\u01de\u01df\u0001\u0000\u0000\u0000\u01df\u01e0\u0006"+ - "\b\u0000\u0000\u01e0 \u0001\u0000\u0000\u0000\u01e1\u01e2\u0007\u0010"+ - "\u0000\u0000\u01e2\u01e3\u0007\u000b\u0000\u0000\u01e3\u01e4\u0005_\u0000"+ - "\u0000\u01e4\u01e5\u0007\u0003\u0000\u0000\u01e5\u01e6\u0007\u000e\u0000"+ - "\u0000\u01e6\u01e7\u0007\b\u0000\u0000\u01e7\u01e8\u0007\f\u0000\u0000"+ - "\u01e8\u01e9\u0007\t\u0000\u0000\u01e9\u01ea\u0007\u0000\u0000\u0000\u01ea"+ - 
"\u01eb\u0001\u0000\u0000\u0000\u01eb\u01ec\u0006\t\u0005\u0000\u01ec\""+ - "\u0001\u0000\u0000\u0000\u01ed\u01ee\u0007\u0006\u0000\u0000\u01ee\u01ef"+ - "\u0007\u0003\u0000\u0000\u01ef\u01f0\u0007\t\u0000\u0000\u01f0\u01f1\u0007"+ - "\f\u0000\u0000\u01f1\u01f2\u0007\u0010\u0000\u0000\u01f2\u01f3\u0007\u0003"+ - "\u0000\u0000\u01f3\u01f4\u0001\u0000\u0000\u0000\u01f4\u01f5\u0006\n\u0006"+ - "\u0000\u01f5$\u0001\u0000\u0000\u0000\u01f6\u01f7\u0007\u0006\u0000\u0000"+ - "\u01f7\u01f8\u0007\u0007\u0000\u0000\u01f8\u01f9\u0007\u0013\u0000\u0000"+ - "\u01f9\u01fa\u0001\u0000\u0000\u0000\u01fa\u01fb\u0006\u000b\u0000\u0000"+ - "\u01fb&\u0001\u0000\u0000\u0000\u01fc\u01fd\u0007\u0002\u0000\u0000\u01fd"+ - "\u01fe\u0007\n\u0000\u0000\u01fe\u01ff\u0007\u0007\u0000\u0000\u01ff\u0200"+ - "\u0007\u0013\u0000\u0000\u0200\u0201\u0001\u0000\u0000\u0000\u0201\u0202"+ - "\u0006\f\u0007\u0000\u0202(\u0001\u0000\u0000\u0000\u0203\u0204\u0007"+ - "\u0002\u0000\u0000\u0204\u0205\u0007\u0007\u0000\u0000\u0205\u0206\u0007"+ - "\u0006\u0000\u0000\u0206\u0207\u0007\u0005\u0000\u0000\u0207\u0208\u0001"+ - "\u0000\u0000\u0000\u0208\u0209\u0006\r\u0000\u0000\u0209*\u0001\u0000"+ - "\u0000\u0000\u020a\u020b\u0007\u0002\u0000\u0000\u020b\u020c\u0007\u0005"+ - "\u0000\u0000\u020c\u020d\u0007\f\u0000\u0000\u020d\u020e\u0007\u0005\u0000"+ - "\u0000\u020e\u020f\u0007\u0002\u0000\u0000\u020f\u0210\u0001\u0000\u0000"+ - "\u0000\u0210\u0211\u0006\u000e\u0000\u0000\u0211,\u0001\u0000\u0000\u0000"+ - "\u0212\u0213\u0007\u0013\u0000\u0000\u0213\u0214\u0007\n\u0000\u0000\u0214"+ - "\u0215\u0007\u0003\u0000\u0000\u0215\u0216\u0007\u0006\u0000\u0000\u0216"+ - "\u0217\u0007\u0003\u0000\u0000\u0217\u0218\u0001\u0000\u0000\u0000\u0218"+ - "\u0219\u0006\u000f\u0000\u0000\u0219.\u0001\u0000\u0000\u0000\u021a\u021b"+ - "\u0004\u0010\u0000\u0000\u021b\u021c\u0007\u0001\u0000\u0000\u021c\u021d"+ - "\u0007\t\u0000\u0000\u021d\u021e\u0007\r\u0000\u0000\u021e\u021f\u0007"+ - 
"\u0001\u0000\u0000\u021f\u0220\u0007\t\u0000\u0000\u0220\u0221\u0007\u0003"+ - "\u0000\u0000\u0221\u0222\u0007\u0002\u0000\u0000\u0222\u0223\u0007\u0005"+ - "\u0000\u0000\u0223\u0224\u0007\f\u0000\u0000\u0224\u0225\u0007\u0005\u0000"+ - "\u0000\u0225\u0226\u0007\u0002\u0000\u0000\u0226\u0227\u0001\u0000\u0000"+ - "\u0000\u0227\u0228\u0006\u0010\u0000\u0000\u02280\u0001\u0000\u0000\u0000"+ - "\u0229\u022a\u0004\u0011\u0001\u0000\u022a\u022b\u0007\r\u0000\u0000\u022b"+ - "\u022c\u0007\u0007\u0000\u0000\u022c\u022d\u0007\u0007\u0000\u0000\u022d"+ - "\u022e\u0007\u0012\u0000\u0000\u022e\u022f\u0007\u0014\u0000\u0000\u022f"+ - "\u0230\u0007\b\u0000\u0000\u0230\u0231\u0001\u0000\u0000\u0000\u0231\u0232"+ - "\u0006\u0011\b\u0000\u02322\u0001\u0000\u0000\u0000\u0233\u0234\u0004"+ - "\u0012\u0002\u0000\u0234\u0235\u0007\u0010\u0000\u0000\u0235\u0236\u0007"+ - "\u0003\u0000\u0000\u0236\u0237\u0007\u0005\u0000\u0000\u0237\u0238\u0007"+ - "\u0006\u0000\u0000\u0238\u0239\u0007\u0001\u0000\u0000\u0239\u023a\u0007"+ - "\u0004\u0000\u0000\u023a\u023b\u0007\u0002\u0000\u0000\u023b\u023c\u0001"+ - "\u0000\u0000\u0000\u023c\u023d\u0006\u0012\t\u0000\u023d4\u0001\u0000"+ - "\u0000\u0000\u023e\u0240\b\u0015\u0000\u0000\u023f\u023e\u0001\u0000\u0000"+ - "\u0000\u0240\u0241\u0001\u0000\u0000\u0000\u0241\u023f\u0001\u0000\u0000"+ - "\u0000\u0241\u0242\u0001\u0000\u0000\u0000\u0242\u0243\u0001\u0000\u0000"+ - "\u0000\u0243\u0244\u0006\u0013\u0000\u0000\u02446\u0001\u0000\u0000\u0000"+ - "\u0245\u0246\u0005/\u0000\u0000\u0246\u0247\u0005/\u0000\u0000\u0247\u024b"+ - "\u0001\u0000\u0000\u0000\u0248\u024a\b\u0016\u0000\u0000\u0249\u0248\u0001"+ - "\u0000\u0000\u0000\u024a\u024d\u0001\u0000\u0000\u0000\u024b\u0249\u0001"+ - "\u0000\u0000\u0000\u024b\u024c\u0001\u0000\u0000\u0000\u024c\u024f\u0001"+ - "\u0000\u0000\u0000\u024d\u024b\u0001\u0000\u0000\u0000\u024e\u0250\u0005"+ - "\r\u0000\u0000\u024f\u024e\u0001\u0000\u0000\u0000\u024f\u0250\u0001\u0000"+ - 
"\u0000\u0000\u0250\u0252\u0001\u0000\u0000\u0000\u0251\u0253\u0005\n\u0000"+ - "\u0000\u0252\u0251\u0001\u0000\u0000\u0000\u0252\u0253\u0001\u0000\u0000"+ - "\u0000\u0253\u0254\u0001\u0000\u0000\u0000\u0254\u0255\u0006\u0014\n\u0000"+ - "\u02558\u0001\u0000\u0000\u0000\u0256\u0257\u0005/\u0000\u0000\u0257\u0258"+ - "\u0005*\u0000\u0000\u0258\u025d\u0001\u0000\u0000\u0000\u0259\u025c\u0003"+ - "9\u0015\u0000\u025a\u025c\t\u0000\u0000\u0000\u025b\u0259\u0001\u0000"+ - "\u0000\u0000\u025b\u025a\u0001\u0000\u0000\u0000\u025c\u025f\u0001\u0000"+ - "\u0000\u0000\u025d\u025e\u0001\u0000\u0000\u0000\u025d\u025b\u0001\u0000"+ - "\u0000\u0000\u025e\u0260\u0001\u0000\u0000\u0000\u025f\u025d\u0001\u0000"+ - "\u0000\u0000\u0260\u0261\u0005*\u0000\u0000\u0261\u0262\u0005/\u0000\u0000"+ - "\u0262\u0263\u0001\u0000\u0000\u0000\u0263\u0264\u0006\u0015\n\u0000\u0264"+ - ":\u0001\u0000\u0000\u0000\u0265\u0267\u0007\u0017\u0000\u0000\u0266\u0265"+ - "\u0001\u0000\u0000\u0000\u0267\u0268\u0001\u0000\u0000\u0000\u0268\u0266"+ - "\u0001\u0000\u0000\u0000\u0268\u0269\u0001\u0000\u0000\u0000\u0269\u026a"+ - "\u0001\u0000\u0000\u0000\u026a\u026b\u0006\u0016\n\u0000\u026b<\u0001"+ - "\u0000\u0000\u0000\u026c\u026d\u0005|\u0000\u0000\u026d\u026e\u0001\u0000"+ - "\u0000\u0000\u026e\u026f\u0006\u0017\u000b\u0000\u026f>\u0001\u0000\u0000"+ - "\u0000\u0270\u0271\u0007\u0018\u0000\u0000\u0271@\u0001\u0000\u0000\u0000"+ - "\u0272\u0273\u0007\u0019\u0000\u0000\u0273B\u0001\u0000\u0000\u0000\u0274"+ - "\u0275\u0005\\\u0000\u0000\u0275\u0276\u0007\u001a\u0000\u0000\u0276D"+ - "\u0001\u0000\u0000\u0000\u0277\u0278\b\u001b\u0000\u0000\u0278F\u0001"+ - "\u0000\u0000\u0000\u0279\u027b\u0007\u0003\u0000\u0000\u027a\u027c\u0007"+ - "\u001c\u0000\u0000\u027b\u027a\u0001\u0000\u0000\u0000\u027b\u027c\u0001"+ - "\u0000\u0000\u0000\u027c\u027e\u0001\u0000\u0000\u0000\u027d\u027f\u0003"+ - "?\u0018\u0000\u027e\u027d\u0001\u0000\u0000\u0000\u027f\u0280\u0001\u0000"+ - 
"\u0000\u0000\u0280\u027e\u0001\u0000\u0000\u0000\u0280\u0281\u0001\u0000"+ - "\u0000\u0000\u0281H\u0001\u0000\u0000\u0000\u0282\u0283\u0005@\u0000\u0000"+ - "\u0283J\u0001\u0000\u0000\u0000\u0284\u0285\u0005`\u0000\u0000\u0285L"+ - "\u0001\u0000\u0000\u0000\u0286\u028a\b\u001d\u0000\u0000\u0287\u0288\u0005"+ - "`\u0000\u0000\u0288\u028a\u0005`\u0000\u0000\u0289\u0286\u0001\u0000\u0000"+ - "\u0000\u0289\u0287\u0001\u0000\u0000\u0000\u028aN\u0001\u0000\u0000\u0000"+ - "\u028b\u028c\u0005_\u0000\u0000\u028cP\u0001\u0000\u0000\u0000\u028d\u0291"+ - "\u0003A\u0019\u0000\u028e\u0291\u0003?\u0018\u0000\u028f\u0291\u0003O"+ - " \u0000\u0290\u028d\u0001\u0000\u0000\u0000\u0290\u028e\u0001\u0000\u0000"+ - "\u0000\u0290\u028f\u0001\u0000\u0000\u0000\u0291R\u0001\u0000\u0000\u0000"+ - "\u0292\u0297\u0005\"\u0000\u0000\u0293\u0296\u0003C\u001a\u0000\u0294"+ - "\u0296\u0003E\u001b\u0000\u0295\u0293\u0001\u0000\u0000\u0000\u0295\u0294"+ - "\u0001\u0000\u0000\u0000\u0296\u0299\u0001\u0000\u0000\u0000\u0297\u0295"+ - "\u0001\u0000\u0000\u0000\u0297\u0298\u0001\u0000\u0000\u0000\u0298\u029a"+ - "\u0001\u0000\u0000\u0000\u0299\u0297\u0001\u0000\u0000\u0000\u029a\u02b0"+ - "\u0005\"\u0000\u0000\u029b\u029c\u0005\"\u0000\u0000\u029c\u029d\u0005"+ - "\"\u0000\u0000\u029d\u029e\u0005\"\u0000\u0000\u029e\u02a2\u0001\u0000"+ - "\u0000\u0000\u029f\u02a1\b\u0016\u0000\u0000\u02a0\u029f\u0001\u0000\u0000"+ - "\u0000\u02a1\u02a4\u0001\u0000\u0000\u0000\u02a2\u02a3\u0001\u0000\u0000"+ - "\u0000\u02a2\u02a0\u0001\u0000\u0000\u0000\u02a3\u02a5\u0001\u0000\u0000"+ - "\u0000\u02a4\u02a2\u0001\u0000\u0000\u0000\u02a5\u02a6\u0005\"\u0000\u0000"+ - "\u02a6\u02a7\u0005\"\u0000\u0000\u02a7\u02a8\u0005\"\u0000\u0000\u02a8"+ - "\u02aa\u0001\u0000\u0000\u0000\u02a9\u02ab\u0005\"\u0000\u0000\u02aa\u02a9"+ - "\u0001\u0000\u0000\u0000\u02aa\u02ab\u0001\u0000\u0000\u0000\u02ab\u02ad"+ - "\u0001\u0000\u0000\u0000\u02ac\u02ae\u0005\"\u0000\u0000\u02ad\u02ac\u0001"+ - 
"\u0000\u0000\u0000\u02ad\u02ae\u0001\u0000\u0000\u0000\u02ae\u02b0\u0001"+ - "\u0000\u0000\u0000\u02af\u0292\u0001\u0000\u0000\u0000\u02af\u029b\u0001"+ - "\u0000\u0000\u0000\u02b0T\u0001\u0000\u0000\u0000\u02b1\u02b3\u0003?\u0018"+ - "\u0000\u02b2\u02b1\u0001\u0000\u0000\u0000\u02b3\u02b4\u0001\u0000\u0000"+ - "\u0000\u02b4\u02b2\u0001\u0000\u0000\u0000\u02b4\u02b5\u0001\u0000\u0000"+ - "\u0000\u02b5V\u0001\u0000\u0000\u0000\u02b6\u02b8\u0003?\u0018\u0000\u02b7"+ - "\u02b6\u0001\u0000\u0000\u0000\u02b8\u02b9\u0001\u0000\u0000\u0000\u02b9"+ - "\u02b7\u0001\u0000\u0000\u0000\u02b9\u02ba\u0001\u0000\u0000\u0000\u02ba"+ - "\u02bb\u0001\u0000\u0000\u0000\u02bb\u02bf\u0003g,\u0000\u02bc\u02be\u0003"+ - "?\u0018\u0000\u02bd\u02bc\u0001\u0000\u0000\u0000\u02be\u02c1\u0001\u0000"+ - "\u0000\u0000\u02bf\u02bd\u0001\u0000\u0000\u0000\u02bf\u02c0\u0001\u0000"+ - "\u0000\u0000\u02c0\u02e1\u0001\u0000\u0000\u0000\u02c1\u02bf\u0001\u0000"+ - "\u0000\u0000\u02c2\u02c4\u0003g,\u0000\u02c3\u02c5\u0003?\u0018\u0000"+ - "\u02c4\u02c3\u0001\u0000\u0000\u0000\u02c5\u02c6\u0001\u0000\u0000\u0000"+ - "\u02c6\u02c4\u0001\u0000\u0000\u0000\u02c6\u02c7\u0001\u0000\u0000\u0000"+ - "\u02c7\u02e1\u0001\u0000\u0000\u0000\u02c8\u02ca\u0003?\u0018\u0000\u02c9"+ - "\u02c8\u0001\u0000\u0000\u0000\u02ca\u02cb\u0001\u0000\u0000\u0000\u02cb"+ - "\u02c9\u0001\u0000\u0000\u0000\u02cb\u02cc\u0001\u0000\u0000\u0000\u02cc"+ - "\u02d4\u0001\u0000\u0000\u0000\u02cd\u02d1\u0003g,\u0000\u02ce\u02d0\u0003"+ - "?\u0018\u0000\u02cf\u02ce\u0001\u0000\u0000\u0000\u02d0\u02d3\u0001\u0000"+ - "\u0000\u0000\u02d1\u02cf\u0001\u0000\u0000\u0000\u02d1\u02d2\u0001\u0000"+ - "\u0000\u0000\u02d2\u02d5\u0001\u0000\u0000\u0000\u02d3\u02d1\u0001\u0000"+ - "\u0000\u0000\u02d4\u02cd\u0001\u0000\u0000\u0000\u02d4\u02d5\u0001\u0000"+ - "\u0000\u0000\u02d5\u02d6\u0001\u0000\u0000\u0000\u02d6\u02d7\u0003G\u001c"+ - "\u0000\u02d7\u02e1\u0001\u0000\u0000\u0000\u02d8\u02da\u0003g,\u0000\u02d9"+ - 
"\u02db\u0003?\u0018\u0000\u02da\u02d9\u0001\u0000\u0000\u0000\u02db\u02dc"+ - "\u0001\u0000\u0000\u0000\u02dc\u02da\u0001\u0000\u0000\u0000\u02dc\u02dd"+ - "\u0001\u0000\u0000\u0000\u02dd\u02de\u0001\u0000\u0000\u0000\u02de\u02df"+ - "\u0003G\u001c\u0000\u02df\u02e1\u0001\u0000\u0000\u0000\u02e0\u02b7\u0001"+ - "\u0000\u0000\u0000\u02e0\u02c2\u0001\u0000\u0000\u0000\u02e0\u02c9\u0001"+ - "\u0000\u0000\u0000\u02e0\u02d8\u0001\u0000\u0000\u0000\u02e1X\u0001\u0000"+ - "\u0000\u0000\u02e2\u02e3\u0007\u001e\u0000\u0000\u02e3\u02e4\u0007\u001f"+ - "\u0000\u0000\u02e4Z\u0001\u0000\u0000\u0000\u02e5\u02e6\u0007\f\u0000"+ - "\u0000\u02e6\u02e7\u0007\t\u0000\u0000\u02e7\u02e8\u0007\u0000\u0000\u0000"+ - "\u02e8\\\u0001\u0000\u0000\u0000\u02e9\u02ea\u0007\f\u0000\u0000\u02ea"+ - "\u02eb\u0007\u0002\u0000\u0000\u02eb\u02ec\u0007\u0004\u0000\u0000\u02ec"+ - "^\u0001\u0000\u0000\u0000\u02ed\u02ee\u0005=\u0000\u0000\u02ee`\u0001"+ - "\u0000\u0000\u0000\u02ef\u02f0\u0005:\u0000\u0000\u02f0\u02f1\u0005:\u0000"+ - "\u0000\u02f1b\u0001\u0000\u0000\u0000\u02f2\u02f3\u0005,\u0000\u0000\u02f3"+ - "d\u0001\u0000\u0000\u0000\u02f4\u02f5\u0007\u0000\u0000\u0000\u02f5\u02f6"+ - "\u0007\u0003\u0000\u0000\u02f6\u02f7\u0007\u0002\u0000\u0000\u02f7\u02f8"+ - "\u0007\u0004\u0000\u0000\u02f8f\u0001\u0000\u0000\u0000\u02f9\u02fa\u0005"+ - ".\u0000\u0000\u02fah\u0001\u0000\u0000\u0000\u02fb\u02fc\u0007\u000f\u0000"+ - "\u0000\u02fc\u02fd\u0007\f\u0000\u0000\u02fd\u02fe\u0007\r\u0000\u0000"+ - "\u02fe\u02ff\u0007\u0002\u0000\u0000\u02ff\u0300\u0007\u0003\u0000\u0000"+ - "\u0300j\u0001\u0000\u0000\u0000\u0301\u0302\u0007\u000f\u0000\u0000\u0302"+ - "\u0303\u0007\u0001\u0000\u0000\u0303\u0304\u0007\u0006\u0000\u0000\u0304"+ - "\u0305\u0007\u0002\u0000\u0000\u0305\u0306\u0007\u0005\u0000\u0000\u0306"+ - "l\u0001\u0000\u0000\u0000\u0307\u0308\u0007\u0001\u0000\u0000\u0308\u0309"+ - "\u0007\t\u0000\u0000\u0309n\u0001\u0000\u0000\u0000\u030a\u030b\u0007"+ - 
"\u0001\u0000\u0000\u030b\u030c\u0007\u0002\u0000\u0000\u030cp\u0001\u0000"+ - "\u0000\u0000\u030d\u030e\u0007\r\u0000\u0000\u030e\u030f\u0007\f\u0000"+ - "\u0000\u030f\u0310\u0007\u0002\u0000\u0000\u0310\u0311\u0007\u0005\u0000"+ - "\u0000\u0311r\u0001\u0000\u0000\u0000\u0312\u0313\u0007\r\u0000\u0000"+ - "\u0313\u0314\u0007\u0001\u0000\u0000\u0314\u0315\u0007\u0012\u0000\u0000"+ - "\u0315\u0316\u0007\u0003\u0000\u0000\u0316t\u0001\u0000\u0000\u0000\u0317"+ - "\u0318\u0005(\u0000\u0000\u0318v\u0001\u0000\u0000\u0000\u0319\u031a\u0007"+ - "\t\u0000\u0000\u031a\u031b\u0007\u0007\u0000\u0000\u031b\u031c\u0007\u0005"+ - "\u0000\u0000\u031cx\u0001\u0000\u0000\u0000\u031d\u031e\u0007\t\u0000"+ - "\u0000\u031e\u031f\u0007\u0014\u0000\u0000\u031f\u0320\u0007\r\u0000\u0000"+ - "\u0320\u0321\u0007\r\u0000\u0000\u0321z\u0001\u0000\u0000\u0000\u0322"+ - "\u0323\u0007\t\u0000\u0000\u0323\u0324\u0007\u0014\u0000\u0000\u0324\u0325"+ - "\u0007\r\u0000\u0000\u0325\u0326\u0007\r\u0000\u0000\u0326\u0327\u0007"+ - "\u0002\u0000\u0000\u0327|\u0001\u0000\u0000\u0000\u0328\u0329\u0007\u0007"+ - "\u0000\u0000\u0329\u032a\u0007\u0006\u0000\u0000\u032a~\u0001\u0000\u0000"+ - "\u0000\u032b\u032c\u0005?\u0000\u0000\u032c\u0080\u0001\u0000\u0000\u0000"+ - "\u032d\u032e\u0007\u0006\u0000\u0000\u032e\u032f\u0007\r\u0000\u0000\u032f"+ - "\u0330\u0007\u0001\u0000\u0000\u0330\u0331\u0007\u0012\u0000\u0000\u0331"+ - "\u0332\u0007\u0003\u0000\u0000\u0332\u0082\u0001\u0000\u0000\u0000\u0333"+ - "\u0334\u0005)\u0000\u0000\u0334\u0084\u0001\u0000\u0000\u0000\u0335\u0336"+ - "\u0007\u0005\u0000\u0000\u0336\u0337\u0007\u0006\u0000\u0000\u0337\u0338"+ - "\u0007\u0014\u0000\u0000\u0338\u0339\u0007\u0003\u0000\u0000\u0339\u0086"+ - "\u0001\u0000\u0000\u0000\u033a\u033b\u0005=\u0000\u0000\u033b\u033c\u0005"+ - "=\u0000\u0000\u033c\u0088\u0001\u0000\u0000\u0000\u033d\u033e\u0005=\u0000"+ - "\u0000\u033e\u033f\u0005~\u0000\u0000\u033f\u008a\u0001\u0000\u0000\u0000"+ - 
"\u0340\u0341\u0005!\u0000\u0000\u0341\u0342\u0005=\u0000\u0000\u0342\u008c"+ - "\u0001\u0000\u0000\u0000\u0343\u0344\u0005<\u0000\u0000\u0344\u008e\u0001"+ - "\u0000\u0000\u0000\u0345\u0346\u0005<\u0000\u0000\u0346\u0347\u0005=\u0000"+ - "\u0000\u0347\u0090\u0001\u0000\u0000\u0000\u0348\u0349\u0005>\u0000\u0000"+ - "\u0349\u0092\u0001\u0000\u0000\u0000\u034a\u034b\u0005>\u0000\u0000\u034b"+ - "\u034c\u0005=\u0000\u0000\u034c\u0094\u0001\u0000\u0000\u0000\u034d\u034e"+ - "\u0005+\u0000\u0000\u034e\u0096\u0001\u0000\u0000\u0000\u034f\u0350\u0005"+ - "-\u0000\u0000\u0350\u0098\u0001\u0000\u0000\u0000\u0351\u0352\u0005*\u0000"+ - "\u0000\u0352\u009a\u0001\u0000\u0000\u0000\u0353\u0354\u0005/\u0000\u0000"+ - "\u0354\u009c\u0001\u0000\u0000\u0000\u0355\u0356\u0005%\u0000\u0000\u0356"+ - "\u009e\u0001\u0000\u0000\u0000\u0357\u0358\u0007\u0010\u0000\u0000\u0358"+ - "\u0359\u0007\f\u0000\u0000\u0359\u035a\u0007\u0005\u0000\u0000\u035a\u035b"+ - "\u0007\u0004\u0000\u0000\u035b\u035c\u0007\n\u0000\u0000\u035c\u00a0\u0001"+ - "\u0000\u0000\u0000\u035d\u0360\u0003\u007f8\u0000\u035e\u0361\u0003A\u0019"+ - "\u0000\u035f\u0361\u0003O \u0000\u0360\u035e\u0001\u0000\u0000\u0000\u0360"+ - "\u035f\u0001\u0000\u0000\u0000\u0361\u0365\u0001\u0000\u0000\u0000\u0362"+ - "\u0364\u0003Q!\u0000\u0363\u0362\u0001\u0000\u0000\u0000\u0364\u0367\u0001"+ - "\u0000\u0000\u0000\u0365\u0363\u0001\u0000\u0000\u0000\u0365\u0366\u0001"+ - "\u0000\u0000\u0000\u0366\u036f\u0001\u0000\u0000\u0000\u0367\u0365\u0001"+ - "\u0000\u0000\u0000\u0368\u036a\u0003\u007f8\u0000\u0369\u036b\u0003?\u0018"+ - "\u0000\u036a\u0369\u0001\u0000\u0000\u0000\u036b\u036c\u0001\u0000\u0000"+ - "\u0000\u036c\u036a\u0001\u0000\u0000\u0000\u036c\u036d\u0001\u0000\u0000"+ - "\u0000\u036d\u036f\u0001\u0000\u0000\u0000\u036e\u035d\u0001\u0000\u0000"+ - "\u0000\u036e\u0368\u0001\u0000\u0000\u0000\u036f\u00a2\u0001\u0000\u0000"+ - "\u0000\u0370\u0371\u0005[\u0000\u0000\u0371\u0372\u0001\u0000\u0000\u0000"+ - 
"\u0372\u0373\u0006J\u0000\u0000\u0373\u0374\u0006J\u0000\u0000\u0374\u00a4"+ - "\u0001\u0000\u0000\u0000\u0375\u0376\u0005]\u0000\u0000\u0376\u0377\u0001"+ - "\u0000\u0000\u0000\u0377\u0378\u0006K\u000b\u0000\u0378\u0379\u0006K\u000b"+ - "\u0000\u0379\u00a6\u0001\u0000\u0000\u0000\u037a\u037e\u0003A\u0019\u0000"+ - "\u037b\u037d\u0003Q!\u0000\u037c\u037b\u0001\u0000\u0000\u0000\u037d\u0380"+ - "\u0001\u0000\u0000\u0000\u037e\u037c\u0001\u0000\u0000\u0000\u037e\u037f"+ - "\u0001\u0000\u0000\u0000\u037f\u038b\u0001\u0000\u0000\u0000\u0380\u037e"+ - "\u0001\u0000\u0000\u0000\u0381\u0384\u0003O \u0000\u0382\u0384\u0003I"+ - "\u001d\u0000\u0383\u0381\u0001\u0000\u0000\u0000\u0383\u0382\u0001\u0000"+ - "\u0000\u0000\u0384\u0386\u0001\u0000\u0000\u0000\u0385\u0387\u0003Q!\u0000"+ - "\u0386\u0385\u0001\u0000\u0000\u0000\u0387\u0388\u0001\u0000\u0000\u0000"+ - "\u0388\u0386\u0001\u0000\u0000\u0000\u0388\u0389\u0001\u0000\u0000\u0000"+ - "\u0389\u038b\u0001\u0000\u0000\u0000\u038a\u037a\u0001\u0000\u0000\u0000"+ - "\u038a\u0383\u0001\u0000\u0000\u0000\u038b\u00a8\u0001\u0000\u0000\u0000"+ - "\u038c\u038e\u0003K\u001e\u0000\u038d\u038f\u0003M\u001f\u0000\u038e\u038d"+ - "\u0001\u0000\u0000\u0000\u038f\u0390\u0001\u0000\u0000\u0000\u0390\u038e"+ - "\u0001\u0000\u0000\u0000\u0390\u0391\u0001\u0000\u0000\u0000\u0391\u0392"+ - "\u0001\u0000\u0000\u0000\u0392\u0393\u0003K\u001e\u0000\u0393\u00aa\u0001"+ - "\u0000\u0000\u0000\u0394\u0395\u0003\u00a9M\u0000\u0395\u00ac\u0001\u0000"+ - "\u0000\u0000\u0396\u0397\u00037\u0014\u0000\u0397\u0398\u0001\u0000\u0000"+ - "\u0000\u0398\u0399\u0006O\n\u0000\u0399\u00ae\u0001\u0000\u0000\u0000"+ - "\u039a\u039b\u00039\u0015\u0000\u039b\u039c\u0001\u0000\u0000\u0000\u039c"+ - "\u039d\u0006P\n\u0000\u039d\u00b0\u0001\u0000\u0000\u0000\u039e\u039f"+ - "\u0003;\u0016\u0000\u039f\u03a0\u0001\u0000\u0000\u0000\u03a0\u03a1\u0006"+ - "Q\n\u0000\u03a1\u00b2\u0001\u0000\u0000\u0000\u03a2\u03a3\u0003\u00a3"+ - 
"J\u0000\u03a3\u03a4\u0001\u0000\u0000\u0000\u03a4\u03a5\u0006R\f\u0000"+ - "\u03a5\u03a6\u0006R\r\u0000\u03a6\u00b4\u0001\u0000\u0000\u0000\u03a7"+ - "\u03a8\u0003=\u0017\u0000\u03a8\u03a9\u0001\u0000\u0000\u0000\u03a9\u03aa"+ - "\u0006S\u000e\u0000\u03aa\u03ab\u0006S\u000b\u0000\u03ab\u00b6\u0001\u0000"+ - "\u0000\u0000\u03ac\u03ad\u0003;\u0016\u0000\u03ad\u03ae\u0001\u0000\u0000"+ - "\u0000\u03ae\u03af\u0006T\n\u0000\u03af\u00b8\u0001\u0000\u0000\u0000"+ - "\u03b0\u03b1\u00037\u0014\u0000\u03b1\u03b2\u0001\u0000\u0000\u0000\u03b2"+ - "\u03b3\u0006U\n\u0000\u03b3\u00ba\u0001\u0000\u0000\u0000\u03b4\u03b5"+ - "\u00039\u0015\u0000\u03b5\u03b6\u0001\u0000\u0000\u0000\u03b6\u03b7\u0006"+ - "V\n\u0000\u03b7\u00bc\u0001\u0000\u0000\u0000\u03b8\u03b9\u0003=\u0017"+ - "\u0000\u03b9\u03ba\u0001\u0000\u0000\u0000\u03ba\u03bb\u0006W\u000e\u0000"+ - "\u03bb\u03bc\u0006W\u000b\u0000\u03bc\u00be\u0001\u0000\u0000\u0000\u03bd"+ - "\u03be\u0003\u00a3J\u0000\u03be\u03bf\u0001\u0000\u0000\u0000\u03bf\u03c0"+ - "\u0006X\f\u0000\u03c0\u00c0\u0001\u0000\u0000\u0000\u03c1\u03c2\u0003"+ - "\u00a5K\u0000\u03c2\u03c3\u0001\u0000\u0000\u0000\u03c3\u03c4\u0006Y\u000f"+ - "\u0000\u03c4\u00c2\u0001\u0000\u0000\u0000\u03c5\u03c6\u0003\u014f\u00a0"+ - "\u0000\u03c6\u03c7\u0001\u0000\u0000\u0000\u03c7\u03c8\u0006Z\u0010\u0000"+ - "\u03c8\u00c4\u0001\u0000\u0000\u0000\u03c9\u03ca\u0003c*\u0000\u03ca\u03cb"+ - "\u0001\u0000\u0000\u0000\u03cb\u03cc\u0006[\u0011\u0000\u03cc\u00c6\u0001"+ - "\u0000\u0000\u0000\u03cd\u03ce\u0003_(\u0000\u03ce\u03cf\u0001\u0000\u0000"+ - "\u0000\u03cf\u03d0\u0006\\\u0012\u0000\u03d0\u00c8\u0001\u0000\u0000\u0000"+ - "\u03d1\u03d2\u0007\u0010\u0000\u0000\u03d2\u03d3\u0007\u0003\u0000\u0000"+ - "\u03d3\u03d4\u0007\u0005\u0000\u0000\u03d4\u03d5\u0007\f\u0000\u0000\u03d5"+ - "\u03d6\u0007\u0000\u0000\u0000\u03d6\u03d7\u0007\f\u0000\u0000\u03d7\u03d8"+ - "\u0007\u0005\u0000\u0000\u03d8\u03d9\u0007\f\u0000\u0000\u03d9\u00ca\u0001"+ - "\u0000\u0000\u0000\u03da\u03de\b 
\u0000\u0000\u03db\u03dc\u0005/\u0000"+ - "\u0000\u03dc\u03de\b!\u0000\u0000\u03dd\u03da\u0001\u0000\u0000\u0000"+ - "\u03dd\u03db\u0001\u0000\u0000\u0000\u03de\u00cc\u0001\u0000\u0000\u0000"+ - "\u03df\u03e1\u0003\u00cb^\u0000\u03e0\u03df\u0001\u0000\u0000\u0000\u03e1"+ - "\u03e2\u0001\u0000\u0000\u0000\u03e2\u03e0\u0001\u0000\u0000\u0000\u03e2"+ - "\u03e3\u0001\u0000\u0000\u0000\u03e3\u00ce\u0001\u0000\u0000\u0000\u03e4"+ - "\u03e5\u0003\u00cd_\u0000\u03e5\u03e6\u0001\u0000\u0000\u0000\u03e6\u03e7"+ - "\u0006`\u0013\u0000\u03e7\u00d0\u0001\u0000\u0000\u0000\u03e8\u03e9\u0003"+ - "S\"\u0000\u03e9\u03ea\u0001\u0000\u0000\u0000\u03ea\u03eb\u0006a\u0014"+ - "\u0000\u03eb\u00d2\u0001\u0000\u0000\u0000\u03ec\u03ed\u00037\u0014\u0000"+ - "\u03ed\u03ee\u0001\u0000\u0000\u0000\u03ee\u03ef\u0006b\n\u0000\u03ef"+ - "\u00d4\u0001\u0000\u0000\u0000\u03f0\u03f1\u00039\u0015\u0000\u03f1\u03f2"+ - "\u0001\u0000\u0000\u0000\u03f2\u03f3\u0006c\n\u0000\u03f3\u00d6\u0001"+ - "\u0000\u0000\u0000\u03f4\u03f5\u0003;\u0016\u0000\u03f5\u03f6\u0001\u0000"+ - "\u0000\u0000\u03f6\u03f7\u0006d\n\u0000\u03f7\u00d8\u0001\u0000\u0000"+ - "\u0000\u03f8\u03f9\u0003=\u0017\u0000\u03f9\u03fa\u0001\u0000\u0000\u0000"+ - "\u03fa\u03fb\u0006e\u000e\u0000\u03fb\u03fc\u0006e\u000b\u0000\u03fc\u00da"+ - "\u0001\u0000\u0000\u0000\u03fd\u03fe\u0003g,\u0000\u03fe\u03ff\u0001\u0000"+ - "\u0000\u0000\u03ff\u0400\u0006f\u0015\u0000\u0400\u00dc\u0001\u0000\u0000"+ - "\u0000\u0401\u0402\u0003c*\u0000\u0402\u0403\u0001\u0000\u0000\u0000\u0403"+ - "\u0404\u0006g\u0011\u0000\u0404\u00de\u0001\u0000\u0000\u0000\u0405\u0406"+ - "\u0003\u007f8\u0000\u0406\u0407\u0001\u0000\u0000\u0000\u0407\u0408\u0006"+ - "h\u0016\u0000\u0408\u00e0\u0001\u0000\u0000\u0000\u0409\u040a\u0003\u00a1"+ - "I\u0000\u040a\u040b\u0001\u0000\u0000\u0000\u040b\u040c\u0006i\u0017\u0000"+ - "\u040c\u00e2\u0001\u0000\u0000\u0000\u040d\u0412\u0003A\u0019\u0000\u040e"+ - "\u0412\u0003?\u0018\u0000\u040f\u0412\u0003O \u0000\u0410\u0412\u0003"+ - 
"\u0099E\u0000\u0411\u040d\u0001\u0000\u0000\u0000\u0411\u040e\u0001\u0000"+ - "\u0000\u0000\u0411\u040f\u0001\u0000\u0000\u0000\u0411\u0410\u0001\u0000"+ - "\u0000\u0000\u0412\u00e4\u0001\u0000\u0000\u0000\u0413\u0416\u0003A\u0019"+ - "\u0000\u0414\u0416\u0003\u0099E\u0000\u0415\u0413\u0001\u0000\u0000\u0000"+ - "\u0415\u0414\u0001\u0000\u0000\u0000\u0416\u041a\u0001\u0000\u0000\u0000"+ - "\u0417\u0419\u0003\u00e3j\u0000\u0418\u0417\u0001\u0000\u0000\u0000\u0419"+ - "\u041c\u0001\u0000\u0000\u0000\u041a\u0418\u0001\u0000\u0000\u0000\u041a"+ - "\u041b\u0001\u0000\u0000\u0000\u041b\u0427\u0001\u0000\u0000\u0000\u041c"+ - "\u041a\u0001\u0000\u0000\u0000\u041d\u0420\u0003O \u0000\u041e\u0420\u0003"+ - "I\u001d\u0000\u041f\u041d\u0001\u0000\u0000\u0000\u041f\u041e\u0001\u0000"+ - "\u0000\u0000\u0420\u0422\u0001\u0000\u0000\u0000\u0421\u0423\u0003\u00e3"+ - "j\u0000\u0422\u0421\u0001\u0000\u0000\u0000\u0423\u0424\u0001\u0000\u0000"+ - "\u0000\u0424\u0422\u0001\u0000\u0000\u0000\u0424\u0425\u0001\u0000\u0000"+ - "\u0000\u0425\u0427\u0001\u0000\u0000\u0000\u0426\u0415\u0001\u0000\u0000"+ - "\u0000\u0426\u041f\u0001\u0000\u0000\u0000\u0427\u00e6\u0001\u0000\u0000"+ - "\u0000\u0428\u042b\u0003\u00e5k\u0000\u0429\u042b\u0003\u00a9M\u0000\u042a"+ - "\u0428\u0001\u0000\u0000\u0000\u042a\u0429\u0001\u0000\u0000\u0000\u042b"+ - "\u042c\u0001\u0000\u0000\u0000\u042c\u042a\u0001\u0000\u0000\u0000\u042c"+ - "\u042d\u0001\u0000\u0000\u0000\u042d\u00e8\u0001\u0000\u0000\u0000\u042e"+ - "\u042f\u00037\u0014\u0000\u042f\u0430\u0001\u0000\u0000\u0000\u0430\u0431"+ - "\u0006m\n\u0000\u0431\u00ea\u0001\u0000\u0000\u0000\u0432\u0433\u0003"+ - "9\u0015\u0000\u0433\u0434\u0001\u0000\u0000\u0000\u0434\u0435\u0006n\n"+ - "\u0000\u0435\u00ec\u0001\u0000\u0000\u0000\u0436\u0437\u0003;\u0016\u0000"+ - "\u0437\u0438\u0001\u0000\u0000\u0000\u0438\u0439\u0006o\n\u0000\u0439"+ - "\u00ee\u0001\u0000\u0000\u0000\u043a\u043b\u0003=\u0017\u0000\u043b\u043c"+ - 
"\u0001\u0000\u0000\u0000\u043c\u043d\u0006p\u000e\u0000\u043d\u043e\u0006"+ - "p\u000b\u0000\u043e\u00f0\u0001\u0000\u0000\u0000\u043f\u0440\u0003_("+ - "\u0000\u0440\u0441\u0001\u0000\u0000\u0000\u0441\u0442\u0006q\u0012\u0000"+ - "\u0442\u00f2\u0001\u0000\u0000\u0000\u0443\u0444\u0003c*\u0000\u0444\u0445"+ - "\u0001\u0000\u0000\u0000\u0445\u0446\u0006r\u0011\u0000\u0446\u00f4\u0001"+ - "\u0000\u0000\u0000\u0447\u0448\u0003g,\u0000\u0448\u0449\u0001\u0000\u0000"+ - "\u0000\u0449\u044a\u0006s\u0015\u0000\u044a\u00f6\u0001\u0000\u0000\u0000"+ - "\u044b\u044c\u0003\u007f8\u0000\u044c\u044d\u0001\u0000\u0000\u0000\u044d"+ - "\u044e\u0006t\u0016\u0000\u044e\u00f8\u0001\u0000\u0000\u0000\u044f\u0450"+ - "\u0003\u00a1I\u0000\u0450\u0451\u0001\u0000\u0000\u0000\u0451\u0452\u0006"+ - "u\u0017\u0000\u0452\u00fa\u0001\u0000\u0000\u0000\u0453\u0454\u0007\f"+ - "\u0000\u0000\u0454\u0455\u0007\u0002\u0000\u0000\u0455\u00fc\u0001\u0000"+ - "\u0000\u0000\u0456\u0457\u0003\u00e7l\u0000\u0457\u0458\u0001\u0000\u0000"+ - "\u0000\u0458\u0459\u0006w\u0018\u0000\u0459\u00fe\u0001\u0000\u0000\u0000"+ - "\u045a\u045b\u00037\u0014\u0000\u045b\u045c\u0001\u0000\u0000\u0000\u045c"+ - "\u045d\u0006x\n\u0000\u045d\u0100\u0001\u0000\u0000\u0000\u045e\u045f"+ - "\u00039\u0015\u0000\u045f\u0460\u0001\u0000\u0000\u0000\u0460\u0461\u0006"+ - "y\n\u0000\u0461\u0102\u0001\u0000\u0000\u0000\u0462\u0463\u0003;\u0016"+ - "\u0000\u0463\u0464\u0001\u0000\u0000\u0000\u0464\u0465\u0006z\n\u0000"+ - "\u0465\u0104\u0001\u0000\u0000\u0000\u0466\u0467\u0003=\u0017\u0000\u0467"+ - "\u0468\u0001\u0000\u0000\u0000\u0468\u0469\u0006{\u000e\u0000\u0469\u046a"+ - "\u0006{\u000b\u0000\u046a\u0106\u0001\u0000\u0000\u0000\u046b\u046c\u0003"+ - "\u00a3J\u0000\u046c\u046d\u0001\u0000\u0000\u0000\u046d\u046e\u0006|\f"+ - "\u0000\u046e\u046f\u0006|\u0019\u0000\u046f\u0108\u0001\u0000\u0000\u0000"+ - "\u0470\u0471\u0007\u0007\u0000\u0000\u0471\u0472\u0007\t\u0000\u0000\u0472"+ - 
"\u0473\u0001\u0000\u0000\u0000\u0473\u0474\u0006}\u001a\u0000\u0474\u010a"+ - "\u0001\u0000\u0000\u0000\u0475\u0476\u0007\u0013\u0000\u0000\u0476\u0477"+ - "\u0007\u0001\u0000\u0000\u0477\u0478\u0007\u0005\u0000\u0000\u0478\u0479"+ - "\u0007\n\u0000\u0000\u0479\u047a\u0001\u0000\u0000\u0000\u047a\u047b\u0006"+ - "~\u001a\u0000\u047b\u010c\u0001\u0000\u0000\u0000\u047c\u047d\b\"\u0000"+ - "\u0000\u047d\u010e\u0001\u0000\u0000\u0000\u047e\u0480\u0003\u010d\u007f"+ - "\u0000\u047f\u047e\u0001\u0000\u0000\u0000\u0480\u0481\u0001\u0000\u0000"+ - "\u0000\u0481\u047f\u0001\u0000\u0000\u0000\u0481\u0482\u0001\u0000\u0000"+ - "\u0000\u0482\u0483\u0001\u0000\u0000\u0000\u0483\u0484\u0003\u014f\u00a0"+ - "\u0000\u0484\u0486\u0001\u0000\u0000\u0000\u0485\u047f\u0001\u0000\u0000"+ - "\u0000\u0485\u0486\u0001\u0000\u0000\u0000\u0486\u0488\u0001\u0000\u0000"+ - "\u0000\u0487\u0489\u0003\u010d\u007f\u0000\u0488\u0487\u0001\u0000\u0000"+ - "\u0000\u0489\u048a\u0001\u0000\u0000\u0000\u048a\u0488\u0001\u0000\u0000"+ - "\u0000\u048a\u048b\u0001\u0000\u0000\u0000\u048b\u0110\u0001\u0000\u0000"+ - "\u0000\u048c\u048d\u0003\u010f\u0080\u0000\u048d\u048e\u0001\u0000\u0000"+ - "\u0000\u048e\u048f\u0006\u0081\u001b\u0000\u048f\u0112\u0001\u0000\u0000"+ - "\u0000\u0490\u0491\u00037\u0014\u0000\u0491\u0492\u0001\u0000\u0000\u0000"+ - "\u0492\u0493\u0006\u0082\n\u0000\u0493\u0114\u0001\u0000\u0000\u0000\u0494"+ - "\u0495\u00039\u0015\u0000\u0495\u0496\u0001\u0000\u0000\u0000\u0496\u0497"+ - "\u0006\u0083\n\u0000\u0497\u0116\u0001\u0000\u0000\u0000\u0498\u0499\u0003"+ - ";\u0016\u0000\u0499\u049a\u0001\u0000\u0000\u0000\u049a\u049b\u0006\u0084"+ - "\n\u0000\u049b\u0118\u0001\u0000\u0000\u0000\u049c\u049d\u0003=\u0017"+ - "\u0000\u049d\u049e\u0001\u0000\u0000\u0000\u049e\u049f\u0006\u0085\u000e"+ - "\u0000\u049f\u04a0\u0006\u0085\u000b\u0000\u04a0\u04a1\u0006\u0085\u000b"+ - "\u0000\u04a1\u011a\u0001\u0000\u0000\u0000\u04a2\u04a3\u0003_(\u0000\u04a3"+ - 
"\u04a4\u0001\u0000\u0000\u0000\u04a4\u04a5\u0006\u0086\u0012\u0000\u04a5"+ - "\u011c\u0001\u0000\u0000\u0000\u04a6\u04a7\u0003c*\u0000\u04a7\u04a8\u0001"+ - "\u0000\u0000\u0000\u04a8\u04a9\u0006\u0087\u0011\u0000\u04a9\u011e\u0001"+ - "\u0000\u0000\u0000\u04aa\u04ab\u0003g,\u0000\u04ab\u04ac\u0001\u0000\u0000"+ - "\u0000\u04ac\u04ad\u0006\u0088\u0015\u0000\u04ad\u0120\u0001\u0000\u0000"+ - "\u0000\u04ae\u04af\u0003\u010b~\u0000\u04af\u04b0\u0001\u0000\u0000\u0000"+ - "\u04b0\u04b1\u0006\u0089\u001c\u0000\u04b1\u0122\u0001\u0000\u0000\u0000"+ - "\u04b2\u04b3\u0003\u00e7l\u0000\u04b3\u04b4\u0001\u0000\u0000\u0000\u04b4"+ - "\u04b5\u0006\u008a\u0018\u0000\u04b5\u0124\u0001\u0000\u0000\u0000\u04b6"+ - "\u04b7\u0003\u00abN\u0000\u04b7\u04b8\u0001\u0000\u0000\u0000\u04b8\u04b9"+ - "\u0006\u008b\u001d\u0000\u04b9\u0126\u0001\u0000\u0000\u0000\u04ba\u04bb"+ - "\u0003\u007f8\u0000\u04bb\u04bc\u0001\u0000\u0000\u0000\u04bc\u04bd\u0006"+ - "\u008c\u0016\u0000\u04bd\u0128\u0001\u0000\u0000\u0000\u04be\u04bf\u0003"+ - "\u00a1I\u0000\u04bf\u04c0\u0001\u0000\u0000\u0000\u04c0\u04c1\u0006\u008d"+ - "\u0017\u0000\u04c1\u012a\u0001\u0000\u0000\u0000\u04c2\u04c3\u00037\u0014"+ - "\u0000\u04c3\u04c4\u0001\u0000\u0000\u0000\u04c4\u04c5\u0006\u008e\n\u0000"+ - "\u04c5\u012c\u0001\u0000\u0000\u0000\u04c6\u04c7\u00039\u0015\u0000\u04c7"+ - "\u04c8\u0001\u0000\u0000\u0000\u04c8\u04c9\u0006\u008f\n\u0000\u04c9\u012e"+ - "\u0001\u0000\u0000\u0000\u04ca\u04cb\u0003;\u0016\u0000\u04cb\u04cc\u0001"+ - "\u0000\u0000\u0000\u04cc\u04cd\u0006\u0090\n\u0000\u04cd\u0130\u0001\u0000"+ - "\u0000\u0000\u04ce\u04cf\u0003=\u0017\u0000\u04cf\u04d0\u0001\u0000\u0000"+ - "\u0000\u04d0\u04d1\u0006\u0091\u000e\u0000\u04d1\u04d2\u0006\u0091\u000b"+ - "\u0000\u04d2\u0132\u0001\u0000\u0000\u0000\u04d3\u04d4\u0003g,\u0000\u04d4"+ - "\u04d5\u0001\u0000\u0000\u0000\u04d5\u04d6\u0006\u0092\u0015\u0000\u04d6"+ - "\u0134\u0001\u0000\u0000\u0000\u04d7\u04d8\u0003\u007f8\u0000\u04d8\u04d9"+ - 
"\u0001\u0000\u0000\u0000\u04d9\u04da\u0006\u0093\u0016\u0000\u04da\u0136"+ - "\u0001\u0000\u0000\u0000\u04db\u04dc\u0003\u00a1I\u0000\u04dc\u04dd\u0001"+ - "\u0000\u0000\u0000\u04dd\u04de\u0006\u0094\u0017\u0000\u04de\u0138\u0001"+ - "\u0000\u0000\u0000\u04df\u04e0\u0003\u00abN\u0000\u04e0\u04e1\u0001\u0000"+ - "\u0000\u0000\u04e1\u04e2\u0006\u0095\u001d\u0000\u04e2\u013a\u0001\u0000"+ - "\u0000\u0000\u04e3\u04e4\u0003\u00a7L\u0000\u04e4\u04e5\u0001\u0000\u0000"+ - "\u0000\u04e5\u04e6\u0006\u0096\u001e\u0000\u04e6\u013c\u0001\u0000\u0000"+ - "\u0000\u04e7\u04e8\u00037\u0014\u0000\u04e8\u04e9\u0001\u0000\u0000\u0000"+ - "\u04e9\u04ea\u0006\u0097\n\u0000\u04ea\u013e\u0001\u0000\u0000\u0000\u04eb"+ - "\u04ec\u00039\u0015\u0000\u04ec\u04ed\u0001\u0000\u0000\u0000\u04ed\u04ee"+ - "\u0006\u0098\n\u0000\u04ee\u0140\u0001\u0000\u0000\u0000\u04ef\u04f0\u0003"+ - ";\u0016\u0000\u04f0\u04f1\u0001\u0000\u0000\u0000\u04f1\u04f2\u0006\u0099"+ - "\n\u0000\u04f2\u0142\u0001\u0000\u0000\u0000\u04f3\u04f4\u0003=\u0017"+ - "\u0000\u04f4\u04f5\u0001\u0000\u0000\u0000\u04f5\u04f6\u0006\u009a\u000e"+ - "\u0000\u04f6\u04f7\u0006\u009a\u000b\u0000\u04f7\u0144\u0001\u0000\u0000"+ - "\u0000\u04f8\u04f9\u0007\u0001\u0000\u0000\u04f9\u04fa\u0007\t\u0000\u0000"+ - "\u04fa\u04fb\u0007\u000f\u0000\u0000\u04fb\u04fc\u0007\u0007\u0000\u0000"+ - "\u04fc\u0146\u0001\u0000\u0000\u0000\u04fd\u04fe\u00037\u0014\u0000\u04fe"+ - "\u04ff\u0001\u0000\u0000\u0000\u04ff\u0500\u0006\u009c\n\u0000\u0500\u0148"+ - "\u0001\u0000\u0000\u0000\u0501\u0502\u00039\u0015\u0000\u0502\u0503\u0001"+ - "\u0000\u0000\u0000\u0503\u0504\u0006\u009d\n\u0000\u0504\u014a\u0001\u0000"+ - "\u0000\u0000\u0505\u0506\u0003;\u0016\u0000\u0506\u0507\u0001\u0000\u0000"+ - "\u0000\u0507\u0508\u0006\u009e\n\u0000\u0508\u014c\u0001\u0000\u0000\u0000"+ - "\u0509\u050a\u0003\u00a5K\u0000\u050a\u050b\u0001\u0000\u0000\u0000\u050b"+ - "\u050c\u0006\u009f\u000f\u0000\u050c\u050d\u0006\u009f\u000b\u0000\u050d"+ - 
"\u014e\u0001\u0000\u0000\u0000\u050e\u050f\u0005:\u0000\u0000\u050f\u0150"+ - "\u0001\u0000\u0000\u0000\u0510\u0516\u0003I\u001d\u0000\u0511\u0516\u0003"+ - "?\u0018\u0000\u0512\u0516\u0003g,\u0000\u0513\u0516\u0003A\u0019\u0000"+ - "\u0514\u0516\u0003O \u0000\u0515\u0510\u0001\u0000\u0000\u0000\u0515\u0511"+ - "\u0001\u0000\u0000\u0000\u0515\u0512\u0001\u0000\u0000\u0000\u0515\u0513"+ - "\u0001\u0000\u0000\u0000\u0515\u0514\u0001\u0000\u0000\u0000\u0516\u0517"+ - "\u0001\u0000\u0000\u0000\u0517\u0515\u0001\u0000\u0000\u0000\u0517\u0518"+ - "\u0001\u0000\u0000\u0000\u0518\u0152\u0001\u0000\u0000\u0000\u0519\u051a"+ - "\u00037\u0014\u0000\u051a\u051b\u0001\u0000\u0000\u0000\u051b\u051c\u0006"+ - "\u00a2\n\u0000\u051c\u0154\u0001\u0000\u0000\u0000\u051d\u051e\u00039"+ - "\u0015\u0000\u051e\u051f\u0001\u0000\u0000\u0000\u051f\u0520\u0006\u00a3"+ - "\n\u0000\u0520\u0156\u0001\u0000\u0000\u0000\u0521\u0522\u0003;\u0016"+ - "\u0000\u0522\u0523\u0001\u0000\u0000\u0000\u0523\u0524\u0006\u00a4\n\u0000"+ - "\u0524\u0158\u0001\u0000\u0000\u0000\u0525\u0526\u0003=\u0017\u0000\u0526"+ - "\u0527\u0001\u0000\u0000\u0000\u0527\u0528\u0006\u00a5\u000e\u0000\u0528"+ - "\u0529\u0006\u00a5\u000b\u0000\u0529\u015a\u0001\u0000\u0000\u0000\u052a"+ - "\u052b\u0003\u014f\u00a0\u0000\u052b\u052c\u0001\u0000\u0000\u0000\u052c"+ - "\u052d\u0006\u00a6\u0010\u0000\u052d\u015c\u0001\u0000\u0000\u0000\u052e"+ - "\u052f\u0003c*\u0000\u052f\u0530\u0001\u0000\u0000\u0000\u0530\u0531\u0006"+ - "\u00a7\u0011\u0000\u0531\u015e\u0001\u0000\u0000\u0000\u0532\u0533\u0003"+ - "g,\u0000\u0533\u0534\u0001\u0000\u0000\u0000\u0534\u0535\u0006\u00a8\u0015"+ - "\u0000\u0535\u0160\u0001\u0000\u0000\u0000\u0536\u0537\u0003\u0109}\u0000"+ - "\u0537\u0538\u0001\u0000\u0000\u0000\u0538\u0539\u0006\u00a9\u001f\u0000"+ - "\u0539\u053a\u0006\u00a9 \u0000\u053a\u0162\u0001\u0000\u0000\u0000\u053b"+ - "\u053c\u0003\u00cd_\u0000\u053c\u053d\u0001\u0000\u0000\u0000\u053d\u053e"+ - 
"\u0006\u00aa\u0013\u0000\u053e\u0164\u0001\u0000\u0000\u0000\u053f\u0540"+ - "\u0003S\"\u0000\u0540\u0541\u0001\u0000\u0000\u0000\u0541\u0542\u0006"+ - "\u00ab\u0014\u0000\u0542\u0166\u0001\u0000\u0000\u0000\u0543\u0544\u0003"+ - "7\u0014\u0000\u0544\u0545\u0001\u0000\u0000\u0000\u0545\u0546\u0006\u00ac"+ - "\n\u0000\u0546\u0168\u0001\u0000\u0000\u0000\u0547\u0548\u00039\u0015"+ - "\u0000\u0548\u0549\u0001\u0000\u0000\u0000\u0549\u054a\u0006\u00ad\n\u0000"+ - "\u054a\u016a\u0001\u0000\u0000\u0000\u054b\u054c\u0003;\u0016\u0000\u054c"+ - "\u054d\u0001\u0000\u0000\u0000\u054d\u054e\u0006\u00ae\n\u0000\u054e\u016c"+ - "\u0001\u0000\u0000\u0000\u054f\u0550\u0003=\u0017\u0000\u0550\u0551\u0001"+ - "\u0000\u0000\u0000\u0551\u0552\u0006\u00af\u000e\u0000\u0552\u0553\u0006"+ - "\u00af\u000b\u0000\u0553\u0554\u0006\u00af\u000b\u0000\u0554\u016e\u0001"+ - "\u0000\u0000\u0000\u0555\u0556\u0003c*\u0000\u0556\u0557\u0001\u0000\u0000"+ - "\u0000\u0557\u0558\u0006\u00b0\u0011\u0000\u0558\u0170\u0001\u0000\u0000"+ - "\u0000\u0559\u055a\u0003g,\u0000\u055a\u055b\u0001\u0000\u0000\u0000\u055b"+ - "\u055c\u0006\u00b1\u0015\u0000\u055c\u0172\u0001\u0000\u0000\u0000\u055d"+ - "\u055e\u0003\u00e7l\u0000\u055e\u055f\u0001\u0000\u0000\u0000\u055f\u0560"+ - "\u0006\u00b2\u0018\u0000\u0560\u0174\u0001\u0000\u0000\u0000\u0561\u0562"+ - "\u00037\u0014\u0000\u0562\u0563\u0001\u0000\u0000\u0000\u0563\u0564\u0006"+ - "\u00b3\n\u0000\u0564\u0176\u0001\u0000\u0000\u0000\u0565\u0566\u00039"+ - "\u0015\u0000\u0566\u0567\u0001\u0000\u0000\u0000\u0567\u0568\u0006\u00b4"+ - "\n\u0000\u0568\u0178\u0001\u0000\u0000\u0000\u0569\u056a\u0003;\u0016"+ - "\u0000\u056a\u056b\u0001\u0000\u0000\u0000\u056b\u056c\u0006\u00b5\n\u0000"+ - "\u056c\u017a\u0001\u0000\u0000\u0000\u056d\u056e\u0003=\u0017\u0000\u056e"+ - "\u056f\u0001\u0000\u0000\u0000\u056f\u0570\u0006\u00b6\u000e\u0000\u0570"+ - "\u0571\u0006\u00b6\u000b\u0000\u0571\u017c\u0001\u0000\u0000\u0000\u0572"+ - 
"\u0573\u0003\u00cd_\u0000\u0573\u0574\u0001\u0000\u0000\u0000\u0574\u0575"+ - "\u0006\u00b7\u0013\u0000\u0575\u0576\u0006\u00b7\u000b\u0000\u0576\u0577"+ - "\u0006\u00b7!\u0000\u0577\u017e\u0001\u0000\u0000\u0000\u0578\u0579\u0003"+ - "S\"\u0000\u0579\u057a\u0001\u0000\u0000\u0000\u057a\u057b\u0006\u00b8"+ - "\u0014\u0000\u057b\u057c\u0006\u00b8\u000b\u0000\u057c\u057d\u0006\u00b8"+ - "!\u0000\u057d\u0180\u0001\u0000\u0000\u0000\u057e\u057f\u00037\u0014\u0000"+ - "\u057f\u0580\u0001\u0000\u0000\u0000\u0580\u0581\u0006\u00b9\n\u0000\u0581"+ - "\u0182\u0001\u0000\u0000\u0000\u0582\u0583\u00039\u0015\u0000\u0583\u0584"+ - "\u0001\u0000\u0000\u0000\u0584\u0585\u0006\u00ba\n\u0000\u0585\u0184\u0001"+ - "\u0000\u0000\u0000\u0586\u0587\u0003;\u0016\u0000\u0587\u0588\u0001\u0000"+ - "\u0000\u0000\u0588\u0589\u0006\u00bb\n\u0000\u0589\u0186\u0001\u0000\u0000"+ - "\u0000\u058a\u058b\u0003\u014f\u00a0\u0000\u058b\u058c\u0001\u0000\u0000"+ - "\u0000\u058c\u058d\u0006\u00bc\u0010\u0000\u058d\u058e\u0006\u00bc\u000b"+ - "\u0000\u058e\u058f\u0006\u00bc\t\u0000\u058f\u0188\u0001\u0000\u0000\u0000"+ - "\u0590\u0591\u0003c*\u0000\u0591\u0592\u0001\u0000\u0000\u0000\u0592\u0593"+ - "\u0006\u00bd\u0011\u0000\u0593\u0594\u0006\u00bd\u000b\u0000\u0594\u0595"+ - "\u0006\u00bd\t\u0000\u0595\u018a\u0001\u0000\u0000\u0000\u0596\u0597\u0003"+ - "7\u0014\u0000\u0597\u0598\u0001\u0000\u0000\u0000\u0598\u0599\u0006\u00be"+ - "\n\u0000\u0599\u018c\u0001\u0000\u0000\u0000\u059a\u059b\u00039\u0015"+ - "\u0000\u059b\u059c\u0001\u0000\u0000\u0000\u059c\u059d\u0006\u00bf\n\u0000"+ - "\u059d\u018e\u0001\u0000\u0000\u0000\u059e\u059f\u0003;\u0016\u0000\u059f"+ - "\u05a0\u0001\u0000\u0000\u0000\u05a0\u05a1\u0006\u00c0\n\u0000\u05a1\u0190"+ - "\u0001\u0000\u0000\u0000\u05a2\u05a3\u0003\u00abN\u0000\u05a3\u05a4\u0001"+ - "\u0000\u0000\u0000\u05a4\u05a5\u0006\u00c1\u000b\u0000\u05a5\u05a6\u0006"+ - "\u00c1\u0000\u0000\u05a6\u05a7\u0006\u00c1\u001d\u0000\u05a7\u0192\u0001"+ - 
"\u0000\u0000\u0000\u05a8\u05a9\u0003\u00a7L\u0000\u05a9\u05aa\u0001\u0000"+ - "\u0000\u0000\u05aa\u05ab\u0006\u00c2\u000b\u0000\u05ab\u05ac\u0006\u00c2"+ - "\u0000\u0000\u05ac\u05ad\u0006\u00c2\u001e\u0000\u05ad\u0194\u0001\u0000"+ - "\u0000\u0000\u05ae\u05af\u0003Y%\u0000\u05af\u05b0\u0001\u0000\u0000\u0000"+ - "\u05b0\u05b1\u0006\u00c3\u000b\u0000\u05b1\u05b2\u0006\u00c3\u0000\u0000"+ - "\u05b2\u05b3\u0006\u00c3\"\u0000\u05b3\u0196\u0001\u0000\u0000\u0000\u05b4"+ - "\u05b5\u0003=\u0017\u0000\u05b5\u05b6\u0001\u0000\u0000\u0000\u05b6\u05b7"+ - "\u0006\u00c4\u000e\u0000\u05b7\u05b8\u0006\u00c4\u000b\u0000\u05b8\u0198"+ - "\u0001\u0000\u0000\u0000A\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007"+ - "\b\t\n\u000b\f\r\u000e\u0241\u024b\u024f\u0252\u025b\u025d\u0268\u027b"+ - "\u0280\u0289\u0290\u0295\u0297\u02a2\u02aa\u02ad\u02af\u02b4\u02b9\u02bf"+ - "\u02c6\u02cb\u02d1\u02d4\u02dc\u02e0\u0360\u0365\u036c\u036e\u037e\u0383"+ - "\u0388\u038a\u0390\u03dd\u03e2\u0411\u0415\u041a\u041f\u0424\u0426\u042a"+ - "\u042c\u0481\u0485\u048a\u0515\u0517#\u0005\u0001\u0000\u0005\u0004\u0000"+ - "\u0005\u0006\u0000\u0005\u0002\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005"+ - "\u0005\u0000\u0005\t\u0000\u0005\u000b\u0000\u0005\r\u0000\u0000\u0001"+ - "\u0000\u0004\u0000\u0000\u0007A\u0000\u0005\u0000\u0000\u0007\u0018\u0000"+ - "\u0007B\u0000\u0007h\u0000\u0007!\u0000\u0007\u001f\u0000\u0007L\u0000"+ - "\u0007\u0019\u0000\u0007#\u0000\u0007/\u0000\u0007@\u0000\u0007P\u0000"+ - "\u0005\n\u0000\u0005\u0007\u0000\u0007Z\u0000\u0007Y\u0000\u0007D\u0000"+ - "\u0007C\u0000\u0007X\u0000\u0005\f\u0000\u0005\u000e\u0000\u0007\u001c"+ - "\u0000"; + ";\u0099<\u009b=\u009d>\u009f?\u00a1\u0000\u00a3@\u00a5A\u00a7B\u00a9C"+ + "\u00ab\u0000\u00adD\u00afE\u00b1F\u00b3G\u00b5\u0000\u00b7\u0000\u00b9"+ + "H\u00bbI\u00bdJ\u00bf\u0000\u00c1\u0000\u00c3\u0000\u00c5\u0000\u00c7"+ + "\u0000\u00c9\u0000\u00cbK\u00cd\u0000\u00cfL\u00d1\u0000\u00d3\u0000\u00d5"+ + 
"M\u00d7N\u00d9O\u00db\u0000\u00dd\u0000\u00df\u0000\u00e1\u0000\u00e3"+ + "\u0000\u00e5\u0000\u00e7\u0000\u00e9P\u00ebQ\u00edR\u00efS\u00f1\u0000"+ + "\u00f3\u0000\u00f5\u0000\u00f7\u0000\u00f9\u0000\u00fb\u0000\u00fdT\u00ff"+ + "\u0000\u0101U\u0103V\u0105W\u0107\u0000\u0109\u0000\u010bX\u010dY\u010f"+ + "\u0000\u0111Z\u0113\u0000\u0115[\u0117\\\u0119]\u011b\u0000\u011d\u0000"+ + "\u011f\u0000\u0121\u0000\u0123\u0000\u0125\u0000\u0127\u0000\u0129\u0000"+ + "\u012b\u0000\u012d^\u012f_\u0131`\u0133\u0000\u0135\u0000\u0137\u0000"+ + "\u0139\u0000\u013b\u0000\u013d\u0000\u013fa\u0141b\u0143c\u0145\u0000"+ + "\u0147d\u0149e\u014bf\u014dg\u014f\u0000\u0151h\u0153i\u0155j\u0157k\u0159"+ + "l\u015b\u0000\u015d\u0000\u015f\u0000\u0161\u0000\u0163\u0000\u0165\u0000"+ + "\u0167\u0000\u0169m\u016bn\u016do\u016f\u0000\u0171\u0000\u0173\u0000"+ + "\u0175\u0000\u0177p\u0179q\u017br\u017d\u0000\u017f\u0000\u0181\u0000"+ + "\u0183s\u0185t\u0187u\u0189\u0000\u018b\u0000\u018dv\u018fw\u0191x\u0193"+ + "\u0000\u0195\u0000\u0197\u0000\u0199\u0000\u000f\u0000\u0001\u0002\u0003"+ + "\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e#\u0002\u0000DDdd\u0002"+ + "\u0000IIii\u0002\u0000SSss\u0002\u0000EEee\u0002\u0000CCcc\u0002\u0000"+ + "TTtt\u0002\u0000RRrr\u0002\u0000OOoo\u0002\u0000PPpp\u0002\u0000NNnn\u0002"+ + "\u0000HHhh\u0002\u0000VVvv\u0002\u0000AAaa\u0002\u0000LLll\u0002\u0000"+ + "XXxx\u0002\u0000FFff\u0002\u0000MMmm\u0002\u0000GGgg\u0002\u0000KKkk\u0002"+ + "\u0000WWww\u0002\u0000UUuu\u0006\u0000\t\n\r\r //[[]]\u0002\u0000\n\n"+ + "\r\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002\u0000AZaz\b\u0000\"\"N"+ + "NRRTT\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002\u0000++--\u0001\u0000"+ + "``\u0002\u0000BBbb\u0002\u0000YYyy\u000b\u0000\t\n\r\r \"\",,//::==["+ + "[]]||\u0002\u0000**//\u000b\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u05dc\u0000"+ + "\u000f\u0001\u0000\u0000\u0000\u0000\u0011\u0001\u0000\u0000\u0000\u0000"+ + "\u0013\u0001\u0000\u0000\u0000\u0000\u0015\u0001\u0000\u0000\u0000\u0000"+ + 
"\u0017\u0001\u0000\u0000\u0000\u0000\u0019\u0001\u0000\u0000\u0000\u0000"+ + "\u001b\u0001\u0000\u0000\u0000\u0000\u001d\u0001\u0000\u0000\u0000\u0000"+ + "\u001f\u0001\u0000\u0000\u0000\u0000!\u0001\u0000\u0000\u0000\u0000#\u0001"+ + "\u0000\u0000\u0000\u0000%\u0001\u0000\u0000\u0000\u0000\'\u0001\u0000"+ + "\u0000\u0000\u0000)\u0001\u0000\u0000\u0000\u0000+\u0001\u0000\u0000\u0000"+ + "\u0000-\u0001\u0000\u0000\u0000\u0000/\u0001\u0000\u0000\u0000\u00001"+ + "\u0001\u0000\u0000\u0000\u00003\u0001\u0000\u0000\u0000\u00005\u0001\u0000"+ + "\u0000\u0000\u00007\u0001\u0000\u0000\u0000\u00009\u0001\u0000\u0000\u0000"+ + "\u0000;\u0001\u0000\u0000\u0000\u0001=\u0001\u0000\u0000\u0000\u0001S"+ + "\u0001\u0000\u0000\u0000\u0001U\u0001\u0000\u0000\u0000\u0001W\u0001\u0000"+ + "\u0000\u0000\u0001Y\u0001\u0000\u0000\u0000\u0001[\u0001\u0000\u0000\u0000"+ + "\u0001]\u0001\u0000\u0000\u0000\u0001_\u0001\u0000\u0000\u0000\u0001a"+ + "\u0001\u0000\u0000\u0000\u0001c\u0001\u0000\u0000\u0000\u0001e\u0001\u0000"+ + "\u0000\u0000\u0001g\u0001\u0000\u0000\u0000\u0001i\u0001\u0000\u0000\u0000"+ + "\u0001k\u0001\u0000\u0000\u0000\u0001m\u0001\u0000\u0000\u0000\u0001o"+ + "\u0001\u0000\u0000\u0000\u0001q\u0001\u0000\u0000\u0000\u0001s\u0001\u0000"+ + "\u0000\u0000\u0001u\u0001\u0000\u0000\u0000\u0001w\u0001\u0000\u0000\u0000"+ + "\u0001y\u0001\u0000\u0000\u0000\u0001{\u0001\u0000\u0000\u0000\u0001}"+ + "\u0001\u0000\u0000\u0000\u0001\u007f\u0001\u0000\u0000\u0000\u0001\u0081"+ + "\u0001\u0000\u0000\u0000\u0001\u0083\u0001\u0000\u0000\u0000\u0001\u0085"+ + "\u0001\u0000\u0000\u0000\u0001\u0087\u0001\u0000\u0000\u0000\u0001\u0089"+ + "\u0001\u0000\u0000\u0000\u0001\u008b\u0001\u0000\u0000\u0000\u0001\u008d"+ + "\u0001\u0000\u0000\u0000\u0001\u008f\u0001\u0000\u0000\u0000\u0001\u0091"+ + "\u0001\u0000\u0000\u0000\u0001\u0093\u0001\u0000\u0000\u0000\u0001\u0095"+ + "\u0001\u0000\u0000\u0000\u0001\u0097\u0001\u0000\u0000\u0000\u0001\u0099"+ + 
"\u0001\u0000\u0000\u0000\u0001\u009b\u0001\u0000\u0000\u0000\u0001\u009d"+ + "\u0001\u0000\u0000\u0000\u0001\u009f\u0001\u0000\u0000\u0000\u0001\u00a1"+ + "\u0001\u0000\u0000\u0000\u0001\u00a3\u0001\u0000\u0000\u0000\u0001\u00a5"+ + "\u0001\u0000\u0000\u0000\u0001\u00a7\u0001\u0000\u0000\u0000\u0001\u00a9"+ + "\u0001\u0000\u0000\u0000\u0001\u00ad\u0001\u0000\u0000\u0000\u0001\u00af"+ + "\u0001\u0000\u0000\u0000\u0001\u00b1\u0001\u0000\u0000\u0000\u0001\u00b3"+ + "\u0001\u0000\u0000\u0000\u0002\u00b5\u0001\u0000\u0000\u0000\u0002\u00b7"+ + "\u0001\u0000\u0000\u0000\u0002\u00b9\u0001\u0000\u0000\u0000\u0002\u00bb"+ + "\u0001\u0000\u0000\u0000\u0002\u00bd\u0001\u0000\u0000\u0000\u0003\u00bf"+ + "\u0001\u0000\u0000\u0000\u0003\u00c1\u0001\u0000\u0000\u0000\u0003\u00c3"+ + "\u0001\u0000\u0000\u0000\u0003\u00c5\u0001\u0000\u0000\u0000\u0003\u00c7"+ + "\u0001\u0000\u0000\u0000\u0003\u00c9\u0001\u0000\u0000\u0000\u0003\u00cb"+ + "\u0001\u0000\u0000\u0000\u0003\u00cf\u0001\u0000\u0000\u0000\u0003\u00d1"+ + "\u0001\u0000\u0000\u0000\u0003\u00d3\u0001\u0000\u0000\u0000\u0003\u00d5"+ + "\u0001\u0000\u0000\u0000\u0003\u00d7\u0001\u0000\u0000\u0000\u0003\u00d9"+ + "\u0001\u0000\u0000\u0000\u0004\u00db\u0001\u0000\u0000\u0000\u0004\u00dd"+ + "\u0001\u0000\u0000\u0000\u0004\u00df\u0001\u0000\u0000\u0000\u0004\u00e1"+ + "\u0001\u0000\u0000\u0000\u0004\u00e3\u0001\u0000\u0000\u0000\u0004\u00e9"+ + "\u0001\u0000\u0000\u0000\u0004\u00eb\u0001\u0000\u0000\u0000\u0004\u00ed"+ + "\u0001\u0000\u0000\u0000\u0004\u00ef\u0001\u0000\u0000\u0000\u0005\u00f1"+ + "\u0001\u0000\u0000\u0000\u0005\u00f3\u0001\u0000\u0000\u0000\u0005\u00f5"+ + "\u0001\u0000\u0000\u0000\u0005\u00f7\u0001\u0000\u0000\u0000\u0005\u00f9"+ + "\u0001\u0000\u0000\u0000\u0005\u00fb\u0001\u0000\u0000\u0000\u0005\u00fd"+ + "\u0001\u0000\u0000\u0000\u0005\u00ff\u0001\u0000\u0000\u0000\u0005\u0101"+ + "\u0001\u0000\u0000\u0000\u0005\u0103\u0001\u0000\u0000\u0000\u0005\u0105"+ + 
"\u0001\u0000\u0000\u0000\u0006\u0107\u0001\u0000\u0000\u0000\u0006\u0109"+ + "\u0001\u0000\u0000\u0000\u0006\u010b\u0001\u0000\u0000\u0000\u0006\u010d"+ + "\u0001\u0000\u0000\u0000\u0006\u0111\u0001\u0000\u0000\u0000\u0006\u0113"+ + "\u0001\u0000\u0000\u0000\u0006\u0115\u0001\u0000\u0000\u0000\u0006\u0117"+ + "\u0001\u0000\u0000\u0000\u0006\u0119\u0001\u0000\u0000\u0000\u0007\u011b"+ + "\u0001\u0000\u0000\u0000\u0007\u011d\u0001\u0000\u0000\u0000\u0007\u011f"+ + "\u0001\u0000\u0000\u0000\u0007\u0121\u0001\u0000\u0000\u0000\u0007\u0123"+ + "\u0001\u0000\u0000\u0000\u0007\u0125\u0001\u0000\u0000\u0000\u0007\u0127"+ + "\u0001\u0000\u0000\u0000\u0007\u0129\u0001\u0000\u0000\u0000\u0007\u012b"+ + "\u0001\u0000\u0000\u0000\u0007\u012d\u0001\u0000\u0000\u0000\u0007\u012f"+ + "\u0001\u0000\u0000\u0000\u0007\u0131\u0001\u0000\u0000\u0000\b\u0133\u0001"+ + "\u0000\u0000\u0000\b\u0135\u0001\u0000\u0000\u0000\b\u0137\u0001\u0000"+ + "\u0000\u0000\b\u0139\u0001\u0000\u0000\u0000\b\u013b\u0001\u0000\u0000"+ + "\u0000\b\u013d\u0001\u0000\u0000\u0000\b\u013f\u0001\u0000\u0000\u0000"+ + "\b\u0141\u0001\u0000\u0000\u0000\b\u0143\u0001\u0000\u0000\u0000\t\u0145"+ + "\u0001\u0000\u0000\u0000\t\u0147\u0001\u0000\u0000\u0000\t\u0149\u0001"+ + "\u0000\u0000\u0000\t\u014b\u0001\u0000\u0000\u0000\t\u014d\u0001\u0000"+ + "\u0000\u0000\n\u014f\u0001\u0000\u0000\u0000\n\u0151\u0001\u0000\u0000"+ + "\u0000\n\u0153\u0001\u0000\u0000\u0000\n\u0155\u0001\u0000\u0000\u0000"+ + "\n\u0157\u0001\u0000\u0000\u0000\n\u0159\u0001\u0000\u0000\u0000\u000b"+ + "\u015b\u0001\u0000\u0000\u0000\u000b\u015d\u0001\u0000\u0000\u0000\u000b"+ + "\u015f\u0001\u0000\u0000\u0000\u000b\u0161\u0001\u0000\u0000\u0000\u000b"+ + "\u0163\u0001\u0000\u0000\u0000\u000b\u0165\u0001\u0000\u0000\u0000\u000b"+ + "\u0167\u0001\u0000\u0000\u0000\u000b\u0169\u0001\u0000\u0000\u0000\u000b"+ + "\u016b\u0001\u0000\u0000\u0000\u000b\u016d\u0001\u0000\u0000\u0000\f\u016f"+ + 
"\u0001\u0000\u0000\u0000\f\u0171\u0001\u0000\u0000\u0000\f\u0173\u0001"+ + "\u0000\u0000\u0000\f\u0175\u0001\u0000\u0000\u0000\f\u0177\u0001\u0000"+ + "\u0000\u0000\f\u0179\u0001\u0000\u0000\u0000\f\u017b\u0001\u0000\u0000"+ + "\u0000\r\u017d\u0001\u0000\u0000\u0000\r\u017f\u0001\u0000\u0000\u0000"+ + "\r\u0181\u0001\u0000\u0000\u0000\r\u0183\u0001\u0000\u0000\u0000\r\u0185"+ + "\u0001\u0000\u0000\u0000\r\u0187\u0001\u0000\u0000\u0000\u000e\u0189\u0001"+ + "\u0000\u0000\u0000\u000e\u018b\u0001\u0000\u0000\u0000\u000e\u018d\u0001"+ + "\u0000\u0000\u0000\u000e\u018f\u0001\u0000\u0000\u0000\u000e\u0191\u0001"+ + "\u0000\u0000\u0000\u000e\u0193\u0001\u0000\u0000\u0000\u000e\u0195\u0001"+ + "\u0000\u0000\u0000\u000e\u0197\u0001\u0000\u0000\u0000\u000e\u0199\u0001"+ + "\u0000\u0000\u0000\u000f\u019b\u0001\u0000\u0000\u0000\u0011\u01a5\u0001"+ + "\u0000\u0000\u0000\u0013\u01ac\u0001\u0000\u0000\u0000\u0015\u01b5\u0001"+ + "\u0000\u0000\u0000\u0017\u01bc\u0001\u0000\u0000\u0000\u0019\u01c6\u0001"+ + "\u0000\u0000\u0000\u001b\u01cd\u0001\u0000\u0000\u0000\u001d\u01d4\u0001"+ + "\u0000\u0000\u0000\u001f\u01db\u0001\u0000\u0000\u0000!\u01e3\u0001\u0000"+ + "\u0000\u0000#\u01ef\u0001\u0000\u0000\u0000%\u01f8\u0001\u0000\u0000\u0000"+ + "\'\u01fe\u0001\u0000\u0000\u0000)\u0205\u0001\u0000\u0000\u0000+\u020c"+ + "\u0001\u0000\u0000\u0000-\u0214\u0001\u0000\u0000\u0000/\u021c\u0001\u0000"+ + "\u0000\u00001\u022b\u0001\u0000\u0000\u00003\u0235\u0001\u0000\u0000\u0000"+ + "5\u0241\u0001\u0000\u0000\u00007\u0247\u0001\u0000\u0000\u00009\u0258"+ + "\u0001\u0000\u0000\u0000;\u0268\u0001\u0000\u0000\u0000=\u026e\u0001\u0000"+ + "\u0000\u0000?\u0272\u0001\u0000\u0000\u0000A\u0274\u0001\u0000\u0000\u0000"+ + "C\u0276\u0001\u0000\u0000\u0000E\u0279\u0001\u0000\u0000\u0000G\u027b"+ + "\u0001\u0000\u0000\u0000I\u0284\u0001\u0000\u0000\u0000K\u0286\u0001\u0000"+ + "\u0000\u0000M\u028b\u0001\u0000\u0000\u0000O\u028d\u0001\u0000\u0000\u0000"+ + 
"Q\u0292\u0001\u0000\u0000\u0000S\u02b1\u0001\u0000\u0000\u0000U\u02b4"+ + "\u0001\u0000\u0000\u0000W\u02e2\u0001\u0000\u0000\u0000Y\u02e4\u0001\u0000"+ + "\u0000\u0000[\u02e7\u0001\u0000\u0000\u0000]\u02eb\u0001\u0000\u0000\u0000"+ + "_\u02ef\u0001\u0000\u0000\u0000a\u02f1\u0001\u0000\u0000\u0000c\u02f4"+ + "\u0001\u0000\u0000\u0000e\u02f6\u0001\u0000\u0000\u0000g\u02fb\u0001\u0000"+ + "\u0000\u0000i\u02fd\u0001\u0000\u0000\u0000k\u0303\u0001\u0000\u0000\u0000"+ + "m\u0309\u0001\u0000\u0000\u0000o\u030c\u0001\u0000\u0000\u0000q\u030f"+ + "\u0001\u0000\u0000\u0000s\u0314\u0001\u0000\u0000\u0000u\u0319\u0001\u0000"+ + "\u0000\u0000w\u031b\u0001\u0000\u0000\u0000y\u031f\u0001\u0000\u0000\u0000"+ + "{\u0324\u0001\u0000\u0000\u0000}\u032a\u0001\u0000\u0000\u0000\u007f\u032d"+ + "\u0001\u0000\u0000\u0000\u0081\u032f\u0001\u0000\u0000\u0000\u0083\u0335"+ + "\u0001\u0000\u0000\u0000\u0085\u0337\u0001\u0000\u0000\u0000\u0087\u033c"+ + "\u0001\u0000\u0000\u0000\u0089\u033f\u0001\u0000\u0000\u0000\u008b\u0342"+ + "\u0001\u0000\u0000\u0000\u008d\u0345\u0001\u0000\u0000\u0000\u008f\u0347"+ + "\u0001\u0000\u0000\u0000\u0091\u034a\u0001\u0000\u0000\u0000\u0093\u034c"+ + "\u0001\u0000\u0000\u0000\u0095\u034f\u0001\u0000\u0000\u0000\u0097\u0351"+ + "\u0001\u0000\u0000\u0000\u0099\u0353\u0001\u0000\u0000\u0000\u009b\u0355"+ + "\u0001\u0000\u0000\u0000\u009d\u0357\u0001\u0000\u0000\u0000\u009f\u0359"+ + "\u0001\u0000\u0000\u0000\u00a1\u035f\u0001\u0000\u0000\u0000\u00a3\u0375"+ + "\u0001\u0000\u0000\u0000\u00a5\u0377\u0001\u0000\u0000\u0000\u00a7\u037c"+ + "\u0001\u0000\u0000\u0000\u00a9\u0391\u0001\u0000\u0000\u0000\u00ab\u0393"+ + "\u0001\u0000\u0000\u0000\u00ad\u039b\u0001\u0000\u0000\u0000\u00af\u039d"+ + "\u0001\u0000\u0000\u0000\u00b1\u03a1\u0001\u0000\u0000\u0000\u00b3\u03a5"+ + "\u0001\u0000\u0000\u0000\u00b5\u03a9\u0001\u0000\u0000\u0000\u00b7\u03ae"+ + "\u0001\u0000\u0000\u0000\u00b9\u03b3\u0001\u0000\u0000\u0000\u00bb\u03b7"+ + 
"\u0001\u0000\u0000\u0000\u00bd\u03bb\u0001\u0000\u0000\u0000\u00bf\u03bf"+ + "\u0001\u0000\u0000\u0000\u00c1\u03c4\u0001\u0000\u0000\u0000\u00c3\u03c8"+ + "\u0001\u0000\u0000\u0000\u00c5\u03cc\u0001\u0000\u0000\u0000\u00c7\u03d0"+ + "\u0001\u0000\u0000\u0000\u00c9\u03d4\u0001\u0000\u0000\u0000\u00cb\u03d8"+ + "\u0001\u0000\u0000\u0000\u00cd\u03e4\u0001\u0000\u0000\u0000\u00cf\u03e7"+ + "\u0001\u0000\u0000\u0000\u00d1\u03eb\u0001\u0000\u0000\u0000\u00d3\u03ef"+ + "\u0001\u0000\u0000\u0000\u00d5\u03f3\u0001\u0000\u0000\u0000\u00d7\u03f7"+ + "\u0001\u0000\u0000\u0000\u00d9\u03fb\u0001\u0000\u0000\u0000\u00db\u03ff"+ + "\u0001\u0000\u0000\u0000\u00dd\u0404\u0001\u0000\u0000\u0000\u00df\u0408"+ + "\u0001\u0000\u0000\u0000\u00e1\u040c\u0001\u0000\u0000\u0000\u00e3\u0410"+ + "\u0001\u0000\u0000\u0000\u00e5\u0418\u0001\u0000\u0000\u0000\u00e7\u042d"+ + "\u0001\u0000\u0000\u0000\u00e9\u0431\u0001\u0000\u0000\u0000\u00eb\u0435"+ + "\u0001\u0000\u0000\u0000\u00ed\u0439\u0001\u0000\u0000\u0000\u00ef\u043d"+ + "\u0001\u0000\u0000\u0000\u00f1\u0441\u0001\u0000\u0000\u0000\u00f3\u0446"+ + "\u0001\u0000\u0000\u0000\u00f5\u044a\u0001\u0000\u0000\u0000\u00f7\u044e"+ + "\u0001\u0000\u0000\u0000\u00f9\u0452\u0001\u0000\u0000\u0000\u00fb\u0456"+ + "\u0001\u0000\u0000\u0000\u00fd\u045a\u0001\u0000\u0000\u0000\u00ff\u045d"+ + "\u0001\u0000\u0000\u0000\u0101\u0461\u0001\u0000\u0000\u0000\u0103\u0465"+ + "\u0001\u0000\u0000\u0000\u0105\u0469\u0001\u0000\u0000\u0000\u0107\u046d"+ + "\u0001\u0000\u0000\u0000\u0109\u0472\u0001\u0000\u0000\u0000\u010b\u0477"+ + "\u0001\u0000\u0000\u0000\u010d\u047c\u0001\u0000\u0000\u0000\u010f\u0483"+ + "\u0001\u0000\u0000\u0000\u0111\u048c\u0001\u0000\u0000\u0000\u0113\u0493"+ + "\u0001\u0000\u0000\u0000\u0115\u0497\u0001\u0000\u0000\u0000\u0117\u049b"+ + "\u0001\u0000\u0000\u0000\u0119\u049f\u0001\u0000\u0000\u0000\u011b\u04a3"+ + "\u0001\u0000\u0000\u0000\u011d\u04a9\u0001\u0000\u0000\u0000\u011f\u04ad"+ + 
"\u0001\u0000\u0000\u0000\u0121\u04b1\u0001\u0000\u0000\u0000\u0123\u04b5"+ + "\u0001\u0000\u0000\u0000\u0125\u04b9\u0001\u0000\u0000\u0000\u0127\u04bd"+ + "\u0001\u0000\u0000\u0000\u0129\u04c1\u0001\u0000\u0000\u0000\u012b\u04c5"+ + "\u0001\u0000\u0000\u0000\u012d\u04c9\u0001\u0000\u0000\u0000\u012f\u04cd"+ + "\u0001\u0000\u0000\u0000\u0131\u04d1\u0001\u0000\u0000\u0000\u0133\u04d5"+ + "\u0001\u0000\u0000\u0000\u0135\u04da\u0001\u0000\u0000\u0000\u0137\u04de"+ + "\u0001\u0000\u0000\u0000\u0139\u04e2\u0001\u0000\u0000\u0000\u013b\u04e6"+ + "\u0001\u0000\u0000\u0000\u013d\u04ea\u0001\u0000\u0000\u0000\u013f\u04ee"+ + "\u0001\u0000\u0000\u0000\u0141\u04f2\u0001\u0000\u0000\u0000\u0143\u04f6"+ + "\u0001\u0000\u0000\u0000\u0145\u04fa\u0001\u0000\u0000\u0000\u0147\u04ff"+ + "\u0001\u0000\u0000\u0000\u0149\u0504\u0001\u0000\u0000\u0000\u014b\u0508"+ + "\u0001\u0000\u0000\u0000\u014d\u050c\u0001\u0000\u0000\u0000\u014f\u0510"+ + "\u0001\u0000\u0000\u0000\u0151\u0515\u0001\u0000\u0000\u0000\u0153\u051c"+ + "\u0001\u0000\u0000\u0000\u0155\u0520\u0001\u0000\u0000\u0000\u0157\u0524"+ + "\u0001\u0000\u0000\u0000\u0159\u0528\u0001\u0000\u0000\u0000\u015b\u052c"+ + "\u0001\u0000\u0000\u0000\u015d\u0531\u0001\u0000\u0000\u0000\u015f\u0535"+ + "\u0001\u0000\u0000\u0000\u0161\u0539\u0001\u0000\u0000\u0000\u0163\u053d"+ + "\u0001\u0000\u0000\u0000\u0165\u0542\u0001\u0000\u0000\u0000\u0167\u0546"+ + "\u0001\u0000\u0000\u0000\u0169\u054a\u0001\u0000\u0000\u0000\u016b\u054e"+ + "\u0001\u0000\u0000\u0000\u016d\u0552\u0001\u0000\u0000\u0000\u016f\u0556"+ + "\u0001\u0000\u0000\u0000\u0171\u055c\u0001\u0000\u0000\u0000\u0173\u0560"+ + "\u0001\u0000\u0000\u0000\u0175\u0564\u0001\u0000\u0000\u0000\u0177\u0568"+ + "\u0001\u0000\u0000\u0000\u0179\u056c\u0001\u0000\u0000\u0000\u017b\u0570"+ + "\u0001\u0000\u0000\u0000\u017d\u0574\u0001\u0000\u0000\u0000\u017f\u0579"+ + "\u0001\u0000\u0000\u0000\u0181\u057f\u0001\u0000\u0000\u0000\u0183\u0585"+ + 
"\u0001\u0000\u0000\u0000\u0185\u0589\u0001\u0000\u0000\u0000\u0187\u058d"+ + "\u0001\u0000\u0000\u0000\u0189\u0591\u0001\u0000\u0000\u0000\u018b\u0597"+ + "\u0001\u0000\u0000\u0000\u018d\u059d\u0001\u0000\u0000\u0000\u018f\u05a1"+ + "\u0001\u0000\u0000\u0000\u0191\u05a5\u0001\u0000\u0000\u0000\u0193\u05a9"+ + "\u0001\u0000\u0000\u0000\u0195\u05af\u0001\u0000\u0000\u0000\u0197\u05b5"+ + "\u0001\u0000\u0000\u0000\u0199\u05bb\u0001\u0000\u0000\u0000\u019b\u019c"+ + "\u0007\u0000\u0000\u0000\u019c\u019d\u0007\u0001\u0000\u0000\u019d\u019e"+ + "\u0007\u0002\u0000\u0000\u019e\u019f\u0007\u0002\u0000\u0000\u019f\u01a0"+ + "\u0007\u0003\u0000\u0000\u01a0\u01a1\u0007\u0004\u0000\u0000\u01a1\u01a2"+ + "\u0007\u0005\u0000\u0000\u01a2\u01a3\u0001\u0000\u0000\u0000\u01a3\u01a4"+ + "\u0006\u0000\u0000\u0000\u01a4\u0010\u0001\u0000\u0000\u0000\u01a5\u01a6"+ + "\u0007\u0000\u0000\u0000\u01a6\u01a7\u0007\u0006\u0000\u0000\u01a7\u01a8"+ + "\u0007\u0007\u0000\u0000\u01a8\u01a9\u0007\b\u0000\u0000\u01a9\u01aa\u0001"+ + "\u0000\u0000\u0000\u01aa\u01ab\u0006\u0001\u0001\u0000\u01ab\u0012\u0001"+ + "\u0000\u0000\u0000\u01ac\u01ad\u0007\u0003\u0000\u0000\u01ad\u01ae\u0007"+ + "\t\u0000\u0000\u01ae\u01af\u0007\u0006\u0000\u0000\u01af\u01b0\u0007\u0001"+ + "\u0000\u0000\u01b0\u01b1\u0007\u0004\u0000\u0000\u01b1\u01b2\u0007\n\u0000"+ + "\u0000\u01b2\u01b3\u0001\u0000\u0000\u0000\u01b3\u01b4\u0006\u0002\u0002"+ + "\u0000\u01b4\u0014\u0001\u0000\u0000\u0000\u01b5\u01b6\u0007\u0003\u0000"+ + "\u0000\u01b6\u01b7\u0007\u000b\u0000\u0000\u01b7\u01b8\u0007\f\u0000\u0000"+ + "\u01b8\u01b9\u0007\r\u0000\u0000\u01b9\u01ba\u0001\u0000\u0000\u0000\u01ba"+ + "\u01bb\u0006\u0003\u0000\u0000\u01bb\u0016\u0001\u0000\u0000\u0000\u01bc"+ + "\u01bd\u0007\u0003\u0000\u0000\u01bd\u01be\u0007\u000e\u0000\u0000\u01be"+ + "\u01bf\u0007\b\u0000\u0000\u01bf\u01c0\u0007\r\u0000\u0000\u01c0\u01c1"+ + "\u0007\f\u0000\u0000\u01c1\u01c2\u0007\u0001\u0000\u0000\u01c2\u01c3\u0007"+ + 
"\t\u0000\u0000\u01c3\u01c4\u0001\u0000\u0000\u0000\u01c4\u01c5\u0006\u0004"+ + "\u0003\u0000\u01c5\u0018\u0001\u0000\u0000\u0000\u01c6\u01c7\u0007\u000f"+ + "\u0000\u0000\u01c7\u01c8\u0007\u0006\u0000\u0000\u01c8\u01c9\u0007\u0007"+ + "\u0000\u0000\u01c9\u01ca\u0007\u0010\u0000\u0000\u01ca\u01cb\u0001\u0000"+ + "\u0000\u0000\u01cb\u01cc\u0006\u0005\u0004\u0000\u01cc\u001a\u0001\u0000"+ + "\u0000\u0000\u01cd\u01ce\u0007\u0011\u0000\u0000\u01ce\u01cf\u0007\u0006"+ + "\u0000\u0000\u01cf\u01d0\u0007\u0007\u0000\u0000\u01d0\u01d1\u0007\u0012"+ + "\u0000\u0000\u01d1\u01d2\u0001\u0000\u0000\u0000\u01d2\u01d3\u0006\u0006"+ + "\u0000\u0000\u01d3\u001c\u0001\u0000\u0000\u0000\u01d4\u01d5\u0007\u0012"+ + "\u0000\u0000\u01d5\u01d6\u0007\u0003\u0000\u0000\u01d6\u01d7\u0007\u0003"+ + "\u0000\u0000\u01d7\u01d8\u0007\b\u0000\u0000\u01d8\u01d9\u0001\u0000\u0000"+ + "\u0000\u01d9\u01da\u0006\u0007\u0001\u0000\u01da\u001e\u0001\u0000\u0000"+ + "\u0000\u01db\u01dc\u0007\r\u0000\u0000\u01dc\u01dd\u0007\u0001\u0000\u0000"+ + "\u01dd\u01de\u0007\u0010\u0000\u0000\u01de\u01df\u0007\u0001\u0000\u0000"+ + "\u01df\u01e0\u0007\u0005\u0000\u0000\u01e0\u01e1\u0001\u0000\u0000\u0000"+ + "\u01e1\u01e2\u0006\b\u0000\u0000\u01e2 \u0001\u0000\u0000\u0000\u01e3"+ + "\u01e4\u0007\u0010\u0000\u0000\u01e4\u01e5\u0007\u000b\u0000\u0000\u01e5"+ + "\u01e6\u0005_\u0000\u0000\u01e6\u01e7\u0007\u0003\u0000\u0000\u01e7\u01e8"+ + "\u0007\u000e\u0000\u0000\u01e8\u01e9\u0007\b\u0000\u0000\u01e9\u01ea\u0007"+ + "\f\u0000\u0000\u01ea\u01eb\u0007\t\u0000\u0000\u01eb\u01ec\u0007\u0000"+ + "\u0000\u0000\u01ec\u01ed\u0001\u0000\u0000\u0000\u01ed\u01ee\u0006\t\u0005"+ + "\u0000\u01ee\"\u0001\u0000\u0000\u0000\u01ef\u01f0\u0007\u0006\u0000\u0000"+ + "\u01f0\u01f1\u0007\u0003\u0000\u0000\u01f1\u01f2\u0007\t\u0000\u0000\u01f2"+ + "\u01f3\u0007\f\u0000\u0000\u01f3\u01f4\u0007\u0010\u0000\u0000\u01f4\u01f5"+ + "\u0007\u0003\u0000\u0000\u01f5\u01f6\u0001\u0000\u0000\u0000\u01f6\u01f7"+ + 
"\u0006\n\u0006\u0000\u01f7$\u0001\u0000\u0000\u0000\u01f8\u01f9\u0007"+ + "\u0006\u0000\u0000\u01f9\u01fa\u0007\u0007\u0000\u0000\u01fa\u01fb\u0007"+ + "\u0013\u0000\u0000\u01fb\u01fc\u0001\u0000\u0000\u0000\u01fc\u01fd\u0006"+ + "\u000b\u0000\u0000\u01fd&\u0001\u0000\u0000\u0000\u01fe\u01ff\u0007\u0002"+ + "\u0000\u0000\u01ff\u0200\u0007\n\u0000\u0000\u0200\u0201\u0007\u0007\u0000"+ + "\u0000\u0201\u0202\u0007\u0013\u0000\u0000\u0202\u0203\u0001\u0000\u0000"+ + "\u0000\u0203\u0204\u0006\f\u0007\u0000\u0204(\u0001\u0000\u0000\u0000"+ + "\u0205\u0206\u0007\u0002\u0000\u0000\u0206\u0207\u0007\u0007\u0000\u0000"+ + "\u0207\u0208\u0007\u0006\u0000\u0000\u0208\u0209\u0007\u0005\u0000\u0000"+ + "\u0209\u020a\u0001\u0000\u0000\u0000\u020a\u020b\u0006\r\u0000\u0000\u020b"+ + "*\u0001\u0000\u0000\u0000\u020c\u020d\u0007\u0002\u0000\u0000\u020d\u020e"+ + "\u0007\u0005\u0000\u0000\u020e\u020f\u0007\f\u0000\u0000\u020f\u0210\u0007"+ + "\u0005\u0000\u0000\u0210\u0211\u0007\u0002\u0000\u0000\u0211\u0212\u0001"+ + "\u0000\u0000\u0000\u0212\u0213\u0006\u000e\u0000\u0000\u0213,\u0001\u0000"+ + "\u0000\u0000\u0214\u0215\u0007\u0013\u0000\u0000\u0215\u0216\u0007\n\u0000"+ + "\u0000\u0216\u0217\u0007\u0003\u0000\u0000\u0217\u0218\u0007\u0006\u0000"+ + "\u0000\u0218\u0219\u0007\u0003\u0000\u0000\u0219\u021a\u0001\u0000\u0000"+ + "\u0000\u021a\u021b\u0006\u000f\u0000\u0000\u021b.\u0001\u0000\u0000\u0000"+ + "\u021c\u021d\u0004\u0010\u0000\u0000\u021d\u021e\u0007\u0001\u0000\u0000"+ + "\u021e\u021f\u0007\t\u0000\u0000\u021f\u0220\u0007\r\u0000\u0000\u0220"+ + "\u0221\u0007\u0001\u0000\u0000\u0221\u0222\u0007\t\u0000\u0000\u0222\u0223"+ + "\u0007\u0003\u0000\u0000\u0223\u0224\u0007\u0002\u0000\u0000\u0224\u0225"+ + "\u0007\u0005\u0000\u0000\u0225\u0226\u0007\f\u0000\u0000\u0226\u0227\u0007"+ + "\u0005\u0000\u0000\u0227\u0228\u0007\u0002\u0000\u0000\u0228\u0229\u0001"+ + "\u0000\u0000\u0000\u0229\u022a\u0006\u0010\u0000\u0000\u022a0\u0001\u0000"+ + 
"\u0000\u0000\u022b\u022c\u0004\u0011\u0001\u0000\u022c\u022d\u0007\r\u0000"+ + "\u0000\u022d\u022e\u0007\u0007\u0000\u0000\u022e\u022f\u0007\u0007\u0000"+ + "\u0000\u022f\u0230\u0007\u0012\u0000\u0000\u0230\u0231\u0007\u0014\u0000"+ + "\u0000\u0231\u0232\u0007\b\u0000\u0000\u0232\u0233\u0001\u0000\u0000\u0000"+ + "\u0233\u0234\u0006\u0011\b\u0000\u02342\u0001\u0000\u0000\u0000\u0235"+ + "\u0236\u0004\u0012\u0002\u0000\u0236\u0237\u0007\u0010\u0000\u0000\u0237"+ + "\u0238\u0007\u0003\u0000\u0000\u0238\u0239\u0007\u0005\u0000\u0000\u0239"+ + "\u023a\u0007\u0006\u0000\u0000\u023a\u023b\u0007\u0001\u0000\u0000\u023b"+ + "\u023c\u0007\u0004\u0000\u0000\u023c\u023d\u0007\u0002\u0000\u0000\u023d"+ + "\u023e\u0001\u0000\u0000\u0000\u023e\u023f\u0006\u0012\t\u0000\u023f4"+ + "\u0001\u0000\u0000\u0000\u0240\u0242\b\u0015\u0000\u0000\u0241\u0240\u0001"+ + "\u0000\u0000\u0000\u0242\u0243\u0001\u0000\u0000\u0000\u0243\u0241\u0001"+ + "\u0000\u0000\u0000\u0243\u0244\u0001\u0000\u0000\u0000\u0244\u0245\u0001"+ + "\u0000\u0000\u0000\u0245\u0246\u0006\u0013\u0000\u0000\u02466\u0001\u0000"+ + "\u0000\u0000\u0247\u0248\u0005/\u0000\u0000\u0248\u0249\u0005/\u0000\u0000"+ + "\u0249\u024d\u0001\u0000\u0000\u0000\u024a\u024c\b\u0016\u0000\u0000\u024b"+ + "\u024a\u0001\u0000\u0000\u0000\u024c\u024f\u0001\u0000\u0000\u0000\u024d"+ + "\u024b\u0001\u0000\u0000\u0000\u024d\u024e\u0001\u0000\u0000\u0000\u024e"+ + "\u0251\u0001\u0000\u0000\u0000\u024f\u024d\u0001\u0000\u0000\u0000\u0250"+ + "\u0252\u0005\r\u0000\u0000\u0251\u0250\u0001\u0000\u0000\u0000\u0251\u0252"+ + "\u0001\u0000\u0000\u0000\u0252\u0254\u0001\u0000\u0000\u0000\u0253\u0255"+ + "\u0005\n\u0000\u0000\u0254\u0253\u0001\u0000\u0000\u0000\u0254\u0255\u0001"+ + "\u0000\u0000\u0000\u0255\u0256\u0001\u0000\u0000\u0000\u0256\u0257\u0006"+ + "\u0014\n\u0000\u02578\u0001\u0000\u0000\u0000\u0258\u0259\u0005/\u0000"+ + "\u0000\u0259\u025a\u0005*\u0000\u0000\u025a\u025f\u0001\u0000\u0000\u0000"+ + 
"\u025b\u025e\u00039\u0015\u0000\u025c\u025e\t\u0000\u0000\u0000\u025d"+ + "\u025b\u0001\u0000\u0000\u0000\u025d\u025c\u0001\u0000\u0000\u0000\u025e"+ + "\u0261\u0001\u0000\u0000\u0000\u025f\u0260\u0001\u0000\u0000\u0000\u025f"+ + "\u025d\u0001\u0000\u0000\u0000\u0260\u0262\u0001\u0000\u0000\u0000\u0261"+ + "\u025f\u0001\u0000\u0000\u0000\u0262\u0263\u0005*\u0000\u0000\u0263\u0264"+ + "\u0005/\u0000\u0000\u0264\u0265\u0001\u0000\u0000\u0000\u0265\u0266\u0006"+ + "\u0015\n\u0000\u0266:\u0001\u0000\u0000\u0000\u0267\u0269\u0007\u0017"+ + "\u0000\u0000\u0268\u0267\u0001\u0000\u0000\u0000\u0269\u026a\u0001\u0000"+ + "\u0000\u0000\u026a\u0268\u0001\u0000\u0000\u0000\u026a\u026b\u0001\u0000"+ + "\u0000\u0000\u026b\u026c\u0001\u0000\u0000\u0000\u026c\u026d\u0006\u0016"+ + "\n\u0000\u026d<\u0001\u0000\u0000\u0000\u026e\u026f\u0005|\u0000\u0000"+ + "\u026f\u0270\u0001\u0000\u0000\u0000\u0270\u0271\u0006\u0017\u000b\u0000"+ + "\u0271>\u0001\u0000\u0000\u0000\u0272\u0273\u0007\u0018\u0000\u0000\u0273"+ + "@\u0001\u0000\u0000\u0000\u0274\u0275\u0007\u0019\u0000\u0000\u0275B\u0001"+ + "\u0000\u0000\u0000\u0276\u0277\u0005\\\u0000\u0000\u0277\u0278\u0007\u001a"+ + "\u0000\u0000\u0278D\u0001\u0000\u0000\u0000\u0279\u027a\b\u001b\u0000"+ + "\u0000\u027aF\u0001\u0000\u0000\u0000\u027b\u027d\u0007\u0003\u0000\u0000"+ + "\u027c\u027e\u0007\u001c\u0000\u0000\u027d\u027c\u0001\u0000\u0000\u0000"+ + "\u027d\u027e\u0001\u0000\u0000\u0000\u027e\u0280\u0001\u0000\u0000\u0000"+ + "\u027f\u0281\u0003?\u0018\u0000\u0280\u027f\u0001\u0000\u0000\u0000\u0281"+ + "\u0282\u0001\u0000\u0000\u0000\u0282\u0280\u0001\u0000\u0000\u0000\u0282"+ + "\u0283\u0001\u0000\u0000\u0000\u0283H\u0001\u0000\u0000\u0000\u0284\u0285"+ + "\u0005@\u0000\u0000\u0285J\u0001\u0000\u0000\u0000\u0286\u0287\u0005`"+ + "\u0000\u0000\u0287L\u0001\u0000\u0000\u0000\u0288\u028c\b\u001d\u0000"+ + "\u0000\u0289\u028a\u0005`\u0000\u0000\u028a\u028c\u0005`\u0000\u0000\u028b"+ + 
"\u0288\u0001\u0000\u0000\u0000\u028b\u0289\u0001\u0000\u0000\u0000\u028c"+ + "N\u0001\u0000\u0000\u0000\u028d\u028e\u0005_\u0000\u0000\u028eP\u0001"+ + "\u0000\u0000\u0000\u028f\u0293\u0003A\u0019\u0000\u0290\u0293\u0003?\u0018"+ + "\u0000\u0291\u0293\u0003O \u0000\u0292\u028f\u0001\u0000\u0000\u0000\u0292"+ + "\u0290\u0001\u0000\u0000\u0000\u0292\u0291\u0001\u0000\u0000\u0000\u0293"+ + "R\u0001\u0000\u0000\u0000\u0294\u0299\u0005\"\u0000\u0000\u0295\u0298"+ + "\u0003C\u001a\u0000\u0296\u0298\u0003E\u001b\u0000\u0297\u0295\u0001\u0000"+ + "\u0000\u0000\u0297\u0296\u0001\u0000\u0000\u0000\u0298\u029b\u0001\u0000"+ + "\u0000\u0000\u0299\u0297\u0001\u0000\u0000\u0000\u0299\u029a\u0001\u0000"+ + "\u0000\u0000\u029a\u029c\u0001\u0000\u0000\u0000\u029b\u0299\u0001\u0000"+ + "\u0000\u0000\u029c\u02b2\u0005\"\u0000\u0000\u029d\u029e\u0005\"\u0000"+ + "\u0000\u029e\u029f\u0005\"\u0000\u0000\u029f\u02a0\u0005\"\u0000\u0000"+ + "\u02a0\u02a4\u0001\u0000\u0000\u0000\u02a1\u02a3\b\u0016\u0000\u0000\u02a2"+ + "\u02a1\u0001\u0000\u0000\u0000\u02a3\u02a6\u0001\u0000\u0000\u0000\u02a4"+ + "\u02a5\u0001\u0000\u0000\u0000\u02a4\u02a2\u0001\u0000\u0000\u0000\u02a5"+ + "\u02a7\u0001\u0000\u0000\u0000\u02a6\u02a4\u0001\u0000\u0000\u0000\u02a7"+ + "\u02a8\u0005\"\u0000\u0000\u02a8\u02a9\u0005\"\u0000\u0000\u02a9\u02aa"+ + "\u0005\"\u0000\u0000\u02aa\u02ac\u0001\u0000\u0000\u0000\u02ab\u02ad\u0005"+ + "\"\u0000\u0000\u02ac\u02ab\u0001\u0000\u0000\u0000\u02ac\u02ad\u0001\u0000"+ + "\u0000\u0000\u02ad\u02af\u0001\u0000\u0000\u0000\u02ae\u02b0\u0005\"\u0000"+ + "\u0000\u02af\u02ae\u0001\u0000\u0000\u0000\u02af\u02b0\u0001\u0000\u0000"+ + "\u0000\u02b0\u02b2\u0001\u0000\u0000\u0000\u02b1\u0294\u0001\u0000\u0000"+ + "\u0000\u02b1\u029d\u0001\u0000\u0000\u0000\u02b2T\u0001\u0000\u0000\u0000"+ + "\u02b3\u02b5\u0003?\u0018\u0000\u02b4\u02b3\u0001\u0000\u0000\u0000\u02b5"+ + "\u02b6\u0001\u0000\u0000\u0000\u02b6\u02b4\u0001\u0000\u0000\u0000\u02b6"+ + 
"\u02b7\u0001\u0000\u0000\u0000\u02b7V\u0001\u0000\u0000\u0000\u02b8\u02ba"+ + "\u0003?\u0018\u0000\u02b9\u02b8\u0001\u0000\u0000\u0000\u02ba\u02bb\u0001"+ + "\u0000\u0000\u0000\u02bb\u02b9\u0001\u0000\u0000\u0000\u02bb\u02bc\u0001"+ + "\u0000\u0000\u0000\u02bc\u02bd\u0001\u0000\u0000\u0000\u02bd\u02c1\u0003"+ + "g,\u0000\u02be\u02c0\u0003?\u0018\u0000\u02bf\u02be\u0001\u0000\u0000"+ + "\u0000\u02c0\u02c3\u0001\u0000\u0000\u0000\u02c1\u02bf\u0001\u0000\u0000"+ + "\u0000\u02c1\u02c2\u0001\u0000\u0000\u0000\u02c2\u02e3\u0001\u0000\u0000"+ + "\u0000\u02c3\u02c1\u0001\u0000\u0000\u0000\u02c4\u02c6\u0003g,\u0000\u02c5"+ + "\u02c7\u0003?\u0018\u0000\u02c6\u02c5\u0001\u0000\u0000\u0000\u02c7\u02c8"+ + "\u0001\u0000\u0000\u0000\u02c8\u02c6\u0001\u0000\u0000\u0000\u02c8\u02c9"+ + "\u0001\u0000\u0000\u0000\u02c9\u02e3\u0001\u0000\u0000\u0000\u02ca\u02cc"+ + "\u0003?\u0018\u0000\u02cb\u02ca\u0001\u0000\u0000\u0000\u02cc\u02cd\u0001"+ + "\u0000\u0000\u0000\u02cd\u02cb\u0001\u0000\u0000\u0000\u02cd\u02ce\u0001"+ + "\u0000\u0000\u0000\u02ce\u02d6\u0001\u0000\u0000\u0000\u02cf\u02d3\u0003"+ + "g,\u0000\u02d0\u02d2\u0003?\u0018\u0000\u02d1\u02d0\u0001\u0000\u0000"+ + "\u0000\u02d2\u02d5\u0001\u0000\u0000\u0000\u02d3\u02d1\u0001\u0000\u0000"+ + "\u0000\u02d3\u02d4\u0001\u0000\u0000\u0000\u02d4\u02d7\u0001\u0000\u0000"+ + "\u0000\u02d5\u02d3\u0001\u0000\u0000\u0000\u02d6\u02cf\u0001\u0000\u0000"+ + "\u0000\u02d6\u02d7\u0001\u0000\u0000\u0000\u02d7\u02d8\u0001\u0000\u0000"+ + "\u0000\u02d8\u02d9\u0003G\u001c\u0000\u02d9\u02e3\u0001\u0000\u0000\u0000"+ + "\u02da\u02dc\u0003g,\u0000\u02db\u02dd\u0003?\u0018\u0000\u02dc\u02db"+ + "\u0001\u0000\u0000\u0000\u02dd\u02de\u0001\u0000\u0000\u0000\u02de\u02dc"+ + "\u0001\u0000\u0000\u0000\u02de\u02df\u0001\u0000\u0000\u0000\u02df\u02e0"+ + "\u0001\u0000\u0000\u0000\u02e0\u02e1\u0003G\u001c\u0000\u02e1\u02e3\u0001"+ + "\u0000\u0000\u0000\u02e2\u02b9\u0001\u0000\u0000\u0000\u02e2\u02c4\u0001"+ + 
"\u0000\u0000\u0000\u02e2\u02cb\u0001\u0000\u0000\u0000\u02e2\u02da\u0001"+ + "\u0000\u0000\u0000\u02e3X\u0001\u0000\u0000\u0000\u02e4\u02e5\u0007\u001e"+ + "\u0000\u0000\u02e5\u02e6\u0007\u001f\u0000\u0000\u02e6Z\u0001\u0000\u0000"+ + "\u0000\u02e7\u02e8\u0007\f\u0000\u0000\u02e8\u02e9\u0007\t\u0000\u0000"+ + "\u02e9\u02ea\u0007\u0000\u0000\u0000\u02ea\\\u0001\u0000\u0000\u0000\u02eb"+ + "\u02ec\u0007\f\u0000\u0000\u02ec\u02ed\u0007\u0002\u0000\u0000\u02ed\u02ee"+ + "\u0007\u0004\u0000\u0000\u02ee^\u0001\u0000\u0000\u0000\u02ef\u02f0\u0005"+ + "=\u0000\u0000\u02f0`\u0001\u0000\u0000\u0000\u02f1\u02f2\u0005:\u0000"+ + "\u0000\u02f2\u02f3\u0005:\u0000\u0000\u02f3b\u0001\u0000\u0000\u0000\u02f4"+ + "\u02f5\u0005,\u0000\u0000\u02f5d\u0001\u0000\u0000\u0000\u02f6\u02f7\u0007"+ + "\u0000\u0000\u0000\u02f7\u02f8\u0007\u0003\u0000\u0000\u02f8\u02f9\u0007"+ + "\u0002\u0000\u0000\u02f9\u02fa\u0007\u0004\u0000\u0000\u02faf\u0001\u0000"+ + "\u0000\u0000\u02fb\u02fc\u0005.\u0000\u0000\u02fch\u0001\u0000\u0000\u0000"+ + "\u02fd\u02fe\u0007\u000f\u0000\u0000\u02fe\u02ff\u0007\f\u0000\u0000\u02ff"+ + "\u0300\u0007\r\u0000\u0000\u0300\u0301\u0007\u0002\u0000\u0000\u0301\u0302"+ + "\u0007\u0003\u0000\u0000\u0302j\u0001\u0000\u0000\u0000\u0303\u0304\u0007"+ + "\u000f\u0000\u0000\u0304\u0305\u0007\u0001\u0000\u0000\u0305\u0306\u0007"+ + "\u0006\u0000\u0000\u0306\u0307\u0007\u0002\u0000\u0000\u0307\u0308\u0007"+ + "\u0005\u0000\u0000\u0308l\u0001\u0000\u0000\u0000\u0309\u030a\u0007\u0001"+ + "\u0000\u0000\u030a\u030b\u0007\t\u0000\u0000\u030bn\u0001\u0000\u0000"+ + "\u0000\u030c\u030d\u0007\u0001\u0000\u0000\u030d\u030e\u0007\u0002\u0000"+ + "\u0000\u030ep\u0001\u0000\u0000\u0000\u030f\u0310\u0007\r\u0000\u0000"+ + "\u0310\u0311\u0007\f\u0000\u0000\u0311\u0312\u0007\u0002\u0000\u0000\u0312"+ + "\u0313\u0007\u0005\u0000\u0000\u0313r\u0001\u0000\u0000\u0000\u0314\u0315"+ + "\u0007\r\u0000\u0000\u0315\u0316\u0007\u0001\u0000\u0000\u0316\u0317\u0007"+ + 
"\u0012\u0000\u0000\u0317\u0318\u0007\u0003\u0000\u0000\u0318t\u0001\u0000"+ + "\u0000\u0000\u0319\u031a\u0005(\u0000\u0000\u031av\u0001\u0000\u0000\u0000"+ + "\u031b\u031c\u0007\t\u0000\u0000\u031c\u031d\u0007\u0007\u0000\u0000\u031d"+ + "\u031e\u0007\u0005\u0000\u0000\u031ex\u0001\u0000\u0000\u0000\u031f\u0320"+ + "\u0007\t\u0000\u0000\u0320\u0321\u0007\u0014\u0000\u0000\u0321\u0322\u0007"+ + "\r\u0000\u0000\u0322\u0323\u0007\r\u0000\u0000\u0323z\u0001\u0000\u0000"+ + "\u0000\u0324\u0325\u0007\t\u0000\u0000\u0325\u0326\u0007\u0014\u0000\u0000"+ + "\u0326\u0327\u0007\r\u0000\u0000\u0327\u0328\u0007\r\u0000\u0000\u0328"+ + "\u0329\u0007\u0002\u0000\u0000\u0329|\u0001\u0000\u0000\u0000\u032a\u032b"+ + "\u0007\u0007\u0000\u0000\u032b\u032c\u0007\u0006\u0000\u0000\u032c~\u0001"+ + "\u0000\u0000\u0000\u032d\u032e\u0005?\u0000\u0000\u032e\u0080\u0001\u0000"+ + "\u0000\u0000\u032f\u0330\u0007\u0006\u0000\u0000\u0330\u0331\u0007\r\u0000"+ + "\u0000\u0331\u0332\u0007\u0001\u0000\u0000\u0332\u0333\u0007\u0012\u0000"+ + "\u0000\u0333\u0334\u0007\u0003\u0000\u0000\u0334\u0082\u0001\u0000\u0000"+ + "\u0000\u0335\u0336\u0005)\u0000\u0000\u0336\u0084\u0001\u0000\u0000\u0000"+ + "\u0337\u0338\u0007\u0005\u0000\u0000\u0338\u0339\u0007\u0006\u0000\u0000"+ + "\u0339\u033a\u0007\u0014\u0000\u0000\u033a\u033b\u0007\u0003\u0000\u0000"+ + "\u033b\u0086\u0001\u0000\u0000\u0000\u033c\u033d\u0005=\u0000\u0000\u033d"+ + "\u033e\u0005=\u0000\u0000\u033e\u0088\u0001\u0000\u0000\u0000\u033f\u0340"+ + "\u0005=\u0000\u0000\u0340\u0341\u0005~\u0000\u0000\u0341\u008a\u0001\u0000"+ + "\u0000\u0000\u0342\u0343\u0005!\u0000\u0000\u0343\u0344\u0005=\u0000\u0000"+ + "\u0344\u008c\u0001\u0000\u0000\u0000\u0345\u0346\u0005<\u0000\u0000\u0346"+ + "\u008e\u0001\u0000\u0000\u0000\u0347\u0348\u0005<\u0000\u0000\u0348\u0349"+ + "\u0005=\u0000\u0000\u0349\u0090\u0001\u0000\u0000\u0000\u034a\u034b\u0005"+ + ">\u0000\u0000\u034b\u0092\u0001\u0000\u0000\u0000\u034c\u034d\u0005>\u0000"+ + 
"\u0000\u034d\u034e\u0005=\u0000\u0000\u034e\u0094\u0001\u0000\u0000\u0000"+ + "\u034f\u0350\u0005+\u0000\u0000\u0350\u0096\u0001\u0000\u0000\u0000\u0351"+ + "\u0352\u0005-\u0000\u0000\u0352\u0098\u0001\u0000\u0000\u0000\u0353\u0354"+ + "\u0005*\u0000\u0000\u0354\u009a\u0001\u0000\u0000\u0000\u0355\u0356\u0005"+ + "/\u0000\u0000\u0356\u009c\u0001\u0000\u0000\u0000\u0357\u0358\u0005%\u0000"+ + "\u0000\u0358\u009e\u0001\u0000\u0000\u0000\u0359\u035a\u0007\u0010\u0000"+ + "\u0000\u035a\u035b\u0007\f\u0000\u0000\u035b\u035c\u0007\u0005\u0000\u0000"+ + "\u035c\u035d\u0007\u0004\u0000\u0000\u035d\u035e\u0007\n\u0000\u0000\u035e"+ + "\u00a0\u0001\u0000\u0000\u0000\u035f\u0360\u0004I\u0003\u0000\u0360\u0361"+ + "\u0003-\u000f\u0000\u0361\u0362\u0001\u0000\u0000\u0000\u0362\u0363\u0006"+ + "I\f\u0000\u0363\u00a2\u0001\u0000\u0000\u0000\u0364\u0367\u0003\u007f"+ + "8\u0000\u0365\u0368\u0003A\u0019\u0000\u0366\u0368\u0003O \u0000\u0367"+ + "\u0365\u0001\u0000\u0000\u0000\u0367\u0366\u0001\u0000\u0000\u0000\u0368"+ + "\u036c\u0001\u0000\u0000\u0000\u0369\u036b\u0003Q!\u0000\u036a\u0369\u0001"+ + "\u0000\u0000\u0000\u036b\u036e\u0001\u0000\u0000\u0000\u036c\u036a\u0001"+ + "\u0000\u0000\u0000\u036c\u036d\u0001\u0000\u0000\u0000\u036d\u0376\u0001"+ + "\u0000\u0000\u0000\u036e\u036c\u0001\u0000\u0000\u0000\u036f\u0371\u0003"+ + "\u007f8\u0000\u0370\u0372\u0003?\u0018\u0000\u0371\u0370\u0001\u0000\u0000"+ + "\u0000\u0372\u0373\u0001\u0000\u0000\u0000\u0373\u0371\u0001\u0000\u0000"+ + "\u0000\u0373\u0374\u0001\u0000\u0000\u0000\u0374\u0376\u0001\u0000\u0000"+ + "\u0000\u0375\u0364\u0001\u0000\u0000\u0000\u0375\u036f\u0001\u0000\u0000"+ + "\u0000\u0376\u00a4\u0001\u0000\u0000\u0000\u0377\u0378\u0005[\u0000\u0000"+ + "\u0378\u0379\u0001\u0000\u0000\u0000\u0379\u037a\u0006K\u0000\u0000\u037a"+ + "\u037b\u0006K\u0000\u0000\u037b\u00a6\u0001\u0000\u0000\u0000\u037c\u037d"+ + "\u0005]\u0000\u0000\u037d\u037e\u0001\u0000\u0000\u0000\u037e\u037f\u0006"+ + 
"L\u000b\u0000\u037f\u0380\u0006L\u000b\u0000\u0380\u00a8\u0001\u0000\u0000"+ + "\u0000\u0381\u0385\u0003A\u0019\u0000\u0382\u0384\u0003Q!\u0000\u0383"+ + "\u0382\u0001\u0000\u0000\u0000\u0384\u0387\u0001\u0000\u0000\u0000\u0385"+ + "\u0383\u0001\u0000\u0000\u0000\u0385\u0386\u0001\u0000\u0000\u0000\u0386"+ + "\u0392\u0001\u0000\u0000\u0000\u0387\u0385\u0001\u0000\u0000\u0000\u0388"+ + "\u038b\u0003O \u0000\u0389\u038b\u0003I\u001d\u0000\u038a\u0388\u0001"+ + "\u0000\u0000\u0000\u038a\u0389\u0001\u0000\u0000\u0000\u038b\u038d\u0001"+ + "\u0000\u0000\u0000\u038c\u038e\u0003Q!\u0000\u038d\u038c\u0001\u0000\u0000"+ + "\u0000\u038e\u038f\u0001\u0000\u0000\u0000\u038f\u038d\u0001\u0000\u0000"+ + "\u0000\u038f\u0390\u0001\u0000\u0000\u0000\u0390\u0392\u0001\u0000\u0000"+ + "\u0000\u0391\u0381\u0001\u0000\u0000\u0000\u0391\u038a\u0001\u0000\u0000"+ + "\u0000\u0392\u00aa\u0001\u0000\u0000\u0000\u0393\u0395\u0003K\u001e\u0000"+ + "\u0394\u0396\u0003M\u001f\u0000\u0395\u0394\u0001\u0000\u0000\u0000\u0396"+ + "\u0397\u0001\u0000\u0000\u0000\u0397\u0395\u0001\u0000\u0000\u0000\u0397"+ + "\u0398\u0001\u0000\u0000\u0000\u0398\u0399\u0001\u0000\u0000\u0000\u0399"+ + "\u039a\u0003K\u001e\u0000\u039a\u00ac\u0001\u0000\u0000\u0000\u039b\u039c"+ + "\u0003\u00abN\u0000\u039c\u00ae\u0001\u0000\u0000\u0000\u039d\u039e\u0003"+ + "7\u0014\u0000\u039e\u039f\u0001\u0000\u0000\u0000\u039f\u03a0\u0006P\n"+ + "\u0000\u03a0\u00b0\u0001\u0000\u0000\u0000\u03a1\u03a2\u00039\u0015\u0000"+ + "\u03a2\u03a3\u0001\u0000\u0000\u0000\u03a3\u03a4\u0006Q\n\u0000\u03a4"+ + "\u00b2\u0001\u0000\u0000\u0000\u03a5\u03a6\u0003;\u0016\u0000\u03a6\u03a7"+ + "\u0001\u0000\u0000\u0000\u03a7\u03a8\u0006R\n\u0000\u03a8\u00b4\u0001"+ + "\u0000\u0000\u0000\u03a9\u03aa\u0003\u00a5K\u0000\u03aa\u03ab\u0001\u0000"+ + "\u0000\u0000\u03ab\u03ac\u0006S\r\u0000\u03ac\u03ad\u0006S\u000e\u0000"+ + "\u03ad\u00b6\u0001\u0000\u0000\u0000\u03ae\u03af\u0003=\u0017\u0000\u03af"+ + 
"\u03b0\u0001\u0000\u0000\u0000\u03b0\u03b1\u0006T\u000f\u0000\u03b1\u03b2"+ + "\u0006T\u000b\u0000\u03b2\u00b8\u0001\u0000\u0000\u0000\u03b3\u03b4\u0003"+ + ";\u0016\u0000\u03b4\u03b5\u0001\u0000\u0000\u0000\u03b5\u03b6\u0006U\n"+ + "\u0000\u03b6\u00ba\u0001\u0000\u0000\u0000\u03b7\u03b8\u00037\u0014\u0000"+ + "\u03b8\u03b9\u0001\u0000\u0000\u0000\u03b9\u03ba\u0006V\n\u0000\u03ba"+ + "\u00bc\u0001\u0000\u0000\u0000\u03bb\u03bc\u00039\u0015\u0000\u03bc\u03bd"+ + "\u0001\u0000\u0000\u0000\u03bd\u03be\u0006W\n\u0000\u03be\u00be\u0001"+ + "\u0000\u0000\u0000\u03bf\u03c0\u0003=\u0017\u0000\u03c0\u03c1\u0001\u0000"+ + "\u0000\u0000\u03c1\u03c2\u0006X\u000f\u0000\u03c2\u03c3\u0006X\u000b\u0000"+ + "\u03c3\u00c0\u0001\u0000\u0000\u0000\u03c4\u03c5\u0003\u00a5K\u0000\u03c5"+ + "\u03c6\u0001\u0000\u0000\u0000\u03c6\u03c7\u0006Y\r\u0000\u03c7\u00c2"+ + "\u0001\u0000\u0000\u0000\u03c8\u03c9\u0003\u00a7L\u0000\u03c9\u03ca\u0001"+ + "\u0000\u0000\u0000\u03ca\u03cb\u0006Z\u0010\u0000\u03cb\u00c4\u0001\u0000"+ + "\u0000\u0000\u03cc\u03cd\u0003\u0151\u00a1\u0000\u03cd\u03ce\u0001\u0000"+ + "\u0000\u0000\u03ce\u03cf\u0006[\u0011\u0000\u03cf\u00c6\u0001\u0000\u0000"+ + "\u0000\u03d0\u03d1\u0003c*\u0000\u03d1\u03d2\u0001\u0000\u0000\u0000\u03d2"+ + "\u03d3\u0006\\\u0012\u0000\u03d3\u00c8\u0001\u0000\u0000\u0000\u03d4\u03d5"+ + "\u0003_(\u0000\u03d5\u03d6\u0001\u0000\u0000\u0000\u03d6\u03d7\u0006]"+ + "\u0013\u0000\u03d7\u00ca\u0001\u0000\u0000\u0000\u03d8\u03d9\u0007\u0010"+ + "\u0000\u0000\u03d9\u03da\u0007\u0003\u0000\u0000\u03da\u03db\u0007\u0005"+ + "\u0000\u0000\u03db\u03dc\u0007\f\u0000\u0000\u03dc\u03dd\u0007\u0000\u0000"+ + "\u0000\u03dd\u03de\u0007\f\u0000\u0000\u03de\u03df\u0007\u0005\u0000\u0000"+ + "\u03df\u03e0\u0007\f\u0000\u0000\u03e0\u00cc\u0001\u0000\u0000\u0000\u03e1"+ + "\u03e5\b \u0000\u0000\u03e2\u03e3\u0005/\u0000\u0000\u03e3\u03e5\b!\u0000"+ + "\u0000\u03e4\u03e1\u0001\u0000\u0000\u0000\u03e4\u03e2\u0001\u0000\u0000"+ + 
"\u0000\u03e5\u00ce\u0001\u0000\u0000\u0000\u03e6\u03e8\u0003\u00cd_\u0000"+ + "\u03e7\u03e6\u0001\u0000\u0000\u0000\u03e8\u03e9\u0001\u0000\u0000\u0000"+ + "\u03e9\u03e7\u0001\u0000\u0000\u0000\u03e9\u03ea\u0001\u0000\u0000\u0000"+ + "\u03ea\u00d0\u0001\u0000\u0000\u0000\u03eb\u03ec\u0003\u00cf`\u0000\u03ec"+ + "\u03ed\u0001\u0000\u0000\u0000\u03ed\u03ee\u0006a\u0014\u0000\u03ee\u00d2"+ + "\u0001\u0000\u0000\u0000\u03ef\u03f0\u0003S\"\u0000\u03f0\u03f1\u0001"+ + "\u0000\u0000\u0000\u03f1\u03f2\u0006b\u0015\u0000\u03f2\u00d4\u0001\u0000"+ + "\u0000\u0000\u03f3\u03f4\u00037\u0014\u0000\u03f4\u03f5\u0001\u0000\u0000"+ + "\u0000\u03f5\u03f6\u0006c\n\u0000\u03f6\u00d6\u0001\u0000\u0000\u0000"+ + "\u03f7\u03f8\u00039\u0015\u0000\u03f8\u03f9\u0001\u0000\u0000\u0000\u03f9"+ + "\u03fa\u0006d\n\u0000\u03fa\u00d8\u0001\u0000\u0000\u0000\u03fb\u03fc"+ + "\u0003;\u0016\u0000\u03fc\u03fd\u0001\u0000\u0000\u0000\u03fd\u03fe\u0006"+ + "e\n\u0000\u03fe\u00da\u0001\u0000\u0000\u0000\u03ff\u0400\u0003=\u0017"+ + "\u0000\u0400\u0401\u0001\u0000\u0000\u0000\u0401\u0402\u0006f\u000f\u0000"+ + "\u0402\u0403\u0006f\u000b\u0000\u0403\u00dc\u0001\u0000\u0000\u0000\u0404"+ + "\u0405\u0003g,\u0000\u0405\u0406\u0001\u0000\u0000\u0000\u0406\u0407\u0006"+ + "g\u0016\u0000\u0407\u00de\u0001\u0000\u0000\u0000\u0408\u0409\u0003c*"+ + "\u0000\u0409\u040a\u0001\u0000\u0000\u0000\u040a\u040b\u0006h\u0012\u0000"+ + "\u040b\u00e0\u0001\u0000\u0000\u0000\u040c\u040d\u0003\u007f8\u0000\u040d"+ + "\u040e\u0001\u0000\u0000\u0000\u040e\u040f\u0006i\u0017\u0000\u040f\u00e2"+ + "\u0001\u0000\u0000\u0000\u0410\u0411\u0003\u00a3J\u0000\u0411\u0412\u0001"+ + "\u0000\u0000\u0000\u0412\u0413\u0006j\u0018\u0000\u0413\u00e4\u0001\u0000"+ + "\u0000\u0000\u0414\u0419\u0003A\u0019\u0000\u0415\u0419\u0003?\u0018\u0000"+ + "\u0416\u0419\u0003O \u0000\u0417\u0419\u0003\u0099E\u0000\u0418\u0414"+ + "\u0001\u0000\u0000\u0000\u0418\u0415\u0001\u0000\u0000\u0000\u0418\u0416"+ + 
"\u0001\u0000\u0000\u0000\u0418\u0417\u0001\u0000\u0000\u0000\u0419\u00e6"+ + "\u0001\u0000\u0000\u0000\u041a\u041d\u0003A\u0019\u0000\u041b\u041d\u0003"+ + "\u0099E\u0000\u041c\u041a\u0001\u0000\u0000\u0000\u041c\u041b\u0001\u0000"+ + "\u0000\u0000\u041d\u0421\u0001\u0000\u0000\u0000\u041e\u0420\u0003\u00e5"+ + "k\u0000\u041f\u041e\u0001\u0000\u0000\u0000\u0420\u0423\u0001\u0000\u0000"+ + "\u0000\u0421\u041f\u0001\u0000\u0000\u0000\u0421\u0422\u0001\u0000\u0000"+ + "\u0000\u0422\u042e\u0001\u0000\u0000\u0000\u0423\u0421\u0001\u0000\u0000"+ + "\u0000\u0424\u0427\u0003O \u0000\u0425\u0427\u0003I\u001d\u0000\u0426"+ + "\u0424\u0001\u0000\u0000\u0000\u0426\u0425\u0001\u0000\u0000\u0000\u0427"+ + "\u0429\u0001\u0000\u0000\u0000\u0428\u042a\u0003\u00e5k\u0000\u0429\u0428"+ + "\u0001\u0000\u0000\u0000\u042a\u042b\u0001\u0000\u0000\u0000\u042b\u0429"+ + "\u0001\u0000\u0000\u0000\u042b\u042c\u0001\u0000\u0000\u0000\u042c\u042e"+ + "\u0001\u0000\u0000\u0000\u042d\u041c\u0001\u0000\u0000\u0000\u042d\u0426"+ + "\u0001\u0000\u0000\u0000\u042e\u00e8\u0001\u0000\u0000\u0000\u042f\u0432"+ + "\u0003\u00e7l\u0000\u0430\u0432\u0003\u00abN\u0000\u0431\u042f\u0001\u0000"+ + "\u0000\u0000\u0431\u0430\u0001\u0000\u0000\u0000\u0432\u0433\u0001\u0000"+ + "\u0000\u0000\u0433\u0431\u0001\u0000\u0000\u0000\u0433\u0434\u0001\u0000"+ + "\u0000\u0000\u0434\u00ea\u0001\u0000\u0000\u0000\u0435\u0436\u00037\u0014"+ + "\u0000\u0436\u0437\u0001\u0000\u0000\u0000\u0437\u0438\u0006n\n\u0000"+ + "\u0438\u00ec\u0001\u0000\u0000\u0000\u0439\u043a\u00039\u0015\u0000\u043a"+ + "\u043b\u0001\u0000\u0000\u0000\u043b\u043c\u0006o\n\u0000\u043c\u00ee"+ + "\u0001\u0000\u0000\u0000\u043d\u043e\u0003;\u0016\u0000\u043e\u043f\u0001"+ + "\u0000\u0000\u0000\u043f\u0440\u0006p\n\u0000\u0440\u00f0\u0001\u0000"+ + "\u0000\u0000\u0441\u0442\u0003=\u0017\u0000\u0442\u0443\u0001\u0000\u0000"+ + "\u0000\u0443\u0444\u0006q\u000f\u0000\u0444\u0445\u0006q\u000b\u0000\u0445"+ + 
"\u00f2\u0001\u0000\u0000\u0000\u0446\u0447\u0003_(\u0000\u0447\u0448\u0001"+ + "\u0000\u0000\u0000\u0448\u0449\u0006r\u0013\u0000\u0449\u00f4\u0001\u0000"+ + "\u0000\u0000\u044a\u044b\u0003c*\u0000\u044b\u044c\u0001\u0000\u0000\u0000"+ + "\u044c\u044d\u0006s\u0012\u0000\u044d\u00f6\u0001\u0000\u0000\u0000\u044e"+ + "\u044f\u0003g,\u0000\u044f\u0450\u0001\u0000\u0000\u0000\u0450\u0451\u0006"+ + "t\u0016\u0000\u0451\u00f8\u0001\u0000\u0000\u0000\u0452\u0453\u0003\u007f"+ + "8\u0000\u0453\u0454\u0001\u0000\u0000\u0000\u0454\u0455\u0006u\u0017\u0000"+ + "\u0455\u00fa\u0001\u0000\u0000\u0000\u0456\u0457\u0003\u00a3J\u0000\u0457"+ + "\u0458\u0001\u0000\u0000\u0000\u0458\u0459\u0006v\u0018\u0000\u0459\u00fc"+ + "\u0001\u0000\u0000\u0000\u045a\u045b\u0007\f\u0000\u0000\u045b\u045c\u0007"+ + "\u0002\u0000\u0000\u045c\u00fe\u0001\u0000\u0000\u0000\u045d\u045e\u0003"+ + "\u00e9m\u0000\u045e\u045f\u0001\u0000\u0000\u0000\u045f\u0460\u0006x\u0019"+ + "\u0000\u0460\u0100\u0001\u0000\u0000\u0000\u0461\u0462\u00037\u0014\u0000"+ + "\u0462\u0463\u0001\u0000\u0000\u0000\u0463\u0464\u0006y\n\u0000\u0464"+ + "\u0102\u0001\u0000\u0000\u0000\u0465\u0466\u00039\u0015\u0000\u0466\u0467"+ + "\u0001\u0000\u0000\u0000\u0467\u0468\u0006z\n\u0000\u0468\u0104\u0001"+ + "\u0000\u0000\u0000\u0469\u046a\u0003;\u0016\u0000\u046a\u046b\u0001\u0000"+ + "\u0000\u0000\u046b\u046c\u0006{\n\u0000\u046c\u0106\u0001\u0000\u0000"+ + "\u0000\u046d\u046e\u0003=\u0017\u0000\u046e\u046f\u0001\u0000\u0000\u0000"+ + "\u046f\u0470\u0006|\u000f\u0000\u0470\u0471\u0006|\u000b\u0000\u0471\u0108"+ + "\u0001\u0000\u0000\u0000\u0472\u0473\u0003\u00a5K\u0000\u0473\u0474\u0001"+ + "\u0000\u0000\u0000\u0474\u0475\u0006}\r\u0000\u0475\u0476\u0006}\u001a"+ + "\u0000\u0476\u010a\u0001\u0000\u0000\u0000\u0477\u0478\u0007\u0007\u0000"+ + "\u0000\u0478\u0479\u0007\t\u0000\u0000\u0479\u047a\u0001\u0000\u0000\u0000"+ + "\u047a\u047b\u0006~\u001b\u0000\u047b\u010c\u0001\u0000\u0000\u0000\u047c"+ + 
"\u047d\u0007\u0013\u0000\u0000\u047d\u047e\u0007\u0001\u0000\u0000\u047e"+ + "\u047f\u0007\u0005\u0000\u0000\u047f\u0480\u0007\n\u0000\u0000\u0480\u0481"+ + "\u0001\u0000\u0000\u0000\u0481\u0482\u0006\u007f\u001b\u0000\u0482\u010e"+ + "\u0001\u0000\u0000\u0000\u0483\u0484\b\"\u0000\u0000\u0484\u0110\u0001"+ + "\u0000\u0000\u0000\u0485\u0487\u0003\u010f\u0080\u0000\u0486\u0485\u0001"+ + "\u0000\u0000\u0000\u0487\u0488\u0001\u0000\u0000\u0000\u0488\u0486\u0001"+ + "\u0000\u0000\u0000\u0488\u0489\u0001\u0000\u0000\u0000\u0489\u048a\u0001"+ + "\u0000\u0000\u0000\u048a\u048b\u0003\u0151\u00a1\u0000\u048b\u048d\u0001"+ + "\u0000\u0000\u0000\u048c\u0486\u0001\u0000\u0000\u0000\u048c\u048d\u0001"+ + "\u0000\u0000\u0000\u048d\u048f\u0001\u0000\u0000\u0000\u048e\u0490\u0003"+ + "\u010f\u0080\u0000\u048f\u048e\u0001\u0000\u0000\u0000\u0490\u0491\u0001"+ + "\u0000\u0000\u0000\u0491\u048f\u0001\u0000\u0000\u0000\u0491\u0492\u0001"+ + "\u0000\u0000\u0000\u0492\u0112\u0001\u0000\u0000\u0000\u0493\u0494\u0003"+ + "\u0111\u0081\u0000\u0494\u0495\u0001\u0000\u0000\u0000\u0495\u0496\u0006"+ + "\u0082\u001c\u0000\u0496\u0114\u0001\u0000\u0000\u0000\u0497\u0498\u0003"+ + "7\u0014\u0000\u0498\u0499\u0001\u0000\u0000\u0000\u0499\u049a\u0006\u0083"+ + "\n\u0000\u049a\u0116\u0001\u0000\u0000\u0000\u049b\u049c\u00039\u0015"+ + "\u0000\u049c\u049d\u0001\u0000\u0000\u0000\u049d\u049e\u0006\u0084\n\u0000"+ + "\u049e\u0118\u0001\u0000\u0000\u0000\u049f\u04a0\u0003;\u0016\u0000\u04a0"+ + "\u04a1\u0001\u0000\u0000\u0000\u04a1\u04a2\u0006\u0085\n\u0000\u04a2\u011a"+ + "\u0001\u0000\u0000\u0000\u04a3\u04a4\u0003=\u0017\u0000\u04a4\u04a5\u0001"+ + "\u0000\u0000\u0000\u04a5\u04a6\u0006\u0086\u000f\u0000\u04a6\u04a7\u0006"+ + "\u0086\u000b\u0000\u04a7\u04a8\u0006\u0086\u000b\u0000\u04a8\u011c\u0001"+ + "\u0000\u0000\u0000\u04a9\u04aa\u0003_(\u0000\u04aa\u04ab\u0001\u0000\u0000"+ + "\u0000\u04ab\u04ac\u0006\u0087\u0013\u0000\u04ac\u011e\u0001\u0000\u0000"+ + 
"\u0000\u04ad\u04ae\u0003c*\u0000\u04ae\u04af\u0001\u0000\u0000\u0000\u04af"+ + "\u04b0\u0006\u0088\u0012\u0000\u04b0\u0120\u0001\u0000\u0000\u0000\u04b1"+ + "\u04b2\u0003g,\u0000\u04b2\u04b3\u0001\u0000\u0000\u0000\u04b3\u04b4\u0006"+ + "\u0089\u0016\u0000\u04b4\u0122\u0001\u0000\u0000\u0000\u04b5\u04b6\u0003"+ + "\u010d\u007f\u0000\u04b6\u04b7\u0001\u0000\u0000\u0000\u04b7\u04b8\u0006"+ + "\u008a\u001d\u0000\u04b8\u0124\u0001\u0000\u0000\u0000\u04b9\u04ba\u0003"+ + "\u00e9m\u0000\u04ba\u04bb\u0001\u0000\u0000\u0000\u04bb\u04bc\u0006\u008b"+ + "\u0019\u0000\u04bc\u0126\u0001\u0000\u0000\u0000\u04bd\u04be\u0003\u00ad"+ + "O\u0000\u04be\u04bf\u0001\u0000\u0000\u0000\u04bf\u04c0\u0006\u008c\u001e"+ + "\u0000\u04c0\u0128\u0001\u0000\u0000\u0000\u04c1\u04c2\u0003\u007f8\u0000"+ + "\u04c2\u04c3\u0001\u0000\u0000\u0000\u04c3\u04c4\u0006\u008d\u0017\u0000"+ + "\u04c4\u012a\u0001\u0000\u0000\u0000\u04c5\u04c6\u0003\u00a3J\u0000\u04c6"+ + "\u04c7\u0001\u0000\u0000\u0000\u04c7\u04c8\u0006\u008e\u0018\u0000\u04c8"+ + "\u012c\u0001\u0000\u0000\u0000\u04c9\u04ca\u00037\u0014\u0000\u04ca\u04cb"+ + "\u0001\u0000\u0000\u0000\u04cb\u04cc\u0006\u008f\n\u0000\u04cc\u012e\u0001"+ + "\u0000\u0000\u0000\u04cd\u04ce\u00039\u0015\u0000\u04ce\u04cf\u0001\u0000"+ + "\u0000\u0000\u04cf\u04d0\u0006\u0090\n\u0000\u04d0\u0130\u0001\u0000\u0000"+ + "\u0000\u04d1\u04d2\u0003;\u0016\u0000\u04d2\u04d3\u0001\u0000\u0000\u0000"+ + "\u04d3\u04d4\u0006\u0091\n\u0000\u04d4\u0132\u0001\u0000\u0000\u0000\u04d5"+ + "\u04d6\u0003=\u0017\u0000\u04d6\u04d7\u0001\u0000\u0000\u0000\u04d7\u04d8"+ + "\u0006\u0092\u000f\u0000\u04d8\u04d9\u0006\u0092\u000b\u0000\u04d9\u0134"+ + "\u0001\u0000\u0000\u0000\u04da\u04db\u0003g,\u0000\u04db\u04dc\u0001\u0000"+ + "\u0000\u0000\u04dc\u04dd\u0006\u0093\u0016\u0000\u04dd\u0136\u0001\u0000"+ + "\u0000\u0000\u04de\u04df\u0003\u007f8\u0000\u04df\u04e0\u0001\u0000\u0000"+ + "\u0000\u04e0\u04e1\u0006\u0094\u0017\u0000\u04e1\u0138\u0001\u0000\u0000"+ + 
"\u0000\u04e2\u04e3\u0003\u00a3J\u0000\u04e3\u04e4\u0001\u0000\u0000\u0000"+ + "\u04e4\u04e5\u0006\u0095\u0018\u0000\u04e5\u013a\u0001\u0000\u0000\u0000"+ + "\u04e6\u04e7\u0003\u00adO\u0000\u04e7\u04e8\u0001\u0000\u0000\u0000\u04e8"+ + "\u04e9\u0006\u0096\u001e\u0000\u04e9\u013c\u0001\u0000\u0000\u0000\u04ea"+ + "\u04eb\u0003\u00a9M\u0000\u04eb\u04ec\u0001\u0000\u0000\u0000\u04ec\u04ed"+ + "\u0006\u0097\u001f\u0000\u04ed\u013e\u0001\u0000\u0000\u0000\u04ee\u04ef"+ + "\u00037\u0014\u0000\u04ef\u04f0\u0001\u0000\u0000\u0000\u04f0\u04f1\u0006"+ + "\u0098\n\u0000\u04f1\u0140\u0001\u0000\u0000\u0000\u04f2\u04f3\u00039"+ + "\u0015\u0000\u04f3\u04f4\u0001\u0000\u0000\u0000\u04f4\u04f5\u0006\u0099"+ + "\n\u0000\u04f5\u0142\u0001\u0000\u0000\u0000\u04f6\u04f7\u0003;\u0016"+ + "\u0000\u04f7\u04f8\u0001\u0000\u0000\u0000\u04f8\u04f9\u0006\u009a\n\u0000"+ + "\u04f9\u0144\u0001\u0000\u0000\u0000\u04fa\u04fb\u0003=\u0017\u0000\u04fb"+ + "\u04fc\u0001\u0000\u0000\u0000\u04fc\u04fd\u0006\u009b\u000f\u0000\u04fd"+ + "\u04fe\u0006\u009b\u000b\u0000\u04fe\u0146\u0001\u0000\u0000\u0000\u04ff"+ + "\u0500\u0007\u0001\u0000\u0000\u0500\u0501\u0007\t\u0000\u0000\u0501\u0502"+ + "\u0007\u000f\u0000\u0000\u0502\u0503\u0007\u0007\u0000\u0000\u0503\u0148"+ + "\u0001\u0000\u0000\u0000\u0504\u0505\u00037\u0014\u0000\u0505\u0506\u0001"+ + "\u0000\u0000\u0000\u0506\u0507\u0006\u009d\n\u0000\u0507\u014a\u0001\u0000"+ + "\u0000\u0000\u0508\u0509\u00039\u0015\u0000\u0509\u050a\u0001\u0000\u0000"+ + "\u0000\u050a\u050b\u0006\u009e\n\u0000\u050b\u014c\u0001\u0000\u0000\u0000"+ + "\u050c\u050d\u0003;\u0016\u0000\u050d\u050e\u0001\u0000\u0000\u0000\u050e"+ + "\u050f\u0006\u009f\n\u0000\u050f\u014e\u0001\u0000\u0000\u0000\u0510\u0511"+ + "\u0003\u00a7L\u0000\u0511\u0512\u0001\u0000\u0000\u0000\u0512\u0513\u0006"+ + "\u00a0\u0010\u0000\u0513\u0514\u0006\u00a0\u000b\u0000\u0514\u0150\u0001"+ + "\u0000\u0000\u0000\u0515\u0516\u0005:\u0000\u0000\u0516\u0152\u0001\u0000"+ + 
"\u0000\u0000\u0517\u051d\u0003I\u001d\u0000\u0518\u051d\u0003?\u0018\u0000"+ + "\u0519\u051d\u0003g,\u0000\u051a\u051d\u0003A\u0019\u0000\u051b\u051d"+ + "\u0003O \u0000\u051c\u0517\u0001\u0000\u0000\u0000\u051c\u0518\u0001\u0000"+ + "\u0000\u0000\u051c\u0519\u0001\u0000\u0000\u0000\u051c\u051a\u0001\u0000"+ + "\u0000\u0000\u051c\u051b\u0001\u0000\u0000\u0000\u051d\u051e\u0001\u0000"+ + "\u0000\u0000\u051e\u051c\u0001\u0000\u0000\u0000\u051e\u051f\u0001\u0000"+ + "\u0000\u0000\u051f\u0154\u0001\u0000\u0000\u0000\u0520\u0521\u00037\u0014"+ + "\u0000\u0521\u0522\u0001\u0000\u0000\u0000\u0522\u0523\u0006\u00a3\n\u0000"+ + "\u0523\u0156\u0001\u0000\u0000\u0000\u0524\u0525\u00039\u0015\u0000\u0525"+ + "\u0526\u0001\u0000\u0000\u0000\u0526\u0527\u0006\u00a4\n\u0000\u0527\u0158"+ + "\u0001\u0000\u0000\u0000\u0528\u0529\u0003;\u0016\u0000\u0529\u052a\u0001"+ + "\u0000\u0000\u0000\u052a\u052b\u0006\u00a5\n\u0000\u052b\u015a\u0001\u0000"+ + "\u0000\u0000\u052c\u052d\u0003=\u0017\u0000\u052d\u052e\u0001\u0000\u0000"+ + "\u0000\u052e\u052f\u0006\u00a6\u000f\u0000\u052f\u0530\u0006\u00a6\u000b"+ + "\u0000\u0530\u015c\u0001\u0000\u0000\u0000\u0531\u0532\u0003\u0151\u00a1"+ + "\u0000\u0532\u0533\u0001\u0000\u0000\u0000\u0533\u0534\u0006\u00a7\u0011"+ + "\u0000\u0534\u015e\u0001\u0000\u0000\u0000\u0535\u0536\u0003c*\u0000\u0536"+ + "\u0537\u0001\u0000\u0000\u0000\u0537\u0538\u0006\u00a8\u0012\u0000\u0538"+ + "\u0160\u0001\u0000\u0000\u0000\u0539\u053a\u0003g,\u0000\u053a\u053b\u0001"+ + "\u0000\u0000\u0000\u053b\u053c\u0006\u00a9\u0016\u0000\u053c\u0162\u0001"+ + "\u0000\u0000\u0000\u053d\u053e\u0003\u010b~\u0000\u053e\u053f\u0001\u0000"+ + "\u0000\u0000\u053f\u0540\u0006\u00aa \u0000\u0540\u0541\u0006\u00aa!\u0000"+ + "\u0541\u0164\u0001\u0000\u0000\u0000\u0542\u0543\u0003\u00cf`\u0000\u0543"+ + "\u0544\u0001\u0000\u0000\u0000\u0544\u0545\u0006\u00ab\u0014\u0000\u0545"+ + "\u0166\u0001\u0000\u0000\u0000\u0546\u0547\u0003S\"\u0000\u0547\u0548"+ + 
"\u0001\u0000\u0000\u0000\u0548\u0549\u0006\u00ac\u0015\u0000\u0549\u0168"+ + "\u0001\u0000\u0000\u0000\u054a\u054b\u00037\u0014\u0000\u054b\u054c\u0001"+ + "\u0000\u0000\u0000\u054c\u054d\u0006\u00ad\n\u0000\u054d\u016a\u0001\u0000"+ + "\u0000\u0000\u054e\u054f\u00039\u0015\u0000\u054f\u0550\u0001\u0000\u0000"+ + "\u0000\u0550\u0551\u0006\u00ae\n\u0000\u0551\u016c\u0001\u0000\u0000\u0000"+ + "\u0552\u0553\u0003;\u0016\u0000\u0553\u0554\u0001\u0000\u0000\u0000\u0554"+ + "\u0555\u0006\u00af\n\u0000\u0555\u016e\u0001\u0000\u0000\u0000\u0556\u0557"+ + "\u0003=\u0017\u0000\u0557\u0558\u0001\u0000\u0000\u0000\u0558\u0559\u0006"+ + "\u00b0\u000f\u0000\u0559\u055a\u0006\u00b0\u000b\u0000\u055a\u055b\u0006"+ + "\u00b0\u000b\u0000\u055b\u0170\u0001\u0000\u0000\u0000\u055c\u055d\u0003"+ + "c*\u0000\u055d\u055e\u0001\u0000\u0000\u0000\u055e\u055f\u0006\u00b1\u0012"+ + "\u0000\u055f\u0172\u0001\u0000\u0000\u0000\u0560\u0561\u0003g,\u0000\u0561"+ + "\u0562\u0001\u0000\u0000\u0000\u0562\u0563\u0006\u00b2\u0016\u0000\u0563"+ + "\u0174\u0001\u0000\u0000\u0000\u0564\u0565\u0003\u00e9m\u0000\u0565\u0566"+ + "\u0001\u0000\u0000\u0000\u0566\u0567\u0006\u00b3\u0019\u0000\u0567\u0176"+ + "\u0001\u0000\u0000\u0000\u0568\u0569\u00037\u0014\u0000\u0569\u056a\u0001"+ + "\u0000\u0000\u0000\u056a\u056b\u0006\u00b4\n\u0000\u056b\u0178\u0001\u0000"+ + "\u0000\u0000\u056c\u056d\u00039\u0015\u0000\u056d\u056e\u0001\u0000\u0000"+ + "\u0000\u056e\u056f\u0006\u00b5\n\u0000\u056f\u017a\u0001\u0000\u0000\u0000"+ + "\u0570\u0571\u0003;\u0016\u0000\u0571\u0572\u0001\u0000\u0000\u0000\u0572"+ + "\u0573\u0006\u00b6\n\u0000\u0573\u017c\u0001\u0000\u0000\u0000\u0574\u0575"+ + "\u0003=\u0017\u0000\u0575\u0576\u0001\u0000\u0000\u0000\u0576\u0577\u0006"+ + "\u00b7\u000f\u0000\u0577\u0578\u0006\u00b7\u000b\u0000\u0578\u017e\u0001"+ + "\u0000\u0000\u0000\u0579\u057a\u0003\u00cf`\u0000\u057a\u057b\u0001\u0000"+ + "\u0000\u0000\u057b\u057c\u0006\u00b8\u0014\u0000\u057c\u057d\u0006\u00b8"+ + 
"\u000b\u0000\u057d\u057e\u0006\u00b8\"\u0000\u057e\u0180\u0001\u0000\u0000"+ + "\u0000\u057f\u0580\u0003S\"\u0000\u0580\u0581\u0001\u0000\u0000\u0000"+ + "\u0581\u0582\u0006\u00b9\u0015\u0000\u0582\u0583\u0006\u00b9\u000b\u0000"+ + "\u0583\u0584\u0006\u00b9\"\u0000\u0584\u0182\u0001\u0000\u0000\u0000\u0585"+ + "\u0586\u00037\u0014\u0000\u0586\u0587\u0001\u0000\u0000\u0000\u0587\u0588"+ + "\u0006\u00ba\n\u0000\u0588\u0184\u0001\u0000\u0000\u0000\u0589\u058a\u0003"+ + "9\u0015\u0000\u058a\u058b\u0001\u0000\u0000\u0000\u058b\u058c\u0006\u00bb"+ + "\n\u0000\u058c\u0186\u0001\u0000\u0000\u0000\u058d\u058e\u0003;\u0016"+ + "\u0000\u058e\u058f\u0001\u0000\u0000\u0000\u058f\u0590\u0006\u00bc\n\u0000"+ + "\u0590\u0188\u0001\u0000\u0000\u0000\u0591\u0592\u0003\u0151\u00a1\u0000"+ + "\u0592\u0593\u0001\u0000\u0000\u0000\u0593\u0594\u0006\u00bd\u0011\u0000"+ + "\u0594\u0595\u0006\u00bd\u000b\u0000\u0595\u0596\u0006\u00bd\t\u0000\u0596"+ + "\u018a\u0001\u0000\u0000\u0000\u0597\u0598\u0003c*\u0000\u0598\u0599\u0001"+ + "\u0000\u0000\u0000\u0599\u059a\u0006\u00be\u0012\u0000\u059a\u059b\u0006"+ + "\u00be\u000b\u0000\u059b\u059c\u0006\u00be\t\u0000\u059c\u018c\u0001\u0000"+ + "\u0000\u0000\u059d\u059e\u00037\u0014\u0000\u059e\u059f\u0001\u0000\u0000"+ + "\u0000\u059f\u05a0\u0006\u00bf\n\u0000\u05a0\u018e\u0001\u0000\u0000\u0000"+ + "\u05a1\u05a2\u00039\u0015\u0000\u05a2\u05a3\u0001\u0000\u0000\u0000\u05a3"+ + "\u05a4\u0006\u00c0\n\u0000\u05a4\u0190\u0001\u0000\u0000\u0000\u05a5\u05a6"+ + "\u0003;\u0016\u0000\u05a6\u05a7\u0001\u0000\u0000\u0000\u05a7\u05a8\u0006"+ + "\u00c1\n\u0000\u05a8\u0192\u0001\u0000\u0000\u0000\u05a9\u05aa\u0003\u00ad"+ + "O\u0000\u05aa\u05ab\u0001\u0000\u0000\u0000\u05ab\u05ac\u0006\u00c2\u000b"+ + "\u0000\u05ac\u05ad\u0006\u00c2\u0000\u0000\u05ad\u05ae\u0006\u00c2\u001e"+ + "\u0000\u05ae\u0194\u0001\u0000\u0000\u0000\u05af\u05b0\u0003\u00a9M\u0000"+ + "\u05b0\u05b1\u0001\u0000\u0000\u0000\u05b1\u05b2\u0006\u00c3\u000b\u0000"+ + 
"\u05b2\u05b3\u0006\u00c3\u0000\u0000\u05b3\u05b4\u0006\u00c3\u001f\u0000"+ + "\u05b4\u0196\u0001\u0000\u0000\u0000\u05b5\u05b6\u0003Y%\u0000\u05b6\u05b7"+ + "\u0001\u0000\u0000\u0000\u05b7\u05b8\u0006\u00c4\u000b\u0000\u05b8\u05b9"+ + "\u0006\u00c4\u0000\u0000\u05b9\u05ba\u0006\u00c4#\u0000\u05ba\u0198\u0001"+ + "\u0000\u0000\u0000\u05bb\u05bc\u0003=\u0017\u0000\u05bc\u05bd\u0001\u0000"+ + "\u0000\u0000\u05bd\u05be\u0006\u00c5\u000f\u0000\u05be\u05bf\u0006\u00c5"+ + "\u000b\u0000\u05bf\u019a\u0001\u0000\u0000\u0000A\u0000\u0001\u0002\u0003"+ + "\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u0243\u024d\u0251\u0254"+ + "\u025d\u025f\u026a\u027d\u0282\u028b\u0292\u0297\u0299\u02a4\u02ac\u02af"+ + "\u02b1\u02b6\u02bb\u02c1\u02c8\u02cd\u02d3\u02d6\u02de\u02e2\u0367\u036c"+ + "\u0373\u0375\u0385\u038a\u038f\u0391\u0397\u03e4\u03e9\u0418\u041c\u0421"+ + "\u0426\u042b\u042d\u0431\u0433\u0488\u048c\u0491\u051c\u051e$\u0005\u0001"+ + "\u0000\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0002\u0000\u0005\u0003"+ + "\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0005\u000b\u0000"+ + "\u0005\r\u0000\u0000\u0001\u0000\u0004\u0000\u0000\u0007\u0010\u0000\u0007"+ + "A\u0000\u0005\u0000\u0000\u0007\u0018\u0000\u0007B\u0000\u0007h\u0000"+ + "\u0007!\u0000\u0007\u001f\u0000\u0007L\u0000\u0007\u0019\u0000\u0007#"+ + "\u0000\u0007/\u0000\u0007@\u0000\u0007P\u0000\u0005\n\u0000\u0005\u0007"+ + "\u0000\u0007Z\u0000\u0007Y\u0000\u0007D\u0000\u0007C\u0000\u0007X\u0000"+ + "\u0005\f\u0000\u0005\u000e\u0000\u0007\u001c\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index 0db5c82878fcf..e718d402982ed 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -272,6 +272,8 @@ deprecated_metadata metricsCommand evalCommand statsCommand +aggFields +aggField qualifiedName qualifiedNamePattern qualifiedNamePatterns @@ -308,4 +310,4 @@ inlinestatsCommand atn: -[4, 1, 120, 586, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 130, 8, 1, 10, 1, 12, 1, 133, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 141, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 159, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 171, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 178, 8, 5, 10, 5, 12, 5, 181, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 188, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 194, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 202, 8, 5, 10, 5, 12, 5, 205, 9, 5, 1, 6, 1, 6, 3, 6, 209, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 216, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 221, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 232, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 238, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 246, 8, 9, 10, 9, 12, 9, 
249, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 259, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 264, 8, 10, 10, 10, 12, 10, 267, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 275, 8, 11, 10, 11, 12, 11, 278, 9, 11, 3, 11, 280, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 3, 12, 286, 8, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 5, 15, 296, 8, 15, 10, 15, 12, 15, 299, 9, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 3, 16, 306, 8, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 312, 8, 17, 10, 17, 12, 17, 315, 9, 17, 1, 17, 3, 17, 318, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 325, 8, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 333, 8, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 339, 8, 22, 10, 22, 12, 22, 342, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 352, 8, 24, 10, 24, 12, 24, 355, 9, 24, 1, 24, 3, 24, 358, 8, 24, 1, 24, 1, 24, 3, 24, 362, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 369, 8, 26, 1, 26, 1, 26, 3, 26, 373, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 378, 8, 27, 10, 27, 12, 27, 381, 9, 27, 1, 28, 1, 28, 1, 28, 5, 28, 386, 8, 28, 10, 28, 12, 28, 389, 9, 28, 1, 29, 1, 29, 1, 29, 5, 29, 394, 8, 29, 10, 29, 12, 29, 397, 9, 29, 1, 30, 1, 30, 1, 31, 1, 31, 3, 31, 403, 8, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 418, 8, 32, 10, 32, 12, 32, 421, 9, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 429, 8, 32, 10, 32, 12, 32, 432, 9, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 440, 8, 32, 10, 32, 12, 32, 443, 9, 32, 1, 32, 1, 32, 3, 32, 447, 8, 32, 1, 33, 1, 33, 3, 33, 451, 8, 33, 1, 34, 1, 34, 3, 34, 455, 8, 34, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 5, 36, 464, 8, 36, 10, 36, 12, 36, 467, 9, 36, 1, 37, 1, 37, 3, 37, 471, 8, 37, 1, 37, 1, 37, 3, 37, 475, 8, 37, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 5, 40, 487, 8, 40, 10, 40, 12, 40, 490, 9, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 
1, 42, 3, 42, 500, 8, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 5, 45, 512, 8, 45, 10, 45, 12, 45, 515, 9, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 3, 48, 525, 8, 48, 1, 49, 3, 49, 528, 8, 49, 1, 49, 1, 49, 1, 50, 3, 50, 533, 8, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 3, 56, 555, 8, 56, 1, 56, 1, 56, 1, 56, 1, 56, 5, 56, 561, 8, 56, 10, 56, 12, 56, 564, 9, 56, 3, 56, 566, 8, 56, 1, 57, 1, 57, 1, 57, 3, 57, 571, 8, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 3, 59, 584, 8, 59, 1, 59, 0, 4, 2, 10, 18, 20, 60, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 0, 8, 1, 0, 58, 59, 1, 0, 60, 62, 2, 0, 25, 25, 76, 76, 1, 0, 67, 68, 2, 0, 30, 30, 34, 34, 2, 0, 37, 37, 40, 40, 2, 0, 36, 36, 50, 50, 2, 0, 51, 51, 53, 57, 612, 0, 120, 1, 0, 0, 0, 2, 123, 1, 0, 0, 0, 4, 140, 1, 0, 0, 0, 6, 158, 1, 0, 0, 0, 8, 160, 1, 0, 0, 0, 10, 193, 1, 0, 0, 0, 12, 220, 1, 0, 0, 0, 14, 222, 1, 0, 0, 0, 16, 231, 1, 0, 0, 0, 18, 237, 1, 0, 0, 0, 20, 258, 1, 0, 0, 0, 22, 268, 1, 0, 0, 0, 24, 285, 1, 0, 0, 0, 26, 287, 1, 0, 0, 0, 28, 289, 1, 0, 0, 0, 30, 292, 1, 0, 0, 0, 32, 305, 1, 0, 0, 0, 34, 307, 1, 0, 0, 0, 36, 324, 1, 0, 0, 0, 38, 326, 1, 0, 0, 0, 40, 328, 1, 0, 0, 0, 42, 332, 1, 0, 0, 0, 44, 334, 1, 0, 0, 0, 46, 343, 1, 0, 0, 0, 48, 347, 1, 0, 0, 0, 50, 363, 1, 0, 0, 0, 52, 366, 1, 0, 0, 0, 54, 374, 1, 0, 0, 0, 56, 382, 1, 0, 0, 0, 58, 390, 1, 0, 0, 0, 60, 398, 1, 0, 0, 0, 62, 402, 1, 0, 0, 0, 64, 446, 1, 0, 0, 0, 66, 450, 1, 0, 0, 0, 68, 454, 1, 0, 0, 0, 70, 456, 1, 0, 0, 0, 72, 459, 1, 0, 0, 0, 74, 468, 1, 0, 0, 0, 76, 476, 1, 0, 0, 0, 78, 479, 1, 0, 0, 0, 80, 482, 1, 0, 0, 0, 82, 491, 1, 0, 0, 0, 84, 495, 
1, 0, 0, 0, 86, 501, 1, 0, 0, 0, 88, 505, 1, 0, 0, 0, 90, 508, 1, 0, 0, 0, 92, 516, 1, 0, 0, 0, 94, 520, 1, 0, 0, 0, 96, 524, 1, 0, 0, 0, 98, 527, 1, 0, 0, 0, 100, 532, 1, 0, 0, 0, 102, 536, 1, 0, 0, 0, 104, 538, 1, 0, 0, 0, 106, 540, 1, 0, 0, 0, 108, 543, 1, 0, 0, 0, 110, 547, 1, 0, 0, 0, 112, 550, 1, 0, 0, 0, 114, 570, 1, 0, 0, 0, 116, 574, 1, 0, 0, 0, 118, 579, 1, 0, 0, 0, 120, 121, 3, 2, 1, 0, 121, 122, 5, 0, 0, 1, 122, 1, 1, 0, 0, 0, 123, 124, 6, 1, -1, 0, 124, 125, 3, 4, 2, 0, 125, 131, 1, 0, 0, 0, 126, 127, 10, 1, 0, 0, 127, 128, 5, 24, 0, 0, 128, 130, 3, 6, 3, 0, 129, 126, 1, 0, 0, 0, 130, 133, 1, 0, 0, 0, 131, 129, 1, 0, 0, 0, 131, 132, 1, 0, 0, 0, 132, 3, 1, 0, 0, 0, 133, 131, 1, 0, 0, 0, 134, 141, 3, 106, 53, 0, 135, 141, 3, 34, 17, 0, 136, 141, 3, 28, 14, 0, 137, 141, 3, 110, 55, 0, 138, 139, 4, 2, 1, 0, 139, 141, 3, 48, 24, 0, 140, 134, 1, 0, 0, 0, 140, 135, 1, 0, 0, 0, 140, 136, 1, 0, 0, 0, 140, 137, 1, 0, 0, 0, 140, 138, 1, 0, 0, 0, 141, 5, 1, 0, 0, 0, 142, 159, 3, 50, 25, 0, 143, 159, 3, 8, 4, 0, 144, 159, 3, 76, 38, 0, 145, 159, 3, 70, 35, 0, 146, 159, 3, 52, 26, 0, 147, 159, 3, 72, 36, 0, 148, 159, 3, 78, 39, 0, 149, 159, 3, 80, 40, 0, 150, 159, 3, 84, 42, 0, 151, 159, 3, 86, 43, 0, 152, 159, 3, 112, 56, 0, 153, 159, 3, 88, 44, 0, 154, 155, 4, 3, 2, 0, 155, 159, 3, 118, 59, 0, 156, 157, 4, 3, 3, 0, 157, 159, 3, 116, 58, 0, 158, 142, 1, 0, 0, 0, 158, 143, 1, 0, 0, 0, 158, 144, 1, 0, 0, 0, 158, 145, 1, 0, 0, 0, 158, 146, 1, 0, 0, 0, 158, 147, 1, 0, 0, 0, 158, 148, 1, 0, 0, 0, 158, 149, 1, 0, 0, 0, 158, 150, 1, 0, 0, 0, 158, 151, 1, 0, 0, 0, 158, 152, 1, 0, 0, 0, 158, 153, 1, 0, 0, 0, 158, 154, 1, 0, 0, 0, 158, 156, 1, 0, 0, 0, 159, 7, 1, 0, 0, 0, 160, 161, 5, 16, 0, 0, 161, 162, 3, 10, 5, 0, 162, 9, 1, 0, 0, 0, 163, 164, 6, 5, -1, 0, 164, 165, 5, 43, 0, 0, 165, 194, 3, 10, 5, 8, 166, 194, 3, 16, 8, 0, 167, 194, 3, 12, 6, 0, 168, 170, 3, 16, 8, 0, 169, 171, 5, 43, 0, 0, 170, 169, 1, 0, 0, 0, 170, 171, 1, 0, 0, 0, 171, 172, 1, 0, 0, 0, 172, 173, 5, 
38, 0, 0, 173, 174, 5, 42, 0, 0, 174, 179, 3, 16, 8, 0, 175, 176, 5, 33, 0, 0, 176, 178, 3, 16, 8, 0, 177, 175, 1, 0, 0, 0, 178, 181, 1, 0, 0, 0, 179, 177, 1, 0, 0, 0, 179, 180, 1, 0, 0, 0, 180, 182, 1, 0, 0, 0, 181, 179, 1, 0, 0, 0, 182, 183, 5, 49, 0, 0, 183, 194, 1, 0, 0, 0, 184, 185, 3, 16, 8, 0, 185, 187, 5, 39, 0, 0, 186, 188, 5, 43, 0, 0, 187, 186, 1, 0, 0, 0, 187, 188, 1, 0, 0, 0, 188, 189, 1, 0, 0, 0, 189, 190, 5, 44, 0, 0, 190, 194, 1, 0, 0, 0, 191, 192, 4, 5, 4, 0, 192, 194, 3, 14, 7, 0, 193, 163, 1, 0, 0, 0, 193, 166, 1, 0, 0, 0, 193, 167, 1, 0, 0, 0, 193, 168, 1, 0, 0, 0, 193, 184, 1, 0, 0, 0, 193, 191, 1, 0, 0, 0, 194, 203, 1, 0, 0, 0, 195, 196, 10, 5, 0, 0, 196, 197, 5, 29, 0, 0, 197, 202, 3, 10, 5, 6, 198, 199, 10, 4, 0, 0, 199, 200, 5, 46, 0, 0, 200, 202, 3, 10, 5, 5, 201, 195, 1, 0, 0, 0, 201, 198, 1, 0, 0, 0, 202, 205, 1, 0, 0, 0, 203, 201, 1, 0, 0, 0, 203, 204, 1, 0, 0, 0, 204, 11, 1, 0, 0, 0, 205, 203, 1, 0, 0, 0, 206, 208, 3, 16, 8, 0, 207, 209, 5, 43, 0, 0, 208, 207, 1, 0, 0, 0, 208, 209, 1, 0, 0, 0, 209, 210, 1, 0, 0, 0, 210, 211, 5, 41, 0, 0, 211, 212, 3, 102, 51, 0, 212, 221, 1, 0, 0, 0, 213, 215, 3, 16, 8, 0, 214, 216, 5, 43, 0, 0, 215, 214, 1, 0, 0, 0, 215, 216, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 218, 5, 48, 0, 0, 218, 219, 3, 102, 51, 0, 219, 221, 1, 0, 0, 0, 220, 206, 1, 0, 0, 0, 220, 213, 1, 0, 0, 0, 221, 13, 1, 0, 0, 0, 222, 223, 3, 16, 8, 0, 223, 224, 5, 63, 0, 0, 224, 225, 3, 102, 51, 0, 225, 15, 1, 0, 0, 0, 226, 232, 3, 18, 9, 0, 227, 228, 3, 18, 9, 0, 228, 229, 3, 104, 52, 0, 229, 230, 3, 18, 9, 0, 230, 232, 1, 0, 0, 0, 231, 226, 1, 0, 0, 0, 231, 227, 1, 0, 0, 0, 232, 17, 1, 0, 0, 0, 233, 234, 6, 9, -1, 0, 234, 238, 3, 20, 10, 0, 235, 236, 7, 0, 0, 0, 236, 238, 3, 18, 9, 3, 237, 233, 1, 0, 0, 0, 237, 235, 1, 0, 0, 0, 238, 247, 1, 0, 0, 0, 239, 240, 10, 2, 0, 0, 240, 241, 7, 1, 0, 0, 241, 246, 3, 18, 9, 3, 242, 243, 10, 1, 0, 0, 243, 244, 7, 0, 0, 0, 244, 246, 3, 18, 9, 2, 245, 239, 1, 0, 0, 0, 245, 242, 1, 0, 0, 0, 246, 249, 
1, 0, 0, 0, 247, 245, 1, 0, 0, 0, 247, 248, 1, 0, 0, 0, 248, 19, 1, 0, 0, 0, 249, 247, 1, 0, 0, 0, 250, 251, 6, 10, -1, 0, 251, 259, 3, 64, 32, 0, 252, 259, 3, 54, 27, 0, 253, 259, 3, 22, 11, 0, 254, 255, 5, 42, 0, 0, 255, 256, 3, 10, 5, 0, 256, 257, 5, 49, 0, 0, 257, 259, 1, 0, 0, 0, 258, 250, 1, 0, 0, 0, 258, 252, 1, 0, 0, 0, 258, 253, 1, 0, 0, 0, 258, 254, 1, 0, 0, 0, 259, 265, 1, 0, 0, 0, 260, 261, 10, 1, 0, 0, 261, 262, 5, 32, 0, 0, 262, 264, 3, 26, 13, 0, 263, 260, 1, 0, 0, 0, 264, 267, 1, 0, 0, 0, 265, 263, 1, 0, 0, 0, 265, 266, 1, 0, 0, 0, 266, 21, 1, 0, 0, 0, 267, 265, 1, 0, 0, 0, 268, 269, 3, 24, 12, 0, 269, 279, 5, 42, 0, 0, 270, 280, 5, 60, 0, 0, 271, 276, 3, 10, 5, 0, 272, 273, 5, 33, 0, 0, 273, 275, 3, 10, 5, 0, 274, 272, 1, 0, 0, 0, 275, 278, 1, 0, 0, 0, 276, 274, 1, 0, 0, 0, 276, 277, 1, 0, 0, 0, 277, 280, 1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 279, 270, 1, 0, 0, 0, 279, 271, 1, 0, 0, 0, 279, 280, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 282, 5, 49, 0, 0, 282, 23, 1, 0, 0, 0, 283, 286, 5, 63, 0, 0, 284, 286, 3, 68, 34, 0, 285, 283, 1, 0, 0, 0, 285, 284, 1, 0, 0, 0, 286, 25, 1, 0, 0, 0, 287, 288, 3, 60, 30, 0, 288, 27, 1, 0, 0, 0, 289, 290, 5, 12, 0, 0, 290, 291, 3, 30, 15, 0, 291, 29, 1, 0, 0, 0, 292, 297, 3, 32, 16, 0, 293, 294, 5, 33, 0, 0, 294, 296, 3, 32, 16, 0, 295, 293, 1, 0, 0, 0, 296, 299, 1, 0, 0, 0, 297, 295, 1, 0, 0, 0, 297, 298, 1, 0, 0, 0, 298, 31, 1, 0, 0, 0, 299, 297, 1, 0, 0, 0, 300, 306, 3, 10, 5, 0, 301, 302, 3, 54, 27, 0, 302, 303, 5, 31, 0, 0, 303, 304, 3, 10, 5, 0, 304, 306, 1, 0, 0, 0, 305, 300, 1, 0, 0, 0, 305, 301, 1, 0, 0, 0, 306, 33, 1, 0, 0, 0, 307, 308, 5, 6, 0, 0, 308, 313, 3, 36, 18, 0, 309, 310, 5, 33, 0, 0, 310, 312, 3, 36, 18, 0, 311, 309, 1, 0, 0, 0, 312, 315, 1, 0, 0, 0, 313, 311, 1, 0, 0, 0, 313, 314, 1, 0, 0, 0, 314, 317, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 316, 318, 3, 42, 21, 0, 317, 316, 1, 0, 0, 0, 317, 318, 1, 0, 0, 0, 318, 35, 1, 0, 0, 0, 319, 320, 3, 38, 19, 0, 320, 321, 5, 104, 0, 0, 321, 322, 3, 40, 20, 0, 322, 
325, 1, 0, 0, 0, 323, 325, 3, 40, 20, 0, 324, 319, 1, 0, 0, 0, 324, 323, 1, 0, 0, 0, 325, 37, 1, 0, 0, 0, 326, 327, 5, 76, 0, 0, 327, 39, 1, 0, 0, 0, 328, 329, 7, 2, 0, 0, 329, 41, 1, 0, 0, 0, 330, 333, 3, 44, 22, 0, 331, 333, 3, 46, 23, 0, 332, 330, 1, 0, 0, 0, 332, 331, 1, 0, 0, 0, 333, 43, 1, 0, 0, 0, 334, 335, 5, 75, 0, 0, 335, 340, 5, 76, 0, 0, 336, 337, 5, 33, 0, 0, 337, 339, 5, 76, 0, 0, 338, 336, 1, 0, 0, 0, 339, 342, 1, 0, 0, 0, 340, 338, 1, 0, 0, 0, 340, 341, 1, 0, 0, 0, 341, 45, 1, 0, 0, 0, 342, 340, 1, 0, 0, 0, 343, 344, 5, 65, 0, 0, 344, 345, 3, 44, 22, 0, 345, 346, 5, 66, 0, 0, 346, 47, 1, 0, 0, 0, 347, 348, 5, 19, 0, 0, 348, 353, 3, 36, 18, 0, 349, 350, 5, 33, 0, 0, 350, 352, 3, 36, 18, 0, 351, 349, 1, 0, 0, 0, 352, 355, 1, 0, 0, 0, 353, 351, 1, 0, 0, 0, 353, 354, 1, 0, 0, 0, 354, 357, 1, 0, 0, 0, 355, 353, 1, 0, 0, 0, 356, 358, 3, 30, 15, 0, 357, 356, 1, 0, 0, 0, 357, 358, 1, 0, 0, 0, 358, 361, 1, 0, 0, 0, 359, 360, 5, 28, 0, 0, 360, 362, 3, 30, 15, 0, 361, 359, 1, 0, 0, 0, 361, 362, 1, 0, 0, 0, 362, 49, 1, 0, 0, 0, 363, 364, 5, 4, 0, 0, 364, 365, 3, 30, 15, 0, 365, 51, 1, 0, 0, 0, 366, 368, 5, 15, 0, 0, 367, 369, 3, 30, 15, 0, 368, 367, 1, 0, 0, 0, 368, 369, 1, 0, 0, 0, 369, 372, 1, 0, 0, 0, 370, 371, 5, 28, 0, 0, 371, 373, 3, 30, 15, 0, 372, 370, 1, 0, 0, 0, 372, 373, 1, 0, 0, 0, 373, 53, 1, 0, 0, 0, 374, 379, 3, 68, 34, 0, 375, 376, 5, 35, 0, 0, 376, 378, 3, 68, 34, 0, 377, 375, 1, 0, 0, 0, 378, 381, 1, 0, 0, 0, 379, 377, 1, 0, 0, 0, 379, 380, 1, 0, 0, 0, 380, 55, 1, 0, 0, 0, 381, 379, 1, 0, 0, 0, 382, 387, 3, 62, 31, 0, 383, 384, 5, 35, 0, 0, 384, 386, 3, 62, 31, 0, 385, 383, 1, 0, 0, 0, 386, 389, 1, 0, 0, 0, 387, 385, 1, 0, 0, 0, 387, 388, 1, 0, 0, 0, 388, 57, 1, 0, 0, 0, 389, 387, 1, 0, 0, 0, 390, 395, 3, 56, 28, 0, 391, 392, 5, 33, 0, 0, 392, 394, 3, 56, 28, 0, 393, 391, 1, 0, 0, 0, 394, 397, 1, 0, 0, 0, 395, 393, 1, 0, 0, 0, 395, 396, 1, 0, 0, 0, 396, 59, 1, 0, 0, 0, 397, 395, 1, 0, 0, 0, 398, 399, 7, 3, 0, 0, 399, 61, 1, 0, 0, 0, 400, 403, 
5, 80, 0, 0, 401, 403, 3, 66, 33, 0, 402, 400, 1, 0, 0, 0, 402, 401, 1, 0, 0, 0, 403, 63, 1, 0, 0, 0, 404, 447, 5, 44, 0, 0, 405, 406, 3, 100, 50, 0, 406, 407, 5, 67, 0, 0, 407, 447, 1, 0, 0, 0, 408, 447, 3, 98, 49, 0, 409, 447, 3, 100, 50, 0, 410, 447, 3, 94, 47, 0, 411, 447, 3, 66, 33, 0, 412, 447, 3, 102, 51, 0, 413, 414, 5, 65, 0, 0, 414, 419, 3, 96, 48, 0, 415, 416, 5, 33, 0, 0, 416, 418, 3, 96, 48, 0, 417, 415, 1, 0, 0, 0, 418, 421, 1, 0, 0, 0, 419, 417, 1, 0, 0, 0, 419, 420, 1, 0, 0, 0, 420, 422, 1, 0, 0, 0, 421, 419, 1, 0, 0, 0, 422, 423, 5, 66, 0, 0, 423, 447, 1, 0, 0, 0, 424, 425, 5, 65, 0, 0, 425, 430, 3, 94, 47, 0, 426, 427, 5, 33, 0, 0, 427, 429, 3, 94, 47, 0, 428, 426, 1, 0, 0, 0, 429, 432, 1, 0, 0, 0, 430, 428, 1, 0, 0, 0, 430, 431, 1, 0, 0, 0, 431, 433, 1, 0, 0, 0, 432, 430, 1, 0, 0, 0, 433, 434, 5, 66, 0, 0, 434, 447, 1, 0, 0, 0, 435, 436, 5, 65, 0, 0, 436, 441, 3, 102, 51, 0, 437, 438, 5, 33, 0, 0, 438, 440, 3, 102, 51, 0, 439, 437, 1, 0, 0, 0, 440, 443, 1, 0, 0, 0, 441, 439, 1, 0, 0, 0, 441, 442, 1, 0, 0, 0, 442, 444, 1, 0, 0, 0, 443, 441, 1, 0, 0, 0, 444, 445, 5, 66, 0, 0, 445, 447, 1, 0, 0, 0, 446, 404, 1, 0, 0, 0, 446, 405, 1, 0, 0, 0, 446, 408, 1, 0, 0, 0, 446, 409, 1, 0, 0, 0, 446, 410, 1, 0, 0, 0, 446, 411, 1, 0, 0, 0, 446, 412, 1, 0, 0, 0, 446, 413, 1, 0, 0, 0, 446, 424, 1, 0, 0, 0, 446, 435, 1, 0, 0, 0, 447, 65, 1, 0, 0, 0, 448, 451, 5, 47, 0, 0, 449, 451, 5, 64, 0, 0, 450, 448, 1, 0, 0, 0, 450, 449, 1, 0, 0, 0, 451, 67, 1, 0, 0, 0, 452, 455, 3, 60, 30, 0, 453, 455, 3, 66, 33, 0, 454, 452, 1, 0, 0, 0, 454, 453, 1, 0, 0, 0, 455, 69, 1, 0, 0, 0, 456, 457, 5, 9, 0, 0, 457, 458, 5, 26, 0, 0, 458, 71, 1, 0, 0, 0, 459, 460, 5, 14, 0, 0, 460, 465, 3, 74, 37, 0, 461, 462, 5, 33, 0, 0, 462, 464, 3, 74, 37, 0, 463, 461, 1, 0, 0, 0, 464, 467, 1, 0, 0, 0, 465, 463, 1, 0, 0, 0, 465, 466, 1, 0, 0, 0, 466, 73, 1, 0, 0, 0, 467, 465, 1, 0, 0, 0, 468, 470, 3, 10, 5, 0, 469, 471, 7, 4, 0, 0, 470, 469, 1, 0, 0, 0, 470, 471, 1, 0, 0, 0, 471, 474, 1, 0, 0, 0, 
472, 473, 5, 45, 0, 0, 473, 475, 7, 5, 0, 0, 474, 472, 1, 0, 0, 0, 474, 475, 1, 0, 0, 0, 475, 75, 1, 0, 0, 0, 476, 477, 5, 8, 0, 0, 477, 478, 3, 58, 29, 0, 478, 77, 1, 0, 0, 0, 479, 480, 5, 2, 0, 0, 480, 481, 3, 58, 29, 0, 481, 79, 1, 0, 0, 0, 482, 483, 5, 11, 0, 0, 483, 488, 3, 82, 41, 0, 484, 485, 5, 33, 0, 0, 485, 487, 3, 82, 41, 0, 486, 484, 1, 0, 0, 0, 487, 490, 1, 0, 0, 0, 488, 486, 1, 0, 0, 0, 488, 489, 1, 0, 0, 0, 489, 81, 1, 0, 0, 0, 490, 488, 1, 0, 0, 0, 491, 492, 3, 56, 28, 0, 492, 493, 5, 84, 0, 0, 493, 494, 3, 56, 28, 0, 494, 83, 1, 0, 0, 0, 495, 496, 5, 1, 0, 0, 496, 497, 3, 20, 10, 0, 497, 499, 3, 102, 51, 0, 498, 500, 3, 90, 45, 0, 499, 498, 1, 0, 0, 0, 499, 500, 1, 0, 0, 0, 500, 85, 1, 0, 0, 0, 501, 502, 5, 7, 0, 0, 502, 503, 3, 20, 10, 0, 503, 504, 3, 102, 51, 0, 504, 87, 1, 0, 0, 0, 505, 506, 5, 10, 0, 0, 506, 507, 3, 54, 27, 0, 507, 89, 1, 0, 0, 0, 508, 513, 3, 92, 46, 0, 509, 510, 5, 33, 0, 0, 510, 512, 3, 92, 46, 0, 511, 509, 1, 0, 0, 0, 512, 515, 1, 0, 0, 0, 513, 511, 1, 0, 0, 0, 513, 514, 1, 0, 0, 0, 514, 91, 1, 0, 0, 0, 515, 513, 1, 0, 0, 0, 516, 517, 3, 60, 30, 0, 517, 518, 5, 31, 0, 0, 518, 519, 3, 64, 32, 0, 519, 93, 1, 0, 0, 0, 520, 521, 7, 6, 0, 0, 521, 95, 1, 0, 0, 0, 522, 525, 3, 98, 49, 0, 523, 525, 3, 100, 50, 0, 524, 522, 1, 0, 0, 0, 524, 523, 1, 0, 0, 0, 525, 97, 1, 0, 0, 0, 526, 528, 7, 0, 0, 0, 527, 526, 1, 0, 0, 0, 527, 528, 1, 0, 0, 0, 528, 529, 1, 0, 0, 0, 529, 530, 5, 27, 0, 0, 530, 99, 1, 0, 0, 0, 531, 533, 7, 0, 0, 0, 532, 531, 1, 0, 0, 0, 532, 533, 1, 0, 0, 0, 533, 534, 1, 0, 0, 0, 534, 535, 5, 26, 0, 0, 535, 101, 1, 0, 0, 0, 536, 537, 5, 25, 0, 0, 537, 103, 1, 0, 0, 0, 538, 539, 7, 7, 0, 0, 539, 105, 1, 0, 0, 0, 540, 541, 5, 5, 0, 0, 541, 542, 3, 108, 54, 0, 542, 107, 1, 0, 0, 0, 543, 544, 5, 65, 0, 0, 544, 545, 3, 2, 1, 0, 545, 546, 5, 66, 0, 0, 546, 109, 1, 0, 0, 0, 547, 548, 5, 13, 0, 0, 548, 549, 5, 100, 0, 0, 549, 111, 1, 0, 0, 0, 550, 551, 5, 3, 0, 0, 551, 554, 5, 90, 0, 0, 552, 553, 5, 88, 0, 0, 553, 555, 3, 56, 
28, 0, 554, 552, 1, 0, 0, 0, 554, 555, 1, 0, 0, 0, 555, 565, 1, 0, 0, 0, 556, 557, 5, 89, 0, 0, 557, 562, 3, 114, 57, 0, 558, 559, 5, 33, 0, 0, 559, 561, 3, 114, 57, 0, 560, 558, 1, 0, 0, 0, 561, 564, 1, 0, 0, 0, 562, 560, 1, 0, 0, 0, 562, 563, 1, 0, 0, 0, 563, 566, 1, 0, 0, 0, 564, 562, 1, 0, 0, 0, 565, 556, 1, 0, 0, 0, 565, 566, 1, 0, 0, 0, 566, 113, 1, 0, 0, 0, 567, 568, 3, 56, 28, 0, 568, 569, 5, 31, 0, 0, 569, 571, 1, 0, 0, 0, 570, 567, 1, 0, 0, 0, 570, 571, 1, 0, 0, 0, 571, 572, 1, 0, 0, 0, 572, 573, 3, 56, 28, 0, 573, 115, 1, 0, 0, 0, 574, 575, 5, 18, 0, 0, 575, 576, 3, 36, 18, 0, 576, 577, 5, 88, 0, 0, 577, 578, 3, 58, 29, 0, 578, 117, 1, 0, 0, 0, 579, 580, 5, 17, 0, 0, 580, 583, 3, 30, 15, 0, 581, 582, 5, 28, 0, 0, 582, 584, 3, 30, 15, 0, 583, 581, 1, 0, 0, 0, 583, 584, 1, 0, 0, 0, 584, 119, 1, 0, 0, 0, 57, 131, 140, 158, 170, 179, 187, 193, 201, 203, 208, 215, 220, 231, 237, 245, 247, 258, 265, 276, 279, 285, 297, 305, 313, 317, 324, 332, 340, 353, 357, 361, 368, 372, 379, 387, 395, 402, 419, 430, 441, 446, 450, 454, 465, 470, 474, 488, 499, 513, 524, 527, 532, 554, 562, 565, 570, 583] \ No newline at end of file +[4, 1, 120, 604, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 
7, 61, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 134, 8, 1, 10, 1, 12, 1, 137, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 145, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 163, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 175, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 182, 8, 5, 10, 5, 12, 5, 185, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 192, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 198, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 206, 8, 5, 10, 5, 12, 5, 209, 9, 5, 1, 6, 1, 6, 3, 6, 213, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 220, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 225, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 236, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 242, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 250, 8, 9, 10, 9, 12, 9, 253, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 263, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 268, 8, 10, 10, 10, 12, 10, 271, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 279, 8, 11, 10, 11, 12, 11, 282, 9, 11, 3, 11, 284, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 3, 12, 290, 8, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 5, 15, 300, 8, 15, 10, 15, 12, 15, 303, 9, 15, 1, 16, 1, 16, 1, 16, 3, 16, 308, 8, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 316, 8, 17, 10, 17, 12, 17, 319, 9, 17, 1, 17, 3, 17, 322, 8, 17, 1, 18, 1, 18, 1, 18, 3, 18, 327, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 337, 8, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 343, 8, 22, 10, 22, 12, 22, 346, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 356, 8, 24, 10, 24, 12, 24, 359, 9, 24, 1, 24, 3, 24, 362, 8, 24, 1, 24, 1, 24, 3, 24, 366, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 373, 8, 26, 1, 26, 1, 26, 3, 26, 377, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 382, 8, 27, 10, 27, 12, 27, 385, 9, 27, 1, 28, 1, 28, 1, 28, 1, 28, 3, 28, 391, 8, 28, 1, 
29, 1, 29, 1, 29, 5, 29, 396, 8, 29, 10, 29, 12, 29, 399, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 404, 8, 30, 10, 30, 12, 30, 407, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 412, 8, 31, 10, 31, 12, 31, 415, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 3, 33, 421, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 436, 8, 34, 10, 34, 12, 34, 439, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 447, 8, 34, 10, 34, 12, 34, 450, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 458, 8, 34, 10, 34, 12, 34, 461, 9, 34, 1, 34, 1, 34, 3, 34, 465, 8, 34, 1, 35, 1, 35, 3, 35, 469, 8, 35, 1, 36, 1, 36, 3, 36, 473, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 482, 8, 38, 10, 38, 12, 38, 485, 9, 38, 1, 39, 1, 39, 3, 39, 489, 8, 39, 1, 39, 1, 39, 3, 39, 493, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 505, 8, 42, 10, 42, 12, 42, 508, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 518, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 530, 8, 47, 10, 47, 12, 47, 533, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 543, 8, 50, 1, 51, 3, 51, 546, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 551, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 573, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 579, 8, 58, 10, 58, 12, 58, 582, 9, 58, 3, 58, 584, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 589, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 602, 8, 61, 1, 61, 0, 4, 2, 10, 18, 20, 62, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 0, 8, 1, 0, 58, 59, 1, 0, 60, 62, 2, 0, 25, 25, 
76, 76, 1, 0, 67, 68, 2, 0, 30, 30, 34, 34, 2, 0, 37, 37, 40, 40, 2, 0, 36, 36, 50, 50, 2, 0, 51, 51, 53, 57, 630, 0, 124, 1, 0, 0, 0, 2, 127, 1, 0, 0, 0, 4, 144, 1, 0, 0, 0, 6, 162, 1, 0, 0, 0, 8, 164, 1, 0, 0, 0, 10, 197, 1, 0, 0, 0, 12, 224, 1, 0, 0, 0, 14, 226, 1, 0, 0, 0, 16, 235, 1, 0, 0, 0, 18, 241, 1, 0, 0, 0, 20, 262, 1, 0, 0, 0, 22, 272, 1, 0, 0, 0, 24, 289, 1, 0, 0, 0, 26, 291, 1, 0, 0, 0, 28, 293, 1, 0, 0, 0, 30, 296, 1, 0, 0, 0, 32, 307, 1, 0, 0, 0, 34, 311, 1, 0, 0, 0, 36, 326, 1, 0, 0, 0, 38, 330, 1, 0, 0, 0, 40, 332, 1, 0, 0, 0, 42, 336, 1, 0, 0, 0, 44, 338, 1, 0, 0, 0, 46, 347, 1, 0, 0, 0, 48, 351, 1, 0, 0, 0, 50, 367, 1, 0, 0, 0, 52, 370, 1, 0, 0, 0, 54, 378, 1, 0, 0, 0, 56, 386, 1, 0, 0, 0, 58, 392, 1, 0, 0, 0, 60, 400, 1, 0, 0, 0, 62, 408, 1, 0, 0, 0, 64, 416, 1, 0, 0, 0, 66, 420, 1, 0, 0, 0, 68, 464, 1, 0, 0, 0, 70, 468, 1, 0, 0, 0, 72, 472, 1, 0, 0, 0, 74, 474, 1, 0, 0, 0, 76, 477, 1, 0, 0, 0, 78, 486, 1, 0, 0, 0, 80, 494, 1, 0, 0, 0, 82, 497, 1, 0, 0, 0, 84, 500, 1, 0, 0, 0, 86, 509, 1, 0, 0, 0, 88, 513, 1, 0, 0, 0, 90, 519, 1, 0, 0, 0, 92, 523, 1, 0, 0, 0, 94, 526, 1, 0, 0, 0, 96, 534, 1, 0, 0, 0, 98, 538, 1, 0, 0, 0, 100, 542, 1, 0, 0, 0, 102, 545, 1, 0, 0, 0, 104, 550, 1, 0, 0, 0, 106, 554, 1, 0, 0, 0, 108, 556, 1, 0, 0, 0, 110, 558, 1, 0, 0, 0, 112, 561, 1, 0, 0, 0, 114, 565, 1, 0, 0, 0, 116, 568, 1, 0, 0, 0, 118, 588, 1, 0, 0, 0, 120, 592, 1, 0, 0, 0, 122, 597, 1, 0, 0, 0, 124, 125, 3, 2, 1, 0, 125, 126, 5, 0, 0, 1, 126, 1, 1, 0, 0, 0, 127, 128, 6, 1, -1, 0, 128, 129, 3, 4, 2, 0, 129, 135, 1, 0, 0, 0, 130, 131, 10, 1, 0, 0, 131, 132, 5, 24, 0, 0, 132, 134, 3, 6, 3, 0, 133, 130, 1, 0, 0, 0, 134, 137, 1, 0, 0, 0, 135, 133, 1, 0, 0, 0, 135, 136, 1, 0, 0, 0, 136, 3, 1, 0, 0, 0, 137, 135, 1, 0, 0, 0, 138, 145, 3, 110, 55, 0, 139, 145, 3, 34, 17, 0, 140, 145, 3, 28, 14, 0, 141, 145, 3, 114, 57, 0, 142, 143, 4, 2, 1, 0, 143, 145, 3, 48, 24, 0, 144, 138, 1, 0, 0, 0, 144, 139, 1, 0, 0, 0, 144, 140, 1, 0, 0, 0, 144, 141, 1, 0, 0, 0, 144, 142, 1, 
0, 0, 0, 145, 5, 1, 0, 0, 0, 146, 163, 3, 50, 25, 0, 147, 163, 3, 8, 4, 0, 148, 163, 3, 80, 40, 0, 149, 163, 3, 74, 37, 0, 150, 163, 3, 52, 26, 0, 151, 163, 3, 76, 38, 0, 152, 163, 3, 82, 41, 0, 153, 163, 3, 84, 42, 0, 154, 163, 3, 88, 44, 0, 155, 163, 3, 90, 45, 0, 156, 163, 3, 116, 58, 0, 157, 163, 3, 92, 46, 0, 158, 159, 4, 3, 2, 0, 159, 163, 3, 122, 61, 0, 160, 161, 4, 3, 3, 0, 161, 163, 3, 120, 60, 0, 162, 146, 1, 0, 0, 0, 162, 147, 1, 0, 0, 0, 162, 148, 1, 0, 0, 0, 162, 149, 1, 0, 0, 0, 162, 150, 1, 0, 0, 0, 162, 151, 1, 0, 0, 0, 162, 152, 1, 0, 0, 0, 162, 153, 1, 0, 0, 0, 162, 154, 1, 0, 0, 0, 162, 155, 1, 0, 0, 0, 162, 156, 1, 0, 0, 0, 162, 157, 1, 0, 0, 0, 162, 158, 1, 0, 0, 0, 162, 160, 1, 0, 0, 0, 163, 7, 1, 0, 0, 0, 164, 165, 5, 16, 0, 0, 165, 166, 3, 10, 5, 0, 166, 9, 1, 0, 0, 0, 167, 168, 6, 5, -1, 0, 168, 169, 5, 43, 0, 0, 169, 198, 3, 10, 5, 8, 170, 198, 3, 16, 8, 0, 171, 198, 3, 12, 6, 0, 172, 174, 3, 16, 8, 0, 173, 175, 5, 43, 0, 0, 174, 173, 1, 0, 0, 0, 174, 175, 1, 0, 0, 0, 175, 176, 1, 0, 0, 0, 176, 177, 5, 38, 0, 0, 177, 178, 5, 42, 0, 0, 178, 183, 3, 16, 8, 0, 179, 180, 5, 33, 0, 0, 180, 182, 3, 16, 8, 0, 181, 179, 1, 0, 0, 0, 182, 185, 1, 0, 0, 0, 183, 181, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 186, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 186, 187, 5, 49, 0, 0, 187, 198, 1, 0, 0, 0, 188, 189, 3, 16, 8, 0, 189, 191, 5, 39, 0, 0, 190, 192, 5, 43, 0, 0, 191, 190, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 193, 1, 0, 0, 0, 193, 194, 5, 44, 0, 0, 194, 198, 1, 0, 0, 0, 195, 196, 4, 5, 4, 0, 196, 198, 3, 14, 7, 0, 197, 167, 1, 0, 0, 0, 197, 170, 1, 0, 0, 0, 197, 171, 1, 0, 0, 0, 197, 172, 1, 0, 0, 0, 197, 188, 1, 0, 0, 0, 197, 195, 1, 0, 0, 0, 198, 207, 1, 0, 0, 0, 199, 200, 10, 5, 0, 0, 200, 201, 5, 29, 0, 0, 201, 206, 3, 10, 5, 6, 202, 203, 10, 4, 0, 0, 203, 204, 5, 46, 0, 0, 204, 206, 3, 10, 5, 5, 205, 199, 1, 0, 0, 0, 205, 202, 1, 0, 0, 0, 206, 209, 1, 0, 0, 0, 207, 205, 1, 0, 0, 0, 207, 208, 1, 0, 0, 0, 208, 11, 1, 0, 0, 0, 209, 207, 1, 0, 0, 0, 210, 
212, 3, 16, 8, 0, 211, 213, 5, 43, 0, 0, 212, 211, 1, 0, 0, 0, 212, 213, 1, 0, 0, 0, 213, 214, 1, 0, 0, 0, 214, 215, 5, 41, 0, 0, 215, 216, 3, 106, 53, 0, 216, 225, 1, 0, 0, 0, 217, 219, 3, 16, 8, 0, 218, 220, 5, 43, 0, 0, 219, 218, 1, 0, 0, 0, 219, 220, 1, 0, 0, 0, 220, 221, 1, 0, 0, 0, 221, 222, 5, 48, 0, 0, 222, 223, 3, 106, 53, 0, 223, 225, 1, 0, 0, 0, 224, 210, 1, 0, 0, 0, 224, 217, 1, 0, 0, 0, 225, 13, 1, 0, 0, 0, 226, 227, 3, 16, 8, 0, 227, 228, 5, 63, 0, 0, 228, 229, 3, 106, 53, 0, 229, 15, 1, 0, 0, 0, 230, 236, 3, 18, 9, 0, 231, 232, 3, 18, 9, 0, 232, 233, 3, 108, 54, 0, 233, 234, 3, 18, 9, 0, 234, 236, 1, 0, 0, 0, 235, 230, 1, 0, 0, 0, 235, 231, 1, 0, 0, 0, 236, 17, 1, 0, 0, 0, 237, 238, 6, 9, -1, 0, 238, 242, 3, 20, 10, 0, 239, 240, 7, 0, 0, 0, 240, 242, 3, 18, 9, 3, 241, 237, 1, 0, 0, 0, 241, 239, 1, 0, 0, 0, 242, 251, 1, 0, 0, 0, 243, 244, 10, 2, 0, 0, 244, 245, 7, 1, 0, 0, 245, 250, 3, 18, 9, 3, 246, 247, 10, 1, 0, 0, 247, 248, 7, 0, 0, 0, 248, 250, 3, 18, 9, 2, 249, 243, 1, 0, 0, 0, 249, 246, 1, 0, 0, 0, 250, 253, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 251, 252, 1, 0, 0, 0, 252, 19, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 254, 255, 6, 10, -1, 0, 255, 263, 3, 68, 34, 0, 256, 263, 3, 58, 29, 0, 257, 263, 3, 22, 11, 0, 258, 259, 5, 42, 0, 0, 259, 260, 3, 10, 5, 0, 260, 261, 5, 49, 0, 0, 261, 263, 1, 0, 0, 0, 262, 254, 1, 0, 0, 0, 262, 256, 1, 0, 0, 0, 262, 257, 1, 0, 0, 0, 262, 258, 1, 0, 0, 0, 263, 269, 1, 0, 0, 0, 264, 265, 10, 1, 0, 0, 265, 266, 5, 32, 0, 0, 266, 268, 3, 26, 13, 0, 267, 264, 1, 0, 0, 0, 268, 271, 1, 0, 0, 0, 269, 267, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 21, 1, 0, 0, 0, 271, 269, 1, 0, 0, 0, 272, 273, 3, 24, 12, 0, 273, 283, 5, 42, 0, 0, 274, 284, 5, 60, 0, 0, 275, 280, 3, 10, 5, 0, 276, 277, 5, 33, 0, 0, 277, 279, 3, 10, 5, 0, 278, 276, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 284, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 283, 274, 1, 0, 0, 0, 283, 275, 1, 0, 0, 0, 283, 284, 1, 0, 0, 0, 284, 285, 1, 0, 0, 
0, 285, 286, 5, 49, 0, 0, 286, 23, 1, 0, 0, 0, 287, 290, 5, 63, 0, 0, 288, 290, 3, 72, 36, 0, 289, 287, 1, 0, 0, 0, 289, 288, 1, 0, 0, 0, 290, 25, 1, 0, 0, 0, 291, 292, 3, 64, 32, 0, 292, 27, 1, 0, 0, 0, 293, 294, 5, 12, 0, 0, 294, 295, 3, 30, 15, 0, 295, 29, 1, 0, 0, 0, 296, 301, 3, 32, 16, 0, 297, 298, 5, 33, 0, 0, 298, 300, 3, 32, 16, 0, 299, 297, 1, 0, 0, 0, 300, 303, 1, 0, 0, 0, 301, 299, 1, 0, 0, 0, 301, 302, 1, 0, 0, 0, 302, 31, 1, 0, 0, 0, 303, 301, 1, 0, 0, 0, 304, 305, 3, 58, 29, 0, 305, 306, 5, 31, 0, 0, 306, 308, 1, 0, 0, 0, 307, 304, 1, 0, 0, 0, 307, 308, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 310, 3, 10, 5, 0, 310, 33, 1, 0, 0, 0, 311, 312, 5, 6, 0, 0, 312, 317, 3, 36, 18, 0, 313, 314, 5, 33, 0, 0, 314, 316, 3, 36, 18, 0, 315, 313, 1, 0, 0, 0, 316, 319, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 317, 318, 1, 0, 0, 0, 318, 321, 1, 0, 0, 0, 319, 317, 1, 0, 0, 0, 320, 322, 3, 42, 21, 0, 321, 320, 1, 0, 0, 0, 321, 322, 1, 0, 0, 0, 322, 35, 1, 0, 0, 0, 323, 324, 3, 38, 19, 0, 324, 325, 5, 104, 0, 0, 325, 327, 1, 0, 0, 0, 326, 323, 1, 0, 0, 0, 326, 327, 1, 0, 0, 0, 327, 328, 1, 0, 0, 0, 328, 329, 3, 40, 20, 0, 329, 37, 1, 0, 0, 0, 330, 331, 5, 76, 0, 0, 331, 39, 1, 0, 0, 0, 332, 333, 7, 2, 0, 0, 333, 41, 1, 0, 0, 0, 334, 337, 3, 44, 22, 0, 335, 337, 3, 46, 23, 0, 336, 334, 1, 0, 0, 0, 336, 335, 1, 0, 0, 0, 337, 43, 1, 0, 0, 0, 338, 339, 5, 75, 0, 0, 339, 344, 5, 76, 0, 0, 340, 341, 5, 33, 0, 0, 341, 343, 5, 76, 0, 0, 342, 340, 1, 0, 0, 0, 343, 346, 1, 0, 0, 0, 344, 342, 1, 0, 0, 0, 344, 345, 1, 0, 0, 0, 345, 45, 1, 0, 0, 0, 346, 344, 1, 0, 0, 0, 347, 348, 5, 65, 0, 0, 348, 349, 3, 44, 22, 0, 349, 350, 5, 66, 0, 0, 350, 47, 1, 0, 0, 0, 351, 352, 5, 19, 0, 0, 352, 357, 3, 36, 18, 0, 353, 354, 5, 33, 0, 0, 354, 356, 3, 36, 18, 0, 355, 353, 1, 0, 0, 0, 356, 359, 1, 0, 0, 0, 357, 355, 1, 0, 0, 0, 357, 358, 1, 0, 0, 0, 358, 361, 1, 0, 0, 0, 359, 357, 1, 0, 0, 0, 360, 362, 3, 54, 27, 0, 361, 360, 1, 0, 0, 0, 361, 362, 1, 0, 0, 0, 362, 365, 1, 0, 0, 0, 363, 364, 5, 28, 0, 
0, 364, 366, 3, 30, 15, 0, 365, 363, 1, 0, 0, 0, 365, 366, 1, 0, 0, 0, 366, 49, 1, 0, 0, 0, 367, 368, 5, 4, 0, 0, 368, 369, 3, 30, 15, 0, 369, 51, 1, 0, 0, 0, 370, 372, 5, 15, 0, 0, 371, 373, 3, 54, 27, 0, 372, 371, 1, 0, 0, 0, 372, 373, 1, 0, 0, 0, 373, 376, 1, 0, 0, 0, 374, 375, 5, 28, 0, 0, 375, 377, 3, 30, 15, 0, 376, 374, 1, 0, 0, 0, 376, 377, 1, 0, 0, 0, 377, 53, 1, 0, 0, 0, 378, 383, 3, 56, 28, 0, 379, 380, 5, 33, 0, 0, 380, 382, 3, 56, 28, 0, 381, 379, 1, 0, 0, 0, 382, 385, 1, 0, 0, 0, 383, 381, 1, 0, 0, 0, 383, 384, 1, 0, 0, 0, 384, 55, 1, 0, 0, 0, 385, 383, 1, 0, 0, 0, 386, 387, 3, 32, 16, 0, 387, 390, 4, 28, 10, 0, 388, 389, 5, 16, 0, 0, 389, 391, 3, 10, 5, 0, 390, 388, 1, 0, 0, 0, 390, 391, 1, 0, 0, 0, 391, 57, 1, 0, 0, 0, 392, 397, 3, 72, 36, 0, 393, 394, 5, 35, 0, 0, 394, 396, 3, 72, 36, 0, 395, 393, 1, 0, 0, 0, 396, 399, 1, 0, 0, 0, 397, 395, 1, 0, 0, 0, 397, 398, 1, 0, 0, 0, 398, 59, 1, 0, 0, 0, 399, 397, 1, 0, 0, 0, 400, 405, 3, 66, 33, 0, 401, 402, 5, 35, 0, 0, 402, 404, 3, 66, 33, 0, 403, 401, 1, 0, 0, 0, 404, 407, 1, 0, 0, 0, 405, 403, 1, 0, 0, 0, 405, 406, 1, 0, 0, 0, 406, 61, 1, 0, 0, 0, 407, 405, 1, 0, 0, 0, 408, 413, 3, 60, 30, 0, 409, 410, 5, 33, 0, 0, 410, 412, 3, 60, 30, 0, 411, 409, 1, 0, 0, 0, 412, 415, 1, 0, 0, 0, 413, 411, 1, 0, 0, 0, 413, 414, 1, 0, 0, 0, 414, 63, 1, 0, 0, 0, 415, 413, 1, 0, 0, 0, 416, 417, 7, 3, 0, 0, 417, 65, 1, 0, 0, 0, 418, 421, 5, 80, 0, 0, 419, 421, 3, 70, 35, 0, 420, 418, 1, 0, 0, 0, 420, 419, 1, 0, 0, 0, 421, 67, 1, 0, 0, 0, 422, 465, 5, 44, 0, 0, 423, 424, 3, 104, 52, 0, 424, 425, 5, 67, 0, 0, 425, 465, 1, 0, 0, 0, 426, 465, 3, 102, 51, 0, 427, 465, 3, 104, 52, 0, 428, 465, 3, 98, 49, 0, 429, 465, 3, 70, 35, 0, 430, 465, 3, 106, 53, 0, 431, 432, 5, 65, 0, 0, 432, 437, 3, 100, 50, 0, 433, 434, 5, 33, 0, 0, 434, 436, 3, 100, 50, 0, 435, 433, 1, 0, 0, 0, 436, 439, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 437, 438, 1, 0, 0, 0, 438, 440, 1, 0, 0, 0, 439, 437, 1, 0, 0, 0, 440, 441, 5, 66, 0, 0, 441, 465, 1, 0, 0, 0, 442, 
443, 5, 65, 0, 0, 443, 448, 3, 98, 49, 0, 444, 445, 5, 33, 0, 0, 445, 447, 3, 98, 49, 0, 446, 444, 1, 0, 0, 0, 447, 450, 1, 0, 0, 0, 448, 446, 1, 0, 0, 0, 448, 449, 1, 0, 0, 0, 449, 451, 1, 0, 0, 0, 450, 448, 1, 0, 0, 0, 451, 452, 5, 66, 0, 0, 452, 465, 1, 0, 0, 0, 453, 454, 5, 65, 0, 0, 454, 459, 3, 106, 53, 0, 455, 456, 5, 33, 0, 0, 456, 458, 3, 106, 53, 0, 457, 455, 1, 0, 0, 0, 458, 461, 1, 0, 0, 0, 459, 457, 1, 0, 0, 0, 459, 460, 1, 0, 0, 0, 460, 462, 1, 0, 0, 0, 461, 459, 1, 0, 0, 0, 462, 463, 5, 66, 0, 0, 463, 465, 1, 0, 0, 0, 464, 422, 1, 0, 0, 0, 464, 423, 1, 0, 0, 0, 464, 426, 1, 0, 0, 0, 464, 427, 1, 0, 0, 0, 464, 428, 1, 0, 0, 0, 464, 429, 1, 0, 0, 0, 464, 430, 1, 0, 0, 0, 464, 431, 1, 0, 0, 0, 464, 442, 1, 0, 0, 0, 464, 453, 1, 0, 0, 0, 465, 69, 1, 0, 0, 0, 466, 469, 5, 47, 0, 0, 467, 469, 5, 64, 0, 0, 468, 466, 1, 0, 0, 0, 468, 467, 1, 0, 0, 0, 469, 71, 1, 0, 0, 0, 470, 473, 3, 64, 32, 0, 471, 473, 3, 70, 35, 0, 472, 470, 1, 0, 0, 0, 472, 471, 1, 0, 0, 0, 473, 73, 1, 0, 0, 0, 474, 475, 5, 9, 0, 0, 475, 476, 5, 26, 0, 0, 476, 75, 1, 0, 0, 0, 477, 478, 5, 14, 0, 0, 478, 483, 3, 78, 39, 0, 479, 480, 5, 33, 0, 0, 480, 482, 3, 78, 39, 0, 481, 479, 1, 0, 0, 0, 482, 485, 1, 0, 0, 0, 483, 481, 1, 0, 0, 0, 483, 484, 1, 0, 0, 0, 484, 77, 1, 0, 0, 0, 485, 483, 1, 0, 0, 0, 486, 488, 3, 10, 5, 0, 487, 489, 7, 4, 0, 0, 488, 487, 1, 0, 0, 0, 488, 489, 1, 0, 0, 0, 489, 492, 1, 0, 0, 0, 490, 491, 5, 45, 0, 0, 491, 493, 7, 5, 0, 0, 492, 490, 1, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 79, 1, 0, 0, 0, 494, 495, 5, 8, 0, 0, 495, 496, 3, 62, 31, 0, 496, 81, 1, 0, 0, 0, 497, 498, 5, 2, 0, 0, 498, 499, 3, 62, 31, 0, 499, 83, 1, 0, 0, 0, 500, 501, 5, 11, 0, 0, 501, 506, 3, 86, 43, 0, 502, 503, 5, 33, 0, 0, 503, 505, 3, 86, 43, 0, 504, 502, 1, 0, 0, 0, 505, 508, 1, 0, 0, 0, 506, 504, 1, 0, 0, 0, 506, 507, 1, 0, 0, 0, 507, 85, 1, 0, 0, 0, 508, 506, 1, 0, 0, 0, 509, 510, 3, 60, 30, 0, 510, 511, 5, 84, 0, 0, 511, 512, 3, 60, 30, 0, 512, 87, 1, 0, 0, 0, 513, 514, 5, 1, 0, 0, 514, 515, 
3, 20, 10, 0, 515, 517, 3, 106, 53, 0, 516, 518, 3, 94, 47, 0, 517, 516, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 89, 1, 0, 0, 0, 519, 520, 5, 7, 0, 0, 520, 521, 3, 20, 10, 0, 521, 522, 3, 106, 53, 0, 522, 91, 1, 0, 0, 0, 523, 524, 5, 10, 0, 0, 524, 525, 3, 58, 29, 0, 525, 93, 1, 0, 0, 0, 526, 531, 3, 96, 48, 0, 527, 528, 5, 33, 0, 0, 528, 530, 3, 96, 48, 0, 529, 527, 1, 0, 0, 0, 530, 533, 1, 0, 0, 0, 531, 529, 1, 0, 0, 0, 531, 532, 1, 0, 0, 0, 532, 95, 1, 0, 0, 0, 533, 531, 1, 0, 0, 0, 534, 535, 3, 64, 32, 0, 535, 536, 5, 31, 0, 0, 536, 537, 3, 68, 34, 0, 537, 97, 1, 0, 0, 0, 538, 539, 7, 6, 0, 0, 539, 99, 1, 0, 0, 0, 540, 543, 3, 102, 51, 0, 541, 543, 3, 104, 52, 0, 542, 540, 1, 0, 0, 0, 542, 541, 1, 0, 0, 0, 543, 101, 1, 0, 0, 0, 544, 546, 7, 0, 0, 0, 545, 544, 1, 0, 0, 0, 545, 546, 1, 0, 0, 0, 546, 547, 1, 0, 0, 0, 547, 548, 5, 27, 0, 0, 548, 103, 1, 0, 0, 0, 549, 551, 7, 0, 0, 0, 550, 549, 1, 0, 0, 0, 550, 551, 1, 0, 0, 0, 551, 552, 1, 0, 0, 0, 552, 553, 5, 26, 0, 0, 553, 105, 1, 0, 0, 0, 554, 555, 5, 25, 0, 0, 555, 107, 1, 0, 0, 0, 556, 557, 7, 7, 0, 0, 557, 109, 1, 0, 0, 0, 558, 559, 5, 5, 0, 0, 559, 560, 3, 112, 56, 0, 560, 111, 1, 0, 0, 0, 561, 562, 5, 65, 0, 0, 562, 563, 3, 2, 1, 0, 563, 564, 5, 66, 0, 0, 564, 113, 1, 0, 0, 0, 565, 566, 5, 13, 0, 0, 566, 567, 5, 100, 0, 0, 567, 115, 1, 0, 0, 0, 568, 569, 5, 3, 0, 0, 569, 572, 5, 90, 0, 0, 570, 571, 5, 88, 0, 0, 571, 573, 3, 60, 30, 0, 572, 570, 1, 0, 0, 0, 572, 573, 1, 0, 0, 0, 573, 583, 1, 0, 0, 0, 574, 575, 5, 89, 0, 0, 575, 580, 3, 118, 59, 0, 576, 577, 5, 33, 0, 0, 577, 579, 3, 118, 59, 0, 578, 576, 1, 0, 0, 0, 579, 582, 1, 0, 0, 0, 580, 578, 1, 0, 0, 0, 580, 581, 1, 0, 0, 0, 581, 584, 1, 0, 0, 0, 582, 580, 1, 0, 0, 0, 583, 574, 1, 0, 0, 0, 583, 584, 1, 0, 0, 0, 584, 117, 1, 0, 0, 0, 585, 586, 3, 60, 30, 0, 586, 587, 5, 31, 0, 0, 587, 589, 1, 0, 0, 0, 588, 585, 1, 0, 0, 0, 588, 589, 1, 0, 0, 0, 589, 590, 1, 0, 0, 0, 590, 591, 3, 60, 30, 0, 591, 119, 1, 0, 0, 0, 592, 593, 5, 18, 0, 0, 593, 594, 3, 36, 
18, 0, 594, 595, 5, 88, 0, 0, 595, 596, 3, 62, 31, 0, 596, 121, 1, 0, 0, 0, 597, 598, 5, 17, 0, 0, 598, 601, 3, 54, 27, 0, 599, 600, 5, 28, 0, 0, 600, 602, 3, 30, 15, 0, 601, 599, 1, 0, 0, 0, 601, 602, 1, 0, 0, 0, 602, 123, 1, 0, 0, 0, 59, 135, 144, 162, 174, 183, 191, 197, 205, 207, 212, 219, 224, 235, 241, 249, 251, 262, 269, 280, 283, 289, 301, 307, 317, 321, 326, 336, 344, 357, 361, 365, 372, 376, 383, 390, 397, 405, 413, 420, 437, 448, 459, 464, 468, 472, 483, 488, 492, 506, 517, 531, 542, 545, 550, 572, 580, 583, 588, 601] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index e3e8790d205ff..ee1ed0a05e978 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -8,26 +8,14 @@ * 2.0. 
*/ -import org.antlr.v4.runtime.FailedPredicateException; -import org.antlr.v4.runtime.NoViableAltException; -import org.antlr.v4.runtime.ParserRuleContext; -import org.antlr.v4.runtime.RecognitionException; -import org.antlr.v4.runtime.RuleContext; -import org.antlr.v4.runtime.RuntimeMetaData; -import org.antlr.v4.runtime.Token; -import org.antlr.v4.runtime.TokenStream; -import org.antlr.v4.runtime.Vocabulary; -import org.antlr.v4.runtime.VocabularyImpl; -import org.antlr.v4.runtime.atn.ATN; -import org.antlr.v4.runtime.atn.ATNDeserializer; -import org.antlr.v4.runtime.atn.ParserATNSimulator; -import org.antlr.v4.runtime.atn.PredictionContextCache; +import org.antlr.v4.runtime.atn.*; import org.antlr.v4.runtime.dfa.DFA; -import org.antlr.v4.runtime.tree.ParseTreeListener; -import org.antlr.v4.runtime.tree.ParseTreeVisitor; -import org.antlr.v4.runtime.tree.TerminalNode; - +import org.antlr.v4.runtime.*; +import org.antlr.v4.runtime.misc.*; +import org.antlr.v4.runtime.tree.*; import java.util.List; +import java.util.Iterator; +import java.util.ArrayList; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast", "CheckReturnValue"}) public class EsqlBaseParser extends ParserConfig { @@ -37,66 +25,67 @@ public class EsqlBaseParser extends ParserConfig { protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int - DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, - LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, - WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, UNKNOWN_CMD=20, - LINE_COMMENT=21, MULTILINE_COMMENT=22, WS=23, PIPE=24, QUOTED_STRING=25, - INTEGER_LITERAL=26, DECIMAL_LITERAL=27, BY=28, AND=29, ASC=30, ASSIGN=31, - CAST_OP=32, COMMA=33, DESC=34, DOT=35, FALSE=36, FIRST=37, IN=38, IS=39, - LAST=40, LIKE=41, LP=42, NOT=43, NULL=44, NULLS=45, OR=46, PARAM=47, RLIKE=48, - RP=49, TRUE=50, EQ=51, CIEQ=52, NEQ=53, LT=54, LTE=55, 
GT=56, GTE=57, - PLUS=58, MINUS=59, ASTERISK=60, SLASH=61, PERCENT=62, MATCH=63, NAMED_OR_POSITIONAL_PARAM=64, - OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, - EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, EXPLAIN_WS=72, - EXPLAIN_LINE_COMMENT=73, EXPLAIN_MULTILINE_COMMENT=74, METADATA=75, UNQUOTED_SOURCE=76, - FROM_LINE_COMMENT=77, FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80, - PROJECT_LINE_COMMENT=81, PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83, - AS=84, RENAME_LINE_COMMENT=85, RENAME_MULTILINE_COMMENT=86, RENAME_WS=87, - ON=88, WITH=89, ENRICH_POLICY_NAME=90, ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92, - ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94, ENRICH_FIELD_MULTILINE_COMMENT=95, - ENRICH_FIELD_WS=96, MVEXPAND_LINE_COMMENT=97, MVEXPAND_MULTILINE_COMMENT=98, - MVEXPAND_WS=99, INFO=100, SHOW_LINE_COMMENT=101, SHOW_MULTILINE_COMMENT=102, - SHOW_WS=103, COLON=104, SETTING=105, SETTING_LINE_COMMENT=106, SETTTING_MULTILINE_COMMENT=107, - SETTING_WS=108, LOOKUP_LINE_COMMENT=109, LOOKUP_MULTILINE_COMMENT=110, - LOOKUP_WS=111, LOOKUP_FIELD_LINE_COMMENT=112, LOOKUP_FIELD_MULTILINE_COMMENT=113, - LOOKUP_FIELD_WS=114, METRICS_LINE_COMMENT=115, METRICS_MULTILINE_COMMENT=116, - METRICS_WS=117, CLOSING_METRICS_LINE_COMMENT=118, CLOSING_METRICS_MULTILINE_COMMENT=119, + DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, + LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, + WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, UNKNOWN_CMD=20, + LINE_COMMENT=21, MULTILINE_COMMENT=22, WS=23, PIPE=24, QUOTED_STRING=25, + INTEGER_LITERAL=26, DECIMAL_LITERAL=27, BY=28, AND=29, ASC=30, ASSIGN=31, + CAST_OP=32, COMMA=33, DESC=34, DOT=35, FALSE=36, FIRST=37, IN=38, IS=39, + LAST=40, LIKE=41, LP=42, NOT=43, NULL=44, NULLS=45, OR=46, PARAM=47, RLIKE=48, + RP=49, TRUE=50, EQ=51, CIEQ=52, NEQ=53, LT=54, LTE=55, GT=56, GTE=57, + PLUS=58, MINUS=59, ASTERISK=60, SLASH=61, 
PERCENT=62, MATCH=63, NAMED_OR_POSITIONAL_PARAM=64, + OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, + EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, EXPLAIN_WS=72, + EXPLAIN_LINE_COMMENT=73, EXPLAIN_MULTILINE_COMMENT=74, METADATA=75, UNQUOTED_SOURCE=76, + FROM_LINE_COMMENT=77, FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80, + PROJECT_LINE_COMMENT=81, PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83, + AS=84, RENAME_LINE_COMMENT=85, RENAME_MULTILINE_COMMENT=86, RENAME_WS=87, + ON=88, WITH=89, ENRICH_POLICY_NAME=90, ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92, + ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94, ENRICH_FIELD_MULTILINE_COMMENT=95, + ENRICH_FIELD_WS=96, MVEXPAND_LINE_COMMENT=97, MVEXPAND_MULTILINE_COMMENT=98, + MVEXPAND_WS=99, INFO=100, SHOW_LINE_COMMENT=101, SHOW_MULTILINE_COMMENT=102, + SHOW_WS=103, COLON=104, SETTING=105, SETTING_LINE_COMMENT=106, SETTTING_MULTILINE_COMMENT=107, + SETTING_WS=108, LOOKUP_LINE_COMMENT=109, LOOKUP_MULTILINE_COMMENT=110, + LOOKUP_WS=111, LOOKUP_FIELD_LINE_COMMENT=112, LOOKUP_FIELD_MULTILINE_COMMENT=113, + LOOKUP_FIELD_WS=114, METRICS_LINE_COMMENT=115, METRICS_MULTILINE_COMMENT=116, + METRICS_WS=117, CLOSING_METRICS_LINE_COMMENT=118, CLOSING_METRICS_MULTILINE_COMMENT=119, CLOSING_METRICS_WS=120; public static final int - RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, - RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, - RULE_matchBooleanExpression = 7, RULE_valueExpression = 8, RULE_operatorExpression = 9, - RULE_primaryExpression = 10, RULE_functionExpression = 11, RULE_functionName = 12, - RULE_dataType = 13, RULE_rowCommand = 14, RULE_fields = 15, RULE_field = 16, - RULE_fromCommand = 17, RULE_indexPattern = 18, RULE_clusterString = 19, - RULE_indexString = 20, RULE_metadata = 21, RULE_metadataOption = 22, RULE_deprecated_metadata = 23, - RULE_metricsCommand = 24, RULE_evalCommand = 
25, RULE_statsCommand = 26, - RULE_qualifiedName = 27, RULE_qualifiedNamePattern = 28, RULE_qualifiedNamePatterns = 29, - RULE_identifier = 30, RULE_identifierPattern = 31, RULE_constant = 32, - RULE_parameter = 33, RULE_identifierOrParameter = 34, RULE_limitCommand = 35, - RULE_sortCommand = 36, RULE_orderExpression = 37, RULE_keepCommand = 38, - RULE_dropCommand = 39, RULE_renameCommand = 40, RULE_renameClause = 41, - RULE_dissectCommand = 42, RULE_grokCommand = 43, RULE_mvExpandCommand = 44, - RULE_commandOptions = 45, RULE_commandOption = 46, RULE_booleanValue = 47, - RULE_numericValue = 48, RULE_decimalValue = 49, RULE_integerValue = 50, - RULE_string = 51, RULE_comparisonOperator = 52, RULE_explainCommand = 53, - RULE_subqueryExpression = 54, RULE_showCommand = 55, RULE_enrichCommand = 56, - RULE_enrichWithClause = 57, RULE_lookupCommand = 58, RULE_inlinestatsCommand = 59; + RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, + RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, + RULE_matchBooleanExpression = 7, RULE_valueExpression = 8, RULE_operatorExpression = 9, + RULE_primaryExpression = 10, RULE_functionExpression = 11, RULE_functionName = 12, + RULE_dataType = 13, RULE_rowCommand = 14, RULE_fields = 15, RULE_field = 16, + RULE_fromCommand = 17, RULE_indexPattern = 18, RULE_clusterString = 19, + RULE_indexString = 20, RULE_metadata = 21, RULE_metadataOption = 22, RULE_deprecated_metadata = 23, + RULE_metricsCommand = 24, RULE_evalCommand = 25, RULE_statsCommand = 26, + RULE_aggFields = 27, RULE_aggField = 28, RULE_qualifiedName = 29, RULE_qualifiedNamePattern = 30, + RULE_qualifiedNamePatterns = 31, RULE_identifier = 32, RULE_identifierPattern = 33, + RULE_constant = 34, RULE_parameter = 35, RULE_identifierOrParameter = 36, + RULE_limitCommand = 37, RULE_sortCommand = 38, RULE_orderExpression = 39, + RULE_keepCommand = 40, RULE_dropCommand = 41, RULE_renameCommand = 42, + 
RULE_renameClause = 43, RULE_dissectCommand = 44, RULE_grokCommand = 45, + RULE_mvExpandCommand = 46, RULE_commandOptions = 47, RULE_commandOption = 48, + RULE_booleanValue = 49, RULE_numericValue = 50, RULE_decimalValue = 51, + RULE_integerValue = 52, RULE_string = 53, RULE_comparisonOperator = 54, + RULE_explainCommand = 55, RULE_subqueryExpression = 56, RULE_showCommand = 57, + RULE_enrichCommand = 58, RULE_enrichWithClause = 59, RULE_lookupCommand = 60, + RULE_inlinestatsCommand = 61; private static String[] makeRuleNames() { return new String[] { - "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", - "booleanExpression", "regexBooleanExpression", "matchBooleanExpression", - "valueExpression", "operatorExpression", "primaryExpression", "functionExpression", - "functionName", "dataType", "rowCommand", "fields", "field", "fromCommand", - "indexPattern", "clusterString", "indexString", "metadata", "metadataOption", - "deprecated_metadata", "metricsCommand", "evalCommand", "statsCommand", - "qualifiedName", "qualifiedNamePattern", "qualifiedNamePatterns", "identifier", - "identifierPattern", "constant", "parameter", "identifierOrParameter", - "limitCommand", "sortCommand", "orderExpression", "keepCommand", "dropCommand", - "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand", - "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue", - "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression", - "showCommand", "enrichCommand", "enrichWithClause", "lookupCommand", + "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", + "booleanExpression", "regexBooleanExpression", "matchBooleanExpression", + "valueExpression", "operatorExpression", "primaryExpression", "functionExpression", + "functionName", "dataType", "rowCommand", "fields", "field", "fromCommand", + "indexPattern", "clusterString", "indexString", "metadata", 
"metadataOption", + "deprecated_metadata", "metricsCommand", "evalCommand", "statsCommand", + "aggFields", "aggField", "qualifiedName", "qualifiedNamePattern", "qualifiedNamePatterns", + "identifier", "identifierPattern", "constant", "parameter", "identifierOrParameter", + "limitCommand", "sortCommand", "orderExpression", "keepCommand", "dropCommand", + "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand", + "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue", + "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression", + "showCommand", "enrichCommand", "enrichWithClause", "lookupCommand", "inlinestatsCommand" }; } @@ -104,46 +93,46 @@ private static String[] makeRuleNames() { private static String[] makeLiteralNames() { return new String[] { - null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", - "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", - "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, - "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "'::'", "','", - "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", "'like'", - "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", "')'", - "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", - "'-'", "'*'", "'/'", "'%'", "'match'", null, null, "']'", null, null, - null, null, null, null, null, null, "'metadata'", null, null, null, null, - null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, - null, null, null, null, null, null, null, null, null, "'info'", null, + null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", + "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", + "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, + "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "'::'", "','", + "'desc'", "'.'", 
"'false'", "'first'", "'in'", "'is'", "'last'", "'like'", + "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", "')'", + "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", + "'-'", "'*'", "'/'", "'%'", "'match'", null, null, "']'", null, null, + null, null, null, null, null, null, "'metadata'", null, null, null, null, + null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, + null, null, null, null, null, null, null, null, null, "'info'", null, null, null, "':'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); private static String[] makeSymbolicNames() { return new String[] { - null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", - "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", - "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "UNKNOWN_CMD", - "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", - "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", - "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT", - "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", - "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", - "PERCENT", "MATCH", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", - "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", - "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", - "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", - "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", - "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", - "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", - "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", - 
"MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", - "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", - "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", - "LOOKUP_FIELD_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", - "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", + "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", + "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "UNKNOWN_CMD", + "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", + "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", + "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT", + "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", + "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", + "PERCENT", "MATCH", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", + "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", + "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", + "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", + "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", + 
"SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", + "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", + "LOOKUP_FIELD_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", + "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS" }; } @@ -231,9 +220,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(120); + setState(124); query(0); - setState(121); + setState(125); match(EOF); } } @@ -255,7 +244,7 @@ public QueryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_query; } - + @SuppressWarnings("this-escape") public QueryContext() { } public void copyFrom(QueryContext ctx) { @@ -329,11 +318,11 @@ private QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(124); + setState(128); sourceCommand(); } _ctx.stop = _input.LT(-1); - setState(131); + setState(135); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -344,16 +333,16 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(126); + setState(130); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(127); + setState(131); match(PIPE); - setState(128); + setState(132); processingCommand(); } - } + } } - setState(133); + setState(137); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); } @@ -411,43 +400,43 @@ public final SourceCommandContext sourceCommand() throws RecognitionException { SourceCommandContext _localctx = new 
SourceCommandContext(_ctx, getState()); enterRule(_localctx, 4, RULE_sourceCommand); try { - setState(140); + setState(144); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(134); + setState(138); explainCommand(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(135); + setState(139); fromCommand(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(136); + setState(140); rowCommand(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(137); + setState(141); showCommand(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(138); + setState(142); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(139); + setState(143); metricsCommand(); } break; @@ -532,108 +521,108 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(158); + setState(162); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(142); + setState(146); evalCommand(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(143); + setState(147); whereCommand(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(144); + setState(148); keepCommand(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(145); + setState(149); limitCommand(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(146); + setState(150); statsCommand(); } break; case 6: enterOuterAlt(_localctx, 6); { - setState(147); + setState(151); sortCommand(); } break; case 7: enterOuterAlt(_localctx, 7); { - setState(148); + setState(152); dropCommand(); } break; case 8: enterOuterAlt(_localctx, 8); { - setState(149); + setState(153); renameCommand(); } break; case 9: 
enterOuterAlt(_localctx, 9); { - setState(150); + setState(154); dissectCommand(); } break; case 10: enterOuterAlt(_localctx, 10); { - setState(151); + setState(155); grokCommand(); } break; case 11: enterOuterAlt(_localctx, 11); { - setState(152); + setState(156); enrichCommand(); } break; case 12: enterOuterAlt(_localctx, 12); { - setState(153); + setState(157); mvExpandCommand(); } break; case 13: enterOuterAlt(_localctx, 13); { - setState(154); + setState(158); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(155); + setState(159); inlinestatsCommand(); } break; case 14: enterOuterAlt(_localctx, 14); { - setState(156); + setState(160); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(157); + setState(161); lookupCommand(); } break; @@ -682,9 +671,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(160); + setState(164); match(WHERE); - setState(161); + setState(165); booleanExpression(0); } } @@ -706,7 +695,7 @@ public BooleanExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_booleanExpression; } - + @SuppressWarnings("this-escape") public BooleanExpressionContext() { } public void copyFrom(BooleanExpressionContext ctx) { @@ -900,7 +889,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(193); + setState(197); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -909,9 +898,9 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(164); + setState(168); match(NOT); - setState(165); + setState(169); booleanExpression(8); } break; @@ -920,7 +909,7 @@ private 
BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(166); + setState(170); valueExpression(); } break; @@ -929,7 +918,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new RegexExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(167); + setState(171); regexBooleanExpression(); } break; @@ -938,41 +927,41 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(168); + setState(172); valueExpression(); - setState(170); + setState(174); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(169); + setState(173); match(NOT); } } - setState(172); + setState(176); match(IN); - setState(173); + setState(177); match(LP); - setState(174); + setState(178); valueExpression(); - setState(179); + setState(183); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(175); + setState(179); match(COMMA); - setState(176); + setState(180); valueExpression(); } } - setState(181); + setState(185); _errHandler.sync(this); _la = _input.LA(1); } - setState(182); + setState(186); match(RP); } break; @@ -981,21 +970,21 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(184); + setState(188); valueExpression(); - setState(185); + setState(189); match(IS); - setState(187); + setState(191); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(186); + setState(190); match(NOT); } } - setState(189); + setState(193); match(NULL); } break; @@ -1004,15 +993,15 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new MatchExpressionContext(_localctx); 
_ctx = _localctx; _prevctx = _localctx; - setState(191); + setState(195); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(192); + setState(196); matchBooleanExpression(); } break; } _ctx.stop = _input.LT(-1); - setState(203); + setState(207); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1020,7 +1009,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(201); + setState(205); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -1028,11 +1017,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(195); + setState(199); if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); - setState(196); + setState(200); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(197); + setState(201); ((LogicalBinaryContext)_localctx).right = booleanExpression(6); } break; @@ -1041,18 +1030,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(198); + setState(202); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(199); + setState(203); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(200); + setState(204); 
((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; } - } + } } - setState(205); + setState(209); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1107,48 +1096,48 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(220); + setState(224); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(206); + setState(210); valueExpression(); - setState(208); + setState(212); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(207); + setState(211); match(NOT); } } - setState(210); + setState(214); ((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(211); + setState(215); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(213); + setState(217); valueExpression(); - setState(215); + setState(219); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(214); + setState(218); match(NOT); } } - setState(217); + setState(221); ((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(218); + setState(222); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1201,11 +1190,11 @@ public final MatchBooleanExpressionContext matchBooleanExpression() throws Recog try { enterOuterAlt(_localctx, 1); { - setState(222); + setState(226); valueExpression(); - setState(223); + setState(227); match(MATCH); - setState(224); + setState(228); ((MatchBooleanExpressionContext)_localctx).queryString = string(); } } @@ -1227,7 +1216,7 @@ public ValueExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_valueExpression; } - + @SuppressWarnings("this-escape") public ValueExpressionContext() { } 
public void copyFrom(ValueExpressionContext ctx) { @@ -1289,14 +1278,14 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 16, RULE_valueExpression); try { - setState(231); + setState(235); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(226); + setState(230); operatorExpression(0); } break; @@ -1304,11 +1293,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(227); + setState(231); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(228); + setState(232); comparisonOperator(); - setState(229); + setState(233); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1332,7 +1321,7 @@ public OperatorExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_operatorExpression; } - + @SuppressWarnings("this-escape") public OperatorExpressionContext() { } public void copyFrom(OperatorExpressionContext ctx) { @@ -1433,7 +1422,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _alt; enterOuterAlt(_localctx, 1); { - setState(237); + setState(241); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: @@ -1442,7 +1431,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - setState(234); + setState(238); primaryExpression(0); } break; @@ -1451,7 +1440,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; 
_prevctx = _localctx; - setState(235); + setState(239); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1462,13 +1451,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(236); + setState(240); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(247); + setState(251); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1476,7 +1465,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(245); + setState(249); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1484,9 +1473,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(239); + setState(243); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(240); + setState(244); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 8070450532247928832L) != 0)) ) { @@ -1497,7 +1486,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(241); + setState(245); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1506,9 +1495,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new 
OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(242); + setState(246); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(243); + setState(247); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1519,14 +1508,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(244); + setState(248); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } - } + } } - setState(249); + setState(253); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); } @@ -1550,7 +1539,7 @@ public PrimaryExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_primaryExpression; } - + @SuppressWarnings("this-escape") public PrimaryExpressionContext() { } public void copyFrom(PrimaryExpressionContext ctx) { @@ -1684,7 +1673,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(258); + setState(262); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) { case 1: @@ -1693,7 +1682,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(251); + setState(255); constant(); } break; @@ -1702,7 +1691,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new DereferenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(252); + setState(256); qualifiedName(); } break; @@ -1711,7 +1700,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws 
RecognitionExc _localctx = new FunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(253); + setState(257); functionExpression(); } break; @@ -1720,17 +1709,17 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new ParenthesizedExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(254); + setState(258); match(LP); - setState(255); + setState(259); booleanExpression(0); - setState(256); + setState(260); match(RP); } break; } _ctx.stop = _input.LT(-1); - setState(265); + setState(269); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1741,16 +1730,16 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc { _localctx = new InlineCastContext(new PrimaryExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); - setState(260); + setState(264); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(261); + setState(265); match(CAST_OP); - setState(262); + setState(266); dataType(); } - } + } } - setState(267); + setState(271); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); } @@ -1812,37 +1801,37 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(268); + setState(272); functionName(); - setState(269); + setState(273); match(LP); - setState(279); + setState(283); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,19,_ctx) ) { case 1: { - setState(270); + setState(274); match(ASTERISK); } break; case 2: { { - setState(271); + setState(275); booleanExpression(0); - setState(276); + setState(280); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(272); + setState(276); 
match(COMMA); - setState(273); + setState(277); booleanExpression(0); } } - setState(278); + setState(282); _errHandler.sync(this); _la = _input.LA(1); } @@ -1850,7 +1839,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx } break; } - setState(281); + setState(285); match(RP); } } @@ -1895,13 +1884,13 @@ public final FunctionNameContext functionName() throws RecognitionException { FunctionNameContext _localctx = new FunctionNameContext(_ctx, getState()); enterRule(_localctx, 24, RULE_functionName); try { - setState(285); + setState(289); _errHandler.sync(this); switch (_input.LA(1)) { case MATCH: enterOuterAlt(_localctx, 1); { - setState(283); + setState(287); match(MATCH); } break; @@ -1911,7 +1900,7 @@ public final FunctionNameContext functionName() throws RecognitionException { case QUOTED_IDENTIFIER: enterOuterAlt(_localctx, 2); { - setState(284); + setState(288); identifierOrParameter(); } break; @@ -1937,7 +1926,7 @@ public DataTypeContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_dataType; } - + @SuppressWarnings("this-escape") public DataTypeContext() { } public void copyFrom(DataTypeContext ctx) { @@ -1973,7 +1962,7 @@ public final DataTypeContext dataType() throws RecognitionException { _localctx = new ToDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(287); + setState(291); identifier(); } } @@ -2020,9 +2009,9 @@ public final RowCommandContext rowCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(289); + setState(293); match(ROW); - setState(290); + setState(294); fields(); } } @@ -2076,23 +2065,23 @@ public final FieldsContext fields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(292); + setState(296); field(); - setState(297); + setState(301); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,21,_ctx); while ( _alt!=2 
&& _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(293); + setState(297); match(COMMA); - setState(294); + setState(298); field(); } - } + } } - setState(299); + setState(303); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,21,_ctx); } @@ -2142,28 +2131,23 @@ public final FieldContext field() throws RecognitionException { FieldContext _localctx = new FieldContext(_ctx, getState()); enterRule(_localctx, 32, RULE_field); try { - setState(305); + enterOuterAlt(_localctx, 1); + { + setState(307); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,22,_ctx) ) { case 1: - enterOuterAlt(_localctx, 1); - { - setState(300); - booleanExpression(0); - } - break; - case 2: - enterOuterAlt(_localctx, 2); { - setState(301); + setState(304); qualifiedName(); - setState(302); + setState(305); match(ASSIGN); - setState(303); - booleanExpression(0); } break; } + setState(309); + booleanExpression(0); + } } catch (RecognitionException re) { _localctx.exception = re; @@ -2219,34 +2203,34 @@ public final FromCommandContext fromCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(307); + setState(311); match(FROM); - setState(308); + setState(312); indexPattern(); - setState(313); + setState(317); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,23,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(309); + setState(313); match(COMMA); - setState(310); + setState(314); indexPattern(); } - } + } } - setState(315); + setState(319); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,23,_ctx); } - setState(317); + setState(321); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { - setState(316); + setState(320); metadata(); } break; @@ -2266,13 +2250,13 @@ public final FromCommandContext fromCommand() throws 
RecognitionException { @SuppressWarnings("CheckReturnValue") public static class IndexPatternContext extends ParserRuleContext { + public IndexStringContext indexString() { + return getRuleContext(IndexStringContext.class,0); + } public ClusterStringContext clusterString() { return getRuleContext(ClusterStringContext.class,0); } public TerminalNode COLON() { return getToken(EsqlBaseParser.COLON, 0); } - public IndexStringContext indexString() { - return getRuleContext(IndexStringContext.class,0); - } @SuppressWarnings("this-escape") public IndexPatternContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -2297,28 +2281,23 @@ public final IndexPatternContext indexPattern() throws RecognitionException { IndexPatternContext _localctx = new IndexPatternContext(_ctx, getState()); enterRule(_localctx, 36, RULE_indexPattern); try { - setState(324); + enterOuterAlt(_localctx, 1); + { + setState(326); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,25,_ctx) ) { case 1: - enterOuterAlt(_localctx, 1); { - setState(319); + setState(323); clusterString(); - setState(320); + setState(324); match(COLON); - setState(321); - indexString(); - } - break; - case 2: - enterOuterAlt(_localctx, 2); - { - setState(323); - indexString(); } break; } + setState(328); + indexString(); + } } catch (RecognitionException re) { _localctx.exception = re; @@ -2360,7 +2339,7 @@ public final ClusterStringContext clusterString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(326); + setState(330); match(UNQUOTED_SOURCE); } } @@ -2406,7 +2385,7 @@ public final IndexStringContext indexString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(328); + setState(332); _la = _input.LA(1); if ( !(_la==QUOTED_STRING || _la==UNQUOTED_SOURCE) ) { _errHandler.recoverInline(this); @@ -2461,20 +2440,20 @@ public final MetadataContext metadata() throws RecognitionException { MetadataContext 
_localctx = new MetadataContext(_ctx, getState()); enterRule(_localctx, 42, RULE_metadata); try { - setState(332); + setState(336); _errHandler.sync(this); switch (_input.LA(1)) { case METADATA: enterOuterAlt(_localctx, 1); { - setState(330); + setState(334); metadataOption(); } break; case OPENING_BRACKET: enterOuterAlt(_localctx, 2); { - setState(331); + setState(335); deprecated_metadata(); } break; @@ -2531,25 +2510,25 @@ public final MetadataOptionContext metadataOption() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(334); + setState(338); match(METADATA); - setState(335); + setState(339); match(UNQUOTED_SOURCE); - setState(340); + setState(344); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(336); + setState(340); match(COMMA); - setState(337); + setState(341); match(UNQUOTED_SOURCE); } - } + } } - setState(342); + setState(346); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); } @@ -2598,11 +2577,11 @@ public final Deprecated_metadataContext deprecated_metadata() throws Recognition try { enterOuterAlt(_localctx, 1); { - setState(343); + setState(347); match(OPENING_BRACKET); - setState(344); + setState(348); metadataOption(); - setState(345); + setState(349); match(CLOSING_BRACKET); } } @@ -2619,7 +2598,7 @@ public final Deprecated_metadataContext deprecated_metadata() throws Recognition @SuppressWarnings("CheckReturnValue") public static class MetricsCommandContext extends ParserRuleContext { - public FieldsContext aggregates; + public AggFieldsContext aggregates; public FieldsContext grouping; public TerminalNode DEV_METRICS() { return getToken(EsqlBaseParser.DEV_METRICS, 0); } public List indexPattern() { @@ -2633,11 +2612,11 @@ public TerminalNode COMMA(int i) { return getToken(EsqlBaseParser.COMMA, i); } public TerminalNode BY() { return 
getToken(EsqlBaseParser.BY, 0); } - public List fields() { - return getRuleContexts(FieldsContext.class); + public AggFieldsContext aggFields() { + return getRuleContext(AggFieldsContext.class,0); } - public FieldsContext fields(int i) { - return getRuleContext(FieldsContext.class,i); + public FieldsContext fields() { + return getRuleContext(FieldsContext.class,0); } @SuppressWarnings("this-escape") public MetricsCommandContext(ParserRuleContext parent, int invokingState) { @@ -2666,46 +2645,46 @@ public final MetricsCommandContext metricsCommand() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(347); + setState(351); match(DEV_METRICS); - setState(348); + setState(352); indexPattern(); - setState(353); + setState(357); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(349); + setState(353); match(COMMA); - setState(350); + setState(354); indexPattern(); } - } + } } - setState(355); + setState(359); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); } - setState(357); + setState(361); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { case 1: { - setState(356); - ((MetricsCommandContext)_localctx).aggregates = fields(); + setState(360); + ((MetricsCommandContext)_localctx).aggregates = aggFields(); } break; } - setState(361); + setState(365); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { case 1: { - setState(359); + setState(363); match(BY); - setState(360); + setState(364); ((MetricsCommandContext)_localctx).grouping = fields(); } break; @@ -2755,9 +2734,9 @@ public final EvalCommandContext evalCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(363); + setState(367); match(EVAL); - setState(364); + setState(368); fields(); } } @@ -2774,15 +2753,15 @@ 
public final EvalCommandContext evalCommand() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class StatsCommandContext extends ParserRuleContext { - public FieldsContext stats; + public AggFieldsContext stats; public FieldsContext grouping; public TerminalNode STATS() { return getToken(EsqlBaseParser.STATS, 0); } public TerminalNode BY() { return getToken(EsqlBaseParser.BY, 0); } - public List fields() { - return getRuleContexts(FieldsContext.class); + public AggFieldsContext aggFields() { + return getRuleContext(AggFieldsContext.class,0); } - public FieldsContext fields(int i) { - return getRuleContext(FieldsContext.class,i); + public FieldsContext fields() { + return getRuleContext(FieldsContext.class,0); } @SuppressWarnings("this-escape") public StatsCommandContext(ParserRuleContext parent, int invokingState) { @@ -2810,26 +2789,26 @@ public final StatsCommandContext statsCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(366); + setState(370); match(STATS); - setState(368); + setState(372); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(367); - ((StatsCommandContext)_localctx).stats = fields(); + setState(371); + ((StatsCommandContext)_localctx).stats = aggFields(); } break; } - setState(372); + setState(376); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { case 1: { - setState(370); + setState(374); match(BY); - setState(371); + setState(375); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2847,6 +2826,142 @@ public final StatsCommandContext statsCommand() throws RecognitionException { return _localctx; } + @SuppressWarnings("CheckReturnValue") + public static class AggFieldsContext extends ParserRuleContext { + public List aggField() { + return getRuleContexts(AggFieldContext.class); + } + public AggFieldContext aggField(int i) { + return 
getRuleContext(AggFieldContext.class,i); + } + public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } + public TerminalNode COMMA(int i) { + return getToken(EsqlBaseParser.COMMA, i); + } + @SuppressWarnings("this-escape") + public AggFieldsContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_aggFields; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterAggFields(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitAggFields(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitAggFields(this); + else return visitor.visitChildren(this); + } + } + + public final AggFieldsContext aggFields() throws RecognitionException { + AggFieldsContext _localctx = new AggFieldsContext(_ctx, getState()); + enterRule(_localctx, 54, RULE_aggFields); + try { + int _alt; + enterOuterAlt(_localctx, 1); + { + setState(378); + aggField(); + setState(383); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,33,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { + { + { + setState(379); + match(COMMA); + setState(380); + aggField(); + } + } + } + setState(385); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,33,_ctx); + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class AggFieldContext extends ParserRuleContext { + public FieldContext field() { + return 
getRuleContext(FieldContext.class,0); + } + public TerminalNode WHERE() { return getToken(EsqlBaseParser.WHERE, 0); } + public BooleanExpressionContext booleanExpression() { + return getRuleContext(BooleanExpressionContext.class,0); + } + @SuppressWarnings("this-escape") + public AggFieldContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_aggField; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterAggField(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitAggField(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitAggField(this); + else return visitor.visitChildren(this); + } + } + + public final AggFieldContext aggField() throws RecognitionException { + AggFieldContext _localctx = new AggFieldContext(_ctx, getState()); + enterRule(_localctx, 56, RULE_aggField); + try { + enterOuterAlt(_localctx, 1); + { + setState(386); + field(); + setState(387); + if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); + setState(390); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,34,_ctx) ) { + case 1: + { + setState(388); + match(WHERE); + setState(389); + booleanExpression(0); + } + break; + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + @SuppressWarnings("CheckReturnValue") public static class QualifiedNameContext extends ParserRuleContext { public List identifierOrParameter() { @@ -2881,30 +2996,30 @@ public T 
accept(ParseTreeVisitor visitor) { public final QualifiedNameContext qualifiedName() throws RecognitionException { QualifiedNameContext _localctx = new QualifiedNameContext(_ctx, getState()); - enterRule(_localctx, 54, RULE_qualifiedName); + enterRule(_localctx, 58, RULE_qualifiedName); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(374); + setState(392); identifierOrParameter(); - setState(379); + setState(397); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,33,_ctx); + _alt = getInterpreter().adaptivePredict(_input,35,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(375); + setState(393); match(DOT); - setState(376); + setState(394); identifierOrParameter(); } - } + } } - setState(381); + setState(399); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,33,_ctx); + _alt = getInterpreter().adaptivePredict(_input,35,_ctx); } } } @@ -2953,30 +3068,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNamePatternContext qualifiedNamePattern() throws RecognitionException { QualifiedNamePatternContext _localctx = new QualifiedNamePatternContext(_ctx, getState()); - enterRule(_localctx, 56, RULE_qualifiedNamePattern); + enterRule(_localctx, 60, RULE_qualifiedNamePattern); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(382); + setState(400); identifierPattern(); - setState(387); + setState(405); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,34,_ctx); + _alt = getInterpreter().adaptivePredict(_input,36,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(383); + setState(401); match(DOT); - setState(384); + setState(402); identifierPattern(); } - } + } } - setState(389); + setState(407); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,34,_ctx); + _alt = getInterpreter().adaptivePredict(_input,36,_ctx); } } 
} @@ -3025,30 +3140,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNamePatternsContext qualifiedNamePatterns() throws RecognitionException { QualifiedNamePatternsContext _localctx = new QualifiedNamePatternsContext(_ctx, getState()); - enterRule(_localctx, 58, RULE_qualifiedNamePatterns); + enterRule(_localctx, 62, RULE_qualifiedNamePatterns); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(390); + setState(408); qualifiedNamePattern(); - setState(395); + setState(413); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,35,_ctx); + _alt = getInterpreter().adaptivePredict(_input,37,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(391); + setState(409); match(COMMA); - setState(392); + setState(410); qualifiedNamePattern(); } - } + } } - setState(397); + setState(415); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,35,_ctx); + _alt = getInterpreter().adaptivePredict(_input,37,_ctx); } } } @@ -3089,12 +3204,12 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierContext identifier() throws RecognitionException { IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); - enterRule(_localctx, 60, RULE_identifier); + enterRule(_localctx, 64, RULE_identifier); int _la; try { enterOuterAlt(_localctx, 1); { - setState(398); + setState(416); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -3145,15 +3260,15 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierPatternContext identifierPattern() throws RecognitionException { IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); - enterRule(_localctx, 62, RULE_identifierPattern); + enterRule(_localctx, 66, RULE_identifierPattern); try { - setState(402); + setState(420); _errHandler.sync(this); switch (_input.LA(1)) { case ID_PATTERN: 
enterOuterAlt(_localctx, 1); { - setState(400); + setState(418); match(ID_PATTERN); } break; @@ -3161,7 +3276,7 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce case NAMED_OR_POSITIONAL_PARAM: enterOuterAlt(_localctx, 2); { - setState(401); + setState(419); parameter(); } break; @@ -3187,7 +3302,7 @@ public ConstantContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_constant; } - + @SuppressWarnings("this-escape") public ConstantContext() { } public void copyFrom(ConstantContext ctx) { @@ -3433,17 +3548,17 @@ public T accept(ParseTreeVisitor visitor) { public final ConstantContext constant() throws RecognitionException { ConstantContext _localctx = new ConstantContext(_ctx, getState()); - enterRule(_localctx, 64, RULE_constant); + enterRule(_localctx, 68, RULE_constant); int _la; try { - setState(446); + setState(464); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,40,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,42,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(404); + setState(422); match(NULL); } break; @@ -3451,9 +3566,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(405); + setState(423); integerValue(); - setState(406); + setState(424); match(UNQUOTED_IDENTIFIER); } break; @@ -3461,7 +3576,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(408); + setState(426); decimalValue(); } break; @@ -3469,7 +3584,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(409); + setState(427); 
integerValue(); } break; @@ -3477,7 +3592,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(410); + setState(428); booleanValue(); } break; @@ -3485,7 +3600,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParameterContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(411); + setState(429); parameter(); } break; @@ -3493,7 +3608,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(412); + setState(430); string(); } break; @@ -3501,27 +3616,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(413); + setState(431); match(OPENING_BRACKET); - setState(414); + setState(432); numericValue(); - setState(419); + setState(437); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(415); + setState(433); match(COMMA); - setState(416); + setState(434); numericValue(); } } - setState(421); + setState(439); _errHandler.sync(this); _la = _input.LA(1); } - setState(422); + setState(440); match(CLOSING_BRACKET); } break; @@ -3529,27 +3644,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(424); + setState(442); match(OPENING_BRACKET); - setState(425); + setState(443); booleanValue(); - setState(430); + setState(448); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(426); + setState(444); match(COMMA); - setState(427); + setState(445); booleanValue(); } } - setState(432); + setState(450); _errHandler.sync(this); _la = _input.LA(1); } - setState(433); + setState(451); match(CLOSING_BRACKET); } break; @@ 
-3557,27 +3672,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(435); + setState(453); match(OPENING_BRACKET); - setState(436); + setState(454); string(); - setState(441); + setState(459); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(437); + setState(455); match(COMMA); - setState(438); + setState(456); string(); } } - setState(443); + setState(461); _errHandler.sync(this); _la = _input.LA(1); } - setState(444); + setState(462); match(CLOSING_BRACKET); } break; @@ -3601,7 +3716,7 @@ public ParameterContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_parameter; } - + @SuppressWarnings("this-escape") public ParameterContext() { } public void copyFrom(ParameterContext ctx) { @@ -3649,16 +3764,16 @@ public T accept(ParseTreeVisitor visitor) { public final ParameterContext parameter() throws RecognitionException { ParameterContext _localctx = new ParameterContext(_ctx, getState()); - enterRule(_localctx, 66, RULE_parameter); + enterRule(_localctx, 70, RULE_parameter); try { - setState(450); + setState(468); _errHandler.sync(this); switch (_input.LA(1)) { case PARAM: _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(448); + setState(466); match(PARAM); } break; @@ -3666,7 +3781,7 @@ public final ParameterContext parameter() throws RecognitionException { _localctx = new InputNamedOrPositionalParamContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(449); + setState(467); match(NAMED_OR_POSITIONAL_PARAM); } break; @@ -3715,16 +3830,16 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierOrParameterContext identifierOrParameter() throws RecognitionException { IdentifierOrParameterContext _localctx = new IdentifierOrParameterContext(_ctx, getState()); - 
enterRule(_localctx, 68, RULE_identifierOrParameter); + enterRule(_localctx, 72, RULE_identifierOrParameter); try { - setState(454); + setState(472); _errHandler.sync(this); switch (_input.LA(1)) { case UNQUOTED_IDENTIFIER: case QUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(452); + setState(470); identifier(); } break; @@ -3732,7 +3847,7 @@ public final IdentifierOrParameterContext identifierOrParameter() throws Recogni case NAMED_OR_POSITIONAL_PARAM: enterOuterAlt(_localctx, 2); { - setState(453); + setState(471); parameter(); } break; @@ -3777,13 +3892,13 @@ public T accept(ParseTreeVisitor visitor) { public final LimitCommandContext limitCommand() throws RecognitionException { LimitCommandContext _localctx = new LimitCommandContext(_ctx, getState()); - enterRule(_localctx, 70, RULE_limitCommand); + enterRule(_localctx, 74, RULE_limitCommand); try { enterOuterAlt(_localctx, 1); { - setState(456); + setState(474); match(LIMIT); - setState(457); + setState(475); match(INTEGER_LITERAL); } } @@ -3833,32 +3948,32 @@ public T accept(ParseTreeVisitor visitor) { public final SortCommandContext sortCommand() throws RecognitionException { SortCommandContext _localctx = new SortCommandContext(_ctx, getState()); - enterRule(_localctx, 72, RULE_sortCommand); + enterRule(_localctx, 76, RULE_sortCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(459); + setState(477); match(SORT); - setState(460); + setState(478); orderExpression(); - setState(465); + setState(483); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,43,_ctx); + _alt = getInterpreter().adaptivePredict(_input,45,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(461); + setState(479); match(COMMA); - setState(462); + setState(480); orderExpression(); } - } + } } - setState(467); + setState(485); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,43,_ctx); + _alt = 
getInterpreter().adaptivePredict(_input,45,_ctx); } } } @@ -3907,19 +4022,19 @@ public T accept(ParseTreeVisitor visitor) { public final OrderExpressionContext orderExpression() throws RecognitionException { OrderExpressionContext _localctx = new OrderExpressionContext(_ctx, getState()); - enterRule(_localctx, 74, RULE_orderExpression); + enterRule(_localctx, 78, RULE_orderExpression); int _la; try { enterOuterAlt(_localctx, 1); { - setState(468); + setState(486); booleanExpression(0); - setState(470); + setState(488); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,44,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { case 1: { - setState(469); + setState(487); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -3933,14 +4048,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(474); + setState(492); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { case 1: { - setState(472); + setState(490); match(NULLS); - setState(473); + setState(491); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -3995,13 +4110,13 @@ public T accept(ParseTreeVisitor visitor) { public final KeepCommandContext keepCommand() throws RecognitionException { KeepCommandContext _localctx = new KeepCommandContext(_ctx, getState()); - enterRule(_localctx, 76, RULE_keepCommand); + enterRule(_localctx, 80, RULE_keepCommand); try { enterOuterAlt(_localctx, 1); { - setState(476); + setState(494); match(KEEP); - setState(477); + setState(495); qualifiedNamePatterns(); } } @@ -4044,13 +4159,13 @@ public T accept(ParseTreeVisitor visitor) { public final DropCommandContext dropCommand() throws RecognitionException { DropCommandContext _localctx = new 
DropCommandContext(_ctx, getState()); - enterRule(_localctx, 78, RULE_dropCommand); + enterRule(_localctx, 82, RULE_dropCommand); try { enterOuterAlt(_localctx, 1); { - setState(479); + setState(497); match(DROP); - setState(480); + setState(498); qualifiedNamePatterns(); } } @@ -4100,32 +4215,32 @@ public T accept(ParseTreeVisitor visitor) { public final RenameCommandContext renameCommand() throws RecognitionException { RenameCommandContext _localctx = new RenameCommandContext(_ctx, getState()); - enterRule(_localctx, 80, RULE_renameCommand); + enterRule(_localctx, 84, RULE_renameCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(482); + setState(500); match(RENAME); - setState(483); + setState(501); renameClause(); - setState(488); + setState(506); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,46,_ctx); + _alt = getInterpreter().adaptivePredict(_input,48,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(484); + setState(502); match(COMMA); - setState(485); + setState(503); renameClause(); } - } + } } - setState(490); + setState(508); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,46,_ctx); + _alt = getInterpreter().adaptivePredict(_input,48,_ctx); } } } @@ -4173,15 +4288,15 @@ public T accept(ParseTreeVisitor visitor) { public final RenameClauseContext renameClause() throws RecognitionException { RenameClauseContext _localctx = new RenameClauseContext(_ctx, getState()); - enterRule(_localctx, 82, RULE_renameClause); + enterRule(_localctx, 86, RULE_renameClause); try { enterOuterAlt(_localctx, 1); { - setState(491); + setState(509); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(492); + setState(510); match(AS); - setState(493); + setState(511); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -4230,22 +4345,22 @@ public T accept(ParseTreeVisitor visitor) { public final 
DissectCommandContext dissectCommand() throws RecognitionException { DissectCommandContext _localctx = new DissectCommandContext(_ctx, getState()); - enterRule(_localctx, 84, RULE_dissectCommand); + enterRule(_localctx, 88, RULE_dissectCommand); try { enterOuterAlt(_localctx, 1); { - setState(495); + setState(513); match(DISSECT); - setState(496); + setState(514); primaryExpression(0); - setState(497); + setState(515); string(); - setState(499); + setState(517); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,49,_ctx) ) { case 1: { - setState(498); + setState(516); commandOptions(); } break; @@ -4294,15 +4409,15 @@ public T accept(ParseTreeVisitor visitor) { public final GrokCommandContext grokCommand() throws RecognitionException { GrokCommandContext _localctx = new GrokCommandContext(_ctx, getState()); - enterRule(_localctx, 86, RULE_grokCommand); + enterRule(_localctx, 90, RULE_grokCommand); try { enterOuterAlt(_localctx, 1); { - setState(501); + setState(519); match(GROK); - setState(502); + setState(520); primaryExpression(0); - setState(503); + setState(521); string(); } } @@ -4345,13 +4460,13 @@ public T accept(ParseTreeVisitor visitor) { public final MvExpandCommandContext mvExpandCommand() throws RecognitionException { MvExpandCommandContext _localctx = new MvExpandCommandContext(_ctx, getState()); - enterRule(_localctx, 88, RULE_mvExpandCommand); + enterRule(_localctx, 92, RULE_mvExpandCommand); try { enterOuterAlt(_localctx, 1); { - setState(505); + setState(523); match(MV_EXPAND); - setState(506); + setState(524); qualifiedName(); } } @@ -4400,30 +4515,30 @@ public T accept(ParseTreeVisitor visitor) { public final CommandOptionsContext commandOptions() throws RecognitionException { CommandOptionsContext _localctx = new CommandOptionsContext(_ctx, getState()); - enterRule(_localctx, 90, RULE_commandOptions); + enterRule(_localctx, 94, RULE_commandOptions); try { 
int _alt; enterOuterAlt(_localctx, 1); { - setState(508); + setState(526); commandOption(); - setState(513); + setState(531); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,48,_ctx); + _alt = getInterpreter().adaptivePredict(_input,50,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(509); + setState(527); match(COMMA); - setState(510); + setState(528); commandOption(); } - } + } } - setState(515); + setState(533); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,48,_ctx); + _alt = getInterpreter().adaptivePredict(_input,50,_ctx); } } } @@ -4469,15 +4584,15 @@ public T accept(ParseTreeVisitor visitor) { public final CommandOptionContext commandOption() throws RecognitionException { CommandOptionContext _localctx = new CommandOptionContext(_ctx, getState()); - enterRule(_localctx, 92, RULE_commandOption); + enterRule(_localctx, 96, RULE_commandOption); try { enterOuterAlt(_localctx, 1); { - setState(516); + setState(534); identifier(); - setState(517); + setState(535); match(ASSIGN); - setState(518); + setState(536); constant(); } } @@ -4518,12 +4633,12 @@ public T accept(ParseTreeVisitor visitor) { public final BooleanValueContext booleanValue() throws RecognitionException { BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState()); - enterRule(_localctx, 94, RULE_booleanValue); + enterRule(_localctx, 98, RULE_booleanValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(520); + setState(538); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4576,22 +4691,22 @@ public T accept(ParseTreeVisitor visitor) { public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); - enterRule(_localctx, 96, RULE_numericValue); + enterRule(_localctx, 100, RULE_numericValue); try { - setState(524); + 
setState(542); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,49,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,51,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(522); + setState(540); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(523); + setState(541); integerValue(); } break; @@ -4635,17 +4750,17 @@ public T accept(ParseTreeVisitor visitor) { public final DecimalValueContext decimalValue() throws RecognitionException { DecimalValueContext _localctx = new DecimalValueContext(_ctx, getState()); - enterRule(_localctx, 98, RULE_decimalValue); + enterRule(_localctx, 102, RULE_decimalValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(527); + setState(545); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(526); + setState(544); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4658,7 +4773,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(529); + setState(547); match(DECIMAL_LITERAL); } } @@ -4700,17 +4815,17 @@ public T accept(ParseTreeVisitor visitor) { public final IntegerValueContext integerValue() throws RecognitionException { IntegerValueContext _localctx = new IntegerValueContext(_ctx, getState()); - enterRule(_localctx, 100, RULE_integerValue); + enterRule(_localctx, 104, RULE_integerValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(532); + setState(550); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(531); + setState(549); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4723,7 +4838,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(534); + setState(552); match(INTEGER_LITERAL); } } @@ -4763,11 +4878,11 @@ public T accept(ParseTreeVisitor visitor) { public final StringContext string() throws 
RecognitionException { StringContext _localctx = new StringContext(_ctx, getState()); - enterRule(_localctx, 102, RULE_string); + enterRule(_localctx, 106, RULE_string); try { enterOuterAlt(_localctx, 1); { - setState(536); + setState(554); match(QUOTED_STRING); } } @@ -4812,12 +4927,12 @@ public T accept(ParseTreeVisitor visitor) { public final ComparisonOperatorContext comparisonOperator() throws RecognitionException { ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState()); - enterRule(_localctx, 104, RULE_comparisonOperator); + enterRule(_localctx, 108, RULE_comparisonOperator); int _la; try { enterOuterAlt(_localctx, 1); { - setState(538); + setState(556); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 281474976710656000L) != 0)) ) { _errHandler.recoverInline(this); @@ -4868,13 +4983,13 @@ public T accept(ParseTreeVisitor visitor) { public final ExplainCommandContext explainCommand() throws RecognitionException { ExplainCommandContext _localctx = new ExplainCommandContext(_ctx, getState()); - enterRule(_localctx, 106, RULE_explainCommand); + enterRule(_localctx, 110, RULE_explainCommand); try { enterOuterAlt(_localctx, 1); { - setState(540); + setState(558); match(EXPLAIN); - setState(541); + setState(559); subqueryExpression(); } } @@ -4918,15 +5033,15 @@ public T accept(ParseTreeVisitor visitor) { public final SubqueryExpressionContext subqueryExpression() throws RecognitionException { SubqueryExpressionContext _localctx = new SubqueryExpressionContext(_ctx, getState()); - enterRule(_localctx, 108, RULE_subqueryExpression); + enterRule(_localctx, 112, RULE_subqueryExpression); try { enterOuterAlt(_localctx, 1); { - setState(543); + setState(561); match(OPENING_BRACKET); - setState(544); + setState(562); query(0); - setState(545); + setState(563); match(CLOSING_BRACKET); } } @@ -4948,7 +5063,7 @@ public ShowCommandContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override 
public int getRuleIndex() { return RULE_showCommand; } - + @SuppressWarnings("this-escape") public ShowCommandContext() { } public void copyFrom(ShowCommandContext ctx) { @@ -4978,14 +5093,14 @@ public T accept(ParseTreeVisitor visitor) { public final ShowCommandContext showCommand() throws RecognitionException { ShowCommandContext _localctx = new ShowCommandContext(_ctx, getState()); - enterRule(_localctx, 110, RULE_showCommand); + enterRule(_localctx, 114, RULE_showCommand); try { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(547); + setState(565); match(SHOW); - setState(548); + setState(566); match(INFO); } } @@ -5043,53 +5158,53 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichCommandContext enrichCommand() throws RecognitionException { EnrichCommandContext _localctx = new EnrichCommandContext(_ctx, getState()); - enterRule(_localctx, 112, RULE_enrichCommand); + enterRule(_localctx, 116, RULE_enrichCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(550); + setState(568); match(ENRICH); - setState(551); + setState(569); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(554); + setState(572); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,52,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,54,_ctx) ) { case 1: { - setState(552); + setState(570); match(ON); - setState(553); + setState(571); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(565); + setState(583); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,54,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,56,_ctx) ) { case 1: { - setState(556); + setState(574); match(WITH); - setState(557); + setState(575); enrichWithClause(); - setState(562); + setState(580); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,53,_ctx); + _alt = 
getInterpreter().adaptivePredict(_input,55,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(558); + setState(576); match(COMMA); - setState(559); + setState(577); enrichWithClause(); } - } + } } - setState(564); + setState(582); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,53,_ctx); + _alt = getInterpreter().adaptivePredict(_input,55,_ctx); } } break; @@ -5140,23 +5255,23 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichWithClauseContext enrichWithClause() throws RecognitionException { EnrichWithClauseContext _localctx = new EnrichWithClauseContext(_ctx, getState()); - enterRule(_localctx, 114, RULE_enrichWithClause); + enterRule(_localctx, 118, RULE_enrichWithClause); try { enterOuterAlt(_localctx, 1); { - setState(570); + setState(588); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,55,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,57,_ctx) ) { case 1: { - setState(567); + setState(585); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(568); + setState(586); match(ASSIGN); } break; } - setState(572); + setState(590); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -5205,17 +5320,17 @@ public T accept(ParseTreeVisitor visitor) { public final LookupCommandContext lookupCommand() throws RecognitionException { LookupCommandContext _localctx = new LookupCommandContext(_ctx, getState()); - enterRule(_localctx, 116, RULE_lookupCommand); + enterRule(_localctx, 120, RULE_lookupCommand); try { enterOuterAlt(_localctx, 1); { - setState(574); + setState(592); match(DEV_LOOKUP); - setState(575); + setState(593); ((LookupCommandContext)_localctx).tableName = indexPattern(); - setState(576); + setState(594); match(ON); - setState(577); + setState(595); ((LookupCommandContext)_localctx).matchFields = qualifiedNamePatterns(); } } @@ -5232,16 +5347,16 @@ public 
final LookupCommandContext lookupCommand() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class InlinestatsCommandContext extends ParserRuleContext { - public FieldsContext stats; + public AggFieldsContext stats; public FieldsContext grouping; public TerminalNode DEV_INLINESTATS() { return getToken(EsqlBaseParser.DEV_INLINESTATS, 0); } - public List fields() { - return getRuleContexts(FieldsContext.class); - } - public FieldsContext fields(int i) { - return getRuleContext(FieldsContext.class,i); + public AggFieldsContext aggFields() { + return getRuleContext(AggFieldsContext.class,0); } public TerminalNode BY() { return getToken(EsqlBaseParser.BY, 0); } + public FieldsContext fields() { + return getRuleContext(FieldsContext.class,0); + } @SuppressWarnings("this-escape") public InlinestatsCommandContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -5264,22 +5379,22 @@ public T accept(ParseTreeVisitor visitor) { public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionException { InlinestatsCommandContext _localctx = new InlinestatsCommandContext(_ctx, getState()); - enterRule(_localctx, 118, RULE_inlinestatsCommand); + enterRule(_localctx, 122, RULE_inlinestatsCommand); try { enterOuterAlt(_localctx, 1); { - setState(579); + setState(597); match(DEV_INLINESTATS); - setState(580); - ((InlinestatsCommandContext)_localctx).stats = fields(); - setState(583); + setState(598); + ((InlinestatsCommandContext)_localctx).stats = aggFields(); + setState(601); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,56,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,58,_ctx) ) { case 1: { - setState(581); + setState(599); match(BY); - setState(582); + setState(600); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -5311,6 +5426,8 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return 
operatorExpression_sempred((OperatorExpressionContext)_localctx, predIndex); case 10: return primaryExpression_sempred((PrimaryExpressionContext)_localctx, predIndex); + case 28: + return aggField_sempred((AggFieldContext)_localctx, predIndex); } return true; } @@ -5364,9 +5481,16 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in } return true; } + private boolean aggField_sempred(AggFieldContext _localctx, int predIndex) { + switch (predIndex) { + case 10: + return this.isDevVersion(); + } + return true; + } public static final String _serializedATN = - "\u0004\u0001x\u024a\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0004\u0001x\u025c\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ @@ -5381,361 +5505,373 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0002"+ "2\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u0002"+ - "7\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007;\u0001"+ - "\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0005\u0001\u0082\b\u0001\n\u0001\f\u0001"+ - "\u0085\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ - "\u0001\u0002\u0003\u0002\u008d\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0003\u0003\u009f\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004"+ - 
"\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0003\u0005\u00ab\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0005\u0005\u00b2\b\u0005\n\u0005\f\u0005\u00b5"+ - "\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003"+ - "\u0005\u00bc\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003"+ - "\u0005\u00c2\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0005\u0005\u00ca\b\u0005\n\u0005\f\u0005\u00cd\t\u0005"+ - "\u0001\u0006\u0001\u0006\u0003\u0006\u00d1\b\u0006\u0001\u0006\u0001\u0006"+ - "\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00d8\b\u0006\u0001\u0006"+ - "\u0001\u0006\u0001\u0006\u0003\u0006\u00dd\b\u0006\u0001\u0007\u0001\u0007"+ - "\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0003"+ - "\b\u00e8\b\b\u0001\t\u0001\t\u0001\t\u0001\t\u0003\t\u00ee\b\t\u0001\t"+ - "\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0005\t\u00f6\b\t\n\t\f\t\u00f9"+ - "\t\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0003"+ - "\n\u0103\b\n\u0001\n\u0001\n\u0001\n\u0005\n\u0108\b\n\n\n\f\n\u010b\t"+ - "\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ - "\u0005\u000b\u0113\b\u000b\n\u000b\f\u000b\u0116\t\u000b\u0003\u000b\u0118"+ - "\b\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0003\f\u011e\b\f\u0001"+ - "\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f"+ - "\u0001\u000f\u0005\u000f\u0128\b\u000f\n\u000f\f\u000f\u012b\t\u000f\u0001"+ - "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0003\u0010\u0132"+ - "\b\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0005\u0011\u0138"+ - "\b\u0011\n\u0011\f\u0011\u013b\t\u0011\u0001\u0011\u0003\u0011\u013e\b"+ - "\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0003"+ - "\u0012\u0145\b\u0012\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001"+ - 
"\u0015\u0001\u0015\u0003\u0015\u014d\b\u0015\u0001\u0016\u0001\u0016\u0001"+ - "\u0016\u0001\u0016\u0005\u0016\u0153\b\u0016\n\u0016\f\u0016\u0156\t\u0016"+ - "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018"+ - "\u0001\u0018\u0001\u0018\u0005\u0018\u0160\b\u0018\n\u0018\f\u0018\u0163"+ - "\t\u0018\u0001\u0018\u0003\u0018\u0166\b\u0018\u0001\u0018\u0001\u0018"+ - "\u0003\u0018\u016a\b\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u001a"+ - "\u0001\u001a\u0003\u001a\u0171\b\u001a\u0001\u001a\u0001\u001a\u0003\u001a"+ - "\u0175\b\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0005\u001b\u017a\b"+ - "\u001b\n\u001b\f\u001b\u017d\t\u001b\u0001\u001c\u0001\u001c\u0001\u001c"+ - "\u0005\u001c\u0182\b\u001c\n\u001c\f\u001c\u0185\t\u001c\u0001\u001d\u0001"+ - "\u001d\u0001\u001d\u0005\u001d\u018a\b\u001d\n\u001d\f\u001d\u018d\t\u001d"+ - "\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0003\u001f\u0193\b\u001f"+ - "\u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0001"+ - " \u0001 \u0001 \u0001 \u0005 \u01a2\b \n \f \u01a5\t \u0001 \u0001 \u0001"+ - " \u0001 \u0001 \u0001 \u0005 \u01ad\b \n \f \u01b0\t \u0001 \u0001 \u0001"+ - " \u0001 \u0001 \u0001 \u0005 \u01b8\b \n \f \u01bb\t \u0001 \u0001 \u0003"+ - " \u01bf\b \u0001!\u0001!\u0003!\u01c3\b!\u0001\"\u0001\"\u0003\"\u01c7"+ - "\b\"\u0001#\u0001#\u0001#\u0001$\u0001$\u0001$\u0001$\u0005$\u01d0\b$"+ - "\n$\f$\u01d3\t$\u0001%\u0001%\u0003%\u01d7\b%\u0001%\u0001%\u0003%\u01db"+ - "\b%\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001\'\u0001(\u0001(\u0001("+ - "\u0001(\u0005(\u01e7\b(\n(\f(\u01ea\t(\u0001)\u0001)\u0001)\u0001)\u0001"+ - "*\u0001*\u0001*\u0001*\u0003*\u01f4\b*\u0001+\u0001+\u0001+\u0001+\u0001"+ - ",\u0001,\u0001,\u0001-\u0001-\u0001-\u0005-\u0200\b-\n-\f-\u0203\t-\u0001"+ - ".\u0001.\u0001.\u0001.\u0001/\u0001/\u00010\u00010\u00030\u020d\b0\u0001"+ - "1\u00031\u0210\b1\u00011\u00011\u00012\u00032\u0215\b2\u00012\u00012\u0001"+ - 
"3\u00013\u00014\u00014\u00015\u00015\u00015\u00016\u00016\u00016\u0001"+ - "6\u00017\u00017\u00017\u00018\u00018\u00018\u00018\u00038\u022b\b8\u0001"+ - "8\u00018\u00018\u00018\u00058\u0231\b8\n8\f8\u0234\t8\u00038\u0236\b8"+ - "\u00019\u00019\u00019\u00039\u023b\b9\u00019\u00019\u0001:\u0001:\u0001"+ - ":\u0001:\u0001:\u0001;\u0001;\u0001;\u0001;\u0003;\u0248\b;\u0001;\u0000"+ - "\u0004\u0002\n\u0012\u0014<\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ + "7\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007;\u0002"+ + "<\u0007<\u0002=\u0007=\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001"+ + "\u0086\b\u0001\n\u0001\f\u0001\u0089\t\u0001\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002\u0091\b\u0002\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003\u00a3\b\u0003\u0001"+ + "\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00af\b\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00b6"+ + "\b\u0005\n\u0005\f\u0005\u00b9\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0003\u0005\u00c0\b\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0003\u0005\u00c6\b\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00ce\b\u0005"+ + "\n\u0005\f\u0005\u00d1\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006\u00d5"+ + "\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003"+ + "\u0006\u00dc\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00e1"+ + "\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001"+ + 
"\b\u0001\b\u0001\b\u0001\b\u0003\b\u00ec\b\b\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0003\t\u00f2\b\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0005"+ + "\t\u00fa\b\t\n\t\f\t\u00fd\t\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n"+ + "\u0001\n\u0001\n\u0001\n\u0003\n\u0107\b\n\u0001\n\u0001\n\u0001\n\u0005"+ + "\n\u010c\b\n\n\n\f\n\u010f\t\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ + "\u000b\u0001\u000b\u0001\u000b\u0005\u000b\u0117\b\u000b\n\u000b\f\u000b"+ + "\u011a\t\u000b\u0003\u000b\u011c\b\u000b\u0001\u000b\u0001\u000b\u0001"+ + "\f\u0001\f\u0003\f\u0122\b\f\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001"+ + "\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0005\u000f\u012c\b\u000f\n"+ + "\u000f\f\u000f\u012f\t\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0003"+ + "\u0010\u0134\b\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001"+ + "\u0011\u0001\u0011\u0005\u0011\u013c\b\u0011\n\u0011\f\u0011\u013f\t\u0011"+ + "\u0001\u0011\u0003\u0011\u0142\b\u0011\u0001\u0012\u0001\u0012\u0001\u0012"+ + "\u0003\u0012\u0147\b\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013"+ + "\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0003\u0015\u0151\b\u0015"+ + "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u0157\b\u0016"+ + "\n\u0016\f\u0016\u015a\t\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ + "\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018\u0164"+ + "\b\u0018\n\u0018\f\u0018\u0167\t\u0018\u0001\u0018\u0003\u0018\u016a\b"+ + "\u0018\u0001\u0018\u0001\u0018\u0003\u0018\u016e\b\u0018\u0001\u0019\u0001"+ + "\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0003\u001a\u0175\b\u001a\u0001"+ + "\u001a\u0001\u001a\u0003\u001a\u0179\b\u001a\u0001\u001b\u0001\u001b\u0001"+ + "\u001b\u0005\u001b\u017e\b\u001b\n\u001b\f\u001b\u0181\t\u001b\u0001\u001c"+ + "\u0001\u001c\u0001\u001c\u0001\u001c\u0003\u001c\u0187\b\u001c\u0001\u001d"+ + "\u0001\u001d\u0001\u001d\u0005\u001d\u018c\b\u001d\n\u001d\f\u001d\u018f"+ + 
"\t\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0005\u001e\u0194\b\u001e"+ + "\n\u001e\f\u001e\u0197\t\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0005"+ + "\u001f\u019c\b\u001f\n\u001f\f\u001f\u019f\t\u001f\u0001 \u0001 \u0001"+ + "!\u0001!\u0003!\u01a5\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ + "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01b4"+ + "\b\"\n\"\f\"\u01b7\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\""+ + "\u0005\"\u01bf\b\"\n\"\f\"\u01c2\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ + "\"\u0001\"\u0005\"\u01ca\b\"\n\"\f\"\u01cd\t\"\u0001\"\u0001\"\u0003\""+ + "\u01d1\b\"\u0001#\u0001#\u0003#\u01d5\b#\u0001$\u0001$\u0003$\u01d9\b"+ + "$\u0001%\u0001%\u0001%\u0001&\u0001&\u0001&\u0001&\u0005&\u01e2\b&\n&"+ + "\f&\u01e5\t&\u0001\'\u0001\'\u0003\'\u01e9\b\'\u0001\'\u0001\'\u0003\'"+ + "\u01ed\b\'\u0001(\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001"+ + "*\u0001*\u0005*\u01f9\b*\n*\f*\u01fc\t*\u0001+\u0001+\u0001+\u0001+\u0001"+ + ",\u0001,\u0001,\u0001,\u0003,\u0206\b,\u0001-\u0001-\u0001-\u0001-\u0001"+ + ".\u0001.\u0001.\u0001/\u0001/\u0001/\u0005/\u0212\b/\n/\f/\u0215\t/\u0001"+ + "0\u00010\u00010\u00010\u00011\u00011\u00012\u00012\u00032\u021f\b2\u0001"+ + "3\u00033\u0222\b3\u00013\u00013\u00014\u00034\u0227\b4\u00014\u00014\u0001"+ + "5\u00015\u00016\u00016\u00017\u00017\u00017\u00018\u00018\u00018\u0001"+ + "8\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0003:\u023d\b:\u0001"+ + ":\u0001:\u0001:\u0001:\u0005:\u0243\b:\n:\f:\u0246\t:\u0003:\u0248\b:"+ + "\u0001;\u0001;\u0001;\u0003;\u024d\b;\u0001;\u0001;\u0001<\u0001<\u0001"+ + "<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001=\u0003=\u025a\b=\u0001=\u0000"+ + "\u0004\u0002\n\u0012\u0014>\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ "\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPR"+ - "TVXZ\\^`bdfhjlnprtv\u0000\b\u0001\u0000:;\u0001\u0000<>\u0002\u0000\u0019"+ - 
"\u0019LL\u0001\u0000CD\u0002\u0000\u001e\u001e\"\"\u0002\u0000%%((\u0002"+ - "\u0000$$22\u0002\u00003359\u0264\u0000x\u0001\u0000\u0000\u0000\u0002"+ - "{\u0001\u0000\u0000\u0000\u0004\u008c\u0001\u0000\u0000\u0000\u0006\u009e"+ - "\u0001\u0000\u0000\u0000\b\u00a0\u0001\u0000\u0000\u0000\n\u00c1\u0001"+ - "\u0000\u0000\u0000\f\u00dc\u0001\u0000\u0000\u0000\u000e\u00de\u0001\u0000"+ - "\u0000\u0000\u0010\u00e7\u0001\u0000\u0000\u0000\u0012\u00ed\u0001\u0000"+ - "\u0000\u0000\u0014\u0102\u0001\u0000\u0000\u0000\u0016\u010c\u0001\u0000"+ - "\u0000\u0000\u0018\u011d\u0001\u0000\u0000\u0000\u001a\u011f\u0001\u0000"+ - "\u0000\u0000\u001c\u0121\u0001\u0000\u0000\u0000\u001e\u0124\u0001\u0000"+ - "\u0000\u0000 \u0131\u0001\u0000\u0000\u0000\"\u0133\u0001\u0000\u0000"+ - "\u0000$\u0144\u0001\u0000\u0000\u0000&\u0146\u0001\u0000\u0000\u0000("+ - "\u0148\u0001\u0000\u0000\u0000*\u014c\u0001\u0000\u0000\u0000,\u014e\u0001"+ - "\u0000\u0000\u0000.\u0157\u0001\u0000\u0000\u00000\u015b\u0001\u0000\u0000"+ - "\u00002\u016b\u0001\u0000\u0000\u00004\u016e\u0001\u0000\u0000\u00006"+ - "\u0176\u0001\u0000\u0000\u00008\u017e\u0001\u0000\u0000\u0000:\u0186\u0001"+ - "\u0000\u0000\u0000<\u018e\u0001\u0000\u0000\u0000>\u0192\u0001\u0000\u0000"+ - "\u0000@\u01be\u0001\u0000\u0000\u0000B\u01c2\u0001\u0000\u0000\u0000D"+ - "\u01c6\u0001\u0000\u0000\u0000F\u01c8\u0001\u0000\u0000\u0000H\u01cb\u0001"+ - "\u0000\u0000\u0000J\u01d4\u0001\u0000\u0000\u0000L\u01dc\u0001\u0000\u0000"+ - "\u0000N\u01df\u0001\u0000\u0000\u0000P\u01e2\u0001\u0000\u0000\u0000R"+ - "\u01eb\u0001\u0000\u0000\u0000T\u01ef\u0001\u0000\u0000\u0000V\u01f5\u0001"+ - "\u0000\u0000\u0000X\u01f9\u0001\u0000\u0000\u0000Z\u01fc\u0001\u0000\u0000"+ - "\u0000\\\u0204\u0001\u0000\u0000\u0000^\u0208\u0001\u0000\u0000\u0000"+ - "`\u020c\u0001\u0000\u0000\u0000b\u020f\u0001\u0000\u0000\u0000d\u0214"+ - "\u0001\u0000\u0000\u0000f\u0218\u0001\u0000\u0000\u0000h\u021a\u0001\u0000"+ - 
"\u0000\u0000j\u021c\u0001\u0000\u0000\u0000l\u021f\u0001\u0000\u0000\u0000"+ - "n\u0223\u0001\u0000\u0000\u0000p\u0226\u0001\u0000\u0000\u0000r\u023a"+ - "\u0001\u0000\u0000\u0000t\u023e\u0001\u0000\u0000\u0000v\u0243\u0001\u0000"+ - "\u0000\u0000xy\u0003\u0002\u0001\u0000yz\u0005\u0000\u0000\u0001z\u0001"+ - "\u0001\u0000\u0000\u0000{|\u0006\u0001\uffff\uffff\u0000|}\u0003\u0004"+ - "\u0002\u0000}\u0083\u0001\u0000\u0000\u0000~\u007f\n\u0001\u0000\u0000"+ - "\u007f\u0080\u0005\u0018\u0000\u0000\u0080\u0082\u0003\u0006\u0003\u0000"+ - "\u0081~\u0001\u0000\u0000\u0000\u0082\u0085\u0001\u0000\u0000\u0000\u0083"+ - "\u0081\u0001\u0000\u0000\u0000\u0083\u0084\u0001\u0000\u0000\u0000\u0084"+ - "\u0003\u0001\u0000\u0000\u0000\u0085\u0083\u0001\u0000\u0000\u0000\u0086"+ - "\u008d\u0003j5\u0000\u0087\u008d\u0003\"\u0011\u0000\u0088\u008d\u0003"+ - "\u001c\u000e\u0000\u0089\u008d\u0003n7\u0000\u008a\u008b\u0004\u0002\u0001"+ - "\u0000\u008b\u008d\u00030\u0018\u0000\u008c\u0086\u0001\u0000\u0000\u0000"+ - "\u008c\u0087\u0001\u0000\u0000\u0000\u008c\u0088\u0001\u0000\u0000\u0000"+ - "\u008c\u0089\u0001\u0000\u0000\u0000\u008c\u008a\u0001\u0000\u0000\u0000"+ - "\u008d\u0005\u0001\u0000\u0000\u0000\u008e\u009f\u00032\u0019\u0000\u008f"+ - "\u009f\u0003\b\u0004\u0000\u0090\u009f\u0003L&\u0000\u0091\u009f\u0003"+ - "F#\u0000\u0092\u009f\u00034\u001a\u0000\u0093\u009f\u0003H$\u0000\u0094"+ - "\u009f\u0003N\'\u0000\u0095\u009f\u0003P(\u0000\u0096\u009f\u0003T*\u0000"+ - "\u0097\u009f\u0003V+\u0000\u0098\u009f\u0003p8\u0000\u0099\u009f\u0003"+ - "X,\u0000\u009a\u009b\u0004\u0003\u0002\u0000\u009b\u009f\u0003v;\u0000"+ - "\u009c\u009d\u0004\u0003\u0003\u0000\u009d\u009f\u0003t:\u0000\u009e\u008e"+ - "\u0001\u0000\u0000\u0000\u009e\u008f\u0001\u0000\u0000\u0000\u009e\u0090"+ - "\u0001\u0000\u0000\u0000\u009e\u0091\u0001\u0000\u0000\u0000\u009e\u0092"+ - "\u0001\u0000\u0000\u0000\u009e\u0093\u0001\u0000\u0000\u0000\u009e\u0094"+ - 
"\u0001\u0000\u0000\u0000\u009e\u0095\u0001\u0000\u0000\u0000\u009e\u0096"+ - "\u0001\u0000\u0000\u0000\u009e\u0097\u0001\u0000\u0000\u0000\u009e\u0098"+ - "\u0001\u0000\u0000\u0000\u009e\u0099\u0001\u0000\u0000\u0000\u009e\u009a"+ - "\u0001\u0000\u0000\u0000\u009e\u009c\u0001\u0000\u0000\u0000\u009f\u0007"+ - "\u0001\u0000\u0000\u0000\u00a0\u00a1\u0005\u0010\u0000\u0000\u00a1\u00a2"+ - "\u0003\n\u0005\u0000\u00a2\t\u0001\u0000\u0000\u0000\u00a3\u00a4\u0006"+ - "\u0005\uffff\uffff\u0000\u00a4\u00a5\u0005+\u0000\u0000\u00a5\u00c2\u0003"+ - "\n\u0005\b\u00a6\u00c2\u0003\u0010\b\u0000\u00a7\u00c2\u0003\f\u0006\u0000"+ - "\u00a8\u00aa\u0003\u0010\b\u0000\u00a9\u00ab\u0005+\u0000\u0000\u00aa"+ - "\u00a9\u0001\u0000\u0000\u0000\u00aa\u00ab\u0001\u0000\u0000\u0000\u00ab"+ - "\u00ac\u0001\u0000\u0000\u0000\u00ac\u00ad\u0005&\u0000\u0000\u00ad\u00ae"+ - "\u0005*\u0000\u0000\u00ae\u00b3\u0003\u0010\b\u0000\u00af\u00b0\u0005"+ - "!\u0000\u0000\u00b0\u00b2\u0003\u0010\b\u0000\u00b1\u00af\u0001\u0000"+ - "\u0000\u0000\u00b2\u00b5\u0001\u0000\u0000\u0000\u00b3\u00b1\u0001\u0000"+ - "\u0000\u0000\u00b3\u00b4\u0001\u0000\u0000\u0000\u00b4\u00b6\u0001\u0000"+ - "\u0000\u0000\u00b5\u00b3\u0001\u0000\u0000\u0000\u00b6\u00b7\u00051\u0000"+ - "\u0000\u00b7\u00c2\u0001\u0000\u0000\u0000\u00b8\u00b9\u0003\u0010\b\u0000"+ - "\u00b9\u00bb\u0005\'\u0000\u0000\u00ba\u00bc\u0005+\u0000\u0000\u00bb"+ - "\u00ba\u0001\u0000\u0000\u0000\u00bb\u00bc\u0001\u0000\u0000\u0000\u00bc"+ - "\u00bd\u0001\u0000\u0000\u0000\u00bd\u00be\u0005,\u0000\u0000\u00be\u00c2"+ - "\u0001\u0000\u0000\u0000\u00bf\u00c0\u0004\u0005\u0004\u0000\u00c0\u00c2"+ - "\u0003\u000e\u0007\u0000\u00c1\u00a3\u0001\u0000\u0000\u0000\u00c1\u00a6"+ - "\u0001\u0000\u0000\u0000\u00c1\u00a7\u0001\u0000\u0000\u0000\u00c1\u00a8"+ - "\u0001\u0000\u0000\u0000\u00c1\u00b8\u0001\u0000\u0000\u0000\u00c1\u00bf"+ - "\u0001\u0000\u0000\u0000\u00c2\u00cb\u0001\u0000\u0000\u0000\u00c3\u00c4"+ - 
"\n\u0005\u0000\u0000\u00c4\u00c5\u0005\u001d\u0000\u0000\u00c5\u00ca\u0003"+ - "\n\u0005\u0006\u00c6\u00c7\n\u0004\u0000\u0000\u00c7\u00c8\u0005.\u0000"+ - "\u0000\u00c8\u00ca\u0003\n\u0005\u0005\u00c9\u00c3\u0001\u0000\u0000\u0000"+ - "\u00c9\u00c6\u0001\u0000\u0000\u0000\u00ca\u00cd\u0001\u0000\u0000\u0000"+ - "\u00cb\u00c9\u0001\u0000\u0000\u0000\u00cb\u00cc\u0001\u0000\u0000\u0000"+ - "\u00cc\u000b\u0001\u0000\u0000\u0000\u00cd\u00cb\u0001\u0000\u0000\u0000"+ - "\u00ce\u00d0\u0003\u0010\b\u0000\u00cf\u00d1\u0005+\u0000\u0000\u00d0"+ - "\u00cf\u0001\u0000\u0000\u0000\u00d0\u00d1\u0001\u0000\u0000\u0000\u00d1"+ - "\u00d2\u0001\u0000\u0000\u0000\u00d2\u00d3\u0005)\u0000\u0000\u00d3\u00d4"+ - "\u0003f3\u0000\u00d4\u00dd\u0001\u0000\u0000\u0000\u00d5\u00d7\u0003\u0010"+ - "\b\u0000\u00d6\u00d8\u0005+\u0000\u0000\u00d7\u00d6\u0001\u0000\u0000"+ - "\u0000\u00d7\u00d8\u0001\u0000\u0000\u0000\u00d8\u00d9\u0001\u0000\u0000"+ - "\u0000\u00d9\u00da\u00050\u0000\u0000\u00da\u00db\u0003f3\u0000\u00db"+ - "\u00dd\u0001\u0000\u0000\u0000\u00dc\u00ce\u0001\u0000\u0000\u0000\u00dc"+ - "\u00d5\u0001\u0000\u0000\u0000\u00dd\r\u0001\u0000\u0000\u0000\u00de\u00df"+ - "\u0003\u0010\b\u0000\u00df\u00e0\u0005?\u0000\u0000\u00e0\u00e1\u0003"+ - "f3\u0000\u00e1\u000f\u0001\u0000\u0000\u0000\u00e2\u00e8\u0003\u0012\t"+ - "\u0000\u00e3\u00e4\u0003\u0012\t\u0000\u00e4\u00e5\u0003h4\u0000\u00e5"+ - "\u00e6\u0003\u0012\t\u0000\u00e6\u00e8\u0001\u0000\u0000\u0000\u00e7\u00e2"+ - "\u0001\u0000\u0000\u0000\u00e7\u00e3\u0001\u0000\u0000\u0000\u00e8\u0011"+ - "\u0001\u0000\u0000\u0000\u00e9\u00ea\u0006\t\uffff\uffff\u0000\u00ea\u00ee"+ - "\u0003\u0014\n\u0000\u00eb\u00ec\u0007\u0000\u0000\u0000\u00ec\u00ee\u0003"+ - "\u0012\t\u0003\u00ed\u00e9\u0001\u0000\u0000\u0000\u00ed\u00eb\u0001\u0000"+ - "\u0000\u0000\u00ee\u00f7\u0001\u0000\u0000\u0000\u00ef\u00f0\n\u0002\u0000"+ - "\u0000\u00f0\u00f1\u0007\u0001\u0000\u0000\u00f1\u00f6\u0003\u0012\t\u0003"+ - 
"\u00f2\u00f3\n\u0001\u0000\u0000\u00f3\u00f4\u0007\u0000\u0000\u0000\u00f4"+ - "\u00f6\u0003\u0012\t\u0002\u00f5\u00ef\u0001\u0000\u0000\u0000\u00f5\u00f2"+ - "\u0001\u0000\u0000\u0000\u00f6\u00f9\u0001\u0000\u0000\u0000\u00f7\u00f5"+ - "\u0001\u0000\u0000\u0000\u00f7\u00f8\u0001\u0000\u0000\u0000\u00f8\u0013"+ - "\u0001\u0000\u0000\u0000\u00f9\u00f7\u0001\u0000\u0000\u0000\u00fa\u00fb"+ - "\u0006\n\uffff\uffff\u0000\u00fb\u0103\u0003@ \u0000\u00fc\u0103\u0003"+ - "6\u001b\u0000\u00fd\u0103\u0003\u0016\u000b\u0000\u00fe\u00ff\u0005*\u0000"+ - "\u0000\u00ff\u0100\u0003\n\u0005\u0000\u0100\u0101\u00051\u0000\u0000"+ - "\u0101\u0103\u0001\u0000\u0000\u0000\u0102\u00fa\u0001\u0000\u0000\u0000"+ - "\u0102\u00fc\u0001\u0000\u0000\u0000\u0102\u00fd\u0001\u0000\u0000\u0000"+ - "\u0102\u00fe\u0001\u0000\u0000\u0000\u0103\u0109\u0001\u0000\u0000\u0000"+ - "\u0104\u0105\n\u0001\u0000\u0000\u0105\u0106\u0005 \u0000\u0000\u0106"+ - "\u0108\u0003\u001a\r\u0000\u0107\u0104\u0001\u0000\u0000\u0000\u0108\u010b"+ - "\u0001\u0000\u0000\u0000\u0109\u0107\u0001\u0000\u0000\u0000\u0109\u010a"+ - "\u0001\u0000\u0000\u0000\u010a\u0015\u0001\u0000\u0000\u0000\u010b\u0109"+ - "\u0001\u0000\u0000\u0000\u010c\u010d\u0003\u0018\f\u0000\u010d\u0117\u0005"+ - "*\u0000\u0000\u010e\u0118\u0005<\u0000\u0000\u010f\u0114\u0003\n\u0005"+ - "\u0000\u0110\u0111\u0005!\u0000\u0000\u0111\u0113\u0003\n\u0005\u0000"+ - "\u0112\u0110\u0001\u0000\u0000\u0000\u0113\u0116\u0001\u0000\u0000\u0000"+ - "\u0114\u0112\u0001\u0000\u0000\u0000\u0114\u0115\u0001\u0000\u0000\u0000"+ - "\u0115\u0118\u0001\u0000\u0000\u0000\u0116\u0114\u0001\u0000\u0000\u0000"+ - "\u0117\u010e\u0001\u0000\u0000\u0000\u0117\u010f\u0001\u0000\u0000\u0000"+ - "\u0117\u0118\u0001\u0000\u0000\u0000\u0118\u0119\u0001\u0000\u0000\u0000"+ - "\u0119\u011a\u00051\u0000\u0000\u011a\u0017\u0001\u0000\u0000\u0000\u011b"+ - "\u011e\u0005?\u0000\u0000\u011c\u011e\u0003D\"\u0000\u011d\u011b\u0001"+ - 
"\u0000\u0000\u0000\u011d\u011c\u0001\u0000\u0000\u0000\u011e\u0019\u0001"+ - "\u0000\u0000\u0000\u011f\u0120\u0003<\u001e\u0000\u0120\u001b\u0001\u0000"+ - "\u0000\u0000\u0121\u0122\u0005\f\u0000\u0000\u0122\u0123\u0003\u001e\u000f"+ - "\u0000\u0123\u001d\u0001\u0000\u0000\u0000\u0124\u0129\u0003 \u0010\u0000"+ - "\u0125\u0126\u0005!\u0000\u0000\u0126\u0128\u0003 \u0010\u0000\u0127\u0125"+ - "\u0001\u0000\u0000\u0000\u0128\u012b\u0001\u0000\u0000\u0000\u0129\u0127"+ - "\u0001\u0000\u0000\u0000\u0129\u012a\u0001\u0000\u0000\u0000\u012a\u001f"+ - "\u0001\u0000\u0000\u0000\u012b\u0129\u0001\u0000\u0000\u0000\u012c\u0132"+ - "\u0003\n\u0005\u0000\u012d\u012e\u00036\u001b\u0000\u012e\u012f\u0005"+ - "\u001f\u0000\u0000\u012f\u0130\u0003\n\u0005\u0000\u0130\u0132\u0001\u0000"+ - "\u0000\u0000\u0131\u012c\u0001\u0000\u0000\u0000\u0131\u012d\u0001\u0000"+ - "\u0000\u0000\u0132!\u0001\u0000\u0000\u0000\u0133\u0134\u0005\u0006\u0000"+ - "\u0000\u0134\u0139\u0003$\u0012\u0000\u0135\u0136\u0005!\u0000\u0000\u0136"+ - "\u0138\u0003$\u0012\u0000\u0137\u0135\u0001\u0000\u0000\u0000\u0138\u013b"+ - "\u0001\u0000\u0000\u0000\u0139\u0137\u0001\u0000\u0000\u0000\u0139\u013a"+ - "\u0001\u0000\u0000\u0000\u013a\u013d\u0001\u0000\u0000\u0000\u013b\u0139"+ - "\u0001\u0000\u0000\u0000\u013c\u013e\u0003*\u0015\u0000\u013d\u013c\u0001"+ - "\u0000\u0000\u0000\u013d\u013e\u0001\u0000\u0000\u0000\u013e#\u0001\u0000"+ - "\u0000\u0000\u013f\u0140\u0003&\u0013\u0000\u0140\u0141\u0005h\u0000\u0000"+ - "\u0141\u0142\u0003(\u0014\u0000\u0142\u0145\u0001\u0000\u0000\u0000\u0143"+ - "\u0145\u0003(\u0014\u0000\u0144\u013f\u0001\u0000\u0000\u0000\u0144\u0143"+ - "\u0001\u0000\u0000\u0000\u0145%\u0001\u0000\u0000\u0000\u0146\u0147\u0005"+ - "L\u0000\u0000\u0147\'\u0001\u0000\u0000\u0000\u0148\u0149\u0007\u0002"+ - "\u0000\u0000\u0149)\u0001\u0000\u0000\u0000\u014a\u014d\u0003,\u0016\u0000"+ - "\u014b\u014d\u0003.\u0017\u0000\u014c\u014a\u0001\u0000\u0000\u0000\u014c"+ - 
"\u014b\u0001\u0000\u0000\u0000\u014d+\u0001\u0000\u0000\u0000\u014e\u014f"+ - "\u0005K\u0000\u0000\u014f\u0154\u0005L\u0000\u0000\u0150\u0151\u0005!"+ - "\u0000\u0000\u0151\u0153\u0005L\u0000\u0000\u0152\u0150\u0001\u0000\u0000"+ - "\u0000\u0153\u0156\u0001\u0000\u0000\u0000\u0154\u0152\u0001\u0000\u0000"+ - "\u0000\u0154\u0155\u0001\u0000\u0000\u0000\u0155-\u0001\u0000\u0000\u0000"+ - "\u0156\u0154\u0001\u0000\u0000\u0000\u0157\u0158\u0005A\u0000\u0000\u0158"+ - "\u0159\u0003,\u0016\u0000\u0159\u015a\u0005B\u0000\u0000\u015a/\u0001"+ - "\u0000\u0000\u0000\u015b\u015c\u0005\u0013\u0000\u0000\u015c\u0161\u0003"+ - "$\u0012\u0000\u015d\u015e\u0005!\u0000\u0000\u015e\u0160\u0003$\u0012"+ - "\u0000\u015f\u015d\u0001\u0000\u0000\u0000\u0160\u0163\u0001\u0000\u0000"+ - "\u0000\u0161\u015f\u0001\u0000\u0000\u0000\u0161\u0162\u0001\u0000\u0000"+ - "\u0000\u0162\u0165\u0001\u0000\u0000\u0000\u0163\u0161\u0001\u0000\u0000"+ - "\u0000\u0164\u0166\u0003\u001e\u000f\u0000\u0165\u0164\u0001\u0000\u0000"+ - "\u0000\u0165\u0166\u0001\u0000\u0000\u0000\u0166\u0169\u0001\u0000\u0000"+ - "\u0000\u0167\u0168\u0005\u001c\u0000\u0000\u0168\u016a\u0003\u001e\u000f"+ - "\u0000\u0169\u0167\u0001\u0000\u0000\u0000\u0169\u016a\u0001\u0000\u0000"+ - "\u0000\u016a1\u0001\u0000\u0000\u0000\u016b\u016c\u0005\u0004\u0000\u0000"+ - "\u016c\u016d\u0003\u001e\u000f\u0000\u016d3\u0001\u0000\u0000\u0000\u016e"+ - "\u0170\u0005\u000f\u0000\u0000\u016f\u0171\u0003\u001e\u000f\u0000\u0170"+ - "\u016f\u0001\u0000\u0000\u0000\u0170\u0171\u0001\u0000\u0000\u0000\u0171"+ - "\u0174\u0001\u0000\u0000\u0000\u0172\u0173\u0005\u001c\u0000\u0000\u0173"+ - "\u0175\u0003\u001e\u000f\u0000\u0174\u0172\u0001\u0000\u0000\u0000\u0174"+ - "\u0175\u0001\u0000\u0000\u0000\u01755\u0001\u0000\u0000\u0000\u0176\u017b"+ - "\u0003D\"\u0000\u0177\u0178\u0005#\u0000\u0000\u0178\u017a\u0003D\"\u0000"+ - "\u0179\u0177\u0001\u0000\u0000\u0000\u017a\u017d\u0001\u0000\u0000\u0000"+ - 
"\u017b\u0179\u0001\u0000\u0000\u0000\u017b\u017c\u0001\u0000\u0000\u0000"+ - "\u017c7\u0001\u0000\u0000\u0000\u017d\u017b\u0001\u0000\u0000\u0000\u017e"+ - "\u0183\u0003>\u001f\u0000\u017f\u0180\u0005#\u0000\u0000\u0180\u0182\u0003"+ - ">\u001f\u0000\u0181\u017f\u0001\u0000\u0000\u0000\u0182\u0185\u0001\u0000"+ - "\u0000\u0000\u0183\u0181\u0001\u0000\u0000\u0000\u0183\u0184\u0001\u0000"+ - "\u0000\u0000\u01849\u0001\u0000\u0000\u0000\u0185\u0183\u0001\u0000\u0000"+ - "\u0000\u0186\u018b\u00038\u001c\u0000\u0187\u0188\u0005!\u0000\u0000\u0188"+ - "\u018a\u00038\u001c\u0000\u0189\u0187\u0001\u0000\u0000\u0000\u018a\u018d"+ - "\u0001\u0000\u0000\u0000\u018b\u0189\u0001\u0000\u0000\u0000\u018b\u018c"+ - "\u0001\u0000\u0000\u0000\u018c;\u0001\u0000\u0000\u0000\u018d\u018b\u0001"+ - "\u0000\u0000\u0000\u018e\u018f\u0007\u0003\u0000\u0000\u018f=\u0001\u0000"+ - "\u0000\u0000\u0190\u0193\u0005P\u0000\u0000\u0191\u0193\u0003B!\u0000"+ - "\u0192\u0190\u0001\u0000\u0000\u0000\u0192\u0191\u0001\u0000\u0000\u0000"+ - "\u0193?\u0001\u0000\u0000\u0000\u0194\u01bf\u0005,\u0000\u0000\u0195\u0196"+ - "\u0003d2\u0000\u0196\u0197\u0005C\u0000\u0000\u0197\u01bf\u0001\u0000"+ - "\u0000\u0000\u0198\u01bf\u0003b1\u0000\u0199\u01bf\u0003d2\u0000\u019a"+ - "\u01bf\u0003^/\u0000\u019b\u01bf\u0003B!\u0000\u019c\u01bf\u0003f3\u0000"+ - "\u019d\u019e\u0005A\u0000\u0000\u019e\u01a3\u0003`0\u0000\u019f\u01a0"+ - "\u0005!\u0000\u0000\u01a0\u01a2\u0003`0\u0000\u01a1\u019f\u0001\u0000"+ - "\u0000\u0000\u01a2\u01a5\u0001\u0000\u0000\u0000\u01a3\u01a1\u0001\u0000"+ - "\u0000\u0000\u01a3\u01a4\u0001\u0000\u0000\u0000\u01a4\u01a6\u0001\u0000"+ - "\u0000\u0000\u01a5\u01a3\u0001\u0000\u0000\u0000\u01a6\u01a7\u0005B\u0000"+ - "\u0000\u01a7\u01bf\u0001\u0000\u0000\u0000\u01a8\u01a9\u0005A\u0000\u0000"+ - "\u01a9\u01ae\u0003^/\u0000\u01aa\u01ab\u0005!\u0000\u0000\u01ab\u01ad"+ - "\u0003^/\u0000\u01ac\u01aa\u0001\u0000\u0000\u0000\u01ad\u01b0\u0001\u0000"+ - 
"\u0000\u0000\u01ae\u01ac\u0001\u0000\u0000\u0000\u01ae\u01af\u0001\u0000"+ - "\u0000\u0000\u01af\u01b1\u0001\u0000\u0000\u0000\u01b0\u01ae\u0001\u0000"+ - "\u0000\u0000\u01b1\u01b2\u0005B\u0000\u0000\u01b2\u01bf\u0001\u0000\u0000"+ - "\u0000\u01b3\u01b4\u0005A\u0000\u0000\u01b4\u01b9\u0003f3\u0000\u01b5"+ - "\u01b6\u0005!\u0000\u0000\u01b6\u01b8\u0003f3\u0000\u01b7\u01b5\u0001"+ - "\u0000\u0000\u0000\u01b8\u01bb\u0001\u0000\u0000\u0000\u01b9\u01b7\u0001"+ - "\u0000\u0000\u0000\u01b9\u01ba\u0001\u0000\u0000\u0000\u01ba\u01bc\u0001"+ - "\u0000\u0000\u0000\u01bb\u01b9\u0001\u0000\u0000\u0000\u01bc\u01bd\u0005"+ - "B\u0000\u0000\u01bd\u01bf\u0001\u0000\u0000\u0000\u01be\u0194\u0001\u0000"+ - "\u0000\u0000\u01be\u0195\u0001\u0000\u0000\u0000\u01be\u0198\u0001\u0000"+ - "\u0000\u0000\u01be\u0199\u0001\u0000\u0000\u0000\u01be\u019a\u0001\u0000"+ - "\u0000\u0000\u01be\u019b\u0001\u0000\u0000\u0000\u01be\u019c\u0001\u0000"+ - "\u0000\u0000\u01be\u019d\u0001\u0000\u0000\u0000\u01be\u01a8\u0001\u0000"+ - "\u0000\u0000\u01be\u01b3\u0001\u0000\u0000\u0000\u01bfA\u0001\u0000\u0000"+ - "\u0000\u01c0\u01c3\u0005/\u0000\u0000\u01c1\u01c3\u0005@\u0000\u0000\u01c2"+ - "\u01c0\u0001\u0000\u0000\u0000\u01c2\u01c1\u0001\u0000\u0000\u0000\u01c3"+ - "C\u0001\u0000\u0000\u0000\u01c4\u01c7\u0003<\u001e\u0000\u01c5\u01c7\u0003"+ - "B!\u0000\u01c6\u01c4\u0001\u0000\u0000\u0000\u01c6\u01c5\u0001\u0000\u0000"+ - "\u0000\u01c7E\u0001\u0000\u0000\u0000\u01c8\u01c9\u0005\t\u0000\u0000"+ - "\u01c9\u01ca\u0005\u001a\u0000\u0000\u01caG\u0001\u0000\u0000\u0000\u01cb"+ - "\u01cc\u0005\u000e\u0000\u0000\u01cc\u01d1\u0003J%\u0000\u01cd\u01ce\u0005"+ - "!\u0000\u0000\u01ce\u01d0\u0003J%\u0000\u01cf\u01cd\u0001\u0000\u0000"+ - "\u0000\u01d0\u01d3\u0001\u0000\u0000\u0000\u01d1\u01cf\u0001\u0000\u0000"+ - "\u0000\u01d1\u01d2\u0001\u0000\u0000\u0000\u01d2I\u0001\u0000\u0000\u0000"+ - "\u01d3\u01d1\u0001\u0000\u0000\u0000\u01d4\u01d6\u0003\n\u0005\u0000\u01d5"+ - 
"\u01d7\u0007\u0004\u0000\u0000\u01d6\u01d5\u0001\u0000\u0000\u0000\u01d6"+ - "\u01d7\u0001\u0000\u0000\u0000\u01d7\u01da\u0001\u0000\u0000\u0000\u01d8"+ - "\u01d9\u0005-\u0000\u0000\u01d9\u01db\u0007\u0005\u0000\u0000\u01da\u01d8"+ - "\u0001\u0000\u0000\u0000\u01da\u01db\u0001\u0000\u0000\u0000\u01dbK\u0001"+ - "\u0000\u0000\u0000\u01dc\u01dd\u0005\b\u0000\u0000\u01dd\u01de\u0003:"+ - "\u001d\u0000\u01deM\u0001\u0000\u0000\u0000\u01df\u01e0\u0005\u0002\u0000"+ - "\u0000\u01e0\u01e1\u0003:\u001d\u0000\u01e1O\u0001\u0000\u0000\u0000\u01e2"+ - "\u01e3\u0005\u000b\u0000\u0000\u01e3\u01e8\u0003R)\u0000\u01e4\u01e5\u0005"+ - "!\u0000\u0000\u01e5\u01e7\u0003R)\u0000\u01e6\u01e4\u0001\u0000\u0000"+ - "\u0000\u01e7\u01ea\u0001\u0000\u0000\u0000\u01e8\u01e6\u0001\u0000\u0000"+ - "\u0000\u01e8\u01e9\u0001\u0000\u0000\u0000\u01e9Q\u0001\u0000\u0000\u0000"+ - "\u01ea\u01e8\u0001\u0000\u0000\u0000\u01eb\u01ec\u00038\u001c\u0000\u01ec"+ - "\u01ed\u0005T\u0000\u0000\u01ed\u01ee\u00038\u001c\u0000\u01eeS\u0001"+ - "\u0000\u0000\u0000\u01ef\u01f0\u0005\u0001\u0000\u0000\u01f0\u01f1\u0003"+ - "\u0014\n\u0000\u01f1\u01f3\u0003f3\u0000\u01f2\u01f4\u0003Z-\u0000\u01f3"+ - "\u01f2\u0001\u0000\u0000\u0000\u01f3\u01f4\u0001\u0000\u0000\u0000\u01f4"+ - "U\u0001\u0000\u0000\u0000\u01f5\u01f6\u0005\u0007\u0000\u0000\u01f6\u01f7"+ - "\u0003\u0014\n\u0000\u01f7\u01f8\u0003f3\u0000\u01f8W\u0001\u0000\u0000"+ - "\u0000\u01f9\u01fa\u0005\n\u0000\u0000\u01fa\u01fb\u00036\u001b\u0000"+ - "\u01fbY\u0001\u0000\u0000\u0000\u01fc\u0201\u0003\\.\u0000\u01fd\u01fe"+ - "\u0005!\u0000\u0000\u01fe\u0200\u0003\\.\u0000\u01ff\u01fd\u0001\u0000"+ - "\u0000\u0000\u0200\u0203\u0001\u0000\u0000\u0000\u0201\u01ff\u0001\u0000"+ - "\u0000\u0000\u0201\u0202\u0001\u0000\u0000\u0000\u0202[\u0001\u0000\u0000"+ - "\u0000\u0203\u0201\u0001\u0000\u0000\u0000\u0204\u0205\u0003<\u001e\u0000"+ - "\u0205\u0206\u0005\u001f\u0000\u0000\u0206\u0207\u0003@ \u0000\u0207]"+ - 
"\u0001\u0000\u0000\u0000\u0208\u0209\u0007\u0006\u0000\u0000\u0209_\u0001"+ - "\u0000\u0000\u0000\u020a\u020d\u0003b1\u0000\u020b\u020d\u0003d2\u0000"+ - "\u020c\u020a\u0001\u0000\u0000\u0000\u020c\u020b\u0001\u0000\u0000\u0000"+ - "\u020da\u0001\u0000\u0000\u0000\u020e\u0210\u0007\u0000\u0000\u0000\u020f"+ - "\u020e\u0001\u0000\u0000\u0000\u020f\u0210\u0001\u0000\u0000\u0000\u0210"+ - "\u0211\u0001\u0000\u0000\u0000\u0211\u0212\u0005\u001b\u0000\u0000\u0212"+ - "c\u0001\u0000\u0000\u0000\u0213\u0215\u0007\u0000\u0000\u0000\u0214\u0213"+ - "\u0001\u0000\u0000\u0000\u0214\u0215\u0001\u0000\u0000\u0000\u0215\u0216"+ - "\u0001\u0000\u0000\u0000\u0216\u0217\u0005\u001a\u0000\u0000\u0217e\u0001"+ - "\u0000\u0000\u0000\u0218\u0219\u0005\u0019\u0000\u0000\u0219g\u0001\u0000"+ - "\u0000\u0000\u021a\u021b\u0007\u0007\u0000\u0000\u021bi\u0001\u0000\u0000"+ - "\u0000\u021c\u021d\u0005\u0005\u0000\u0000\u021d\u021e\u0003l6\u0000\u021e"+ - "k\u0001\u0000\u0000\u0000\u021f\u0220\u0005A\u0000\u0000\u0220\u0221\u0003"+ - "\u0002\u0001\u0000\u0221\u0222\u0005B\u0000\u0000\u0222m\u0001\u0000\u0000"+ - "\u0000\u0223\u0224\u0005\r\u0000\u0000\u0224\u0225\u0005d\u0000\u0000"+ - "\u0225o\u0001\u0000\u0000\u0000\u0226\u0227\u0005\u0003\u0000\u0000\u0227"+ - "\u022a\u0005Z\u0000\u0000\u0228\u0229\u0005X\u0000\u0000\u0229\u022b\u0003"+ - "8\u001c\u0000\u022a\u0228\u0001\u0000\u0000\u0000\u022a\u022b\u0001\u0000"+ - "\u0000\u0000\u022b\u0235\u0001\u0000\u0000\u0000\u022c\u022d\u0005Y\u0000"+ - "\u0000\u022d\u0232\u0003r9\u0000\u022e\u022f\u0005!\u0000\u0000\u022f"+ - "\u0231\u0003r9\u0000\u0230\u022e\u0001\u0000\u0000\u0000\u0231\u0234\u0001"+ - "\u0000\u0000\u0000\u0232\u0230\u0001\u0000\u0000\u0000\u0232\u0233\u0001"+ - "\u0000\u0000\u0000\u0233\u0236\u0001\u0000\u0000\u0000\u0234\u0232\u0001"+ - "\u0000\u0000\u0000\u0235\u022c\u0001\u0000\u0000\u0000\u0235\u0236\u0001"+ - "\u0000\u0000\u0000\u0236q\u0001\u0000\u0000\u0000\u0237\u0238\u00038\u001c"+ - 
"\u0000\u0238\u0239\u0005\u001f\u0000\u0000\u0239\u023b\u0001\u0000\u0000"+ - "\u0000\u023a\u0237\u0001\u0000\u0000\u0000\u023a\u023b\u0001\u0000\u0000"+ - "\u0000\u023b\u023c\u0001\u0000\u0000\u0000\u023c\u023d\u00038\u001c\u0000"+ - "\u023ds\u0001\u0000\u0000\u0000\u023e\u023f\u0005\u0012\u0000\u0000\u023f"+ - "\u0240\u0003$\u0012\u0000\u0240\u0241\u0005X\u0000\u0000\u0241\u0242\u0003"+ - ":\u001d\u0000\u0242u\u0001\u0000\u0000\u0000\u0243\u0244\u0005\u0011\u0000"+ - "\u0000\u0244\u0247\u0003\u001e\u000f\u0000\u0245\u0246\u0005\u001c\u0000"+ - "\u0000\u0246\u0248\u0003\u001e\u000f\u0000\u0247\u0245\u0001\u0000\u0000"+ - "\u0000\u0247\u0248\u0001\u0000\u0000\u0000\u0248w\u0001\u0000\u0000\u0000"+ - "9\u0083\u008c\u009e\u00aa\u00b3\u00bb\u00c1\u00c9\u00cb\u00d0\u00d7\u00dc"+ - "\u00e7\u00ed\u00f5\u00f7\u0102\u0109\u0114\u0117\u011d\u0129\u0131\u0139"+ - "\u013d\u0144\u014c\u0154\u0161\u0165\u0169\u0170\u0174\u017b\u0183\u018b"+ - "\u0192\u01a3\u01ae\u01b9\u01be\u01c2\u01c6\u01d1\u01d6\u01da\u01e8\u01f3"+ - "\u0201\u020c\u020f\u0214\u022a\u0232\u0235\u023a\u0247"; + "TVXZ\\^`bdfhjlnprtvxz\u0000\b\u0001\u0000:;\u0001\u0000<>\u0002\u0000"+ + "\u0019\u0019LL\u0001\u0000CD\u0002\u0000\u001e\u001e\"\"\u0002\u0000%"+ + "%((\u0002\u0000$$22\u0002\u00003359\u0276\u0000|\u0001\u0000\u0000\u0000"+ + "\u0002\u007f\u0001\u0000\u0000\u0000\u0004\u0090\u0001\u0000\u0000\u0000"+ + "\u0006\u00a2\u0001\u0000\u0000\u0000\b\u00a4\u0001\u0000\u0000\u0000\n"+ + "\u00c5\u0001\u0000\u0000\u0000\f\u00e0\u0001\u0000\u0000\u0000\u000e\u00e2"+ + "\u0001\u0000\u0000\u0000\u0010\u00eb\u0001\u0000\u0000\u0000\u0012\u00f1"+ + "\u0001\u0000\u0000\u0000\u0014\u0106\u0001\u0000\u0000\u0000\u0016\u0110"+ + "\u0001\u0000\u0000\u0000\u0018\u0121\u0001\u0000\u0000\u0000\u001a\u0123"+ + "\u0001\u0000\u0000\u0000\u001c\u0125\u0001\u0000\u0000\u0000\u001e\u0128"+ + "\u0001\u0000\u0000\u0000 \u0133\u0001\u0000\u0000\u0000\"\u0137\u0001"+ + 
"\u0000\u0000\u0000$\u0146\u0001\u0000\u0000\u0000&\u014a\u0001\u0000\u0000"+ + "\u0000(\u014c\u0001\u0000\u0000\u0000*\u0150\u0001\u0000\u0000\u0000,"+ + "\u0152\u0001\u0000\u0000\u0000.\u015b\u0001\u0000\u0000\u00000\u015f\u0001"+ + "\u0000\u0000\u00002\u016f\u0001\u0000\u0000\u00004\u0172\u0001\u0000\u0000"+ + "\u00006\u017a\u0001\u0000\u0000\u00008\u0182\u0001\u0000\u0000\u0000:"+ + "\u0188\u0001\u0000\u0000\u0000<\u0190\u0001\u0000\u0000\u0000>\u0198\u0001"+ + "\u0000\u0000\u0000@\u01a0\u0001\u0000\u0000\u0000B\u01a4\u0001\u0000\u0000"+ + "\u0000D\u01d0\u0001\u0000\u0000\u0000F\u01d4\u0001\u0000\u0000\u0000H"+ + "\u01d8\u0001\u0000\u0000\u0000J\u01da\u0001\u0000\u0000\u0000L\u01dd\u0001"+ + "\u0000\u0000\u0000N\u01e6\u0001\u0000\u0000\u0000P\u01ee\u0001\u0000\u0000"+ + "\u0000R\u01f1\u0001\u0000\u0000\u0000T\u01f4\u0001\u0000\u0000\u0000V"+ + "\u01fd\u0001\u0000\u0000\u0000X\u0201\u0001\u0000\u0000\u0000Z\u0207\u0001"+ + "\u0000\u0000\u0000\\\u020b\u0001\u0000\u0000\u0000^\u020e\u0001\u0000"+ + "\u0000\u0000`\u0216\u0001\u0000\u0000\u0000b\u021a\u0001\u0000\u0000\u0000"+ + "d\u021e\u0001\u0000\u0000\u0000f\u0221\u0001\u0000\u0000\u0000h\u0226"+ + "\u0001\u0000\u0000\u0000j\u022a\u0001\u0000\u0000\u0000l\u022c\u0001\u0000"+ + "\u0000\u0000n\u022e\u0001\u0000\u0000\u0000p\u0231\u0001\u0000\u0000\u0000"+ + "r\u0235\u0001\u0000\u0000\u0000t\u0238\u0001\u0000\u0000\u0000v\u024c"+ + "\u0001\u0000\u0000\u0000x\u0250\u0001\u0000\u0000\u0000z\u0255\u0001\u0000"+ + "\u0000\u0000|}\u0003\u0002\u0001\u0000}~\u0005\u0000\u0000\u0001~\u0001"+ + "\u0001\u0000\u0000\u0000\u007f\u0080\u0006\u0001\uffff\uffff\u0000\u0080"+ + "\u0081\u0003\u0004\u0002\u0000\u0081\u0087\u0001\u0000\u0000\u0000\u0082"+ + "\u0083\n\u0001\u0000\u0000\u0083\u0084\u0005\u0018\u0000\u0000\u0084\u0086"+ + "\u0003\u0006\u0003\u0000\u0085\u0082\u0001\u0000\u0000\u0000\u0086\u0089"+ + "\u0001\u0000\u0000\u0000\u0087\u0085\u0001\u0000\u0000\u0000\u0087\u0088"+ + 
"\u0001\u0000\u0000\u0000\u0088\u0003\u0001\u0000\u0000\u0000\u0089\u0087"+ + "\u0001\u0000\u0000\u0000\u008a\u0091\u0003n7\u0000\u008b\u0091\u0003\""+ + "\u0011\u0000\u008c\u0091\u0003\u001c\u000e\u0000\u008d\u0091\u0003r9\u0000"+ + "\u008e\u008f\u0004\u0002\u0001\u0000\u008f\u0091\u00030\u0018\u0000\u0090"+ + "\u008a\u0001\u0000\u0000\u0000\u0090\u008b\u0001\u0000\u0000\u0000\u0090"+ + "\u008c\u0001\u0000\u0000\u0000\u0090\u008d\u0001\u0000\u0000\u0000\u0090"+ + "\u008e\u0001\u0000\u0000\u0000\u0091\u0005\u0001\u0000\u0000\u0000\u0092"+ + "\u00a3\u00032\u0019\u0000\u0093\u00a3\u0003\b\u0004\u0000\u0094\u00a3"+ + "\u0003P(\u0000\u0095\u00a3\u0003J%\u0000\u0096\u00a3\u00034\u001a\u0000"+ + "\u0097\u00a3\u0003L&\u0000\u0098\u00a3\u0003R)\u0000\u0099\u00a3\u0003"+ + "T*\u0000\u009a\u00a3\u0003X,\u0000\u009b\u00a3\u0003Z-\u0000\u009c\u00a3"+ + "\u0003t:\u0000\u009d\u00a3\u0003\\.\u0000\u009e\u009f\u0004\u0003\u0002"+ + "\u0000\u009f\u00a3\u0003z=\u0000\u00a0\u00a1\u0004\u0003\u0003\u0000\u00a1"+ + "\u00a3\u0003x<\u0000\u00a2\u0092\u0001\u0000\u0000\u0000\u00a2\u0093\u0001"+ + "\u0000\u0000\u0000\u00a2\u0094\u0001\u0000\u0000\u0000\u00a2\u0095\u0001"+ + "\u0000\u0000\u0000\u00a2\u0096\u0001\u0000\u0000\u0000\u00a2\u0097\u0001"+ + "\u0000\u0000\u0000\u00a2\u0098\u0001\u0000\u0000\u0000\u00a2\u0099\u0001"+ + "\u0000\u0000\u0000\u00a2\u009a\u0001\u0000\u0000\u0000\u00a2\u009b\u0001"+ + "\u0000\u0000\u0000\u00a2\u009c\u0001\u0000\u0000\u0000\u00a2\u009d\u0001"+ + "\u0000\u0000\u0000\u00a2\u009e\u0001\u0000\u0000\u0000\u00a2\u00a0\u0001"+ + "\u0000\u0000\u0000\u00a3\u0007\u0001\u0000\u0000\u0000\u00a4\u00a5\u0005"+ + "\u0010\u0000\u0000\u00a5\u00a6\u0003\n\u0005\u0000\u00a6\t\u0001\u0000"+ + "\u0000\u0000\u00a7\u00a8\u0006\u0005\uffff\uffff\u0000\u00a8\u00a9\u0005"+ + "+\u0000\u0000\u00a9\u00c6\u0003\n\u0005\b\u00aa\u00c6\u0003\u0010\b\u0000"+ + "\u00ab\u00c6\u0003\f\u0006\u0000\u00ac\u00ae\u0003\u0010\b\u0000\u00ad"+ + 
"\u00af\u0005+\u0000\u0000\u00ae\u00ad\u0001\u0000\u0000\u0000\u00ae\u00af"+ + "\u0001\u0000\u0000\u0000\u00af\u00b0\u0001\u0000\u0000\u0000\u00b0\u00b1"+ + "\u0005&\u0000\u0000\u00b1\u00b2\u0005*\u0000\u0000\u00b2\u00b7\u0003\u0010"+ + "\b\u0000\u00b3\u00b4\u0005!\u0000\u0000\u00b4\u00b6\u0003\u0010\b\u0000"+ + "\u00b5\u00b3\u0001\u0000\u0000\u0000\u00b6\u00b9\u0001\u0000\u0000\u0000"+ + "\u00b7\u00b5\u0001\u0000\u0000\u0000\u00b7\u00b8\u0001\u0000\u0000\u0000"+ + "\u00b8\u00ba\u0001\u0000\u0000\u0000\u00b9\u00b7\u0001\u0000\u0000\u0000"+ + "\u00ba\u00bb\u00051\u0000\u0000\u00bb\u00c6\u0001\u0000\u0000\u0000\u00bc"+ + "\u00bd\u0003\u0010\b\u0000\u00bd\u00bf\u0005\'\u0000\u0000\u00be\u00c0"+ + "\u0005+\u0000\u0000\u00bf\u00be\u0001\u0000\u0000\u0000\u00bf\u00c0\u0001"+ + "\u0000\u0000\u0000\u00c0\u00c1\u0001\u0000\u0000\u0000\u00c1\u00c2\u0005"+ + ",\u0000\u0000\u00c2\u00c6\u0001\u0000\u0000\u0000\u00c3\u00c4\u0004\u0005"+ + "\u0004\u0000\u00c4\u00c6\u0003\u000e\u0007\u0000\u00c5\u00a7\u0001\u0000"+ + "\u0000\u0000\u00c5\u00aa\u0001\u0000\u0000\u0000\u00c5\u00ab\u0001\u0000"+ + "\u0000\u0000\u00c5\u00ac\u0001\u0000\u0000\u0000\u00c5\u00bc\u0001\u0000"+ + "\u0000\u0000\u00c5\u00c3\u0001\u0000\u0000\u0000\u00c6\u00cf\u0001\u0000"+ + "\u0000\u0000\u00c7\u00c8\n\u0005\u0000\u0000\u00c8\u00c9\u0005\u001d\u0000"+ + "\u0000\u00c9\u00ce\u0003\n\u0005\u0006\u00ca\u00cb\n\u0004\u0000\u0000"+ + "\u00cb\u00cc\u0005.\u0000\u0000\u00cc\u00ce\u0003\n\u0005\u0005\u00cd"+ + "\u00c7\u0001\u0000\u0000\u0000\u00cd\u00ca\u0001\u0000\u0000\u0000\u00ce"+ + "\u00d1\u0001\u0000\u0000\u0000\u00cf\u00cd\u0001\u0000\u0000\u0000\u00cf"+ + "\u00d0\u0001\u0000\u0000\u0000\u00d0\u000b\u0001\u0000\u0000\u0000\u00d1"+ + "\u00cf\u0001\u0000\u0000\u0000\u00d2\u00d4\u0003\u0010\b\u0000\u00d3\u00d5"+ + "\u0005+\u0000\u0000\u00d4\u00d3\u0001\u0000\u0000\u0000\u00d4\u00d5\u0001"+ + "\u0000\u0000\u0000\u00d5\u00d6\u0001\u0000\u0000\u0000\u00d6\u00d7\u0005"+ + 
")\u0000\u0000\u00d7\u00d8\u0003j5\u0000\u00d8\u00e1\u0001\u0000\u0000"+ + "\u0000\u00d9\u00db\u0003\u0010\b\u0000\u00da\u00dc\u0005+\u0000\u0000"+ + "\u00db\u00da\u0001\u0000\u0000\u0000\u00db\u00dc\u0001\u0000\u0000\u0000"+ + "\u00dc\u00dd\u0001\u0000\u0000\u0000\u00dd\u00de\u00050\u0000\u0000\u00de"+ + "\u00df\u0003j5\u0000\u00df\u00e1\u0001\u0000\u0000\u0000\u00e0\u00d2\u0001"+ + "\u0000\u0000\u0000\u00e0\u00d9\u0001\u0000\u0000\u0000\u00e1\r\u0001\u0000"+ + "\u0000\u0000\u00e2\u00e3\u0003\u0010\b\u0000\u00e3\u00e4\u0005?\u0000"+ + "\u0000\u00e4\u00e5\u0003j5\u0000\u00e5\u000f\u0001\u0000\u0000\u0000\u00e6"+ + "\u00ec\u0003\u0012\t\u0000\u00e7\u00e8\u0003\u0012\t\u0000\u00e8\u00e9"+ + "\u0003l6\u0000\u00e9\u00ea\u0003\u0012\t\u0000\u00ea\u00ec\u0001\u0000"+ + "\u0000\u0000\u00eb\u00e6\u0001\u0000\u0000\u0000\u00eb\u00e7\u0001\u0000"+ + "\u0000\u0000\u00ec\u0011\u0001\u0000\u0000\u0000\u00ed\u00ee\u0006\t\uffff"+ + "\uffff\u0000\u00ee\u00f2\u0003\u0014\n\u0000\u00ef\u00f0\u0007\u0000\u0000"+ + "\u0000\u00f0\u00f2\u0003\u0012\t\u0003\u00f1\u00ed\u0001\u0000\u0000\u0000"+ + "\u00f1\u00ef\u0001\u0000\u0000\u0000\u00f2\u00fb\u0001\u0000\u0000\u0000"+ + "\u00f3\u00f4\n\u0002\u0000\u0000\u00f4\u00f5\u0007\u0001\u0000\u0000\u00f5"+ + "\u00fa\u0003\u0012\t\u0003\u00f6\u00f7\n\u0001\u0000\u0000\u00f7\u00f8"+ + "\u0007\u0000\u0000\u0000\u00f8\u00fa\u0003\u0012\t\u0002\u00f9\u00f3\u0001"+ + "\u0000\u0000\u0000\u00f9\u00f6\u0001\u0000\u0000\u0000\u00fa\u00fd\u0001"+ + "\u0000\u0000\u0000\u00fb\u00f9\u0001\u0000\u0000\u0000\u00fb\u00fc\u0001"+ + "\u0000\u0000\u0000\u00fc\u0013\u0001\u0000\u0000\u0000\u00fd\u00fb\u0001"+ + "\u0000\u0000\u0000\u00fe\u00ff\u0006\n\uffff\uffff\u0000\u00ff\u0107\u0003"+ + "D\"\u0000\u0100\u0107\u0003:\u001d\u0000\u0101\u0107\u0003\u0016\u000b"+ + "\u0000\u0102\u0103\u0005*\u0000\u0000\u0103\u0104\u0003\n\u0005\u0000"+ + "\u0104\u0105\u00051\u0000\u0000\u0105\u0107\u0001\u0000\u0000\u0000\u0106"+ + 
"\u00fe\u0001\u0000\u0000\u0000\u0106\u0100\u0001\u0000\u0000\u0000\u0106"+ + "\u0101\u0001\u0000\u0000\u0000\u0106\u0102\u0001\u0000\u0000\u0000\u0107"+ + "\u010d\u0001\u0000\u0000\u0000\u0108\u0109\n\u0001\u0000\u0000\u0109\u010a"+ + "\u0005 \u0000\u0000\u010a\u010c\u0003\u001a\r\u0000\u010b\u0108\u0001"+ + "\u0000\u0000\u0000\u010c\u010f\u0001\u0000\u0000\u0000\u010d\u010b\u0001"+ + "\u0000\u0000\u0000\u010d\u010e\u0001\u0000\u0000\u0000\u010e\u0015\u0001"+ + "\u0000\u0000\u0000\u010f\u010d\u0001\u0000\u0000\u0000\u0110\u0111\u0003"+ + "\u0018\f\u0000\u0111\u011b\u0005*\u0000\u0000\u0112\u011c\u0005<\u0000"+ + "\u0000\u0113\u0118\u0003\n\u0005\u0000\u0114\u0115\u0005!\u0000\u0000"+ + "\u0115\u0117\u0003\n\u0005\u0000\u0116\u0114\u0001\u0000\u0000\u0000\u0117"+ + "\u011a\u0001\u0000\u0000\u0000\u0118\u0116\u0001\u0000\u0000\u0000\u0118"+ + "\u0119\u0001\u0000\u0000\u0000\u0119\u011c\u0001\u0000\u0000\u0000\u011a"+ + "\u0118\u0001\u0000\u0000\u0000\u011b\u0112\u0001\u0000\u0000\u0000\u011b"+ + "\u0113\u0001\u0000\u0000\u0000\u011b\u011c\u0001\u0000\u0000\u0000\u011c"+ + "\u011d\u0001\u0000\u0000\u0000\u011d\u011e\u00051\u0000\u0000\u011e\u0017"+ + "\u0001\u0000\u0000\u0000\u011f\u0122\u0005?\u0000\u0000\u0120\u0122\u0003"+ + "H$\u0000\u0121\u011f\u0001\u0000\u0000\u0000\u0121\u0120\u0001\u0000\u0000"+ + "\u0000\u0122\u0019\u0001\u0000\u0000\u0000\u0123\u0124\u0003@ \u0000\u0124"+ + "\u001b\u0001\u0000\u0000\u0000\u0125\u0126\u0005\f\u0000\u0000\u0126\u0127"+ + "\u0003\u001e\u000f\u0000\u0127\u001d\u0001\u0000\u0000\u0000\u0128\u012d"+ + "\u0003 \u0010\u0000\u0129\u012a\u0005!\u0000\u0000\u012a\u012c\u0003 "+ + "\u0010\u0000\u012b\u0129\u0001\u0000\u0000\u0000\u012c\u012f\u0001\u0000"+ + "\u0000\u0000\u012d\u012b\u0001\u0000\u0000\u0000\u012d\u012e\u0001\u0000"+ + "\u0000\u0000\u012e\u001f\u0001\u0000\u0000\u0000\u012f\u012d\u0001\u0000"+ + "\u0000\u0000\u0130\u0131\u0003:\u001d\u0000\u0131\u0132\u0005\u001f\u0000"+ + 
"\u0000\u0132\u0134\u0001\u0000\u0000\u0000\u0133\u0130\u0001\u0000\u0000"+ + "\u0000\u0133\u0134\u0001\u0000\u0000\u0000\u0134\u0135\u0001\u0000\u0000"+ + "\u0000\u0135\u0136\u0003\n\u0005\u0000\u0136!\u0001\u0000\u0000\u0000"+ + "\u0137\u0138\u0005\u0006\u0000\u0000\u0138\u013d\u0003$\u0012\u0000\u0139"+ + "\u013a\u0005!\u0000\u0000\u013a\u013c\u0003$\u0012\u0000\u013b\u0139\u0001"+ + "\u0000\u0000\u0000\u013c\u013f\u0001\u0000\u0000\u0000\u013d\u013b\u0001"+ + "\u0000\u0000\u0000\u013d\u013e\u0001\u0000\u0000\u0000\u013e\u0141\u0001"+ + "\u0000\u0000\u0000\u013f\u013d\u0001\u0000\u0000\u0000\u0140\u0142\u0003"+ + "*\u0015\u0000\u0141\u0140\u0001\u0000\u0000\u0000\u0141\u0142\u0001\u0000"+ + "\u0000\u0000\u0142#\u0001\u0000\u0000\u0000\u0143\u0144\u0003&\u0013\u0000"+ + "\u0144\u0145\u0005h\u0000\u0000\u0145\u0147\u0001\u0000\u0000\u0000\u0146"+ + "\u0143\u0001\u0000\u0000\u0000\u0146\u0147\u0001\u0000\u0000\u0000\u0147"+ + "\u0148\u0001\u0000\u0000\u0000\u0148\u0149\u0003(\u0014\u0000\u0149%\u0001"+ + "\u0000\u0000\u0000\u014a\u014b\u0005L\u0000\u0000\u014b\'\u0001\u0000"+ + "\u0000\u0000\u014c\u014d\u0007\u0002\u0000\u0000\u014d)\u0001\u0000\u0000"+ + "\u0000\u014e\u0151\u0003,\u0016\u0000\u014f\u0151\u0003.\u0017\u0000\u0150"+ + "\u014e\u0001\u0000\u0000\u0000\u0150\u014f\u0001\u0000\u0000\u0000\u0151"+ + "+\u0001\u0000\u0000\u0000\u0152\u0153\u0005K\u0000\u0000\u0153\u0158\u0005"+ + "L\u0000\u0000\u0154\u0155\u0005!\u0000\u0000\u0155\u0157\u0005L\u0000"+ + "\u0000\u0156\u0154\u0001\u0000\u0000\u0000\u0157\u015a\u0001\u0000\u0000"+ + "\u0000\u0158\u0156\u0001\u0000\u0000\u0000\u0158\u0159\u0001\u0000\u0000"+ + "\u0000\u0159-\u0001\u0000\u0000\u0000\u015a\u0158\u0001\u0000\u0000\u0000"+ + "\u015b\u015c\u0005A\u0000\u0000\u015c\u015d\u0003,\u0016\u0000\u015d\u015e"+ + "\u0005B\u0000\u0000\u015e/\u0001\u0000\u0000\u0000\u015f\u0160\u0005\u0013"+ + "\u0000\u0000\u0160\u0165\u0003$\u0012\u0000\u0161\u0162\u0005!\u0000\u0000"+ + 
"\u0162\u0164\u0003$\u0012\u0000\u0163\u0161\u0001\u0000\u0000\u0000\u0164"+ + "\u0167\u0001\u0000\u0000\u0000\u0165\u0163\u0001\u0000\u0000\u0000\u0165"+ + "\u0166\u0001\u0000\u0000\u0000\u0166\u0169\u0001\u0000\u0000\u0000\u0167"+ + "\u0165\u0001\u0000\u0000\u0000\u0168\u016a\u00036\u001b\u0000\u0169\u0168"+ + "\u0001\u0000\u0000\u0000\u0169\u016a\u0001\u0000\u0000\u0000\u016a\u016d"+ + "\u0001\u0000\u0000\u0000\u016b\u016c\u0005\u001c\u0000\u0000\u016c\u016e"+ + "\u0003\u001e\u000f\u0000\u016d\u016b\u0001\u0000\u0000\u0000\u016d\u016e"+ + "\u0001\u0000\u0000\u0000\u016e1\u0001\u0000\u0000\u0000\u016f\u0170\u0005"+ + "\u0004\u0000\u0000\u0170\u0171\u0003\u001e\u000f\u0000\u01713\u0001\u0000"+ + "\u0000\u0000\u0172\u0174\u0005\u000f\u0000\u0000\u0173\u0175\u00036\u001b"+ + "\u0000\u0174\u0173\u0001\u0000\u0000\u0000\u0174\u0175\u0001\u0000\u0000"+ + "\u0000\u0175\u0178\u0001\u0000\u0000\u0000\u0176\u0177\u0005\u001c\u0000"+ + "\u0000\u0177\u0179\u0003\u001e\u000f\u0000\u0178\u0176\u0001\u0000\u0000"+ + "\u0000\u0178\u0179\u0001\u0000\u0000\u0000\u01795\u0001\u0000\u0000\u0000"+ + "\u017a\u017f\u00038\u001c\u0000\u017b\u017c\u0005!\u0000\u0000\u017c\u017e"+ + "\u00038\u001c\u0000\u017d\u017b\u0001\u0000\u0000\u0000\u017e\u0181\u0001"+ + "\u0000\u0000\u0000\u017f\u017d\u0001\u0000\u0000\u0000\u017f\u0180\u0001"+ + "\u0000\u0000\u0000\u01807\u0001\u0000\u0000\u0000\u0181\u017f\u0001\u0000"+ + "\u0000\u0000\u0182\u0183\u0003 \u0010\u0000\u0183\u0186\u0004\u001c\n"+ + "\u0000\u0184\u0185\u0005\u0010\u0000\u0000\u0185\u0187\u0003\n\u0005\u0000"+ + "\u0186\u0184\u0001\u0000\u0000\u0000\u0186\u0187\u0001\u0000\u0000\u0000"+ + "\u01879\u0001\u0000\u0000\u0000\u0188\u018d\u0003H$\u0000\u0189\u018a"+ + "\u0005#\u0000\u0000\u018a\u018c\u0003H$\u0000\u018b\u0189\u0001\u0000"+ + "\u0000\u0000\u018c\u018f\u0001\u0000\u0000\u0000\u018d\u018b\u0001\u0000"+ + "\u0000\u0000\u018d\u018e\u0001\u0000\u0000\u0000\u018e;\u0001\u0000\u0000"+ + 
"\u0000\u018f\u018d\u0001\u0000\u0000\u0000\u0190\u0195\u0003B!\u0000\u0191"+ + "\u0192\u0005#\u0000\u0000\u0192\u0194\u0003B!\u0000\u0193\u0191\u0001"+ + "\u0000\u0000\u0000\u0194\u0197\u0001\u0000\u0000\u0000\u0195\u0193\u0001"+ + "\u0000\u0000\u0000\u0195\u0196\u0001\u0000\u0000\u0000\u0196=\u0001\u0000"+ + "\u0000\u0000\u0197\u0195\u0001\u0000\u0000\u0000\u0198\u019d\u0003<\u001e"+ + "\u0000\u0199\u019a\u0005!\u0000\u0000\u019a\u019c\u0003<\u001e\u0000\u019b"+ + "\u0199\u0001\u0000\u0000\u0000\u019c\u019f\u0001\u0000\u0000\u0000\u019d"+ + "\u019b\u0001\u0000\u0000\u0000\u019d\u019e\u0001\u0000\u0000\u0000\u019e"+ + "?\u0001\u0000\u0000\u0000\u019f\u019d\u0001\u0000\u0000\u0000\u01a0\u01a1"+ + "\u0007\u0003\u0000\u0000\u01a1A\u0001\u0000\u0000\u0000\u01a2\u01a5\u0005"+ + "P\u0000\u0000\u01a3\u01a5\u0003F#\u0000\u01a4\u01a2\u0001\u0000\u0000"+ + "\u0000\u01a4\u01a3\u0001\u0000\u0000\u0000\u01a5C\u0001\u0000\u0000\u0000"+ + "\u01a6\u01d1\u0005,\u0000\u0000\u01a7\u01a8\u0003h4\u0000\u01a8\u01a9"+ + "\u0005C\u0000\u0000\u01a9\u01d1\u0001\u0000\u0000\u0000\u01aa\u01d1\u0003"+ + "f3\u0000\u01ab\u01d1\u0003h4\u0000\u01ac\u01d1\u0003b1\u0000\u01ad\u01d1"+ + "\u0003F#\u0000\u01ae\u01d1\u0003j5\u0000\u01af\u01b0\u0005A\u0000\u0000"+ + "\u01b0\u01b5\u0003d2\u0000\u01b1\u01b2\u0005!\u0000\u0000\u01b2\u01b4"+ + "\u0003d2\u0000\u01b3\u01b1\u0001\u0000\u0000\u0000\u01b4\u01b7\u0001\u0000"+ + "\u0000\u0000\u01b5\u01b3\u0001\u0000\u0000\u0000\u01b5\u01b6\u0001\u0000"+ + "\u0000\u0000\u01b6\u01b8\u0001\u0000\u0000\u0000\u01b7\u01b5\u0001\u0000"+ + "\u0000\u0000\u01b8\u01b9\u0005B\u0000\u0000\u01b9\u01d1\u0001\u0000\u0000"+ + "\u0000\u01ba\u01bb\u0005A\u0000\u0000\u01bb\u01c0\u0003b1\u0000\u01bc"+ + "\u01bd\u0005!\u0000\u0000\u01bd\u01bf\u0003b1\u0000\u01be\u01bc\u0001"+ + "\u0000\u0000\u0000\u01bf\u01c2\u0001\u0000\u0000\u0000\u01c0\u01be\u0001"+ + "\u0000\u0000\u0000\u01c0\u01c1\u0001\u0000\u0000\u0000\u01c1\u01c3\u0001"+ + 
"\u0000\u0000\u0000\u01c2\u01c0\u0001\u0000\u0000\u0000\u01c3\u01c4\u0005"+ + "B\u0000\u0000\u01c4\u01d1\u0001\u0000\u0000\u0000\u01c5\u01c6\u0005A\u0000"+ + "\u0000\u01c6\u01cb\u0003j5\u0000\u01c7\u01c8\u0005!\u0000\u0000\u01c8"+ + "\u01ca\u0003j5\u0000\u01c9\u01c7\u0001\u0000\u0000\u0000\u01ca\u01cd\u0001"+ + "\u0000\u0000\u0000\u01cb\u01c9\u0001\u0000\u0000\u0000\u01cb\u01cc\u0001"+ + "\u0000\u0000\u0000\u01cc\u01ce\u0001\u0000\u0000\u0000\u01cd\u01cb\u0001"+ + "\u0000\u0000\u0000\u01ce\u01cf\u0005B\u0000\u0000\u01cf\u01d1\u0001\u0000"+ + "\u0000\u0000\u01d0\u01a6\u0001\u0000\u0000\u0000\u01d0\u01a7\u0001\u0000"+ + "\u0000\u0000\u01d0\u01aa\u0001\u0000\u0000\u0000\u01d0\u01ab\u0001\u0000"+ + "\u0000\u0000\u01d0\u01ac\u0001\u0000\u0000\u0000\u01d0\u01ad\u0001\u0000"+ + "\u0000\u0000\u01d0\u01ae\u0001\u0000\u0000\u0000\u01d0\u01af\u0001\u0000"+ + "\u0000\u0000\u01d0\u01ba\u0001\u0000\u0000\u0000\u01d0\u01c5\u0001\u0000"+ + "\u0000\u0000\u01d1E\u0001\u0000\u0000\u0000\u01d2\u01d5\u0005/\u0000\u0000"+ + "\u01d3\u01d5\u0005@\u0000\u0000\u01d4\u01d2\u0001\u0000\u0000\u0000\u01d4"+ + "\u01d3\u0001\u0000\u0000\u0000\u01d5G\u0001\u0000\u0000\u0000\u01d6\u01d9"+ + "\u0003@ \u0000\u01d7\u01d9\u0003F#\u0000\u01d8\u01d6\u0001\u0000\u0000"+ + "\u0000\u01d8\u01d7\u0001\u0000\u0000\u0000\u01d9I\u0001\u0000\u0000\u0000"+ + "\u01da\u01db\u0005\t\u0000\u0000\u01db\u01dc\u0005\u001a\u0000\u0000\u01dc"+ + "K\u0001\u0000\u0000\u0000\u01dd\u01de\u0005\u000e\u0000\u0000\u01de\u01e3"+ + "\u0003N\'\u0000\u01df\u01e0\u0005!\u0000\u0000\u01e0\u01e2\u0003N\'\u0000"+ + "\u01e1\u01df\u0001\u0000\u0000\u0000\u01e2\u01e5\u0001\u0000\u0000\u0000"+ + "\u01e3\u01e1\u0001\u0000\u0000\u0000\u01e3\u01e4\u0001\u0000\u0000\u0000"+ + "\u01e4M\u0001\u0000\u0000\u0000\u01e5\u01e3\u0001\u0000\u0000\u0000\u01e6"+ + "\u01e8\u0003\n\u0005\u0000\u01e7\u01e9\u0007\u0004\u0000\u0000\u01e8\u01e7"+ + "\u0001\u0000\u0000\u0000\u01e8\u01e9\u0001\u0000\u0000\u0000\u01e9\u01ec"+ + 
"\u0001\u0000\u0000\u0000\u01ea\u01eb\u0005-\u0000\u0000\u01eb\u01ed\u0007"+ + "\u0005\u0000\u0000\u01ec\u01ea\u0001\u0000\u0000\u0000\u01ec\u01ed\u0001"+ + "\u0000\u0000\u0000\u01edO\u0001\u0000\u0000\u0000\u01ee\u01ef\u0005\b"+ + "\u0000\u0000\u01ef\u01f0\u0003>\u001f\u0000\u01f0Q\u0001\u0000\u0000\u0000"+ + "\u01f1\u01f2\u0005\u0002\u0000\u0000\u01f2\u01f3\u0003>\u001f\u0000\u01f3"+ + "S\u0001\u0000\u0000\u0000\u01f4\u01f5\u0005\u000b\u0000\u0000\u01f5\u01fa"+ + "\u0003V+\u0000\u01f6\u01f7\u0005!\u0000\u0000\u01f7\u01f9\u0003V+\u0000"+ + "\u01f8\u01f6\u0001\u0000\u0000\u0000\u01f9\u01fc\u0001\u0000\u0000\u0000"+ + "\u01fa\u01f8\u0001\u0000\u0000\u0000\u01fa\u01fb\u0001\u0000\u0000\u0000"+ + "\u01fbU\u0001\u0000\u0000\u0000\u01fc\u01fa\u0001\u0000\u0000\u0000\u01fd"+ + "\u01fe\u0003<\u001e\u0000\u01fe\u01ff\u0005T\u0000\u0000\u01ff\u0200\u0003"+ + "<\u001e\u0000\u0200W\u0001\u0000\u0000\u0000\u0201\u0202\u0005\u0001\u0000"+ + "\u0000\u0202\u0203\u0003\u0014\n\u0000\u0203\u0205\u0003j5\u0000\u0204"+ + "\u0206\u0003^/\u0000\u0205\u0204\u0001\u0000\u0000\u0000\u0205\u0206\u0001"+ + "\u0000\u0000\u0000\u0206Y\u0001\u0000\u0000\u0000\u0207\u0208\u0005\u0007"+ + "\u0000\u0000\u0208\u0209\u0003\u0014\n\u0000\u0209\u020a\u0003j5\u0000"+ + "\u020a[\u0001\u0000\u0000\u0000\u020b\u020c\u0005\n\u0000\u0000\u020c"+ + "\u020d\u0003:\u001d\u0000\u020d]\u0001\u0000\u0000\u0000\u020e\u0213\u0003"+ + "`0\u0000\u020f\u0210\u0005!\u0000\u0000\u0210\u0212\u0003`0\u0000\u0211"+ + "\u020f\u0001\u0000\u0000\u0000\u0212\u0215\u0001\u0000\u0000\u0000\u0213"+ + "\u0211\u0001\u0000\u0000\u0000\u0213\u0214\u0001\u0000\u0000\u0000\u0214"+ + "_\u0001\u0000\u0000\u0000\u0215\u0213\u0001\u0000\u0000\u0000\u0216\u0217"+ + "\u0003@ \u0000\u0217\u0218\u0005\u001f\u0000\u0000\u0218\u0219\u0003D"+ + "\"\u0000\u0219a\u0001\u0000\u0000\u0000\u021a\u021b\u0007\u0006\u0000"+ + "\u0000\u021bc\u0001\u0000\u0000\u0000\u021c\u021f\u0003f3\u0000\u021d"+ + 
"\u021f\u0003h4\u0000\u021e\u021c\u0001\u0000\u0000\u0000\u021e\u021d\u0001"+ + "\u0000\u0000\u0000\u021fe\u0001\u0000\u0000\u0000\u0220\u0222\u0007\u0000"+ + "\u0000\u0000\u0221\u0220\u0001\u0000\u0000\u0000\u0221\u0222\u0001\u0000"+ + "\u0000\u0000\u0222\u0223\u0001\u0000\u0000\u0000\u0223\u0224\u0005\u001b"+ + "\u0000\u0000\u0224g\u0001\u0000\u0000\u0000\u0225\u0227\u0007\u0000\u0000"+ + "\u0000\u0226\u0225\u0001\u0000\u0000\u0000\u0226\u0227\u0001\u0000\u0000"+ + "\u0000\u0227\u0228\u0001\u0000\u0000\u0000\u0228\u0229\u0005\u001a\u0000"+ + "\u0000\u0229i\u0001\u0000\u0000\u0000\u022a\u022b\u0005\u0019\u0000\u0000"+ + "\u022bk\u0001\u0000\u0000\u0000\u022c\u022d\u0007\u0007\u0000\u0000\u022d"+ + "m\u0001\u0000\u0000\u0000\u022e\u022f\u0005\u0005\u0000\u0000\u022f\u0230"+ + "\u0003p8\u0000\u0230o\u0001\u0000\u0000\u0000\u0231\u0232\u0005A\u0000"+ + "\u0000\u0232\u0233\u0003\u0002\u0001\u0000\u0233\u0234\u0005B\u0000\u0000"+ + "\u0234q\u0001\u0000\u0000\u0000\u0235\u0236\u0005\r\u0000\u0000\u0236"+ + "\u0237\u0005d\u0000\u0000\u0237s\u0001\u0000\u0000\u0000\u0238\u0239\u0005"+ + "\u0003\u0000\u0000\u0239\u023c\u0005Z\u0000\u0000\u023a\u023b\u0005X\u0000"+ + "\u0000\u023b\u023d\u0003<\u001e\u0000\u023c\u023a\u0001\u0000\u0000\u0000"+ + "\u023c\u023d\u0001\u0000\u0000\u0000\u023d\u0247\u0001\u0000\u0000\u0000"+ + "\u023e\u023f\u0005Y\u0000\u0000\u023f\u0244\u0003v;\u0000\u0240\u0241"+ + "\u0005!\u0000\u0000\u0241\u0243\u0003v;\u0000\u0242\u0240\u0001\u0000"+ + "\u0000\u0000\u0243\u0246\u0001\u0000\u0000\u0000\u0244\u0242\u0001\u0000"+ + "\u0000\u0000\u0244\u0245\u0001\u0000\u0000\u0000\u0245\u0248\u0001\u0000"+ + "\u0000\u0000\u0246\u0244\u0001\u0000\u0000\u0000\u0247\u023e\u0001\u0000"+ + "\u0000\u0000\u0247\u0248\u0001\u0000\u0000\u0000\u0248u\u0001\u0000\u0000"+ + "\u0000\u0249\u024a\u0003<\u001e\u0000\u024a\u024b\u0005\u001f\u0000\u0000"+ + "\u024b\u024d\u0001\u0000\u0000\u0000\u024c\u0249\u0001\u0000\u0000\u0000"+ + 
"\u024c\u024d\u0001\u0000\u0000\u0000\u024d\u024e\u0001\u0000\u0000\u0000"+ + "\u024e\u024f\u0003<\u001e\u0000\u024fw\u0001\u0000\u0000\u0000\u0250\u0251"+ + "\u0005\u0012\u0000\u0000\u0251\u0252\u0003$\u0012\u0000\u0252\u0253\u0005"+ + "X\u0000\u0000\u0253\u0254\u0003>\u001f\u0000\u0254y\u0001\u0000\u0000"+ + "\u0000\u0255\u0256\u0005\u0011\u0000\u0000\u0256\u0259\u00036\u001b\u0000"+ + "\u0257\u0258\u0005\u001c\u0000\u0000\u0258\u025a\u0003\u001e\u000f\u0000"+ + "\u0259\u0257\u0001\u0000\u0000\u0000\u0259\u025a\u0001\u0000\u0000\u0000"+ + "\u025a{\u0001\u0000\u0000\u0000;\u0087\u0090\u00a2\u00ae\u00b7\u00bf\u00c5"+ + "\u00cd\u00cf\u00d4\u00db\u00e0\u00eb\u00f1\u00f9\u00fb\u0106\u010d\u0118"+ + "\u011b\u0121\u012d\u0133\u013d\u0141\u0146\u0150\u0158\u0165\u0169\u016d"+ + "\u0174\u0178\u017f\u0186\u018d\u0195\u019d\u01a4\u01b5\u01c0\u01cb\u01d0"+ + "\u01d4\u01d8\u01e3\u01e8\u01ec\u01fa\u0205\u0213\u021e\u0221\u0226\u023c"+ + "\u0244\u0247\u024c\u0259"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index e2340df954674..556a97657635a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java @@ -512,6 +512,30 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *

    The default implementation does nothing.

    */ @Override public void exitStatsCommand(EsqlBaseParser.StatsCommandContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterAggFields(EsqlBaseParser.AggFieldsContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitAggFields(EsqlBaseParser.AggFieldsContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterAggField(EsqlBaseParser.AggFieldContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitAggField(EsqlBaseParser.AggFieldContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java index 99f038b14b5e0..56b6999615f50 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java @@ -307,6 +307,20 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im * {@link #visitChildren} on {@code ctx}.

    */ @Override public T visitStatsCommand(EsqlBaseParser.StatsCommandContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitAggFields(EsqlBaseParser.AggFieldsContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitAggField(EsqlBaseParser.AggFieldContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java index c6dcaca736e1f..cf658c4a73141 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java @@ -465,6 +465,26 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitStatsCommand(EsqlBaseParser.StatsCommandContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#aggFields}. + * @param ctx the parse tree + */ + void enterAggFields(EsqlBaseParser.AggFieldsContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#aggFields}. + * @param ctx the parse tree + */ + void exitAggFields(EsqlBaseParser.AggFieldsContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#aggField}. + * @param ctx the parse tree + */ + void enterAggField(EsqlBaseParser.AggFieldContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#aggField}. + * @param ctx the parse tree + */ + void exitAggField(EsqlBaseParser.AggFieldContext ctx); /** * Enter a parse tree produced by {@link EsqlBaseParser#qualifiedName}. 
* @param ctx the parse tree diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java index 310d3dc76dd6d..86c1d1aafc33a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java @@ -284,6 +284,18 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitStatsCommand(EsqlBaseParser.StatsCommandContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#aggFields}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitAggFields(EsqlBaseParser.AggFieldsContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#aggField}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitAggField(EsqlBaseParser.AggFieldContext ctx); /** * Visit a parse tree produced by {@link EsqlBaseParser#qualifiedName}. 
* @param ctx the parse tree diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index bcbd28aced939..7ff09c23a1403 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -26,6 +26,7 @@ import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; import org.elasticsearch.xpack.esql.core.expression.UnresolvedStar; +import org.elasticsearch.xpack.esql.core.expression.function.Function; import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MatchQueryPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; @@ -44,6 +45,7 @@ import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.expression.function.FunctionResolutionStrategy; import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction; +import org.elasticsearch.xpack.esql.expression.function.aggregate.FilteredExpression; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; @@ -742,9 +744,12 @@ private NamedExpression enrichFieldName(EsqlBaseParser.QualifiedNamePatternConte @Override public Alias visitField(EsqlBaseParser.FieldContext ctx) { + return visitField(ctx, source(ctx)); + } + + private Alias visitField(EsqlBaseParser.FieldContext ctx, Source source) { UnresolvedAttribute id = visitQualifiedName(ctx.qualifiedName()); Expression value = 
expression(ctx.booleanExpression()); - var source = source(ctx); String name = id == null ? source.text() : id.name(); return new Alias(source, name, value); } @@ -754,6 +759,36 @@ public List visitFields(EsqlBaseParser.FieldsContext ctx) { return ctx != null ? visitList(this, ctx.field(), Alias.class) : new ArrayList<>(); } + @Override + public NamedExpression visitAggField(EsqlBaseParser.AggFieldContext ctx) { + Source source = source(ctx); + Alias field = visitField(ctx.field(), source); + var filterExpression = ctx.booleanExpression(); + + if (filterExpression != null) { + Expression condition = expression(filterExpression); + Expression child = field.child(); + // basic check as the filter can be specified only on a function (should be an aggregate but we can't determine that yet) + if (field.child().anyMatch(Function.class::isInstance)) { + field = field.replaceChild(new FilteredExpression(field.source(), child, condition)); + } + // allow condition only per aggregated function + else { + throw new ParsingException( + condition.source(), + "WHERE clause allowed only for aggregate functions [{}]", + field.sourceText() + ); + } + } + return field; + } + + @Override + public List visitAggFields(EsqlBaseParser.AggFieldsContext ctx) { + return ctx != null ? visitList(this, ctx.aggField(), Alias.class) : new ArrayList<>(); + } + /** * Similar to {@link #visitFields(EsqlBaseParser.FieldsContext)} however avoids wrapping the expression * into an Alias. 
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index c90c3cba4ef24..dc913cd2f14f4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -298,13 +298,12 @@ public PlanFactory visitStatsCommand(EsqlBaseParser.StatsCommandContext ctx) { return input -> new Aggregate(source(ctx), input, Aggregate.AggregateType.STANDARD, stats.groupings, stats.aggregates); } - private record Stats(List groupings, List aggregates) { + private record Stats(List groupings, List aggregates) {} - } - - private Stats stats(Source source, EsqlBaseParser.FieldsContext groupingsCtx, EsqlBaseParser.FieldsContext aggregatesCtx) { + private Stats stats(Source source, EsqlBaseParser.FieldsContext groupingsCtx, EsqlBaseParser.AggFieldsContext aggregatesCtx) { List groupings = visitGrouping(groupingsCtx); - List aggregates = new ArrayList<>(visitFields(aggregatesCtx)); + List aggregates = new ArrayList<>(visitAggFields(aggregatesCtx)); + if (aggregates.isEmpty() && groupings.isEmpty()) { throw new ParsingException(source, "At least one aggregation or grouping expression required in [{}]", source.text()); } @@ -341,9 +340,11 @@ public PlanFactory visitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandCont if (false == EsqlPlugin.INLINESTATS_FEATURE_FLAG.isEnabled()) { throw new ParsingException(source(ctx), "INLINESTATS command currently requires a snapshot build"); } - List aggregates = new ArrayList<>(visitFields(ctx.stats)); + List aggFields = visitAggFields(ctx.stats); + List aggregates = new ArrayList<>(aggFields); List groupings = visitGrouping(ctx.grouping); aggregates.addAll(groupings); + // TODO: add support for filters return input -> new InlineStats(source(ctx), input, new 
ArrayList<>(groupings), aggregates); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java index 8445c8236c45a..3b7240dcd693b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java @@ -59,6 +59,7 @@ static AggregateType readType(StreamInput in) throws IOException { private final AggregateType aggregateType; private final List groupings; private final List aggregates; + private List lazyOutput; public Aggregate( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java index 0e71963e29270..94a9246a56f83 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java @@ -10,10 +10,12 @@ import org.elasticsearch.compute.aggregation.Aggregator; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.compute.aggregation.FilteredAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.GroupingAggregator; import org.elasticsearch.compute.aggregation.blockhash.BlockHash; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.operator.AggregationOperator; +import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.HashAggregationOperator.HashAggregationOperatorFactory; import org.elasticsearch.compute.operator.Operator; import 
org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; @@ -24,6 +26,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.evaluator.EvalMapper; import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; @@ -231,11 +234,14 @@ private void aggregatesToFactory( boolean grouping, Consumer consumer ) { + // extract filtering channels - and wrap the aggregation with the new evaluator expression only during the init phase for (NamedExpression ne : aggregates) { + // a filter can only appear on aggregate function, not on the grouping columns + if (ne instanceof Alias alias) { var child = alias.child(); if (child instanceof AggregateFunction aggregateFunction) { - List sourceAttr; + List sourceAttr = new ArrayList<>(); if (mode == AggregatorMode.INITIAL) { // TODO: this needs to be made more reliable - use casting to blow up when dealing with expressions (e+1) @@ -251,19 +257,22 @@ private void aggregatesToFactory( ); } } else { - sourceAttr = aggregateFunction.inputExpressions().stream().map(e -> { - Attribute attr = Expressions.attribute(e); + // extra dependencies like TS ones (that require a timestamp) + for (Expression input : aggregateFunction.references()) { + Attribute attr = Expressions.attribute(input); if (attr == null) { throw new EsqlIllegalArgumentException( "Cannot work with target field [{}] for agg [{}]", - e.sourceText(), + input.sourceText(), aggregateFunction.sourceText() ); } - return attr; - }).toList(); + sourceAttr.add(attr); + } } - } else if (mode == AggregatorMode.FINAL || mode == AggregatorMode.INTERMEDIATE) { + } + // coordinator/exchange phase + else if (mode == AggregatorMode.FINAL || mode == 
AggregatorMode.INTERMEDIATE) { if (grouping) { sourceAttr = aggregateMapper.mapGrouping(aggregateFunction); } else { @@ -274,16 +283,27 @@ private void aggregatesToFactory( } List inputChannels = sourceAttr.stream().map(attr -> layout.get(attr.id()).channel()).toList(); assert inputChannels.stream().allMatch(i -> i >= 0) : inputChannels; - if (aggregateFunction instanceof ToAggregator agg) { - consumer.accept(new AggFunctionSupplierContext(agg.supplier(inputChannels), mode)); - } else { - throw new EsqlIllegalArgumentException("aggregate functions must extend ToAggregator"); + + AggregatorFunctionSupplier aggSupplier = supplier(aggregateFunction, inputChannels); + + // apply the filter only in the initial phase - as the rest of the data is already filtered + if (aggregateFunction.hasFilter() && mode.isInputPartial() == false) { + EvalOperator.ExpressionEvaluator.Factory evalFactory = EvalMapper.toEvaluator(aggregateFunction.filter(), layout); + aggSupplier = new FilteredAggregatorFunctionSupplier(aggSupplier, evalFactory); } + consumer.accept(new AggFunctionSupplierContext(aggSupplier, mode)); } } } } + private static AggregatorFunctionSupplier supplier(AggregateFunction aggregateFunction, List inputChannels) { + if (aggregateFunction instanceof ToAggregator delegate) { + return delegate.supplier(inputChannels); + } + throw new EsqlIllegalArgumentException("aggregate functions must extend ToAggregator"); + } + private record GroupSpec(Integer channel, Attribute attribute) { BlockHash.GroupSpec toHashGroupSpec() { if (channel == null) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java index 13ce9ba77cc71..c322135198262 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java @@ 
-98,39 +98,39 @@ private record AggDef(Class aggClazz, String type, String extra, boolean grou .collect(Collectors.toUnmodifiableMap(aggDef -> aggDef, AggregateMapper::lookupIntermediateState)); /** Cache of aggregates to intermediate expressions. */ - private final HashMap> cache; + private final HashMap> cache; AggregateMapper() { cache = new HashMap<>(); } - public List mapNonGrouping(List aggregates) { + public List mapNonGrouping(List aggregates) { return doMapping(aggregates, false); } - public List mapNonGrouping(Expression aggregate) { + public List mapNonGrouping(Expression aggregate) { return map(aggregate, false).toList(); } - public List mapGrouping(List aggregates) { + public List mapGrouping(List aggregates) { return doMapping(aggregates, true); } - private List doMapping(List aggregates, boolean grouping) { + private List doMapping(List aggregates, boolean grouping) { AttributeMap attrToExpressions = new AttributeMap<>(); aggregates.stream().flatMap(agg -> map(agg, grouping)).forEach(ne -> attrToExpressions.put(ne.toAttribute(), ne)); return attrToExpressions.values().stream().toList(); } - public List mapGrouping(Expression aggregate) { + public List mapGrouping(Expression aggregate) { return map(aggregate, true).toList(); } - private Stream map(Expression aggregate, boolean grouping) { + private Stream map(Expression aggregate, boolean grouping) { return cache.computeIfAbsent(Alias.unwrap(aggregate), aggKey -> computeEntryForAgg(aggKey, grouping)).stream(); } - private static List computeEntryForAgg(Expression aggregate, boolean grouping) { + private static List computeEntryForAgg(Expression aggregate, boolean grouping) { var aggDef = aggDefOrNull(aggregate, grouping); if (aggDef != null) { var is = getNonNull(aggDef); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index f881c0e1a9bba..ce072e7b0a438 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -67,10 +67,7 @@ import org.elasticsearch.xpack.esql.optimizer.LocalPhysicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer; -import org.elasticsearch.xpack.esql.optimizer.PhysicalOptimizerContext; -import org.elasticsearch.xpack.esql.optimizer.PhysicalPlanOptimizer; import org.elasticsearch.xpack.esql.optimizer.TestLocalPhysicalPlanOptimizer; -import org.elasticsearch.xpack.esql.optimizer.TestPhysicalPlanOptimizer; import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; @@ -167,7 +164,6 @@ public class CsvTests extends ESTestCase { private final EsqlFunctionRegistry functionRegistry = new EsqlFunctionRegistry(); private final EsqlParser parser = new EsqlParser(); private final Mapper mapper = new Mapper(functionRegistry); - private final PhysicalPlanOptimizer physicalPlanOptimizer = new TestPhysicalPlanOptimizer(new PhysicalOptimizerContext(configuration)); private ThreadPool threadPool; private Executor executor; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 6644f9b17055e..d365ee3bb2e51 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -1219,7 +1219,7 @@ public void testAggsOverGroupingKey() throws Exception { assertThat(output, hasSize(2)); var aggs = agg.aggregates(); var min = as(Alias.unwrap(aggs.get(0)), Min.class); - assertThat(min.arguments(), hasSize(1)); + 
assertThat(min.arguments(), hasSize(2)); // field + filter var group = Alias.unwrap(agg.groupings().get(0)); assertEquals(min.arguments().get(0), group); } @@ -1241,7 +1241,7 @@ public void testAggsOverGroupingKeyWithAlias() throws Exception { assertThat(output, hasSize(2)); var aggs = agg.aggregates(); var min = as(Alias.unwrap(aggs.get(0)), Min.class); - assertThat(min.arguments(), hasSize(1)); + assertThat(min.arguments(), hasSize(2)); // field + filter assertEquals(Expressions.attribute(min.arguments().get(0)), Expressions.attribute(agg.groupings().get(0))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index ecf012718eaf8..63f7629f3c720 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -360,6 +360,40 @@ public void testAggsInsideGrouping() { ); } + public void testAggFilterOnNonAggregates() { + assertEquals( + "1:36: WHERE clause allowed only for aggregate functions, none found in [emp_no + 1 where languages > 1]", + error("from test | stats emp_no + 1 where languages > 1 by emp_no") + ); + assertEquals( + "1:53: WHERE clause allowed only for aggregate functions, none found in [abs(emp_no + languages) % 2 WHERE languages > 1]", + error("from test | stats abs(emp_no + languages) % 2 WHERE languages > 1 by emp_no, languages") + ); + } + + public void testAggFilterOnBucketingOrAggFunctions() { + // query passes when the bucket function is part of the BY clause + query("from test | stats max(languages) WHERE bucket(salary, 10) > 1 by bucket(salary, 10)"); + + // but fails if it's different + assertEquals( + "1:40: can only use grouping function [bucket(salary, 10)] part of the BY clause", + error("from test | stats max(languages) WHERE bucket(salary, 10) > 1 by 
emp_no") + ); + + assertEquals( + "1:40: cannot use aggregate function [max(salary)] in aggregate WHERE clause [max(languages) WHERE max(salary) > 1]", + error("from test | stats max(languages) WHERE max(salary) > 1 by emp_no") + ); + + assertEquals( + "1:40: cannot use aggregate function [max(salary)] in aggregate WHERE clause [max(languages) WHERE max(salary) + 2 > 1]", + error("from test | stats max(languages) WHERE max(salary) + 2 > 1 by emp_no") + ); + + assertEquals("1:60: Unknown column [m]", error("from test | stats m = max(languages), min(languages) WHERE m + 2 > 1 by emp_no")); + } + public void testGroupingInsideAggsAsAgg() { assertEquals( "1:18: can only use grouping function [bucket(emp_no, 5.)] part of the BY clause", @@ -1507,6 +1541,10 @@ public void testToDatePeriodToTimeDurationWithInvalidType() { ); } + private void query(String query) { + defaultAnalyzer.analyze(parser.createStatement(query)); + } + private String error(String query) { return error(query, defaultAnalyzer); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/RateSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/RateSerializationTests.java index 94b2a81b308d7..ea7c480817317 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/RateSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/RateSerializationTests.java @@ -36,4 +36,9 @@ protected Rate mutateInstance(Rate instance) throws IOException { } return new Rate(source, field, timestamp, unit); } + + @Override + protected boolean alwaysEmptySource() { + return true; + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopSerializationTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopSerializationTests.java index 82bf57d1a194e..e74b26c87c84f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopSerializationTests.java @@ -36,4 +36,9 @@ protected Top mutateInstance(Top instance) throws IOException { } return new Top(source, field, limit, order); } + + @Override + protected boolean alwaysEmptySource() { + return true; + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index c05b5dd165485..8d7c1997f78e3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -537,6 +537,24 @@ public void testCombineProjectionWithDuplicateAggregation() { assertThat(Expressions.names(agg.groupings()), contains("last_name", "first_name")); } + /** + * Limit[1000[INTEGER]] + * \_Aggregate[STANDARD,[],[SUM(salary{f}#12,true[BOOLEAN]) AS sum(salary), SUM(salary{f}#12,last_name{f}#11 == [44 6f 65][KEYW + * ORD]) AS sum(salary) WheRe last_name == "Doe"]] + * \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] 
+ */ + public void testStatsWithFilteringDefaultAliasing() { + var plan = plan(""" + from test + | stats sum(salary), sum(salary) WheRe last_name == "Doe" + """); + + var limit = as(plan, Limit.class); + var agg = as(limit.child(), Aggregate.class); + assertThat(agg.aggregates(), hasSize(2)); + assertThat(Expressions.names(agg.aggregates()), contains("sum(salary)", "sum(salary) WheRe last_name == \"Doe\"")); + } + public void testQlComparisonOptimizationsApply() { var plan = plan(""" from test diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java index 80a2d49d0d94a..67b4dd71260aa 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java @@ -208,7 +208,7 @@ public void testParenthesizedExpression() { } public void testCommandNamesAsIdentifiers() { - Expression expr = whereExpression("from and where"); + Expression expr = whereExpression("from and limit"); assertThat(expr, instanceOf(And.class)); And and = (And) expr; @@ -216,7 +216,7 @@ public void testCommandNamesAsIdentifiers() { assertThat(((UnresolvedAttribute) and.left()).name(), equalTo("from")); assertThat(and.right(), instanceOf(UnresolvedAttribute.class)); - assertThat(((UnresolvedAttribute) and.right()).name(), equalTo("where")); + assertThat(((UnresolvedAttribute) and.right()).name(), equalTo("limit")); } public void testIdentifiersCaseSensitive() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 53621a79aedac..c797f426d2ae5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -20,14 +20,18 @@ import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.Order; import org.elasticsearch.xpack.esql.expression.UnresolvedNamePattern; import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction; +import org.elasticsearch.xpack.esql.expression.function.aggregate.FilteredExpression; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThanOrEqual; @@ -321,6 +325,61 @@ public void testAggsWithGroupKeyAsAgg() throws Exception { } } + public void testStatsWithGroupKeyAndAggFilter() throws Exception { + var a = attribute("a"); + var f = new UnresolvedFunction(EMPTY, "min", DEFAULT, List.of(a)); + var filter = new Alias(EMPTY, "min(a) where a > 1", new FilteredExpression(EMPTY, f, new GreaterThan(EMPTY, a, integer(1)))); + assertEquals( + new Aggregate(EMPTY, PROCESSING_CMD_INPUT, 
Aggregate.AggregateType.STANDARD, List.of(a), List.of(filter, a)), + processingCommand("stats min(a) where a > 1 by a") + ); + } + + public void testStatsWithGroupKeyAndMixedAggAndFilter() throws Exception { + var a = attribute("a"); + var min = new UnresolvedFunction(EMPTY, "min", DEFAULT, List.of(a)); + var max = new UnresolvedFunction(EMPTY, "max", DEFAULT, List.of(a)); + var avg = new UnresolvedFunction(EMPTY, "avg", DEFAULT, List.of(a)); + var min_alias = new Alias(EMPTY, "min", min); + + var max_filter_ex = new Or( + EMPTY, + new GreaterThan(EMPTY, new Mod(EMPTY, a, integer(3)), integer(10)), + new GreaterThan(EMPTY, new Div(EMPTY, a, integer(2)), integer(100)) + ); + var max_filter = new Alias(EMPTY, "max", new FilteredExpression(EMPTY, max, max_filter_ex)); + + var avg_filter_ex = new GreaterThan(EMPTY, new Div(EMPTY, a, integer(2)), integer(100)); + var avg_filter = new Alias(EMPTY, "avg", new FilteredExpression(EMPTY, avg, avg_filter_ex)); + + assertEquals( + new Aggregate( + EMPTY, + PROCESSING_CMD_INPUT, + Aggregate.AggregateType.STANDARD, + List.of(a), + List.of(min_alias, max_filter, avg_filter, a) + ), + processingCommand(""" + stats + min = min(a), + max = max(a) WHERE (a % 3 > 10 OR a / 2 > 100), + avg = avg(a) WHERE a / 2 > 100 + BY a + """) + ); + } + + public void testStatsWithoutGroupKeyMixedAggAndFilter() throws Exception { + var a = attribute("a"); + var f = new UnresolvedFunction(EMPTY, "min", DEFAULT, List.of(a)); + var filter = new Alias(EMPTY, "min(a) where a > 1", new FilteredExpression(EMPTY, f, new GreaterThan(EMPTY, a, integer(1)))); + assertEquals( + new Aggregate(EMPTY, PROCESSING_CMD_INPUT, Aggregate.AggregateType.STANDARD, List.of(), List.of(filter)), + processingCommand("stats min(a) where a > 1") + ); + } + public void testInlineStatsWithGroups() { var query = "inlinestats b = min(a) by c, d.e"; if (Build.current().isSnapshot() == false) { diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java index d186b4c199d77..7075c9fe58d63 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java @@ -21,6 +21,8 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; +import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttributeTests; import org.elasticsearch.xpack.esql.core.expression.UnresolvedNamedExpression; @@ -164,6 +166,16 @@ public void testInfoParameters() throws Exception { * in the parameters and not included. */ expectedCount -= 1; + + // special exceptions with private constructors + if (MetadataAttribute.class.equals(subclass) || ReferenceAttribute.class.equals(subclass)) { + expectedCount++; + } + + if (FieldAttribute.class.equals(subclass)) { + expectedCount += 2; + } + assertEquals(expectedCount, info(node).properties().size()); } @@ -174,6 +186,9 @@ public void testInfoParameters() throws Exception { * implementations in the process. 
*/ public void testTransform() throws Exception { + if (FieldAttribute.class.equals(subclass)) { + assumeTrue("FieldAttribute private constructor", false); + } Constructor ctor = longestCtor(subclass); Object[] nodeCtorArgs = ctorArgs(ctor); T node = ctor.newInstance(nodeCtorArgs); From 4eda4a332a9b0e871563e18c609e65addd356c19 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Tue, 15 Oct 2024 09:40:57 +0200 Subject: [PATCH 092/449] Skip spatial.AirportsSortCityName before 8.13 (#114795) Fix https://github.com/elastic/elasticsearch/issues/114767. TopN didn't work in this scenario on old versions. --- .../esql/qa/testFixtures/src/main/resources/spatial.csv-spec | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index 4c40808a4ff96..b72c8bcb05ae9 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -1203,7 +1203,7 @@ count:long | country:k 1 | Poland ; -airportsSortCityName +airportsSortCityName#[skip:-8.13.3, reason:fixed in 8.13] FROM airports | SORT abbrev | LIMIT 5 From 162dce55a6221786c48b9f97630a8ce2d3ce5c53 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 15 Oct 2024 08:47:50 +0100 Subject: [PATCH 093/449] Remove unused v7-only APIs (#114733) Removes several REST endpoints that only existed in the now-inaccessible v7-compatible mode. 
--- .../RestAddVotingConfigExclusionAction.java | 16 +---- .../cluster/RestNodesHotThreadsAction.java | 59 +------------------ 2 files changed, 2 insertions(+), 73 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestAddVotingConfigExclusionAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestAddVotingConfigExclusionAction.java index c8eb80cdcfdd7..e66b7d2b0b1a4 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestAddVotingConfigExclusionAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestAddVotingConfigExclusionAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.admin.cluster.configuration.TransportAddVotingConfigExclusionsAction; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -28,10 +27,6 @@ public class RestAddVotingConfigExclusionAction extends BaseRestHandler { private static final TimeValue DEFAULT_TIMEOUT = TimeValue.timeValueSeconds(30L); - private static final String DEPRECATION_MESSAGE = "POST /_cluster/voting_config_exclusions/{node_name} " - + "has been removed. You must use POST /_cluster/voting_config_exclusions?node_ids=... " - + "or POST /_cluster/voting_config_exclusions?node_names=... 
instead."; - @Override public String getName() { return "add_voting_config_exclusions_action"; @@ -39,12 +34,7 @@ public String getName() { @Override public List routes() { - return List.of( - new Route(POST, "/_cluster/voting_config_exclusions"), - Route.builder(POST, "/_cluster/voting_config_exclusions/{node_name}") - .deprecated(DEPRECATION_MESSAGE, RestApiVersion.V_7) - .build() - ); + return List.of(new Route(POST, "/_cluster/voting_config_exclusions")); } @Override @@ -66,10 +56,6 @@ static AddVotingConfigExclusionsRequest resolveVotingConfigExclusionsRequest(fin String nodeIds = null; String nodeNames = null; - if (request.getRestApiVersion() == RestApiVersion.V_7 && request.hasParam("node_name")) { - throw new IllegalArgumentException("[node_name] has been removed, you must set [node_names] or [node_ids]"); - } - if (request.hasParam("node_ids")) { nodeIds = request.param("node_ids"); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesHotThreadsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesHotThreadsAction.java index e84ed3d59be1d..1302247a813f7 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesHotThreadsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesHotThreadsAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.admin.cluster.node.hotthreads.TransportNodesHotThreadsAction; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.monitor.jvm.HotThreads; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -26,7 +25,6 @@ import java.io.IOException; import java.util.List; -import java.util.Locale; import static org.elasticsearch.rest.ChunkedRestResponseBodyPart.fromTextChunks; import static org.elasticsearch.rest.RestRequest.Method.GET; @@ 
-36,64 +34,9 @@ @ServerlessScope(Scope.INTERNAL) public class RestNodesHotThreadsAction extends BaseRestHandler { - private static final String formatDeprecatedMessageWithoutNodeID = "[%s] is a deprecated endpoint. " - + "Please use [/_nodes/hot_threads] instead."; - private static final String formatDeprecatedMessageWithNodeID = "[%s] is a deprecated endpoint. " - + "Please use [/_nodes/{nodeId}/hot_threads] instead."; - private static final String DEPRECATED_MESSAGE_CLUSTER_NODES_HOT_THREADS = String.format( - Locale.ROOT, - formatDeprecatedMessageWithoutNodeID, - "/_cluster/nodes/hot_threads" - ); - private static final String DEPRECATED_MESSAGE_CLUSTER_NODES_NODEID_HOT_THREADS = String.format( - Locale.ROOT, - formatDeprecatedMessageWithNodeID, - "/_cluster/nodes/{nodeId}/hot_threads" - ); - private static final String DEPRECATED_MESSAGE_CLUSTER_NODES_HOTTHREADS = String.format( - Locale.ROOT, - formatDeprecatedMessageWithoutNodeID, - "/_cluster/nodes/hotthreads" - ); - private static final String DEPRECATED_MESSAGE_CLUSTER_NODES_NODEID_HOTTHREADS = String.format( - Locale.ROOT, - formatDeprecatedMessageWithNodeID, - "/_cluster/nodes/{nodeId}/hotthreads" - ); - private static final String DEPRECATED_MESSAGE_NODES_HOTTHREADS = String.format( - Locale.ROOT, - formatDeprecatedMessageWithoutNodeID, - "/_nodes/hotthreads" - ); - private static final String DEPRECATED_MESSAGE_NODES_NODEID_HOTTHREADS = String.format( - Locale.ROOT, - formatDeprecatedMessageWithNodeID, - "/_nodes/{nodeId}/hotthreads" - ); - @Override public List routes() { - return List.of( - new Route(GET, "/_nodes/hot_threads"), - new Route(GET, "/_nodes/{nodeId}/hot_threads"), - - Route.builder(GET, "/_cluster/nodes/hot_threads") - .deprecated(DEPRECATED_MESSAGE_CLUSTER_NODES_HOT_THREADS, RestApiVersion.V_7) - .build(), - Route.builder(GET, "/_cluster/nodes/{nodeId}/hot_threads") - .deprecated(DEPRECATED_MESSAGE_CLUSTER_NODES_NODEID_HOT_THREADS, RestApiVersion.V_7) - .build(), - Route.builder(GET, 
"/_cluster/nodes/hotthreads") - .deprecated(DEPRECATED_MESSAGE_CLUSTER_NODES_HOTTHREADS, RestApiVersion.V_7) - .build(), - Route.builder(GET, "/_cluster/nodes/{nodeId}/hotthreads") - .deprecated(DEPRECATED_MESSAGE_CLUSTER_NODES_NODEID_HOTTHREADS, RestApiVersion.V_7) - .build(), - Route.builder(GET, "/_nodes/hotthreads").deprecated(DEPRECATED_MESSAGE_NODES_HOTTHREADS, RestApiVersion.V_7).build(), - Route.builder(GET, "/_nodes/{nodeId}/hotthreads") - .deprecated(DEPRECATED_MESSAGE_NODES_NODEID_HOTTHREADS, RestApiVersion.V_7) - .build() - ); + return List.of(new Route(GET, "/_nodes/hot_threads"), new Route(GET, "/_nodes/{nodeId}/hot_threads")); } @Override From 467634147987af61b0afd054ca502857e2044246 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 15 Oct 2024 19:20:15 +1100 Subject: [PATCH 094/449] Mute org.elasticsearch.xpack.eql.EqlRestIT testUnicodeChars #114791 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 7222255fbd7a4..9a84370de121f 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -422,6 +422,9 @@ tests: - class: org.elasticsearch.xpack.eql.EqlStatsIT method: testEqlRestUsage issue: https://github.com/elastic/elasticsearch/issues/114790 +- class: org.elasticsearch.xpack.eql.EqlRestIT + method: testUnicodeChars + issue: https://github.com/elastic/elasticsearch/issues/114791 # Examples: # From 98c9a70cc1eb9b13498a2a3f98047bf174e4edc8 Mon Sep 17 00:00:00 2001 From: Luke Whiting Date: Tue, 15 Oct 2024 09:58:32 +0100 Subject: [PATCH 095/449] [TestFix] ExplainLifecycleIT testStepInfoPreservedOnAutoRetry failing (#114294) * Extend timeout of test and add logging on fail * Unmute unstable test * Switch to using logger for output Keeps the forbiddenApis check happy * Switch to using assertion messages to display To display debug info * Adjust logic of previous step info preservation Add additional checks to ensure previous step 
info can't be cleared when auto retrying, only updated with new info. Also added logic to ensure previous step info is cleared when transitioning to a new action * Undo accidentally added lines from merge --- .../xpack/ilm/ExplainLifecycleIT.java | 11 +++++++---- .../xpack/ilm/IndexLifecycleTransition.java | 16 +++++++++++----- 2 files changed, 18 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/ExplainLifecycleIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/ExplainLifecycleIT.java index ec8f7c230b1d3..9b7262e8f9b32 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/ExplainLifecycleIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/ExplainLifecycleIT.java @@ -34,6 +34,7 @@ import java.util.HashMap; import java.util.Locale; import java.util.Map; +import java.util.concurrent.TimeUnit; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.TimeSeriesRestDriver.createFullPolicy; @@ -307,14 +308,16 @@ public void testStepInfoPreservedOnAutoRetry() throws Exception { assertBusy(() -> { Map explainIndex = explainIndex(client(), indexName); - assertThat(explainIndex.get("failed_step_retry_count"), notNullValue()); - assertThat(explainIndex.get("previous_step_info"), notNullValue()); - assertThat((int) explainIndex.get("failed_step_retry_count"), greaterThan(0)); + var assertionMessage = "Assertion failed for the following response: " + explainIndex; + assertThat(assertionMessage, explainIndex.get("failed_step_retry_count"), notNullValue()); + assertThat(assertionMessage, explainIndex.get("previous_step_info"), notNullValue()); + assertThat(assertionMessage, (int) explainIndex.get("failed_step_retry_count"), greaterThan(0)); assertThat( + assertionMessage, explainIndex.get("previous_step_info").toString(), 
containsString("rollover_alias [" + aliasName + "] does not point to index [" + indexName + "]") ); - }); + }, 30, TimeUnit.SECONDS); } private void assertUnmanagedIndex(Map explainIndexMap) { diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleTransition.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleTransition.java index b3f29535020bf..2499cd92113c2 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleTransition.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleTransition.java @@ -137,7 +137,8 @@ static ClusterState moveClusterStateToStep( lifecycleState, newStepKey, nowSupplier, - forcePhaseDefinitionRefresh + forcePhaseDefinitionRefresh, + true ); return LifecycleExecutionStateUtils.newClusterStateWithLifecycleState(state, idxMeta.getIndex(), newLifecycleState); @@ -175,6 +176,7 @@ static ClusterState moveClusterStateToErrorStep( currentState, new Step.StepKey(currentStep.phase(), currentStep.action(), ErrorStep.NAME), nowSupplier, + false, false ); @@ -243,7 +245,8 @@ static ClusterState moveClusterStateToPreviouslyFailedStep( lifecycleState, nextStepKey, nowSupplier, - forcePhaseDefinitionRefresh + forcePhaseDefinitionRefresh, + false ); LifecycleExecutionState.Builder retryStepState = LifecycleExecutionState.builder(nextStepState); @@ -277,7 +280,8 @@ private static LifecycleExecutionState updateExecutionStateToStep( LifecycleExecutionState existingState, Step.StepKey newStep, LongSupplier nowSupplier, - boolean forcePhaseDefinitionRefresh + boolean forcePhaseDefinitionRefresh, + boolean allowNullPreviousStepInfo ) { Step.StepKey currentStep = Step.getCurrentStepKey(existingState); long nowAsMillis = nowSupplier.getAsLong(); @@ -289,7 +293,9 @@ private static LifecycleExecutionState updateExecutionStateToStep( // clear any step info or error-related settings from the current step updatedState.setFailedStep(null); - 
updatedState.setPreviousStepInfo(existingState.stepInfo()); + if (allowNullPreviousStepInfo || existingState.stepInfo() != null) { + updatedState.setPreviousStepInfo(existingState.stepInfo()); + } updatedState.setStepInfo(null); updatedState.setIsAutoRetryableError(null); updatedState.setFailedStepRetryCount(null); @@ -390,7 +396,7 @@ public static LifecycleExecutionState moveStateToNextActionAndUpdateCachedPhase( updatedState.setStep(nextStep.name()); updatedState.setStepTime(nowAsMillis); updatedState.setFailedStep(null); - updatedState.setPreviousStepInfo(existingState.stepInfo()); + updatedState.setPreviousStepInfo(null); updatedState.setStepInfo(null); updatedState.setIsAutoRetryableError(null); updatedState.setFailedStepRetryCount(null); From 44df0625db568e33098d26da3b1c970715d76d87 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 15 Oct 2024 10:53:42 +0100 Subject: [PATCH 096/449] Remove v7 compat from `{PUT,DELETE} /_snapshot/${REPO}` APIs (#114726) This exception mangling only existed for v7 API compatibility and is no longer needed. 
--- .../repositories/RepositoriesService.java | 3 +-- .../RepositoryConflictException.java | 16 +++++-------- .../cluster/RestDeleteRepositoryAction.java | 23 +++++-------------- .../cluster/RestPutRepositoryAction.java | 23 +++++-------------- 4 files changed, 19 insertions(+), 46 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java index fc39d2d2d80a4..2b95fbc69199f 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java @@ -937,8 +937,7 @@ private static boolean indexSettingsMatchRepositoryMetadata(IndexMetadata indexM private static RepositoryConflictException newRepositoryConflictException(String repository, String reason) { return new RepositoryConflictException( repository, - "trying to modify or unregister repository that is currently used (" + reason + ')', - "trying to modify or unregister repository [" + repository + "] that is currently used (" + reason + ')' + "trying to modify or unregister repository that is currently used (" + reason + ')' ); } diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoryConflictException.java b/server/src/main/java/org/elasticsearch/repositories/RepositoryConflictException.java index ee4ffa86a2475..15a6b0d3791d8 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoryConflictException.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoryConflictException.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -19,11 +20,8 @@ * Repository conflict exception */ public class RepositoryConflictException extends RepositoryException { - 
private final String backwardCompatibleMessage; - - public RepositoryConflictException(String repository, String message, String backwardCompatibleMessage) { + public RepositoryConflictException(String repository, String message) { super(repository, message); - this.backwardCompatibleMessage = backwardCompatibleMessage; } @Override @@ -31,18 +29,16 @@ public RestStatus status() { return RestStatus.CONFLICT; } - public String getBackwardCompatibleMessage() { - return backwardCompatibleMessage; - } - + @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // drop unneeded string from wire format public RepositoryConflictException(StreamInput in) throws IOException { super(in); - this.backwardCompatibleMessage = in.readString(); + in.readString(); } @Override + @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // drop unneeded string from wire format protected void writeTo(StreamOutput out, Writer nestedExceptionsWriter) throws IOException { super.writeTo(out, nestedExceptionsWriter); - out.writeString(backwardCompatibleMessage); + out.writeString(""); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteRepositoryAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteRepositoryAction.java index d2f5ca31ae3d1..c512db85d3023 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteRepositoryAction.java @@ -10,10 +10,7 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.repositories.RepositoryConflictException; import 
org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -45,19 +42,11 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - String name = request.param("repository"); - final var deleteRepositoryRequest = new DeleteRepositoryRequest(getMasterNodeTimeout(request), getAckTimeout(request), name); - return channel -> client.admin() - .cluster() - .deleteRepository( - deleteRepositoryRequest, - new RestToXContentListener(channel).delegateResponse((delegate, err) -> { - if (request.getRestApiVersion().equals(RestApiVersion.V_7) && err instanceof RepositoryConflictException) { - delegate.onFailure(new IllegalStateException(((RepositoryConflictException) err).getBackwardCompatibleMessage())); - } else { - delegate.onFailure(err); - } - }) - ); + final var deleteRepositoryRequest = new DeleteRepositoryRequest( + getMasterNodeTimeout(request), + getAckTimeout(request), + request.param("repository") + ); + return channel -> client.admin().cluster().deleteRepository(deleteRepositoryRequest, new RestToXContentListener<>(channel)); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutRepositoryAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutRepositoryAction.java index 6f913932f4335..7419a589890e8 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutRepositoryAction.java @@ -10,10 +10,7 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; -import 
org.elasticsearch.repositories.RepositoryConflictException; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -47,23 +44,15 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - String name = request.param("repository"); - final var putRepositoryRequest = new PutRepositoryRequest(getMasterNodeTimeout(request), getAckTimeout(request), name); + final var putRepositoryRequest = new PutRepositoryRequest( + getMasterNodeTimeout(request), + getAckTimeout(request), + request.param("repository") + ); try (XContentParser parser = request.contentParser()) { putRepositoryRequest.source(parser.mapOrdered()); } putRepositoryRequest.verify(request.paramAsBoolean("verify", true)); - return channel -> client.admin() - .cluster() - .putRepository( - putRepositoryRequest, - new RestToXContentListener(channel).delegateResponse((delegate, err) -> { - if (request.getRestApiVersion().equals(RestApiVersion.V_7) && err instanceof RepositoryConflictException) { - delegate.onFailure(new IllegalStateException(((RepositoryConflictException) err).getBackwardCompatibleMessage())); - } else { - delegate.onFailure(err); - } - }) - ); + return channel -> client.admin().cluster().putRepository(putRepositoryRequest, new RestToXContentListener<>(channel)); } } From 551a7d6d9434fac95c582475ee1b3219f6376715 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mariusz=20J=C3=B3zala?= <377355+jozala@users.noreply.github.com> Date: Tue, 15 Oct 2024 11:59:49 +0200 Subject: [PATCH 097/449] [TEST] Migrated ccs-unavailable-clusters QA tests (#114764) Ccs-unavailable-clusters QA tests migrated to the new REST testing framework, using 'elasticsearch.internal-java-rest-test' Gradle plugin --- qa/ccs-unavailable-clusters/build.gradle | 11 +++++++---- .../CrossClusterSearchUnavailableClusterIT.java | 10 ++++++++++ 2 files changed, 17 
insertions(+), 4 deletions(-) diff --git a/qa/ccs-unavailable-clusters/build.gradle b/qa/ccs-unavailable-clusters/build.gradle index 6d94e3756eae4..3db6e2e987262 100644 --- a/qa/ccs-unavailable-clusters/build.gradle +++ b/qa/ccs-unavailable-clusters/build.gradle @@ -6,9 +6,12 @@ * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". */ -apply plugin: 'elasticsearch.legacy-java-rest-test' -testClusters.matching { it.name == "javaRestTest" }.configureEach { - setting 'xpack.security.enabled', 'true' - user username: 'admin', password: 'admin-password', role: 'superuser' +import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask + +apply plugin: 'elasticsearch.internal-java-rest-test' + + +tasks.withType(StandaloneRestIntegTestTask) { + usesDefaultDistribution() } diff --git a/qa/ccs-unavailable-clusters/src/javaRestTest/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java b/qa/ccs-unavailable-clusters/src/javaRestTest/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java index 13a77e158c343..7b42292848395 100644 --- a/qa/ccs-unavailable-clusters/src/javaRestTest/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java +++ b/qa/ccs-unavailable-clusters/src/javaRestTest/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.ObjectPath; import org.elasticsearch.test.transport.MockTransportService; @@ -45,6 +46,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentBuilder; import 
org.elasticsearch.xcontent.json.JsonXContent; +import org.junit.ClassRule; import java.io.IOException; import java.util.Collections; @@ -61,6 +63,14 @@ public class CrossClusterSearchUnavailableClusterIT extends ESRestTestCase { private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local().build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + @Override public void tearDown() throws Exception { super.tearDown(); From 4d775cba4ff5708e1e895bdf89b648e609b08b89 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Tue, 15 Oct 2024 13:05:02 +0300 Subject: [PATCH 098/449] Add documentation for passthrough field type (#114720) * Guard second doc parsing pass with index setting * add test * updates * updates * merge * Add documentation for passthrough field type * Apply suggestions from code review Co-authored-by: Felix Barnsteiner * updates * updates * Update docs/reference/mapping/types/passthrough.asciidoc Co-authored-by: Felix Barnsteiner * address comment * address comment * Update docs/reference/mapping/types/passthrough.asciidoc Co-authored-by: Felix Barnsteiner * address comment --------- Co-authored-by: Felix Barnsteiner --- .../data-streams/set-up-tsds.asciidoc | 21 +- docs/reference/data-streams/tsds.asciidoc | 12 +- .../mapping/params/subobjects.asciidoc | 1 + docs/reference/mapping/types.asciidoc | 15 +- .../mapping/types/passthrough.asciidoc | 218 ++++++++++++++++++ .../index/mapper/PassThroughObjectMapper.java | 3 - 6 files changed, 247 insertions(+), 23 deletions(-) create mode 100644 docs/reference/mapping/types/passthrough.asciidoc diff --git a/docs/reference/data-streams/set-up-tsds.asciidoc b/docs/reference/data-streams/set-up-tsds.asciidoc index 3a483ac351180..d082a9c4eebeb 100644 --- a/docs/reference/data-streams/set-up-tsds.asciidoc +++ 
b/docs/reference/data-streams/set-up-tsds.asciidoc @@ -121,7 +121,8 @@ naming scheme]. * Specify a mapping that defines your dimensions and metrics: ** One or more <> with a `time_series_dimension` value of `true`. - At least one of these dimensions must be a plain `keyword` field. + Alternatively, one or more <> fields configured as dimension containers, + provided that they will contain at least one sub-field (mapped statically or dynamically). ** One or more <>, marked using the `time_series_metric` mapping parameter. @@ -203,10 +204,9 @@ DELETE _ilm/policy/my-weather-sensor-lifecycle-policy Documents in a TSDS must include: * A `@timestamp` field -* One or more dimension fields. At least one dimension must be a `keyword` field -that matches the `index.routing_path` index setting, if specified. If not specified -explicitly, `index.routing_path` is set automatically to whichever mappings have - `time_series_dimension` set to `true`. +* One or more dimension fields. At least one dimension must match the `index.routing_path` index setting, +if specified. If not specified explicitly, `index.routing_path` is set automatically to whichever mappings have +`time_series_dimension` set to `true`. To automatically create your TSDS, submit an indexing request that targets the TSDS's name. This name must match one of your index template's @@ -285,13 +285,12 @@ POST metrics-weather_sensors-dev/_rollover Configuring a TSDS via an index template that uses component templates is a bit more complicated. Typically with component templates mappings and settings get scattered across multiple component templates. -When configuring the `index.mode` setting in a component template, the `index.routing_path` setting needs to -be defined in the same component template. Additionally the fields mentioned in the `index.routing_path` -also need to be defined in the same component template with the `time_series_dimension` attribute enabled. 
+If the `index.routing_path` is defined, the fields it references need to be defined in the same component +template with the `time_series_dimension` attribute enabled. -The reasons for this is that each component template needs to be valid on its own and the time series index mode -requires the `index.routing_path` setting. When configuring the `index.mode` setting in an index template, the `index.routing_path` setting is configured automatically. It is derived from -the field mappings with `time_series_dimension` attribute enabled. +The reasons for this is that each component template needs to be valid on its own. When configuring the +`index.mode` setting in an index template, the `index.routing_path` setting is configured automatically. +It is derived from the field mappings with `time_series_dimension` attribute enabled. [discrete] [[set-up-tsds-whats-next]] diff --git a/docs/reference/data-streams/tsds.asciidoc b/docs/reference/data-streams/tsds.asciidoc index 01573658c33d0..6dc559732bf90 100644 --- a/docs/reference/data-streams/tsds.asciidoc +++ b/docs/reference/data-streams/tsds.asciidoc @@ -109,7 +109,10 @@ parameter: * <> * <> -For a flattened field, use the `time_series_dimensions` parameter to configure an array of fields as dimensions. For details refer to <>. +For a flattened field, use the `time_series_dimensions` parameter to configure an array of fields as dimensions. +For details refer to <>. + +Dimension definitions can be simplified through <> fields. [discrete] [[time-series-metric]] @@ -294,12 +297,15 @@ When you create the matching index template for a TSDS, you must specify one or more dimensions in the `index.routing_path` setting. Each document in a TSDS must contain one or more dimensions that match the `index.routing_path` setting. -Dimensions in the `index.routing_path` setting must be plain `keyword` fields. The `index.routing_path` setting accepts wildcard patterns (for example `dim.*`) and can dynamically match new fields. 
However, {es} will reject any mapping -updates that add scripted, runtime, or non-dimension, non-`keyword` fields that +updates that add scripted, runtime, or non-dimension fields that match the `index.routing_path` value. +<> fields may be configured +as dimension containers. In this case, their sub-fields get included to the +routing path automatically. + TSDS documents don't support a custom `_routing` value. Similarly, you can't require a `_routing` value in mappings for a TSDS. diff --git a/docs/reference/mapping/params/subobjects.asciidoc b/docs/reference/mapping/params/subobjects.asciidoc index b0a5d3817c332..ff91f07cfb359 100644 --- a/docs/reference/mapping/params/subobjects.asciidoc +++ b/docs/reference/mapping/params/subobjects.asciidoc @@ -111,6 +111,7 @@ PUT my-index-000001/_doc/metric_1 The `subobjects` setting for existing fields and the top-level mapping definition cannot be updated. +[[subobjects-auto-flattening]] ==== Auto-flattening object mappings It is generally recommended to define the properties of an object that is configured with `subobjects: false` with dotted field names diff --git a/docs/reference/mapping/types.asciidoc b/docs/reference/mapping/types.asciidoc index 7e2e7083fa70b..babe4f508b5f0 100644 --- a/docs/reference/mapping/types.asciidoc +++ b/docs/reference/mapping/types.asciidoc @@ -35,12 +35,13 @@ Dates:: Date types, including <> and [[object-types]] ==== Objects and relational types -<>:: A JSON object. -<>:: An entire JSON object as a single field value. -<>:: A JSON object that preserves the relationship - between its subfields. -<>:: Defines a parent/child relationship for documents - in the same index. +<>:: A JSON object. +<>:: An entire JSON object as a single field value. +<>:: A JSON object that preserves the relationship + between its subfields. +<>:: Defines a parent/child relationship for documents + in the same index. +<>:: Provides aliases for sub-fields at the same level. 
[discrete] @@ -167,6 +168,8 @@ include::types/numeric.asciidoc[] include::types/object.asciidoc[] +include::types/passthrough.asciidoc[] + include::types/percolator.asciidoc[] include::types/point.asciidoc[] diff --git a/docs/reference/mapping/types/passthrough.asciidoc b/docs/reference/mapping/types/passthrough.asciidoc new file mode 100644 index 0000000000000..53b2cc5ed260b --- /dev/null +++ b/docs/reference/mapping/types/passthrough.asciidoc @@ -0,0 +1,218 @@ +[[passthrough]] +=== Pass-through object field type +++++ +Pass-through object +++++ + +Pass-through objects extend the functionality of <> by allowing to access +their subfields without including the name of the pass-through object as prefix. For instance: + +[source,console] +-------------------------------------------------- +PUT my-index-000001 +{ + "mappings": { + "properties": { + "attributes": { + "type": "passthrough", <1> + "priority": 10, + "properties": { + "id": { + "type": "keyword" + } + } + } + } + } +} + +PUT my-index-000001/_doc/1 +{ + "attributes" : { <2> + "id": "foo", + "zone": 10 + } +} + +GET my-index-000001/_search +{ + "query": { + "bool": { + "must": [ + { "match": { "id": "foo" }}, <3> + { "match": { "zone": 10 }} + ] + } + } +} + +GET my-index-000001/_search +{ + "query": { + "bool": { + "must": [ + { "match": { "attributes.id": "foo" }}, <4> + { "match": { "attributes.zone": 10 }} + ] + } + } +} + +-------------------------------------------------- + +<1> An object is defined as pass-through. Its priority (required) is used for conflict resolution. +<2> Object contents get indexed as usual, including dynamic mappings. +<3> Sub-fields can be referenced in queries as if they're defined at the root level. +<4> Sub-fields can also be referenced including the object name as prefix. + +[[passthrough-conflicts]] +==== Conflict resolution + +It's possible for conflicting names to arise, for fields that are defined within different scopes: + + 1. 
A pass-through object is defined next to a field that has the same name as one of the pass-through object + sub-fields, e.g. + +[source,console] +-------------------------------------------------- +PUT my-index-000001/_doc/1 +{ + "attributes" : { + "id": "foo" + }, + "id": "bar" +} +-------------------------------------------------- + + In this case, references to `id` point to the field at the root level, while field `attributes.id` + can only be accessed using the full path. + + 2. Two (or more) pass-through objects are defined within the same object and contain fields with the same name, e.g. + +[source,console] +-------------------------------------------------- +PUT my-index-000002 +{ + "mappings": { + "properties": { + "attributes": { + "type": "passthrough", + "priority": 10, + "properties": { + "id": { + "type": "keyword" + } + } + }, + "resource.attributes": { + "type": "passthrough", + "priority": 20, + "properties": { + "id": { + "type": "keyword" + } + } + } + } + } +} +-------------------------------------------------- + +In this case, param `priority` is used for conflict resolution, with the higher values taking precedence. In the +example above, `resource.attributes` has higher priority than `attributes`, so references to `id` point to the field +within `resource.attributes`. `attributes.id` can still be accessed using its full path. + +[[passthrough-dimensions]] +==== Defining sub-fields as time-series dimensions + +It is possible to configure a pass-through field as a container for <>. 
+In this case, all sub-fields get annotated with the same parameter under the covers, and they're also +included in <> and <> calculations, thus simplifying +the <> setup: + +[source,console] +-------------------------------------------------- +PUT _index_template/my-metrics +{ + "index_patterns": ["metrics-mymetrics-*"], + "priority": 200, + "data_stream": { }, + "template": { + "settings": { + "index.mode": "time_series" + }, + "mappings": { + "properties": { + "attributes": { + "type": "passthrough", + "priority": 10, + "time_series_dimension": true, + "properties": { + "host.name": { + "type": "keyword" + } + } + }, + "cpu": { + "type": "integer", + "time_series_metric": "counter" + } + } + } + } +} + +POST metrics-mymetrics-test/_doc +{ + "@timestamp": "2020-01-01T00:00:00.000Z", + "attributes" : { + "host.name": "foo", + "zone": "bar" + }, + "cpu": 10 +} +-------------------------------------------------- +// TEST[skip: The @timestamp value won't match an accepted range in the TSDS] + +In the example above, `attributes` is defined as a dimension container. Its sub-fields `host.name` (static) and `zone` +(dynamic) get included in the routing path and tsid, and can be referenced in queries without the `attributes.` prefix. + +[[passthrough-flattening]] +==== Sub-field auto-flattening + +Pass-through fields apply <> to sub-fields by default, to reduce dynamic +mapping conflicts. As a consequence, no sub-object definitions are allowed within pass-through fields. + +[[passthrough-params]] +==== Parameters for `passthrough` fields + +The following parameters are accepted by `passthrough` fields: + +[horizontal] + +<>:: + + (Required) used for naming conflict resolution between pass-through fields. The field with the highest value wins. + Accepts non-negative integer values. + +<>:: + + Whether or not to treat sub-fields as <>. + Accepts `false` (default) or `true`. + +<>:: + + Whether or not new `properties` should be added dynamically to an existing object. 
+ Accepts `true` (default), `runtime`, `false` and `strict`. + +<>:: + + Whether the JSON value given for the object field should be parsed and indexed (`true`, default) + or completely ignored (`false`). + +<>:: + + The fields within the object, which can be of any <>, including `object`. + New properties may be added to an existing object. + +IMPORTANT: If you need to index arrays of objects instead of single objects, read <> first. diff --git a/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java index 80f845d626a2f..decc6d40a2f8e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java @@ -34,9 +34,6 @@ * In case different pass-through objects contain subfields with the same name (excluding the pass-through prefix), their aliases conflict. * To resolve this, the pass-through spec specifies which object takes precedence through required parameter "priority"; non-negative * integer values are accepted, with the highest priority value winning in case of conflicting aliases. - * - * Note that this is an experimental, undocumented mapper type, currently intended for prototyping purposes only. - * It has not been vetted for use in production systems. */ public class PassThroughObjectMapper extends ObjectMapper { public static final String CONTENT_TYPE = "passthrough"; From 6620be30dda5d728337c9d408bc815d4b2379a79 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 15 Oct 2024 11:21:28 +0100 Subject: [PATCH 099/449] Remove all v7-only REST endpoints (#114765) These endpoints were deprecated in v7 and are unsupported in v8 so we can remove them entirely in v9. 
--- .../RestMultiSearchTemplateAction.java | 4 +- .../mustache/RestSearchTemplateAction.java | 9 +- .../reindex/RestDeleteByQueryAction.java | 10 +-- .../reindex/RestUpdateByQueryAction.java | 8 +- .../elasticsearch/action/ActionModule.java | 5 -- .../org/elasticsearch/rest/RestHandler.java | 10 --- .../indices/RestGetFieldMappingAction.java | 8 +- .../admin/indices/RestGetMappingAction.java | 7 +- .../admin/indices/RestPutMappingAction.java | 14 +--- .../admin/indices/RestSyncedFlushAction.java | 83 ------------------- .../indices/RestUpgradeActionDeprecated.java | 65 --------------- .../indices/RestValidateQueryAction.java | 4 +- .../rest/action/document/RestBulkAction.java | 4 +- .../action/document/RestDeleteAction.java | 7 +- .../rest/action/document/RestGetAction.java | 10 +-- .../action/document/RestGetSourceAction.java | 7 +- .../rest/action/document/RestIndexAction.java | 19 +---- .../action/document/RestMultiGetAction.java | 4 +- .../document/RestMultiTermVectorsAction.java | 4 +- .../document/RestTermVectorsAction.java | 6 +- .../action/document/RestUpdateAction.java | 8 +- .../rest/action/search/RestCountAction.java | 4 +- .../rest/action/search/RestExplainAction.java | 8 +- .../action/search/RestMultiSearchAction.java | 4 +- .../rest/action/search/RestSearchAction.java | 4 +- .../TestDeprecationHeaderRestAction.java | 5 +- .../rest/action/RestFreezeIndexAction.java | 1 - .../graph/rest/action/RestGraphAction.java | 8 -- .../xpack/ml/rest/job/RestPostDataAction.java | 5 +- .../rolemapping/RestPutRoleMappingAction.java | 9 +- 30 files changed, 29 insertions(+), 315 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSyncedFlushAction.java delete mode 100644 server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpgradeActionDeprecated.java diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java 
b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java index c1cad60e94a9c..eec6e003f3556 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java @@ -46,9 +46,7 @@ public List routes() { new Route(GET, "/_msearch/template"), new Route(POST, "/_msearch/template"), new Route(GET, "/{index}/_msearch/template"), - new Route(POST, "/{index}/_msearch/template"), - Route.builder(GET, "/{index}/{type}/_msearch/template").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(POST, "/{index}/{type}/_msearch/template").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build() + new Route(POST, "/{index}/_msearch/template") ); } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java index 152e98f39f3b4..50a130d0fbfe8 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -48,13 +47,7 @@ public List routes() { new Route(GET, "/_search/template"), new Route(POST, "/_search/template"), new Route(GET, "/{index}/_search/template"), - new Route(POST, "/{index}/_search/template"), - Route.builder(GET, "/{index}/{type}/_search/template") - .deprecated(RestSearchAction.TYPES_DEPRECATION_MESSAGE, 
RestApiVersion.V_7) - .build(), - Route.builder(POST, "/{index}/{type}/_search/template") - .deprecated(RestSearchAction.TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7) - .build() + new Route(POST, "/{index}/_search/template") ); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestDeleteByQueryAction.java index 920968cc8cbca..29c1519afa4d1 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestDeleteByQueryAction.java @@ -10,14 +10,12 @@ package org.elasticsearch.reindex; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; -import org.elasticsearch.rest.action.search.RestSearchAction; import java.io.IOException; import java.util.HashMap; @@ -40,13 +38,7 @@ public RestDeleteByQueryAction(Predicate clusterSupportsFeature) { @Override public List routes() { - return List.of( - new Route(POST, "/{index}/_delete_by_query"), - Route.builder(POST, "/{index}/{type}/_delete_by_query") - .deprecated(RestSearchAction.TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7) - .build() - ); - + return List.of(new Route(POST, "/{index}/_delete_by_query")); } @Override diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java index 32eb34cbf4c71..67ea34f504790 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java +++ 
b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; -import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.script.Script; import java.io.IOException; @@ -41,12 +40,7 @@ public RestUpdateByQueryAction(Predicate clusterSupportsFeature) { @Override public List routes() { - return List.of( - new Route(POST, "/{index}/_update_by_query"), - Route.builder(POST, "/{index}/{type}/_update_by_query") - .deprecated(RestSearchAction.TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7) - .build() - ); + return List.of(new Route(POST, "/{index}/_update_by_query")); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 2d72f5d71ccda..08558b48c08b3 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -352,9 +352,7 @@ import org.elasticsearch.rest.action.admin.indices.RestRolloverIndexAction; import org.elasticsearch.rest.action.admin.indices.RestSimulateIndexTemplateAction; import org.elasticsearch.rest.action.admin.indices.RestSimulateTemplateAction; -import org.elasticsearch.rest.action.admin.indices.RestSyncedFlushAction; import org.elasticsearch.rest.action.admin.indices.RestUpdateSettingsAction; -import org.elasticsearch.rest.action.admin.indices.RestUpgradeActionDeprecated; import org.elasticsearch.rest.action.admin.indices.RestValidateQueryAction; import org.elasticsearch.rest.action.cat.AbstractCatAction; import org.elasticsearch.rest.action.cat.RestAliasAction; @@ -916,7 +914,6 @@ public void initRestHandlers(Supplier nodesInCluster, Predicate< registerHandler.accept(new RestRefreshAction()); registerHandler.accept(new RestFlushAction()); - registerHandler.accept(new 
RestSyncedFlushAction()); registerHandler.accept(new RestForceMergeAction()); registerHandler.accept(new RestClearIndicesCacheAction()); registerHandler.accept(new RestResolveClusterAction()); @@ -1003,8 +1000,6 @@ public void initRestHandlers(Supplier nodesInCluster, Predicate< registerHandler.accept(new RestAnalyzeIndexDiskUsageAction()); registerHandler.accept(new RestFieldUsageStatsAction()); - registerHandler.accept(new RestUpgradeActionDeprecated()); - // Desired nodes registerHandler.accept(new RestGetDesiredNodesAction()); registerHandler.accept(new RestUpdateDesiredNodesAction(clusterSupportsFeature)); diff --git a/server/src/main/java/org/elasticsearch/rest/RestHandler.java b/server/src/main/java/org/elasticsearch/rest/RestHandler.java index 0e3b8d37dd25c..cf66e402d3691 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/RestHandler.java @@ -197,16 +197,6 @@ private RouteBuilder(Method method, String path) { this.restApiVersion = RestApiVersion.current(); } - /** - * @deprecated Use {@link #deprecatedForRemoval(String, RestApiVersion)} if the intent is deprecate the path and remove in the - * next major version. Use {@link #deprecateAndKeep(String)} if the intent is to deprecate the path but not remove it. - * This method will delegate to {@link #deprecatedForRemoval(String, RestApiVersion)}. - */ - @Deprecated(since = "9.0.0", forRemoval = true) - public RouteBuilder deprecated(String deprecationMessage, RestApiVersion lastFullySupportedVersion) { - return deprecatedForRemoval(deprecationMessage, lastFullySupportedVersion); - } - /** * Marks that the route being built has been deprecated (for some reason -- the deprecationMessage) for removal. Notes the last * major version in which the path is fully supported without compatibility headers. 
If this path is being replaced by another diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java index 66465676ba53c..37391028dbd6e 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java @@ -45,13 +45,7 @@ public class RestGetFieldMappingAction extends BaseRestHandler { @Override public List routes() { - return List.of( - new Route(GET, "/_mapping/field/{fields}"), - new Route(GET, "/{index}/_mapping/field/{fields}"), - Route.builder(GET, "/_mapping/{type}/field/{fields}").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(GET, "/{index}/{type}/_mapping/field/{fields}").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(GET, "/{index}/_mapping/{type}/field/{fields}").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build() - ); + return List.of(new Route(GET, "/_mapping/field/{fields}"), new Route(GET, "/{index}/_mapping/field/{fields}")); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java index 2aa230a13c2a9..5f40bea92f818 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java @@ -46,13 +46,8 @@ public List routes() { return List.of( new Route(GET, "/_mapping"), new Route(GET, "/_mappings"), - Route.builder(GET, "/{index}/{type}/_mapping").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), new Route(GET, "/{index}/_mapping"), - new Route(GET, "/{index}/_mappings"), - 
Route.builder(GET, "/{index}/_mappings/{type}").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(GET, "/{index}/_mapping/{type}").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(HEAD, "/{index}/_mapping/{type}").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(GET, "/_mapping/{type}").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build() + new Route(GET, "/{index}/_mappings") ); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java index ae5e46866f5a6..014e761acc388 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java @@ -45,19 +45,7 @@ public List routes() { new Route(POST, "/{index}/_mapping/"), new Route(PUT, "/{index}/_mapping/"), new Route(POST, "/{index}/_mappings/"), - new Route(PUT, "/{index}/_mappings/"), - Route.builder(POST, "/{index}/{type}/_mapping").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(PUT, "/{index}/{type}/_mapping").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(POST, "/{index}/_mapping/{type}").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(PUT, "/{index}/_mapping/{type}").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(POST, "/_mapping/{type}").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(PUT, "/_mapping/{type}").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(POST, "/{index}/{type}/_mappings").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(PUT, 
"/{index}/{type}/_mappings").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(POST, "/{index}/_mappings/{type}").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(PUT, "/{index}/_mappings/{type}").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(POST, "/_mappings/{type}").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(PUT, "/_mappings/{type}").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build() + new Route(PUT, "/{index}/_mappings/") ); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSyncedFlushAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSyncedFlushAction.java deleted file mode 100644 index d0a4160647f1b..0000000000000 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSyncedFlushAction.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.rest.action.admin.indices; - -import org.elasticsearch.action.admin.indices.flush.FlushRequest; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.broadcast.BroadcastResponse; -import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.RestChannel; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestResponse; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.rest.action.RestBuilderListener; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.List; - -import static org.elasticsearch.rest.RestRequest.Method.GET; -import static org.elasticsearch.rest.RestRequest.Method.POST; - -public class RestSyncedFlushAction extends BaseRestHandler { - - private static final String DEPRECATION_MESSAGE = - "Synced flush is deprecated and will be removed in 8.0. 
Use flush at /_flush or /{index}/_flush instead."; - - @Override - public List routes() { - return List.of( - Route.builder(GET, "/_flush/synced").deprecated(DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(POST, "/_flush/synced").deprecated(DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(GET, "/{index}/_flush/synced").deprecated(DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(POST, "/{index}/_flush/synced").deprecated(DEPRECATION_MESSAGE, RestApiVersion.V_7).build() - ); - } - - @Override - public String getName() { - return "synced_flush_action"; - } - - @Override - public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - final FlushRequest flushRequest = new FlushRequest(Strings.splitStringByCommaToArray(request.param("index"))); - flushRequest.indicesOptions(IndicesOptions.fromRequest(request, flushRequest.indicesOptions())); - return channel -> client.admin().indices().flush(flushRequest, new SimulateSyncedFlushResponseListener(channel)); - } - - static final class SimulateSyncedFlushResponseListener extends RestBuilderListener { - - SimulateSyncedFlushResponseListener(RestChannel channel) { - super(channel); - } - - @Override - public RestResponse buildResponse(BroadcastResponse flushResponse, XContentBuilder builder) throws Exception { - builder.startObject(); - buildSyncedFlushResponse(builder, flushResponse); - builder.endObject(); - final RestStatus restStatus = flushResponse.getFailedShards() == 0 ? 
RestStatus.OK : RestStatus.CONFLICT; - return new RestResponse(restStatus, builder); - } - - private static void buildSyncedFlushResponse(XContentBuilder builder, BroadcastResponse flushResponse) throws IOException { - builder.startObject("_shards"); - builder.field("total", flushResponse.getTotalShards()); - builder.field("successful", flushResponse.getSuccessfulShards()); - builder.field("failed", flushResponse.getFailedShards()); - // can't serialize the detail of each index as we don't have the shard count per index. - builder.endObject(); - } - } -} diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpgradeActionDeprecated.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpgradeActionDeprecated.java deleted file mode 100644 index 148924f522e5e..0000000000000 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpgradeActionDeprecated.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.rest.action.admin.indices; - -import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestResponse; - -import java.io.IOException; -import java.util.List; -import java.util.Locale; - -import static org.elasticsearch.rest.RestRequest.Method.GET; -import static org.elasticsearch.rest.RestRequest.Method.POST; - -public class RestUpgradeActionDeprecated extends BaseRestHandler { - public static final String UPGRADE_API_DEPRECATION_MESSAGE = - "The _upgrade API is no longer useful and will be removed. Instead, see _reindex API."; - - @Override - public List routes() { - return List.of( - Route.builder(POST, "/_upgrade").deprecated(UPGRADE_API_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(POST, "/{index}/_upgrade").deprecated(UPGRADE_API_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(GET, "/_upgrade").deprecated(UPGRADE_API_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(GET, "/{index}/_upgrade").deprecated(UPGRADE_API_DEPRECATION_MESSAGE, RestApiVersion.V_7).build() - ); - } - - @Override - public String getName() { - return "upgrade_action"; - } - - @Override - public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - request.param("index"); - final UpgradeActionDeprecatedException exception = new UpgradeActionDeprecatedException(request); - return channel -> channel.sendResponse(new RestResponse(channel, exception)); - } - - public static class UpgradeActionDeprecatedException extends IllegalArgumentException { - private final String path; - private final RestRequest.Method method; - - public UpgradeActionDeprecatedException(RestRequest restRequest) { - this.path = restRequest.path(); - this.method = restRequest.method(); - } - - @Override - public final String 
getMessage() { - return String.format(Locale.ROOT, "Upgrade action %s %s was removed, use _reindex API instead", method, path); - } - } -} diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java index e5b88d101f11a..8784fad3405d0 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java @@ -46,9 +46,7 @@ public List routes() { new Route(GET, "/_validate/query"), new Route(POST, "/_validate/query"), new Route(GET, "/{index}/_validate/query"), - new Route(POST, "/{index}/_validate/query"), - Route.builder(GET, "/{index}/{type}/_validate/query").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(POST, "/{index}/{type}/_validate/query").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build() + new Route(POST, "/{index}/_validate/query") ); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java index 33ee87c0c0b5a..0b8e64f5eab4a 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java @@ -79,9 +79,7 @@ public List routes() { new Route(POST, "/_bulk"), new Route(PUT, "/_bulk"), new Route(POST, "/{index}/_bulk"), - new Route(PUT, "/{index}/_bulk"), - Route.builder(POST, "/{index}/{type}/_bulk").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(PUT, "/{index}/{type}/_bulk").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build() + new Route(PUT, "/{index}/_bulk") ); } diff --git 
a/server/src/main/java/org/elasticsearch/rest/action/document/RestDeleteAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestDeleteAction.java index f70335a7d39be..3ee1810967153 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestDeleteAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestDeleteAction.java @@ -29,15 +29,10 @@ @ServerlessScope(Scope.PUBLIC) public class RestDeleteAction extends BaseRestHandler { - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in " - + "document index requests is deprecated, use the /{index}/_doc/{id} endpoint instead."; @Override public List routes() { - return List.of( - new Route(DELETE, "/{index}/_doc/{id}"), - Route.builder(DELETE, "/{index}/{type}/{id}").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build() - ); + return List.of(new Route(DELETE, "/{index}/_doc/{id}")); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java index 9018e3e0806d4..cc2b820eb05f2 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java @@ -32,9 +32,6 @@ @ServerlessScope(Scope.PUBLIC) public class RestGetAction extends BaseRestHandler { - static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in " - + "document get requests is deprecated, use the /{index}/_doc/{id} endpoint instead."; - @Override public String getName() { return "document_get_action"; @@ -42,12 +39,7 @@ public String getName() { @Override public List routes() { - return List.of( - new Route(GET, "/{index}/_doc/{id}"), - new Route(HEAD, "/{index}/_doc/{id}"), - Route.builder(GET, "/{index}/{type}/{id}").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - 
Route.builder(HEAD, "/{index}/{type}/{id}").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build() - ); + return List.of(new Route(GET, "/{index}/_doc/{id}"), new Route(HEAD, "/{index}/_doc/{id}")); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java index a71bf9c305185..e6567d7fdf592 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java @@ -47,12 +47,7 @@ public class RestGetSourceAction extends BaseRestHandler { @Override public List routes() { - return List.of( - new Route(GET, "/{index}/_source/{id}"), - new Route(HEAD, "/{index}/_source/{id}"), - Route.builder(GET, "/{index}/{type}/{id}/_source").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(HEAD, "/{index}/{type}/{id}/_source").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build() - ); + return List.of(new Route(GET, "/{index}/_source/{id}"), new Route(HEAD, "/{index}/_source/{id}")); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java index 2eb0e5a1ef038..9931c10b38f3f 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java @@ -44,12 +44,7 @@ public class RestIndexAction extends BaseRestHandler { @Override public List routes() { - return List.of( - new Route(POST, "/{index}/_doc/{id}"), - new Route(PUT, "/{index}/_doc/{id}"), - Route.builder(POST, "/{index}/{type}/{id}").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(PUT, "/{index}/{type}/{id}").deprecated(TYPES_DEPRECATION_MESSAGE, 
RestApiVersion.V_7).build() - ); + return List.of(new Route(POST, "/{index}/_doc/{id}"), new Route(PUT, "/{index}/_doc/{id}")); } @Override @@ -67,12 +62,7 @@ public String getName() { @Override public List routes() { - return List.of( - new Route(POST, "/{index}/_create/{id}"), - new Route(PUT, "/{index}/_create/{id}"), - Route.builder(POST, "/{index}/{type}/{id}/_create").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(PUT, "/{index}/{type}/{id}/_create").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build() - ); + return List.of(new Route(POST, "/{index}/_create/{id}"), new Route(PUT, "/{index}/_create/{id}")); } @Override @@ -101,10 +91,7 @@ public String getName() { @Override public List routes() { - return List.of( - new Route(POST, "/{index}/_doc"), - Route.builder(POST, "/{index}/{type}").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build() - ); + return List.of(new Route(POST, "/{index}/_doc")); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiGetAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiGetAction.java index 36c9653f53316..0ceb9ebab7397 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiGetAction.java @@ -44,9 +44,7 @@ public List routes() { new Route(GET, "/_mget"), new Route(POST, "/_mget"), new Route(GET, "/{index}/_mget"), - new Route(POST, "/{index}/_mget"), - Route.builder(GET, "/{index}/{type}/_mget").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(POST, "/{index}/{type}/_mget").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build() + new Route(POST, "/{index}/_mget") ); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java 
b/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java index 2832b31570723..0e23362e3f5df 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java @@ -39,9 +39,7 @@ public List routes() { new Route(GET, "/_mtermvectors"), new Route(POST, "/_mtermvectors"), new Route(GET, "/{index}/_mtermvectors"), - new Route(POST, "/{index}/_mtermvectors"), - Route.builder(GET, "/{index}/{type}/_mtermvectors").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(POST, "/{index}/{type}/_mtermvectors").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build() + new Route(POST, "/{index}/_mtermvectors") ); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestTermVectorsAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestTermVectorsAction.java index b2bface51ce89..1fbf35856589b 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestTermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestTermVectorsAction.java @@ -44,11 +44,7 @@ public List routes() { new Route(GET, "/{index}/_termvectors"), new Route(POST, "/{index}/_termvectors"), new Route(GET, "/{index}/_termvectors/{id}"), - new Route(POST, "/{index}/_termvectors/{id}"), - Route.builder(GET, "/{index}/{type}/_termvectors").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(POST, "/{index}/{type}/_termvectors").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(GET, "/{index}/{type}/{id}/_termvectors").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(POST, "/{index}/{type}/{id}/_termvectors").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build() + new Route(POST, "/{index}/_termvectors/{id}") ); 
} diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java index 7e9b766fb499b..682d5b5c55c3f 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java @@ -33,15 +33,9 @@ @ServerlessScope(Scope.PUBLIC) public class RestUpdateAction extends BaseRestHandler { - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in " - + "document update requests is deprecated, use the endpoint /{index}/_update/{id} instead."; - @Override public List routes() { - return List.of( - new Route(POST, "/{index}/_update/{id}"), - Route.builder(POST, "/{index}/{type}/{id}/_update").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build() - ); + return List.of(new Route(POST, "/{index}/_update/{id}")); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java index 5556052e79ab8..0c3680e09e6bf 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java @@ -46,9 +46,7 @@ public List routes() { new Route(GET, "/_count"), new Route(POST, "/_count"), new Route(GET, "/{index}/_count"), - new Route(POST, "/{index}/_count"), - Route.builder(GET, "/{index}/{type}/_count").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(POST, "/{index}/{type}/_count").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build() + new Route(POST, "/{index}/_count") ); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java index 
11635bee3008a..4d88d115e1da7 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java @@ -34,16 +34,10 @@ */ @ServerlessScope(value = Scope.PUBLIC) public class RestExplainAction extends BaseRestHandler { - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying a type in explain requests is deprecated."; @Override public List routes() { - return List.of( - new Route(GET, "/{index}/_explain/{id}"), - new Route(POST, "/{index}/_explain/{id}"), - Route.builder(GET, "/{index}/{type}/{id}/_explain").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(POST, "/{index}/{type}/{id}/_explain").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build() - ); + return List.of(new Route(GET, "/{index}/_explain/{id}"), new Route(POST, "/{index}/_explain/{id}")); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java index bdd7c6734172d..a58904c2649d9 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java @@ -65,9 +65,7 @@ public List routes() { new Route(GET, "/_msearch"), new Route(POST, "/_msearch"), new Route(GET, "/{index}/_msearch"), - new Route(POST, "/{index}/_msearch"), - Route.builder(GET, "/{index}/{type}/_msearch").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(POST, "/{index}/{type}/_msearch").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build() + new Route(POST, "/{index}/_msearch") ); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java 
index 38157efd8a370..4fff9229372ea 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -89,9 +89,7 @@ public List routes() { new Route(GET, "/_search"), new Route(POST, "/_search"), new Route(GET, "/{index}/_search"), - new Route(POST, "/{index}/_search"), - Route.builder(GET, "/{index}/{type}/_search").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(POST, "/{index}/{type}/_search").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build() + new Route(POST, "/{index}/_search") ); } diff --git a/x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecationHeaderRestAction.java b/x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecationHeaderRestAction.java index 9e5f999d1f825..2b9d9b0875220 100644 --- a/x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecationHeaderRestAction.java +++ b/x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecationHeaderRestAction.java @@ -100,8 +100,9 @@ public List routes() { Route.builder(GET, "/_test_cluster/deprecated_settings") .deprecatedForRemoval(DEPRECATED_ENDPOINT, RestApiVersion.current()) .build(), - // TODO: s/deprecated/deprecatedForRemoval when removing `deprecated` method - Route.builder(POST, "/_test_cluster/deprecated_settings").deprecated(DEPRECATED_ENDPOINT, RestApiVersion.current()).build(), + Route.builder(POST, "/_test_cluster/deprecated_settings") + .deprecatedForRemoval(DEPRECATED_ENDPOINT, RestApiVersion.current()) + .build(), Route.builder(GET, "/_test_cluster/compat_only") .deprecatedForRemoval(DEPRECATED_ENDPOINT, RestApiVersion.minimumSupported()) .build(), diff --git 
a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/rest/action/RestFreezeIndexAction.java b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/rest/action/RestFreezeIndexAction.java index 081061fd9ceac..369752ea5ed75 100644 --- a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/rest/action/RestFreezeIndexAction.java +++ b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/rest/action/RestFreezeIndexAction.java @@ -46,7 +46,6 @@ public final class RestFreezeIndexAction extends BaseRestHandler { @Override public List routes() { return List.of( - Route.builder(POST, "/{index}/_freeze").deprecated(FREEZE_REMOVED, RestApiVersion.V_7).build(), // Route.builder(POST, "/{index}/_unfreeze").deprecated(UNFREEZE_DEPRECATED, RestApiVersion.V_8).build() Route.builder(POST, "/{index}/_unfreeze").deprecateAndKeep(UNFREEZE_DEPRECATED).build() ); diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java index 472b6f8087a56..983e972248945 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java @@ -74,14 +74,6 @@ public List routes() { .build(), Route.builder(POST, "/{index}/_graph/explore") .replaces(POST, "/{index}" + URI_BASE + "/graph/_explore", RestApiVersion.V_7) - .build(), - Route.builder(GET, "/{index}/{type}/_graph/explore").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(GET, "/{index}/{type}" + URI_BASE + "/graph/_explore") - .deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7) - .build(), - Route.builder(POST, "/{index}/{type}/_graph/explore").deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7).build(), - Route.builder(POST, 
"/{index}/{type}" + URI_BASE + "/graph/_explore") - .deprecated(TYPES_DEPRECATION_MESSAGE, RestApiVersion.V_7) .build() ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java index 8c7c12de0ce51..48c6abde3010a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.job; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -21,7 +20,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; public class RestPostDataAction extends BaseRestHandler { @@ -39,8 +37,7 @@ public List routes() { + "in a future major version it will be compulsory to use a datafeed"; return List.of( // Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_data").deprecated(msg, RestApiVersion.V_8).build(), - Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_data").deprecateAndKeep(msg).build(), - Route.builder(POST, PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_data").deprecated(msg, RestApiVersion.V_7).build() + Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_data").deprecateAndKeep(msg).build() ); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java index 55562c8ee0138..019b1e5095627 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -44,12 +43,8 @@ public RestPutRoleMappingAction(Settings settings, XPackLicenseState licenseStat @Override public List routes() { return List.of( - Route.builder(POST, "/_security/role_mapping/{name}") - .replaces(POST, "/_xpack/security/role_mapping/{name}", RestApiVersion.V_7) - .build(), - Route.builder(PUT, "/_security/role_mapping/{name}") - .replaces(PUT, "/_xpack/security/role_mapping/{name}", RestApiVersion.V_7) - .build() + Route.builder(POST, "/_security/role_mapping/{name}").build(), + Route.builder(PUT, "/_security/role_mapping/{name}").build() ); } From 8240945f01e45cf991f4902ecfc558ee08124668 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 15 Oct 2024 11:24:04 +0100 Subject: [PATCH 100/449] Remove unused `ChunkedToXContent#toXContentChunkedV7` (#114728) We don't support the v7 REST API in v9, so this commit removes the now-unused `ChunkedToXContent#toXContentChunkedV7` method. It also introduces a similar `ChunkedToXContent#toXContentChunkedV8` method for implementations to use for v8 REST API compatibility. 
--- .../reroute/ClusterRerouteResponse.java | 11 +++----- .../common/xcontent/ChunkedToXContent.java | 27 ++++++++++++++++--- .../rest/ChunkedRestResponseBodyPart.java | 8 +++--- .../rest/StreamingXContentResponse.java | 5 +--- .../reroute/ClusterRerouteResponseTests.java | 16 ----------- .../ServerSentEventsRestActionListener.java | 5 +--- 6 files changed, 33 insertions(+), 39 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java index 7b344a4c25a1b..45ee00a98c2e2 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java @@ -92,17 +92,12 @@ public Iterator toXContentChunked(ToXContent.Params outerP if (emitState(outerParams)) { deprecationLogger.critical(DeprecationCategory.API, "reroute_cluster_state", STATE_FIELD_DEPRECATION_MESSAGE); } - return toXContentChunkedV7(outerParams); - } - - @Override - public Iterator toXContentChunkedV7(ToXContent.Params params) { - return ChunkedToXContent.builder(params).object(b -> { + return ChunkedToXContent.builder(outerParams).object(b -> { b.field(ACKNOWLEDGED_KEY, isAcknowledged()); - if (emitState(params)) { + if (emitState(outerParams)) { b.xContentObject("state", state); } - if (params.paramAsBoolean("explain", false)) { + if (outerParams.paramAsBoolean("explain", false)) { b.append(explanations); } }); diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContent.java b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContent.java index 63859d89f3e22..db0b5b4357c7d 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContent.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContent.java @@ -34,6 +34,27 @@ static 
ChunkedToXContentBuilder builder(ToXContent.Params params) { return new ChunkedToXContentBuilder(params); } + /** + * Create an iterator of {@link ToXContent} chunks for a REST response for the given {@link RestApiVersion}. Each chunk is serialized + * with the same {@link XContentBuilder} and {@link ToXContent.Params}, which is also the same as the {@link ToXContent.Params} passed + * as the {@code params} argument. For best results, all chunks should be {@code O(1)} size. The last chunk in the iterator must always + * yield at least one byte of output. See also {@link ChunkedToXContentHelper} for some handy utilities. + *

    + * Note that chunked response bodies cannot send deprecation warning headers once transmission has started, so implementations must + * check for deprecated feature use before returning. + *

    + * By default, delegates to {@link #toXContentChunked} or {@link #toXContentChunkedV8}. + * + * @return iterator over chunks of {@link ToXContent} + */ + default Iterator toXContentChunked(RestApiVersion restApiVersion, ToXContent.Params params) { + return switch (restApiVersion) { + case V_7 -> throw new AssertionError("v7 API not supported"); + case V_8 -> toXContentChunkedV8(params); + case V_9 -> toXContentChunked(params); + }; + } + /** * Create an iterator of {@link ToXContent} chunks for a REST response. Each chunk is serialized with the same {@link XContentBuilder} * and {@link ToXContent.Params}, which is also the same as the {@link ToXContent.Params} passed as the {@code params} argument. For @@ -48,12 +69,12 @@ static ChunkedToXContentBuilder builder(ToXContent.Params params) { Iterator toXContentChunked(ToXContent.Params params); /** - * Create an iterator of {@link ToXContent} chunks for a response to the {@link RestApiVersion#V_7} API. Each chunk is serialized with + * Create an iterator of {@link ToXContent} chunks for a response to the {@link RestApiVersion#V_8} API. Each chunk is serialized with * the same {@link XContentBuilder} and {@link ToXContent.Params}, which is also the same as the {@link ToXContent.Params} passed as the * {@code params} argument. For best results, all chunks should be {@code O(1)} size. The last chunk in the iterator must always yield * at least one byte of output. See also {@link ChunkedToXContentHelper} for some handy utilities. *

    - * Similar to {@link #toXContentChunked} but for the {@link RestApiVersion#V_7} API. By default this method delegates to {@link + * Similar to {@link #toXContentChunked} but for the {@link RestApiVersion#V_8} API. By default this method delegates to {@link * #toXContentChunked}. *

    * Note that chunked response bodies cannot send deprecation warning headers once transmission has started, so implementations must @@ -61,7 +82,7 @@ static ChunkedToXContentBuilder builder(ToXContent.Params params) { * * @return iterator over chunks of {@link ToXContent} */ - default Iterator toXContentChunkedV7(ToXContent.Params params) { + default Iterator toXContentChunkedV8(ToXContent.Params params) { return toXContentChunked(params); } diff --git a/server/src/main/java/org/elasticsearch/rest/ChunkedRestResponseBodyPart.java b/server/src/main/java/org/elasticsearch/rest/ChunkedRestResponseBodyPart.java index b28d7c26b11bb..694af7e1606cb 100644 --- a/server/src/main/java/org/elasticsearch/rest/ChunkedRestResponseBodyPart.java +++ b/server/src/main/java/org/elasticsearch/rest/ChunkedRestResponseBodyPart.java @@ -19,7 +19,6 @@ import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Releasables; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Streams; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; @@ -129,9 +128,10 @@ public void write(byte[] b, int off, int len) throws IOException { Streams.noCloseStream(out) ); - private final Iterator serialization = builder.getRestApiVersion() == RestApiVersion.V_7 - ? 
chunkedToXContent.toXContentChunkedV7(params) - : chunkedToXContent.toXContentChunked(params); + private final Iterator serialization = chunkedToXContent.toXContentChunked( + builder.getRestApiVersion(), + params + ); private BytesStream target; diff --git a/server/src/main/java/org/elasticsearch/rest/StreamingXContentResponse.java b/server/src/main/java/org/elasticsearch/rest/StreamingXContentResponse.java index 7f61d171fae33..db33673939ae9 100644 --- a/server/src/main/java/org/elasticsearch/rest/StreamingXContentResponse.java +++ b/server/src/main/java/org/elasticsearch/rest/StreamingXContentResponse.java @@ -25,7 +25,6 @@ import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Streams; import org.elasticsearch.transport.Transports; import org.elasticsearch.xcontent.ToXContent; @@ -125,9 +124,7 @@ public void close() { } private Iterator getChunksIterator(StreamingFragment fragment) { - return xContentBuilder.getRestApiVersion() == RestApiVersion.V_7 - ? 
fragment.fragment().toXContentChunkedV7(params) - : fragment.fragment().toXContentChunked(params); + return fragment.fragment().toXContentChunked(xContentBuilder.getRestApiVersion(), params); } /** diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java index 67e5f30f023c9..b59cc13a20ff2 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java @@ -37,7 +37,6 @@ import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; @@ -323,21 +322,6 @@ private void assertXContent( AbstractChunkedSerializingTestCase.assertChunkCount(response, params, o -> expectedChunks[0]); assertCriticalWarnings(criticalDeprecationWarnings); - - // check the v7 API too - AbstractChunkedSerializingTestCase.assertChunkCount(new ChunkedToXContent() { - @Override - public Iterator toXContentChunked(ToXContent.Params outerParams) { - return response.toXContentChunkedV7(outerParams); - } - - @Override - public boolean isFragment() { - return response.isFragment(); - } - }, params, o -> expectedChunks[0]++); - // the v7 API should not emit any deprecation warnings - assertCriticalWarnings(); } private static ClusterRerouteResponse createClusterRerouteResponse(ClusterState clusterState) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java index 7ae7c5f4c3909..a397da05b1ce4 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Releasables; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Streams; import org.elasticsearch.rest.ChunkedRestResponseBodyPart; import org.elasticsearch.rest.RestChannel; @@ -299,9 +298,7 @@ private ServerSentEventResponseBodyPart(ServerSentEvents event, ChunkedToXConten this.xContentBuilder = new LazyInitializable<>( () -> channel.newBuilder(channel.request().getXContentType(), null, true, Streams.noCloseStream(out)) ); - this.serialization = channel.request().getRestApiVersion() == RestApiVersion.V_7 - ? item.toXContentChunkedV7(params) - : item.toXContentChunked(params); + this.serialization = item.toXContentChunked(channel.request().getRestApiVersion(), params); } @Override From bd6eecac4b7c42cd10e2e52fade8f58392975a74 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 15 Oct 2024 12:38:45 +0100 Subject: [PATCH 101/449] [ML] Wait for allocation on scale up from 0 (#114719) --- docs/changelog/114719.yaml | 5 + .../test/cluster/FeatureFlag.java | 3 +- .../assignment/TrainedModelAssignment.java | 7 +- .../TrainedModelAssignmentTests.java | 14 +- .../AdaptiveAllocationsScaleFromZeroIT.java | 133 ++++++++++++ .../integration/PyTorchModelRestTestCase.java | 35 +++- .../TransportExternalInferModelAction.java | 10 +- .../TransportInternalInferModelAction.java | 71 ++++++- .../inference/InferenceWaitForAllocation.java | 192 ++++++++++++++++++ .../AdaptiveAllocationsScaler.java | 21 +- .../AdaptiveAllocationsScalerService.java | 108 ++++++---- .../ScaleFromZeroFeatureFlag.java | 20 ++ .../TrainedModelAssignmentClusterService.java | 16 +- 
...AdaptiveAllocationsScalerServiceTests.java | 102 ++++++++++ .../AdaptiveAllocationsScalerTests.java | 32 ++- ...nedModelAssignmentClusterServiceTests.java | 15 ++ 16 files changed, 707 insertions(+), 77 deletions(-) create mode 100644 docs/changelog/114719.yaml create mode 100644 x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/AdaptiveAllocationsScaleFromZeroIT.java create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/InferenceWaitForAllocation.java create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/ScaleFromZeroFeatureFlag.java diff --git a/docs/changelog/114719.yaml b/docs/changelog/114719.yaml new file mode 100644 index 0000000000000..477d656d5b979 --- /dev/null +++ b/docs/changelog/114719.yaml @@ -0,0 +1,5 @@ +pr: 114719 +summary: Wait for allocation on scale up +area: Machine Learning +type: enhancement +issues: [] diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index ca2300611b4fd..3000819066495 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -20,7 +20,8 @@ public enum FeatureFlag { FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null), SUB_OBJECTS_AUTO_ENABLED("es.sub_objects_auto_feature_flag_enabled=true", Version.fromString("8.16.0"), null), CHUNKING_SETTINGS_ENABLED("es.inference_chunking_settings_feature_flag_enabled=true", Version.fromString("8.16.0"), null), - INFERENCE_DEFAULT_ELSER("es.inference_default_elser_feature_flag_enabled=true", Version.fromString("8.16.0"), null); + INFERENCE_DEFAULT_ELSER("es.inference_default_elser_feature_flag_enabled=true", Version.fromString("8.16.0"), null), + 
ML_SCALE_FROM_ZERO("es.ml_scale_from_zero_feature_flag_enabled=true", Version.fromString("8.16.0"), null); public final String systemProperty; public final Version from; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java index d9e7693870643..06c3f75587d62 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java @@ -224,15 +224,12 @@ public boolean hasStartedRoutes() { return nodeRoutingTable.values().stream().anyMatch(routeInfo -> routeInfo.getState() == RoutingState.STARTED); } - public List> selectRandomStartedNodesWeighedOnAllocationsForNRequests( - int numberOfRequests, - RoutingState requiredState - ) { + public List> selectRandomNodesWeighedOnAllocations(int numberOfRequests, RoutingState... 
acceptableStates) { List nodeIds = new ArrayList<>(nodeRoutingTable.size()); List cumulativeAllocations = new ArrayList<>(nodeRoutingTable.size()); int allocationSum = 0; for (Map.Entry routingEntry : nodeRoutingTable.entrySet()) { - if (routingEntry.getValue().getState() == requiredState) { + if (routingEntry.getValue().getState().isAnyOf(acceptableStates)) { nodeIds.add(routingEntry.getKey()); allocationSum += routingEntry.getValue().getCurrentAllocations(); cumulativeAllocations.add(allocationSum); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignmentTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignmentTests.java index 6a213d6f5e379..c3b6e0089b4ae 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignmentTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignmentTests.java @@ -195,7 +195,7 @@ public void testselectRandomStartedNodeWeighedOnAllocationsForNRequests_GivenNoS builder.addRoutingEntry("node-2", new RoutingInfo(1, 1, RoutingState.STOPPED, "")); TrainedModelAssignment assignment = builder.build(); - assertThat(assignment.selectRandomStartedNodesWeighedOnAllocationsForNRequests(1, RoutingState.STARTED).isEmpty(), is(true)); + assertThat(assignment.selectRandomNodesWeighedOnAllocations(1, RoutingState.STARTED).isEmpty(), is(true)); } public void testselectRandomStartedNodeWeighedOnAllocationsForNRequests_GivenSingleStartedNode() { @@ -203,7 +203,7 @@ public void testselectRandomStartedNodeWeighedOnAllocationsForNRequests_GivenSin builder.addRoutingEntry("node-1", new RoutingInfo(4, 4, RoutingState.STARTED, "")); TrainedModelAssignment assignment = builder.build(); - var nodes = assignment.selectRandomStartedNodesWeighedOnAllocationsForNRequests(1, RoutingState.STARTED); + var nodes = 
assignment.selectRandomNodesWeighedOnAllocations(1, RoutingState.STARTED); assertThat(nodes, contains(new Tuple<>("node-1", 1))); } @@ -213,7 +213,7 @@ public void testselectRandomStartedNodeWeighedOnAllocationsForNRequests_GivenASh builder.addRoutingEntry("node-1", new RoutingInfo(4, 4, RoutingState.STARTED, "")); TrainedModelAssignment assignment = builder.build(); - var nodes = assignment.selectRandomStartedNodesWeighedOnAllocationsForNRequests(1, RoutingState.STOPPING); + var nodes = assignment.selectRandomNodesWeighedOnAllocations(1, RoutingState.STOPPING); assertThat(nodes, empty()); } @@ -223,7 +223,7 @@ public void testselectRandomStartedNodeWeighedOnAllocationsForNRequests_GivenASh builder.addRoutingEntry("node-1", new RoutingInfo(4, 4, RoutingState.STOPPING, "")); TrainedModelAssignment assignment = builder.build(); - var nodes = assignment.selectRandomStartedNodesWeighedOnAllocationsForNRequests(1, RoutingState.STOPPING); + var nodes = assignment.selectRandomNodesWeighedOnAllocations(1, RoutingState.STOPPING); assertThat(nodes, contains(new Tuple<>("node-1", 1))); } @@ -234,7 +234,7 @@ public void testSingleRequestWith2Nodes() { builder.addRoutingEntry("node-2", new RoutingInfo(1, 1, RoutingState.STARTED, "")); TrainedModelAssignment assignment = builder.build(); - var nodes = assignment.selectRandomStartedNodesWeighedOnAllocationsForNRequests(1, RoutingState.STARTED); + var nodes = assignment.selectRandomNodesWeighedOnAllocations(1, RoutingState.STARTED); assertThat(nodes, hasSize(1)); assertEquals(nodes.get(0).v2(), Integer.valueOf(1)); } @@ -248,7 +248,7 @@ public void testSelectRandomStartedNodeWeighedOnAllocationsForNRequests_GivenMul final int selectionCount = 10000; final CountAccumulator countsPerNodeAccumulator = new CountAccumulator(); - var nodes = assignment.selectRandomStartedNodesWeighedOnAllocationsForNRequests(selectionCount, RoutingState.STARTED); + var nodes = assignment.selectRandomNodesWeighedOnAllocations(selectionCount, 
RoutingState.STARTED); assertThat(nodes, hasSize(3)); assertThat(nodes.stream().mapToInt(Tuple::v2).sum(), equalTo(selectionCount)); @@ -269,7 +269,7 @@ public void testselectRandomStartedNodeWeighedOnAllocationsForNRequests_GivenMul builder.addRoutingEntry("node-3", new RoutingInfo(0, 0, RoutingState.STARTED, "")); TrainedModelAssignment assignment = builder.build(); final int selectionCount = 1000; - var nodeCounts = assignment.selectRandomStartedNodesWeighedOnAllocationsForNRequests(selectionCount, RoutingState.STARTED); + var nodeCounts = assignment.selectRandomNodesWeighedOnAllocations(selectionCount, RoutingState.STARTED); assertThat(nodeCounts, hasSize(3)); var selectedNodes = new HashSet(); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/AdaptiveAllocationsScaleFromZeroIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/AdaptiveAllocationsScaleFromZeroIT.java new file mode 100644 index 0000000000000..db34ec5eedc86 --- /dev/null +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/AdaptiveAllocationsScaleFromZeroIT.java @@ -0,0 +1,133 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml.integration; + +import org.apache.lucene.tests.util.LuceneTestCase; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseListener; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; + +@LuceneTestCase.AwaitsFix(bugUrl = "Cannot test without setting the scale to zero period to a small value") +public class AdaptiveAllocationsScaleFromZeroIT extends PyTorchModelRestTestCase { + + @SuppressWarnings("unchecked") + public void testScaleFromZero() throws Exception { + String modelId = "test_scale_from_zero"; + createPassThroughModel(modelId); + putModelDefinition(modelId, PyTorchModelIT.BASE_64_ENCODED_MODEL, PyTorchModelIT.RAW_MODEL_SIZE); + putVocabulary(List.of("Auto", "scale", "and", "infer"), modelId); + + startDeployment(modelId, modelId, new AdaptiveAllocationsSettings(true, 0, 1)); + { + var responseMap = entityAsMap(getTrainedModelStats(modelId)); + List> stats = (List>) responseMap.get("trained_model_stats"); + String statusState = (String) XContentMapValues.extractValue("deployment_stats.allocation_status.state", stats.get(0)); + assertThat(responseMap.toString(), statusState, is(not(nullValue()))); + Integer count = (Integer) XContentMapValues.extractValue("deployment_stats.allocation_status.allocation_count", stats.get(0)); + assertThat(responseMap.toString(), count, is(1)); + } + + // wait for scale down. 
The scaler service will check every 10 seconds + assertBusy(() -> { + var statsMap = entityAsMap(getTrainedModelStats(modelId)); + List> innerStats = (List>) statsMap.get("trained_model_stats"); + Integer innerCount = (Integer) XContentMapValues.extractValue( + "deployment_stats.allocation_status.allocation_count", + innerStats.get(0) + ); + assertThat(statsMap.toString(), innerCount, is(0)); + }, 30, TimeUnit.SECONDS); + + var failures = new ConcurrentLinkedDeque(); + + // infer will scale up + int inferenceCount = 10; + var latch = new CountDownLatch(inferenceCount); + for (int i = 0; i < inferenceCount; i++) { + asyncInfer("Auto scale and infer", modelId, TimeValue.timeValueSeconds(5), new ResponseListener() { + @Override + public void onSuccess(Response response) { + latch.countDown(); + } + + @Override + public void onFailure(Exception exception) { + latch.countDown(); + failures.add(exception); + } + }); + } + + latch.await(); + assertThat(failures, empty()); + } + + @SuppressWarnings("unchecked") + public void testMultipleDeploymentsWaiting() throws Exception { + String id1 = "test_scale_from_zero_dep_1"; + String id2 = "test_scale_from_zero_dep_2"; + String id3 = "test_scale_from_zero_dep_3"; + var idsList = Arrays.asList(id1, id2, id3); + for (var modelId : idsList) { + createPassThroughModel(modelId); + putModelDefinition(modelId, PyTorchModelIT.BASE_64_ENCODED_MODEL, PyTorchModelIT.RAW_MODEL_SIZE); + putVocabulary(List.of("Auto", "scale", "and", "infer"), modelId); + + startDeployment(modelId, modelId, new AdaptiveAllocationsSettings(true, 0, 1)); + } + + // wait for scale down. 
The scaler service will check every 10 seconds + assertBusy(() -> { + var statsMap = entityAsMap(getTrainedModelStats("test_scale_from_zero_dep_*")); + List> innerStats = (List>) statsMap.get("trained_model_stats"); + assertThat(innerStats, hasSize(3)); + for (int i = 0; i < 3; i++) { + Integer innerCount = (Integer) XContentMapValues.extractValue( + "deployment_stats.allocation_status.allocation_count", + innerStats.get(i) + ); + assertThat(statsMap.toString(), innerCount, is(0)); + } + }, 30, TimeUnit.SECONDS); + + // infer will scale up + int inferenceCount = 10; + var latch = new CountDownLatch(inferenceCount); + for (int i = 0; i < inferenceCount; i++) { + asyncInfer("Auto scale and infer", randomFrom(idsList), TimeValue.timeValueSeconds(5), new ResponseListener() { + @Override + public void onSuccess(Response response) { + latch.countDown(); + } + + @Override + public void onFailure(Exception exception) { + latch.countDown(); + fail(exception.getMessage()); + } + }); + } + + latch.await(); + } +} diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PyTorchModelRestTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PyTorchModelRestTestCase.java index c785ae96c5c16..b89f1d0c77ed8 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PyTorchModelRestTestCase.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PyTorchModelRestTestCase.java @@ -10,13 +10,17 @@ import org.apache.http.util.EntityUtils; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseListener; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import 
org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; import org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus; import org.elasticsearch.xpack.core.ml.inference.assignment.Priority; import org.elasticsearch.xpack.core.ml.integration.MlRestTestStateCleaner; @@ -282,6 +286,27 @@ protected Response startDeployment( return client().performRequest(request); } + protected Response startDeployment(String modelId, String deploymentId, AdaptiveAllocationsSettings adaptiveAllocationsSettings) + throws IOException { + String endPoint = "/_ml/trained_models/" + + modelId + + "/deployment/_start" + + "?deployment_id=" + + deploymentId + + "&threads_per_allocation=1" + + "&wait_for=started"; + + XContentBuilder builder = JsonXContent.contentBuilder(); + builder.startObject(); + builder.field("adaptive_allocations", adaptiveAllocationsSettings); + builder.endObject(); + var body = Strings.toString(builder); + + Request request = new Request("POST", endPoint); + request.setJsonEntity(body); + return client().performRequest(request); + } + protected void stopDeployment(String modelId) throws IOException { stopDeployment(modelId, false, false); } @@ -325,6 +350,14 @@ protected Response infer(String input, String modelId, TimeValue timeout) throws return client().performRequest(request); } + protected void asyncInfer(String input, String modelId, TimeValue timeout, ResponseListener responseListener) throws IOException { + Request request = new Request("POST", "/_ml/trained_models/" + modelId + "/_infer?timeout=" + timeout.toString()); + request.setJsonEntity(Strings.format(""" 
+ { "docs": [{"input":"%s"}] } + """, input)); + client().performRequestAsync(request, responseListener); + } + protected Response infer(String input, String modelId) throws IOException { Request request = new Request("POST", "/_ml/trained_models/" + modelId + "/_infer?timeout=30s"); request.setJsonEntity(Strings.format(""" diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExternalInferModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExternalInferModelAction.java index 5603e9c4dca8d..a81a08519f157 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExternalInferModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExternalInferModelAction.java @@ -11,9 +11,11 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.ml.inference.adaptiveallocations.AdaptiveAllocationsScalerService; +import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentService; import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; @@ -27,7 +29,9 @@ public TransportExternalInferModelAction( ClusterService clusterService, XPackLicenseState licenseState, TrainedModelProvider trainedModelProvider, - AdaptiveAllocationsScalerService adaptiveAllocationsScalerService + AdaptiveAllocationsScalerService adaptiveAllocationsScalerService, + TrainedModelAssignmentService assignmentService, + ThreadPool threadPool ) { super( InferModelAction.EXTERNAL_NAME, @@ -38,7 +42,9 @@ public TransportExternalInferModelAction( clusterService, 
licenseState, trainedModelProvider, - adaptiveAllocationsScalerService + adaptiveAllocationsScalerService, + assignmentService, + threadPool ); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java index b69f8c7d62eb2..ba4483493da1d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java @@ -26,6 +26,7 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; @@ -42,7 +43,10 @@ import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.inference.InferenceWaitForAllocation; import org.elasticsearch.xpack.ml.inference.adaptiveallocations.AdaptiveAllocationsScalerService; +import org.elasticsearch.xpack.ml.inference.adaptiveallocations.ScaleFromZeroFeatureFlag; +import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentService; import org.elasticsearch.xpack.ml.inference.loadingservice.LocalModel; import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; @@ -68,6 +72,8 @@ public class TransportInternalInferModelAction extends HandledTransportAction { + var nodes = assignment.selectRandomNodesWeighedOnAllocations(request.request().numberOfDocuments(), 
RoutingState.STARTED); + + if (nodes.isEmpty()) { + request.listener() + .onFailure( + new IllegalStateException( + "[" + request.deploymentId() + "] error waiting for started allocations. The assignment has 0 started nodes" + ) + ); + } + + inferOnAssignmentNodes( + assignment.getDeploymentId(), + nodes, + request.request(), + request.responseBuilder(), + request.parentTaskId(), + request.listener() + ); + }); + } + + private void inferOnAssignmentNodes( + String deploymentId, + List> nodes, + Request request, + Response.Builder responseBuilder, + TaskId parentTaskId, + ActionListener listener + ) { AtomicInteger count = new AtomicInteger(); AtomicArray> results = new AtomicArray<>(nodes.size()); AtomicReference failure = new AtomicReference<>(); @@ -282,14 +339,14 @@ private void inferAgainstAllocatedModel( InferTrainedModelDeploymentAction.Request deploymentRequest; if (request.getTextInput() == null) { deploymentRequest = InferTrainedModelDeploymentAction.Request.forDocs( - assignment.getDeploymentId(), + deploymentId, request.getUpdate(), request.getObjectsToInfer().subList(startPos, startPos + node.v2()), request.getInferenceTimeout() ); } else { deploymentRequest = InferTrainedModelDeploymentAction.Request.forTextInput( - assignment.getDeploymentId(), + deploymentId, request.getUpdate(), request.getTextInput().subList(startPos, startPos + node.v2()), request.getInferenceTimeout() diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/InferenceWaitForAllocation.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/InferenceWaitForAllocation.java new file mode 100644 index 0000000000000..1142257a087c6 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/InferenceWaitForAllocation.java @@ -0,0 +1,192 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.inference; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.xpack.core.ml.action.InferModelAction; +import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; +import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentService; + +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiConsumer; +import java.util.function.Predicate; + +import static org.elasticsearch.core.Strings.format; + +/** + * Class for storing inference requests for ml trained models while + * scaling is in progress. Once the trained model has at least 1 + * allocation the stored requests are forwarded to a consumer for + * processing.Requests will timeout while waiting for scale. 
+ */ +public class InferenceWaitForAllocation { + + public static final int MAX_PENDING_REQUEST_COUNT = 100; + + /** + * Track details of the pending request + */ + public record WaitingRequest( + InferModelAction.Request request, + InferModelAction.Response.Builder responseBuilder, + TaskId parentTaskId, + ActionListener listener + ) { + public String deploymentId() { + return request.getId(); + } + } + + private static final Logger logger = LogManager.getLogger(InferenceWaitForAllocation.class); + + private final TrainedModelAssignmentService assignmentService; + private final BiConsumer queuedConsumer; + private AtomicInteger pendingRequestCount = new AtomicInteger(); + + /** + * Create with consumer of the successful requests + * @param assignmentService Trained model assignment service + * @param onInferenceScaledConsumer The consumer of the waiting request called once an + * allocation is available. + */ + public InferenceWaitForAllocation( + TrainedModelAssignmentService assignmentService, + BiConsumer onInferenceScaledConsumer + ) { + this.assignmentService = assignmentService; + this.queuedConsumer = onInferenceScaledConsumer; + } + + /** + * Wait for at least 1 allocation to be started then process the + * inference request. + * If the pending request count is greater than {@link #MAX_PENDING_REQUEST_COUNT} + * the request listener is failed with a too many requests exception + * The timeout is the inference request timeout. + * @param request The inference request details + */ + public synchronized void waitForAssignment(WaitingRequest request) { + if (pendingRequestCount.incrementAndGet() >= MAX_PENDING_REQUEST_COUNT) { + pendingRequestCount.decrementAndGet(); + request.listener.onFailure( + new ElasticsearchStatusException( + "Rejected inference request waiting for an allocation of deployment [{}]. 
Too many pending requests", + RestStatus.TOO_MANY_REQUESTS, + request.request.getId() + ) + ); + return; + } + + var predicate = new DeploymentHasAtLeastOneAllocation(request.deploymentId()); + + assignmentService.waitForAssignmentCondition( + request.deploymentId(), + predicate, + request.request().getInferenceTimeout(), + new WaitingListener(request, predicate) + ); + } + + private static class DeploymentHasAtLeastOneAllocation implements Predicate { + + private final String deploymentId; + private AtomicReference exception = new AtomicReference<>(); + + DeploymentHasAtLeastOneAllocation(String deploymentId) { + this.deploymentId = ExceptionsHelper.requireNonNull(deploymentId, "deployment_id"); + } + + @Override + public boolean test(ClusterState clusterState) { + TrainedModelAssignment trainedModelAssignment = TrainedModelAssignmentMetadata.assignmentForDeploymentId( + clusterState, + deploymentId + ).orElse(null); + if (trainedModelAssignment == null) { + logger.info(() -> format("[%s] assignment was null while waiting to scale up", deploymentId)); + exception.set( + new ElasticsearchStatusException( + "[{}] Error waiting for a model allocation, model assignment has been removed", + RestStatus.CONFLICT, + deploymentId + ) + ); + return true; // don't try again + } + + Map nodeFailuresAndReasons = new HashMap<>(); + for (var nodeIdAndRouting : trainedModelAssignment.getNodeRoutingTable().entrySet()) { + if (RoutingState.FAILED.equals(nodeIdAndRouting.getValue().getState())) { + nodeFailuresAndReasons.put(nodeIdAndRouting.getKey(), nodeIdAndRouting.getValue().getReason()); + } + } + if (nodeFailuresAndReasons.isEmpty() == false) { + if (nodeFailuresAndReasons.size() == trainedModelAssignment.getNodeRoutingTable().size()) { + exception.set( + new ElasticsearchStatusException( + "[{}] Error waiting for a model allocation, all nodes have failed with errors [{}]", + RestStatus.INTERNAL_SERVER_ERROR, + trainedModelAssignment.getDeploymentId(), + nodeFailuresAndReasons 
+ ) + ); + return true; // don't try again + } else { + logger.warn("Deployment [{}] has failed routes [{}]", trainedModelAssignment.getDeploymentId(), nodeFailuresAndReasons); + } + } + + var routable = trainedModelAssignment.getNodeRoutingTable().values().stream().filter(RoutingInfo::isRoutable).findFirst(); + return routable.isPresent(); + } + } + + private class WaitingListener implements TrainedModelAssignmentService.WaitForAssignmentListener { + + private final WaitingRequest request; + private final DeploymentHasAtLeastOneAllocation predicate; + + private WaitingListener(WaitingRequest request, DeploymentHasAtLeastOneAllocation predicate) { + this.request = request; + this.predicate = predicate; + } + + @Override + public void onResponse(TrainedModelAssignment assignment) { + // assignment is started, do inference + pendingRequestCount.decrementAndGet(); + + if (predicate.exception.get() != null) { + onFailure(predicate.exception.get()); + return; + } + + queuedConsumer.accept(request, assignment); + } + + @Override + public void onFailure(Exception e) { + pendingRequestCount.decrementAndGet(); + request.listener().onFailure(e); + } + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScaler.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScaler.java index 58259b87c6b00..bbd63e0d3bfe9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScaler.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScaler.java @@ -10,7 +10,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.Strings; -import org.elasticsearch.core.TimeValue; /** * Processes measured requests counts and inference times and decides whether @@ -22,12 +21,6 @@ 
public class AdaptiveAllocationsScaler { static final double SCALE_UP_THRESHOLD = 0.9; private static final double SCALE_DOWN_THRESHOLD = 0.85; - /** - * The time interval without any requests that has to pass, before scaling down - * to zero allocations (in case min_allocations = 0). - */ - private static final long SCALE_TO_ZERO_AFTER_NO_REQUESTS_TIME_SECONDS = TimeValue.timeValueMinutes(15).getSeconds(); - /** * If the max_number_of_allocations is not set, use this value for now to prevent scaling up * to high numbers due to possible bugs or unexpected behaviour in the scaler. @@ -51,8 +44,9 @@ public class AdaptiveAllocationsScaler { private Double lastMeasuredRequestRate; private Double lastMeasuredInferenceTime; private Long lastMeasuredQueueSize; + private long scaleToZeroAfterNoRequestsSeconds; - AdaptiveAllocationsScaler(String deploymentId, int numberOfAllocations) { + AdaptiveAllocationsScaler(String deploymentId, int numberOfAllocations, long scaleToZeroAfterNoRequestsSeconds) { this.deploymentId = deploymentId; // A smoothing factor of 100 roughly means the last 100 measurements have an effect // on the estimated values. 
The sampling time is 10 seconds, so approximately the @@ -73,6 +67,7 @@ public class AdaptiveAllocationsScaler { lastMeasuredRequestRate = null; lastMeasuredInferenceTime = null; lastMeasuredQueueSize = null; + this.scaleToZeroAfterNoRequestsSeconds = scaleToZeroAfterNoRequestsSeconds; } void setMinMaxNumberOfAllocations(Integer minNumberOfAllocations, Integer maxNumberOfAllocations) { @@ -143,6 +138,7 @@ Double getInferenceTimeEstimate() { } Integer scale() { + if (requestRateEstimator.hasValue() == false) { return null; } @@ -170,9 +166,14 @@ Integer scale() { if (maxNumberOfAllocations != null) { numberOfAllocations = Math.min(numberOfAllocations, maxNumberOfAllocations); } + if ((minNumberOfAllocations == null || minNumberOfAllocations == 0) - && timeWithoutRequestsSeconds > SCALE_TO_ZERO_AFTER_NO_REQUESTS_TIME_SECONDS) { - logger.debug("[{}] adaptive allocations scaler: scaling down to zero, because of no requests.", deploymentId); + && timeWithoutRequestsSeconds > scaleToZeroAfterNoRequestsSeconds) { + + if (oldNumberOfAllocations != 0) { + // avoid logging this message if there is no change + logger.debug("[{}] adaptive allocations scaler: scaling down to zero, because of no requests.", deploymentId); + } numberOfAllocations = 0; neededNumberOfAllocations = 0; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java index 8f43044a465c2..1c3a73a409dd1 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java @@ -25,6 +25,7 @@ import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; import org.elasticsearch.xpack.core.ml.action.GetDeploymentStatsAction; import org.elasticsearch.xpack.core.ml.action.UpdateTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentStats; @@ -40,6 +41,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.ConcurrentSkipListSet; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Function; @@ -183,6 +185,12 @@ Collection observeDouble(Function observeDouble(Function deploymentIdsWithInFlightScaleFromZeroRequests = new ConcurrentSkipListSet<>(); + public AdaptiveAllocationsScalerService( ThreadPool threadPool, ClusterService clusterService, @@ -236,6 +248,7 @@ public AdaptiveAllocationsScalerService( scalers = new HashMap<>(); metrics = new Metrics(); busy = new AtomicBoolean(false); + scaleToZeroAfterNoRequestsSeconds = SCALE_TO_ZERO_AFTER_NO_REQUESTS_TIME_SECONDS; } public synchronized void start() { @@ -279,7 +292,11 @@ private synchronized void updateAutoscalers(ClusterState state) { && assignment.getAdaptiveAllocationsSettings().getEnabled() == Boolean.TRUE) { AdaptiveAllocationsScaler adaptiveAllocationsScaler = scalers.computeIfAbsent( assignment.getDeploymentId(), - key -> new AdaptiveAllocationsScaler(assignment.getDeploymentId(), assignment.totalTargetAllocations()) + key -> new AdaptiveAllocationsScaler( + assignment.getDeploymentId(), + assignment.totalTargetAllocations(), + scaleToZeroAfterNoRequestsSeconds + ) ); adaptiveAllocationsScaler.setMinMaxNumberOfAllocations( assignment.getAdaptiveAllocationsSettings().getMinNumberOfAllocations(), @@ -404,22 +421,42 @@ private void processDeploymentStats(GetDeploymentStatsAction.Response statsRespo if (newNumberOfAllocations > numberOfAllocations.get(deploymentId)) { lastScaleUpTimesMillis.put(deploymentId, now); } - 
updateNumberOfAllocations(deploymentId, newNumberOfAllocations); + updateNumberOfAllocations( + deploymentId, + newNumberOfAllocations, + updateAssigmentListener(deploymentId, newNumberOfAllocations) + ); } } } public boolean maybeStartAllocation(TrainedModelAssignment assignment) { if (assignment.getAdaptiveAllocationsSettings() != null - && assignment.getAdaptiveAllocationsSettings().getEnabled() == Boolean.TRUE) { - lastScaleUpTimesMillis.put(assignment.getDeploymentId(), System.currentTimeMillis()); - updateNumberOfAllocations(assignment.getDeploymentId(), 1); + && assignment.getAdaptiveAllocationsSettings().getEnabled() == Boolean.TRUE + && assignment.getAdaptiveAllocationsSettings().getMinNumberOfAllocations() == 0) { + + // Prevent against a flurry of scale up requests. + if (deploymentIdsWithInFlightScaleFromZeroRequests.contains(assignment.getDeploymentId()) == false) { + lastScaleUpTimesMillis.put(assignment.getDeploymentId(), System.currentTimeMillis()); + var updateListener = updateAssigmentListener(assignment.getDeploymentId(), 1); + var cleanUpListener = ActionListener.runAfter( + updateListener, + () -> deploymentIdsWithInFlightScaleFromZeroRequests.remove(assignment.getDeploymentId()) + ); + + deploymentIdsWithInFlightScaleFromZeroRequests.add(assignment.getDeploymentId()); + updateNumberOfAllocations(assignment.getDeploymentId(), 1, cleanUpListener); + } return true; } return false; } - private void updateNumberOfAllocations(String deploymentId, int numberOfAllocations) { + private void updateNumberOfAllocations( + String deploymentId, + int numberOfAllocations, + ActionListener listener + ) { UpdateTrainedModelDeploymentAction.Request updateRequest = new UpdateTrainedModelDeploymentAction.Request(deploymentId); updateRequest.setNumberOfAllocations(numberOfAllocations); updateRequest.setIsInternal(true); @@ -428,35 +465,38 @@ private void updateNumberOfAllocations(String deploymentId, int numberOfAllocati ClientHelper.ML_ORIGIN, 
UpdateTrainedModelDeploymentAction.INSTANCE, updateRequest, - ActionListener.wrap(updateResponse -> { - logger.info("adaptive allocations scaler: scaled [{}] to [{}] allocations.", deploymentId, numberOfAllocations); - threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) - .execute( - () -> inferenceAuditor.info( - deploymentId, - Strings.format( - "adaptive allocations scaler: scaled [%s] to [%s] allocations.", - deploymentId, - numberOfAllocations - ) - ) - ); - }, e -> { - logger.atLevel(Level.WARN) - .withThrowable(e) - .log("adaptive allocations scaler: scaling [{}] to [{}] allocations failed.", deploymentId, numberOfAllocations); - threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) - .execute( - () -> inferenceAuditor.warning( + listener + ); + } + + private ActionListener updateAssigmentListener( + String deploymentId, + int numberOfAllocations + ) { + return ActionListener.wrap(updateResponse -> { + logger.debug("adaptive allocations scaler: scaled [{}] to [{}] allocations.", deploymentId, numberOfAllocations); + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) + .execute( + () -> inferenceAuditor.info( + deploymentId, + Strings.format("adaptive allocations scaler: scaled [%s] to [%s] allocations.", deploymentId, numberOfAllocations) + ) + ); + }, e -> { + logger.atLevel(Level.WARN) + .withThrowable(e) + .log("adaptive allocations scaler: scaling [{}] to [{}] allocations failed.", deploymentId, numberOfAllocations); + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) + .execute( + () -> inferenceAuditor.warning( + deploymentId, + Strings.format( + "adaptive allocations scaler: scaling [%s] to [%s] allocations failed.", deploymentId, - Strings.format( - "adaptive allocations scaler: scaling [%s] to [%s] allocations failed.", - deploymentId, - numberOfAllocations - ) + numberOfAllocations ) - ); - }) - ); + ) + ); + }); } } diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/ScaleFromZeroFeatureFlag.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/ScaleFromZeroFeatureFlag.java new file mode 100644 index 0000000000000..4c446b65db9dd --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/ScaleFromZeroFeatureFlag.java @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.inference.adaptiveallocations; + +import org.elasticsearch.common.util.FeatureFlag; + +public class ScaleFromZeroFeatureFlag { + private ScaleFromZeroFeatureFlag() {} + + private static final FeatureFlag FEATURE_FLAG = new FeatureFlag("ml_scale_from_zero"); + + public static boolean isEnabled() { + return FEATURE_FLAG.isEnabled(); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java index 65fa47e1c510d..0439ef9db30b4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java @@ -845,10 +845,9 @@ private void updateDeployment( return; } } - boolean hasUpdates = (numberOfAllocations != null - && Objects.equals(numberOfAllocations, existingAssignment.getTaskParams().getNumberOfAllocations()) == false) - || Objects.equals(adaptiveAllocationsSettings, existingAssignment.getAdaptiveAllocationsSettings()) == false; + boolean 
hasUpdates = hasUpdates(numberOfAllocations, adaptiveAllocationsSettingsUpdates, existingAssignment); if (hasUpdates == false) { + logger.info("no updates"); listener.onResponse(existingAssignment); return; } @@ -917,6 +916,17 @@ public void clusterStateProcessed(ClusterState oldState, ClusterState newState) updateAssignment(clusterState, existingAssignment, numberOfAllocations, adaptiveAllocationsSettings, updatedStateListener); } + static boolean hasUpdates( + Integer proposedNumberOfAllocations, + AdaptiveAllocationsSettings proposedAdaptiveSettings, + TrainedModelAssignment existingAssignment + ) { + return (proposedNumberOfAllocations != null + && Objects.equals(proposedNumberOfAllocations, existingAssignment.getTaskParams().getNumberOfAllocations()) == false) + || (proposedAdaptiveSettings != null + && Objects.equals(proposedAdaptiveSettings, existingAssignment.getAdaptiveAllocationsSettings()) == false); + } + private AdaptiveAllocationsSettings getAdaptiveAllocationsSettings( AdaptiveAllocationsSettings original, AdaptiveAllocationsSettings updates diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerServiceTests.java index 4aaddc91231f3..79f2a913902df 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerServiceTests.java @@ -14,6 +14,8 @@ import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.TimeValue; import org.elasticsearch.telemetry.metric.MeterRegistry; @@ -38,6 +40,9 @@ import java.time.Instant; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicBoolean; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; @@ -66,6 +71,8 @@ public void setUp() throws Exception { new ScalingExecutorBuilder(MachineLearning.UTILITY_THREAD_POOL_NAME, 0, 1, TimeValue.timeValueMinutes(10), false) ); clusterService = mock(ClusterService.class); + when(clusterService.getSettings()).thenReturn(Settings.EMPTY); + when(clusterService.getClusterSettings()).thenReturn(new ClusterSettings(Settings.EMPTY, Set.of())); client = mock(Client.class); inferenceAuditor = mock(InferenceAuditor.class); meterRegistry = mock(MeterRegistry.class); @@ -240,4 +247,99 @@ public void test() throws IOException { service.stop(); } + + public void testMaybeStartAllocation() { + AdaptiveAllocationsScalerService service = new AdaptiveAllocationsScalerService( + threadPool, + clusterService, + client, + inferenceAuditor, + meterRegistry, + true, + 1 + ); + + when(client.threadPool()).thenReturn(threadPool); + + // will not start when adaptive allocations are not enabled + assertFalse(service.maybeStartAllocation(TrainedModelAssignment.Builder.empty(taskParams(1), null).build())); + assertFalse( + service.maybeStartAllocation( + TrainedModelAssignment.Builder.empty(taskParams(1), new AdaptiveAllocationsSettings(Boolean.FALSE, 1, 2)).build() + ) + ); + // min allocations > 0 + assertFalse( + service.maybeStartAllocation( + TrainedModelAssignment.Builder.empty(taskParams(0), new AdaptiveAllocationsSettings(Boolean.TRUE, 1, 2)).build() + ) + ); + assertTrue( + service.maybeStartAllocation( + TrainedModelAssignment.Builder.empty(taskParams(0), new 
AdaptiveAllocationsSettings(Boolean.TRUE, 0, 2)).build() + ) + ); + } + + public void testMaybeStartAllocation_BlocksMultipleRequests() throws Exception { + AdaptiveAllocationsScalerService service = new AdaptiveAllocationsScalerService( + threadPool, + clusterService, + client, + inferenceAuditor, + meterRegistry, + true, + 1 + ); + + var latch = new CountDownLatch(1); + var scalingUpRequestSent = new AtomicBoolean(); + + when(client.threadPool()).thenReturn(threadPool); + doAnswer(invocationOnMock -> { + @SuppressWarnings("unchecked") + var listener = (ActionListener) invocationOnMock.getArguments()[2]; + scalingUpRequestSent.set(true); + latch.await(); + listener.onResponse(mock(CreateTrainedModelAssignmentAction.Response.class)); + return Void.TYPE; + }).when(client).execute(eq(UpdateTrainedModelDeploymentAction.INSTANCE), any(), any()); + + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(() -> { + var starting = service.maybeStartAllocation( + TrainedModelAssignment.Builder.empty(taskParams(0), new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 2)).build() + ); + assertTrue(starting); + }); + + // wait for the request to be sent + assertBusy(() -> assertTrue(scalingUpRequestSent.get())); + + // Due to the inflight request this will not trigger an update request + assertTrue( + service.maybeStartAllocation( + TrainedModelAssignment.Builder.empty(taskParams(0), new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 2)).build() + ) + ); + // release the inflight request + latch.countDown(); + + verify(client, times(1)).execute(eq(UpdateTrainedModelDeploymentAction.INSTANCE), any(), any()); + } + + private StartTrainedModelDeploymentAction.TaskParams taskParams(int numAllocations) { + return new StartTrainedModelDeploymentAction.TaskParams( + "foo", + "foo", + 1000L, + numAllocations, + 1, + 100, + null, + Priority.NORMAL, + 100L, + 100L + ); + } + } diff --git 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerTests.java index 1887ebe8050e0..0fb8ad314343a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerTests.java @@ -7,7 +7,10 @@ package org.elasticsearch.xpack.ml.inference.adaptiveallocations; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; +import org.junit.Before; import java.util.Random; @@ -15,11 +18,21 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.nullValue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class AdaptiveAllocationsScalerTests extends ESTestCase { + private ClusterService clusterService; + + @Before + public void createMocks() throws Exception { + clusterService = mock(ClusterService.class); + when(clusterService.getSettings()).thenReturn(Settings.EMPTY); + } + public void testAutoscaling_scaleUpAndDown() { - AdaptiveAllocationsScaler adaptiveAllocationsScaler = new AdaptiveAllocationsScaler("test-deployment", 1); + AdaptiveAllocationsScaler adaptiveAllocationsScaler = new AdaptiveAllocationsScaler("test-deployment", 1, 60); // With 1 allocation the system can handle 500 requests * 0.020 sec/request. // To handle remaining requests the system should scale to 2 allocations. 
@@ -47,7 +60,7 @@ public void testAutoscaling_scaleUpAndDown() { } public void testAutoscaling_noOscillating() { - AdaptiveAllocationsScaler adaptiveAllocationsScaler = new AdaptiveAllocationsScaler("test-deployment", 1); + AdaptiveAllocationsScaler adaptiveAllocationsScaler = new AdaptiveAllocationsScaler("test-deployment", 1, 60); // With 1 allocation the system can handle 880 requests * 0.010 sec/request. adaptiveAllocationsScaler.process(new AdaptiveAllocationsScalerService.Stats(880, 0, 0, 0.010), 10, 1); @@ -75,7 +88,7 @@ public void testAutoscaling_noOscillating() { } public void testAutoscaling_respectMinMaxAllocations() { - AdaptiveAllocationsScaler adaptiveAllocationsScaler = new AdaptiveAllocationsScaler("test-deployment", 1); + AdaptiveAllocationsScaler adaptiveAllocationsScaler = new AdaptiveAllocationsScaler("test-deployment", 1, 60); adaptiveAllocationsScaler.setMinMaxNumberOfAllocations(2, 5); // Even though there are no requests, scale to the minimum of 2 allocations. @@ -98,7 +111,7 @@ public void testAutoscaling_respectMinMaxAllocations() { } public void testEstimation_highVariance() { - AdaptiveAllocationsScaler adaptiveAllocationsScaler = new AdaptiveAllocationsScaler("test-deployment", 1); + AdaptiveAllocationsScaler adaptiveAllocationsScaler = new AdaptiveAllocationsScaler("test-deployment", 1, 60); Random random = new Random(42); @@ -140,7 +153,7 @@ public void testEstimation_highVariance() { } public void testAutoscaling_maxAllocationsSafeguard() { - AdaptiveAllocationsScaler adaptiveAllocationsScaler = new AdaptiveAllocationsScaler("test-deployment", 1); + AdaptiveAllocationsScaler adaptiveAllocationsScaler = new AdaptiveAllocationsScaler("test-deployment", 1, 60); adaptiveAllocationsScaler.process(new AdaptiveAllocationsScalerService.Stats(1_000_000, 10_000_000, 1, 0.05), 10, 1); assertThat(adaptiveAllocationsScaler.scale(), equalTo(32)); adaptiveAllocationsScaler.setMinMaxNumberOfAllocations(2, 77); @@ -148,7 +161,12 @@ public void 
testAutoscaling_maxAllocationsSafeguard() { } public void testAutoscaling_scaleDownToZeroAllocations() { - AdaptiveAllocationsScaler adaptiveAllocationsScaler = new AdaptiveAllocationsScaler("test-deployment", 1); + int scaleDownAfterInactivitySeconds = 60 * 15; // scale down to 0 after 15 minutes + AdaptiveAllocationsScaler adaptiveAllocationsScaler = new AdaptiveAllocationsScaler( + "test-deployment", + 1, + scaleDownAfterInactivitySeconds + ); // 1 hour with 1 request per 1 seconds, so don't scale. for (int i = 0; i < 3600; i++) { adaptiveAllocationsScaler.process(new AdaptiveAllocationsScalerService.Stats(1, 0, 0, 0.05), 1, 1); @@ -178,7 +196,7 @@ public void testAutoscaling_scaleDownToZeroAllocations() { } public void testAutoscaling_dontScaleDownToZeroAllocationsWhenMinAllocationsIsSet() { - AdaptiveAllocationsScaler adaptiveAllocationsScaler = new AdaptiveAllocationsScaler("test-deployment", 1); + AdaptiveAllocationsScaler adaptiveAllocationsScaler = new AdaptiveAllocationsScaler("test-deployment", 1, 60); adaptiveAllocationsScaler.setMinMaxNumberOfAllocations(1, null); // 1 hour with no requests, diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java index 7b5c928f1f81a..5a358ef833cb5 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java @@ -52,6 +52,7 @@ import org.elasticsearch.xpack.core.ml.action.StartDataFrameAnalyticsAction; import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.action.UpdateTrainedModelAssignmentRoutingInfoAction; +import 
org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentState; import org.elasticsearch.xpack.core.ml.inference.assignment.Priority; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; @@ -2077,6 +2078,20 @@ private void assertThatStoppingAssignmentPreventsMutation( ); } + public void testHasUpdates() { + var assignment = TrainedModelAssignment.Builder.empty(newParams("foo", 10_000L, 1, 1), null).build(); + assertFalse(TrainedModelAssignmentClusterService.hasUpdates(1, null, assignment)); + assertTrue(TrainedModelAssignmentClusterService.hasUpdates(2, null, assignment)); + + var adaptiveAllocations = new AdaptiveAllocationsSettings(true, 1, 4); + assignment = TrainedModelAssignment.Builder.empty(newParams("foo", 10_000L, 1, 1), adaptiveAllocations).build(); + assertFalse(TrainedModelAssignmentClusterService.hasUpdates(null, new AdaptiveAllocationsSettings(true, 1, 4), assignment)); + assertTrue(TrainedModelAssignmentClusterService.hasUpdates(null, new AdaptiveAllocationsSettings(true, 0, 4), assignment)); + + assertFalse(TrainedModelAssignmentClusterService.hasUpdates(1, new AdaptiveAllocationsSettings(true, 1, 4), assignment)); + assertTrue(TrainedModelAssignmentClusterService.hasUpdates(1, new AdaptiveAllocationsSettings(true, 0, 4), assignment)); + } + private TrainedModelAssignmentClusterService createClusterService(int maxLazyNodes) { return new TrainedModelAssignmentClusterService( Settings.builder().put(MachineLearningField.MAX_LAZY_ML_NODES.getKey(), maxLazyNodes).build(), From 56d5abc3c1542357bc9a15a7b841bba2900ba019 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 15 Oct 2024 23:07:37 +1100 Subject: [PATCH 102/449] Mute org.elasticsearch.xpack.rank.rrf.RRFRetrieverBuilderNestedDocsIT testRRFExplainWithNamedRetrievers #114820 --- muted-tests.yml | 3 +++ 1 file changed, 3 
insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 9a84370de121f..51390811e0e6c 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -425,6 +425,9 @@ tests: - class: org.elasticsearch.xpack.eql.EqlRestIT method: testUnicodeChars issue: https://github.com/elastic/elasticsearch/issues/114791 +- class: org.elasticsearch.xpack.rank.rrf.RRFRetrieverBuilderNestedDocsIT + method: testRRFExplainWithNamedRetrievers + issue: https://github.com/elastic/elasticsearch/issues/114820 # Examples: # From 8cf2cb35f64bbbacbd4148d3a1d08664d504beff Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Tue, 15 Oct 2024 15:39:33 +0300 Subject: [PATCH 103/449] Fix minor formatting issue (#114815) The list with two options doesn't get rendered as a list, due to the snippet in between. https://www.elastic.co/guide/en/elasticsearch/reference/master/passthrough.html#passthrough-conflicts --- docs/reference/data-streams/tsds.asciidoc | 2 +- .../mapping/types/passthrough.asciidoc | 18 +++++++++--------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/docs/reference/data-streams/tsds.asciidoc b/docs/reference/data-streams/tsds.asciidoc index 6dc559732bf90..461c0a1272e96 100644 --- a/docs/reference/data-streams/tsds.asciidoc +++ b/docs/reference/data-streams/tsds.asciidoc @@ -302,7 +302,7 @@ and can dynamically match new fields. However, {es} will reject any mapping updates that add scripted, runtime, or non-dimension fields that match the `index.routing_path` value. -<> fields may be configured +<> fields may be configured as dimension containers. In this case, their sub-fields get included to the routing path automatically. 
diff --git a/docs/reference/mapping/types/passthrough.asciidoc b/docs/reference/mapping/types/passthrough.asciidoc index 53b2cc5ed260b..f4f1945d21537 100644 --- a/docs/reference/mapping/types/passthrough.asciidoc +++ b/docs/reference/mapping/types/passthrough.asciidoc @@ -70,9 +70,9 @@ GET my-index-000001/_search It's possible for conflicting names to arise, for fields that are defined within different scopes: - 1. A pass-through object is defined next to a field that has the same name as one of the pass-through object - sub-fields, e.g. - + a. A pass-through object is defined next to a field that has the same name as one of the pass-through object +sub-fields, e.g. ++ [source,console] -------------------------------------------------- PUT my-index-000001/_doc/1 @@ -83,12 +83,12 @@ PUT my-index-000001/_doc/1 "id": "bar" } -------------------------------------------------- ++ +In this case, references to `id` point to the field at the root level, while field `attributes.id` +can only be accessed using the full path. - In this case, references to `id` point to the field at the root level, while field `attributes.id` - can only be accessed using the full path. - - 1. Two (or more) pass-through objects are defined within the same object and contain fields with the same name, e.g. - + b. Two (or more) pass-through objects are defined within the same object and contain fields with the same name, e.g. ++ [source,console] -------------------------------------------------- PUT my-index-000002 @@ -117,7 +117,7 @@ PUT my-index-000002 } } -------------------------------------------------- - ++ In this case, param `priority` is used for conflict resolution, with the higher values taking precedence. In the example above, `resource.attributes` has higher priority than `attributes`, so references to `id` point to the field within `resource.attributes`. `attributes.id` can still be accessed using its full path. 
From fc23f2f1c667cbcee50b4acd1ae0ccf7aa6d8088 Mon Sep 17 00:00:00 2001 From: Quentin Pradet Date: Tue, 15 Oct 2024 17:35:26 +0400 Subject: [PATCH 104/449] [DOCS] Fix User agent processor properties (#112518) --- docs/reference/ingest/processors/user-agent.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/ingest/processors/user-agent.asciidoc b/docs/reference/ingest/processors/user-agent.asciidoc index d2bbc1c8643ce..7f1bad1798fc8 100644 --- a/docs/reference/ingest/processors/user-agent.asciidoc +++ b/docs/reference/ingest/processors/user-agent.asciidoc @@ -20,7 +20,7 @@ The ingest-user-agent module ships by default with the regexes.yaml made availab | `field` | yes | - | The field containing the user agent string. | `target_field` | no | user_agent | The field that will be filled with the user agent details. | `regex_file` | no | - | The name of the file in the `config/ingest-user-agent` directory containing the regular expressions for parsing the user agent string. Both the directory and the file have to be created before starting Elasticsearch. If not specified, ingest-user-agent will use the regexes.yaml from uap-core it ships with (see below). -| `properties` | no | [`name`, `major`, `minor`, `patch`, `build`, `os`, `os_name`, `os_major`, `os_minor`, `device`] | Controls what properties are added to `target_field`. +| `properties` | no | [`name`, `os`, `device`, `original`, `version`] | Controls what properties are added to `target_field`. | `extract_device_type` | no | `false` | beta:[] Extracts device type from the user agent string on a best-effort basis. 
| `ignore_missing` | no | `false` | If `true` and `field` does not exist, the processor quietly exits without modifying the document |====== From 88ff578f8a4deddb4f51f12d3353b053927eb498 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 16 Oct 2024 00:37:13 +1100 Subject: [PATCH 105/449] Mute org.elasticsearch.ingest.geoip.HttpClientTests org.elasticsearch.ingest.geoip.HttpClientTests #112618 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 51390811e0e6c..45479896dc408 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -428,6 +428,8 @@ tests: - class: org.elasticsearch.xpack.rank.rrf.RRFRetrieverBuilderNestedDocsIT method: testRRFExplainWithNamedRetrievers issue: https://github.com/elastic/elasticsearch/issues/114820 +- class: org.elasticsearch.ingest.geoip.HttpClientTests + issue: https://github.com/elastic/elasticsearch/issues/112618 # Examples: # From f8074f468bc8934307976ea086aec939c525f6df Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 16 Oct 2024 00:56:04 +1100 Subject: [PATCH 106/449] Mute org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityWithApmTracingRestIT testTracingCrossCluster #112731 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 45479896dc408..dc0a6ad13a4a7 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -430,6 +430,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/114820 - class: org.elasticsearch.ingest.geoip.HttpClientTests issue: https://github.com/elastic/elasticsearch/issues/112618 +- class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityWithApmTracingRestIT + method: testTracingCrossCluster + issue: https://github.com/elastic/elasticsearch/issues/112731 # Examples: # From 39168e139d98b2eacade007fcd616715a6106c10 Mon Sep 17 
00:00:00 2001 From: Tim Grein Date: Tue, 15 Oct 2024 15:56:21 +0200 Subject: [PATCH 107/449] [EIS] Validate EIS Gateway URL if set (#114600) --- .../ElasticInferenceServiceSettings.java | 53 ++++++++++++++- ...InferenceServiceSparseEmbeddingsModel.java | 8 ++- .../ElasticInferenceServiceSettingsTests.java | 64 +++++++++++++++++++ ...enceServiceSparseEmbeddingsModelTests.java | 28 ++++++++ .../elastic/ElasticInferenceServiceTests.java | 2 +- 5 files changed, 152 insertions(+), 3 deletions(-) create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettingsTests.java diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java index 8525710c6cf23..170b39e0bf76c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java @@ -7,14 +7,65 @@ package org.elasticsearch.xpack.inference.services.elastic; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import java.net.URI; +import java.net.URISyntaxException; import java.util.List; +import java.util.Objects; +import java.util.Set; +/** + * Class encapsulating any global setting for the EIS integration. 
+ */ public class ElasticInferenceServiceSettings { - static final Setting EIS_GATEWAY_URL = Setting.simpleString("xpack.inference.eis.gateway.url", Setting.Property.NodeScope); + public static final Setting EIS_GATEWAY_URL = Setting.simpleString( + "xpack.inference.eis.gateway.url", + new EisGatewayURLValidator(), + Setting.Property.NodeScope + ); + + private static final Logger log = LogManager.getLogger(ElasticInferenceServiceSettings.class); + + /** + * Class to validate the EIS Gateway url set via `xpack.inference.eis.gateway.url`. + */ + public static class EisGatewayURLValidator implements Setting.Validator { + + private static final Set VALID_EIS_GATEWAY_SCHEMES = Set.of("http", "https"); + + @Override + public void validate(String value) { + if (Objects.isNull(value) || value.isEmpty()) { + // No validation needed, if eis-gateway URL is not set + log.debug("eis-gateway url not set. Skipping validation"); + return; + } + + try { + var uri = new URI(value); + var scheme = uri.getScheme(); + + if (scheme == null || VALID_EIS_GATEWAY_SCHEMES.contains(scheme) == false) { + throw new IllegalArgumentException( + "[" + + scheme + + "] is not a valid URI scheme for the setting [" + + ElasticInferenceServiceSettings.EIS_GATEWAY_URL.getKey() + + "]. 
Use one of [" + + String.join(",", VALID_EIS_GATEWAY_SCHEMES) + + "]" + ); + } + } catch (URISyntaxException e) { + throw new IllegalArgumentException("[" + e.getInput() + "] is not a valid URI", e); + } + } + } // Adjust this variable to be volatile, if the setting can be updated at some point in time private final String eisGatewayUrl; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java index bbbae736dbeb9..b18b362dd099e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java @@ -108,6 +108,12 @@ private URI createUri() throws URISyntaxException { default -> throw new IllegalArgumentException("Unsupported model for EIS [" + modelId + "]"); } - return new URI(elasticInferenceServiceComponents().eisGatewayUrl() + "/sparse-text-embedding/" + modelIdUriPath); + var uriString = elasticInferenceServiceComponents().eisGatewayUrl() + "/sparse-text-embedding/" + modelIdUriPath; + + // We perform the same validation here as when reading the setting to make sure that our extended URI is still valid + // This method throws, if the URI is invalid + new ElasticInferenceServiceSettings.EisGatewayURLValidator().validate(uriString); + + return new URI(uriString); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettingsTests.java new file mode 100644 index 0000000000000..a053a5eb33cfe --- /dev/null 
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettingsTests.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.elastic; + +import org.elasticsearch.test.ESTestCase; + +public class ElasticInferenceServiceSettingsTests extends ESTestCase { + + public void testEisGatewayURLValidator_Validate_ThrowError_OnMissingURIScheme() { + expectThrows( + IllegalArgumentException.class, + () -> new ElasticInferenceServiceSettings.EisGatewayURLValidator().validate("www.missing-scheme-gateway-url.com") + ); + } + + public void testEisGatewayURLValidator_Validate_ThrowError_OnWrongURIScheme() { + expectThrows( + IllegalArgumentException.class, + () -> new ElasticInferenceServiceSettings.EisGatewayURLValidator().validate("file://www.missing-scheme-gateway-url.com") + ); + } + + public void testEisGatewayURLValidator_Validate_DoesNotThrowError_ForHTTP() { + var scheme = "http"; + + try { + new ElasticInferenceServiceSettings.EisGatewayURLValidator().validate(scheme + "://www.valid-gateway-url.com"); + } catch (Exception e) { + fail(e, "Should not throw exception for " + "[" + scheme + "]"); + } + } + + public void testEisGatewayURLValidator_Validate_DoesNotThrowError_ForHTTPS() { + var scheme = "https"; + + try { + new ElasticInferenceServiceSettings.EisGatewayURLValidator().validate(scheme + "://www.valid-gateway-url.com"); + } catch (Exception e) { + fail(e, "Should not throw exception for " + "[" + scheme + "]"); + } + } + + public void testEisGatewayURLValidator_Validate_DoesNotThrowError_IfURLNull() { + try { + new ElasticInferenceServiceSettings.EisGatewayURLValidator().validate(null); + } catch (Exception e) { + fail(e, "Should 
not throw exception for, if eis-gateway URL is null"); + } + } + + public void testEisGatewayURLValidator_Validate_DoesNotThrowError_IfURLEmpty() { + try { + new ElasticInferenceServiceSettings.EisGatewayURLValidator().validate(""); + } catch (Exception e) { + fail(e, "Should not throw exception for, if eis-gateway URL is empty"); + } + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModelTests.java index c9f4234331221..27d86e3d59461 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModelTests.java @@ -15,6 +15,34 @@ public class ElasticInferenceServiceSparseEmbeddingsModelTests extends ESTestCase { + public void testCreateURI_ThrowError_OnMissingURIScheme() { + expectThrows(IllegalArgumentException.class, () -> createModel("www.missing-scheme-gateway-url.com")); + } + + public void testCreateURI_ThrowError_OnWrongURIScheme() { + expectThrows(IllegalArgumentException.class, () -> createModel("file://www.missing-scheme-gateway-url.com")); + } + + public void testCreateURI_DoesNotThrowError_ForHTTP() { + var scheme = "http"; + + try { + createModel(scheme + "://www.valid-gateway-url.com"); + } catch (Exception e) { + fail(e, "Should not throw exception for " + "[" + scheme + "]"); + } + } + + public void testCreateURI_DoesNotThrowError_ForHTTPS() { + var scheme = "https"; + + try { + createModel(scheme + "://www.valid-gateway-url.com"); + } catch (Exception e) { + fail(e, "Should not throw exception for " + "[" + scheme + "]"); + } + } + public static 
ElasticInferenceServiceSparseEmbeddingsModel createModel(String url) { return createModel(url, null); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java index d10c70c6f0f5e..5a5eae9f51670 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java @@ -492,7 +492,7 @@ private ElasticInferenceService createServiceWithMockSender() { return new ElasticInferenceService( mock(HttpRequestSender.Factory.class), createWithEmptySettings(threadPool), - new ElasticInferenceServiceComponents(null) + new ElasticInferenceServiceComponents("http://valid-eis-gateway-url.com") ); } } From 37f03dc40dfcc55cc08fdbb6d30079f74b230710 Mon Sep 17 00:00:00 2001 From: Luke Whiting Date: Tue, 15 Oct 2024 15:00:53 +0100 Subject: [PATCH 108/449] #111893 Add Warnings For Missing Index Templates (#114589) * Add data stream template validation to snapshot restore * Add data stream template validation to data stream promotion endpoint * Add new assertion for response headers Add a new assertion to synchronously execute a request and check the response contains a specific warning header * Test for warning header on snapshot restore When missing templates * Test for promotion warnings * Add documentation for the potential error states * PR changes * Spotless reformatting * Add logic to look in snapshot global metadata This checks if the snapshot contains a matching template for the DS * Comment on test cleanup to explain it was copied * Removed cluster service field --- .../promote-data-stream-api.asciidoc | 4 + .../reference/snapshot-restore/index.asciidoc | 3 + 
.../datastreams/DataStreamsSnapshotsIT.java | 155 +++++++++++++++++- .../PromoteDataStreamTransportAction.java | 33 ++++ .../snapshots/RestoreService.java | 33 ++++ .../hamcrest/ElasticsearchAssertions.java | 69 ++++++++ .../elasticsearch/xpack/ccr/AutoFollowIT.java | 112 +++++++++++++ 7 files changed, 407 insertions(+), 2 deletions(-) diff --git a/docs/reference/data-streams/promote-data-stream-api.asciidoc b/docs/reference/data-streams/promote-data-stream-api.asciidoc index 111c7a2256f8a..5ba9c4d9fad0e 100644 --- a/docs/reference/data-streams/promote-data-stream-api.asciidoc +++ b/docs/reference/data-streams/promote-data-stream-api.asciidoc @@ -18,6 +18,10 @@ available, the data stream in the local cluster can be promoted to a regular data stream, which allows these data streams to be rolled over in the local cluster. +NOTE: When promoting a data stream, ensure the local cluster has a data stream enabled index template that matches the data stream. +If this is missing, the data stream will not be able to roll over until a matching index template is created. +This will affect the lifecycle management of the data stream and interfere with the data stream size and retention. + [source,console] ---- POST /_data_stream/_promote/my-data-stream diff --git a/docs/reference/snapshot-restore/index.asciidoc b/docs/reference/snapshot-restore/index.asciidoc index 390f6664391bd..33645034c30a1 100644 --- a/docs/reference/snapshot-restore/index.asciidoc +++ b/docs/reference/snapshot-restore/index.asciidoc @@ -48,6 +48,9 @@ Snapshots don't contain or back up: * Node configuration files * <> +NOTE: When restoring a data stream, if the target cluster does not have an index template that matches the data stream, the data stream will not be able to roll over until a matching index template is created. +This will affect the lifecycle management of the data stream and interfere with the data stream size and retention. 
+ [discrete] [[feature-state]] === Feature states diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java index 36fb02dcff0d8..638e4d813a79a 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequestBuilder; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; @@ -45,6 +46,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; import org.elasticsearch.snapshots.RestoreInfo; +import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInProgressException; import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.snapshots.SnapshotRestoreException; @@ -62,7 +64,9 @@ import java.util.stream.Collectors; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoWarningHeaderOnResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertWarningHeaderOnResponse; import static 
org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -80,6 +84,8 @@ public class DataStreamsSnapshotsIT extends AbstractSnapshotIntegTestCase { private static final Map DOCUMENT_SOURCE = Collections.singletonMap("@timestamp", 123); public static final String REPO = "repo"; public static final String SNAPSHOT = "snap"; + public static final String TEMPLATE_1_ID = "t1"; + public static final String TEMPLATE_2_ID = "t2"; private Client client; private String dsBackingIndexName; @@ -103,8 +109,8 @@ public void setup() throws Exception { Path location = randomRepoPath(); createRepository(REPO, "fs", location); - DataStreamIT.putComposableIndexTemplate("t1", List.of("ds", "other-ds")); - DataStreamIT.putComposableIndexTemplate("t2", """ + DataStreamIT.putComposableIndexTemplate(TEMPLATE_1_ID, List.of("ds", "other-ds")); + DataStreamIT.putComposableIndexTemplate(TEMPLATE_2_ID, """ { "properties": { "@timestamp": { @@ -1335,4 +1341,149 @@ public void testRestoreDataStreamAliasWithConflictingIndicesAlias() throws Excep ); assertThat(e.getMessage(), containsString("data stream alias and indices alias have the same name (my-alias)")); } + + public void testWarningHeaderOnRestoreWithoutTemplates() throws Exception { + String datastreamName = "ds"; + + CreateSnapshotResponse createSnapshotResponse = client.admin() + .cluster() + .prepareCreateSnapshot(TEST_REQUEST_TIMEOUT, REPO, SNAPSHOT) + .setWaitForCompletion(true) + .setIndices(datastreamName) + .setIncludeGlobalState(false) + .get(); + + RestStatus status = createSnapshotResponse.getSnapshotInfo().status(); + SnapshotId snapshotId = createSnapshotResponse.getSnapshotInfo().snapshotId(); + assertEquals(RestStatus.OK, status); + + assertEquals(Collections.singletonList(dsBackingIndexName), getSnapshot(REPO, SNAPSHOT).indices()); + + assertAcked( + client.execute( + DeleteDataStreamAction.INSTANCE, + new 
DeleteDataStreamAction.Request(TEST_REQUEST_TIMEOUT, datastreamName, "other-ds") + ) + ); + + assertAcked( + client.execute( + TransportDeleteComposableIndexTemplateAction.TYPE, + new TransportDeleteComposableIndexTemplateAction.Request(TEMPLATE_1_ID) + ).get() + ); + + RestoreSnapshotRequestBuilder request = client.admin() + .cluster() + .prepareRestoreSnapshot(TEST_REQUEST_TIMEOUT, REPO, SNAPSHOT) + .setWaitForCompletion(true) + .setIndices(datastreamName); + + assertWarningHeaderOnResponse( + client, + request, + "Snapshot [" + + snapshotId + + "] contains data stream [" + + datastreamName + + "] but custer does not have a matching index " + + "template. This will cause rollover to fail until a matching index template is created" + ); + + } + + public void testWarningHeaderAbsentOnRestoreWithTemplates() throws Exception { + String datastreamName = "ds"; + + CreateSnapshotResponse createSnapshotResponse = client.admin() + .cluster() + .prepareCreateSnapshot(TEST_REQUEST_TIMEOUT, REPO, SNAPSHOT) + .setWaitForCompletion(true) + .setIndices(datastreamName) + .setIncludeGlobalState(false) + .get(); + + RestStatus status = createSnapshotResponse.getSnapshotInfo().status(); + SnapshotId snapshotId = createSnapshotResponse.getSnapshotInfo().snapshotId(); + assertEquals(RestStatus.OK, status); + + assertEquals(Collections.singletonList(dsBackingIndexName), getSnapshot(REPO, SNAPSHOT).indices()); + + assertAcked( + client.execute( + DeleteDataStreamAction.INSTANCE, + new DeleteDataStreamAction.Request(TEST_REQUEST_TIMEOUT, datastreamName, "other-ds", "with-fs") + ) + ); + + RestoreSnapshotRequestBuilder request = client.admin() + .cluster() + .prepareRestoreSnapshot(TEST_REQUEST_TIMEOUT, REPO, SNAPSHOT) + .setWaitForCompletion(true) + .setIndices(datastreamName); + + assertNoWarningHeaderOnResponse( + client, + request, + "but custer does not have a matching index template. 
This will cause rollover to fail until a matching index " + + "template is created" + ); + + } + + /** + * This test is muted as it's awaiting the same fix as {@link #testPartialRestoreSnapshotThatIncludesDataStreamWithGlobalState()} + */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107515") + public void testWarningHeaderOnRestoreTemplateFromSnapshot() throws Exception { + String datastreamName = "ds"; + + CreateSnapshotResponse createSnapshotResponse = client.admin() + .cluster() + .prepareCreateSnapshot(TEST_REQUEST_TIMEOUT, REPO, SNAPSHOT) + .setWaitForCompletion(true) + .setIndices(datastreamName) + .setIncludeGlobalState(true) + .get(); + + RestStatus status = createSnapshotResponse.getSnapshotInfo().status(); + SnapshotId snapshotId = createSnapshotResponse.getSnapshotInfo().snapshotId(); + assertEquals(RestStatus.OK, status); + + assertEquals(Collections.singletonList(dsBackingIndexName), getSnapshot(REPO, SNAPSHOT).indices()); + + assertAcked( + client.execute( + DeleteDataStreamAction.INSTANCE, + new DeleteDataStreamAction.Request(TEST_REQUEST_TIMEOUT, datastreamName, "other-ds") + ) + ); + + assertAcked( + client.execute( + TransportDeleteComposableIndexTemplateAction.TYPE, + new TransportDeleteComposableIndexTemplateAction.Request(TEMPLATE_1_ID) + ).get() + ); + + RestoreSnapshotRequestBuilder request = client.admin() + .cluster() + .prepareRestoreSnapshot(TEST_REQUEST_TIMEOUT, REPO, SNAPSHOT) + .setWaitForCompletion(true) + .setRestoreGlobalState(true) + .setIndices(datastreamName); + + assertNoWarningHeaderOnResponse( + client, + request, + "Snapshot [" + + snapshotId + + "] contains data stream [" + + datastreamName + + "] but custer does not have a matching index " + + "template. 
This will cause rollover to fail until a matching index template is created" + ); + + } + } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/PromoteDataStreamTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/PromoteDataStreamTransportAction.java index b9f5bdea9c90e..edc17433ab746 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/PromoteDataStreamTransportAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/PromoteDataStreamTransportAction.java @@ -8,6 +8,8 @@ */ package org.elasticsearch.datastreams.action; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.datastreams.PromoteDataStreamAction; @@ -23,6 +25,8 @@ import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.logging.HeaderWarning; +import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.indices.SystemIndices; @@ -31,8 +35,12 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import static org.elasticsearch.core.Strings.format; + public class PromoteDataStreamTransportAction extends AcknowledgedTransportMasterNodeAction { + private static final Logger logger = LogManager.getLogger(PromoteDataStreamTransportAction.class); + private final SystemIndices systemIndices; @Inject @@ -94,16 +102,41 @@ private void submitUnbatchedTask(@SuppressWarnings("SameParameterValue") String static ClusterState promoteDataStream(ClusterState currentState, PromoteDataStreamAction.Request request) { DataStream 
dataStream = currentState.getMetadata().dataStreams().get(request.getName()); + if (dataStream == null) { throw new ResourceNotFoundException("data stream [" + request.getName() + "] does not exist"); } + warnIfTemplateMissingForDatastream(dataStream, currentState); + DataStream promotedDataStream = dataStream.promoteDataStream(); Metadata.Builder metadata = Metadata.builder(currentState.metadata()); metadata.put(promotedDataStream); return ClusterState.builder(currentState).metadata(metadata).build(); } + private static void warnIfTemplateMissingForDatastream(DataStream dataStream, ClusterState currentState) { + var datastreamName = dataStream.getName(); + + var matchingIndex = currentState.metadata() + .templatesV2() + .values() + .stream() + .filter(cit -> cit.getDataStreamTemplate() != null) + .flatMap(cit -> cit.indexPatterns().stream()) + .anyMatch(pattern -> Regex.simpleMatch(pattern, datastreamName)); + + if (matchingIndex == false) { + String warningMessage = format( + "Data stream [%s] does not have a matching index template. 
This will cause rollover to fail until a matching index " + + "template is created", + datastreamName + ); + logger.warn(() -> warningMessage); + HeaderWarning.addWarning(warningMessage); + } + } + @Override protected ClusterBlockException checkBlock(PromoteDataStreamAction.Request request, ClusterState state) { return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java index e901dc28ea541..cf023b0e629c6 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.SnapshotDeletionsInProgress; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.AliasMetadata; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.DataStreamAlias; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -52,6 +53,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.ClusterSettings; @@ -398,6 +400,8 @@ private void startRestore( Map dataStreamsToRestore = result.v1(); Map dataStreamAliasesToRestore = result.v2(); + validateDataStreamTemplatesExistAndWarnIfMissing(dataStreamsToRestore, snapshotInfo, globalMetadata); + // Remove the data streams from the list of requested indices requestIndices.removeAll(dataStreamsToRestore.keySet()); @@ -510,6 +514,35 @@ private void startRestore( ); } + private void 
validateDataStreamTemplatesExistAndWarnIfMissing( + Map dataStreamsToRestore, + SnapshotInfo snapshotInfo, + Metadata globalMetadata + ) { + + Stream streams = Stream.concat( + clusterService.state().metadata().templatesV2().values().stream(), + globalMetadata == null ? Stream.empty() : globalMetadata.templatesV2().values().stream() + ); + + Set templatePatterns = streams.filter(cit -> cit.getDataStreamTemplate() != null) + .flatMap(cit -> cit.indexPatterns().stream()) + .collect(Collectors.toSet()); + + for (String name : dataStreamsToRestore.keySet()) { + if (templatePatterns.stream().noneMatch(pattern -> Regex.simpleMatch(pattern, name))) { + String warningMessage = format( + "Snapshot [%s] contains data stream [%s] but custer does not have a matching index template. This will cause" + + " rollover to fail until a matching index template is created", + snapshotInfo.snapshotId(), + name + ); + logger.warn(() -> warningMessage); + HeaderWarning.addWarning(warningMessage); + } + } + } + @SuppressForbidden(reason = "legacy usage of unbatched task") // TODO add support for batching here private void submitUnbatchedTask(@SuppressWarnings("SameParameterValue") String source, ClusterStateUpdateTask task) { clusterService.submitUnbatchedStateUpdateTask(source, task); diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index ad61a63f7c46e..552e301650d9d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -14,7 +14,9 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; +import 
org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.RequestBuilder; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequestBuilder; @@ -873,6 +875,73 @@ public static void awaitLatch(CountDownLatch latch, long timeout, TimeUnit unit) assertThat(message, isCountedDown, is(true)); } + /** + * Check the response of a client request to ensure it has a warning header that contains the provided string + * + * Currently, this allows a fixed 10 seconds for response to be received + * @param client Client making the request - Required to access the response headers + * @param requestBuilder Request to be made + * @param toMatch String to match in the warning headers + * @param Type of the response + * @throws InterruptedException If the request times out + */ + public static void assertWarningHeaderOnResponse( + Client client, + ActionRequestBuilder requestBuilder, + String toMatch + ) throws InterruptedException { + assertWarningHeaderMatchOnResponse(client, requestBuilder, hasItem(containsString(toMatch))); + } + + /** + * Check the response of a client request to ensure it does not have a warning header that contains the provided string + * + * Currently, this allows a fixed 10 seconds for response to be received + * @param client Client making the request - Required to access the response headers + * @param requestBuilder Request to be made + * @param toMatch String to not match in the warning headers + * @param Type of the response + * @throws InterruptedException If the request times out + */ + public static void assertNoWarningHeaderOnResponse( + Client client, + ActionRequestBuilder requestBuilder, + String toMatch + ) throws InterruptedException { + assertWarningHeaderMatchOnResponse(client, requestBuilder, not(hasItem(containsString(toMatch)))); + } + + private static void assertWarningHeaderMatchOnResponse( + Client client, + ActionRequestBuilder requestBuilder, + 
Matcher> matcher + ) throws InterruptedException { + CountDownLatch latch = new CountDownLatch(1); + requestBuilder.execute(new ActionListener<>() { + @Override + public void onResponse(T response) { + try { + final var warningHeaders = client.threadPool().getThreadContext().getResponseHeaders().get("Warning"); + assertThat(warningHeaders, matcher); + } finally { + latch.countDown(); + } + } + + @Override + public void onFailure(Exception e) { + try { + throw new AssertionError("Failed to execute request", e); + } finally { + latch.countDown(); + } + } + }); + if (latch.await(10, TimeUnit.SECONDS) == false) { + fail("Did not receive request response before timeout"); + } + } + /** * Compares two maps recursively, using arrays comparisons for byte[] through Arrays.equals(byte[], byte[]) */ diff --git a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java index 6f66e7e386066..9303191fcf75f 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java @@ -13,6 +13,7 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.WarningFailureException; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; @@ -1120,6 +1121,117 @@ public void testAutoFollowSearchableSnapshotsFails() throws Exception { } } + public void testNoWarningOnPromoteDatastreamWhenTemplateExistsOnFollower() throws Exception { + if ("follow".equals(targetCluster) == false) { + return; + } + testDataStreamPromotionWarnings(true); + } + + public void testWarningOnPromoteDatastreamWhenTemplateDoesNotExistsOnFollower() 
{ + if ("follow".equals(targetCluster) == false) { + return; + } + WarningFailureException exception = assertThrows(WarningFailureException.class, () -> testDataStreamPromotionWarnings(false)); + assertThat( + exception.getMessage(), + containsString( + "does not have a matching index template. This will cause rollover to fail until a matching index template is created]" + ) + ); + } + + private void testDataStreamPromotionWarnings(Boolean createFollowerTemplate) throws Exception { + final int numDocs = 64; + final String dataStreamName = getTestName().toLowerCase(Locale.ROOT) + "-dopromo"; + final String autoFollowPatternName = getTestName().toLowerCase(Locale.ROOT); + + int initialNumberOfSuccessfulFollowedIndices = getNumberOfSuccessfulFollowedIndices(); + List backingIndexNames = null; + try { + // Create index template + Request putComposableIndexTemplateRequest = new Request("POST", "/_index_template/" + getTestName().toLowerCase(Locale.ROOT)); + putComposableIndexTemplateRequest.setJsonEntity("{\"index_patterns\":[\"" + dataStreamName + "*\"],\"data_stream\":{}}"); + + if (createFollowerTemplate) { + assertOK(client().performRequest(putComposableIndexTemplateRequest)); + } + + // Create auto follow pattern + createAutoFollowPattern(client(), autoFollowPatternName, dataStreamName + "*", "leader_cluster", null); + + // Create data stream and ensure that it is auto followed + try (var leaderClient = buildLeaderClient()) { + assertOK(leaderClient.performRequest(putComposableIndexTemplateRequest)); + + for (int i = 0; i < numDocs; i++) { + var indexRequest = new Request("POST", "/" + dataStreamName + "/_doc"); + indexRequest.addParameter("refresh", "true"); + indexRequest.setJsonEntity("{\"@timestamp\": \"" + DATE_FORMAT.format(new Date()) + "\",\"message\":\"abc\"}"); + assertOK(leaderClient.performRequest(indexRequest)); + } + verifyDataStream(leaderClient, dataStreamName, backingIndexName(dataStreamName, 1)); + verifyDocuments(leaderClient, dataStreamName, 
numDocs); + } + assertBusy(() -> { + assertThat(getNumberOfSuccessfulFollowedIndices(), equalTo(initialNumberOfSuccessfulFollowedIndices + 1)); + verifyDataStream(client(), dataStreamName, backingIndexName(dataStreamName, 1)); + ensureYellow(dataStreamName); + verifyDocuments(client(), dataStreamName, numDocs); + }); + + // Rollover in leader cluster and ensure second backing index is replicated: + try (var leaderClient = buildLeaderClient()) { + var rolloverRequest = new Request("POST", "/" + dataStreamName + "/_rollover"); + assertOK(leaderClient.performRequest(rolloverRequest)); + verifyDataStream(leaderClient, dataStreamName, backingIndexName(dataStreamName, 1), backingIndexName(dataStreamName, 2)); + + var indexRequest = new Request("POST", "/" + dataStreamName + "/_doc"); + indexRequest.addParameter("refresh", "true"); + indexRequest.setJsonEntity("{\"@timestamp\": \"" + DATE_FORMAT.format(new Date()) + "\",\"message\":\"abc\"}"); + assertOK(leaderClient.performRequest(indexRequest)); + verifyDocuments(leaderClient, dataStreamName, numDocs + 1); + } + assertBusy(() -> { + assertThat(getNumberOfSuccessfulFollowedIndices(), equalTo(initialNumberOfSuccessfulFollowedIndices + 2)); + verifyDataStream(client(), dataStreamName, backingIndexName(dataStreamName, 1), backingIndexName(dataStreamName, 2)); + ensureYellow(dataStreamName); + verifyDocuments(client(), dataStreamName, numDocs + 1); + }); + + backingIndexNames = verifyDataStream( + client(), + dataStreamName, + backingIndexName(dataStreamName, 1), + backingIndexName(dataStreamName, 2) + ); + + // Promote local data stream + var promoteRequest = new Request("POST", "/_data_stream/_promote/" + dataStreamName); + Response response = client().performRequest(promoteRequest); + assertOK(response); + } finally { + if (backingIndexNames == null) { + // we failed to compute the actual backing index names in the test because we failed earlier on, guessing them on a + // best-effort basis + backingIndexNames = 
List.of(backingIndexName(dataStreamName, 1), backingIndexName(dataStreamName, 2)); + } + + // These cleanup methods are copied from the finally block of other Data Stream tests in this class however + // they may no longer be required but have been included for completeness + cleanUpFollower(backingIndexNames, List.of(dataStreamName), List.of(autoFollowPatternName)); + cleanUpLeader(backingIndexNames.subList(0, 1), List.of(dataStreamName), List.of()); + Request deleteTemplateRequest = new Request("DELETE", "/_index_template/" + getTestName().toLowerCase(Locale.ROOT)); + if (createFollowerTemplate) { + assertOK(client().performRequest(deleteTemplateRequest)); + } + try (var leaderClient = buildLeaderClient()) { + assertOK(leaderClient.performRequest(deleteTemplateRequest)); + } + } + + } + private int getNumberOfSuccessfulFollowedIndices() throws IOException { return getNumberOfSuccessfulFollowedIndices(client()); } From 24b26f2975db2c91c7a6343a7b8cfbf072ab3fbb Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 16 Oct 2024 01:12:08 +1100 Subject: [PATCH 109/449] Mute org.elasticsearch.xpack.enrich.EnrichIT testImmutablePolicy #114839 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index dc0a6ad13a4a7..1c7ee862d6453 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -433,6 +433,9 @@ tests: - class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityWithApmTracingRestIT method: testTracingCrossCluster issue: https://github.com/elastic/elasticsearch/issues/112731 +- class: org.elasticsearch.xpack.enrich.EnrichIT + method: testImmutablePolicy + issue: https://github.com/elastic/elasticsearch/issues/114839 # Examples: # From 6ec7a9ab23f8cb82b4f3303c2cc6859c10fd0973 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 15 Oct 2024 15:27:07 +0100 Subject: [PATCH 110/449] Introduce utils for _really_ stashing the thread context 
(#114786) `ThreadContext#stashContext` does not yield a completely fresh context: it preserves headers related to tracing the original request. That may be appropriate in many situations, but sometimes we really do want to detach processing entirely from the original task. This commit introduces new utilities to do that. --- .../remote/RemoteClusterNodesAction.java | 3 +- .../action/bulk/IncrementalBulkService.java | 6 +- .../CoordinationDiagnosticsService.java | 5 +- .../cluster/coordination/LagDetector.java | 5 +- .../service/ClusterApplierService.java | 3 +- .../common/util/concurrent/ThreadContext.java | 39 +++++++++- .../RetentionLeaseBackgroundSyncAction.java | 10 +-- .../index/seqno/RetentionLeaseSyncAction.java | 73 +++++++++---------- .../index/shard/RefreshListeners.java | 3 +- .../transport/InboundHandler.java | 3 +- .../transport/OutboundHandler.java | 3 +- .../transport/RemoteClusterConnection.java | 3 +- .../transport/SniffConnectionStrategy.java | 6 +- .../util/concurrent/ThreadContextTests.java | 51 +++++++++++++ .../xpack/ccr/CcrLicenseChecker.java | 3 +- .../ccr/action/TransportUnfollowAction.java | 3 +- .../xpack/ccr/repository/CcrRepository.java | 5 +- .../license/RemoteClusterLicenseChecker.java | 4 +- .../xpack/core/ClientHelper.java | 2 +- 19 files changed, 140 insertions(+), 90 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteClusterNodesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteClusterNodesAction.java index 8a321f484027f..9bdef4bfe4a13 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteClusterNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteClusterNodesAction.java @@ -115,8 +115,7 @@ protected void doExecute(Task task, Request request, ActionListener li } private void executeWithSystemContext(Request request, ThreadContext threadContext, ActionListener listener) { - 
try (var ignore = threadContext.stashContext()) { - threadContext.markAsSystemContext(); + try (var ignore = threadContext.newEmptySystemContext()) { if (request.remoteClusterServer) { final NodesInfoRequest nodesInfoRequest = new NodesInfoRequest().clear() .addMetrics(NodesInfoMetrics.Metric.REMOTE_CLUSTER_SERVER.metricName()); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/IncrementalBulkService.java b/server/src/main/java/org/elasticsearch/action/bulk/IncrementalBulkService.java index 58ffe25e08e49..2e7c87301b2f6 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/IncrementalBulkService.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/IncrementalBulkService.java @@ -141,8 +141,7 @@ public void addItems(List> items, Releasable releasable, Runn if (shouldBackOff()) { final boolean isFirstRequest = incrementalRequestSubmitted == false; incrementalRequestSubmitted = true; - try (ThreadContext.StoredContext ignored = threadContext.stashContext()) { - requestContext.restore(); + try (var ignored = threadContext.restoreExistingContext(requestContext)) { final ArrayList toRelease = new ArrayList<>(releasables); releasables.clear(); bulkInProgress = true; @@ -188,8 +187,7 @@ public void lastItems(List> items, Releasable releasable, Act } else { assert bulkRequest != null; if (internalAddItems(items, releasable)) { - try (ThreadContext.StoredContext ignored = threadContext.stashContext()) { - requestContext.restore(); + try (var ignored = threadContext.restoreExistingContext(requestContext)) { final ArrayList toRelease = new ArrayList<>(releasables); releasables.clear(); // We do not need to set this back to false as this will be the last request. 
diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationDiagnosticsService.java b/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationDiagnosticsService.java index 7b9690ad2e2b2..84f9c42d27ece 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationDiagnosticsService.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationDiagnosticsService.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.ListenableFuture; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @@ -194,9 +193,7 @@ public void start() { * system context. */ if (clusterService.localNode().isMasterNode() == false) { - final ThreadContext threadContext = transportService.getThreadPool().getThreadContext(); - try (ThreadContext.StoredContext ignored = threadContext.stashContext()) { - threadContext.markAsSystemContext(); + try (var ignored = transportService.getThreadPool().getThreadContext().newEmptySystemContext()) { beginPollingRemoteMasterStabilityDiagnostic(); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/LagDetector.java b/server/src/main/java/org/elasticsearch/cluster/coordination/LagDetector.java index d73143850b64f..124c17d705378 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/LagDetector.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/LagDetector.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.PrioritizedThrottledTaskRunner; -import org.elasticsearch.common.util.concurrent.ThreadContext; import 
org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; @@ -271,9 +270,7 @@ public void onFailure(Exception e) { @Override public void onResponse(Releasable releasable) { boolean success = false; - final ThreadContext threadContext = transportService.getThreadPool().getThreadContext(); - try (ThreadContext.StoredContext ignored = threadContext.stashContext()) { - threadContext.markAsSystemContext(); + try (var ignored = transportService.getThreadPool().getThreadContext().newEmptySystemContext()) { client.execute( TransportNodesHotThreadsAction.TYPE, new NodesHotThreadsRequest( diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java index 3e1f436006aa7..c34d0d19988c8 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java @@ -334,8 +334,7 @@ private void submitStateUpdateTask( final ThreadContext threadContext = threadPool.getThreadContext(); final Supplier storedContextSupplier = threadContext.newRestorableContext(true); - try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { - threadContext.markAsSystemContext(); + try (var ignore = threadContext.newEmptySystemContext()) { threadPoolExecutor.execute( new UpdateTask( priority, diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java index 7c001a39a306d..a9e13b86a5159 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -101,7 +101,7 @@ public ThreadContext(Settings settings) { } /** - * Removes the current context and resets a default 
context. The removed context can be + * Removes the current context and resets a default context except for headers involved in task tracing. The removed context can be * restored by closing the returned {@link StoredContext}. * @return a stored context that will restore the current context to its state at the point this method was called */ @@ -159,6 +159,28 @@ public StoredContext stashContextPreservingRequestHeaders(final String... reques return stashContextPreservingRequestHeaders(Set.of(requestHeaders)); } + /** + * Removes the current context and replaces it with a completely empty default context, detaching execution entirely from the calling + * context. The calling context can be restored by closing the returned {@link StoredContext}. Similar to {@link #stashContext()} except + * that this method does not even preserve tracing-related headers. + */ + public StoredContext newEmptyContext() { + final var callingContext = threadLocal.get(); + threadLocal.set(DEFAULT_CONTEXT); + return storedOriginalContext(callingContext); + } + + /** + * Removes the current context and replaces it with a completely empty system context, detaching execution entirely from the calling + * context. The calling context can be restored by closing the returned {@link StoredContext}. Similar to {@link #stashContext()} except + * that this method does not even preserve tracing-related headers. + */ + public StoredContext newEmptySystemContext() { + final var callingContext = threadLocal.get(); + threadLocal.set(DEFAULT_CONTEXT.setSystemContext()); + return storedOriginalContext(callingContext); + } + /** * When using a {@link org.elasticsearch.telemetry.tracing.Tracer} to capture activity in Elasticsearch, when a parent span is already * in progress, it is necessary to start a new context before beginning a child span. 
This method creates a context, @@ -330,6 +352,16 @@ public StoredContext newStoredContextPreservingResponseHeaders() { }; } + /** + * Capture the current context and then restore the given context, returning a {@link StoredContext} that reverts back to the current + * context again. Equivalent to using {@link #newStoredContext()} and then calling {@code existingContext.restore()}. + */ + public StoredContext restoreExistingContext(StoredContext existingContext) { + final var originalContext = threadLocal.get(); + existingContext.restore(); + return storedOriginalContext(originalContext); + } + /** * Just like {@link #stashContext()} but no default context is set. */ @@ -914,14 +946,13 @@ private class ContextPreservingRunnable implements WrappedRunnable { private final ThreadContext.StoredContext ctx; private ContextPreservingRunnable(Runnable in) { - ctx = newStoredContext(); + this.ctx = newStoredContext(); this.in = in; } @Override public void run() { - try (ThreadContext.StoredContext ignore = stashContext()) { - ctx.restore(); + try (var ignore = restoreExistingContext(ctx)) { in.run(); } } diff --git a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseBackgroundSyncAction.java b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseBackgroundSyncAction.java index 81b6ad136f129..cdeaa177dfd5e 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseBackgroundSyncAction.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseBackgroundSyncAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; @@ -95,14 +94,9 @@ protected void doExecute(Task task, Request 
request, ActionListener listener ) { - final ThreadContext threadContext = threadPool.getThreadContext(); - try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { + try (var ignore = threadPool.getThreadContext().newEmptySystemContext()) { // we have to execute under the system context so that if security is enabled the sync is authorized - threadContext.markAsSystemContext(); final Request request = new Request(shardId, retentionLeases); - try (var ignored = threadContext.newTraceContext()) { - final ReplicationTask task = (ReplicationTask) taskManager.register("transport", "retention_lease_sync", request); - transportService.sendChildRequest( - clusterService.localNode(), - transportPrimaryAction, - new ConcreteShardRequest<>(request, primaryAllocationId, primaryTerm), - task, - transportOptions, - new TransportResponseHandler() { - @Override - public ReplicationResponse read(StreamInput in) throws IOException { - return newResponseInstance(in); - } - - @Override - public Executor executor() { - return TransportResponseHandler.TRANSPORT_WORKER; - } - - @Override - public void handleResponse(ReplicationResponse response) { - task.setPhase("finished"); - taskManager.unregister(task); - listener.onResponse(response); - } - - @Override - public void handleException(TransportException e) { - LOGGER.log(getExceptionLogLevel(e), () -> format("%s retention lease sync failed", shardId), e); - task.setPhase("finished"); - taskManager.unregister(task); - listener.onFailure(e); - } + final ReplicationTask task = (ReplicationTask) taskManager.register("transport", "retention_lease_sync", request); + transportService.sendChildRequest( + clusterService.localNode(), + transportPrimaryAction, + new ConcreteShardRequest<>(request, primaryAllocationId, primaryTerm), + task, + transportOptions, + new TransportResponseHandler() { + @Override + public ReplicationResponse read(StreamInput in) throws IOException { + return newResponseInstance(in); } - ); - } + + 
@Override + public Executor executor() { + return TransportResponseHandler.TRANSPORT_WORKER; + } + + @Override + public void handleResponse(ReplicationResponse response) { + task.setPhase("finished"); + taskManager.unregister(task); + listener.onResponse(response); + } + + @Override + public void handleException(TransportException e) { + LOGGER.log(getExceptionLogLevel(e), () -> format("%s retention lease sync failed", shardId), e); + task.setPhase("finished"); + taskManager.unregister(task); + listener.onFailure(e); + } + } + ); } } diff --git a/server/src/main/java/org/elasticsearch/index/shard/RefreshListeners.java b/server/src/main/java/org/elasticsearch/index/shard/RefreshListeners.java index 8a54b842a3792..8a2fc396c6737 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/RefreshListeners.java +++ b/server/src/main/java/org/elasticsearch/index/shard/RefreshListeners.java @@ -147,8 +147,7 @@ public boolean addOrNotify(Translog.Location location, Consumer listene if (refreshForcers == 0 && roomForListener(maxRefreshes, listeners, checkpointRefreshListeners)) { ThreadContext.StoredContext storedContext = threadContext.newStoredContextPreservingResponseHeaders(); Consumer contextPreservingListener = forced -> { - try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { - storedContext.restore(); + try (var ignore = threadContext.restoreExistingContext(storedContext)) { listener.accept(forced); } }; diff --git a/server/src/main/java/org/elasticsearch/transport/InboundHandler.java b/server/src/main/java/org/elasticsearch/transport/InboundHandler.java index dd14319e332e0..01069609ac742 100644 --- a/server/src/main/java/org/elasticsearch/transport/InboundHandler.java +++ b/server/src/main/java/org/elasticsearch/transport/InboundHandler.java @@ -109,7 +109,8 @@ private void messageReceived(TcpChannel channel, InboundMessage message, long st TransportResponseHandler responseHandler = null; ThreadContext threadContext = 
threadPool.getThreadContext(); - try (ThreadContext.StoredContext existing = threadContext.stashContext()) { + assert threadContext.isDefaultContext(); + try (var ignored = threadContext.newStoredContext()) { // Place the context with the headers from the message threadContext.setHeaders(header.getHeaders()); threadContext.putTransient("_remote_address", remoteAddress); diff --git a/server/src/main/java/org/elasticsearch/transport/OutboundHandler.java b/server/src/main/java/org/elasticsearch/transport/OutboundHandler.java index 279208ecf8b3b..c341b6e2a541c 100644 --- a/server/src/main/java/org/elasticsearch/transport/OutboundHandler.java +++ b/server/src/main/java/org/elasticsearch/transport/OutboundHandler.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.network.HandlingTimeTracker; import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.common.transport.NetworkExceptionHelper; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @@ -237,7 +236,7 @@ private void internalSend( final long messageSize = reference.length(); TransportLogger.logOutboundMessage(channel, reference); // stash thread context so that channel event loop is not polluted by thread context - try (ThreadContext.StoredContext existing = threadPool.getThreadContext().stashContext()) { + try (var ignored = threadPool.getThreadContext().newEmptyContext()) { channel.sendMessage(reference, new ActionListener<>() { @Override public void onResponse(Void v) { diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java index f0cafb956457e..bff194a798c1b 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java @@ -129,9 
+129,8 @@ void collectNodes(ActionListener> listener) { final ThreadContext threadContext = threadPool.getThreadContext(); final ContextPreservingActionListener> contextPreservingActionListener = new ContextPreservingActionListener<>(threadContext.newRestorableContext(false), listener); - try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { + try (var ignore = threadContext.newEmptySystemContext()) { // we stash any context here since this is an internal execution and should not leak any existing context information - threadContext.markAsSystemContext(); Transport.Connection connection = remoteConnectionManager.getAnyRemoteConnection(); // Use different action to collect nodes information depending on the connection model diff --git a/server/src/main/java/org/elasticsearch/transport/SniffConnectionStrategy.java b/server/src/main/java/org/elasticsearch/transport/SniffConnectionStrategy.java index 623f86059de70..2c198caf22354 100644 --- a/server/src/main/java/org/elasticsearch/transport/SniffConnectionStrategy.java +++ b/server/src/main/java/org/elasticsearch/transport/SniffConnectionStrategy.java @@ -328,10 +328,8 @@ private void collectRemoteNodes(Iterator> seedNodesSuppl sniffResponseHandler = new ClusterStateSniffResponseHandler(connection, listener, seedNodesSuppliers); } - try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { - // we stash any context here since this is an internal execution and should not leak any - // existing context information. - threadContext.markAsSystemContext(); + try (var ignored = threadContext.newEmptySystemContext()) { + // we stash any context here since this is an internal execution and should not leak any existing context information. 
transportService.sendRequest( connection, action, diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java b/server/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java index d182d772ee964..144dda1ad3b26 100644 --- a/server/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java @@ -1122,6 +1122,57 @@ public void testSanitizeHeaders() { } } + public void testNewEmptyContext() { + final var threadContext = new ThreadContext(Settings.EMPTY); + final var header = randomBoolean() ? randomIdentifier() : randomFrom(HEADERS_TO_COPY); + threadContext.putHeader(header, randomIdentifier()); + + try (var ignored = threadContext.newEmptyContext()) { + assertTrue(threadContext.isDefaultContext()); + assertNull(threadContext.getHeader(header)); + assertTrue(threadContext.getHeaders().isEmpty()); + } + + assertNotNull(threadContext.getHeader(header)); + } + + public void testNewEmptySystemContext() { + final var threadContext = new ThreadContext(Settings.EMPTY); + final var header = randomBoolean() ? 
randomIdentifier() : randomFrom(HEADERS_TO_COPY); + threadContext.putHeader(header, randomIdentifier()); + + try (var ignored = threadContext.newEmptySystemContext()) { + assertTrue(threadContext.isSystemContext()); + assertNull(threadContext.getHeader(header)); + assertTrue(threadContext.getHeaders().isEmpty()); + } + + assertNotNull(threadContext.getHeader(header)); + } + + public void testRestoreExistingContext() { + final var threadContext = new ThreadContext(Settings.EMPTY); + final var header = randomIdentifier(); + final var originalValue = randomIdentifier(); + threadContext.putHeader(header, originalValue); + try (var originalContext = threadContext.newStoredContext()) { + assertEquals(originalValue, threadContext.getHeader(header)); + + try (var ignored1 = threadContext.newEmptyContext()) { + final var updatedValue = randomIdentifier(); + threadContext.putHeader(header, updatedValue); + + try (var ignored2 = threadContext.restoreExistingContext(originalContext)) { + assertEquals(originalValue, threadContext.getHeader(header)); + } + + assertEquals(updatedValue, threadContext.getHeader(header)); + } + + assertEquals(originalValue, threadContext.getHeader(header)); + } + } + private String randomCase(String original) { int i = randomInt(original.length() - 1); StringBuilder sb = new StringBuilder(original); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java index 2c633a43264f6..af2e99b047a71 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java @@ -479,8 +479,7 @@ public void ActionListener listener ) { final Supplier supplier = threadContext.newRestorableContext(false); - try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { - threadContext.markAsSystemContext(); + try (var ignore 
= threadContext.newEmptySystemContext()) { delegate.execute(connection, action, request, new ContextPreservingActionListener<>(supplier, listener)); } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java index addf47662276d..e6857ba985306 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java @@ -195,9 +195,8 @@ private void removeRetentionLeaseForShard( threadContext.newRestorableContext(true), listener ); - try (ThreadContext.StoredContext ignore = threadPool.getThreadContext().stashContext()) { + try (var ignore = threadPool.getThreadContext().newEmptySystemContext()) { // we have to execute under the system context so that if security is enabled the removal is authorized - threadContext.markAsSystemContext(); CcrRetentionLeases.asyncRemoveRetentionLease(leaderShardId, retentionLeaseId, remoteClient, preservedListener); } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java index ad42f549b1a80..c8136002bbd52 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java @@ -47,7 +47,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Releasable; @@ -447,10 +446,8 @@ public void restoreShard( // schedule 
renewals to run during the restore final Scheduler.Cancellable renewable = threadPool.scheduleWithFixedDelay(() -> { logger.trace("{} background renewal of retention lease [{}] during restore", shardId, retentionLeaseId); - final ThreadContext threadContext = threadPool.getThreadContext(); - try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { + try (var ignore = threadPool.getThreadContext().newEmptySystemContext()) { // we have to execute under the system context so that if security is enabled the renewal is authorized - threadContext.markAsSystemContext(); CcrRetentionLeases.asyncRenewRetentionLease( leaderShardId, retentionLeaseId, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RemoteClusterLicenseChecker.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RemoteClusterLicenseChecker.java index 01280b1d95f80..87c9713d12362 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RemoteClusterLicenseChecker.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RemoteClusterLicenseChecker.java @@ -214,10 +214,8 @@ private void remoteClusterLicense(final String clusterAlias, final ActionListene threadContext.newRestorableContext(false), listener ); - try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { + try (var ignore = threadContext.newEmptySystemContext()) { // we stash any context here since this is an internal execution and should not leak any existing context information - threadContext.markAsSystemContext(); - final XPackInfoRequest request = new XPackInfoRequest(); request.setCategories(EnumSet.of(XPackInfoRequest.Category.LICENSE)); try { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java index 4e7aa37fe1a0b..8c02462375e1d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java @@ -267,7 +267,7 @@ public static T executeWithHeaders( return supplier.get(); } } else { - try (ThreadContext.StoredContext ignore = client.threadPool().getThreadContext().stashContext()) { + try (var ignore = client.threadPool().getThreadContext().stashContext()) { client.threadPool().getThreadContext().copyHeaders(filteredHeaders.entrySet()); return supplier.get(); } From 075cd24043bcd718c45ba61963d364df7c6c7862 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mariusz=20J=C3=B3zala?= <377355+jozala@users.noreply.github.com> Date: Tue, 15 Oct 2024 16:48:27 +0200 Subject: [PATCH 111/449] [TEST] Fix ccs-unavailable-clusters QA tests build (#114833) Properly use `configureEach` on the task configuration to postpone the tasks creation and configuration in the build process --- qa/ccs-unavailable-clusters/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qa/ccs-unavailable-clusters/build.gradle b/qa/ccs-unavailable-clusters/build.gradle index 3db6e2e987262..e013ccaf9341e 100644 --- a/qa/ccs-unavailable-clusters/build.gradle +++ b/qa/ccs-unavailable-clusters/build.gradle @@ -12,6 +12,6 @@ import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' -tasks.withType(StandaloneRestIntegTestTask) { +tasks.withType(StandaloneRestIntegTestTask).configureEach { usesDefaultDistribution() } From 81976b214a4140fa46201c43d5035c92ad8c3cd3 Mon Sep 17 00:00:00 2001 From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com> Date: Tue, 15 Oct 2024 11:59:12 -0300 Subject: [PATCH 112/449] Mark Data Stream Lifecycle APIs to stable (#114780) Data Stream Lifecycle has GA'ed in 8.14, so we can safely mark these as stable. 
--- .../rest-api-spec/api/indices.delete_data_lifecycle.json | 2 +- .../rest-api-spec/api/indices.explain_data_lifecycle.json | 2 +- .../resources/rest-api-spec/api/indices.get_data_lifecycle.json | 2 +- .../resources/rest-api-spec/api/indices.put_data_lifecycle.json | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_data_lifecycle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_data_lifecycle.json index a3106a982b809..92b3ce61b4603 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_data_lifecycle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_data_lifecycle.json @@ -4,7 +4,7 @@ "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams-delete-lifecycle.html", "description":"Deletes the data stream lifecycle of the selected data streams." }, - "stability":"experimental", + "stability":"stable", "visibility":"public", "headers":{ "accept": [ "application/json"] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.explain_data_lifecycle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.explain_data_lifecycle.json index 3232407000b19..14e07ee28a80d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.explain_data_lifecycle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.explain_data_lifecycle.json @@ -4,7 +4,7 @@ "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams-explain-lifecycle.html", "description": "Retrieves information about the index's current data stream lifecycle, such as any potential encountered error, time since creation etc." 
}, - "stability": "experimental", + "stability": "stable", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_data_lifecycle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_data_lifecycle.json index 6f05af1485f98..a8d2e7185db83 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_data_lifecycle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_data_lifecycle.json @@ -4,7 +4,7 @@ "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams-get-lifecycle.html", "description":"Returns the data stream lifecycle of the selected data streams." }, - "stability":"experimental", + "stability":"stable", "visibility":"public", "headers":{ "accept": [ "application/json"] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_data_lifecycle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_data_lifecycle.json index 591525f3d99ff..08dc7128234b9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_data_lifecycle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_data_lifecycle.json @@ -4,7 +4,7 @@ "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams-put-lifecycle.html", "description":"Updates the data stream lifecycle of the selected data streams." }, - "stability":"experimental", + "stability":"stable", "visibility":"public", "headers":{ "accept": [ "application/json"] From 97c207c59bceca3c6a608d4fcdf7bf4e03102bc3 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 15 Oct 2024 16:06:35 +0100 Subject: [PATCH 113/449] Remove all replaced-in-v8 REST endpoints (#114800) These endpoints were deprecated in v7 and are replaced in v8 with different endpoints so we can remove the v7 endpoint names in v9. 
--- .../system/indices/SystemIndicesQA.java | 2 +- .../common/network/ThreadWatchdogIT.java | 2 +- .../license/RestDeleteLicenseAction.java | 3 +- .../license/RestGetBasicStatus.java | 5 +--- .../license/RestGetLicenseAction.java | 3 +- .../license/RestGetTrialStatus.java | 5 +--- .../license/RestPostStartBasicLicense.java | 5 +--- .../license/RestPostStartTrialLicense.java | 5 +--- .../license/RestPutLicenseAction.java | 6 +--- .../rest/RestGetCertificateInfoAction.java | 3 +- .../RestDeprecationInfoAction.java | 8 +---- .../graph/rest/action/RestGraphAction.java | 9 +----- .../ml/rest/RestDeleteExpiredDataAction.java | 11 ++----- .../xpack/ml/rest/RestMlInfoAction.java | 4 +-- .../ml/rest/RestSetUpgradeModeAction.java | 8 +---- .../calendar/RestDeleteCalendarAction.java | 9 ++---- .../RestDeleteCalendarEventAction.java | 14 ++------- .../calendar/RestDeleteCalendarJobAction.java | 9 ++---- .../calendar/RestGetCalendarEventsAction.java | 9 ++---- .../rest/calendar/RestGetCalendarsAction.java | 15 ++++------ .../calendar/RestPostCalendarEventAction.java | 9 ++---- .../rest/calendar/RestPutCalendarAction.java | 9 ++---- .../calendar/RestPutCalendarJobAction.java | 9 ++---- .../datafeeds/RestDeleteDatafeedAction.java | 9 ++---- .../datafeeds/RestGetDatafeedStatsAction.java | 11 ++----- .../datafeeds/RestGetDatafeedsAction.java | 9 ++---- .../datafeeds/RestPreviewDatafeedAction.java | 19 ++++-------- .../rest/datafeeds/RestPutDatafeedAction.java | 9 ++---- .../datafeeds/RestStartDatafeedAction.java | 9 ++---- .../datafeeds/RestStopDatafeedAction.java | 8 ++--- .../datafeeds/RestUpdateDatafeedAction.java | 9 ++---- .../rest/filter/RestDeleteFilterAction.java | 9 ++---- .../ml/rest/filter/RestGetFiltersAction.java | 10 ++----- .../ml/rest/filter/RestPutFilterAction.java | 9 ++---- .../rest/filter/RestUpdateFilterAction.java | 9 ++---- ...stPutTrainedModelDefinitionPartAction.java | 14 ++------- .../RestPutTrainedModelVocabularyAction.java | 5 ++-- 
.../xpack/ml/rest/job/RestCloseJobAction.java | 8 ++--- .../ml/rest/job/RestDeleteForecastAction.java | 16 +++------- .../ml/rest/job/RestDeleteJobAction.java | 9 ++---- .../xpack/ml/rest/job/RestFlushJobAction.java | 9 ++---- .../ml/rest/job/RestForecastJobAction.java | 9 ++---- .../ml/rest/job/RestGetJobStatsAction.java | 10 ++----- .../xpack/ml/rest/job/RestGetJobsAction.java | 11 ++----- .../xpack/ml/rest/job/RestOpenJobAction.java | 9 ++---- .../ml/rest/job/RestPostJobUpdateAction.java | 9 ++---- .../xpack/ml/rest/job/RestPutJobAction.java | 9 ++---- .../RestDeleteModelSnapshotAction.java | 13 ++------- .../RestGetModelSnapshotsAction.java | 28 ++++-------------- .../RestRevertModelSnapshotAction.java | 13 ++------- .../RestUpdateModelSnapshotAction.java | 13 ++------- .../ml/rest/results/RestGetBucketsAction.java | 29 ++++--------------- .../rest/results/RestGetCategoriesAction.java | 27 ++++------------- .../results/RestGetInfluencersAction.java | 11 ++----- .../results/RestGetOverallBucketsAction.java | 10 ++----- .../ml/rest/results/RestGetRecordsAction.java | 11 ++----- .../validate/RestValidateDetectorAction.java | 8 +---- .../validate/RestValidateJobConfigAction.java | 8 +---- .../rest/action/RestMonitoringBulkAction.java | 6 +--- .../rest/RestDeleteRollupJobAction.java | 3 +- .../rollup/rest/RestGetRollupCapsAction.java | 3 +- .../rest/RestGetRollupIndexCapsAction.java | 5 +--- .../rollup/rest/RestGetRollupJobsAction.java | 3 +- .../rollup/rest/RestPutRollupJobAction.java | 3 +- .../rollup/rest/RestStartRollupJobAction.java | 5 +--- .../rollup/rest/RestStopRollupJobAction.java | 5 +--- .../rest/action/RestAuthenticateAction.java | 5 +--- .../action/oauth2/RestGetTokenAction.java | 5 +--- .../oauth2/RestInvalidateTokenAction.java | 5 +--- .../privilege/RestDeletePrivilegesAction.java | 7 +---- .../privilege/RestGetPrivilegesAction.java | 11 ++----- .../privilege/RestPutPrivilegesAction.java | 6 +--- .../profile/RestDisableProfileAction.java | 5 +--- 
.../profile/RestEnableProfileAction.java | 5 +--- .../realm/RestClearRealmCacheAction.java | 7 +---- .../role/RestBulkDeleteRolesAction.java | 2 +- .../action/role/RestBulkPutRolesAction.java | 2 +- .../role/RestClearRolesCacheAction.java | 7 +---- .../action/role/RestDeleteRoleAction.java | 5 +--- .../rest/action/role/RestGetRolesAction.java | 6 +--- .../rest/action/role/RestPutRoleAction.java | 6 +--- .../RestDeleteRoleMappingAction.java | 7 +---- .../RestGetRoleMappingsAction.java | 8 +---- .../rolemapping/RestPutRoleMappingAction.java | 5 +--- .../saml/RestSamlAuthenticateAction.java | 7 +---- .../saml/RestSamlInvalidateSessionAction.java | 5 +--- .../action/saml/RestSamlLogoutAction.java | 5 +--- .../RestSamlPrepareAuthenticationAction.java | 5 +--- .../RestGetSecuritySettingsAction.java | 4 ++- .../RestUpdateSecuritySettingsAction.java | 4 ++- .../action/user/RestChangePasswordAction.java | 13 +++------ .../action/user/RestDeleteUserAction.java | 7 +---- .../user/RestGetUserPrivilegesAction.java | 5 +--- .../rest/action/user/RestGetUsersAction.java | 6 +--- .../action/user/RestHasPrivilegesAction.java | 17 +++-------- .../user/RestProfileHasPrivilegesAction.java | 5 +--- .../rest/action/user/RestPutUserAction.java | 8 +---- .../action/user/RestSetEnabledAction.java | 17 +++-------- .../sql/plugin/RestSqlClearCursorAction.java | 9 ++---- .../xpack/sql/plugin/RestSqlQueryAction.java | 12 ++------ .../xpack/sql/plugin/RestSqlStatsAction.java | 9 ++---- .../sql/plugin/RestSqlTranslateAction.java | 12 ++------ .../rest/action/RestAckWatchAction.java | 13 +++------ .../rest/action/RestActivateWatchAction.java | 19 ++---------- .../rest/action/RestDeleteWatchAction.java | 5 +--- .../rest/action/RestExecuteWatchAction.java | 13 +++------ .../rest/action/RestGetWatchAction.java | 3 +- .../action/RestGetWatcherSettingsAction.java | 4 ++- .../rest/action/RestPutWatchAction.java | 6 +--- .../RestUpdateWatcherSettingsAction.java | 4 ++- 
.../rest/action/RestWatchServiceAction.java | 5 ++-- .../rest/action/RestWatcherStatsAction.java | 6 +--- 112 files changed, 211 insertions(+), 725 deletions(-) diff --git a/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java b/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java index 4b3f623073322..57b0908508bb3 100644 --- a/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java +++ b/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java @@ -150,7 +150,7 @@ public String getName() { @Override public List routes() { - return List.of(Route.builder(Method.PUT, "/_net_new_sys_index/_create").build()); + return List.of(new Route(Method.PUT, "/_net_new_sys_index/_create")); } @Override diff --git a/server/src/internalClusterTest/java/org/elasticsearch/common/network/ThreadWatchdogIT.java b/server/src/internalClusterTest/java/org/elasticsearch/common/network/ThreadWatchdogIT.java index f2441e43de8d8..ffe55387d34c6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/common/network/ThreadWatchdogIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/common/network/ThreadWatchdogIT.java @@ -88,7 +88,7 @@ public Collection getRestHandlers( return List.of(new RestHandler() { @Override public List routes() { - return List.of(Route.builder(RestRequest.Method.POST, "_slow").build()); + return List.of(new Route(RestRequest.Method.POST, "_slow")); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestDeleteLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestDeleteLicenseAction.java index 24c081c237ee0..472249ed1ae3c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestDeleteLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestDeleteLicenseAction.java @@ -9,7 +9,6 @@ import 
org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -27,7 +26,7 @@ public RestDeleteLicenseAction() {} @Override public List routes() { - return List.of(Route.builder(DELETE, "/_license").replaces(DELETE, "/_xpack/license", RestApiVersion.V_7).build()); + return List.of(new Route(DELETE, "/_license")); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetBasicStatus.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetBasicStatus.java index e0428c0ff2039..8dc8cf2395191 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetBasicStatus.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetBasicStatus.java @@ -8,7 +8,6 @@ package org.elasticsearch.license; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestUtils; @@ -24,9 +23,7 @@ public RestGetBasicStatus() {} @Override public List routes() { - return List.of( - Route.builder(GET, "/_license/basic_status").replaces(GET, "/_xpack/license/basic_status", RestApiVersion.V_7).build() - ); + return List.of(new Route(GET, "/_license/basic_status")); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetLicenseAction.java index 4240119df457d..5bf35dea51af6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetLicenseAction.java @@ -11,7 +11,6 @@ 
import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.protocol.xpack.license.GetLicenseRequest; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -43,7 +42,7 @@ public RestGetLicenseAction() {} @Override public List routes() { - return List.of(Route.builder(GET, "/_license").replaces(GET, "/_xpack/license", RestApiVersion.V_7).build()); + return List.of(new Route(GET, "/_license")); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetTrialStatus.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetTrialStatus.java index e42db16ded401..5cfde9288edec 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetTrialStatus.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetTrialStatus.java @@ -8,7 +8,6 @@ package org.elasticsearch.license; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestUtils; @@ -24,9 +23,7 @@ public RestGetTrialStatus() {} @Override public List routes() { - return List.of( - Route.builder(GET, "/_license/trial_status").replaces(GET, "/_xpack/license/trial_status", RestApiVersion.V_7).build() - ); + return List.of(new Route(GET, "/_license/trial_status")); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartBasicLicense.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartBasicLicense.java index 64556bcf69ecf..b44648966b936 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartBasicLicense.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartBasicLicense.java @@ -8,7 +8,6 @@ package org.elasticsearch.license; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -26,9 +25,7 @@ public RestPostStartBasicLicense() {} @Override public List routes() { - return List.of( - Route.builder(POST, "/_license/start_basic").replaces(POST, "/_xpack/license/start_basic", RestApiVersion.V_7).build() - ); + return List.of(new Route(POST, "/_license/start_basic")); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartTrialLicense.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartTrialLicense.java index 56fae76c2e2b8..c8321aa518237 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartTrialLicense.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartTrialLicense.java @@ -8,7 +8,6 @@ package org.elasticsearch.license; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -28,9 +27,7 @@ public RestPostStartTrialLicense() {} @Override public List routes() { - return List.of( - Route.builder(POST, "/_license/start_trial").replaces(POST, "/_xpack/license/start_trial", RestApiVersion.V_7).build() - ); + return List.of(new Route(POST, "/_license/start_trial")); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPutLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPutLicenseAction.java index 0798be6e53a14..cf24ac301046b 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPutLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPutLicenseAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.license; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -28,10 +27,7 @@ public RestPutLicenseAction() {} @Override public List routes() { // TODO: remove POST endpoint? - return List.of( - Route.builder(POST, "/_license").replaces(POST, "/_xpack/license", RestApiVersion.V_7).build(), - Route.builder(PUT, "/_license").replaces(PUT, "/_xpack/license", RestApiVersion.V_7).build() - ); + return List.of(new Route(POST, "/_license"), new Route(PUT, "/_license")); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/rest/RestGetCertificateInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/rest/RestGetCertificateInfoAction.java index 38e59a2e34df8..cf9dbd71b98fb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/rest/RestGetCertificateInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/rest/RestGetCertificateInfoAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.ssl.rest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -32,7 +31,7 @@ public class RestGetCertificateInfoAction extends BaseRestHandler { @Override public List routes() { - return List.of(Route.builder(GET, "/_ssl/certificates").replaces(GET, "/_xpack/ssl/certificates", RestApiVersion.V_7).build()); + return List.of(new Route(GET, 
"/_ssl/certificates")); } @Override diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/RestDeprecationInfoAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/RestDeprecationInfoAction.java index 235209243ee58..f817885ccd9bc 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/RestDeprecationInfoAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/RestDeprecationInfoAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestUtils; @@ -24,12 +23,7 @@ public class RestDeprecationInfoAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(GET, "/_migration/deprecations").replaces(GET, "/_xpack/migration/deprecations", RestApiVersion.V_7).build(), - Route.builder(GET, "/{index}/_migration/deprecations") - .replaces(GET, "/{index}/_xpack/migration/deprecations", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(GET, "/_migration/deprecations"), new Route(GET, "/{index}/_migration/deprecations")); } @Override diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java index 983e972248945..7dd360f8a2d82 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java @@ -68,14 +68,7 @@ public class RestGraphAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(GET, "/{index}/_graph/explore") - 
.replaces(GET, "/{index}" + URI_BASE + "/graph/_explore", RestApiVersion.V_7) - .build(), - Route.builder(POST, "/{index}/_graph/explore") - .replaces(POST, "/{index}" + URI_BASE + "/graph/_explore", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(GET, "/{index}/_graph/explore"), new Route(POST, "/{index}/_graph/explore")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestDeleteExpiredDataAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestDeleteExpiredDataAction.java index 37731fcbfb10b..efa34ccff70c4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestDeleteExpiredDataAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestDeleteExpiredDataAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -20,8 +19,8 @@ import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.DELETE; +import static org.elasticsearch.xpack.core.ml.job.config.Job.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.INTERNAL) public class RestDeleteExpiredDataAction extends BaseRestHandler { @@ -29,12 +28,8 @@ public class RestDeleteExpiredDataAction extends BaseRestHandler { @Override public List routes() { return List.of( - Route.builder(DELETE, BASE_PATH + "_delete_expired_data/{" + Job.ID + "}") - .replaces(DELETE, PRE_V7_BASE_PATH + "_delete_expired_data/{" + Job.ID + "}", RestApiVersion.V_7) - .build(), - Route.builder(DELETE, BASE_PATH + "_delete_expired_data") - .replaces(DELETE, PRE_V7_BASE_PATH + "_delete_expired_data", RestApiVersion.V_7) - .build() + new 
Route(DELETE, BASE_PATH + "_delete_expired_data/{" + ID + "}"), + new Route(DELETE, BASE_PATH + "_delete_expired_data") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestMlInfoAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestMlInfoAction.java index aace51a45c77b..d08f25b3433b5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestMlInfoAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestMlInfoAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -20,14 +19,13 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.INTERNAL) public class RestMlInfoAction extends BaseRestHandler { @Override public List routes() { - return List.of(Route.builder(GET, BASE_PATH + "info").replaces(GET, PRE_V7_BASE_PATH + "info", RestApiVersion.V_7).build()); + return List.of(new Route(GET, BASE_PATH + "info")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestSetUpgradeModeAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestSetUpgradeModeAction.java index 67970d2ad056e..1d12a8b555225 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestSetUpgradeModeAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestSetUpgradeModeAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import 
org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -22,18 +21,13 @@ import static org.elasticsearch.rest.RestUtils.getAckTimeout; import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.INTERNAL) public class RestSetUpgradeModeAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(POST, BASE_PATH + "set_upgrade_mode") - .replaces(POST, PRE_V7_BASE_PATH + "set_upgrade_mode", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(POST, BASE_PATH + "set_upgrade_mode")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarAction.java index 2825d1e4433af..bed1ebc370520 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.calendar; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -20,19 +19,15 @@ import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.DELETE; +import static org.elasticsearch.xpack.core.ml.calendars.Calendar.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.PUBLIC) public class RestDeleteCalendarAction extends BaseRestHandler { @Override public 
List routes() { - return List.of( - Route.builder(DELETE, BASE_PATH + "calendars/{" + Calendar.ID + "}") - .replaces(DELETE, PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID + "}", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(DELETE, BASE_PATH + "calendars/{" + ID + "}")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarEventAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarEventAction.java index ec23eb8ea3551..f645d6cb7efbd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarEventAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarEventAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.calendar; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -21,23 +20,16 @@ import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.DELETE; +import static org.elasticsearch.xpack.core.ml.calendars.Calendar.ID; +import static org.elasticsearch.xpack.core.ml.calendars.ScheduledEvent.EVENT_ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.PUBLIC) public class RestDeleteCalendarEventAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(DELETE, BASE_PATH + "calendars/{" + Calendar.ID + "}/events/{" + ScheduledEvent.EVENT_ID + "}") - .replaces( - DELETE, - PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID + "}/events/{" + ScheduledEvent.EVENT_ID + "}", - RestApiVersion.V_7 - ) - .build() - ); + return List.of(new Route(DELETE, BASE_PATH + "calendars/{" + ID + 
"}/events/{" + EVENT_ID + "}")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarJobAction.java index 342a9008a69b8..782fdc9073ab4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarJobAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.calendar; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -21,19 +20,15 @@ import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.DELETE; +import static org.elasticsearch.xpack.core.ml.calendars.Calendar.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.PUBLIC) public class RestDeleteCalendarJobAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(DELETE, BASE_PATH + "calendars/{" + Calendar.ID + "}/jobs/{" + Job.ID + "}") - .replaces(DELETE, PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID + "}/jobs/{" + Job.ID + "}", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(DELETE, BASE_PATH + "calendars/{" + ID + "}/jobs/{" + Job.ID + "}")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarEventsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarEventsAction.java index be5f9cd3c1446..4bf2cfa8472fe 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarEventsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarEventsAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.calendar; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -23,19 +22,15 @@ import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.xpack.core.ml.calendars.Calendar.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.PUBLIC) public class RestGetCalendarEventsAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(GET, BASE_PATH + "calendars/{" + Calendar.ID + "}/events") - .replaces(GET, PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID + "}/events", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(GET, BASE_PATH + "calendars/{" + ID + "}/events")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarsAction.java index 1e662e28cf564..16f2a840b4575 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarsAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import 
org.elasticsearch.rest.Scope; @@ -24,8 +23,8 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xpack.core.ml.calendars.Calendar.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.PUBLIC) public class RestGetCalendarsAction extends BaseRestHandler { @@ -33,14 +32,10 @@ public class RestGetCalendarsAction extends BaseRestHandler { @Override public List routes() { return List.of( - Route.builder(GET, BASE_PATH + "calendars/{" + Calendar.ID + "}") - .replaces(GET, PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID + "}", RestApiVersion.V_7) - .build(), - Route.builder(GET, BASE_PATH + "calendars/").replaces(GET, PRE_V7_BASE_PATH + "calendars/", RestApiVersion.V_7).build(), - Route.builder(POST, BASE_PATH + "calendars/{" + Calendar.ID + "}") - .replaces(POST, PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID + "}", RestApiVersion.V_7) - .build(), - Route.builder(POST, BASE_PATH + "calendars/").replaces(POST, PRE_V7_BASE_PATH + "calendars/", RestApiVersion.V_7).build() + new Route(GET, BASE_PATH + "calendars/{" + ID + "}"), + new Route(GET, BASE_PATH + "calendars/"), + new Route(POST, BASE_PATH + "calendars/{" + ID + "}"), + new Route(POST, BASE_PATH + "calendars/") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPostCalendarEventAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPostCalendarEventAction.java index 9c686e441f4e3..525127cb0fde4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPostCalendarEventAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPostCalendarEventAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.calendar; import 
org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -21,19 +20,15 @@ import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xpack.core.ml.calendars.Calendar.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.PUBLIC) public class RestPostCalendarEventAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(POST, BASE_PATH + "calendars/{" + Calendar.ID + "}/events") - .replaces(POST, PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID + "}/events", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(POST, BASE_PATH + "calendars/{" + ID + "}/events")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarAction.java index 6b4b709de6ada..3457195bf1499 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.calendar; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -22,19 +21,15 @@ import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.PUT; +import static org.elasticsearch.xpack.core.ml.calendars.Calendar.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static 
org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.PUBLIC) public class RestPutCalendarAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(PUT, BASE_PATH + "calendars/{" + Calendar.ID + "}") - .replaces(PUT, PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID + "}", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(PUT, BASE_PATH + "calendars/{" + ID + "}")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarJobAction.java index bdbda6d931f73..694100045aa8a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarJobAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.calendar; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -21,19 +20,15 @@ import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.PUT; +import static org.elasticsearch.xpack.core.ml.calendars.Calendar.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.PUBLIC) public class RestPutCalendarJobAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(PUT, BASE_PATH + "calendars/{" + Calendar.ID + "}/jobs/{" + Job.ID + "}") - .replaces(PUT, PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID + "}/jobs/{" + Job.ID + "}", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(PUT, BASE_PATH + "calendars/{" + ID + "}/jobs/{" + 
Job.ID + "}")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestDeleteDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestDeleteDatafeedAction.java index c195da8fbb0f4..6b4777e112254 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestDeleteDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestDeleteDatafeedAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.datafeeds; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -23,19 +22,15 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; import static org.elasticsearch.rest.RestUtils.getAckTimeout; import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; +import static org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.PUBLIC) public class RestDeleteDatafeedAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(DELETE, BASE_PATH + "datafeeds/{" + DatafeedConfig.ID + "}") - .replaces(DELETE, PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID + "}", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(DELETE, BASE_PATH + "datafeeds/{" + ID + "}")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedStatsAction.java index e676f7d0a07db..eb930edffe055 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedStatsAction.java @@ -24,8 +24,8 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.xpack.core.ml.MachineLearningField.DEPRECATED_ALLOW_NO_DATAFEEDS_PARAM; +import static org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; import static org.elasticsearch.xpack.ml.rest.RestCompatibilityChecker.checkAndSetDeprecatedParam; @ServerlessScope(Scope.PUBLIC) @@ -33,14 +33,7 @@ public class RestGetDatafeedStatsAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(GET, BASE_PATH + "datafeeds/{" + DatafeedConfig.ID + "}/_stats") - .replaces(GET, PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID + "}/_stats", RestApiVersion.V_7) - .build(), - Route.builder(GET, BASE_PATH + "datafeeds/_stats") - .replaces(GET, PRE_V7_BASE_PATH + "datafeeds/_stats", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(GET, BASE_PATH + "datafeeds/{" + ID + "}/_stats"), new Route(GET, BASE_PATH + "datafeeds/_stats")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedsAction.java index 53f0a371fb9d4..6b3a857cdbb9b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedsAction.java @@ -26,9 +26,9 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static 
org.elasticsearch.xpack.core.ml.MachineLearningField.DEPRECATED_ALLOW_NO_DATAFEEDS_PARAM; +import static org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig.ID; import static org.elasticsearch.xpack.core.ml.utils.ToXContentParams.EXCLUDE_GENERATED; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; import static org.elasticsearch.xpack.ml.rest.RestCompatibilityChecker.checkAndSetDeprecatedParam; @ServerlessScope(Scope.PUBLIC) @@ -36,12 +36,7 @@ public class RestGetDatafeedsAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(GET, BASE_PATH + "datafeeds/{" + DatafeedConfig.ID + "}") - .replaces(GET, PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID + "}", RestApiVersion.V_7) - .build(), - Route.builder(GET, BASE_PATH + "datafeeds").replaces(GET, PRE_V7_BASE_PATH + "datafeeds", RestApiVersion.V_7).build() - ); + return List.of(new Route(GET, BASE_PATH + "datafeeds/{" + ID + "}"), new Route(GET, BASE_PATH + "datafeeds")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPreviewDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPreviewDatafeedAction.java index 999449b4fa206..f6a73f0cb8c84 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPreviewDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPreviewDatafeedAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.datafeeds; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -23,8 +22,8 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static 
org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.PUBLIC) public class RestPreviewDatafeedAction extends BaseRestHandler { @@ -32,18 +31,10 @@ public class RestPreviewDatafeedAction extends BaseRestHandler { @Override public List routes() { return List.of( - Route.builder(GET, BASE_PATH + "datafeeds/{" + DatafeedConfig.ID + "}/_preview") - .replaces(GET, PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID + "}/_preview", RestApiVersion.V_7) - .build(), - Route.builder(GET, BASE_PATH + "datafeeds/_preview") - .replaces(GET, PRE_V7_BASE_PATH + "datafeeds/_preview", RestApiVersion.V_7) - .build(), - Route.builder(POST, BASE_PATH + "datafeeds/{" + DatafeedConfig.ID + "}/_preview") - .replaces(POST, PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID + "}/_preview", RestApiVersion.V_7) - .build(), - Route.builder(POST, BASE_PATH + "datafeeds/_preview") - .replaces(POST, PRE_V7_BASE_PATH + "datafeeds/_preview", RestApiVersion.V_7) - .build() + new Route(GET, BASE_PATH + "datafeeds/{" + ID + "}/_preview"), + new Route(GET, BASE_PATH + "datafeeds/_preview"), + new Route(POST, BASE_PATH + "datafeeds/{" + ID + "}/_preview"), + new Route(POST, BASE_PATH + "datafeeds/_preview") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPutDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPutDatafeedAction.java index 64caa8b737e3c..360b14277a19a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPutDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPutDatafeedAction.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.search.SearchRequest; import 
org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -25,19 +24,15 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; import static org.elasticsearch.rest.RestUtils.getAckTimeout; import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; +import static org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.PUBLIC) public class RestPutDatafeedAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(PUT, BASE_PATH + "datafeeds/{" + DatafeedConfig.ID + "}") - .replaces(PUT, PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID + "}", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(PUT, BASE_PATH + "datafeeds/{" + ID + "}")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStartDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStartDatafeedAction.java index 8d6e6d5336a1f..82db97dced01e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStartDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStartDatafeedAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.datafeeds; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -26,8 +25,8 @@ import java.util.List; import static 
org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.PUBLIC) public class RestStartDatafeedAction extends BaseRestHandler { @@ -36,11 +35,7 @@ public class RestStartDatafeedAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(POST, BASE_PATH + "datafeeds/{" + DatafeedConfig.ID + "}/_start") - .replaces(POST, PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID + "}/_start", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(POST, BASE_PATH + "datafeeds/{" + ID + "}/_start")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStopDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStopDatafeedAction.java index 862c0b5318f29..bdbdc18a0d9cb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStopDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStopDatafeedAction.java @@ -28,8 +28,8 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.xpack.core.ml.MachineLearningField.DEPRECATED_ALLOW_NO_DATAFEEDS_PARAM; +import static org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; import static org.elasticsearch.xpack.ml.rest.RestCompatibilityChecker.checkAndSetDeprecatedParam; @ServerlessScope(Scope.PUBLIC) @@ -37,11 +37,7 @@ public class RestStopDatafeedAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(POST, BASE_PATH + "datafeeds/{" + DatafeedConfig.ID + 
"}/_stop") - .replaces(POST, PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID + "}/_stop", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(POST, BASE_PATH + "datafeeds/{" + ID + "}/_stop")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestUpdateDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestUpdateDatafeedAction.java index 44a8415a91fde..5335de78aa18a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestUpdateDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestUpdateDatafeedAction.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -25,19 +24,15 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestUtils.getAckTimeout; import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; +import static org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.PUBLIC) public class RestUpdateDatafeedAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(POST, BASE_PATH + "datafeeds/{" + DatafeedConfig.ID + "}/_update") - .replaces(POST, PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID + "}/_update", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(POST, BASE_PATH + "datafeeds/{" + ID + "}/_update")); } @Override diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestDeleteFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestDeleteFilterAction.java index 494c5638dc75a..57c99627b9330 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestDeleteFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestDeleteFilterAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.filter; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -22,19 +21,15 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; import static org.elasticsearch.rest.RestUtils.getAckTimeout; import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; +import static org.elasticsearch.xpack.core.ml.action.DeleteFilterAction.Request.FILTER_ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.PUBLIC) public class RestDeleteFilterAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(DELETE, BASE_PATH + "filters/{" + Request.FILTER_ID + "}") - .replaces(DELETE, PRE_V7_BASE_PATH + "filters/{" + Request.FILTER_ID + "}", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(DELETE, BASE_PATH + "filters/{" + FILTER_ID + "}")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestGetFiltersAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestGetFiltersAction.java index b4c76c3fa4475..45a30796d56f3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestGetFiltersAction.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestGetFiltersAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -23,20 +22,15 @@ import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.xpack.core.ml.job.config.MlFilter.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.PUBLIC) public class RestGetFiltersAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(GET, BASE_PATH + "filters/{" + MlFilter.ID + "}") - .replaces(GET, PRE_V7_BASE_PATH + "filters/{" + MlFilter.ID + "}", RestApiVersion.V_7) - .build(), - Route.builder(GET, BASE_PATH + "filters/").replaces(GET, PRE_V7_BASE_PATH + "filters/", RestApiVersion.V_7).build() - ); + return List.of(new Route(GET, BASE_PATH + "filters/{" + ID + "}"), new Route(GET, BASE_PATH + "filters/")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestPutFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestPutFilterAction.java index 78753a46dc492..1d62137666d93 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestPutFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestPutFilterAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.filter; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import 
org.elasticsearch.rest.Scope; @@ -21,19 +20,15 @@ import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.PUT; +import static org.elasticsearch.xpack.core.ml.job.config.MlFilter.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.PUBLIC) public class RestPutFilterAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(PUT, BASE_PATH + "filters/{" + MlFilter.ID + "}") - .replaces(PUT, PRE_V7_BASE_PATH + "filters/{" + MlFilter.ID + "}", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(PUT, BASE_PATH + "filters/{" + ID + "}")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestUpdateFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestUpdateFilterAction.java index a2cdb4a85395a..b32997d0eee45 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestUpdateFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestUpdateFilterAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.filter; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -21,19 +20,15 @@ import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xpack.core.ml.job.config.MlFilter.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.PUBLIC) public class RestUpdateFilterAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(POST, BASE_PATH + 
"filters/{" + MlFilter.ID + "}/_update") - .replaces(POST, PRE_V7_BASE_PATH + "filters/{" + MlFilter.ID + "}/_update", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(POST, BASE_PATH + "filters/{" + ID + "}/_update")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestPutTrainedModelDefinitionPartAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestPutTrainedModelDefinitionPartAction.java index d328b90a8e0d3..fc2c102747992 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestPutTrainedModelDefinitionPartAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestPutTrainedModelDefinitionPartAction.java @@ -20,6 +20,8 @@ import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.PUT; +import static org.elasticsearch.xpack.core.ml.action.PutTrainedModelDefinitionPartAction.Request.PART; +import static org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig.MODEL_ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; @ServerlessScope(Scope.PUBLIC) @@ -27,17 +29,7 @@ public class RestPutTrainedModelDefinitionPartAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder( - PUT, - BASE_PATH - + "trained_models/{" - + TrainedModelConfig.MODEL_ID.getPreferredName() - + "}/definition/{" - + PutTrainedModelDefinitionPartAction.Request.PART - + "}" - ).build() - ); + return List.of(new Route(PUT, BASE_PATH + "trained_models/{" + MODEL_ID.getPreferredName() + "}/definition/{" + PART + "}")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestPutTrainedModelVocabularyAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestPutTrainedModelVocabularyAction.java index ee228223eb340..34f6c4b9f51bb 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestPutTrainedModelVocabularyAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestPutTrainedModelVocabularyAction.java @@ -20,6 +20,7 @@ import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.PUT; +import static org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig.MODEL_ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; @ServerlessScope(Scope.PUBLIC) @@ -27,9 +28,7 @@ public class RestPutTrainedModelVocabularyAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(PUT, BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID.getPreferredName() + "}/vocabulary").build() - ); + return List.of(new Route(PUT, BASE_PATH + "trained_models/{" + MODEL_ID.getPreferredName() + "}/vocabulary")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestCloseJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestCloseJobAction.java index 6ee4ba8be376e..0986a6a2400db 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestCloseJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestCloseJobAction.java @@ -23,8 +23,8 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.xpack.core.ml.MachineLearningField.DEPRECATED_ALLOW_NO_JOBS_PARAM; +import static org.elasticsearch.xpack.core.ml.job.config.Job.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; import static org.elasticsearch.xpack.ml.rest.RestCompatibilityChecker.checkAndSetDeprecatedParam; @ServerlessScope(Scope.PUBLIC) @@ -32,11 +32,7 @@ public class RestCloseJobAction extends BaseRestHandler { @Override public List routes() { - return 
List.of( - Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_close") - .replaces(POST, PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_close", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(POST, BASE_PATH + "anomaly_detectors/{" + ID + "}/_close")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteForecastAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteForecastAction.java index ba3bf73a3a4d6..13cb58964733f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteForecastAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteForecastAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -23,8 +22,9 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; import static org.elasticsearch.rest.RestUtils.getAckTimeout; +import static org.elasticsearch.xpack.core.ml.job.config.Job.ID; +import static org.elasticsearch.xpack.core.ml.job.results.Forecast.FORECAST_ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.INTERNAL) public class RestDeleteForecastAction extends BaseRestHandler { @@ -32,16 +32,8 @@ public class RestDeleteForecastAction extends BaseRestHandler { @Override public List routes() { return List.of( - Route.builder(DELETE, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_forecast/") - .replaces(DELETE, PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_forecast/", RestApiVersion.V_7) - .build(), - Route.builder(DELETE, BASE_PATH + "anomaly_detectors/{" + Job.ID 
+ "}/_forecast/{" + Forecast.FORECAST_ID + "}") - .replaces( - DELETE, - PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_forecast/{" + Forecast.FORECAST_ID + "}", - RestApiVersion.V_7 - ) - .build() + new Route(DELETE, BASE_PATH + "anomaly_detectors/{" + ID + "}/_forecast/"), + new Route(DELETE, BASE_PATH + "anomaly_detectors/{" + ID + "}/_forecast/{" + FORECAST_ID + "}") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteJobAction.java index 1db528cae4a3d..0a4e1c0ea68bb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteJobAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -28,19 +27,15 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; import static org.elasticsearch.rest.RestUtils.getAckTimeout; import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; +import static org.elasticsearch.xpack.core.ml.job.config.Job.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.PUBLIC) public class RestDeleteJobAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(DELETE, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}") - .replaces(DELETE, PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(DELETE, BASE_PATH + "anomaly_detectors/{" + ID + "}")); } @Override diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestFlushJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestFlushJobAction.java index d8784a369594c..650c5d92e7589 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestFlushJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestFlushJobAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.job; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -21,8 +20,8 @@ import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xpack.core.ml.job.config.Job.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.PUBLIC) public class RestFlushJobAction extends BaseRestHandler { @@ -35,11 +34,7 @@ public class RestFlushJobAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_flush") - .replaces(POST, PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_flush", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(POST, BASE_PATH + "anomaly_detectors/{" + ID + "}/_flush")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestForecastJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestForecastJobAction.java index 5f3388c143dd4..b45fe5aeb89a0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestForecastJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestForecastJobAction.java @@ -8,7 +8,6 
@@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -22,19 +21,15 @@ import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xpack.core.ml.job.config.Job.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.INTERNAL) public class RestForecastJobAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_forecast") - .replaces(POST, PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_forecast", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(POST, BASE_PATH + "anomaly_detectors/{" + ID + "}/_forecast")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobStatsAction.java index d77a913ab0491..1ecc0ff0cefa3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobStatsAction.java @@ -25,8 +25,8 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.xpack.core.ml.MachineLearningField.DEPRECATED_ALLOW_NO_JOBS_PARAM; +import static org.elasticsearch.xpack.core.ml.job.config.Job.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; import static org.elasticsearch.xpack.ml.rest.RestCompatibilityChecker.checkAndSetDeprecatedParam; 
@ServerlessScope(Scope.PUBLIC) @@ -35,12 +35,8 @@ public class RestGetJobStatsAction extends BaseRestHandler { @Override public List routes() { return List.of( - Route.builder(GET, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_stats") - .replaces(GET, PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_stats", RestApiVersion.V_7) - .build(), - Route.builder(GET, BASE_PATH + "anomaly_detectors/_stats") - .replaces(GET, PRE_V7_BASE_PATH + "anomaly_detectors/_stats", RestApiVersion.V_7) - .build() + new Route(GET, BASE_PATH + "anomaly_detectors/{" + ID + "}/_stats"), + new Route(GET, BASE_PATH + "anomaly_detectors/_stats") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobsAction.java index f3c7b8c6636ef..e6f4325024c3c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobsAction.java @@ -27,9 +27,9 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.xpack.core.ml.MachineLearningField.DEPRECATED_ALLOW_NO_JOBS_PARAM; +import static org.elasticsearch.xpack.core.ml.job.config.Job.ID; import static org.elasticsearch.xpack.core.ml.utils.ToXContentParams.EXCLUDE_GENERATED; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; import static org.elasticsearch.xpack.ml.rest.RestCompatibilityChecker.checkAndSetDeprecatedParam; @ServerlessScope(Scope.PUBLIC) @@ -37,14 +37,7 @@ public class RestGetJobsAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(GET, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}") - .replaces(GET, PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}", RestApiVersion.V_7) - .build(), - Route.builder(GET, 
BASE_PATH + "anomaly_detectors") - .replaces(GET, PRE_V7_BASE_PATH + "anomaly_detectors", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(GET, BASE_PATH + "anomaly_detectors/{" + ID + "}"), new Route(GET, BASE_PATH + "anomaly_detectors")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestOpenJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestOpenJobAction.java index dba1a7a2fb132..74dfc959acb26 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestOpenJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestOpenJobAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.job; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -25,19 +24,15 @@ import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xpack.core.ml.job.config.Job.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.PUBLIC) public class RestOpenJobAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_open") - .replaces(POST, PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_open", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(POST, BASE_PATH + "anomaly_detectors/{" + ID + "}/_open")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostJobUpdateAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostJobUpdateAction.java index 7b2097012e8c1..e1e8296a62073 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostJobUpdateAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostJobUpdateAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.job; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -23,19 +22,15 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestUtils.getAckTimeout; import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; +import static org.elasticsearch.xpack.core.ml.job.config.Job.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.PUBLIC) public class RestPostJobUpdateAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_update") - .replaces(POST, PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_update", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(POST, BASE_PATH + "anomaly_detectors/{" + ID + "}/_update")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPutJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPutJobAction.java index 4c754d9bcfdd9..743fe8fac4317 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPutJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPutJobAction.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; -import 
org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -25,19 +24,15 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; import static org.elasticsearch.rest.RestUtils.getAckTimeout; import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; +import static org.elasticsearch.xpack.core.ml.job.config.Job.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.PUBLIC) public class RestPutJobAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(PUT, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}") - .replaces(PUT, PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(PUT, BASE_PATH + "anomaly_detectors/{" + ID + "}")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestDeleteModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestDeleteModelSnapshotAction.java index d3069f4f6c405..2a9d83ccfc292 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestDeleteModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestDeleteModelSnapshotAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.modelsnapshots; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -20,24 +19,16 @@ import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.DELETE; +import static org.elasticsearch.xpack.core.ml.job.config.Job.ID; 
import static org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshotField.SNAPSHOT_ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.INTERNAL) public class RestDeleteModelSnapshotAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(DELETE, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/model_snapshots/{" + SNAPSHOT_ID + "}") - .replaces( - DELETE, - PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/model_snapshots/{" + SNAPSHOT_ID + "}", - RestApiVersion.V_7 - ) - .build() - ); + return List.of(new Route(DELETE, BASE_PATH + "anomaly_detectors/{" + ID + "}/model_snapshots/{" + SNAPSHOT_ID + "}")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestGetModelSnapshotsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestGetModelSnapshotsAction.java index 7cca7d250d030..ec2d567700df8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestGetModelSnapshotsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestGetModelSnapshotsAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -26,8 +25,9 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xpack.core.ml.action.GetModelSnapshotsAction.Request.SNAPSHOT_ID; +import static org.elasticsearch.xpack.core.ml.job.config.Job.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import 
static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.INTERNAL) public class RestGetModelSnapshotsAction extends BaseRestHandler { @@ -44,26 +44,10 @@ public class RestGetModelSnapshotsAction extends BaseRestHandler { @Override public List routes() { return List.of( - Route.builder(GET, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/model_snapshots/{" + Request.SNAPSHOT_ID + "}") - .replaces( - GET, - PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/model_snapshots/{" + Request.SNAPSHOT_ID + "}", - RestApiVersion.V_7 - ) - .build(), - Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/model_snapshots/{" + Request.SNAPSHOT_ID + "}") - .replaces( - POST, - PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/model_snapshots/{" + Request.SNAPSHOT_ID + "}", - RestApiVersion.V_7 - ) - .build(), - Route.builder(GET, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/model_snapshots") - .replaces(GET, PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/model_snapshots", RestApiVersion.V_7) - .build(), - Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/model_snapshots") - .replaces(POST, PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/model_snapshots", RestApiVersion.V_7) - .build() + new Route(GET, BASE_PATH + "anomaly_detectors/{" + ID + "}/model_snapshots/{" + SNAPSHOT_ID + "}"), + new Route(POST, BASE_PATH + "anomaly_detectors/{" + ID + "}/model_snapshots/{" + SNAPSHOT_ID + "}"), + new Route(GET, BASE_PATH + "anomaly_detectors/{" + ID + "}/model_snapshots"), + new Route(POST, BASE_PATH + "anomaly_detectors/{" + ID + "}/model_snapshots") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestRevertModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestRevertModelSnapshotAction.java index 20833853b3107..c4a441d637fb0 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestRevertModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestRevertModelSnapshotAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.modelsnapshots; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -24,8 +23,8 @@ import static org.elasticsearch.rest.RestUtils.getAckTimeout; import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; import static org.elasticsearch.xpack.core.ml.action.RevertModelSnapshotAction.Request.SNAPSHOT_ID; +import static org.elasticsearch.xpack.core.ml.job.config.Job.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.INTERNAL) public class RestRevertModelSnapshotAction extends BaseRestHandler { @@ -34,15 +33,7 @@ public class RestRevertModelSnapshotAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/model_snapshots/{" + SNAPSHOT_ID + "}/_revert") - .replaces( - POST, - PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/model_snapshots/{" + SNAPSHOT_ID + "}/_revert", - RestApiVersion.V_7 - ) - .build() - ); + return List.of(new Route(POST, BASE_PATH + "anomaly_detectors/{" + ID + "}/model_snapshots/{" + SNAPSHOT_ID + "}/_revert")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestUpdateModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestUpdateModelSnapshotAction.java index 911305aca474c..e540d7115b3cd 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestUpdateModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestUpdateModelSnapshotAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.modelsnapshots; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -21,24 +20,16 @@ import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xpack.core.ml.job.config.Job.ID; import static org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshotField.SNAPSHOT_ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.INTERNAL) public class RestUpdateModelSnapshotAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/model_snapshots/{" + SNAPSHOT_ID + "}/_update") - .replaces( - POST, - PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/model_snapshots/{" + SNAPSHOT_ID + "}/_update", - RestApiVersion.V_7 - ) - .build() - ); + return List.of(new Route(POST, BASE_PATH + "anomaly_detectors/{" + ID + "}/model_snapshots/{" + SNAPSHOT_ID + "}/_update")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetBucketsAction.java index babffc3fd4650..a049529c889fc 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetBucketsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetBucketsAction.java @@ 
-8,7 +8,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -18,15 +17,15 @@ import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.ml.action.GetBucketsAction; import org.elasticsearch.xpack.core.ml.job.config.Job; -import org.elasticsearch.xpack.core.ml.job.results.Result; import java.io.IOException; import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xpack.core.ml.job.config.Job.ID; +import static org.elasticsearch.xpack.core.ml.job.results.Result.TIMESTAMP; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.INTERNAL) public class RestGetBucketsAction extends BaseRestHandler { @@ -34,26 +33,10 @@ public class RestGetBucketsAction extends BaseRestHandler { @Override public List routes() { return List.of( - Route.builder(GET, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/buckets/{" + Result.TIMESTAMP + "}") - .replaces( - GET, - PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/buckets/{" + Result.TIMESTAMP + "}", - RestApiVersion.V_7 - ) - .build(), - Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/buckets/{" + Result.TIMESTAMP + "}") - .replaces( - POST, - PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/buckets/{" + Result.TIMESTAMP + "}", - RestApiVersion.V_7 - ) - .build(), - Route.builder(GET, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/buckets") - .replaces(GET, PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/buckets", RestApiVersion.V_7) - .build(), - 
Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/buckets") - .replaces(POST, PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/buckets", RestApiVersion.V_7) - .build() + new Route(GET, BASE_PATH + "anomaly_detectors/{" + ID + "}/results/buckets/{" + TIMESTAMP + "}"), + new Route(POST, BASE_PATH + "anomaly_detectors/{" + ID + "}/results/buckets/{" + TIMESTAMP + "}"), + new Route(GET, BASE_PATH + "anomaly_detectors/{" + ID + "}/results/buckets"), + new Route(POST, BASE_PATH + "anomaly_detectors/{" + ID + "}/results/buckets") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetCategoriesAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetCategoriesAction.java index 22abcabf5f34f..5eb7f38cd1d1b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetCategoriesAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetCategoriesAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.results; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -26,8 +25,8 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.xpack.core.ml.action.GetCategoriesAction.Request.CATEGORY_ID; +import static org.elasticsearch.xpack.core.ml.job.config.Job.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.INTERNAL) public class RestGetCategoriesAction extends BaseRestHandler { @@ -35,26 +34,10 @@ public class RestGetCategoriesAction extends BaseRestHandler { @Override public List routes() { 
return List.of( - Route.builder(GET, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/categories/{" + CATEGORY_ID + "}") - .replaces( - GET, - PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/categories/{" + CATEGORY_ID + "}", - RestApiVersion.V_7 - ) - .build(), - Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/categories/{" + CATEGORY_ID + "}") - .replaces( - POST, - PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/categories/{" + CATEGORY_ID + "}", - RestApiVersion.V_7 - ) - .build(), - Route.builder(GET, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/categories") - .replaces(GET, PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/categories", RestApiVersion.V_7) - .build(), - Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/categories") - .replaces(POST, PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/categories", RestApiVersion.V_7) - .build() + new Route(GET, BASE_PATH + "anomaly_detectors/{" + ID + "}/results/categories/{" + CATEGORY_ID + "}"), + new Route(POST, BASE_PATH + "anomaly_detectors/{" + ID + "}/results/categories/{" + CATEGORY_ID + "}"), + new Route(GET, BASE_PATH + "anomaly_detectors/{" + ID + "}/results/categories"), + new Route(POST, BASE_PATH + "anomaly_detectors/{" + ID + "}/results/categories") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetInfluencersAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetInfluencersAction.java index 02b57e9c75654..54947d72a96d8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetInfluencersAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetInfluencersAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.results; import org.elasticsearch.client.internal.node.NodeClient; -import 
org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -23,8 +22,8 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xpack.core.ml.job.config.Job.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.INTERNAL) public class RestGetInfluencersAction extends BaseRestHandler { @@ -32,12 +31,8 @@ public class RestGetInfluencersAction extends BaseRestHandler { @Override public List routes() { return List.of( - Route.builder(GET, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/influencers") - .replaces(GET, PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/influencers", RestApiVersion.V_7) - .build(), - Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/influencers") - .replaces(POST, PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/influencers", RestApiVersion.V_7) - .build() + new Route(GET, BASE_PATH + "anomaly_detectors/{" + ID + "}/results/influencers"), + new Route(POST, BASE_PATH + "anomaly_detectors/{" + ID + "}/results/influencers") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetOverallBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetOverallBucketsAction.java index c93d7dd57a28d..e73e5b32d3a1c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetOverallBucketsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetOverallBucketsAction.java @@ -24,8 +24,8 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; import static 
org.elasticsearch.xpack.core.ml.MachineLearningField.DEPRECATED_ALLOW_NO_JOBS_PARAM; +import static org.elasticsearch.xpack.core.ml.job.config.Job.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; import static org.elasticsearch.xpack.ml.rest.RestCompatibilityChecker.checkAndSetDeprecatedParam; @ServerlessScope(Scope.PUBLIC) @@ -34,12 +34,8 @@ public class RestGetOverallBucketsAction extends BaseRestHandler { @Override public List routes() { return List.of( - Route.builder(GET, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/overall_buckets") - .replaces(GET, PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/overall_buckets", RestApiVersion.V_7) - .build(), - Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/overall_buckets") - .replaces(POST, PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/overall_buckets", RestApiVersion.V_7) - .build() + new Route(GET, BASE_PATH + "anomaly_detectors/{" + ID + "}/results/overall_buckets"), + new Route(POST, BASE_PATH + "anomaly_detectors/{" + ID + "}/results/overall_buckets") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetRecordsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetRecordsAction.java index 3b756ddbec17c..84cf13e7032c8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetRecordsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetRecordsAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.results; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -23,8 +22,8 @@ import static 
org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xpack.core.ml.job.config.Job.ID; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.INTERNAL) public class RestGetRecordsAction extends BaseRestHandler { @@ -32,12 +31,8 @@ public class RestGetRecordsAction extends BaseRestHandler { @Override public List routes() { return List.of( - Route.builder(GET, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/records") - .replaces(GET, PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/records", RestApiVersion.V_7) - .build(), - Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/records") - .replaces(POST, PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/results/records", RestApiVersion.V_7) - .build() + new Route(GET, BASE_PATH + "anomaly_detectors/{" + ID + "}/results/records"), + new Route(POST, BASE_PATH + "anomaly_detectors/{" + ID + "}/results/records") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/validate/RestValidateDetectorAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/validate/RestValidateDetectorAction.java index 865d630c47816..f1ecf0382cc97 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/validate/RestValidateDetectorAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/validate/RestValidateDetectorAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.validate; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -21,18 +20,13 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; import 
static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.INTERNAL) public class RestValidateDetectorAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(POST, BASE_PATH + "anomaly_detectors/_validate/detector") - .replaces(POST, PRE_V7_BASE_PATH + "anomaly_detectors/_validate/detector", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(POST, BASE_PATH + "anomaly_detectors/_validate/detector")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/validate/RestValidateJobConfigAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/validate/RestValidateJobConfigAction.java index 00e776956c7dd..61945ce30faee 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/validate/RestValidateJobConfigAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/validate/RestValidateJobConfigAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.validate; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -21,18 +20,13 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; -import static org.elasticsearch.xpack.ml.MachineLearning.PRE_V7_BASE_PATH; @ServerlessScope(Scope.INTERNAL) public class RestValidateJobConfigAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(POST, BASE_PATH + "anomaly_detectors/_validate") - .replaces(POST, PRE_V7_BASE_PATH + "anomaly_detectors/_validate", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(POST, BASE_PATH + "anomaly_detectors/_validate")); } @Override 
diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkAction.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkAction.java index 300ac5a650a8d..b69b958a27ce6 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkAction.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkAction.java @@ -9,7 +9,6 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; @@ -53,10 +52,7 @@ public class RestMonitoringBulkAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(POST, "/_monitoring/bulk").replaces(POST, "/_xpack/monitoring/_bulk", RestApiVersion.V_7).build(), - Route.builder(PUT, "/_monitoring/bulk").replaces(PUT, "/_xpack/monitoring/_bulk", RestApiVersion.V_7).build() - ); + return List.of(new Route(POST, "/_monitoring/bulk"), new Route(PUT, "/_monitoring/bulk")); } @Override diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestDeleteRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestDeleteRollupJobAction.java index 3662fc94e4c7c..e80c69ae84122 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestDeleteRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestDeleteRollupJobAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.rollup.rest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; 
import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestStatus; @@ -26,7 +25,7 @@ public class RestDeleteRollupJobAction extends BaseRestHandler { @Override public List routes() { - return List.of(Route.builder(DELETE, "/_rollup/job/{id}").replaces(DELETE, "/_xpack/rollup/job/{id}/", RestApiVersion.V_7).build()); + return List.of(new Route(DELETE, "/_rollup/job/{id}")); } @Override diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupCapsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupCapsAction.java index 5f3d64587d8e6..d60a5307ebe05 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupCapsAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupCapsAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.rollup.rest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -25,7 +24,7 @@ public class RestGetRollupCapsAction extends BaseRestHandler { @Override public List routes() { - return List.of(Route.builder(GET, "/_rollup/data/{id}").replaces(GET, "/_xpack/rollup/data/{id}/", RestApiVersion.V_7).build()); + return List.of(new Route(GET, "/_rollup/data/{id}")); } @Override diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java index e8714116dad22..1dab108ad7a3c 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java +++ 
b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -27,9 +26,7 @@ public class RestGetRollupIndexCapsAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(GET, "/{index}/_rollup/data").replaces(GET, "/{index}/_xpack/rollup/data", RestApiVersion.V_7).build() - ); + return List.of(new Route(GET, "/{index}/_rollup/data")); } @Override diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupJobsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupJobsAction.java index 1b19a17fa2a79..3308425c08232 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupJobsAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupJobsAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.rollup.rest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -25,7 +24,7 @@ public class RestGetRollupJobsAction extends BaseRestHandler { @Override public List routes() { - return List.of(Route.builder(GET, "/_rollup/job/{id}").replaces(GET, "/_xpack/rollup/job/{id}/", RestApiVersion.V_7).build()); + return List.of(new Route(GET, "/_rollup/job/{id}")); } @Override diff --git 
a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestPutRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestPutRollupJobAction.java index e434da37b7585..7ca5489ff5ddc 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestPutRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestPutRollupJobAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.rollup.rest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -23,7 +22,7 @@ public class RestPutRollupJobAction extends BaseRestHandler { @Override public List routes() { - return List.of(Route.builder(PUT, "/_rollup/job/{id}").replaces(PUT, "/_xpack/rollup/job/{id}", RestApiVersion.V_7).build()); + return List.of(new Route(PUT, "/_rollup/job/{id}")); } @Override diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStartRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStartRollupJobAction.java index 3c84e67dcd010..b22d70cc8d789 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStartRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStartRollupJobAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.rollup.rest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -23,9 +22,7 @@ public class RestStartRollupJobAction extends BaseRestHandler { @Override public List routes() { - return 
List.of( - Route.builder(POST, "/_rollup/job/{id}/_start").replaces(POST, "/_xpack/rollup/job/{id}/_start", RestApiVersion.V_7).build() - ); + return List.of(new Route(POST, "/_rollup/job/{id}/_start")); } @Override diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStopRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStopRollupJobAction.java index a5de3b6b22ce0..540dafb841c2d 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStopRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStopRollupJobAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.rollup.rest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -24,9 +23,7 @@ public class RestStopRollupJobAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(POST, "/_rollup/job/{id}/_stop").replaces(POST, "/_xpack/rollup/job/{id}/_stop", RestApiVersion.V_7).build() - ); + return List.of(new Route(POST, "/_rollup/job/{id}/_stop")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateAction.java index 680f6b0f14f5e..ec287b01d0df2 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; 
import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -41,9 +40,7 @@ public RestAuthenticateAction(Settings settings, SecurityContext securityContext @Override public List routes() { - return List.of( - Route.builder(GET, "/_security/_authenticate").replaces(GET, "/_xpack/security/_authenticate", RestApiVersion.V_7).build() - ); + return List.of(new Route(GET, "/_security/_authenticate")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenAction.java index 3102ba4e4ee36..fc07fa88d6d3d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; @@ -85,9 +84,7 @@ public RestGetTokenAction(Settings settings, XPackLicenseState xPackLicenseState @Override public List routes() { - return List.of( - Route.builder(POST, "/_security/oauth2/token").replaces(POST, "/_xpack/security/oauth2/token", RestApiVersion.V_7).build() - ); + return List.of(new Route(POST, "/_security/oauth2/token")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestInvalidateTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestInvalidateTokenAction.java index 
d26d9c0e63aab..1959f8914d5e6 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestInvalidateTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestInvalidateTokenAction.java @@ -9,7 +9,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequestFilter; @@ -70,9 +69,7 @@ public RestInvalidateTokenAction(Settings settings, XPackLicenseState xPackLicen @Override public List routes() { - return List.of( - Route.builder(DELETE, "/_security/oauth2/token").replaces(DELETE, "/_xpack/security/oauth2/token", RestApiVersion.V_7).build() - ); + return List.of(new Route(DELETE, "/_security/oauth2/token")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestDeletePrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestDeletePrivilegesAction.java index 6e0183c066668..967a2aec3f608 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestDeletePrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestDeletePrivilegesAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -41,11 +40,7 @@ public RestDeletePrivilegesAction(Settings settings, XPackLicenseState licenseSt @Override public 
List routes() { - return List.of( - Route.builder(DELETE, "/_security/privilege/{application}/{privilege}") - .replaces(DELETE, "/_xpack/security/privilege/{application}/{privilege}", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(DELETE, "/_security/privilege/{application}/{privilege}")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestGetPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestGetPrivilegesAction.java index 827d0db186310..f8b8db44142ba 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestGetPrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestGetPrivilegesAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -47,13 +46,9 @@ public RestGetPrivilegesAction(Settings settings, XPackLicenseState licenseState @Override public List routes() { return List.of( - Route.builder(GET, "/_security/privilege/").replaces(GET, "/_xpack/security/privilege/", RestApiVersion.V_7).build(), - Route.builder(GET, "/_security/privilege/{application}") - .replaces(GET, "/_xpack/security/privilege/{application}", RestApiVersion.V_7) - .build(), - Route.builder(GET, "/_security/privilege/{application}/{privilege}") - .replaces(GET, "/_xpack/security/privilege/{application}/{privilege}", RestApiVersion.V_7) - .build() + new Route(GET, "/_security/privilege/"), + new Route(GET, "/_security/privilege/{application}"), + new Route(GET, "/_security/privilege/{application}/{privilege}") ); } diff --git 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestPutPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestPutPrivilegesAction.java index 53db1d135c125..78fe41e939316 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestPutPrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestPutPrivilegesAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -44,10 +43,7 @@ public RestPutPrivilegesAction(Settings settings, XPackLicenseState licenseState @Override public List routes() { - return List.of( - Route.builder(PUT, "/_security/privilege/").replaces(PUT, "/_xpack/security/privilege/", RestApiVersion.V_7).build(), - Route.builder(POST, "/_security/privilege/").replaces(POST, "/_xpack/security/privilege/", RestApiVersion.V_7).build() - ); + return List.of(new Route(PUT, "/_security/privilege/"), new Route(POST, "/_security/privilege/")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/profile/RestDisableProfileAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/profile/RestDisableProfileAction.java index 60e2cc332d3e9..08632c9c04759 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/profile/RestDisableProfileAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/profile/RestDisableProfileAction.java @@ -33,10 +33,7 @@ public RestDisableProfileAction(Settings settings, XPackLicenseState 
licenseStat @Override public List routes() { - return List.of( - Route.builder(POST, "/_security/profile/{uid}/_disable").build(), - Route.builder(PUT, "/_security/profile/{uid}/_disable").build() - ); + return List.of(new Route(POST, "/_security/profile/{uid}/_disable"), new Route(PUT, "/_security/profile/{uid}/_disable")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/profile/RestEnableProfileAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/profile/RestEnableProfileAction.java index 21cc647b3ef4b..74bc9d43002db 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/profile/RestEnableProfileAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/profile/RestEnableProfileAction.java @@ -33,10 +33,7 @@ public RestEnableProfileAction(Settings settings, XPackLicenseState licenseState @Override public List routes() { - return List.of( - Route.builder(POST, "/_security/profile/{uid}/_enable").build(), - Route.builder(PUT, "/_security/profile/{uid}/_enable").build() - ); + return List.of(new Route(POST, "/_security/profile/{uid}/_enable"), new Route(PUT, "/_security/profile/{uid}/_enable")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/realm/RestClearRealmCacheAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/realm/RestClearRealmCacheAction.java index 3b274bcea38d7..194029225da2d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/realm/RestClearRealmCacheAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/realm/RestClearRealmCacheAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; -import 
org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -31,11 +30,7 @@ public RestClearRealmCacheAction(Settings settings, XPackLicenseState licenseSta @Override public List routes() { - return List.of( - Route.builder(POST, "/_security/realm/{realms}/_clear_cache") - .replaces(POST, "/_xpack/security/realm/{realms}/_clear_cache", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(POST, "/_security/realm/{realms}/_clear_cache")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestBulkDeleteRolesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestBulkDeleteRolesAction.java index 683faf5cfa914..a420196b7237b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestBulkDeleteRolesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestBulkDeleteRolesAction.java @@ -43,7 +43,7 @@ public RestBulkDeleteRolesAction(Settings settings, XPackLicenseState licenseSta @Override public List routes() { - return List.of(Route.builder(DELETE, "/_security/role").build()); + return List.of(new Route(DELETE, "/_security/role")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestBulkPutRolesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestBulkPutRolesAction.java index f132da09c4ec0..550e94b477034 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestBulkPutRolesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestBulkPutRolesAction.java @@ -34,7 +34,7 @@ public RestBulkPutRolesAction(Settings settings, XPackLicenseState 
licenseState, @Override public List routes() { - return List.of(Route.builder(POST, "/_security/role").build()); + return List.of(new Route(POST, "/_security/role")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestClearRolesCacheAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestClearRolesCacheAction.java index e562d187764e6..546da4a1f31b1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestClearRolesCacheAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestClearRolesCacheAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -31,11 +30,7 @@ public RestClearRolesCacheAction(Settings settings, XPackLicenseState licenseSta @Override public List routes() { - return List.of( - Route.builder(POST, "/_security/role/{name}/_clear_cache") - .replaces(POST, "/_xpack/security/role/{name}/_clear_cache", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(POST, "/_security/role/{name}/_clear_cache")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestDeleteRoleAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestDeleteRoleAction.java index 0c3f0364e60d4..b74ef7eaa3190 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestDeleteRoleAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestDeleteRoleAction.java @@ -8,7 +8,6 @@ import 
org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -37,9 +36,7 @@ public RestDeleteRoleAction(Settings settings, XPackLicenseState licenseState) { @Override public List routes() { - return List.of( - Route.builder(DELETE, "/_security/role/{name}").replaces(DELETE, "/_xpack/security/role/{name}", RestApiVersion.V_7).build() - ); + return List.of(new Route(DELETE, "/_security/role/{name}")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestGetRolesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestGetRolesAction.java index dc9ecbbc63a8d..82fe3dc5e12c2 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestGetRolesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestGetRolesAction.java @@ -9,7 +9,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -39,10 +38,7 @@ public RestGetRolesAction(Settings settings, XPackLicenseState licenseState) { @Override public List routes() { - return List.of( - Route.builder(GET, "/_security/role/").replaces(GET, "/_xpack/security/role/", RestApiVersion.V_7).build(), - Route.builder(GET, "/_security/role/{name}").replaces(GET, "/_xpack/security/role/{name}", RestApiVersion.V_7).build() - ); + return List.of(new Route(GET, "/_security/role/"), new Route(GET, "/_security/role/{name}")); } 
@Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestPutRoleAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestPutRoleAction.java index 6a819c098e9f1..a56ab4500f849 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestPutRoleAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestPutRoleAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -42,10 +41,7 @@ public RestPutRoleAction(Settings settings, XPackLicenseState licenseState, PutR @Override public List routes() { - return List.of( - Route.builder(POST, "/_security/role/{name}").replaces(POST, "/_xpack/security/role/{name}", RestApiVersion.V_7).build(), - Route.builder(PUT, "/_security/role/{name}").replaces(PUT, "/_xpack/security/role/{name}", RestApiVersion.V_7).build() - ); + return List.of(new Route(POST, "/_security/role/{name}"), new Route(PUT, "/_security/role/{name}")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestDeleteRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestDeleteRoleMappingAction.java index 5964228009c4b..eb4dca0546254 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestDeleteRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestDeleteRoleMappingAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.client.internal.node.NodeClient; 
import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -37,11 +36,7 @@ public RestDeleteRoleMappingAction(Settings settings, XPackLicenseState licenseS @Override public List routes() { - return List.of( - Route.builder(DELETE, "/_security/role_mapping/{name}") - .replaces(DELETE, "/_xpack/security/role_mapping/{name}", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(DELETE, "/_security/role_mapping/{name}")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestGetRoleMappingsAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestGetRoleMappingsAction.java index 7a3378d843bca..21b20ffa551bd 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestGetRoleMappingsAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestGetRoleMappingsAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -38,12 +37,7 @@ public RestGetRoleMappingsAction(Settings settings, XPackLicenseState licenseSta @Override public List routes() { - return List.of( - Route.builder(GET, "/_security/role_mapping/").replaces(GET, "/_xpack/security/role_mapping/", RestApiVersion.V_7).build(), - Route.builder(GET, "/_security/role_mapping/{name}") - .replaces(GET, "/_xpack/security/role_mapping/{name}", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(GET, "/_security/role_mapping/"), new 
Route(GET, "/_security/role_mapping/{name}")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java index 019b1e5095627..378307022d8d8 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java @@ -42,10 +42,7 @@ public RestPutRoleMappingAction(Settings settings, XPackLicenseState licenseStat @Override public List routes() { - return List.of( - Route.builder(POST, "/_security/role_mapping/{name}").build(), - Route.builder(PUT, "/_security/role_mapping/{name}").build() - ); + return List.of(new Route(POST, "/_security/role_mapping/{name}"), new Route(PUT, "/_security/role_mapping/{name}")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlAuthenticateAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlAuthenticateAction.java index 09527b251b6e8..f59d6984478ee 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlAuthenticateAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlAuthenticateAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequestFilter; @@ -73,11 +72,7 @@ public 
RestSamlAuthenticateAction(Settings settings, XPackLicenseState licenseSt @Override public List routes() { - return List.of( - Route.builder(POST, "/_security/saml/authenticate") - .replaces(POST, "/_xpack/security/saml/authenticate", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(POST, "/_security/saml/authenticate")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlInvalidateSessionAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlInvalidateSessionAction.java index f05bdb9a174dc..f326b1371209e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlInvalidateSessionAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlInvalidateSessionAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -53,9 +52,7 @@ public RestSamlInvalidateSessionAction(Settings settings, XPackLicenseState lice @Override public List routes() { - return List.of( - Route.builder(POST, "/_security/saml/invalidate").replaces(POST, "/_xpack/security/saml/invalidate", RestApiVersion.V_7).build() - ); + return List.of(new Route(POST, "/_security/saml/invalidate")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlLogoutAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlLogoutAction.java index bc271e38d82b4..51e61d62af955 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlLogoutAction.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlLogoutAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -51,9 +50,7 @@ public RestSamlLogoutAction(Settings settings, XPackLicenseState licenseState) { @Override public List routes() { - return List.of( - Route.builder(POST, "/_security/saml/logout").replaces(POST, "/_xpack/security/saml/logout", RestApiVersion.V_7).build() - ); + return List.of(new Route(POST, "/_security/saml/logout")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlPrepareAuthenticationAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlPrepareAuthenticationAction.java index cf3ad0d2edd3a..0903338578bac 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlPrepareAuthenticationAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlPrepareAuthenticationAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -55,9 +54,7 @@ public RestSamlPrepareAuthenticationAction(Settings settings, XPackLicenseState @Override public List routes() { - return List.of( - Route.builder(POST, "/_security/saml/prepare").replaces(POST, "/_xpack/security/saml/prepare", RestApiVersion.V_7).build() - ); + return List.of(new Route(POST, 
"/_security/saml/prepare")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestGetSecuritySettingsAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestGetSecuritySettingsAction.java index 0b4ced0a20444..44b66a69b8f2a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestGetSecuritySettingsAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestGetSecuritySettingsAction.java @@ -19,6 +19,8 @@ import java.io.IOException; import java.util.List; +import static org.elasticsearch.rest.RestRequest.Method.GET; + public class RestGetSecuritySettingsAction extends SecurityBaseRestHandler { public RestGetSecuritySettingsAction(Settings settings, XPackLicenseState licenseState) { @@ -32,7 +34,7 @@ public String getName() { @Override public List routes() { - return List.of(Route.builder(RestRequest.Method.GET, "/_security/settings").build()); + return List.of(new Route(GET, "/_security/settings")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestUpdateSecuritySettingsAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestUpdateSecuritySettingsAction.java index 27ed6d2475d2c..f54b524df9ea1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestUpdateSecuritySettingsAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestUpdateSecuritySettingsAction.java @@ -19,6 +19,8 @@ import java.io.IOException; import java.util.List; +import static org.elasticsearch.rest.RestRequest.Method.PUT; + public class RestUpdateSecuritySettingsAction extends SecurityBaseRestHandler { public RestUpdateSecuritySettingsAction(Settings 
settings, XPackLicenseState licenseState) { @@ -32,7 +34,7 @@ public String getName() { @Override public List routes() { - return List.of(Route.builder(RestRequest.Method.PUT, "/_security/settings").build()); + return List.of(new Route(PUT, "/_security/settings")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestChangePasswordAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestChangePasswordAction.java index 68500c4d07e26..7840c34dbe393 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestChangePasswordAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestChangePasswordAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequestFilter; @@ -48,14 +47,10 @@ public RestChangePasswordAction(Settings settings, SecurityContext securityConte @Override public List routes() { return List.of( - Route.builder(PUT, "/_security/user/{username}/_password") - .replaces(PUT, "/_xpack/security/user/{username}/_password", RestApiVersion.V_7) - .build(), - Route.builder(POST, "/_security/user/{username}/_password") - .replaces(POST, "/_xpack/security/user/{username}/_password", RestApiVersion.V_7) - .build(), - Route.builder(PUT, "/_security/user/_password").replaces(PUT, "/_xpack/security/user/_password", RestApiVersion.V_7).build(), - Route.builder(POST, "/_security/user/_password").replaces(POST, "/_xpack/security/user/_password", RestApiVersion.V_7).build() + new Route(PUT, "/_security/user/{username}/_password"), + new Route(POST, 
"/_security/user/{username}/_password"), + new Route(PUT, "/_security/user/_password"), + new Route(POST, "/_security/user/_password") ); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestDeleteUserAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestDeleteUserAction.java index 55c68b3b4b3f1..01136bdc21025 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestDeleteUserAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestDeleteUserAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -37,11 +36,7 @@ public RestDeleteUserAction(Settings settings, XPackLicenseState licenseState) { @Override public List routes() { - return List.of( - Route.builder(DELETE, "/_security/user/{username}") - .replaces(DELETE, "/_xpack/security/user/{username}", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(DELETE, "/_security/user/{username}")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesAction.java index 96e8ffd74a314..bbf5e0ca71842 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesAction.java @@ -9,7 +9,6 @@ import org.elasticsearch.ElasticsearchSecurityException; import 
org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; @@ -48,9 +47,7 @@ public RestGetUserPrivilegesAction(Settings settings, SecurityContext securityCo @Override public List routes() { - return List.of( - Route.builder(GET, "/_security/user/_privileges").replaces(GET, "/_xpack/security/user/_privileges", RestApiVersion.V_7).build() - ); + return List.of(new Route(GET, "/_security/user/_privileges")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUsersAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUsersAction.java index ebc956add641a..7a24854ef6d9c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUsersAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUsersAction.java @@ -9,7 +9,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -39,10 +38,7 @@ public RestGetUsersAction(Settings settings, XPackLicenseState licenseState) { @Override public List routes() { - return List.of( - Route.builder(GET, "/_security/user/").replaces(GET, "/_xpack/security/user/", RestApiVersion.V_7).build(), - Route.builder(GET, "/_security/user/{username}").replaces(GET, "/_xpack/security/user/{username}", RestApiVersion.V_7).build() - ); + return List.of(new Route(GET, "/_security/user/"), new Route(GET, 
"/_security/user/{username}")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestHasPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestHasPrivilegesAction.java index a896d4855b73d..f2233a7e19fd0 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestHasPrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestHasPrivilegesAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Tuple; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; @@ -60,18 +59,10 @@ public RestHasPrivilegesAction( @Override public List routes() { return List.of( - Route.builder(GET, "/_security/user/{username}/_has_privileges") - .replaces(GET, "/_xpack/security/user/{username}/_has_privileges", RestApiVersion.V_7) - .build(), - Route.builder(POST, "/_security/user/{username}/_has_privileges") - .replaces(POST, "/_xpack/security/user/{username}/_has_privileges", RestApiVersion.V_7) - .build(), - Route.builder(GET, "/_security/user/_has_privileges") - .replaces(GET, "/_xpack/security/user/_has_privileges", RestApiVersion.V_7) - .build(), - Route.builder(POST, "/_security/user/_has_privileges") - .replaces(POST, "/_xpack/security/user/_has_privileges", RestApiVersion.V_7) - .build() + new Route(GET, "/_security/user/{username}/_has_privileges"), + new Route(POST, "/_security/user/{username}/_has_privileges"), + new Route(GET, "/_security/user/_has_privileges"), + new Route(POST, "/_security/user/_has_privileges") ); } diff --git 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestProfileHasPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestProfileHasPrivilegesAction.java index 77c35438d0d0b..a2de75b8b4f64 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestProfileHasPrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestProfileHasPrivilegesAction.java @@ -38,10 +38,7 @@ public RestProfileHasPrivilegesAction(Settings settings, XPackLicenseState licen @Override public List routes() { - return List.of( - Route.builder(GET, "/_security/profile/_has_privileges").build(), - Route.builder(POST, "/_security/profile/_has_privileges").build() - ); + return List.of(new Route(GET, "/_security/profile/_has_privileges"), new Route(POST, "/_security/profile/_has_privileges")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestPutUserAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestPutUserAction.java index 0d20103f06b57..bbaa8ac915127 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestPutUserAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestPutUserAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequestFilter; @@ -45,12 +44,7 @@ public RestPutUserAction(Settings settings, XPackLicenseState licenseState) { @Override public List routes() { - return List.of( - Route.builder(POST, 
"/_security/user/{username}") - .replaces(POST, "/_xpack/security/user/{username}", RestApiVersion.V_7) - .build(), - Route.builder(PUT, "/_security/user/{username}").replaces(PUT, "/_xpack/security/user/{username}", RestApiVersion.V_7).build() - ); + return List.of(new Route(POST, "/_security/user/{username}"), new Route(PUT, "/_security/user/{username}")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestSetEnabledAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestSetEnabledAction.java index f34450cbbe1ef..1ee42b191f31a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestSetEnabledAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestSetEnabledAction.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -40,18 +39,10 @@ public RestSetEnabledAction(Settings settings, XPackLicenseState licenseState) { @Override public List routes() { return List.of( - Route.builder(POST, "/_security/user/{username}/_enable") - .replaces(POST, "/_xpack/security/user/{username}/_enable", RestApiVersion.V_7) - .build(), - Route.builder(PUT, "/_security/user/{username}/_enable") - .replaces(PUT, "/_xpack/security/user/{username}/_enable", RestApiVersion.V_7) - .build(), - Route.builder(POST, "/_security/user/{username}/_disable") - .replaces(POST, "/_xpack/security/user/{username}/_disable", RestApiVersion.V_7) - .build(), - Route.builder(PUT, "/_security/user/{username}/_disable") - .replaces(PUT, "/_xpack/security/user/{username}/_disable", 
RestApiVersion.V_7) - .build() + new Route(POST, "/_security/user/{username}/_enable"), + new Route(PUT, "/_security/user/{username}/_enable"), + new Route(POST, "/_security/user/{username}/_disable"), + new Route(PUT, "/_security/user/{username}/_disable") ); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java index 26b6bfdc0cead..bf68e174f033c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.sql.plugin; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -19,7 +18,6 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.sql.action.Protocol; import org.elasticsearch.xpack.sql.action.SqlClearCursorAction; import org.elasticsearch.xpack.sql.action.SqlClearCursorRequest; import org.elasticsearch.xpack.sql.action.SqlClearCursorResponse; @@ -29,17 +27,14 @@ import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xpack.sql.proto.CoreProtocol.CLEAR_CURSOR_REST_ENDPOINT; @ServerlessScope(Scope.PUBLIC) public class RestSqlClearCursorAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(POST, Protocol.CLEAR_CURSOR_REST_ENDPOINT) - .replaces(POST, Protocol.CLEAR_CURSOR_DEPRECATED_REST_ENDPOINT, RestApiVersion.V_7) - .build() - ); + return List.of(new Route(POST, CLEAR_CURSOR_REST_ENDPOINT)); 
} @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java index 43051e9e16160..63f942714f8de 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.sql.plugin; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.rest.BaseRestHandler; @@ -17,7 +16,6 @@ import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestCancellableNodeClient; import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xpack.sql.action.Protocol; import org.elasticsearch.xpack.sql.action.SqlQueryAction; import org.elasticsearch.xpack.sql.action.SqlQueryRequest; @@ -29,6 +27,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.xpack.ql.util.LoggingUtils.logOnFailure; +import static org.elasticsearch.xpack.sql.proto.CoreProtocol.SQL_QUERY_REST_ENDPOINT; import static org.elasticsearch.xpack.sql.proto.CoreProtocol.URL_PARAM_DELIMITER; @ServerlessScope(Scope.PUBLIC) @@ -37,14 +36,7 @@ public class RestSqlQueryAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(GET, Protocol.SQL_QUERY_REST_ENDPOINT) - .replaces(GET, Protocol.SQL_QUERY_DEPRECATED_REST_ENDPOINT, RestApiVersion.V_7) - .build(), - Route.builder(POST, Protocol.SQL_QUERY_REST_ENDPOINT) - .replaces(POST, Protocol.SQL_QUERY_DEPRECATED_REST_ENDPOINT, RestApiVersion.V_7) - .build() - ); + return List.of(new Route(GET, SQL_QUERY_REST_ENDPOINT), new 
Route(POST, SQL_QUERY_REST_ENDPOINT)); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlStatsAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlStatsAction.java index 7ba50b2f5aaf6..897226f50d75c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlStatsAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlStatsAction.java @@ -8,25 +8,20 @@ package org.elasticsearch.xpack.sql.plugin; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestActions; -import org.elasticsearch.xpack.sql.action.Protocol; import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.xpack.sql.proto.CoreProtocol.SQL_STATS_REST_ENDPOINT; public class RestSqlStatsAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(GET, Protocol.SQL_STATS_REST_ENDPOINT) - .replaces(GET, Protocol.SQL_STATS_DEPRECATED_REST_ENDPOINT, RestApiVersion.V_7) - .build() - ); + return List.of(new Route(GET, SQL_STATS_REST_ENDPOINT)); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java index 04565d77b68d2..690c340e30d44 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java @@ -7,14 +7,12 @@ package org.elasticsearch.xpack.sql.plugin; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import 
org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xpack.sql.action.Protocol; import org.elasticsearch.xpack.sql.action.SqlTranslateAction; import org.elasticsearch.xpack.sql.action.SqlTranslateRequest; @@ -23,6 +21,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xpack.sql.proto.CoreProtocol.SQL_TRANSLATE_REST_ENDPOINT; /** * REST action for translating SQL queries into ES requests @@ -32,14 +31,7 @@ public class RestSqlTranslateAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(GET, Protocol.SQL_TRANSLATE_REST_ENDPOINT) - .replaces(GET, Protocol.SQL_TRANSLATE_DEPRECATED_REST_ENDPOINT, RestApiVersion.V_7) - .build(), - Route.builder(POST, Protocol.SQL_TRANSLATE_REST_ENDPOINT) - .replaces(POST, Protocol.SQL_TRANSLATE_DEPRECATED_REST_ENDPOINT, RestApiVersion.V_7) - .build() - ); + return List.of(new Route(GET, SQL_TRANSLATE_REST_ENDPOINT), new Route(POST, SQL_TRANSLATE_REST_ENDPOINT)); } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestAckWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestAckWatchAction.java index a01bf660ba8d5..f298a1cf00624 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestAckWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestAckWatchAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.watcher.rest.action; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import 
org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -34,14 +33,10 @@ public class RestAckWatchAction extends BaseRestHandler { @Override public List routes() { return List.of( - Route.builder(POST, "/_watcher/watch/{id}/_ack").replaces(POST, "/_xpack/watcher/watch/{id}/_ack", RestApiVersion.V_7).build(), - Route.builder(PUT, "/_watcher/watch/{id}/_ack").replaces(PUT, "/_xpack/watcher/watch/{id}/_ack", RestApiVersion.V_7).build(), - Route.builder(POST, "/_watcher/watch/{id}/_ack/{actions}") - .replaces(POST, "/_xpack/watcher/watch/{id}/_ack/{actions}", RestApiVersion.V_7) - .build(), - Route.builder(PUT, "/_watcher/watch/{id}/_ack/{actions}") - .replaces(PUT, "/_xpack/watcher/watch/{id}/_ack/{actions}", RestApiVersion.V_7) - .build() + new Route(POST, "/_watcher/watch/{id}/_ack"), + new Route(PUT, "/_watcher/watch/{id}/_ack"), + new Route(POST, "/_watcher/watch/{id}/_ack/{actions}"), + new Route(PUT, "/_watcher/watch/{id}/_ack/{actions}") ); } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestActivateWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestActivateWatchAction.java index 18c9e5a2825f4..b8b466883f7fe 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestActivateWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestActivateWatchAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.watcher.rest.action; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -33,14 +32,7 @@ public class RestActivateWatchAction extends BaseRestHandler { @Override public List routes() { - return List.of( - 
Route.builder(POST, "/_watcher/watch/{id}/_activate") - .replaces(POST, "/_xpack/watcher/watch/{id}/_activate", RestApiVersion.V_7) - .build(), - Route.builder(PUT, "/_watcher/watch/{id}/_activate") - .replaces(PUT, "/_xpack/watcher/watch/{id}/_activate", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(POST, "/_watcher/watch/{id}/_activate"), new Route(PUT, "/_watcher/watch/{id}/_activate")); } @Override @@ -72,14 +64,7 @@ public static class DeactivateRestHandler extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(POST, "/_watcher/watch/{id}/_deactivate") - .replaces(POST, "/_xpack/watcher/watch/{id}/_deactivate", RestApiVersion.V_7) - .build(), - Route.builder(PUT, "/_watcher/watch/{id}/_deactivate") - .replaces(PUT, "/_xpack/watcher/watch/{id}/_deactivate", RestApiVersion.V_7) - .build() - ); + return List.of(new Route(POST, "/_watcher/watch/{id}/_deactivate"), new Route(PUT, "/_watcher/watch/{id}/_deactivate")); } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestDeleteWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestDeleteWatchAction.java index 3c2ddc7a254d1..e909a34157583 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestDeleteWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestDeleteWatchAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.watcher.rest.action; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse; import org.elasticsearch.rest.BaseRestHandler; @@ -29,9 +28,7 @@ public class RestDeleteWatchAction extends BaseRestHandler { @Override public List routes() { - return List.of( - 
Route.builder(DELETE, "/_watcher/watch/{id}").replaces(DELETE, "/_xpack/watcher/watch/{id}", RestApiVersion.V_7).build() - ); + return List.of(new Route(DELETE, "/_watcher/watch/{id}")); } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestExecuteWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestExecuteWatchAction.java index 56d249c427be1..a25e7704c0653 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestExecuteWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestExecuteWatchAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequestFilter; @@ -55,14 +54,10 @@ public class RestExecuteWatchAction extends BaseRestHandler implements RestReque @Override public List routes() { return List.of( - Route.builder(POST, "/_watcher/watch/{id}/_execute") - .replaces(POST, "/_xpack/watcher/watch/{id}/_execute", RestApiVersion.V_7) - .build(), - Route.builder(PUT, "/_watcher/watch/{id}/_execute") - .replaces(PUT, "/_xpack/watcher/watch/{id}/_execute", RestApiVersion.V_7) - .build(), - Route.builder(POST, "/_watcher/watch/_execute").replaces(POST, "/_xpack/watcher/watch/_execute", RestApiVersion.V_7).build(), - Route.builder(PUT, "/_watcher/watch/_execute").replaces(PUT, "/_xpack/watcher/watch/_execute", RestApiVersion.V_7).build() + new Route(POST, "/_watcher/watch/{id}/_execute"), + new Route(PUT, "/_watcher/watch/{id}/_execute"), + new Route(POST, "/_watcher/watch/_execute"), + new Route(PUT, "/_watcher/watch/_execute") ); } diff --git 
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestGetWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestGetWatchAction.java index 5467297ab1e72..f296e9869cd9e 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestGetWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestGetWatchAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.watcher.rest.action; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -29,7 +28,7 @@ public class RestGetWatchAction extends BaseRestHandler { @Override public List routes() { - return List.of(Route.builder(GET, "/_watcher/watch/{id}").replaces(GET, "/_xpack/watcher/watch/{id}", RestApiVersion.V_7).build()); + return List.of(new Route(GET, "/_watcher/watch/{id}")); } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestGetWatcherSettingsAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestGetWatcherSettingsAction.java index 73a933c9c2e46..ff9e8c45d72f3 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestGetWatcherSettingsAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestGetWatcherSettingsAction.java @@ -17,6 +17,8 @@ import java.io.IOException; import java.util.List; +import static org.elasticsearch.rest.RestRequest.Method.GET; + /** * Allows retrieving a subset of index settings (those use-settable) for the .watches index. * See {@link RestUpdateWatcherSettingsAction} for the setting counterpart. 
@@ -29,7 +31,7 @@ public String getName() { @Override public List routes() { - return List.of(Route.builder(RestRequest.Method.GET, "/_watcher/settings").build()); + return List.of(new Route(GET, "/_watcher/settings")); } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestPutWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestPutWatchAction.java index 42820dabf9d7d..9dba72b1f64c3 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestPutWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestPutWatchAction.java @@ -9,7 +9,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.lucene.uid.Versions; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; import org.elasticsearch.rest.BaseRestHandler; @@ -33,10 +32,7 @@ public class RestPutWatchAction extends BaseRestHandler implements RestRequestFi @Override public List routes() { - return List.of( - Route.builder(POST, "/_watcher/watch/{id}").replaces(POST, "/_xpack/watcher/watch/{id}", RestApiVersion.V_7).build(), - Route.builder(PUT, "/_watcher/watch/{id}").replaces(PUT, "/_xpack/watcher/watch/{id}", RestApiVersion.V_7).build() - ); + return List.of(new Route(POST, "/_watcher/watch/{id}"), new Route(PUT, "/_watcher/watch/{id}")); } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestUpdateWatcherSettingsAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestUpdateWatcherSettingsAction.java index 26f64a0918141..db14a6572fa99 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestUpdateWatcherSettingsAction.java +++ 
b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestUpdateWatcherSettingsAction.java @@ -17,6 +17,8 @@ import java.io.IOException; import java.util.List; +import static org.elasticsearch.rest.RestRequest.Method.PUT; + /** * Allows setting a subset of index settings for the .watches index. * See {@link RestGetWatcherSettingsAction} for the retrieval counterpart. @@ -29,7 +31,7 @@ public String getName() { @Override public List routes() { - return List.of(Route.builder(RestRequest.Method.PUT, "/_watcher/settings").build()); + return List.of(new Route(PUT, "/_watcher/settings")); } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatchServiceAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatchServiceAction.java index f6b9ae9c4031c..a52e0882fa81e 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatchServiceAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatchServiceAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.watcher.rest.action; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestUtils; @@ -24,7 +23,7 @@ public class RestWatchServiceAction extends BaseRestHandler { @Override public List routes() { - return List.of(Route.builder(POST, "/_watcher/_start").replaces(POST, "/_xpack/watcher/_start", RestApiVersion.V_7).build()); + return List.of(new Route(POST, "/_watcher/_start")); } @Override @@ -42,7 +41,7 @@ public static class StopRestHandler extends BaseRestHandler { @Override public List routes() { - return List.of(Route.builder(POST, "/_watcher/_stop").replaces(POST, "/_xpack/watcher/_stop", RestApiVersion.V_7).build()); + return 
List.of(new Route(POST, "/_watcher/_stop")); } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatcherStatsAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatcherStatsAction.java index f7a59553a65ff..2c04501ee66d6 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatcherStatsAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatcherStatsAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestActions; @@ -29,10 +28,7 @@ public class RestWatcherStatsAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(GET, "/_watcher/stats").replaces(GET, "/_xpack/watcher/stats", RestApiVersion.V_7).build(), - Route.builder(GET, "/_watcher/stats/{metric}").replaces(GET, "/_xpack/watcher/stats/{metric}", RestApiVersion.V_7).build() - ); + return List.of(new Route(GET, "/_watcher/stats"), new Route(GET, "/_watcher/stats/{metric}")); } @Override From 38d9710e61744f94a78b053d6aaa006d6b60cbf1 Mon Sep 17 00:00:00 2001 From: Dan Rubinstein Date: Tue, 15 Oct 2024 11:23:23 -0400 Subject: [PATCH 114/449] Set min number of allocations for ElasticSearchInternalService to 0 (#114829) * Set min number of allocations for ElasticSearchInternalService to 0 * Updating IT tests with new min allocations value --------- Co-authored-by: Elastic Machine --- .../org/elasticsearch/xpack/inference/DefaultEndPointsIT.java | 4 ++-- .../services/elasticsearch/ElasticsearchInternalService.java | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff 
--git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java index 083bad2c91613..3a774a7a37d93 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java @@ -64,7 +64,7 @@ private static void assertDefaultElserConfig(Map modelConfig) { assertThat( modelConfig.toString(), adaptiveAllocations, - Matchers.is(Map.of("enabled", true, "min_number_of_allocations", 1, "max_number_of_allocations", 8)) + Matchers.is(Map.of("enabled", true, "min_number_of_allocations", 0, "max_number_of_allocations", 8)) ); } @@ -99,7 +99,7 @@ private static void assertDefaultE5Config(Map modelConfig) { assertThat( modelConfig.toString(), adaptiveAllocations, - Matchers.is(Map.of("enabled", true, "min_number_of_allocations", 1, "max_number_of_allocations", 8)) + Matchers.is(Map.of("enabled", true, "min_number_of_allocations", 0, "max_number_of_allocations", 8)) ); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 396c679bf322e..8f7b9b79c28d7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -854,7 +854,7 @@ private List defaultConfigs(boolean useLinuxOptimizedModel) { null, 1, useLinuxOptimizedModel ? 
ELSER_V2_MODEL_LINUX_X86 : ELSER_V2_MODEL, - new AdaptiveAllocationsSettings(Boolean.TRUE, 1, 8) + new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 8) ), ElserMlNodeTaskSettings.DEFAULT, null // default chunking settings @@ -867,7 +867,7 @@ private List defaultConfigs(boolean useLinuxOptimizedModel) { null, 1, useLinuxOptimizedModel ? MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86 : MULTILINGUAL_E5_SMALL_MODEL_ID, - new AdaptiveAllocationsSettings(Boolean.TRUE, 1, 8) + new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 8) ), null // default chunking settings ); From 2f1f24d7f22709f8932225981553d0b8bd66cd2f Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Tue, 15 Oct 2024 18:27:53 +0300 Subject: [PATCH 115/449] Updating queries used in rrf with text similarity tests (#114838) --- muted-tests.yml | 3 --- .../rrf/800_rrf_with_text_similarity_reranker_retriever.yml | 6 ++++++ 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 1c7ee862d6453..042ceb609a2f8 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -381,9 +381,6 @@ tests: - class: org.elasticsearch.xpack.enrich.EnrichRestIT method: test {p0=enrich/20_standard_index/enrich stats REST response structure} issue: https://github.com/elastic/elasticsearch/issues/114753 -- class: org.elasticsearch.xpack.rank.rrf.RRFRankClientYamlTestSuiteIT - method: test {yaml=rrf/800_rrf_with_text_similarity_reranker_retriever/explain using rrf retriever and text-similarity} - issue: https://github.com/elastic/elasticsearch/issues/114757 - class: org.elasticsearch.xpack.enrich.EnrichRestIT method: test {p0=enrich/30_tsdb_index/enrich documents over _bulk} issue: https://github.com/elastic/elasticsearch/issues/114761 diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/800_rrf_with_text_similarity_reranker_retriever.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/800_rrf_with_text_similarity_reranker_retriever.yml index 
3e758ae11f7e6..105efcec8bc65 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/800_rrf_with_text_similarity_reranker_retriever.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/800_rrf_with_text_similarity_reranker_retriever.yml @@ -125,6 +125,9 @@ setup: term: { topic: "science" } + }, + "sort": { + "integer": "asc" } } }, @@ -303,6 +306,9 @@ setup: term: { topic: "science" } + }, + "sort": { + "integer": "asc" } } }, From e87b894f68ff5422737fdb32ce8c93fd7dea9e1c Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Tue, 15 Oct 2024 12:13:25 -0400 Subject: [PATCH 116/449] Fix bbq index feature exposure for testing & remove feature flag (#114832) We actually don't need a cluster feature, a capability added if the feature flag is enabled is enough for testing. closes https://github.com/elastic/elasticsearch/issues/114787 --- muted-tests.yml | 2 -- .../elasticsearch/index/mapper/MapperFeatures.java | 11 +++-------- .../index/mapper/vectors/DenseVectorFieldMapper.java | 5 ----- .../mapper/vectors/DenseVectorFieldMapperTests.java | 11 +++-------- 4 files changed, 6 insertions(+), 23 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 042ceb609a2f8..b105c77b34521 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -408,8 +408,6 @@ tests: - class: org.elasticsearch.xpack.enrich.EnrichIT method: testDeleteExistingPipeline issue: https://github.com/elastic/elasticsearch/issues/114775 -- class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT - issue: https://github.com/elastic/elasticsearch/issues/114787 - class: org.elasticsearch.xpack.inference.rest.ServerSentEventsRestActionListenerTests method: testNoStream issue: https://github.com/elastic/elasticsearch/issues/114788 diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java index dbaa1f3a04ab9..31c89b2fc8ad4 100644 --- 
a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java @@ -9,7 +9,6 @@ package org.elasticsearch.index.mapper; -import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexSettings; @@ -29,7 +28,7 @@ public class MapperFeatures implements FeatureSpecification { @Override public Set getFeatures() { - Set features = Set.of( + return Set.of( BWC_WORKAROUND_9_0, IgnoredSourceFieldMapper.TRACK_IGNORED_SOURCE, PassThroughObjectMapper.PASS_THROUGH_PRIORITY, @@ -53,13 +52,9 @@ public Set getFeatures() { IndexSettings.IGNORE_ABOVE_INDEX_LEVEL_SETTING, SourceFieldMapper.SYNTHETIC_SOURCE_COPY_TO_INSIDE_OBJECTS_FIX, TimeSeriesRoutingHashFieldMapper.TS_ROUTING_HASH_FIELD_PARSES_BYTES_REF, - FlattenedFieldMapper.IGNORE_ABOVE_WITH_ARRAYS_SUPPORT + FlattenedFieldMapper.IGNORE_ABOVE_WITH_ARRAYS_SUPPORT, + DenseVectorFieldMapper.BBQ_FORMAT ); - // BBQ is currently behind a feature flag for testing - if (DenseVectorFieldMapper.BBQ_FEATURE_FLAG.isEnabled()) { - return Sets.union(features, Set.of(DenseVectorFieldMapper.BBQ_FORMAT)); - } - return features; } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 52ff7a3014d1d..a023837a0efb7 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -36,7 +36,6 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.VectorUtil; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.util.FeatureFlag; import org.elasticsearch.common.xcontent.support.XContentMapValues; import 
org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexVersion; @@ -110,7 +109,6 @@ public static boolean isNotUnitVector(float magnitude) { public static final NodeFeature INT4_QUANTIZATION = new NodeFeature("mapper.vectors.int4_quantization"); public static final NodeFeature BIT_VECTORS = new NodeFeature("mapper.vectors.bit_vectors"); public static final NodeFeature BBQ_FORMAT = new NodeFeature("mapper.vectors.bbq"); - public static final FeatureFlag BBQ_FEATURE_FLAG = new FeatureFlag("bbq_index_format"); public static final IndexVersion MAGNITUDE_STORED_INDEX_VERSION = IndexVersions.V_7_5_0; public static final IndexVersion INDEXED_BY_DEFAULT_INDEX_VERSION = IndexVersions.FIRST_DETACHED_INDEX_VERSION; @@ -2259,9 +2257,6 @@ private static IndexOptions parseIndexOptions(String fieldName, Object propNode) throw new MapperParsingException("Unknown vector index options type [" + type + "] for field [" + fieldName + "]"); } VectorIndexType parsedType = vectorIndexType.get(); - if ((parsedType == VectorIndexType.BBQ_FLAT || parsedType == VectorIndexType.BBQ_HNSW) && BBQ_FEATURE_FLAG.isEnabled() == false) { - throw new MapperParsingException("Unknown vector index options type [" + type + "] for field [" + fieldName + "]"); - } return parsedType.parseIndexOptions(fieldName, indexOptionsMap); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index cd7ff54ffc938..de084cd4582e2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -63,7 +63,6 @@ import static org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH; import static org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN; -import static 
org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.BBQ_FEATURE_FLAG; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -1228,11 +1227,9 @@ public void testInvalidParameters() { e.getMessage(), containsString("Failed to parse mapping: Mapping definition for [field] has unsupported parameters: [foo : {}]") ); - List floatOnlyQuantizations = new ArrayList<>(Arrays.asList("int4_hnsw", "int8_hnsw", "int8_flat", "int4_flat")); - if (BBQ_FEATURE_FLAG.isEnabled()) { - floatOnlyQuantizations.add("bbq_hnsw"); - floatOnlyQuantizations.add("bbq_flat"); - } + List floatOnlyQuantizations = new ArrayList<>( + Arrays.asList("int4_hnsw", "int8_hnsw", "int8_flat", "int4_flat", "bbq_hnsw", "bbq_flat") + ); for (String quantizationKind : floatOnlyQuantizations) { e = expectThrows( MapperParsingException.class, @@ -1946,7 +1943,6 @@ public void testKnnQuantizedHNSWVectorsFormat() throws IOException { } public void testKnnBBQHNSWVectorsFormat() throws IOException { - assumeTrue("BBQ vectors are not supported in the current version", BBQ_FEATURE_FLAG.isEnabled()); final int m = randomIntBetween(1, DEFAULT_MAX_CONN + 10); final int efConstruction = randomIntBetween(1, DEFAULT_BEAM_WIDTH + 10); final int dims = randomIntBetween(64, 4096); @@ -1985,7 +1981,6 @@ public void testKnnBBQHNSWVectorsFormat() throws IOException { } public void testInvalidVectorDimensionsBBQ() { - assumeTrue("BBQ vectors are not supported in the current version", BBQ_FEATURE_FLAG.isEnabled()); for (String quantizedFlatFormat : new String[] { "bbq_hnsw", "bbq_flat" }) { MapperParsingException e = expectThrows(MapperParsingException.class, () -> createDocumentMapper(fieldMapping(b -> { b.field("type", "dense_vector"); From 3af4d67fac206cd802e5316fed1c7b2e8561b86b Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Tue, 15 Oct 2024 18:53:09 +0200 
Subject: [PATCH 117/449] Allow synthetic source and disabled source for standard indices (#114817) When using the index.mapping.source.mode setting we need to make sure that it takes precedence and that is used also when standard index mode is used. Without this patch we always return stored source if _source.mode is not used and the setting is. Relates #114433 --- .../index/mapper/SourceFieldMapper.java | 98 ++++++++--- .../index/mapper/SourceFieldMapperTests.java | 161 ++++++++++++++++++ 2 files changed, 239 insertions(+), 20 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index f9b9de97715ed..ea1ffdb7c019f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -88,6 +88,42 @@ public enum Mode { true ); + private static final SourceFieldMapper DEFAULT_DISABLED = new SourceFieldMapper( + Mode.DISABLED, + Explicit.IMPLICIT_TRUE, + Strings.EMPTY_ARRAY, + Strings.EMPTY_ARRAY, + null, + true + ); + + private static final SourceFieldMapper DEFAULT_DISABLED_NO_RECOVERY_SOURCE = new SourceFieldMapper( + Mode.DISABLED, + Explicit.IMPLICIT_TRUE, + Strings.EMPTY_ARRAY, + Strings.EMPTY_ARRAY, + null, + false + ); + + private static final SourceFieldMapper DEFAULT_SYNTHETIC = new SourceFieldMapper( + Mode.SYNTHETIC, + Explicit.IMPLICIT_TRUE, + Strings.EMPTY_ARRAY, + Strings.EMPTY_ARRAY, + null, + true + ); + + private static final SourceFieldMapper DEFAULT_SYNTHETIC_NO_RECOVERY_SOURCE = new SourceFieldMapper( + Mode.SYNTHETIC, + Explicit.IMPLICIT_TRUE, + Strings.EMPTY_ARRAY, + Strings.EMPTY_ARRAY, + null, + false + ); + private static final SourceFieldMapper DEFAULT_NO_RECOVERY_SOURCE = new SourceFieldMapper( null, Explicit.IMPLICIT_TRUE, @@ -297,7 +333,7 @@ public SourceFieldMapper build() { ? 
INDEX_MAPPER_SOURCE_MODE_SETTING.get(settings) : mode.get(); if (isDefault(sourceMode)) { - return resolveSourceMode(indexMode, sourceMode, enableRecoverySource); + return resolveSourceMode(indexMode, sourceMode == null ? Mode.STORED : sourceMode, enableRecoverySource); } if (supportsNonDefaultParameterValues == false) { @@ -340,25 +376,39 @@ public SourceFieldMapper build() { } private static SourceFieldMapper resolveSourceMode(final IndexMode indexMode, final Mode sourceMode, boolean enableRecoverySource) { - if (indexMode == IndexMode.STANDARD) { - return enableRecoverySource ? DEFAULT : DEFAULT_NO_RECOVERY_SOURCE; + switch (indexMode) { + case STANDARD: + switch (sourceMode) { + case SYNTHETIC: + return enableRecoverySource ? DEFAULT_SYNTHETIC : DEFAULT_SYNTHETIC_NO_RECOVERY_SOURCE; + case STORED: + return enableRecoverySource ? DEFAULT : DEFAULT_NO_RECOVERY_SOURCE; + case DISABLED: + return enableRecoverySource ? DEFAULT_DISABLED : DEFAULT_DISABLED_NO_RECOVERY_SOURCE; + default: + throw new IllegalArgumentException("Unsupported source mode: " + sourceMode); + } + case TIME_SERIES: + case LOGSDB: + switch (sourceMode) { + case SYNTHETIC: + return enableRecoverySource + ? (indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT : LOGSDB_DEFAULT) + : (indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT_NO_RECOVERY_SOURCE : LOGSDB_DEFAULT_NO_RECOVERY_SOURCE); + case STORED: + return enableRecoverySource + ? (indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT_STORED : LOGSDB_DEFAULT_STORED) + : (indexMode == IndexMode.TIME_SERIES + ? 
TSDB_DEFAULT_NO_RECOVERY_SOURCE_STORED + : LOGSDB_DEFAULT_NO_RECOVERY_SOURCE_STORED); + case DISABLED: + throw new IllegalArgumentException("_source can not be disabled in index using [" + indexMode + "] index mode"); + default: + throw new IllegalArgumentException("Unsupported source mode: " + sourceMode); + } + default: + throw new IllegalArgumentException("Unsupported index mode: " + indexMode); } - final SourceFieldMapper syntheticWithoutRecoverySource = indexMode == IndexMode.TIME_SERIES - ? TSDB_DEFAULT_NO_RECOVERY_SOURCE - : LOGSDB_DEFAULT_NO_RECOVERY_SOURCE; - final SourceFieldMapper syntheticWithRecoverySource = indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT : LOGSDB_DEFAULT; - final SourceFieldMapper storedWithoutRecoverySource = indexMode == IndexMode.TIME_SERIES - ? TSDB_DEFAULT_NO_RECOVERY_SOURCE_STORED - : LOGSDB_DEFAULT_NO_RECOVERY_SOURCE_STORED; - final SourceFieldMapper storedWithRecoverySource = indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT_STORED : LOGSDB_DEFAULT_STORED; - - return switch (sourceMode) { - case SYNTHETIC -> enableRecoverySource ? syntheticWithRecoverySource : syntheticWithoutRecoverySource; - case STORED -> enableRecoverySource ? storedWithRecoverySource : storedWithoutRecoverySource; - case DISABLED -> throw new IllegalArgumentException( - "_source can not be disabled in index using [" + indexMode + "] index mode" - ); - }; } public static final TypeParser PARSER = new ConfigurableTypeParser(c -> { @@ -371,7 +421,7 @@ private static SourceFieldMapper resolveSourceMode(final IndexMode indexMode, fi return enableRecoverySource ? TSDB_LEGACY_DEFAULT : TSDB_LEGACY_DEFAULT_NO_RECOVERY_SOURCE; } } - return resolveSourceMode(indexMode, settingSourceMode, enableRecoverySource); + return resolveSourceMode(indexMode, settingSourceMode == null ? 
Mode.STORED : settingSourceMode, enableRecoverySource); }, c -> new Builder( c.getIndexSettings().getMode(), @@ -541,4 +591,12 @@ public SourceLoader newSourceLoader(Mapping mapping, SourceFieldMetrics metrics) public boolean isSynthetic() { return mode == Mode.SYNTHETIC; } + + public boolean isDisabled() { + return mode == Mode.DISABLED; + } + + public boolean isStored() { + return mode == null || mode == Mode.STORED; + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index 2f417e688cb97..df6d9380fd141 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -443,6 +443,167 @@ public void testRecoverySourceWithLogs() throws IOException { } } + public void testStandardIndexModeWithSourceModeSetting() throws IOException { + // Test for IndexMode.STANDARD + { + final XContentBuilder mappings = topMapping(b -> {}); + final Settings settings = Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.STANDARD.name()) + .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC) + .build(); + final MapperService mapperService = createMapperService(settings, mappings); + DocumentMapper docMapper = mapperService.documentMapper(); + assertTrue(docMapper.sourceMapper().isSynthetic()); + } + { + final XContentBuilder mappings = topMapping(b -> {}); + final Settings settings = Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.STANDARD.name()) + .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.STORED) + .build(); + final MapperService mapperService = createMapperService(settings, mappings); + final DocumentMapper docMapper = mapperService.documentMapper(); + assertTrue(docMapper.sourceMapper().isStored()); + } + { + final 
XContentBuilder mappings = topMapping(b -> {}); + final Settings settings = Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.STANDARD.name()) + .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.DISABLED) + .build(); + final MapperService mapperService = createMapperService(settings, mappings); + final DocumentMapper docMapper = mapperService.documentMapper(); + assertTrue(docMapper.sourceMapper().isDisabled()); + } + + // Test for IndexMode.LOGSDB + { + final XContentBuilder mappings = topMapping(b -> {}); + final Settings settings = Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) + .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC) + .build(); + final MapperService mapperService = createMapperService(settings, mappings); + DocumentMapper docMapper = mapperService.documentMapper(); + assertTrue(docMapper.sourceMapper().isSynthetic()); + } + { + final XContentBuilder mappings = topMapping(b -> {}); + final Settings settings = Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) + .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.STORED) + .build(); + final MapperService mapperService = createMapperService(settings, mappings); + final DocumentMapper docMapper = mapperService.documentMapper(); + assertTrue(docMapper.sourceMapper().isStored()); + } + { + final XContentBuilder mappings = topMapping(b -> {}); + final Settings settings = Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) + .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.DISABLED) + .build(); + var ex = expectThrows(MapperParsingException.class, () -> createMapperService(settings, mappings)); + assertEquals("Failed to parse mapping: _source can not be disabled in index using [logsdb] index mode", ex.getMessage()); + } + + // Test for 
IndexMode.TIME_SERIES + { + final String mappings = """ + { + "_doc" : { + "properties": { + "routing_field": { + "type": "keyword", + "time_series_dimension": true + } + } + } + } + """; + final Settings settings = Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES.name()) + .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC) + .put(IndexMetadata.INDEX_ROUTING_PATH.getKey(), "routing_field") + .build(); + final MapperService mapperService = createMapperService(settings, mappings); + DocumentMapper docMapper = mapperService.documentMapper(); + assertTrue(docMapper.sourceMapper().isSynthetic()); + } + { + final String mappings = """ + { + "_doc" : { + "properties": { + "routing_field": { + "type": "keyword", + "time_series_dimension": true + } + } + } + } + """; + final Settings settings = Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES.name()) + .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.STORED) + .put(IndexMetadata.INDEX_ROUTING_PATH.getKey(), "routing_field") + .build(); + final MapperService mapperService = createMapperService(settings, mappings); + final DocumentMapper docMapper = mapperService.documentMapper(); + assertTrue(docMapper.sourceMapper().isStored()); + } + { + final String mappings = """ + { + "_doc" : { + "properties": { + "routing_field": { + "type": "keyword", + "time_series_dimension": true + } + } + } + } + """; + final Settings settings = Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES.name()) + .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.DISABLED) + .put(IndexMetadata.INDEX_ROUTING_PATH.getKey(), "routing_field") + .build(); + var ex = expectThrows(MapperParsingException.class, () -> createMapperService(settings, mappings)); + assertEquals("Failed to parse mapping: _source can not be disabled in index using [time_series] 
index mode", ex.getMessage()); + } + + // Test cases without IndexMode (default to standard) + { + final XContentBuilder mappings = topMapping(b -> {}); + final Settings settings = Settings.builder() + .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC) + .build(); + final MapperService mapperService = createMapperService(settings, mappings); + DocumentMapper docMapper = mapperService.documentMapper(); + assertTrue(docMapper.sourceMapper().isSynthetic()); + } + { + final XContentBuilder mappings = topMapping(b -> {}); + final Settings settings = Settings.builder() + .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.STORED) + .build(); + final MapperService mapperService = createMapperService(settings, mappings); + final DocumentMapper docMapper = mapperService.documentMapper(); + assertTrue(docMapper.sourceMapper().isStored()); + } + { + final XContentBuilder mappings = topMapping(b -> {}); + final Settings settings = Settings.builder() + .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.DISABLED) + .build(); + final MapperService mapperService = createMapperService(settings, mappings); + final DocumentMapper docMapper = mapperService.documentMapper(); + assertTrue(docMapper.sourceMapper().isDisabled()); + } + } + public void testRecoverySourceWithLogsCustom() throws IOException { XContentBuilder mappings = topMapping(b -> b.startObject(SourceFieldMapper.NAME).field("mode", "synthetic").endObject()); { From 5e59ab5018dc87026ca2407bd3e772373e8b59f4 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Tue, 15 Oct 2024 10:13:37 -0700 Subject: [PATCH 118/449] ESQL: Fix grammar changes around per agg filtering (#114848) Remove dev flag left in grammar for agg filtering Related to #113735 --- docs/changelog/114848.yaml | 5 + .../esql/src/main/antlr/EsqlBaseLexer.g4 | 2 +- .../esql/src/main/antlr/EsqlBaseParser.g4 | 2 +- 
.../xpack/esql/parser/EsqlBaseLexer.interp | 2 +- .../xpack/esql/parser/EsqlBaseLexer.java | 1690 ++++++++--------- .../xpack/esql/parser/EsqlBaseParser.interp | 2 +- .../xpack/esql/parser/EsqlBaseParser.java | 952 +++++----- 7 files changed, 1319 insertions(+), 1336 deletions(-) create mode 100644 docs/changelog/114848.yaml diff --git a/docs/changelog/114848.yaml b/docs/changelog/114848.yaml new file mode 100644 index 0000000000000..db41e8496f787 --- /dev/null +++ b/docs/changelog/114848.yaml @@ -0,0 +1,5 @@ +pr: 114848 +summary: "ESQL: Fix grammar changes around per agg filtering" +area: ES|QL +type: bug +issues: [] diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index b13606befd2a4..051e83129d12d 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -209,7 +209,7 @@ SLASH : '/'; PERCENT : '%'; MATCH : 'match'; -NESTED_WHERE : {this.isDevVersion()}? WHERE -> type(WHERE); +NESTED_WHERE : WHERE -> type(WHERE); NAMED_OR_POSITIONAL_PARAM : PARAM (LETTER | UNDERSCORE) UNQUOTED_ID_BODY* diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index 9a95e0e6726ba..7d489417ab4ca 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -173,7 +173,7 @@ aggFields ; aggField - : field {this.isDevVersion()}? (WHERE booleanExpression)? + : field (WHERE booleanExpression)? 
; qualifiedName diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index aa6ddfb433d23..1a0105b9951d2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -466,4 +466,4 @@ METRICS_MODE CLOSING_METRICS_MODE atn: -[4, 0, 120, 1472, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 
99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 
1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 4, 19, 578, 8, 19, 11, 19, 12, 19, 579, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 588, 8, 20, 10, 20, 12, 20, 591, 9, 20, 1, 20, 3, 20, 594, 8, 20, 1, 20, 3, 20, 597, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 606, 8, 21, 10, 21, 12, 21, 609, 9, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 4, 22, 617, 8, 22, 11, 22, 12, 22, 618, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 28, 1, 28, 3, 28, 638, 8, 28, 1, 28, 4, 28, 641, 8, 28, 11, 28, 12, 28, 642, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 3, 31, 652, 8, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 659, 8, 33, 1, 34, 1, 34, 1, 34, 5, 34, 664, 8, 34, 10, 34, 12, 34, 667, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 675, 8, 34, 10, 34, 12, 34, 678, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 3, 34, 685, 8, 34, 1, 34, 3, 34, 688, 8, 34, 3, 34, 690, 8, 34, 1, 35, 4, 35, 693, 8, 35, 11, 35, 12, 35, 694, 1, 36, 4, 36, 698, 8, 36, 11, 36, 12, 36, 699, 1, 36, 1, 36, 5, 36, 704, 8, 36, 10, 36, 12, 36, 707, 9, 36, 1, 36, 1, 36, 4, 36, 711, 8, 36, 11, 36, 12, 36, 712, 1, 36, 4, 36, 716, 8, 36, 11, 36, 12, 36, 717, 1, 36, 1, 36, 5, 36, 722, 8, 36, 10, 36, 12, 36, 725, 9, 36, 3, 36, 727, 8, 36, 1, 36, 1, 36, 1, 36, 1, 36, 4, 36, 733, 8, 36, 11, 36, 12, 36, 734, 1, 36, 1, 36, 
3, 36, 739, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 3, 74, 872, 8, 74, 1, 74, 5, 74, 875, 8, 74, 10, 74, 12, 74, 878, 9, 74, 1, 74, 1, 74, 4, 74, 882, 8, 74, 11, 74, 12, 74, 883, 3, 74, 886, 8, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 5, 77, 900, 8, 77, 10, 77, 12, 77, 903, 9, 77, 1, 77, 1, 77, 3, 77, 907, 8, 77, 1, 77, 4, 77, 910, 8, 77, 11, 77, 12, 77, 911, 3, 77, 914, 8, 77, 1, 78, 1, 78, 4, 78, 918, 8, 78, 11, 78, 12, 78, 919, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 3, 95, 997, 8, 95, 1, 96, 4, 96, 1000, 8, 96, 11, 96, 12, 96, 1001, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 
98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 3, 107, 1049, 8, 107, 1, 108, 1, 108, 3, 108, 1053, 8, 108, 1, 108, 5, 108, 1056, 8, 108, 10, 108, 12, 108, 1059, 9, 108, 1, 108, 1, 108, 3, 108, 1063, 8, 108, 1, 108, 4, 108, 1066, 8, 108, 11, 108, 12, 108, 1067, 3, 108, 1070, 8, 108, 1, 109, 1, 109, 4, 109, 1074, 8, 109, 11, 109, 12, 109, 1075, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 129, 4, 129, 1159, 8, 129, 11, 129, 12, 129, 1160, 1, 129, 1, 129, 3, 129, 1165, 8, 129, 1, 129, 4, 129, 1168, 8, 129, 11, 129, 12, 129, 1169, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 
148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 4, 162, 1309, 8, 162, 11, 162, 12, 162, 1310, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 197, 2, 607, 676, 0, 198, 15, 1, 17, 2, 19, 3, 21, 4, 23, 5, 25, 6, 27, 7, 29, 8, 31, 9, 33, 10, 35, 11, 37, 12, 39, 13, 41, 14, 43, 15, 45, 16, 47, 17, 49, 18, 51, 19, 53, 20, 55, 21, 
57, 22, 59, 23, 61, 24, 63, 0, 65, 0, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 0, 81, 0, 83, 25, 85, 26, 87, 27, 89, 28, 91, 29, 93, 30, 95, 31, 97, 32, 99, 33, 101, 34, 103, 35, 105, 36, 107, 37, 109, 38, 111, 39, 113, 40, 115, 41, 117, 42, 119, 43, 121, 44, 123, 45, 125, 46, 127, 47, 129, 48, 131, 49, 133, 50, 135, 51, 137, 52, 139, 53, 141, 54, 143, 55, 145, 56, 147, 57, 149, 58, 151, 59, 153, 60, 155, 61, 157, 62, 159, 63, 161, 0, 163, 64, 165, 65, 167, 66, 169, 67, 171, 0, 173, 68, 175, 69, 177, 70, 179, 71, 181, 0, 183, 0, 185, 72, 187, 73, 189, 74, 191, 0, 193, 0, 195, 0, 197, 0, 199, 0, 201, 0, 203, 75, 205, 0, 207, 76, 209, 0, 211, 0, 213, 77, 215, 78, 217, 79, 219, 0, 221, 0, 223, 0, 225, 0, 227, 0, 229, 0, 231, 0, 233, 80, 235, 81, 237, 82, 239, 83, 241, 0, 243, 0, 245, 0, 247, 0, 249, 0, 251, 0, 253, 84, 255, 0, 257, 85, 259, 86, 261, 87, 263, 0, 265, 0, 267, 88, 269, 89, 271, 0, 273, 90, 275, 0, 277, 91, 279, 92, 281, 93, 283, 0, 285, 0, 287, 0, 289, 0, 291, 0, 293, 0, 295, 0, 297, 0, 299, 0, 301, 94, 303, 95, 305, 96, 307, 0, 309, 0, 311, 0, 313, 0, 315, 0, 317, 0, 319, 97, 321, 98, 323, 99, 325, 0, 327, 100, 329, 101, 331, 102, 333, 103, 335, 0, 337, 104, 339, 105, 341, 106, 343, 107, 345, 108, 347, 0, 349, 0, 351, 0, 353, 0, 355, 0, 357, 0, 359, 0, 361, 109, 363, 110, 365, 111, 367, 0, 369, 0, 371, 0, 373, 0, 375, 112, 377, 113, 379, 114, 381, 0, 383, 0, 385, 0, 387, 115, 389, 116, 391, 117, 393, 0, 395, 0, 397, 118, 399, 119, 401, 120, 403, 0, 405, 0, 407, 0, 409, 0, 15, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 35, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 
2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1500, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 1, 61, 1, 0, 0, 0, 1, 83, 1, 0, 0, 0, 1, 85, 1, 0, 0, 0, 1, 87, 1, 0, 0, 0, 1, 89, 1, 0, 0, 0, 1, 91, 1, 0, 0, 0, 1, 93, 1, 0, 0, 0, 1, 95, 1, 0, 0, 0, 1, 97, 1, 0, 0, 0, 1, 99, 1, 0, 0, 0, 1, 101, 1, 0, 0, 0, 1, 103, 1, 0, 0, 0, 1, 105, 1, 0, 0, 0, 1, 107, 1, 0, 0, 0, 1, 109, 1, 0, 0, 0, 1, 111, 1, 0, 0, 0, 1, 113, 1, 0, 0, 0, 1, 115, 1, 0, 0, 0, 1, 117, 1, 0, 0, 0, 1, 119, 1, 0, 0, 0, 1, 121, 1, 0, 0, 0, 1, 123, 1, 0, 0, 0, 1, 125, 1, 0, 0, 0, 1, 127, 1, 0, 0, 0, 1, 129, 1, 0, 0, 0, 1, 131, 1, 0, 0, 0, 1, 133, 1, 0, 0, 0, 1, 135, 1, 0, 0, 0, 1, 137, 1, 0, 0, 0, 1, 139, 1, 0, 0, 0, 1, 141, 1, 0, 0, 0, 1, 143, 1, 0, 0, 0, 1, 145, 1, 0, 0, 0, 1, 147, 1, 0, 0, 0, 1, 149, 1, 0, 0, 0, 1, 151, 1, 0, 0, 0, 1, 153, 1, 0, 0, 0, 1, 155, 1, 0, 0, 0, 1, 157, 1, 0, 0, 0, 1, 159, 1, 0, 0, 0, 1, 161, 1, 0, 0, 0, 1, 163, 1, 0, 0, 0, 1, 165, 1, 0, 0, 0, 1, 167, 1, 0, 0, 0, 1, 169, 1, 0, 0, 0, 1, 173, 1, 0, 0, 0, 1, 175, 1, 0, 0, 0, 1, 
177, 1, 0, 0, 0, 1, 179, 1, 0, 0, 0, 2, 181, 1, 0, 0, 0, 2, 183, 1, 0, 0, 0, 2, 185, 1, 0, 0, 0, 2, 187, 1, 0, 0, 0, 2, 189, 1, 0, 0, 0, 3, 191, 1, 0, 0, 0, 3, 193, 1, 0, 0, 0, 3, 195, 1, 0, 0, 0, 3, 197, 1, 0, 0, 0, 3, 199, 1, 0, 0, 0, 3, 201, 1, 0, 0, 0, 3, 203, 1, 0, 0, 0, 3, 207, 1, 0, 0, 0, 3, 209, 1, 0, 0, 0, 3, 211, 1, 0, 0, 0, 3, 213, 1, 0, 0, 0, 3, 215, 1, 0, 0, 0, 3, 217, 1, 0, 0, 0, 4, 219, 1, 0, 0, 0, 4, 221, 1, 0, 0, 0, 4, 223, 1, 0, 0, 0, 4, 225, 1, 0, 0, 0, 4, 227, 1, 0, 0, 0, 4, 233, 1, 0, 0, 0, 4, 235, 1, 0, 0, 0, 4, 237, 1, 0, 0, 0, 4, 239, 1, 0, 0, 0, 5, 241, 1, 0, 0, 0, 5, 243, 1, 0, 0, 0, 5, 245, 1, 0, 0, 0, 5, 247, 1, 0, 0, 0, 5, 249, 1, 0, 0, 0, 5, 251, 1, 0, 0, 0, 5, 253, 1, 0, 0, 0, 5, 255, 1, 0, 0, 0, 5, 257, 1, 0, 0, 0, 5, 259, 1, 0, 0, 0, 5, 261, 1, 0, 0, 0, 6, 263, 1, 0, 0, 0, 6, 265, 1, 0, 0, 0, 6, 267, 1, 0, 0, 0, 6, 269, 1, 0, 0, 0, 6, 273, 1, 0, 0, 0, 6, 275, 1, 0, 0, 0, 6, 277, 1, 0, 0, 0, 6, 279, 1, 0, 0, 0, 6, 281, 1, 0, 0, 0, 7, 283, 1, 0, 0, 0, 7, 285, 1, 0, 0, 0, 7, 287, 1, 0, 0, 0, 7, 289, 1, 0, 0, 0, 7, 291, 1, 0, 0, 0, 7, 293, 1, 0, 0, 0, 7, 295, 1, 0, 0, 0, 7, 297, 1, 0, 0, 0, 7, 299, 1, 0, 0, 0, 7, 301, 1, 0, 0, 0, 7, 303, 1, 0, 0, 0, 7, 305, 1, 0, 0, 0, 8, 307, 1, 0, 0, 0, 8, 309, 1, 0, 0, 0, 8, 311, 1, 0, 0, 0, 8, 313, 1, 0, 0, 0, 8, 315, 1, 0, 0, 0, 8, 317, 1, 0, 0, 0, 8, 319, 1, 0, 0, 0, 8, 321, 1, 0, 0, 0, 8, 323, 1, 0, 0, 0, 9, 325, 1, 0, 0, 0, 9, 327, 1, 0, 0, 0, 9, 329, 1, 0, 0, 0, 9, 331, 1, 0, 0, 0, 9, 333, 1, 0, 0, 0, 10, 335, 1, 0, 0, 0, 10, 337, 1, 0, 0, 0, 10, 339, 1, 0, 0, 0, 10, 341, 1, 0, 0, 0, 10, 343, 1, 0, 0, 0, 10, 345, 1, 0, 0, 0, 11, 347, 1, 0, 0, 0, 11, 349, 1, 0, 0, 0, 11, 351, 1, 0, 0, 0, 11, 353, 1, 0, 0, 0, 11, 355, 1, 0, 0, 0, 11, 357, 1, 0, 0, 0, 11, 359, 1, 0, 0, 0, 11, 361, 1, 0, 0, 0, 11, 363, 1, 0, 0, 0, 11, 365, 1, 0, 0, 0, 12, 367, 1, 0, 0, 0, 12, 369, 1, 0, 0, 0, 12, 371, 1, 0, 0, 0, 12, 373, 1, 0, 0, 0, 12, 375, 1, 0, 0, 0, 12, 377, 1, 0, 0, 0, 12, 379, 1, 0, 0, 0, 13, 381, 1, 0, 0, 
0, 13, 383, 1, 0, 0, 0, 13, 385, 1, 0, 0, 0, 13, 387, 1, 0, 0, 0, 13, 389, 1, 0, 0, 0, 13, 391, 1, 0, 0, 0, 14, 393, 1, 0, 0, 0, 14, 395, 1, 0, 0, 0, 14, 397, 1, 0, 0, 0, 14, 399, 1, 0, 0, 0, 14, 401, 1, 0, 0, 0, 14, 403, 1, 0, 0, 0, 14, 405, 1, 0, 0, 0, 14, 407, 1, 0, 0, 0, 14, 409, 1, 0, 0, 0, 15, 411, 1, 0, 0, 0, 17, 421, 1, 0, 0, 0, 19, 428, 1, 0, 0, 0, 21, 437, 1, 0, 0, 0, 23, 444, 1, 0, 0, 0, 25, 454, 1, 0, 0, 0, 27, 461, 1, 0, 0, 0, 29, 468, 1, 0, 0, 0, 31, 475, 1, 0, 0, 0, 33, 483, 1, 0, 0, 0, 35, 495, 1, 0, 0, 0, 37, 504, 1, 0, 0, 0, 39, 510, 1, 0, 0, 0, 41, 517, 1, 0, 0, 0, 43, 524, 1, 0, 0, 0, 45, 532, 1, 0, 0, 0, 47, 540, 1, 0, 0, 0, 49, 555, 1, 0, 0, 0, 51, 565, 1, 0, 0, 0, 53, 577, 1, 0, 0, 0, 55, 583, 1, 0, 0, 0, 57, 600, 1, 0, 0, 0, 59, 616, 1, 0, 0, 0, 61, 622, 1, 0, 0, 0, 63, 626, 1, 0, 0, 0, 65, 628, 1, 0, 0, 0, 67, 630, 1, 0, 0, 0, 69, 633, 1, 0, 0, 0, 71, 635, 1, 0, 0, 0, 73, 644, 1, 0, 0, 0, 75, 646, 1, 0, 0, 0, 77, 651, 1, 0, 0, 0, 79, 653, 1, 0, 0, 0, 81, 658, 1, 0, 0, 0, 83, 689, 1, 0, 0, 0, 85, 692, 1, 0, 0, 0, 87, 738, 1, 0, 0, 0, 89, 740, 1, 0, 0, 0, 91, 743, 1, 0, 0, 0, 93, 747, 1, 0, 0, 0, 95, 751, 1, 0, 0, 0, 97, 753, 1, 0, 0, 0, 99, 756, 1, 0, 0, 0, 101, 758, 1, 0, 0, 0, 103, 763, 1, 0, 0, 0, 105, 765, 1, 0, 0, 0, 107, 771, 1, 0, 0, 0, 109, 777, 1, 0, 0, 0, 111, 780, 1, 0, 0, 0, 113, 783, 1, 0, 0, 0, 115, 788, 1, 0, 0, 0, 117, 793, 1, 0, 0, 0, 119, 795, 1, 0, 0, 0, 121, 799, 1, 0, 0, 0, 123, 804, 1, 0, 0, 0, 125, 810, 1, 0, 0, 0, 127, 813, 1, 0, 0, 0, 129, 815, 1, 0, 0, 0, 131, 821, 1, 0, 0, 0, 133, 823, 1, 0, 0, 0, 135, 828, 1, 0, 0, 0, 137, 831, 1, 0, 0, 0, 139, 834, 1, 0, 0, 0, 141, 837, 1, 0, 0, 0, 143, 839, 1, 0, 0, 0, 145, 842, 1, 0, 0, 0, 147, 844, 1, 0, 0, 0, 149, 847, 1, 0, 0, 0, 151, 849, 1, 0, 0, 0, 153, 851, 1, 0, 0, 0, 155, 853, 1, 0, 0, 0, 157, 855, 1, 0, 0, 0, 159, 857, 1, 0, 0, 0, 161, 863, 1, 0, 0, 0, 163, 885, 1, 0, 0, 0, 165, 887, 1, 0, 0, 0, 167, 892, 1, 0, 0, 0, 169, 913, 1, 0, 0, 0, 171, 915, 1, 0, 0, 0, 173, 
923, 1, 0, 0, 0, 175, 925, 1, 0, 0, 0, 177, 929, 1, 0, 0, 0, 179, 933, 1, 0, 0, 0, 181, 937, 1, 0, 0, 0, 183, 942, 1, 0, 0, 0, 185, 947, 1, 0, 0, 0, 187, 951, 1, 0, 0, 0, 189, 955, 1, 0, 0, 0, 191, 959, 1, 0, 0, 0, 193, 964, 1, 0, 0, 0, 195, 968, 1, 0, 0, 0, 197, 972, 1, 0, 0, 0, 199, 976, 1, 0, 0, 0, 201, 980, 1, 0, 0, 0, 203, 984, 1, 0, 0, 0, 205, 996, 1, 0, 0, 0, 207, 999, 1, 0, 0, 0, 209, 1003, 1, 0, 0, 0, 211, 1007, 1, 0, 0, 0, 213, 1011, 1, 0, 0, 0, 215, 1015, 1, 0, 0, 0, 217, 1019, 1, 0, 0, 0, 219, 1023, 1, 0, 0, 0, 221, 1028, 1, 0, 0, 0, 223, 1032, 1, 0, 0, 0, 225, 1036, 1, 0, 0, 0, 227, 1040, 1, 0, 0, 0, 229, 1048, 1, 0, 0, 0, 231, 1069, 1, 0, 0, 0, 233, 1073, 1, 0, 0, 0, 235, 1077, 1, 0, 0, 0, 237, 1081, 1, 0, 0, 0, 239, 1085, 1, 0, 0, 0, 241, 1089, 1, 0, 0, 0, 243, 1094, 1, 0, 0, 0, 245, 1098, 1, 0, 0, 0, 247, 1102, 1, 0, 0, 0, 249, 1106, 1, 0, 0, 0, 251, 1110, 1, 0, 0, 0, 253, 1114, 1, 0, 0, 0, 255, 1117, 1, 0, 0, 0, 257, 1121, 1, 0, 0, 0, 259, 1125, 1, 0, 0, 0, 261, 1129, 1, 0, 0, 0, 263, 1133, 1, 0, 0, 0, 265, 1138, 1, 0, 0, 0, 267, 1143, 1, 0, 0, 0, 269, 1148, 1, 0, 0, 0, 271, 1155, 1, 0, 0, 0, 273, 1164, 1, 0, 0, 0, 275, 1171, 1, 0, 0, 0, 277, 1175, 1, 0, 0, 0, 279, 1179, 1, 0, 0, 0, 281, 1183, 1, 0, 0, 0, 283, 1187, 1, 0, 0, 0, 285, 1193, 1, 0, 0, 0, 287, 1197, 1, 0, 0, 0, 289, 1201, 1, 0, 0, 0, 291, 1205, 1, 0, 0, 0, 293, 1209, 1, 0, 0, 0, 295, 1213, 1, 0, 0, 0, 297, 1217, 1, 0, 0, 0, 299, 1221, 1, 0, 0, 0, 301, 1225, 1, 0, 0, 0, 303, 1229, 1, 0, 0, 0, 305, 1233, 1, 0, 0, 0, 307, 1237, 1, 0, 0, 0, 309, 1242, 1, 0, 0, 0, 311, 1246, 1, 0, 0, 0, 313, 1250, 1, 0, 0, 0, 315, 1254, 1, 0, 0, 0, 317, 1258, 1, 0, 0, 0, 319, 1262, 1, 0, 0, 0, 321, 1266, 1, 0, 0, 0, 323, 1270, 1, 0, 0, 0, 325, 1274, 1, 0, 0, 0, 327, 1279, 1, 0, 0, 0, 329, 1284, 1, 0, 0, 0, 331, 1288, 1, 0, 0, 0, 333, 1292, 1, 0, 0, 0, 335, 1296, 1, 0, 0, 0, 337, 1301, 1, 0, 0, 0, 339, 1308, 1, 0, 0, 0, 341, 1312, 1, 0, 0, 0, 343, 1316, 1, 0, 0, 0, 345, 1320, 1, 0, 0, 0, 347, 1324, 1, 0, 0, 
0, 349, 1329, 1, 0, 0, 0, 351, 1333, 1, 0, 0, 0, 353, 1337, 1, 0, 0, 0, 355, 1341, 1, 0, 0, 0, 357, 1346, 1, 0, 0, 0, 359, 1350, 1, 0, 0, 0, 361, 1354, 1, 0, 0, 0, 363, 1358, 1, 0, 0, 0, 365, 1362, 1, 0, 0, 0, 367, 1366, 1, 0, 0, 0, 369, 1372, 1, 0, 0, 0, 371, 1376, 1, 0, 0, 0, 373, 1380, 1, 0, 0, 0, 375, 1384, 1, 0, 0, 0, 377, 1388, 1, 0, 0, 0, 379, 1392, 1, 0, 0, 0, 381, 1396, 1, 0, 0, 0, 383, 1401, 1, 0, 0, 0, 385, 1407, 1, 0, 0, 0, 387, 1413, 1, 0, 0, 0, 389, 1417, 1, 0, 0, 0, 391, 1421, 1, 0, 0, 0, 393, 1425, 1, 0, 0, 0, 395, 1431, 1, 0, 0, 0, 397, 1437, 1, 0, 0, 0, 399, 1441, 1, 0, 0, 0, 401, 1445, 1, 0, 0, 0, 403, 1449, 1, 0, 0, 0, 405, 1455, 1, 0, 0, 0, 407, 1461, 1, 0, 0, 0, 409, 1467, 1, 0, 0, 0, 411, 412, 7, 0, 0, 0, 412, 413, 7, 1, 0, 0, 413, 414, 7, 2, 0, 0, 414, 415, 7, 2, 0, 0, 415, 416, 7, 3, 0, 0, 416, 417, 7, 4, 0, 0, 417, 418, 7, 5, 0, 0, 418, 419, 1, 0, 0, 0, 419, 420, 6, 0, 0, 0, 420, 16, 1, 0, 0, 0, 421, 422, 7, 0, 0, 0, 422, 423, 7, 6, 0, 0, 423, 424, 7, 7, 0, 0, 424, 425, 7, 8, 0, 0, 425, 426, 1, 0, 0, 0, 426, 427, 6, 1, 1, 0, 427, 18, 1, 0, 0, 0, 428, 429, 7, 3, 0, 0, 429, 430, 7, 9, 0, 0, 430, 431, 7, 6, 0, 0, 431, 432, 7, 1, 0, 0, 432, 433, 7, 4, 0, 0, 433, 434, 7, 10, 0, 0, 434, 435, 1, 0, 0, 0, 435, 436, 6, 2, 2, 0, 436, 20, 1, 0, 0, 0, 437, 438, 7, 3, 0, 0, 438, 439, 7, 11, 0, 0, 439, 440, 7, 12, 0, 0, 440, 441, 7, 13, 0, 0, 441, 442, 1, 0, 0, 0, 442, 443, 6, 3, 0, 0, 443, 22, 1, 0, 0, 0, 444, 445, 7, 3, 0, 0, 445, 446, 7, 14, 0, 0, 446, 447, 7, 8, 0, 0, 447, 448, 7, 13, 0, 0, 448, 449, 7, 12, 0, 0, 449, 450, 7, 1, 0, 0, 450, 451, 7, 9, 0, 0, 451, 452, 1, 0, 0, 0, 452, 453, 6, 4, 3, 0, 453, 24, 1, 0, 0, 0, 454, 455, 7, 15, 0, 0, 455, 456, 7, 6, 0, 0, 456, 457, 7, 7, 0, 0, 457, 458, 7, 16, 0, 0, 458, 459, 1, 0, 0, 0, 459, 460, 6, 5, 4, 0, 460, 26, 1, 0, 0, 0, 461, 462, 7, 17, 0, 0, 462, 463, 7, 6, 0, 0, 463, 464, 7, 7, 0, 0, 464, 465, 7, 18, 0, 0, 465, 466, 1, 0, 0, 0, 466, 467, 6, 6, 0, 0, 467, 28, 1, 0, 0, 0, 468, 469, 7, 18, 0, 0, 
469, 470, 7, 3, 0, 0, 470, 471, 7, 3, 0, 0, 471, 472, 7, 8, 0, 0, 472, 473, 1, 0, 0, 0, 473, 474, 6, 7, 1, 0, 474, 30, 1, 0, 0, 0, 475, 476, 7, 13, 0, 0, 476, 477, 7, 1, 0, 0, 477, 478, 7, 16, 0, 0, 478, 479, 7, 1, 0, 0, 479, 480, 7, 5, 0, 0, 480, 481, 1, 0, 0, 0, 481, 482, 6, 8, 0, 0, 482, 32, 1, 0, 0, 0, 483, 484, 7, 16, 0, 0, 484, 485, 7, 11, 0, 0, 485, 486, 5, 95, 0, 0, 486, 487, 7, 3, 0, 0, 487, 488, 7, 14, 0, 0, 488, 489, 7, 8, 0, 0, 489, 490, 7, 12, 0, 0, 490, 491, 7, 9, 0, 0, 491, 492, 7, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 494, 6, 9, 5, 0, 494, 34, 1, 0, 0, 0, 495, 496, 7, 6, 0, 0, 496, 497, 7, 3, 0, 0, 497, 498, 7, 9, 0, 0, 498, 499, 7, 12, 0, 0, 499, 500, 7, 16, 0, 0, 500, 501, 7, 3, 0, 0, 501, 502, 1, 0, 0, 0, 502, 503, 6, 10, 6, 0, 503, 36, 1, 0, 0, 0, 504, 505, 7, 6, 0, 0, 505, 506, 7, 7, 0, 0, 506, 507, 7, 19, 0, 0, 507, 508, 1, 0, 0, 0, 508, 509, 6, 11, 0, 0, 509, 38, 1, 0, 0, 0, 510, 511, 7, 2, 0, 0, 511, 512, 7, 10, 0, 0, 512, 513, 7, 7, 0, 0, 513, 514, 7, 19, 0, 0, 514, 515, 1, 0, 0, 0, 515, 516, 6, 12, 7, 0, 516, 40, 1, 0, 0, 0, 517, 518, 7, 2, 0, 0, 518, 519, 7, 7, 0, 0, 519, 520, 7, 6, 0, 0, 520, 521, 7, 5, 0, 0, 521, 522, 1, 0, 0, 0, 522, 523, 6, 13, 0, 0, 523, 42, 1, 0, 0, 0, 524, 525, 7, 2, 0, 0, 525, 526, 7, 5, 0, 0, 526, 527, 7, 12, 0, 0, 527, 528, 7, 5, 0, 0, 528, 529, 7, 2, 0, 0, 529, 530, 1, 0, 0, 0, 530, 531, 6, 14, 0, 0, 531, 44, 1, 0, 0, 0, 532, 533, 7, 19, 0, 0, 533, 534, 7, 10, 0, 0, 534, 535, 7, 3, 0, 0, 535, 536, 7, 6, 0, 0, 536, 537, 7, 3, 0, 0, 537, 538, 1, 0, 0, 0, 538, 539, 6, 15, 0, 0, 539, 46, 1, 0, 0, 0, 540, 541, 4, 16, 0, 0, 541, 542, 7, 1, 0, 0, 542, 543, 7, 9, 0, 0, 543, 544, 7, 13, 0, 0, 544, 545, 7, 1, 0, 0, 545, 546, 7, 9, 0, 0, 546, 547, 7, 3, 0, 0, 547, 548, 7, 2, 0, 0, 548, 549, 7, 5, 0, 0, 549, 550, 7, 12, 0, 0, 550, 551, 7, 5, 0, 0, 551, 552, 7, 2, 0, 0, 552, 553, 1, 0, 0, 0, 553, 554, 6, 16, 0, 0, 554, 48, 1, 0, 0, 0, 555, 556, 4, 17, 1, 0, 556, 557, 7, 13, 0, 0, 557, 558, 7, 7, 0, 0, 558, 559, 7, 7, 0, 0, 
559, 560, 7, 18, 0, 0, 560, 561, 7, 20, 0, 0, 561, 562, 7, 8, 0, 0, 562, 563, 1, 0, 0, 0, 563, 564, 6, 17, 8, 0, 564, 50, 1, 0, 0, 0, 565, 566, 4, 18, 2, 0, 566, 567, 7, 16, 0, 0, 567, 568, 7, 3, 0, 0, 568, 569, 7, 5, 0, 0, 569, 570, 7, 6, 0, 0, 570, 571, 7, 1, 0, 0, 571, 572, 7, 4, 0, 0, 572, 573, 7, 2, 0, 0, 573, 574, 1, 0, 0, 0, 574, 575, 6, 18, 9, 0, 575, 52, 1, 0, 0, 0, 576, 578, 8, 21, 0, 0, 577, 576, 1, 0, 0, 0, 578, 579, 1, 0, 0, 0, 579, 577, 1, 0, 0, 0, 579, 580, 1, 0, 0, 0, 580, 581, 1, 0, 0, 0, 581, 582, 6, 19, 0, 0, 582, 54, 1, 0, 0, 0, 583, 584, 5, 47, 0, 0, 584, 585, 5, 47, 0, 0, 585, 589, 1, 0, 0, 0, 586, 588, 8, 22, 0, 0, 587, 586, 1, 0, 0, 0, 588, 591, 1, 0, 0, 0, 589, 587, 1, 0, 0, 0, 589, 590, 1, 0, 0, 0, 590, 593, 1, 0, 0, 0, 591, 589, 1, 0, 0, 0, 592, 594, 5, 13, 0, 0, 593, 592, 1, 0, 0, 0, 593, 594, 1, 0, 0, 0, 594, 596, 1, 0, 0, 0, 595, 597, 5, 10, 0, 0, 596, 595, 1, 0, 0, 0, 596, 597, 1, 0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 599, 6, 20, 10, 0, 599, 56, 1, 0, 0, 0, 600, 601, 5, 47, 0, 0, 601, 602, 5, 42, 0, 0, 602, 607, 1, 0, 0, 0, 603, 606, 3, 57, 21, 0, 604, 606, 9, 0, 0, 0, 605, 603, 1, 0, 0, 0, 605, 604, 1, 0, 0, 0, 606, 609, 1, 0, 0, 0, 607, 608, 1, 0, 0, 0, 607, 605, 1, 0, 0, 0, 608, 610, 1, 0, 0, 0, 609, 607, 1, 0, 0, 0, 610, 611, 5, 42, 0, 0, 611, 612, 5, 47, 0, 0, 612, 613, 1, 0, 0, 0, 613, 614, 6, 21, 10, 0, 614, 58, 1, 0, 0, 0, 615, 617, 7, 23, 0, 0, 616, 615, 1, 0, 0, 0, 617, 618, 1, 0, 0, 0, 618, 616, 1, 0, 0, 0, 618, 619, 1, 0, 0, 0, 619, 620, 1, 0, 0, 0, 620, 621, 6, 22, 10, 0, 621, 60, 1, 0, 0, 0, 622, 623, 5, 124, 0, 0, 623, 624, 1, 0, 0, 0, 624, 625, 6, 23, 11, 0, 625, 62, 1, 0, 0, 0, 626, 627, 7, 24, 0, 0, 627, 64, 1, 0, 0, 0, 628, 629, 7, 25, 0, 0, 629, 66, 1, 0, 0, 0, 630, 631, 5, 92, 0, 0, 631, 632, 7, 26, 0, 0, 632, 68, 1, 0, 0, 0, 633, 634, 8, 27, 0, 0, 634, 70, 1, 0, 0, 0, 635, 637, 7, 3, 0, 0, 636, 638, 7, 28, 0, 0, 637, 636, 1, 0, 0, 0, 637, 638, 1, 0, 0, 0, 638, 640, 1, 0, 0, 0, 639, 641, 3, 63, 24, 0, 640, 639, 1, 
0, 0, 0, 641, 642, 1, 0, 0, 0, 642, 640, 1, 0, 0, 0, 642, 643, 1, 0, 0, 0, 643, 72, 1, 0, 0, 0, 644, 645, 5, 64, 0, 0, 645, 74, 1, 0, 0, 0, 646, 647, 5, 96, 0, 0, 647, 76, 1, 0, 0, 0, 648, 652, 8, 29, 0, 0, 649, 650, 5, 96, 0, 0, 650, 652, 5, 96, 0, 0, 651, 648, 1, 0, 0, 0, 651, 649, 1, 0, 0, 0, 652, 78, 1, 0, 0, 0, 653, 654, 5, 95, 0, 0, 654, 80, 1, 0, 0, 0, 655, 659, 3, 65, 25, 0, 656, 659, 3, 63, 24, 0, 657, 659, 3, 79, 32, 0, 658, 655, 1, 0, 0, 0, 658, 656, 1, 0, 0, 0, 658, 657, 1, 0, 0, 0, 659, 82, 1, 0, 0, 0, 660, 665, 5, 34, 0, 0, 661, 664, 3, 67, 26, 0, 662, 664, 3, 69, 27, 0, 663, 661, 1, 0, 0, 0, 663, 662, 1, 0, 0, 0, 664, 667, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 665, 666, 1, 0, 0, 0, 666, 668, 1, 0, 0, 0, 667, 665, 1, 0, 0, 0, 668, 690, 5, 34, 0, 0, 669, 670, 5, 34, 0, 0, 670, 671, 5, 34, 0, 0, 671, 672, 5, 34, 0, 0, 672, 676, 1, 0, 0, 0, 673, 675, 8, 22, 0, 0, 674, 673, 1, 0, 0, 0, 675, 678, 1, 0, 0, 0, 676, 677, 1, 0, 0, 0, 676, 674, 1, 0, 0, 0, 677, 679, 1, 0, 0, 0, 678, 676, 1, 0, 0, 0, 679, 680, 5, 34, 0, 0, 680, 681, 5, 34, 0, 0, 681, 682, 5, 34, 0, 0, 682, 684, 1, 0, 0, 0, 683, 685, 5, 34, 0, 0, 684, 683, 1, 0, 0, 0, 684, 685, 1, 0, 0, 0, 685, 687, 1, 0, 0, 0, 686, 688, 5, 34, 0, 0, 687, 686, 1, 0, 0, 0, 687, 688, 1, 0, 0, 0, 688, 690, 1, 0, 0, 0, 689, 660, 1, 0, 0, 0, 689, 669, 1, 0, 0, 0, 690, 84, 1, 0, 0, 0, 691, 693, 3, 63, 24, 0, 692, 691, 1, 0, 0, 0, 693, 694, 1, 0, 0, 0, 694, 692, 1, 0, 0, 0, 694, 695, 1, 0, 0, 0, 695, 86, 1, 0, 0, 0, 696, 698, 3, 63, 24, 0, 697, 696, 1, 0, 0, 0, 698, 699, 1, 0, 0, 0, 699, 697, 1, 0, 0, 0, 699, 700, 1, 0, 0, 0, 700, 701, 1, 0, 0, 0, 701, 705, 3, 103, 44, 0, 702, 704, 3, 63, 24, 0, 703, 702, 1, 0, 0, 0, 704, 707, 1, 0, 0, 0, 705, 703, 1, 0, 0, 0, 705, 706, 1, 0, 0, 0, 706, 739, 1, 0, 0, 0, 707, 705, 1, 0, 0, 0, 708, 710, 3, 103, 44, 0, 709, 711, 3, 63, 24, 0, 710, 709, 1, 0, 0, 0, 711, 712, 1, 0, 0, 0, 712, 710, 1, 0, 0, 0, 712, 713, 1, 0, 0, 0, 713, 739, 1, 0, 0, 0, 714, 716, 3, 63, 24, 0, 715, 714, 1, 0, 0, 
0, 716, 717, 1, 0, 0, 0, 717, 715, 1, 0, 0, 0, 717, 718, 1, 0, 0, 0, 718, 726, 1, 0, 0, 0, 719, 723, 3, 103, 44, 0, 720, 722, 3, 63, 24, 0, 721, 720, 1, 0, 0, 0, 722, 725, 1, 0, 0, 0, 723, 721, 1, 0, 0, 0, 723, 724, 1, 0, 0, 0, 724, 727, 1, 0, 0, 0, 725, 723, 1, 0, 0, 0, 726, 719, 1, 0, 0, 0, 726, 727, 1, 0, 0, 0, 727, 728, 1, 0, 0, 0, 728, 729, 3, 71, 28, 0, 729, 739, 1, 0, 0, 0, 730, 732, 3, 103, 44, 0, 731, 733, 3, 63, 24, 0, 732, 731, 1, 0, 0, 0, 733, 734, 1, 0, 0, 0, 734, 732, 1, 0, 0, 0, 734, 735, 1, 0, 0, 0, 735, 736, 1, 0, 0, 0, 736, 737, 3, 71, 28, 0, 737, 739, 1, 0, 0, 0, 738, 697, 1, 0, 0, 0, 738, 708, 1, 0, 0, 0, 738, 715, 1, 0, 0, 0, 738, 730, 1, 0, 0, 0, 739, 88, 1, 0, 0, 0, 740, 741, 7, 30, 0, 0, 741, 742, 7, 31, 0, 0, 742, 90, 1, 0, 0, 0, 743, 744, 7, 12, 0, 0, 744, 745, 7, 9, 0, 0, 745, 746, 7, 0, 0, 0, 746, 92, 1, 0, 0, 0, 747, 748, 7, 12, 0, 0, 748, 749, 7, 2, 0, 0, 749, 750, 7, 4, 0, 0, 750, 94, 1, 0, 0, 0, 751, 752, 5, 61, 0, 0, 752, 96, 1, 0, 0, 0, 753, 754, 5, 58, 0, 0, 754, 755, 5, 58, 0, 0, 755, 98, 1, 0, 0, 0, 756, 757, 5, 44, 0, 0, 757, 100, 1, 0, 0, 0, 758, 759, 7, 0, 0, 0, 759, 760, 7, 3, 0, 0, 760, 761, 7, 2, 0, 0, 761, 762, 7, 4, 0, 0, 762, 102, 1, 0, 0, 0, 763, 764, 5, 46, 0, 0, 764, 104, 1, 0, 0, 0, 765, 766, 7, 15, 0, 0, 766, 767, 7, 12, 0, 0, 767, 768, 7, 13, 0, 0, 768, 769, 7, 2, 0, 0, 769, 770, 7, 3, 0, 0, 770, 106, 1, 0, 0, 0, 771, 772, 7, 15, 0, 0, 772, 773, 7, 1, 0, 0, 773, 774, 7, 6, 0, 0, 774, 775, 7, 2, 0, 0, 775, 776, 7, 5, 0, 0, 776, 108, 1, 0, 0, 0, 777, 778, 7, 1, 0, 0, 778, 779, 7, 9, 0, 0, 779, 110, 1, 0, 0, 0, 780, 781, 7, 1, 0, 0, 781, 782, 7, 2, 0, 0, 782, 112, 1, 0, 0, 0, 783, 784, 7, 13, 0, 0, 784, 785, 7, 12, 0, 0, 785, 786, 7, 2, 0, 0, 786, 787, 7, 5, 0, 0, 787, 114, 1, 0, 0, 0, 788, 789, 7, 13, 0, 0, 789, 790, 7, 1, 0, 0, 790, 791, 7, 18, 0, 0, 791, 792, 7, 3, 0, 0, 792, 116, 1, 0, 0, 0, 793, 794, 5, 40, 0, 0, 794, 118, 1, 0, 0, 0, 795, 796, 7, 9, 0, 0, 796, 797, 7, 7, 0, 0, 797, 798, 7, 5, 0, 0, 798, 120, 1, 
0, 0, 0, 799, 800, 7, 9, 0, 0, 800, 801, 7, 20, 0, 0, 801, 802, 7, 13, 0, 0, 802, 803, 7, 13, 0, 0, 803, 122, 1, 0, 0, 0, 804, 805, 7, 9, 0, 0, 805, 806, 7, 20, 0, 0, 806, 807, 7, 13, 0, 0, 807, 808, 7, 13, 0, 0, 808, 809, 7, 2, 0, 0, 809, 124, 1, 0, 0, 0, 810, 811, 7, 7, 0, 0, 811, 812, 7, 6, 0, 0, 812, 126, 1, 0, 0, 0, 813, 814, 5, 63, 0, 0, 814, 128, 1, 0, 0, 0, 815, 816, 7, 6, 0, 0, 816, 817, 7, 13, 0, 0, 817, 818, 7, 1, 0, 0, 818, 819, 7, 18, 0, 0, 819, 820, 7, 3, 0, 0, 820, 130, 1, 0, 0, 0, 821, 822, 5, 41, 0, 0, 822, 132, 1, 0, 0, 0, 823, 824, 7, 5, 0, 0, 824, 825, 7, 6, 0, 0, 825, 826, 7, 20, 0, 0, 826, 827, 7, 3, 0, 0, 827, 134, 1, 0, 0, 0, 828, 829, 5, 61, 0, 0, 829, 830, 5, 61, 0, 0, 830, 136, 1, 0, 0, 0, 831, 832, 5, 61, 0, 0, 832, 833, 5, 126, 0, 0, 833, 138, 1, 0, 0, 0, 834, 835, 5, 33, 0, 0, 835, 836, 5, 61, 0, 0, 836, 140, 1, 0, 0, 0, 837, 838, 5, 60, 0, 0, 838, 142, 1, 0, 0, 0, 839, 840, 5, 60, 0, 0, 840, 841, 5, 61, 0, 0, 841, 144, 1, 0, 0, 0, 842, 843, 5, 62, 0, 0, 843, 146, 1, 0, 0, 0, 844, 845, 5, 62, 0, 0, 845, 846, 5, 61, 0, 0, 846, 148, 1, 0, 0, 0, 847, 848, 5, 43, 0, 0, 848, 150, 1, 0, 0, 0, 849, 850, 5, 45, 0, 0, 850, 152, 1, 0, 0, 0, 851, 852, 5, 42, 0, 0, 852, 154, 1, 0, 0, 0, 853, 854, 5, 47, 0, 0, 854, 156, 1, 0, 0, 0, 855, 856, 5, 37, 0, 0, 856, 158, 1, 0, 0, 0, 857, 858, 7, 16, 0, 0, 858, 859, 7, 12, 0, 0, 859, 860, 7, 5, 0, 0, 860, 861, 7, 4, 0, 0, 861, 862, 7, 10, 0, 0, 862, 160, 1, 0, 0, 0, 863, 864, 4, 73, 3, 0, 864, 865, 3, 45, 15, 0, 865, 866, 1, 0, 0, 0, 866, 867, 6, 73, 12, 0, 867, 162, 1, 0, 0, 0, 868, 871, 3, 127, 56, 0, 869, 872, 3, 65, 25, 0, 870, 872, 3, 79, 32, 0, 871, 869, 1, 0, 0, 0, 871, 870, 1, 0, 0, 0, 872, 876, 1, 0, 0, 0, 873, 875, 3, 81, 33, 0, 874, 873, 1, 0, 0, 0, 875, 878, 1, 0, 0, 0, 876, 874, 1, 0, 0, 0, 876, 877, 1, 0, 0, 0, 877, 886, 1, 0, 0, 0, 878, 876, 1, 0, 0, 0, 879, 881, 3, 127, 56, 0, 880, 882, 3, 63, 24, 0, 881, 880, 1, 0, 0, 0, 882, 883, 1, 0, 0, 0, 883, 881, 1, 0, 0, 0, 883, 884, 1, 0, 0, 0, 
884, 886, 1, 0, 0, 0, 885, 868, 1, 0, 0, 0, 885, 879, 1, 0, 0, 0, 886, 164, 1, 0, 0, 0, 887, 888, 5, 91, 0, 0, 888, 889, 1, 0, 0, 0, 889, 890, 6, 75, 0, 0, 890, 891, 6, 75, 0, 0, 891, 166, 1, 0, 0, 0, 892, 893, 5, 93, 0, 0, 893, 894, 1, 0, 0, 0, 894, 895, 6, 76, 11, 0, 895, 896, 6, 76, 11, 0, 896, 168, 1, 0, 0, 0, 897, 901, 3, 65, 25, 0, 898, 900, 3, 81, 33, 0, 899, 898, 1, 0, 0, 0, 900, 903, 1, 0, 0, 0, 901, 899, 1, 0, 0, 0, 901, 902, 1, 0, 0, 0, 902, 914, 1, 0, 0, 0, 903, 901, 1, 0, 0, 0, 904, 907, 3, 79, 32, 0, 905, 907, 3, 73, 29, 0, 906, 904, 1, 0, 0, 0, 906, 905, 1, 0, 0, 0, 907, 909, 1, 0, 0, 0, 908, 910, 3, 81, 33, 0, 909, 908, 1, 0, 0, 0, 910, 911, 1, 0, 0, 0, 911, 909, 1, 0, 0, 0, 911, 912, 1, 0, 0, 0, 912, 914, 1, 0, 0, 0, 913, 897, 1, 0, 0, 0, 913, 906, 1, 0, 0, 0, 914, 170, 1, 0, 0, 0, 915, 917, 3, 75, 30, 0, 916, 918, 3, 77, 31, 0, 917, 916, 1, 0, 0, 0, 918, 919, 1, 0, 0, 0, 919, 917, 1, 0, 0, 0, 919, 920, 1, 0, 0, 0, 920, 921, 1, 0, 0, 0, 921, 922, 3, 75, 30, 0, 922, 172, 1, 0, 0, 0, 923, 924, 3, 171, 78, 0, 924, 174, 1, 0, 0, 0, 925, 926, 3, 55, 20, 0, 926, 927, 1, 0, 0, 0, 927, 928, 6, 80, 10, 0, 928, 176, 1, 0, 0, 0, 929, 930, 3, 57, 21, 0, 930, 931, 1, 0, 0, 0, 931, 932, 6, 81, 10, 0, 932, 178, 1, 0, 0, 0, 933, 934, 3, 59, 22, 0, 934, 935, 1, 0, 0, 0, 935, 936, 6, 82, 10, 0, 936, 180, 1, 0, 0, 0, 937, 938, 3, 165, 75, 0, 938, 939, 1, 0, 0, 0, 939, 940, 6, 83, 13, 0, 940, 941, 6, 83, 14, 0, 941, 182, 1, 0, 0, 0, 942, 943, 3, 61, 23, 0, 943, 944, 1, 0, 0, 0, 944, 945, 6, 84, 15, 0, 945, 946, 6, 84, 11, 0, 946, 184, 1, 0, 0, 0, 947, 948, 3, 59, 22, 0, 948, 949, 1, 0, 0, 0, 949, 950, 6, 85, 10, 0, 950, 186, 1, 0, 0, 0, 951, 952, 3, 55, 20, 0, 952, 953, 1, 0, 0, 0, 953, 954, 6, 86, 10, 0, 954, 188, 1, 0, 0, 0, 955, 956, 3, 57, 21, 0, 956, 957, 1, 0, 0, 0, 957, 958, 6, 87, 10, 0, 958, 190, 1, 0, 0, 0, 959, 960, 3, 61, 23, 0, 960, 961, 1, 0, 0, 0, 961, 962, 6, 88, 15, 0, 962, 963, 6, 88, 11, 0, 963, 192, 1, 0, 0, 0, 964, 965, 3, 165, 75, 0, 965, 966, 1, 
0, 0, 0, 966, 967, 6, 89, 13, 0, 967, 194, 1, 0, 0, 0, 968, 969, 3, 167, 76, 0, 969, 970, 1, 0, 0, 0, 970, 971, 6, 90, 16, 0, 971, 196, 1, 0, 0, 0, 972, 973, 3, 337, 161, 0, 973, 974, 1, 0, 0, 0, 974, 975, 6, 91, 17, 0, 975, 198, 1, 0, 0, 0, 976, 977, 3, 99, 42, 0, 977, 978, 1, 0, 0, 0, 978, 979, 6, 92, 18, 0, 979, 200, 1, 0, 0, 0, 980, 981, 3, 95, 40, 0, 981, 982, 1, 0, 0, 0, 982, 983, 6, 93, 19, 0, 983, 202, 1, 0, 0, 0, 984, 985, 7, 16, 0, 0, 985, 986, 7, 3, 0, 0, 986, 987, 7, 5, 0, 0, 987, 988, 7, 12, 0, 0, 988, 989, 7, 0, 0, 0, 989, 990, 7, 12, 0, 0, 990, 991, 7, 5, 0, 0, 991, 992, 7, 12, 0, 0, 992, 204, 1, 0, 0, 0, 993, 997, 8, 32, 0, 0, 994, 995, 5, 47, 0, 0, 995, 997, 8, 33, 0, 0, 996, 993, 1, 0, 0, 0, 996, 994, 1, 0, 0, 0, 997, 206, 1, 0, 0, 0, 998, 1000, 3, 205, 95, 0, 999, 998, 1, 0, 0, 0, 1000, 1001, 1, 0, 0, 0, 1001, 999, 1, 0, 0, 0, 1001, 1002, 1, 0, 0, 0, 1002, 208, 1, 0, 0, 0, 1003, 1004, 3, 207, 96, 0, 1004, 1005, 1, 0, 0, 0, 1005, 1006, 6, 97, 20, 0, 1006, 210, 1, 0, 0, 0, 1007, 1008, 3, 83, 34, 0, 1008, 1009, 1, 0, 0, 0, 1009, 1010, 6, 98, 21, 0, 1010, 212, 1, 0, 0, 0, 1011, 1012, 3, 55, 20, 0, 1012, 1013, 1, 0, 0, 0, 1013, 1014, 6, 99, 10, 0, 1014, 214, 1, 0, 0, 0, 1015, 1016, 3, 57, 21, 0, 1016, 1017, 1, 0, 0, 0, 1017, 1018, 6, 100, 10, 0, 1018, 216, 1, 0, 0, 0, 1019, 1020, 3, 59, 22, 0, 1020, 1021, 1, 0, 0, 0, 1021, 1022, 6, 101, 10, 0, 1022, 218, 1, 0, 0, 0, 1023, 1024, 3, 61, 23, 0, 1024, 1025, 1, 0, 0, 0, 1025, 1026, 6, 102, 15, 0, 1026, 1027, 6, 102, 11, 0, 1027, 220, 1, 0, 0, 0, 1028, 1029, 3, 103, 44, 0, 1029, 1030, 1, 0, 0, 0, 1030, 1031, 6, 103, 22, 0, 1031, 222, 1, 0, 0, 0, 1032, 1033, 3, 99, 42, 0, 1033, 1034, 1, 0, 0, 0, 1034, 1035, 6, 104, 18, 0, 1035, 224, 1, 0, 0, 0, 1036, 1037, 3, 127, 56, 0, 1037, 1038, 1, 0, 0, 0, 1038, 1039, 6, 105, 23, 0, 1039, 226, 1, 0, 0, 0, 1040, 1041, 3, 163, 74, 0, 1041, 1042, 1, 0, 0, 0, 1042, 1043, 6, 106, 24, 0, 1043, 228, 1, 0, 0, 0, 1044, 1049, 3, 65, 25, 0, 1045, 1049, 3, 63, 24, 0, 1046, 1049, 3, 
79, 32, 0, 1047, 1049, 3, 153, 69, 0, 1048, 1044, 1, 0, 0, 0, 1048, 1045, 1, 0, 0, 0, 1048, 1046, 1, 0, 0, 0, 1048, 1047, 1, 0, 0, 0, 1049, 230, 1, 0, 0, 0, 1050, 1053, 3, 65, 25, 0, 1051, 1053, 3, 153, 69, 0, 1052, 1050, 1, 0, 0, 0, 1052, 1051, 1, 0, 0, 0, 1053, 1057, 1, 0, 0, 0, 1054, 1056, 3, 229, 107, 0, 1055, 1054, 1, 0, 0, 0, 1056, 1059, 1, 0, 0, 0, 1057, 1055, 1, 0, 0, 0, 1057, 1058, 1, 0, 0, 0, 1058, 1070, 1, 0, 0, 0, 1059, 1057, 1, 0, 0, 0, 1060, 1063, 3, 79, 32, 0, 1061, 1063, 3, 73, 29, 0, 1062, 1060, 1, 0, 0, 0, 1062, 1061, 1, 0, 0, 0, 1063, 1065, 1, 0, 0, 0, 1064, 1066, 3, 229, 107, 0, 1065, 1064, 1, 0, 0, 0, 1066, 1067, 1, 0, 0, 0, 1067, 1065, 1, 0, 0, 0, 1067, 1068, 1, 0, 0, 0, 1068, 1070, 1, 0, 0, 0, 1069, 1052, 1, 0, 0, 0, 1069, 1062, 1, 0, 0, 0, 1070, 232, 1, 0, 0, 0, 1071, 1074, 3, 231, 108, 0, 1072, 1074, 3, 171, 78, 0, 1073, 1071, 1, 0, 0, 0, 1073, 1072, 1, 0, 0, 0, 1074, 1075, 1, 0, 0, 0, 1075, 1073, 1, 0, 0, 0, 1075, 1076, 1, 0, 0, 0, 1076, 234, 1, 0, 0, 0, 1077, 1078, 3, 55, 20, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1080, 6, 110, 10, 0, 1080, 236, 1, 0, 0, 0, 1081, 1082, 3, 57, 21, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1084, 6, 111, 10, 0, 1084, 238, 1, 0, 0, 0, 1085, 1086, 3, 59, 22, 0, 1086, 1087, 1, 0, 0, 0, 1087, 1088, 6, 112, 10, 0, 1088, 240, 1, 0, 0, 0, 1089, 1090, 3, 61, 23, 0, 1090, 1091, 1, 0, 0, 0, 1091, 1092, 6, 113, 15, 0, 1092, 1093, 6, 113, 11, 0, 1093, 242, 1, 0, 0, 0, 1094, 1095, 3, 95, 40, 0, 1095, 1096, 1, 0, 0, 0, 1096, 1097, 6, 114, 19, 0, 1097, 244, 1, 0, 0, 0, 1098, 1099, 3, 99, 42, 0, 1099, 1100, 1, 0, 0, 0, 1100, 1101, 6, 115, 18, 0, 1101, 246, 1, 0, 0, 0, 1102, 1103, 3, 103, 44, 0, 1103, 1104, 1, 0, 0, 0, 1104, 1105, 6, 116, 22, 0, 1105, 248, 1, 0, 0, 0, 1106, 1107, 3, 127, 56, 0, 1107, 1108, 1, 0, 0, 0, 1108, 1109, 6, 117, 23, 0, 1109, 250, 1, 0, 0, 0, 1110, 1111, 3, 163, 74, 0, 1111, 1112, 1, 0, 0, 0, 1112, 1113, 6, 118, 24, 0, 1113, 252, 1, 0, 0, 0, 1114, 1115, 7, 12, 0, 0, 1115, 1116, 7, 2, 0, 0, 1116, 254, 1, 0, 0, 0, 
1117, 1118, 3, 233, 109, 0, 1118, 1119, 1, 0, 0, 0, 1119, 1120, 6, 120, 25, 0, 1120, 256, 1, 0, 0, 0, 1121, 1122, 3, 55, 20, 0, 1122, 1123, 1, 0, 0, 0, 1123, 1124, 6, 121, 10, 0, 1124, 258, 1, 0, 0, 0, 1125, 1126, 3, 57, 21, 0, 1126, 1127, 1, 0, 0, 0, 1127, 1128, 6, 122, 10, 0, 1128, 260, 1, 0, 0, 0, 1129, 1130, 3, 59, 22, 0, 1130, 1131, 1, 0, 0, 0, 1131, 1132, 6, 123, 10, 0, 1132, 262, 1, 0, 0, 0, 1133, 1134, 3, 61, 23, 0, 1134, 1135, 1, 0, 0, 0, 1135, 1136, 6, 124, 15, 0, 1136, 1137, 6, 124, 11, 0, 1137, 264, 1, 0, 0, 0, 1138, 1139, 3, 165, 75, 0, 1139, 1140, 1, 0, 0, 0, 1140, 1141, 6, 125, 13, 0, 1141, 1142, 6, 125, 26, 0, 1142, 266, 1, 0, 0, 0, 1143, 1144, 7, 7, 0, 0, 1144, 1145, 7, 9, 0, 0, 1145, 1146, 1, 0, 0, 0, 1146, 1147, 6, 126, 27, 0, 1147, 268, 1, 0, 0, 0, 1148, 1149, 7, 19, 0, 0, 1149, 1150, 7, 1, 0, 0, 1150, 1151, 7, 5, 0, 0, 1151, 1152, 7, 10, 0, 0, 1152, 1153, 1, 0, 0, 0, 1153, 1154, 6, 127, 27, 0, 1154, 270, 1, 0, 0, 0, 1155, 1156, 8, 34, 0, 0, 1156, 272, 1, 0, 0, 0, 1157, 1159, 3, 271, 128, 0, 1158, 1157, 1, 0, 0, 0, 1159, 1160, 1, 0, 0, 0, 1160, 1158, 1, 0, 0, 0, 1160, 1161, 1, 0, 0, 0, 1161, 1162, 1, 0, 0, 0, 1162, 1163, 3, 337, 161, 0, 1163, 1165, 1, 0, 0, 0, 1164, 1158, 1, 0, 0, 0, 1164, 1165, 1, 0, 0, 0, 1165, 1167, 1, 0, 0, 0, 1166, 1168, 3, 271, 128, 0, 1167, 1166, 1, 0, 0, 0, 1168, 1169, 1, 0, 0, 0, 1169, 1167, 1, 0, 0, 0, 1169, 1170, 1, 0, 0, 0, 1170, 274, 1, 0, 0, 0, 1171, 1172, 3, 273, 129, 0, 1172, 1173, 1, 0, 0, 0, 1173, 1174, 6, 130, 28, 0, 1174, 276, 1, 0, 0, 0, 1175, 1176, 3, 55, 20, 0, 1176, 1177, 1, 0, 0, 0, 1177, 1178, 6, 131, 10, 0, 1178, 278, 1, 0, 0, 0, 1179, 1180, 3, 57, 21, 0, 1180, 1181, 1, 0, 0, 0, 1181, 1182, 6, 132, 10, 0, 1182, 280, 1, 0, 0, 0, 1183, 1184, 3, 59, 22, 0, 1184, 1185, 1, 0, 0, 0, 1185, 1186, 6, 133, 10, 0, 1186, 282, 1, 0, 0, 0, 1187, 1188, 3, 61, 23, 0, 1188, 1189, 1, 0, 0, 0, 1189, 1190, 6, 134, 15, 0, 1190, 1191, 6, 134, 11, 0, 1191, 1192, 6, 134, 11, 0, 1192, 284, 1, 0, 0, 0, 1193, 1194, 3, 95, 40, 0, 
1194, 1195, 1, 0, 0, 0, 1195, 1196, 6, 135, 19, 0, 1196, 286, 1, 0, 0, 0, 1197, 1198, 3, 99, 42, 0, 1198, 1199, 1, 0, 0, 0, 1199, 1200, 6, 136, 18, 0, 1200, 288, 1, 0, 0, 0, 1201, 1202, 3, 103, 44, 0, 1202, 1203, 1, 0, 0, 0, 1203, 1204, 6, 137, 22, 0, 1204, 290, 1, 0, 0, 0, 1205, 1206, 3, 269, 127, 0, 1206, 1207, 1, 0, 0, 0, 1207, 1208, 6, 138, 29, 0, 1208, 292, 1, 0, 0, 0, 1209, 1210, 3, 233, 109, 0, 1210, 1211, 1, 0, 0, 0, 1211, 1212, 6, 139, 25, 0, 1212, 294, 1, 0, 0, 0, 1213, 1214, 3, 173, 79, 0, 1214, 1215, 1, 0, 0, 0, 1215, 1216, 6, 140, 30, 0, 1216, 296, 1, 0, 0, 0, 1217, 1218, 3, 127, 56, 0, 1218, 1219, 1, 0, 0, 0, 1219, 1220, 6, 141, 23, 0, 1220, 298, 1, 0, 0, 0, 1221, 1222, 3, 163, 74, 0, 1222, 1223, 1, 0, 0, 0, 1223, 1224, 6, 142, 24, 0, 1224, 300, 1, 0, 0, 0, 1225, 1226, 3, 55, 20, 0, 1226, 1227, 1, 0, 0, 0, 1227, 1228, 6, 143, 10, 0, 1228, 302, 1, 0, 0, 0, 1229, 1230, 3, 57, 21, 0, 1230, 1231, 1, 0, 0, 0, 1231, 1232, 6, 144, 10, 0, 1232, 304, 1, 0, 0, 0, 1233, 1234, 3, 59, 22, 0, 1234, 1235, 1, 0, 0, 0, 1235, 1236, 6, 145, 10, 0, 1236, 306, 1, 0, 0, 0, 1237, 1238, 3, 61, 23, 0, 1238, 1239, 1, 0, 0, 0, 1239, 1240, 6, 146, 15, 0, 1240, 1241, 6, 146, 11, 0, 1241, 308, 1, 0, 0, 0, 1242, 1243, 3, 103, 44, 0, 1243, 1244, 1, 0, 0, 0, 1244, 1245, 6, 147, 22, 0, 1245, 310, 1, 0, 0, 0, 1246, 1247, 3, 127, 56, 0, 1247, 1248, 1, 0, 0, 0, 1248, 1249, 6, 148, 23, 0, 1249, 312, 1, 0, 0, 0, 1250, 1251, 3, 163, 74, 0, 1251, 1252, 1, 0, 0, 0, 1252, 1253, 6, 149, 24, 0, 1253, 314, 1, 0, 0, 0, 1254, 1255, 3, 173, 79, 0, 1255, 1256, 1, 0, 0, 0, 1256, 1257, 6, 150, 30, 0, 1257, 316, 1, 0, 0, 0, 1258, 1259, 3, 169, 77, 0, 1259, 1260, 1, 0, 0, 0, 1260, 1261, 6, 151, 31, 0, 1261, 318, 1, 0, 0, 0, 1262, 1263, 3, 55, 20, 0, 1263, 1264, 1, 0, 0, 0, 1264, 1265, 6, 152, 10, 0, 1265, 320, 1, 0, 0, 0, 1266, 1267, 3, 57, 21, 0, 1267, 1268, 1, 0, 0, 0, 1268, 1269, 6, 153, 10, 0, 1269, 322, 1, 0, 0, 0, 1270, 1271, 3, 59, 22, 0, 1271, 1272, 1, 0, 0, 0, 1272, 1273, 6, 154, 10, 0, 1273, 
324, 1, 0, 0, 0, 1274, 1275, 3, 61, 23, 0, 1275, 1276, 1, 0, 0, 0, 1276, 1277, 6, 155, 15, 0, 1277, 1278, 6, 155, 11, 0, 1278, 326, 1, 0, 0, 0, 1279, 1280, 7, 1, 0, 0, 1280, 1281, 7, 9, 0, 0, 1281, 1282, 7, 15, 0, 0, 1282, 1283, 7, 7, 0, 0, 1283, 328, 1, 0, 0, 0, 1284, 1285, 3, 55, 20, 0, 1285, 1286, 1, 0, 0, 0, 1286, 1287, 6, 157, 10, 0, 1287, 330, 1, 0, 0, 0, 1288, 1289, 3, 57, 21, 0, 1289, 1290, 1, 0, 0, 0, 1290, 1291, 6, 158, 10, 0, 1291, 332, 1, 0, 0, 0, 1292, 1293, 3, 59, 22, 0, 1293, 1294, 1, 0, 0, 0, 1294, 1295, 6, 159, 10, 0, 1295, 334, 1, 0, 0, 0, 1296, 1297, 3, 167, 76, 0, 1297, 1298, 1, 0, 0, 0, 1298, 1299, 6, 160, 16, 0, 1299, 1300, 6, 160, 11, 0, 1300, 336, 1, 0, 0, 0, 1301, 1302, 5, 58, 0, 0, 1302, 338, 1, 0, 0, 0, 1303, 1309, 3, 73, 29, 0, 1304, 1309, 3, 63, 24, 0, 1305, 1309, 3, 103, 44, 0, 1306, 1309, 3, 65, 25, 0, 1307, 1309, 3, 79, 32, 0, 1308, 1303, 1, 0, 0, 0, 1308, 1304, 1, 0, 0, 0, 1308, 1305, 1, 0, 0, 0, 1308, 1306, 1, 0, 0, 0, 1308, 1307, 1, 0, 0, 0, 1309, 1310, 1, 0, 0, 0, 1310, 1308, 1, 0, 0, 0, 1310, 1311, 1, 0, 0, 0, 1311, 340, 1, 0, 0, 0, 1312, 1313, 3, 55, 20, 0, 1313, 1314, 1, 0, 0, 0, 1314, 1315, 6, 163, 10, 0, 1315, 342, 1, 0, 0, 0, 1316, 1317, 3, 57, 21, 0, 1317, 1318, 1, 0, 0, 0, 1318, 1319, 6, 164, 10, 0, 1319, 344, 1, 0, 0, 0, 1320, 1321, 3, 59, 22, 0, 1321, 1322, 1, 0, 0, 0, 1322, 1323, 6, 165, 10, 0, 1323, 346, 1, 0, 0, 0, 1324, 1325, 3, 61, 23, 0, 1325, 1326, 1, 0, 0, 0, 1326, 1327, 6, 166, 15, 0, 1327, 1328, 6, 166, 11, 0, 1328, 348, 1, 0, 0, 0, 1329, 1330, 3, 337, 161, 0, 1330, 1331, 1, 0, 0, 0, 1331, 1332, 6, 167, 17, 0, 1332, 350, 1, 0, 0, 0, 1333, 1334, 3, 99, 42, 0, 1334, 1335, 1, 0, 0, 0, 1335, 1336, 6, 168, 18, 0, 1336, 352, 1, 0, 0, 0, 1337, 1338, 3, 103, 44, 0, 1338, 1339, 1, 0, 0, 0, 1339, 1340, 6, 169, 22, 0, 1340, 354, 1, 0, 0, 0, 1341, 1342, 3, 267, 126, 0, 1342, 1343, 1, 0, 0, 0, 1343, 1344, 6, 170, 32, 0, 1344, 1345, 6, 170, 33, 0, 1345, 356, 1, 0, 0, 0, 1346, 1347, 3, 207, 96, 0, 1347, 1348, 1, 0, 0, 0, 
1348, 1349, 6, 171, 20, 0, 1349, 358, 1, 0, 0, 0, 1350, 1351, 3, 83, 34, 0, 1351, 1352, 1, 0, 0, 0, 1352, 1353, 6, 172, 21, 0, 1353, 360, 1, 0, 0, 0, 1354, 1355, 3, 55, 20, 0, 1355, 1356, 1, 0, 0, 0, 1356, 1357, 6, 173, 10, 0, 1357, 362, 1, 0, 0, 0, 1358, 1359, 3, 57, 21, 0, 1359, 1360, 1, 0, 0, 0, 1360, 1361, 6, 174, 10, 0, 1361, 364, 1, 0, 0, 0, 1362, 1363, 3, 59, 22, 0, 1363, 1364, 1, 0, 0, 0, 1364, 1365, 6, 175, 10, 0, 1365, 366, 1, 0, 0, 0, 1366, 1367, 3, 61, 23, 0, 1367, 1368, 1, 0, 0, 0, 1368, 1369, 6, 176, 15, 0, 1369, 1370, 6, 176, 11, 0, 1370, 1371, 6, 176, 11, 0, 1371, 368, 1, 0, 0, 0, 1372, 1373, 3, 99, 42, 0, 1373, 1374, 1, 0, 0, 0, 1374, 1375, 6, 177, 18, 0, 1375, 370, 1, 0, 0, 0, 1376, 1377, 3, 103, 44, 0, 1377, 1378, 1, 0, 0, 0, 1378, 1379, 6, 178, 22, 0, 1379, 372, 1, 0, 0, 0, 1380, 1381, 3, 233, 109, 0, 1381, 1382, 1, 0, 0, 0, 1382, 1383, 6, 179, 25, 0, 1383, 374, 1, 0, 0, 0, 1384, 1385, 3, 55, 20, 0, 1385, 1386, 1, 0, 0, 0, 1386, 1387, 6, 180, 10, 0, 1387, 376, 1, 0, 0, 0, 1388, 1389, 3, 57, 21, 0, 1389, 1390, 1, 0, 0, 0, 1390, 1391, 6, 181, 10, 0, 1391, 378, 1, 0, 0, 0, 1392, 1393, 3, 59, 22, 0, 1393, 1394, 1, 0, 0, 0, 1394, 1395, 6, 182, 10, 0, 1395, 380, 1, 0, 0, 0, 1396, 1397, 3, 61, 23, 0, 1397, 1398, 1, 0, 0, 0, 1398, 1399, 6, 183, 15, 0, 1399, 1400, 6, 183, 11, 0, 1400, 382, 1, 0, 0, 0, 1401, 1402, 3, 207, 96, 0, 1402, 1403, 1, 0, 0, 0, 1403, 1404, 6, 184, 20, 0, 1404, 1405, 6, 184, 11, 0, 1405, 1406, 6, 184, 34, 0, 1406, 384, 1, 0, 0, 0, 1407, 1408, 3, 83, 34, 0, 1408, 1409, 1, 0, 0, 0, 1409, 1410, 6, 185, 21, 0, 1410, 1411, 6, 185, 11, 0, 1411, 1412, 6, 185, 34, 0, 1412, 386, 1, 0, 0, 0, 1413, 1414, 3, 55, 20, 0, 1414, 1415, 1, 0, 0, 0, 1415, 1416, 6, 186, 10, 0, 1416, 388, 1, 0, 0, 0, 1417, 1418, 3, 57, 21, 0, 1418, 1419, 1, 0, 0, 0, 1419, 1420, 6, 187, 10, 0, 1420, 390, 1, 0, 0, 0, 1421, 1422, 3, 59, 22, 0, 1422, 1423, 1, 0, 0, 0, 1423, 1424, 6, 188, 10, 0, 1424, 392, 1, 0, 0, 0, 1425, 1426, 3, 337, 161, 0, 1426, 1427, 1, 0, 0, 0, 
1427, 1428, 6, 189, 17, 0, 1428, 1429, 6, 189, 11, 0, 1429, 1430, 6, 189, 9, 0, 1430, 394, 1, 0, 0, 0, 1431, 1432, 3, 99, 42, 0, 1432, 1433, 1, 0, 0, 0, 1433, 1434, 6, 190, 18, 0, 1434, 1435, 6, 190, 11, 0, 1435, 1436, 6, 190, 9, 0, 1436, 396, 1, 0, 0, 0, 1437, 1438, 3, 55, 20, 0, 1438, 1439, 1, 0, 0, 0, 1439, 1440, 6, 191, 10, 0, 1440, 398, 1, 0, 0, 0, 1441, 1442, 3, 57, 21, 0, 1442, 1443, 1, 0, 0, 0, 1443, 1444, 6, 192, 10, 0, 1444, 400, 1, 0, 0, 0, 1445, 1446, 3, 59, 22, 0, 1446, 1447, 1, 0, 0, 0, 1447, 1448, 6, 193, 10, 0, 1448, 402, 1, 0, 0, 0, 1449, 1450, 3, 173, 79, 0, 1450, 1451, 1, 0, 0, 0, 1451, 1452, 6, 194, 11, 0, 1452, 1453, 6, 194, 0, 0, 1453, 1454, 6, 194, 30, 0, 1454, 404, 1, 0, 0, 0, 1455, 1456, 3, 169, 77, 0, 1456, 1457, 1, 0, 0, 0, 1457, 1458, 6, 195, 11, 0, 1458, 1459, 6, 195, 0, 0, 1459, 1460, 6, 195, 31, 0, 1460, 406, 1, 0, 0, 0, 1461, 1462, 3, 89, 37, 0, 1462, 1463, 1, 0, 0, 0, 1463, 1464, 6, 196, 11, 0, 1464, 1465, 6, 196, 0, 0, 1465, 1466, 6, 196, 35, 0, 1466, 408, 1, 0, 0, 0, 1467, 1468, 3, 61, 23, 0, 1468, 1469, 1, 0, 0, 0, 1469, 1470, 6, 197, 15, 0, 1470, 1471, 6, 197, 11, 0, 1471, 410, 1, 0, 0, 0, 65, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 579, 589, 593, 596, 605, 607, 618, 637, 642, 651, 658, 663, 665, 676, 684, 687, 689, 694, 699, 705, 712, 717, 723, 726, 734, 738, 871, 876, 883, 885, 901, 906, 911, 913, 919, 996, 1001, 1048, 1052, 1057, 1062, 1067, 1069, 1073, 1075, 1160, 1164, 1169, 1308, 1310, 36, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 16, 0, 7, 65, 0, 5, 0, 0, 7, 24, 0, 7, 66, 0, 7, 104, 0, 7, 33, 0, 7, 31, 0, 7, 76, 0, 7, 25, 0, 7, 35, 0, 7, 47, 0, 7, 64, 0, 7, 80, 0, 5, 10, 0, 5, 7, 0, 7, 90, 0, 7, 89, 0, 7, 68, 0, 7, 67, 0, 7, 88, 0, 5, 12, 0, 5, 14, 0, 7, 28, 0] \ No newline at end of file +[4, 0, 120, 1471, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 
2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 
140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 4, 
19, 578, 8, 19, 11, 19, 12, 19, 579, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 588, 8, 20, 10, 20, 12, 20, 591, 9, 20, 1, 20, 3, 20, 594, 8, 20, 1, 20, 3, 20, 597, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 606, 8, 21, 10, 21, 12, 21, 609, 9, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 4, 22, 617, 8, 22, 11, 22, 12, 22, 618, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 28, 1, 28, 3, 28, 638, 8, 28, 1, 28, 4, 28, 641, 8, 28, 11, 28, 12, 28, 642, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 3, 31, 652, 8, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 659, 8, 33, 1, 34, 1, 34, 1, 34, 5, 34, 664, 8, 34, 10, 34, 12, 34, 667, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 675, 8, 34, 10, 34, 12, 34, 678, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 3, 34, 685, 8, 34, 1, 34, 3, 34, 688, 8, 34, 3, 34, 690, 8, 34, 1, 35, 4, 35, 693, 8, 35, 11, 35, 12, 35, 694, 1, 36, 4, 36, 698, 8, 36, 11, 36, 12, 36, 699, 1, 36, 1, 36, 5, 36, 704, 8, 36, 10, 36, 12, 36, 707, 9, 36, 1, 36, 1, 36, 4, 36, 711, 8, 36, 11, 36, 12, 36, 712, 1, 36, 4, 36, 716, 8, 36, 11, 36, 12, 36, 717, 1, 36, 1, 36, 5, 36, 722, 8, 36, 10, 36, 12, 36, 725, 9, 36, 3, 36, 727, 8, 36, 1, 36, 1, 36, 1, 36, 1, 36, 4, 36, 733, 8, 36, 11, 36, 12, 36, 734, 1, 36, 1, 36, 3, 36, 739, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 61, 
1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 3, 74, 871, 8, 74, 1, 74, 5, 74, 874, 8, 74, 10, 74, 12, 74, 877, 9, 74, 1, 74, 1, 74, 4, 74, 881, 8, 74, 11, 74, 12, 74, 882, 3, 74, 885, 8, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 5, 77, 899, 8, 77, 10, 77, 12, 77, 902, 9, 77, 1, 77, 1, 77, 3, 77, 906, 8, 77, 1, 77, 4, 77, 909, 8, 77, 11, 77, 12, 77, 910, 3, 77, 913, 8, 77, 1, 78, 1, 78, 4, 78, 917, 8, 78, 11, 78, 12, 78, 918, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 3, 95, 996, 8, 95, 1, 96, 4, 96, 999, 8, 96, 11, 96, 12, 96, 1000, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 3, 107, 1048, 8, 107, 1, 108, 1, 108, 3, 108, 1052, 8, 108, 1, 108, 5, 108, 1055, 8, 108, 10, 108, 12, 108, 1058, 9, 108, 1, 108, 1, 108, 3, 108, 1062, 8, 108, 1, 108, 4, 108, 1065, 8, 108, 11, 108, 12, 108, 1066, 3, 108, 1069, 8, 108, 1, 109, 1, 109, 4, 109, 1073, 8, 109, 11, 109, 12, 109, 1074, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 
1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 129, 4, 129, 1158, 8, 129, 11, 129, 12, 129, 1159, 1, 129, 1, 129, 3, 129, 1164, 8, 129, 1, 129, 4, 129, 1167, 8, 129, 11, 129, 12, 129, 1168, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 4, 162, 1308, 8, 162, 11, 162, 12, 162, 1309, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 166, 
1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 197, 2, 607, 676, 0, 198, 15, 1, 17, 2, 19, 3, 21, 4, 23, 5, 25, 6, 27, 7, 29, 8, 31, 9, 33, 10, 35, 11, 37, 12, 39, 13, 41, 14, 43, 15, 45, 16, 47, 17, 49, 18, 51, 19, 53, 20, 55, 21, 57, 22, 59, 23, 61, 24, 63, 0, 65, 0, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 0, 81, 0, 83, 25, 85, 26, 87, 27, 89, 28, 91, 29, 93, 30, 95, 31, 97, 32, 99, 33, 101, 34, 103, 35, 105, 36, 107, 37, 109, 38, 111, 39, 113, 40, 115, 41, 117, 42, 119, 43, 121, 44, 123, 45, 125, 46, 127, 47, 129, 48, 131, 49, 133, 50, 135, 51, 137, 52, 139, 53, 141, 54, 143, 55, 145, 56, 147, 57, 149, 58, 151, 59, 153, 60, 155, 61, 157, 62, 159, 63, 161, 0, 163, 64, 165, 65, 167, 66, 169, 67, 171, 0, 173, 68, 175, 69, 177, 70, 179, 71, 181, 0, 183, 0, 185, 72, 187, 73, 189, 74, 191, 0, 193, 0, 195, 0, 197, 0, 199, 0, 201, 0, 203, 75, 205, 0, 207, 76, 209, 0, 211, 0, 213, 77, 215, 78, 
217, 79, 219, 0, 221, 0, 223, 0, 225, 0, 227, 0, 229, 0, 231, 0, 233, 80, 235, 81, 237, 82, 239, 83, 241, 0, 243, 0, 245, 0, 247, 0, 249, 0, 251, 0, 253, 84, 255, 0, 257, 85, 259, 86, 261, 87, 263, 0, 265, 0, 267, 88, 269, 89, 271, 0, 273, 90, 275, 0, 277, 91, 279, 92, 281, 93, 283, 0, 285, 0, 287, 0, 289, 0, 291, 0, 293, 0, 295, 0, 297, 0, 299, 0, 301, 94, 303, 95, 305, 96, 307, 0, 309, 0, 311, 0, 313, 0, 315, 0, 317, 0, 319, 97, 321, 98, 323, 99, 325, 0, 327, 100, 329, 101, 331, 102, 333, 103, 335, 0, 337, 104, 339, 105, 341, 106, 343, 107, 345, 108, 347, 0, 349, 0, 351, 0, 353, 0, 355, 0, 357, 0, 359, 0, 361, 109, 363, 110, 365, 111, 367, 0, 369, 0, 371, 0, 373, 0, 375, 112, 377, 113, 379, 114, 381, 0, 383, 0, 385, 0, 387, 115, 389, 116, 391, 117, 393, 0, 395, 0, 397, 118, 399, 119, 401, 120, 403, 0, 405, 0, 407, 0, 409, 0, 15, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 35, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1499, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 
1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 1, 61, 1, 0, 0, 0, 1, 83, 1, 0, 0, 0, 1, 85, 1, 0, 0, 0, 1, 87, 1, 0, 0, 0, 1, 89, 1, 0, 0, 0, 1, 91, 1, 0, 0, 0, 1, 93, 1, 0, 0, 0, 1, 95, 1, 0, 0, 0, 1, 97, 1, 0, 0, 0, 1, 99, 1, 0, 0, 0, 1, 101, 1, 0, 0, 0, 1, 103, 1, 0, 0, 0, 1, 105, 1, 0, 0, 0, 1, 107, 1, 0, 0, 0, 1, 109, 1, 0, 0, 0, 1, 111, 1, 0, 0, 0, 1, 113, 1, 0, 0, 0, 1, 115, 1, 0, 0, 0, 1, 117, 1, 0, 0, 0, 1, 119, 1, 0, 0, 0, 1, 121, 1, 0, 0, 0, 1, 123, 1, 0, 0, 0, 1, 125, 1, 0, 0, 0, 1, 127, 1, 0, 0, 0, 1, 129, 1, 0, 0, 0, 1, 131, 1, 0, 0, 0, 1, 133, 1, 0, 0, 0, 1, 135, 1, 0, 0, 0, 1, 137, 1, 0, 0, 0, 1, 139, 1, 0, 0, 0, 1, 141, 1, 0, 0, 0, 1, 143, 1, 0, 0, 0, 1, 145, 1, 0, 0, 0, 1, 147, 1, 0, 0, 0, 1, 149, 1, 0, 0, 0, 1, 151, 1, 0, 0, 0, 1, 153, 1, 0, 0, 0, 1, 155, 1, 0, 0, 0, 1, 157, 1, 0, 0, 0, 1, 159, 1, 0, 0, 0, 1, 161, 1, 0, 0, 0, 1, 163, 1, 0, 0, 0, 1, 165, 1, 0, 0, 0, 1, 167, 1, 0, 0, 0, 1, 169, 1, 0, 0, 0, 1, 173, 1, 0, 0, 0, 1, 175, 1, 0, 0, 0, 1, 177, 1, 0, 0, 0, 1, 179, 1, 0, 0, 0, 2, 181, 1, 0, 0, 0, 2, 183, 1, 0, 0, 0, 2, 185, 1, 0, 0, 0, 2, 187, 1, 0, 0, 0, 2, 189, 1, 0, 0, 0, 3, 191, 1, 0, 0, 0, 3, 193, 1, 0, 0, 0, 3, 195, 1, 0, 0, 0, 3, 197, 1, 0, 0, 0, 3, 199, 1, 0, 0, 0, 3, 201, 1, 0, 0, 0, 3, 203, 1, 0, 0, 0, 3, 207, 1, 0, 0, 0, 3, 209, 1, 0, 0, 0, 3, 211, 1, 0, 0, 0, 3, 213, 1, 0, 0, 0, 3, 215, 1, 0, 0, 0, 3, 217, 1, 0, 0, 0, 4, 219, 1, 0, 0, 0, 4, 221, 1, 0, 0, 0, 4, 223, 1, 0, 0, 0, 4, 225, 1, 0, 0, 0, 4, 227, 1, 0, 0, 0, 4, 233, 1, 0, 0, 0, 4, 235, 1, 0, 0, 0, 4, 237, 1, 0, 0, 0, 4, 239, 1, 0, 0, 0, 5, 241, 1, 0, 0, 0, 5, 243, 1, 0, 0, 0, 5, 245, 1, 0, 0, 0, 5, 247, 1, 0, 0, 0, 5, 249, 1, 0, 0, 
0, 5, 251, 1, 0, 0, 0, 5, 253, 1, 0, 0, 0, 5, 255, 1, 0, 0, 0, 5, 257, 1, 0, 0, 0, 5, 259, 1, 0, 0, 0, 5, 261, 1, 0, 0, 0, 6, 263, 1, 0, 0, 0, 6, 265, 1, 0, 0, 0, 6, 267, 1, 0, 0, 0, 6, 269, 1, 0, 0, 0, 6, 273, 1, 0, 0, 0, 6, 275, 1, 0, 0, 0, 6, 277, 1, 0, 0, 0, 6, 279, 1, 0, 0, 0, 6, 281, 1, 0, 0, 0, 7, 283, 1, 0, 0, 0, 7, 285, 1, 0, 0, 0, 7, 287, 1, 0, 0, 0, 7, 289, 1, 0, 0, 0, 7, 291, 1, 0, 0, 0, 7, 293, 1, 0, 0, 0, 7, 295, 1, 0, 0, 0, 7, 297, 1, 0, 0, 0, 7, 299, 1, 0, 0, 0, 7, 301, 1, 0, 0, 0, 7, 303, 1, 0, 0, 0, 7, 305, 1, 0, 0, 0, 8, 307, 1, 0, 0, 0, 8, 309, 1, 0, 0, 0, 8, 311, 1, 0, 0, 0, 8, 313, 1, 0, 0, 0, 8, 315, 1, 0, 0, 0, 8, 317, 1, 0, 0, 0, 8, 319, 1, 0, 0, 0, 8, 321, 1, 0, 0, 0, 8, 323, 1, 0, 0, 0, 9, 325, 1, 0, 0, 0, 9, 327, 1, 0, 0, 0, 9, 329, 1, 0, 0, 0, 9, 331, 1, 0, 0, 0, 9, 333, 1, 0, 0, 0, 10, 335, 1, 0, 0, 0, 10, 337, 1, 0, 0, 0, 10, 339, 1, 0, 0, 0, 10, 341, 1, 0, 0, 0, 10, 343, 1, 0, 0, 0, 10, 345, 1, 0, 0, 0, 11, 347, 1, 0, 0, 0, 11, 349, 1, 0, 0, 0, 11, 351, 1, 0, 0, 0, 11, 353, 1, 0, 0, 0, 11, 355, 1, 0, 0, 0, 11, 357, 1, 0, 0, 0, 11, 359, 1, 0, 0, 0, 11, 361, 1, 0, 0, 0, 11, 363, 1, 0, 0, 0, 11, 365, 1, 0, 0, 0, 12, 367, 1, 0, 0, 0, 12, 369, 1, 0, 0, 0, 12, 371, 1, 0, 0, 0, 12, 373, 1, 0, 0, 0, 12, 375, 1, 0, 0, 0, 12, 377, 1, 0, 0, 0, 12, 379, 1, 0, 0, 0, 13, 381, 1, 0, 0, 0, 13, 383, 1, 0, 0, 0, 13, 385, 1, 0, 0, 0, 13, 387, 1, 0, 0, 0, 13, 389, 1, 0, 0, 0, 13, 391, 1, 0, 0, 0, 14, 393, 1, 0, 0, 0, 14, 395, 1, 0, 0, 0, 14, 397, 1, 0, 0, 0, 14, 399, 1, 0, 0, 0, 14, 401, 1, 0, 0, 0, 14, 403, 1, 0, 0, 0, 14, 405, 1, 0, 0, 0, 14, 407, 1, 0, 0, 0, 14, 409, 1, 0, 0, 0, 15, 411, 1, 0, 0, 0, 17, 421, 1, 0, 0, 0, 19, 428, 1, 0, 0, 0, 21, 437, 1, 0, 0, 0, 23, 444, 1, 0, 0, 0, 25, 454, 1, 0, 0, 0, 27, 461, 1, 0, 0, 0, 29, 468, 1, 0, 0, 0, 31, 475, 1, 0, 0, 0, 33, 483, 1, 0, 0, 0, 35, 495, 1, 0, 0, 0, 37, 504, 1, 0, 0, 0, 39, 510, 1, 0, 0, 0, 41, 517, 1, 0, 0, 0, 43, 524, 1, 0, 0, 0, 45, 532, 1, 0, 0, 0, 47, 540, 1, 0, 0, 0, 49, 555, 1, 0, 0, 0, 
51, 565, 1, 0, 0, 0, 53, 577, 1, 0, 0, 0, 55, 583, 1, 0, 0, 0, 57, 600, 1, 0, 0, 0, 59, 616, 1, 0, 0, 0, 61, 622, 1, 0, 0, 0, 63, 626, 1, 0, 0, 0, 65, 628, 1, 0, 0, 0, 67, 630, 1, 0, 0, 0, 69, 633, 1, 0, 0, 0, 71, 635, 1, 0, 0, 0, 73, 644, 1, 0, 0, 0, 75, 646, 1, 0, 0, 0, 77, 651, 1, 0, 0, 0, 79, 653, 1, 0, 0, 0, 81, 658, 1, 0, 0, 0, 83, 689, 1, 0, 0, 0, 85, 692, 1, 0, 0, 0, 87, 738, 1, 0, 0, 0, 89, 740, 1, 0, 0, 0, 91, 743, 1, 0, 0, 0, 93, 747, 1, 0, 0, 0, 95, 751, 1, 0, 0, 0, 97, 753, 1, 0, 0, 0, 99, 756, 1, 0, 0, 0, 101, 758, 1, 0, 0, 0, 103, 763, 1, 0, 0, 0, 105, 765, 1, 0, 0, 0, 107, 771, 1, 0, 0, 0, 109, 777, 1, 0, 0, 0, 111, 780, 1, 0, 0, 0, 113, 783, 1, 0, 0, 0, 115, 788, 1, 0, 0, 0, 117, 793, 1, 0, 0, 0, 119, 795, 1, 0, 0, 0, 121, 799, 1, 0, 0, 0, 123, 804, 1, 0, 0, 0, 125, 810, 1, 0, 0, 0, 127, 813, 1, 0, 0, 0, 129, 815, 1, 0, 0, 0, 131, 821, 1, 0, 0, 0, 133, 823, 1, 0, 0, 0, 135, 828, 1, 0, 0, 0, 137, 831, 1, 0, 0, 0, 139, 834, 1, 0, 0, 0, 141, 837, 1, 0, 0, 0, 143, 839, 1, 0, 0, 0, 145, 842, 1, 0, 0, 0, 147, 844, 1, 0, 0, 0, 149, 847, 1, 0, 0, 0, 151, 849, 1, 0, 0, 0, 153, 851, 1, 0, 0, 0, 155, 853, 1, 0, 0, 0, 157, 855, 1, 0, 0, 0, 159, 857, 1, 0, 0, 0, 161, 863, 1, 0, 0, 0, 163, 884, 1, 0, 0, 0, 165, 886, 1, 0, 0, 0, 167, 891, 1, 0, 0, 0, 169, 912, 1, 0, 0, 0, 171, 914, 1, 0, 0, 0, 173, 922, 1, 0, 0, 0, 175, 924, 1, 0, 0, 0, 177, 928, 1, 0, 0, 0, 179, 932, 1, 0, 0, 0, 181, 936, 1, 0, 0, 0, 183, 941, 1, 0, 0, 0, 185, 946, 1, 0, 0, 0, 187, 950, 1, 0, 0, 0, 189, 954, 1, 0, 0, 0, 191, 958, 1, 0, 0, 0, 193, 963, 1, 0, 0, 0, 195, 967, 1, 0, 0, 0, 197, 971, 1, 0, 0, 0, 199, 975, 1, 0, 0, 0, 201, 979, 1, 0, 0, 0, 203, 983, 1, 0, 0, 0, 205, 995, 1, 0, 0, 0, 207, 998, 1, 0, 0, 0, 209, 1002, 1, 0, 0, 0, 211, 1006, 1, 0, 0, 0, 213, 1010, 1, 0, 0, 0, 215, 1014, 1, 0, 0, 0, 217, 1018, 1, 0, 0, 0, 219, 1022, 1, 0, 0, 0, 221, 1027, 1, 0, 0, 0, 223, 1031, 1, 0, 0, 0, 225, 1035, 1, 0, 0, 0, 227, 1039, 1, 0, 0, 0, 229, 1047, 1, 0, 0, 0, 231, 1068, 1, 0, 0, 0, 233, 1072, 
1, 0, 0, 0, 235, 1076, 1, 0, 0, 0, 237, 1080, 1, 0, 0, 0, 239, 1084, 1, 0, 0, 0, 241, 1088, 1, 0, 0, 0, 243, 1093, 1, 0, 0, 0, 245, 1097, 1, 0, 0, 0, 247, 1101, 1, 0, 0, 0, 249, 1105, 1, 0, 0, 0, 251, 1109, 1, 0, 0, 0, 253, 1113, 1, 0, 0, 0, 255, 1116, 1, 0, 0, 0, 257, 1120, 1, 0, 0, 0, 259, 1124, 1, 0, 0, 0, 261, 1128, 1, 0, 0, 0, 263, 1132, 1, 0, 0, 0, 265, 1137, 1, 0, 0, 0, 267, 1142, 1, 0, 0, 0, 269, 1147, 1, 0, 0, 0, 271, 1154, 1, 0, 0, 0, 273, 1163, 1, 0, 0, 0, 275, 1170, 1, 0, 0, 0, 277, 1174, 1, 0, 0, 0, 279, 1178, 1, 0, 0, 0, 281, 1182, 1, 0, 0, 0, 283, 1186, 1, 0, 0, 0, 285, 1192, 1, 0, 0, 0, 287, 1196, 1, 0, 0, 0, 289, 1200, 1, 0, 0, 0, 291, 1204, 1, 0, 0, 0, 293, 1208, 1, 0, 0, 0, 295, 1212, 1, 0, 0, 0, 297, 1216, 1, 0, 0, 0, 299, 1220, 1, 0, 0, 0, 301, 1224, 1, 0, 0, 0, 303, 1228, 1, 0, 0, 0, 305, 1232, 1, 0, 0, 0, 307, 1236, 1, 0, 0, 0, 309, 1241, 1, 0, 0, 0, 311, 1245, 1, 0, 0, 0, 313, 1249, 1, 0, 0, 0, 315, 1253, 1, 0, 0, 0, 317, 1257, 1, 0, 0, 0, 319, 1261, 1, 0, 0, 0, 321, 1265, 1, 0, 0, 0, 323, 1269, 1, 0, 0, 0, 325, 1273, 1, 0, 0, 0, 327, 1278, 1, 0, 0, 0, 329, 1283, 1, 0, 0, 0, 331, 1287, 1, 0, 0, 0, 333, 1291, 1, 0, 0, 0, 335, 1295, 1, 0, 0, 0, 337, 1300, 1, 0, 0, 0, 339, 1307, 1, 0, 0, 0, 341, 1311, 1, 0, 0, 0, 343, 1315, 1, 0, 0, 0, 345, 1319, 1, 0, 0, 0, 347, 1323, 1, 0, 0, 0, 349, 1328, 1, 0, 0, 0, 351, 1332, 1, 0, 0, 0, 353, 1336, 1, 0, 0, 0, 355, 1340, 1, 0, 0, 0, 357, 1345, 1, 0, 0, 0, 359, 1349, 1, 0, 0, 0, 361, 1353, 1, 0, 0, 0, 363, 1357, 1, 0, 0, 0, 365, 1361, 1, 0, 0, 0, 367, 1365, 1, 0, 0, 0, 369, 1371, 1, 0, 0, 0, 371, 1375, 1, 0, 0, 0, 373, 1379, 1, 0, 0, 0, 375, 1383, 1, 0, 0, 0, 377, 1387, 1, 0, 0, 0, 379, 1391, 1, 0, 0, 0, 381, 1395, 1, 0, 0, 0, 383, 1400, 1, 0, 0, 0, 385, 1406, 1, 0, 0, 0, 387, 1412, 1, 0, 0, 0, 389, 1416, 1, 0, 0, 0, 391, 1420, 1, 0, 0, 0, 393, 1424, 1, 0, 0, 0, 395, 1430, 1, 0, 0, 0, 397, 1436, 1, 0, 0, 0, 399, 1440, 1, 0, 0, 0, 401, 1444, 1, 0, 0, 0, 403, 1448, 1, 0, 0, 0, 405, 1454, 1, 0, 0, 0, 407, 
1460, 1, 0, 0, 0, 409, 1466, 1, 0, 0, 0, 411, 412, 7, 0, 0, 0, 412, 413, 7, 1, 0, 0, 413, 414, 7, 2, 0, 0, 414, 415, 7, 2, 0, 0, 415, 416, 7, 3, 0, 0, 416, 417, 7, 4, 0, 0, 417, 418, 7, 5, 0, 0, 418, 419, 1, 0, 0, 0, 419, 420, 6, 0, 0, 0, 420, 16, 1, 0, 0, 0, 421, 422, 7, 0, 0, 0, 422, 423, 7, 6, 0, 0, 423, 424, 7, 7, 0, 0, 424, 425, 7, 8, 0, 0, 425, 426, 1, 0, 0, 0, 426, 427, 6, 1, 1, 0, 427, 18, 1, 0, 0, 0, 428, 429, 7, 3, 0, 0, 429, 430, 7, 9, 0, 0, 430, 431, 7, 6, 0, 0, 431, 432, 7, 1, 0, 0, 432, 433, 7, 4, 0, 0, 433, 434, 7, 10, 0, 0, 434, 435, 1, 0, 0, 0, 435, 436, 6, 2, 2, 0, 436, 20, 1, 0, 0, 0, 437, 438, 7, 3, 0, 0, 438, 439, 7, 11, 0, 0, 439, 440, 7, 12, 0, 0, 440, 441, 7, 13, 0, 0, 441, 442, 1, 0, 0, 0, 442, 443, 6, 3, 0, 0, 443, 22, 1, 0, 0, 0, 444, 445, 7, 3, 0, 0, 445, 446, 7, 14, 0, 0, 446, 447, 7, 8, 0, 0, 447, 448, 7, 13, 0, 0, 448, 449, 7, 12, 0, 0, 449, 450, 7, 1, 0, 0, 450, 451, 7, 9, 0, 0, 451, 452, 1, 0, 0, 0, 452, 453, 6, 4, 3, 0, 453, 24, 1, 0, 0, 0, 454, 455, 7, 15, 0, 0, 455, 456, 7, 6, 0, 0, 456, 457, 7, 7, 0, 0, 457, 458, 7, 16, 0, 0, 458, 459, 1, 0, 0, 0, 459, 460, 6, 5, 4, 0, 460, 26, 1, 0, 0, 0, 461, 462, 7, 17, 0, 0, 462, 463, 7, 6, 0, 0, 463, 464, 7, 7, 0, 0, 464, 465, 7, 18, 0, 0, 465, 466, 1, 0, 0, 0, 466, 467, 6, 6, 0, 0, 467, 28, 1, 0, 0, 0, 468, 469, 7, 18, 0, 0, 469, 470, 7, 3, 0, 0, 470, 471, 7, 3, 0, 0, 471, 472, 7, 8, 0, 0, 472, 473, 1, 0, 0, 0, 473, 474, 6, 7, 1, 0, 474, 30, 1, 0, 0, 0, 475, 476, 7, 13, 0, 0, 476, 477, 7, 1, 0, 0, 477, 478, 7, 16, 0, 0, 478, 479, 7, 1, 0, 0, 479, 480, 7, 5, 0, 0, 480, 481, 1, 0, 0, 0, 481, 482, 6, 8, 0, 0, 482, 32, 1, 0, 0, 0, 483, 484, 7, 16, 0, 0, 484, 485, 7, 11, 0, 0, 485, 486, 5, 95, 0, 0, 486, 487, 7, 3, 0, 0, 487, 488, 7, 14, 0, 0, 488, 489, 7, 8, 0, 0, 489, 490, 7, 12, 0, 0, 490, 491, 7, 9, 0, 0, 491, 492, 7, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 494, 6, 9, 5, 0, 494, 34, 1, 0, 0, 0, 495, 496, 7, 6, 0, 0, 496, 497, 7, 3, 0, 0, 497, 498, 7, 9, 0, 0, 498, 499, 7, 12, 0, 0, 499, 500, 7, 
16, 0, 0, 500, 501, 7, 3, 0, 0, 501, 502, 1, 0, 0, 0, 502, 503, 6, 10, 6, 0, 503, 36, 1, 0, 0, 0, 504, 505, 7, 6, 0, 0, 505, 506, 7, 7, 0, 0, 506, 507, 7, 19, 0, 0, 507, 508, 1, 0, 0, 0, 508, 509, 6, 11, 0, 0, 509, 38, 1, 0, 0, 0, 510, 511, 7, 2, 0, 0, 511, 512, 7, 10, 0, 0, 512, 513, 7, 7, 0, 0, 513, 514, 7, 19, 0, 0, 514, 515, 1, 0, 0, 0, 515, 516, 6, 12, 7, 0, 516, 40, 1, 0, 0, 0, 517, 518, 7, 2, 0, 0, 518, 519, 7, 7, 0, 0, 519, 520, 7, 6, 0, 0, 520, 521, 7, 5, 0, 0, 521, 522, 1, 0, 0, 0, 522, 523, 6, 13, 0, 0, 523, 42, 1, 0, 0, 0, 524, 525, 7, 2, 0, 0, 525, 526, 7, 5, 0, 0, 526, 527, 7, 12, 0, 0, 527, 528, 7, 5, 0, 0, 528, 529, 7, 2, 0, 0, 529, 530, 1, 0, 0, 0, 530, 531, 6, 14, 0, 0, 531, 44, 1, 0, 0, 0, 532, 533, 7, 19, 0, 0, 533, 534, 7, 10, 0, 0, 534, 535, 7, 3, 0, 0, 535, 536, 7, 6, 0, 0, 536, 537, 7, 3, 0, 0, 537, 538, 1, 0, 0, 0, 538, 539, 6, 15, 0, 0, 539, 46, 1, 0, 0, 0, 540, 541, 4, 16, 0, 0, 541, 542, 7, 1, 0, 0, 542, 543, 7, 9, 0, 0, 543, 544, 7, 13, 0, 0, 544, 545, 7, 1, 0, 0, 545, 546, 7, 9, 0, 0, 546, 547, 7, 3, 0, 0, 547, 548, 7, 2, 0, 0, 548, 549, 7, 5, 0, 0, 549, 550, 7, 12, 0, 0, 550, 551, 7, 5, 0, 0, 551, 552, 7, 2, 0, 0, 552, 553, 1, 0, 0, 0, 553, 554, 6, 16, 0, 0, 554, 48, 1, 0, 0, 0, 555, 556, 4, 17, 1, 0, 556, 557, 7, 13, 0, 0, 557, 558, 7, 7, 0, 0, 558, 559, 7, 7, 0, 0, 559, 560, 7, 18, 0, 0, 560, 561, 7, 20, 0, 0, 561, 562, 7, 8, 0, 0, 562, 563, 1, 0, 0, 0, 563, 564, 6, 17, 8, 0, 564, 50, 1, 0, 0, 0, 565, 566, 4, 18, 2, 0, 566, 567, 7, 16, 0, 0, 567, 568, 7, 3, 0, 0, 568, 569, 7, 5, 0, 0, 569, 570, 7, 6, 0, 0, 570, 571, 7, 1, 0, 0, 571, 572, 7, 4, 0, 0, 572, 573, 7, 2, 0, 0, 573, 574, 1, 0, 0, 0, 574, 575, 6, 18, 9, 0, 575, 52, 1, 0, 0, 0, 576, 578, 8, 21, 0, 0, 577, 576, 1, 0, 0, 0, 578, 579, 1, 0, 0, 0, 579, 577, 1, 0, 0, 0, 579, 580, 1, 0, 0, 0, 580, 581, 1, 0, 0, 0, 581, 582, 6, 19, 0, 0, 582, 54, 1, 0, 0, 0, 583, 584, 5, 47, 0, 0, 584, 585, 5, 47, 0, 0, 585, 589, 1, 0, 0, 0, 586, 588, 8, 22, 0, 0, 587, 586, 1, 0, 0, 0, 588, 591, 1, 
0, 0, 0, 589, 587, 1, 0, 0, 0, 589, 590, 1, 0, 0, 0, 590, 593, 1, 0, 0, 0, 591, 589, 1, 0, 0, 0, 592, 594, 5, 13, 0, 0, 593, 592, 1, 0, 0, 0, 593, 594, 1, 0, 0, 0, 594, 596, 1, 0, 0, 0, 595, 597, 5, 10, 0, 0, 596, 595, 1, 0, 0, 0, 596, 597, 1, 0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 599, 6, 20, 10, 0, 599, 56, 1, 0, 0, 0, 600, 601, 5, 47, 0, 0, 601, 602, 5, 42, 0, 0, 602, 607, 1, 0, 0, 0, 603, 606, 3, 57, 21, 0, 604, 606, 9, 0, 0, 0, 605, 603, 1, 0, 0, 0, 605, 604, 1, 0, 0, 0, 606, 609, 1, 0, 0, 0, 607, 608, 1, 0, 0, 0, 607, 605, 1, 0, 0, 0, 608, 610, 1, 0, 0, 0, 609, 607, 1, 0, 0, 0, 610, 611, 5, 42, 0, 0, 611, 612, 5, 47, 0, 0, 612, 613, 1, 0, 0, 0, 613, 614, 6, 21, 10, 0, 614, 58, 1, 0, 0, 0, 615, 617, 7, 23, 0, 0, 616, 615, 1, 0, 0, 0, 617, 618, 1, 0, 0, 0, 618, 616, 1, 0, 0, 0, 618, 619, 1, 0, 0, 0, 619, 620, 1, 0, 0, 0, 620, 621, 6, 22, 10, 0, 621, 60, 1, 0, 0, 0, 622, 623, 5, 124, 0, 0, 623, 624, 1, 0, 0, 0, 624, 625, 6, 23, 11, 0, 625, 62, 1, 0, 0, 0, 626, 627, 7, 24, 0, 0, 627, 64, 1, 0, 0, 0, 628, 629, 7, 25, 0, 0, 629, 66, 1, 0, 0, 0, 630, 631, 5, 92, 0, 0, 631, 632, 7, 26, 0, 0, 632, 68, 1, 0, 0, 0, 633, 634, 8, 27, 0, 0, 634, 70, 1, 0, 0, 0, 635, 637, 7, 3, 0, 0, 636, 638, 7, 28, 0, 0, 637, 636, 1, 0, 0, 0, 637, 638, 1, 0, 0, 0, 638, 640, 1, 0, 0, 0, 639, 641, 3, 63, 24, 0, 640, 639, 1, 0, 0, 0, 641, 642, 1, 0, 0, 0, 642, 640, 1, 0, 0, 0, 642, 643, 1, 0, 0, 0, 643, 72, 1, 0, 0, 0, 644, 645, 5, 64, 0, 0, 645, 74, 1, 0, 0, 0, 646, 647, 5, 96, 0, 0, 647, 76, 1, 0, 0, 0, 648, 652, 8, 29, 0, 0, 649, 650, 5, 96, 0, 0, 650, 652, 5, 96, 0, 0, 651, 648, 1, 0, 0, 0, 651, 649, 1, 0, 0, 0, 652, 78, 1, 0, 0, 0, 653, 654, 5, 95, 0, 0, 654, 80, 1, 0, 0, 0, 655, 659, 3, 65, 25, 0, 656, 659, 3, 63, 24, 0, 657, 659, 3, 79, 32, 0, 658, 655, 1, 0, 0, 0, 658, 656, 1, 0, 0, 0, 658, 657, 1, 0, 0, 0, 659, 82, 1, 0, 0, 0, 660, 665, 5, 34, 0, 0, 661, 664, 3, 67, 26, 0, 662, 664, 3, 69, 27, 0, 663, 661, 1, 0, 0, 0, 663, 662, 1, 0, 0, 0, 664, 667, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 
665, 666, 1, 0, 0, 0, 666, 668, 1, 0, 0, 0, 667, 665, 1, 0, 0, 0, 668, 690, 5, 34, 0, 0, 669, 670, 5, 34, 0, 0, 670, 671, 5, 34, 0, 0, 671, 672, 5, 34, 0, 0, 672, 676, 1, 0, 0, 0, 673, 675, 8, 22, 0, 0, 674, 673, 1, 0, 0, 0, 675, 678, 1, 0, 0, 0, 676, 677, 1, 0, 0, 0, 676, 674, 1, 0, 0, 0, 677, 679, 1, 0, 0, 0, 678, 676, 1, 0, 0, 0, 679, 680, 5, 34, 0, 0, 680, 681, 5, 34, 0, 0, 681, 682, 5, 34, 0, 0, 682, 684, 1, 0, 0, 0, 683, 685, 5, 34, 0, 0, 684, 683, 1, 0, 0, 0, 684, 685, 1, 0, 0, 0, 685, 687, 1, 0, 0, 0, 686, 688, 5, 34, 0, 0, 687, 686, 1, 0, 0, 0, 687, 688, 1, 0, 0, 0, 688, 690, 1, 0, 0, 0, 689, 660, 1, 0, 0, 0, 689, 669, 1, 0, 0, 0, 690, 84, 1, 0, 0, 0, 691, 693, 3, 63, 24, 0, 692, 691, 1, 0, 0, 0, 693, 694, 1, 0, 0, 0, 694, 692, 1, 0, 0, 0, 694, 695, 1, 0, 0, 0, 695, 86, 1, 0, 0, 0, 696, 698, 3, 63, 24, 0, 697, 696, 1, 0, 0, 0, 698, 699, 1, 0, 0, 0, 699, 697, 1, 0, 0, 0, 699, 700, 1, 0, 0, 0, 700, 701, 1, 0, 0, 0, 701, 705, 3, 103, 44, 0, 702, 704, 3, 63, 24, 0, 703, 702, 1, 0, 0, 0, 704, 707, 1, 0, 0, 0, 705, 703, 1, 0, 0, 0, 705, 706, 1, 0, 0, 0, 706, 739, 1, 0, 0, 0, 707, 705, 1, 0, 0, 0, 708, 710, 3, 103, 44, 0, 709, 711, 3, 63, 24, 0, 710, 709, 1, 0, 0, 0, 711, 712, 1, 0, 0, 0, 712, 710, 1, 0, 0, 0, 712, 713, 1, 0, 0, 0, 713, 739, 1, 0, 0, 0, 714, 716, 3, 63, 24, 0, 715, 714, 1, 0, 0, 0, 716, 717, 1, 0, 0, 0, 717, 715, 1, 0, 0, 0, 717, 718, 1, 0, 0, 0, 718, 726, 1, 0, 0, 0, 719, 723, 3, 103, 44, 0, 720, 722, 3, 63, 24, 0, 721, 720, 1, 0, 0, 0, 722, 725, 1, 0, 0, 0, 723, 721, 1, 0, 0, 0, 723, 724, 1, 0, 0, 0, 724, 727, 1, 0, 0, 0, 725, 723, 1, 0, 0, 0, 726, 719, 1, 0, 0, 0, 726, 727, 1, 0, 0, 0, 727, 728, 1, 0, 0, 0, 728, 729, 3, 71, 28, 0, 729, 739, 1, 0, 0, 0, 730, 732, 3, 103, 44, 0, 731, 733, 3, 63, 24, 0, 732, 731, 1, 0, 0, 0, 733, 734, 1, 0, 0, 0, 734, 732, 1, 0, 0, 0, 734, 735, 1, 0, 0, 0, 735, 736, 1, 0, 0, 0, 736, 737, 3, 71, 28, 0, 737, 739, 1, 0, 0, 0, 738, 697, 1, 0, 0, 0, 738, 708, 1, 0, 0, 0, 738, 715, 1, 0, 0, 0, 738, 730, 1, 0, 0, 0, 
739, 88, 1, 0, 0, 0, 740, 741, 7, 30, 0, 0, 741, 742, 7, 31, 0, 0, 742, 90, 1, 0, 0, 0, 743, 744, 7, 12, 0, 0, 744, 745, 7, 9, 0, 0, 745, 746, 7, 0, 0, 0, 746, 92, 1, 0, 0, 0, 747, 748, 7, 12, 0, 0, 748, 749, 7, 2, 0, 0, 749, 750, 7, 4, 0, 0, 750, 94, 1, 0, 0, 0, 751, 752, 5, 61, 0, 0, 752, 96, 1, 0, 0, 0, 753, 754, 5, 58, 0, 0, 754, 755, 5, 58, 0, 0, 755, 98, 1, 0, 0, 0, 756, 757, 5, 44, 0, 0, 757, 100, 1, 0, 0, 0, 758, 759, 7, 0, 0, 0, 759, 760, 7, 3, 0, 0, 760, 761, 7, 2, 0, 0, 761, 762, 7, 4, 0, 0, 762, 102, 1, 0, 0, 0, 763, 764, 5, 46, 0, 0, 764, 104, 1, 0, 0, 0, 765, 766, 7, 15, 0, 0, 766, 767, 7, 12, 0, 0, 767, 768, 7, 13, 0, 0, 768, 769, 7, 2, 0, 0, 769, 770, 7, 3, 0, 0, 770, 106, 1, 0, 0, 0, 771, 772, 7, 15, 0, 0, 772, 773, 7, 1, 0, 0, 773, 774, 7, 6, 0, 0, 774, 775, 7, 2, 0, 0, 775, 776, 7, 5, 0, 0, 776, 108, 1, 0, 0, 0, 777, 778, 7, 1, 0, 0, 778, 779, 7, 9, 0, 0, 779, 110, 1, 0, 0, 0, 780, 781, 7, 1, 0, 0, 781, 782, 7, 2, 0, 0, 782, 112, 1, 0, 0, 0, 783, 784, 7, 13, 0, 0, 784, 785, 7, 12, 0, 0, 785, 786, 7, 2, 0, 0, 786, 787, 7, 5, 0, 0, 787, 114, 1, 0, 0, 0, 788, 789, 7, 13, 0, 0, 789, 790, 7, 1, 0, 0, 790, 791, 7, 18, 0, 0, 791, 792, 7, 3, 0, 0, 792, 116, 1, 0, 0, 0, 793, 794, 5, 40, 0, 0, 794, 118, 1, 0, 0, 0, 795, 796, 7, 9, 0, 0, 796, 797, 7, 7, 0, 0, 797, 798, 7, 5, 0, 0, 798, 120, 1, 0, 0, 0, 799, 800, 7, 9, 0, 0, 800, 801, 7, 20, 0, 0, 801, 802, 7, 13, 0, 0, 802, 803, 7, 13, 0, 0, 803, 122, 1, 0, 0, 0, 804, 805, 7, 9, 0, 0, 805, 806, 7, 20, 0, 0, 806, 807, 7, 13, 0, 0, 807, 808, 7, 13, 0, 0, 808, 809, 7, 2, 0, 0, 809, 124, 1, 0, 0, 0, 810, 811, 7, 7, 0, 0, 811, 812, 7, 6, 0, 0, 812, 126, 1, 0, 0, 0, 813, 814, 5, 63, 0, 0, 814, 128, 1, 0, 0, 0, 815, 816, 7, 6, 0, 0, 816, 817, 7, 13, 0, 0, 817, 818, 7, 1, 0, 0, 818, 819, 7, 18, 0, 0, 819, 820, 7, 3, 0, 0, 820, 130, 1, 0, 0, 0, 821, 822, 5, 41, 0, 0, 822, 132, 1, 0, 0, 0, 823, 824, 7, 5, 0, 0, 824, 825, 7, 6, 0, 0, 825, 826, 7, 20, 0, 0, 826, 827, 7, 3, 0, 0, 827, 134, 1, 0, 0, 0, 828, 829, 5, 61, 
0, 0, 829, 830, 5, 61, 0, 0, 830, 136, 1, 0, 0, 0, 831, 832, 5, 61, 0, 0, 832, 833, 5, 126, 0, 0, 833, 138, 1, 0, 0, 0, 834, 835, 5, 33, 0, 0, 835, 836, 5, 61, 0, 0, 836, 140, 1, 0, 0, 0, 837, 838, 5, 60, 0, 0, 838, 142, 1, 0, 0, 0, 839, 840, 5, 60, 0, 0, 840, 841, 5, 61, 0, 0, 841, 144, 1, 0, 0, 0, 842, 843, 5, 62, 0, 0, 843, 146, 1, 0, 0, 0, 844, 845, 5, 62, 0, 0, 845, 846, 5, 61, 0, 0, 846, 148, 1, 0, 0, 0, 847, 848, 5, 43, 0, 0, 848, 150, 1, 0, 0, 0, 849, 850, 5, 45, 0, 0, 850, 152, 1, 0, 0, 0, 851, 852, 5, 42, 0, 0, 852, 154, 1, 0, 0, 0, 853, 854, 5, 47, 0, 0, 854, 156, 1, 0, 0, 0, 855, 856, 5, 37, 0, 0, 856, 158, 1, 0, 0, 0, 857, 858, 7, 16, 0, 0, 858, 859, 7, 12, 0, 0, 859, 860, 7, 5, 0, 0, 860, 861, 7, 4, 0, 0, 861, 862, 7, 10, 0, 0, 862, 160, 1, 0, 0, 0, 863, 864, 3, 45, 15, 0, 864, 865, 1, 0, 0, 0, 865, 866, 6, 73, 12, 0, 866, 162, 1, 0, 0, 0, 867, 870, 3, 127, 56, 0, 868, 871, 3, 65, 25, 0, 869, 871, 3, 79, 32, 0, 870, 868, 1, 0, 0, 0, 870, 869, 1, 0, 0, 0, 871, 875, 1, 0, 0, 0, 872, 874, 3, 81, 33, 0, 873, 872, 1, 0, 0, 0, 874, 877, 1, 0, 0, 0, 875, 873, 1, 0, 0, 0, 875, 876, 1, 0, 0, 0, 876, 885, 1, 0, 0, 0, 877, 875, 1, 0, 0, 0, 878, 880, 3, 127, 56, 0, 879, 881, 3, 63, 24, 0, 880, 879, 1, 0, 0, 0, 881, 882, 1, 0, 0, 0, 882, 880, 1, 0, 0, 0, 882, 883, 1, 0, 0, 0, 883, 885, 1, 0, 0, 0, 884, 867, 1, 0, 0, 0, 884, 878, 1, 0, 0, 0, 885, 164, 1, 0, 0, 0, 886, 887, 5, 91, 0, 0, 887, 888, 1, 0, 0, 0, 888, 889, 6, 75, 0, 0, 889, 890, 6, 75, 0, 0, 890, 166, 1, 0, 0, 0, 891, 892, 5, 93, 0, 0, 892, 893, 1, 0, 0, 0, 893, 894, 6, 76, 11, 0, 894, 895, 6, 76, 11, 0, 895, 168, 1, 0, 0, 0, 896, 900, 3, 65, 25, 0, 897, 899, 3, 81, 33, 0, 898, 897, 1, 0, 0, 0, 899, 902, 1, 0, 0, 0, 900, 898, 1, 0, 0, 0, 900, 901, 1, 0, 0, 0, 901, 913, 1, 0, 0, 0, 902, 900, 1, 0, 0, 0, 903, 906, 3, 79, 32, 0, 904, 906, 3, 73, 29, 0, 905, 903, 1, 0, 0, 0, 905, 904, 1, 0, 0, 0, 906, 908, 1, 0, 0, 0, 907, 909, 3, 81, 33, 0, 908, 907, 1, 0, 0, 0, 909, 910, 1, 0, 0, 0, 910, 908, 1, 0, 0, 0, 
910, 911, 1, 0, 0, 0, 911, 913, 1, 0, 0, 0, 912, 896, 1, 0, 0, 0, 912, 905, 1, 0, 0, 0, 913, 170, 1, 0, 0, 0, 914, 916, 3, 75, 30, 0, 915, 917, 3, 77, 31, 0, 916, 915, 1, 0, 0, 0, 917, 918, 1, 0, 0, 0, 918, 916, 1, 0, 0, 0, 918, 919, 1, 0, 0, 0, 919, 920, 1, 0, 0, 0, 920, 921, 3, 75, 30, 0, 921, 172, 1, 0, 0, 0, 922, 923, 3, 171, 78, 0, 923, 174, 1, 0, 0, 0, 924, 925, 3, 55, 20, 0, 925, 926, 1, 0, 0, 0, 926, 927, 6, 80, 10, 0, 927, 176, 1, 0, 0, 0, 928, 929, 3, 57, 21, 0, 929, 930, 1, 0, 0, 0, 930, 931, 6, 81, 10, 0, 931, 178, 1, 0, 0, 0, 932, 933, 3, 59, 22, 0, 933, 934, 1, 0, 0, 0, 934, 935, 6, 82, 10, 0, 935, 180, 1, 0, 0, 0, 936, 937, 3, 165, 75, 0, 937, 938, 1, 0, 0, 0, 938, 939, 6, 83, 13, 0, 939, 940, 6, 83, 14, 0, 940, 182, 1, 0, 0, 0, 941, 942, 3, 61, 23, 0, 942, 943, 1, 0, 0, 0, 943, 944, 6, 84, 15, 0, 944, 945, 6, 84, 11, 0, 945, 184, 1, 0, 0, 0, 946, 947, 3, 59, 22, 0, 947, 948, 1, 0, 0, 0, 948, 949, 6, 85, 10, 0, 949, 186, 1, 0, 0, 0, 950, 951, 3, 55, 20, 0, 951, 952, 1, 0, 0, 0, 952, 953, 6, 86, 10, 0, 953, 188, 1, 0, 0, 0, 954, 955, 3, 57, 21, 0, 955, 956, 1, 0, 0, 0, 956, 957, 6, 87, 10, 0, 957, 190, 1, 0, 0, 0, 958, 959, 3, 61, 23, 0, 959, 960, 1, 0, 0, 0, 960, 961, 6, 88, 15, 0, 961, 962, 6, 88, 11, 0, 962, 192, 1, 0, 0, 0, 963, 964, 3, 165, 75, 0, 964, 965, 1, 0, 0, 0, 965, 966, 6, 89, 13, 0, 966, 194, 1, 0, 0, 0, 967, 968, 3, 167, 76, 0, 968, 969, 1, 0, 0, 0, 969, 970, 6, 90, 16, 0, 970, 196, 1, 0, 0, 0, 971, 972, 3, 337, 161, 0, 972, 973, 1, 0, 0, 0, 973, 974, 6, 91, 17, 0, 974, 198, 1, 0, 0, 0, 975, 976, 3, 99, 42, 0, 976, 977, 1, 0, 0, 0, 977, 978, 6, 92, 18, 0, 978, 200, 1, 0, 0, 0, 979, 980, 3, 95, 40, 0, 980, 981, 1, 0, 0, 0, 981, 982, 6, 93, 19, 0, 982, 202, 1, 0, 0, 0, 983, 984, 7, 16, 0, 0, 984, 985, 7, 3, 0, 0, 985, 986, 7, 5, 0, 0, 986, 987, 7, 12, 0, 0, 987, 988, 7, 0, 0, 0, 988, 989, 7, 12, 0, 0, 989, 990, 7, 5, 0, 0, 990, 991, 7, 12, 0, 0, 991, 204, 1, 0, 0, 0, 992, 996, 8, 32, 0, 0, 993, 994, 5, 47, 0, 0, 994, 996, 8, 33, 0, 0, 
995, 992, 1, 0, 0, 0, 995, 993, 1, 0, 0, 0, 996, 206, 1, 0, 0, 0, 997, 999, 3, 205, 95, 0, 998, 997, 1, 0, 0, 0, 999, 1000, 1, 0, 0, 0, 1000, 998, 1, 0, 0, 0, 1000, 1001, 1, 0, 0, 0, 1001, 208, 1, 0, 0, 0, 1002, 1003, 3, 207, 96, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1005, 6, 97, 20, 0, 1005, 210, 1, 0, 0, 0, 1006, 1007, 3, 83, 34, 0, 1007, 1008, 1, 0, 0, 0, 1008, 1009, 6, 98, 21, 0, 1009, 212, 1, 0, 0, 0, 1010, 1011, 3, 55, 20, 0, 1011, 1012, 1, 0, 0, 0, 1012, 1013, 6, 99, 10, 0, 1013, 214, 1, 0, 0, 0, 1014, 1015, 3, 57, 21, 0, 1015, 1016, 1, 0, 0, 0, 1016, 1017, 6, 100, 10, 0, 1017, 216, 1, 0, 0, 0, 1018, 1019, 3, 59, 22, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 6, 101, 10, 0, 1021, 218, 1, 0, 0, 0, 1022, 1023, 3, 61, 23, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1025, 6, 102, 15, 0, 1025, 1026, 6, 102, 11, 0, 1026, 220, 1, 0, 0, 0, 1027, 1028, 3, 103, 44, 0, 1028, 1029, 1, 0, 0, 0, 1029, 1030, 6, 103, 22, 0, 1030, 222, 1, 0, 0, 0, 1031, 1032, 3, 99, 42, 0, 1032, 1033, 1, 0, 0, 0, 1033, 1034, 6, 104, 18, 0, 1034, 224, 1, 0, 0, 0, 1035, 1036, 3, 127, 56, 0, 1036, 1037, 1, 0, 0, 0, 1037, 1038, 6, 105, 23, 0, 1038, 226, 1, 0, 0, 0, 1039, 1040, 3, 163, 74, 0, 1040, 1041, 1, 0, 0, 0, 1041, 1042, 6, 106, 24, 0, 1042, 228, 1, 0, 0, 0, 1043, 1048, 3, 65, 25, 0, 1044, 1048, 3, 63, 24, 0, 1045, 1048, 3, 79, 32, 0, 1046, 1048, 3, 153, 69, 0, 1047, 1043, 1, 0, 0, 0, 1047, 1044, 1, 0, 0, 0, 1047, 1045, 1, 0, 0, 0, 1047, 1046, 1, 0, 0, 0, 1048, 230, 1, 0, 0, 0, 1049, 1052, 3, 65, 25, 0, 1050, 1052, 3, 153, 69, 0, 1051, 1049, 1, 0, 0, 0, 1051, 1050, 1, 0, 0, 0, 1052, 1056, 1, 0, 0, 0, 1053, 1055, 3, 229, 107, 0, 1054, 1053, 1, 0, 0, 0, 1055, 1058, 1, 0, 0, 0, 1056, 1054, 1, 0, 0, 0, 1056, 1057, 1, 0, 0, 0, 1057, 1069, 1, 0, 0, 0, 1058, 1056, 1, 0, 0, 0, 1059, 1062, 3, 79, 32, 0, 1060, 1062, 3, 73, 29, 0, 1061, 1059, 1, 0, 0, 0, 1061, 1060, 1, 0, 0, 0, 1062, 1064, 1, 0, 0, 0, 1063, 1065, 3, 229, 107, 0, 1064, 1063, 1, 0, 0, 0, 1065, 1066, 1, 0, 0, 0, 1066, 1064, 1, 0, 0, 0, 1066, 1067, 1, 0, 0, 
0, 1067, 1069, 1, 0, 0, 0, 1068, 1051, 1, 0, 0, 0, 1068, 1061, 1, 0, 0, 0, 1069, 232, 1, 0, 0, 0, 1070, 1073, 3, 231, 108, 0, 1071, 1073, 3, 171, 78, 0, 1072, 1070, 1, 0, 0, 0, 1072, 1071, 1, 0, 0, 0, 1073, 1074, 1, 0, 0, 0, 1074, 1072, 1, 0, 0, 0, 1074, 1075, 1, 0, 0, 0, 1075, 234, 1, 0, 0, 0, 1076, 1077, 3, 55, 20, 0, 1077, 1078, 1, 0, 0, 0, 1078, 1079, 6, 110, 10, 0, 1079, 236, 1, 0, 0, 0, 1080, 1081, 3, 57, 21, 0, 1081, 1082, 1, 0, 0, 0, 1082, 1083, 6, 111, 10, 0, 1083, 238, 1, 0, 0, 0, 1084, 1085, 3, 59, 22, 0, 1085, 1086, 1, 0, 0, 0, 1086, 1087, 6, 112, 10, 0, 1087, 240, 1, 0, 0, 0, 1088, 1089, 3, 61, 23, 0, 1089, 1090, 1, 0, 0, 0, 1090, 1091, 6, 113, 15, 0, 1091, 1092, 6, 113, 11, 0, 1092, 242, 1, 0, 0, 0, 1093, 1094, 3, 95, 40, 0, 1094, 1095, 1, 0, 0, 0, 1095, 1096, 6, 114, 19, 0, 1096, 244, 1, 0, 0, 0, 1097, 1098, 3, 99, 42, 0, 1098, 1099, 1, 0, 0, 0, 1099, 1100, 6, 115, 18, 0, 1100, 246, 1, 0, 0, 0, 1101, 1102, 3, 103, 44, 0, 1102, 1103, 1, 0, 0, 0, 1103, 1104, 6, 116, 22, 0, 1104, 248, 1, 0, 0, 0, 1105, 1106, 3, 127, 56, 0, 1106, 1107, 1, 0, 0, 0, 1107, 1108, 6, 117, 23, 0, 1108, 250, 1, 0, 0, 0, 1109, 1110, 3, 163, 74, 0, 1110, 1111, 1, 0, 0, 0, 1111, 1112, 6, 118, 24, 0, 1112, 252, 1, 0, 0, 0, 1113, 1114, 7, 12, 0, 0, 1114, 1115, 7, 2, 0, 0, 1115, 254, 1, 0, 0, 0, 1116, 1117, 3, 233, 109, 0, 1117, 1118, 1, 0, 0, 0, 1118, 1119, 6, 120, 25, 0, 1119, 256, 1, 0, 0, 0, 1120, 1121, 3, 55, 20, 0, 1121, 1122, 1, 0, 0, 0, 1122, 1123, 6, 121, 10, 0, 1123, 258, 1, 0, 0, 0, 1124, 1125, 3, 57, 21, 0, 1125, 1126, 1, 0, 0, 0, 1126, 1127, 6, 122, 10, 0, 1127, 260, 1, 0, 0, 0, 1128, 1129, 3, 59, 22, 0, 1129, 1130, 1, 0, 0, 0, 1130, 1131, 6, 123, 10, 0, 1131, 262, 1, 0, 0, 0, 1132, 1133, 3, 61, 23, 0, 1133, 1134, 1, 0, 0, 0, 1134, 1135, 6, 124, 15, 0, 1135, 1136, 6, 124, 11, 0, 1136, 264, 1, 0, 0, 0, 1137, 1138, 3, 165, 75, 0, 1138, 1139, 1, 0, 0, 0, 1139, 1140, 6, 125, 13, 0, 1140, 1141, 6, 125, 26, 0, 1141, 266, 1, 0, 0, 0, 1142, 1143, 7, 7, 0, 0, 1143, 1144, 7, 9, 0, 
0, 1144, 1145, 1, 0, 0, 0, 1145, 1146, 6, 126, 27, 0, 1146, 268, 1, 0, 0, 0, 1147, 1148, 7, 19, 0, 0, 1148, 1149, 7, 1, 0, 0, 1149, 1150, 7, 5, 0, 0, 1150, 1151, 7, 10, 0, 0, 1151, 1152, 1, 0, 0, 0, 1152, 1153, 6, 127, 27, 0, 1153, 270, 1, 0, 0, 0, 1154, 1155, 8, 34, 0, 0, 1155, 272, 1, 0, 0, 0, 1156, 1158, 3, 271, 128, 0, 1157, 1156, 1, 0, 0, 0, 1158, 1159, 1, 0, 0, 0, 1159, 1157, 1, 0, 0, 0, 1159, 1160, 1, 0, 0, 0, 1160, 1161, 1, 0, 0, 0, 1161, 1162, 3, 337, 161, 0, 1162, 1164, 1, 0, 0, 0, 1163, 1157, 1, 0, 0, 0, 1163, 1164, 1, 0, 0, 0, 1164, 1166, 1, 0, 0, 0, 1165, 1167, 3, 271, 128, 0, 1166, 1165, 1, 0, 0, 0, 1167, 1168, 1, 0, 0, 0, 1168, 1166, 1, 0, 0, 0, 1168, 1169, 1, 0, 0, 0, 1169, 274, 1, 0, 0, 0, 1170, 1171, 3, 273, 129, 0, 1171, 1172, 1, 0, 0, 0, 1172, 1173, 6, 130, 28, 0, 1173, 276, 1, 0, 0, 0, 1174, 1175, 3, 55, 20, 0, 1175, 1176, 1, 0, 0, 0, 1176, 1177, 6, 131, 10, 0, 1177, 278, 1, 0, 0, 0, 1178, 1179, 3, 57, 21, 0, 1179, 1180, 1, 0, 0, 0, 1180, 1181, 6, 132, 10, 0, 1181, 280, 1, 0, 0, 0, 1182, 1183, 3, 59, 22, 0, 1183, 1184, 1, 0, 0, 0, 1184, 1185, 6, 133, 10, 0, 1185, 282, 1, 0, 0, 0, 1186, 1187, 3, 61, 23, 0, 1187, 1188, 1, 0, 0, 0, 1188, 1189, 6, 134, 15, 0, 1189, 1190, 6, 134, 11, 0, 1190, 1191, 6, 134, 11, 0, 1191, 284, 1, 0, 0, 0, 1192, 1193, 3, 95, 40, 0, 1193, 1194, 1, 0, 0, 0, 1194, 1195, 6, 135, 19, 0, 1195, 286, 1, 0, 0, 0, 1196, 1197, 3, 99, 42, 0, 1197, 1198, 1, 0, 0, 0, 1198, 1199, 6, 136, 18, 0, 1199, 288, 1, 0, 0, 0, 1200, 1201, 3, 103, 44, 0, 1201, 1202, 1, 0, 0, 0, 1202, 1203, 6, 137, 22, 0, 1203, 290, 1, 0, 0, 0, 1204, 1205, 3, 269, 127, 0, 1205, 1206, 1, 0, 0, 0, 1206, 1207, 6, 138, 29, 0, 1207, 292, 1, 0, 0, 0, 1208, 1209, 3, 233, 109, 0, 1209, 1210, 1, 0, 0, 0, 1210, 1211, 6, 139, 25, 0, 1211, 294, 1, 0, 0, 0, 1212, 1213, 3, 173, 79, 0, 1213, 1214, 1, 0, 0, 0, 1214, 1215, 6, 140, 30, 0, 1215, 296, 1, 0, 0, 0, 1216, 1217, 3, 127, 56, 0, 1217, 1218, 1, 0, 0, 0, 1218, 1219, 6, 141, 23, 0, 1219, 298, 1, 0, 0, 0, 1220, 1221, 3, 163, 
74, 0, 1221, 1222, 1, 0, 0, 0, 1222, 1223, 6, 142, 24, 0, 1223, 300, 1, 0, 0, 0, 1224, 1225, 3, 55, 20, 0, 1225, 1226, 1, 0, 0, 0, 1226, 1227, 6, 143, 10, 0, 1227, 302, 1, 0, 0, 0, 1228, 1229, 3, 57, 21, 0, 1229, 1230, 1, 0, 0, 0, 1230, 1231, 6, 144, 10, 0, 1231, 304, 1, 0, 0, 0, 1232, 1233, 3, 59, 22, 0, 1233, 1234, 1, 0, 0, 0, 1234, 1235, 6, 145, 10, 0, 1235, 306, 1, 0, 0, 0, 1236, 1237, 3, 61, 23, 0, 1237, 1238, 1, 0, 0, 0, 1238, 1239, 6, 146, 15, 0, 1239, 1240, 6, 146, 11, 0, 1240, 308, 1, 0, 0, 0, 1241, 1242, 3, 103, 44, 0, 1242, 1243, 1, 0, 0, 0, 1243, 1244, 6, 147, 22, 0, 1244, 310, 1, 0, 0, 0, 1245, 1246, 3, 127, 56, 0, 1246, 1247, 1, 0, 0, 0, 1247, 1248, 6, 148, 23, 0, 1248, 312, 1, 0, 0, 0, 1249, 1250, 3, 163, 74, 0, 1250, 1251, 1, 0, 0, 0, 1251, 1252, 6, 149, 24, 0, 1252, 314, 1, 0, 0, 0, 1253, 1254, 3, 173, 79, 0, 1254, 1255, 1, 0, 0, 0, 1255, 1256, 6, 150, 30, 0, 1256, 316, 1, 0, 0, 0, 1257, 1258, 3, 169, 77, 0, 1258, 1259, 1, 0, 0, 0, 1259, 1260, 6, 151, 31, 0, 1260, 318, 1, 0, 0, 0, 1261, 1262, 3, 55, 20, 0, 1262, 1263, 1, 0, 0, 0, 1263, 1264, 6, 152, 10, 0, 1264, 320, 1, 0, 0, 0, 1265, 1266, 3, 57, 21, 0, 1266, 1267, 1, 0, 0, 0, 1267, 1268, 6, 153, 10, 0, 1268, 322, 1, 0, 0, 0, 1269, 1270, 3, 59, 22, 0, 1270, 1271, 1, 0, 0, 0, 1271, 1272, 6, 154, 10, 0, 1272, 324, 1, 0, 0, 0, 1273, 1274, 3, 61, 23, 0, 1274, 1275, 1, 0, 0, 0, 1275, 1276, 6, 155, 15, 0, 1276, 1277, 6, 155, 11, 0, 1277, 326, 1, 0, 0, 0, 1278, 1279, 7, 1, 0, 0, 1279, 1280, 7, 9, 0, 0, 1280, 1281, 7, 15, 0, 0, 1281, 1282, 7, 7, 0, 0, 1282, 328, 1, 0, 0, 0, 1283, 1284, 3, 55, 20, 0, 1284, 1285, 1, 0, 0, 0, 1285, 1286, 6, 157, 10, 0, 1286, 330, 1, 0, 0, 0, 1287, 1288, 3, 57, 21, 0, 1288, 1289, 1, 0, 0, 0, 1289, 1290, 6, 158, 10, 0, 1290, 332, 1, 0, 0, 0, 1291, 1292, 3, 59, 22, 0, 1292, 1293, 1, 0, 0, 0, 1293, 1294, 6, 159, 10, 0, 1294, 334, 1, 0, 0, 0, 1295, 1296, 3, 167, 76, 0, 1296, 1297, 1, 0, 0, 0, 1297, 1298, 6, 160, 16, 0, 1298, 1299, 6, 160, 11, 0, 1299, 336, 1, 0, 0, 0, 1300, 1301, 
5, 58, 0, 0, 1301, 338, 1, 0, 0, 0, 1302, 1308, 3, 73, 29, 0, 1303, 1308, 3, 63, 24, 0, 1304, 1308, 3, 103, 44, 0, 1305, 1308, 3, 65, 25, 0, 1306, 1308, 3, 79, 32, 0, 1307, 1302, 1, 0, 0, 0, 1307, 1303, 1, 0, 0, 0, 1307, 1304, 1, 0, 0, 0, 1307, 1305, 1, 0, 0, 0, 1307, 1306, 1, 0, 0, 0, 1308, 1309, 1, 0, 0, 0, 1309, 1307, 1, 0, 0, 0, 1309, 1310, 1, 0, 0, 0, 1310, 340, 1, 0, 0, 0, 1311, 1312, 3, 55, 20, 0, 1312, 1313, 1, 0, 0, 0, 1313, 1314, 6, 163, 10, 0, 1314, 342, 1, 0, 0, 0, 1315, 1316, 3, 57, 21, 0, 1316, 1317, 1, 0, 0, 0, 1317, 1318, 6, 164, 10, 0, 1318, 344, 1, 0, 0, 0, 1319, 1320, 3, 59, 22, 0, 1320, 1321, 1, 0, 0, 0, 1321, 1322, 6, 165, 10, 0, 1322, 346, 1, 0, 0, 0, 1323, 1324, 3, 61, 23, 0, 1324, 1325, 1, 0, 0, 0, 1325, 1326, 6, 166, 15, 0, 1326, 1327, 6, 166, 11, 0, 1327, 348, 1, 0, 0, 0, 1328, 1329, 3, 337, 161, 0, 1329, 1330, 1, 0, 0, 0, 1330, 1331, 6, 167, 17, 0, 1331, 350, 1, 0, 0, 0, 1332, 1333, 3, 99, 42, 0, 1333, 1334, 1, 0, 0, 0, 1334, 1335, 6, 168, 18, 0, 1335, 352, 1, 0, 0, 0, 1336, 1337, 3, 103, 44, 0, 1337, 1338, 1, 0, 0, 0, 1338, 1339, 6, 169, 22, 0, 1339, 354, 1, 0, 0, 0, 1340, 1341, 3, 267, 126, 0, 1341, 1342, 1, 0, 0, 0, 1342, 1343, 6, 170, 32, 0, 1343, 1344, 6, 170, 33, 0, 1344, 356, 1, 0, 0, 0, 1345, 1346, 3, 207, 96, 0, 1346, 1347, 1, 0, 0, 0, 1347, 1348, 6, 171, 20, 0, 1348, 358, 1, 0, 0, 0, 1349, 1350, 3, 83, 34, 0, 1350, 1351, 1, 0, 0, 0, 1351, 1352, 6, 172, 21, 0, 1352, 360, 1, 0, 0, 0, 1353, 1354, 3, 55, 20, 0, 1354, 1355, 1, 0, 0, 0, 1355, 1356, 6, 173, 10, 0, 1356, 362, 1, 0, 0, 0, 1357, 1358, 3, 57, 21, 0, 1358, 1359, 1, 0, 0, 0, 1359, 1360, 6, 174, 10, 0, 1360, 364, 1, 0, 0, 0, 1361, 1362, 3, 59, 22, 0, 1362, 1363, 1, 0, 0, 0, 1363, 1364, 6, 175, 10, 0, 1364, 366, 1, 0, 0, 0, 1365, 1366, 3, 61, 23, 0, 1366, 1367, 1, 0, 0, 0, 1367, 1368, 6, 176, 15, 0, 1368, 1369, 6, 176, 11, 0, 1369, 1370, 6, 176, 11, 0, 1370, 368, 1, 0, 0, 0, 1371, 1372, 3, 99, 42, 0, 1372, 1373, 1, 0, 0, 0, 1373, 1374, 6, 177, 18, 0, 1374, 370, 1, 0, 0, 0, 
1375, 1376, 3, 103, 44, 0, 1376, 1377, 1, 0, 0, 0, 1377, 1378, 6, 178, 22, 0, 1378, 372, 1, 0, 0, 0, 1379, 1380, 3, 233, 109, 0, 1380, 1381, 1, 0, 0, 0, 1381, 1382, 6, 179, 25, 0, 1382, 374, 1, 0, 0, 0, 1383, 1384, 3, 55, 20, 0, 1384, 1385, 1, 0, 0, 0, 1385, 1386, 6, 180, 10, 0, 1386, 376, 1, 0, 0, 0, 1387, 1388, 3, 57, 21, 0, 1388, 1389, 1, 0, 0, 0, 1389, 1390, 6, 181, 10, 0, 1390, 378, 1, 0, 0, 0, 1391, 1392, 3, 59, 22, 0, 1392, 1393, 1, 0, 0, 0, 1393, 1394, 6, 182, 10, 0, 1394, 380, 1, 0, 0, 0, 1395, 1396, 3, 61, 23, 0, 1396, 1397, 1, 0, 0, 0, 1397, 1398, 6, 183, 15, 0, 1398, 1399, 6, 183, 11, 0, 1399, 382, 1, 0, 0, 0, 1400, 1401, 3, 207, 96, 0, 1401, 1402, 1, 0, 0, 0, 1402, 1403, 6, 184, 20, 0, 1403, 1404, 6, 184, 11, 0, 1404, 1405, 6, 184, 34, 0, 1405, 384, 1, 0, 0, 0, 1406, 1407, 3, 83, 34, 0, 1407, 1408, 1, 0, 0, 0, 1408, 1409, 6, 185, 21, 0, 1409, 1410, 6, 185, 11, 0, 1410, 1411, 6, 185, 34, 0, 1411, 386, 1, 0, 0, 0, 1412, 1413, 3, 55, 20, 0, 1413, 1414, 1, 0, 0, 0, 1414, 1415, 6, 186, 10, 0, 1415, 388, 1, 0, 0, 0, 1416, 1417, 3, 57, 21, 0, 1417, 1418, 1, 0, 0, 0, 1418, 1419, 6, 187, 10, 0, 1419, 390, 1, 0, 0, 0, 1420, 1421, 3, 59, 22, 0, 1421, 1422, 1, 0, 0, 0, 1422, 1423, 6, 188, 10, 0, 1423, 392, 1, 0, 0, 0, 1424, 1425, 3, 337, 161, 0, 1425, 1426, 1, 0, 0, 0, 1426, 1427, 6, 189, 17, 0, 1427, 1428, 6, 189, 11, 0, 1428, 1429, 6, 189, 9, 0, 1429, 394, 1, 0, 0, 0, 1430, 1431, 3, 99, 42, 0, 1431, 1432, 1, 0, 0, 0, 1432, 1433, 6, 190, 18, 0, 1433, 1434, 6, 190, 11, 0, 1434, 1435, 6, 190, 9, 0, 1435, 396, 1, 0, 0, 0, 1436, 1437, 3, 55, 20, 0, 1437, 1438, 1, 0, 0, 0, 1438, 1439, 6, 191, 10, 0, 1439, 398, 1, 0, 0, 0, 1440, 1441, 3, 57, 21, 0, 1441, 1442, 1, 0, 0, 0, 1442, 1443, 6, 192, 10, 0, 1443, 400, 1, 0, 0, 0, 1444, 1445, 3, 59, 22, 0, 1445, 1446, 1, 0, 0, 0, 1446, 1447, 6, 193, 10, 0, 1447, 402, 1, 0, 0, 0, 1448, 1449, 3, 173, 79, 0, 1449, 1450, 1, 0, 0, 0, 1450, 1451, 6, 194, 11, 0, 1451, 1452, 6, 194, 0, 0, 1452, 1453, 6, 194, 30, 0, 1453, 404, 1, 0, 0, 
0, 1454, 1455, 3, 169, 77, 0, 1455, 1456, 1, 0, 0, 0, 1456, 1457, 6, 195, 11, 0, 1457, 1458, 6, 195, 0, 0, 1458, 1459, 6, 195, 31, 0, 1459, 406, 1, 0, 0, 0, 1460, 1461, 3, 89, 37, 0, 1461, 1462, 1, 0, 0, 0, 1462, 1463, 6, 196, 11, 0, 1463, 1464, 6, 196, 0, 0, 1464, 1465, 6, 196, 35, 0, 1465, 408, 1, 0, 0, 0, 1466, 1467, 3, 61, 23, 0, 1467, 1468, 1, 0, 0, 0, 1468, 1469, 6, 197, 15, 0, 1469, 1470, 6, 197, 11, 0, 1470, 410, 1, 0, 0, 0, 65, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 579, 589, 593, 596, 605, 607, 618, 637, 642, 651, 658, 663, 665, 676, 684, 687, 689, 694, 699, 705, 712, 717, 723, 726, 734, 738, 870, 875, 882, 884, 900, 905, 910, 912, 918, 995, 1000, 1047, 1051, 1056, 1061, 1066, 1068, 1072, 1074, 1159, 1163, 1168, 1307, 1309, 36, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 16, 0, 7, 65, 0, 5, 0, 0, 7, 24, 0, 7, 66, 0, 7, 104, 0, 7, 33, 0, 7, 31, 0, 7, 76, 0, 7, 25, 0, 7, 35, 0, 7, 47, 0, 7, 64, 0, 7, 80, 0, 5, 10, 0, 5, 7, 0, 7, 90, 0, 7, 89, 0, 7, 68, 0, 7, 67, 0, 7, 88, 0, 5, 12, 0, 5, 14, 0, 7, 28, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index 3bef23f4d2751..305126ddfae2d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -227,8 +227,6 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return DEV_LOOKUP_sempred((RuleContext)_localctx, predIndex); case 18: return DEV_METRICS_sempred((RuleContext)_localctx, predIndex); - case 73: - return NESTED_WHERE_sempred((RuleContext)_localctx, predIndex); } return true; } @@ -253,16 +251,9 @@ private boolean DEV_METRICS_sempred(RuleContext _localctx, int predIndex) { } return true; } - 
private boolean NESTED_WHERE_sempred(RuleContext _localctx, int predIndex) { - switch (predIndex) { - case 3: - return this.isDevVersion(); - } - return true; - } public static final String _serializedATN = - "\u0004\u0000x\u05c0\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ + "\u0004\u0000x\u05bf\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ @@ -378,846 +369,845 @@ private boolean NESTED_WHERE_sempred(RuleContext _localctx, int predIndex) { "<\u0001<\u0001=\u0001=\u0001=\u0001>\u0001>\u0001>\u0001?\u0001?\u0001"+ "@\u0001@\u0001@\u0001A\u0001A\u0001B\u0001B\u0001B\u0001C\u0001C\u0001"+ "D\u0001D\u0001E\u0001E\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001"+ - "H\u0001H\u0001H\u0001H\u0001I\u0001I\u0001I\u0001I\u0001I\u0001J\u0001"+ - "J\u0001J\u0003J\u0368\bJ\u0001J\u0005J\u036b\bJ\nJ\fJ\u036e\tJ\u0001J"+ - "\u0001J\u0004J\u0372\bJ\u000bJ\fJ\u0373\u0003J\u0376\bJ\u0001K\u0001K"+ - "\u0001K\u0001K\u0001K\u0001L\u0001L\u0001L\u0001L\u0001L\u0001M\u0001"+ - "M\u0005M\u0384\bM\nM\fM\u0387\tM\u0001M\u0001M\u0003M\u038b\bM\u0001M"+ - "\u0004M\u038e\bM\u000bM\fM\u038f\u0003M\u0392\bM\u0001N\u0001N\u0004N"+ - "\u0396\bN\u000bN\fN\u0397\u0001N\u0001N\u0001O\u0001O\u0001P\u0001P\u0001"+ - "P\u0001P\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001"+ - "S\u0001S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001T\u0001"+ - "U\u0001U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001"+ - "W\u0001W\u0001X\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001"+ - "Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001"+ - "\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001^\u0001"+ - "^\u0001^\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001_\u0003_\u03e5"+ - 
"\b_\u0001`\u0004`\u03e8\b`\u000b`\f`\u03e9\u0001a\u0001a\u0001a\u0001"+ - "a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001c\u0001c\u0001c\u0001d\u0001"+ - "d\u0001d\u0001d\u0001e\u0001e\u0001e\u0001e\u0001f\u0001f\u0001f\u0001"+ - "f\u0001f\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001h\u0001"+ - "i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001"+ - "k\u0001k\u0003k\u0419\bk\u0001l\u0001l\u0003l\u041d\bl\u0001l\u0005l\u0420"+ - "\bl\nl\fl\u0423\tl\u0001l\u0001l\u0003l\u0427\bl\u0001l\u0004l\u042a\b"+ - "l\u000bl\fl\u042b\u0003l\u042e\bl\u0001m\u0001m\u0004m\u0432\bm\u000b"+ - "m\fm\u0433\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001"+ - "p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001q\u0001r\u0001"+ - "r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001"+ - "t\u0001u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001v\u0001w\u0001"+ - "w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001y\u0001y\u0001"+ - "z\u0001z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001"+ - "|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001"+ - "~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001"+ - "\u007f\u0001\u007f\u0001\u007f\u0001\u0080\u0001\u0080\u0001\u0081\u0004"+ - "\u0081\u0487\b\u0081\u000b\u0081\f\u0081\u0488\u0001\u0081\u0001\u0081"+ - "\u0003\u0081\u048d\b\u0081\u0001\u0081\u0004\u0081\u0490\b\u0081\u000b"+ - "\u0081\f\u0081\u0491\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001"+ - "\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001"+ - "\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001"+ - "\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001"+ - "\u0087\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001"+ - "\u0088\u0001\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001"+ - "\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001"+ - 
"\u008b\u0001\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001"+ - "\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001"+ - "\u008e\u0001\u008e\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001"+ - "\u0090\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001"+ - "\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001"+ - "\u0092\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001"+ - "\u0094\u0001\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001"+ - "\u0095\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001"+ - "\u0097\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098\u0001"+ - "\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u009a\u0001"+ - "\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b\u0001\u009b\u0001"+ - "\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001"+ - "\u009c\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009e\u0001"+ - "\u009e\u0001\u009e\u0001\u009e\u0001\u009f\u0001\u009f\u0001\u009f\u0001"+ - "\u009f\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001"+ - "\u00a1\u0001\u00a1\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001"+ - "\u00a2\u0004\u00a2\u051d\b\u00a2\u000b\u00a2\f\u00a2\u051e\u0001\u00a3"+ - "\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a4\u0001\u00a4\u0001\u00a4"+ - "\u0001\u00a4\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6"+ - "\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7"+ - "\u0001\u00a7\u0001\u00a7\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a8"+ - "\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00aa\u0001\u00aa"+ - "\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00ab\u0001\u00ab\u0001\u00ab"+ - "\u0001\u00ab\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ad"+ - "\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ae\u0001\u00ae\u0001\u00ae"+ - 
"\u0001\u00ae\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00b0"+ - "\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b1"+ - "\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b2\u0001\u00b2\u0001\u00b2"+ - "\u0001\u00b2\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b4"+ - "\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b5\u0001\u00b5\u0001\u00b5"+ - "\u0001\u00b5\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b7"+ - "\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b8\u0001\u00b8"+ - "\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b9\u0001\u00b9"+ - "\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba"+ - "\u0001\u00ba\u0001\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb"+ - "\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd"+ - "\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be"+ - "\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00bf\u0001\u00bf"+ - "\u0001\u00bf\u0001\u00bf\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0"+ - "\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c2\u0001\u00c2"+ - "\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c3\u0001\u00c3"+ - "\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c4\u0001\u00c4"+ - "\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001\u00c5\u0001\u00c5"+ - "\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0002\u025f\u02a4\u0000\u00c6\u000f"+ - "\u0001\u0011\u0002\u0013\u0003\u0015\u0004\u0017\u0005\u0019\u0006\u001b"+ - "\u0007\u001d\b\u001f\t!\n#\u000b%\f\'\r)\u000e+\u000f-\u0010/\u00111\u0012"+ - "3\u00135\u00147\u00159\u0016;\u0017=\u0018?\u0000A\u0000C\u0000E\u0000"+ - "G\u0000I\u0000K\u0000M\u0000O\u0000Q\u0000S\u0019U\u001aW\u001bY\u001c"+ - "[\u001d]\u001e_\u001fa c!e\"g#i$k%m&o\'q(s)u*w+y,{-}.\u007f/\u00810\u0083"+ - "1\u00852\u00873\u00894\u008b5\u008d6\u008f7\u00918\u00939\u0095:\u0097"+ - 
";\u0099<\u009b=\u009d>\u009f?\u00a1\u0000\u00a3@\u00a5A\u00a7B\u00a9C"+ - "\u00ab\u0000\u00adD\u00afE\u00b1F\u00b3G\u00b5\u0000\u00b7\u0000\u00b9"+ - "H\u00bbI\u00bdJ\u00bf\u0000\u00c1\u0000\u00c3\u0000\u00c5\u0000\u00c7"+ - "\u0000\u00c9\u0000\u00cbK\u00cd\u0000\u00cfL\u00d1\u0000\u00d3\u0000\u00d5"+ - "M\u00d7N\u00d9O\u00db\u0000\u00dd\u0000\u00df\u0000\u00e1\u0000\u00e3"+ - "\u0000\u00e5\u0000\u00e7\u0000\u00e9P\u00ebQ\u00edR\u00efS\u00f1\u0000"+ - "\u00f3\u0000\u00f5\u0000\u00f7\u0000\u00f9\u0000\u00fb\u0000\u00fdT\u00ff"+ - "\u0000\u0101U\u0103V\u0105W\u0107\u0000\u0109\u0000\u010bX\u010dY\u010f"+ - "\u0000\u0111Z\u0113\u0000\u0115[\u0117\\\u0119]\u011b\u0000\u011d\u0000"+ - "\u011f\u0000\u0121\u0000\u0123\u0000\u0125\u0000\u0127\u0000\u0129\u0000"+ - "\u012b\u0000\u012d^\u012f_\u0131`\u0133\u0000\u0135\u0000\u0137\u0000"+ - "\u0139\u0000\u013b\u0000\u013d\u0000\u013fa\u0141b\u0143c\u0145\u0000"+ - "\u0147d\u0149e\u014bf\u014dg\u014f\u0000\u0151h\u0153i\u0155j\u0157k\u0159"+ - "l\u015b\u0000\u015d\u0000\u015f\u0000\u0161\u0000\u0163\u0000\u0165\u0000"+ - "\u0167\u0000\u0169m\u016bn\u016do\u016f\u0000\u0171\u0000\u0173\u0000"+ - "\u0175\u0000\u0177p\u0179q\u017br\u017d\u0000\u017f\u0000\u0181\u0000"+ - "\u0183s\u0185t\u0187u\u0189\u0000\u018b\u0000\u018dv\u018fw\u0191x\u0193"+ - "\u0000\u0195\u0000\u0197\u0000\u0199\u0000\u000f\u0000\u0001\u0002\u0003"+ - "\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e#\u0002\u0000DDdd\u0002"+ - "\u0000IIii\u0002\u0000SSss\u0002\u0000EEee\u0002\u0000CCcc\u0002\u0000"+ - "TTtt\u0002\u0000RRrr\u0002\u0000OOoo\u0002\u0000PPpp\u0002\u0000NNnn\u0002"+ - "\u0000HHhh\u0002\u0000VVvv\u0002\u0000AAaa\u0002\u0000LLll\u0002\u0000"+ - "XXxx\u0002\u0000FFff\u0002\u0000MMmm\u0002\u0000GGgg\u0002\u0000KKkk\u0002"+ - "\u0000WWww\u0002\u0000UUuu\u0006\u0000\t\n\r\r //[[]]\u0002\u0000\n\n"+ - "\r\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002\u0000AZaz\b\u0000\"\"N"+ - "NRRTT\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002\u0000++--\u0001\u0000"+ - 
"``\u0002\u0000BBbb\u0002\u0000YYyy\u000b\u0000\t\n\r\r \"\",,//::==["+ - "[]]||\u0002\u0000**//\u000b\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u05dc\u0000"+ - "\u000f\u0001\u0000\u0000\u0000\u0000\u0011\u0001\u0000\u0000\u0000\u0000"+ - "\u0013\u0001\u0000\u0000\u0000\u0000\u0015\u0001\u0000\u0000\u0000\u0000"+ - "\u0017\u0001\u0000\u0000\u0000\u0000\u0019\u0001\u0000\u0000\u0000\u0000"+ - "\u001b\u0001\u0000\u0000\u0000\u0000\u001d\u0001\u0000\u0000\u0000\u0000"+ - "\u001f\u0001\u0000\u0000\u0000\u0000!\u0001\u0000\u0000\u0000\u0000#\u0001"+ - "\u0000\u0000\u0000\u0000%\u0001\u0000\u0000\u0000\u0000\'\u0001\u0000"+ - "\u0000\u0000\u0000)\u0001\u0000\u0000\u0000\u0000+\u0001\u0000\u0000\u0000"+ - "\u0000-\u0001\u0000\u0000\u0000\u0000/\u0001\u0000\u0000\u0000\u00001"+ - "\u0001\u0000\u0000\u0000\u00003\u0001\u0000\u0000\u0000\u00005\u0001\u0000"+ - "\u0000\u0000\u00007\u0001\u0000\u0000\u0000\u00009\u0001\u0000\u0000\u0000"+ - "\u0000;\u0001\u0000\u0000\u0000\u0001=\u0001\u0000\u0000\u0000\u0001S"+ - "\u0001\u0000\u0000\u0000\u0001U\u0001\u0000\u0000\u0000\u0001W\u0001\u0000"+ - "\u0000\u0000\u0001Y\u0001\u0000\u0000\u0000\u0001[\u0001\u0000\u0000\u0000"+ - "\u0001]\u0001\u0000\u0000\u0000\u0001_\u0001\u0000\u0000\u0000\u0001a"+ - "\u0001\u0000\u0000\u0000\u0001c\u0001\u0000\u0000\u0000\u0001e\u0001\u0000"+ - "\u0000\u0000\u0001g\u0001\u0000\u0000\u0000\u0001i\u0001\u0000\u0000\u0000"+ - "\u0001k\u0001\u0000\u0000\u0000\u0001m\u0001\u0000\u0000\u0000\u0001o"+ - "\u0001\u0000\u0000\u0000\u0001q\u0001\u0000\u0000\u0000\u0001s\u0001\u0000"+ - "\u0000\u0000\u0001u\u0001\u0000\u0000\u0000\u0001w\u0001\u0000\u0000\u0000"+ - "\u0001y\u0001\u0000\u0000\u0000\u0001{\u0001\u0000\u0000\u0000\u0001}"+ - "\u0001\u0000\u0000\u0000\u0001\u007f\u0001\u0000\u0000\u0000\u0001\u0081"+ - "\u0001\u0000\u0000\u0000\u0001\u0083\u0001\u0000\u0000\u0000\u0001\u0085"+ - "\u0001\u0000\u0000\u0000\u0001\u0087\u0001\u0000\u0000\u0000\u0001\u0089"+ - 
"\u0001\u0000\u0000\u0000\u0001\u008b\u0001\u0000\u0000\u0000\u0001\u008d"+ - "\u0001\u0000\u0000\u0000\u0001\u008f\u0001\u0000\u0000\u0000\u0001\u0091"+ - "\u0001\u0000\u0000\u0000\u0001\u0093\u0001\u0000\u0000\u0000\u0001\u0095"+ - "\u0001\u0000\u0000\u0000\u0001\u0097\u0001\u0000\u0000\u0000\u0001\u0099"+ - "\u0001\u0000\u0000\u0000\u0001\u009b\u0001\u0000\u0000\u0000\u0001\u009d"+ - "\u0001\u0000\u0000\u0000\u0001\u009f\u0001\u0000\u0000\u0000\u0001\u00a1"+ - "\u0001\u0000\u0000\u0000\u0001\u00a3\u0001\u0000\u0000\u0000\u0001\u00a5"+ - "\u0001\u0000\u0000\u0000\u0001\u00a7\u0001\u0000\u0000\u0000\u0001\u00a9"+ - "\u0001\u0000\u0000\u0000\u0001\u00ad\u0001\u0000\u0000\u0000\u0001\u00af"+ - "\u0001\u0000\u0000\u0000\u0001\u00b1\u0001\u0000\u0000\u0000\u0001\u00b3"+ - "\u0001\u0000\u0000\u0000\u0002\u00b5\u0001\u0000\u0000\u0000\u0002\u00b7"+ - "\u0001\u0000\u0000\u0000\u0002\u00b9\u0001\u0000\u0000\u0000\u0002\u00bb"+ - "\u0001\u0000\u0000\u0000\u0002\u00bd\u0001\u0000\u0000\u0000\u0003\u00bf"+ - "\u0001\u0000\u0000\u0000\u0003\u00c1\u0001\u0000\u0000\u0000\u0003\u00c3"+ - "\u0001\u0000\u0000\u0000\u0003\u00c5\u0001\u0000\u0000\u0000\u0003\u00c7"+ - "\u0001\u0000\u0000\u0000\u0003\u00c9\u0001\u0000\u0000\u0000\u0003\u00cb"+ - "\u0001\u0000\u0000\u0000\u0003\u00cf\u0001\u0000\u0000\u0000\u0003\u00d1"+ - "\u0001\u0000\u0000\u0000\u0003\u00d3\u0001\u0000\u0000\u0000\u0003\u00d5"+ - "\u0001\u0000\u0000\u0000\u0003\u00d7\u0001\u0000\u0000\u0000\u0003\u00d9"+ - "\u0001\u0000\u0000\u0000\u0004\u00db\u0001\u0000\u0000\u0000\u0004\u00dd"+ - "\u0001\u0000\u0000\u0000\u0004\u00df\u0001\u0000\u0000\u0000\u0004\u00e1"+ - "\u0001\u0000\u0000\u0000\u0004\u00e3\u0001\u0000\u0000\u0000\u0004\u00e9"+ - "\u0001\u0000\u0000\u0000\u0004\u00eb\u0001\u0000\u0000\u0000\u0004\u00ed"+ - "\u0001\u0000\u0000\u0000\u0004\u00ef\u0001\u0000\u0000\u0000\u0005\u00f1"+ - "\u0001\u0000\u0000\u0000\u0005\u00f3\u0001\u0000\u0000\u0000\u0005\u00f5"+ - 
"\u0001\u0000\u0000\u0000\u0005\u00f7\u0001\u0000\u0000\u0000\u0005\u00f9"+ - "\u0001\u0000\u0000\u0000\u0005\u00fb\u0001\u0000\u0000\u0000\u0005\u00fd"+ - "\u0001\u0000\u0000\u0000\u0005\u00ff\u0001\u0000\u0000\u0000\u0005\u0101"+ - "\u0001\u0000\u0000\u0000\u0005\u0103\u0001\u0000\u0000\u0000\u0005\u0105"+ - "\u0001\u0000\u0000\u0000\u0006\u0107\u0001\u0000\u0000\u0000\u0006\u0109"+ - "\u0001\u0000\u0000\u0000\u0006\u010b\u0001\u0000\u0000\u0000\u0006\u010d"+ - "\u0001\u0000\u0000\u0000\u0006\u0111\u0001\u0000\u0000\u0000\u0006\u0113"+ - "\u0001\u0000\u0000\u0000\u0006\u0115\u0001\u0000\u0000\u0000\u0006\u0117"+ - "\u0001\u0000\u0000\u0000\u0006\u0119\u0001\u0000\u0000\u0000\u0007\u011b"+ - "\u0001\u0000\u0000\u0000\u0007\u011d\u0001\u0000\u0000\u0000\u0007\u011f"+ - "\u0001\u0000\u0000\u0000\u0007\u0121\u0001\u0000\u0000\u0000\u0007\u0123"+ - "\u0001\u0000\u0000\u0000\u0007\u0125\u0001\u0000\u0000\u0000\u0007\u0127"+ - "\u0001\u0000\u0000\u0000\u0007\u0129\u0001\u0000\u0000\u0000\u0007\u012b"+ - "\u0001\u0000\u0000\u0000\u0007\u012d\u0001\u0000\u0000\u0000\u0007\u012f"+ - "\u0001\u0000\u0000\u0000\u0007\u0131\u0001\u0000\u0000\u0000\b\u0133\u0001"+ - "\u0000\u0000\u0000\b\u0135\u0001\u0000\u0000\u0000\b\u0137\u0001\u0000"+ - "\u0000\u0000\b\u0139\u0001\u0000\u0000\u0000\b\u013b\u0001\u0000\u0000"+ - "\u0000\b\u013d\u0001\u0000\u0000\u0000\b\u013f\u0001\u0000\u0000\u0000"+ - "\b\u0141\u0001\u0000\u0000\u0000\b\u0143\u0001\u0000\u0000\u0000\t\u0145"+ - "\u0001\u0000\u0000\u0000\t\u0147\u0001\u0000\u0000\u0000\t\u0149\u0001"+ - "\u0000\u0000\u0000\t\u014b\u0001\u0000\u0000\u0000\t\u014d\u0001\u0000"+ - "\u0000\u0000\n\u014f\u0001\u0000\u0000\u0000\n\u0151\u0001\u0000\u0000"+ - "\u0000\n\u0153\u0001\u0000\u0000\u0000\n\u0155\u0001\u0000\u0000\u0000"+ - "\n\u0157\u0001\u0000\u0000\u0000\n\u0159\u0001\u0000\u0000\u0000\u000b"+ - "\u015b\u0001\u0000\u0000\u0000\u000b\u015d\u0001\u0000\u0000\u0000\u000b"+ - 
"\u015f\u0001\u0000\u0000\u0000\u000b\u0161\u0001\u0000\u0000\u0000\u000b"+ - "\u0163\u0001\u0000\u0000\u0000\u000b\u0165\u0001\u0000\u0000\u0000\u000b"+ - "\u0167\u0001\u0000\u0000\u0000\u000b\u0169\u0001\u0000\u0000\u0000\u000b"+ - "\u016b\u0001\u0000\u0000\u0000\u000b\u016d\u0001\u0000\u0000\u0000\f\u016f"+ - "\u0001\u0000\u0000\u0000\f\u0171\u0001\u0000\u0000\u0000\f\u0173\u0001"+ - "\u0000\u0000\u0000\f\u0175\u0001\u0000\u0000\u0000\f\u0177\u0001\u0000"+ - "\u0000\u0000\f\u0179\u0001\u0000\u0000\u0000\f\u017b\u0001\u0000\u0000"+ - "\u0000\r\u017d\u0001\u0000\u0000\u0000\r\u017f\u0001\u0000\u0000\u0000"+ - "\r\u0181\u0001\u0000\u0000\u0000\r\u0183\u0001\u0000\u0000\u0000\r\u0185"+ - "\u0001\u0000\u0000\u0000\r\u0187\u0001\u0000\u0000\u0000\u000e\u0189\u0001"+ - "\u0000\u0000\u0000\u000e\u018b\u0001\u0000\u0000\u0000\u000e\u018d\u0001"+ - "\u0000\u0000\u0000\u000e\u018f\u0001\u0000\u0000\u0000\u000e\u0191\u0001"+ - "\u0000\u0000\u0000\u000e\u0193\u0001\u0000\u0000\u0000\u000e\u0195\u0001"+ - "\u0000\u0000\u0000\u000e\u0197\u0001\u0000\u0000\u0000\u000e\u0199\u0001"+ - "\u0000\u0000\u0000\u000f\u019b\u0001\u0000\u0000\u0000\u0011\u01a5\u0001"+ - "\u0000\u0000\u0000\u0013\u01ac\u0001\u0000\u0000\u0000\u0015\u01b5\u0001"+ - "\u0000\u0000\u0000\u0017\u01bc\u0001\u0000\u0000\u0000\u0019\u01c6\u0001"+ - "\u0000\u0000\u0000\u001b\u01cd\u0001\u0000\u0000\u0000\u001d\u01d4\u0001"+ - "\u0000\u0000\u0000\u001f\u01db\u0001\u0000\u0000\u0000!\u01e3\u0001\u0000"+ - "\u0000\u0000#\u01ef\u0001\u0000\u0000\u0000%\u01f8\u0001\u0000\u0000\u0000"+ - "\'\u01fe\u0001\u0000\u0000\u0000)\u0205\u0001\u0000\u0000\u0000+\u020c"+ - "\u0001\u0000\u0000\u0000-\u0214\u0001\u0000\u0000\u0000/\u021c\u0001\u0000"+ - "\u0000\u00001\u022b\u0001\u0000\u0000\u00003\u0235\u0001\u0000\u0000\u0000"+ - "5\u0241\u0001\u0000\u0000\u00007\u0247\u0001\u0000\u0000\u00009\u0258"+ - "\u0001\u0000\u0000\u0000;\u0268\u0001\u0000\u0000\u0000=\u026e\u0001\u0000"+ - 
"\u0000\u0000?\u0272\u0001\u0000\u0000\u0000A\u0274\u0001\u0000\u0000\u0000"+ - "C\u0276\u0001\u0000\u0000\u0000E\u0279\u0001\u0000\u0000\u0000G\u027b"+ - "\u0001\u0000\u0000\u0000I\u0284\u0001\u0000\u0000\u0000K\u0286\u0001\u0000"+ - "\u0000\u0000M\u028b\u0001\u0000\u0000\u0000O\u028d\u0001\u0000\u0000\u0000"+ - "Q\u0292\u0001\u0000\u0000\u0000S\u02b1\u0001\u0000\u0000\u0000U\u02b4"+ - "\u0001\u0000\u0000\u0000W\u02e2\u0001\u0000\u0000\u0000Y\u02e4\u0001\u0000"+ - "\u0000\u0000[\u02e7\u0001\u0000\u0000\u0000]\u02eb\u0001\u0000\u0000\u0000"+ - "_\u02ef\u0001\u0000\u0000\u0000a\u02f1\u0001\u0000\u0000\u0000c\u02f4"+ - "\u0001\u0000\u0000\u0000e\u02f6\u0001\u0000\u0000\u0000g\u02fb\u0001\u0000"+ - "\u0000\u0000i\u02fd\u0001\u0000\u0000\u0000k\u0303\u0001\u0000\u0000\u0000"+ - "m\u0309\u0001\u0000\u0000\u0000o\u030c\u0001\u0000\u0000\u0000q\u030f"+ - "\u0001\u0000\u0000\u0000s\u0314\u0001\u0000\u0000\u0000u\u0319\u0001\u0000"+ - "\u0000\u0000w\u031b\u0001\u0000\u0000\u0000y\u031f\u0001\u0000\u0000\u0000"+ - "{\u0324\u0001\u0000\u0000\u0000}\u032a\u0001\u0000\u0000\u0000\u007f\u032d"+ - "\u0001\u0000\u0000\u0000\u0081\u032f\u0001\u0000\u0000\u0000\u0083\u0335"+ - "\u0001\u0000\u0000\u0000\u0085\u0337\u0001\u0000\u0000\u0000\u0087\u033c"+ - "\u0001\u0000\u0000\u0000\u0089\u033f\u0001\u0000\u0000\u0000\u008b\u0342"+ - "\u0001\u0000\u0000\u0000\u008d\u0345\u0001\u0000\u0000\u0000\u008f\u0347"+ - "\u0001\u0000\u0000\u0000\u0091\u034a\u0001\u0000\u0000\u0000\u0093\u034c"+ - "\u0001\u0000\u0000\u0000\u0095\u034f\u0001\u0000\u0000\u0000\u0097\u0351"+ - "\u0001\u0000\u0000\u0000\u0099\u0353\u0001\u0000\u0000\u0000\u009b\u0355"+ - "\u0001\u0000\u0000\u0000\u009d\u0357\u0001\u0000\u0000\u0000\u009f\u0359"+ - "\u0001\u0000\u0000\u0000\u00a1\u035f\u0001\u0000\u0000\u0000\u00a3\u0375"+ - "\u0001\u0000\u0000\u0000\u00a5\u0377\u0001\u0000\u0000\u0000\u00a7\u037c"+ - "\u0001\u0000\u0000\u0000\u00a9\u0391\u0001\u0000\u0000\u0000\u00ab\u0393"+ - 
"\u0001\u0000\u0000\u0000\u00ad\u039b\u0001\u0000\u0000\u0000\u00af\u039d"+ - "\u0001\u0000\u0000\u0000\u00b1\u03a1\u0001\u0000\u0000\u0000\u00b3\u03a5"+ - "\u0001\u0000\u0000\u0000\u00b5\u03a9\u0001\u0000\u0000\u0000\u00b7\u03ae"+ - "\u0001\u0000\u0000\u0000\u00b9\u03b3\u0001\u0000\u0000\u0000\u00bb\u03b7"+ - "\u0001\u0000\u0000\u0000\u00bd\u03bb\u0001\u0000\u0000\u0000\u00bf\u03bf"+ - "\u0001\u0000\u0000\u0000\u00c1\u03c4\u0001\u0000\u0000\u0000\u00c3\u03c8"+ - "\u0001\u0000\u0000\u0000\u00c5\u03cc\u0001\u0000\u0000\u0000\u00c7\u03d0"+ - "\u0001\u0000\u0000\u0000\u00c9\u03d4\u0001\u0000\u0000\u0000\u00cb\u03d8"+ - "\u0001\u0000\u0000\u0000\u00cd\u03e4\u0001\u0000\u0000\u0000\u00cf\u03e7"+ - "\u0001\u0000\u0000\u0000\u00d1\u03eb\u0001\u0000\u0000\u0000\u00d3\u03ef"+ - "\u0001\u0000\u0000\u0000\u00d5\u03f3\u0001\u0000\u0000\u0000\u00d7\u03f7"+ - "\u0001\u0000\u0000\u0000\u00d9\u03fb\u0001\u0000\u0000\u0000\u00db\u03ff"+ - "\u0001\u0000\u0000\u0000\u00dd\u0404\u0001\u0000\u0000\u0000\u00df\u0408"+ - "\u0001\u0000\u0000\u0000\u00e1\u040c\u0001\u0000\u0000\u0000\u00e3\u0410"+ - "\u0001\u0000\u0000\u0000\u00e5\u0418\u0001\u0000\u0000\u0000\u00e7\u042d"+ - "\u0001\u0000\u0000\u0000\u00e9\u0431\u0001\u0000\u0000\u0000\u00eb\u0435"+ - "\u0001\u0000\u0000\u0000\u00ed\u0439\u0001\u0000\u0000\u0000\u00ef\u043d"+ - "\u0001\u0000\u0000\u0000\u00f1\u0441\u0001\u0000\u0000\u0000\u00f3\u0446"+ - "\u0001\u0000\u0000\u0000\u00f5\u044a\u0001\u0000\u0000\u0000\u00f7\u044e"+ - "\u0001\u0000\u0000\u0000\u00f9\u0452\u0001\u0000\u0000\u0000\u00fb\u0456"+ - "\u0001\u0000\u0000\u0000\u00fd\u045a\u0001\u0000\u0000\u0000\u00ff\u045d"+ - "\u0001\u0000\u0000\u0000\u0101\u0461\u0001\u0000\u0000\u0000\u0103\u0465"+ - "\u0001\u0000\u0000\u0000\u0105\u0469\u0001\u0000\u0000\u0000\u0107\u046d"+ - "\u0001\u0000\u0000\u0000\u0109\u0472\u0001\u0000\u0000\u0000\u010b\u0477"+ - "\u0001\u0000\u0000\u0000\u010d\u047c\u0001\u0000\u0000\u0000\u010f\u0483"+ - 
"\u0001\u0000\u0000\u0000\u0111\u048c\u0001\u0000\u0000\u0000\u0113\u0493"+ - "\u0001\u0000\u0000\u0000\u0115\u0497\u0001\u0000\u0000\u0000\u0117\u049b"+ - "\u0001\u0000\u0000\u0000\u0119\u049f\u0001\u0000\u0000\u0000\u011b\u04a3"+ - "\u0001\u0000\u0000\u0000\u011d\u04a9\u0001\u0000\u0000\u0000\u011f\u04ad"+ - "\u0001\u0000\u0000\u0000\u0121\u04b1\u0001\u0000\u0000\u0000\u0123\u04b5"+ - "\u0001\u0000\u0000\u0000\u0125\u04b9\u0001\u0000\u0000\u0000\u0127\u04bd"+ - "\u0001\u0000\u0000\u0000\u0129\u04c1\u0001\u0000\u0000\u0000\u012b\u04c5"+ - "\u0001\u0000\u0000\u0000\u012d\u04c9\u0001\u0000\u0000\u0000\u012f\u04cd"+ - "\u0001\u0000\u0000\u0000\u0131\u04d1\u0001\u0000\u0000\u0000\u0133\u04d5"+ - "\u0001\u0000\u0000\u0000\u0135\u04da\u0001\u0000\u0000\u0000\u0137\u04de"+ - "\u0001\u0000\u0000\u0000\u0139\u04e2\u0001\u0000\u0000\u0000\u013b\u04e6"+ - "\u0001\u0000\u0000\u0000\u013d\u04ea\u0001\u0000\u0000\u0000\u013f\u04ee"+ - "\u0001\u0000\u0000\u0000\u0141\u04f2\u0001\u0000\u0000\u0000\u0143\u04f6"+ - "\u0001\u0000\u0000\u0000\u0145\u04fa\u0001\u0000\u0000\u0000\u0147\u04ff"+ - "\u0001\u0000\u0000\u0000\u0149\u0504\u0001\u0000\u0000\u0000\u014b\u0508"+ - "\u0001\u0000\u0000\u0000\u014d\u050c\u0001\u0000\u0000\u0000\u014f\u0510"+ - "\u0001\u0000\u0000\u0000\u0151\u0515\u0001\u0000\u0000\u0000\u0153\u051c"+ - "\u0001\u0000\u0000\u0000\u0155\u0520\u0001\u0000\u0000\u0000\u0157\u0524"+ - "\u0001\u0000\u0000\u0000\u0159\u0528\u0001\u0000\u0000\u0000\u015b\u052c"+ - "\u0001\u0000\u0000\u0000\u015d\u0531\u0001\u0000\u0000\u0000\u015f\u0535"+ - "\u0001\u0000\u0000\u0000\u0161\u0539\u0001\u0000\u0000\u0000\u0163\u053d"+ - "\u0001\u0000\u0000\u0000\u0165\u0542\u0001\u0000\u0000\u0000\u0167\u0546"+ - "\u0001\u0000\u0000\u0000\u0169\u054a\u0001\u0000\u0000\u0000\u016b\u054e"+ - "\u0001\u0000\u0000\u0000\u016d\u0552\u0001\u0000\u0000\u0000\u016f\u0556"+ - "\u0001\u0000\u0000\u0000\u0171\u055c\u0001\u0000\u0000\u0000\u0173\u0560"+ - 
"\u0001\u0000\u0000\u0000\u0175\u0564\u0001\u0000\u0000\u0000\u0177\u0568"+ - "\u0001\u0000\u0000\u0000\u0179\u056c\u0001\u0000\u0000\u0000\u017b\u0570"+ - "\u0001\u0000\u0000\u0000\u017d\u0574\u0001\u0000\u0000\u0000\u017f\u0579"+ - "\u0001\u0000\u0000\u0000\u0181\u057f\u0001\u0000\u0000\u0000\u0183\u0585"+ - "\u0001\u0000\u0000\u0000\u0185\u0589\u0001\u0000\u0000\u0000\u0187\u058d"+ - "\u0001\u0000\u0000\u0000\u0189\u0591\u0001\u0000\u0000\u0000\u018b\u0597"+ - "\u0001\u0000\u0000\u0000\u018d\u059d\u0001\u0000\u0000\u0000\u018f\u05a1"+ - "\u0001\u0000\u0000\u0000\u0191\u05a5\u0001\u0000\u0000\u0000\u0193\u05a9"+ - "\u0001\u0000\u0000\u0000\u0195\u05af\u0001\u0000\u0000\u0000\u0197\u05b5"+ - "\u0001\u0000\u0000\u0000\u0199\u05bb\u0001\u0000\u0000\u0000\u019b\u019c"+ - "\u0007\u0000\u0000\u0000\u019c\u019d\u0007\u0001\u0000\u0000\u019d\u019e"+ - "\u0007\u0002\u0000\u0000\u019e\u019f\u0007\u0002\u0000\u0000\u019f\u01a0"+ - "\u0007\u0003\u0000\u0000\u01a0\u01a1\u0007\u0004\u0000\u0000\u01a1\u01a2"+ - "\u0007\u0005\u0000\u0000\u01a2\u01a3\u0001\u0000\u0000\u0000\u01a3\u01a4"+ - "\u0006\u0000\u0000\u0000\u01a4\u0010\u0001\u0000\u0000\u0000\u01a5\u01a6"+ - "\u0007\u0000\u0000\u0000\u01a6\u01a7\u0007\u0006\u0000\u0000\u01a7\u01a8"+ - "\u0007\u0007\u0000\u0000\u01a8\u01a9\u0007\b\u0000\u0000\u01a9\u01aa\u0001"+ - "\u0000\u0000\u0000\u01aa\u01ab\u0006\u0001\u0001\u0000\u01ab\u0012\u0001"+ - "\u0000\u0000\u0000\u01ac\u01ad\u0007\u0003\u0000\u0000\u01ad\u01ae\u0007"+ - "\t\u0000\u0000\u01ae\u01af\u0007\u0006\u0000\u0000\u01af\u01b0\u0007\u0001"+ - "\u0000\u0000\u01b0\u01b1\u0007\u0004\u0000\u0000\u01b1\u01b2\u0007\n\u0000"+ - "\u0000\u01b2\u01b3\u0001\u0000\u0000\u0000\u01b3\u01b4\u0006\u0002\u0002"+ - "\u0000\u01b4\u0014\u0001\u0000\u0000\u0000\u01b5\u01b6\u0007\u0003\u0000"+ - "\u0000\u01b6\u01b7\u0007\u000b\u0000\u0000\u01b7\u01b8\u0007\f\u0000\u0000"+ - "\u01b8\u01b9\u0007\r\u0000\u0000\u01b9\u01ba\u0001\u0000\u0000\u0000\u01ba"+ - 
"\u01bb\u0006\u0003\u0000\u0000\u01bb\u0016\u0001\u0000\u0000\u0000\u01bc"+ - "\u01bd\u0007\u0003\u0000\u0000\u01bd\u01be\u0007\u000e\u0000\u0000\u01be"+ - "\u01bf\u0007\b\u0000\u0000\u01bf\u01c0\u0007\r\u0000\u0000\u01c0\u01c1"+ - "\u0007\f\u0000\u0000\u01c1\u01c2\u0007\u0001\u0000\u0000\u01c2\u01c3\u0007"+ - "\t\u0000\u0000\u01c3\u01c4\u0001\u0000\u0000\u0000\u01c4\u01c5\u0006\u0004"+ - "\u0003\u0000\u01c5\u0018\u0001\u0000\u0000\u0000\u01c6\u01c7\u0007\u000f"+ - "\u0000\u0000\u01c7\u01c8\u0007\u0006\u0000\u0000\u01c8\u01c9\u0007\u0007"+ - "\u0000\u0000\u01c9\u01ca\u0007\u0010\u0000\u0000\u01ca\u01cb\u0001\u0000"+ - "\u0000\u0000\u01cb\u01cc\u0006\u0005\u0004\u0000\u01cc\u001a\u0001\u0000"+ - "\u0000\u0000\u01cd\u01ce\u0007\u0011\u0000\u0000\u01ce\u01cf\u0007\u0006"+ - "\u0000\u0000\u01cf\u01d0\u0007\u0007\u0000\u0000\u01d0\u01d1\u0007\u0012"+ - "\u0000\u0000\u01d1\u01d2\u0001\u0000\u0000\u0000\u01d2\u01d3\u0006\u0006"+ - "\u0000\u0000\u01d3\u001c\u0001\u0000\u0000\u0000\u01d4\u01d5\u0007\u0012"+ - "\u0000\u0000\u01d5\u01d6\u0007\u0003\u0000\u0000\u01d6\u01d7\u0007\u0003"+ - "\u0000\u0000\u01d7\u01d8\u0007\b\u0000\u0000\u01d8\u01d9\u0001\u0000\u0000"+ - "\u0000\u01d9\u01da\u0006\u0007\u0001\u0000\u01da\u001e\u0001\u0000\u0000"+ - "\u0000\u01db\u01dc\u0007\r\u0000\u0000\u01dc\u01dd\u0007\u0001\u0000\u0000"+ - "\u01dd\u01de\u0007\u0010\u0000\u0000\u01de\u01df\u0007\u0001\u0000\u0000"+ - "\u01df\u01e0\u0007\u0005\u0000\u0000\u01e0\u01e1\u0001\u0000\u0000\u0000"+ - "\u01e1\u01e2\u0006\b\u0000\u0000\u01e2 \u0001\u0000\u0000\u0000\u01e3"+ - "\u01e4\u0007\u0010\u0000\u0000\u01e4\u01e5\u0007\u000b\u0000\u0000\u01e5"+ - "\u01e6\u0005_\u0000\u0000\u01e6\u01e7\u0007\u0003\u0000\u0000\u01e7\u01e8"+ - "\u0007\u000e\u0000\u0000\u01e8\u01e9\u0007\b\u0000\u0000\u01e9\u01ea\u0007"+ - "\f\u0000\u0000\u01ea\u01eb\u0007\t\u0000\u0000\u01eb\u01ec\u0007\u0000"+ - "\u0000\u0000\u01ec\u01ed\u0001\u0000\u0000\u0000\u01ed\u01ee\u0006\t\u0005"+ - 
"\u0000\u01ee\"\u0001\u0000\u0000\u0000\u01ef\u01f0\u0007\u0006\u0000\u0000"+ - "\u01f0\u01f1\u0007\u0003\u0000\u0000\u01f1\u01f2\u0007\t\u0000\u0000\u01f2"+ - "\u01f3\u0007\f\u0000\u0000\u01f3\u01f4\u0007\u0010\u0000\u0000\u01f4\u01f5"+ - "\u0007\u0003\u0000\u0000\u01f5\u01f6\u0001\u0000\u0000\u0000\u01f6\u01f7"+ - "\u0006\n\u0006\u0000\u01f7$\u0001\u0000\u0000\u0000\u01f8\u01f9\u0007"+ - "\u0006\u0000\u0000\u01f9\u01fa\u0007\u0007\u0000\u0000\u01fa\u01fb\u0007"+ - "\u0013\u0000\u0000\u01fb\u01fc\u0001\u0000\u0000\u0000\u01fc\u01fd\u0006"+ - "\u000b\u0000\u0000\u01fd&\u0001\u0000\u0000\u0000\u01fe\u01ff\u0007\u0002"+ - "\u0000\u0000\u01ff\u0200\u0007\n\u0000\u0000\u0200\u0201\u0007\u0007\u0000"+ - "\u0000\u0201\u0202\u0007\u0013\u0000\u0000\u0202\u0203\u0001\u0000\u0000"+ - "\u0000\u0203\u0204\u0006\f\u0007\u0000\u0204(\u0001\u0000\u0000\u0000"+ - "\u0205\u0206\u0007\u0002\u0000\u0000\u0206\u0207\u0007\u0007\u0000\u0000"+ - "\u0207\u0208\u0007\u0006\u0000\u0000\u0208\u0209\u0007\u0005\u0000\u0000"+ - "\u0209\u020a\u0001\u0000\u0000\u0000\u020a\u020b\u0006\r\u0000\u0000\u020b"+ - "*\u0001\u0000\u0000\u0000\u020c\u020d\u0007\u0002\u0000\u0000\u020d\u020e"+ - "\u0007\u0005\u0000\u0000\u020e\u020f\u0007\f\u0000\u0000\u020f\u0210\u0007"+ - "\u0005\u0000\u0000\u0210\u0211\u0007\u0002\u0000\u0000\u0211\u0212\u0001"+ - "\u0000\u0000\u0000\u0212\u0213\u0006\u000e\u0000\u0000\u0213,\u0001\u0000"+ - "\u0000\u0000\u0214\u0215\u0007\u0013\u0000\u0000\u0215\u0216\u0007\n\u0000"+ - "\u0000\u0216\u0217\u0007\u0003\u0000\u0000\u0217\u0218\u0007\u0006\u0000"+ - "\u0000\u0218\u0219\u0007\u0003\u0000\u0000\u0219\u021a\u0001\u0000\u0000"+ - "\u0000\u021a\u021b\u0006\u000f\u0000\u0000\u021b.\u0001\u0000\u0000\u0000"+ - "\u021c\u021d\u0004\u0010\u0000\u0000\u021d\u021e\u0007\u0001\u0000\u0000"+ - "\u021e\u021f\u0007\t\u0000\u0000\u021f\u0220\u0007\r\u0000\u0000\u0220"+ - "\u0221\u0007\u0001\u0000\u0000\u0221\u0222\u0007\t\u0000\u0000\u0222\u0223"+ - 
"\u0007\u0003\u0000\u0000\u0223\u0224\u0007\u0002\u0000\u0000\u0224\u0225"+ - "\u0007\u0005\u0000\u0000\u0225\u0226\u0007\f\u0000\u0000\u0226\u0227\u0007"+ - "\u0005\u0000\u0000\u0227\u0228\u0007\u0002\u0000\u0000\u0228\u0229\u0001"+ - "\u0000\u0000\u0000\u0229\u022a\u0006\u0010\u0000\u0000\u022a0\u0001\u0000"+ - "\u0000\u0000\u022b\u022c\u0004\u0011\u0001\u0000\u022c\u022d\u0007\r\u0000"+ - "\u0000\u022d\u022e\u0007\u0007\u0000\u0000\u022e\u022f\u0007\u0007\u0000"+ - "\u0000\u022f\u0230\u0007\u0012\u0000\u0000\u0230\u0231\u0007\u0014\u0000"+ - "\u0000\u0231\u0232\u0007\b\u0000\u0000\u0232\u0233\u0001\u0000\u0000\u0000"+ - "\u0233\u0234\u0006\u0011\b\u0000\u02342\u0001\u0000\u0000\u0000\u0235"+ - "\u0236\u0004\u0012\u0002\u0000\u0236\u0237\u0007\u0010\u0000\u0000\u0237"+ - "\u0238\u0007\u0003\u0000\u0000\u0238\u0239\u0007\u0005\u0000\u0000\u0239"+ - "\u023a\u0007\u0006\u0000\u0000\u023a\u023b\u0007\u0001\u0000\u0000\u023b"+ - "\u023c\u0007\u0004\u0000\u0000\u023c\u023d\u0007\u0002\u0000\u0000\u023d"+ - "\u023e\u0001\u0000\u0000\u0000\u023e\u023f\u0006\u0012\t\u0000\u023f4"+ - "\u0001\u0000\u0000\u0000\u0240\u0242\b\u0015\u0000\u0000\u0241\u0240\u0001"+ - "\u0000\u0000\u0000\u0242\u0243\u0001\u0000\u0000\u0000\u0243\u0241\u0001"+ - "\u0000\u0000\u0000\u0243\u0244\u0001\u0000\u0000\u0000\u0244\u0245\u0001"+ - "\u0000\u0000\u0000\u0245\u0246\u0006\u0013\u0000\u0000\u02466\u0001\u0000"+ - "\u0000\u0000\u0247\u0248\u0005/\u0000\u0000\u0248\u0249\u0005/\u0000\u0000"+ - "\u0249\u024d\u0001\u0000\u0000\u0000\u024a\u024c\b\u0016\u0000\u0000\u024b"+ - "\u024a\u0001\u0000\u0000\u0000\u024c\u024f\u0001\u0000\u0000\u0000\u024d"+ - "\u024b\u0001\u0000\u0000\u0000\u024d\u024e\u0001\u0000\u0000\u0000\u024e"+ - "\u0251\u0001\u0000\u0000\u0000\u024f\u024d\u0001\u0000\u0000\u0000\u0250"+ - "\u0252\u0005\r\u0000\u0000\u0251\u0250\u0001\u0000\u0000\u0000\u0251\u0252"+ - "\u0001\u0000\u0000\u0000\u0252\u0254\u0001\u0000\u0000\u0000\u0253\u0255"+ - 
"\u0005\n\u0000\u0000\u0254\u0253\u0001\u0000\u0000\u0000\u0254\u0255\u0001"+ - "\u0000\u0000\u0000\u0255\u0256\u0001\u0000\u0000\u0000\u0256\u0257\u0006"+ - "\u0014\n\u0000\u02578\u0001\u0000\u0000\u0000\u0258\u0259\u0005/\u0000"+ - "\u0000\u0259\u025a\u0005*\u0000\u0000\u025a\u025f\u0001\u0000\u0000\u0000"+ - "\u025b\u025e\u00039\u0015\u0000\u025c\u025e\t\u0000\u0000\u0000\u025d"+ - "\u025b\u0001\u0000\u0000\u0000\u025d\u025c\u0001\u0000\u0000\u0000\u025e"+ - "\u0261\u0001\u0000\u0000\u0000\u025f\u0260\u0001\u0000\u0000\u0000\u025f"+ - "\u025d\u0001\u0000\u0000\u0000\u0260\u0262\u0001\u0000\u0000\u0000\u0261"+ - "\u025f\u0001\u0000\u0000\u0000\u0262\u0263\u0005*\u0000\u0000\u0263\u0264"+ - "\u0005/\u0000\u0000\u0264\u0265\u0001\u0000\u0000\u0000\u0265\u0266\u0006"+ - "\u0015\n\u0000\u0266:\u0001\u0000\u0000\u0000\u0267\u0269\u0007\u0017"+ - "\u0000\u0000\u0268\u0267\u0001\u0000\u0000\u0000\u0269\u026a\u0001\u0000"+ - "\u0000\u0000\u026a\u0268\u0001\u0000\u0000\u0000\u026a\u026b\u0001\u0000"+ - "\u0000\u0000\u026b\u026c\u0001\u0000\u0000\u0000\u026c\u026d\u0006\u0016"+ - "\n\u0000\u026d<\u0001\u0000\u0000\u0000\u026e\u026f\u0005|\u0000\u0000"+ - "\u026f\u0270\u0001\u0000\u0000\u0000\u0270\u0271\u0006\u0017\u000b\u0000"+ - "\u0271>\u0001\u0000\u0000\u0000\u0272\u0273\u0007\u0018\u0000\u0000\u0273"+ - "@\u0001\u0000\u0000\u0000\u0274\u0275\u0007\u0019\u0000\u0000\u0275B\u0001"+ - "\u0000\u0000\u0000\u0276\u0277\u0005\\\u0000\u0000\u0277\u0278\u0007\u001a"+ - "\u0000\u0000\u0278D\u0001\u0000\u0000\u0000\u0279\u027a\b\u001b\u0000"+ - "\u0000\u027aF\u0001\u0000\u0000\u0000\u027b\u027d\u0007\u0003\u0000\u0000"+ - "\u027c\u027e\u0007\u001c\u0000\u0000\u027d\u027c\u0001\u0000\u0000\u0000"+ - "\u027d\u027e\u0001\u0000\u0000\u0000\u027e\u0280\u0001\u0000\u0000\u0000"+ - "\u027f\u0281\u0003?\u0018\u0000\u0280\u027f\u0001\u0000\u0000\u0000\u0281"+ - "\u0282\u0001\u0000\u0000\u0000\u0282\u0280\u0001\u0000\u0000\u0000\u0282"+ - 
"\u0283\u0001\u0000\u0000\u0000\u0283H\u0001\u0000\u0000\u0000\u0284\u0285"+ - "\u0005@\u0000\u0000\u0285J\u0001\u0000\u0000\u0000\u0286\u0287\u0005`"+ - "\u0000\u0000\u0287L\u0001\u0000\u0000\u0000\u0288\u028c\b\u001d\u0000"+ - "\u0000\u0289\u028a\u0005`\u0000\u0000\u028a\u028c\u0005`\u0000\u0000\u028b"+ - "\u0288\u0001\u0000\u0000\u0000\u028b\u0289\u0001\u0000\u0000\u0000\u028c"+ - "N\u0001\u0000\u0000\u0000\u028d\u028e\u0005_\u0000\u0000\u028eP\u0001"+ - "\u0000\u0000\u0000\u028f\u0293\u0003A\u0019\u0000\u0290\u0293\u0003?\u0018"+ - "\u0000\u0291\u0293\u0003O \u0000\u0292\u028f\u0001\u0000\u0000\u0000\u0292"+ - "\u0290\u0001\u0000\u0000\u0000\u0292\u0291\u0001\u0000\u0000\u0000\u0293"+ - "R\u0001\u0000\u0000\u0000\u0294\u0299\u0005\"\u0000\u0000\u0295\u0298"+ - "\u0003C\u001a\u0000\u0296\u0298\u0003E\u001b\u0000\u0297\u0295\u0001\u0000"+ - "\u0000\u0000\u0297\u0296\u0001\u0000\u0000\u0000\u0298\u029b\u0001\u0000"+ - "\u0000\u0000\u0299\u0297\u0001\u0000\u0000\u0000\u0299\u029a\u0001\u0000"+ - "\u0000\u0000\u029a\u029c\u0001\u0000\u0000\u0000\u029b\u0299\u0001\u0000"+ - "\u0000\u0000\u029c\u02b2\u0005\"\u0000\u0000\u029d\u029e\u0005\"\u0000"+ - "\u0000\u029e\u029f\u0005\"\u0000\u0000\u029f\u02a0\u0005\"\u0000\u0000"+ - "\u02a0\u02a4\u0001\u0000\u0000\u0000\u02a1\u02a3\b\u0016\u0000\u0000\u02a2"+ - "\u02a1\u0001\u0000\u0000\u0000\u02a3\u02a6\u0001\u0000\u0000\u0000\u02a4"+ - "\u02a5\u0001\u0000\u0000\u0000\u02a4\u02a2\u0001\u0000\u0000\u0000\u02a5"+ - "\u02a7\u0001\u0000\u0000\u0000\u02a6\u02a4\u0001\u0000\u0000\u0000\u02a7"+ - "\u02a8\u0005\"\u0000\u0000\u02a8\u02a9\u0005\"\u0000\u0000\u02a9\u02aa"+ - "\u0005\"\u0000\u0000\u02aa\u02ac\u0001\u0000\u0000\u0000\u02ab\u02ad\u0005"+ - "\"\u0000\u0000\u02ac\u02ab\u0001\u0000\u0000\u0000\u02ac\u02ad\u0001\u0000"+ - "\u0000\u0000\u02ad\u02af\u0001\u0000\u0000\u0000\u02ae\u02b0\u0005\"\u0000"+ - "\u0000\u02af\u02ae\u0001\u0000\u0000\u0000\u02af\u02b0\u0001\u0000\u0000"+ - 
"\u0000\u02b0\u02b2\u0001\u0000\u0000\u0000\u02b1\u0294\u0001\u0000\u0000"+ - "\u0000\u02b1\u029d\u0001\u0000\u0000\u0000\u02b2T\u0001\u0000\u0000\u0000"+ - "\u02b3\u02b5\u0003?\u0018\u0000\u02b4\u02b3\u0001\u0000\u0000\u0000\u02b5"+ - "\u02b6\u0001\u0000\u0000\u0000\u02b6\u02b4\u0001\u0000\u0000\u0000\u02b6"+ - "\u02b7\u0001\u0000\u0000\u0000\u02b7V\u0001\u0000\u0000\u0000\u02b8\u02ba"+ - "\u0003?\u0018\u0000\u02b9\u02b8\u0001\u0000\u0000\u0000\u02ba\u02bb\u0001"+ - "\u0000\u0000\u0000\u02bb\u02b9\u0001\u0000\u0000\u0000\u02bb\u02bc\u0001"+ - "\u0000\u0000\u0000\u02bc\u02bd\u0001\u0000\u0000\u0000\u02bd\u02c1\u0003"+ - "g,\u0000\u02be\u02c0\u0003?\u0018\u0000\u02bf\u02be\u0001\u0000\u0000"+ - "\u0000\u02c0\u02c3\u0001\u0000\u0000\u0000\u02c1\u02bf\u0001\u0000\u0000"+ - "\u0000\u02c1\u02c2\u0001\u0000\u0000\u0000\u02c2\u02e3\u0001\u0000\u0000"+ - "\u0000\u02c3\u02c1\u0001\u0000\u0000\u0000\u02c4\u02c6\u0003g,\u0000\u02c5"+ - "\u02c7\u0003?\u0018\u0000\u02c6\u02c5\u0001\u0000\u0000\u0000\u02c7\u02c8"+ - "\u0001\u0000\u0000\u0000\u02c8\u02c6\u0001\u0000\u0000\u0000\u02c8\u02c9"+ - "\u0001\u0000\u0000\u0000\u02c9\u02e3\u0001\u0000\u0000\u0000\u02ca\u02cc"+ - "\u0003?\u0018\u0000\u02cb\u02ca\u0001\u0000\u0000\u0000\u02cc\u02cd\u0001"+ - "\u0000\u0000\u0000\u02cd\u02cb\u0001\u0000\u0000\u0000\u02cd\u02ce\u0001"+ - "\u0000\u0000\u0000\u02ce\u02d6\u0001\u0000\u0000\u0000\u02cf\u02d3\u0003"+ - "g,\u0000\u02d0\u02d2\u0003?\u0018\u0000\u02d1\u02d0\u0001\u0000\u0000"+ - "\u0000\u02d2\u02d5\u0001\u0000\u0000\u0000\u02d3\u02d1\u0001\u0000\u0000"+ - "\u0000\u02d3\u02d4\u0001\u0000\u0000\u0000\u02d4\u02d7\u0001\u0000\u0000"+ - "\u0000\u02d5\u02d3\u0001\u0000\u0000\u0000\u02d6\u02cf\u0001\u0000\u0000"+ - "\u0000\u02d6\u02d7\u0001\u0000\u0000\u0000\u02d7\u02d8\u0001\u0000\u0000"+ - "\u0000\u02d8\u02d9\u0003G\u001c\u0000\u02d9\u02e3\u0001\u0000\u0000\u0000"+ - "\u02da\u02dc\u0003g,\u0000\u02db\u02dd\u0003?\u0018\u0000\u02dc\u02db"+ - 
"\u0001\u0000\u0000\u0000\u02dd\u02de\u0001\u0000\u0000\u0000\u02de\u02dc"+ - "\u0001\u0000\u0000\u0000\u02de\u02df\u0001\u0000\u0000\u0000\u02df\u02e0"+ - "\u0001\u0000\u0000\u0000\u02e0\u02e1\u0003G\u001c\u0000\u02e1\u02e3\u0001"+ - "\u0000\u0000\u0000\u02e2\u02b9\u0001\u0000\u0000\u0000\u02e2\u02c4\u0001"+ - "\u0000\u0000\u0000\u02e2\u02cb\u0001\u0000\u0000\u0000\u02e2\u02da\u0001"+ - "\u0000\u0000\u0000\u02e3X\u0001\u0000\u0000\u0000\u02e4\u02e5\u0007\u001e"+ - "\u0000\u0000\u02e5\u02e6\u0007\u001f\u0000\u0000\u02e6Z\u0001\u0000\u0000"+ - "\u0000\u02e7\u02e8\u0007\f\u0000\u0000\u02e8\u02e9\u0007\t\u0000\u0000"+ - "\u02e9\u02ea\u0007\u0000\u0000\u0000\u02ea\\\u0001\u0000\u0000\u0000\u02eb"+ - "\u02ec\u0007\f\u0000\u0000\u02ec\u02ed\u0007\u0002\u0000\u0000\u02ed\u02ee"+ - "\u0007\u0004\u0000\u0000\u02ee^\u0001\u0000\u0000\u0000\u02ef\u02f0\u0005"+ - "=\u0000\u0000\u02f0`\u0001\u0000\u0000\u0000\u02f1\u02f2\u0005:\u0000"+ - "\u0000\u02f2\u02f3\u0005:\u0000\u0000\u02f3b\u0001\u0000\u0000\u0000\u02f4"+ - "\u02f5\u0005,\u0000\u0000\u02f5d\u0001\u0000\u0000\u0000\u02f6\u02f7\u0007"+ - "\u0000\u0000\u0000\u02f7\u02f8\u0007\u0003\u0000\u0000\u02f8\u02f9\u0007"+ - "\u0002\u0000\u0000\u02f9\u02fa\u0007\u0004\u0000\u0000\u02faf\u0001\u0000"+ - "\u0000\u0000\u02fb\u02fc\u0005.\u0000\u0000\u02fch\u0001\u0000\u0000\u0000"+ - "\u02fd\u02fe\u0007\u000f\u0000\u0000\u02fe\u02ff\u0007\f\u0000\u0000\u02ff"+ - "\u0300\u0007\r\u0000\u0000\u0300\u0301\u0007\u0002\u0000\u0000\u0301\u0302"+ - "\u0007\u0003\u0000\u0000\u0302j\u0001\u0000\u0000\u0000\u0303\u0304\u0007"+ - "\u000f\u0000\u0000\u0304\u0305\u0007\u0001\u0000\u0000\u0305\u0306\u0007"+ - "\u0006\u0000\u0000\u0306\u0307\u0007\u0002\u0000\u0000\u0307\u0308\u0007"+ - "\u0005\u0000\u0000\u0308l\u0001\u0000\u0000\u0000\u0309\u030a\u0007\u0001"+ - "\u0000\u0000\u030a\u030b\u0007\t\u0000\u0000\u030bn\u0001\u0000\u0000"+ - "\u0000\u030c\u030d\u0007\u0001\u0000\u0000\u030d\u030e\u0007\u0002\u0000"+ - 
"\u0000\u030ep\u0001\u0000\u0000\u0000\u030f\u0310\u0007\r\u0000\u0000"+ - "\u0310\u0311\u0007\f\u0000\u0000\u0311\u0312\u0007\u0002\u0000\u0000\u0312"+ - "\u0313\u0007\u0005\u0000\u0000\u0313r\u0001\u0000\u0000\u0000\u0314\u0315"+ - "\u0007\r\u0000\u0000\u0315\u0316\u0007\u0001\u0000\u0000\u0316\u0317\u0007"+ - "\u0012\u0000\u0000\u0317\u0318\u0007\u0003\u0000\u0000\u0318t\u0001\u0000"+ - "\u0000\u0000\u0319\u031a\u0005(\u0000\u0000\u031av\u0001\u0000\u0000\u0000"+ - "\u031b\u031c\u0007\t\u0000\u0000\u031c\u031d\u0007\u0007\u0000\u0000\u031d"+ - "\u031e\u0007\u0005\u0000\u0000\u031ex\u0001\u0000\u0000\u0000\u031f\u0320"+ - "\u0007\t\u0000\u0000\u0320\u0321\u0007\u0014\u0000\u0000\u0321\u0322\u0007"+ - "\r\u0000\u0000\u0322\u0323\u0007\r\u0000\u0000\u0323z\u0001\u0000\u0000"+ - "\u0000\u0324\u0325\u0007\t\u0000\u0000\u0325\u0326\u0007\u0014\u0000\u0000"+ - "\u0326\u0327\u0007\r\u0000\u0000\u0327\u0328\u0007\r\u0000\u0000\u0328"+ - "\u0329\u0007\u0002\u0000\u0000\u0329|\u0001\u0000\u0000\u0000\u032a\u032b"+ - "\u0007\u0007\u0000\u0000\u032b\u032c\u0007\u0006\u0000\u0000\u032c~\u0001"+ - "\u0000\u0000\u0000\u032d\u032e\u0005?\u0000\u0000\u032e\u0080\u0001\u0000"+ - "\u0000\u0000\u032f\u0330\u0007\u0006\u0000\u0000\u0330\u0331\u0007\r\u0000"+ - "\u0000\u0331\u0332\u0007\u0001\u0000\u0000\u0332\u0333\u0007\u0012\u0000"+ - "\u0000\u0333\u0334\u0007\u0003\u0000\u0000\u0334\u0082\u0001\u0000\u0000"+ - "\u0000\u0335\u0336\u0005)\u0000\u0000\u0336\u0084\u0001\u0000\u0000\u0000"+ - "\u0337\u0338\u0007\u0005\u0000\u0000\u0338\u0339\u0007\u0006\u0000\u0000"+ - "\u0339\u033a\u0007\u0014\u0000\u0000\u033a\u033b\u0007\u0003\u0000\u0000"+ - "\u033b\u0086\u0001\u0000\u0000\u0000\u033c\u033d\u0005=\u0000\u0000\u033d"+ - "\u033e\u0005=\u0000\u0000\u033e\u0088\u0001\u0000\u0000\u0000\u033f\u0340"+ - "\u0005=\u0000\u0000\u0340\u0341\u0005~\u0000\u0000\u0341\u008a\u0001\u0000"+ - "\u0000\u0000\u0342\u0343\u0005!\u0000\u0000\u0343\u0344\u0005=\u0000\u0000"+ - 
"\u0344\u008c\u0001\u0000\u0000\u0000\u0345\u0346\u0005<\u0000\u0000\u0346"+ - "\u008e\u0001\u0000\u0000\u0000\u0347\u0348\u0005<\u0000\u0000\u0348\u0349"+ - "\u0005=\u0000\u0000\u0349\u0090\u0001\u0000\u0000\u0000\u034a\u034b\u0005"+ - ">\u0000\u0000\u034b\u0092\u0001\u0000\u0000\u0000\u034c\u034d\u0005>\u0000"+ - "\u0000\u034d\u034e\u0005=\u0000\u0000\u034e\u0094\u0001\u0000\u0000\u0000"+ - "\u034f\u0350\u0005+\u0000\u0000\u0350\u0096\u0001\u0000\u0000\u0000\u0351"+ - "\u0352\u0005-\u0000\u0000\u0352\u0098\u0001\u0000\u0000\u0000\u0353\u0354"+ - "\u0005*\u0000\u0000\u0354\u009a\u0001\u0000\u0000\u0000\u0355\u0356\u0005"+ - "/\u0000\u0000\u0356\u009c\u0001\u0000\u0000\u0000\u0357\u0358\u0005%\u0000"+ - "\u0000\u0358\u009e\u0001\u0000\u0000\u0000\u0359\u035a\u0007\u0010\u0000"+ - "\u0000\u035a\u035b\u0007\f\u0000\u0000\u035b\u035c\u0007\u0005\u0000\u0000"+ - "\u035c\u035d\u0007\u0004\u0000\u0000\u035d\u035e\u0007\n\u0000\u0000\u035e"+ - "\u00a0\u0001\u0000\u0000\u0000\u035f\u0360\u0004I\u0003\u0000\u0360\u0361"+ - "\u0003-\u000f\u0000\u0361\u0362\u0001\u0000\u0000\u0000\u0362\u0363\u0006"+ - "I\f\u0000\u0363\u00a2\u0001\u0000\u0000\u0000\u0364\u0367\u0003\u007f"+ - "8\u0000\u0365\u0368\u0003A\u0019\u0000\u0366\u0368\u0003O \u0000\u0367"+ - "\u0365\u0001\u0000\u0000\u0000\u0367\u0366\u0001\u0000\u0000\u0000\u0368"+ - "\u036c\u0001\u0000\u0000\u0000\u0369\u036b\u0003Q!\u0000\u036a\u0369\u0001"+ - "\u0000\u0000\u0000\u036b\u036e\u0001\u0000\u0000\u0000\u036c\u036a\u0001"+ - "\u0000\u0000\u0000\u036c\u036d\u0001\u0000\u0000\u0000\u036d\u0376\u0001"+ - "\u0000\u0000\u0000\u036e\u036c\u0001\u0000\u0000\u0000\u036f\u0371\u0003"+ - "\u007f8\u0000\u0370\u0372\u0003?\u0018\u0000\u0371\u0370\u0001\u0000\u0000"+ - "\u0000\u0372\u0373\u0001\u0000\u0000\u0000\u0373\u0371\u0001\u0000\u0000"+ - "\u0000\u0373\u0374\u0001\u0000\u0000\u0000\u0374\u0376\u0001\u0000\u0000"+ - "\u0000\u0375\u0364\u0001\u0000\u0000\u0000\u0375\u036f\u0001\u0000\u0000"+ - 
"\u0000\u0376\u00a4\u0001\u0000\u0000\u0000\u0377\u0378\u0005[\u0000\u0000"+ - "\u0378\u0379\u0001\u0000\u0000\u0000\u0379\u037a\u0006K\u0000\u0000\u037a"+ - "\u037b\u0006K\u0000\u0000\u037b\u00a6\u0001\u0000\u0000\u0000\u037c\u037d"+ - "\u0005]\u0000\u0000\u037d\u037e\u0001\u0000\u0000\u0000\u037e\u037f\u0006"+ - "L\u000b\u0000\u037f\u0380\u0006L\u000b\u0000\u0380\u00a8\u0001\u0000\u0000"+ - "\u0000\u0381\u0385\u0003A\u0019\u0000\u0382\u0384\u0003Q!\u0000\u0383"+ - "\u0382\u0001\u0000\u0000\u0000\u0384\u0387\u0001\u0000\u0000\u0000\u0385"+ - "\u0383\u0001\u0000\u0000\u0000\u0385\u0386\u0001\u0000\u0000\u0000\u0386"+ - "\u0392\u0001\u0000\u0000\u0000\u0387\u0385\u0001\u0000\u0000\u0000\u0388"+ - "\u038b\u0003O \u0000\u0389\u038b\u0003I\u001d\u0000\u038a\u0388\u0001"+ - "\u0000\u0000\u0000\u038a\u0389\u0001\u0000\u0000\u0000\u038b\u038d\u0001"+ - "\u0000\u0000\u0000\u038c\u038e\u0003Q!\u0000\u038d\u038c\u0001\u0000\u0000"+ - "\u0000\u038e\u038f\u0001\u0000\u0000\u0000\u038f\u038d\u0001\u0000\u0000"+ - "\u0000\u038f\u0390\u0001\u0000\u0000\u0000\u0390\u0392\u0001\u0000\u0000"+ - "\u0000\u0391\u0381\u0001\u0000\u0000\u0000\u0391\u038a\u0001\u0000\u0000"+ - "\u0000\u0392\u00aa\u0001\u0000\u0000\u0000\u0393\u0395\u0003K\u001e\u0000"+ - "\u0394\u0396\u0003M\u001f\u0000\u0395\u0394\u0001\u0000\u0000\u0000\u0396"+ - "\u0397\u0001\u0000\u0000\u0000\u0397\u0395\u0001\u0000\u0000\u0000\u0397"+ - "\u0398\u0001\u0000\u0000\u0000\u0398\u0399\u0001\u0000\u0000\u0000\u0399"+ - "\u039a\u0003K\u001e\u0000\u039a\u00ac\u0001\u0000\u0000\u0000\u039b\u039c"+ - "\u0003\u00abN\u0000\u039c\u00ae\u0001\u0000\u0000\u0000\u039d\u039e\u0003"+ - "7\u0014\u0000\u039e\u039f\u0001\u0000\u0000\u0000\u039f\u03a0\u0006P\n"+ - "\u0000\u03a0\u00b0\u0001\u0000\u0000\u0000\u03a1\u03a2\u00039\u0015\u0000"+ - "\u03a2\u03a3\u0001\u0000\u0000\u0000\u03a3\u03a4\u0006Q\n\u0000\u03a4"+ - "\u00b2\u0001\u0000\u0000\u0000\u03a5\u03a6\u0003;\u0016\u0000\u03a6\u03a7"+ - 
"\u0001\u0000\u0000\u0000\u03a7\u03a8\u0006R\n\u0000\u03a8\u00b4\u0001"+ - "\u0000\u0000\u0000\u03a9\u03aa\u0003\u00a5K\u0000\u03aa\u03ab\u0001\u0000"+ - "\u0000\u0000\u03ab\u03ac\u0006S\r\u0000\u03ac\u03ad\u0006S\u000e\u0000"+ - "\u03ad\u00b6\u0001\u0000\u0000\u0000\u03ae\u03af\u0003=\u0017\u0000\u03af"+ - "\u03b0\u0001\u0000\u0000\u0000\u03b0\u03b1\u0006T\u000f\u0000\u03b1\u03b2"+ - "\u0006T\u000b\u0000\u03b2\u00b8\u0001\u0000\u0000\u0000\u03b3\u03b4\u0003"+ - ";\u0016\u0000\u03b4\u03b5\u0001\u0000\u0000\u0000\u03b5\u03b6\u0006U\n"+ - "\u0000\u03b6\u00ba\u0001\u0000\u0000\u0000\u03b7\u03b8\u00037\u0014\u0000"+ - "\u03b8\u03b9\u0001\u0000\u0000\u0000\u03b9\u03ba\u0006V\n\u0000\u03ba"+ - "\u00bc\u0001\u0000\u0000\u0000\u03bb\u03bc\u00039\u0015\u0000\u03bc\u03bd"+ - "\u0001\u0000\u0000\u0000\u03bd\u03be\u0006W\n\u0000\u03be\u00be\u0001"+ - "\u0000\u0000\u0000\u03bf\u03c0\u0003=\u0017\u0000\u03c0\u03c1\u0001\u0000"+ - "\u0000\u0000\u03c1\u03c2\u0006X\u000f\u0000\u03c2\u03c3\u0006X\u000b\u0000"+ - "\u03c3\u00c0\u0001\u0000\u0000\u0000\u03c4\u03c5\u0003\u00a5K\u0000\u03c5"+ - "\u03c6\u0001\u0000\u0000\u0000\u03c6\u03c7\u0006Y\r\u0000\u03c7\u00c2"+ - "\u0001\u0000\u0000\u0000\u03c8\u03c9\u0003\u00a7L\u0000\u03c9\u03ca\u0001"+ - "\u0000\u0000\u0000\u03ca\u03cb\u0006Z\u0010\u0000\u03cb\u00c4\u0001\u0000"+ - "\u0000\u0000\u03cc\u03cd\u0003\u0151\u00a1\u0000\u03cd\u03ce\u0001\u0000"+ - "\u0000\u0000\u03ce\u03cf\u0006[\u0011\u0000\u03cf\u00c6\u0001\u0000\u0000"+ - "\u0000\u03d0\u03d1\u0003c*\u0000\u03d1\u03d2\u0001\u0000\u0000\u0000\u03d2"+ - "\u03d3\u0006\\\u0012\u0000\u03d3\u00c8\u0001\u0000\u0000\u0000\u03d4\u03d5"+ - "\u0003_(\u0000\u03d5\u03d6\u0001\u0000\u0000\u0000\u03d6\u03d7\u0006]"+ - "\u0013\u0000\u03d7\u00ca\u0001\u0000\u0000\u0000\u03d8\u03d9\u0007\u0010"+ - "\u0000\u0000\u03d9\u03da\u0007\u0003\u0000\u0000\u03da\u03db\u0007\u0005"+ - "\u0000\u0000\u03db\u03dc\u0007\f\u0000\u0000\u03dc\u03dd\u0007\u0000\u0000"+ - 
"\u0000\u03dd\u03de\u0007\f\u0000\u0000\u03de\u03df\u0007\u0005\u0000\u0000"+ - "\u03df\u03e0\u0007\f\u0000\u0000\u03e0\u00cc\u0001\u0000\u0000\u0000\u03e1"+ - "\u03e5\b \u0000\u0000\u03e2\u03e3\u0005/\u0000\u0000\u03e3\u03e5\b!\u0000"+ - "\u0000\u03e4\u03e1\u0001\u0000\u0000\u0000\u03e4\u03e2\u0001\u0000\u0000"+ - "\u0000\u03e5\u00ce\u0001\u0000\u0000\u0000\u03e6\u03e8\u0003\u00cd_\u0000"+ - "\u03e7\u03e6\u0001\u0000\u0000\u0000\u03e8\u03e9\u0001\u0000\u0000\u0000"+ - "\u03e9\u03e7\u0001\u0000\u0000\u0000\u03e9\u03ea\u0001\u0000\u0000\u0000"+ - "\u03ea\u00d0\u0001\u0000\u0000\u0000\u03eb\u03ec\u0003\u00cf`\u0000\u03ec"+ - "\u03ed\u0001\u0000\u0000\u0000\u03ed\u03ee\u0006a\u0014\u0000\u03ee\u00d2"+ - "\u0001\u0000\u0000\u0000\u03ef\u03f0\u0003S\"\u0000\u03f0\u03f1\u0001"+ - "\u0000\u0000\u0000\u03f1\u03f2\u0006b\u0015\u0000\u03f2\u00d4\u0001\u0000"+ - "\u0000\u0000\u03f3\u03f4\u00037\u0014\u0000\u03f4\u03f5\u0001\u0000\u0000"+ - "\u0000\u03f5\u03f6\u0006c\n\u0000\u03f6\u00d6\u0001\u0000\u0000\u0000"+ - "\u03f7\u03f8\u00039\u0015\u0000\u03f8\u03f9\u0001\u0000\u0000\u0000\u03f9"+ - "\u03fa\u0006d\n\u0000\u03fa\u00d8\u0001\u0000\u0000\u0000\u03fb\u03fc"+ - "\u0003;\u0016\u0000\u03fc\u03fd\u0001\u0000\u0000\u0000\u03fd\u03fe\u0006"+ - "e\n\u0000\u03fe\u00da\u0001\u0000\u0000\u0000\u03ff\u0400\u0003=\u0017"+ - "\u0000\u0400\u0401\u0001\u0000\u0000\u0000\u0401\u0402\u0006f\u000f\u0000"+ - "\u0402\u0403\u0006f\u000b\u0000\u0403\u00dc\u0001\u0000\u0000\u0000\u0404"+ - "\u0405\u0003g,\u0000\u0405\u0406\u0001\u0000\u0000\u0000\u0406\u0407\u0006"+ - "g\u0016\u0000\u0407\u00de\u0001\u0000\u0000\u0000\u0408\u0409\u0003c*"+ - "\u0000\u0409\u040a\u0001\u0000\u0000\u0000\u040a\u040b\u0006h\u0012\u0000"+ - "\u040b\u00e0\u0001\u0000\u0000\u0000\u040c\u040d\u0003\u007f8\u0000\u040d"+ - "\u040e\u0001\u0000\u0000\u0000\u040e\u040f\u0006i\u0017\u0000\u040f\u00e2"+ - "\u0001\u0000\u0000\u0000\u0410\u0411\u0003\u00a3J\u0000\u0411\u0412\u0001"+ - 
"\u0000\u0000\u0000\u0412\u0413\u0006j\u0018\u0000\u0413\u00e4\u0001\u0000"+ - "\u0000\u0000\u0414\u0419\u0003A\u0019\u0000\u0415\u0419\u0003?\u0018\u0000"+ - "\u0416\u0419\u0003O \u0000\u0417\u0419\u0003\u0099E\u0000\u0418\u0414"+ - "\u0001\u0000\u0000\u0000\u0418\u0415\u0001\u0000\u0000\u0000\u0418\u0416"+ - "\u0001\u0000\u0000\u0000\u0418\u0417\u0001\u0000\u0000\u0000\u0419\u00e6"+ - "\u0001\u0000\u0000\u0000\u041a\u041d\u0003A\u0019\u0000\u041b\u041d\u0003"+ - "\u0099E\u0000\u041c\u041a\u0001\u0000\u0000\u0000\u041c\u041b\u0001\u0000"+ - "\u0000\u0000\u041d\u0421\u0001\u0000\u0000\u0000\u041e\u0420\u0003\u00e5"+ - "k\u0000\u041f\u041e\u0001\u0000\u0000\u0000\u0420\u0423\u0001\u0000\u0000"+ - "\u0000\u0421\u041f\u0001\u0000\u0000\u0000\u0421\u0422\u0001\u0000\u0000"+ - "\u0000\u0422\u042e\u0001\u0000\u0000\u0000\u0423\u0421\u0001\u0000\u0000"+ - "\u0000\u0424\u0427\u0003O \u0000\u0425\u0427\u0003I\u001d\u0000\u0426"+ - "\u0424\u0001\u0000\u0000\u0000\u0426\u0425\u0001\u0000\u0000\u0000\u0427"+ - "\u0429\u0001\u0000\u0000\u0000\u0428\u042a\u0003\u00e5k\u0000\u0429\u0428"+ - "\u0001\u0000\u0000\u0000\u042a\u042b\u0001\u0000\u0000\u0000\u042b\u0429"+ - "\u0001\u0000\u0000\u0000\u042b\u042c\u0001\u0000\u0000\u0000\u042c\u042e"+ - "\u0001\u0000\u0000\u0000\u042d\u041c\u0001\u0000\u0000\u0000\u042d\u0426"+ - "\u0001\u0000\u0000\u0000\u042e\u00e8\u0001\u0000\u0000\u0000\u042f\u0432"+ - "\u0003\u00e7l\u0000\u0430\u0432\u0003\u00abN\u0000\u0431\u042f\u0001\u0000"+ - "\u0000\u0000\u0431\u0430\u0001\u0000\u0000\u0000\u0432\u0433\u0001\u0000"+ - "\u0000\u0000\u0433\u0431\u0001\u0000\u0000\u0000\u0433\u0434\u0001\u0000"+ - "\u0000\u0000\u0434\u00ea\u0001\u0000\u0000\u0000\u0435\u0436\u00037\u0014"+ - "\u0000\u0436\u0437\u0001\u0000\u0000\u0000\u0437\u0438\u0006n\n\u0000"+ - "\u0438\u00ec\u0001\u0000\u0000\u0000\u0439\u043a\u00039\u0015\u0000\u043a"+ - "\u043b\u0001\u0000\u0000\u0000\u043b\u043c\u0006o\n\u0000\u043c\u00ee"+ - 
"\u0001\u0000\u0000\u0000\u043d\u043e\u0003;\u0016\u0000\u043e\u043f\u0001"+ - "\u0000\u0000\u0000\u043f\u0440\u0006p\n\u0000\u0440\u00f0\u0001\u0000"+ - "\u0000\u0000\u0441\u0442\u0003=\u0017\u0000\u0442\u0443\u0001\u0000\u0000"+ - "\u0000\u0443\u0444\u0006q\u000f\u0000\u0444\u0445\u0006q\u000b\u0000\u0445"+ - "\u00f2\u0001\u0000\u0000\u0000\u0446\u0447\u0003_(\u0000\u0447\u0448\u0001"+ - "\u0000\u0000\u0000\u0448\u0449\u0006r\u0013\u0000\u0449\u00f4\u0001\u0000"+ - "\u0000\u0000\u044a\u044b\u0003c*\u0000\u044b\u044c\u0001\u0000\u0000\u0000"+ - "\u044c\u044d\u0006s\u0012\u0000\u044d\u00f6\u0001\u0000\u0000\u0000\u044e"+ - "\u044f\u0003g,\u0000\u044f\u0450\u0001\u0000\u0000\u0000\u0450\u0451\u0006"+ - "t\u0016\u0000\u0451\u00f8\u0001\u0000\u0000\u0000\u0452\u0453\u0003\u007f"+ - "8\u0000\u0453\u0454\u0001\u0000\u0000\u0000\u0454\u0455\u0006u\u0017\u0000"+ - "\u0455\u00fa\u0001\u0000\u0000\u0000\u0456\u0457\u0003\u00a3J\u0000\u0457"+ - "\u0458\u0001\u0000\u0000\u0000\u0458\u0459\u0006v\u0018\u0000\u0459\u00fc"+ - "\u0001\u0000\u0000\u0000\u045a\u045b\u0007\f\u0000\u0000\u045b\u045c\u0007"+ - "\u0002\u0000\u0000\u045c\u00fe\u0001\u0000\u0000\u0000\u045d\u045e\u0003"+ - "\u00e9m\u0000\u045e\u045f\u0001\u0000\u0000\u0000\u045f\u0460\u0006x\u0019"+ - "\u0000\u0460\u0100\u0001\u0000\u0000\u0000\u0461\u0462\u00037\u0014\u0000"+ - "\u0462\u0463\u0001\u0000\u0000\u0000\u0463\u0464\u0006y\n\u0000\u0464"+ - "\u0102\u0001\u0000\u0000\u0000\u0465\u0466\u00039\u0015\u0000\u0466\u0467"+ - "\u0001\u0000\u0000\u0000\u0467\u0468\u0006z\n\u0000\u0468\u0104\u0001"+ - "\u0000\u0000\u0000\u0469\u046a\u0003;\u0016\u0000\u046a\u046b\u0001\u0000"+ - "\u0000\u0000\u046b\u046c\u0006{\n\u0000\u046c\u0106\u0001\u0000\u0000"+ - "\u0000\u046d\u046e\u0003=\u0017\u0000\u046e\u046f\u0001\u0000\u0000\u0000"+ - "\u046f\u0470\u0006|\u000f\u0000\u0470\u0471\u0006|\u000b\u0000\u0471\u0108"+ - "\u0001\u0000\u0000\u0000\u0472\u0473\u0003\u00a5K\u0000\u0473\u0474\u0001"+ - 
"\u0000\u0000\u0000\u0474\u0475\u0006}\r\u0000\u0475\u0476\u0006}\u001a"+ - "\u0000\u0476\u010a\u0001\u0000\u0000\u0000\u0477\u0478\u0007\u0007\u0000"+ - "\u0000\u0478\u0479\u0007\t\u0000\u0000\u0479\u047a\u0001\u0000\u0000\u0000"+ - "\u047a\u047b\u0006~\u001b\u0000\u047b\u010c\u0001\u0000\u0000\u0000\u047c"+ - "\u047d\u0007\u0013\u0000\u0000\u047d\u047e\u0007\u0001\u0000\u0000\u047e"+ - "\u047f\u0007\u0005\u0000\u0000\u047f\u0480\u0007\n\u0000\u0000\u0480\u0481"+ - "\u0001\u0000\u0000\u0000\u0481\u0482\u0006\u007f\u001b\u0000\u0482\u010e"+ - "\u0001\u0000\u0000\u0000\u0483\u0484\b\"\u0000\u0000\u0484\u0110\u0001"+ - "\u0000\u0000\u0000\u0485\u0487\u0003\u010f\u0080\u0000\u0486\u0485\u0001"+ - "\u0000\u0000\u0000\u0487\u0488\u0001\u0000\u0000\u0000\u0488\u0486\u0001"+ - "\u0000\u0000\u0000\u0488\u0489\u0001\u0000\u0000\u0000\u0489\u048a\u0001"+ - "\u0000\u0000\u0000\u048a\u048b\u0003\u0151\u00a1\u0000\u048b\u048d\u0001"+ - "\u0000\u0000\u0000\u048c\u0486\u0001\u0000\u0000\u0000\u048c\u048d\u0001"+ - "\u0000\u0000\u0000\u048d\u048f\u0001\u0000\u0000\u0000\u048e\u0490\u0003"+ - "\u010f\u0080\u0000\u048f\u048e\u0001\u0000\u0000\u0000\u0490\u0491\u0001"+ - "\u0000\u0000\u0000\u0491\u048f\u0001\u0000\u0000\u0000\u0491\u0492\u0001"+ - "\u0000\u0000\u0000\u0492\u0112\u0001\u0000\u0000\u0000\u0493\u0494\u0003"+ - "\u0111\u0081\u0000\u0494\u0495\u0001\u0000\u0000\u0000\u0495\u0496\u0006"+ - "\u0082\u001c\u0000\u0496\u0114\u0001\u0000\u0000\u0000\u0497\u0498\u0003"+ - "7\u0014\u0000\u0498\u0499\u0001\u0000\u0000\u0000\u0499\u049a\u0006\u0083"+ - "\n\u0000\u049a\u0116\u0001\u0000\u0000\u0000\u049b\u049c\u00039\u0015"+ - "\u0000\u049c\u049d\u0001\u0000\u0000\u0000\u049d\u049e\u0006\u0084\n\u0000"+ - "\u049e\u0118\u0001\u0000\u0000\u0000\u049f\u04a0\u0003;\u0016\u0000\u04a0"+ - "\u04a1\u0001\u0000\u0000\u0000\u04a1\u04a2\u0006\u0085\n\u0000\u04a2\u011a"+ - "\u0001\u0000\u0000\u0000\u04a3\u04a4\u0003=\u0017\u0000\u04a4\u04a5\u0001"+ - 
"\u0000\u0000\u0000\u04a5\u04a6\u0006\u0086\u000f\u0000\u04a6\u04a7\u0006"+ - "\u0086\u000b\u0000\u04a7\u04a8\u0006\u0086\u000b\u0000\u04a8\u011c\u0001"+ - "\u0000\u0000\u0000\u04a9\u04aa\u0003_(\u0000\u04aa\u04ab\u0001\u0000\u0000"+ - "\u0000\u04ab\u04ac\u0006\u0087\u0013\u0000\u04ac\u011e\u0001\u0000\u0000"+ - "\u0000\u04ad\u04ae\u0003c*\u0000\u04ae\u04af\u0001\u0000\u0000\u0000\u04af"+ - "\u04b0\u0006\u0088\u0012\u0000\u04b0\u0120\u0001\u0000\u0000\u0000\u04b1"+ - "\u04b2\u0003g,\u0000\u04b2\u04b3\u0001\u0000\u0000\u0000\u04b3\u04b4\u0006"+ - "\u0089\u0016\u0000\u04b4\u0122\u0001\u0000\u0000\u0000\u04b5\u04b6\u0003"+ - "\u010d\u007f\u0000\u04b6\u04b7\u0001\u0000\u0000\u0000\u04b7\u04b8\u0006"+ - "\u008a\u001d\u0000\u04b8\u0124\u0001\u0000\u0000\u0000\u04b9\u04ba\u0003"+ - "\u00e9m\u0000\u04ba\u04bb\u0001\u0000\u0000\u0000\u04bb\u04bc\u0006\u008b"+ - "\u0019\u0000\u04bc\u0126\u0001\u0000\u0000\u0000\u04bd\u04be\u0003\u00ad"+ - "O\u0000\u04be\u04bf\u0001\u0000\u0000\u0000\u04bf\u04c0\u0006\u008c\u001e"+ - "\u0000\u04c0\u0128\u0001\u0000\u0000\u0000\u04c1\u04c2\u0003\u007f8\u0000"+ - "\u04c2\u04c3\u0001\u0000\u0000\u0000\u04c3\u04c4\u0006\u008d\u0017\u0000"+ - "\u04c4\u012a\u0001\u0000\u0000\u0000\u04c5\u04c6\u0003\u00a3J\u0000\u04c6"+ - "\u04c7\u0001\u0000\u0000\u0000\u04c7\u04c8\u0006\u008e\u0018\u0000\u04c8"+ - "\u012c\u0001\u0000\u0000\u0000\u04c9\u04ca\u00037\u0014\u0000\u04ca\u04cb"+ - "\u0001\u0000\u0000\u0000\u04cb\u04cc\u0006\u008f\n\u0000\u04cc\u012e\u0001"+ - "\u0000\u0000\u0000\u04cd\u04ce\u00039\u0015\u0000\u04ce\u04cf\u0001\u0000"+ - "\u0000\u0000\u04cf\u04d0\u0006\u0090\n\u0000\u04d0\u0130\u0001\u0000\u0000"+ - "\u0000\u04d1\u04d2\u0003;\u0016\u0000\u04d2\u04d3\u0001\u0000\u0000\u0000"+ - "\u04d3\u04d4\u0006\u0091\n\u0000\u04d4\u0132\u0001\u0000\u0000\u0000\u04d5"+ - "\u04d6\u0003=\u0017\u0000\u04d6\u04d7\u0001\u0000\u0000\u0000\u04d7\u04d8"+ - "\u0006\u0092\u000f\u0000\u04d8\u04d9\u0006\u0092\u000b\u0000\u04d9\u0134"+ - 
"\u0001\u0000\u0000\u0000\u04da\u04db\u0003g,\u0000\u04db\u04dc\u0001\u0000"+ - "\u0000\u0000\u04dc\u04dd\u0006\u0093\u0016\u0000\u04dd\u0136\u0001\u0000"+ - "\u0000\u0000\u04de\u04df\u0003\u007f8\u0000\u04df\u04e0\u0001\u0000\u0000"+ - "\u0000\u04e0\u04e1\u0006\u0094\u0017\u0000\u04e1\u0138\u0001\u0000\u0000"+ - "\u0000\u04e2\u04e3\u0003\u00a3J\u0000\u04e3\u04e4\u0001\u0000\u0000\u0000"+ - "\u04e4\u04e5\u0006\u0095\u0018\u0000\u04e5\u013a\u0001\u0000\u0000\u0000"+ - "\u04e6\u04e7\u0003\u00adO\u0000\u04e7\u04e8\u0001\u0000\u0000\u0000\u04e8"+ - "\u04e9\u0006\u0096\u001e\u0000\u04e9\u013c\u0001\u0000\u0000\u0000\u04ea"+ - "\u04eb\u0003\u00a9M\u0000\u04eb\u04ec\u0001\u0000\u0000\u0000\u04ec\u04ed"+ - "\u0006\u0097\u001f\u0000\u04ed\u013e\u0001\u0000\u0000\u0000\u04ee\u04ef"+ - "\u00037\u0014\u0000\u04ef\u04f0\u0001\u0000\u0000\u0000\u04f0\u04f1\u0006"+ - "\u0098\n\u0000\u04f1\u0140\u0001\u0000\u0000\u0000\u04f2\u04f3\u00039"+ - "\u0015\u0000\u04f3\u04f4\u0001\u0000\u0000\u0000\u04f4\u04f5\u0006\u0099"+ - "\n\u0000\u04f5\u0142\u0001\u0000\u0000\u0000\u04f6\u04f7\u0003;\u0016"+ - "\u0000\u04f7\u04f8\u0001\u0000\u0000\u0000\u04f8\u04f9\u0006\u009a\n\u0000"+ - "\u04f9\u0144\u0001\u0000\u0000\u0000\u04fa\u04fb\u0003=\u0017\u0000\u04fb"+ - "\u04fc\u0001\u0000\u0000\u0000\u04fc\u04fd\u0006\u009b\u000f\u0000\u04fd"+ - "\u04fe\u0006\u009b\u000b\u0000\u04fe\u0146\u0001\u0000\u0000\u0000\u04ff"+ - "\u0500\u0007\u0001\u0000\u0000\u0500\u0501\u0007\t\u0000\u0000\u0501\u0502"+ - "\u0007\u000f\u0000\u0000\u0502\u0503\u0007\u0007\u0000\u0000\u0503\u0148"+ - "\u0001\u0000\u0000\u0000\u0504\u0505\u00037\u0014\u0000\u0505\u0506\u0001"+ - "\u0000\u0000\u0000\u0506\u0507\u0006\u009d\n\u0000\u0507\u014a\u0001\u0000"+ - "\u0000\u0000\u0508\u0509\u00039\u0015\u0000\u0509\u050a\u0001\u0000\u0000"+ - "\u0000\u050a\u050b\u0006\u009e\n\u0000\u050b\u014c\u0001\u0000\u0000\u0000"+ - "\u050c\u050d\u0003;\u0016\u0000\u050d\u050e\u0001\u0000\u0000\u0000\u050e"+ - 
"\u050f\u0006\u009f\n\u0000\u050f\u014e\u0001\u0000\u0000\u0000\u0510\u0511"+ - "\u0003\u00a7L\u0000\u0511\u0512\u0001\u0000\u0000\u0000\u0512\u0513\u0006"+ - "\u00a0\u0010\u0000\u0513\u0514\u0006\u00a0\u000b\u0000\u0514\u0150\u0001"+ - "\u0000\u0000\u0000\u0515\u0516\u0005:\u0000\u0000\u0516\u0152\u0001\u0000"+ - "\u0000\u0000\u0517\u051d\u0003I\u001d\u0000\u0518\u051d\u0003?\u0018\u0000"+ - "\u0519\u051d\u0003g,\u0000\u051a\u051d\u0003A\u0019\u0000\u051b\u051d"+ - "\u0003O \u0000\u051c\u0517\u0001\u0000\u0000\u0000\u051c\u0518\u0001\u0000"+ - "\u0000\u0000\u051c\u0519\u0001\u0000\u0000\u0000\u051c\u051a\u0001\u0000"+ - "\u0000\u0000\u051c\u051b\u0001\u0000\u0000\u0000\u051d\u051e\u0001\u0000"+ - "\u0000\u0000\u051e\u051c\u0001\u0000\u0000\u0000\u051e\u051f\u0001\u0000"+ - "\u0000\u0000\u051f\u0154\u0001\u0000\u0000\u0000\u0520\u0521\u00037\u0014"+ - "\u0000\u0521\u0522\u0001\u0000\u0000\u0000\u0522\u0523\u0006\u00a3\n\u0000"+ - "\u0523\u0156\u0001\u0000\u0000\u0000\u0524\u0525\u00039\u0015\u0000\u0525"+ - "\u0526\u0001\u0000\u0000\u0000\u0526\u0527\u0006\u00a4\n\u0000\u0527\u0158"+ - "\u0001\u0000\u0000\u0000\u0528\u0529\u0003;\u0016\u0000\u0529\u052a\u0001"+ - "\u0000\u0000\u0000\u052a\u052b\u0006\u00a5\n\u0000\u052b\u015a\u0001\u0000"+ - "\u0000\u0000\u052c\u052d\u0003=\u0017\u0000\u052d\u052e\u0001\u0000\u0000"+ - "\u0000\u052e\u052f\u0006\u00a6\u000f\u0000\u052f\u0530\u0006\u00a6\u000b"+ - "\u0000\u0530\u015c\u0001\u0000\u0000\u0000\u0531\u0532\u0003\u0151\u00a1"+ - "\u0000\u0532\u0533\u0001\u0000\u0000\u0000\u0533\u0534\u0006\u00a7\u0011"+ - "\u0000\u0534\u015e\u0001\u0000\u0000\u0000\u0535\u0536\u0003c*\u0000\u0536"+ - "\u0537\u0001\u0000\u0000\u0000\u0537\u0538\u0006\u00a8\u0012\u0000\u0538"+ - "\u0160\u0001\u0000\u0000\u0000\u0539\u053a\u0003g,\u0000\u053a\u053b\u0001"+ - "\u0000\u0000\u0000\u053b\u053c\u0006\u00a9\u0016\u0000\u053c\u0162\u0001"+ - "\u0000\u0000\u0000\u053d\u053e\u0003\u010b~\u0000\u053e\u053f\u0001\u0000"+ - 
"\u0000\u0000\u053f\u0540\u0006\u00aa \u0000\u0540\u0541\u0006\u00aa!\u0000"+ - "\u0541\u0164\u0001\u0000\u0000\u0000\u0542\u0543\u0003\u00cf`\u0000\u0543"+ - "\u0544\u0001\u0000\u0000\u0000\u0544\u0545\u0006\u00ab\u0014\u0000\u0545"+ - "\u0166\u0001\u0000\u0000\u0000\u0546\u0547\u0003S\"\u0000\u0547\u0548"+ - "\u0001\u0000\u0000\u0000\u0548\u0549\u0006\u00ac\u0015\u0000\u0549\u0168"+ - "\u0001\u0000\u0000\u0000\u054a\u054b\u00037\u0014\u0000\u054b\u054c\u0001"+ - "\u0000\u0000\u0000\u054c\u054d\u0006\u00ad\n\u0000\u054d\u016a\u0001\u0000"+ - "\u0000\u0000\u054e\u054f\u00039\u0015\u0000\u054f\u0550\u0001\u0000\u0000"+ - "\u0000\u0550\u0551\u0006\u00ae\n\u0000\u0551\u016c\u0001\u0000\u0000\u0000"+ - "\u0552\u0553\u0003;\u0016\u0000\u0553\u0554\u0001\u0000\u0000\u0000\u0554"+ - "\u0555\u0006\u00af\n\u0000\u0555\u016e\u0001\u0000\u0000\u0000\u0556\u0557"+ - "\u0003=\u0017\u0000\u0557\u0558\u0001\u0000\u0000\u0000\u0558\u0559\u0006"+ - "\u00b0\u000f\u0000\u0559\u055a\u0006\u00b0\u000b\u0000\u055a\u055b\u0006"+ - "\u00b0\u000b\u0000\u055b\u0170\u0001\u0000\u0000\u0000\u055c\u055d\u0003"+ - "c*\u0000\u055d\u055e\u0001\u0000\u0000\u0000\u055e\u055f\u0006\u00b1\u0012"+ - "\u0000\u055f\u0172\u0001\u0000\u0000\u0000\u0560\u0561\u0003g,\u0000\u0561"+ - "\u0562\u0001\u0000\u0000\u0000\u0562\u0563\u0006\u00b2\u0016\u0000\u0563"+ - "\u0174\u0001\u0000\u0000\u0000\u0564\u0565\u0003\u00e9m\u0000\u0565\u0566"+ - "\u0001\u0000\u0000\u0000\u0566\u0567\u0006\u00b3\u0019\u0000\u0567\u0176"+ - "\u0001\u0000\u0000\u0000\u0568\u0569\u00037\u0014\u0000\u0569\u056a\u0001"+ - "\u0000\u0000\u0000\u056a\u056b\u0006\u00b4\n\u0000\u056b\u0178\u0001\u0000"+ - "\u0000\u0000\u056c\u056d\u00039\u0015\u0000\u056d\u056e\u0001\u0000\u0000"+ - "\u0000\u056e\u056f\u0006\u00b5\n\u0000\u056f\u017a\u0001\u0000\u0000\u0000"+ - "\u0570\u0571\u0003;\u0016\u0000\u0571\u0572\u0001\u0000\u0000\u0000\u0572"+ - "\u0573\u0006\u00b6\n\u0000\u0573\u017c\u0001\u0000\u0000\u0000\u0574\u0575"+ - 
"\u0003=\u0017\u0000\u0575\u0576\u0001\u0000\u0000\u0000\u0576\u0577\u0006"+ - "\u00b7\u000f\u0000\u0577\u0578\u0006\u00b7\u000b\u0000\u0578\u017e\u0001"+ - "\u0000\u0000\u0000\u0579\u057a\u0003\u00cf`\u0000\u057a\u057b\u0001\u0000"+ - "\u0000\u0000\u057b\u057c\u0006\u00b8\u0014\u0000\u057c\u057d\u0006\u00b8"+ - "\u000b\u0000\u057d\u057e\u0006\u00b8\"\u0000\u057e\u0180\u0001\u0000\u0000"+ - "\u0000\u057f\u0580\u0003S\"\u0000\u0580\u0581\u0001\u0000\u0000\u0000"+ - "\u0581\u0582\u0006\u00b9\u0015\u0000\u0582\u0583\u0006\u00b9\u000b\u0000"+ - "\u0583\u0584\u0006\u00b9\"\u0000\u0584\u0182\u0001\u0000\u0000\u0000\u0585"+ - "\u0586\u00037\u0014\u0000\u0586\u0587\u0001\u0000\u0000\u0000\u0587\u0588"+ - "\u0006\u00ba\n\u0000\u0588\u0184\u0001\u0000\u0000\u0000\u0589\u058a\u0003"+ - "9\u0015\u0000\u058a\u058b\u0001\u0000\u0000\u0000\u058b\u058c\u0006\u00bb"+ - "\n\u0000\u058c\u0186\u0001\u0000\u0000\u0000\u058d\u058e\u0003;\u0016"+ - "\u0000\u058e\u058f\u0001\u0000\u0000\u0000\u058f\u0590\u0006\u00bc\n\u0000"+ - "\u0590\u0188\u0001\u0000\u0000\u0000\u0591\u0592\u0003\u0151\u00a1\u0000"+ - "\u0592\u0593\u0001\u0000\u0000\u0000\u0593\u0594\u0006\u00bd\u0011\u0000"+ - "\u0594\u0595\u0006\u00bd\u000b\u0000\u0595\u0596\u0006\u00bd\t\u0000\u0596"+ - "\u018a\u0001\u0000\u0000\u0000\u0597\u0598\u0003c*\u0000\u0598\u0599\u0001"+ - "\u0000\u0000\u0000\u0599\u059a\u0006\u00be\u0012\u0000\u059a\u059b\u0006"+ - "\u00be\u000b\u0000\u059b\u059c\u0006\u00be\t\u0000\u059c\u018c\u0001\u0000"+ - "\u0000\u0000\u059d\u059e\u00037\u0014\u0000\u059e\u059f\u0001\u0000\u0000"+ - "\u0000\u059f\u05a0\u0006\u00bf\n\u0000\u05a0\u018e\u0001\u0000\u0000\u0000"+ - "\u05a1\u05a2\u00039\u0015\u0000\u05a2\u05a3\u0001\u0000\u0000\u0000\u05a3"+ - "\u05a4\u0006\u00c0\n\u0000\u05a4\u0190\u0001\u0000\u0000\u0000\u05a5\u05a6"+ - "\u0003;\u0016\u0000\u05a6\u05a7\u0001\u0000\u0000\u0000\u05a7\u05a8\u0006"+ - "\u00c1\n\u0000\u05a8\u0192\u0001\u0000\u0000\u0000\u05a9\u05aa\u0003\u00ad"+ - 
"O\u0000\u05aa\u05ab\u0001\u0000\u0000\u0000\u05ab\u05ac\u0006\u00c2\u000b"+ - "\u0000\u05ac\u05ad\u0006\u00c2\u0000\u0000\u05ad\u05ae\u0006\u00c2\u001e"+ - "\u0000\u05ae\u0194\u0001\u0000\u0000\u0000\u05af\u05b0\u0003\u00a9M\u0000"+ - "\u05b0\u05b1\u0001\u0000\u0000\u0000\u05b1\u05b2\u0006\u00c3\u000b\u0000"+ - "\u05b2\u05b3\u0006\u00c3\u0000\u0000\u05b3\u05b4\u0006\u00c3\u001f\u0000"+ - "\u05b4\u0196\u0001\u0000\u0000\u0000\u05b5\u05b6\u0003Y%\u0000\u05b6\u05b7"+ - "\u0001\u0000\u0000\u0000\u05b7\u05b8\u0006\u00c4\u000b\u0000\u05b8\u05b9"+ - "\u0006\u00c4\u0000\u0000\u05b9\u05ba\u0006\u00c4#\u0000\u05ba\u0198\u0001"+ - "\u0000\u0000\u0000\u05bb\u05bc\u0003=\u0017\u0000\u05bc\u05bd\u0001\u0000"+ - "\u0000\u0000\u05bd\u05be\u0006\u00c5\u000f\u0000\u05be\u05bf\u0006\u00c5"+ - "\u000b\u0000\u05bf\u019a\u0001\u0000\u0000\u0000A\u0000\u0001\u0002\u0003"+ - "\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u0243\u024d\u0251\u0254"+ - "\u025d\u025f\u026a\u027d\u0282\u028b\u0292\u0297\u0299\u02a4\u02ac\u02af"+ - "\u02b1\u02b6\u02bb\u02c1\u02c8\u02cd\u02d3\u02d6\u02de\u02e2\u0367\u036c"+ - "\u0373\u0375\u0385\u038a\u038f\u0391\u0397\u03e4\u03e9\u0418\u041c\u0421"+ - "\u0426\u042b\u042d\u0431\u0433\u0488\u048c\u0491\u051c\u051e$\u0005\u0001"+ - "\u0000\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0002\u0000\u0005\u0003"+ - "\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0005\u000b\u0000"+ - "\u0005\r\u0000\u0000\u0001\u0000\u0004\u0000\u0000\u0007\u0010\u0000\u0007"+ - "A\u0000\u0005\u0000\u0000\u0007\u0018\u0000\u0007B\u0000\u0007h\u0000"+ - "\u0007!\u0000\u0007\u001f\u0000\u0007L\u0000\u0007\u0019\u0000\u0007#"+ - "\u0000\u0007/\u0000\u0007@\u0000\u0007P\u0000\u0005\n\u0000\u0005\u0007"+ - "\u0000\u0007Z\u0000\u0007Y\u0000\u0007D\u0000\u0007C\u0000\u0007X\u0000"+ - "\u0005\f\u0000\u0005\u000e\u0000\u0007\u001c\u0000"; + "H\u0001H\u0001H\u0001H\u0001I\u0001I\u0001I\u0001I\u0001J\u0001J\u0001"+ + "J\u0003J\u0367\bJ\u0001J\u0005J\u036a\bJ\nJ\fJ\u036d\tJ\u0001J\u0001J"+ + 
"\u0004J\u0371\bJ\u000bJ\fJ\u0372\u0003J\u0375\bJ\u0001K\u0001K\u0001K"+ + "\u0001K\u0001K\u0001L\u0001L\u0001L\u0001L\u0001L\u0001M\u0001M\u0005"+ + "M\u0383\bM\nM\fM\u0386\tM\u0001M\u0001M\u0003M\u038a\bM\u0001M\u0004M"+ + "\u038d\bM\u000bM\fM\u038e\u0003M\u0391\bM\u0001N\u0001N\u0004N\u0395\b"+ + "N\u000bN\fN\u0396\u0001N\u0001N\u0001O\u0001O\u0001P\u0001P\u0001P\u0001"+ + "P\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001S\u0001"+ + "S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001T\u0001U\u0001"+ + "U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001"+ + "W\u0001X\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001"+ + "Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001"+ + "\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001^\u0001^\u0001"+ + "^\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001_\u0003_\u03e4\b_\u0001"+ + "`\u0004`\u03e7\b`\u000b`\f`\u03e8\u0001a\u0001a\u0001a\u0001a\u0001b\u0001"+ + "b\u0001b\u0001b\u0001c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001"+ + "d\u0001e\u0001e\u0001e\u0001e\u0001f\u0001f\u0001f\u0001f\u0001f\u0001"+ + "g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001h\u0001i\u0001i\u0001"+ + "i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001k\u0001k\u0003"+ + "k\u0418\bk\u0001l\u0001l\u0003l\u041c\bl\u0001l\u0005l\u041f\bl\nl\fl"+ + "\u0422\tl\u0001l\u0001l\u0003l\u0426\bl\u0001l\u0004l\u0429\bl\u000bl"+ + "\fl\u042a\u0003l\u042d\bl\u0001m\u0001m\u0004m\u0431\bm\u000bm\fm\u0432"+ + "\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001"+ + "p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001"+ + "r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001t\u0001"+ + "u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001v\u0001w\u0001w\u0001"+ + "w\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001y\u0001y\u0001z\u0001"+ + "z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001"+ + 
"|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001"+ + "~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001"+ + "\u007f\u0001\u007f\u0001\u0080\u0001\u0080\u0001\u0081\u0004\u0081\u0486"+ + "\b\u0081\u000b\u0081\f\u0081\u0487\u0001\u0081\u0001\u0081\u0003\u0081"+ + "\u048c\b\u0081\u0001\u0081\u0004\u0081\u048f\b\u0081\u000b\u0081\f\u0081"+ + "\u0490\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0083\u0001"+ + "\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001"+ + "\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086\u0001"+ + "\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087\u0001"+ + "\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001"+ + "\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001"+ + "\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001"+ + "\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001"+ + "\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001"+ + "\u008e\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001"+ + "\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001"+ + "\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001"+ + "\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001"+ + "\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001"+ + "\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001"+ + "\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0098\u0001"+ + "\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u009a\u0001\u009a\u0001"+ + "\u009a\u0001\u009a\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009b\u0001"+ + "\u009b\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001"+ + "\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001"+ + 
"\u009e\u0001\u009e\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0001"+ + "\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a1\u0001"+ + "\u00a1\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0004"+ + "\u00a2\u051c\b\u00a2\u000b\u00a2\f\u00a2\u051d\u0001\u00a3\u0001\u00a3"+ + "\u0001\u00a3\u0001\u00a3\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4"+ + "\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6\u0001\u00a6"+ + "\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7"+ + "\u0001\u00a7\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9"+ + "\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00aa\u0001\u00aa\u0001\u00aa"+ + "\u0001\u00aa\u0001\u00aa\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab"+ + "\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad"+ + "\u0001\u00ad\u0001\u00ad\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae"+ + "\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00b0\u0001\u00b0"+ + "\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b1\u0001\u00b1"+ + "\u0001\u00b1\u0001\u00b1\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2"+ + "\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b4\u0001\u00b4"+ + "\u0001\u00b4\u0001\u00b4\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5"+ + "\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7"+ + "\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b8\u0001\u00b8\u0001\u00b8"+ + "\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b9\u0001\u00b9\u0001\u00b9"+ + "\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001\u00ba"+ + "\u0001\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bc"+ + "\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd\u0001\u00bd"+ + "\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be\u0001\u00be"+ + "\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00bf\u0001\u00bf\u0001\u00bf"+ + 
"\u0001\u00bf\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1"+ + "\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c2\u0001\u00c2\u0001\u00c2"+ + "\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c3\u0001\u00c3\u0001\u00c3"+ + "\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c4\u0001\u00c4\u0001\u00c4"+ + "\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001\u00c5\u0001\u00c5\u0001\u00c5"+ + "\u0001\u00c5\u0001\u00c5\u0002\u025f\u02a4\u0000\u00c6\u000f\u0001\u0011"+ + "\u0002\u0013\u0003\u0015\u0004\u0017\u0005\u0019\u0006\u001b\u0007\u001d"+ + "\b\u001f\t!\n#\u000b%\f\'\r)\u000e+\u000f-\u0010/\u00111\u00123\u0013"+ + "5\u00147\u00159\u0016;\u0017=\u0018?\u0000A\u0000C\u0000E\u0000G\u0000"+ + "I\u0000K\u0000M\u0000O\u0000Q\u0000S\u0019U\u001aW\u001bY\u001c[\u001d"+ + "]\u001e_\u001fa c!e\"g#i$k%m&o\'q(s)u*w+y,{-}.\u007f/\u00810\u00831\u0085"+ + "2\u00873\u00894\u008b5\u008d6\u008f7\u00918\u00939\u0095:\u0097;\u0099"+ + "<\u009b=\u009d>\u009f?\u00a1\u0000\u00a3@\u00a5A\u00a7B\u00a9C\u00ab\u0000"+ + "\u00adD\u00afE\u00b1F\u00b3G\u00b5\u0000\u00b7\u0000\u00b9H\u00bbI\u00bd"+ + "J\u00bf\u0000\u00c1\u0000\u00c3\u0000\u00c5\u0000\u00c7\u0000\u00c9\u0000"+ + "\u00cbK\u00cd\u0000\u00cfL\u00d1\u0000\u00d3\u0000\u00d5M\u00d7N\u00d9"+ + "O\u00db\u0000\u00dd\u0000\u00df\u0000\u00e1\u0000\u00e3\u0000\u00e5\u0000"+ + "\u00e7\u0000\u00e9P\u00ebQ\u00edR\u00efS\u00f1\u0000\u00f3\u0000\u00f5"+ + "\u0000\u00f7\u0000\u00f9\u0000\u00fb\u0000\u00fdT\u00ff\u0000\u0101U\u0103"+ + "V\u0105W\u0107\u0000\u0109\u0000\u010bX\u010dY\u010f\u0000\u0111Z\u0113"+ + "\u0000\u0115[\u0117\\\u0119]\u011b\u0000\u011d\u0000\u011f\u0000\u0121"+ + "\u0000\u0123\u0000\u0125\u0000\u0127\u0000\u0129\u0000\u012b\u0000\u012d"+ + "^\u012f_\u0131`\u0133\u0000\u0135\u0000\u0137\u0000\u0139\u0000\u013b"+ + "\u0000\u013d\u0000\u013fa\u0141b\u0143c\u0145\u0000\u0147d\u0149e\u014b"+ + "f\u014dg\u014f\u0000\u0151h\u0153i\u0155j\u0157k\u0159l\u015b\u0000\u015d"+ + 
"\u0000\u015f\u0000\u0161\u0000\u0163\u0000\u0165\u0000\u0167\u0000\u0169"+ + "m\u016bn\u016do\u016f\u0000\u0171\u0000\u0173\u0000\u0175\u0000\u0177"+ + "p\u0179q\u017br\u017d\u0000\u017f\u0000\u0181\u0000\u0183s\u0185t\u0187"+ + "u\u0189\u0000\u018b\u0000\u018dv\u018fw\u0191x\u0193\u0000\u0195\u0000"+ + "\u0197\u0000\u0199\u0000\u000f\u0000\u0001\u0002\u0003\u0004\u0005\u0006"+ + "\u0007\b\t\n\u000b\f\r\u000e#\u0002\u0000DDdd\u0002\u0000IIii\u0002\u0000"+ + "SSss\u0002\u0000EEee\u0002\u0000CCcc\u0002\u0000TTtt\u0002\u0000RRrr\u0002"+ + "\u0000OOoo\u0002\u0000PPpp\u0002\u0000NNnn\u0002\u0000HHhh\u0002\u0000"+ + "VVvv\u0002\u0000AAaa\u0002\u0000LLll\u0002\u0000XXxx\u0002\u0000FFff\u0002"+ + "\u0000MMmm\u0002\u0000GGgg\u0002\u0000KKkk\u0002\u0000WWww\u0002\u0000"+ + "UUuu\u0006\u0000\t\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r"+ + "\r \u0001\u000009\u0002\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004\u0000"+ + "\n\n\r\r\"\"\\\\\u0002\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002\u0000"+ + "YYyy\u000b\u0000\t\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u000b\u0000"+ + "\t\n\r\r \"#,,//::<<>?\\\\||\u05db\u0000\u000f\u0001\u0000\u0000\u0000"+ + "\u0000\u0011\u0001\u0000\u0000\u0000\u0000\u0013\u0001\u0000\u0000\u0000"+ + "\u0000\u0015\u0001\u0000\u0000\u0000\u0000\u0017\u0001\u0000\u0000\u0000"+ + "\u0000\u0019\u0001\u0000\u0000\u0000\u0000\u001b\u0001\u0000\u0000\u0000"+ + "\u0000\u001d\u0001\u0000\u0000\u0000\u0000\u001f\u0001\u0000\u0000\u0000"+ + "\u0000!\u0001\u0000\u0000\u0000\u0000#\u0001\u0000\u0000\u0000\u0000%"+ + "\u0001\u0000\u0000\u0000\u0000\'\u0001\u0000\u0000\u0000\u0000)\u0001"+ + "\u0000\u0000\u0000\u0000+\u0001\u0000\u0000\u0000\u0000-\u0001\u0000\u0000"+ + "\u0000\u0000/\u0001\u0000\u0000\u0000\u00001\u0001\u0000\u0000\u0000\u0000"+ + "3\u0001\u0000\u0000\u0000\u00005\u0001\u0000\u0000\u0000\u00007\u0001"+ + "\u0000\u0000\u0000\u00009\u0001\u0000\u0000\u0000\u0000;\u0001\u0000\u0000"+ + 
"\u0000\u0001=\u0001\u0000\u0000\u0000\u0001S\u0001\u0000\u0000\u0000\u0001"+ + "U\u0001\u0000\u0000\u0000\u0001W\u0001\u0000\u0000\u0000\u0001Y\u0001"+ + "\u0000\u0000\u0000\u0001[\u0001\u0000\u0000\u0000\u0001]\u0001\u0000\u0000"+ + "\u0000\u0001_\u0001\u0000\u0000\u0000\u0001a\u0001\u0000\u0000\u0000\u0001"+ + "c\u0001\u0000\u0000\u0000\u0001e\u0001\u0000\u0000\u0000\u0001g\u0001"+ + "\u0000\u0000\u0000\u0001i\u0001\u0000\u0000\u0000\u0001k\u0001\u0000\u0000"+ + "\u0000\u0001m\u0001\u0000\u0000\u0000\u0001o\u0001\u0000\u0000\u0000\u0001"+ + "q\u0001\u0000\u0000\u0000\u0001s\u0001\u0000\u0000\u0000\u0001u\u0001"+ + "\u0000\u0000\u0000\u0001w\u0001\u0000\u0000\u0000\u0001y\u0001\u0000\u0000"+ + "\u0000\u0001{\u0001\u0000\u0000\u0000\u0001}\u0001\u0000\u0000\u0000\u0001"+ + "\u007f\u0001\u0000\u0000\u0000\u0001\u0081\u0001\u0000\u0000\u0000\u0001"+ + "\u0083\u0001\u0000\u0000\u0000\u0001\u0085\u0001\u0000\u0000\u0000\u0001"+ + "\u0087\u0001\u0000\u0000\u0000\u0001\u0089\u0001\u0000\u0000\u0000\u0001"+ + "\u008b\u0001\u0000\u0000\u0000\u0001\u008d\u0001\u0000\u0000\u0000\u0001"+ + "\u008f\u0001\u0000\u0000\u0000\u0001\u0091\u0001\u0000\u0000\u0000\u0001"+ + "\u0093\u0001\u0000\u0000\u0000\u0001\u0095\u0001\u0000\u0000\u0000\u0001"+ + "\u0097\u0001\u0000\u0000\u0000\u0001\u0099\u0001\u0000\u0000\u0000\u0001"+ + "\u009b\u0001\u0000\u0000\u0000\u0001\u009d\u0001\u0000\u0000\u0000\u0001"+ + "\u009f\u0001\u0000\u0000\u0000\u0001\u00a1\u0001\u0000\u0000\u0000\u0001"+ + "\u00a3\u0001\u0000\u0000\u0000\u0001\u00a5\u0001\u0000\u0000\u0000\u0001"+ + "\u00a7\u0001\u0000\u0000\u0000\u0001\u00a9\u0001\u0000\u0000\u0000\u0001"+ + "\u00ad\u0001\u0000\u0000\u0000\u0001\u00af\u0001\u0000\u0000\u0000\u0001"+ + "\u00b1\u0001\u0000\u0000\u0000\u0001\u00b3\u0001\u0000\u0000\u0000\u0002"+ + "\u00b5\u0001\u0000\u0000\u0000\u0002\u00b7\u0001\u0000\u0000\u0000\u0002"+ + "\u00b9\u0001\u0000\u0000\u0000\u0002\u00bb\u0001\u0000\u0000\u0000\u0002"+ + 
"\u00bd\u0001\u0000\u0000\u0000\u0003\u00bf\u0001\u0000\u0000\u0000\u0003"+ + "\u00c1\u0001\u0000\u0000\u0000\u0003\u00c3\u0001\u0000\u0000\u0000\u0003"+ + "\u00c5\u0001\u0000\u0000\u0000\u0003\u00c7\u0001\u0000\u0000\u0000\u0003"+ + "\u00c9\u0001\u0000\u0000\u0000\u0003\u00cb\u0001\u0000\u0000\u0000\u0003"+ + "\u00cf\u0001\u0000\u0000\u0000\u0003\u00d1\u0001\u0000\u0000\u0000\u0003"+ + "\u00d3\u0001\u0000\u0000\u0000\u0003\u00d5\u0001\u0000\u0000\u0000\u0003"+ + "\u00d7\u0001\u0000\u0000\u0000\u0003\u00d9\u0001\u0000\u0000\u0000\u0004"+ + "\u00db\u0001\u0000\u0000\u0000\u0004\u00dd\u0001\u0000\u0000\u0000\u0004"+ + "\u00df\u0001\u0000\u0000\u0000\u0004\u00e1\u0001\u0000\u0000\u0000\u0004"+ + "\u00e3\u0001\u0000\u0000\u0000\u0004\u00e9\u0001\u0000\u0000\u0000\u0004"+ + "\u00eb\u0001\u0000\u0000\u0000\u0004\u00ed\u0001\u0000\u0000\u0000\u0004"+ + "\u00ef\u0001\u0000\u0000\u0000\u0005\u00f1\u0001\u0000\u0000\u0000\u0005"+ + "\u00f3\u0001\u0000\u0000\u0000\u0005\u00f5\u0001\u0000\u0000\u0000\u0005"+ + "\u00f7\u0001\u0000\u0000\u0000\u0005\u00f9\u0001\u0000\u0000\u0000\u0005"+ + "\u00fb\u0001\u0000\u0000\u0000\u0005\u00fd\u0001\u0000\u0000\u0000\u0005"+ + "\u00ff\u0001\u0000\u0000\u0000\u0005\u0101\u0001\u0000\u0000\u0000\u0005"+ + "\u0103\u0001\u0000\u0000\u0000\u0005\u0105\u0001\u0000\u0000\u0000\u0006"+ + "\u0107\u0001\u0000\u0000\u0000\u0006\u0109\u0001\u0000\u0000\u0000\u0006"+ + "\u010b\u0001\u0000\u0000\u0000\u0006\u010d\u0001\u0000\u0000\u0000\u0006"+ + "\u0111\u0001\u0000\u0000\u0000\u0006\u0113\u0001\u0000\u0000\u0000\u0006"+ + "\u0115\u0001\u0000\u0000\u0000\u0006\u0117\u0001\u0000\u0000\u0000\u0006"+ + "\u0119\u0001\u0000\u0000\u0000\u0007\u011b\u0001\u0000\u0000\u0000\u0007"+ + "\u011d\u0001\u0000\u0000\u0000\u0007\u011f\u0001\u0000\u0000\u0000\u0007"+ + "\u0121\u0001\u0000\u0000\u0000\u0007\u0123\u0001\u0000\u0000\u0000\u0007"+ + "\u0125\u0001\u0000\u0000\u0000\u0007\u0127\u0001\u0000\u0000\u0000\u0007"+ + 
"\u0129\u0001\u0000\u0000\u0000\u0007\u012b\u0001\u0000\u0000\u0000\u0007"+ + "\u012d\u0001\u0000\u0000\u0000\u0007\u012f\u0001\u0000\u0000\u0000\u0007"+ + "\u0131\u0001\u0000\u0000\u0000\b\u0133\u0001\u0000\u0000\u0000\b\u0135"+ + "\u0001\u0000\u0000\u0000\b\u0137\u0001\u0000\u0000\u0000\b\u0139\u0001"+ + "\u0000\u0000\u0000\b\u013b\u0001\u0000\u0000\u0000\b\u013d\u0001\u0000"+ + "\u0000\u0000\b\u013f\u0001\u0000\u0000\u0000\b\u0141\u0001\u0000\u0000"+ + "\u0000\b\u0143\u0001\u0000\u0000\u0000\t\u0145\u0001\u0000\u0000\u0000"+ + "\t\u0147\u0001\u0000\u0000\u0000\t\u0149\u0001\u0000\u0000\u0000\t\u014b"+ + "\u0001\u0000\u0000\u0000\t\u014d\u0001\u0000\u0000\u0000\n\u014f\u0001"+ + "\u0000\u0000\u0000\n\u0151\u0001\u0000\u0000\u0000\n\u0153\u0001\u0000"+ + "\u0000\u0000\n\u0155\u0001\u0000\u0000\u0000\n\u0157\u0001\u0000\u0000"+ + "\u0000\n\u0159\u0001\u0000\u0000\u0000\u000b\u015b\u0001\u0000\u0000\u0000"+ + "\u000b\u015d\u0001\u0000\u0000\u0000\u000b\u015f\u0001\u0000\u0000\u0000"+ + "\u000b\u0161\u0001\u0000\u0000\u0000\u000b\u0163\u0001\u0000\u0000\u0000"+ + "\u000b\u0165\u0001\u0000\u0000\u0000\u000b\u0167\u0001\u0000\u0000\u0000"+ + "\u000b\u0169\u0001\u0000\u0000\u0000\u000b\u016b\u0001\u0000\u0000\u0000"+ + "\u000b\u016d\u0001\u0000\u0000\u0000\f\u016f\u0001\u0000\u0000\u0000\f"+ + "\u0171\u0001\u0000\u0000\u0000\f\u0173\u0001\u0000\u0000\u0000\f\u0175"+ + "\u0001\u0000\u0000\u0000\f\u0177\u0001\u0000\u0000\u0000\f\u0179\u0001"+ + "\u0000\u0000\u0000\f\u017b\u0001\u0000\u0000\u0000\r\u017d\u0001\u0000"+ + "\u0000\u0000\r\u017f\u0001\u0000\u0000\u0000\r\u0181\u0001\u0000\u0000"+ + "\u0000\r\u0183\u0001\u0000\u0000\u0000\r\u0185\u0001\u0000\u0000\u0000"+ + "\r\u0187\u0001\u0000\u0000\u0000\u000e\u0189\u0001\u0000\u0000\u0000\u000e"+ + "\u018b\u0001\u0000\u0000\u0000\u000e\u018d\u0001\u0000\u0000\u0000\u000e"+ + "\u018f\u0001\u0000\u0000\u0000\u000e\u0191\u0001\u0000\u0000\u0000\u000e"+ + 
"\u0193\u0001\u0000\u0000\u0000\u000e\u0195\u0001\u0000\u0000\u0000\u000e"+ + "\u0197\u0001\u0000\u0000\u0000\u000e\u0199\u0001\u0000\u0000\u0000\u000f"+ + "\u019b\u0001\u0000\u0000\u0000\u0011\u01a5\u0001\u0000\u0000\u0000\u0013"+ + "\u01ac\u0001\u0000\u0000\u0000\u0015\u01b5\u0001\u0000\u0000\u0000\u0017"+ + "\u01bc\u0001\u0000\u0000\u0000\u0019\u01c6\u0001\u0000\u0000\u0000\u001b"+ + "\u01cd\u0001\u0000\u0000\u0000\u001d\u01d4\u0001\u0000\u0000\u0000\u001f"+ + "\u01db\u0001\u0000\u0000\u0000!\u01e3\u0001\u0000\u0000\u0000#\u01ef\u0001"+ + "\u0000\u0000\u0000%\u01f8\u0001\u0000\u0000\u0000\'\u01fe\u0001\u0000"+ + "\u0000\u0000)\u0205\u0001\u0000\u0000\u0000+\u020c\u0001\u0000\u0000\u0000"+ + "-\u0214\u0001\u0000\u0000\u0000/\u021c\u0001\u0000\u0000\u00001\u022b"+ + "\u0001\u0000\u0000\u00003\u0235\u0001\u0000\u0000\u00005\u0241\u0001\u0000"+ + "\u0000\u00007\u0247\u0001\u0000\u0000\u00009\u0258\u0001\u0000\u0000\u0000"+ + ";\u0268\u0001\u0000\u0000\u0000=\u026e\u0001\u0000\u0000\u0000?\u0272"+ + "\u0001\u0000\u0000\u0000A\u0274\u0001\u0000\u0000\u0000C\u0276\u0001\u0000"+ + "\u0000\u0000E\u0279\u0001\u0000\u0000\u0000G\u027b\u0001\u0000\u0000\u0000"+ + "I\u0284\u0001\u0000\u0000\u0000K\u0286\u0001\u0000\u0000\u0000M\u028b"+ + "\u0001\u0000\u0000\u0000O\u028d\u0001\u0000\u0000\u0000Q\u0292\u0001\u0000"+ + "\u0000\u0000S\u02b1\u0001\u0000\u0000\u0000U\u02b4\u0001\u0000\u0000\u0000"+ + "W\u02e2\u0001\u0000\u0000\u0000Y\u02e4\u0001\u0000\u0000\u0000[\u02e7"+ + "\u0001\u0000\u0000\u0000]\u02eb\u0001\u0000\u0000\u0000_\u02ef\u0001\u0000"+ + "\u0000\u0000a\u02f1\u0001\u0000\u0000\u0000c\u02f4\u0001\u0000\u0000\u0000"+ + "e\u02f6\u0001\u0000\u0000\u0000g\u02fb\u0001\u0000\u0000\u0000i\u02fd"+ + "\u0001\u0000\u0000\u0000k\u0303\u0001\u0000\u0000\u0000m\u0309\u0001\u0000"+ + "\u0000\u0000o\u030c\u0001\u0000\u0000\u0000q\u030f\u0001\u0000\u0000\u0000"+ + "s\u0314\u0001\u0000\u0000\u0000u\u0319\u0001\u0000\u0000\u0000w\u031b"+ + 
"\u0001\u0000\u0000\u0000y\u031f\u0001\u0000\u0000\u0000{\u0324\u0001\u0000"+ + "\u0000\u0000}\u032a\u0001\u0000\u0000\u0000\u007f\u032d\u0001\u0000\u0000"+ + "\u0000\u0081\u032f\u0001\u0000\u0000\u0000\u0083\u0335\u0001\u0000\u0000"+ + "\u0000\u0085\u0337\u0001\u0000\u0000\u0000\u0087\u033c\u0001\u0000\u0000"+ + "\u0000\u0089\u033f\u0001\u0000\u0000\u0000\u008b\u0342\u0001\u0000\u0000"+ + "\u0000\u008d\u0345\u0001\u0000\u0000\u0000\u008f\u0347\u0001\u0000\u0000"+ + "\u0000\u0091\u034a\u0001\u0000\u0000\u0000\u0093\u034c\u0001\u0000\u0000"+ + "\u0000\u0095\u034f\u0001\u0000\u0000\u0000\u0097\u0351\u0001\u0000\u0000"+ + "\u0000\u0099\u0353\u0001\u0000\u0000\u0000\u009b\u0355\u0001\u0000\u0000"+ + "\u0000\u009d\u0357\u0001\u0000\u0000\u0000\u009f\u0359\u0001\u0000\u0000"+ + "\u0000\u00a1\u035f\u0001\u0000\u0000\u0000\u00a3\u0374\u0001\u0000\u0000"+ + "\u0000\u00a5\u0376\u0001\u0000\u0000\u0000\u00a7\u037b\u0001\u0000\u0000"+ + "\u0000\u00a9\u0390\u0001\u0000\u0000\u0000\u00ab\u0392\u0001\u0000\u0000"+ + "\u0000\u00ad\u039a\u0001\u0000\u0000\u0000\u00af\u039c\u0001\u0000\u0000"+ + "\u0000\u00b1\u03a0\u0001\u0000\u0000\u0000\u00b3\u03a4\u0001\u0000\u0000"+ + "\u0000\u00b5\u03a8\u0001\u0000\u0000\u0000\u00b7\u03ad\u0001\u0000\u0000"+ + "\u0000\u00b9\u03b2\u0001\u0000\u0000\u0000\u00bb\u03b6\u0001\u0000\u0000"+ + "\u0000\u00bd\u03ba\u0001\u0000\u0000\u0000\u00bf\u03be\u0001\u0000\u0000"+ + "\u0000\u00c1\u03c3\u0001\u0000\u0000\u0000\u00c3\u03c7\u0001\u0000\u0000"+ + "\u0000\u00c5\u03cb\u0001\u0000\u0000\u0000\u00c7\u03cf\u0001\u0000\u0000"+ + "\u0000\u00c9\u03d3\u0001\u0000\u0000\u0000\u00cb\u03d7\u0001\u0000\u0000"+ + "\u0000\u00cd\u03e3\u0001\u0000\u0000\u0000\u00cf\u03e6\u0001\u0000\u0000"+ + "\u0000\u00d1\u03ea\u0001\u0000\u0000\u0000\u00d3\u03ee\u0001\u0000\u0000"+ + "\u0000\u00d5\u03f2\u0001\u0000\u0000\u0000\u00d7\u03f6\u0001\u0000\u0000"+ + "\u0000\u00d9\u03fa\u0001\u0000\u0000\u0000\u00db\u03fe\u0001\u0000\u0000"+ + 
"\u0000\u00dd\u0403\u0001\u0000\u0000\u0000\u00df\u0407\u0001\u0000\u0000"+ + "\u0000\u00e1\u040b\u0001\u0000\u0000\u0000\u00e3\u040f\u0001\u0000\u0000"+ + "\u0000\u00e5\u0417\u0001\u0000\u0000\u0000\u00e7\u042c\u0001\u0000\u0000"+ + "\u0000\u00e9\u0430\u0001\u0000\u0000\u0000\u00eb\u0434\u0001\u0000\u0000"+ + "\u0000\u00ed\u0438\u0001\u0000\u0000\u0000\u00ef\u043c\u0001\u0000\u0000"+ + "\u0000\u00f1\u0440\u0001\u0000\u0000\u0000\u00f3\u0445\u0001\u0000\u0000"+ + "\u0000\u00f5\u0449\u0001\u0000\u0000\u0000\u00f7\u044d\u0001\u0000\u0000"+ + "\u0000\u00f9\u0451\u0001\u0000\u0000\u0000\u00fb\u0455\u0001\u0000\u0000"+ + "\u0000\u00fd\u0459\u0001\u0000\u0000\u0000\u00ff\u045c\u0001\u0000\u0000"+ + "\u0000\u0101\u0460\u0001\u0000\u0000\u0000\u0103\u0464\u0001\u0000\u0000"+ + "\u0000\u0105\u0468\u0001\u0000\u0000\u0000\u0107\u046c\u0001\u0000\u0000"+ + "\u0000\u0109\u0471\u0001\u0000\u0000\u0000\u010b\u0476\u0001\u0000\u0000"+ + "\u0000\u010d\u047b\u0001\u0000\u0000\u0000\u010f\u0482\u0001\u0000\u0000"+ + "\u0000\u0111\u048b\u0001\u0000\u0000\u0000\u0113\u0492\u0001\u0000\u0000"+ + "\u0000\u0115\u0496\u0001\u0000\u0000\u0000\u0117\u049a\u0001\u0000\u0000"+ + "\u0000\u0119\u049e\u0001\u0000\u0000\u0000\u011b\u04a2\u0001\u0000\u0000"+ + "\u0000\u011d\u04a8\u0001\u0000\u0000\u0000\u011f\u04ac\u0001\u0000\u0000"+ + "\u0000\u0121\u04b0\u0001\u0000\u0000\u0000\u0123\u04b4\u0001\u0000\u0000"+ + "\u0000\u0125\u04b8\u0001\u0000\u0000\u0000\u0127\u04bc\u0001\u0000\u0000"+ + "\u0000\u0129\u04c0\u0001\u0000\u0000\u0000\u012b\u04c4\u0001\u0000\u0000"+ + "\u0000\u012d\u04c8\u0001\u0000\u0000\u0000\u012f\u04cc\u0001\u0000\u0000"+ + "\u0000\u0131\u04d0\u0001\u0000\u0000\u0000\u0133\u04d4\u0001\u0000\u0000"+ + "\u0000\u0135\u04d9\u0001\u0000\u0000\u0000\u0137\u04dd\u0001\u0000\u0000"+ + "\u0000\u0139\u04e1\u0001\u0000\u0000\u0000\u013b\u04e5\u0001\u0000\u0000"+ + "\u0000\u013d\u04e9\u0001\u0000\u0000\u0000\u013f\u04ed\u0001\u0000\u0000"+ + 
"\u0000\u0141\u04f1\u0001\u0000\u0000\u0000\u0143\u04f5\u0001\u0000\u0000"+ + "\u0000\u0145\u04f9\u0001\u0000\u0000\u0000\u0147\u04fe\u0001\u0000\u0000"+ + "\u0000\u0149\u0503\u0001\u0000\u0000\u0000\u014b\u0507\u0001\u0000\u0000"+ + "\u0000\u014d\u050b\u0001\u0000\u0000\u0000\u014f\u050f\u0001\u0000\u0000"+ + "\u0000\u0151\u0514\u0001\u0000\u0000\u0000\u0153\u051b\u0001\u0000\u0000"+ + "\u0000\u0155\u051f\u0001\u0000\u0000\u0000\u0157\u0523\u0001\u0000\u0000"+ + "\u0000\u0159\u0527\u0001\u0000\u0000\u0000\u015b\u052b\u0001\u0000\u0000"+ + "\u0000\u015d\u0530\u0001\u0000\u0000\u0000\u015f\u0534\u0001\u0000\u0000"+ + "\u0000\u0161\u0538\u0001\u0000\u0000\u0000\u0163\u053c\u0001\u0000\u0000"+ + "\u0000\u0165\u0541\u0001\u0000\u0000\u0000\u0167\u0545\u0001\u0000\u0000"+ + "\u0000\u0169\u0549\u0001\u0000\u0000\u0000\u016b\u054d\u0001\u0000\u0000"+ + "\u0000\u016d\u0551\u0001\u0000\u0000\u0000\u016f\u0555\u0001\u0000\u0000"+ + "\u0000\u0171\u055b\u0001\u0000\u0000\u0000\u0173\u055f\u0001\u0000\u0000"+ + "\u0000\u0175\u0563\u0001\u0000\u0000\u0000\u0177\u0567\u0001\u0000\u0000"+ + "\u0000\u0179\u056b\u0001\u0000\u0000\u0000\u017b\u056f\u0001\u0000\u0000"+ + "\u0000\u017d\u0573\u0001\u0000\u0000\u0000\u017f\u0578\u0001\u0000\u0000"+ + "\u0000\u0181\u057e\u0001\u0000\u0000\u0000\u0183\u0584\u0001\u0000\u0000"+ + "\u0000\u0185\u0588\u0001\u0000\u0000\u0000\u0187\u058c\u0001\u0000\u0000"+ + "\u0000\u0189\u0590\u0001\u0000\u0000\u0000\u018b\u0596\u0001\u0000\u0000"+ + "\u0000\u018d\u059c\u0001\u0000\u0000\u0000\u018f\u05a0\u0001\u0000\u0000"+ + "\u0000\u0191\u05a4\u0001\u0000\u0000\u0000\u0193\u05a8\u0001\u0000\u0000"+ + "\u0000\u0195\u05ae\u0001\u0000\u0000\u0000\u0197\u05b4\u0001\u0000\u0000"+ + "\u0000\u0199\u05ba\u0001\u0000\u0000\u0000\u019b\u019c\u0007\u0000\u0000"+ + "\u0000\u019c\u019d\u0007\u0001\u0000\u0000\u019d\u019e\u0007\u0002\u0000"+ + "\u0000\u019e\u019f\u0007\u0002\u0000\u0000\u019f\u01a0\u0007\u0003\u0000"+ + 
"\u0000\u01a0\u01a1\u0007\u0004\u0000\u0000\u01a1\u01a2\u0007\u0005\u0000"+ + "\u0000\u01a2\u01a3\u0001\u0000\u0000\u0000\u01a3\u01a4\u0006\u0000\u0000"+ + "\u0000\u01a4\u0010\u0001\u0000\u0000\u0000\u01a5\u01a6\u0007\u0000\u0000"+ + "\u0000\u01a6\u01a7\u0007\u0006\u0000\u0000\u01a7\u01a8\u0007\u0007\u0000"+ + "\u0000\u01a8\u01a9\u0007\b\u0000\u0000\u01a9\u01aa\u0001\u0000\u0000\u0000"+ + "\u01aa\u01ab\u0006\u0001\u0001\u0000\u01ab\u0012\u0001\u0000\u0000\u0000"+ + "\u01ac\u01ad\u0007\u0003\u0000\u0000\u01ad\u01ae\u0007\t\u0000\u0000\u01ae"+ + "\u01af\u0007\u0006\u0000\u0000\u01af\u01b0\u0007\u0001\u0000\u0000\u01b0"+ + "\u01b1\u0007\u0004\u0000\u0000\u01b1\u01b2\u0007\n\u0000\u0000\u01b2\u01b3"+ + "\u0001\u0000\u0000\u0000\u01b3\u01b4\u0006\u0002\u0002\u0000\u01b4\u0014"+ + "\u0001\u0000\u0000\u0000\u01b5\u01b6\u0007\u0003\u0000\u0000\u01b6\u01b7"+ + "\u0007\u000b\u0000\u0000\u01b7\u01b8\u0007\f\u0000\u0000\u01b8\u01b9\u0007"+ + "\r\u0000\u0000\u01b9\u01ba\u0001\u0000\u0000\u0000\u01ba\u01bb\u0006\u0003"+ + "\u0000\u0000\u01bb\u0016\u0001\u0000\u0000\u0000\u01bc\u01bd\u0007\u0003"+ + "\u0000\u0000\u01bd\u01be\u0007\u000e\u0000\u0000\u01be\u01bf\u0007\b\u0000"+ + "\u0000\u01bf\u01c0\u0007\r\u0000\u0000\u01c0\u01c1\u0007\f\u0000\u0000"+ + "\u01c1\u01c2\u0007\u0001\u0000\u0000\u01c2\u01c3\u0007\t\u0000\u0000\u01c3"+ + "\u01c4\u0001\u0000\u0000\u0000\u01c4\u01c5\u0006\u0004\u0003\u0000\u01c5"+ + "\u0018\u0001\u0000\u0000\u0000\u01c6\u01c7\u0007\u000f\u0000\u0000\u01c7"+ + "\u01c8\u0007\u0006\u0000\u0000\u01c8\u01c9\u0007\u0007\u0000\u0000\u01c9"+ + "\u01ca\u0007\u0010\u0000\u0000\u01ca\u01cb\u0001\u0000\u0000\u0000\u01cb"+ + "\u01cc\u0006\u0005\u0004\u0000\u01cc\u001a\u0001\u0000\u0000\u0000\u01cd"+ + "\u01ce\u0007\u0011\u0000\u0000\u01ce\u01cf\u0007\u0006\u0000\u0000\u01cf"+ + "\u01d0\u0007\u0007\u0000\u0000\u01d0\u01d1\u0007\u0012\u0000\u0000\u01d1"+ + "\u01d2\u0001\u0000\u0000\u0000\u01d2\u01d3\u0006\u0006\u0000\u0000\u01d3"+ + 
"\u001c\u0001\u0000\u0000\u0000\u01d4\u01d5\u0007\u0012\u0000\u0000\u01d5"+ + "\u01d6\u0007\u0003\u0000\u0000\u01d6\u01d7\u0007\u0003\u0000\u0000\u01d7"+ + "\u01d8\u0007\b\u0000\u0000\u01d8\u01d9\u0001\u0000\u0000\u0000\u01d9\u01da"+ + "\u0006\u0007\u0001\u0000\u01da\u001e\u0001\u0000\u0000\u0000\u01db\u01dc"+ + "\u0007\r\u0000\u0000\u01dc\u01dd\u0007\u0001\u0000\u0000\u01dd\u01de\u0007"+ + "\u0010\u0000\u0000\u01de\u01df\u0007\u0001\u0000\u0000\u01df\u01e0\u0007"+ + "\u0005\u0000\u0000\u01e0\u01e1\u0001\u0000\u0000\u0000\u01e1\u01e2\u0006"+ + "\b\u0000\u0000\u01e2 \u0001\u0000\u0000\u0000\u01e3\u01e4\u0007\u0010"+ + "\u0000\u0000\u01e4\u01e5\u0007\u000b\u0000\u0000\u01e5\u01e6\u0005_\u0000"+ + "\u0000\u01e6\u01e7\u0007\u0003\u0000\u0000\u01e7\u01e8\u0007\u000e\u0000"+ + "\u0000\u01e8\u01e9\u0007\b\u0000\u0000\u01e9\u01ea\u0007\f\u0000\u0000"+ + "\u01ea\u01eb\u0007\t\u0000\u0000\u01eb\u01ec\u0007\u0000\u0000\u0000\u01ec"+ + "\u01ed\u0001\u0000\u0000\u0000\u01ed\u01ee\u0006\t\u0005\u0000\u01ee\""+ + "\u0001\u0000\u0000\u0000\u01ef\u01f0\u0007\u0006\u0000\u0000\u01f0\u01f1"+ + "\u0007\u0003\u0000\u0000\u01f1\u01f2\u0007\t\u0000\u0000\u01f2\u01f3\u0007"+ + "\f\u0000\u0000\u01f3\u01f4\u0007\u0010\u0000\u0000\u01f4\u01f5\u0007\u0003"+ + "\u0000\u0000\u01f5\u01f6\u0001\u0000\u0000\u0000\u01f6\u01f7\u0006\n\u0006"+ + "\u0000\u01f7$\u0001\u0000\u0000\u0000\u01f8\u01f9\u0007\u0006\u0000\u0000"+ + "\u01f9\u01fa\u0007\u0007\u0000\u0000\u01fa\u01fb\u0007\u0013\u0000\u0000"+ + "\u01fb\u01fc\u0001\u0000\u0000\u0000\u01fc\u01fd\u0006\u000b\u0000\u0000"+ + "\u01fd&\u0001\u0000\u0000\u0000\u01fe\u01ff\u0007\u0002\u0000\u0000\u01ff"+ + "\u0200\u0007\n\u0000\u0000\u0200\u0201\u0007\u0007\u0000\u0000\u0201\u0202"+ + "\u0007\u0013\u0000\u0000\u0202\u0203\u0001\u0000\u0000\u0000\u0203\u0204"+ + "\u0006\f\u0007\u0000\u0204(\u0001\u0000\u0000\u0000\u0205\u0206\u0007"+ + "\u0002\u0000\u0000\u0206\u0207\u0007\u0007\u0000\u0000\u0207\u0208\u0007"+ + 
"\u0006\u0000\u0000\u0208\u0209\u0007\u0005\u0000\u0000\u0209\u020a\u0001"+ + "\u0000\u0000\u0000\u020a\u020b\u0006\r\u0000\u0000\u020b*\u0001\u0000"+ + "\u0000\u0000\u020c\u020d\u0007\u0002\u0000\u0000\u020d\u020e\u0007\u0005"+ + "\u0000\u0000\u020e\u020f\u0007\f\u0000\u0000\u020f\u0210\u0007\u0005\u0000"+ + "\u0000\u0210\u0211\u0007\u0002\u0000\u0000\u0211\u0212\u0001\u0000\u0000"+ + "\u0000\u0212\u0213\u0006\u000e\u0000\u0000\u0213,\u0001\u0000\u0000\u0000"+ + "\u0214\u0215\u0007\u0013\u0000\u0000\u0215\u0216\u0007\n\u0000\u0000\u0216"+ + "\u0217\u0007\u0003\u0000\u0000\u0217\u0218\u0007\u0006\u0000\u0000\u0218"+ + "\u0219\u0007\u0003\u0000\u0000\u0219\u021a\u0001\u0000\u0000\u0000\u021a"+ + "\u021b\u0006\u000f\u0000\u0000\u021b.\u0001\u0000\u0000\u0000\u021c\u021d"+ + "\u0004\u0010\u0000\u0000\u021d\u021e\u0007\u0001\u0000\u0000\u021e\u021f"+ + "\u0007\t\u0000\u0000\u021f\u0220\u0007\r\u0000\u0000\u0220\u0221\u0007"+ + "\u0001\u0000\u0000\u0221\u0222\u0007\t\u0000\u0000\u0222\u0223\u0007\u0003"+ + "\u0000\u0000\u0223\u0224\u0007\u0002\u0000\u0000\u0224\u0225\u0007\u0005"+ + "\u0000\u0000\u0225\u0226\u0007\f\u0000\u0000\u0226\u0227\u0007\u0005\u0000"+ + "\u0000\u0227\u0228\u0007\u0002\u0000\u0000\u0228\u0229\u0001\u0000\u0000"+ + "\u0000\u0229\u022a\u0006\u0010\u0000\u0000\u022a0\u0001\u0000\u0000\u0000"+ + "\u022b\u022c\u0004\u0011\u0001\u0000\u022c\u022d\u0007\r\u0000\u0000\u022d"+ + "\u022e\u0007\u0007\u0000\u0000\u022e\u022f\u0007\u0007\u0000\u0000\u022f"+ + "\u0230\u0007\u0012\u0000\u0000\u0230\u0231\u0007\u0014\u0000\u0000\u0231"+ + "\u0232\u0007\b\u0000\u0000\u0232\u0233\u0001\u0000\u0000\u0000\u0233\u0234"+ + "\u0006\u0011\b\u0000\u02342\u0001\u0000\u0000\u0000\u0235\u0236\u0004"+ + "\u0012\u0002\u0000\u0236\u0237\u0007\u0010\u0000\u0000\u0237\u0238\u0007"+ + "\u0003\u0000\u0000\u0238\u0239\u0007\u0005\u0000\u0000\u0239\u023a\u0007"+ + "\u0006\u0000\u0000\u023a\u023b\u0007\u0001\u0000\u0000\u023b\u023c\u0007"+ + 
"\u0004\u0000\u0000\u023c\u023d\u0007\u0002\u0000\u0000\u023d\u023e\u0001"+ + "\u0000\u0000\u0000\u023e\u023f\u0006\u0012\t\u0000\u023f4\u0001\u0000"+ + "\u0000\u0000\u0240\u0242\b\u0015\u0000\u0000\u0241\u0240\u0001\u0000\u0000"+ + "\u0000\u0242\u0243\u0001\u0000\u0000\u0000\u0243\u0241\u0001\u0000\u0000"+ + "\u0000\u0243\u0244\u0001\u0000\u0000\u0000\u0244\u0245\u0001\u0000\u0000"+ + "\u0000\u0245\u0246\u0006\u0013\u0000\u0000\u02466\u0001\u0000\u0000\u0000"+ + "\u0247\u0248\u0005/\u0000\u0000\u0248\u0249\u0005/\u0000\u0000\u0249\u024d"+ + "\u0001\u0000\u0000\u0000\u024a\u024c\b\u0016\u0000\u0000\u024b\u024a\u0001"+ + "\u0000\u0000\u0000\u024c\u024f\u0001\u0000\u0000\u0000\u024d\u024b\u0001"+ + "\u0000\u0000\u0000\u024d\u024e\u0001\u0000\u0000\u0000\u024e\u0251\u0001"+ + "\u0000\u0000\u0000\u024f\u024d\u0001\u0000\u0000\u0000\u0250\u0252\u0005"+ + "\r\u0000\u0000\u0251\u0250\u0001\u0000\u0000\u0000\u0251\u0252\u0001\u0000"+ + "\u0000\u0000\u0252\u0254\u0001\u0000\u0000\u0000\u0253\u0255\u0005\n\u0000"+ + "\u0000\u0254\u0253\u0001\u0000\u0000\u0000\u0254\u0255\u0001\u0000\u0000"+ + "\u0000\u0255\u0256\u0001\u0000\u0000\u0000\u0256\u0257\u0006\u0014\n\u0000"+ + "\u02578\u0001\u0000\u0000\u0000\u0258\u0259\u0005/\u0000\u0000\u0259\u025a"+ + "\u0005*\u0000\u0000\u025a\u025f\u0001\u0000\u0000\u0000\u025b\u025e\u0003"+ + "9\u0015\u0000\u025c\u025e\t\u0000\u0000\u0000\u025d\u025b\u0001\u0000"+ + "\u0000\u0000\u025d\u025c\u0001\u0000\u0000\u0000\u025e\u0261\u0001\u0000"+ + "\u0000\u0000\u025f\u0260\u0001\u0000\u0000\u0000\u025f\u025d\u0001\u0000"+ + "\u0000\u0000\u0260\u0262\u0001\u0000\u0000\u0000\u0261\u025f\u0001\u0000"+ + "\u0000\u0000\u0262\u0263\u0005*\u0000\u0000\u0263\u0264\u0005/\u0000\u0000"+ + "\u0264\u0265\u0001\u0000\u0000\u0000\u0265\u0266\u0006\u0015\n\u0000\u0266"+ + ":\u0001\u0000\u0000\u0000\u0267\u0269\u0007\u0017\u0000\u0000\u0268\u0267"+ + "\u0001\u0000\u0000\u0000\u0269\u026a\u0001\u0000\u0000\u0000\u026a\u0268"+ + 
"\u0001\u0000\u0000\u0000\u026a\u026b\u0001\u0000\u0000\u0000\u026b\u026c"+ + "\u0001\u0000\u0000\u0000\u026c\u026d\u0006\u0016\n\u0000\u026d<\u0001"+ + "\u0000\u0000\u0000\u026e\u026f\u0005|\u0000\u0000\u026f\u0270\u0001\u0000"+ + "\u0000\u0000\u0270\u0271\u0006\u0017\u000b\u0000\u0271>\u0001\u0000\u0000"+ + "\u0000\u0272\u0273\u0007\u0018\u0000\u0000\u0273@\u0001\u0000\u0000\u0000"+ + "\u0274\u0275\u0007\u0019\u0000\u0000\u0275B\u0001\u0000\u0000\u0000\u0276"+ + "\u0277\u0005\\\u0000\u0000\u0277\u0278\u0007\u001a\u0000\u0000\u0278D"+ + "\u0001\u0000\u0000\u0000\u0279\u027a\b\u001b\u0000\u0000\u027aF\u0001"+ + "\u0000\u0000\u0000\u027b\u027d\u0007\u0003\u0000\u0000\u027c\u027e\u0007"+ + "\u001c\u0000\u0000\u027d\u027c\u0001\u0000\u0000\u0000\u027d\u027e\u0001"+ + "\u0000\u0000\u0000\u027e\u0280\u0001\u0000\u0000\u0000\u027f\u0281\u0003"+ + "?\u0018\u0000\u0280\u027f\u0001\u0000\u0000\u0000\u0281\u0282\u0001\u0000"+ + "\u0000\u0000\u0282\u0280\u0001\u0000\u0000\u0000\u0282\u0283\u0001\u0000"+ + "\u0000\u0000\u0283H\u0001\u0000\u0000\u0000\u0284\u0285\u0005@\u0000\u0000"+ + "\u0285J\u0001\u0000\u0000\u0000\u0286\u0287\u0005`\u0000\u0000\u0287L"+ + "\u0001\u0000\u0000\u0000\u0288\u028c\b\u001d\u0000\u0000\u0289\u028a\u0005"+ + "`\u0000\u0000\u028a\u028c\u0005`\u0000\u0000\u028b\u0288\u0001\u0000\u0000"+ + "\u0000\u028b\u0289\u0001\u0000\u0000\u0000\u028cN\u0001\u0000\u0000\u0000"+ + "\u028d\u028e\u0005_\u0000\u0000\u028eP\u0001\u0000\u0000\u0000\u028f\u0293"+ + "\u0003A\u0019\u0000\u0290\u0293\u0003?\u0018\u0000\u0291\u0293\u0003O"+ + " \u0000\u0292\u028f\u0001\u0000\u0000\u0000\u0292\u0290\u0001\u0000\u0000"+ + "\u0000\u0292\u0291\u0001\u0000\u0000\u0000\u0293R\u0001\u0000\u0000\u0000"+ + "\u0294\u0299\u0005\"\u0000\u0000\u0295\u0298\u0003C\u001a\u0000\u0296"+ + "\u0298\u0003E\u001b\u0000\u0297\u0295\u0001\u0000\u0000\u0000\u0297\u0296"+ + "\u0001\u0000\u0000\u0000\u0298\u029b\u0001\u0000\u0000\u0000\u0299\u0297"+ + 
"\u0001\u0000\u0000\u0000\u0299\u029a\u0001\u0000\u0000\u0000\u029a\u029c"+ + "\u0001\u0000\u0000\u0000\u029b\u0299\u0001\u0000\u0000\u0000\u029c\u02b2"+ + "\u0005\"\u0000\u0000\u029d\u029e\u0005\"\u0000\u0000\u029e\u029f\u0005"+ + "\"\u0000\u0000\u029f\u02a0\u0005\"\u0000\u0000\u02a0\u02a4\u0001\u0000"+ + "\u0000\u0000\u02a1\u02a3\b\u0016\u0000\u0000\u02a2\u02a1\u0001\u0000\u0000"+ + "\u0000\u02a3\u02a6\u0001\u0000\u0000\u0000\u02a4\u02a5\u0001\u0000\u0000"+ + "\u0000\u02a4\u02a2\u0001\u0000\u0000\u0000\u02a5\u02a7\u0001\u0000\u0000"+ + "\u0000\u02a6\u02a4\u0001\u0000\u0000\u0000\u02a7\u02a8\u0005\"\u0000\u0000"+ + "\u02a8\u02a9\u0005\"\u0000\u0000\u02a9\u02aa\u0005\"\u0000\u0000\u02aa"+ + "\u02ac\u0001\u0000\u0000\u0000\u02ab\u02ad\u0005\"\u0000\u0000\u02ac\u02ab"+ + "\u0001\u0000\u0000\u0000\u02ac\u02ad\u0001\u0000\u0000\u0000\u02ad\u02af"+ + "\u0001\u0000\u0000\u0000\u02ae\u02b0\u0005\"\u0000\u0000\u02af\u02ae\u0001"+ + "\u0000\u0000\u0000\u02af\u02b0\u0001\u0000\u0000\u0000\u02b0\u02b2\u0001"+ + "\u0000\u0000\u0000\u02b1\u0294\u0001\u0000\u0000\u0000\u02b1\u029d\u0001"+ + "\u0000\u0000\u0000\u02b2T\u0001\u0000\u0000\u0000\u02b3\u02b5\u0003?\u0018"+ + "\u0000\u02b4\u02b3\u0001\u0000\u0000\u0000\u02b5\u02b6\u0001\u0000\u0000"+ + "\u0000\u02b6\u02b4\u0001\u0000\u0000\u0000\u02b6\u02b7\u0001\u0000\u0000"+ + "\u0000\u02b7V\u0001\u0000\u0000\u0000\u02b8\u02ba\u0003?\u0018\u0000\u02b9"+ + "\u02b8\u0001\u0000\u0000\u0000\u02ba\u02bb\u0001\u0000\u0000\u0000\u02bb"+ + "\u02b9\u0001\u0000\u0000\u0000\u02bb\u02bc\u0001\u0000\u0000\u0000\u02bc"+ + "\u02bd\u0001\u0000\u0000\u0000\u02bd\u02c1\u0003g,\u0000\u02be\u02c0\u0003"+ + "?\u0018\u0000\u02bf\u02be\u0001\u0000\u0000\u0000\u02c0\u02c3\u0001\u0000"+ + "\u0000\u0000\u02c1\u02bf\u0001\u0000\u0000\u0000\u02c1\u02c2\u0001\u0000"+ + "\u0000\u0000\u02c2\u02e3\u0001\u0000\u0000\u0000\u02c3\u02c1\u0001\u0000"+ + "\u0000\u0000\u02c4\u02c6\u0003g,\u0000\u02c5\u02c7\u0003?\u0018\u0000"+ + 
"\u02c6\u02c5\u0001\u0000\u0000\u0000\u02c7\u02c8\u0001\u0000\u0000\u0000"+ + "\u02c8\u02c6\u0001\u0000\u0000\u0000\u02c8\u02c9\u0001\u0000\u0000\u0000"+ + "\u02c9\u02e3\u0001\u0000\u0000\u0000\u02ca\u02cc\u0003?\u0018\u0000\u02cb"+ + "\u02ca\u0001\u0000\u0000\u0000\u02cc\u02cd\u0001\u0000\u0000\u0000\u02cd"+ + "\u02cb\u0001\u0000\u0000\u0000\u02cd\u02ce\u0001\u0000\u0000\u0000\u02ce"+ + "\u02d6\u0001\u0000\u0000\u0000\u02cf\u02d3\u0003g,\u0000\u02d0\u02d2\u0003"+ + "?\u0018\u0000\u02d1\u02d0\u0001\u0000\u0000\u0000\u02d2\u02d5\u0001\u0000"+ + "\u0000\u0000\u02d3\u02d1\u0001\u0000\u0000\u0000\u02d3\u02d4\u0001\u0000"+ + "\u0000\u0000\u02d4\u02d7\u0001\u0000\u0000\u0000\u02d5\u02d3\u0001\u0000"+ + "\u0000\u0000\u02d6\u02cf\u0001\u0000\u0000\u0000\u02d6\u02d7\u0001\u0000"+ + "\u0000\u0000\u02d7\u02d8\u0001\u0000\u0000\u0000\u02d8\u02d9\u0003G\u001c"+ + "\u0000\u02d9\u02e3\u0001\u0000\u0000\u0000\u02da\u02dc\u0003g,\u0000\u02db"+ + "\u02dd\u0003?\u0018\u0000\u02dc\u02db\u0001\u0000\u0000\u0000\u02dd\u02de"+ + "\u0001\u0000\u0000\u0000\u02de\u02dc\u0001\u0000\u0000\u0000\u02de\u02df"+ + "\u0001\u0000\u0000\u0000\u02df\u02e0\u0001\u0000\u0000\u0000\u02e0\u02e1"+ + "\u0003G\u001c\u0000\u02e1\u02e3\u0001\u0000\u0000\u0000\u02e2\u02b9\u0001"+ + "\u0000\u0000\u0000\u02e2\u02c4\u0001\u0000\u0000\u0000\u02e2\u02cb\u0001"+ + "\u0000\u0000\u0000\u02e2\u02da\u0001\u0000\u0000\u0000\u02e3X\u0001\u0000"+ + "\u0000\u0000\u02e4\u02e5\u0007\u001e\u0000\u0000\u02e5\u02e6\u0007\u001f"+ + "\u0000\u0000\u02e6Z\u0001\u0000\u0000\u0000\u02e7\u02e8\u0007\f\u0000"+ + "\u0000\u02e8\u02e9\u0007\t\u0000\u0000\u02e9\u02ea\u0007\u0000\u0000\u0000"+ + "\u02ea\\\u0001\u0000\u0000\u0000\u02eb\u02ec\u0007\f\u0000\u0000\u02ec"+ + "\u02ed\u0007\u0002\u0000\u0000\u02ed\u02ee\u0007\u0004\u0000\u0000\u02ee"+ + "^\u0001\u0000\u0000\u0000\u02ef\u02f0\u0005=\u0000\u0000\u02f0`\u0001"+ + "\u0000\u0000\u0000\u02f1\u02f2\u0005:\u0000\u0000\u02f2\u02f3\u0005:\u0000"+ + 
"\u0000\u02f3b\u0001\u0000\u0000\u0000\u02f4\u02f5\u0005,\u0000\u0000\u02f5"+ + "d\u0001\u0000\u0000\u0000\u02f6\u02f7\u0007\u0000\u0000\u0000\u02f7\u02f8"+ + "\u0007\u0003\u0000\u0000\u02f8\u02f9\u0007\u0002\u0000\u0000\u02f9\u02fa"+ + "\u0007\u0004\u0000\u0000\u02faf\u0001\u0000\u0000\u0000\u02fb\u02fc\u0005"+ + ".\u0000\u0000\u02fch\u0001\u0000\u0000\u0000\u02fd\u02fe\u0007\u000f\u0000"+ + "\u0000\u02fe\u02ff\u0007\f\u0000\u0000\u02ff\u0300\u0007\r\u0000\u0000"+ + "\u0300\u0301\u0007\u0002\u0000\u0000\u0301\u0302\u0007\u0003\u0000\u0000"+ + "\u0302j\u0001\u0000\u0000\u0000\u0303\u0304\u0007\u000f\u0000\u0000\u0304"+ + "\u0305\u0007\u0001\u0000\u0000\u0305\u0306\u0007\u0006\u0000\u0000\u0306"+ + "\u0307\u0007\u0002\u0000\u0000\u0307\u0308\u0007\u0005\u0000\u0000\u0308"+ + "l\u0001\u0000\u0000\u0000\u0309\u030a\u0007\u0001\u0000\u0000\u030a\u030b"+ + "\u0007\t\u0000\u0000\u030bn\u0001\u0000\u0000\u0000\u030c\u030d\u0007"+ + "\u0001\u0000\u0000\u030d\u030e\u0007\u0002\u0000\u0000\u030ep\u0001\u0000"+ + "\u0000\u0000\u030f\u0310\u0007\r\u0000\u0000\u0310\u0311\u0007\f\u0000"+ + "\u0000\u0311\u0312\u0007\u0002\u0000\u0000\u0312\u0313\u0007\u0005\u0000"+ + "\u0000\u0313r\u0001\u0000\u0000\u0000\u0314\u0315\u0007\r\u0000\u0000"+ + "\u0315\u0316\u0007\u0001\u0000\u0000\u0316\u0317\u0007\u0012\u0000\u0000"+ + "\u0317\u0318\u0007\u0003\u0000\u0000\u0318t\u0001\u0000\u0000\u0000\u0319"+ + "\u031a\u0005(\u0000\u0000\u031av\u0001\u0000\u0000\u0000\u031b\u031c\u0007"+ + "\t\u0000\u0000\u031c\u031d\u0007\u0007\u0000\u0000\u031d\u031e\u0007\u0005"+ + "\u0000\u0000\u031ex\u0001\u0000\u0000\u0000\u031f\u0320\u0007\t\u0000"+ + "\u0000\u0320\u0321\u0007\u0014\u0000\u0000\u0321\u0322\u0007\r\u0000\u0000"+ + "\u0322\u0323\u0007\r\u0000\u0000\u0323z\u0001\u0000\u0000\u0000\u0324"+ + "\u0325\u0007\t\u0000\u0000\u0325\u0326\u0007\u0014\u0000\u0000\u0326\u0327"+ + "\u0007\r\u0000\u0000\u0327\u0328\u0007\r\u0000\u0000\u0328\u0329\u0007"+ + 
"\u0002\u0000\u0000\u0329|\u0001\u0000\u0000\u0000\u032a\u032b\u0007\u0007"+ + "\u0000\u0000\u032b\u032c\u0007\u0006\u0000\u0000\u032c~\u0001\u0000\u0000"+ + "\u0000\u032d\u032e\u0005?\u0000\u0000\u032e\u0080\u0001\u0000\u0000\u0000"+ + "\u032f\u0330\u0007\u0006\u0000\u0000\u0330\u0331\u0007\r\u0000\u0000\u0331"+ + "\u0332\u0007\u0001\u0000\u0000\u0332\u0333\u0007\u0012\u0000\u0000\u0333"+ + "\u0334\u0007\u0003\u0000\u0000\u0334\u0082\u0001\u0000\u0000\u0000\u0335"+ + "\u0336\u0005)\u0000\u0000\u0336\u0084\u0001\u0000\u0000\u0000\u0337\u0338"+ + "\u0007\u0005\u0000\u0000\u0338\u0339\u0007\u0006\u0000\u0000\u0339\u033a"+ + "\u0007\u0014\u0000\u0000\u033a\u033b\u0007\u0003\u0000\u0000\u033b\u0086"+ + "\u0001\u0000\u0000\u0000\u033c\u033d\u0005=\u0000\u0000\u033d\u033e\u0005"+ + "=\u0000\u0000\u033e\u0088\u0001\u0000\u0000\u0000\u033f\u0340\u0005=\u0000"+ + "\u0000\u0340\u0341\u0005~\u0000\u0000\u0341\u008a\u0001\u0000\u0000\u0000"+ + "\u0342\u0343\u0005!\u0000\u0000\u0343\u0344\u0005=\u0000\u0000\u0344\u008c"+ + "\u0001\u0000\u0000\u0000\u0345\u0346\u0005<\u0000\u0000\u0346\u008e\u0001"+ + "\u0000\u0000\u0000\u0347\u0348\u0005<\u0000\u0000\u0348\u0349\u0005=\u0000"+ + "\u0000\u0349\u0090\u0001\u0000\u0000\u0000\u034a\u034b\u0005>\u0000\u0000"+ + "\u034b\u0092\u0001\u0000\u0000\u0000\u034c\u034d\u0005>\u0000\u0000\u034d"+ + "\u034e\u0005=\u0000\u0000\u034e\u0094\u0001\u0000\u0000\u0000\u034f\u0350"+ + "\u0005+\u0000\u0000\u0350\u0096\u0001\u0000\u0000\u0000\u0351\u0352\u0005"+ + "-\u0000\u0000\u0352\u0098\u0001\u0000\u0000\u0000\u0353\u0354\u0005*\u0000"+ + "\u0000\u0354\u009a\u0001\u0000\u0000\u0000\u0355\u0356\u0005/\u0000\u0000"+ + "\u0356\u009c\u0001\u0000\u0000\u0000\u0357\u0358\u0005%\u0000\u0000\u0358"+ + "\u009e\u0001\u0000\u0000\u0000\u0359\u035a\u0007\u0010\u0000\u0000\u035a"+ + "\u035b\u0007\f\u0000\u0000\u035b\u035c\u0007\u0005\u0000\u0000\u035c\u035d"+ + "\u0007\u0004\u0000\u0000\u035d\u035e\u0007\n\u0000\u0000\u035e\u00a0\u0001"+ + 
"\u0000\u0000\u0000\u035f\u0360\u0003-\u000f\u0000\u0360\u0361\u0001\u0000"+ + "\u0000\u0000\u0361\u0362\u0006I\f\u0000\u0362\u00a2\u0001\u0000\u0000"+ + "\u0000\u0363\u0366\u0003\u007f8\u0000\u0364\u0367\u0003A\u0019\u0000\u0365"+ + "\u0367\u0003O \u0000\u0366\u0364\u0001\u0000\u0000\u0000\u0366\u0365\u0001"+ + "\u0000\u0000\u0000\u0367\u036b\u0001\u0000\u0000\u0000\u0368\u036a\u0003"+ + "Q!\u0000\u0369\u0368\u0001\u0000\u0000\u0000\u036a\u036d\u0001\u0000\u0000"+ + "\u0000\u036b\u0369\u0001\u0000\u0000\u0000\u036b\u036c\u0001\u0000\u0000"+ + "\u0000\u036c\u0375\u0001\u0000\u0000\u0000\u036d\u036b\u0001\u0000\u0000"+ + "\u0000\u036e\u0370\u0003\u007f8\u0000\u036f\u0371\u0003?\u0018\u0000\u0370"+ + "\u036f\u0001\u0000\u0000\u0000\u0371\u0372\u0001\u0000\u0000\u0000\u0372"+ + "\u0370\u0001\u0000\u0000\u0000\u0372\u0373\u0001\u0000\u0000\u0000\u0373"+ + "\u0375\u0001\u0000\u0000\u0000\u0374\u0363\u0001\u0000\u0000\u0000\u0374"+ + "\u036e\u0001\u0000\u0000\u0000\u0375\u00a4\u0001\u0000\u0000\u0000\u0376"+ + "\u0377\u0005[\u0000\u0000\u0377\u0378\u0001\u0000\u0000\u0000\u0378\u0379"+ + "\u0006K\u0000\u0000\u0379\u037a\u0006K\u0000\u0000\u037a\u00a6\u0001\u0000"+ + "\u0000\u0000\u037b\u037c\u0005]\u0000\u0000\u037c\u037d\u0001\u0000\u0000"+ + "\u0000\u037d\u037e\u0006L\u000b\u0000\u037e\u037f\u0006L\u000b\u0000\u037f"+ + "\u00a8\u0001\u0000\u0000\u0000\u0380\u0384\u0003A\u0019\u0000\u0381\u0383"+ + "\u0003Q!\u0000\u0382\u0381\u0001\u0000\u0000\u0000\u0383\u0386\u0001\u0000"+ + "\u0000\u0000\u0384\u0382\u0001\u0000\u0000\u0000\u0384\u0385\u0001\u0000"+ + "\u0000\u0000\u0385\u0391\u0001\u0000\u0000\u0000\u0386\u0384\u0001\u0000"+ + "\u0000\u0000\u0387\u038a\u0003O \u0000\u0388\u038a\u0003I\u001d\u0000"+ + "\u0389\u0387\u0001\u0000\u0000\u0000\u0389\u0388\u0001\u0000\u0000\u0000"+ + "\u038a\u038c\u0001\u0000\u0000\u0000\u038b\u038d\u0003Q!\u0000\u038c\u038b"+ + "\u0001\u0000\u0000\u0000\u038d\u038e\u0001\u0000\u0000\u0000\u038e\u038c"+ + 
"\u0001\u0000\u0000\u0000\u038e\u038f\u0001\u0000\u0000\u0000\u038f\u0391"+ + "\u0001\u0000\u0000\u0000\u0390\u0380\u0001\u0000\u0000\u0000\u0390\u0389"+ + "\u0001\u0000\u0000\u0000\u0391\u00aa\u0001\u0000\u0000\u0000\u0392\u0394"+ + "\u0003K\u001e\u0000\u0393\u0395\u0003M\u001f\u0000\u0394\u0393\u0001\u0000"+ + "\u0000\u0000\u0395\u0396\u0001\u0000\u0000\u0000\u0396\u0394\u0001\u0000"+ + "\u0000\u0000\u0396\u0397\u0001\u0000\u0000\u0000\u0397\u0398\u0001\u0000"+ + "\u0000\u0000\u0398\u0399\u0003K\u001e\u0000\u0399\u00ac\u0001\u0000\u0000"+ + "\u0000\u039a\u039b\u0003\u00abN\u0000\u039b\u00ae\u0001\u0000\u0000\u0000"+ + "\u039c\u039d\u00037\u0014\u0000\u039d\u039e\u0001\u0000\u0000\u0000\u039e"+ + "\u039f\u0006P\n\u0000\u039f\u00b0\u0001\u0000\u0000\u0000\u03a0\u03a1"+ + "\u00039\u0015\u0000\u03a1\u03a2\u0001\u0000\u0000\u0000\u03a2\u03a3\u0006"+ + "Q\n\u0000\u03a3\u00b2\u0001\u0000\u0000\u0000\u03a4\u03a5\u0003;\u0016"+ + "\u0000\u03a5\u03a6\u0001\u0000\u0000\u0000\u03a6\u03a7\u0006R\n\u0000"+ + "\u03a7\u00b4\u0001\u0000\u0000\u0000\u03a8\u03a9\u0003\u00a5K\u0000\u03a9"+ + "\u03aa\u0001\u0000\u0000\u0000\u03aa\u03ab\u0006S\r\u0000\u03ab\u03ac"+ + "\u0006S\u000e\u0000\u03ac\u00b6\u0001\u0000\u0000\u0000\u03ad\u03ae\u0003"+ + "=\u0017\u0000\u03ae\u03af\u0001\u0000\u0000\u0000\u03af\u03b0\u0006T\u000f"+ + "\u0000\u03b0\u03b1\u0006T\u000b\u0000\u03b1\u00b8\u0001\u0000\u0000\u0000"+ + "\u03b2\u03b3\u0003;\u0016\u0000\u03b3\u03b4\u0001\u0000\u0000\u0000\u03b4"+ + "\u03b5\u0006U\n\u0000\u03b5\u00ba\u0001\u0000\u0000\u0000\u03b6\u03b7"+ + "\u00037\u0014\u0000\u03b7\u03b8\u0001\u0000\u0000\u0000\u03b8\u03b9\u0006"+ + "V\n\u0000\u03b9\u00bc\u0001\u0000\u0000\u0000\u03ba\u03bb\u00039\u0015"+ + "\u0000\u03bb\u03bc\u0001\u0000\u0000\u0000\u03bc\u03bd\u0006W\n\u0000"+ + "\u03bd\u00be\u0001\u0000\u0000\u0000\u03be\u03bf\u0003=\u0017\u0000\u03bf"+ + "\u03c0\u0001\u0000\u0000\u0000\u03c0\u03c1\u0006X\u000f\u0000\u03c1\u03c2"+ + 
"\u0006X\u000b\u0000\u03c2\u00c0\u0001\u0000\u0000\u0000\u03c3\u03c4\u0003"+ + "\u00a5K\u0000\u03c4\u03c5\u0001\u0000\u0000\u0000\u03c5\u03c6\u0006Y\r"+ + "\u0000\u03c6\u00c2\u0001\u0000\u0000\u0000\u03c7\u03c8\u0003\u00a7L\u0000"+ + "\u03c8\u03c9\u0001\u0000\u0000\u0000\u03c9\u03ca\u0006Z\u0010\u0000\u03ca"+ + "\u00c4\u0001\u0000\u0000\u0000\u03cb\u03cc\u0003\u0151\u00a1\u0000\u03cc"+ + "\u03cd\u0001\u0000\u0000\u0000\u03cd\u03ce\u0006[\u0011\u0000\u03ce\u00c6"+ + "\u0001\u0000\u0000\u0000\u03cf\u03d0\u0003c*\u0000\u03d0\u03d1\u0001\u0000"+ + "\u0000\u0000\u03d1\u03d2\u0006\\\u0012\u0000\u03d2\u00c8\u0001\u0000\u0000"+ + "\u0000\u03d3\u03d4\u0003_(\u0000\u03d4\u03d5\u0001\u0000\u0000\u0000\u03d5"+ + "\u03d6\u0006]\u0013\u0000\u03d6\u00ca\u0001\u0000\u0000\u0000\u03d7\u03d8"+ + "\u0007\u0010\u0000\u0000\u03d8\u03d9\u0007\u0003\u0000\u0000\u03d9\u03da"+ + "\u0007\u0005\u0000\u0000\u03da\u03db\u0007\f\u0000\u0000\u03db\u03dc\u0007"+ + "\u0000\u0000\u0000\u03dc\u03dd\u0007\f\u0000\u0000\u03dd\u03de\u0007\u0005"+ + "\u0000\u0000\u03de\u03df\u0007\f\u0000\u0000\u03df\u00cc\u0001\u0000\u0000"+ + "\u0000\u03e0\u03e4\b \u0000\u0000\u03e1\u03e2\u0005/\u0000\u0000\u03e2"+ + "\u03e4\b!\u0000\u0000\u03e3\u03e0\u0001\u0000\u0000\u0000\u03e3\u03e1"+ + "\u0001\u0000\u0000\u0000\u03e4\u00ce\u0001\u0000\u0000\u0000\u03e5\u03e7"+ + "\u0003\u00cd_\u0000\u03e6\u03e5\u0001\u0000\u0000\u0000\u03e7\u03e8\u0001"+ + "\u0000\u0000\u0000\u03e8\u03e6\u0001\u0000\u0000\u0000\u03e8\u03e9\u0001"+ + "\u0000\u0000\u0000\u03e9\u00d0\u0001\u0000\u0000\u0000\u03ea\u03eb\u0003"+ + "\u00cf`\u0000\u03eb\u03ec\u0001\u0000\u0000\u0000\u03ec\u03ed\u0006a\u0014"+ + "\u0000\u03ed\u00d2\u0001\u0000\u0000\u0000\u03ee\u03ef\u0003S\"\u0000"+ + "\u03ef\u03f0\u0001\u0000\u0000\u0000\u03f0\u03f1\u0006b\u0015\u0000\u03f1"+ + "\u00d4\u0001\u0000\u0000\u0000\u03f2\u03f3\u00037\u0014\u0000\u03f3\u03f4"+ + "\u0001\u0000\u0000\u0000\u03f4\u03f5\u0006c\n\u0000\u03f5\u00d6\u0001"+ + 
"\u0000\u0000\u0000\u03f6\u03f7\u00039\u0015\u0000\u03f7\u03f8\u0001\u0000"+ + "\u0000\u0000\u03f8\u03f9\u0006d\n\u0000\u03f9\u00d8\u0001\u0000\u0000"+ + "\u0000\u03fa\u03fb\u0003;\u0016\u0000\u03fb\u03fc\u0001\u0000\u0000\u0000"+ + "\u03fc\u03fd\u0006e\n\u0000\u03fd\u00da\u0001\u0000\u0000\u0000\u03fe"+ + "\u03ff\u0003=\u0017\u0000\u03ff\u0400\u0001\u0000\u0000\u0000\u0400\u0401"+ + "\u0006f\u000f\u0000\u0401\u0402\u0006f\u000b\u0000\u0402\u00dc\u0001\u0000"+ + "\u0000\u0000\u0403\u0404\u0003g,\u0000\u0404\u0405\u0001\u0000\u0000\u0000"+ + "\u0405\u0406\u0006g\u0016\u0000\u0406\u00de\u0001\u0000\u0000\u0000\u0407"+ + "\u0408\u0003c*\u0000\u0408\u0409\u0001\u0000\u0000\u0000\u0409\u040a\u0006"+ + "h\u0012\u0000\u040a\u00e0\u0001\u0000\u0000\u0000\u040b\u040c\u0003\u007f"+ + "8\u0000\u040c\u040d\u0001\u0000\u0000\u0000\u040d\u040e\u0006i\u0017\u0000"+ + "\u040e\u00e2\u0001\u0000\u0000\u0000\u040f\u0410\u0003\u00a3J\u0000\u0410"+ + "\u0411\u0001\u0000\u0000\u0000\u0411\u0412\u0006j\u0018\u0000\u0412\u00e4"+ + "\u0001\u0000\u0000\u0000\u0413\u0418\u0003A\u0019\u0000\u0414\u0418\u0003"+ + "?\u0018\u0000\u0415\u0418\u0003O \u0000\u0416\u0418\u0003\u0099E\u0000"+ + "\u0417\u0413\u0001\u0000\u0000\u0000\u0417\u0414\u0001\u0000\u0000\u0000"+ + "\u0417\u0415\u0001\u0000\u0000\u0000\u0417\u0416\u0001\u0000\u0000\u0000"+ + "\u0418\u00e6\u0001\u0000\u0000\u0000\u0419\u041c\u0003A\u0019\u0000\u041a"+ + "\u041c\u0003\u0099E\u0000\u041b\u0419\u0001\u0000\u0000\u0000\u041b\u041a"+ + "\u0001\u0000\u0000\u0000\u041c\u0420\u0001\u0000\u0000\u0000\u041d\u041f"+ + "\u0003\u00e5k\u0000\u041e\u041d\u0001\u0000\u0000\u0000\u041f\u0422\u0001"+ + "\u0000\u0000\u0000\u0420\u041e\u0001\u0000\u0000\u0000\u0420\u0421\u0001"+ + "\u0000\u0000\u0000\u0421\u042d\u0001\u0000\u0000\u0000\u0422\u0420\u0001"+ + "\u0000\u0000\u0000\u0423\u0426\u0003O \u0000\u0424\u0426\u0003I\u001d"+ + "\u0000\u0425\u0423\u0001\u0000\u0000\u0000\u0425\u0424\u0001\u0000\u0000"+ + 
"\u0000\u0426\u0428\u0001\u0000\u0000\u0000\u0427\u0429\u0003\u00e5k\u0000"+ + "\u0428\u0427\u0001\u0000\u0000\u0000\u0429\u042a\u0001\u0000\u0000\u0000"+ + "\u042a\u0428\u0001\u0000\u0000\u0000\u042a\u042b\u0001\u0000\u0000\u0000"+ + "\u042b\u042d\u0001\u0000\u0000\u0000\u042c\u041b\u0001\u0000\u0000\u0000"+ + "\u042c\u0425\u0001\u0000\u0000\u0000\u042d\u00e8\u0001\u0000\u0000\u0000"+ + "\u042e\u0431\u0003\u00e7l\u0000\u042f\u0431\u0003\u00abN\u0000\u0430\u042e"+ + "\u0001\u0000\u0000\u0000\u0430\u042f\u0001\u0000\u0000\u0000\u0431\u0432"+ + "\u0001\u0000\u0000\u0000\u0432\u0430\u0001\u0000\u0000\u0000\u0432\u0433"+ + "\u0001\u0000\u0000\u0000\u0433\u00ea\u0001\u0000\u0000\u0000\u0434\u0435"+ + "\u00037\u0014\u0000\u0435\u0436\u0001\u0000\u0000\u0000\u0436\u0437\u0006"+ + "n\n\u0000\u0437\u00ec\u0001\u0000\u0000\u0000\u0438\u0439\u00039\u0015"+ + "\u0000\u0439\u043a\u0001\u0000\u0000\u0000\u043a\u043b\u0006o\n\u0000"+ + "\u043b\u00ee\u0001\u0000\u0000\u0000\u043c\u043d\u0003;\u0016\u0000\u043d"+ + "\u043e\u0001\u0000\u0000\u0000\u043e\u043f\u0006p\n\u0000\u043f\u00f0"+ + "\u0001\u0000\u0000\u0000\u0440\u0441\u0003=\u0017\u0000\u0441\u0442\u0001"+ + "\u0000\u0000\u0000\u0442\u0443\u0006q\u000f\u0000\u0443\u0444\u0006q\u000b"+ + "\u0000\u0444\u00f2\u0001\u0000\u0000\u0000\u0445\u0446\u0003_(\u0000\u0446"+ + "\u0447\u0001\u0000\u0000\u0000\u0447\u0448\u0006r\u0013\u0000\u0448\u00f4"+ + "\u0001\u0000\u0000\u0000\u0449\u044a\u0003c*\u0000\u044a\u044b\u0001\u0000"+ + "\u0000\u0000\u044b\u044c\u0006s\u0012\u0000\u044c\u00f6\u0001\u0000\u0000"+ + "\u0000\u044d\u044e\u0003g,\u0000\u044e\u044f\u0001\u0000\u0000\u0000\u044f"+ + "\u0450\u0006t\u0016\u0000\u0450\u00f8\u0001\u0000\u0000\u0000\u0451\u0452"+ + "\u0003\u007f8\u0000\u0452\u0453\u0001\u0000\u0000\u0000\u0453\u0454\u0006"+ + "u\u0017\u0000\u0454\u00fa\u0001\u0000\u0000\u0000\u0455\u0456\u0003\u00a3"+ + "J\u0000\u0456\u0457\u0001\u0000\u0000\u0000\u0457\u0458\u0006v\u0018\u0000"+ + 
"\u0458\u00fc\u0001\u0000\u0000\u0000\u0459\u045a\u0007\f\u0000\u0000\u045a"+ + "\u045b\u0007\u0002\u0000\u0000\u045b\u00fe\u0001\u0000\u0000\u0000\u045c"+ + "\u045d\u0003\u00e9m\u0000\u045d\u045e\u0001\u0000\u0000\u0000\u045e\u045f"+ + "\u0006x\u0019\u0000\u045f\u0100\u0001\u0000\u0000\u0000\u0460\u0461\u0003"+ + "7\u0014\u0000\u0461\u0462\u0001\u0000\u0000\u0000\u0462\u0463\u0006y\n"+ + "\u0000\u0463\u0102\u0001\u0000\u0000\u0000\u0464\u0465\u00039\u0015\u0000"+ + "\u0465\u0466\u0001\u0000\u0000\u0000\u0466\u0467\u0006z\n\u0000\u0467"+ + "\u0104\u0001\u0000\u0000\u0000\u0468\u0469\u0003;\u0016\u0000\u0469\u046a"+ + "\u0001\u0000\u0000\u0000\u046a\u046b\u0006{\n\u0000\u046b\u0106\u0001"+ + "\u0000\u0000\u0000\u046c\u046d\u0003=\u0017\u0000\u046d\u046e\u0001\u0000"+ + "\u0000\u0000\u046e\u046f\u0006|\u000f\u0000\u046f\u0470\u0006|\u000b\u0000"+ + "\u0470\u0108\u0001\u0000\u0000\u0000\u0471\u0472\u0003\u00a5K\u0000\u0472"+ + "\u0473\u0001\u0000\u0000\u0000\u0473\u0474\u0006}\r\u0000\u0474\u0475"+ + "\u0006}\u001a\u0000\u0475\u010a\u0001\u0000\u0000\u0000\u0476\u0477\u0007"+ + "\u0007\u0000\u0000\u0477\u0478\u0007\t\u0000\u0000\u0478\u0479\u0001\u0000"+ + "\u0000\u0000\u0479\u047a\u0006~\u001b\u0000\u047a\u010c\u0001\u0000\u0000"+ + "\u0000\u047b\u047c\u0007\u0013\u0000\u0000\u047c\u047d\u0007\u0001\u0000"+ + "\u0000\u047d\u047e\u0007\u0005\u0000\u0000\u047e\u047f\u0007\n\u0000\u0000"+ + "\u047f\u0480\u0001\u0000\u0000\u0000\u0480\u0481\u0006\u007f\u001b\u0000"+ + "\u0481\u010e\u0001\u0000\u0000\u0000\u0482\u0483\b\"\u0000\u0000\u0483"+ + "\u0110\u0001\u0000\u0000\u0000\u0484\u0486\u0003\u010f\u0080\u0000\u0485"+ + "\u0484\u0001\u0000\u0000\u0000\u0486\u0487\u0001\u0000\u0000\u0000\u0487"+ + "\u0485\u0001\u0000\u0000\u0000\u0487\u0488\u0001\u0000\u0000\u0000\u0488"+ + "\u0489\u0001\u0000\u0000\u0000\u0489\u048a\u0003\u0151\u00a1\u0000\u048a"+ + "\u048c\u0001\u0000\u0000\u0000\u048b\u0485\u0001\u0000\u0000\u0000\u048b"+ + 
"\u048c\u0001\u0000\u0000\u0000\u048c\u048e\u0001\u0000\u0000\u0000\u048d"+ + "\u048f\u0003\u010f\u0080\u0000\u048e\u048d\u0001\u0000\u0000\u0000\u048f"+ + "\u0490\u0001\u0000\u0000\u0000\u0490\u048e\u0001\u0000\u0000\u0000\u0490"+ + "\u0491\u0001\u0000\u0000\u0000\u0491\u0112\u0001\u0000\u0000\u0000\u0492"+ + "\u0493\u0003\u0111\u0081\u0000\u0493\u0494\u0001\u0000\u0000\u0000\u0494"+ + "\u0495\u0006\u0082\u001c\u0000\u0495\u0114\u0001\u0000\u0000\u0000\u0496"+ + "\u0497\u00037\u0014\u0000\u0497\u0498\u0001\u0000\u0000\u0000\u0498\u0499"+ + "\u0006\u0083\n\u0000\u0499\u0116\u0001\u0000\u0000\u0000\u049a\u049b\u0003"+ + "9\u0015\u0000\u049b\u049c\u0001\u0000\u0000\u0000\u049c\u049d\u0006\u0084"+ + "\n\u0000\u049d\u0118\u0001\u0000\u0000\u0000\u049e\u049f\u0003;\u0016"+ + "\u0000\u049f\u04a0\u0001\u0000\u0000\u0000\u04a0\u04a1\u0006\u0085\n\u0000"+ + "\u04a1\u011a\u0001\u0000\u0000\u0000\u04a2\u04a3\u0003=\u0017\u0000\u04a3"+ + "\u04a4\u0001\u0000\u0000\u0000\u04a4\u04a5\u0006\u0086\u000f\u0000\u04a5"+ + "\u04a6\u0006\u0086\u000b\u0000\u04a6\u04a7\u0006\u0086\u000b\u0000\u04a7"+ + "\u011c\u0001\u0000\u0000\u0000\u04a8\u04a9\u0003_(\u0000\u04a9\u04aa\u0001"+ + "\u0000\u0000\u0000\u04aa\u04ab\u0006\u0087\u0013\u0000\u04ab\u011e\u0001"+ + "\u0000\u0000\u0000\u04ac\u04ad\u0003c*\u0000\u04ad\u04ae\u0001\u0000\u0000"+ + "\u0000\u04ae\u04af\u0006\u0088\u0012\u0000\u04af\u0120\u0001\u0000\u0000"+ + "\u0000\u04b0\u04b1\u0003g,\u0000\u04b1\u04b2\u0001\u0000\u0000\u0000\u04b2"+ + "\u04b3\u0006\u0089\u0016\u0000\u04b3\u0122\u0001\u0000\u0000\u0000\u04b4"+ + "\u04b5\u0003\u010d\u007f\u0000\u04b5\u04b6\u0001\u0000\u0000\u0000\u04b6"+ + "\u04b7\u0006\u008a\u001d\u0000\u04b7\u0124\u0001\u0000\u0000\u0000\u04b8"+ + "\u04b9\u0003\u00e9m\u0000\u04b9\u04ba\u0001\u0000\u0000\u0000\u04ba\u04bb"+ + "\u0006\u008b\u0019\u0000\u04bb\u0126\u0001\u0000\u0000\u0000\u04bc\u04bd"+ + "\u0003\u00adO\u0000\u04bd\u04be\u0001\u0000\u0000\u0000\u04be\u04bf\u0006"+ + 
"\u008c\u001e\u0000\u04bf\u0128\u0001\u0000\u0000\u0000\u04c0\u04c1\u0003"+ + "\u007f8\u0000\u04c1\u04c2\u0001\u0000\u0000\u0000\u04c2\u04c3\u0006\u008d"+ + "\u0017\u0000\u04c3\u012a\u0001\u0000\u0000\u0000\u04c4\u04c5\u0003\u00a3"+ + "J\u0000\u04c5\u04c6\u0001\u0000\u0000\u0000\u04c6\u04c7\u0006\u008e\u0018"+ + "\u0000\u04c7\u012c\u0001\u0000\u0000\u0000\u04c8\u04c9\u00037\u0014\u0000"+ + "\u04c9\u04ca\u0001\u0000\u0000\u0000\u04ca\u04cb\u0006\u008f\n\u0000\u04cb"+ + "\u012e\u0001\u0000\u0000\u0000\u04cc\u04cd\u00039\u0015\u0000\u04cd\u04ce"+ + "\u0001\u0000\u0000\u0000\u04ce\u04cf\u0006\u0090\n\u0000\u04cf\u0130\u0001"+ + "\u0000\u0000\u0000\u04d0\u04d1\u0003;\u0016\u0000\u04d1\u04d2\u0001\u0000"+ + "\u0000\u0000\u04d2\u04d3\u0006\u0091\n\u0000\u04d3\u0132\u0001\u0000\u0000"+ + "\u0000\u04d4\u04d5\u0003=\u0017\u0000\u04d5\u04d6\u0001\u0000\u0000\u0000"+ + "\u04d6\u04d7\u0006\u0092\u000f\u0000\u04d7\u04d8\u0006\u0092\u000b\u0000"+ + "\u04d8\u0134\u0001\u0000\u0000\u0000\u04d9\u04da\u0003g,\u0000\u04da\u04db"+ + "\u0001\u0000\u0000\u0000\u04db\u04dc\u0006\u0093\u0016\u0000\u04dc\u0136"+ + "\u0001\u0000\u0000\u0000\u04dd\u04de\u0003\u007f8\u0000\u04de\u04df\u0001"+ + "\u0000\u0000\u0000\u04df\u04e0\u0006\u0094\u0017\u0000\u04e0\u0138\u0001"+ + "\u0000\u0000\u0000\u04e1\u04e2\u0003\u00a3J\u0000\u04e2\u04e3\u0001\u0000"+ + "\u0000\u0000\u04e3\u04e4\u0006\u0095\u0018\u0000\u04e4\u013a\u0001\u0000"+ + "\u0000\u0000\u04e5\u04e6\u0003\u00adO\u0000\u04e6\u04e7\u0001\u0000\u0000"+ + "\u0000\u04e7\u04e8\u0006\u0096\u001e\u0000\u04e8\u013c\u0001\u0000\u0000"+ + "\u0000\u04e9\u04ea\u0003\u00a9M\u0000\u04ea\u04eb\u0001\u0000\u0000\u0000"+ + "\u04eb\u04ec\u0006\u0097\u001f\u0000\u04ec\u013e\u0001\u0000\u0000\u0000"+ + "\u04ed\u04ee\u00037\u0014\u0000\u04ee\u04ef\u0001\u0000\u0000\u0000\u04ef"+ + "\u04f0\u0006\u0098\n\u0000\u04f0\u0140\u0001\u0000\u0000\u0000\u04f1\u04f2"+ + "\u00039\u0015\u0000\u04f2\u04f3\u0001\u0000\u0000\u0000\u04f3\u04f4\u0006"+ + 
"\u0099\n\u0000\u04f4\u0142\u0001\u0000\u0000\u0000\u04f5\u04f6\u0003;"+ + "\u0016\u0000\u04f6\u04f7\u0001\u0000\u0000\u0000\u04f7\u04f8\u0006\u009a"+ + "\n\u0000\u04f8\u0144\u0001\u0000\u0000\u0000\u04f9\u04fa\u0003=\u0017"+ + "\u0000\u04fa\u04fb\u0001\u0000\u0000\u0000\u04fb\u04fc\u0006\u009b\u000f"+ + "\u0000\u04fc\u04fd\u0006\u009b\u000b\u0000\u04fd\u0146\u0001\u0000\u0000"+ + "\u0000\u04fe\u04ff\u0007\u0001\u0000\u0000\u04ff\u0500\u0007\t\u0000\u0000"+ + "\u0500\u0501\u0007\u000f\u0000\u0000\u0501\u0502\u0007\u0007\u0000\u0000"+ + "\u0502\u0148\u0001\u0000\u0000\u0000\u0503\u0504\u00037\u0014\u0000\u0504"+ + "\u0505\u0001\u0000\u0000\u0000\u0505\u0506\u0006\u009d\n\u0000\u0506\u014a"+ + "\u0001\u0000\u0000\u0000\u0507\u0508\u00039\u0015\u0000\u0508\u0509\u0001"+ + "\u0000\u0000\u0000\u0509\u050a\u0006\u009e\n\u0000\u050a\u014c\u0001\u0000"+ + "\u0000\u0000\u050b\u050c\u0003;\u0016\u0000\u050c\u050d\u0001\u0000\u0000"+ + "\u0000\u050d\u050e\u0006\u009f\n\u0000\u050e\u014e\u0001\u0000\u0000\u0000"+ + "\u050f\u0510\u0003\u00a7L\u0000\u0510\u0511\u0001\u0000\u0000\u0000\u0511"+ + "\u0512\u0006\u00a0\u0010\u0000\u0512\u0513\u0006\u00a0\u000b\u0000\u0513"+ + "\u0150\u0001\u0000\u0000\u0000\u0514\u0515\u0005:\u0000\u0000\u0515\u0152"+ + "\u0001\u0000\u0000\u0000\u0516\u051c\u0003I\u001d\u0000\u0517\u051c\u0003"+ + "?\u0018\u0000\u0518\u051c\u0003g,\u0000\u0519\u051c\u0003A\u0019\u0000"+ + "\u051a\u051c\u0003O \u0000\u051b\u0516\u0001\u0000\u0000\u0000\u051b\u0517"+ + "\u0001\u0000\u0000\u0000\u051b\u0518\u0001\u0000\u0000\u0000\u051b\u0519"+ + "\u0001\u0000\u0000\u0000\u051b\u051a\u0001\u0000\u0000\u0000\u051c\u051d"+ + "\u0001\u0000\u0000\u0000\u051d\u051b\u0001\u0000\u0000\u0000\u051d\u051e"+ + "\u0001\u0000\u0000\u0000\u051e\u0154\u0001\u0000\u0000\u0000\u051f\u0520"+ + "\u00037\u0014\u0000\u0520\u0521\u0001\u0000\u0000\u0000\u0521\u0522\u0006"+ + "\u00a3\n\u0000\u0522\u0156\u0001\u0000\u0000\u0000\u0523\u0524\u00039"+ + 
"\u0015\u0000\u0524\u0525\u0001\u0000\u0000\u0000\u0525\u0526\u0006\u00a4"+ + "\n\u0000\u0526\u0158\u0001\u0000\u0000\u0000\u0527\u0528\u0003;\u0016"+ + "\u0000\u0528\u0529\u0001\u0000\u0000\u0000\u0529\u052a\u0006\u00a5\n\u0000"+ + "\u052a\u015a\u0001\u0000\u0000\u0000\u052b\u052c\u0003=\u0017\u0000\u052c"+ + "\u052d\u0001\u0000\u0000\u0000\u052d\u052e\u0006\u00a6\u000f\u0000\u052e"+ + "\u052f\u0006\u00a6\u000b\u0000\u052f\u015c\u0001\u0000\u0000\u0000\u0530"+ + "\u0531\u0003\u0151\u00a1\u0000\u0531\u0532\u0001\u0000\u0000\u0000\u0532"+ + "\u0533\u0006\u00a7\u0011\u0000\u0533\u015e\u0001\u0000\u0000\u0000\u0534"+ + "\u0535\u0003c*\u0000\u0535\u0536\u0001\u0000\u0000\u0000\u0536\u0537\u0006"+ + "\u00a8\u0012\u0000\u0537\u0160\u0001\u0000\u0000\u0000\u0538\u0539\u0003"+ + "g,\u0000\u0539\u053a\u0001\u0000\u0000\u0000\u053a\u053b\u0006\u00a9\u0016"+ + "\u0000\u053b\u0162\u0001\u0000\u0000\u0000\u053c\u053d\u0003\u010b~\u0000"+ + "\u053d\u053e\u0001\u0000\u0000\u0000\u053e\u053f\u0006\u00aa \u0000\u053f"+ + "\u0540\u0006\u00aa!\u0000\u0540\u0164\u0001\u0000\u0000\u0000\u0541\u0542"+ + "\u0003\u00cf`\u0000\u0542\u0543\u0001\u0000\u0000\u0000\u0543\u0544\u0006"+ + "\u00ab\u0014\u0000\u0544\u0166\u0001\u0000\u0000\u0000\u0545\u0546\u0003"+ + "S\"\u0000\u0546\u0547\u0001\u0000\u0000\u0000\u0547\u0548\u0006\u00ac"+ + "\u0015\u0000\u0548\u0168\u0001\u0000\u0000\u0000\u0549\u054a\u00037\u0014"+ + "\u0000\u054a\u054b\u0001\u0000\u0000\u0000\u054b\u054c\u0006\u00ad\n\u0000"+ + "\u054c\u016a\u0001\u0000\u0000\u0000\u054d\u054e\u00039\u0015\u0000\u054e"+ + "\u054f\u0001\u0000\u0000\u0000\u054f\u0550\u0006\u00ae\n\u0000\u0550\u016c"+ + "\u0001\u0000\u0000\u0000\u0551\u0552\u0003;\u0016\u0000\u0552\u0553\u0001"+ + "\u0000\u0000\u0000\u0553\u0554\u0006\u00af\n\u0000\u0554\u016e\u0001\u0000"+ + "\u0000\u0000\u0555\u0556\u0003=\u0017\u0000\u0556\u0557\u0001\u0000\u0000"+ + "\u0000\u0557\u0558\u0006\u00b0\u000f\u0000\u0558\u0559\u0006\u00b0\u000b"+ + 
"\u0000\u0559\u055a\u0006\u00b0\u000b\u0000\u055a\u0170\u0001\u0000\u0000"+ + "\u0000\u055b\u055c\u0003c*\u0000\u055c\u055d\u0001\u0000\u0000\u0000\u055d"+ + "\u055e\u0006\u00b1\u0012\u0000\u055e\u0172\u0001\u0000\u0000\u0000\u055f"+ + "\u0560\u0003g,\u0000\u0560\u0561\u0001\u0000\u0000\u0000\u0561\u0562\u0006"+ + "\u00b2\u0016\u0000\u0562\u0174\u0001\u0000\u0000\u0000\u0563\u0564\u0003"+ + "\u00e9m\u0000\u0564\u0565\u0001\u0000\u0000\u0000\u0565\u0566\u0006\u00b3"+ + "\u0019\u0000\u0566\u0176\u0001\u0000\u0000\u0000\u0567\u0568\u00037\u0014"+ + "\u0000\u0568\u0569\u0001\u0000\u0000\u0000\u0569\u056a\u0006\u00b4\n\u0000"+ + "\u056a\u0178\u0001\u0000\u0000\u0000\u056b\u056c\u00039\u0015\u0000\u056c"+ + "\u056d\u0001\u0000\u0000\u0000\u056d\u056e\u0006\u00b5\n\u0000\u056e\u017a"+ + "\u0001\u0000\u0000\u0000\u056f\u0570\u0003;\u0016\u0000\u0570\u0571\u0001"+ + "\u0000\u0000\u0000\u0571\u0572\u0006\u00b6\n\u0000\u0572\u017c\u0001\u0000"+ + "\u0000\u0000\u0573\u0574\u0003=\u0017\u0000\u0574\u0575\u0001\u0000\u0000"+ + "\u0000\u0575\u0576\u0006\u00b7\u000f\u0000\u0576\u0577\u0006\u00b7\u000b"+ + "\u0000\u0577\u017e\u0001\u0000\u0000\u0000\u0578\u0579\u0003\u00cf`\u0000"+ + "\u0579\u057a\u0001\u0000\u0000\u0000\u057a\u057b\u0006\u00b8\u0014\u0000"+ + "\u057b\u057c\u0006\u00b8\u000b\u0000\u057c\u057d\u0006\u00b8\"\u0000\u057d"+ + "\u0180\u0001\u0000\u0000\u0000\u057e\u057f\u0003S\"\u0000\u057f\u0580"+ + "\u0001\u0000\u0000\u0000\u0580\u0581\u0006\u00b9\u0015\u0000\u0581\u0582"+ + "\u0006\u00b9\u000b\u0000\u0582\u0583\u0006\u00b9\"\u0000\u0583\u0182\u0001"+ + "\u0000\u0000\u0000\u0584\u0585\u00037\u0014\u0000\u0585\u0586\u0001\u0000"+ + "\u0000\u0000\u0586\u0587\u0006\u00ba\n\u0000\u0587\u0184\u0001\u0000\u0000"+ + "\u0000\u0588\u0589\u00039\u0015\u0000\u0589\u058a\u0001\u0000\u0000\u0000"+ + "\u058a\u058b\u0006\u00bb\n\u0000\u058b\u0186\u0001\u0000\u0000\u0000\u058c"+ + "\u058d\u0003;\u0016\u0000\u058d\u058e\u0001\u0000\u0000\u0000\u058e\u058f"+ + 
"\u0006\u00bc\n\u0000\u058f\u0188\u0001\u0000\u0000\u0000\u0590\u0591\u0003"+ + "\u0151\u00a1\u0000\u0591\u0592\u0001\u0000\u0000\u0000\u0592\u0593\u0006"+ + "\u00bd\u0011\u0000\u0593\u0594\u0006\u00bd\u000b\u0000\u0594\u0595\u0006"+ + "\u00bd\t\u0000\u0595\u018a\u0001\u0000\u0000\u0000\u0596\u0597\u0003c"+ + "*\u0000\u0597\u0598\u0001\u0000\u0000\u0000\u0598\u0599\u0006\u00be\u0012"+ + "\u0000\u0599\u059a\u0006\u00be\u000b\u0000\u059a\u059b\u0006\u00be\t\u0000"+ + "\u059b\u018c\u0001\u0000\u0000\u0000\u059c\u059d\u00037\u0014\u0000\u059d"+ + "\u059e\u0001\u0000\u0000\u0000\u059e\u059f\u0006\u00bf\n\u0000\u059f\u018e"+ + "\u0001\u0000\u0000\u0000\u05a0\u05a1\u00039\u0015\u0000\u05a1\u05a2\u0001"+ + "\u0000\u0000\u0000\u05a2\u05a3\u0006\u00c0\n\u0000\u05a3\u0190\u0001\u0000"+ + "\u0000\u0000\u05a4\u05a5\u0003;\u0016\u0000\u05a5\u05a6\u0001\u0000\u0000"+ + "\u0000\u05a6\u05a7\u0006\u00c1\n\u0000\u05a7\u0192\u0001\u0000\u0000\u0000"+ + "\u05a8\u05a9\u0003\u00adO\u0000\u05a9\u05aa\u0001\u0000\u0000\u0000\u05aa"+ + "\u05ab\u0006\u00c2\u000b\u0000\u05ab\u05ac\u0006\u00c2\u0000\u0000\u05ac"+ + "\u05ad\u0006\u00c2\u001e\u0000\u05ad\u0194\u0001\u0000\u0000\u0000\u05ae"+ + "\u05af\u0003\u00a9M\u0000\u05af\u05b0\u0001\u0000\u0000\u0000\u05b0\u05b1"+ + "\u0006\u00c3\u000b\u0000\u05b1\u05b2\u0006\u00c3\u0000\u0000\u05b2\u05b3"+ + "\u0006\u00c3\u001f\u0000\u05b3\u0196\u0001\u0000\u0000\u0000\u05b4\u05b5"+ + "\u0003Y%\u0000\u05b5\u05b6\u0001\u0000\u0000\u0000\u05b6\u05b7\u0006\u00c4"+ + "\u000b\u0000\u05b7\u05b8\u0006\u00c4\u0000\u0000\u05b8\u05b9\u0006\u00c4"+ + "#\u0000\u05b9\u0198\u0001\u0000\u0000\u0000\u05ba\u05bb\u0003=\u0017\u0000"+ + "\u05bb\u05bc\u0001\u0000\u0000\u0000\u05bc\u05bd\u0006\u00c5\u000f\u0000"+ + "\u05bd\u05be\u0006\u00c5\u000b\u0000\u05be\u019a\u0001\u0000\u0000\u0000"+ + "A\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e"+ + "\u0243\u024d\u0251\u0254\u025d\u025f\u026a\u027d\u0282\u028b\u0292\u0297"+ + 
"\u0299\u02a4\u02ac\u02af\u02b1\u02b6\u02bb\u02c1\u02c8\u02cd\u02d3\u02d6"+ + "\u02de\u02e2\u0366\u036b\u0372\u0374\u0384\u0389\u038e\u0390\u0396\u03e3"+ + "\u03e8\u0417\u041b\u0420\u0425\u042a\u042c\u0430\u0432\u0487\u048b\u0490"+ + "\u051b\u051d$\u0005\u0001\u0000\u0005\u0004\u0000\u0005\u0006\u0000\u0005"+ + "\u0002\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t"+ + "\u0000\u0005\u000b\u0000\u0005\r\u0000\u0000\u0001\u0000\u0004\u0000\u0000"+ + "\u0007\u0010\u0000\u0007A\u0000\u0005\u0000\u0000\u0007\u0018\u0000\u0007"+ + "B\u0000\u0007h\u0000\u0007!\u0000\u0007\u001f\u0000\u0007L\u0000\u0007"+ + "\u0019\u0000\u0007#\u0000\u0007/\u0000\u0007@\u0000\u0007P\u0000\u0005"+ + "\n\u0000\u0005\u0007\u0000\u0007Z\u0000\u0007Y\u0000\u0007D\u0000\u0007"+ + "C\u0000\u0007X\u0000\u0005\f\u0000\u0005\u000e\u0000\u0007\u001c\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index e718d402982ed..22d8ec32f3d92 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -310,4 +310,4 @@ inlinestatsCommand atn: -[4, 1, 120, 604, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 
7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 134, 8, 1, 10, 1, 12, 1, 137, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 145, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 163, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 175, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 182, 8, 5, 10, 5, 12, 5, 185, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 192, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 198, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 206, 8, 5, 10, 5, 12, 5, 209, 9, 5, 1, 6, 1, 6, 3, 6, 213, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 220, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 225, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 236, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 242, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 250, 8, 9, 10, 9, 12, 9, 253, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 263, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 268, 8, 10, 10, 10, 12, 10, 271, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 279, 8, 11, 10, 11, 12, 11, 282, 9, 11, 3, 11, 284, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 3, 12, 290, 8, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 5, 15, 300, 8, 15, 10, 15, 12, 15, 303, 9, 15, 1, 16, 1, 16, 1, 16, 3, 16, 308, 8, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 316, 8, 17, 10, 17, 12, 17, 319, 9, 17, 1, 17, 3, 17, 322, 8, 17, 1, 18, 1, 18, 1, 18, 3, 18, 327, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 337, 8, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 343, 8, 22, 10, 22, 12, 22, 346, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 356, 8, 24, 
10, 24, 12, 24, 359, 9, 24, 1, 24, 3, 24, 362, 8, 24, 1, 24, 1, 24, 3, 24, 366, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 373, 8, 26, 1, 26, 1, 26, 3, 26, 377, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 382, 8, 27, 10, 27, 12, 27, 385, 9, 27, 1, 28, 1, 28, 1, 28, 1, 28, 3, 28, 391, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 396, 8, 29, 10, 29, 12, 29, 399, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 404, 8, 30, 10, 30, 12, 30, 407, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 412, 8, 31, 10, 31, 12, 31, 415, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 3, 33, 421, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 436, 8, 34, 10, 34, 12, 34, 439, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 447, 8, 34, 10, 34, 12, 34, 450, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 458, 8, 34, 10, 34, 12, 34, 461, 9, 34, 1, 34, 1, 34, 3, 34, 465, 8, 34, 1, 35, 1, 35, 3, 35, 469, 8, 35, 1, 36, 1, 36, 3, 36, 473, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 482, 8, 38, 10, 38, 12, 38, 485, 9, 38, 1, 39, 1, 39, 3, 39, 489, 8, 39, 1, 39, 1, 39, 3, 39, 493, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 505, 8, 42, 10, 42, 12, 42, 508, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 518, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 530, 8, 47, 10, 47, 12, 47, 533, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 543, 8, 50, 1, 51, 3, 51, 546, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 551, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 573, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 579, 8, 58, 10, 58, 12, 58, 582, 9, 58, 3, 58, 584, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 589, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 602, 8, 61, 1, 61, 0, 4, 2, 10, 18, 20, 62, 0, 2, 4, 6, 
8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 0, 8, 1, 0, 58, 59, 1, 0, 60, 62, 2, 0, 25, 25, 76, 76, 1, 0, 67, 68, 2, 0, 30, 30, 34, 34, 2, 0, 37, 37, 40, 40, 2, 0, 36, 36, 50, 50, 2, 0, 51, 51, 53, 57, 630, 0, 124, 1, 0, 0, 0, 2, 127, 1, 0, 0, 0, 4, 144, 1, 0, 0, 0, 6, 162, 1, 0, 0, 0, 8, 164, 1, 0, 0, 0, 10, 197, 1, 0, 0, 0, 12, 224, 1, 0, 0, 0, 14, 226, 1, 0, 0, 0, 16, 235, 1, 0, 0, 0, 18, 241, 1, 0, 0, 0, 20, 262, 1, 0, 0, 0, 22, 272, 1, 0, 0, 0, 24, 289, 1, 0, 0, 0, 26, 291, 1, 0, 0, 0, 28, 293, 1, 0, 0, 0, 30, 296, 1, 0, 0, 0, 32, 307, 1, 0, 0, 0, 34, 311, 1, 0, 0, 0, 36, 326, 1, 0, 0, 0, 38, 330, 1, 0, 0, 0, 40, 332, 1, 0, 0, 0, 42, 336, 1, 0, 0, 0, 44, 338, 1, 0, 0, 0, 46, 347, 1, 0, 0, 0, 48, 351, 1, 0, 0, 0, 50, 367, 1, 0, 0, 0, 52, 370, 1, 0, 0, 0, 54, 378, 1, 0, 0, 0, 56, 386, 1, 0, 0, 0, 58, 392, 1, 0, 0, 0, 60, 400, 1, 0, 0, 0, 62, 408, 1, 0, 0, 0, 64, 416, 1, 0, 0, 0, 66, 420, 1, 0, 0, 0, 68, 464, 1, 0, 0, 0, 70, 468, 1, 0, 0, 0, 72, 472, 1, 0, 0, 0, 74, 474, 1, 0, 0, 0, 76, 477, 1, 0, 0, 0, 78, 486, 1, 0, 0, 0, 80, 494, 1, 0, 0, 0, 82, 497, 1, 0, 0, 0, 84, 500, 1, 0, 0, 0, 86, 509, 1, 0, 0, 0, 88, 513, 1, 0, 0, 0, 90, 519, 1, 0, 0, 0, 92, 523, 1, 0, 0, 0, 94, 526, 1, 0, 0, 0, 96, 534, 1, 0, 0, 0, 98, 538, 1, 0, 0, 0, 100, 542, 1, 0, 0, 0, 102, 545, 1, 0, 0, 0, 104, 550, 1, 0, 0, 0, 106, 554, 1, 0, 0, 0, 108, 556, 1, 0, 0, 0, 110, 558, 1, 0, 0, 0, 112, 561, 1, 0, 0, 0, 114, 565, 1, 0, 0, 0, 116, 568, 1, 0, 0, 0, 118, 588, 1, 0, 0, 0, 120, 592, 1, 0, 0, 0, 122, 597, 1, 0, 0, 0, 124, 125, 3, 2, 1, 0, 125, 126, 5, 0, 0, 1, 126, 1, 1, 0, 0, 0, 127, 128, 6, 1, -1, 0, 128, 129, 3, 4, 2, 0, 129, 135, 1, 0, 0, 0, 130, 131, 10, 1, 0, 0, 131, 132, 5, 24, 0, 0, 132, 134, 3, 6, 3, 0, 133, 130, 1, 0, 0, 0, 134, 137, 1, 0, 0, 0, 135, 133, 1, 0, 0, 0, 135, 136, 1, 0, 0, 
0, 136, 3, 1, 0, 0, 0, 137, 135, 1, 0, 0, 0, 138, 145, 3, 110, 55, 0, 139, 145, 3, 34, 17, 0, 140, 145, 3, 28, 14, 0, 141, 145, 3, 114, 57, 0, 142, 143, 4, 2, 1, 0, 143, 145, 3, 48, 24, 0, 144, 138, 1, 0, 0, 0, 144, 139, 1, 0, 0, 0, 144, 140, 1, 0, 0, 0, 144, 141, 1, 0, 0, 0, 144, 142, 1, 0, 0, 0, 145, 5, 1, 0, 0, 0, 146, 163, 3, 50, 25, 0, 147, 163, 3, 8, 4, 0, 148, 163, 3, 80, 40, 0, 149, 163, 3, 74, 37, 0, 150, 163, 3, 52, 26, 0, 151, 163, 3, 76, 38, 0, 152, 163, 3, 82, 41, 0, 153, 163, 3, 84, 42, 0, 154, 163, 3, 88, 44, 0, 155, 163, 3, 90, 45, 0, 156, 163, 3, 116, 58, 0, 157, 163, 3, 92, 46, 0, 158, 159, 4, 3, 2, 0, 159, 163, 3, 122, 61, 0, 160, 161, 4, 3, 3, 0, 161, 163, 3, 120, 60, 0, 162, 146, 1, 0, 0, 0, 162, 147, 1, 0, 0, 0, 162, 148, 1, 0, 0, 0, 162, 149, 1, 0, 0, 0, 162, 150, 1, 0, 0, 0, 162, 151, 1, 0, 0, 0, 162, 152, 1, 0, 0, 0, 162, 153, 1, 0, 0, 0, 162, 154, 1, 0, 0, 0, 162, 155, 1, 0, 0, 0, 162, 156, 1, 0, 0, 0, 162, 157, 1, 0, 0, 0, 162, 158, 1, 0, 0, 0, 162, 160, 1, 0, 0, 0, 163, 7, 1, 0, 0, 0, 164, 165, 5, 16, 0, 0, 165, 166, 3, 10, 5, 0, 166, 9, 1, 0, 0, 0, 167, 168, 6, 5, -1, 0, 168, 169, 5, 43, 0, 0, 169, 198, 3, 10, 5, 8, 170, 198, 3, 16, 8, 0, 171, 198, 3, 12, 6, 0, 172, 174, 3, 16, 8, 0, 173, 175, 5, 43, 0, 0, 174, 173, 1, 0, 0, 0, 174, 175, 1, 0, 0, 0, 175, 176, 1, 0, 0, 0, 176, 177, 5, 38, 0, 0, 177, 178, 5, 42, 0, 0, 178, 183, 3, 16, 8, 0, 179, 180, 5, 33, 0, 0, 180, 182, 3, 16, 8, 0, 181, 179, 1, 0, 0, 0, 182, 185, 1, 0, 0, 0, 183, 181, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 186, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 186, 187, 5, 49, 0, 0, 187, 198, 1, 0, 0, 0, 188, 189, 3, 16, 8, 0, 189, 191, 5, 39, 0, 0, 190, 192, 5, 43, 0, 0, 191, 190, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 193, 1, 0, 0, 0, 193, 194, 5, 44, 0, 0, 194, 198, 1, 0, 0, 0, 195, 196, 4, 5, 4, 0, 196, 198, 3, 14, 7, 0, 197, 167, 1, 0, 0, 0, 197, 170, 1, 0, 0, 0, 197, 171, 1, 0, 0, 0, 197, 172, 1, 0, 0, 0, 197, 188, 1, 0, 0, 0, 197, 195, 1, 0, 0, 0, 198, 207, 1, 0, 0, 0, 199, 
200, 10, 5, 0, 0, 200, 201, 5, 29, 0, 0, 201, 206, 3, 10, 5, 6, 202, 203, 10, 4, 0, 0, 203, 204, 5, 46, 0, 0, 204, 206, 3, 10, 5, 5, 205, 199, 1, 0, 0, 0, 205, 202, 1, 0, 0, 0, 206, 209, 1, 0, 0, 0, 207, 205, 1, 0, 0, 0, 207, 208, 1, 0, 0, 0, 208, 11, 1, 0, 0, 0, 209, 207, 1, 0, 0, 0, 210, 212, 3, 16, 8, 0, 211, 213, 5, 43, 0, 0, 212, 211, 1, 0, 0, 0, 212, 213, 1, 0, 0, 0, 213, 214, 1, 0, 0, 0, 214, 215, 5, 41, 0, 0, 215, 216, 3, 106, 53, 0, 216, 225, 1, 0, 0, 0, 217, 219, 3, 16, 8, 0, 218, 220, 5, 43, 0, 0, 219, 218, 1, 0, 0, 0, 219, 220, 1, 0, 0, 0, 220, 221, 1, 0, 0, 0, 221, 222, 5, 48, 0, 0, 222, 223, 3, 106, 53, 0, 223, 225, 1, 0, 0, 0, 224, 210, 1, 0, 0, 0, 224, 217, 1, 0, 0, 0, 225, 13, 1, 0, 0, 0, 226, 227, 3, 16, 8, 0, 227, 228, 5, 63, 0, 0, 228, 229, 3, 106, 53, 0, 229, 15, 1, 0, 0, 0, 230, 236, 3, 18, 9, 0, 231, 232, 3, 18, 9, 0, 232, 233, 3, 108, 54, 0, 233, 234, 3, 18, 9, 0, 234, 236, 1, 0, 0, 0, 235, 230, 1, 0, 0, 0, 235, 231, 1, 0, 0, 0, 236, 17, 1, 0, 0, 0, 237, 238, 6, 9, -1, 0, 238, 242, 3, 20, 10, 0, 239, 240, 7, 0, 0, 0, 240, 242, 3, 18, 9, 3, 241, 237, 1, 0, 0, 0, 241, 239, 1, 0, 0, 0, 242, 251, 1, 0, 0, 0, 243, 244, 10, 2, 0, 0, 244, 245, 7, 1, 0, 0, 245, 250, 3, 18, 9, 3, 246, 247, 10, 1, 0, 0, 247, 248, 7, 0, 0, 0, 248, 250, 3, 18, 9, 2, 249, 243, 1, 0, 0, 0, 249, 246, 1, 0, 0, 0, 250, 253, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 251, 252, 1, 0, 0, 0, 252, 19, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 254, 255, 6, 10, -1, 0, 255, 263, 3, 68, 34, 0, 256, 263, 3, 58, 29, 0, 257, 263, 3, 22, 11, 0, 258, 259, 5, 42, 0, 0, 259, 260, 3, 10, 5, 0, 260, 261, 5, 49, 0, 0, 261, 263, 1, 0, 0, 0, 262, 254, 1, 0, 0, 0, 262, 256, 1, 0, 0, 0, 262, 257, 1, 0, 0, 0, 262, 258, 1, 0, 0, 0, 263, 269, 1, 0, 0, 0, 264, 265, 10, 1, 0, 0, 265, 266, 5, 32, 0, 0, 266, 268, 3, 26, 13, 0, 267, 264, 1, 0, 0, 0, 268, 271, 1, 0, 0, 0, 269, 267, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 21, 1, 0, 0, 0, 271, 269, 1, 0, 0, 0, 272, 273, 3, 24, 12, 0, 273, 283, 5, 42, 0, 0, 274, 284, 5, 60, 0, 
0, 275, 280, 3, 10, 5, 0, 276, 277, 5, 33, 0, 0, 277, 279, 3, 10, 5, 0, 278, 276, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 284, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 283, 274, 1, 0, 0, 0, 283, 275, 1, 0, 0, 0, 283, 284, 1, 0, 0, 0, 284, 285, 1, 0, 0, 0, 285, 286, 5, 49, 0, 0, 286, 23, 1, 0, 0, 0, 287, 290, 5, 63, 0, 0, 288, 290, 3, 72, 36, 0, 289, 287, 1, 0, 0, 0, 289, 288, 1, 0, 0, 0, 290, 25, 1, 0, 0, 0, 291, 292, 3, 64, 32, 0, 292, 27, 1, 0, 0, 0, 293, 294, 5, 12, 0, 0, 294, 295, 3, 30, 15, 0, 295, 29, 1, 0, 0, 0, 296, 301, 3, 32, 16, 0, 297, 298, 5, 33, 0, 0, 298, 300, 3, 32, 16, 0, 299, 297, 1, 0, 0, 0, 300, 303, 1, 0, 0, 0, 301, 299, 1, 0, 0, 0, 301, 302, 1, 0, 0, 0, 302, 31, 1, 0, 0, 0, 303, 301, 1, 0, 0, 0, 304, 305, 3, 58, 29, 0, 305, 306, 5, 31, 0, 0, 306, 308, 1, 0, 0, 0, 307, 304, 1, 0, 0, 0, 307, 308, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 310, 3, 10, 5, 0, 310, 33, 1, 0, 0, 0, 311, 312, 5, 6, 0, 0, 312, 317, 3, 36, 18, 0, 313, 314, 5, 33, 0, 0, 314, 316, 3, 36, 18, 0, 315, 313, 1, 0, 0, 0, 316, 319, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 317, 318, 1, 0, 0, 0, 318, 321, 1, 0, 0, 0, 319, 317, 1, 0, 0, 0, 320, 322, 3, 42, 21, 0, 321, 320, 1, 0, 0, 0, 321, 322, 1, 0, 0, 0, 322, 35, 1, 0, 0, 0, 323, 324, 3, 38, 19, 0, 324, 325, 5, 104, 0, 0, 325, 327, 1, 0, 0, 0, 326, 323, 1, 0, 0, 0, 326, 327, 1, 0, 0, 0, 327, 328, 1, 0, 0, 0, 328, 329, 3, 40, 20, 0, 329, 37, 1, 0, 0, 0, 330, 331, 5, 76, 0, 0, 331, 39, 1, 0, 0, 0, 332, 333, 7, 2, 0, 0, 333, 41, 1, 0, 0, 0, 334, 337, 3, 44, 22, 0, 335, 337, 3, 46, 23, 0, 336, 334, 1, 0, 0, 0, 336, 335, 1, 0, 0, 0, 337, 43, 1, 0, 0, 0, 338, 339, 5, 75, 0, 0, 339, 344, 5, 76, 0, 0, 340, 341, 5, 33, 0, 0, 341, 343, 5, 76, 0, 0, 342, 340, 1, 0, 0, 0, 343, 346, 1, 0, 0, 0, 344, 342, 1, 0, 0, 0, 344, 345, 1, 0, 0, 0, 345, 45, 1, 0, 0, 0, 346, 344, 1, 0, 0, 0, 347, 348, 5, 65, 0, 0, 348, 349, 3, 44, 22, 0, 349, 350, 5, 66, 0, 0, 350, 47, 1, 0, 0, 0, 351, 352, 5, 19, 0, 0, 352, 357, 3, 36, 18, 0, 
353, 354, 5, 33, 0, 0, 354, 356, 3, 36, 18, 0, 355, 353, 1, 0, 0, 0, 356, 359, 1, 0, 0, 0, 357, 355, 1, 0, 0, 0, 357, 358, 1, 0, 0, 0, 358, 361, 1, 0, 0, 0, 359, 357, 1, 0, 0, 0, 360, 362, 3, 54, 27, 0, 361, 360, 1, 0, 0, 0, 361, 362, 1, 0, 0, 0, 362, 365, 1, 0, 0, 0, 363, 364, 5, 28, 0, 0, 364, 366, 3, 30, 15, 0, 365, 363, 1, 0, 0, 0, 365, 366, 1, 0, 0, 0, 366, 49, 1, 0, 0, 0, 367, 368, 5, 4, 0, 0, 368, 369, 3, 30, 15, 0, 369, 51, 1, 0, 0, 0, 370, 372, 5, 15, 0, 0, 371, 373, 3, 54, 27, 0, 372, 371, 1, 0, 0, 0, 372, 373, 1, 0, 0, 0, 373, 376, 1, 0, 0, 0, 374, 375, 5, 28, 0, 0, 375, 377, 3, 30, 15, 0, 376, 374, 1, 0, 0, 0, 376, 377, 1, 0, 0, 0, 377, 53, 1, 0, 0, 0, 378, 383, 3, 56, 28, 0, 379, 380, 5, 33, 0, 0, 380, 382, 3, 56, 28, 0, 381, 379, 1, 0, 0, 0, 382, 385, 1, 0, 0, 0, 383, 381, 1, 0, 0, 0, 383, 384, 1, 0, 0, 0, 384, 55, 1, 0, 0, 0, 385, 383, 1, 0, 0, 0, 386, 387, 3, 32, 16, 0, 387, 390, 4, 28, 10, 0, 388, 389, 5, 16, 0, 0, 389, 391, 3, 10, 5, 0, 390, 388, 1, 0, 0, 0, 390, 391, 1, 0, 0, 0, 391, 57, 1, 0, 0, 0, 392, 397, 3, 72, 36, 0, 393, 394, 5, 35, 0, 0, 394, 396, 3, 72, 36, 0, 395, 393, 1, 0, 0, 0, 396, 399, 1, 0, 0, 0, 397, 395, 1, 0, 0, 0, 397, 398, 1, 0, 0, 0, 398, 59, 1, 0, 0, 0, 399, 397, 1, 0, 0, 0, 400, 405, 3, 66, 33, 0, 401, 402, 5, 35, 0, 0, 402, 404, 3, 66, 33, 0, 403, 401, 1, 0, 0, 0, 404, 407, 1, 0, 0, 0, 405, 403, 1, 0, 0, 0, 405, 406, 1, 0, 0, 0, 406, 61, 1, 0, 0, 0, 407, 405, 1, 0, 0, 0, 408, 413, 3, 60, 30, 0, 409, 410, 5, 33, 0, 0, 410, 412, 3, 60, 30, 0, 411, 409, 1, 0, 0, 0, 412, 415, 1, 0, 0, 0, 413, 411, 1, 0, 0, 0, 413, 414, 1, 0, 0, 0, 414, 63, 1, 0, 0, 0, 415, 413, 1, 0, 0, 0, 416, 417, 7, 3, 0, 0, 417, 65, 1, 0, 0, 0, 418, 421, 5, 80, 0, 0, 419, 421, 3, 70, 35, 0, 420, 418, 1, 0, 0, 0, 420, 419, 1, 0, 0, 0, 421, 67, 1, 0, 0, 0, 422, 465, 5, 44, 0, 0, 423, 424, 3, 104, 52, 0, 424, 425, 5, 67, 0, 0, 425, 465, 1, 0, 0, 0, 426, 465, 3, 102, 51, 0, 427, 465, 3, 104, 52, 0, 428, 465, 3, 98, 49, 0, 429, 465, 3, 70, 35, 0, 430, 465, 3, 
106, 53, 0, 431, 432, 5, 65, 0, 0, 432, 437, 3, 100, 50, 0, 433, 434, 5, 33, 0, 0, 434, 436, 3, 100, 50, 0, 435, 433, 1, 0, 0, 0, 436, 439, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 437, 438, 1, 0, 0, 0, 438, 440, 1, 0, 0, 0, 439, 437, 1, 0, 0, 0, 440, 441, 5, 66, 0, 0, 441, 465, 1, 0, 0, 0, 442, 443, 5, 65, 0, 0, 443, 448, 3, 98, 49, 0, 444, 445, 5, 33, 0, 0, 445, 447, 3, 98, 49, 0, 446, 444, 1, 0, 0, 0, 447, 450, 1, 0, 0, 0, 448, 446, 1, 0, 0, 0, 448, 449, 1, 0, 0, 0, 449, 451, 1, 0, 0, 0, 450, 448, 1, 0, 0, 0, 451, 452, 5, 66, 0, 0, 452, 465, 1, 0, 0, 0, 453, 454, 5, 65, 0, 0, 454, 459, 3, 106, 53, 0, 455, 456, 5, 33, 0, 0, 456, 458, 3, 106, 53, 0, 457, 455, 1, 0, 0, 0, 458, 461, 1, 0, 0, 0, 459, 457, 1, 0, 0, 0, 459, 460, 1, 0, 0, 0, 460, 462, 1, 0, 0, 0, 461, 459, 1, 0, 0, 0, 462, 463, 5, 66, 0, 0, 463, 465, 1, 0, 0, 0, 464, 422, 1, 0, 0, 0, 464, 423, 1, 0, 0, 0, 464, 426, 1, 0, 0, 0, 464, 427, 1, 0, 0, 0, 464, 428, 1, 0, 0, 0, 464, 429, 1, 0, 0, 0, 464, 430, 1, 0, 0, 0, 464, 431, 1, 0, 0, 0, 464, 442, 1, 0, 0, 0, 464, 453, 1, 0, 0, 0, 465, 69, 1, 0, 0, 0, 466, 469, 5, 47, 0, 0, 467, 469, 5, 64, 0, 0, 468, 466, 1, 0, 0, 0, 468, 467, 1, 0, 0, 0, 469, 71, 1, 0, 0, 0, 470, 473, 3, 64, 32, 0, 471, 473, 3, 70, 35, 0, 472, 470, 1, 0, 0, 0, 472, 471, 1, 0, 0, 0, 473, 73, 1, 0, 0, 0, 474, 475, 5, 9, 0, 0, 475, 476, 5, 26, 0, 0, 476, 75, 1, 0, 0, 0, 477, 478, 5, 14, 0, 0, 478, 483, 3, 78, 39, 0, 479, 480, 5, 33, 0, 0, 480, 482, 3, 78, 39, 0, 481, 479, 1, 0, 0, 0, 482, 485, 1, 0, 0, 0, 483, 481, 1, 0, 0, 0, 483, 484, 1, 0, 0, 0, 484, 77, 1, 0, 0, 0, 485, 483, 1, 0, 0, 0, 486, 488, 3, 10, 5, 0, 487, 489, 7, 4, 0, 0, 488, 487, 1, 0, 0, 0, 488, 489, 1, 0, 0, 0, 489, 492, 1, 0, 0, 0, 490, 491, 5, 45, 0, 0, 491, 493, 7, 5, 0, 0, 492, 490, 1, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 79, 1, 0, 0, 0, 494, 495, 5, 8, 0, 0, 495, 496, 3, 62, 31, 0, 496, 81, 1, 0, 0, 0, 497, 498, 5, 2, 0, 0, 498, 499, 3, 62, 31, 0, 499, 83, 1, 0, 0, 0, 500, 501, 5, 11, 0, 0, 501, 506, 3, 86, 43, 0, 502, 503, 5, 
33, 0, 0, 503, 505, 3, 86, 43, 0, 504, 502, 1, 0, 0, 0, 505, 508, 1, 0, 0, 0, 506, 504, 1, 0, 0, 0, 506, 507, 1, 0, 0, 0, 507, 85, 1, 0, 0, 0, 508, 506, 1, 0, 0, 0, 509, 510, 3, 60, 30, 0, 510, 511, 5, 84, 0, 0, 511, 512, 3, 60, 30, 0, 512, 87, 1, 0, 0, 0, 513, 514, 5, 1, 0, 0, 514, 515, 3, 20, 10, 0, 515, 517, 3, 106, 53, 0, 516, 518, 3, 94, 47, 0, 517, 516, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 89, 1, 0, 0, 0, 519, 520, 5, 7, 0, 0, 520, 521, 3, 20, 10, 0, 521, 522, 3, 106, 53, 0, 522, 91, 1, 0, 0, 0, 523, 524, 5, 10, 0, 0, 524, 525, 3, 58, 29, 0, 525, 93, 1, 0, 0, 0, 526, 531, 3, 96, 48, 0, 527, 528, 5, 33, 0, 0, 528, 530, 3, 96, 48, 0, 529, 527, 1, 0, 0, 0, 530, 533, 1, 0, 0, 0, 531, 529, 1, 0, 0, 0, 531, 532, 1, 0, 0, 0, 532, 95, 1, 0, 0, 0, 533, 531, 1, 0, 0, 0, 534, 535, 3, 64, 32, 0, 535, 536, 5, 31, 0, 0, 536, 537, 3, 68, 34, 0, 537, 97, 1, 0, 0, 0, 538, 539, 7, 6, 0, 0, 539, 99, 1, 0, 0, 0, 540, 543, 3, 102, 51, 0, 541, 543, 3, 104, 52, 0, 542, 540, 1, 0, 0, 0, 542, 541, 1, 0, 0, 0, 543, 101, 1, 0, 0, 0, 544, 546, 7, 0, 0, 0, 545, 544, 1, 0, 0, 0, 545, 546, 1, 0, 0, 0, 546, 547, 1, 0, 0, 0, 547, 548, 5, 27, 0, 0, 548, 103, 1, 0, 0, 0, 549, 551, 7, 0, 0, 0, 550, 549, 1, 0, 0, 0, 550, 551, 1, 0, 0, 0, 551, 552, 1, 0, 0, 0, 552, 553, 5, 26, 0, 0, 553, 105, 1, 0, 0, 0, 554, 555, 5, 25, 0, 0, 555, 107, 1, 0, 0, 0, 556, 557, 7, 7, 0, 0, 557, 109, 1, 0, 0, 0, 558, 559, 5, 5, 0, 0, 559, 560, 3, 112, 56, 0, 560, 111, 1, 0, 0, 0, 561, 562, 5, 65, 0, 0, 562, 563, 3, 2, 1, 0, 563, 564, 5, 66, 0, 0, 564, 113, 1, 0, 0, 0, 565, 566, 5, 13, 0, 0, 566, 567, 5, 100, 0, 0, 567, 115, 1, 0, 0, 0, 568, 569, 5, 3, 0, 0, 569, 572, 5, 90, 0, 0, 570, 571, 5, 88, 0, 0, 571, 573, 3, 60, 30, 0, 572, 570, 1, 0, 0, 0, 572, 573, 1, 0, 0, 0, 573, 583, 1, 0, 0, 0, 574, 575, 5, 89, 0, 0, 575, 580, 3, 118, 59, 0, 576, 577, 5, 33, 0, 0, 577, 579, 3, 118, 59, 0, 578, 576, 1, 0, 0, 0, 579, 582, 1, 0, 0, 0, 580, 578, 1, 0, 0, 0, 580, 581, 1, 0, 0, 0, 581, 584, 1, 0, 0, 0, 582, 580, 1, 0, 0, 0, 
583, 574, 1, 0, 0, 0, 583, 584, 1, 0, 0, 0, 584, 117, 1, 0, 0, 0, 585, 586, 3, 60, 30, 0, 586, 587, 5, 31, 0, 0, 587, 589, 1, 0, 0, 0, 588, 585, 1, 0, 0, 0, 588, 589, 1, 0, 0, 0, 589, 590, 1, 0, 0, 0, 590, 591, 3, 60, 30, 0, 591, 119, 1, 0, 0, 0, 592, 593, 5, 18, 0, 0, 593, 594, 3, 36, 18, 0, 594, 595, 5, 88, 0, 0, 595, 596, 3, 62, 31, 0, 596, 121, 1, 0, 0, 0, 597, 598, 5, 17, 0, 0, 598, 601, 3, 54, 27, 0, 599, 600, 5, 28, 0, 0, 600, 602, 3, 30, 15, 0, 601, 599, 1, 0, 0, 0, 601, 602, 1, 0, 0, 0, 602, 123, 1, 0, 0, 0, 59, 135, 144, 162, 174, 183, 191, 197, 205, 207, 212, 219, 224, 235, 241, 249, 251, 262, 269, 280, 283, 289, 301, 307, 317, 321, 326, 336, 344, 357, 361, 365, 372, 376, 383, 390, 397, 405, 413, 420, 437, 448, 459, 464, 468, 472, 483, 488, 492, 506, 517, 531, 542, 545, 550, 572, 580, 583, 588, 601] \ No newline at end of file +[4, 1, 120, 603, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 134, 8, 1, 10, 1, 12, 1, 137, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 145, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 163, 8, 3, 1, 4, 1, 4, 1, 4, 1, 
5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 175, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 182, 8, 5, 10, 5, 12, 5, 185, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 192, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 198, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 206, 8, 5, 10, 5, 12, 5, 209, 9, 5, 1, 6, 1, 6, 3, 6, 213, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 220, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 225, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 236, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 242, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 250, 8, 9, 10, 9, 12, 9, 253, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 263, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 268, 8, 10, 10, 10, 12, 10, 271, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 279, 8, 11, 10, 11, 12, 11, 282, 9, 11, 3, 11, 284, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 3, 12, 290, 8, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 5, 15, 300, 8, 15, 10, 15, 12, 15, 303, 9, 15, 1, 16, 1, 16, 1, 16, 3, 16, 308, 8, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 316, 8, 17, 10, 17, 12, 17, 319, 9, 17, 1, 17, 3, 17, 322, 8, 17, 1, 18, 1, 18, 1, 18, 3, 18, 327, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 337, 8, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 343, 8, 22, 10, 22, 12, 22, 346, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 356, 8, 24, 10, 24, 12, 24, 359, 9, 24, 1, 24, 3, 24, 362, 8, 24, 1, 24, 1, 24, 3, 24, 366, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 373, 8, 26, 1, 26, 1, 26, 3, 26, 377, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 382, 8, 27, 10, 27, 12, 27, 385, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 390, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 395, 8, 29, 10, 29, 12, 29, 398, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 403, 8, 30, 10, 30, 12, 30, 406, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 411, 8, 31, 10, 31, 12, 31, 414, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 3, 33, 420, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 
1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 435, 8, 34, 10, 34, 12, 34, 438, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 446, 8, 34, 10, 34, 12, 34, 449, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 457, 8, 34, 10, 34, 12, 34, 460, 9, 34, 1, 34, 1, 34, 3, 34, 464, 8, 34, 1, 35, 1, 35, 3, 35, 468, 8, 35, 1, 36, 1, 36, 3, 36, 472, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 481, 8, 38, 10, 38, 12, 38, 484, 9, 38, 1, 39, 1, 39, 3, 39, 488, 8, 39, 1, 39, 1, 39, 3, 39, 492, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 504, 8, 42, 10, 42, 12, 42, 507, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 517, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 529, 8, 47, 10, 47, 12, 47, 532, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 542, 8, 50, 1, 51, 3, 51, 545, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 550, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 572, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 578, 8, 58, 10, 58, 12, 58, 581, 9, 58, 3, 58, 583, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 588, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 601, 8, 61, 1, 61, 0, 4, 2, 10, 18, 20, 62, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 0, 8, 1, 0, 58, 59, 1, 0, 60, 62, 2, 0, 25, 25, 76, 76, 1, 0, 67, 68, 2, 0, 30, 30, 34, 34, 2, 0, 37, 37, 40, 40, 2, 0, 36, 36, 50, 50, 2, 0, 51, 51, 53, 57, 629, 0, 124, 1, 0, 0, 0, 2, 127, 1, 0, 0, 0, 4, 144, 1, 0, 0, 0, 6, 162, 1, 0, 0, 0, 8, 164, 1, 0, 0, 0, 10, 197, 1, 0, 0, 0, 12, 224, 1, 0, 0, 0, 14, 226, 1, 0, 0, 0, 16, 235, 1, 0, 0, 
0, 18, 241, 1, 0, 0, 0, 20, 262, 1, 0, 0, 0, 22, 272, 1, 0, 0, 0, 24, 289, 1, 0, 0, 0, 26, 291, 1, 0, 0, 0, 28, 293, 1, 0, 0, 0, 30, 296, 1, 0, 0, 0, 32, 307, 1, 0, 0, 0, 34, 311, 1, 0, 0, 0, 36, 326, 1, 0, 0, 0, 38, 330, 1, 0, 0, 0, 40, 332, 1, 0, 0, 0, 42, 336, 1, 0, 0, 0, 44, 338, 1, 0, 0, 0, 46, 347, 1, 0, 0, 0, 48, 351, 1, 0, 0, 0, 50, 367, 1, 0, 0, 0, 52, 370, 1, 0, 0, 0, 54, 378, 1, 0, 0, 0, 56, 386, 1, 0, 0, 0, 58, 391, 1, 0, 0, 0, 60, 399, 1, 0, 0, 0, 62, 407, 1, 0, 0, 0, 64, 415, 1, 0, 0, 0, 66, 419, 1, 0, 0, 0, 68, 463, 1, 0, 0, 0, 70, 467, 1, 0, 0, 0, 72, 471, 1, 0, 0, 0, 74, 473, 1, 0, 0, 0, 76, 476, 1, 0, 0, 0, 78, 485, 1, 0, 0, 0, 80, 493, 1, 0, 0, 0, 82, 496, 1, 0, 0, 0, 84, 499, 1, 0, 0, 0, 86, 508, 1, 0, 0, 0, 88, 512, 1, 0, 0, 0, 90, 518, 1, 0, 0, 0, 92, 522, 1, 0, 0, 0, 94, 525, 1, 0, 0, 0, 96, 533, 1, 0, 0, 0, 98, 537, 1, 0, 0, 0, 100, 541, 1, 0, 0, 0, 102, 544, 1, 0, 0, 0, 104, 549, 1, 0, 0, 0, 106, 553, 1, 0, 0, 0, 108, 555, 1, 0, 0, 0, 110, 557, 1, 0, 0, 0, 112, 560, 1, 0, 0, 0, 114, 564, 1, 0, 0, 0, 116, 567, 1, 0, 0, 0, 118, 587, 1, 0, 0, 0, 120, 591, 1, 0, 0, 0, 122, 596, 1, 0, 0, 0, 124, 125, 3, 2, 1, 0, 125, 126, 5, 0, 0, 1, 126, 1, 1, 0, 0, 0, 127, 128, 6, 1, -1, 0, 128, 129, 3, 4, 2, 0, 129, 135, 1, 0, 0, 0, 130, 131, 10, 1, 0, 0, 131, 132, 5, 24, 0, 0, 132, 134, 3, 6, 3, 0, 133, 130, 1, 0, 0, 0, 134, 137, 1, 0, 0, 0, 135, 133, 1, 0, 0, 0, 135, 136, 1, 0, 0, 0, 136, 3, 1, 0, 0, 0, 137, 135, 1, 0, 0, 0, 138, 145, 3, 110, 55, 0, 139, 145, 3, 34, 17, 0, 140, 145, 3, 28, 14, 0, 141, 145, 3, 114, 57, 0, 142, 143, 4, 2, 1, 0, 143, 145, 3, 48, 24, 0, 144, 138, 1, 0, 0, 0, 144, 139, 1, 0, 0, 0, 144, 140, 1, 0, 0, 0, 144, 141, 1, 0, 0, 0, 144, 142, 1, 0, 0, 0, 145, 5, 1, 0, 0, 0, 146, 163, 3, 50, 25, 0, 147, 163, 3, 8, 4, 0, 148, 163, 3, 80, 40, 0, 149, 163, 3, 74, 37, 0, 150, 163, 3, 52, 26, 0, 151, 163, 3, 76, 38, 0, 152, 163, 3, 82, 41, 0, 153, 163, 3, 84, 42, 0, 154, 163, 3, 88, 44, 0, 155, 163, 3, 90, 45, 0, 156, 163, 3, 116, 58, 0, 157, 
163, 3, 92, 46, 0, 158, 159, 4, 3, 2, 0, 159, 163, 3, 122, 61, 0, 160, 161, 4, 3, 3, 0, 161, 163, 3, 120, 60, 0, 162, 146, 1, 0, 0, 0, 162, 147, 1, 0, 0, 0, 162, 148, 1, 0, 0, 0, 162, 149, 1, 0, 0, 0, 162, 150, 1, 0, 0, 0, 162, 151, 1, 0, 0, 0, 162, 152, 1, 0, 0, 0, 162, 153, 1, 0, 0, 0, 162, 154, 1, 0, 0, 0, 162, 155, 1, 0, 0, 0, 162, 156, 1, 0, 0, 0, 162, 157, 1, 0, 0, 0, 162, 158, 1, 0, 0, 0, 162, 160, 1, 0, 0, 0, 163, 7, 1, 0, 0, 0, 164, 165, 5, 16, 0, 0, 165, 166, 3, 10, 5, 0, 166, 9, 1, 0, 0, 0, 167, 168, 6, 5, -1, 0, 168, 169, 5, 43, 0, 0, 169, 198, 3, 10, 5, 8, 170, 198, 3, 16, 8, 0, 171, 198, 3, 12, 6, 0, 172, 174, 3, 16, 8, 0, 173, 175, 5, 43, 0, 0, 174, 173, 1, 0, 0, 0, 174, 175, 1, 0, 0, 0, 175, 176, 1, 0, 0, 0, 176, 177, 5, 38, 0, 0, 177, 178, 5, 42, 0, 0, 178, 183, 3, 16, 8, 0, 179, 180, 5, 33, 0, 0, 180, 182, 3, 16, 8, 0, 181, 179, 1, 0, 0, 0, 182, 185, 1, 0, 0, 0, 183, 181, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 186, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 186, 187, 5, 49, 0, 0, 187, 198, 1, 0, 0, 0, 188, 189, 3, 16, 8, 0, 189, 191, 5, 39, 0, 0, 190, 192, 5, 43, 0, 0, 191, 190, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 193, 1, 0, 0, 0, 193, 194, 5, 44, 0, 0, 194, 198, 1, 0, 0, 0, 195, 196, 4, 5, 4, 0, 196, 198, 3, 14, 7, 0, 197, 167, 1, 0, 0, 0, 197, 170, 1, 0, 0, 0, 197, 171, 1, 0, 0, 0, 197, 172, 1, 0, 0, 0, 197, 188, 1, 0, 0, 0, 197, 195, 1, 0, 0, 0, 198, 207, 1, 0, 0, 0, 199, 200, 10, 5, 0, 0, 200, 201, 5, 29, 0, 0, 201, 206, 3, 10, 5, 6, 202, 203, 10, 4, 0, 0, 203, 204, 5, 46, 0, 0, 204, 206, 3, 10, 5, 5, 205, 199, 1, 0, 0, 0, 205, 202, 1, 0, 0, 0, 206, 209, 1, 0, 0, 0, 207, 205, 1, 0, 0, 0, 207, 208, 1, 0, 0, 0, 208, 11, 1, 0, 0, 0, 209, 207, 1, 0, 0, 0, 210, 212, 3, 16, 8, 0, 211, 213, 5, 43, 0, 0, 212, 211, 1, 0, 0, 0, 212, 213, 1, 0, 0, 0, 213, 214, 1, 0, 0, 0, 214, 215, 5, 41, 0, 0, 215, 216, 3, 106, 53, 0, 216, 225, 1, 0, 0, 0, 217, 219, 3, 16, 8, 0, 218, 220, 5, 43, 0, 0, 219, 218, 1, 0, 0, 0, 219, 220, 1, 0, 0, 0, 220, 221, 1, 0, 0, 0, 221, 222, 
5, 48, 0, 0, 222, 223, 3, 106, 53, 0, 223, 225, 1, 0, 0, 0, 224, 210, 1, 0, 0, 0, 224, 217, 1, 0, 0, 0, 225, 13, 1, 0, 0, 0, 226, 227, 3, 16, 8, 0, 227, 228, 5, 63, 0, 0, 228, 229, 3, 106, 53, 0, 229, 15, 1, 0, 0, 0, 230, 236, 3, 18, 9, 0, 231, 232, 3, 18, 9, 0, 232, 233, 3, 108, 54, 0, 233, 234, 3, 18, 9, 0, 234, 236, 1, 0, 0, 0, 235, 230, 1, 0, 0, 0, 235, 231, 1, 0, 0, 0, 236, 17, 1, 0, 0, 0, 237, 238, 6, 9, -1, 0, 238, 242, 3, 20, 10, 0, 239, 240, 7, 0, 0, 0, 240, 242, 3, 18, 9, 3, 241, 237, 1, 0, 0, 0, 241, 239, 1, 0, 0, 0, 242, 251, 1, 0, 0, 0, 243, 244, 10, 2, 0, 0, 244, 245, 7, 1, 0, 0, 245, 250, 3, 18, 9, 3, 246, 247, 10, 1, 0, 0, 247, 248, 7, 0, 0, 0, 248, 250, 3, 18, 9, 2, 249, 243, 1, 0, 0, 0, 249, 246, 1, 0, 0, 0, 250, 253, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 251, 252, 1, 0, 0, 0, 252, 19, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 254, 255, 6, 10, -1, 0, 255, 263, 3, 68, 34, 0, 256, 263, 3, 58, 29, 0, 257, 263, 3, 22, 11, 0, 258, 259, 5, 42, 0, 0, 259, 260, 3, 10, 5, 0, 260, 261, 5, 49, 0, 0, 261, 263, 1, 0, 0, 0, 262, 254, 1, 0, 0, 0, 262, 256, 1, 0, 0, 0, 262, 257, 1, 0, 0, 0, 262, 258, 1, 0, 0, 0, 263, 269, 1, 0, 0, 0, 264, 265, 10, 1, 0, 0, 265, 266, 5, 32, 0, 0, 266, 268, 3, 26, 13, 0, 267, 264, 1, 0, 0, 0, 268, 271, 1, 0, 0, 0, 269, 267, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 21, 1, 0, 0, 0, 271, 269, 1, 0, 0, 0, 272, 273, 3, 24, 12, 0, 273, 283, 5, 42, 0, 0, 274, 284, 5, 60, 0, 0, 275, 280, 3, 10, 5, 0, 276, 277, 5, 33, 0, 0, 277, 279, 3, 10, 5, 0, 278, 276, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 284, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 283, 274, 1, 0, 0, 0, 283, 275, 1, 0, 0, 0, 283, 284, 1, 0, 0, 0, 284, 285, 1, 0, 0, 0, 285, 286, 5, 49, 0, 0, 286, 23, 1, 0, 0, 0, 287, 290, 5, 63, 0, 0, 288, 290, 3, 72, 36, 0, 289, 287, 1, 0, 0, 0, 289, 288, 1, 0, 0, 0, 290, 25, 1, 0, 0, 0, 291, 292, 3, 64, 32, 0, 292, 27, 1, 0, 0, 0, 293, 294, 5, 12, 0, 0, 294, 295, 3, 30, 15, 0, 295, 29, 1, 0, 0, 0, 296, 301, 3, 32, 16, 0, 297, 
298, 5, 33, 0, 0, 298, 300, 3, 32, 16, 0, 299, 297, 1, 0, 0, 0, 300, 303, 1, 0, 0, 0, 301, 299, 1, 0, 0, 0, 301, 302, 1, 0, 0, 0, 302, 31, 1, 0, 0, 0, 303, 301, 1, 0, 0, 0, 304, 305, 3, 58, 29, 0, 305, 306, 5, 31, 0, 0, 306, 308, 1, 0, 0, 0, 307, 304, 1, 0, 0, 0, 307, 308, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 310, 3, 10, 5, 0, 310, 33, 1, 0, 0, 0, 311, 312, 5, 6, 0, 0, 312, 317, 3, 36, 18, 0, 313, 314, 5, 33, 0, 0, 314, 316, 3, 36, 18, 0, 315, 313, 1, 0, 0, 0, 316, 319, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 317, 318, 1, 0, 0, 0, 318, 321, 1, 0, 0, 0, 319, 317, 1, 0, 0, 0, 320, 322, 3, 42, 21, 0, 321, 320, 1, 0, 0, 0, 321, 322, 1, 0, 0, 0, 322, 35, 1, 0, 0, 0, 323, 324, 3, 38, 19, 0, 324, 325, 5, 104, 0, 0, 325, 327, 1, 0, 0, 0, 326, 323, 1, 0, 0, 0, 326, 327, 1, 0, 0, 0, 327, 328, 1, 0, 0, 0, 328, 329, 3, 40, 20, 0, 329, 37, 1, 0, 0, 0, 330, 331, 5, 76, 0, 0, 331, 39, 1, 0, 0, 0, 332, 333, 7, 2, 0, 0, 333, 41, 1, 0, 0, 0, 334, 337, 3, 44, 22, 0, 335, 337, 3, 46, 23, 0, 336, 334, 1, 0, 0, 0, 336, 335, 1, 0, 0, 0, 337, 43, 1, 0, 0, 0, 338, 339, 5, 75, 0, 0, 339, 344, 5, 76, 0, 0, 340, 341, 5, 33, 0, 0, 341, 343, 5, 76, 0, 0, 342, 340, 1, 0, 0, 0, 343, 346, 1, 0, 0, 0, 344, 342, 1, 0, 0, 0, 344, 345, 1, 0, 0, 0, 345, 45, 1, 0, 0, 0, 346, 344, 1, 0, 0, 0, 347, 348, 5, 65, 0, 0, 348, 349, 3, 44, 22, 0, 349, 350, 5, 66, 0, 0, 350, 47, 1, 0, 0, 0, 351, 352, 5, 19, 0, 0, 352, 357, 3, 36, 18, 0, 353, 354, 5, 33, 0, 0, 354, 356, 3, 36, 18, 0, 355, 353, 1, 0, 0, 0, 356, 359, 1, 0, 0, 0, 357, 355, 1, 0, 0, 0, 357, 358, 1, 0, 0, 0, 358, 361, 1, 0, 0, 0, 359, 357, 1, 0, 0, 0, 360, 362, 3, 54, 27, 0, 361, 360, 1, 0, 0, 0, 361, 362, 1, 0, 0, 0, 362, 365, 1, 0, 0, 0, 363, 364, 5, 28, 0, 0, 364, 366, 3, 30, 15, 0, 365, 363, 1, 0, 0, 0, 365, 366, 1, 0, 0, 0, 366, 49, 1, 0, 0, 0, 367, 368, 5, 4, 0, 0, 368, 369, 3, 30, 15, 0, 369, 51, 1, 0, 0, 0, 370, 372, 5, 15, 0, 0, 371, 373, 3, 54, 27, 0, 372, 371, 1, 0, 0, 0, 372, 373, 1, 0, 0, 0, 373, 376, 1, 0, 0, 0, 374, 375, 5, 28, 0, 0, 375, 
377, 3, 30, 15, 0, 376, 374, 1, 0, 0, 0, 376, 377, 1, 0, 0, 0, 377, 53, 1, 0, 0, 0, 378, 383, 3, 56, 28, 0, 379, 380, 5, 33, 0, 0, 380, 382, 3, 56, 28, 0, 381, 379, 1, 0, 0, 0, 382, 385, 1, 0, 0, 0, 383, 381, 1, 0, 0, 0, 383, 384, 1, 0, 0, 0, 384, 55, 1, 0, 0, 0, 385, 383, 1, 0, 0, 0, 386, 389, 3, 32, 16, 0, 387, 388, 5, 16, 0, 0, 388, 390, 3, 10, 5, 0, 389, 387, 1, 0, 0, 0, 389, 390, 1, 0, 0, 0, 390, 57, 1, 0, 0, 0, 391, 396, 3, 72, 36, 0, 392, 393, 5, 35, 0, 0, 393, 395, 3, 72, 36, 0, 394, 392, 1, 0, 0, 0, 395, 398, 1, 0, 0, 0, 396, 394, 1, 0, 0, 0, 396, 397, 1, 0, 0, 0, 397, 59, 1, 0, 0, 0, 398, 396, 1, 0, 0, 0, 399, 404, 3, 66, 33, 0, 400, 401, 5, 35, 0, 0, 401, 403, 3, 66, 33, 0, 402, 400, 1, 0, 0, 0, 403, 406, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 404, 405, 1, 0, 0, 0, 405, 61, 1, 0, 0, 0, 406, 404, 1, 0, 0, 0, 407, 412, 3, 60, 30, 0, 408, 409, 5, 33, 0, 0, 409, 411, 3, 60, 30, 0, 410, 408, 1, 0, 0, 0, 411, 414, 1, 0, 0, 0, 412, 410, 1, 0, 0, 0, 412, 413, 1, 0, 0, 0, 413, 63, 1, 0, 0, 0, 414, 412, 1, 0, 0, 0, 415, 416, 7, 3, 0, 0, 416, 65, 1, 0, 0, 0, 417, 420, 5, 80, 0, 0, 418, 420, 3, 70, 35, 0, 419, 417, 1, 0, 0, 0, 419, 418, 1, 0, 0, 0, 420, 67, 1, 0, 0, 0, 421, 464, 5, 44, 0, 0, 422, 423, 3, 104, 52, 0, 423, 424, 5, 67, 0, 0, 424, 464, 1, 0, 0, 0, 425, 464, 3, 102, 51, 0, 426, 464, 3, 104, 52, 0, 427, 464, 3, 98, 49, 0, 428, 464, 3, 70, 35, 0, 429, 464, 3, 106, 53, 0, 430, 431, 5, 65, 0, 0, 431, 436, 3, 100, 50, 0, 432, 433, 5, 33, 0, 0, 433, 435, 3, 100, 50, 0, 434, 432, 1, 0, 0, 0, 435, 438, 1, 0, 0, 0, 436, 434, 1, 0, 0, 0, 436, 437, 1, 0, 0, 0, 437, 439, 1, 0, 0, 0, 438, 436, 1, 0, 0, 0, 439, 440, 5, 66, 0, 0, 440, 464, 1, 0, 0, 0, 441, 442, 5, 65, 0, 0, 442, 447, 3, 98, 49, 0, 443, 444, 5, 33, 0, 0, 444, 446, 3, 98, 49, 0, 445, 443, 1, 0, 0, 0, 446, 449, 1, 0, 0, 0, 447, 445, 1, 0, 0, 0, 447, 448, 1, 0, 0, 0, 448, 450, 1, 0, 0, 0, 449, 447, 1, 0, 0, 0, 450, 451, 5, 66, 0, 0, 451, 464, 1, 0, 0, 0, 452, 453, 5, 65, 0, 0, 453, 458, 3, 106, 53, 0, 454, 455, 
5, 33, 0, 0, 455, 457, 3, 106, 53, 0, 456, 454, 1, 0, 0, 0, 457, 460, 1, 0, 0, 0, 458, 456, 1, 0, 0, 0, 458, 459, 1, 0, 0, 0, 459, 461, 1, 0, 0, 0, 460, 458, 1, 0, 0, 0, 461, 462, 5, 66, 0, 0, 462, 464, 1, 0, 0, 0, 463, 421, 1, 0, 0, 0, 463, 422, 1, 0, 0, 0, 463, 425, 1, 0, 0, 0, 463, 426, 1, 0, 0, 0, 463, 427, 1, 0, 0, 0, 463, 428, 1, 0, 0, 0, 463, 429, 1, 0, 0, 0, 463, 430, 1, 0, 0, 0, 463, 441, 1, 0, 0, 0, 463, 452, 1, 0, 0, 0, 464, 69, 1, 0, 0, 0, 465, 468, 5, 47, 0, 0, 466, 468, 5, 64, 0, 0, 467, 465, 1, 0, 0, 0, 467, 466, 1, 0, 0, 0, 468, 71, 1, 0, 0, 0, 469, 472, 3, 64, 32, 0, 470, 472, 3, 70, 35, 0, 471, 469, 1, 0, 0, 0, 471, 470, 1, 0, 0, 0, 472, 73, 1, 0, 0, 0, 473, 474, 5, 9, 0, 0, 474, 475, 5, 26, 0, 0, 475, 75, 1, 0, 0, 0, 476, 477, 5, 14, 0, 0, 477, 482, 3, 78, 39, 0, 478, 479, 5, 33, 0, 0, 479, 481, 3, 78, 39, 0, 480, 478, 1, 0, 0, 0, 481, 484, 1, 0, 0, 0, 482, 480, 1, 0, 0, 0, 482, 483, 1, 0, 0, 0, 483, 77, 1, 0, 0, 0, 484, 482, 1, 0, 0, 0, 485, 487, 3, 10, 5, 0, 486, 488, 7, 4, 0, 0, 487, 486, 1, 0, 0, 0, 487, 488, 1, 0, 0, 0, 488, 491, 1, 0, 0, 0, 489, 490, 5, 45, 0, 0, 490, 492, 7, 5, 0, 0, 491, 489, 1, 0, 0, 0, 491, 492, 1, 0, 0, 0, 492, 79, 1, 0, 0, 0, 493, 494, 5, 8, 0, 0, 494, 495, 3, 62, 31, 0, 495, 81, 1, 0, 0, 0, 496, 497, 5, 2, 0, 0, 497, 498, 3, 62, 31, 0, 498, 83, 1, 0, 0, 0, 499, 500, 5, 11, 0, 0, 500, 505, 3, 86, 43, 0, 501, 502, 5, 33, 0, 0, 502, 504, 3, 86, 43, 0, 503, 501, 1, 0, 0, 0, 504, 507, 1, 0, 0, 0, 505, 503, 1, 0, 0, 0, 505, 506, 1, 0, 0, 0, 506, 85, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 508, 509, 3, 60, 30, 0, 509, 510, 5, 84, 0, 0, 510, 511, 3, 60, 30, 0, 511, 87, 1, 0, 0, 0, 512, 513, 5, 1, 0, 0, 513, 514, 3, 20, 10, 0, 514, 516, 3, 106, 53, 0, 515, 517, 3, 94, 47, 0, 516, 515, 1, 0, 0, 0, 516, 517, 1, 0, 0, 0, 517, 89, 1, 0, 0, 0, 518, 519, 5, 7, 0, 0, 519, 520, 3, 20, 10, 0, 520, 521, 3, 106, 53, 0, 521, 91, 1, 0, 0, 0, 522, 523, 5, 10, 0, 0, 523, 524, 3, 58, 29, 0, 524, 93, 1, 0, 0, 0, 525, 530, 3, 96, 48, 0, 526, 527, 5, 
33, 0, 0, 527, 529, 3, 96, 48, 0, 528, 526, 1, 0, 0, 0, 529, 532, 1, 0, 0, 0, 530, 528, 1, 0, 0, 0, 530, 531, 1, 0, 0, 0, 531, 95, 1, 0, 0, 0, 532, 530, 1, 0, 0, 0, 533, 534, 3, 64, 32, 0, 534, 535, 5, 31, 0, 0, 535, 536, 3, 68, 34, 0, 536, 97, 1, 0, 0, 0, 537, 538, 7, 6, 0, 0, 538, 99, 1, 0, 0, 0, 539, 542, 3, 102, 51, 0, 540, 542, 3, 104, 52, 0, 541, 539, 1, 0, 0, 0, 541, 540, 1, 0, 0, 0, 542, 101, 1, 0, 0, 0, 543, 545, 7, 0, 0, 0, 544, 543, 1, 0, 0, 0, 544, 545, 1, 0, 0, 0, 545, 546, 1, 0, 0, 0, 546, 547, 5, 27, 0, 0, 547, 103, 1, 0, 0, 0, 548, 550, 7, 0, 0, 0, 549, 548, 1, 0, 0, 0, 549, 550, 1, 0, 0, 0, 550, 551, 1, 0, 0, 0, 551, 552, 5, 26, 0, 0, 552, 105, 1, 0, 0, 0, 553, 554, 5, 25, 0, 0, 554, 107, 1, 0, 0, 0, 555, 556, 7, 7, 0, 0, 556, 109, 1, 0, 0, 0, 557, 558, 5, 5, 0, 0, 558, 559, 3, 112, 56, 0, 559, 111, 1, 0, 0, 0, 560, 561, 5, 65, 0, 0, 561, 562, 3, 2, 1, 0, 562, 563, 5, 66, 0, 0, 563, 113, 1, 0, 0, 0, 564, 565, 5, 13, 0, 0, 565, 566, 5, 100, 0, 0, 566, 115, 1, 0, 0, 0, 567, 568, 5, 3, 0, 0, 568, 571, 5, 90, 0, 0, 569, 570, 5, 88, 0, 0, 570, 572, 3, 60, 30, 0, 571, 569, 1, 0, 0, 0, 571, 572, 1, 0, 0, 0, 572, 582, 1, 0, 0, 0, 573, 574, 5, 89, 0, 0, 574, 579, 3, 118, 59, 0, 575, 576, 5, 33, 0, 0, 576, 578, 3, 118, 59, 0, 577, 575, 1, 0, 0, 0, 578, 581, 1, 0, 0, 0, 579, 577, 1, 0, 0, 0, 579, 580, 1, 0, 0, 0, 580, 583, 1, 0, 0, 0, 581, 579, 1, 0, 0, 0, 582, 573, 1, 0, 0, 0, 582, 583, 1, 0, 0, 0, 583, 117, 1, 0, 0, 0, 584, 585, 3, 60, 30, 0, 585, 586, 5, 31, 0, 0, 586, 588, 1, 0, 0, 0, 587, 584, 1, 0, 0, 0, 587, 588, 1, 0, 0, 0, 588, 589, 1, 0, 0, 0, 589, 590, 3, 60, 30, 0, 590, 119, 1, 0, 0, 0, 591, 592, 5, 18, 0, 0, 592, 593, 3, 36, 18, 0, 593, 594, 5, 88, 0, 0, 594, 595, 3, 62, 31, 0, 595, 121, 1, 0, 0, 0, 596, 597, 5, 17, 0, 0, 597, 600, 3, 54, 27, 0, 598, 599, 5, 28, 0, 0, 599, 601, 3, 30, 15, 0, 600, 598, 1, 0, 0, 0, 600, 601, 1, 0, 0, 0, 601, 123, 1, 0, 0, 0, 59, 135, 144, 162, 174, 183, 191, 197, 205, 207, 212, 219, 224, 235, 241, 249, 251, 262, 
269, 280, 283, 289, 301, 307, 317, 321, 326, 336, 344, 357, 361, 365, 372, 376, 383, 389, 396, 404, 412, 419, 436, 447, 458, 463, 467, 471, 482, 487, 491, 505, 516, 530, 541, 544, 549, 571, 579, 582, 587, 600] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index ee1ed0a05e978..af5c03a27592b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -2935,16 +2935,14 @@ public final AggFieldContext aggField() throws RecognitionException { { setState(386); field(); - setState(387); - if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(390); + setState(389); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,34,_ctx) ) { case 1: { - setState(388); + setState(387); match(WHERE); - setState(389); + setState(388); booleanExpression(0); } break; @@ -3001,23 +2999,23 @@ public final QualifiedNameContext qualifiedName() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(392); + setState(391); identifierOrParameter(); - setState(397); + setState(396); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,35,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(393); + setState(392); match(DOT); - setState(394); + setState(393); identifierOrParameter(); } } } - setState(399); + setState(398); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,35,_ctx); } @@ -3073,23 +3071,23 @@ public final QualifiedNamePatternContext qualifiedNamePattern() throws Recogniti int _alt; enterOuterAlt(_localctx, 1); { - setState(400); + setState(399); identifierPattern(); - 
setState(405); + setState(404); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,36,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(401); + setState(400); match(DOT); - setState(402); + setState(401); identifierPattern(); } } } - setState(407); + setState(406); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,36,_ctx); } @@ -3145,23 +3143,23 @@ public final QualifiedNamePatternsContext qualifiedNamePatterns() throws Recogni int _alt; enterOuterAlt(_localctx, 1); { - setState(408); + setState(407); qualifiedNamePattern(); - setState(413); + setState(412); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,37,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(409); + setState(408); match(COMMA); - setState(410); + setState(409); qualifiedNamePattern(); } } } - setState(415); + setState(414); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,37,_ctx); } @@ -3209,7 +3207,7 @@ public final IdentifierContext identifier() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(416); + setState(415); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -3262,13 +3260,13 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); enterRule(_localctx, 66, RULE_identifierPattern); try { - setState(420); + setState(419); _errHandler.sync(this); switch (_input.LA(1)) { case ID_PATTERN: enterOuterAlt(_localctx, 1); { - setState(418); + setState(417); match(ID_PATTERN); } break; @@ -3276,7 +3274,7 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce case NAMED_OR_POSITIONAL_PARAM: enterOuterAlt(_localctx, 2); { - setState(419); + 
setState(418); parameter(); } break; @@ -3551,14 +3549,14 @@ public final ConstantContext constant() throws RecognitionException { enterRule(_localctx, 68, RULE_constant); int _la; try { - setState(464); + setState(463); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,42,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(422); + setState(421); match(NULL); } break; @@ -3566,9 +3564,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(423); + setState(422); integerValue(); - setState(424); + setState(423); match(UNQUOTED_IDENTIFIER); } break; @@ -3576,7 +3574,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(426); + setState(425); decimalValue(); } break; @@ -3584,7 +3582,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(427); + setState(426); integerValue(); } break; @@ -3592,7 +3590,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(428); + setState(427); booleanValue(); } break; @@ -3600,7 +3598,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParameterContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(429); + setState(428); parameter(); } break; @@ -3608,7 +3606,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(430); + setState(429); string(); } break; @@ -3616,27 +3614,27 @@ public final ConstantContext constant() throws RecognitionException { 
_localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(431); + setState(430); match(OPENING_BRACKET); - setState(432); + setState(431); numericValue(); - setState(437); + setState(436); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(433); + setState(432); match(COMMA); - setState(434); + setState(433); numericValue(); } } - setState(439); + setState(438); _errHandler.sync(this); _la = _input.LA(1); } - setState(440); + setState(439); match(CLOSING_BRACKET); } break; @@ -3644,27 +3642,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(442); + setState(441); match(OPENING_BRACKET); - setState(443); + setState(442); booleanValue(); - setState(448); + setState(447); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(444); + setState(443); match(COMMA); - setState(445); + setState(444); booleanValue(); } } - setState(450); + setState(449); _errHandler.sync(this); _la = _input.LA(1); } - setState(451); + setState(450); match(CLOSING_BRACKET); } break; @@ -3672,27 +3670,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(453); + setState(452); match(OPENING_BRACKET); - setState(454); + setState(453); string(); - setState(459); + setState(458); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(455); + setState(454); match(COMMA); - setState(456); + setState(455); string(); } } - setState(461); + setState(460); _errHandler.sync(this); _la = _input.LA(1); } - setState(462); + setState(461); match(CLOSING_BRACKET); } break; @@ -3766,14 +3764,14 @@ public final ParameterContext parameter() throws RecognitionException { ParameterContext _localctx = new ParameterContext(_ctx, getState()); enterRule(_localctx, 
70, RULE_parameter); try { - setState(468); + setState(467); _errHandler.sync(this); switch (_input.LA(1)) { case PARAM: _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(466); + setState(465); match(PARAM); } break; @@ -3781,7 +3779,7 @@ public final ParameterContext parameter() throws RecognitionException { _localctx = new InputNamedOrPositionalParamContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(467); + setState(466); match(NAMED_OR_POSITIONAL_PARAM); } break; @@ -3832,14 +3830,14 @@ public final IdentifierOrParameterContext identifierOrParameter() throws Recogni IdentifierOrParameterContext _localctx = new IdentifierOrParameterContext(_ctx, getState()); enterRule(_localctx, 72, RULE_identifierOrParameter); try { - setState(472); + setState(471); _errHandler.sync(this); switch (_input.LA(1)) { case UNQUOTED_IDENTIFIER: case QUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(470); + setState(469); identifier(); } break; @@ -3847,7 +3845,7 @@ public final IdentifierOrParameterContext identifierOrParameter() throws Recogni case NAMED_OR_POSITIONAL_PARAM: enterOuterAlt(_localctx, 2); { - setState(471); + setState(470); parameter(); } break; @@ -3896,9 +3894,9 @@ public final LimitCommandContext limitCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(474); + setState(473); match(LIMIT); - setState(475); + setState(474); match(INTEGER_LITERAL); } } @@ -3953,25 +3951,25 @@ public final SortCommandContext sortCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(477); + setState(476); match(SORT); - setState(478); + setState(477); orderExpression(); - setState(483); + setState(482); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,45,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(479); + setState(478); match(COMMA); - setState(480); + 
setState(479); orderExpression(); } } } - setState(485); + setState(484); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,45,_ctx); } @@ -4027,14 +4025,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(486); + setState(485); booleanExpression(0); - setState(488); + setState(487); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { case 1: { - setState(487); + setState(486); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -4048,14 +4046,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(492); + setState(491); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { case 1: { - setState(490); + setState(489); match(NULLS); - setState(491); + setState(490); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -4114,9 +4112,9 @@ public final KeepCommandContext keepCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(494); + setState(493); match(KEEP); - setState(495); + setState(494); qualifiedNamePatterns(); } } @@ -4163,9 +4161,9 @@ public final DropCommandContext dropCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(497); + setState(496); match(DROP); - setState(498); + setState(497); qualifiedNamePatterns(); } } @@ -4220,25 +4218,25 @@ public final RenameCommandContext renameCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(500); + setState(499); match(RENAME); - setState(501); + setState(500); renameClause(); - setState(506); + setState(505); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,48,_ctx); while ( _alt!=2 && 
_alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(502); + setState(501); match(COMMA); - setState(503); + setState(502); renameClause(); } } } - setState(508); + setState(507); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,48,_ctx); } @@ -4292,11 +4290,11 @@ public final RenameClauseContext renameClause() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(509); + setState(508); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(510); + setState(509); match(AS); - setState(511); + setState(510); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -4349,18 +4347,18 @@ public final DissectCommandContext dissectCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(513); + setState(512); match(DISSECT); - setState(514); + setState(513); primaryExpression(0); - setState(515); + setState(514); string(); - setState(517); + setState(516); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,49,_ctx) ) { case 1: { - setState(516); + setState(515); commandOptions(); } break; @@ -4413,11 +4411,11 @@ public final GrokCommandContext grokCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(519); + setState(518); match(GROK); - setState(520); + setState(519); primaryExpression(0); - setState(521); + setState(520); string(); } } @@ -4464,9 +4462,9 @@ public final MvExpandCommandContext mvExpandCommand() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(523); + setState(522); match(MV_EXPAND); - setState(524); + setState(523); qualifiedName(); } } @@ -4520,23 +4518,23 @@ public final CommandOptionsContext commandOptions() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(526); + setState(525); commandOption(); - setState(531); + setState(530); _errHandler.sync(this); _alt = 
getInterpreter().adaptivePredict(_input,50,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(527); + setState(526); match(COMMA); - setState(528); + setState(527); commandOption(); } } } - setState(533); + setState(532); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,50,_ctx); } @@ -4588,11 +4586,11 @@ public final CommandOptionContext commandOption() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(534); + setState(533); identifier(); - setState(535); + setState(534); match(ASSIGN); - setState(536); + setState(535); constant(); } } @@ -4638,7 +4636,7 @@ public final BooleanValueContext booleanValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(538); + setState(537); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4693,20 +4691,20 @@ public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); enterRule(_localctx, 100, RULE_numericValue); try { - setState(542); + setState(541); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,51,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(540); + setState(539); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(541); + setState(540); integerValue(); } break; @@ -4755,12 +4753,12 @@ public final DecimalValueContext decimalValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(545); + setState(544); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(544); + setState(543); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4773,7 +4771,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(547); + setState(546); match(DECIMAL_LITERAL); } 
} @@ -4820,12 +4818,12 @@ public final IntegerValueContext integerValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(550); + setState(549); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(549); + setState(548); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4838,7 +4836,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(552); + setState(551); match(INTEGER_LITERAL); } } @@ -4882,7 +4880,7 @@ public final StringContext string() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(554); + setState(553); match(QUOTED_STRING); } } @@ -4932,7 +4930,7 @@ public final ComparisonOperatorContext comparisonOperator() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(556); + setState(555); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 281474976710656000L) != 0)) ) { _errHandler.recoverInline(this); @@ -4987,9 +4985,9 @@ public final ExplainCommandContext explainCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(558); + setState(557); match(EXPLAIN); - setState(559); + setState(558); subqueryExpression(); } } @@ -5037,11 +5035,11 @@ public final SubqueryExpressionContext subqueryExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(561); + setState(560); match(OPENING_BRACKET); - setState(562); + setState(561); query(0); - setState(563); + setState(562); match(CLOSING_BRACKET); } } @@ -5098,9 +5096,9 @@ public final ShowCommandContext showCommand() throws RecognitionException { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(565); + setState(564); match(SHOW); - setState(566); + setState(565); match(INFO); } } @@ -5163,46 +5161,46 @@ public final EnrichCommandContext enrichCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 
1); { - setState(568); + setState(567); match(ENRICH); - setState(569); + setState(568); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(572); + setState(571); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,54,_ctx) ) { case 1: { - setState(570); + setState(569); match(ON); - setState(571); + setState(570); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(583); + setState(582); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,56,_ctx) ) { case 1: { - setState(574); + setState(573); match(WITH); - setState(575); + setState(574); enrichWithClause(); - setState(580); + setState(579); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,55,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(576); + setState(575); match(COMMA); - setState(577); + setState(576); enrichWithClause(); } } } - setState(582); + setState(581); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,55,_ctx); } @@ -5259,19 +5257,19 @@ public final EnrichWithClauseContext enrichWithClause() throws RecognitionExcept try { enterOuterAlt(_localctx, 1); { - setState(588); + setState(587); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,57,_ctx) ) { case 1: { - setState(585); + setState(584); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(586); + setState(585); match(ASSIGN); } break; } - setState(590); + setState(589); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -5324,13 +5322,13 @@ public final LookupCommandContext lookupCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(592); + setState(591); match(DEV_LOOKUP); - setState(593); + setState(592); ((LookupCommandContext)_localctx).tableName = indexPattern(); - setState(594); + setState(593); 
match(ON); - setState(595); + setState(594); ((LookupCommandContext)_localctx).matchFields = qualifiedNamePatterns(); } } @@ -5383,18 +5381,18 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(597); + setState(596); match(DEV_INLINESTATS); - setState(598); + setState(597); ((InlinestatsCommandContext)_localctx).stats = aggFields(); - setState(601); + setState(600); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,58,_ctx) ) { case 1: { - setState(599); + setState(598); match(BY); - setState(600); + setState(599); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -5426,8 +5424,6 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return operatorExpression_sempred((OperatorExpressionContext)_localctx, predIndex); case 10: return primaryExpression_sempred((PrimaryExpressionContext)_localctx, predIndex); - case 28: - return aggField_sempred((AggFieldContext)_localctx, predIndex); } return true; } @@ -5481,16 +5477,9 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in } return true; } - private boolean aggField_sempred(AggFieldContext _localctx, int predIndex) { - switch (predIndex) { - case 10: - return this.isDevVersion(); - } - return true; - } public static final String _serializedATN = - "\u0004\u0001x\u025c\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0004\u0001x\u025b\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ @@ -5547,331 +5536,330 @@ private boolean aggField_sempred(AggFieldContext _localctx, int predIndex) { "\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0003\u001a\u0175\b\u001a\u0001"+ 
"\u001a\u0001\u001a\u0003\u001a\u0179\b\u001a\u0001\u001b\u0001\u001b\u0001"+ "\u001b\u0005\u001b\u017e\b\u001b\n\u001b\f\u001b\u0181\t\u001b\u0001\u001c"+ - "\u0001\u001c\u0001\u001c\u0001\u001c\u0003\u001c\u0187\b\u001c\u0001\u001d"+ - "\u0001\u001d\u0001\u001d\u0005\u001d\u018c\b\u001d\n\u001d\f\u001d\u018f"+ - "\t\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0005\u001e\u0194\b\u001e"+ - "\n\u001e\f\u001e\u0197\t\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0005"+ - "\u001f\u019c\b\u001f\n\u001f\f\u001f\u019f\t\u001f\u0001 \u0001 \u0001"+ - "!\u0001!\u0003!\u01a5\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ - "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01b4"+ - "\b\"\n\"\f\"\u01b7\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\""+ - "\u0005\"\u01bf\b\"\n\"\f\"\u01c2\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ - "\"\u0001\"\u0005\"\u01ca\b\"\n\"\f\"\u01cd\t\"\u0001\"\u0001\"\u0003\""+ - "\u01d1\b\"\u0001#\u0001#\u0003#\u01d5\b#\u0001$\u0001$\u0003$\u01d9\b"+ - "$\u0001%\u0001%\u0001%\u0001&\u0001&\u0001&\u0001&\u0005&\u01e2\b&\n&"+ - "\f&\u01e5\t&\u0001\'\u0001\'\u0003\'\u01e9\b\'\u0001\'\u0001\'\u0003\'"+ - "\u01ed\b\'\u0001(\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001"+ - "*\u0001*\u0005*\u01f9\b*\n*\f*\u01fc\t*\u0001+\u0001+\u0001+\u0001+\u0001"+ - ",\u0001,\u0001,\u0001,\u0003,\u0206\b,\u0001-\u0001-\u0001-\u0001-\u0001"+ - ".\u0001.\u0001.\u0001/\u0001/\u0001/\u0005/\u0212\b/\n/\f/\u0215\t/\u0001"+ - "0\u00010\u00010\u00010\u00011\u00011\u00012\u00012\u00032\u021f\b2\u0001"+ - "3\u00033\u0222\b3\u00013\u00013\u00014\u00034\u0227\b4\u00014\u00014\u0001"+ - "5\u00015\u00016\u00016\u00017\u00017\u00017\u00018\u00018\u00018\u0001"+ - "8\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0003:\u023d\b:\u0001"+ - ":\u0001:\u0001:\u0001:\u0005:\u0243\b:\n:\f:\u0246\t:\u0003:\u0248\b:"+ - "\u0001;\u0001;\u0001;\u0003;\u024d\b;\u0001;\u0001;\u0001<\u0001<\u0001"+ - 
"<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001=\u0003=\u025a\b=\u0001=\u0000"+ - "\u0004\u0002\n\u0012\u0014>\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ - "\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPR"+ - "TVXZ\\^`bdfhjlnprtvxz\u0000\b\u0001\u0000:;\u0001\u0000<>\u0002\u0000"+ - "\u0019\u0019LL\u0001\u0000CD\u0002\u0000\u001e\u001e\"\"\u0002\u0000%"+ - "%((\u0002\u0000$$22\u0002\u00003359\u0276\u0000|\u0001\u0000\u0000\u0000"+ - "\u0002\u007f\u0001\u0000\u0000\u0000\u0004\u0090\u0001\u0000\u0000\u0000"+ - "\u0006\u00a2\u0001\u0000\u0000\u0000\b\u00a4\u0001\u0000\u0000\u0000\n"+ - "\u00c5\u0001\u0000\u0000\u0000\f\u00e0\u0001\u0000\u0000\u0000\u000e\u00e2"+ - "\u0001\u0000\u0000\u0000\u0010\u00eb\u0001\u0000\u0000\u0000\u0012\u00f1"+ - "\u0001\u0000\u0000\u0000\u0014\u0106\u0001\u0000\u0000\u0000\u0016\u0110"+ - "\u0001\u0000\u0000\u0000\u0018\u0121\u0001\u0000\u0000\u0000\u001a\u0123"+ - "\u0001\u0000\u0000\u0000\u001c\u0125\u0001\u0000\u0000\u0000\u001e\u0128"+ - "\u0001\u0000\u0000\u0000 \u0133\u0001\u0000\u0000\u0000\"\u0137\u0001"+ - "\u0000\u0000\u0000$\u0146\u0001\u0000\u0000\u0000&\u014a\u0001\u0000\u0000"+ - "\u0000(\u014c\u0001\u0000\u0000\u0000*\u0150\u0001\u0000\u0000\u0000,"+ - "\u0152\u0001\u0000\u0000\u0000.\u015b\u0001\u0000\u0000\u00000\u015f\u0001"+ - "\u0000\u0000\u00002\u016f\u0001\u0000\u0000\u00004\u0172\u0001\u0000\u0000"+ - "\u00006\u017a\u0001\u0000\u0000\u00008\u0182\u0001\u0000\u0000\u0000:"+ - "\u0188\u0001\u0000\u0000\u0000<\u0190\u0001\u0000\u0000\u0000>\u0198\u0001"+ - "\u0000\u0000\u0000@\u01a0\u0001\u0000\u0000\u0000B\u01a4\u0001\u0000\u0000"+ - "\u0000D\u01d0\u0001\u0000\u0000\u0000F\u01d4\u0001\u0000\u0000\u0000H"+ - "\u01d8\u0001\u0000\u0000\u0000J\u01da\u0001\u0000\u0000\u0000L\u01dd\u0001"+ - "\u0000\u0000\u0000N\u01e6\u0001\u0000\u0000\u0000P\u01ee\u0001\u0000\u0000"+ - "\u0000R\u01f1\u0001\u0000\u0000\u0000T\u01f4\u0001\u0000\u0000\u0000V"+ - 
"\u01fd\u0001\u0000\u0000\u0000X\u0201\u0001\u0000\u0000\u0000Z\u0207\u0001"+ - "\u0000\u0000\u0000\\\u020b\u0001\u0000\u0000\u0000^\u020e\u0001\u0000"+ - "\u0000\u0000`\u0216\u0001\u0000\u0000\u0000b\u021a\u0001\u0000\u0000\u0000"+ - "d\u021e\u0001\u0000\u0000\u0000f\u0221\u0001\u0000\u0000\u0000h\u0226"+ - "\u0001\u0000\u0000\u0000j\u022a\u0001\u0000\u0000\u0000l\u022c\u0001\u0000"+ - "\u0000\u0000n\u022e\u0001\u0000\u0000\u0000p\u0231\u0001\u0000\u0000\u0000"+ - "r\u0235\u0001\u0000\u0000\u0000t\u0238\u0001\u0000\u0000\u0000v\u024c"+ - "\u0001\u0000\u0000\u0000x\u0250\u0001\u0000\u0000\u0000z\u0255\u0001\u0000"+ - "\u0000\u0000|}\u0003\u0002\u0001\u0000}~\u0005\u0000\u0000\u0001~\u0001"+ - "\u0001\u0000\u0000\u0000\u007f\u0080\u0006\u0001\uffff\uffff\u0000\u0080"+ - "\u0081\u0003\u0004\u0002\u0000\u0081\u0087\u0001\u0000\u0000\u0000\u0082"+ - "\u0083\n\u0001\u0000\u0000\u0083\u0084\u0005\u0018\u0000\u0000\u0084\u0086"+ - "\u0003\u0006\u0003\u0000\u0085\u0082\u0001\u0000\u0000\u0000\u0086\u0089"+ - "\u0001\u0000\u0000\u0000\u0087\u0085\u0001\u0000\u0000\u0000\u0087\u0088"+ - "\u0001\u0000\u0000\u0000\u0088\u0003\u0001\u0000\u0000\u0000\u0089\u0087"+ - "\u0001\u0000\u0000\u0000\u008a\u0091\u0003n7\u0000\u008b\u0091\u0003\""+ - "\u0011\u0000\u008c\u0091\u0003\u001c\u000e\u0000\u008d\u0091\u0003r9\u0000"+ - "\u008e\u008f\u0004\u0002\u0001\u0000\u008f\u0091\u00030\u0018\u0000\u0090"+ - "\u008a\u0001\u0000\u0000\u0000\u0090\u008b\u0001\u0000\u0000\u0000\u0090"+ - "\u008c\u0001\u0000\u0000\u0000\u0090\u008d\u0001\u0000\u0000\u0000\u0090"+ - "\u008e\u0001\u0000\u0000\u0000\u0091\u0005\u0001\u0000\u0000\u0000\u0092"+ - "\u00a3\u00032\u0019\u0000\u0093\u00a3\u0003\b\u0004\u0000\u0094\u00a3"+ - "\u0003P(\u0000\u0095\u00a3\u0003J%\u0000\u0096\u00a3\u00034\u001a\u0000"+ - "\u0097\u00a3\u0003L&\u0000\u0098\u00a3\u0003R)\u0000\u0099\u00a3\u0003"+ - "T*\u0000\u009a\u00a3\u0003X,\u0000\u009b\u00a3\u0003Z-\u0000\u009c\u00a3"+ - 
"\u0003t:\u0000\u009d\u00a3\u0003\\.\u0000\u009e\u009f\u0004\u0003\u0002"+ - "\u0000\u009f\u00a3\u0003z=\u0000\u00a0\u00a1\u0004\u0003\u0003\u0000\u00a1"+ - "\u00a3\u0003x<\u0000\u00a2\u0092\u0001\u0000\u0000\u0000\u00a2\u0093\u0001"+ - "\u0000\u0000\u0000\u00a2\u0094\u0001\u0000\u0000\u0000\u00a2\u0095\u0001"+ - "\u0000\u0000\u0000\u00a2\u0096\u0001\u0000\u0000\u0000\u00a2\u0097\u0001"+ - "\u0000\u0000\u0000\u00a2\u0098\u0001\u0000\u0000\u0000\u00a2\u0099\u0001"+ - "\u0000\u0000\u0000\u00a2\u009a\u0001\u0000\u0000\u0000\u00a2\u009b\u0001"+ - "\u0000\u0000\u0000\u00a2\u009c\u0001\u0000\u0000\u0000\u00a2\u009d\u0001"+ - "\u0000\u0000\u0000\u00a2\u009e\u0001\u0000\u0000\u0000\u00a2\u00a0\u0001"+ - "\u0000\u0000\u0000\u00a3\u0007\u0001\u0000\u0000\u0000\u00a4\u00a5\u0005"+ - "\u0010\u0000\u0000\u00a5\u00a6\u0003\n\u0005\u0000\u00a6\t\u0001\u0000"+ - "\u0000\u0000\u00a7\u00a8\u0006\u0005\uffff\uffff\u0000\u00a8\u00a9\u0005"+ - "+\u0000\u0000\u00a9\u00c6\u0003\n\u0005\b\u00aa\u00c6\u0003\u0010\b\u0000"+ - "\u00ab\u00c6\u0003\f\u0006\u0000\u00ac\u00ae\u0003\u0010\b\u0000\u00ad"+ - "\u00af\u0005+\u0000\u0000\u00ae\u00ad\u0001\u0000\u0000\u0000\u00ae\u00af"+ - "\u0001\u0000\u0000\u0000\u00af\u00b0\u0001\u0000\u0000\u0000\u00b0\u00b1"+ - "\u0005&\u0000\u0000\u00b1\u00b2\u0005*\u0000\u0000\u00b2\u00b7\u0003\u0010"+ - "\b\u0000\u00b3\u00b4\u0005!\u0000\u0000\u00b4\u00b6\u0003\u0010\b\u0000"+ - "\u00b5\u00b3\u0001\u0000\u0000\u0000\u00b6\u00b9\u0001\u0000\u0000\u0000"+ - "\u00b7\u00b5\u0001\u0000\u0000\u0000\u00b7\u00b8\u0001\u0000\u0000\u0000"+ - "\u00b8\u00ba\u0001\u0000\u0000\u0000\u00b9\u00b7\u0001\u0000\u0000\u0000"+ - "\u00ba\u00bb\u00051\u0000\u0000\u00bb\u00c6\u0001\u0000\u0000\u0000\u00bc"+ - "\u00bd\u0003\u0010\b\u0000\u00bd\u00bf\u0005\'\u0000\u0000\u00be\u00c0"+ - "\u0005+\u0000\u0000\u00bf\u00be\u0001\u0000\u0000\u0000\u00bf\u00c0\u0001"+ - "\u0000\u0000\u0000\u00c0\u00c1\u0001\u0000\u0000\u0000\u00c1\u00c2\u0005"+ - 
",\u0000\u0000\u00c2\u00c6\u0001\u0000\u0000\u0000\u00c3\u00c4\u0004\u0005"+ - "\u0004\u0000\u00c4\u00c6\u0003\u000e\u0007\u0000\u00c5\u00a7\u0001\u0000"+ - "\u0000\u0000\u00c5\u00aa\u0001\u0000\u0000\u0000\u00c5\u00ab\u0001\u0000"+ - "\u0000\u0000\u00c5\u00ac\u0001\u0000\u0000\u0000\u00c5\u00bc\u0001\u0000"+ - "\u0000\u0000\u00c5\u00c3\u0001\u0000\u0000\u0000\u00c6\u00cf\u0001\u0000"+ - "\u0000\u0000\u00c7\u00c8\n\u0005\u0000\u0000\u00c8\u00c9\u0005\u001d\u0000"+ - "\u0000\u00c9\u00ce\u0003\n\u0005\u0006\u00ca\u00cb\n\u0004\u0000\u0000"+ - "\u00cb\u00cc\u0005.\u0000\u0000\u00cc\u00ce\u0003\n\u0005\u0005\u00cd"+ - "\u00c7\u0001\u0000\u0000\u0000\u00cd\u00ca\u0001\u0000\u0000\u0000\u00ce"+ - "\u00d1\u0001\u0000\u0000\u0000\u00cf\u00cd\u0001\u0000\u0000\u0000\u00cf"+ - "\u00d0\u0001\u0000\u0000\u0000\u00d0\u000b\u0001\u0000\u0000\u0000\u00d1"+ - "\u00cf\u0001\u0000\u0000\u0000\u00d2\u00d4\u0003\u0010\b\u0000\u00d3\u00d5"+ - "\u0005+\u0000\u0000\u00d4\u00d3\u0001\u0000\u0000\u0000\u00d4\u00d5\u0001"+ - "\u0000\u0000\u0000\u00d5\u00d6\u0001\u0000\u0000\u0000\u00d6\u00d7\u0005"+ - ")\u0000\u0000\u00d7\u00d8\u0003j5\u0000\u00d8\u00e1\u0001\u0000\u0000"+ - "\u0000\u00d9\u00db\u0003\u0010\b\u0000\u00da\u00dc\u0005+\u0000\u0000"+ - "\u00db\u00da\u0001\u0000\u0000\u0000\u00db\u00dc\u0001\u0000\u0000\u0000"+ - "\u00dc\u00dd\u0001\u0000\u0000\u0000\u00dd\u00de\u00050\u0000\u0000\u00de"+ - "\u00df\u0003j5\u0000\u00df\u00e1\u0001\u0000\u0000\u0000\u00e0\u00d2\u0001"+ - "\u0000\u0000\u0000\u00e0\u00d9\u0001\u0000\u0000\u0000\u00e1\r\u0001\u0000"+ - "\u0000\u0000\u00e2\u00e3\u0003\u0010\b\u0000\u00e3\u00e4\u0005?\u0000"+ - "\u0000\u00e4\u00e5\u0003j5\u0000\u00e5\u000f\u0001\u0000\u0000\u0000\u00e6"+ - "\u00ec\u0003\u0012\t\u0000\u00e7\u00e8\u0003\u0012\t\u0000\u00e8\u00e9"+ - "\u0003l6\u0000\u00e9\u00ea\u0003\u0012\t\u0000\u00ea\u00ec\u0001\u0000"+ - "\u0000\u0000\u00eb\u00e6\u0001\u0000\u0000\u0000\u00eb\u00e7\u0001\u0000"+ - 
"\u0000\u0000\u00ec\u0011\u0001\u0000\u0000\u0000\u00ed\u00ee\u0006\t\uffff"+ - "\uffff\u0000\u00ee\u00f2\u0003\u0014\n\u0000\u00ef\u00f0\u0007\u0000\u0000"+ - "\u0000\u00f0\u00f2\u0003\u0012\t\u0003\u00f1\u00ed\u0001\u0000\u0000\u0000"+ - "\u00f1\u00ef\u0001\u0000\u0000\u0000\u00f2\u00fb\u0001\u0000\u0000\u0000"+ - "\u00f3\u00f4\n\u0002\u0000\u0000\u00f4\u00f5\u0007\u0001\u0000\u0000\u00f5"+ - "\u00fa\u0003\u0012\t\u0003\u00f6\u00f7\n\u0001\u0000\u0000\u00f7\u00f8"+ - "\u0007\u0000\u0000\u0000\u00f8\u00fa\u0003\u0012\t\u0002\u00f9\u00f3\u0001"+ - "\u0000\u0000\u0000\u00f9\u00f6\u0001\u0000\u0000\u0000\u00fa\u00fd\u0001"+ - "\u0000\u0000\u0000\u00fb\u00f9\u0001\u0000\u0000\u0000\u00fb\u00fc\u0001"+ - "\u0000\u0000\u0000\u00fc\u0013\u0001\u0000\u0000\u0000\u00fd\u00fb\u0001"+ - "\u0000\u0000\u0000\u00fe\u00ff\u0006\n\uffff\uffff\u0000\u00ff\u0107\u0003"+ - "D\"\u0000\u0100\u0107\u0003:\u001d\u0000\u0101\u0107\u0003\u0016\u000b"+ - "\u0000\u0102\u0103\u0005*\u0000\u0000\u0103\u0104\u0003\n\u0005\u0000"+ - "\u0104\u0105\u00051\u0000\u0000\u0105\u0107\u0001\u0000\u0000\u0000\u0106"+ - "\u00fe\u0001\u0000\u0000\u0000\u0106\u0100\u0001\u0000\u0000\u0000\u0106"+ - "\u0101\u0001\u0000\u0000\u0000\u0106\u0102\u0001\u0000\u0000\u0000\u0107"+ - "\u010d\u0001\u0000\u0000\u0000\u0108\u0109\n\u0001\u0000\u0000\u0109\u010a"+ - "\u0005 \u0000\u0000\u010a\u010c\u0003\u001a\r\u0000\u010b\u0108\u0001"+ - "\u0000\u0000\u0000\u010c\u010f\u0001\u0000\u0000\u0000\u010d\u010b\u0001"+ - "\u0000\u0000\u0000\u010d\u010e\u0001\u0000\u0000\u0000\u010e\u0015\u0001"+ - "\u0000\u0000\u0000\u010f\u010d\u0001\u0000\u0000\u0000\u0110\u0111\u0003"+ - "\u0018\f\u0000\u0111\u011b\u0005*\u0000\u0000\u0112\u011c\u0005<\u0000"+ - "\u0000\u0113\u0118\u0003\n\u0005\u0000\u0114\u0115\u0005!\u0000\u0000"+ - "\u0115\u0117\u0003\n\u0005\u0000\u0116\u0114\u0001\u0000\u0000\u0000\u0117"+ - "\u011a\u0001\u0000\u0000\u0000\u0118\u0116\u0001\u0000\u0000\u0000\u0118"+ - 
"\u0119\u0001\u0000\u0000\u0000\u0119\u011c\u0001\u0000\u0000\u0000\u011a"+ - "\u0118\u0001\u0000\u0000\u0000\u011b\u0112\u0001\u0000\u0000\u0000\u011b"+ - "\u0113\u0001\u0000\u0000\u0000\u011b\u011c\u0001\u0000\u0000\u0000\u011c"+ - "\u011d\u0001\u0000\u0000\u0000\u011d\u011e\u00051\u0000\u0000\u011e\u0017"+ - "\u0001\u0000\u0000\u0000\u011f\u0122\u0005?\u0000\u0000\u0120\u0122\u0003"+ - "H$\u0000\u0121\u011f\u0001\u0000\u0000\u0000\u0121\u0120\u0001\u0000\u0000"+ - "\u0000\u0122\u0019\u0001\u0000\u0000\u0000\u0123\u0124\u0003@ \u0000\u0124"+ - "\u001b\u0001\u0000\u0000\u0000\u0125\u0126\u0005\f\u0000\u0000\u0126\u0127"+ - "\u0003\u001e\u000f\u0000\u0127\u001d\u0001\u0000\u0000\u0000\u0128\u012d"+ - "\u0003 \u0010\u0000\u0129\u012a\u0005!\u0000\u0000\u012a\u012c\u0003 "+ - "\u0010\u0000\u012b\u0129\u0001\u0000\u0000\u0000\u012c\u012f\u0001\u0000"+ - "\u0000\u0000\u012d\u012b\u0001\u0000\u0000\u0000\u012d\u012e\u0001\u0000"+ - "\u0000\u0000\u012e\u001f\u0001\u0000\u0000\u0000\u012f\u012d\u0001\u0000"+ - "\u0000\u0000\u0130\u0131\u0003:\u001d\u0000\u0131\u0132\u0005\u001f\u0000"+ - "\u0000\u0132\u0134\u0001\u0000\u0000\u0000\u0133\u0130\u0001\u0000\u0000"+ - "\u0000\u0133\u0134\u0001\u0000\u0000\u0000\u0134\u0135\u0001\u0000\u0000"+ - "\u0000\u0135\u0136\u0003\n\u0005\u0000\u0136!\u0001\u0000\u0000\u0000"+ - "\u0137\u0138\u0005\u0006\u0000\u0000\u0138\u013d\u0003$\u0012\u0000\u0139"+ - "\u013a\u0005!\u0000\u0000\u013a\u013c\u0003$\u0012\u0000\u013b\u0139\u0001"+ - "\u0000\u0000\u0000\u013c\u013f\u0001\u0000\u0000\u0000\u013d\u013b\u0001"+ - "\u0000\u0000\u0000\u013d\u013e\u0001\u0000\u0000\u0000\u013e\u0141\u0001"+ - "\u0000\u0000\u0000\u013f\u013d\u0001\u0000\u0000\u0000\u0140\u0142\u0003"+ - "*\u0015\u0000\u0141\u0140\u0001\u0000\u0000\u0000\u0141\u0142\u0001\u0000"+ - "\u0000\u0000\u0142#\u0001\u0000\u0000\u0000\u0143\u0144\u0003&\u0013\u0000"+ - "\u0144\u0145\u0005h\u0000\u0000\u0145\u0147\u0001\u0000\u0000\u0000\u0146"+ - 
"\u0143\u0001\u0000\u0000\u0000\u0146\u0147\u0001\u0000\u0000\u0000\u0147"+ - "\u0148\u0001\u0000\u0000\u0000\u0148\u0149\u0003(\u0014\u0000\u0149%\u0001"+ - "\u0000\u0000\u0000\u014a\u014b\u0005L\u0000\u0000\u014b\'\u0001\u0000"+ - "\u0000\u0000\u014c\u014d\u0007\u0002\u0000\u0000\u014d)\u0001\u0000\u0000"+ - "\u0000\u014e\u0151\u0003,\u0016\u0000\u014f\u0151\u0003.\u0017\u0000\u0150"+ - "\u014e\u0001\u0000\u0000\u0000\u0150\u014f\u0001\u0000\u0000\u0000\u0151"+ - "+\u0001\u0000\u0000\u0000\u0152\u0153\u0005K\u0000\u0000\u0153\u0158\u0005"+ - "L\u0000\u0000\u0154\u0155\u0005!\u0000\u0000\u0155\u0157\u0005L\u0000"+ - "\u0000\u0156\u0154\u0001\u0000\u0000\u0000\u0157\u015a\u0001\u0000\u0000"+ - "\u0000\u0158\u0156\u0001\u0000\u0000\u0000\u0158\u0159\u0001\u0000\u0000"+ - "\u0000\u0159-\u0001\u0000\u0000\u0000\u015a\u0158\u0001\u0000\u0000\u0000"+ - "\u015b\u015c\u0005A\u0000\u0000\u015c\u015d\u0003,\u0016\u0000\u015d\u015e"+ - "\u0005B\u0000\u0000\u015e/\u0001\u0000\u0000\u0000\u015f\u0160\u0005\u0013"+ - "\u0000\u0000\u0160\u0165\u0003$\u0012\u0000\u0161\u0162\u0005!\u0000\u0000"+ - "\u0162\u0164\u0003$\u0012\u0000\u0163\u0161\u0001\u0000\u0000\u0000\u0164"+ - "\u0167\u0001\u0000\u0000\u0000\u0165\u0163\u0001\u0000\u0000\u0000\u0165"+ - "\u0166\u0001\u0000\u0000\u0000\u0166\u0169\u0001\u0000\u0000\u0000\u0167"+ - "\u0165\u0001\u0000\u0000\u0000\u0168\u016a\u00036\u001b\u0000\u0169\u0168"+ - "\u0001\u0000\u0000\u0000\u0169\u016a\u0001\u0000\u0000\u0000\u016a\u016d"+ - "\u0001\u0000\u0000\u0000\u016b\u016c\u0005\u001c\u0000\u0000\u016c\u016e"+ - "\u0003\u001e\u000f\u0000\u016d\u016b\u0001\u0000\u0000\u0000\u016d\u016e"+ - "\u0001\u0000\u0000\u0000\u016e1\u0001\u0000\u0000\u0000\u016f\u0170\u0005"+ - "\u0004\u0000\u0000\u0170\u0171\u0003\u001e\u000f\u0000\u01713\u0001\u0000"+ - "\u0000\u0000\u0172\u0174\u0005\u000f\u0000\u0000\u0173\u0175\u00036\u001b"+ - "\u0000\u0174\u0173\u0001\u0000\u0000\u0000\u0174\u0175\u0001\u0000\u0000"+ - 
"\u0000\u0175\u0178\u0001\u0000\u0000\u0000\u0176\u0177\u0005\u001c\u0000"+ - "\u0000\u0177\u0179\u0003\u001e\u000f\u0000\u0178\u0176\u0001\u0000\u0000"+ - "\u0000\u0178\u0179\u0001\u0000\u0000\u0000\u01795\u0001\u0000\u0000\u0000"+ - "\u017a\u017f\u00038\u001c\u0000\u017b\u017c\u0005!\u0000\u0000\u017c\u017e"+ - "\u00038\u001c\u0000\u017d\u017b\u0001\u0000\u0000\u0000\u017e\u0181\u0001"+ - "\u0000\u0000\u0000\u017f\u017d\u0001\u0000\u0000\u0000\u017f\u0180\u0001"+ - "\u0000\u0000\u0000\u01807\u0001\u0000\u0000\u0000\u0181\u017f\u0001\u0000"+ - "\u0000\u0000\u0182\u0183\u0003 \u0010\u0000\u0183\u0186\u0004\u001c\n"+ - "\u0000\u0184\u0185\u0005\u0010\u0000\u0000\u0185\u0187\u0003\n\u0005\u0000"+ - "\u0186\u0184\u0001\u0000\u0000\u0000\u0186\u0187\u0001\u0000\u0000\u0000"+ - "\u01879\u0001\u0000\u0000\u0000\u0188\u018d\u0003H$\u0000\u0189\u018a"+ - "\u0005#\u0000\u0000\u018a\u018c\u0003H$\u0000\u018b\u0189\u0001\u0000"+ - "\u0000\u0000\u018c\u018f\u0001\u0000\u0000\u0000\u018d\u018b\u0001\u0000"+ - "\u0000\u0000\u018d\u018e\u0001\u0000\u0000\u0000\u018e;\u0001\u0000\u0000"+ - "\u0000\u018f\u018d\u0001\u0000\u0000\u0000\u0190\u0195\u0003B!\u0000\u0191"+ - "\u0192\u0005#\u0000\u0000\u0192\u0194\u0003B!\u0000\u0193\u0191\u0001"+ - "\u0000\u0000\u0000\u0194\u0197\u0001\u0000\u0000\u0000\u0195\u0193\u0001"+ - "\u0000\u0000\u0000\u0195\u0196\u0001\u0000\u0000\u0000\u0196=\u0001\u0000"+ - "\u0000\u0000\u0197\u0195\u0001\u0000\u0000\u0000\u0198\u019d\u0003<\u001e"+ - "\u0000\u0199\u019a\u0005!\u0000\u0000\u019a\u019c\u0003<\u001e\u0000\u019b"+ - "\u0199\u0001\u0000\u0000\u0000\u019c\u019f\u0001\u0000\u0000\u0000\u019d"+ - "\u019b\u0001\u0000\u0000\u0000\u019d\u019e\u0001\u0000\u0000\u0000\u019e"+ - "?\u0001\u0000\u0000\u0000\u019f\u019d\u0001\u0000\u0000\u0000\u01a0\u01a1"+ - "\u0007\u0003\u0000\u0000\u01a1A\u0001\u0000\u0000\u0000\u01a2\u01a5\u0005"+ - "P\u0000\u0000\u01a3\u01a5\u0003F#\u0000\u01a4\u01a2\u0001\u0000\u0000"+ - 
"\u0000\u01a4\u01a3\u0001\u0000\u0000\u0000\u01a5C\u0001\u0000\u0000\u0000"+ - "\u01a6\u01d1\u0005,\u0000\u0000\u01a7\u01a8\u0003h4\u0000\u01a8\u01a9"+ - "\u0005C\u0000\u0000\u01a9\u01d1\u0001\u0000\u0000\u0000\u01aa\u01d1\u0003"+ - "f3\u0000\u01ab\u01d1\u0003h4\u0000\u01ac\u01d1\u0003b1\u0000\u01ad\u01d1"+ - "\u0003F#\u0000\u01ae\u01d1\u0003j5\u0000\u01af\u01b0\u0005A\u0000\u0000"+ - "\u01b0\u01b5\u0003d2\u0000\u01b1\u01b2\u0005!\u0000\u0000\u01b2\u01b4"+ - "\u0003d2\u0000\u01b3\u01b1\u0001\u0000\u0000\u0000\u01b4\u01b7\u0001\u0000"+ - "\u0000\u0000\u01b5\u01b3\u0001\u0000\u0000\u0000\u01b5\u01b6\u0001\u0000"+ - "\u0000\u0000\u01b6\u01b8\u0001\u0000\u0000\u0000\u01b7\u01b5\u0001\u0000"+ - "\u0000\u0000\u01b8\u01b9\u0005B\u0000\u0000\u01b9\u01d1\u0001\u0000\u0000"+ - "\u0000\u01ba\u01bb\u0005A\u0000\u0000\u01bb\u01c0\u0003b1\u0000\u01bc"+ - "\u01bd\u0005!\u0000\u0000\u01bd\u01bf\u0003b1\u0000\u01be\u01bc\u0001"+ - "\u0000\u0000\u0000\u01bf\u01c2\u0001\u0000\u0000\u0000\u01c0\u01be\u0001"+ - "\u0000\u0000\u0000\u01c0\u01c1\u0001\u0000\u0000\u0000\u01c1\u01c3\u0001"+ - "\u0000\u0000\u0000\u01c2\u01c0\u0001\u0000\u0000\u0000\u01c3\u01c4\u0005"+ - "B\u0000\u0000\u01c4\u01d1\u0001\u0000\u0000\u0000\u01c5\u01c6\u0005A\u0000"+ - "\u0000\u01c6\u01cb\u0003j5\u0000\u01c7\u01c8\u0005!\u0000\u0000\u01c8"+ - "\u01ca\u0003j5\u0000\u01c9\u01c7\u0001\u0000\u0000\u0000\u01ca\u01cd\u0001"+ - "\u0000\u0000\u0000\u01cb\u01c9\u0001\u0000\u0000\u0000\u01cb\u01cc\u0001"+ - "\u0000\u0000\u0000\u01cc\u01ce\u0001\u0000\u0000\u0000\u01cd\u01cb\u0001"+ - "\u0000\u0000\u0000\u01ce\u01cf\u0005B\u0000\u0000\u01cf\u01d1\u0001\u0000"+ - "\u0000\u0000\u01d0\u01a6\u0001\u0000\u0000\u0000\u01d0\u01a7\u0001\u0000"+ - "\u0000\u0000\u01d0\u01aa\u0001\u0000\u0000\u0000\u01d0\u01ab\u0001\u0000"+ - "\u0000\u0000\u01d0\u01ac\u0001\u0000\u0000\u0000\u01d0\u01ad\u0001\u0000"+ - "\u0000\u0000\u01d0\u01ae\u0001\u0000\u0000\u0000\u01d0\u01af\u0001\u0000"+ - 
"\u0000\u0000\u01d0\u01ba\u0001\u0000\u0000\u0000\u01d0\u01c5\u0001\u0000"+ - "\u0000\u0000\u01d1E\u0001\u0000\u0000\u0000\u01d2\u01d5\u0005/\u0000\u0000"+ - "\u01d3\u01d5\u0005@\u0000\u0000\u01d4\u01d2\u0001\u0000\u0000\u0000\u01d4"+ - "\u01d3\u0001\u0000\u0000\u0000\u01d5G\u0001\u0000\u0000\u0000\u01d6\u01d9"+ - "\u0003@ \u0000\u01d7\u01d9\u0003F#\u0000\u01d8\u01d6\u0001\u0000\u0000"+ - "\u0000\u01d8\u01d7\u0001\u0000\u0000\u0000\u01d9I\u0001\u0000\u0000\u0000"+ - "\u01da\u01db\u0005\t\u0000\u0000\u01db\u01dc\u0005\u001a\u0000\u0000\u01dc"+ - "K\u0001\u0000\u0000\u0000\u01dd\u01de\u0005\u000e\u0000\u0000\u01de\u01e3"+ - "\u0003N\'\u0000\u01df\u01e0\u0005!\u0000\u0000\u01e0\u01e2\u0003N\'\u0000"+ - "\u01e1\u01df\u0001\u0000\u0000\u0000\u01e2\u01e5\u0001\u0000\u0000\u0000"+ - "\u01e3\u01e1\u0001\u0000\u0000\u0000\u01e3\u01e4\u0001\u0000\u0000\u0000"+ - "\u01e4M\u0001\u0000\u0000\u0000\u01e5\u01e3\u0001\u0000\u0000\u0000\u01e6"+ - "\u01e8\u0003\n\u0005\u0000\u01e7\u01e9\u0007\u0004\u0000\u0000\u01e8\u01e7"+ - "\u0001\u0000\u0000\u0000\u01e8\u01e9\u0001\u0000\u0000\u0000\u01e9\u01ec"+ - "\u0001\u0000\u0000\u0000\u01ea\u01eb\u0005-\u0000\u0000\u01eb\u01ed\u0007"+ - "\u0005\u0000\u0000\u01ec\u01ea\u0001\u0000\u0000\u0000\u01ec\u01ed\u0001"+ - "\u0000\u0000\u0000\u01edO\u0001\u0000\u0000\u0000\u01ee\u01ef\u0005\b"+ - "\u0000\u0000\u01ef\u01f0\u0003>\u001f\u0000\u01f0Q\u0001\u0000\u0000\u0000"+ - "\u01f1\u01f2\u0005\u0002\u0000\u0000\u01f2\u01f3\u0003>\u001f\u0000\u01f3"+ - "S\u0001\u0000\u0000\u0000\u01f4\u01f5\u0005\u000b\u0000\u0000\u01f5\u01fa"+ - "\u0003V+\u0000\u01f6\u01f7\u0005!\u0000\u0000\u01f7\u01f9\u0003V+\u0000"+ - "\u01f8\u01f6\u0001\u0000\u0000\u0000\u01f9\u01fc\u0001\u0000\u0000\u0000"+ - "\u01fa\u01f8\u0001\u0000\u0000\u0000\u01fa\u01fb\u0001\u0000\u0000\u0000"+ - "\u01fbU\u0001\u0000\u0000\u0000\u01fc\u01fa\u0001\u0000\u0000\u0000\u01fd"+ - "\u01fe\u0003<\u001e\u0000\u01fe\u01ff\u0005T\u0000\u0000\u01ff\u0200\u0003"+ - 
"<\u001e\u0000\u0200W\u0001\u0000\u0000\u0000\u0201\u0202\u0005\u0001\u0000"+ - "\u0000\u0202\u0203\u0003\u0014\n\u0000\u0203\u0205\u0003j5\u0000\u0204"+ - "\u0206\u0003^/\u0000\u0205\u0204\u0001\u0000\u0000\u0000\u0205\u0206\u0001"+ - "\u0000\u0000\u0000\u0206Y\u0001\u0000\u0000\u0000\u0207\u0208\u0005\u0007"+ - "\u0000\u0000\u0208\u0209\u0003\u0014\n\u0000\u0209\u020a\u0003j5\u0000"+ - "\u020a[\u0001\u0000\u0000\u0000\u020b\u020c\u0005\n\u0000\u0000\u020c"+ - "\u020d\u0003:\u001d\u0000\u020d]\u0001\u0000\u0000\u0000\u020e\u0213\u0003"+ - "`0\u0000\u020f\u0210\u0005!\u0000\u0000\u0210\u0212\u0003`0\u0000\u0211"+ - "\u020f\u0001\u0000\u0000\u0000\u0212\u0215\u0001\u0000\u0000\u0000\u0213"+ - "\u0211\u0001\u0000\u0000\u0000\u0213\u0214\u0001\u0000\u0000\u0000\u0214"+ - "_\u0001\u0000\u0000\u0000\u0215\u0213\u0001\u0000\u0000\u0000\u0216\u0217"+ - "\u0003@ \u0000\u0217\u0218\u0005\u001f\u0000\u0000\u0218\u0219\u0003D"+ - "\"\u0000\u0219a\u0001\u0000\u0000\u0000\u021a\u021b\u0007\u0006\u0000"+ - "\u0000\u021bc\u0001\u0000\u0000\u0000\u021c\u021f\u0003f3\u0000\u021d"+ - "\u021f\u0003h4\u0000\u021e\u021c\u0001\u0000\u0000\u0000\u021e\u021d\u0001"+ - "\u0000\u0000\u0000\u021fe\u0001\u0000\u0000\u0000\u0220\u0222\u0007\u0000"+ - "\u0000\u0000\u0221\u0220\u0001\u0000\u0000\u0000\u0221\u0222\u0001\u0000"+ - "\u0000\u0000\u0222\u0223\u0001\u0000\u0000\u0000\u0223\u0224\u0005\u001b"+ - "\u0000\u0000\u0224g\u0001\u0000\u0000\u0000\u0225\u0227\u0007\u0000\u0000"+ - "\u0000\u0226\u0225\u0001\u0000\u0000\u0000\u0226\u0227\u0001\u0000\u0000"+ - "\u0000\u0227\u0228\u0001\u0000\u0000\u0000\u0228\u0229\u0005\u001a\u0000"+ - "\u0000\u0229i\u0001\u0000\u0000\u0000\u022a\u022b\u0005\u0019\u0000\u0000"+ - "\u022bk\u0001\u0000\u0000\u0000\u022c\u022d\u0007\u0007\u0000\u0000\u022d"+ - "m\u0001\u0000\u0000\u0000\u022e\u022f\u0005\u0005\u0000\u0000\u022f\u0230"+ - "\u0003p8\u0000\u0230o\u0001\u0000\u0000\u0000\u0231\u0232\u0005A\u0000"+ - 
"\u0000\u0232\u0233\u0003\u0002\u0001\u0000\u0233\u0234\u0005B\u0000\u0000"+ - "\u0234q\u0001\u0000\u0000\u0000\u0235\u0236\u0005\r\u0000\u0000\u0236"+ - "\u0237\u0005d\u0000\u0000\u0237s\u0001\u0000\u0000\u0000\u0238\u0239\u0005"+ - "\u0003\u0000\u0000\u0239\u023c\u0005Z\u0000\u0000\u023a\u023b\u0005X\u0000"+ - "\u0000\u023b\u023d\u0003<\u001e\u0000\u023c\u023a\u0001\u0000\u0000\u0000"+ - "\u023c\u023d\u0001\u0000\u0000\u0000\u023d\u0247\u0001\u0000\u0000\u0000"+ - "\u023e\u023f\u0005Y\u0000\u0000\u023f\u0244\u0003v;\u0000\u0240\u0241"+ - "\u0005!\u0000\u0000\u0241\u0243\u0003v;\u0000\u0242\u0240\u0001\u0000"+ - "\u0000\u0000\u0243\u0246\u0001\u0000\u0000\u0000\u0244\u0242\u0001\u0000"+ - "\u0000\u0000\u0244\u0245\u0001\u0000\u0000\u0000\u0245\u0248\u0001\u0000"+ - "\u0000\u0000\u0246\u0244\u0001\u0000\u0000\u0000\u0247\u023e\u0001\u0000"+ - "\u0000\u0000\u0247\u0248\u0001\u0000\u0000\u0000\u0248u\u0001\u0000\u0000"+ - "\u0000\u0249\u024a\u0003<\u001e\u0000\u024a\u024b\u0005\u001f\u0000\u0000"+ - "\u024b\u024d\u0001\u0000\u0000\u0000\u024c\u0249\u0001\u0000\u0000\u0000"+ - "\u024c\u024d\u0001\u0000\u0000\u0000\u024d\u024e\u0001\u0000\u0000\u0000"+ - "\u024e\u024f\u0003<\u001e\u0000\u024fw\u0001\u0000\u0000\u0000\u0250\u0251"+ - "\u0005\u0012\u0000\u0000\u0251\u0252\u0003$\u0012\u0000\u0252\u0253\u0005"+ - "X\u0000\u0000\u0253\u0254\u0003>\u001f\u0000\u0254y\u0001\u0000\u0000"+ - "\u0000\u0255\u0256\u0005\u0011\u0000\u0000\u0256\u0259\u00036\u001b\u0000"+ - "\u0257\u0258\u0005\u001c\u0000\u0000\u0258\u025a\u0003\u001e\u000f\u0000"+ - "\u0259\u0257\u0001\u0000\u0000\u0000\u0259\u025a\u0001\u0000\u0000\u0000"+ - "\u025a{\u0001\u0000\u0000\u0000;\u0087\u0090\u00a2\u00ae\u00b7\u00bf\u00c5"+ + "\u0001\u001c\u0001\u001c\u0003\u001c\u0186\b\u001c\u0001\u001d\u0001\u001d"+ + "\u0001\u001d\u0005\u001d\u018b\b\u001d\n\u001d\f\u001d\u018e\t\u001d\u0001"+ + "\u001e\u0001\u001e\u0001\u001e\u0005\u001e\u0193\b\u001e\n\u001e\f\u001e"+ + 
"\u0196\t\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u019b\b"+ + "\u001f\n\u001f\f\u001f\u019e\t\u001f\u0001 \u0001 \u0001!\u0001!\u0003"+ + "!\u01a4\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ + "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01b3\b\"\n\"\f\"\u01b6"+ + "\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01be\b\""+ + "\n\"\f\"\u01c1\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005"+ + "\"\u01c9\b\"\n\"\f\"\u01cc\t\"\u0001\"\u0001\"\u0003\"\u01d0\b\"\u0001"+ + "#\u0001#\u0003#\u01d4\b#\u0001$\u0001$\u0003$\u01d8\b$\u0001%\u0001%\u0001"+ + "%\u0001&\u0001&\u0001&\u0001&\u0005&\u01e1\b&\n&\f&\u01e4\t&\u0001\'\u0001"+ + "\'\u0003\'\u01e8\b\'\u0001\'\u0001\'\u0003\'\u01ec\b\'\u0001(\u0001(\u0001"+ + "(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001*\u0005*\u01f8\b*\n*"+ + "\f*\u01fb\t*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001,\u0003"+ + ",\u0205\b,\u0001-\u0001-\u0001-\u0001-\u0001.\u0001.\u0001.\u0001/\u0001"+ + "/\u0001/\u0005/\u0211\b/\n/\f/\u0214\t/\u00010\u00010\u00010\u00010\u0001"+ + "1\u00011\u00012\u00012\u00032\u021e\b2\u00013\u00033\u0221\b3\u00013\u0001"+ + "3\u00014\u00034\u0226\b4\u00014\u00014\u00015\u00015\u00016\u00016\u0001"+ + "7\u00017\u00017\u00018\u00018\u00018\u00018\u00019\u00019\u00019\u0001"+ + ":\u0001:\u0001:\u0001:\u0003:\u023c\b:\u0001:\u0001:\u0001:\u0001:\u0005"+ + ":\u0242\b:\n:\f:\u0245\t:\u0003:\u0247\b:\u0001;\u0001;\u0001;\u0003;"+ + "\u024c\b;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001=\u0001"+ + "=\u0001=\u0001=\u0003=\u0259\b=\u0001=\u0000\u0004\u0002\n\u0012\u0014"+ + ">\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a"+ + "\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfhjlnprtvxz\u0000\b\u0001"+ + "\u0000:;\u0001\u0000<>\u0002\u0000\u0019\u0019LL\u0001\u0000CD\u0002\u0000"+ + "\u001e\u001e\"\"\u0002\u0000%%((\u0002\u0000$$22\u0002\u00003359\u0275"+ + 
"\u0000|\u0001\u0000\u0000\u0000\u0002\u007f\u0001\u0000\u0000\u0000\u0004"+ + "\u0090\u0001\u0000\u0000\u0000\u0006\u00a2\u0001\u0000\u0000\u0000\b\u00a4"+ + "\u0001\u0000\u0000\u0000\n\u00c5\u0001\u0000\u0000\u0000\f\u00e0\u0001"+ + "\u0000\u0000\u0000\u000e\u00e2\u0001\u0000\u0000\u0000\u0010\u00eb\u0001"+ + "\u0000\u0000\u0000\u0012\u00f1\u0001\u0000\u0000\u0000\u0014\u0106\u0001"+ + "\u0000\u0000\u0000\u0016\u0110\u0001\u0000\u0000\u0000\u0018\u0121\u0001"+ + "\u0000\u0000\u0000\u001a\u0123\u0001\u0000\u0000\u0000\u001c\u0125\u0001"+ + "\u0000\u0000\u0000\u001e\u0128\u0001\u0000\u0000\u0000 \u0133\u0001\u0000"+ + "\u0000\u0000\"\u0137\u0001\u0000\u0000\u0000$\u0146\u0001\u0000\u0000"+ + "\u0000&\u014a\u0001\u0000\u0000\u0000(\u014c\u0001\u0000\u0000\u0000*"+ + "\u0150\u0001\u0000\u0000\u0000,\u0152\u0001\u0000\u0000\u0000.\u015b\u0001"+ + "\u0000\u0000\u00000\u015f\u0001\u0000\u0000\u00002\u016f\u0001\u0000\u0000"+ + "\u00004\u0172\u0001\u0000\u0000\u00006\u017a\u0001\u0000\u0000\u00008"+ + "\u0182\u0001\u0000\u0000\u0000:\u0187\u0001\u0000\u0000\u0000<\u018f\u0001"+ + "\u0000\u0000\u0000>\u0197\u0001\u0000\u0000\u0000@\u019f\u0001\u0000\u0000"+ + "\u0000B\u01a3\u0001\u0000\u0000\u0000D\u01cf\u0001\u0000\u0000\u0000F"+ + "\u01d3\u0001\u0000\u0000\u0000H\u01d7\u0001\u0000\u0000\u0000J\u01d9\u0001"+ + "\u0000\u0000\u0000L\u01dc\u0001\u0000\u0000\u0000N\u01e5\u0001\u0000\u0000"+ + "\u0000P\u01ed\u0001\u0000\u0000\u0000R\u01f0\u0001\u0000\u0000\u0000T"+ + "\u01f3\u0001\u0000\u0000\u0000V\u01fc\u0001\u0000\u0000\u0000X\u0200\u0001"+ + "\u0000\u0000\u0000Z\u0206\u0001\u0000\u0000\u0000\\\u020a\u0001\u0000"+ + "\u0000\u0000^\u020d\u0001\u0000\u0000\u0000`\u0215\u0001\u0000\u0000\u0000"+ + "b\u0219\u0001\u0000\u0000\u0000d\u021d\u0001\u0000\u0000\u0000f\u0220"+ + "\u0001\u0000\u0000\u0000h\u0225\u0001\u0000\u0000\u0000j\u0229\u0001\u0000"+ + "\u0000\u0000l\u022b\u0001\u0000\u0000\u0000n\u022d\u0001\u0000\u0000\u0000"+ + 
"p\u0230\u0001\u0000\u0000\u0000r\u0234\u0001\u0000\u0000\u0000t\u0237"+ + "\u0001\u0000\u0000\u0000v\u024b\u0001\u0000\u0000\u0000x\u024f\u0001\u0000"+ + "\u0000\u0000z\u0254\u0001\u0000\u0000\u0000|}\u0003\u0002\u0001\u0000"+ + "}~\u0005\u0000\u0000\u0001~\u0001\u0001\u0000\u0000\u0000\u007f\u0080"+ + "\u0006\u0001\uffff\uffff\u0000\u0080\u0081\u0003\u0004\u0002\u0000\u0081"+ + "\u0087\u0001\u0000\u0000\u0000\u0082\u0083\n\u0001\u0000\u0000\u0083\u0084"+ + "\u0005\u0018\u0000\u0000\u0084\u0086\u0003\u0006\u0003\u0000\u0085\u0082"+ + "\u0001\u0000\u0000\u0000\u0086\u0089\u0001\u0000\u0000\u0000\u0087\u0085"+ + "\u0001\u0000\u0000\u0000\u0087\u0088\u0001\u0000\u0000\u0000\u0088\u0003"+ + "\u0001\u0000\u0000\u0000\u0089\u0087\u0001\u0000\u0000\u0000\u008a\u0091"+ + "\u0003n7\u0000\u008b\u0091\u0003\"\u0011\u0000\u008c\u0091\u0003\u001c"+ + "\u000e\u0000\u008d\u0091\u0003r9\u0000\u008e\u008f\u0004\u0002\u0001\u0000"+ + "\u008f\u0091\u00030\u0018\u0000\u0090\u008a\u0001\u0000\u0000\u0000\u0090"+ + "\u008b\u0001\u0000\u0000\u0000\u0090\u008c\u0001\u0000\u0000\u0000\u0090"+ + "\u008d\u0001\u0000\u0000\u0000\u0090\u008e\u0001\u0000\u0000\u0000\u0091"+ + "\u0005\u0001\u0000\u0000\u0000\u0092\u00a3\u00032\u0019\u0000\u0093\u00a3"+ + "\u0003\b\u0004\u0000\u0094\u00a3\u0003P(\u0000\u0095\u00a3\u0003J%\u0000"+ + "\u0096\u00a3\u00034\u001a\u0000\u0097\u00a3\u0003L&\u0000\u0098\u00a3"+ + "\u0003R)\u0000\u0099\u00a3\u0003T*\u0000\u009a\u00a3\u0003X,\u0000\u009b"+ + "\u00a3\u0003Z-\u0000\u009c\u00a3\u0003t:\u0000\u009d\u00a3\u0003\\.\u0000"+ + "\u009e\u009f\u0004\u0003\u0002\u0000\u009f\u00a3\u0003z=\u0000\u00a0\u00a1"+ + "\u0004\u0003\u0003\u0000\u00a1\u00a3\u0003x<\u0000\u00a2\u0092\u0001\u0000"+ + "\u0000\u0000\u00a2\u0093\u0001\u0000\u0000\u0000\u00a2\u0094\u0001\u0000"+ + "\u0000\u0000\u00a2\u0095\u0001\u0000\u0000\u0000\u00a2\u0096\u0001\u0000"+ + "\u0000\u0000\u00a2\u0097\u0001\u0000\u0000\u0000\u00a2\u0098\u0001\u0000"+ + 
"\u0000\u0000\u00a2\u0099\u0001\u0000\u0000\u0000\u00a2\u009a\u0001\u0000"+ + "\u0000\u0000\u00a2\u009b\u0001\u0000\u0000\u0000\u00a2\u009c\u0001\u0000"+ + "\u0000\u0000\u00a2\u009d\u0001\u0000\u0000\u0000\u00a2\u009e\u0001\u0000"+ + "\u0000\u0000\u00a2\u00a0\u0001\u0000\u0000\u0000\u00a3\u0007\u0001\u0000"+ + "\u0000\u0000\u00a4\u00a5\u0005\u0010\u0000\u0000\u00a5\u00a6\u0003\n\u0005"+ + "\u0000\u00a6\t\u0001\u0000\u0000\u0000\u00a7\u00a8\u0006\u0005\uffff\uffff"+ + "\u0000\u00a8\u00a9\u0005+\u0000\u0000\u00a9\u00c6\u0003\n\u0005\b\u00aa"+ + "\u00c6\u0003\u0010\b\u0000\u00ab\u00c6\u0003\f\u0006\u0000\u00ac\u00ae"+ + "\u0003\u0010\b\u0000\u00ad\u00af\u0005+\u0000\u0000\u00ae\u00ad\u0001"+ + "\u0000\u0000\u0000\u00ae\u00af\u0001\u0000\u0000\u0000\u00af\u00b0\u0001"+ + "\u0000\u0000\u0000\u00b0\u00b1\u0005&\u0000\u0000\u00b1\u00b2\u0005*\u0000"+ + "\u0000\u00b2\u00b7\u0003\u0010\b\u0000\u00b3\u00b4\u0005!\u0000\u0000"+ + "\u00b4\u00b6\u0003\u0010\b\u0000\u00b5\u00b3\u0001\u0000\u0000\u0000\u00b6"+ + "\u00b9\u0001\u0000\u0000\u0000\u00b7\u00b5\u0001\u0000\u0000\u0000\u00b7"+ + "\u00b8\u0001\u0000\u0000\u0000\u00b8\u00ba\u0001\u0000\u0000\u0000\u00b9"+ + "\u00b7\u0001\u0000\u0000\u0000\u00ba\u00bb\u00051\u0000\u0000\u00bb\u00c6"+ + "\u0001\u0000\u0000\u0000\u00bc\u00bd\u0003\u0010\b\u0000\u00bd\u00bf\u0005"+ + "\'\u0000\u0000\u00be\u00c0\u0005+\u0000\u0000\u00bf\u00be\u0001\u0000"+ + "\u0000\u0000\u00bf\u00c0\u0001\u0000\u0000\u0000\u00c0\u00c1\u0001\u0000"+ + "\u0000\u0000\u00c1\u00c2\u0005,\u0000\u0000\u00c2\u00c6\u0001\u0000\u0000"+ + "\u0000\u00c3\u00c4\u0004\u0005\u0004\u0000\u00c4\u00c6\u0003\u000e\u0007"+ + "\u0000\u00c5\u00a7\u0001\u0000\u0000\u0000\u00c5\u00aa\u0001\u0000\u0000"+ + "\u0000\u00c5\u00ab\u0001\u0000\u0000\u0000\u00c5\u00ac\u0001\u0000\u0000"+ + "\u0000\u00c5\u00bc\u0001\u0000\u0000\u0000\u00c5\u00c3\u0001\u0000\u0000"+ + "\u0000\u00c6\u00cf\u0001\u0000\u0000\u0000\u00c7\u00c8\n\u0005\u0000\u0000"+ + 
"\u00c8\u00c9\u0005\u001d\u0000\u0000\u00c9\u00ce\u0003\n\u0005\u0006\u00ca"+ + "\u00cb\n\u0004\u0000\u0000\u00cb\u00cc\u0005.\u0000\u0000\u00cc\u00ce"+ + "\u0003\n\u0005\u0005\u00cd\u00c7\u0001\u0000\u0000\u0000\u00cd\u00ca\u0001"+ + "\u0000\u0000\u0000\u00ce\u00d1\u0001\u0000\u0000\u0000\u00cf\u00cd\u0001"+ + "\u0000\u0000\u0000\u00cf\u00d0\u0001\u0000\u0000\u0000\u00d0\u000b\u0001"+ + "\u0000\u0000\u0000\u00d1\u00cf\u0001\u0000\u0000\u0000\u00d2\u00d4\u0003"+ + "\u0010\b\u0000\u00d3\u00d5\u0005+\u0000\u0000\u00d4\u00d3\u0001\u0000"+ + "\u0000\u0000\u00d4\u00d5\u0001\u0000\u0000\u0000\u00d5\u00d6\u0001\u0000"+ + "\u0000\u0000\u00d6\u00d7\u0005)\u0000\u0000\u00d7\u00d8\u0003j5\u0000"+ + "\u00d8\u00e1\u0001\u0000\u0000\u0000\u00d9\u00db\u0003\u0010\b\u0000\u00da"+ + "\u00dc\u0005+\u0000\u0000\u00db\u00da\u0001\u0000\u0000\u0000\u00db\u00dc"+ + "\u0001\u0000\u0000\u0000\u00dc\u00dd\u0001\u0000\u0000\u0000\u00dd\u00de"+ + "\u00050\u0000\u0000\u00de\u00df\u0003j5\u0000\u00df\u00e1\u0001\u0000"+ + "\u0000\u0000\u00e0\u00d2\u0001\u0000\u0000\u0000\u00e0\u00d9\u0001\u0000"+ + "\u0000\u0000\u00e1\r\u0001\u0000\u0000\u0000\u00e2\u00e3\u0003\u0010\b"+ + "\u0000\u00e3\u00e4\u0005?\u0000\u0000\u00e4\u00e5\u0003j5\u0000\u00e5"+ + "\u000f\u0001\u0000\u0000\u0000\u00e6\u00ec\u0003\u0012\t\u0000\u00e7\u00e8"+ + "\u0003\u0012\t\u0000\u00e8\u00e9\u0003l6\u0000\u00e9\u00ea\u0003\u0012"+ + "\t\u0000\u00ea\u00ec\u0001\u0000\u0000\u0000\u00eb\u00e6\u0001\u0000\u0000"+ + "\u0000\u00eb\u00e7\u0001\u0000\u0000\u0000\u00ec\u0011\u0001\u0000\u0000"+ + "\u0000\u00ed\u00ee\u0006\t\uffff\uffff\u0000\u00ee\u00f2\u0003\u0014\n"+ + "\u0000\u00ef\u00f0\u0007\u0000\u0000\u0000\u00f0\u00f2\u0003\u0012\t\u0003"+ + "\u00f1\u00ed\u0001\u0000\u0000\u0000\u00f1\u00ef\u0001\u0000\u0000\u0000"+ + "\u00f2\u00fb\u0001\u0000\u0000\u0000\u00f3\u00f4\n\u0002\u0000\u0000\u00f4"+ + "\u00f5\u0007\u0001\u0000\u0000\u00f5\u00fa\u0003\u0012\t\u0003\u00f6\u00f7"+ + 
"\n\u0001\u0000\u0000\u00f7\u00f8\u0007\u0000\u0000\u0000\u00f8\u00fa\u0003"+ + "\u0012\t\u0002\u00f9\u00f3\u0001\u0000\u0000\u0000\u00f9\u00f6\u0001\u0000"+ + "\u0000\u0000\u00fa\u00fd\u0001\u0000\u0000\u0000\u00fb\u00f9\u0001\u0000"+ + "\u0000\u0000\u00fb\u00fc\u0001\u0000\u0000\u0000\u00fc\u0013\u0001\u0000"+ + "\u0000\u0000\u00fd\u00fb\u0001\u0000\u0000\u0000\u00fe\u00ff\u0006\n\uffff"+ + "\uffff\u0000\u00ff\u0107\u0003D\"\u0000\u0100\u0107\u0003:\u001d\u0000"+ + "\u0101\u0107\u0003\u0016\u000b\u0000\u0102\u0103\u0005*\u0000\u0000\u0103"+ + "\u0104\u0003\n\u0005\u0000\u0104\u0105\u00051\u0000\u0000\u0105\u0107"+ + "\u0001\u0000\u0000\u0000\u0106\u00fe\u0001\u0000\u0000\u0000\u0106\u0100"+ + "\u0001\u0000\u0000\u0000\u0106\u0101\u0001\u0000\u0000\u0000\u0106\u0102"+ + "\u0001\u0000\u0000\u0000\u0107\u010d\u0001\u0000\u0000\u0000\u0108\u0109"+ + "\n\u0001\u0000\u0000\u0109\u010a\u0005 \u0000\u0000\u010a\u010c\u0003"+ + "\u001a\r\u0000\u010b\u0108\u0001\u0000\u0000\u0000\u010c\u010f\u0001\u0000"+ + "\u0000\u0000\u010d\u010b\u0001\u0000\u0000\u0000\u010d\u010e\u0001\u0000"+ + "\u0000\u0000\u010e\u0015\u0001\u0000\u0000\u0000\u010f\u010d\u0001\u0000"+ + "\u0000\u0000\u0110\u0111\u0003\u0018\f\u0000\u0111\u011b\u0005*\u0000"+ + "\u0000\u0112\u011c\u0005<\u0000\u0000\u0113\u0118\u0003\n\u0005\u0000"+ + "\u0114\u0115\u0005!\u0000\u0000\u0115\u0117\u0003\n\u0005\u0000\u0116"+ + "\u0114\u0001\u0000\u0000\u0000\u0117\u011a\u0001\u0000\u0000\u0000\u0118"+ + "\u0116\u0001\u0000\u0000\u0000\u0118\u0119\u0001\u0000\u0000\u0000\u0119"+ + "\u011c\u0001\u0000\u0000\u0000\u011a\u0118\u0001\u0000\u0000\u0000\u011b"+ + "\u0112\u0001\u0000\u0000\u0000\u011b\u0113\u0001\u0000\u0000\u0000\u011b"+ + "\u011c\u0001\u0000\u0000\u0000\u011c\u011d\u0001\u0000\u0000\u0000\u011d"+ + "\u011e\u00051\u0000\u0000\u011e\u0017\u0001\u0000\u0000\u0000\u011f\u0122"+ + "\u0005?\u0000\u0000\u0120\u0122\u0003H$\u0000\u0121\u011f\u0001\u0000"+ + 
"\u0000\u0000\u0121\u0120\u0001\u0000\u0000\u0000\u0122\u0019\u0001\u0000"+ + "\u0000\u0000\u0123\u0124\u0003@ \u0000\u0124\u001b\u0001\u0000\u0000\u0000"+ + "\u0125\u0126\u0005\f\u0000\u0000\u0126\u0127\u0003\u001e\u000f\u0000\u0127"+ + "\u001d\u0001\u0000\u0000\u0000\u0128\u012d\u0003 \u0010\u0000\u0129\u012a"+ + "\u0005!\u0000\u0000\u012a\u012c\u0003 \u0010\u0000\u012b\u0129\u0001\u0000"+ + "\u0000\u0000\u012c\u012f\u0001\u0000\u0000\u0000\u012d\u012b\u0001\u0000"+ + "\u0000\u0000\u012d\u012e\u0001\u0000\u0000\u0000\u012e\u001f\u0001\u0000"+ + "\u0000\u0000\u012f\u012d\u0001\u0000\u0000\u0000\u0130\u0131\u0003:\u001d"+ + "\u0000\u0131\u0132\u0005\u001f\u0000\u0000\u0132\u0134\u0001\u0000\u0000"+ + "\u0000\u0133\u0130\u0001\u0000\u0000\u0000\u0133\u0134\u0001\u0000\u0000"+ + "\u0000\u0134\u0135\u0001\u0000\u0000\u0000\u0135\u0136\u0003\n\u0005\u0000"+ + "\u0136!\u0001\u0000\u0000\u0000\u0137\u0138\u0005\u0006\u0000\u0000\u0138"+ + "\u013d\u0003$\u0012\u0000\u0139\u013a\u0005!\u0000\u0000\u013a\u013c\u0003"+ + "$\u0012\u0000\u013b\u0139\u0001\u0000\u0000\u0000\u013c\u013f\u0001\u0000"+ + "\u0000\u0000\u013d\u013b\u0001\u0000\u0000\u0000\u013d\u013e\u0001\u0000"+ + "\u0000\u0000\u013e\u0141\u0001\u0000\u0000\u0000\u013f\u013d\u0001\u0000"+ + "\u0000\u0000\u0140\u0142\u0003*\u0015\u0000\u0141\u0140\u0001\u0000\u0000"+ + "\u0000\u0141\u0142\u0001\u0000\u0000\u0000\u0142#\u0001\u0000\u0000\u0000"+ + "\u0143\u0144\u0003&\u0013\u0000\u0144\u0145\u0005h\u0000\u0000\u0145\u0147"+ + "\u0001\u0000\u0000\u0000\u0146\u0143\u0001\u0000\u0000\u0000\u0146\u0147"+ + "\u0001\u0000\u0000\u0000\u0147\u0148\u0001\u0000\u0000\u0000\u0148\u0149"+ + "\u0003(\u0014\u0000\u0149%\u0001\u0000\u0000\u0000\u014a\u014b\u0005L"+ + "\u0000\u0000\u014b\'\u0001\u0000\u0000\u0000\u014c\u014d\u0007\u0002\u0000"+ + "\u0000\u014d)\u0001\u0000\u0000\u0000\u014e\u0151\u0003,\u0016\u0000\u014f"+ + "\u0151\u0003.\u0017\u0000\u0150\u014e\u0001\u0000\u0000\u0000\u0150\u014f"+ + 
"\u0001\u0000\u0000\u0000\u0151+\u0001\u0000\u0000\u0000\u0152\u0153\u0005"+ + "K\u0000\u0000\u0153\u0158\u0005L\u0000\u0000\u0154\u0155\u0005!\u0000"+ + "\u0000\u0155\u0157\u0005L\u0000\u0000\u0156\u0154\u0001\u0000\u0000\u0000"+ + "\u0157\u015a\u0001\u0000\u0000\u0000\u0158\u0156\u0001\u0000\u0000\u0000"+ + "\u0158\u0159\u0001\u0000\u0000\u0000\u0159-\u0001\u0000\u0000\u0000\u015a"+ + "\u0158\u0001\u0000\u0000\u0000\u015b\u015c\u0005A\u0000\u0000\u015c\u015d"+ + "\u0003,\u0016\u0000\u015d\u015e\u0005B\u0000\u0000\u015e/\u0001\u0000"+ + "\u0000\u0000\u015f\u0160\u0005\u0013\u0000\u0000\u0160\u0165\u0003$\u0012"+ + "\u0000\u0161\u0162\u0005!\u0000\u0000\u0162\u0164\u0003$\u0012\u0000\u0163"+ + "\u0161\u0001\u0000\u0000\u0000\u0164\u0167\u0001\u0000\u0000\u0000\u0165"+ + "\u0163\u0001\u0000\u0000\u0000\u0165\u0166\u0001\u0000\u0000\u0000\u0166"+ + "\u0169\u0001\u0000\u0000\u0000\u0167\u0165\u0001\u0000\u0000\u0000\u0168"+ + "\u016a\u00036\u001b\u0000\u0169\u0168\u0001\u0000\u0000\u0000\u0169\u016a"+ + "\u0001\u0000\u0000\u0000\u016a\u016d\u0001\u0000\u0000\u0000\u016b\u016c"+ + "\u0005\u001c\u0000\u0000\u016c\u016e\u0003\u001e\u000f\u0000\u016d\u016b"+ + "\u0001\u0000\u0000\u0000\u016d\u016e\u0001\u0000\u0000\u0000\u016e1\u0001"+ + "\u0000\u0000\u0000\u016f\u0170\u0005\u0004\u0000\u0000\u0170\u0171\u0003"+ + "\u001e\u000f\u0000\u01713\u0001\u0000\u0000\u0000\u0172\u0174\u0005\u000f"+ + "\u0000\u0000\u0173\u0175\u00036\u001b\u0000\u0174\u0173\u0001\u0000\u0000"+ + "\u0000\u0174\u0175\u0001\u0000\u0000\u0000\u0175\u0178\u0001\u0000\u0000"+ + "\u0000\u0176\u0177\u0005\u001c\u0000\u0000\u0177\u0179\u0003\u001e\u000f"+ + "\u0000\u0178\u0176\u0001\u0000\u0000\u0000\u0178\u0179\u0001\u0000\u0000"+ + "\u0000\u01795\u0001\u0000\u0000\u0000\u017a\u017f\u00038\u001c\u0000\u017b"+ + "\u017c\u0005!\u0000\u0000\u017c\u017e\u00038\u001c\u0000\u017d\u017b\u0001"+ + "\u0000\u0000\u0000\u017e\u0181\u0001\u0000\u0000\u0000\u017f\u017d\u0001"+ + 
"\u0000\u0000\u0000\u017f\u0180\u0001\u0000\u0000\u0000\u01807\u0001\u0000"+ + "\u0000\u0000\u0181\u017f\u0001\u0000\u0000\u0000\u0182\u0185\u0003 \u0010"+ + "\u0000\u0183\u0184\u0005\u0010\u0000\u0000\u0184\u0186\u0003\n\u0005\u0000"+ + "\u0185\u0183\u0001\u0000\u0000\u0000\u0185\u0186\u0001\u0000\u0000\u0000"+ + "\u01869\u0001\u0000\u0000\u0000\u0187\u018c\u0003H$\u0000\u0188\u0189"+ + "\u0005#\u0000\u0000\u0189\u018b\u0003H$\u0000\u018a\u0188\u0001\u0000"+ + "\u0000\u0000\u018b\u018e\u0001\u0000\u0000\u0000\u018c\u018a\u0001\u0000"+ + "\u0000\u0000\u018c\u018d\u0001\u0000\u0000\u0000\u018d;\u0001\u0000\u0000"+ + "\u0000\u018e\u018c\u0001\u0000\u0000\u0000\u018f\u0194\u0003B!\u0000\u0190"+ + "\u0191\u0005#\u0000\u0000\u0191\u0193\u0003B!\u0000\u0192\u0190\u0001"+ + "\u0000\u0000\u0000\u0193\u0196\u0001\u0000\u0000\u0000\u0194\u0192\u0001"+ + "\u0000\u0000\u0000\u0194\u0195\u0001\u0000\u0000\u0000\u0195=\u0001\u0000"+ + "\u0000\u0000\u0196\u0194\u0001\u0000\u0000\u0000\u0197\u019c\u0003<\u001e"+ + "\u0000\u0198\u0199\u0005!\u0000\u0000\u0199\u019b\u0003<\u001e\u0000\u019a"+ + "\u0198\u0001\u0000\u0000\u0000\u019b\u019e\u0001\u0000\u0000\u0000\u019c"+ + "\u019a\u0001\u0000\u0000\u0000\u019c\u019d\u0001\u0000\u0000\u0000\u019d"+ + "?\u0001\u0000\u0000\u0000\u019e\u019c\u0001\u0000\u0000\u0000\u019f\u01a0"+ + "\u0007\u0003\u0000\u0000\u01a0A\u0001\u0000\u0000\u0000\u01a1\u01a4\u0005"+ + "P\u0000\u0000\u01a2\u01a4\u0003F#\u0000\u01a3\u01a1\u0001\u0000\u0000"+ + "\u0000\u01a3\u01a2\u0001\u0000\u0000\u0000\u01a4C\u0001\u0000\u0000\u0000"+ + "\u01a5\u01d0\u0005,\u0000\u0000\u01a6\u01a7\u0003h4\u0000\u01a7\u01a8"+ + "\u0005C\u0000\u0000\u01a8\u01d0\u0001\u0000\u0000\u0000\u01a9\u01d0\u0003"+ + "f3\u0000\u01aa\u01d0\u0003h4\u0000\u01ab\u01d0\u0003b1\u0000\u01ac\u01d0"+ + "\u0003F#\u0000\u01ad\u01d0\u0003j5\u0000\u01ae\u01af\u0005A\u0000\u0000"+ + "\u01af\u01b4\u0003d2\u0000\u01b0\u01b1\u0005!\u0000\u0000\u01b1\u01b3"+ + 
"\u0003d2\u0000\u01b2\u01b0\u0001\u0000\u0000\u0000\u01b3\u01b6\u0001\u0000"+ + "\u0000\u0000\u01b4\u01b2\u0001\u0000\u0000\u0000\u01b4\u01b5\u0001\u0000"+ + "\u0000\u0000\u01b5\u01b7\u0001\u0000\u0000\u0000\u01b6\u01b4\u0001\u0000"+ + "\u0000\u0000\u01b7\u01b8\u0005B\u0000\u0000\u01b8\u01d0\u0001\u0000\u0000"+ + "\u0000\u01b9\u01ba\u0005A\u0000\u0000\u01ba\u01bf\u0003b1\u0000\u01bb"+ + "\u01bc\u0005!\u0000\u0000\u01bc\u01be\u0003b1\u0000\u01bd\u01bb\u0001"+ + "\u0000\u0000\u0000\u01be\u01c1\u0001\u0000\u0000\u0000\u01bf\u01bd\u0001"+ + "\u0000\u0000\u0000\u01bf\u01c0\u0001\u0000\u0000\u0000\u01c0\u01c2\u0001"+ + "\u0000\u0000\u0000\u01c1\u01bf\u0001\u0000\u0000\u0000\u01c2\u01c3\u0005"+ + "B\u0000\u0000\u01c3\u01d0\u0001\u0000\u0000\u0000\u01c4\u01c5\u0005A\u0000"+ + "\u0000\u01c5\u01ca\u0003j5\u0000\u01c6\u01c7\u0005!\u0000\u0000\u01c7"+ + "\u01c9\u0003j5\u0000\u01c8\u01c6\u0001\u0000\u0000\u0000\u01c9\u01cc\u0001"+ + "\u0000\u0000\u0000\u01ca\u01c8\u0001\u0000\u0000\u0000\u01ca\u01cb\u0001"+ + "\u0000\u0000\u0000\u01cb\u01cd\u0001\u0000\u0000\u0000\u01cc\u01ca\u0001"+ + "\u0000\u0000\u0000\u01cd\u01ce\u0005B\u0000\u0000\u01ce\u01d0\u0001\u0000"+ + "\u0000\u0000\u01cf\u01a5\u0001\u0000\u0000\u0000\u01cf\u01a6\u0001\u0000"+ + "\u0000\u0000\u01cf\u01a9\u0001\u0000\u0000\u0000\u01cf\u01aa\u0001\u0000"+ + "\u0000\u0000\u01cf\u01ab\u0001\u0000\u0000\u0000\u01cf\u01ac\u0001\u0000"+ + "\u0000\u0000\u01cf\u01ad\u0001\u0000\u0000\u0000\u01cf\u01ae\u0001\u0000"+ + "\u0000\u0000\u01cf\u01b9\u0001\u0000\u0000\u0000\u01cf\u01c4\u0001\u0000"+ + "\u0000\u0000\u01d0E\u0001\u0000\u0000\u0000\u01d1\u01d4\u0005/\u0000\u0000"+ + "\u01d2\u01d4\u0005@\u0000\u0000\u01d3\u01d1\u0001\u0000\u0000\u0000\u01d3"+ + "\u01d2\u0001\u0000\u0000\u0000\u01d4G\u0001\u0000\u0000\u0000\u01d5\u01d8"+ + "\u0003@ \u0000\u01d6\u01d8\u0003F#\u0000\u01d7\u01d5\u0001\u0000\u0000"+ + "\u0000\u01d7\u01d6\u0001\u0000\u0000\u0000\u01d8I\u0001\u0000\u0000\u0000"+ + 
"\u01d9\u01da\u0005\t\u0000\u0000\u01da\u01db\u0005\u001a\u0000\u0000\u01db"+ + "K\u0001\u0000\u0000\u0000\u01dc\u01dd\u0005\u000e\u0000\u0000\u01dd\u01e2"+ + "\u0003N\'\u0000\u01de\u01df\u0005!\u0000\u0000\u01df\u01e1\u0003N\'\u0000"+ + "\u01e0\u01de\u0001\u0000\u0000\u0000\u01e1\u01e4\u0001\u0000\u0000\u0000"+ + "\u01e2\u01e0\u0001\u0000\u0000\u0000\u01e2\u01e3\u0001\u0000\u0000\u0000"+ + "\u01e3M\u0001\u0000\u0000\u0000\u01e4\u01e2\u0001\u0000\u0000\u0000\u01e5"+ + "\u01e7\u0003\n\u0005\u0000\u01e6\u01e8\u0007\u0004\u0000\u0000\u01e7\u01e6"+ + "\u0001\u0000\u0000\u0000\u01e7\u01e8\u0001\u0000\u0000\u0000\u01e8\u01eb"+ + "\u0001\u0000\u0000\u0000\u01e9\u01ea\u0005-\u0000\u0000\u01ea\u01ec\u0007"+ + "\u0005\u0000\u0000\u01eb\u01e9\u0001\u0000\u0000\u0000\u01eb\u01ec\u0001"+ + "\u0000\u0000\u0000\u01ecO\u0001\u0000\u0000\u0000\u01ed\u01ee\u0005\b"+ + "\u0000\u0000\u01ee\u01ef\u0003>\u001f\u0000\u01efQ\u0001\u0000\u0000\u0000"+ + "\u01f0\u01f1\u0005\u0002\u0000\u0000\u01f1\u01f2\u0003>\u001f\u0000\u01f2"+ + "S\u0001\u0000\u0000\u0000\u01f3\u01f4\u0005\u000b\u0000\u0000\u01f4\u01f9"+ + "\u0003V+\u0000\u01f5\u01f6\u0005!\u0000\u0000\u01f6\u01f8\u0003V+\u0000"+ + "\u01f7\u01f5\u0001\u0000\u0000\u0000\u01f8\u01fb\u0001\u0000\u0000\u0000"+ + "\u01f9\u01f7\u0001\u0000\u0000\u0000\u01f9\u01fa\u0001\u0000\u0000\u0000"+ + "\u01faU\u0001\u0000\u0000\u0000\u01fb\u01f9\u0001\u0000\u0000\u0000\u01fc"+ + "\u01fd\u0003<\u001e\u0000\u01fd\u01fe\u0005T\u0000\u0000\u01fe\u01ff\u0003"+ + "<\u001e\u0000\u01ffW\u0001\u0000\u0000\u0000\u0200\u0201\u0005\u0001\u0000"+ + "\u0000\u0201\u0202\u0003\u0014\n\u0000\u0202\u0204\u0003j5\u0000\u0203"+ + "\u0205\u0003^/\u0000\u0204\u0203\u0001\u0000\u0000\u0000\u0204\u0205\u0001"+ + "\u0000\u0000\u0000\u0205Y\u0001\u0000\u0000\u0000\u0206\u0207\u0005\u0007"+ + "\u0000\u0000\u0207\u0208\u0003\u0014\n\u0000\u0208\u0209\u0003j5\u0000"+ + "\u0209[\u0001\u0000\u0000\u0000\u020a\u020b\u0005\n\u0000\u0000\u020b"+ + 
"\u020c\u0003:\u001d\u0000\u020c]\u0001\u0000\u0000\u0000\u020d\u0212\u0003"+ + "`0\u0000\u020e\u020f\u0005!\u0000\u0000\u020f\u0211\u0003`0\u0000\u0210"+ + "\u020e\u0001\u0000\u0000\u0000\u0211\u0214\u0001\u0000\u0000\u0000\u0212"+ + "\u0210\u0001\u0000\u0000\u0000\u0212\u0213\u0001\u0000\u0000\u0000\u0213"+ + "_\u0001\u0000\u0000\u0000\u0214\u0212\u0001\u0000\u0000\u0000\u0215\u0216"+ + "\u0003@ \u0000\u0216\u0217\u0005\u001f\u0000\u0000\u0217\u0218\u0003D"+ + "\"\u0000\u0218a\u0001\u0000\u0000\u0000\u0219\u021a\u0007\u0006\u0000"+ + "\u0000\u021ac\u0001\u0000\u0000\u0000\u021b\u021e\u0003f3\u0000\u021c"+ + "\u021e\u0003h4\u0000\u021d\u021b\u0001\u0000\u0000\u0000\u021d\u021c\u0001"+ + "\u0000\u0000\u0000\u021ee\u0001\u0000\u0000\u0000\u021f\u0221\u0007\u0000"+ + "\u0000\u0000\u0220\u021f\u0001\u0000\u0000\u0000\u0220\u0221\u0001\u0000"+ + "\u0000\u0000\u0221\u0222\u0001\u0000\u0000\u0000\u0222\u0223\u0005\u001b"+ + "\u0000\u0000\u0223g\u0001\u0000\u0000\u0000\u0224\u0226\u0007\u0000\u0000"+ + "\u0000\u0225\u0224\u0001\u0000\u0000\u0000\u0225\u0226\u0001\u0000\u0000"+ + "\u0000\u0226\u0227\u0001\u0000\u0000\u0000\u0227\u0228\u0005\u001a\u0000"+ + "\u0000\u0228i\u0001\u0000\u0000\u0000\u0229\u022a\u0005\u0019\u0000\u0000"+ + "\u022ak\u0001\u0000\u0000\u0000\u022b\u022c\u0007\u0007\u0000\u0000\u022c"+ + "m\u0001\u0000\u0000\u0000\u022d\u022e\u0005\u0005\u0000\u0000\u022e\u022f"+ + "\u0003p8\u0000\u022fo\u0001\u0000\u0000\u0000\u0230\u0231\u0005A\u0000"+ + "\u0000\u0231\u0232\u0003\u0002\u0001\u0000\u0232\u0233\u0005B\u0000\u0000"+ + "\u0233q\u0001\u0000\u0000\u0000\u0234\u0235\u0005\r\u0000\u0000\u0235"+ + "\u0236\u0005d\u0000\u0000\u0236s\u0001\u0000\u0000\u0000\u0237\u0238\u0005"+ + "\u0003\u0000\u0000\u0238\u023b\u0005Z\u0000\u0000\u0239\u023a\u0005X\u0000"+ + "\u0000\u023a\u023c\u0003<\u001e\u0000\u023b\u0239\u0001\u0000\u0000\u0000"+ + "\u023b\u023c\u0001\u0000\u0000\u0000\u023c\u0246\u0001\u0000\u0000\u0000"+ + 
"\u023d\u023e\u0005Y\u0000\u0000\u023e\u0243\u0003v;\u0000\u023f\u0240"+ + "\u0005!\u0000\u0000\u0240\u0242\u0003v;\u0000\u0241\u023f\u0001\u0000"+ + "\u0000\u0000\u0242\u0245\u0001\u0000\u0000\u0000\u0243\u0241\u0001\u0000"+ + "\u0000\u0000\u0243\u0244\u0001\u0000\u0000\u0000\u0244\u0247\u0001\u0000"+ + "\u0000\u0000\u0245\u0243\u0001\u0000\u0000\u0000\u0246\u023d\u0001\u0000"+ + "\u0000\u0000\u0246\u0247\u0001\u0000\u0000\u0000\u0247u\u0001\u0000\u0000"+ + "\u0000\u0248\u0249\u0003<\u001e\u0000\u0249\u024a\u0005\u001f\u0000\u0000"+ + "\u024a\u024c\u0001\u0000\u0000\u0000\u024b\u0248\u0001\u0000\u0000\u0000"+ + "\u024b\u024c\u0001\u0000\u0000\u0000\u024c\u024d\u0001\u0000\u0000\u0000"+ + "\u024d\u024e\u0003<\u001e\u0000\u024ew\u0001\u0000\u0000\u0000\u024f\u0250"+ + "\u0005\u0012\u0000\u0000\u0250\u0251\u0003$\u0012\u0000\u0251\u0252\u0005"+ + "X\u0000\u0000\u0252\u0253\u0003>\u001f\u0000\u0253y\u0001\u0000\u0000"+ + "\u0000\u0254\u0255\u0005\u0011\u0000\u0000\u0255\u0258\u00036\u001b\u0000"+ + "\u0256\u0257\u0005\u001c\u0000\u0000\u0257\u0259\u0003\u001e\u000f\u0000"+ + "\u0258\u0256\u0001\u0000\u0000\u0000\u0258\u0259\u0001\u0000\u0000\u0000"+ + "\u0259{\u0001\u0000\u0000\u0000;\u0087\u0090\u00a2\u00ae\u00b7\u00bf\u00c5"+ "\u00cd\u00cf\u00d4\u00db\u00e0\u00eb\u00f1\u00f9\u00fb\u0106\u010d\u0118"+ "\u011b\u0121\u012d\u0133\u013d\u0141\u0146\u0150\u0158\u0165\u0169\u016d"+ - "\u0174\u0178\u017f\u0186\u018d\u0195\u019d\u01a4\u01b5\u01c0\u01cb\u01d0"+ - "\u01d4\u01d8\u01e3\u01e8\u01ec\u01fa\u0205\u0213\u021e\u0221\u0226\u023c"+ - "\u0244\u0247\u024c\u0259"; + "\u0174\u0178\u017f\u0185\u018c\u0194\u019c\u01a3\u01b4\u01bf\u01ca\u01cf"+ + "\u01d3\u01d7\u01e2\u01e7\u01eb\u01f9\u0204\u0212\u021d\u0220\u0225\u023b"+ + "\u0243\u0246\u024b\u0258"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { From 630af382f134a2346e06a9d4337f22150207a46e Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 15 Oct 2024 19:11:23 +0100 
Subject: [PATCH 119/449] [ML] Create an ml node inference endpoint referencing an existing deployment (#114750) --- docs/changelog/114750.yaml | 5 + .../org/elasticsearch/TransportVersions.java | 1 + .../inference/InferenceService.java | 4 +- .../inference/CreateFromDeploymentIT.java | 161 ++++++++++++++++++ .../xpack/inference/CustomElandModelIT.java | 33 +++- .../inference/InferenceBaseRestTest.java | 2 +- ...ransportDeleteInferenceEndpointAction.java | 5 +- .../BaseElasticsearchInternalService.java | 37 +++- .../elasticsearch/ElasticDeployedModel.java | 45 +++++ .../ElasticsearchInternalModel.java | 8 + .../ElasticsearchInternalService.java | 140 +++++++++++++-- .../ElasticsearchInternalServiceSettings.java | 50 +++++- ...ticsearchInternalServiceSettingsTests.java | 20 ++- .../ElasticsearchInternalServiceTests.java | 34 ++-- 14 files changed, 482 insertions(+), 63 deletions(-) create mode 100644 docs/changelog/114750.yaml create mode 100644 x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticDeployedModel.java diff --git a/docs/changelog/114750.yaml b/docs/changelog/114750.yaml new file mode 100644 index 0000000000000..f7a3c8c283934 --- /dev/null +++ b/docs/changelog/114750.yaml @@ -0,0 +1,5 @@ +pr: 114750 +summary: Create an ml node inference endpoint referencing an existing model +area: Machine Learning +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 3cb4695e867df..4038d5a224850 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -244,6 +244,7 @@ static TransportVersion def(int id) { public static final TransportVersion 
OPT_IN_ESQL_CCS_EXECUTION_INFO = def(8_768_00_0); public static final TransportVersion QUERY_RULE_TEST_API = def(8_769_00_0); public static final TransportVersion ESQL_PER_AGGREGATE_FILTER = def(8_770_00_0); + public static final TransportVersion ML_INFERENCE_ATTACH_TO_EXISTSING_DEPLOYMENT = def(8_771_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index 835262ff28edc..2c99563955746 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -129,10 +129,10 @@ void chunkedInfer( /** * Stop the model deployment. * The default action does nothing except acknowledge the request (true). - * @param modelId The ID of the model to be stopped + * @param unparsedModel The unparsed model configuration * @param listener The listener */ - default void stop(String modelId, ActionListener listener) { + default void stop(UnparsedModel unparsedModel, ActionListener listener) { listener.onResponse(true); } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java new file mode 100644 index 0000000000000..f81ebc25dc860 --- /dev/null +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java @@ -0,0 +1,161 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.core.Strings; +import org.elasticsearch.inference.TaskType; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; + +public class CreateFromDeploymentIT extends InferenceBaseRestTest { + + @SuppressWarnings("unchecked") + public void testAttachToDeployment() throws IOException { + var modelId = "attach_to_deployment"; + var deploymentId = "existing_deployment"; + + CustomElandModelIT.createMlNodeTextExpansionModel(modelId, client()); + var response = startMlNodeDeploymemnt(modelId, deploymentId); + assertOkOrCreated(response); + + var inferenceId = "inference_on_existing_deployment"; + var putModel = putModel(inferenceId, endpointConfig(deploymentId), TaskType.SPARSE_EMBEDDING); + var serviceSettings = putModel.get("service_settings"); + assertThat( + putModel.toString(), + serviceSettings, + is(Map.of("num_allocations", 1, "num_threads", 1, "model_id", "attach_to_deployment", "deployment_id", "existing_deployment")) + ); + + var results = infer(inferenceId, List.of("washing machine")); + assertNotNull(results.get("sparse_embedding")); + + deleteModel(inferenceId); + // assert deployment not stopped + var stats = (List>) getTrainedModelStats(modelId).get("trained_model_stats"); + var deploymentStats = stats.get(0).get("deployment_stats"); + assertNotNull(stats.toString(), deploymentStats); + + stopMlNodeDeployment(deploymentId); + } + + public void testAttachWithModelId() throws IOException { + var modelId = "attach_with_model_id"; + var deploymentId = "existing_deployment_with_model_id"; + + CustomElandModelIT.createMlNodeTextExpansionModel(modelId, client()); + var response = startMlNodeDeploymemnt(modelId, deploymentId); + 
assertOkOrCreated(response); + + var inferenceId = "inference_on_existing_deployment"; + var putModel = putModel(inferenceId, endpointConfig(modelId, deploymentId), TaskType.SPARSE_EMBEDDING); + var serviceSettings = putModel.get("service_settings"); + assertThat( + putModel.toString(), + serviceSettings, + is( + Map.of( + "num_allocations", + 1, + "num_threads", + 1, + "model_id", + "attach_with_model_id", + "deployment_id", + "existing_deployment_with_model_id" + ) + ) + ); + + var results = infer(inferenceId, List.of("washing machine")); + assertNotNull(results.get("sparse_embedding")); + + stopMlNodeDeployment(deploymentId); + } + + public void testModelIdDoesNotMatch() throws IOException { + var modelId = "attach_with_model_id"; + var deploymentId = "existing_deployment_with_model_id"; + var aDifferentModelId = "not_the_same_as_the_one_used_in_the_deployment"; + + CustomElandModelIT.createMlNodeTextExpansionModel(modelId, client()); + var response = startMlNodeDeploymemnt(modelId, deploymentId); + assertOkOrCreated(response); + + var inferenceId = "inference_on_existing_deployment"; + var e = expectThrows( + ResponseException.class, + () -> putModel(inferenceId, endpointConfig(aDifferentModelId, deploymentId), TaskType.SPARSE_EMBEDDING) + ); + assertThat( + e.getMessage(), + containsString( + "Deployment [existing_deployment_with_model_id] uses model [attach_with_model_id] " + + "which does not match the model [not_the_same_as_the_one_used_in_the_deployment] in the request." 
+ ) + ); + } + + private String endpointConfig(String deploymentId) { + return Strings.format(""" + { + "service": "elasticsearch", + "service_settings": { + "deployment_id": "%s" + } + } + """, deploymentId); + } + + private String endpointConfig(String modelId, String deploymentId) { + return Strings.format(""" + { + "service": "elasticsearch", + "service_settings": { + "model_id": "%s", + "deployment_id": "%s" + } + } + """, modelId, deploymentId); + } + + private Response startMlNodeDeploymemnt(String modelId, String deploymentId) throws IOException { + String endPoint = "/_ml/trained_models/" + + modelId + + "/deployment/_start?timeout=10s&wait_for=started" + + "&threads_per_allocation=1" + + "&number_of_allocations=1"; + + if (deploymentId != null) { + endPoint = endPoint + "&deployment_id=" + deploymentId; + } + + Request request = new Request("POST", endPoint); + return client().performRequest(request); + } + + protected void stopMlNodeDeployment(String deploymentId) throws IOException { + String endpoint = "/_ml/trained_models/" + deploymentId + "/deployment/_stop"; + Request request = new Request("POST", endpoint); + request.addParameter("force", "true"); + client().performRequest(request); + } + + protected Map getTrainedModelStats(String modelId) throws IOException { + Request request = new Request("GET", "/_ml/trained_models/" + modelId + "/_stats"); + return entityAsMap(client().performRequest(request)); + } +} diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CustomElandModelIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CustomElandModelIT.java index c05d08fa33692..e6d959bafea3f 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CustomElandModelIT.java +++ 
b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CustomElandModelIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference; import org.elasticsearch.client.Request; +import org.elasticsearch.client.RestClient; import org.elasticsearch.core.Strings; import org.elasticsearch.inference.TaskType; @@ -65,11 +66,12 @@ public class CustomElandModelIT extends InferenceBaseRestTest { public void testSparse() throws IOException { String modelId = "custom-text-expansion-model"; - createTextExpansionModel(modelId); - putModelDefinition(modelId, BASE_64_ENCODED_MODEL, RAW_MODEL_SIZE); + createTextExpansionModel(modelId, client()); + putModelDefinition(modelId, BASE_64_ENCODED_MODEL, RAW_MODEL_SIZE, client()); putVocabulary( List.of("these", "are", "my", "words", "the", "washing", "machine", "is", "leaking", "octopus", "comforter", "smells"), - modelId + modelId, + client() ); var inferenceConfig = """ @@ -90,7 +92,7 @@ public void testSparse() throws IOException { assertNotNull(results.get("sparse_embedding")); } - protected void createTextExpansionModel(String modelId) throws IOException { + static void createTextExpansionModel(String modelId, RestClient client) throws IOException { // with_special_tokens: false for this test with limited vocab Request request = new Request("PUT", "/_ml/trained_models/" + modelId); request.setJsonEntity(""" @@ -107,10 +109,10 @@ protected void createTextExpansionModel(String modelId) throws IOException { } } }"""); - client().performRequest(request); + client.performRequest(request); } - protected void putVocabulary(List vocabulary, String modelId) throws IOException { + static void putVocabulary(List vocabulary, String modelId, RestClient client) throws IOException { List vocabularyWithPad = new ArrayList<>(); vocabularyWithPad.add("[PAD]"); vocabularyWithPad.add("[UNK]"); @@ -121,14 +123,27 @@ protected void putVocabulary(List vocabulary, String modelId) throws IOE 
request.setJsonEntity(Strings.format(""" { "vocabulary": [%s] } """, quotedWords)); - client().performRequest(request); + client.performRequest(request); } - protected void putModelDefinition(String modelId, String base64EncodedModel, long unencodedModelSize) throws IOException { + static void putModelDefinition(String modelId, String base64EncodedModel, long unencodedModelSize, RestClient client) + throws IOException { Request request = new Request("PUT", "_ml/trained_models/" + modelId + "/definition/0"); String body = Strings.format(""" {"total_definition_length":%s,"definition": "%s","total_parts": 1}""", unencodedModelSize, base64EncodedModel); request.setJsonEntity(body); - client().performRequest(request); + client.performRequest(request); } + + // Create the model including definition and vocab + static void createMlNodeTextExpansionModel(String modelId, RestClient client) throws IOException { + createTextExpansionModel(modelId, client); + putModelDefinition(modelId, BASE_64_ENCODED_MODEL, RAW_MODEL_SIZE, client); + putVocabulary( + List.of("these", "are", "my", "words", "the", "washing", "machine", "is", "leaking", "octopus", "comforter", "smells"), + modelId, + client + ); + } + } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java index 3ca6b45c2948e..74c1e2f0d3356 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java @@ -207,7 +207,7 @@ protected void putSemanticText(String endpointId, String searchEndpointId, Strin } protected Map putModel(String modelId, String modelConfig, TaskType taskType) 
throws IOException { - String endpoint = Strings.format("_inference/%s/%s", taskType, modelId); + String endpoint = Strings.format("_inference/%s/%s?error_trace", taskType, modelId); return putRequest(endpoint, modelConfig); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointAction.java index 829a6b6c67ff9..c1dbd8cfec9d5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointAction.java @@ -9,8 +9,6 @@ package org.elasticsearch.xpack.inference.action; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; @@ -47,7 +45,6 @@ public class TransportDeleteInferenceEndpointAction extends TransportMasterNodeA private final ModelRegistry modelRegistry; private final InferenceServiceRegistry serviceRegistry; - private static final Logger logger = LogManager.getLogger(TransportDeleteInferenceEndpointAction.class); private final Executor executor; @Inject @@ -118,7 +115,7 @@ private void doExecuteForked( var service = serviceRegistry.getService(unparsedModel.service()); if (service.isPresent()) { - service.get().stop(request.getInferenceEndpointId(), listener); + service.get().stop(unparsedModel, listener); } else { listener.onFailure( new ElasticsearchStatusException( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java index 98777e9722242..cd0c33082cb30 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java @@ -22,6 +22,7 @@ import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; @@ -98,6 +99,12 @@ public void start(Model model, ActionListener finalListener) { return; } + if (esModel.usesExistingDeployment()) { + // don't start a deployment + finalListener.onResponse(Boolean.TRUE); + return; + } + SubscribableListener.newForked(forkedListener -> { isBuiltinModelPut(model, forkedListener); }) .andThen((l, modelConfigExists) -> { if (modelConfigExists == false) { @@ -119,14 +126,28 @@ public void start(Model model, ActionListener finalListener) { } @Override - public void stop(String inferenceEntityId, ActionListener listener) { - var request = new StopTrainedModelDeploymentAction.Request(inferenceEntityId); - request.setForce(true); - client.execute( - StopTrainedModelDeploymentAction.INSTANCE, - request, - listener.delegateFailureAndWrap((delegatedResponseListener, response) -> delegatedResponseListener.onResponse(Boolean.TRUE)) - ); + public void stop(UnparsedModel unparsedModel, ActionListener listener) { + + var model = parsePersistedConfig(unparsedModel.inferenceEntityId(), unparsedModel.taskType(), unparsedModel.settings()); + if (model instanceof ElasticsearchInternalModel esModel) { + + var serviceSettings = 
esModel.getServiceSettings(); + if (serviceSettings.getDeploymentId() != null) { + // configured with an existing deployment so do not stop it + listener.onResponse(Boolean.TRUE); + return; + } + + var request = new StopTrainedModelDeploymentAction.Request(esModel.mlNodeDeploymentId()); + request.setForce(true); + client.execute( + StopTrainedModelDeploymentAction.INSTANCE, + request, + listener.delegateFailureAndWrap((delegatedResponseListener, response) -> delegatedResponseListener.onResponse(Boolean.TRUE)) + ); + } else { + listener.onFailure(notElasticsearchModelException(model)); + } } protected static IllegalStateException notElasticsearchModelException(Model model) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticDeployedModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticDeployedModel.java new file mode 100644 index 0000000000000..996ef6816025d --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticDeployedModel.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.elasticsearch; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.ChunkingSettings; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; +import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; + +public class ElasticDeployedModel extends ElasticsearchInternalModel { + public ElasticDeployedModel( + String inferenceEntityId, + TaskType taskType, + String service, + ElasticsearchInternalServiceSettings serviceSettings, + ChunkingSettings chunkingSettings + ) { + super(inferenceEntityId, taskType, service, serviceSettings, chunkingSettings); + } + + @Override + public boolean usesExistingDeployment() { + return true; + } + + @Override + public StartTrainedModelDeploymentAction.Request getStartTrainedModelDeploymentActionRequest() { + throw new IllegalStateException("cannot start model that uses an existing deployment"); + } + + @Override + public ActionListener getCreateTrainedModelAssignmentActionListener( + Model model, + ActionListener listener + ) { + throw new IllegalStateException("cannot start model that uses an existing deployment"); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java index f312790ded655..642f6f144abc0 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java @@ -82,6 +82,10 @@ public abstract ActionListener getC ActionListener listener ); + public boolean 
usesExistingDeployment() { + return internalServiceSettings.getDeploymentId() != null; + } + @Override public ElasticsearchInternalServiceSettings getServiceSettings() { return (ElasticsearchInternalServiceSettings) super.getServiceSettings(); @@ -100,4 +104,8 @@ public void updateNumAllocation(Integer numAllocations) { public String toString() { return Strings.toString(this.getConfigurations()); } + + public String mlNodeDeploymentId() { + return internalServiceSettings.getDeploymentId() == null ? getInferenceEntityId() : internalServiceSettings.getDeploymentId(); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 8f7b9b79c28d7..4546280b39fe2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -34,14 +34,20 @@ import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; import org.elasticsearch.xpack.core.ml.action.GetDeploymentStatsAction; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; +import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsStatsAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; +import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentStats; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import 
org.elasticsearch.xpack.core.ml.inference.trainedmodel.EmptyConfigUpdate; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextEmbeddingConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextEmbeddingConfigUpdate; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextExpansionConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextExpansionConfigUpdate; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextSimilarityConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextSimilarityConfigUpdate; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; @@ -54,6 +60,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.function.Consumer; import java.util.function.Function; @@ -137,7 +144,12 @@ public void parseRequestConfig( throwIfNotEmptyMap(config, name()); String modelId = (String) serviceSettingsMap.get(ElasticsearchInternalServiceSettings.MODEL_ID); - if (modelId == null) { + String deploymentId = (String) serviceSettingsMap.get(ElasticsearchInternalServiceSettings.DEPLOYMENT_ID); + if (deploymentId != null) { + validateAgainstDeployment(modelId, deploymentId, taskType, modelListener.delegateFailureAndWrap((l, settings) -> { + l.onResponse(new ElasticDeployedModel(inferenceEntityId, taskType, NAME, settings.build(), chunkingSettings)); + })); + } else if (modelId == null) { if (OLD_ELSER_SERVICE_NAME.equals(serviceName)) { // TODO complete deprecation of null model ID // throw new ValidationException().addValidationError("Error parsing request config, model id is missing"); @@ -220,6 +232,8 @@ private void customElandCase( + "]. You may need to load it into the cluster using eland." 
); } else { + throwIfUnsupportedTaskType(modelId, taskType, response.getResources().results().get(0).getInferenceConfig()); + var model = createCustomElandModel( inferenceEntityId, taskType, @@ -540,7 +554,7 @@ public void inferTextEmbedding( ActionListener listener ) { var request = buildInferenceRequest( - model.getConfigurations().getInferenceEntityId(), + model.mlNodeDeploymentId(), TextEmbeddingConfigUpdate.EMPTY_INSTANCE, inputs, inputType, @@ -565,13 +579,7 @@ public void inferSparseEmbedding( TimeValue timeout, ActionListener listener ) { - var request = buildInferenceRequest( - model.getConfigurations().getInferenceEntityId(), - TextExpansionConfigUpdate.EMPTY_UPDATE, - inputs, - inputType, - timeout - ); + var request = buildInferenceRequest(model.mlNodeDeploymentId(), TextExpansionConfigUpdate.EMPTY_UPDATE, inputs, inputType, timeout); ActionListener mlResultsListener = listener.delegateFailureAndWrap( (l, inferenceResult) -> l.onResponse(SparseEmbeddingResults.of(inferenceResult.getInferenceResults())) @@ -593,13 +601,7 @@ public void inferRerank( Map requestTaskSettings, ActionListener listener ) { - var request = buildInferenceRequest( - model.getConfigurations().getInferenceEntityId(), - new TextSimilarityConfigUpdate(query), - inputs, - inputType, - timeout - ); + var request = buildInferenceRequest(model.mlNodeDeploymentId(), new TextSimilarityConfigUpdate(query), inputs, inputType, timeout); var modelSettings = (CustomElandRerankTaskSettings) model.getTaskSettings(); var requestSettings = CustomElandRerankTaskSettings.fromMap(requestTaskSettings); @@ -668,7 +670,7 @@ public void chunkedInfer( for (var batch : batchedRequests) { var inferenceRequest = buildInferenceRequest( - model.getConfigurations().getInferenceEntityId(), + esModel.mlNodeDeploymentId(), EmptyConfigUpdate.INSTANCE, batch.batch().inputs(), inputType, @@ -895,4 +897,108 @@ static EmbeddingRequestChunker.EmbeddingType embeddingTypeFromTaskTypeAndSetting ); }; } + + private void 
validateAgainstDeployment( + String modelId, + String deploymentId, + TaskType taskType, + ActionListener listener + ) { + getDeployment(deploymentId, listener.delegateFailureAndWrap((l, response) -> { + if (response.isPresent()) { + if (modelId != null && modelId.equals(response.get().getModelId()) == false) { + listener.onFailure( + new ElasticsearchStatusException( + "Deployment [{}] uses model [{}] which does not match the model [{}] in the request.", + RestStatus.BAD_REQUEST, // TODO better message + deploymentId, + response.get().getModelId(), + modelId + ) + ); + return; + } + + var updatedSettings = new ElasticsearchInternalServiceSettings.Builder().setNumAllocations( + response.get().getNumberOfAllocations() + ) + .setNumThreads(response.get().getThreadsPerAllocation()) + .setAdaptiveAllocationsSettings(response.get().getAdaptiveAllocationsSettings()) + .setDeploymentId(deploymentId) + .setModelId(response.get().getModelId()); + + checkTaskTypeForMlNodeModel(response.get().getModelId(), taskType, l.delegateFailureAndWrap((l2, compatibleTaskType) -> { + l2.onResponse(updatedSettings); + })); + } + })); + } + + private void getDeployment(String deploymentId, ActionListener> listener) { + client.execute( + GetTrainedModelsStatsAction.INSTANCE, + new GetTrainedModelsStatsAction.Request(deploymentId), + listener.delegateFailureAndWrap((l, response) -> { + l.onResponse( + response.getResources() + .results() + .stream() + .filter(s -> s.getDeploymentStats() != null && s.getDeploymentStats().getDeploymentId().equals(deploymentId)) + .map(GetTrainedModelsStatsAction.Response.TrainedModelStats::getDeploymentStats) + .findFirst() + ); + }) + ); + } + + private void checkTaskTypeForMlNodeModel(String modelId, TaskType taskType, ActionListener listener) { + client.execute( + GetTrainedModelsAction.INSTANCE, + new GetTrainedModelsAction.Request(modelId), + listener.delegateFailureAndWrap((l, response) -> { + if (response.getResources().results().isEmpty()) { + 
l.onFailure(new IllegalStateException("this shouldn't happen")); + return; + } + + var inferenceConfig = response.getResources().results().get(0).getInferenceConfig(); + throwIfUnsupportedTaskType(modelId, taskType, inferenceConfig); + l.onResponse(Boolean.TRUE); + }) + ); + } + + static void throwIfUnsupportedTaskType(String modelId, TaskType taskType, InferenceConfig inferenceConfig) { + var deploymentTaskType = inferenceConfigToTaskType(inferenceConfig); + if (deploymentTaskType == null) { + throw new ElasticsearchStatusException( + "Deployed model [{}] has type [{}] which does not map to any supported task types", + RestStatus.BAD_REQUEST, + modelId, + inferenceConfig.getWriteableName() + ); + } + if (deploymentTaskType != taskType) { + throw new ElasticsearchStatusException( + "Deployed model [{}] with type [{}] does not match the requested task type [{}]", + RestStatus.BAD_REQUEST, + modelId, + inferenceConfig.getWriteableName(), + taskType + ); + } + + } + + static TaskType inferenceConfigToTaskType(InferenceConfig config) { + if (config instanceof TextExpansionConfig) { + return TaskType.SPARSE_EMBEDDING; + } else if (config instanceof TextEmbeddingConfig) { + return TaskType.TEXT_EMBEDDING; + } else if (config instanceof TextSimilarityConfig) { + return TaskType.RERANK; + } else { + return null; + } + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java index 68db964e86b10..5bd8d8cfc5c13 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java @@ -36,12 +36,14 @@ public class 
ElasticsearchInternalServiceSettings implements ServiceSettings { public static final String NUM_ALLOCATIONS = "num_allocations"; public static final String NUM_THREADS = "num_threads"; public static final String MODEL_ID = "model_id"; + public static final String DEPLOYMENT_ID = "deployment_id"; public static final String ADAPTIVE_ALLOCATIONS = "adaptive_allocations"; private final Integer numAllocations; private final int numThreads; private final String modelId; private final AdaptiveAllocationsSettings adaptiveAllocationsSettings; + private final String deploymentId; public static ElasticsearchInternalServiceSettings fromPersistedMap(Map map) { return fromRequestMap(map).build(); @@ -95,12 +97,15 @@ protected static ElasticsearchInternalServiceSettings.Builder fromMap( ); } + String deploymentId = extractOptionalString(map, DEPLOYMENT_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); + // if an error occurred while parsing, we'll set these to an invalid value, so we don't accidentally get a // null pointer when doing unboxing return new ElasticsearchInternalServiceSettings.Builder().setNumAllocations(numAllocations) .setNumThreads(Objects.requireNonNullElse(numThreads, FAILED_INT_PARSE_VALUE)) .setModelId(modelId) - .setAdaptiveAllocationsSettings(adaptiveAllocationsSettings); + .setAdaptiveAllocationsSettings(adaptiveAllocationsSettings) + .setDeploymentId(deploymentId); } public ElasticsearchInternalServiceSettings( @@ -113,6 +118,21 @@ public ElasticsearchInternalServiceSettings( this.numThreads = numThreads; this.modelId = Objects.requireNonNull(modelId); this.adaptiveAllocationsSettings = adaptiveAllocationsSettings; + this.deploymentId = null; + } + + public ElasticsearchInternalServiceSettings( + Integer numAllocations, + int numThreads, + String modelId, + AdaptiveAllocationsSettings adaptiveAllocationsSettings, + String deploymentId + ) { + this.numAllocations = numAllocations; + this.numThreads = numThreads; + this.modelId = 
Objects.requireNonNull(modelId); + this.adaptiveAllocationsSettings = adaptiveAllocationsSettings; + this.deploymentId = deploymentId; } protected ElasticsearchInternalServiceSettings(ElasticsearchInternalServiceSettings other) { @@ -120,6 +140,7 @@ protected ElasticsearchInternalServiceSettings(ElasticsearchInternalServiceSetti this.numThreads = other.numThreads; this.modelId = other.modelId; this.adaptiveAllocationsSettings = other.adaptiveAllocationsSettings; + this.deploymentId = other.deploymentId; } /** @@ -132,6 +153,7 @@ public ElasticsearchInternalServiceSettings(ElasticsearchInternalServiceSettings this.numThreads = other.numThreads; this.modelId = other.modelId; this.adaptiveAllocationsSettings = other.adaptiveAllocationsSettings; + this.deploymentId = other.deploymentId; } public ElasticsearchInternalServiceSettings(StreamInput in) throws IOException { @@ -145,6 +167,9 @@ public ElasticsearchInternalServiceSettings(StreamInput in) throws IOException { this.adaptiveAllocationsSettings = in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_ADAPTIVE_ALLOCATIONS) ? in.readOptionalWriteable(AdaptiveAllocationsSettings::new) : null; + this.deploymentId = in.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_ATTACH_TO_EXISTSING_DEPLOYMENT) + ? 
in.readOptionalString() + : null; } @Override @@ -159,6 +184,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_ADAPTIVE_ALLOCATIONS)) { out.writeOptionalWriteable(getAdaptiveAllocationsSettings()); } + if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_ATTACH_TO_EXISTSING_DEPLOYMENT)) { + out.writeOptionalString(deploymentId); + } } @Override @@ -182,6 +210,10 @@ public AdaptiveAllocationsSettings getAdaptiveAllocationsSettings() { return adaptiveAllocationsSettings; } + public String getDeploymentId() { + return deploymentId; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -199,6 +231,9 @@ protected void addInternalSettingsToXContent(XContentBuilder builder, Params par if (adaptiveAllocationsSettings != null) { builder.field(ADAPTIVE_ALLOCATIONS, adaptiveAllocationsSettings); } + if (deploymentId != null) { + builder.field(DEPLOYMENT_ID, deploymentId); + } } @Override @@ -221,9 +256,10 @@ public static class Builder { private int numThreads; private String modelId; private AdaptiveAllocationsSettings adaptiveAllocationsSettings; + private String deploymentId; public ElasticsearchInternalServiceSettings build() { - return new ElasticsearchInternalServiceSettings(numAllocations, numThreads, modelId, adaptiveAllocationsSettings); + return new ElasticsearchInternalServiceSettings(numAllocations, numThreads, modelId, adaptiveAllocationsSettings, deploymentId); } public Builder setNumAllocations(Integer numAllocations) { @@ -241,6 +277,11 @@ public Builder setModelId(String modelId) { return this; } + public Builder setDeploymentId(String deploymentId) { + this.deploymentId = deploymentId; + return this; + } + public Builder setAdaptiveAllocationsSettings(AdaptiveAllocationsSettings adaptiveAllocationsSettings) { this.adaptiveAllocationsSettings = adaptiveAllocationsSettings; return 
this; @@ -270,11 +311,12 @@ public boolean equals(Object o) { return Objects.equals(numAllocations, that.numAllocations) && numThreads == that.numThreads && Objects.equals(modelId, that.modelId) - && Objects.equals(adaptiveAllocationsSettings, that.adaptiveAllocationsSettings); + && Objects.equals(adaptiveAllocationsSettings, that.adaptiveAllocationsSettings) + && Objects.equals(deploymentId, that.deploymentId); } @Override public int hashCode() { - return Objects.hash(numAllocations, numThreads, modelId, adaptiveAllocationsSettings); + return Objects.hash(numAllocations, numThreads, modelId, adaptiveAllocationsSettings, deploymentId); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettingsTests.java index 419db748d793d..0db0a7669c8aa 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettingsTests.java @@ -22,11 +22,18 @@ public class ElasticsearchInternalServiceSettingsTests extends AbstractWireSeria public static ElasticsearchInternalServiceSettings validInstance(String modelId) { boolean useAdaptive = randomBoolean(); + var deploymentId = randomBoolean() ? null : randomAlphaOfLength(5); if (useAdaptive) { var adaptive = new AdaptiveAllocationsSettings(true, 1, randomIntBetween(2, 8)); - return new ElasticsearchInternalServiceSettings(randomBoolean() ? 1 : null, randomIntBetween(1, 16), modelId, adaptive); + return new ElasticsearchInternalServiceSettings( + randomBoolean() ? 
1 : null, + randomIntBetween(1, 16), + modelId, + adaptive, + deploymentId + ); } else { - return new ElasticsearchInternalServiceSettings(randomIntBetween(1, 10), randomIntBetween(1, 16), modelId, null); + return new ElasticsearchInternalServiceSettings(randomIntBetween(1, 10), randomIntBetween(1, 16), modelId, null, deploymentId); } } @@ -48,7 +55,8 @@ protected ElasticsearchInternalServiceSettings mutateInstance(ElasticsearchInter instance.getNumAllocations() == null ? 1 : instance.getNumAllocations() + 1, instance.getNumThreads(), instance.modelId(), - instance.getAdaptiveAllocationsSettings() + instance.getAdaptiveAllocationsSettings(), + instance.getDeploymentId() ) ); case 1 -> new ElserInternalServiceSettings( @@ -56,7 +64,8 @@ protected ElasticsearchInternalServiceSettings mutateInstance(ElasticsearchInter instance.getNumAllocations(), instance.getNumThreads() + 1, instance.modelId(), - instance.getAdaptiveAllocationsSettings() + instance.getAdaptiveAllocationsSettings(), + instance.getDeploymentId() ) ); case 2 -> new ElserInternalServiceSettings( @@ -64,7 +73,8 @@ protected ElasticsearchInternalServiceSettings mutateInstance(ElasticsearchInter instance.getNumAllocations(), instance.getNumThreads(), instance.modelId() + "-bar", - instance.getAdaptiveAllocationsSettings() + instance.getAdaptiveAllocationsSettings(), + instance.getDeploymentId() ) ); default -> throw new IllegalStateException(); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index 860642a23fb2c..b82b8a08f2175 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -52,7 +52,10 @@ import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResultsTests; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResultsTests; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextEmbeddingConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextEmbeddingConfigUpdate; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextExpansionConfig; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextSimilarityConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate; import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests; @@ -564,9 +567,9 @@ public void testParseRequestConfig_Rerank() { var client = mock(Client.class); doAnswer(invocation -> { var listener = (ActionListener) invocation.getArguments()[2]; - listener.onResponse( - new GetTrainedModelsAction.Response(new QueryPage<>(List.of(mock(TrainedModelConfig.class)), 1, mock(ParseField.class))) - ); + var modelConfig = mock(TrainedModelConfig.class); + when(modelConfig.getInferenceConfig()).thenReturn(mock(TextSimilarityConfig.class)); + listener.onResponse(new GetTrainedModelsAction.Response(new QueryPage<>(List.of(modelConfig), 1, mock(ParseField.class)))); return null; }).when(client).execute(Mockito.same(GetTrainedModelsAction.INSTANCE), any(), any()); @@ -611,9 +614,9 @@ public void testParseRequestConfig_Rerank_DefaultTaskSettings() { var client = mock(Client.class); doAnswer(invocation -> { var listener = (ActionListener) invocation.getArguments()[2]; - listener.onResponse( - new GetTrainedModelsAction.Response(new QueryPage<>(List.of(mock(TrainedModelConfig.class)), 1, 
mock(ParseField.class))) - ); + var modelConfig = mock(TrainedModelConfig.class); + when(modelConfig.getInferenceConfig()).thenReturn(mock(TextSimilarityConfig.class)); + listener.onResponse(new GetTrainedModelsAction.Response(new QueryPage<>(List.of(modelConfig), 1, mock(ParseField.class)))); return null; }).when(client).execute(Mockito.same(GetTrainedModelsAction.INSTANCE), any(), any()); @@ -710,9 +713,9 @@ private void testParseRequestConfig_SparseEmbedding( var client = mock(Client.class); doAnswer(invocation -> { var listener = (ActionListener) invocation.getArguments()[2]; - listener.onResponse( - new GetTrainedModelsAction.Response(new QueryPage<>(List.of(mock(TrainedModelConfig.class)), 1, mock(ParseField.class))) - ); + var modelConfig = mock(TrainedModelConfig.class); + when(modelConfig.getInferenceConfig()).thenReturn(mock(TextExpansionConfig.class)); + listener.onResponse(new GetTrainedModelsAction.Response(new QueryPage<>(List.of(modelConfig), 1, mock(ParseField.class)))); return null; }).when(client).execute(Mockito.same(GetTrainedModelsAction.INSTANCE), any(), any()); @@ -1303,14 +1306,20 @@ public void testParsePersistedConfig_Rerank() { } public void testParseRequestConfigEland_PreservesTaskType() { + var taskType = randomFrom(EnumSet.of(TaskType.RERANK, TaskType.TEXT_EMBEDDING, TaskType.SPARSE_EMBEDDING)); + var modelConfig = mock(TrainedModelConfig.class); + switch (taskType) { + case RERANK -> when(modelConfig.getInferenceConfig()).thenReturn(mock(TextSimilarityConfig.class)); + case SPARSE_EMBEDDING -> when(modelConfig.getInferenceConfig()).thenReturn(mock(TextExpansionConfig.class)); + case TEXT_EMBEDDING -> when(modelConfig.getInferenceConfig()).thenReturn(mock(TextEmbeddingConfig.class)); + } + var client = mock(Client.class); doAnswer(invocationOnMock -> { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocationOnMock .getArguments()[2]; - listener.onResponse( - new GetTrainedModelsAction.Response(new 
QueryPage<>(List.of(mock(TrainedModelConfig.class)), 1, mock(ParseField.class))) - ); + listener.onResponse(new GetTrainedModelsAction.Response(new QueryPage<>(List.of(modelConfig), 1, mock(ParseField.class)))); return Void.TYPE; }).when(client).execute(eq(GetTrainedModelsAction.INSTANCE), any(), any()); when(client.threadPool()).thenReturn(threadPool); @@ -1331,7 +1340,6 @@ public void testParseRequestConfigEland_PreservesTaskType() { ) ); - var taskType = randomFrom(EnumSet.of(TaskType.RERANK, TaskType.TEXT_EMBEDDING, TaskType.SPARSE_EMBEDDING)); CustomElandModel expectedModel = getCustomElandModel(taskType); PlainActionFuture listener = new PlainActionFuture<>(); From 403f1e1472d042a7e0aaf9f7d56958b9ca0d6115 Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Tue, 15 Oct 2024 20:33:08 +0200 Subject: [PATCH 120/449] Support multi-valued fields in compute engine for ST_DISTANCE (#114836) In #112063 we added support for multivalued fields to the compute engine for ST_INTERSECTS and relatives, but not for ST_DISTANCE. In #114729 it was discovered that, at least for the common case of a field and a constant, this support was not needed due to ST_DISTANCE being re-written to ST_INTERSECTS. However, for many other cases, like ST_DISTANCE used on the coordinator node, or between two fields, this lack of support would result in null values. This PR fixes those cases, making sure ST_DISTANCE uses the Block-Builder approach similar to what was done for ST_INTERSECTS et al. 
--- docs/changelog/114836.yaml | 6 + .../resources/mapping-multivalue_points.json | 6 + .../src/main/resources/multivalue_points.csv | 28 +-- .../main/resources/multivalue_points.csv-spec | 86 +++++++++ .../src/main/resources/spatial.csv-spec | 11 +- ...ianPointDocValuesAndConstantEvaluator.java | 67 +++---- ...esianPointDocValuesAndSourceEvaluator.java | 89 +++------- ...ceCartesianSourceAndConstantEvaluator.java | 73 +++----- ...anceCartesianSourceAndSourceEvaluator.java | 96 +++-------- ...GeoPointDocValuesAndConstantEvaluator.java | 67 +++---- ...ceGeoPointDocValuesAndSourceEvaluator.java | 92 +++------- ...DistanceGeoSourceAndConstantEvaluator.java | 73 +++----- ...StDistanceGeoSourceAndSourceEvaluator.java | 99 +++-------- .../xpack/esql/action/EsqlCapabilities.java | 5 + .../function/scalar/spatial/StDistance.java | 163 ++++++++++++++---- .../local/EnableSpatialDistancePushdown.java | 2 +- .../BinarySpatialFunctionTestCase.java | 11 +- 17 files changed, 455 insertions(+), 519 deletions(-) create mode 100644 docs/changelog/114836.yaml diff --git a/docs/changelog/114836.yaml b/docs/changelog/114836.yaml new file mode 100644 index 0000000000000..6f21d3bfb9327 --- /dev/null +++ b/docs/changelog/114836.yaml @@ -0,0 +1,6 @@ +pr: 114836 +summary: Support multi-valued fields in compute engine for ST_DISTANCE +area: ES|QL +type: enhancement +issues: + - 112910 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-multivalue_points.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-multivalue_points.json index 98a3794d977e2..cd572012c6b3a 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-multivalue_points.json +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-multivalue_points.json @@ -12,11 +12,17 @@ "centroid": { "type": "geo_point" }, + "lk": { + "type": "keyword" + }, "location": { "type": "geo_point" }, "subset": { "type": "geo_point" + }, + "disjoint": { + "type": "geo_point" } } } 
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/multivalue_points.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/multivalue_points.csv index c5d7c7f4ee305..efb0aaaa29a4b 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/multivalue_points.csv +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/multivalue_points.csv @@ -1,14 +1,14 @@ -id:l, intersects:boolean, within:boolean, centroid:geo_point, location:geo_point, subset:geo_point -0, true, true, "POINT(0.0 5.0)", ["POINT(5 5)", "POINT(-5 5)"], "POINT(5 5)" -1, true, true, "POINT(0.5 0.5)", ["POINT(0 1)","POINT(1 0)"], "POINT(0 1)" -2, true, true, "POINT(9.0 9.0)", "POINT(9 9)", "POINT(9 9)" -3, true, true, "POINT(0.0 0.0)", ["POINT(-9 -9)","POINT(9 9)"], "POINT(-9 -9)" -4, true, false, "POINT(10.0 10.0)", ["POINT(5 5)", "POINT(15 15)"], "POINT(5 5)" -5, true, false, "POINT(5.5 5.5)", ["POINT(0 0)","POINT(11 11)"], "POINT(0 0)" -6, true, false, "POINT(0.0 -5.0)", ["POINT(-9 -19)","POINT(9 9)"], "POINT(-9 -19)" -7, false, false, "POINT(10.0 10.0)", ["POINT(5 15)", "POINT(15 5)"], "POINT(5 15)" -8, false, false, "POINT(5.5 5.5)", ["POINT(0 11)","POINT(11 0)"], "POINT(0 11)" -9, false, false, "POINT(19.0 9.0)", "POINT(19 9)", "POINT(19 9)" -10, false, false, "POINT(5.0 -5.0)", ["POINT(-9 -19)","POINT(19 9)"], "POINT(-9 -19)" -11, true, false, "POINT(0.0 0.0)", ["POINT(0 0)", "POINT(55 55)", "POINT(-55 -55)"], ["POINT(0 0)", "POINT(55 55)"] -12, true, false, "POINT(0.0 0.0)", ["POINT(0 0)", "POINT(55 55)", "POINT(-55 -55)"], ["POINT(0 0)", "POINT(55 54)"] +id:l, intersects:boolean, within:boolean, centroid:geo_point, lk:keyword, location:geo_point, subset:geo_point, disjoint:geo_point +0, true, true, "POINT(0.0 5.0)", ["POINT(5 5)", "POINT(-5 5)"], ["POINT(5 5)", "POINT(-5 5)"], "POINT(5 5)", ["POINT(55 55)", "POINT(65 65)"] +1, true, true, "POINT(0.5 0.5)", ["POINT(0 1)","POINT(1 0)"], ["POINT(0 1)","POINT(1 0)"], "POINT(0 1)", ["POINT(55 55)", "POINT(65 65)"] 
+2, true, true, "POINT(9.0 9.0)", "POINT(9 9)", "POINT(9 9)", "POINT(9 9)", ["POINT(55 55)", "POINT(65 65)"] +3, true, true, "POINT(0.0 0.0)", ["POINT(-9 -9)","POINT(9 9)"], ["POINT(-9 -9)","POINT(9 9)"], "POINT(-9 -9)", ["POINT(55 55)", "POINT(65 65)"] +4, true, false, "POINT(10.0 10.0)", ["POINT(5 5)", "POINT(15 15)"], ["POINT(5 5)", "POINT(15 15)"], "POINT(5 5)", ["POINT(55 55)", "POINT(65 65)"] +5, true, false, "POINT(5.5 5.5)", ["POINT(0 0)","POINT(11 11)"], ["POINT(0 0)","POINT(11 11)"], "POINT(0 0)", ["POINT(55 55)", "POINT(65 65)"] +6, true, false, "POINT(0.0 -5.0)", ["POINT(-9 -19)","POINT(9 9)"], ["POINT(-9 -19)","POINT(9 9)"], "POINT(-9 -19)", ["POINT(55 55)", "POINT(65 65)"] +7, false, false, "POINT(10.0 10.0)", ["POINT(5 15)", "POINT(15 5)"], ["POINT(5 15)", "POINT(15 5)"], "POINT(5 15)", ["POINT(55 55)", "POINT(65 65)"] +8, false, false, "POINT(5.5 5.5)", ["POINT(0 11)","POINT(11 0)"], ["POINT(0 11)","POINT(11 0)"], "POINT(0 11)", ["POINT(55 55)", "POINT(65 65)"] +9, false, false, "POINT(19.0 9.0)", "POINT(19 9)", "POINT(19 9)", "POINT(19 9)", ["POINT(55 55)", "POINT(65 65)"] +10, false, false, "POINT(5.0 -5.0)", ["POINT(-9 -19)","POINT(19 9)"], ["POINT(-9 -19)","POINT(19 9)"], "POINT(-9 -19)", ["POINT(55 55)", "POINT(65 65)"] +11, true, false, "POINT(0.0 0.0)", ["POINT(0 0)", "POINT(55 55)", "POINT(-55 -55)"], ["POINT(0 0)", "POINT(55 55)", "POINT(-55 -55)"], ["POINT(0 0)", "POINT(55 55)"], ["POINT(55 55)", "POINT(65 65)"] +12, true, false, "POINT(0.0 0.0)", ["POINT(0 0)", "POINT(55 55)", "POINT(-55 -55)"], ["POINT(0 0)", "POINT(55 55)", "POINT(-55 -55)"], ["POINT(0 0)", "POINT(55 54)"], ["POINT(55 55)", "POINT(65 65)"] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/multivalue_points.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/multivalue_points.csv-spec index 6a3521c558fa9..ed9434e9241fc 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/multivalue_points.csv-spec +++ 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/multivalue_points.csv-spec @@ -60,6 +60,92 @@ id:l | intersects:boolean | within:boolean | st_intersects:boolean | st_within:b 12 | true | false | true | false | false | "POINT(0.0 0.0)" | ["POINT(0 0)", "POINT(55 55)", "POINT(-55 -55)"] ; +spatialDistance +required_capability: st_distance +required_capability: spatial_distance_supports_multivalues + +FROM multivalue_points +| EVAL distance_origin=ST_DISTANCE(location, TO_GEOPOINT("POINT(0 0)")) +| EVAL distance_subset=ST_DISTANCE(location, subset) +| EVAL distance_disjoint=ST_DISTANCE(location, disjoint) +| EVAL distance_far=ST_DISTANCE(location, TO_GEOPOINT("POINT(55 55)")) +| KEEP id, distance_origin, distance_subset, distance_disjoint, distance_far, location, subset, disjoint +| SORT id +; + +id:l | distance_origin:d | distance_subset:d | distance_disjoint:d | distance_far:d | location:geo_point | subset:geo_point | disjoint:geo_point +0 | 785768.2986429982 | 0.0 | 7114305.127148048 | 7114305.127148048 | ["POINT(5 5)", "POINT(-5 5)"] | "POINT(5 5)" | ["POINT(55 55)", "POINT(65 65)"] +1 | 111195.07310665186 | 0.0 | 7775410.295475619 | 7775410.295475619 | ["POINT(0 1)","POINT(1 0)"] | "POINT(0 1)" | ["POINT(55 55)", "POINT(65 65)"] +2 | 1412359.654429245 | 0.0 | 6511042.96320646 | 6511042.96320646 | "POINT(9 9)" | "POINT(9 9)" | ["POINT(55 55)", "POINT(65 65)"] +3 | 1412359.654429245 | 0.0 | 6511042.96320646 | 6511042.96320646 | ["POINT(-9 -9)","POINT(9 9)"] | "POINT(-9 -9)" | ["POINT(55 55)", "POINT(65 65)"] +4 | 785768.2986429982 | 0.0 | 5612483.563947934 | 5612483.563947934 | ["POINT(5 5)", "POINT(15 15)"] | "POINT(5 5)" | ["POINT(55 55)", "POINT(65 65)"] +5 | 0.0 | 0.0 | 6210584.082742179 | 6210584.082742179 | ["POINT(0 0)","POINT(11 11)"] | "POINT(0 0)" | ["POINT(55 55)", "POINT(65 65)"] +6 | 1412359.654429245 | 0.0 | 6511042.96320646 | 6511042.96320646 | ["POINT(-9 -19)","POINT(9 9)"] | "POINT(-9 -19)" | ["POINT(55 55)", "POINT(65 65)"] +7 | 
1756128.8127267 | 0.0 | 6157935.487780502 | 6157935.487780502 | ["POINT(5 15)", "POINT(15 5)"] | "POINT(5 15)" | ["POINT(55 55)", "POINT(65 65)"] +8 | 1223145.8694147274 | 0.0 | 6823348.604540896 | 6823348.604540896 | ["POINT(0 11)","POINT(11 0)"] | "POINT(0 11)" | ["POINT(55 55)", "POINT(65 65)"] +9 | 2329806.5462053656 | 0.0 | 6014935.534393433 | 6014935.534393433 | "POINT(19 9)" | "POINT(19 9)" | ["POINT(55 55)", "POINT(65 65)"] +10 | 2329806.5462053656 | 0.0 | 6014935.534393433 | 6014935.534393433 | ["POINT(-9 -19)","POINT(19 9)"] | "POINT(-9 -19)" | ["POINT(55 55)", "POINT(65 65)"] +11 | 0.0 | 0.0 | 0.0 | 0.0 | ["POINT(0 0)", "POINT(55 55)", "POINT(-55 -55)"] | ["POINT(0 0)", "POINT(55 55)"] | ["POINT(55 55)", "POINT(65 65)"] +12 | 0.0 | 0.0 | 0.0 | 0.0 | ["POINT(0 0)", "POINT(55 55)", "POINT(-55 -55)"] | ["POINT(0 0)", "POINT(55 54)"] | ["POINT(55 55)", "POINT(65 65)"] +; + +whereSpatialDistanceTwoFields +required_capability: st_distance +required_capability: spatial_distance_supports_multivalues + +FROM multivalue_points +| WHERE ST_DISTANCE(location, disjoint) < 6200000 +| KEEP id, location +| SORT id +; + +id:l | location:geo_point +4 | ["POINT(5 5)", "POINT(15 15)"] +7 | ["POINT(5 15)", "POINT(15 5)"] +9 | "POINT(19 9)" +10 | ["POINT(-9 -19)","POINT(19 9)"] +11 | ["POINT(0 0)", "POINT(55 55)", "POINT(-55 -55)"] +12 | ["POINT(0 0)", "POINT(55 55)", "POINT(-55 -55)"] +; + +whereSpatialDistanceKeyword +required_capability: st_distance +required_capability: spatial_distance_supports_multivalues + +FROM multivalue_points +| EVAL lk=lk::geo_point +| WHERE ST_DISTANCE(lk, disjoint) < 6200000 +| KEEP id, location +| SORT id +; + +id:l | location:geo_point +4 | ["POINT(5 5)", "POINT(15 15)"] +7 | ["POINT(5 15)", "POINT(15 5)"] +9 | "POINT(19 9)" +10 | ["POINT(-9 -19)","POINT(19 9)"] +11 | ["POINT(0 0)", "POINT(55 55)", "POINT(-55 -55)"] +12 | ["POINT(0 0)", "POINT(55 55)", "POINT(-55 -55)"] +; + +spatialDistanceFilterTwoFieldStats +required_capability: st_distance 
+required_capability: spatial_distance_supports_multivalues + +FROM multivalue_points +| EVAL distance_disjoint=ST_DISTANCE(location, disjoint) +| WHERE distance_disjoint > 0 +| EVAL kkm = TO_INTEGER(distance_disjoint / 1000000) +| STATS c=COUNT(*) BY kkm +| SORT kkm ASC +; + +c:long | kkm:integer +5 | 6 +5 | 7 +1 | 8 +; + whereIntersectsPolygon required_capability: st_intersects diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index b72c8bcb05ae9..c5ca405005447 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -2222,17 +2222,19 @@ wkt:keyword | pt:cartesian_point | distance:double twoCitiesPointDistanceCartesian required_capability: st_distance +required_capability: spatial_distance_supports_multivalues ROW p1 = TO_CARTESIANPOINT("POINT(-90.82814 29.79511)"), p2 = TO_CARTESIANPOINT("POINT(-90.79731509999999 29.8835389)") | EVAL d = ST_DISTANCE(p1, p2) ; p1:cartesian_point | p2:cartesian_point | d:double -POINT (-90.82814 29.79511) | POINT (-90.79731509999999 29.8835389) | 0.09364744959271905 +POINT (-90.82814 29.79511) | POINT (-90.79731509999999 29.8835389) | 0.09364636296011444 ; airportCartesianCityLocationPointDistance required_capability: st_distance +required_capability: spatial_distance_supports_multivalues FROM airports_web | EVAL distance = ST_DISTANCE(location, TO_CARTESIANPOINT("POINT(1402900 7490000)")) @@ -2240,12 +2242,13 @@ FROM airports_web | STATS distance=AVG(distance), min=min(distance), max=max(distance), count=COUNT() ; -distance:double | min:double | max:double | count:long -676858.3463435044 | 7358.02077507206 | 971112.9731194031 | 12 +distance:double | min:double | max:double | count:long +676858.3629952326 | 7358.012830411482 | 971113.1663946278 | 12 ; airportCartesianDistanceToCityCopenhagen required_capability: 
st_distance +required_capability: spatial_distance_supports_multivalues // tag::st_distance-airports_web[] FROM airports_web @@ -2257,7 +2260,7 @@ FROM airports_web // tag::st_distance-airports_web-result[] abbrev:k | name:text | location:cartesian_point | distance:d -CPH | Copenhagen | POINT(1408119.2975413958 7484813.53657096) | 7358.02077507206 +CPH | Copenhagen | POINT(1408119.2975413958 7484813.53657096) | 7358.012830411482 // end::st_distance-airports_web-result[] ; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndConstantEvaluator.java index a3fc2fadbf227..4917b71464dce 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndConstantEvaluator.java @@ -10,7 +10,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; @@ -26,63 +25,42 @@ public final class StDistanceCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; - private final EvalOperator.ExpressionEvaluator leftValue; + private final EvalOperator.ExpressionEvaluator left; - private final Point rightValue; + private final Point right; private final DriverContext driverContext; private Warnings warnings; public 
StDistanceCartesianPointDocValuesAndConstantEvaluator(Source source, - EvalOperator.ExpressionEvaluator leftValue, Point rightValue, DriverContext driverContext) { + EvalOperator.ExpressionEvaluator left, Point right, DriverContext driverContext) { this.source = source; - this.leftValue = leftValue; - this.rightValue = rightValue; + this.left = left; + this.right = right; this.driverContext = driverContext; } @Override public Block eval(Page page) { - try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { - LongVector leftValueVector = leftValueBlock.asVector(); - if (leftValueVector == null) { - return eval(page.getPositionCount(), leftValueBlock); - } - return eval(page.getPositionCount(), leftValueVector); + try (LongBlock leftBlock = (LongBlock) left.eval(page)) { + return eval(page.getPositionCount(), leftBlock); } } - public DoubleBlock eval(int positionCount, LongBlock leftValueBlock) { + public DoubleBlock eval(int positionCount, LongBlock leftBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (leftValueBlock.isNull(p)) { - result.appendNull(); - continue position; + boolean allBlocksAreNulls = true; + if (!leftBlock.isNull(p)) { + allBlocksAreNulls = false; } - if (leftValueBlock.getValueCount(p) != 1) { - if (leftValueBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } + if (allBlocksAreNulls) { result.appendNull(); continue position; } try { - result.appendDouble(StDistance.processCartesianPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), this.rightValue)); - } catch (IllegalArgumentException e) { - warnings().registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - public DoubleBlock eval(int positionCount, LongVector leftValueVector) { - try(DoubleBlock.Builder 
result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { - position: for (int p = 0; p < positionCount; p++) { - try { - result.appendDouble(StDistance.processCartesianPointDocValuesAndConstant(leftValueVector.getLong(p), this.rightValue)); + StDistance.processCartesianPointDocValuesAndConstant(result, p, leftBlock, this.right); } catch (IllegalArgumentException e) { warnings().registerException(e); result.appendNull(); @@ -94,12 +72,12 @@ public DoubleBlock eval(int positionCount, LongVector leftValueVector) { @Override public String toString() { - return "StDistanceCartesianPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + return "StDistanceCartesianPointDocValuesAndConstantEvaluator[" + "left=" + left + ", right=" + right + "]"; } @Override public void close() { - Releasables.closeExpectNoException(leftValue); + Releasables.closeExpectNoException(left); } private Warnings warnings() { @@ -117,25 +95,24 @@ private Warnings warnings() { static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; - private final EvalOperator.ExpressionEvaluator.Factory leftValue; + private final EvalOperator.ExpressionEvaluator.Factory left; - private final Point rightValue; + private final Point right; - public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, - Point rightValue) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory left, Point right) { this.source = source; - this.leftValue = leftValue; - this.rightValue = rightValue; + this.left = left; + this.right = right; } @Override public StDistanceCartesianPointDocValuesAndConstantEvaluator get(DriverContext context) { - return new StDistanceCartesianPointDocValuesAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + return new StDistanceCartesianPointDocValuesAndConstantEvaluator(source, left.get(context), right, context); } @Override 
public String toString() { - return "StDistanceCartesianPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + return "StDistanceCartesianPointDocValuesAndConstantEvaluator[" + "left=" + left + ", right=" + right + "]"; } } } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndSourceEvaluator.java index 1da164436a2cb..03c8ba1a04ab6 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndSourceEvaluator.java @@ -4,17 +4,12 @@ // 2.0. 
package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; -import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; @@ -29,79 +24,47 @@ public final class StDistanceCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; - private final EvalOperator.ExpressionEvaluator leftValue; + private final EvalOperator.ExpressionEvaluator left; - private final EvalOperator.ExpressionEvaluator rightValue; + private final EvalOperator.ExpressionEvaluator right; private final DriverContext driverContext; private Warnings warnings; public StDistanceCartesianPointDocValuesAndSourceEvaluator(Source source, - EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + EvalOperator.ExpressionEvaluator left, EvalOperator.ExpressionEvaluator right, DriverContext driverContext) { this.source = source; - this.leftValue = leftValue; - this.rightValue = rightValue; + this.left = left; + this.right = right; this.driverContext = driverContext; } @Override public Block eval(Page page) { - try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { - try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { - LongVector leftValueVector = leftValueBlock.asVector(); - if (leftValueVector == null) { - return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); - } - 
BytesRefVector rightValueVector = rightValueBlock.asVector(); - if (rightValueVector == null) { - return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); - } - return eval(page.getPositionCount(), leftValueVector, rightValueVector).asBlock(); + try (LongBlock leftBlock = (LongBlock) left.eval(page)) { + try (BytesRefBlock rightBlock = (BytesRefBlock) right.eval(page)) { + return eval(page.getPositionCount(), leftBlock, rightBlock); } } } - public DoubleBlock eval(int positionCount, LongBlock leftValueBlock, - BytesRefBlock rightValueBlock) { + public DoubleBlock eval(int positionCount, LongBlock leftBlock, BytesRefBlock rightBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { - BytesRef rightValueScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (leftValueBlock.isNull(p)) { - result.appendNull(); - continue position; + boolean allBlocksAreNulls = true; + if (!leftBlock.isNull(p)) { + allBlocksAreNulls = false; } - if (leftValueBlock.getValueCount(p) != 1) { - if (leftValueBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; + if (!rightBlock.isNull(p)) { + allBlocksAreNulls = false; } - if (rightValueBlock.isNull(p)) { + if (allBlocksAreNulls) { result.appendNull(); continue position; } - if (rightValueBlock.getValueCount(p) != 1) { - if (rightValueBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - result.appendDouble(StDistance.processCartesianPointDocValuesAndSource(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); - } - return result.build(); - } - } - - public DoubleVector eval(int 
positionCount, LongVector leftValueVector, - BytesRefVector rightValueVector) { - try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { - BytesRef rightValueScratch = new BytesRef(); - position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(p, StDistance.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + StDistance.processCartesianPointDocValuesAndSource(result, p, leftBlock, rightBlock); } return result.build(); } @@ -109,12 +72,12 @@ public DoubleVector eval(int positionCount, LongVector leftValueVector, @Override public String toString() { - return "StDistanceCartesianPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + return "StDistanceCartesianPointDocValuesAndSourceEvaluator[" + "left=" + left + ", right=" + right + "]"; } @Override public void close() { - Releasables.closeExpectNoException(leftValue, rightValue); + Releasables.closeExpectNoException(left, right); } private Warnings warnings() { @@ -132,25 +95,25 @@ private Warnings warnings() { static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; - private final EvalOperator.ExpressionEvaluator.Factory leftValue; + private final EvalOperator.ExpressionEvaluator.Factory left; - private final EvalOperator.ExpressionEvaluator.Factory rightValue; + private final EvalOperator.ExpressionEvaluator.Factory right; - public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, - EvalOperator.ExpressionEvaluator.Factory rightValue) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory left, + EvalOperator.ExpressionEvaluator.Factory right) { this.source = source; - this.leftValue = leftValue; - this.rightValue = rightValue; + this.left = left; + this.right = right; } @Override public 
StDistanceCartesianPointDocValuesAndSourceEvaluator get(DriverContext context) { - return new StDistanceCartesianPointDocValuesAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + return new StDistanceCartesianPointDocValuesAndSourceEvaluator(source, left.get(context), right.get(context), context); } @Override public String toString() { - return "StDistanceCartesianPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + return "StDistanceCartesianPointDocValuesAndSourceEvaluator[" + "left=" + left + ", right=" + right + "]"; } } } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndConstantEvaluator.java index 61329ad7606d0..1085f71e95b73 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndConstantEvaluator.java @@ -4,14 +4,11 @@ // 2.0. 
package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; -import java.io.IOException; import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; @@ -28,66 +25,43 @@ public final class StDistanceCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; - private final EvalOperator.ExpressionEvaluator leftValue; + private final EvalOperator.ExpressionEvaluator left; - private final Point rightValue; + private final Point right; private final DriverContext driverContext; private Warnings warnings; public StDistanceCartesianSourceAndConstantEvaluator(Source source, - EvalOperator.ExpressionEvaluator leftValue, Point rightValue, DriverContext driverContext) { + EvalOperator.ExpressionEvaluator left, Point right, DriverContext driverContext) { this.source = source; - this.leftValue = leftValue; - this.rightValue = rightValue; + this.left = left; + this.right = right; this.driverContext = driverContext; } @Override public Block eval(Page page) { - try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { - BytesRefVector leftValueVector = leftValueBlock.asVector(); - if (leftValueVector == null) { - return eval(page.getPositionCount(), leftValueBlock); - } - return eval(page.getPositionCount(), leftValueVector); + try (BytesRefBlock leftBlock = (BytesRefBlock) left.eval(page)) { + return eval(page.getPositionCount(), leftBlock); } } - public DoubleBlock eval(int positionCount, BytesRefBlock leftValueBlock) { + public DoubleBlock eval(int positionCount, BytesRefBlock leftBlock) { try(DoubleBlock.Builder result = 
driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { - BytesRef leftValueScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (leftValueBlock.isNull(p)) { - result.appendNull(); - continue position; + boolean allBlocksAreNulls = true; + if (!leftBlock.isNull(p)) { + allBlocksAreNulls = false; } - if (leftValueBlock.getValueCount(p) != 1) { - if (leftValueBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } + if (allBlocksAreNulls) { result.appendNull(); continue position; } try { - result.appendDouble(StDistance.processCartesianSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), this.rightValue)); - } catch (IllegalArgumentException | IOException e) { - warnings().registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - public DoubleBlock eval(int positionCount, BytesRefVector leftValueVector) { - try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { - BytesRef leftValueScratch = new BytesRef(); - position: for (int p = 0; p < positionCount; p++) { - try { - result.appendDouble(StDistance.processCartesianSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), this.rightValue)); - } catch (IllegalArgumentException | IOException e) { + StDistance.processCartesianSourceAndConstant(result, p, leftBlock, this.right); + } catch (IllegalArgumentException e) { warnings().registerException(e); result.appendNull(); } @@ -98,12 +72,12 @@ public DoubleBlock eval(int positionCount, BytesRefVector leftValueVector) { @Override public String toString() { - return "StDistanceCartesianSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + return "StDistanceCartesianSourceAndConstantEvaluator[" + "left=" + left + ", right=" + right + "]"; } @Override public void 
close() { - Releasables.closeExpectNoException(leftValue); + Releasables.closeExpectNoException(left); } private Warnings warnings() { @@ -121,25 +95,24 @@ private Warnings warnings() { static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; - private final EvalOperator.ExpressionEvaluator.Factory leftValue; + private final EvalOperator.ExpressionEvaluator.Factory left; - private final Point rightValue; + private final Point right; - public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, - Point rightValue) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory left, Point right) { this.source = source; - this.leftValue = leftValue; - this.rightValue = rightValue; + this.left = left; + this.right = right; } @Override public StDistanceCartesianSourceAndConstantEvaluator get(DriverContext context) { - return new StDistanceCartesianSourceAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + return new StDistanceCartesianSourceAndConstantEvaluator(source, left.get(context), right, context); } @Override public String toString() { - return "StDistanceCartesianSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + return "StDistanceCartesianSourceAndConstantEvaluator[" + "left=" + left + ", right=" + right + "]"; } } } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndSourceEvaluator.java index c18c9a56fa77a..c8554a3041c89 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndSourceEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndSourceEvaluator.java @@ -4,14 +4,11 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; -import java.io.IOException; import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; @@ -27,88 +24,49 @@ public final class StDistanceCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; - private final EvalOperator.ExpressionEvaluator leftValue; + private final EvalOperator.ExpressionEvaluator left; - private final EvalOperator.ExpressionEvaluator rightValue; + private final EvalOperator.ExpressionEvaluator right; private final DriverContext driverContext; private Warnings warnings; public StDistanceCartesianSourceAndSourceEvaluator(Source source, - EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + EvalOperator.ExpressionEvaluator left, EvalOperator.ExpressionEvaluator right, DriverContext driverContext) { this.source = source; - this.leftValue = leftValue; - this.rightValue = rightValue; + this.left = left; + this.right = right; this.driverContext = driverContext; } @Override public Block eval(Page page) { - try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { - try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { - BytesRefVector leftValueVector = leftValueBlock.asVector(); - if (leftValueVector == null) { - return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); - } - 
BytesRefVector rightValueVector = rightValueBlock.asVector(); - if (rightValueVector == null) { - return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); - } - return eval(page.getPositionCount(), leftValueVector, rightValueVector); + try (BytesRefBlock leftBlock = (BytesRefBlock) left.eval(page)) { + try (BytesRefBlock rightBlock = (BytesRefBlock) right.eval(page)) { + return eval(page.getPositionCount(), leftBlock, rightBlock); } } } - public DoubleBlock eval(int positionCount, BytesRefBlock leftValueBlock, - BytesRefBlock rightValueBlock) { + public DoubleBlock eval(int positionCount, BytesRefBlock leftBlock, BytesRefBlock rightBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { - BytesRef leftValueScratch = new BytesRef(); - BytesRef rightValueScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (leftValueBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (leftValueBlock.getValueCount(p) != 1) { - if (leftValueBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; + boolean allBlocksAreNulls = true; + if (!leftBlock.isNull(p)) { + allBlocksAreNulls = false; } - if (rightValueBlock.isNull(p)) { - result.appendNull(); - continue position; + if (!rightBlock.isNull(p)) { + allBlocksAreNulls = false; } - if (rightValueBlock.getValueCount(p) != 1) { - if (rightValueBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } + if (allBlocksAreNulls) { result.appendNull(); continue position; } try { - result.appendDouble(StDistance.processCartesianSourceAndSource(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), 
rightValueScratch))); - } catch (IllegalArgumentException | IOException e) { - warnings().registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - public DoubleBlock eval(int positionCount, BytesRefVector leftValueVector, - BytesRefVector rightValueVector) { - try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { - BytesRef leftValueScratch = new BytesRef(); - BytesRef rightValueScratch = new BytesRef(); - position: for (int p = 0; p < positionCount; p++) { - try { - result.appendDouble(StDistance.processCartesianSourceAndSource(leftValueVector.getBytesRef(p, leftValueScratch), rightValueVector.getBytesRef(p, rightValueScratch))); - } catch (IllegalArgumentException | IOException e) { + StDistance.processCartesianSourceAndSource(result, p, leftBlock, rightBlock); + } catch (IllegalArgumentException e) { warnings().registerException(e); result.appendNull(); } @@ -119,12 +77,12 @@ public DoubleBlock eval(int positionCount, BytesRefVector leftValueVector, @Override public String toString() { - return "StDistanceCartesianSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + return "StDistanceCartesianSourceAndSourceEvaluator[" + "left=" + left + ", right=" + right + "]"; } @Override public void close() { - Releasables.closeExpectNoException(leftValue, rightValue); + Releasables.closeExpectNoException(left, right); } private Warnings warnings() { @@ -142,25 +100,25 @@ private Warnings warnings() { static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; - private final EvalOperator.ExpressionEvaluator.Factory leftValue; + private final EvalOperator.ExpressionEvaluator.Factory left; - private final EvalOperator.ExpressionEvaluator.Factory rightValue; + private final EvalOperator.ExpressionEvaluator.Factory right; - public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, - 
EvalOperator.ExpressionEvaluator.Factory rightValue) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory left, + EvalOperator.ExpressionEvaluator.Factory right) { this.source = source; - this.leftValue = leftValue; - this.rightValue = rightValue; + this.left = left; + this.right = right; } @Override public StDistanceCartesianSourceAndSourceEvaluator get(DriverContext context) { - return new StDistanceCartesianSourceAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + return new StDistanceCartesianSourceAndSourceEvaluator(source, left.get(context), right.get(context), context); } @Override public String toString() { - return "StDistanceCartesianSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + return "StDistanceCartesianSourceAndSourceEvaluator[" + "left=" + left + ", right=" + right + "]"; } } } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndConstantEvaluator.java index 2ac1ff6aeb0d8..8f37d3157fac6 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndConstantEvaluator.java @@ -10,7 +10,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; @@ -26,63 +25,42 @@ public 
final class StDistanceGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; - private final EvalOperator.ExpressionEvaluator leftValue; + private final EvalOperator.ExpressionEvaluator left; - private final Point rightValue; + private final Point right; private final DriverContext driverContext; private Warnings warnings; public StDistanceGeoPointDocValuesAndConstantEvaluator(Source source, - EvalOperator.ExpressionEvaluator leftValue, Point rightValue, DriverContext driverContext) { + EvalOperator.ExpressionEvaluator left, Point right, DriverContext driverContext) { this.source = source; - this.leftValue = leftValue; - this.rightValue = rightValue; + this.left = left; + this.right = right; this.driverContext = driverContext; } @Override public Block eval(Page page) { - try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { - LongVector leftValueVector = leftValueBlock.asVector(); - if (leftValueVector == null) { - return eval(page.getPositionCount(), leftValueBlock); - } - return eval(page.getPositionCount(), leftValueVector); + try (LongBlock leftBlock = (LongBlock) left.eval(page)) { + return eval(page.getPositionCount(), leftBlock); } } - public DoubleBlock eval(int positionCount, LongBlock leftValueBlock) { + public DoubleBlock eval(int positionCount, LongBlock leftBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (leftValueBlock.isNull(p)) { - result.appendNull(); - continue position; + boolean allBlocksAreNulls = true; + if (!leftBlock.isNull(p)) { + allBlocksAreNulls = false; } - if (leftValueBlock.getValueCount(p) != 1) { - if (leftValueBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } + if (allBlocksAreNulls) { result.appendNull(); continue position; } try { - 
result.appendDouble(StDistance.processGeoPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), this.rightValue)); - } catch (IllegalArgumentException e) { - warnings().registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - public DoubleBlock eval(int positionCount, LongVector leftValueVector) { - try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { - position: for (int p = 0; p < positionCount; p++) { - try { - result.appendDouble(StDistance.processGeoPointDocValuesAndConstant(leftValueVector.getLong(p), this.rightValue)); + StDistance.processGeoPointDocValuesAndConstant(result, p, leftBlock, this.right); } catch (IllegalArgumentException e) { warnings().registerException(e); result.appendNull(); @@ -94,12 +72,12 @@ public DoubleBlock eval(int positionCount, LongVector leftValueVector) { @Override public String toString() { - return "StDistanceGeoPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + return "StDistanceGeoPointDocValuesAndConstantEvaluator[" + "left=" + left + ", right=" + right + "]"; } @Override public void close() { - Releasables.closeExpectNoException(leftValue); + Releasables.closeExpectNoException(left); } private Warnings warnings() { @@ -117,25 +95,24 @@ private Warnings warnings() { static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; - private final EvalOperator.ExpressionEvaluator.Factory leftValue; + private final EvalOperator.ExpressionEvaluator.Factory left; - private final Point rightValue; + private final Point right; - public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, - Point rightValue) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory left, Point right) { this.source = source; - this.leftValue = leftValue; - this.rightValue = rightValue; + this.left = left; + 
this.right = right; } @Override public StDistanceGeoPointDocValuesAndConstantEvaluator get(DriverContext context) { - return new StDistanceGeoPointDocValuesAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + return new StDistanceGeoPointDocValuesAndConstantEvaluator(source, left.get(context), right, context); } @Override public String toString() { - return "StDistanceGeoPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + return "StDistanceGeoPointDocValuesAndConstantEvaluator[" + "left=" + left + ", right=" + right + "]"; } } } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndSourceEvaluator.java index 6758d888cc7d1..a7664987739e2 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndSourceEvaluator.java @@ -7,13 +7,10 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; @@ -28,85 +25,48 @@ public final class StDistanceGeoPointDocValuesAndSourceEvaluator 
implements EvalOperator.ExpressionEvaluator { private final Source source; - private final EvalOperator.ExpressionEvaluator leftValue; + private final EvalOperator.ExpressionEvaluator left; - private final EvalOperator.ExpressionEvaluator rightValue; + private final EvalOperator.ExpressionEvaluator right; private final DriverContext driverContext; private Warnings warnings; public StDistanceGeoPointDocValuesAndSourceEvaluator(Source source, - EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + EvalOperator.ExpressionEvaluator left, EvalOperator.ExpressionEvaluator right, DriverContext driverContext) { this.source = source; - this.leftValue = leftValue; - this.rightValue = rightValue; + this.left = left; + this.right = right; this.driverContext = driverContext; } @Override public Block eval(Page page) { - try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { - try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { - LongVector leftValueVector = leftValueBlock.asVector(); - if (leftValueVector == null) { - return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); - } - BytesRefVector rightValueVector = rightValueBlock.asVector(); - if (rightValueVector == null) { - return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); - } - return eval(page.getPositionCount(), leftValueVector, rightValueVector); + try (LongBlock leftBlock = (LongBlock) left.eval(page)) { + try (BytesRefBlock rightBlock = (BytesRefBlock) right.eval(page)) { + return eval(page.getPositionCount(), leftBlock, rightBlock); } } } - public DoubleBlock eval(int positionCount, LongBlock leftValueBlock, - BytesRefBlock rightValueBlock) { + public DoubleBlock eval(int positionCount, LongBlock leftBlock, BytesRefBlock rightBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { - BytesRef rightValueScratch = new BytesRef(); position: for (int p = 0; p < 
positionCount; p++) { - if (leftValueBlock.isNull(p)) { - result.appendNull(); - continue position; + boolean allBlocksAreNulls = true; + if (!leftBlock.isNull(p)) { + allBlocksAreNulls = false; } - if (leftValueBlock.getValueCount(p) != 1) { - if (leftValueBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; + if (!rightBlock.isNull(p)) { + allBlocksAreNulls = false; } - if (rightValueBlock.isNull(p)) { + if (allBlocksAreNulls) { result.appendNull(); continue position; } - if (rightValueBlock.getValueCount(p) != 1) { - if (rightValueBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - try { - result.appendDouble(StDistance.processGeoPointDocValuesAndSource(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); - } catch (IllegalArgumentException e) { - warnings().registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - public DoubleBlock eval(int positionCount, LongVector leftValueVector, - BytesRefVector rightValueVector) { - try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { - BytesRef rightValueScratch = new BytesRef(); - position: for (int p = 0; p < positionCount; p++) { try { - result.appendDouble(StDistance.processGeoPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + StDistance.processGeoPointDocValuesAndSource(result, p, leftBlock, rightBlock); } catch (IllegalArgumentException e) { warnings().registerException(e); result.appendNull(); @@ -118,12 +78,12 @@ public DoubleBlock eval(int positionCount, LongVector leftValueVector, @Override public String 
toString() { - return "StDistanceGeoPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + return "StDistanceGeoPointDocValuesAndSourceEvaluator[" + "left=" + left + ", right=" + right + "]"; } @Override public void close() { - Releasables.closeExpectNoException(leftValue, rightValue); + Releasables.closeExpectNoException(left, right); } private Warnings warnings() { @@ -141,25 +101,25 @@ private Warnings warnings() { static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; - private final EvalOperator.ExpressionEvaluator.Factory leftValue; + private final EvalOperator.ExpressionEvaluator.Factory left; - private final EvalOperator.ExpressionEvaluator.Factory rightValue; + private final EvalOperator.ExpressionEvaluator.Factory right; - public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, - EvalOperator.ExpressionEvaluator.Factory rightValue) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory left, + EvalOperator.ExpressionEvaluator.Factory right) { this.source = source; - this.leftValue = leftValue; - this.rightValue = rightValue; + this.left = left; + this.right = right; } @Override public StDistanceGeoPointDocValuesAndSourceEvaluator get(DriverContext context) { - return new StDistanceGeoPointDocValuesAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + return new StDistanceGeoPointDocValuesAndSourceEvaluator(source, left.get(context), right.get(context), context); } @Override public String toString() { - return "StDistanceGeoPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + return "StDistanceGeoPointDocValuesAndSourceEvaluator[" + "left=" + left + ", right=" + right + "]"; } } } diff --git 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndConstantEvaluator.java index 201c29cb04275..06e44f996daf5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndConstantEvaluator.java @@ -4,14 +4,11 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; -import java.io.IOException; import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; @@ -28,66 +25,43 @@ public final class StDistanceGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; - private final EvalOperator.ExpressionEvaluator leftValue; + private final EvalOperator.ExpressionEvaluator left; - private final Point rightValue; + private final Point right; private final DriverContext driverContext; private Warnings warnings; public StDistanceGeoSourceAndConstantEvaluator(Source source, - EvalOperator.ExpressionEvaluator leftValue, Point rightValue, DriverContext driverContext) { + EvalOperator.ExpressionEvaluator left, Point right, DriverContext driverContext) { this.source = source; - this.leftValue = leftValue; - this.rightValue = rightValue; + this.left = left; + this.right = right; 
this.driverContext = driverContext; } @Override public Block eval(Page page) { - try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { - BytesRefVector leftValueVector = leftValueBlock.asVector(); - if (leftValueVector == null) { - return eval(page.getPositionCount(), leftValueBlock); - } - return eval(page.getPositionCount(), leftValueVector); + try (BytesRefBlock leftBlock = (BytesRefBlock) left.eval(page)) { + return eval(page.getPositionCount(), leftBlock); } } - public DoubleBlock eval(int positionCount, BytesRefBlock leftValueBlock) { + public DoubleBlock eval(int positionCount, BytesRefBlock leftBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { - BytesRef leftValueScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (leftValueBlock.isNull(p)) { - result.appendNull(); - continue position; + boolean allBlocksAreNulls = true; + if (!leftBlock.isNull(p)) { + allBlocksAreNulls = false; } - if (leftValueBlock.getValueCount(p) != 1) { - if (leftValueBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } + if (allBlocksAreNulls) { result.appendNull(); continue position; } try { - result.appendDouble(StDistance.processGeoSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), this.rightValue)); - } catch (IllegalArgumentException | IOException e) { - warnings().registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - public DoubleBlock eval(int positionCount, BytesRefVector leftValueVector) { - try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { - BytesRef leftValueScratch = new BytesRef(); - position: for (int p = 0; p < positionCount; p++) { - try { - result.appendDouble(StDistance.processGeoSourceAndConstant(leftValueVector.getBytesRef(p, 
leftValueScratch), this.rightValue)); - } catch (IllegalArgumentException | IOException e) { + StDistance.processGeoSourceAndConstant(result, p, leftBlock, this.right); + } catch (IllegalArgumentException e) { warnings().registerException(e); result.appendNull(); } @@ -98,12 +72,12 @@ public DoubleBlock eval(int positionCount, BytesRefVector leftValueVector) { @Override public String toString() { - return "StDistanceGeoSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + return "StDistanceGeoSourceAndConstantEvaluator[" + "left=" + left + ", right=" + right + "]"; } @Override public void close() { - Releasables.closeExpectNoException(leftValue); + Releasables.closeExpectNoException(left); } private Warnings warnings() { @@ -121,25 +95,24 @@ private Warnings warnings() { static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; - private final EvalOperator.ExpressionEvaluator.Factory leftValue; + private final EvalOperator.ExpressionEvaluator.Factory left; - private final Point rightValue; + private final Point right; - public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, - Point rightValue) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory left, Point right) { this.source = source; - this.leftValue = leftValue; - this.rightValue = rightValue; + this.left = left; + this.right = right; } @Override public StDistanceGeoSourceAndConstantEvaluator get(DriverContext context) { - return new StDistanceGeoSourceAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + return new StDistanceGeoSourceAndConstantEvaluator(source, left.get(context), right, context); } @Override public String toString() { - return "StDistanceGeoSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + return "StDistanceGeoSourceAndConstantEvaluator[" + "left=" + left + ", right=" + right + "]"; } } } 
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndSourceEvaluator.java index 9cbe30e22c3ed..31e20d9f42197 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndSourceEvaluator.java @@ -4,14 +4,11 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; -import java.io.IOException; import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; @@ -27,88 +24,48 @@ public final class StDistanceGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; - private final EvalOperator.ExpressionEvaluator leftValue; + private final EvalOperator.ExpressionEvaluator left; - private final EvalOperator.ExpressionEvaluator rightValue; + private final EvalOperator.ExpressionEvaluator right; private final DriverContext driverContext; private Warnings warnings; - public StDistanceGeoSourceAndSourceEvaluator(Source source, - EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, - DriverContext driverContext) { + public StDistanceGeoSourceAndSourceEvaluator(Source source, EvalOperator.ExpressionEvaluator left, + EvalOperator.ExpressionEvaluator 
right, DriverContext driverContext) { this.source = source; - this.leftValue = leftValue; - this.rightValue = rightValue; + this.left = left; + this.right = right; this.driverContext = driverContext; } @Override public Block eval(Page page) { - try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { - try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { - BytesRefVector leftValueVector = leftValueBlock.asVector(); - if (leftValueVector == null) { - return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); - } - BytesRefVector rightValueVector = rightValueBlock.asVector(); - if (rightValueVector == null) { - return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); - } - return eval(page.getPositionCount(), leftValueVector, rightValueVector); + try (BytesRefBlock leftBlock = (BytesRefBlock) left.eval(page)) { + try (BytesRefBlock rightBlock = (BytesRefBlock) right.eval(page)) { + return eval(page.getPositionCount(), leftBlock, rightBlock); } } } - public DoubleBlock eval(int positionCount, BytesRefBlock leftValueBlock, - BytesRefBlock rightValueBlock) { + public DoubleBlock eval(int positionCount, BytesRefBlock leftBlock, BytesRefBlock rightBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { - BytesRef leftValueScratch = new BytesRef(); - BytesRef rightValueScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (leftValueBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (leftValueBlock.getValueCount(p) != 1) { - if (leftValueBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; + boolean allBlocksAreNulls = true; + if (!leftBlock.isNull(p)) { + allBlocksAreNulls = false; } - if (rightValueBlock.isNull(p)) { - result.appendNull(); - continue position; + if 
(!rightBlock.isNull(p)) { + allBlocksAreNulls = false; } - if (rightValueBlock.getValueCount(p) != 1) { - if (rightValueBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } + if (allBlocksAreNulls) { result.appendNull(); continue position; } try { - result.appendDouble(StDistance.processGeoSourceAndSource(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); - } catch (IllegalArgumentException | IOException e) { - warnings().registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - public DoubleBlock eval(int positionCount, BytesRefVector leftValueVector, - BytesRefVector rightValueVector) { - try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { - BytesRef leftValueScratch = new BytesRef(); - BytesRef rightValueScratch = new BytesRef(); - position: for (int p = 0; p < positionCount; p++) { - try { - result.appendDouble(StDistance.processGeoSourceAndSource(leftValueVector.getBytesRef(p, leftValueScratch), rightValueVector.getBytesRef(p, rightValueScratch))); - } catch (IllegalArgumentException | IOException e) { + StDistance.processGeoSourceAndSource(result, p, leftBlock, rightBlock); + } catch (IllegalArgumentException e) { warnings().registerException(e); result.appendNull(); } @@ -119,12 +76,12 @@ public DoubleBlock eval(int positionCount, BytesRefVector leftValueVector, @Override public String toString() { - return "StDistanceGeoSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + return "StDistanceGeoSourceAndSourceEvaluator[" + "left=" + left + ", right=" + right + "]"; } @Override public void close() { - Releasables.closeExpectNoException(leftValue, rightValue); + Releasables.closeExpectNoException(left, right); } private Warnings 
warnings() { @@ -142,25 +99,25 @@ private Warnings warnings() { static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; - private final EvalOperator.ExpressionEvaluator.Factory leftValue; + private final EvalOperator.ExpressionEvaluator.Factory left; - private final EvalOperator.ExpressionEvaluator.Factory rightValue; + private final EvalOperator.ExpressionEvaluator.Factory right; - public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, - EvalOperator.ExpressionEvaluator.Factory rightValue) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory left, + EvalOperator.ExpressionEvaluator.Factory right) { this.source = source; - this.leftValue = leftValue; - this.rightValue = rightValue; + this.left = left; + this.right = right; } @Override public StDistanceGeoSourceAndSourceEvaluator get(DriverContext context) { - return new StDistanceGeoSourceAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + return new StDistanceGeoSourceAndSourceEvaluator(source, left.get(context), right.get(context), context); } @Override public String toString() { - return "StDistanceGeoSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + return "StDistanceGeoSourceAndSourceEvaluator[" + "left=" + left + ", right=" + right + "]"; } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index f5baaef4f579d..842501744979c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -170,6 +170,11 @@ public enum Cap { */ SPATIAL_PREDICATES_SUPPORT_MULTIVALUES, + /** + * Enable spatial distance function to support multi-values. Done in #114836. 
+ */ + SPATIAL_DISTANCE_SUPPORTS_MULTIVALUES, + /** * Support a number of fixes and enhancements to spatial distance pushdown. Done in #112938. */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistance.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistance.java index ae9d3383bad39..3cf042a2db828 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistance.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistance.java @@ -14,6 +14,9 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.Point; @@ -77,8 +80,9 @@ protected CartesianDistanceCalculator() { @Override protected double distance(Point left, Point right) { - final double diffX = left.getX() - right.getX(); - final double diffY = left.getY() - right.getY(); + // Cast coordinates to float to mimic Lucene behaviour, so we get identical results + final double diffX = (double) ((float) left.getX()) - (double) ((float) right.getX()); + final double diffY = (double) ((float) left.getY()) - (double) ((float) right.getY()); return Math.sqrt(diffX * diffX + diffY * diffY); } } @@ -100,11 +104,6 @@ protected Double compare(BytesRef left, BytesRef right) throws IOException { protected abstract double distance(Point left, Point right); - protected double distance(long encoded, Geometry right) { - Point point = spatialCoordinateType.longAsPoint(encoded); - return distance(point, (Point) right); - } - 
protected double distance(Geometry left, Geometry right) { return distance((Point) left, (Point) right); } @@ -113,8 +112,112 @@ public double distance(BytesRef left, BytesRef right) { return distance(this.fromBytesRef(left), this.fromBytesRef(right)); } - public double distance(BytesRef left, Point right) { - return distance(this.fromBytesRef(left), right); + public void distanceSourceAndConstant(DoubleBlock.Builder results, int position, BytesRefBlock left, Point right) { + int valueCount = left.getValueCount(position); + if (valueCount < 1) { + results.appendNull(); + } else { + final BytesRef scratch = new BytesRef(); + final int firstValueIndex = left.getFirstValueIndex(position); + double distance = Double.MAX_VALUE; + if (valueCount == 1) { + distance = distance(fromBytesRef(left.getBytesRef(firstValueIndex, scratch)), right); + } else { + for (int i = 0; i < valueCount; i++) { + double value = distance(fromBytesRef(left.getBytesRef(firstValueIndex + i, scratch)), right); + if (value < distance) { + distance = value; + } + } + } + results.appendDouble(distance); + } + } + + public void distanceSourceAndSource(DoubleBlock.Builder results, int position, BytesRefBlock left, BytesRefBlock right) { + int leftCount = left.getValueCount(position); + int rightCount = right.getValueCount(position); + if (leftCount < 1 || rightCount < 1) { + results.appendNull(); + } else { + final BytesRef scratchLeft = new BytesRef(); + final BytesRef scratchRight = new BytesRef(); + final int leftFirstValueIndex = left.getFirstValueIndex(position); + final int rightFirstValueIndex = right.getFirstValueIndex(position); + double distance = Double.MAX_VALUE; + if (leftCount == 1 && rightCount == 1) { + distance = distance( + fromBytesRef(left.getBytesRef(leftFirstValueIndex, scratchLeft)), + fromBytesRef(right.getBytesRef(rightFirstValueIndex, scratchRight)) + ); + } else { + for (int i = 0; i < leftCount; i++) { + for (int j = 0; j < rightCount; j++) { + double value = distance( + 
fromBytesRef(left.getBytesRef(leftFirstValueIndex + i, scratchLeft)), + fromBytesRef(right.getBytesRef(rightFirstValueIndex + j, scratchRight)) + ); + if (value < distance) { + distance = value; + } + } + } + } + results.appendDouble(distance); + } + } + + public void distancePointDocValuesAndConstant(DoubleBlock.Builder results, int position, LongBlock left, Point right) { + int valueCount = left.getValueCount(position); + if (valueCount < 1) { + results.appendNull(); + } else { + final int firstValueIndex = left.getFirstValueIndex(position); + double distance = Double.MAX_VALUE; + if (valueCount == 1) { + distance = distance(spatialCoordinateType.longAsPoint(left.getLong(firstValueIndex)), right); + } else { + for (int i = 0; i < valueCount; i++) { + double value = distance(spatialCoordinateType.longAsPoint(left.getLong(firstValueIndex + i)), right); + if (value < distance) { + distance = value; + } + } + } + results.appendDouble(distance); + } + } + + public void distancePointDocValuesAndSource(DoubleBlock.Builder results, int position, LongBlock left, BytesRefBlock right) { + int leftCount = left.getValueCount(position); + int rightCount = right.getValueCount(position); + if (leftCount < 1 || rightCount < 1) { + results.appendNull(); + } else { + final BytesRef scratchRight = new BytesRef(); + final int leftFirstValueIndex = left.getFirstValueIndex(position); + final int rightFirstValueIndex = right.getFirstValueIndex(position); + double distance = Double.MAX_VALUE; + if (leftCount == 1 && rightCount == 1) { + distance = distance( + spatialCoordinateType.longAsPoint(left.getLong(leftFirstValueIndex)), + fromBytesRef(right.getBytesRef(rightFirstValueIndex, scratchRight)) + ); + } + for (int i = 0; i < leftCount; i++) { + for (int j = 0; j < rightCount; j++) { + double value = distance( + spatialCoordinateType.longAsPoint(left.getLong(leftFirstValueIndex + i)), + fromBytesRef(right.getBytesRef(rightFirstValueIndex + j, scratchRight)) + ); + if (value < distance) 
{ + distance = value; + } + } + } + results.appendDouble(distance); + } + } } @@ -249,45 +352,43 @@ private EvalOperator.ExpressionEvaluator.Factory toEvaluator( throw EsqlIllegalArgumentException.illegalDataType(crsType().name()); } - @Evaluator(extraName = "GeoSourceAndConstant", warnExceptions = { IllegalArgumentException.class, IOException.class }) - static double processGeoSourceAndConstant(BytesRef leftValue, @Fixed Point rightValue) throws IOException { - return GEO.distance(leftValue, rightValue); + @Evaluator(extraName = "GeoSourceAndConstant", warnExceptions = { IllegalArgumentException.class }) + static void processGeoSourceAndConstant(DoubleBlock.Builder results, int p, BytesRefBlock left, @Fixed Point right) { + GEO.distanceSourceAndConstant(results, p, left, right); } - @Evaluator(extraName = "GeoSourceAndSource", warnExceptions = { IllegalArgumentException.class, IOException.class }) - static double processGeoSourceAndSource(BytesRef leftValue, BytesRef rightValue) throws IOException { - return GEO.distance(leftValue, rightValue); + @Evaluator(extraName = "GeoSourceAndSource", warnExceptions = { IllegalArgumentException.class }) + static void processGeoSourceAndSource(DoubleBlock.Builder results, int p, BytesRefBlock left, BytesRefBlock right) { + GEO.distanceSourceAndSource(results, p, left, right); } @Evaluator(extraName = "GeoPointDocValuesAndConstant", warnExceptions = { IllegalArgumentException.class }) - static double processGeoPointDocValuesAndConstant(long leftValue, @Fixed Point rightValue) { - return GEO.distance(leftValue, rightValue); + static void processGeoPointDocValuesAndConstant(DoubleBlock.Builder results, int p, LongBlock left, @Fixed Point right) { + GEO.distancePointDocValuesAndConstant(results, p, left, right); } @Evaluator(extraName = "GeoPointDocValuesAndSource", warnExceptions = { IllegalArgumentException.class }) - static double processGeoPointDocValuesAndSource(long leftValue, BytesRef rightValue) { - Geometry geometry = 
SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(rightValue); - return GEO.distance(leftValue, geometry); + static void processGeoPointDocValuesAndSource(DoubleBlock.Builder results, int p, LongBlock left, BytesRefBlock right) { + GEO.distancePointDocValuesAndSource(results, p, left, right); } - @Evaluator(extraName = "CartesianSourceAndConstant", warnExceptions = { IllegalArgumentException.class, IOException.class }) - static double processCartesianSourceAndConstant(BytesRef leftValue, @Fixed Point rightValue) throws IOException { - return CARTESIAN.distance(leftValue, rightValue); + @Evaluator(extraName = "CartesianSourceAndConstant", warnExceptions = { IllegalArgumentException.class }) + static void processCartesianSourceAndConstant(DoubleBlock.Builder results, int p, BytesRefBlock left, @Fixed Point right) { + CARTESIAN.distanceSourceAndConstant(results, p, left, right); } - @Evaluator(extraName = "CartesianSourceAndSource", warnExceptions = { IllegalArgumentException.class, IOException.class }) - static double processCartesianSourceAndSource(BytesRef leftValue, BytesRef rightValue) throws IOException { - return CARTESIAN.distance(leftValue, rightValue); + @Evaluator(extraName = "CartesianSourceAndSource", warnExceptions = { IllegalArgumentException.class }) + static void processCartesianSourceAndSource(DoubleBlock.Builder results, int p, BytesRefBlock left, BytesRefBlock right) { + CARTESIAN.distanceSourceAndSource(results, p, left, right); } @Evaluator(extraName = "CartesianPointDocValuesAndConstant", warnExceptions = { IllegalArgumentException.class }) - static double processCartesianPointDocValuesAndConstant(long leftValue, @Fixed Point rightValue) { - return CARTESIAN.distance(leftValue, rightValue); + static void processCartesianPointDocValuesAndConstant(DoubleBlock.Builder results, int p, LongBlock left, @Fixed Point right) { + CARTESIAN.distancePointDocValuesAndConstant(results, p, left, right); } @Evaluator(extraName = 
"CartesianPointDocValuesAndSource") - static double processCartesianPointDocValuesAndSource(long leftValue, BytesRef rightValue) { - Geometry geometry = SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(rightValue); - return CARTESIAN.distance(leftValue, geometry); + static void processCartesianPointDocValuesAndSource(DoubleBlock.Builder results, int p, LongBlock left, BytesRefBlock right) { + CARTESIAN.distancePointDocValuesAndSource(results, p, left, right); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/EnableSpatialDistancePushdown.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/EnableSpatialDistancePushdown.java index ec25c69deba5c..cde305e52a705 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/EnableSpatialDistancePushdown.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/EnableSpatialDistancePushdown.java @@ -95,7 +95,7 @@ private FilterExec rewrite(FilterExec filterExec, EsQueryExec esQueryExec) { } return comparison; }); - if (rewritten.equals(filterExec.condition()) == false) { + if (rewritten.equals(filterExec.condition()) == false && canPushToSource(rewritten, x -> false)) { return new FilterExec(filterExec.source(), esQueryExec, rewritten); } return filterExec; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunctionTestCase.java index 6794de80f7433..c93d871ca2b8c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunctionTestCase.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunctionTestCase.java @@ -267,19 +267,10 @@ private static DataType pickSpatialType(DataType leftType, DataType rightType) { private static Matcher spatialEvaluatorString(DataType leftType, DataType rightType) { String crsType = isSpatialGeo(pickSpatialType(leftType, rightType)) ? "Geo" : "Cartesian"; - String paramSuffix = paramSuffix(); - String channels = channelsText("left" + paramSuffix, "right" + paramSuffix); + String channels = channelsText("left", "right"); return equalTo(getFunctionClassName() + crsType + "SourceAndSourceEvaluator[" + channels + "]"); } - private static String paramSuffix() { - try { - return getSpatialRelatesFunctionClass().getSimpleName().contains("Distance") ? "Value" : ""; - } catch (ClassNotFoundException e) { - return ""; - } - } - private static String channelsText(String... args) { return IntStream.range(0, args.length).mapToObj(i -> args[i] + "=Attribute[channel=" + i + "]").collect(Collectors.joining(", ")); } From 4fa8485a26d5d57e52117ae8d9f89491853703d9 Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Tue, 15 Oct 2024 22:11:55 +0300 Subject: [PATCH 121/449] Ensuring consistent ordering for inner hits in collapse test for rrf (#114740) --- muted-tests.yml | 3 -- ...rrf_retriever_search_api_compatibility.yml | 31 ++++++++++++++++--- 2 files changed, 27 insertions(+), 7 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index b105c77b34521..86585ebc5daf7 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -325,9 +325,6 @@ tests: - class: org.elasticsearch.xpack.inference.services.cohere.CohereServiceTests method: testInfer_StreamRequest_ErrorResponse issue: https://github.com/elastic/elasticsearch/issues/114327 -- class: org.elasticsearch.xpack.rank.rrf.RRFRankClientYamlTestSuiteIT - method: test {yaml=rrf/700_rrf_retriever_search_api_compatibility/rrf retriever with top-level collapse} - issue: 
https://github.com/elastic/elasticsearch/issues/114331 - class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT method: test {yaml=cluster.stats/30_ccs_stats/cross-cluster search stats search} issue: https://github.com/elastic/elasticsearch/issues/114371 diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/700_rrf_retriever_search_api_compatibility.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/700_rrf_retriever_search_api_compatibility.yml index 517c162c33e95..f3914843b80ec 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/700_rrf_retriever_search_api_compatibility.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/700_rrf_retriever_search_api_compatibility.yml @@ -11,6 +11,8 @@ setup: body: mappings: properties: + id: + type: integer text: type: text text_to_highlight: @@ -39,6 +41,7 @@ setup: index: test id: "1" body: + id: 1 text: "term1" vector: [1.0] @@ -47,6 +50,7 @@ setup: index: test id: "2" body: + id: 2 text: "term2" text_to_highlight: "search for the truth" keyword: "biology" @@ -57,6 +61,7 @@ setup: index: test id: "3" body: + id: 3 text: "term3" text_to_highlight: "nothing related" keyword: "technology" @@ -67,6 +72,7 @@ setup: index: test id: "4" body: + id: 4 text: "term4" vector: [4.0] - do: @@ -74,6 +80,7 @@ setup: index: test id: "5" body: + id: 5 text: "term5" text_to_highlight: "You know, for Search!" 
keyword: "technology" @@ -81,9 +88,10 @@ setup: vector: [5.0] - do: index: + id: 6 index: test - id: "6" body: + id: 6 text: "term6" keyword: "biology" integer: 6 @@ -93,6 +101,7 @@ setup: index: test id: "7" body: + id: 7 text: "term7" keyword: "astronomy" vector: [77.0] @@ -102,6 +111,7 @@ setup: index: test id: "8" body: + id: 8 text: "term8" keyword: "technology" nested: { views: 100} @@ -110,6 +120,7 @@ setup: index: test id: "9" body: + id: 9 text: "term9" integer: 2 keyword: "technology" @@ -439,7 +450,19 @@ setup: rank_window_size: 5 rank_constant: 10 size: 3 - collapse: { field: keyword, inner_hits: { name: sub_hits, size: 2 } } + collapse: { + field: keyword, + inner_hits: { + name: sub_hits, + size: 2, + sort: + { + id: { + order: desc + } + } + } + } - match: { hits.total : 9 } @@ -456,8 +479,8 @@ setup: - match: { hits.hits.1.inner_hits.sub_hits.hits.total : 4 } - length: { hits.hits.1.inner_hits.sub_hits.hits.hits : 2 } - - match: { hits.hits.1.inner_hits.sub_hits.hits.hits.0._id: "5" } - - match: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._id: "3" } + - match: { hits.hits.1.inner_hits.sub_hits.hits.hits.0._id: "9" } + - match: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._id: "8" } - length: { hits.hits.2.inner_hits.sub_hits.hits.hits: 2 } - match: { hits.hits.2.inner_hits.sub_hits.hits.hits.0._id: "6" } From 2697f857bcceb518baeff8a10fd67b84b65a2e64 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 15 Oct 2024 13:23:31 -0700 Subject: [PATCH 122/449] Revert "[ML] Dynamically get of num allocations (#114636)" (#114861) This reverts commit 8040fbb0d05401d40ea856f0a4982e8aaab48340. 
--- docs/changelog/114636.yaml | 5 -- .../inference/InferenceService.java | 4 -- .../TransportGetInferenceModelAction.java | 72 +++++-------------- .../ElasticsearchInternalModel.java | 11 +-- .../ElasticsearchInternalService.java | 46 ++---------- .../ElasticsearchInternalServiceSettings.java | 4 -- .../ElserInternalModelTests.java | 30 -------- 7 files changed, 24 insertions(+), 148 deletions(-) delete mode 100644 docs/changelog/114636.yaml delete mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java diff --git a/docs/changelog/114636.yaml b/docs/changelog/114636.yaml deleted file mode 100644 index c63876fda67f7..0000000000000 --- a/docs/changelog/114636.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 114636 -summary: Dynamically get of num allocations -area: Machine Learning -type: enhancement -issues: [] diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index 2c99563955746..d437533a8603d 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -210,8 +210,4 @@ default List defaultConfigIds() { default void defaultConfigs(ActionListener> defaultsListener) { defaultsListener.onResponse(List.of()); } - - default void updateModelsWithDynamicFields(List model, ActionListener> listener) { - listener.onResponse(model); - } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java index 55aad5c55a2ac..5ee1e40869dbc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java @@ -9,13 +9,13 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.inference.InferenceServiceRegistry; -import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskType; import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.injection.guice.Inject; @@ -29,11 +29,8 @@ import org.elasticsearch.xpack.inference.registry.ModelRegistry; import java.util.ArrayList; -import java.util.Comparator; -import java.util.HashMap; import java.util.List; import java.util.concurrent.Executor; -import java.util.stream.Collectors; public class TransportGetInferenceModelAction extends HandledTransportAction< GetInferenceModelAction.Request, @@ -99,69 +96,38 @@ private void getSingleModel( var model = service.get() .parsePersistedConfig(unparsedModel.inferenceEntityId(), unparsedModel.taskType(), unparsedModel.settings()); - - service.get() - .updateModelsWithDynamicFields( - List.of(model), - delegate.delegateFailureAndWrap( - (l2, updatedModels) -> l2.onResponse( - new GetInferenceModelAction.Response( - updatedModels.stream().map(Model::getConfigurations).collect(Collectors.toList()) - ) - ) - ) - ); + delegate.onResponse(new GetInferenceModelAction.Response(List.of(model.getConfigurations()))); })); } private void getAllModels(ActionListener listener) { - modelRegistry.getAllModels(listener.delegateFailureAndWrap((l, models) -> executor.execute(() -> 
parseModels(models, listener)))); + modelRegistry.getAllModels( + listener.delegateFailureAndWrap((l, models) -> executor.execute(ActionRunnable.supply(l, () -> parseModels(models)))) + ); } private void getModelsByTaskType(TaskType taskType, ActionListener listener) { modelRegistry.getModelsByTaskType( taskType, - listener.delegateFailureAndWrap((l, models) -> executor.execute(() -> parseModels(models, listener))) + listener.delegateFailureAndWrap((l, models) -> executor.execute(ActionRunnable.supply(l, () -> parseModels(models)))) ); } - private void parseModels(List unparsedModels, ActionListener listener) { - var parsedModelsByService = new HashMap>(); - try { - for (var unparsedModel : unparsedModels) { - var service = serviceRegistry.getService(unparsedModel.service()); - if (service.isEmpty()) { - throw serviceNotFoundException(unparsedModel.service(), unparsedModel.inferenceEntityId()); - } - var list = parsedModelsByService.computeIfAbsent(service.get().name(), s -> new ArrayList<>()); - list.add( - service.get() - .parsePersistedConfig(unparsedModel.inferenceEntityId(), unparsedModel.taskType(), unparsedModel.settings()) - ); - } - - var groupedListener = new GroupedActionListener>( - parsedModelsByService.entrySet().size(), - listener.delegateFailureAndWrap((delegate, listOfListOfModels) -> { - var modifiable = new ArrayList(); - for (var l : listOfListOfModels) { - modifiable.addAll(l); - } - modifiable.sort(Comparator.comparing(Model::getInferenceEntityId)); - delegate.onResponse( - new GetInferenceModelAction.Response(modifiable.stream().map(Model::getConfigurations).collect(Collectors.toList())) - ); - }) - ); + private GetInferenceModelAction.Response parseModels(List unparsedModels) { + var parsedModels = new ArrayList(); - for (var entry : parsedModelsByService.entrySet()) { - serviceRegistry.getService(entry.getKey()) - .get() // must be non-null to get this far - .updateModelsWithDynamicFields(entry.getValue(), groupedListener); + for (var 
unparsedModel : unparsedModels) { + var service = serviceRegistry.getService(unparsedModel.service()); + if (service.isEmpty()) { + throw serviceNotFoundException(unparsedModel.service(), unparsedModel.inferenceEntityId()); } - } catch (Exception e) { - listener.onFailure(e); + parsedModels.add( + service.get() + .parsePersistedConfig(unparsedModel.inferenceEntityId(), unparsedModel.taskType(), unparsedModel.settings()) + .getConfigurations() + ); } + return new GetInferenceModelAction.Response(parsedModels); } private ElasticsearchStatusException serviceNotFoundException(String service, String inferenceId) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java index 642f6f144abc0..d38def8dca47f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java @@ -21,7 +21,7 @@ public abstract class ElasticsearchInternalModel extends Model { - protected ElasticsearchInternalServiceSettings internalServiceSettings; + protected final ElasticsearchInternalServiceSettings internalServiceSettings; public ElasticsearchInternalModel( String inferenceEntityId, @@ -91,15 +91,6 @@ public ElasticsearchInternalServiceSettings getServiceSettings() { return (ElasticsearchInternalServiceSettings) super.getServiceSettings(); } - public void updateNumAllocation(Integer numAllocations) { - this.internalServiceSettings = new ElasticsearchInternalServiceSettings( - numAllocations, - this.internalServiceSettings.getNumThreads(), - this.internalServiceSettings.modelId(), - this.internalServiceSettings.getAdaptiveAllocationsSettings() - ); - } - @Override public String 
toString() { return Strings.toString(this.getConfigurations()); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 4546280b39fe2..389a9fa369c21 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -32,7 +32,6 @@ import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; -import org.elasticsearch.xpack.core.ml.action.GetDeploymentStatsAction; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsStatsAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; @@ -57,7 +56,6 @@ import java.util.ArrayList; import java.util.Collections; import java.util.EnumSet; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; @@ -788,47 +786,11 @@ public List defaultConfigIds() { ); } + /** + * Default configurations that can be out of the box without creating an endpoint first. 
+ * @param defaultsListener Config listener + */ @Override - public void updateModelsWithDynamicFields(List models, ActionListener> listener) { - var modelsByDeploymentIds = new HashMap(); - for (var model : models) { - if (model instanceof ElasticsearchInternalModel esModel) { - modelsByDeploymentIds.put(esModel.internalServiceSettings.deloymentId(), esModel); - } else { - listener.onFailure( - new ElasticsearchStatusException( - "Cannot update model [{}] as it is not an Elasticsearch service model", - RestStatus.INTERNAL_SERVER_ERROR, - model.getInferenceEntityId() - ) - ); - return; - } - } - - if (modelsByDeploymentIds.isEmpty()) { - listener.onResponse(models); - return; - } - - String deploymentIds = String.join(",", modelsByDeploymentIds.keySet()); - client.execute( - GetDeploymentStatsAction.INSTANCE, - new GetDeploymentStatsAction.Request(deploymentIds), - ActionListener.wrap(stats -> { - for (var deploymentStats : stats.getStats().results()) { - var model = modelsByDeploymentIds.get(deploymentStats.getDeploymentId()); - model.updateNumAllocation(deploymentStats.getNumberOfAllocations()); - } - listener.onResponse(new ArrayList<>(modelsByDeploymentIds.values())); - }, e -> { - logger.warn("Get deployment stats failed, cannot update the endpoint's number of allocations", e); - // continue with the original response - listener.onResponse(models); - }) - ); - } - public void defaultConfigs(ActionListener> defaultsListener) { preferredModelVariantFn.accept(defaultsListener.delegateFailureAndWrap((delegate, preferredModelVariant) -> { if (PreferredModelVariant.LINUX_X86_OPTIMIZED.equals(preferredModelVariant)) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java index 5bd8d8cfc5c13..fedf48fb583a3 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java @@ -194,10 +194,6 @@ public String modelId() { return modelId; } - public String deloymentId() { - return modelId; - } - public Integer getNumAllocations() { return numAllocations; } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java deleted file mode 100644 index 74cdab79fe79b..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.services.elasticsearch; - -import org.elasticsearch.inference.TaskType; -import org.elasticsearch.test.ESTestCase; - -public class ElserInternalModelTests extends ESTestCase { - public void testUpdateNumAllocation() { - var model = new ElserInternalModel( - "foo", - TaskType.SPARSE_EMBEDDING, - ElasticsearchInternalService.NAME, - new ElserInternalServiceSettings(null, 1, "elser", null), - new ElserMlNodeTaskSettings(), - null - ); - - model.updateNumAllocation(1); - assertEquals(1, model.internalServiceSettings.getNumAllocations().intValue()); - - model.updateNumAllocation(null); - assertNull(model.internalServiceSettings.getNumAllocations()); - } -} From 837c0e8d0edf7d529218faaf89920758204df322 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 16 Oct 2024 08:23:50 +1100 Subject: [PATCH 123/449] Mute org.elasticsearch.license.LicensingTests org.elasticsearch.license.LicensingTests #114865 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 86585ebc5daf7..052737e7308a0 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -428,6 +428,8 @@ tests: - class: org.elasticsearch.xpack.enrich.EnrichIT method: testImmutablePolicy issue: https://github.com/elastic/elasticsearch/issues/114839 +- class: org.elasticsearch.license.LicensingTests + issue: https://github.com/elastic/elasticsearch/issues/114865 # Examples: # From 16864e985b7e12304b63a6adb42b169ba2a1b43d Mon Sep 17 00:00:00 2001 From: Nick Tindall Date: Wed, 16 Oct 2024 09:08:49 +1100 Subject: [PATCH 124/449] Retry throttled snapshot deletions (#113237) Closes ES-8562 --- docs/changelog/113237.yaml | 5 + .../snapshot-restore/repository-s3.asciidoc | 14 ++ .../s3/S3BlobStoreRepositoryMetricsTests.java | 118 +++++++++++- .../repositories/s3/S3BlobStore.java | 113 ++++++++--- .../s3/S3RepositoriesMetrics.java | 7 +- 
.../repositories/s3/S3Repository.java | 28 ++- .../s3/S3BlobContainerRetriesTests.java | 176 +++++++++++++++++- .../elasticsearch/common/BackoffPolicy.java | 89 +++++++++ .../common/BackoffPolicyTests.java | 31 +++ 9 files changed, 540 insertions(+), 41 deletions(-) create mode 100644 docs/changelog/113237.yaml diff --git a/docs/changelog/113237.yaml b/docs/changelog/113237.yaml new file mode 100644 index 0000000000000..45343dbf17114 --- /dev/null +++ b/docs/changelog/113237.yaml @@ -0,0 +1,5 @@ +pr: 113237 +summary: Retry throttled snapshot deletions +area: Snapshot/Restore +type: bug +issues: [] diff --git a/docs/reference/snapshot-restore/repository-s3.asciidoc b/docs/reference/snapshot-restore/repository-s3.asciidoc index 71a9fd8b87c96..b48bb5c4f059a 100644 --- a/docs/reference/snapshot-restore/repository-s3.asciidoc +++ b/docs/reference/snapshot-restore/repository-s3.asciidoc @@ -329,6 +329,20 @@ include::repository-shared-settings.asciidoc[] `1000` which is the maximum number supported by the https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListMultipartUploads.html[AWS ListMultipartUploads API]. If set to `0`, {es} will not attempt to clean up dangling multipart uploads. +`throttled_delete_retry.delay_increment`:: + + (<>) This value is used as the delay before the first retry and the amount the delay is incremented by on each subsequent retry. Default is 50ms, minimum is 0ms. + +`throttled_delete_retry.maximum_delay`:: + + (<>) This is the upper bound on how long the delays between retries will grow to. Default is 5s, minimum is 0ms. + +`throttled_delete_retry.maximum_number_of_retries`:: + + (integer) Sets the number of times to retry a throttled snapshot deletion. Defaults to `10`, minimum value is `0` which + will disable retries altogether. Note that if retries are enabled in the S3 client, each of these retries + comprises that many client-level retries.
+ NOTE: The option of defining client settings in the repository settings as documented below is considered deprecated, and will be removed in a future version. diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryMetricsTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryMetricsTests.java index e55668adea101..21f42bf9eb99c 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryMetricsTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryMetricsTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; @@ -31,6 +32,8 @@ import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -38,6 +41,7 @@ import java.util.Queue; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; +import java.util.stream.IntStream; import static org.elasticsearch.repositories.RepositoriesMetrics.HTTP_REQUEST_TIME_IN_MILLIS_HISTOGRAM; import static org.elasticsearch.repositories.RepositoriesMetrics.METRIC_EXCEPTIONS_HISTOGRAM; @@ -48,9 +52,11 @@ import static org.elasticsearch.repositories.RepositoriesMetrics.METRIC_THROTTLES_HISTOGRAM; import static org.elasticsearch.repositories.RepositoriesMetrics.METRIC_THROTTLES_TOTAL; import static 
org.elasticsearch.repositories.RepositoriesMetrics.METRIC_UNSUCCESSFUL_OPERATIONS_TOTAL; +import static org.elasticsearch.repositories.s3.S3RepositoriesMetrics.METRIC_DELETE_RETRIES_HISTOGRAM; import static org.elasticsearch.rest.RestStatus.INTERNAL_SERVER_ERROR; import static org.elasticsearch.rest.RestStatus.NOT_FOUND; import static org.elasticsearch.rest.RestStatus.REQUESTED_RANGE_NOT_SATISFIED; +import static org.elasticsearch.rest.RestStatus.SERVICE_UNAVAILABLE; import static org.elasticsearch.rest.RestStatus.TOO_MANY_REQUESTS; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -61,14 +67,22 @@ @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST) public class S3BlobStoreRepositoryMetricsTests extends S3BlobStoreRepositoryTests { - private final Queue errorStatusQueue = new LinkedBlockingQueue<>(); + private static final S3ErrorResponse S3_SLOW_DOWN_RESPONSE = new S3ErrorResponse(SERVICE_UNAVAILABLE, """ + + + SlowDown + This is a throttling message + /bucket/ + 4442587FB7D0A2F9 + """); + private final Queue errorResponseQueue = new LinkedBlockingQueue<>(); // Always create erroneous handler @Override protected Map createHttpHandlers() { return Collections.singletonMap( "/bucket", - new S3StatsCollectorHttpHandler(new S3MetricErroneousHttpHandler(new S3BlobStoreHttpHandler("bucket"), errorStatusQueue)) + new S3StatsCollectorHttpHandler(new S3MetricErroneousHttpHandler(new S3BlobStoreHttpHandler("bucket"), errorResponseQueue)) ); } @@ -244,8 +258,74 @@ public void testMetricsForRequestRangeNotSatisfied() { } } + public void testRetrySnapshotDeleteMetricsOnEventualSuccess() throws IOException { + final int maxRetries = 5; + final String repositoryName = randomRepositoryName(); + // Disable retries in the client for this repo + createRepository( + repositoryName, + Settings.builder() + .put(repositorySettings(repositoryName)) + 
.put(S3ClientSettings.MAX_RETRIES_SETTING.getConcreteSettingForNamespace("placeholder").getKey(), 0) + .put(S3Repository.RETRY_THROTTLED_DELETE_DELAY_INCREMENT.getKey(), TimeValue.timeValueMillis(10)) + .put(S3Repository.RETRY_THROTTLED_DELETE_MAX_NUMBER_OF_RETRIES.getKey(), maxRetries) + .build(), + false + ); + final String dataNodeName = internalCluster().getNodeNameThat(DiscoveryNode::canContainData); + final BlobContainer blobContainer = getBlobContainer(dataNodeName, repositoryName); + final TestTelemetryPlugin plugin = getPlugin(dataNodeName); + final int numberOfDeletes = randomIntBetween(1, 3); + final List numberOfRetriesPerAttempt = new ArrayList<>(); + for (int i = 0; i < numberOfDeletes; i++) { + int numFailures = randomIntBetween(1, maxRetries); + numberOfRetriesPerAttempt.add((long) numFailures); + IntStream.range(0, numFailures).forEach(ignored -> addErrorStatus(S3_SLOW_DOWN_RESPONSE)); + blobContainer.deleteBlobsIgnoringIfNotExists( + randomFrom(OperationPurpose.SNAPSHOT_DATA, OperationPurpose.SNAPSHOT_METADATA), + List.of(randomIdentifier()).iterator() + ); + } + List longHistogramMeasurement = plugin.getLongHistogramMeasurement(METRIC_DELETE_RETRIES_HISTOGRAM); + assertThat(longHistogramMeasurement.stream().map(Measurement::getLong).toList(), equalTo(numberOfRetriesPerAttempt)); + } + + public void testRetrySnapshotDeleteMetricsWhenRetriesExhausted() { + final String repositoryName = randomRepositoryName(); + // Disable retries in the client for this repo + int maxRetries = 3; + createRepository( + repositoryName, + Settings.builder() + .put(repositorySettings(repositoryName)) + .put(S3ClientSettings.MAX_RETRIES_SETTING.getConcreteSettingForNamespace("placeholder").getKey(), 0) + .put(S3Repository.RETRY_THROTTLED_DELETE_DELAY_INCREMENT.getKey(), TimeValue.timeValueMillis(10)) + .put(S3Repository.RETRY_THROTTLED_DELETE_MAX_NUMBER_OF_RETRIES.getKey(), maxRetries) + .build(), + false + ); + final String dataNodeName = 
internalCluster().getNodeNameThat(DiscoveryNode::canContainData); + final BlobContainer blobContainer = getBlobContainer(dataNodeName, repositoryName); + final TestTelemetryPlugin plugin = getPlugin(dataNodeName); + // Keep throttling past the max number of retries + IntStream.range(0, maxRetries + 1).forEach(ignored -> addErrorStatus(S3_SLOW_DOWN_RESPONSE)); + assertThrows( + IOException.class, + () -> blobContainer.deleteBlobsIgnoringIfNotExists( + randomFrom(OperationPurpose.SNAPSHOT_DATA, OperationPurpose.SNAPSHOT_METADATA), + List.of(randomIdentifier()).iterator() + ) + ); + List longHistogramMeasurement = plugin.getLongHistogramMeasurement(METRIC_DELETE_RETRIES_HISTOGRAM); + assertThat(longHistogramMeasurement.get(0).getLong(), equalTo(3L)); + } + private void addErrorStatus(RestStatus... statuses) { - errorStatusQueue.addAll(Arrays.asList(statuses)); + errorResponseQueue.addAll(Arrays.stream(statuses).map(S3ErrorResponse::new).toList()); + } + + private void addErrorStatus(S3ErrorResponse... 
responses) { + errorResponseQueue.addAll(Arrays.asList(responses)); } private long getLongCounterValue(TestTelemetryPlugin plugin, String instrumentName, Operation operation) { @@ -275,25 +355,25 @@ private long getLongHistogramValue(TestTelemetryPlugin plugin, String instrument private static class S3MetricErroneousHttpHandler implements DelegatingHttpHandler { private final HttpHandler delegate; - private final Queue errorStatusQueue; + private final Queue errorResponseQueue; - S3MetricErroneousHttpHandler(HttpHandler delegate, Queue errorStatusQueue) { + S3MetricErroneousHttpHandler(HttpHandler delegate, Queue errorResponseQueue) { this.delegate = delegate; - this.errorStatusQueue = errorStatusQueue; + this.errorResponseQueue = errorResponseQueue; } @Override public void handle(HttpExchange exchange) throws IOException { - final RestStatus status = errorStatusQueue.poll(); - if (status == null) { + final S3ErrorResponse errorResponse = errorResponseQueue.poll(); + if (errorResponse == null) { delegate.handle(exchange); - } else if (status == INTERNAL_SERVER_ERROR) { + } else if (errorResponse.status == INTERNAL_SERVER_ERROR) { // Simulate an retryable exception throw new IOException("ouch"); } else { try (exchange) { drainInputStream(exchange.getRequestBody()); - exchange.sendResponseHeaders(status.getStatus(), -1); + errorResponse.writeResponse(exchange); } } } @@ -302,4 +382,22 @@ public HttpHandler getDelegate() { return delegate; } } + + record S3ErrorResponse(RestStatus status, String responseBody) { + + S3ErrorResponse(RestStatus status) { + this(status, null); + } + + @SuppressForbidden(reason = "this test uses a HttpServer to emulate an S3 endpoint") + public void writeResponse(HttpExchange exchange) throws IOException { + if (responseBody != null) { + byte[] responseBytes = responseBody.getBytes(StandardCharsets.UTF_8); + exchange.sendResponseHeaders(status.getStatus(), responseBytes.length); + exchange.getResponseBody().write(responseBytes); + } else { 
+ exchange.sendResponseHeaders(status.getStatus(), -1); + } + } + } } diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java index 3e6b7c356cb11..e2efc926f7e3a 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java @@ -14,6 +14,7 @@ import com.amazonaws.Request; import com.amazonaws.Response; import com.amazonaws.metrics.RequestMetricCollector; +import com.amazonaws.retry.RetryUtils; import com.amazonaws.services.s3.model.CannedAccessControlList; import com.amazonaws.services.s3.model.DeleteObjectsRequest; import com.amazonaws.services.s3.model.MultiObjectDeleteException; @@ -25,6 +26,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.cluster.metadata.RepositoryMetadata; +import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; @@ -91,6 +93,7 @@ class S3BlobStore implements BlobStore { private final StatsCollectors statsCollectors = new StatsCollectors(); private final int bulkDeletionBatchSize; + private final BackoffPolicy retryThrottledDeleteBackoffPolicy; S3BlobStore( S3Service service, @@ -102,7 +105,8 @@ class S3BlobStore implements BlobStore { RepositoryMetadata repositoryMetadata, BigArrays bigArrays, ThreadPool threadPool, - S3RepositoriesMetrics s3RepositoriesMetrics + S3RepositoriesMetrics s3RepositoriesMetrics, + BackoffPolicy retryThrottledDeleteBackoffPolicy ) { this.service = service; this.bigArrays = bigArrays; @@ -116,7 +120,7 @@ class S3BlobStore implements BlobStore { this.snapshotExecutor = threadPool.executor(ThreadPool.Names.SNAPSHOT); this.s3RepositoriesMetrics = 
s3RepositoriesMetrics; this.bulkDeletionBatchSize = S3Repository.DELETION_BATCH_SIZE_SETTING.get(repositoryMetadata.settings()); - + this.retryThrottledDeleteBackoffPolicy = retryThrottledDeleteBackoffPolicy; } RequestMetricCollector getMetricCollector(Operation operation, OperationPurpose purpose) { @@ -255,7 +259,8 @@ private boolean assertConsistencyBetweenHttpRequestAndOperation(Request reque private static long getCountForMetric(TimingInfo info, AWSRequestMetrics.Field field) { var count = info.getCounter(field.name()); if (count == null) { - if (field == AWSRequestMetrics.Field.RequestCount) { + // This can be null if the thread was interrupted + if (field == AWSRequestMetrics.Field.RequestCount && Thread.currentThread().isInterrupted() == false) { final String message = "Expected request count to be tracked but found not count."; assert false : message; logger.warn(message); @@ -331,18 +336,18 @@ public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator partition = new ArrayList<>(); - try (AmazonS3Reference clientReference = clientReference()) { + try { // S3 API only allows 1k blobs per delete so we split up the given blobs into requests of max. 
1k deletes final AtomicReference aex = new AtomicReference<>(); blobNames.forEachRemaining(key -> { partition.add(key); if (partition.size() == bulkDeletionBatchSize) { - deletePartition(purpose, clientReference, partition, aex); + deletePartition(purpose, partition, aex); partition.clear(); } }); if (partition.isEmpty() == false) { - deletePartition(purpose, clientReference, partition, aex); + deletePartition(purpose, partition, aex); } if (aex.get() != null) { throw aex.get(); @@ -352,30 +357,84 @@ public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator partition, - AtomicReference aex - ) { - try { - SocketAccess.doPrivilegedVoid(() -> clientReference.client().deleteObjects(bulkDelete(purpose, this, partition))); - } catch (MultiObjectDeleteException e) { - // We are sending quiet mode requests so we can't use the deleted keys entry on the exception and instead - // first remove all keys that were sent in the request and then add back those that ran into an exception. + /** + * Delete one partition of a batch of blobs + * + * @param purpose The {@link OperationPurpose} of the deletion + * @param partition The list of blobs to delete + * @param aex A holder for any exception(s) thrown during the deletion + */ + private void deletePartition(OperationPurpose purpose, List partition, AtomicReference aex) { + final Iterator retries = retryThrottledDeleteBackoffPolicy.iterator(); + int retryCounter = 0; + while (true) { + try (AmazonS3Reference clientReference = clientReference()) { + SocketAccess.doPrivilegedVoid(() -> clientReference.client().deleteObjects(bulkDelete(purpose, this, partition))); + s3RepositoriesMetrics.retryDeletesHistogram().record(retryCounter); + return; + } catch (MultiObjectDeleteException e) { + // We are sending quiet mode requests so we can't use the deleted keys entry on the exception and instead + // first remove all keys that were sent in the request and then add back those that ran into an exception. 
+ logger.warn( + () -> format( + "Failed to delete some blobs %s", + e.getErrors() + .stream() + .map(err -> "[" + err.getKey() + "][" + err.getCode() + "][" + err.getMessage() + "]") + .toList() + ), + e + ); + aex.set(ExceptionsHelper.useOrSuppress(aex.get(), e)); + return; + } catch (AmazonClientException e) { + if (shouldRetryDelete(purpose) && RetryUtils.isThrottlingException(e)) { + // S3 is asking us to slow down. Pause for a bit and retry + if (maybeDelayAndRetryDelete(retries)) { + retryCounter++; + } else { + s3RepositoriesMetrics.retryDeletesHistogram().record(retryCounter); + aex.set(ExceptionsHelper.useOrSuppress(aex.get(), e)); + return; + } + } else { + // The AWS client threw any unexpected exception and did not execute the request at all so we do not + // remove any keys from the outstanding deletes set. + aex.set(ExceptionsHelper.useOrSuppress(aex.get(), e)); + return; + } + } + } + } + + /** + * If there are remaining retries, pause for the configured interval then return true + * + * @param retries The retries iterator + * @return true to try the deletion again, false otherwise + */ + private boolean maybeDelayAndRetryDelete(Iterator retries) { + if (retries.hasNext()) { + try { + Thread.sleep(retries.next().millis()); + return true; + } catch (InterruptedException iex) { + Thread.currentThread().interrupt(); + // If we're interrupted, record the exception and abort retries + logger.warn("Aborting tenacious snapshot delete retries due to interrupt"); + } + } else { logger.warn( - () -> format( - "Failed to delete some blobs %s", - e.getErrors().stream().map(err -> "[" + err.getKey() + "][" + err.getCode() + "][" + err.getMessage() + "]").toList() - ), - e + "Exceeded maximum tenacious snapshot delete retries, aborting. 
Using back-off policy " + + retryThrottledDeleteBackoffPolicy + + ", see the throttled_delete_retry.* S3 repository properties to configure the back-off parameters" ); - aex.set(ExceptionsHelper.useOrSuppress(aex.get(), e)); - } catch (AmazonClientException e) { - // The AWS client threw any unexpected exception and did not execute the request at all so we do not - // remove any keys from the outstanding deletes set. - aex.set(ExceptionsHelper.useOrSuppress(aex.get(), e)); } + return false; + } + + private boolean shouldRetryDelete(OperationPurpose operationPurpose) { + return operationPurpose == OperationPurpose.SNAPSHOT_DATA || operationPurpose == OperationPurpose.SNAPSHOT_METADATA; } private static DeleteObjectsRequest bulkDelete(OperationPurpose purpose, S3BlobStore blobStore, List blobs) { diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoriesMetrics.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoriesMetrics.java index 74682ca190a0c..03106c26c9a29 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoriesMetrics.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoriesMetrics.java @@ -17,7 +17,8 @@ public record S3RepositoriesMetrics( RepositoriesMetrics common, LongCounter retryStartedCounter, LongCounter retryCompletedCounter, - LongHistogram retryHistogram + LongHistogram retryHistogram, + LongHistogram retryDeletesHistogram ) { public static S3RepositoriesMetrics NOOP = new S3RepositoriesMetrics(RepositoriesMetrics.NOOP); @@ -25,6 +26,7 @@ public record S3RepositoriesMetrics( public static final String METRIC_RETRY_EVENT_TOTAL = "es.repositories.s3.input_stream.retry.event.total"; public static final String METRIC_RETRY_SUCCESS_TOTAL = "es.repositories.s3.input_stream.retry.success.total"; public static final String METRIC_RETRY_ATTEMPTS_HISTOGRAM = 
"es.repositories.s3.input_stream.retry.attempts.histogram"; + public static final String METRIC_DELETE_RETRIES_HISTOGRAM = "es.repositories.s3.delete.retry.attempts.histogram"; public S3RepositoriesMetrics(RepositoriesMetrics common) { this( @@ -32,7 +34,8 @@ public S3RepositoriesMetrics(RepositoriesMetrics common) { common.meterRegistry().registerLongCounter(METRIC_RETRY_EVENT_TOTAL, "s3 input stream retry event count", "unit"), common.meterRegistry().registerLongCounter(METRIC_RETRY_SUCCESS_TOTAL, "s3 input stream retry success count", "unit"), common.meterRegistry() - .registerLongHistogram(METRIC_RETRY_ATTEMPTS_HISTOGRAM, "s3 input stream retry attempts histogram", "unit") + .registerLongHistogram(METRIC_RETRY_ATTEMPTS_HISTOGRAM, "s3 input stream retry attempts histogram", "unit"), + common.meterRegistry().registerLongHistogram(METRIC_DELETE_RETRIES_HISTOGRAM, "s3 delete retry attempts histogram", "unit") ); } } diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index af385eeac6a5b..0750f6ab59d57 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.support.RefCountingRunnable; import org.elasticsearch.cluster.metadata.RepositoryMetadata; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.ReferenceDocs; import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobPath; @@ -202,6 +203,26 @@ class S3Repository extends MeteredBlobStoreRepository { Setting.Property.Dynamic ); + /** + * We will retry deletes that fail due to throttling. 
We use an {@link BackoffPolicy#linearBackoff(TimeValue, int, TimeValue)} + * with the following parameters + */ + static final Setting RETRY_THROTTLED_DELETE_DELAY_INCREMENT = Setting.timeSetting( + "throttled_delete_retry.delay_increment", + TimeValue.timeValueMillis(50), + TimeValue.ZERO + ); + static final Setting RETRY_THROTTLED_DELETE_MAXIMUM_DELAY = Setting.timeSetting( + "throttled_delete_retry.maximum_delay", + TimeValue.timeValueSeconds(5), + TimeValue.ZERO + ); + static final Setting RETRY_THROTTLED_DELETE_MAX_NUMBER_OF_RETRIES = Setting.intSetting( + "throttled_delete_retry.maximum_number_of_retries", + 10, + 0 + ); + private final S3Service service; private final String bucket; @@ -424,7 +445,12 @@ protected S3BlobStore createBlobStore() { metadata, bigArrays, threadPool, - s3RepositoriesMetrics + s3RepositoriesMetrics, + BackoffPolicy.linearBackoff( + RETRY_THROTTLED_DELETE_DELAY_INCREMENT.get(metadata.settings()), + RETRY_THROTTLED_DELETE_MAX_NUMBER_OF_RETRIES.get(metadata.settings()), + RETRY_THROTTLED_DELETE_MAXIMUM_DELAY.get(metadata.settings()) + ) ); } diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java index 1443ff704efd1..76d980c222a96 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java @@ -10,10 +10,12 @@ import fixture.s3.S3HttpHandler; +import com.amazonaws.AbortedException; import com.amazonaws.DnsResolver; import com.amazonaws.SdkClientException; import com.amazonaws.services.s3.AmazonS3ClientBuilder; import com.amazonaws.services.s3.internal.MD5DigestCalculatingInputStream; +import com.amazonaws.services.s3.model.AmazonS3Exception; import com.amazonaws.util.Base16; import 
com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; @@ -21,6 +23,8 @@ import org.apache.http.HttpStatus; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.cluster.metadata.RepositoryMetadata; +import org.elasticsearch.common.BackoffPolicy; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.OperationPurpose; @@ -62,15 +66,18 @@ import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; import java.nio.file.NoSuchFileException; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.OptionalInt; +import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; +import java.util.function.IntConsumer; import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomNonDataPurpose; import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; @@ -98,6 +105,7 @@ @SuppressForbidden(reason = "use a http server") public class S3BlobContainerRetriesTests extends AbstractBlobContainerRetriesTestCase { + private static final int MAX_NUMBER_SNAPSHOT_DELETE_RETRIES = 10; private S3Service service; private AtomicBoolean shouldErrorOnDns; private RecordingMeterRegistry recordingMeterRegistry; @@ -196,7 +204,8 @@ protected BlobContainer createBlobContainer( repositoryMetadata, BigArrays.NON_RECYCLING_INSTANCE, new DeterministicTaskQueue().getThreadPool(), - new S3RepositoriesMetrics(new RepositoriesMetrics(recordingMeterRegistry)) + new S3RepositoriesMetrics(new RepositoriesMetrics(recordingMeterRegistry)), + BackoffPolicy.constantBackoff(TimeValue.timeValueMillis(1), MAX_NUMBER_SNAPSHOT_DELETE_RETRIES) ); return new 
S3BlobContainer(randomBoolean() ? BlobPath.EMPTY : BlobPath.EMPTY.add("foo"), s3BlobStore) { @Override @@ -771,6 +780,171 @@ public void handle(HttpExchange exchange) throws IOException { assertThat(getRetryHistogramMeasurements(), empty()); } + public void testSnapshotDeletesRetryOnThrottlingError() throws IOException { + // disable AWS-client retries + final BlobContainer blobContainer = createBlobContainer(0, null, true, null); + + int numBlobsToDelete = randomIntBetween(500, 3000); + List blobsToDelete = new ArrayList<>(); + for (int i = 0; i < numBlobsToDelete; i++) { + blobsToDelete.add(randomIdentifier()); + } + int throttleTimesBeforeSuccess = randomIntBetween(1, MAX_NUMBER_SNAPSHOT_DELETE_RETRIES); + logger.info("--> Throttling {} times before success", throttleTimesBeforeSuccess); + ThrottlingDeleteHandler handler = new ThrottlingDeleteHandler(throttleTimesBeforeSuccess, attempt -> {}); + httpServer.createContext("/", handler); + blobContainer.deleteBlobsIgnoringIfNotExists(randomFrom(operationPurposesThatRetryOnDelete()), blobsToDelete.iterator()); + + int expectedNumberOfBatches = expectedNumberOfBatches(numBlobsToDelete); + assertThat(handler.numberOfDeleteAttempts.get(), equalTo(throttleTimesBeforeSuccess + expectedNumberOfBatches)); + assertThat(handler.numberOfSuccessfulDeletes.get(), equalTo(expectedNumberOfBatches)); + } + + public void testSnapshotDeletesAbortRetriesWhenThreadIsInterrupted() { + // disable AWS-client retries + final BlobContainer blobContainer = createBlobContainer(0, null, true, null); + + int numBlobsToDelete = randomIntBetween(500, 3000); + List blobsToDelete = new ArrayList<>(); + for (int i = 0; i < numBlobsToDelete; i++) { + blobsToDelete.add(randomIdentifier()); + } + + final Thread clientThread = Thread.currentThread(); + int interruptBeforeAttempt = randomIntBetween(0, randomIntBetween(1, 10)); + logger.info("--> Deleting {} blobs, interrupting before attempt {}", numBlobsToDelete, interruptBeforeAttempt); + 
ThrottlingDeleteHandler handler = new ThrottlingDeleteHandler(Integer.MAX_VALUE, attempt -> { + if (attempt == interruptBeforeAttempt) { + clientThread.interrupt(); + } + }); + httpServer.createContext("/", handler); + + try { + IOException exception = assertThrows( + IOException.class, + () -> blobContainer.deleteBlobsIgnoringIfNotExists( + randomFrom(operationPurposesThatRetryOnDelete()), + blobsToDelete.iterator() + ) + ); + assertThat(exception.getCause(), instanceOf(AbortedException.class)); + assertThat(handler.numberOfDeleteAttempts.get(), equalTo(interruptBeforeAttempt + 1)); + assertThat(handler.numberOfSuccessfulDeletes.get(), equalTo(0)); + } finally { + // interrupt should be preserved, clear it to prevent it leaking between tests + assertTrue(Thread.interrupted()); + } + } + + public void testNonSnapshotDeletesAreNotRetried() { + // disable AWS-client retries + final BlobContainer blobContainer = createBlobContainer(0, null, true, null); + + int numBlobsToDelete = randomIntBetween(500, 3000); + List blobsToDelete = new ArrayList<>(); + for (int i = 0; i < numBlobsToDelete; i++) { + blobsToDelete.add(randomIdentifier()); + } + ThrottlingDeleteHandler handler = new ThrottlingDeleteHandler(Integer.MAX_VALUE, attempt -> {}); + httpServer.createContext("/", handler); + IOException exception = assertThrows( + IOException.class, + () -> blobContainer.deleteBlobsIgnoringIfNotExists( + randomValueOtherThanMany( + op -> operationPurposesThatRetryOnDelete().contains(op), + () -> randomFrom(OperationPurpose.values()) + ), + blobsToDelete.iterator() + ) + ); + assertEquals( + ThrottlingDeleteHandler.THROTTLING_ERROR_CODE, + asInstanceOf(AmazonS3Exception.class, exception.getCause()).getErrorCode() + ); + assertThat(handler.numberOfDeleteAttempts.get(), equalTo(expectedNumberOfBatches(numBlobsToDelete))); + assertThat(handler.numberOfSuccessfulDeletes.get(), equalTo(0)); + } + + public void testNonThrottlingErrorsAreNotRetried() { + // disable AWS-client retries + 
final BlobContainer blobContainer = createBlobContainer(0, null, true, null); + + int numBlobsToDelete = randomIntBetween(500, 3000); + List blobsToDelete = new ArrayList<>(); + for (int i = 0; i < numBlobsToDelete; i++) { + blobsToDelete.add(randomIdentifier()); + } + ThrottlingDeleteHandler handler = new ThrottlingDeleteHandler(Integer.MAX_VALUE, attempt -> {}, "NotThrottling"); + httpServer.createContext("/", handler); + assertThrows( + IOException.class, + () -> blobContainer.deleteBlobsIgnoringIfNotExists(randomFrom(operationPurposesThatRetryOnDelete()), blobsToDelete.iterator()) + ); + assertThat(handler.numberOfDeleteAttempts.get(), equalTo(expectedNumberOfBatches(numBlobsToDelete))); + assertThat(handler.numberOfSuccessfulDeletes.get(), equalTo(0)); + } + + private int expectedNumberOfBatches(int blobsToDelete) { + return (blobsToDelete / 1_000) + (blobsToDelete % 1_000 == 0 ? 0 : 1); + } + + @SuppressForbidden(reason = "use a http server") + private class ThrottlingDeleteHandler extends S3HttpHandler { + + private static final String THROTTLING_ERROR_CODE = "SlowDown"; + + private final AtomicInteger throttleTimesBeforeSuccess; + private final AtomicInteger numberOfDeleteAttempts; + private final AtomicInteger numberOfSuccessfulDeletes; + private final IntConsumer onAttemptCallback; + private final String errorCode; + + ThrottlingDeleteHandler(int throttleTimesBeforeSuccess, IntConsumer onAttemptCallback) { + this(throttleTimesBeforeSuccess, onAttemptCallback, THROTTLING_ERROR_CODE); + } + + ThrottlingDeleteHandler(int throttleTimesBeforeSuccess, IntConsumer onAttemptCallback, String errorCode) { + super("bucket"); + this.numberOfDeleteAttempts = new AtomicInteger(); + this.numberOfSuccessfulDeletes = new AtomicInteger(); + this.throttleTimesBeforeSuccess = new AtomicInteger(throttleTimesBeforeSuccess); + this.onAttemptCallback = onAttemptCallback; + this.errorCode = errorCode; + } + + @Override + public void handle(HttpExchange exchange) throws 
IOException { + if (exchange.getRequestMethod().equals("POST") && exchange.getRequestURI().toString().startsWith("/bucket/?delete")) { + onAttemptCallback.accept(numberOfDeleteAttempts.get()); + numberOfDeleteAttempts.incrementAndGet(); + if (throttleTimesBeforeSuccess.getAndDecrement() > 0) { + final byte[] responseBytes = Strings.format(""" + + + %s + This is a throttling message + /bucket/ + 4442587FB7D0A2F9 + """, errorCode).getBytes(StandardCharsets.UTF_8); + + exchange.sendResponseHeaders(HttpStatus.SC_SERVICE_UNAVAILABLE, responseBytes.length); + exchange.getResponseBody().write(responseBytes); + exchange.close(); + } else { + numberOfSuccessfulDeletes.incrementAndGet(); + super.handle(exchange); + } + } else { + super.handle(exchange); + } + } + } + + private Set operationPurposesThatRetryOnDelete() { + return Set.of(OperationPurpose.SNAPSHOT_DATA, OperationPurpose.SNAPSHOT_METADATA); + } + @Override protected Matcher getMaxRetriesMatcher(int maxRetries) { // some attempts make meaningful progress and do not count towards the max retry limit diff --git a/server/src/main/java/org/elasticsearch/common/BackoffPolicy.java b/server/src/main/java/org/elasticsearch/common/BackoffPolicy.java index 27d98f9ade203..cacad64ab1d4f 100644 --- a/server/src/main/java/org/elasticsearch/common/BackoffPolicy.java +++ b/server/src/main/java/org/elasticsearch/common/BackoffPolicy.java @@ -8,6 +8,7 @@ */ package org.elasticsearch.common; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import java.util.Collections; @@ -81,6 +82,18 @@ public static BackoffPolicy exponentialBackoff(TimeValue initialDelay, int maxNu return new ExponentialBackoff((int) checkDelay(initialDelay).millis(), maxNumberOfRetries); } + /** + * Creates a new linear backoff policy with the provided configuration + * + * @param delayIncrement The amount by which to increment the delay on each retry + * @param maxNumberOfRetries The maximum number of retries + * @param 
maximumDelay The maximum delay + * @return A backoff policy with linear increase in wait time for retries. + */ + public static BackoffPolicy linearBackoff(TimeValue delayIncrement, int maxNumberOfRetries, TimeValue maximumDelay) { + return new LinearBackoff(delayIncrement, maxNumberOfRetries, maximumDelay); + } + /** * Wraps the backoff policy in one that calls a method every time a new backoff is taken from the policy. */ @@ -100,6 +113,11 @@ private static class NoBackoff extends BackoffPolicy { public Iterator iterator() { return Collections.emptyIterator(); } + + @Override + public String toString() { + return "NoBackoff"; + } } private static class ExponentialBackoff extends BackoffPolicy { @@ -118,6 +136,11 @@ private ExponentialBackoff(int start, int numberOfElements) { public Iterator iterator() { return new ExponentialBackoffIterator(start, numberOfElements); } + + @Override + public String toString() { + return "ExponentialBackoff{start=" + start + ", numberOfElements=" + numberOfElements + '}'; + } } private static class ExponentialBackoffIterator implements Iterator { @@ -163,6 +186,11 @@ private static final class ConstantBackoff extends BackoffPolicy { public Iterator iterator() { return new ConstantBackoffIterator(delay, numberOfElements); } + + @Override + public String toString() { + return "ConstantBackoff{delay=" + delay + ", numberOfElements=" + numberOfElements + '}'; + } } private static final class ConstantBackoffIterator implements Iterator { @@ -203,6 +231,11 @@ private static final class WrappedBackoffPolicy extends BackoffPolicy { public Iterator iterator() { return new WrappedBackoffIterator(delegate.iterator(), onBackoff); } + + @Override + public String toString() { + return "WrappedBackoffPolicy{delegate=" + delegate + ", onBackoff=" + onBackoff + '}'; + } } private static final class WrappedBackoffIterator implements Iterator { @@ -228,4 +261,60 @@ public TimeValue next() { return delegate.next(); } } + + private static final class 
LinearBackoff extends BackoffPolicy { + + private final TimeValue delayIncrement; + private final int maxNumberOfRetries; + private final TimeValue maximumDelay; + + private LinearBackoff(TimeValue delayIncrement, int maxNumberOfRetries, @Nullable TimeValue maximumDelay) { + this.delayIncrement = delayIncrement; + this.maxNumberOfRetries = maxNumberOfRetries; + this.maximumDelay = maximumDelay; + } + + @Override + public Iterator iterator() { + return new LinearBackoffIterator(delayIncrement, maxNumberOfRetries, maximumDelay); + } + + @Override + public String toString() { + return "LinearBackoff{" + + "delayIncrement=" + + delayIncrement + + ", maxNumberOfRetries=" + + maxNumberOfRetries + + ", maximumDelay=" + + maximumDelay + + '}'; + } + } + + private static final class LinearBackoffIterator implements Iterator { + + private final TimeValue delayIncrement; + private final int maxNumberOfRetries; + private final TimeValue maximumDelay; + private int curr; + + private LinearBackoffIterator(TimeValue delayIncrement, int maxNumberOfRetries, @Nullable TimeValue maximumDelay) { + this.delayIncrement = delayIncrement; + this.maxNumberOfRetries = maxNumberOfRetries; + this.maximumDelay = maximumDelay; + } + + @Override + public boolean hasNext() { + return curr < maxNumberOfRetries; + } + + @Override + public TimeValue next() { + curr++; + TimeValue timeValue = TimeValue.timeValueMillis(curr * delayIncrement.millis()); + return maximumDelay == null ? timeValue : timeValue.compareTo(maximumDelay) < 0 ? 
timeValue : maximumDelay; + } + } } diff --git a/server/src/test/java/org/elasticsearch/common/BackoffPolicyTests.java b/server/src/test/java/org/elasticsearch/common/BackoffPolicyTests.java index 0cbbcdc0f1674..9ffd05a5336d6 100644 --- a/server/src/test/java/org/elasticsearch/common/BackoffPolicyTests.java +++ b/server/src/test/java/org/elasticsearch/common/BackoffPolicyTests.java @@ -77,6 +77,37 @@ public void testExponentialBackOff() { } } + public void testLinearBackoffWithLimit() { + long incrementMillis = randomIntBetween(10, 500); + long limitMillis = randomIntBetween(1000, 5000); + int maxNumberOfRetries = randomIntBetween(0, 30); + BackoffPolicy timeValues = BackoffPolicy.linearBackoff( + timeValueMillis(incrementMillis), + maxNumberOfRetries, + timeValueMillis(limitMillis) + ); + int counter = 0; + for (TimeValue timeValue : timeValues) { + counter++; + long unlimitedValue = counter * incrementMillis; + long expectedValue = Math.min(unlimitedValue, limitMillis); + assertEquals(timeValueMillis(expectedValue), timeValue); + } + assertEquals(counter, maxNumberOfRetries); + } + + public void testLinearBackoffWithoutLimit() { + long incrementMillis = randomIntBetween(10, 500); + int maxNumberOfRetries = randomIntBetween(0, 30); + BackoffPolicy timeValues = BackoffPolicy.linearBackoff(timeValueMillis(incrementMillis), maxNumberOfRetries, null); + int counter = 0; + for (TimeValue timeValue : timeValues) { + counter++; + assertEquals(timeValueMillis(counter * incrementMillis), timeValue); + } + assertEquals(counter, maxNumberOfRetries); + } + public void testNoBackoff() { BackoffPolicy noBackoff = BackoffPolicy.noBackoff(); int numberOfBackoffsToPerform = randomIntBetween(1, 3); From c401a71426dd5758cae167eef71b90421cdd99ab Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Tue, 15 Oct 2024 15:24:39 -0700 Subject: [PATCH 125/449] Make mapping a distinct concept in logsdb data generation (#114370) --- .../logsdb/qa/DataGenerationHelper.java | 87 ++++---- 
...ardVersusLogsIndexModeChallengeRestIT.java | 2 +- ...ogsIndexModeRandomDataChallengeRestIT.java | 9 +- .../logsdb/datageneration/DataGenerator.java | 75 ------- .../DataGeneratorSpecification.java | 2 +- .../datageneration/DocumentGenerator.java | 98 +++++++++ .../datageneration/FieldDataGenerator.java | 16 +- .../logsdb/datageneration/FieldType.java | 58 +++--- .../logsdb/datageneration/Mapping.java | 19 ++ .../datageneration/MappingGenerator.java | 195 ++++++++++++++++++ .../logsdb/datageneration/Template.java | 27 +++ .../datageneration/TemplateGenerator.java | 76 +++++++ .../datasource/DataSourceHandler.java | 4 + .../datasource/DataSourceRequest.java | 8 +- .../datasource/DataSourceResponse.java | 4 +- .../DefaultObjectGenerationHandler.java | 33 ++- .../logsdb/datageneration/fields/Context.java | 175 ---------------- .../GenericSubObjectFieldDataGenerator.java | 146 ------------- .../fields/NestedFieldDataGenerator.java | 68 ------ .../fields/ObjectFieldDataGenerator.java | 66 ------ .../fields/PredefinedField.java | 30 +-- .../TopLevelObjectFieldDataGenerator.java | 101 --------- .../fields/leaf/ByteFieldDataGenerator.java | 30 +-- .../fields/leaf/DoubleFieldDataGenerator.java | 30 +-- .../fields/leaf/FloatFieldDataGenerator.java | 30 +-- .../leaf/HalfFloatFieldDataGenerator.java | 30 +-- .../leaf/IntegerFieldDataGenerator.java | 30 +-- .../leaf/KeywordFieldDataGenerator.java | 30 +-- .../fields/leaf/LongFieldDataGenerator.java | 30 +-- .../leaf/ScaledFloatFieldDataGenerator.java | 30 +-- .../fields/leaf/ShortFieldDataGenerator.java | 30 +-- .../leaf/UnsignedLongFieldDataGenerator.java | 30 +-- ....java => DataGenerationSnapshotTests.java} | 167 ++++++++------- ...torTests.java => DataGenerationTests.java} | 76 +++---- .../TemplateGeneratorTests.java | 43 ++++ 35 files changed, 728 insertions(+), 1157 deletions(-) delete mode 100644 test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/DataGenerator.java create mode 100644 
test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/DocumentGenerator.java create mode 100644 test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/Mapping.java create mode 100644 test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/MappingGenerator.java create mode 100644 test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/Template.java create mode 100644 test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/TemplateGenerator.java delete mode 100644 test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/Context.java delete mode 100644 test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/GenericSubObjectFieldDataGenerator.java delete mode 100644 test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/NestedFieldDataGenerator.java delete mode 100644 test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/ObjectFieldDataGenerator.java delete mode 100644 test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/TopLevelObjectFieldDataGenerator.java rename test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/{DataGeneratorSnapshotTests.java => DataGenerationSnapshotTests.java} (65%) rename test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/{DataGeneratorTests.java => DataGenerationTests.java} (69%) create mode 100644 test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/TemplateGeneratorTests.java diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/DataGenerationHelper.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/DataGenerationHelper.java index 8b29b1609711f..d07e29c6b6b31 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/DataGenerationHelper.java +++ 
b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/DataGenerationHelper.java @@ -10,23 +10,30 @@ package org.elasticsearch.datastreams.logsdb.qa; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.Mapper; -import org.elasticsearch.logsdb.datageneration.DataGenerator; import org.elasticsearch.logsdb.datageneration.DataGeneratorSpecification; -import org.elasticsearch.logsdb.datageneration.FieldDataGenerator; +import org.elasticsearch.logsdb.datageneration.DocumentGenerator; +import org.elasticsearch.logsdb.datageneration.FieldType; +import org.elasticsearch.logsdb.datageneration.Mapping; +import org.elasticsearch.logsdb.datageneration.MappingGenerator; +import org.elasticsearch.logsdb.datageneration.Template; +import org.elasticsearch.logsdb.datageneration.TemplateGenerator; import org.elasticsearch.logsdb.datageneration.fields.PredefinedField; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.List; +import java.util.Map; import java.util.function.Consumer; public class DataGenerationHelper { private final boolean keepArraySource; - private final DataGenerator dataGenerator; + private final DocumentGenerator documentGenerator; + + private final Template template; + private final Mapping mapping; public DataGenerationHelper() { this(b -> {}); @@ -40,62 +47,47 @@ public DataGenerationHelper(Consumer builder .withPredefinedFields( List.of( // Customized because it always needs doc_values for aggregations. 
- new PredefinedField.WithGenerator("host.name", new FieldDataGenerator() { - @Override - public CheckedConsumer mappingWriter() { - return b -> b.startObject().field("type", "keyword").endObject(); - } - - @Override - public CheckedConsumer fieldValueGenerator() { - return b -> b.value(ESTestCase.randomAlphaOfLength(5)); - } - }), + new PredefinedField.WithGenerator( + "host.name", + FieldType.KEYWORD, + Map.of("type", "keyword"), + () -> ESTestCase.randomAlphaOfLength(5) + ), // Needed for terms query - new PredefinedField.WithGenerator("method", new FieldDataGenerator() { - @Override - public CheckedConsumer mappingWriter() { - return b -> b.startObject().field("type", "keyword").endObject(); - } - - @Override - public CheckedConsumer fieldValueGenerator() { - return b -> b.value(ESTestCase.randomFrom("put", "post", "get")); - } - }), + new PredefinedField.WithGenerator( + "method", + FieldType.KEYWORD, + Map.of("type", "keyword"), + () -> ESTestCase.randomFrom("put", "post", "get") + ), // Needed for histogram aggregation - new PredefinedField.WithGenerator("memory_usage_bytes", new FieldDataGenerator() { - @Override - public CheckedConsumer mappingWriter() { - return b -> b.startObject().field("type", "long").endObject(); - } - - @Override - public CheckedConsumer fieldValueGenerator() { - // We can generate this using standard long field but we would get "too many buckets" - return b -> b.value(ESTestCase.randomLongBetween(1000, 2000)); - } - }) + new PredefinedField.WithGenerator( + "memory_usage_bytes", + FieldType.LONG, + Map.of("type", "long"), + () -> ESTestCase.randomLongBetween(1000, 2000) + ) ) ); // Customize builder if necessary builderConfigurator.accept(specificationBuilder); - this.dataGenerator = new DataGenerator(specificationBuilder.build()); - } + var specification = specificationBuilder.build(); + + this.documentGenerator = new DocumentGenerator(specification); - DataGenerator getDataGenerator() { - return dataGenerator; + this.template = 
new TemplateGenerator(specification).generate(); + this.mapping = new MappingGenerator(specification).generate(template); } void logsDbMapping(XContentBuilder builder) throws IOException { - dataGenerator.writeMapping(builder); + builder.map(mapping.raw()); } void standardMapping(XContentBuilder builder) throws IOException { - dataGenerator.writeMapping(builder); + builder.map(mapping.raw()); } void logsDbSettings(Settings.Builder builder) { @@ -103,4 +95,11 @@ void logsDbSettings(Settings.Builder builder) { builder.put(Mapper.SYNTHETIC_SOURCE_KEEP_INDEX_SETTING.getKey(), "arrays"); } } + + void generateDocument(XContentBuilder document, Map additionalFields) throws IOException { + var generated = documentGenerator.generate(template, mapping); + generated.putAll(additionalFields); + + document.map(generated); + } } diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java index 43efdbdcf8b1c..4c896e1f262b2 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java @@ -53,7 +53,7 @@ * This test uses simple mapping and document structure in order to allow easier debugging of the test itself. */ public class StandardVersusLogsIndexModeChallengeRestIT extends AbstractChallengeRestTest { - private final int numShards = randomBoolean() ? randomIntBetween(2, 5) : 0; + private final int numShards = randomBoolean() ? randomIntBetween(2, 4) : 0; private final int numReplicas = randomBoolean() ? 
randomIntBetween(1, 3) : 0; private final boolean fullyDynamicMapping = randomBoolean(); diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java index 751336cc1f646..6a20626634499 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java @@ -17,6 +17,7 @@ import java.io.IOException; import java.time.Instant; +import java.util.Map; /** * Challenge test (see {@link StandardVersusLogsIndexModeChallengeRestIT}) that uses randomly generated @@ -53,10 +54,10 @@ public void contenderSettings(Settings.Builder builder) { @Override protected XContentBuilder generateDocument(final Instant timestamp) throws IOException { var document = XContentFactory.jsonBuilder(); - dataGenerationHelper.getDataGenerator().generateDocument(document, doc -> { - doc.field("@timestamp", DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(timestamp)); - }); - + dataGenerationHelper.generateDocument( + document, + Map.of("@timestamp", DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(timestamp)) + ); return document; } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/DataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/DataGenerator.java deleted file mode 100644 index cda571c0ef4b5..0000000000000 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/DataGenerator.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.logsdb.datageneration; - -import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.logsdb.datageneration.fields.TopLevelObjectFieldDataGenerator; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Map; - -/** - * Entry point of data generation logic. - * Every instance of generator generates a random mapping and a document generation routine - * that produces randomly generated documents valid for this mapping. - */ -public class DataGenerator { - private final TopLevelObjectFieldDataGenerator topLevelGenerator; - - public DataGenerator(DataGeneratorSpecification specification) { - this.topLevelGenerator = new TopLevelObjectFieldDataGenerator(specification); - } - - /** - * Writes a fully built mapping document (enclosed in a top-level object) to a provided builder. - * @param mapping destination - * @throws IOException - */ - public void writeMapping(XContentBuilder mapping) throws IOException { - mapping.startObject().field("_doc"); - topLevelGenerator.mappingWriter(Map.of()).accept(mapping); - mapping.endObject(); - } - - /** - * Writes a fully built mapping document (enclosed in a top-level object) to a provided builder. - * Allows customizing parameters of top level object mapper. 
- * @param mapping destination - * @param customMappingParameters writer of custom mapping parameters of top level object mapping - * @throws IOException - */ - public void writeMapping(XContentBuilder mapping, Map customMappingParameters) throws IOException { - mapping.startObject().field("_doc"); - topLevelGenerator.mappingWriter(customMappingParameters).accept(mapping); - mapping.endObject(); - } - - /** - * Generates a document and writes it to a provided builder. New document is generated every time. - * @param document - * @throws IOException - */ - public void generateDocument(XContentBuilder document) throws IOException { - topLevelGenerator.fieldValueGenerator(b -> {}).accept(document); - } - - /** - * Generates a document and writes it to a provided builder. New document is generated every time. - * Supports appending custom content to generated document (e.g. a custom generated field). - * @param document - * @param customDocumentModifications - * @throws IOException - */ - public void generateDocument(XContentBuilder document, CheckedConsumer customDocumentModifications) - throws IOException { - topLevelGenerator.fieldValueGenerator(customDocumentModifications).accept(document); - } -} diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/DataGeneratorSpecification.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/DataGeneratorSpecification.java index 97eb8b91a6e64..f6ff7c521afcc 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/DataGeneratorSpecification.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/DataGeneratorSpecification.java @@ -18,7 +18,7 @@ import java.util.List; /** - * Allows configuring behavior of {@link DataGenerator}. + * Allows configuring behavior of data generation components. * @param dataSource source of generated data * @param maxFieldCountPerLevel maximum number of fields that an individual object in mapping has. 
* Applies to subobjects. diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/DocumentGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/DocumentGenerator.java new file mode 100644 index 0000000000000..9b2878ff7bfc8 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/DocumentGenerator.java @@ -0,0 +1,98 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.logsdb.datageneration; + +import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; +import org.elasticsearch.logsdb.datageneration.datasource.DataSourceResponse; + +import java.util.ArrayList; +import java.util.Map; +import java.util.Optional; +import java.util.TreeMap; + +/** + * Generator that generates a valid random document that follows the structure of provided {@link Template}. + */ +public class DocumentGenerator { + private final DataGeneratorSpecification specification; + + private final DataSourceResponse.ObjectArrayGenerator objectArrayGenerator; + + public DocumentGenerator(DataGeneratorSpecification specification) { + this.specification = specification; + + this.objectArrayGenerator = specification.dataSource().get(new DataSourceRequest.ObjectArrayGenerator()); + } + + /** + * Generates a valid random document following the provided template. 
+ * @param template template for the document + * @param mapping generated mapping that will be applied to the destination index of this document + * @return document as a map where subobjects are represented as nested maps + */ + public Map generate(Template template, Mapping mapping) { + var documentMap = new TreeMap(); + for (var predefinedField : specification.predefinedFields()) { + documentMap.put(predefinedField.name(), predefinedField.generator(specification.dataSource()).generateValue()); + } + + generateFields(documentMap, template.template(), new Context("", mapping.lookup())); + return documentMap; + } + + private void generateFields(Map document, Map template, Context context) { + for (var entry : template.entrySet()) { + String fieldName = entry.getKey(); + Template.Entry templateEntry = entry.getValue(); + + if (templateEntry instanceof Template.Leaf leaf) { + // Unsigned long does not play well when dynamically mapped because + // it gets mapped as just long and large values fail to index. + // Just skip it. 
+ if (leaf.type() == FieldType.UNSIGNED_LONG && context.mappingLookup().get(context.pathTo(fieldName)) == null) { + continue; + } + + var generator = leaf.type().generator(fieldName, specification.dataSource()); + + document.put(fieldName, generator.generateValue()); + } else if (templateEntry instanceof Template.Object object) { + Optional arrayLength = objectArrayGenerator.lengthGenerator().get(); + + if (arrayLength.isPresent()) { + var children = new ArrayList<>(arrayLength.get()); + document.put(object.name(), children); + + for (int i = 0; i < arrayLength.get(); i++) { + children.add(generateObject(object, context)); + } + } else { + document.put(object.name(), generateObject(object, context)); + } + } + } + } + + private Map generateObject(Template.Object object, Context context) { + var children = new TreeMap(); + generateFields(children, object.children(), context.stepIntoObject(object.name())); + return children; + } + + record Context(String path, Map> mappingLookup) { + Context stepIntoObject(String name) { + return new Context(pathTo(name), mappingLookup); + } + + String pathTo(String leafFieldName) { + return path.isEmpty() ? leafFieldName : path + "." + leafFieldName; + } + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldDataGenerator.java index 3d7a96f55cbce..7e28a0a0fab25 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldDataGenerator.java @@ -9,21 +9,11 @@ package org.elasticsearch.logsdb.datageneration; -import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; - /** - * Entity responsible for generating a valid randomized mapping for a field - * and a generator of field values valid for this mapping. 
+ * Entity responsible for generating a valid value for a field. * - * Generator is expected to produce the same mapping per instance of generator. - * Function returned by {@link FieldDataGenerator#fieldValueGenerator() } is expected - * to produce a randomized value each time. + * Generator is expected to produce a different value on every call. */ public interface FieldDataGenerator { - CheckedConsumer mappingWriter(); - - CheckedConsumer fieldValueGenerator(); + Object generateValue(); } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java index 4304a8c77e60b..96b75f29382e2 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java @@ -10,7 +10,6 @@ package org.elasticsearch.logsdb.datageneration; import org.elasticsearch.logsdb.datageneration.datasource.DataSource; -import org.elasticsearch.logsdb.datageneration.datasource.DataSourceResponse; import org.elasticsearch.logsdb.datageneration.fields.leaf.ByteFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.DoubleFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.FloatFieldDataGenerator; @@ -26,33 +25,40 @@ * Lists all leaf field types that are supported for data generation. 
*/ public enum FieldType { - KEYWORD, - LONG, - UNSIGNED_LONG, - INTEGER, - SHORT, - BYTE, - DOUBLE, - FLOAT, - HALF_FLOAT, - SCALED_FLOAT; + KEYWORD("keyword"), + LONG("long"), + UNSIGNED_LONG("unsigned_long"), + INTEGER("integer"), + SHORT("short"), + BYTE("byte"), + DOUBLE("double"), + FLOAT("float"), + HALF_FLOAT("half_float"), + SCALED_FLOAT("scaled_float"); - public FieldDataGenerator generator( - String fieldName, - DataSource dataSource, - DataSourceResponse.LeafMappingParametersGenerator mappingParametersGenerator - ) { + private final String name; + + FieldType(String name) { + this.name = name; + } + + public FieldDataGenerator generator(String fieldName, DataSource dataSource) { return switch (this) { - case KEYWORD -> new KeywordFieldDataGenerator(fieldName, dataSource, mappingParametersGenerator); - case LONG -> new LongFieldDataGenerator(fieldName, dataSource, mappingParametersGenerator); - case UNSIGNED_LONG -> new UnsignedLongFieldDataGenerator(fieldName, dataSource, mappingParametersGenerator); - case INTEGER -> new IntegerFieldDataGenerator(fieldName, dataSource, mappingParametersGenerator); - case SHORT -> new ShortFieldDataGenerator(fieldName, dataSource, mappingParametersGenerator); - case BYTE -> new ByteFieldDataGenerator(fieldName, dataSource, mappingParametersGenerator); - case DOUBLE -> new DoubleFieldDataGenerator(fieldName, dataSource, mappingParametersGenerator); - case FLOAT -> new FloatFieldDataGenerator(fieldName, dataSource, mappingParametersGenerator); - case HALF_FLOAT -> new HalfFloatFieldDataGenerator(fieldName, dataSource, mappingParametersGenerator); - case SCALED_FLOAT -> new ScaledFloatFieldDataGenerator(fieldName, dataSource, mappingParametersGenerator); + case KEYWORD -> new KeywordFieldDataGenerator(fieldName, dataSource); + case LONG -> new LongFieldDataGenerator(fieldName, dataSource); + case UNSIGNED_LONG -> new UnsignedLongFieldDataGenerator(fieldName, dataSource); + case INTEGER -> new 
IntegerFieldDataGenerator(fieldName, dataSource); + case SHORT -> new ShortFieldDataGenerator(fieldName, dataSource); + case BYTE -> new ByteFieldDataGenerator(fieldName, dataSource); + case DOUBLE -> new DoubleFieldDataGenerator(fieldName, dataSource); + case FLOAT -> new FloatFieldDataGenerator(fieldName, dataSource); + case HALF_FLOAT -> new HalfFloatFieldDataGenerator(fieldName, dataSource); + case SCALED_FLOAT -> new ScaledFloatFieldDataGenerator(fieldName, dataSource); }; } + + @Override + public String toString() { + return name; + } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/Mapping.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/Mapping.java new file mode 100644 index 0000000000000..c20c99d7cb0ba --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/Mapping.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.logsdb.datageneration; + +import java.util.Map; + +/** + * Contains generated mapping and supporting data. 
+ * @param raw mapping represented as a possibly nested map (maps represent (sub-)objects) + * @param lookup supporting data structure that represent mapping in a flat form (full path to field -> mapping parameters) + */ +public record Mapping(Map raw, Map> lookup) {} diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/MappingGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/MappingGenerator.java new file mode 100644 index 0000000000000..bdb80ee09868f --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/MappingGenerator.java @@ -0,0 +1,195 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.logsdb.datageneration; + +import org.elasticsearch.index.mapper.ObjectMapper; +import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; +import org.elasticsearch.logsdb.datageneration.datasource.DataSourceResponse; +import org.elasticsearch.logsdb.datageneration.fields.DynamicMapping; + +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; + +/** + * Generator that generates a valid random mapping that follows the structure of provided {@link Template}. + * Mapping will contain all fields from the template with generated mapping parameters. 
+ */ +public class MappingGenerator { + private final DataGeneratorSpecification specification; + + private final DataSourceResponse.DynamicMappingGenerator dynamicMappingGenerator; + + public MappingGenerator(DataGeneratorSpecification specification) { + this.specification = specification; + + this.dynamicMappingGenerator = specification.dataSource().get(new DataSourceRequest.DynamicMappingGenerator()); + } + + /** + * Generates a valid random mapping following the provided template. + * @param template template for the mapping + * @return {@link Mapping} + */ + public Mapping generate(Template template) { + var lookup = new TreeMap>(); + + // Top level mapping parameters + var mappingParametersGenerator = specification.dataSource() + .get(new DataSourceRequest.ObjectMappingParametersGenerator(true, false, ObjectMapper.Subobjects.ENABLED)) + .mappingGenerator(); + + var topLevelMappingParameters = mappingParametersGenerator.get(); + // Top-level object can't be disabled because @timestamp is a required field in data streams. + topLevelMappingParameters.remove("enabled"); + + var rawMapping = new TreeMap(); + + var childrenMapping = new TreeMap(); + for (var predefinedField : specification.predefinedFields()) { + if (predefinedField.mapping() != null) { + childrenMapping.put(predefinedField.name(), predefinedField.mapping()); + lookup.put(predefinedField.name(), predefinedField.mapping()); + } + } + topLevelMappingParameters.put("properties", childrenMapping); + + rawMapping.put("_doc", topLevelMappingParameters); + + if (specification.fullyDynamicMapping()) { + // Has to be "true" for fully dynamic mapping + topLevelMappingParameters.remove("dynamic"); + + return new Mapping(rawMapping, lookup); + } + + var dynamicMapping = topLevelMappingParameters.getOrDefault("dynamic", "true").equals("strict") + ? 
DynamicMapping.FORBIDDEN + : DynamicMapping.SUPPORTED; + var subobjects = ObjectMapper.Subobjects.from(topLevelMappingParameters.getOrDefault("subobjects", "true")); + + generateMapping(childrenMapping, lookup, template.template(), new Context(new HashSet<>(), "", subobjects, dynamicMapping)); + + return new Mapping(rawMapping, lookup); + } + + private void generateMapping( + Map mapping, + Map> lookup, + Map template, + Context context + ) { + for (var entry : template.entrySet()) { + String fieldName = entry.getKey(); + Template.Entry templateEntry = entry.getValue(); + + var mappingParameters = new TreeMap(); + + boolean isDynamicallyMapped = isDynamicallyMapped(templateEntry, context); + // Simply skip this field if it is dynamic. + // Lookup will contain null signaling dynamic mapping as well. + if (isDynamicallyMapped) { + continue; + } + + if (templateEntry instanceof Template.Leaf leaf) { + // For simplicity we only copy to keyword fields, synthetic source logic to handle copy_to is generic. + if (leaf.type() == FieldType.KEYWORD) { + context.addCopyToCandidate(fieldName); + } + + var mappingParametersGenerator = specification.dataSource() + .get( + new DataSourceRequest.LeafMappingParametersGenerator( + fieldName, + leaf.type(), + context.eligibleCopyToDestinations(), + context.parentDynamicMapping() + ) + ) + .mappingGenerator(); + + mappingParameters.put("type", leaf.type().toString()); + mappingParameters.putAll(mappingParametersGenerator.get()); + + } else if (templateEntry instanceof Template.Object object) { + var mappingParametersGenerator = specification.dataSource() + .get(new DataSourceRequest.ObjectMappingParametersGenerator(false, object.nested(), context.parentSubobjects())) + .mappingGenerator(); + + mappingParameters.put("type", object.nested() ? 
"nested" : "object"); + mappingParameters.putAll(mappingParametersGenerator.get()); + + var childrenMapping = new TreeMap(); + mappingParameters.put("properties", childrenMapping); + generateMapping( + childrenMapping, + lookup, + object.children(), + context.stepIntoObject(object.name(), object.nested(), mappingParameters) + ); + } + + mapping.put(fieldName, mappingParameters); + lookup.put(context.pathTo(fieldName), Map.copyOf(mappingParameters)); + } + } + + private boolean isDynamicallyMapped(Template.Entry templateEntry, Context context) { + return context.parentDynamicMapping != DynamicMapping.FORBIDDEN + && dynamicMappingGenerator.generator().apply(templateEntry instanceof Template.Object); + } + + record Context( + Set eligibleCopyToDestinations, + String path, + ObjectMapper.Subobjects parentSubobjects, + DynamicMapping parentDynamicMapping + ) { + Context stepIntoObject(String name, boolean nested, Map mappingParameters) { + var subobjects = determineSubobjects(mappingParameters); + var dynamicMapping = determineDynamicMapping(mappingParameters); + + // copy_to can't be used across nested documents so all currently eligible fields are not eligible inside nested document. + return new Context(nested ? new HashSet<>() : eligibleCopyToDestinations, pathTo(name), subobjects, dynamicMapping); + } + + void addCopyToCandidate(String leafFieldName) { + eligibleCopyToDestinations.add(pathTo(leafFieldName)); + } + + String pathTo(String leafFieldName) { + return path.isEmpty() ? leafFieldName : path + "." + leafFieldName; + } + + private DynamicMapping determineDynamicMapping(Map mappingParameters) { + if (parentDynamicMapping == DynamicMapping.FORCED) { + return DynamicMapping.FORCED; + } + + var dynamicParameter = mappingParameters.get("dynamic"); + // Inherited from parent + if (dynamicParameter == null) { + return parentDynamicMapping; + } + + return dynamicParameter.equals("strict") ? 
DynamicMapping.FORBIDDEN : DynamicMapping.SUPPORTED; + } + + private ObjectMapper.Subobjects determineSubobjects(Map mappingParameters) { + if (parentSubobjects == ObjectMapper.Subobjects.DISABLED) { + return ObjectMapper.Subobjects.DISABLED; + } + + return ObjectMapper.Subobjects.from(mappingParameters.getOrDefault("subobjects", "true")); + } + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/Template.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/Template.java new file mode 100644 index 0000000000000..9bb2d34958a27 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/Template.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.logsdb.datageneration; + +import java.util.Map; + +/** + * A template used to generate mapping and documents for a test. + * Template encodes object structure, names of objects/fields and type of leaf fields. + * Having such a template allow to create interchangeable random mappings with different parameters + * but the same structure in order to f.e. test introduction of a new parameter. 
+ * @param template actual template data + */ +public record Template(Map template) { + sealed interface Entry permits Leaf, Object {} + + record Leaf(String name, FieldType type) implements Entry {} + + record Object(String name, boolean nested, Map children) implements Entry {} +} diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/TemplateGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/TemplateGenerator.java new file mode 100644 index 0000000000000..f0a3a866a2673 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/TemplateGenerator.java @@ -0,0 +1,76 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.logsdb.datageneration; + +import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; +import org.elasticsearch.logsdb.datageneration.datasource.DataSourceResponse; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Supplier; + +public class TemplateGenerator { + private final DataGeneratorSpecification specification; + private final DataSourceResponse.ChildFieldGenerator childFieldGenerator; + private final Supplier fieldTypeGenerator; + + public TemplateGenerator(DataGeneratorSpecification specification) { + this.specification = specification; + this.childFieldGenerator = specification.dataSource().get(new DataSourceRequest.ChildFieldGenerator(specification)); + this.fieldTypeGenerator = specification.dataSource().get(new DataSourceRequest.FieldTypeGenerator()).generator(); + } + + public Template generate() { + var map = new HashMap(); + + generateChildFields(map, 0, new AtomicInteger(0)); + return new Template(map); + } + + private void generateChildFields(Map mapping, int depth, AtomicInteger nestedFieldsCount) { + var existingFieldNames = new HashSet(); + // no child fields is legal + var childFieldsCount = childFieldGenerator.generateChildFieldCount(); + + for (int i = 0; i < childFieldsCount; i++) { + var fieldName = generateFieldName(existingFieldNames); + + if (depth < specification.maxObjectDepth() && childFieldGenerator.generateRegularSubObject()) { + var children = new HashMap(); + mapping.put(fieldName, new Template.Object(fieldName, false, children)); + generateChildFields(children, depth + 1, nestedFieldsCount); + } else if (depth <= specification.maxObjectDepth() + && nestedFieldsCount.get() < specification.nestedFieldsLimit() + && childFieldGenerator.generateNestedSubObject()) { + nestedFieldsCount.incrementAndGet(); + + var children = new HashMap(); + 
mapping.put(fieldName, new Template.Object(fieldName, true, children)); + generateChildFields(children, depth + 1, nestedFieldsCount); + } else { + var fieldTypeInfo = fieldTypeGenerator.get(); + mapping.put(fieldName, new Template.Leaf(fieldName, fieldTypeInfo.fieldType())); + } + } + } + + private String generateFieldName(Set existingFields) { + var fieldName = childFieldGenerator.generateFieldName(); + while (existingFields.contains(fieldName)) { + fieldName = childFieldGenerator.generateFieldName(); + } + existingFields.add(fieldName); + + return fieldName; + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceHandler.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceHandler.java index 9a9c610e091e2..df28282fca407 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceHandler.java @@ -73,4 +73,8 @@ default DataSourceResponse.LeafMappingParametersGenerator handle(DataSourceReque default DataSourceResponse.ObjectMappingParametersGenerator handle(DataSourceRequest.ObjectMappingParametersGenerator request) { return null; } + + default DataSourceResponse.DynamicMappingGenerator handle(DataSourceRequest.DynamicMappingGenerator request) { + return null; + } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceRequest.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceRequest.java index 8dee5876aa207..d77925f097b5a 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceRequest.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceRequest.java @@ -93,7 +93,7 @@ public DataSourceResponse.ChildFieldGenerator 
accept(DataSourceHandler handler) } } - record FieldTypeGenerator(DynamicMapping dynamicMapping) implements DataSourceRequest { + record FieldTypeGenerator() implements DataSourceRequest { public DataSourceResponse.FieldTypeGenerator accept(DataSourceHandler handler) { return handler.handle(this); } @@ -123,4 +123,10 @@ public DataSourceResponse.ObjectMappingParametersGenerator accept(DataSourceHand return handler.handle(this); } } + + record DynamicMappingGenerator() implements DataSourceRequest { + public DataSourceResponse.DynamicMappingGenerator accept(DataSourceHandler handler) { + return handler.handle(this); + } + } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceResponse.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceResponse.java index a7653c10bbf43..fa8f56b3e071b 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceResponse.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceResponse.java @@ -52,7 +52,7 @@ interface ChildFieldGenerator extends DataSourceResponse { } record FieldTypeGenerator(Supplier generator) implements DataSourceResponse { - public record FieldTypeInfo(FieldType fieldType, boolean dynamic) {} + public record FieldTypeInfo(FieldType fieldType) {} } record ObjectArrayGenerator(Supplier> lengthGenerator) implements DataSourceResponse {} @@ -60,4 +60,6 @@ record ObjectArrayGenerator(Supplier> lengthGenerator) impleme record LeafMappingParametersGenerator(Supplier> mappingGenerator) implements DataSourceResponse {} record ObjectMappingParametersGenerator(Supplier> mappingGenerator) implements DataSourceResponse {} + + record DynamicMappingGenerator(Function generator) implements DataSourceResponse {} } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultObjectGenerationHandler.java 
b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultObjectGenerationHandler.java index 62c4ef87e8a15..56ec676e53d55 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultObjectGenerationHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultObjectGenerationHandler.java @@ -14,7 +14,6 @@ import java.util.Optional; import java.util.Set; -import java.util.function.Supplier; import static org.elasticsearch.test.ESTestCase.randomAlphaOfLengthBetween; import static org.elasticsearch.test.ESTestCase.randomDouble; @@ -26,24 +25,25 @@ public DataSourceResponse.ChildFieldGenerator handle(DataSourceRequest.ChildFiel return new DataSourceResponse.ChildFieldGenerator() { @Override public int generateChildFieldCount() { + // no child fields is legal return ESTestCase.randomIntBetween(0, request.specification().maxFieldCountPerLevel()); } @Override public boolean generateDynamicSubObject() { - // Using a static 5% change, this is just a chosen value that can be tweaked. + // Using a static 5% chance, this is just a chosen value that can be tweaked. return randomDouble() <= 0.05; } @Override public boolean generateNestedSubObject() { - // Using a static 5% change, this is just a chosen value that can be tweaked. + // Using a static 5% chance, this is just a chosen value that can be tweaked. return randomDouble() <= 0.05; } @Override public boolean generateRegularSubObject() { - // Using a static 5% change, this is just a chosen value that can be tweaked. + // Using a static 5% chance, this is just a chosen value that can be tweaked. 
return randomDouble() <= 0.05; } @@ -60,21 +60,10 @@ public String generateFieldName() { @Override public DataSourceResponse.FieldTypeGenerator handle(DataSourceRequest.FieldTypeGenerator request) { - Supplier generator = switch (request.dynamicMapping()) { - case FORBIDDEN -> () -> generateFieldTypeInfo(false); - case FORCED -> () -> generateFieldTypeInfo(true); - case SUPPORTED -> () -> generateFieldTypeInfo(ESTestCase.randomBoolean()); - }; - - return new DataSourceResponse.FieldTypeGenerator(generator); - } - - private static DataSourceResponse.FieldTypeGenerator.FieldTypeInfo generateFieldTypeInfo(boolean isDynamic) { - var excluded = isDynamic ? EXCLUDED_FROM_DYNAMIC_MAPPING : Set.of(); - var fieldType = ESTestCase.randomValueOtherThanMany(excluded::contains, () -> ESTestCase.randomFrom(FieldType.values())); - - return new DataSourceResponse.FieldTypeGenerator.FieldTypeInfo(fieldType, isDynamic); + return new DataSourceResponse.FieldTypeGenerator( + () -> new DataSourceResponse.FieldTypeGenerator.FieldTypeInfo(ESTestCase.randomFrom(FieldType.values())) + ); } @Override @@ -87,4 +76,12 @@ public DataSourceResponse.ObjectArrayGenerator handle(DataSourceRequest.ObjectAr return Optional.empty(); }); } + + @Override + public DataSourceResponse.DynamicMappingGenerator handle(DataSourceRequest.DynamicMappingGenerator request) { + // Using a static 5% chance for objects, this is just a chosen value that can be tweaked. + return new DataSourceResponse.DynamicMappingGenerator( + isObject -> isObject ? 
ESTestCase.randomDouble() <= 0.05 : ESTestCase.randomBoolean() + ); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/Context.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/Context.java deleted file mode 100644 index c1ec15a3479b3..0000000000000 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/Context.java +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.logsdb.datageneration.fields; - -import org.elasticsearch.index.mapper.ObjectMapper; -import org.elasticsearch.logsdb.datageneration.DataGeneratorSpecification; -import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; -import org.elasticsearch.logsdb.datageneration.datasource.DataSourceResponse; - -import java.util.HashSet; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.atomic.AtomicInteger; - -class Context { - private final DataGeneratorSpecification specification; - - private final DataSourceResponse.ChildFieldGenerator childFieldGenerator; - private final DataSourceResponse.ObjectArrayGenerator objectArrayGenerator; - - private final String path; - private final int objectDepth; - // We don't need atomicity, but we need to pass counter by reference to accumulate total value from sub-objects. 
- private final AtomicInteger nestedFieldsCount; - private final Set eligibleCopyToDestinations; - private final DynamicMapping parentDynamicMapping; - private final ObjectMapper.Subobjects currentSubobjectsConfig; - - Context( - DataGeneratorSpecification specification, - DynamicMapping parentDynamicMapping, - ObjectMapper.Subobjects currentSubobjectsConfig - ) { - this(specification, "", 0, new AtomicInteger(0), new HashSet<>(), parentDynamicMapping, currentSubobjectsConfig); - } - - private Context( - DataGeneratorSpecification specification, - String path, - int objectDepth, - AtomicInteger nestedFieldsCount, - Set eligibleCopyToDestinations, - DynamicMapping parentDynamicMapping, - ObjectMapper.Subobjects currentSubobjectsConfig - ) { - this.specification = specification; - this.childFieldGenerator = specification.dataSource().get(new DataSourceRequest.ChildFieldGenerator(specification)); - this.objectArrayGenerator = specification.dataSource().get(new DataSourceRequest.ObjectArrayGenerator()); - this.path = path; - this.objectDepth = objectDepth; - this.nestedFieldsCount = nestedFieldsCount; - this.eligibleCopyToDestinations = eligibleCopyToDestinations; - this.parentDynamicMapping = parentDynamicMapping; - this.currentSubobjectsConfig = currentSubobjectsConfig; - } - - public DataGeneratorSpecification specification() { - return specification; - } - - public DataSourceResponse.ChildFieldGenerator childFieldGenerator() { - return childFieldGenerator; - } - - public DataSourceResponse.FieldTypeGenerator fieldTypeGenerator(DynamicMapping dynamicMapping) { - return specification.dataSource().get(new DataSourceRequest.FieldTypeGenerator(dynamicMapping)); - } - - public Context subObject(String name, DynamicMapping dynamicMapping, ObjectMapper.Subobjects subobjects) { - return new Context( - specification, - pathToField(name), - objectDepth + 1, - nestedFieldsCount, - eligibleCopyToDestinations, - dynamicMapping, - subobjects - ); - } - - public Context 
nestedObject(String name, DynamicMapping dynamicMapping, ObjectMapper.Subobjects subobjects) { - nestedFieldsCount.incrementAndGet(); - // copy_to can't be used across nested documents so all currently eligible fields are not eligible inside nested document. - return new Context( - specification, - pathToField(name), - objectDepth + 1, - nestedFieldsCount, - new HashSet<>(), - dynamicMapping, - subobjects - ); - } - - public boolean shouldAddDynamicObjectField(DynamicMapping dynamicMapping) { - if (objectDepth >= specification.maxObjectDepth() || dynamicMapping == DynamicMapping.FORBIDDEN) { - return false; - } - - return childFieldGenerator.generateDynamicSubObject(); - } - - public boolean shouldAddObjectField() { - if (objectDepth >= specification.maxObjectDepth() || parentDynamicMapping == DynamicMapping.FORCED) { - return false; - } - - return childFieldGenerator.generateRegularSubObject(); - } - - public boolean shouldAddNestedField(ObjectMapper.Subobjects subobjects) { - if (objectDepth >= specification.maxObjectDepth() - || nestedFieldsCount.get() >= specification.nestedFieldsLimit() - || parentDynamicMapping == DynamicMapping.FORCED - || subobjects == ObjectMapper.Subobjects.DISABLED) { - return false; - } - - return childFieldGenerator.generateNestedSubObject(); - } - - public Optional generateObjectArray() { - if (objectDepth == 0) { - return Optional.empty(); - } - - return objectArrayGenerator.lengthGenerator().get(); - } - - public DynamicMapping determineDynamicMapping(Map mappingParameters) { - if (parentDynamicMapping == DynamicMapping.FORCED) { - return DynamicMapping.FORCED; - } - - var dynamicParameter = mappingParameters.get("dynamic"); - // Inherited from parent - if (dynamicParameter == null) { - return parentDynamicMapping; - } - - return dynamicParameter.equals("strict") ? 
DynamicMapping.FORBIDDEN : DynamicMapping.SUPPORTED; - } - - public ObjectMapper.Subobjects determineSubobjects(Map mappingParameters) { - if (currentSubobjectsConfig == ObjectMapper.Subobjects.DISABLED) { - return ObjectMapper.Subobjects.DISABLED; - } - - return ObjectMapper.Subobjects.from(mappingParameters.getOrDefault("subobjects", "true")); - } - - public Set getEligibleCopyToDestinations() { - return eligibleCopyToDestinations; - } - - public void markFieldAsEligibleForCopyTo(String field) { - eligibleCopyToDestinations.add(pathToField(field)); - } - - private String pathToField(String field) { - return path.isEmpty() ? field : path + "." + field; - } - - public ObjectMapper.Subobjects getCurrentSubobjectsConfig() { - return currentSubobjectsConfig; - } -} diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/GenericSubObjectFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/GenericSubObjectFieldDataGenerator.java deleted file mode 100644 index 83a68519d5de1..0000000000000 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/GenericSubObjectFieldDataGenerator.java +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.logsdb.datageneration.fields; - -import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.index.mapper.ObjectMapper; -import org.elasticsearch.logsdb.datageneration.FieldDataGenerator; -import org.elasticsearch.logsdb.datageneration.FieldType; -import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -/** - * Generic generator for any type of object field (e.g. "object", "nested"). - */ -public class GenericSubObjectFieldDataGenerator { - private final Context context; - - GenericSubObjectFieldDataGenerator(Context context) { - this.context = context; - } - - List generateChildFields(DynamicMapping dynamicMapping, ObjectMapper.Subobjects subobjects) { - var existingFieldNames = new HashSet(); - // no child fields is legal - var childFieldsCount = context.childFieldGenerator().generateChildFieldCount(); - var result = new ArrayList(childFieldsCount); - - for (int i = 0; i < childFieldsCount; i++) { - var fieldName = generateFieldName(existingFieldNames); - - if (context.shouldAddDynamicObjectField(dynamicMapping)) { - result.add( - new ChildField( - fieldName, - new ObjectFieldDataGenerator(context.subObject(fieldName, DynamicMapping.FORCED, subobjects)), - true - ) - ); - } else if (context.shouldAddObjectField()) { - result.add( - new ChildField(fieldName, new ObjectFieldDataGenerator(context.subObject(fieldName, dynamicMapping, subobjects)), false) - ); - } else if (context.shouldAddNestedField(subobjects)) { - result.add( - new ChildField( - fieldName, - new NestedFieldDataGenerator(context.nestedObject(fieldName, dynamicMapping, subobjects)), - false - ) - ); - } else { - var fieldTypeInfo = context.fieldTypeGenerator(dynamicMapping).generator().get(); - - // For simplicity we only copy to keyword fields, 
synthetic source logic to handle copy_to is generic. - if (fieldTypeInfo.fieldType() == FieldType.KEYWORD) { - context.markFieldAsEligibleForCopyTo(fieldName); - } - - var mappingParametersGenerator = context.specification() - .dataSource() - .get( - new DataSourceRequest.LeafMappingParametersGenerator( - fieldName, - fieldTypeInfo.fieldType(), - context.getEligibleCopyToDestinations(), - dynamicMapping - ) - ); - var generator = fieldTypeInfo.fieldType() - .generator(fieldName, context.specification().dataSource(), mappingParametersGenerator); - result.add(new ChildField(fieldName, generator, fieldTypeInfo.dynamic())); - } - } - - return result; - } - - List generateChildFields(List predefinedFields) { - return predefinedFields.stream() - .map(pf -> new ChildField(pf.name(), pf.generator(context.specification().dataSource()), false)) - .toList(); - } - - static void writeChildFieldsMapping(XContentBuilder mapping, List childFields) throws IOException { - for (var childField : childFields) { - if (childField.dynamic() == false) { - mapping.field(childField.fieldName); - childField.generator.mappingWriter().accept(mapping); - } - } - } - - static void writeObjectsData(XContentBuilder document, Context context, CheckedConsumer objectWriter) - throws IOException { - var optionalLength = context.generateObjectArray(); - if (optionalLength.isPresent()) { - int size = optionalLength.get(); - - document.startArray(); - for (int i = 0; i < size; i++) { - objectWriter.accept(document); - } - document.endArray(); - } else { - objectWriter.accept(document); - } - } - - static void writeSingleObject(XContentBuilder document, Iterable childFields) throws IOException { - document.startObject(); - writeChildFieldsData(document, childFields); - document.endObject(); - } - - static void writeChildFieldsData(XContentBuilder document, Iterable childFields) throws IOException { - for (var childField : childFields) { - document.field(childField.fieldName); - 
childField.generator.fieldValueGenerator().accept(document); - } - } - - private String generateFieldName(Set existingFields) { - var fieldName = context.childFieldGenerator().generateFieldName(); - while (existingFields.contains(fieldName)) { - fieldName = context.childFieldGenerator().generateFieldName(); - } - existingFields.add(fieldName); - - return fieldName; - } - - record ChildField(String fieldName, FieldDataGenerator generator, boolean dynamic) {} -} diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/NestedFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/NestedFieldDataGenerator.java deleted file mode 100644 index 69853debf9b77..0000000000000 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/NestedFieldDataGenerator.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.logsdb.datageneration.fields; - -import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.logsdb.datageneration.FieldDataGenerator; -import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.List; -import java.util.Map; - -public class NestedFieldDataGenerator implements FieldDataGenerator { - private final Context context; - private final Map mappingParameters; - private final List childFields; - - NestedFieldDataGenerator(Context context) { - this.context = context; - - this.mappingParameters = context.specification() - .dataSource() - .get(new DataSourceRequest.ObjectMappingParametersGenerator(false, true, context.getCurrentSubobjectsConfig())) - .mappingGenerator() - .get(); - var dynamicMapping = context.determineDynamicMapping(mappingParameters); - var subobjects = context.determineSubobjects(mappingParameters); - - var genericGenerator = new GenericSubObjectFieldDataGenerator(context); - this.childFields = genericGenerator.generateChildFields(dynamicMapping, subobjects); - } - - @Override - public CheckedConsumer mappingWriter() { - return b -> { - b.startObject(); - - b.field("type", "nested"); - - for (var entry : mappingParameters.entrySet()) { - b.field(entry.getKey(), entry.getValue()); - } - - b.startObject("properties"); - GenericSubObjectFieldDataGenerator.writeChildFieldsMapping(b, childFields); - b.endObject(); - - b.endObject(); - }; - } - - @Override - public CheckedConsumer fieldValueGenerator() { - CheckedConsumer objectWriter = object -> GenericSubObjectFieldDataGenerator.writeSingleObject( - object, - childFields - ); - return b -> GenericSubObjectFieldDataGenerator.writeObjectsData(b, context, objectWriter); - } -} diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/ObjectFieldDataGenerator.java 
b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/ObjectFieldDataGenerator.java deleted file mode 100644 index 701642c57619b..0000000000000 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/ObjectFieldDataGenerator.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.logsdb.datageneration.fields; - -import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.logsdb.datageneration.FieldDataGenerator; -import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.List; -import java.util.Map; - -public class ObjectFieldDataGenerator implements FieldDataGenerator { - private final Context context; - private final Map mappingParameters; - private final List childFields; - - ObjectFieldDataGenerator(Context context) { - this.context = context; - - this.mappingParameters = context.specification() - .dataSource() - .get(new DataSourceRequest.ObjectMappingParametersGenerator(false, false, context.getCurrentSubobjectsConfig())) - .mappingGenerator() - .get(); - var dynamicMapping = context.determineDynamicMapping(mappingParameters); - var subobjects = context.determineSubobjects(mappingParameters); - - var genericGenerator = new GenericSubObjectFieldDataGenerator(context); - this.childFields = genericGenerator.generateChildFields(dynamicMapping, subobjects); - } - - @Override - public CheckedConsumer 
mappingWriter() { - return b -> { - b.startObject(); - - for (var entry : mappingParameters.entrySet()) { - b.field(entry.getKey(), entry.getValue()); - } - - b.startObject("properties"); - GenericSubObjectFieldDataGenerator.writeChildFieldsMapping(b, childFields); - b.endObject(); - - b.endObject(); - }; - } - - @Override - public CheckedConsumer fieldValueGenerator() { - CheckedConsumer objectWriter = object -> GenericSubObjectFieldDataGenerator.writeSingleObject( - object, - childFields - ); - return b -> GenericSubObjectFieldDataGenerator.writeObjectsData(b, context, objectWriter); - } -} diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/PredefinedField.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/PredefinedField.java index 57e3ce3ce2a86..948f1cf80b35c 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/PredefinedField.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/PredefinedField.java @@ -12,37 +12,21 @@ import org.elasticsearch.logsdb.datageneration.FieldDataGenerator; import org.elasticsearch.logsdb.datageneration.FieldType; import org.elasticsearch.logsdb.datageneration.datasource.DataSource; -import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; -import java.util.Set; +import java.util.Map; public interface PredefinedField { String name(); - FieldDataGenerator generator(DataSource dataSource); + FieldType fieldType(); - record WithType(String fieldName, FieldType fieldType, DynamicMapping dynamicMapping) implements PredefinedField { - @Override - public String name() { - return fieldName; - } + Map mapping(); - @Override - public FieldDataGenerator generator(DataSource dataSource) { - // copy_to currently not supported for predefined fields, use WithGenerator if needed - var mappingParametersGenerator = dataSource.get( - new 
DataSourceRequest.LeafMappingParametersGenerator(fieldName, fieldType, Set.of(), dynamicMapping) - ); - return fieldType().generator(fieldName, dataSource, mappingParametersGenerator); - } - } - - record WithGenerator(String fieldName, FieldDataGenerator generator) implements PredefinedField { - @Override - public String name() { - return fieldName; - } + FieldDataGenerator generator(DataSource dataSource); + record WithGenerator(String name, FieldType fieldType, Map mapping, FieldDataGenerator generator) + implements + PredefinedField { @Override public FieldDataGenerator generator(DataSource dataSource) { return generator; diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/TopLevelObjectFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/TopLevelObjectFieldDataGenerator.java deleted file mode 100644 index 1374362df7f4a..0000000000000 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/TopLevelObjectFieldDataGenerator.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.logsdb.datageneration.fields; - -import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.index.mapper.ObjectMapper; -import org.elasticsearch.logsdb.datageneration.DataGeneratorSpecification; -import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -public class TopLevelObjectFieldDataGenerator { - private final Context context; - private final Map mappingParameters; - - // Child fields of top level object that were explicitly requested, they have predefined name and type. - private final List predefinedFields; - // Child fields of top level object that are generated and merged with predefined fields. - private final List generatedChildFields; - - public TopLevelObjectFieldDataGenerator(DataGeneratorSpecification specification) { - DynamicMapping dynamicMapping; - if (specification.fullyDynamicMapping()) { - dynamicMapping = DynamicMapping.FORCED; - this.mappingParameters = Map.of(); - } else { - this.mappingParameters = new HashMap<>( - // Value of subobjects here is for a parent of this object. - // Since there is no parent we pass ENABLED to allow to set subobjects to any value at top level. - specification.dataSource() - .get(new DataSourceRequest.ObjectMappingParametersGenerator(true, false, ObjectMapper.Subobjects.ENABLED)) - .mappingGenerator() - .get() - ); - // Top-level object can't be disabled because @timestamp is a required field in data streams. - this.mappingParameters.remove("enabled"); - - dynamicMapping = mappingParameters.getOrDefault("dynamic", "true").equals("strict") - ? 
DynamicMapping.FORBIDDEN - : DynamicMapping.SUPPORTED; - } - var subobjects = ObjectMapper.Subobjects.from(mappingParameters.getOrDefault("subobjects", "true")); - - // Value of subobjects here is for a parent of this object. - // Since there is no parent we pass ENABLED to allow to set subobjects to any value at top level. - this.context = new Context(specification, dynamicMapping, ObjectMapper.Subobjects.ENABLED); - var genericGenerator = new GenericSubObjectFieldDataGenerator(context); - - this.predefinedFields = genericGenerator.generateChildFields(specification.predefinedFields()); - this.generatedChildFields = genericGenerator.generateChildFields(dynamicMapping, subobjects); - } - - public CheckedConsumer mappingWriter(Map customMappingParameters) { - return b -> { - b.startObject(); - - var mergedParameters = Stream.of(this.mappingParameters, customMappingParameters) - .flatMap(map -> map.entrySet().stream()) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (l, r) -> r)); - - for (var entry : mergedParameters.entrySet()) { - b.field(entry.getKey(), entry.getValue()); - } - - b.startObject("properties"); - GenericSubObjectFieldDataGenerator.writeChildFieldsMapping(b, predefinedFields); - GenericSubObjectFieldDataGenerator.writeChildFieldsMapping(b, generatedChildFields); - b.endObject(); - - b.endObject(); - }; - } - - public CheckedConsumer fieldValueGenerator( - CheckedConsumer customDocumentModification - ) { - CheckedConsumer objectWriter = b -> { - b.startObject(); - - customDocumentModification.accept(b); - GenericSubObjectFieldDataGenerator.writeChildFieldsData(b, predefinedFields); - GenericSubObjectFieldDataGenerator.writeChildFieldsData(b, generatedChildFields); - - b.endObject(); - }; - return b -> GenericSubObjectFieldDataGenerator.writeObjectsData(b, context, objectWriter); - } -} diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/ByteFieldDataGenerator.java 
b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/ByteFieldDataGenerator.java index 08cbfa32bcdb2..4ead8ffd0b718 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/ByteFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/ByteFieldDataGenerator.java @@ -9,49 +9,25 @@ package org.elasticsearch.logsdb.datageneration.fields.leaf; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.logsdb.datageneration.FieldDataGenerator; import org.elasticsearch.logsdb.datageneration.datasource.DataSource; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; -import org.elasticsearch.logsdb.datageneration.datasource.DataSourceResponse; -import org.elasticsearch.xcontent.XContentBuilder; -import java.io.IOException; -import java.util.Map; import java.util.function.Supplier; public class ByteFieldDataGenerator implements FieldDataGenerator { private final Supplier valueGenerator; - private final Map mappingParameters; - public ByteFieldDataGenerator( - String fieldName, - DataSource dataSource, - DataSourceResponse.LeafMappingParametersGenerator mappingParametersGenerator - ) { + public ByteFieldDataGenerator(String fieldName, DataSource dataSource) { var bytes = dataSource.get(new DataSourceRequest.ByteGenerator()); var nulls = dataSource.get(new DataSourceRequest.NullWrapper()); var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper()); this.valueGenerator = arrays.wrapper().compose(nulls.wrapper()).apply(() -> bytes.generator().get()); - this.mappingParameters = mappingParametersGenerator.mappingGenerator().get(); } @Override - public CheckedConsumer mappingWriter() { - return b -> { - b.startObject().field("type", "byte"); - - for (var entry : mappingParameters.entrySet()) { - b.field(entry.getKey(), entry.getValue()); - } - - b.endObject(); - }; - } - - @Override - public CheckedConsumer 
fieldValueGenerator() { - return b -> b.value(valueGenerator.get()); + public Object generateValue() { + return valueGenerator.get(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/DoubleFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/DoubleFieldDataGenerator.java index 0c486360d0003..cf2c4f6abdbf4 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/DoubleFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/DoubleFieldDataGenerator.java @@ -9,49 +9,25 @@ package org.elasticsearch.logsdb.datageneration.fields.leaf; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.logsdb.datageneration.FieldDataGenerator; import org.elasticsearch.logsdb.datageneration.datasource.DataSource; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; -import org.elasticsearch.logsdb.datageneration.datasource.DataSourceResponse; -import org.elasticsearch.xcontent.XContentBuilder; -import java.io.IOException; -import java.util.Map; import java.util.function.Supplier; public class DoubleFieldDataGenerator implements FieldDataGenerator { private final Supplier valueGenerator; - private final Map mappingParameters; - public DoubleFieldDataGenerator( - String fieldName, - DataSource dataSource, - DataSourceResponse.LeafMappingParametersGenerator mappingParametersGenerator - ) { + public DoubleFieldDataGenerator(String fieldName, DataSource dataSource) { var doubles = dataSource.get(new DataSourceRequest.DoubleGenerator()); var nulls = dataSource.get(new DataSourceRequest.NullWrapper()); var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper()); this.valueGenerator = arrays.wrapper().compose(nulls.wrapper()).apply(() -> doubles.generator().get()); - this.mappingParameters = mappingParametersGenerator.mappingGenerator().get(); } @Override 
- public CheckedConsumer mappingWriter() { - return b -> { - b.startObject().field("type", "double"); - - for (var entry : mappingParameters.entrySet()) { - b.field(entry.getKey(), entry.getValue()); - } - - b.endObject(); - }; - } - - @Override - public CheckedConsumer fieldValueGenerator() { - return b -> b.value(valueGenerator.get()); + public Object generateValue() { + return valueGenerator.get(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/FloatFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/FloatFieldDataGenerator.java index 97397013dd303..b59d5ceabb188 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/FloatFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/FloatFieldDataGenerator.java @@ -9,49 +9,25 @@ package org.elasticsearch.logsdb.datageneration.fields.leaf; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.logsdb.datageneration.FieldDataGenerator; import org.elasticsearch.logsdb.datageneration.datasource.DataSource; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; -import org.elasticsearch.logsdb.datageneration.datasource.DataSourceResponse; -import org.elasticsearch.xcontent.XContentBuilder; -import java.io.IOException; -import java.util.Map; import java.util.function.Supplier; public class FloatFieldDataGenerator implements FieldDataGenerator { private final Supplier valueGenerator; - private final Map mappingParameters; - public FloatFieldDataGenerator( - String fieldName, - DataSource dataSource, - DataSourceResponse.LeafMappingParametersGenerator mappingParametersGenerator - ) { + public FloatFieldDataGenerator(String fieldName, DataSource dataSource) { var floats = dataSource.get(new DataSourceRequest.FloatGenerator()); var nulls = dataSource.get(new DataSourceRequest.NullWrapper()); 
var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper()); this.valueGenerator = arrays.wrapper().compose(nulls.wrapper()).apply(() -> floats.generator().get()); - this.mappingParameters = mappingParametersGenerator.mappingGenerator().get(); } @Override - public CheckedConsumer mappingWriter() { - return b -> { - b.startObject().field("type", "float"); - - for (var entry : mappingParameters.entrySet()) { - b.field(entry.getKey(), entry.getValue()); - } - - b.endObject(); - }; - } - - @Override - public CheckedConsumer fieldValueGenerator() { - return b -> b.value(valueGenerator.get()); + public Object generateValue() { + return valueGenerator.get(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/HalfFloatFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/HalfFloatFieldDataGenerator.java index 7a8da5fd93117..e2ed299f1a4dc 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/HalfFloatFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/HalfFloatFieldDataGenerator.java @@ -9,49 +9,25 @@ package org.elasticsearch.logsdb.datageneration.fields.leaf; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.logsdb.datageneration.FieldDataGenerator; import org.elasticsearch.logsdb.datageneration.datasource.DataSource; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; -import org.elasticsearch.logsdb.datageneration.datasource.DataSourceResponse; -import org.elasticsearch.xcontent.XContentBuilder; -import java.io.IOException; -import java.util.Map; import java.util.function.Supplier; public class HalfFloatFieldDataGenerator implements FieldDataGenerator { private final Supplier valueGenerator; - private final Map mappingParameters; - public HalfFloatFieldDataGenerator( - String fieldName, - DataSource dataSource, - 
DataSourceResponse.LeafMappingParametersGenerator mappingParametersGenerator - ) { + public HalfFloatFieldDataGenerator(String fieldName, DataSource dataSource) { var halfFloats = dataSource.get(new DataSourceRequest.HalfFloatGenerator()); var nulls = dataSource.get(new DataSourceRequest.NullWrapper()); var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper()); this.valueGenerator = arrays.wrapper().compose(nulls.wrapper()).apply(() -> halfFloats.generator().get()); - this.mappingParameters = mappingParametersGenerator.mappingGenerator().get(); } @Override - public CheckedConsumer mappingWriter() { - return b -> { - b.startObject().field("type", "half_float"); - - for (var entry : mappingParameters.entrySet()) { - b.field(entry.getKey(), entry.getValue()); - } - - b.endObject(); - }; - } - - @Override - public CheckedConsumer fieldValueGenerator() { - return b -> b.value(valueGenerator.get()); + public Object generateValue() { + return valueGenerator.get(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/IntegerFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/IntegerFieldDataGenerator.java index 9a942fca3ba14..f2fe8ed8362e5 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/IntegerFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/IntegerFieldDataGenerator.java @@ -9,49 +9,25 @@ package org.elasticsearch.logsdb.datageneration.fields.leaf; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.logsdb.datageneration.FieldDataGenerator; import org.elasticsearch.logsdb.datageneration.datasource.DataSource; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; -import org.elasticsearch.logsdb.datageneration.datasource.DataSourceResponse; -import org.elasticsearch.xcontent.XContentBuilder; -import 
java.io.IOException; -import java.util.Map; import java.util.function.Supplier; public class IntegerFieldDataGenerator implements FieldDataGenerator { private final Supplier valueGenerator; - private final Map mappingParameters; - public IntegerFieldDataGenerator( - String fieldName, - DataSource dataSource, - DataSourceResponse.LeafMappingParametersGenerator mappingParametersGenerator - ) { + public IntegerFieldDataGenerator(String fieldName, DataSource dataSource) { var ints = dataSource.get(new DataSourceRequest.IntegerGenerator()); var nulls = dataSource.get(new DataSourceRequest.NullWrapper()); var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper()); this.valueGenerator = arrays.wrapper().compose(nulls.wrapper()).apply(() -> ints.generator().get()); - this.mappingParameters = mappingParametersGenerator.mappingGenerator().get(); } @Override - public CheckedConsumer mappingWriter() { - return b -> { - b.startObject().field("type", "integer"); - - for (var entry : mappingParameters.entrySet()) { - b.field(entry.getKey(), entry.getValue()); - } - - b.endObject(); - }; - } - - @Override - public CheckedConsumer fieldValueGenerator() { - return b -> b.value(valueGenerator.get()); + public Object generateValue() { + return valueGenerator.get(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/KeywordFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/KeywordFieldDataGenerator.java index 8a37c73d00397..8dc4d8b8767c4 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/KeywordFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/KeywordFieldDataGenerator.java @@ -9,49 +9,25 @@ package org.elasticsearch.logsdb.datageneration.fields.leaf; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.logsdb.datageneration.FieldDataGenerator; import 
org.elasticsearch.logsdb.datageneration.datasource.DataSource; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; -import org.elasticsearch.logsdb.datageneration.datasource.DataSourceResponse; -import org.elasticsearch.xcontent.XContentBuilder; -import java.io.IOException; -import java.util.Map; import java.util.function.Supplier; public class KeywordFieldDataGenerator implements FieldDataGenerator { private final Supplier valueGenerator; - private final Map mappingParameters; - public KeywordFieldDataGenerator( - String fieldName, - DataSource dataSource, - DataSourceResponse.LeafMappingParametersGenerator mappingParametersGenerator - ) { + public KeywordFieldDataGenerator(String fieldName, DataSource dataSource) { var strings = dataSource.get(new DataSourceRequest.StringGenerator()); var nulls = dataSource.get(new DataSourceRequest.NullWrapper()); var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper()); this.valueGenerator = arrays.wrapper().compose(nulls.wrapper()).apply(() -> strings.generator().get()); - this.mappingParameters = mappingParametersGenerator.mappingGenerator().get(); } @Override - public CheckedConsumer mappingWriter() { - return b -> { - b.startObject().field("type", "keyword"); - - for (var entry : mappingParameters.entrySet()) { - b.field(entry.getKey(), entry.getValue()); - } - - b.endObject(); - }; - } - - @Override - public CheckedConsumer fieldValueGenerator() { - return b -> b.value(valueGenerator.get()); + public Object generateValue() { + return valueGenerator.get(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/LongFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/LongFieldDataGenerator.java index 1c240fd10a953..f17610e501ed7 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/LongFieldDataGenerator.java +++ 
b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/LongFieldDataGenerator.java @@ -9,49 +9,25 @@ package org.elasticsearch.logsdb.datageneration.fields.leaf; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.logsdb.datageneration.FieldDataGenerator; import org.elasticsearch.logsdb.datageneration.datasource.DataSource; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; -import org.elasticsearch.logsdb.datageneration.datasource.DataSourceResponse; -import org.elasticsearch.xcontent.XContentBuilder; -import java.io.IOException; -import java.util.Map; import java.util.function.Supplier; public class LongFieldDataGenerator implements FieldDataGenerator { private final Supplier valueGenerator; - private final Map mappingParameters; - public LongFieldDataGenerator( - String fieldName, - DataSource dataSource, - DataSourceResponse.LeafMappingParametersGenerator mappingParametersGenerator - ) { + public LongFieldDataGenerator(String fieldName, DataSource dataSource) { var longs = dataSource.get(new DataSourceRequest.LongGenerator()); var nulls = dataSource.get(new DataSourceRequest.NullWrapper()); var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper()); this.valueGenerator = arrays.wrapper().compose(nulls.wrapper()).apply(() -> longs.generator().get()); - this.mappingParameters = mappingParametersGenerator.mappingGenerator().get(); } @Override - public CheckedConsumer mappingWriter() { - return b -> { - b.startObject().field("type", "long"); - - for (var entry : mappingParameters.entrySet()) { - b.field(entry.getKey(), entry.getValue()); - } - - b.endObject(); - }; - } - - @Override - public CheckedConsumer fieldValueGenerator() { - return b -> b.value(valueGenerator.get()); + public Object generateValue() { + return valueGenerator.get(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/ScaledFloatFieldDataGenerator.java 
b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/ScaledFloatFieldDataGenerator.java index 359a725aed941..008dd04179dcd 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/ScaledFloatFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/ScaledFloatFieldDataGenerator.java @@ -9,49 +9,25 @@ package org.elasticsearch.logsdb.datageneration.fields.leaf; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.logsdb.datageneration.FieldDataGenerator; import org.elasticsearch.logsdb.datageneration.datasource.DataSource; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; -import org.elasticsearch.logsdb.datageneration.datasource.DataSourceResponse; -import org.elasticsearch.xcontent.XContentBuilder; -import java.io.IOException; -import java.util.Map; import java.util.function.Supplier; public class ScaledFloatFieldDataGenerator implements FieldDataGenerator { private final Supplier valueGenerator; - private final Map mappingParameters; - public ScaledFloatFieldDataGenerator( - String fieldName, - DataSource dataSource, - DataSourceResponse.LeafMappingParametersGenerator mappingParametersGenerator - ) { + public ScaledFloatFieldDataGenerator(String fieldName, DataSource dataSource) { var doubles = dataSource.get(new DataSourceRequest.DoubleGenerator()); var nulls = dataSource.get(new DataSourceRequest.NullWrapper()); var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper()); this.valueGenerator = arrays.wrapper().compose(nulls.wrapper()).apply(() -> doubles.generator().get()); - this.mappingParameters = mappingParametersGenerator.mappingGenerator().get(); } @Override - public CheckedConsumer mappingWriter() { - return b -> { - b.startObject().field("type", "scaled_float"); - - for (var entry : mappingParameters.entrySet()) { - b.field(entry.getKey(), entry.getValue()); - } - - b.endObject(); - 
}; - } - - @Override - public CheckedConsumer fieldValueGenerator() { - return b -> b.value(valueGenerator.get()); + public Object generateValue() { + return valueGenerator.get(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/ShortFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/ShortFieldDataGenerator.java index 8a056f00701d6..85bff2c85e538 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/ShortFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/ShortFieldDataGenerator.java @@ -9,49 +9,25 @@ package org.elasticsearch.logsdb.datageneration.fields.leaf; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.logsdb.datageneration.FieldDataGenerator; import org.elasticsearch.logsdb.datageneration.datasource.DataSource; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; -import org.elasticsearch.logsdb.datageneration.datasource.DataSourceResponse; -import org.elasticsearch.xcontent.XContentBuilder; -import java.io.IOException; -import java.util.Map; import java.util.function.Supplier; public class ShortFieldDataGenerator implements FieldDataGenerator { private final Supplier valueGenerator; - private final Map mappingParameters; - public ShortFieldDataGenerator( - String fieldName, - DataSource dataSource, - DataSourceResponse.LeafMappingParametersGenerator mappingParametersGenerator - ) { + public ShortFieldDataGenerator(String fieldName, DataSource dataSource) { var shorts = dataSource.get(new DataSourceRequest.ShortGenerator()); var nulls = dataSource.get(new DataSourceRequest.NullWrapper()); var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper()); this.valueGenerator = arrays.wrapper().compose(nulls.wrapper()).apply(() -> shorts.generator().get()); - this.mappingParameters = 
mappingParametersGenerator.mappingGenerator().get(); } @Override - public CheckedConsumer mappingWriter() { - return b -> { - b.startObject().field("type", "short"); - - for (var entry : mappingParameters.entrySet()) { - b.field(entry.getKey(), entry.getValue()); - } - - b.endObject(); - }; - } - - @Override - public CheckedConsumer fieldValueGenerator() { - return b -> b.value(valueGenerator.get()); + public Object generateValue() { + return valueGenerator.get(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/UnsignedLongFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/UnsignedLongFieldDataGenerator.java index d836d5625a89c..329f684bef70d 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/UnsignedLongFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/UnsignedLongFieldDataGenerator.java @@ -9,49 +9,25 @@ package org.elasticsearch.logsdb.datageneration.fields.leaf; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.logsdb.datageneration.FieldDataGenerator; import org.elasticsearch.logsdb.datageneration.datasource.DataSource; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; -import org.elasticsearch.logsdb.datageneration.datasource.DataSourceResponse; -import org.elasticsearch.xcontent.XContentBuilder; -import java.io.IOException; -import java.util.Map; import java.util.function.Supplier; public class UnsignedLongFieldDataGenerator implements FieldDataGenerator { private final Supplier valueGenerator; - private final Map mappingParameters; - public UnsignedLongFieldDataGenerator( - String fieldName, - DataSource dataSource, - DataSourceResponse.LeafMappingParametersGenerator mappingParametersGenerator - ) { + public UnsignedLongFieldDataGenerator(String fieldName, DataSource dataSource) { var unsignedLongs = 
dataSource.get(new DataSourceRequest.UnsignedLongGenerator()); var nulls = dataSource.get(new DataSourceRequest.NullWrapper()); var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper()); this.valueGenerator = arrays.wrapper().compose(nulls.wrapper()).apply(() -> unsignedLongs.generator().get()); - this.mappingParameters = mappingParametersGenerator.mappingGenerator().get(); } @Override - public CheckedConsumer mappingWriter() { - return b -> { - b.startObject().field("type", "unsigned_long"); - - for (var entry : mappingParameters.entrySet()) { - b.field(entry.getKey(), entry.getValue()); - } - - b.endObject(); - }; - } - - @Override - public CheckedConsumer fieldValueGenerator() { - return b -> b.value(valueGenerator.get()); + public Object generateValue() { + return valueGenerator.get(); } } diff --git a/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGeneratorSnapshotTests.java b/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGenerationSnapshotTests.java similarity index 65% rename from test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGeneratorSnapshotTests.java rename to test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGenerationSnapshotTests.java index 1178a32e46d31..bac61daf554d5 100644 --- a/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGeneratorSnapshotTests.java +++ b/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGenerationSnapshotTests.java @@ -17,25 +17,27 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; -public class DataGeneratorSnapshotTests extends ESTestCase { +public class DataGenerationSnapshotTests extends ESTestCase { public void testSnapshot() throws Exception { - var dataGenerator = new DataGenerator( - 
DataGeneratorSpecification.builder() - .withDataSourceHandlers(List.of(new DataSourceOverrides())) - .withMaxFieldCountPerLevel(5) - .withMaxObjectDepth(2) - .build() - ); + var specification = DataGeneratorSpecification.builder() + .withDataSourceHandlers(List.of(new DataSourceOverrides())) + .withMaxFieldCountPerLevel(5) + .withMaxObjectDepth(2) + .build(); - var mapping = XContentBuilder.builder(XContentType.JSON.xContent()).prettyPrint(); - dataGenerator.writeMapping(mapping); + var template = new TemplateGenerator(specification).generate(); + var mapping = new MappingGenerator(specification).generate(template); - var document = XContentBuilder.builder(XContentType.JSON.xContent()).prettyPrint(); - dataGenerator.generateDocument(document); + var mappingXContent = XContentBuilder.builder(XContentType.JSON.xContent()).prettyPrint(); + mappingXContent.map(mapping.raw()); + + var documentXContent = XContentBuilder.builder(XContentType.JSON.xContent()).prettyPrint(); + documentXContent.map(new DocumentGenerator(specification).generate(template, mapping)); var expectedMapping = """ { @@ -49,53 +51,66 @@ public void testSnapshot() throws Exception { "dynamic" : "false", "properties" : { "f3" : { - "type" : "keyword", - "store" : "true" + "store" : "true", + "type" : "keyword" }, "f4" : { - "type" : "long", - "index" : "false" + "index" : "false", + "type" : "long" } - } + }, + "type" : "object" }, "f5" : { "dynamic" : "false", "properties" : { "f6" : { - "type" : "keyword", - "store" : "true" + "store" : "true", + "type" : "keyword" }, "f7" : { - "type" : "long", - "index" : "false" + "dynamic" : "false", + "properties" : { + "f8" : { + "index" : "false", + "type" : "long" + }, + "f9" : { + "store" : "true", + "type" : "keyword" + } + }, + "type" : "nested" } - } + }, + "type" : "object" } - } + }, + "type" : "object" }, - "f8" : { - "type" : "nested", + "f10" : { "dynamic" : "false", "properties" : { - "f9" : { - "type" : "nested", + "f11" : { "dynamic" : "false", 
"properties" : { - "f10" : { - "type" : "keyword", - "store" : "true" + "f12" : { + "index" : "false", + "type" : "long" }, - "f11" : { - "type" : "long", - "index" : "false" + "f13" : { + "store" : "true", + "type" : "keyword" } - } + }, + "type" : "nested" }, - "f12" : { - "type" : "keyword", - "store" : "true" + "f14" : { + "index" : "false", + "type" : "long" } - } + }, + "type" : "nested" } } } @@ -103,48 +118,48 @@ public void testSnapshot() throws Exception { var expectedDocument = """ { - "f1" : [ + "f1" : { + "f2" : { + "f3" : null, + "f4" : 3 + }, + "f5" : { + "f6" : [ + "string4", + "string5" + ], + "f7" : { + "f8" : null, + "f9" : "string6" + } + } + }, + "f10" : [ { - "f2" : { - "f3" : [ + "f11" : { + "f12" : [ null, - "string1" + 0 ], - "f4" : 0 + "f13" : "string1" }, - "f5" : { - "f6" : "string2", - "f7" : null - } + "f14" : 1 }, { - "f2" : { - "f3" : [ - "string3", - "string4" - ], - "f4" : 1 + "f11" : { + "f12" : null, + "f13" : [ + "string2", + "string3" + ] }, - "f5" : { - "f6" : null, - "f7" : 2 - } + "f14" : 2 } - ], - "f8" : { - "f9" : { - "f10" : [ - "string5", - "string6" - ], - "f11" : null - }, - "f12" : "string7" - } + ] }"""; - assertEquals(expectedMapping, Strings.toString(mapping)); - assertEquals(expectedDocument, Strings.toString(document)); + assertEquals(expectedMapping, Strings.toString(mappingXContent)); + assertEquals(expectedDocument, Strings.toString(documentXContent)); } private static class DataSourceOverrides implements DataSourceHandler { @@ -173,7 +188,6 @@ public DataSourceResponse.NullWrapper handle(DataSourceRequest.NullWrapper reque @Override public DataSourceResponse.ArrayWrapper handle(DataSourceRequest.ArrayWrapper request) { - return new DataSourceResponse.ArrayWrapper((values) -> () -> { if (generateArrayChecks++ % 4 == 0) { // we have nulls so can't use List.of @@ -206,14 +220,19 @@ public DataSourceResponse.FieldTypeGenerator handle(DataSourceRequest.FieldTypeG return new 
DataSourceResponse.FieldTypeGenerator(() -> { if (fieldType == FieldType.KEYWORD) { fieldType = FieldType.LONG; - return new DataSourceResponse.FieldTypeGenerator.FieldTypeInfo(FieldType.KEYWORD, false); + return new DataSourceResponse.FieldTypeGenerator.FieldTypeInfo(FieldType.KEYWORD); } fieldType = FieldType.KEYWORD; - return new DataSourceResponse.FieldTypeGenerator.FieldTypeInfo(FieldType.LONG, false); + return new DataSourceResponse.FieldTypeGenerator.FieldTypeInfo(FieldType.LONG); }); } + @Override + public DataSourceResponse.DynamicMappingGenerator handle(DataSourceRequest.DynamicMappingGenerator request) { + return new DataSourceResponse.DynamicMappingGenerator((ignored) -> false); + } + @Override public DataSourceResponse.LeafMappingParametersGenerator handle(DataSourceRequest.LeafMappingParametersGenerator request) { if (request.fieldType() == FieldType.KEYWORD) { @@ -229,7 +248,7 @@ public DataSourceResponse.LeafMappingParametersGenerator handle(DataSourceReques @Override public DataSourceResponse.ObjectMappingParametersGenerator handle(DataSourceRequest.ObjectMappingParametersGenerator request) { - return new DataSourceResponse.ObjectMappingParametersGenerator(() -> Map.of("dynamic", "false")); + return new DataSourceResponse.ObjectMappingParametersGenerator(() -> new HashMap<>(Map.of("dynamic", "false"))); } } diff --git a/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGeneratorTests.java b/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGenerationTests.java similarity index 69% rename from test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGeneratorTests.java rename to test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGenerationTests.java index b5fa66bf3006b..d9750328ff3fa 100644 --- a/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGeneratorTests.java +++ 
b/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGenerationTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.logsdb.datageneration.datasource.DataSourceHandler; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceResponse; -import org.elasticsearch.logsdb.datageneration.fields.DynamicMapping; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; @@ -28,20 +27,8 @@ import java.util.List; import java.util.Optional; -public class DataGeneratorTests extends ESTestCase { - public void testDataGeneratorSanity() throws IOException { - var dataGenerator = new DataGenerator(DataGeneratorSpecification.buildDefault()); - - var mapping = XContentBuilder.builder(XContentType.JSON.xContent()); - dataGenerator.writeMapping(mapping); - - for (int i = 0; i < 1000; i++) { - var document = XContentBuilder.builder(XContentType.JSON.xContent()); - dataGenerator.generateDocument(document); - } - } - - public void testDataGeneratorProducesValidMappingAndDocument() throws IOException { +public class DataGenerationTests extends ESTestCase { + public void testDataGenerationProducesValidMappingAndDocument() throws IOException { // Make sure objects, nested objects and all field types are covered. 
var testChildFieldGenerator = new DataSourceResponse.ChildFieldGenerator() { private boolean dynamicSubObjectCovered = false; @@ -101,43 +88,34 @@ public DataSourceResponse.ChildFieldGenerator handle(DataSourceRequest.ChildFiel @Override public DataSourceResponse.FieldTypeGenerator handle(DataSourceRequest.FieldTypeGenerator request) { - if (request.dynamicMapping() == DynamicMapping.FORBIDDEN || request.dynamicMapping() == DynamicMapping.SUPPORTED) { - return new DataSourceResponse.FieldTypeGenerator( - () -> new DataSourceResponse.FieldTypeGenerator.FieldTypeInfo( - FieldType.values()[generatedFields++ % FieldType.values().length], - false - ) - ); - } - - return new DataSourceResponse.FieldTypeGenerator(() -> { - var fieldType = FieldType.values()[generatedFields++ % FieldType.values().length]; - // Does not really work with dynamic mapping. - if (fieldType == FieldType.UNSIGNED_LONG) { - fieldType = FieldType.values()[generatedFields++ % FieldType.values().length]; - } + return new DataSourceResponse.FieldTypeGenerator( + () -> new DataSourceResponse.FieldTypeGenerator.FieldTypeInfo( + FieldType.values()[generatedFields++ % FieldType.values().length] + ) + ); - return new DataSourceResponse.FieldTypeGenerator.FieldTypeInfo(fieldType, true); - }); } }; - var dataGenerator = new DataGenerator( - DataGeneratorSpecification.builder().withDataSourceHandlers(List.of(dataSourceOverride)).build() - ); + var specification = DataGeneratorSpecification.builder().withDataSourceHandlers(List.of(dataSourceOverride)).build(); + + var documentGenerator = new DocumentGenerator(specification); - var mapping = XContentBuilder.builder(XContentType.JSON.xContent()); - dataGenerator.writeMapping(mapping); + var template = new TemplateGenerator(specification).generate(); + var mapping = new MappingGenerator(specification).generate(template); + + var mappingXContent = XContentBuilder.builder(XContentType.JSON.xContent()); + mappingXContent.map(mapping.raw()); var mappingService = new 
MapperServiceTestCase() { @Override protected Collection getPlugins() { return List.of(new UnsignedLongMapperPlugin(), new MapperExtrasPlugin()); } - }.createMapperService(mapping); + }.createMapperService(mappingXContent); var document = XContentBuilder.builder(XContentType.JSON.xContent()); - dataGenerator.generateDocument(document); + document.map(documentGenerator.generate(template, mapping)); mappingService.documentMapper().parse(new SourceToParse("1", BytesReference.bytes(document), XContentType.JSON)); } @@ -187,19 +165,17 @@ public DataSourceResponse.ObjectArrayGenerator handle(DataSourceRequest.ObjectAr @Override public DataSourceResponse.FieldTypeGenerator handle(DataSourceRequest.FieldTypeGenerator request) { return new DataSourceResponse.FieldTypeGenerator( - () -> new DataSourceResponse.FieldTypeGenerator.FieldTypeInfo(FieldType.LONG, false) + () -> new DataSourceResponse.FieldTypeGenerator.FieldTypeInfo(FieldType.LONG) ); } }; - - var dataGenerator = new DataGenerator( - DataGeneratorSpecification.builder().withDataSourceHandlers(List.of(dataSourceOverride)).withMaxObjectDepth(2).build() - ); - - var mapping = XContentBuilder.builder(XContentType.JSON.xContent()); - dataGenerator.writeMapping(mapping); - - var document = XContentBuilder.builder(XContentType.JSON.xContent()); - dataGenerator.generateDocument(document); + var specification = DataGeneratorSpecification.builder() + .withDataSourceHandlers(List.of(dataSourceOverride)) + .withMaxObjectDepth(2) + .build(); + + var template = new TemplateGenerator(specification).generate(); + var mapping = new MappingGenerator(specification).generate(template); + var ignored = new DocumentGenerator(specification).generate(template, mapping); } } diff --git a/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/TemplateGeneratorTests.java b/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/TemplateGeneratorTests.java new file mode 100644 index 0000000000000..3f57addcac51e --- 
/dev/null +++ b/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/TemplateGeneratorTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.logsdb.datageneration; + +import org.elasticsearch.test.ESTestCase; + +import java.util.Map; + +public class TemplateGeneratorTests extends ESTestCase { + public void testSanity() { + var specification = DataGeneratorSpecification.buildDefault(); + var generator = new TemplateGenerator(specification); + + var template = generator.generate(); + validateMappingTemplate(template.template()); + } + + private void validateMappingTemplate(Map template) { + // Just a high level sanity check, we test that mapping and documents make sense in DataGenerationTests. 
+ for (var entry : template.entrySet()) { + assertNotNull(entry.getKey()); + assertFalse(entry.getKey().isEmpty()); + switch (entry.getValue()) { + case Template.Leaf leaf -> { + assertEquals(entry.getKey(), leaf.name()); + assertNotNull(leaf.type()); + } + case Template.Object object -> { + assertEquals(entry.getKey(), object.name()); + assertNotNull(object.children()); + validateMappingTemplate(object.children()); + } + } + } + } +} From 69054ac83b63469b6b7fd72a8a259f3dc11d77f7 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Tue, 15 Oct 2024 18:59:38 -0400 Subject: [PATCH 126/449] Download IPinfo ip location databases (#114847) --- .../geoip/EnterpriseGeoIpDownloaderIT.java | 80 ++++++++--- .../geoip/EnterpriseGeoIpDownloader.java | 128 ++++++++++++++++-- ...EnterpriseGeoIpDownloaderTaskExecutor.java | 10 +- .../ingest/geoip/IngestGeoIpPlugin.java | 3 +- .../ingest/geoip/IpinfoIpDataLookups.java | 8 +- .../geoip/EnterpriseGeoIpDownloaderTests.java | 30 ++++ .../geoip/EnterpriseGeoIpHttpFixture.java | 87 +++++++----- .../ipinfo-fixture/ip_asn_sample.mmdb | Bin 0 -> 23456 bytes 8 files changed, 272 insertions(+), 74 deletions(-) create mode 100644 test/fixtures/geoip-fixture/src/main/resources/ipinfo-fixture/ip_asn_sample.mmdb diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloaderIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloaderIT.java index 0bc7114f626c4..ad1fce0ca689a 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloaderIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloaderIT.java @@ -22,6 +22,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; @@ -40,18 +41,24 @@ import java.io.IOException; import java.util.Collection; +import java.util.List; import java.util.Map; import static org.elasticsearch.ingest.EnterpriseGeoIpTask.ENTERPRISE_GEOIP_DOWNLOADER; +import static org.elasticsearch.ingest.geoip.EnterpriseGeoIpDownloaderTaskExecutor.IPINFO_TOKEN_SETTING; import static org.elasticsearch.ingest.geoip.EnterpriseGeoIpDownloaderTaskExecutor.MAXMIND_LICENSE_KEY_SETTING; import static org.hamcrest.Matchers.equalTo; public class EnterpriseGeoIpDownloaderIT extends ESIntegTestCase { - private static final String DATABASE_TYPE = "GeoIP2-City"; + private static final String MAXMIND_DATABASE_TYPE = "GeoIP2-City"; + private static final String IPINFO_DATABASE_TYPE = "asn"; @ClassRule - public static final EnterpriseGeoIpHttpFixture fixture = new EnterpriseGeoIpHttpFixture(DATABASE_TYPE); + public static final EnterpriseGeoIpHttpFixture fixture = new EnterpriseGeoIpHttpFixture( + List.of(MAXMIND_DATABASE_TYPE), + List.of(IPINFO_DATABASE_TYPE) + ); protected String getEndpoint() { return fixture.getAddress(); @@ -61,6 +68,7 @@ protected String getEndpoint() { protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString(MAXMIND_LICENSE_KEY_SETTING.getKey(), "license_key"); + secureSettings.setString(IPINFO_TOKEN_SETTING.getKey(), "token"); Settings.Builder builder = Settings.builder(); builder.setSecureSettings(secureSettings) .put(super.nodeSettings(nodeOrdinal, otherSettings)) @@ -87,22 +95,27 @@ public void testEnterpriseDownloaderTask() throws Exception { * Note that the "enterprise database" is actually just a geolite database being loaded by the GeoIpHttpFixture. 
*/ EnterpriseGeoIpDownloader.DEFAULT_MAXMIND_ENDPOINT = getEndpoint(); - final String pipelineName = "enterprise_geoip_pipeline"; + EnterpriseGeoIpDownloader.DEFAULT_IPINFO_ENDPOINT = getEndpoint(); final String indexName = "enterprise_geoip_test_index"; + final String geoipPipelineName = "enterprise_geoip_pipeline"; + final String iplocationPipelineName = "enterprise_iplocation_pipeline"; final String sourceField = "ip"; - final String targetField = "ip-city"; + final String targetField = "ip-result"; startEnterpriseGeoIpDownloaderTask(); - configureDatabase(DATABASE_TYPE); - createGeoIpPipeline(pipelineName, DATABASE_TYPE, sourceField, targetField); + configureMaxmindDatabase(MAXMIND_DATABASE_TYPE); + configureIpinfoDatabase(IPINFO_DATABASE_TYPE); + waitAround(); + createPipeline(geoipPipelineName, "geoip", MAXMIND_DATABASE_TYPE, sourceField, targetField); + createPipeline(iplocationPipelineName, "ip_location", IPINFO_DATABASE_TYPE, sourceField, targetField); + /* + * We know that the databases index has been populated (because we waited around, :wink:), but we don't know for sure that + * the databases have been pulled down and made available on all nodes. So we run these ingest-and-check steps in assertBusy blocks. + */ assertBusy(() -> { - /* - * We know that the .geoip_databases index has been populated, but we don't know for sure that the database has been pulled - * down and made available on all nodes. So we run this ingest-and-check step in an assertBusy. 
- */ logger.info("Ingesting a test document"); - String documentId = ingestDocument(indexName, pipelineName, sourceField); + String documentId = ingestDocument(indexName, geoipPipelineName, sourceField, "89.160.20.128"); GetResponse getResponse = client().get(new GetRequest(indexName, documentId)).actionGet(); Map returnedSource = getResponse.getSource(); assertNotNull(returnedSource); @@ -110,6 +123,16 @@ public void testEnterpriseDownloaderTask() throws Exception { assertNotNull(targetFieldValue); assertThat(((Map) targetFieldValue).get("organization_name"), equalTo("Bredband2 AB")); }); + assertBusy(() -> { + logger.info("Ingesting another test document"); + String documentId = ingestDocument(indexName, iplocationPipelineName, sourceField, "12.10.66.1"); + GetResponse getResponse = client().get(new GetRequest(indexName, documentId)).actionGet(); + Map returnedSource = getResponse.getSource(); + assertNotNull(returnedSource); + Object targetFieldValue = returnedSource.get(targetField); + assertNotNull(targetFieldValue); + assertThat(((Map) targetFieldValue).get("organization_name"), equalTo("OAKLAWN JOCKEY CLUB, INC.")); + }); } private void startEnterpriseGeoIpDownloaderTask() { @@ -128,36 +151,53 @@ private void startEnterpriseGeoIpDownloaderTask() { ); } - private void configureDatabase(String databaseType) throws Exception { + private void configureMaxmindDatabase(String databaseType) { admin().cluster() .execute( PutDatabaseConfigurationAction.INSTANCE, new PutDatabaseConfigurationAction.Request( TimeValue.MAX_VALUE, TimeValue.MAX_VALUE, - new DatabaseConfiguration("test", databaseType, new DatabaseConfiguration.Maxmind("test_account")) + new DatabaseConfiguration("test-1", databaseType, new DatabaseConfiguration.Maxmind("test_account")) ) ) .actionGet(); + } + + private void configureIpinfoDatabase(String databaseType) { + admin().cluster() + .execute( + PutDatabaseConfigurationAction.INSTANCE, + new PutDatabaseConfigurationAction.Request( + 
TimeValue.MAX_VALUE, + TimeValue.MAX_VALUE, + new DatabaseConfiguration("test-2", databaseType, new DatabaseConfiguration.Ipinfo()) + ) + ) + .actionGet(); + } + + private void waitAround() throws Exception { ensureGreen(GeoIpDownloader.DATABASES_INDEX); assertBusy(() -> { SearchResponse searchResponse = client().search(new SearchRequest(GeoIpDownloader.DATABASES_INDEX)).actionGet(); try { - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); + assertThat(searchResponse.getHits().getHits().length, equalTo(2)); } finally { searchResponse.decRef(); } }); } - private void createGeoIpPipeline(String pipelineName, String databaseType, String sourceField, String targetField) throws IOException { + private void createPipeline(String pipelineName, String processorType, String databaseType, String sourceField, String targetField) + throws IOException { putJsonPipeline(pipelineName, (builder, params) -> { builder.field("description", "test"); builder.startArray("processors"); { builder.startObject(); { - builder.startObject("geoip"); + builder.startObject(processorType); { builder.field("field", sourceField); builder.field("target_field", targetField); @@ -171,11 +211,11 @@ private void createGeoIpPipeline(String pipelineName, String databaseType, Strin }); } - private String ingestDocument(String indexName, String pipelineName, String sourceField) { + private String ingestDocument(String indexName, String pipelineName, String sourceField, String value) { BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.add( - new IndexRequest(indexName).source("{\"" + sourceField + "\": \"89.160.20.128\"}", XContentType.JSON).setPipeline(pipelineName) - ); + bulkRequest.add(new IndexRequest(indexName).source(Strings.format(""" + { "%s": "%s"} + """, sourceField, value), XContentType.JSON).setPipeline(pipelineName)); BulkResponse response = client().bulk(bulkRequest).actionGet(); BulkItemResponse[] bulkItemResponses = response.getItems(); 
assertThat(bulkItemResponses.length, equalTo(1)); diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloader.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloader.java index 3bbb0539f193a..f4ae440d171d3 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloader.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloader.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.Strings; import org.elasticsearch.common.hash.MessageDigests; -import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.query.BoolQueryBuilder; @@ -39,6 +38,8 @@ import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import java.io.Closeable; @@ -57,6 +58,7 @@ import java.util.regex.Pattern; import java.util.stream.Collectors; +import static org.elasticsearch.ingest.geoip.EnterpriseGeoIpDownloaderTaskExecutor.IPINFO_SETTINGS_PREFIX; import static org.elasticsearch.ingest.geoip.EnterpriseGeoIpDownloaderTaskExecutor.MAXMIND_SETTINGS_PREFIX; /** @@ -72,6 +74,9 @@ public class EnterpriseGeoIpDownloader extends AllocatedPersistentTask { // a sha256 checksum followed by two spaces followed by an (ignored) file name private static final Pattern SHA256_CHECKSUM_PATTERN = Pattern.compile("(\\w{64})\\s\\s(.*)"); + // an md5 checksum + private static final Pattern MD5_CHECKSUM_PATTERN = Pattern.compile("(\\w{32})"); + // for overriding in tests static String DEFAULT_MAXMIND_ENDPOINT = System.getProperty( MAXMIND_SETTINGS_PREFIX + "endpoint.default", // 
@@ -80,6 +85,14 @@ public class EnterpriseGeoIpDownloader extends AllocatedPersistentTask { // n.b. a future enhancement might be to allow for a MAXMIND_ENDPOINT_SETTING, but // at the moment this is an unsupported system property for use in tests (only) + // for overriding in tests + static String DEFAULT_IPINFO_ENDPOINT = System.getProperty( + IPINFO_SETTINGS_PREFIX + "endpoint.default", // + "https://ipinfo.io/data" + ); + // n.b. a future enhancement might be to allow for an IPINFO_ENDPOINT_SETTING, but + // at the moment this is an unsupported system property for use in tests (only) + static final String DATABASES_INDEX = ".geoip_databases"; static final int MAX_CHUNK_SIZE = 1024 * 1024; @@ -444,16 +457,15 @@ private void scheduleNextRun(TimeValue time) { } } - @Nullable private ProviderDownload downloaderFor(DatabaseConfiguration database) { - if (database.provider() instanceof DatabaseConfiguration.Maxmind) { - return new MaxmindDownload(database.name(), (DatabaseConfiguration.Maxmind) database.provider()); - } else if (database.provider() instanceof DatabaseConfiguration.Ipinfo) { - // as a temporary implementation detail, null here means 'not actually supported *just yet*' - return null; + if (database.provider() instanceof DatabaseConfiguration.Maxmind maxmind) { + return new MaxmindDownload(database.name(), maxmind); + } else if (database.provider() instanceof DatabaseConfiguration.Ipinfo ipinfo) { + return new IpinfoDownload(database.name(), ipinfo); } else { - assert false : "Attempted to use database downloader with unsupported provider type [" + database.provider().getClass() + "]"; - return null; + throw new IllegalArgumentException( + Strings.format("Unexpected provider [%s] for configuration [%s]", database.provider().getClass(), database.id()) + ); } } @@ -488,7 +500,7 @@ public HttpClient.PasswordAuthenticationHolder buildCredentials() { @Override public boolean validCredentials() { - return auth.get() != null; + return auth != null && 
auth.get() != null; } @Override @@ -529,7 +541,101 @@ public CheckedSupplier download() { @Override public void close() throws IOException { - auth.close(); + if (auth != null) auth.close(); + } + } + + class IpinfoDownload implements ProviderDownload { + + final String name; + final DatabaseConfiguration.Ipinfo ipinfo; + HttpClient.PasswordAuthenticationHolder auth; + + IpinfoDownload(String name, DatabaseConfiguration.Ipinfo ipinfo) { + this.name = name; + this.ipinfo = ipinfo; + this.auth = buildCredentials(); + } + + @Override + public HttpClient.PasswordAuthenticationHolder buildCredentials() { + final char[] tokenChars = tokenProvider.apply("ipinfo"); + + // if the token is missing or empty, return null as 'no auth' + if (tokenChars == null || tokenChars.length == 0) { + return null; + } + + // ipinfo uses the token as the username component of basic auth, see https://ipinfo.io/developers#authentication + return new HttpClient.PasswordAuthenticationHolder(new String(tokenChars), new char[] {}); + } + + @Override + public boolean validCredentials() { + return auth != null && auth.get() != null; + } + + private static final Set FREE_DATABASES = Set.of("asn", "country", "country_asn"); + + @Override + public String url(String suffix) { + // note: the 'free' databases are in the sub-path 'free/' in terms of the download endpoint + final String internalName; + if (FREE_DATABASES.contains(name)) { + internalName = "free/" + name; + } else { + internalName = name; + } + + // reminder, we're passing the ipinfo token as the username part of http basic auth, + // see https://ipinfo.io/developers#authentication + + String endpointPattern = DEFAULT_IPINFO_ENDPOINT; + if (endpointPattern.contains("%")) { + throw new IllegalArgumentException("Invalid endpoint [" + endpointPattern + "]"); + } + if (endpointPattern.endsWith("/") == false) { + endpointPattern += "/"; + } + endpointPattern += "%s.%s"; + + // at this point the pattern looks like this (in the default case): + // 
https://ipinfo.io/data/%s.%s + // also see https://ipinfo.io/developers/database-download, + // and https://ipinfo.io/developers/database-filename-reference for more + + return Strings.format(endpointPattern, internalName, suffix); + } + + @Override + public Checksum checksum() throws IOException { + final String checksumJsonUrl = this.url("mmdb/checksums"); // a minor abuse of the idea of a 'suffix', :shrug: + byte[] data = httpClient.getBytes(auth.get(), checksumJsonUrl); // this throws if the auth is bad + Map checksums; + try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, data)) { + checksums = parser.map(); + } + @SuppressWarnings("unchecked") + String md5 = ((Map) checksums.get("checksums")).get("md5"); + logger.info("checksum was [{}]", md5); + + var matcher = MD5_CHECKSUM_PATTERN.matcher(md5); + boolean match = matcher.matches(); + if (match == false) { + throw new RuntimeException("Unexpected md5 response from [" + checksumJsonUrl + "]"); + } + return Checksum.md5(md5); + } + + @Override + public CheckedSupplier download() { + final String mmdbUrl = this.url("mmdb"); + return () -> httpClient.get(auth.get(), mmdbUrl); + } + + @Override + public void close() throws IOException { + if (auth != null) auth.close(); } } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloaderTaskExecutor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloaderTaskExecutor.java index 5214c0e4a6a51..ae9bb109a3bf8 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloaderTaskExecutor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloaderTaskExecutor.java @@ -54,11 +54,15 @@ public class EnterpriseGeoIpDownloaderTaskExecutor extends PersistentTasksExecut static final String MAXMIND_SETTINGS_PREFIX = "ingest.geoip.downloader.maxmind."; + static 
final String IPINFO_SETTINGS_PREFIX = "ingest.ip_location.downloader.ipinfo."; + public static final Setting MAXMIND_LICENSE_KEY_SETTING = SecureSetting.secureString( MAXMIND_SETTINGS_PREFIX + "license_key", null ); + public static final Setting IPINFO_TOKEN_SETTING = SecureSetting.secureString(IPINFO_SETTINGS_PREFIX + "token", null); + private final Client client; private final HttpClient httpClient; private final ClusterService clusterService; @@ -106,6 +110,10 @@ private char[] getSecureToken(final String type) { if (cachedSecureSettings.getSettingNames().contains(MAXMIND_LICENSE_KEY_SETTING.getKey())) { token = cachedSecureSettings.getString(MAXMIND_LICENSE_KEY_SETTING.getKey()).getChars(); } + } else if (type.equals("ipinfo")) { + if (cachedSecureSettings.getSettingNames().contains(IPINFO_TOKEN_SETTING.getKey())) { + token = cachedSecureSettings.getString(IPINFO_TOKEN_SETTING.getKey()).getChars(); + } } return token; } @@ -166,7 +174,7 @@ public synchronized void reload(Settings settings) { // `SecureSettings` are available here! 
cache them as they will be needed // whenever dynamic cluster settings change and we have to rebuild the accounts try { - this.cachedSecureSettings = extractSecureSettings(settings, List.of(MAXMIND_LICENSE_KEY_SETTING)); + this.cachedSecureSettings = extractSecureSettings(settings, List.of(MAXMIND_LICENSE_KEY_SETTING, IPINFO_TOKEN_SETTING)); } catch (GeneralSecurityException e) { // rethrow as a runtime exception, there's logging higher up the call chain around ReloadablePlugin throw new ElasticsearchException("Exception while reloading enterprise geoip download task executor", e); diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java index cc0bec583483e..3107f0bed55e8 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java @@ -112,7 +112,8 @@ public List> getSettings() { GeoIpDownloaderTaskExecutor.ENABLED_SETTING, GeoIpDownloader.ENDPOINT_SETTING, GeoIpDownloaderTaskExecutor.POLL_INTERVAL_SETTING, - EnterpriseGeoIpDownloaderTaskExecutor.MAXMIND_LICENSE_KEY_SETTING + EnterpriseGeoIpDownloaderTaskExecutor.MAXMIND_LICENSE_KEY_SETTING, + EnterpriseGeoIpDownloaderTaskExecutor.IPINFO_TOKEN_SETTING ); } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookups.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookups.java index 19a98fb1b5746..5a13ea93ff032 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookups.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookups.java @@ -114,10 +114,10 @@ static Database getIpinfoDatabase(final String databaseType) { @Nullable static Function, IpDataLookup> getIpinfoLookup(final Database database) { 
return switch (database) { - case Database.AsnV2 -> IpinfoIpDataLookups.Asn::new; - case Database.CountryV2 -> IpinfoIpDataLookups.Country::new; - case Database.CityV2 -> IpinfoIpDataLookups.Geolocation::new; - case Database.PrivacyDetection -> IpinfoIpDataLookups.PrivacyDetection::new; + case AsnV2 -> IpinfoIpDataLookups.Asn::new; + case CountryV2 -> IpinfoIpDataLookups.Country::new; + case CityV2 -> IpinfoIpDataLookups.Geolocation::new; + case PrivacyDetection -> IpinfoIpDataLookups.PrivacyDetection::new; default -> null; }; } diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloaderTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloaderTests.java index 88c37409713ac..e1cd127be9c87 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloaderTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloaderTests.java @@ -488,6 +488,36 @@ public void testMaxmindUrls() { } } + public void testIpinfoUrls() { + // a 'free' database like 'asn' has 'free/' in the url (automatically) + final EnterpriseGeoIpDownloader.IpinfoDownload download = geoIpDownloader.new IpinfoDownload( + "asn", new DatabaseConfiguration.Ipinfo() + ); + + { + String url = "https://ipinfo.io/data/free/asn.mmdb"; + assertThat(download.url("mmdb"), equalTo(url)); + } + { + String url = "https://ipinfo.io/data/free/asn.mmdb/checksums"; + assertThat(download.url("mmdb/checksums"), equalTo(url)); + } + + // but a non-'free' database like 'standard_asn' does not + final EnterpriseGeoIpDownloader.IpinfoDownload download2 = geoIpDownloader.new IpinfoDownload( + "standard_asn", new DatabaseConfiguration.Ipinfo() + ); + + { + String url = "https://ipinfo.io/data/standard_asn.mmdb"; + assertThat(download2.url("mmdb"), equalTo(url)); + } + { + String url = "https://ipinfo.io/data/standard_asn.mmdb/checksums"; + 
assertThat(download2.url("mmdb/checksums"), equalTo(url)); + } + } + private static class MockClient extends NoOpClient { private final Map, BiConsumer>> handlers = new HashMap<>(); diff --git a/test/fixtures/geoip-fixture/src/main/java/fixture/geoip/EnterpriseGeoIpHttpFixture.java b/test/fixtures/geoip-fixture/src/main/java/fixture/geoip/EnterpriseGeoIpHttpFixture.java index 59205aa546cd2..3f3e0c0a25578 100644 --- a/test/fixtures/geoip-fixture/src/main/java/fixture/geoip/EnterpriseGeoIpHttpFixture.java +++ b/test/fixtures/geoip-fixture/src/main/java/fixture/geoip/EnterpriseGeoIpHttpFixture.java @@ -11,20 +11,18 @@ import com.sun.net.httpserver.HttpServer; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.hash.MessageDigests; import org.junit.rules.ExternalResource; -import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import java.io.UncheckedIOException; import java.net.InetAddress; import java.net.InetSocketAddress; import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.StandardCopyOption; import java.security.MessageDigest; +import java.util.List; +import java.util.Objects; /** * This fixture is used to simulate a maxmind-provided server for downloading maxmind geoip database files from the @@ -32,21 +30,17 @@ */ public class EnterpriseGeoIpHttpFixture extends ExternalResource { - private final Path source; - private final String[] databaseTypes; + private final List maxmindDatabaseTypes; + private final List ipinfoDatabaseTypes; private HttpServer server; /* - * The values in databaseTypes must be in DatabaseConfiguration.MAXMIND_NAMES, and must be one of the databases copied in the - * copyFiles method of thisi class. + * The values in maxmindDatabaseTypes must be in DatabaseConfiguration.MAXMIND_NAMES, and the ipinfoDatabaseTypes + * must be in DatabaseConfiguration.IPINFO_NAMES. */ - public EnterpriseGeoIpHttpFixture(String... 
databaseTypes) { - this.databaseTypes = databaseTypes; - try { - this.source = Files.createTempDirectory("source"); - } catch (IOException e) { - throw new UncheckedIOException(e); - } + public EnterpriseGeoIpHttpFixture(List maxmindDatabaseTypes, List ipinfoDatabaseTypes) { + this.maxmindDatabaseTypes = List.copyOf(maxmindDatabaseTypes); + this.ipinfoDatabaseTypes = List.copyOf(ipinfoDatabaseTypes); } public String getAddress() { @@ -55,7 +49,6 @@ public String getAddress() { @Override protected void before() throws Throwable { - copyFiles(); this.server = HttpServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); // for expediency reasons, it is handy to have this test fixture be able to serve the dual purpose of actually stubbing @@ -64,26 +57,33 @@ protected void before() throws Throwable { this.server.createContext("/", exchange -> { String response = "[]"; // an empty json array exchange.sendResponseHeaders(200, response.length()); - try (OutputStream os = exchange.getResponseBody()) { - os.write(response.getBytes(StandardCharsets.UTF_8)); + try (OutputStream out = exchange.getResponseBody()) { + out.write(response.getBytes(StandardCharsets.UTF_8)); } }); // register the file types for the download fixture - for (String databaseType : databaseTypes) { - createContextForEnterpriseDatabase(databaseType); + for (String databaseType : maxmindDatabaseTypes) { + createContextForMaxmindDatabase(databaseType); + } + for (String databaseType : ipinfoDatabaseTypes) { + createContextForIpinfoDatabase(databaseType); } server.start(); } - private void createContextForEnterpriseDatabase(String databaseType) { + private static InputStream fixtureStream(String name) { + return Objects.requireNonNull(GeoIpHttpFixture.class.getResourceAsStream(name)); + } + + private void createContextForMaxmindDatabase(String databaseType) { this.server.createContext("/" + databaseType + "/download", exchange -> { exchange.sendResponseHeaders(200, 0); if 
(exchange.getRequestURI().toString().contains("sha256")) { MessageDigest sha256 = MessageDigests.sha256(); - try (InputStream inputStream = GeoIpHttpFixture.class.getResourceAsStream("/geoip-fixture/" + databaseType + ".tgz")) { - sha256.update(inputStream.readAllBytes()); + try (InputStream in = fixtureStream("/geoip-fixture/" + databaseType + ".tgz")) { + sha256.update(in.readAllBytes()); } exchange.getResponseBody() .write( @@ -93,10 +93,33 @@ private void createContextForEnterpriseDatabase(String databaseType) { ); } else { try ( - OutputStream outputStream = exchange.getResponseBody(); - InputStream inputStream = GeoIpHttpFixture.class.getResourceAsStream("/geoip-fixture/" + databaseType + ".tgz") + OutputStream out = exchange.getResponseBody(); + InputStream in = fixtureStream("/geoip-fixture/" + databaseType + ".tgz") + ) { + in.transferTo(out); + } + } + exchange.getResponseBody().close(); + }); + } + + private void createContextForIpinfoDatabase(String databaseType) { + this.server.createContext("/free/" + databaseType + ".mmdb", exchange -> { + exchange.sendResponseHeaders(200, 0); + if (exchange.getRequestURI().toString().contains("checksum")) { + MessageDigest md5 = MessageDigests.md5(); + try (InputStream in = fixtureStream("/ipinfo-fixture/ip_" + databaseType + "_sample.mmdb")) { + md5.update(in.readAllBytes()); + } + exchange.getResponseBody().write(Strings.format(""" + { "checksums": { "md5": "%s" } } + """, MessageDigests.toHexString(md5.digest())).getBytes(StandardCharsets.UTF_8)); + } else { + try ( + OutputStream out = exchange.getResponseBody(); + InputStream in = fixtureStream("/ipinfo-fixture/ip_" + databaseType + "_sample.mmdb") ) { - inputStream.transferTo(outputStream); + in.transferTo(out); } } exchange.getResponseBody().close(); @@ -107,14 +130,4 @@ private void createContextForEnterpriseDatabase(String databaseType) { protected void after() { server.stop(0); } - - private void copyFiles() throws Exception { - for (String databaseType : 
databaseTypes) { - Files.copy( - GeoIpHttpFixture.class.getResourceAsStream("/geoip-fixture/GeoIP2-City.tgz"), - source.resolve(databaseType + ".tgz"), - StandardCopyOption.REPLACE_EXISTING - ); - } - } } diff --git a/test/fixtures/geoip-fixture/src/main/resources/ipinfo-fixture/ip_asn_sample.mmdb b/test/fixtures/geoip-fixture/src/main/resources/ipinfo-fixture/ip_asn_sample.mmdb new file mode 100644 index 0000000000000000000000000000000000000000..3e1fc49ba48a522ac39e5f0242175472f81dcc64 GIT binary patch literal 23456 zcmai)1$-38`^GOMNFlft2(UP9$@N@PJ(3W+2)Q7EQo6Zpl7r+fy}JZL-QC^Y-QC^Y z-QC^)&&)h~H|?kW|M)O`-}jk!W@pECb`OWcQQ&Y4UF>i;29bph$KWOKi|7v~hmb?b zVdQ4yaB>7$OpYW+k)z2mx=9b|C4Hoy43I%GM25*SavO46ayxQ+atCrp zGD2bw$azpsR*;n>=7h|zBCE+7vX-nPqhvkVKsJ(1wo zYsr5-iD=sIpT;TJ%`kebS097w2`qLusfA4`=X0_0{S{lUr)Uo zdXJ_jsi&a#(%x0ePgBbvKdb3|)cc`t$m>JVXO01+!;m&HW;b$oau0G(McMAX5WhF| zeKdVvYWtDM3Hr&@Pa#hwPt)Q~hy5+}Gd2Azls}vLIpn!o{&~nhpE~YE-V^T2aUr#f zU|+21m%zVN^Dl#cxx+C`l(|At)FJ(=pxw)Qt|qS`uO+WT%=NTyAa5jZQj~piGvaQc zek*yKmVZ0)@1TAsc^7#%d5_{SdwlMren0tuqP*UN)E*)qCLh6d9;N*l`8fH6qAdR; z;+~@ZH2UKM+Rs3LHt!4F@jTK;NH1vRUu67C&|jwg3i&Genxeek>xg?p^WS95ThQOu z^mnMgOTLHv_i3|@vc3-;j?MhkKSs>&NS`QQ=6?$RGtK{;v0sp1BL6GeUz6V`%JTMH z{SI;8GyezjM=kCrYCj|H7o^{iens23#szI#*82zY|J2I=B_I2ag3(GVD1bjGpI=Z& zy$JeXO&>yiC^?MW3~|G0k06T`#q|qDQX8eX8OE<*47IW3IC8wAEH{DLM8r>`Jz2}& z9QqXMQ^{#sehKoY=l$VZz@LeP{wOez&P1Ap)P^)0Nwjqil8<%GCFhaz$pz#>#G+2o zR@vSy;crEI5m`z)$*mP-ybJMe>K;w^Ivm53)H&Z|e1KYz4556OcA27#+lJb<6-h+C%l%URC~=qok775*ybttNNU^4Cz?8F6bheHZv<-XDQJDTpH-fYgrE zhh*jR3p(I;YJL~|gyye) zn5G{N{Rkv+Z%1nSQSgsu{4wOQ7n}_J6r?klcPe=rdAedT zVnmW{KMV1X(LNjYIY_tDKNt3SNLSE5ANB>xmSr!bei8JGXuMZ%lji;jJX5)oq4_BF6wua_mKA@ z?mpW0E6REwfc~K7KLr0_#yz6xk2)M9F($IiQQxEQqp#HbC5)>=Ts7?) zEx#6eo#sa!jxm@^vL73ew*jdUDNd~k_EOr-TDchX7V68$X!AlL&w_5`5o{(HNT5-3FzxIeLehc=Jk+CGNr}! 
zB5qgeq77Nv8O1R#LhGZ}pVwsDHc}rz%ptTlk-L$*lY1a$Pi4!pdr{vT`aZPx)$;eF zwm*3Q;tr&JkQR5a!!dRk^+PrNFlvXBN03L7N0CRP-eYJVt0>!d9JS+7zkN?9Ivj;> zBK{<#zmQI5%qg(HL^>7eW~9@opH7|u?J}e@>7PZOtthW|4z+X1^ALYN?F+~Y6=nQI z)GkKcB}kVle=M#ql8BLUmm}{A+E;4&RnV`devPJI3;jCHzn(ESK);dpO^RdxpnnVU z-axt)=>=-H!M>gL9k3s!eJAX@lr7tLH~f2;cQ1J#dA}C-0JR4Z|B$lB)ob~WF#l2V zG4gT5KcU4vN&PAEY4RDwJxlvJ#c^VspLaL}bNwmS!%Ld~GRwR|zKZ^AYrqX@5d~N`9s&`}cEdUpO4&xqhVn74@&l zZ&2U2w7(<2Cx0M+B!41*CUI}F&R-R!{Tu50UGwd^@~6_q^ZpARSW^W^Gm!?Vyh4bg zj>00Ou}FjS^%M?a-caaV_l28j`NNq%0{O+7J`(;Y%^wYajPfVAHGLfP@yaipKu#nl zk(0^I$tjAm%~Mf+8ub!#x|Tmf*%SH^XCNJgG)v2$4Sx>OzDRSCwn3VQk zZIMUCmWF8 zn9ra1IQ*q~zpxp8O!HeAx*Lk z@o{C#_P0~F$PUFxh%fA-mVmtxX&q88we_&Ol`ZS(fuCevDz8tvj@qtd8gZF?eqok+ zA9T!FQJ1W5gTpbIYfbtC$lt`g-L$yfsqI1Ti8zjxEVnoOeN^6L_Iu%ejNP9+fIN^q zh&)(P)^iBzIh6Wg?@vA%{?SMeARU8r6U!V6`#7XC=pPUJgnYTe6XBnv z`KU);{}l36@-*^v#mPUR+?hz1Qa=m!*~*sX&w+og=AQ@ue9gZA{)L)<5&Vla{}N?y zZm*fklwWu`c?EeTc@=pzc@65jmbN`7uBU&4qHO1l4#(#9J>RV9w=nir@;35z@(%J& z@-FgjT=yQ@_mcOK_bX1BK>tBS*^Y;(J&f{?X!@hnA0x&3euDOsuwSJ86!|o?XOu1L zeU|!j(4VLMLOy>A)>Pq3)L$lF$>$foO6@hoy{_qRI2=>#__rAIHh$v%5OME-TZ8X{ zxYxq>kiKWk`>;Qtjabn}+0Kuke@y!m@>B9N@^eKQ{{^)#$*;(-QO`G8+_%)fb2z3B zq5T8eHJJ8~h{s%(b^fdpTT zc5~=cXiwGhr$H~#{OOFDLCz!%#Ld#;W>cR-&L!ue4>)#33$*x!D8B_`w$${kpf93c zs_9OLV_Fq;7h;Y#3ktNiIP=+b{F0;ny&)maHSA$XiOgo@^i+k=KO2;2IY!YGz(c zQEDwJrf69{R$g}n;#aCT>9@jP#qz5)eJ5&b$ej_lmi8{N+i9C*8yQzD!F(yQs9_Dr zYjx)Hq@O^{{z&VXzaI7g?QXJ%Od>C(Y+1IK`mSV}%pfkS#r47O*Zd8P+o)KAdn($b zGwpPu2X>;GeGfXDEBRSX*Zy z-KPAavtgfubQRLMTKswN&)5757=Iyo5%Moax{Qo2cE4xLau7n$MqZxAS)9-GR6}^Z7-0!M|Jc?}2}>=HCbZ zewKYe(;uYv5cx3qh~o5@Q0_6L7pXrE`w3;sc0Wn|De`IZ8N@wH`#D7!|2*^;^8Soj z@No^1C-Yy1{|fC_$=9^}*QvchzKQs^Xuqw+y#xJS>hF>7I~+5xR*ODR6xWmfNBIA7 zKIk8VCEzEF{}lG`v_FIWxw2(Dzo7ml`4#fN*7Coh{w?_(`91jq;(tW?mHtn#e@1)6 zJpV;ewomxKsTh$bul)!4C)zs#`F|;YutQPA3@)HHh%AIwq~#-c@DSz=C5LJGn<0O= z@@Me+vfg6GjwDARZnRcz4E3?(IC4BW0r3-&rqG`Rd$PkZa~t}bD+*tv!Bdf@5QF0&Lj;*8H0HxIUD8YAZ*K=MN513zKD}=!

    Z>K2RvpuyP$Q=T2SY!E`k}B7Q?_vn?gL5Ibp&~&qSTI}b~Jemc`SJx>N=kG3FL|7Ns6+b zlM#1{=AX)#)5z1wGZ1&C7Izl)v!S1(>E}{E4?5?Ur+r8O}~-aO~}8QHv3uDdn@$YsNYWBLEfo2 z3v*J$%J{nxe-G_@$@|Fr$p^>>$%n{?6=gk-pq@uH|1tQFGwuoUN%AT3Y4REJS@JpZ zdGZDFMa5Yk(SMnIg?yEK4f_)2_~6&+zd^nU-3R?G&;hkSnME*?vLjI~ayB1}C zL;9WiALO6pU->w3tsw=xYX9`yN|z5xD0=50Z4Np3|hQk3V?!C%U_W=)SlZ_)f^@Ru`gg{H5B-b#HHxtiRGT%$Obl=rxndApD% z*+#}uZ#!*EQI_j~-l_Rr@DrN9j%C(E@21^DCbhT}wO(>pGL3Q>+F7!X>?b#n8_5B3 zlcH?rZm4H>>U(JVp30v4DDwAa{yyZss@s?OMcLNBep${|0I| zk~bmlX4u%`xP`{VFkG!9JfP7GKo;`OTrv3fe#ylRuC@l0T6@lfRI^lE0C^lYfwZl7A@YeF*%YP1&*dYh8(LnpX+Goc=$f%O#mm-o08j=lH7`1M3$0Ha%<8>x4#6 zqo67h^LxWSuTfmzYYud$;)(2lsL?1{)@1uec`DtTN}Jh4DrvY^gq-fM)8liaww$Rd ztB%CHVZYDgF^cO_=K5}PLsHe%+8C*eMpiT!wTdTqA`rO_?*cCL28@u|<4OSxFo$6~>b=Frei>#=uL0yf@Dq@CvwcF=(dxK7QK-{P8 z1pUt3je5{?Zlk!l$4qAv$xMHuyW4X1o6f$BQL?feH*TaljCw6|k+D2txLZA5RP1pE ze5f~XYt_2le%x`q3q`H8YTfd3<)QC-dis)yxa_?Qn(6VO^IT51FC?m?6LzzivI|2l zHwL6J*}0KJm{FBR8>&_s4QfCS+~pOQf;j-?@A<1qs8jB z#Eg`cxV=879AH#JJ6DM}9CEt@MsZbgAnxo;^&2G>ODY;#MM2C0AEt-ssJxvYLAM_( zr?{abkhWUCT(U+wZhNkYzIVBC zJ5^?u{T_?7%x^JbR=PhCw=#A;tpSXn7uATVMcZ%Jf~nvR2f_iYrsiZO(Ut0MPb51t z-B~VxW9xQ^|uD;GLi}PxAwRG&M zWyA(DS*r)Fl(S)V5DPRYW`t;^?Rat1_C)gs{4S$7+L2Brvk7|ztQoO{{y!#}SY?qO zD~&Z__ruDd&xsW#)&;Hr+v^m)Ywu7nf?^Mlt3RH?Y{TEa^+ri+x!4cNQ{^dKBbl_~ z*?cc74+fng`dHd3IyZ_cYZ^(!BzkVRuo*w8{b2nC}Jfp%)V!v5$M0&ADwTAr85Ehc` zN!q#bMzefApSaOgnJkKwCX(%anQS_NValYs`^5H(ooY=%tR>Q1hxS!0i?uY@RK|>0 zV{}6kc|CsIN>wM8nq0Ljs#;q9Ipw%) z!)Oe)aXbh_UA7}OI`J%&doMZ$TU&K+cf3!H z{sp{F{6%d!J71Y6ga;COd4tuK*|pD<`{X9mC|SM2YBOR&$xYjZP1yk~;VhQ#YPSnZ z0e6juHXdYoCs(oC>kW%}(iD$p+R`brz0FK=Q)sElbeT!9k}+kBrhYVB)VaptcA-Xh z04rHO9yKpF8Xi|zj7F`w-o)ub-F<6BI@tH{XiLh6nAvSCb}q;Gib}Dmx-b@4yrQYL zXU`Kk)jWO=mRe(6Z00?wwgeu!VmmKcGoji_L{rJm+Ju?xH0ov6-v>^3y=HPCzp7is z(i7W+xSlYvd*)V^Rl`R6FTl(pT>1Y)uqywX*Afa$ME%A=?owEEon2E!SN=` z^UZ2E&JJR`!+s&IK}VY;ZXcdw*xhG;V3f@MRty*(EbfrYiD$4Vp&T?aKVUwWCyZ99 zPQZc_4>V^yiN=j9udZo`v{Xhb%Ny$r*%g^Sqc@%EPbA|w$MtRMO7$83Dg$#{oaSV= 
zV_o7Ij0G+)nDewpv=>tk^SCD7(}v!b{ZL+BzgU&V^zvg*56eLlCkS{R@f^x+xbBcY zj2(TGmFzYL>}45UMOQopu|v3Vd2|m7*nsMOb2 zwpKP9_Kl+xQG*9_DXb2HbnN=XsUzSEqq@4zRJ+;U&QoerRjR`F?B|s@tcl~5JYm_6 z-`+9ABngN80rbwwOv381l9|#j%k0i#ep;9oT=g-PWmIcfMx;u1pr{{Pl6qXp3rJ6N zq}WHWCwb(dwJw7yF%R3F>v}QNfo3aXndx|!(U|TulZj1g#^G^Tlk7;Pdz6c%oPPl4 z=4?$elTBp%vX*#y=ECFRK#76I>3-4wqAD@v)=Vgug4k8$k^H`k?z_Rt7`1)LUHhy=+B7Vq zsxN`Z^?m0-#zD(eR}@b_G1b)-ZBJV-0UTO!!N#J+9FHr`0DoJFEIS|x@=wuw%DO3Sl#Q)hRo&BRd4)0Q)t zGD@1NqK%6q(elQIhRX65Lk#k2*%)*b9yqK`IND+kxa2lnyCETZ%yQzXTGFsWIY!Lc z?2L*7gs298J$Nrv)kw#lFJh0uZYvHlP4TYIQgNni$FoFrePelbRcU=?b4^7}q`_Xf zDDQS*s(9r?Q1kM0+U0ZmaGYygpX$M)%zE7Y)_OS;S7O=-#aOu#Z=F~teR#pa)75UM z+wE1SB{`zPvF}$j)a7#pP;E`z+9c;BPMh`Rm8%*mTa1h|Eze7Mo{ARXxg#DX>V}o4 z?Ok{*`9oN9HQ4nsR@|AfjFOs&w2g?`-(@A3uer{bk0f}1yGbu*#`2`KF{?&2!&ej4 zd94kVD_aaTutu!1d3jAa7I4(rl)G8%47q9N7LGRW+#auZnX9+@GV8Ii;(UoEi1Dqo zeT;BK9UwwzXArli)|Tz*Pjrtf=nY~uEbS0)V5RuDfiDrMv@?l)EGFN|j7Dl)7MM18 zdSC!TYWC5wcL;3HVYfVzq|!LYi3bzL+L;z->lqbk9J%{b299aWz)rE=j3?Gv*tNy6 zkFwF8N*Rl-RA<_3FI{C}Ly>!hxH4MrmM;sk9rW^jA9MxX*eqi5ZAR24_9v0V9@Q-0 zXmanoIM3q?gqRrEi19`)_8~Db=;<#>7;Z7aqVaAsEjMDPDfYZEEmph>S23dX_)3B) zEO)**?gU$$Jk^=;Y@*-H=uC)%&#;P<4ce{)2|lqMt*U$6m@Ej zYK(Z@^82uJ)O7dZ$ z(|x_EE~{U@a>zz5EXF|#O~E&wK+q|l^f^cCaDT|}K?Ms7?l(%7G?>XOZb=mLIK|tq zJilvB?z%X%h_i8hW}v4nk#eg24==DW>D$L5@lX_{v74&hDd*TNKxYK~V#Jqbdpq@o zEZ(GJElpMVWo&<%L5&z`S3sRjZ6`Nkm_%-$$BTQaOC<5s&f>F#*g30elxMhi65oo% zK};T#ZAWYk>h>zpCh>h|VNnF@vq;QakIV1GXHa!uqT}(S7C8D$jZZI@*<&p36Q}r0##k2P%oc4y zJ85G_}p{A{NBL_7#EqrBo>RaPTB>~y)o4Z5*ftF7)t#&mWh(27c7 z8jS<%@L@|FtS}SgI4e_sqTm!Q4&%}M%bPfmudI!g8?!5$OIy^V3+G7rVG^hF)v`Wu z(i2Y(%~7opy&m+V#=1l=UTQP!>#9U=weaK(iBk%?QS2L{270+c77GM-geenkH?4N7 zyN~Z{%`IIPzO^aIs7j~$dQsm*Y&-5Ud|NIHn`LEUJIi?;_*3>T|ES+Yyk1WT7nvQx z8~zehG+7q)`@>~;jMz0P*Zj9yq^7FEr$!IXYYm-g%Sv{1rxN^9(^%D9S=nGLi8j_$ zs1L1crpT)Vg7Gp>M<8xrCg=Z4YwhcxYr`QI&Ro@a6G7j}PYa{e_w#@Iu##aHmE-$7 z&#-GIVViT8;dRy}uAH;~>#Fvpak#~Y_n-@Bj~KpN#@A=s6L@>W7Z$#v*W!C+d0kBW 
z+=HGRkD3GMMP5Xi-hUqp`x3aj03HOm0BTKaN+sL4Ce^ZNTCUXmQgyGH<_Wl5W$28K zG7~>2h&$2*+~R-TmKro)z~{vd*V2YV`cDdH6AxgTIz*qRLOTQ`vHZkOyO19 z&9e5mb{Xma>-4s}R4gQX?Zd=u%%<>DjoR$g?ZsO1icw>KqnBOkE9>x~H$B*$RF~QU zZs~t?sV5v354MF9R$_;qfZa#0(D%;@egC^cd|MSSLNR<(X-lPXtdify?EN4HHH}lP z8~b*|-nb{Bzx-uBQ_a4d{a@o@cNXTKH{iwuuf~qUmzmlMn1Ab=U|EN!&0S{U$2-+G zy6|6@z;s(M$tYeh((TxHryB8+i364RDY6;O#OKW1tA+fn zq`smuii39UgB!X(~5Gx*xme#ZBSMzu4K9TAO;~cntR9hY))qHZ=FOSxNq8 zYi+`H#-naO)_usMu0#8Ou6K7x`~tY57heE-#rJ;v#rJOc@ksVwQ>>*j5^bqA@WQ^L zu^HQ$x~I0XaL~RdX=Bjw=Rg0!c#C_&9xWaUQSsJYn#CKM{K60wzYmJ{HT63VM#yxv zWB>4Z%dj5g6YTH8e_cv#V*U_D7*AU>BQ5p$XLY@a^BTSa;zLQwh~OtHd>W5s`{MXM z&wE1a+tqw9v31*Y{po?V>XYW$9&=qPy;gp3PbJS8 z^v~QxGM{^PU0YwGyM3+In~HZ84eA{9@aXz>Gi$b)8Eb8Jpx0_m^x_>dWh8pn;y**I z&6qvC_%)%Yr@gJN-O9w%iC*y|**lA^$GwOecB ZDf}pQ@6g3{X}p@J+t+3io2+w;{{xjsLht|p literal 0 HcmV?d00001 From 8ccfb227c2131c859033f409ee37a87023fada62 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 15 Oct 2024 16:27:41 -0700 Subject: [PATCH 127/449] Revert "[EIS] Validate EIS Gateway URL if set (#114600)" (#114867) This reverts commit 39168e139d98b2eacade007fcd616715a6106c10. 
--- .../ElasticInferenceServiceSettings.java | 53 +-------------- ...InferenceServiceSparseEmbeddingsModel.java | 8 +-- .../ElasticInferenceServiceSettingsTests.java | 64 ------------------- ...enceServiceSparseEmbeddingsModelTests.java | 28 -------- .../elastic/ElasticInferenceServiceTests.java | 2 +- 5 files changed, 3 insertions(+), 152 deletions(-) delete mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettingsTests.java diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java index 170b39e0bf76c..8525710c6cf23 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java @@ -7,65 +7,14 @@ package org.elasticsearch.xpack.inference.services.elastic; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import java.net.URI; -import java.net.URISyntaxException; import java.util.List; -import java.util.Objects; -import java.util.Set; -/** - * Class encapsulating any global setting for the EIS integration. - */ public class ElasticInferenceServiceSettings { - public static final Setting EIS_GATEWAY_URL = Setting.simpleString( - "xpack.inference.eis.gateway.url", - new EisGatewayURLValidator(), - Setting.Property.NodeScope - ); - - private static final Logger log = LogManager.getLogger(ElasticInferenceServiceSettings.class); - - /** - * Class to validate the EIS Gateway url set via `xpack.inference.eis.gateway.url`. 
- */ - public static class EisGatewayURLValidator implements Setting.Validator { - - private static final Set VALID_EIS_GATEWAY_SCHEMES = Set.of("http", "https"); - - @Override - public void validate(String value) { - if (Objects.isNull(value) || value.isEmpty()) { - // No validation needed, if eis-gateway URL is not set - log.debug("eis-gateway url not set. Skipping validation"); - return; - } - - try { - var uri = new URI(value); - var scheme = uri.getScheme(); - - if (scheme == null || VALID_EIS_GATEWAY_SCHEMES.contains(scheme) == false) { - throw new IllegalArgumentException( - "[" - + scheme - + "] is not a valid URI scheme for the setting [" - + ElasticInferenceServiceSettings.EIS_GATEWAY_URL.getKey() - + "]. Use one of [" - + String.join(",", VALID_EIS_GATEWAY_SCHEMES) - + "]" - ); - } - } catch (URISyntaxException e) { - throw new IllegalArgumentException("[" + e.getInput() + "] is not a valid URI", e); - } - } - } + static final Setting EIS_GATEWAY_URL = Setting.simpleString("xpack.inference.eis.gateway.url", Setting.Property.NodeScope); // Adjust this variable to be volatile, if the setting can be updated at some point in time private final String eisGatewayUrl; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java index b18b362dd099e..bbbae736dbeb9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java @@ -108,12 +108,6 @@ private URI createUri() throws URISyntaxException { default -> throw new IllegalArgumentException("Unsupported model for EIS [" + modelId + "]"); } - var 
uriString = elasticInferenceServiceComponents().eisGatewayUrl() + "/sparse-text-embedding/" + modelIdUriPath; - - // We perform the same validation here as when reading the setting to make sure that our extended URI is still valid - // This method throws, if the URI is invalid - new ElasticInferenceServiceSettings.EisGatewayURLValidator().validate(uriString); - - return new URI(uriString); + return new URI(elasticInferenceServiceComponents().eisGatewayUrl() + "/sparse-text-embedding/" + modelIdUriPath); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettingsTests.java deleted file mode 100644 index a053a5eb33cfe..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettingsTests.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.services.elastic; - -import org.elasticsearch.test.ESTestCase; - -public class ElasticInferenceServiceSettingsTests extends ESTestCase { - - public void testEisGatewayURLValidator_Validate_ThrowError_OnMissingURIScheme() { - expectThrows( - IllegalArgumentException.class, - () -> new ElasticInferenceServiceSettings.EisGatewayURLValidator().validate("www.missing-scheme-gateway-url.com") - ); - } - - public void testEisGatewayURLValidator_Validate_ThrowError_OnWrongURIScheme() { - expectThrows( - IllegalArgumentException.class, - () -> new ElasticInferenceServiceSettings.EisGatewayURLValidator().validate("file://www.missing-scheme-gateway-url.com") - ); - } - - public void testEisGatewayURLValidator_Validate_DoesNotThrowError_ForHTTP() { - var scheme = "http"; - - try { - new ElasticInferenceServiceSettings.EisGatewayURLValidator().validate(scheme + "://www.valid-gateway-url.com"); - } catch (Exception e) { - fail(e, "Should not throw exception for " + "[" + scheme + "]"); - } - } - - public void testEisGatewayURLValidator_Validate_DoesNotThrowError_ForHTTPS() { - var scheme = "https"; - - try { - new ElasticInferenceServiceSettings.EisGatewayURLValidator().validate(scheme + "://www.valid-gateway-url.com"); - } catch (Exception e) { - fail(e, "Should not throw exception for " + "[" + scheme + "]"); - } - } - - public void testEisGatewayURLValidator_Validate_DoesNotThrowError_IfURLNull() { - try { - new ElasticInferenceServiceSettings.EisGatewayURLValidator().validate(null); - } catch (Exception e) { - fail(e, "Should not throw exception for, if eis-gateway URL is null"); - } - } - - public void testEisGatewayURLValidator_Validate_DoesNotThrowError_IfURLEmpty() { - try { - new ElasticInferenceServiceSettings.EisGatewayURLValidator().validate(""); - } catch (Exception e) { - fail(e, "Should not throw exception for, if eis-gateway URL is empty"); - } - } - -} diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModelTests.java index 27d86e3d59461..c9f4234331221 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModelTests.java @@ -15,34 +15,6 @@ public class ElasticInferenceServiceSparseEmbeddingsModelTests extends ESTestCase { - public void testCreateURI_ThrowError_OnMissingURIScheme() { - expectThrows(IllegalArgumentException.class, () -> createModel("www.missing-scheme-gateway-url.com")); - } - - public void testCreateURI_ThrowError_OnWrongURIScheme() { - expectThrows(IllegalArgumentException.class, () -> createModel("file://www.missing-scheme-gateway-url.com")); - } - - public void testCreateURI_DoesNotThrowError_ForHTTP() { - var scheme = "http"; - - try { - createModel(scheme + "://www.valid-gateway-url.com"); - } catch (Exception e) { - fail(e, "Should not throw exception for " + "[" + scheme + "]"); - } - } - - public void testCreateURI_DoesNotThrowError_ForHTTPS() { - var scheme = "https"; - - try { - createModel(scheme + "://www.valid-gateway-url.com"); - } catch (Exception e) { - fail(e, "Should not throw exception for " + "[" + scheme + "]"); - } - } - public static ElasticInferenceServiceSparseEmbeddingsModel createModel(String url) { return createModel(url, null); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java index 
5a5eae9f51670..d10c70c6f0f5e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java @@ -492,7 +492,7 @@ private ElasticInferenceService createServiceWithMockSender() { return new ElasticInferenceService( mock(HttpRequestSender.Factory.class), createWithEmptySettings(threadPool), - new ElasticInferenceServiceComponents("http://valid-eis-gateway-url.com") + new ElasticInferenceServiceComponents(null) ); } } From 917903df0689c7a2239e0d4902e2a99552a98f0a Mon Sep 17 00:00:00 2001 From: Ed Savage Date: Wed, 16 Oct 2024 16:08:48 +1300 Subject: [PATCH 128/449] [ML] Unmute MlJobIT tests (#114553) A large number (almost the entirety) of tests in the `MlJobIT` tests suite have been muted. In all cases the cause of failure of the tests is the same, persistent tasks for `cluster:admin/xpack/ml/job/close` and `cluster:admin/xpack/ml/job/close[n]` have been detected as present after the test case has completed. Examination of the tests show that the majority of them do not call either `close` directly or indirectly, indicating that the root cause lies with some previous test. As the `close` task inherits the default timeout of half an hour, an instance of it lingering about can cause a lot of damage to subsequent tests. The approach taken in this PR is to call the `_task/_cancel` endpoint after every test execution in the `MlJobIT` suite as the final operation. This should restrict the impact of the lingering `close` task to the test responsible, and the reduction in noise should permit better identification of the culprit. 
Closes #105239, #113581, #113046, #112729, #113528, #112701, #113742, #113370, #112823, #112088, #112212, #112730, #113654, #113655, #112381, #113477, #112382, #113651, #112510 --- muted-tests.yml | 45 ------------------- .../xpack/ml/integration/MlJobIT.java | 26 +++++++++-- 2 files changed, 22 insertions(+), 49 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 052737e7308a0..943b5f9bff18d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -83,9 +83,6 @@ tests: - class: org.elasticsearch.xpack.inference.InferenceRestIT method: test {p0=inference/80_random_rerank_retriever/Random rerank retriever predictably shuffles results} issue: https://github.com/elastic/elasticsearch/issues/111999 -- class: org.elasticsearch.xpack.ml.integration.MlJobIT - method: testDeleteJobAfterMissingIndex - issue: https://github.com/elastic/elasticsearch/issues/112088 - class: org.elasticsearch.smoketest.SmokeTestMultiNodeClientYamlTestSuiteIT issue: https://github.com/elastic/elasticsearch/issues/112147 - class: org.elasticsearch.smoketest.WatcherYamlRestIT @@ -94,15 +91,9 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=ml/inference_processor/Test create processor with missing mandatory fields} issue: https://github.com/elastic/elasticsearch/issues/112191 -- class: org.elasticsearch.xpack.ml.integration.MlJobIT - method: testDeleteJobAsync - issue: https://github.com/elastic/elasticsearch/issues/112212 - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/rest-api/watcher/put-watch/line_120} issue: https://github.com/elastic/elasticsearch/issues/99517 -- class: org.elasticsearch.xpack.ml.integration.MlJobIT - method: testMultiIndexDelete - issue: https://github.com/elastic/elasticsearch/issues/112381 - class: org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroidTests method: "testAggregateIntermediate {TestCase= #2}" issue: 
https://github.com/elastic/elasticsearch/issues/112461 @@ -129,9 +120,6 @@ tests: - class: org.elasticsearch.xpack.esql.EsqlAsyncSecurityIT method: testIndexPatternErrorMessageComparison_ESQL_SearchDSL issue: https://github.com/elastic/elasticsearch/issues/112630 -- class: org.elasticsearch.xpack.ml.integration.MlJobIT - method: testPutJob_GivenFarequoteConfig - issue: https://github.com/elastic/elasticsearch/issues/112382 - class: org.elasticsearch.packaging.test.PackagesSecurityAutoConfigurationTests method: test20SecurityNotAutoConfiguredOnReInstallation issue: https://github.com/elastic/elasticsearch/issues/112635 @@ -147,26 +135,11 @@ tests: - class: org.elasticsearch.xpack.sql.qa.single_node.JdbcSqlSpecIT method: test {case-functions.testUcaseInline3} issue: https://github.com/elastic/elasticsearch/issues/112643 -- class: org.elasticsearch.xpack.ml.integration.MlJobIT - method: testDelete_multipleRequest - issue: https://github.com/elastic/elasticsearch/issues/112701 -- class: org.elasticsearch.xpack.ml.integration.MlJobIT - method: testCreateJobInSharedIndexUpdatesMapping - issue: https://github.com/elastic/elasticsearch/issues/112729 -- class: org.elasticsearch.xpack.ml.integration.MlJobIT - method: testGetJob_GivenNoSuchJob - issue: https://github.com/elastic/elasticsearch/issues/112730 - class: org.elasticsearch.script.StatsSummaryTests method: testEqualsAndHashCode issue: https://github.com/elastic/elasticsearch/issues/112439 -- class: org.elasticsearch.xpack.ml.integration.MlJobIT - method: testDeleteJobAfterMissingAliases - issue: https://github.com/elastic/elasticsearch/issues/112823 - class: org.elasticsearch.repositories.blobstore.testkit.analyze.HdfsRepositoryAnalysisRestIT issue: https://github.com/elastic/elasticsearch/issues/112889 -- class: org.elasticsearch.xpack.ml.integration.MlJobIT - method: testCreateJob_WithClashingFieldMappingsFails - issue: https://github.com/elastic/elasticsearch/issues/113046 - class: 
org.elasticsearch.xpack.sql.qa.security.JdbcSqlSpecIT method: test {case-functions.testUcaseInline1} issue: https://github.com/elastic/elasticsearch/issues/112641 @@ -218,9 +191,6 @@ tests: - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/ccr/apis/follow/post-resume-follow/line_84} issue: https://github.com/elastic/elasticsearch/issues/113343 -- class: org.elasticsearch.xpack.ml.integration.MlJobIT - method: testDeleteJob_TimingStatsDocumentIsDeleted - issue: https://github.com/elastic/elasticsearch/issues/113370 - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT method: test {p0=search/500_date_range/from, to, include_lower, include_upper deprecated} issue: https://github.com/elastic/elasticsearch/pull/113286 @@ -233,15 +203,6 @@ tests: - class: org.elasticsearch.xpack.inference.InferenceCrudIT method: testSupportedStream issue: https://github.com/elastic/elasticsearch/issues/113430 -- class: org.elasticsearch.xpack.ml.integration.MlJobIT - method: testOutOfOrderData - issue: https://github.com/elastic/elasticsearch/issues/113477 -- class: org.elasticsearch.xpack.ml.integration.MlJobIT - method: testCreateJobsWithIndexNameOption - issue: https://github.com/elastic/elasticsearch/issues/113528 -- class: org.elasticsearch.xpack.ml.integration.MlJobIT - method: testCantCreateJobWithSameID - issue: https://github.com/elastic/elasticsearch/issues/113581 - class: org.elasticsearch.integration.KibanaUserRoleIntegTests method: testFieldMappings issue: https://github.com/elastic/elasticsearch/issues/113592 @@ -254,12 +215,6 @@ tests: - class: org.elasticsearch.smoketest.MlWithSecurityIT method: test {yaml=ml/3rd_party_deployment/Test start and stop multiple deployments} issue: https://github.com/elastic/elasticsearch/issues/101458 -- class: org.elasticsearch.xpack.ml.integration.MlJobIT - method: testGetJobs_GivenMultipleJobs - issue: https://github.com/elastic/elasticsearch/issues/113654 -- class: 
org.elasticsearch.xpack.ml.integration.MlJobIT - method: testGetJobs_GivenSingleJob - issue: https://github.com/elastic/elasticsearch/issues/113655 - class: org.elasticsearch.xpack.security.authz.interceptor.SearchRequestCacheDisablingInterceptorTests method: testHasRemoteIndices issue: https://github.com/elastic/elasticsearch/issues/113660 diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java index bb322cf79adc0..d981a60adbdb5 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java @@ -747,7 +747,15 @@ public void testMultiIndexDelete() throws Exception { } } }""", AnomalyDetectorsIndex.jobResultsAliasedName(jobId), Job.ID, jobId)); - client().performRequest(extraIndex1); + + // Creating an index with a leading dot (".") is now deprecated. + // Ensure the ensuing warning exception doesn't cause a test case failure + try { + client().performRequest(extraIndex1); + } catch (org.elasticsearch.client.WarningFailureException e) { + logger.warn(e.getMessage()); + } + Request extraIndex2 = new Request("PUT", indexName + "-002"); extraIndex2.setJsonEntity(Strings.format(""" { @@ -762,7 +770,14 @@ public void testMultiIndexDelete() throws Exception { } } }""", AnomalyDetectorsIndex.jobResultsAliasedName(jobId), Job.ID, jobId)); - client().performRequest(extraIndex2); + + // Creating an index with a leading dot (".") is now deprecated. 
+ // Ensure the ensuing warning exception doesn't cause a test case failure + try { + client().performRequest(extraIndex2); + } catch (org.elasticsearch.client.WarningFailureException e) { + logger.warn(e.getMessage()); + } // Use _cat/indices/.ml-anomalies-* instead of _cat/indices/_all to workaround https://github.com/elastic/elasticsearch/issues/45652 String indicesBeforeDelete = EntityUtils.toString( @@ -983,10 +998,10 @@ private Response openJob(String jobId, Optional timeout) throws IOExc } private void closeJob(String jobId) throws IOException { - Response openResponse = client().performRequest( + Response closeResponse = client().performRequest( new Request("POST", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_close") ); - assertThat(entityAsMap(openResponse), hasEntry("closed", true)); + assertThat(entityAsMap(closeResponse), hasEntry("closed", true)); } private Response putJob(String jobId, String jsonBody) throws IOException { @@ -1000,5 +1015,8 @@ public void clearMlState() throws Exception { new MlRestTestStateCleaner(logger, adminClient()).resetFeatures(); // Don't check analytics jobs as they are independent of anomaly detection jobs and should not be created by this test. waitForPendingTasks(adminClient(), taskName -> taskName.contains(MlTasks.DATA_FRAME_ANALYTICS_TASK_NAME)); + // Finally, clean up any lingering persistent tasks (such as "_close", "_close[n]" etc.) that may negatively + // impact subsequent tests. 
+ client().performRequest(new Request("POST", "/_tasks/_cancel")); } } From bc0a6e8ee24b198b019e10ba91d65cf1a9198160 Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Wed, 16 Oct 2024 07:23:58 +0300 Subject: [PATCH 129/449] Fixing randomization issue for RRFRetrieverBuilderNestedDocsIT (#114859) --- muted-tests.yml | 3 -- .../xpack/rank/rrf/RRFRetrieverBuilderIT.java | 50 +++++++++++++++---- .../rrf/RRFRetrieverBuilderNestedDocsIT.java | 5 +- 3 files changed, 44 insertions(+), 14 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 943b5f9bff18d..d71a6305beed8 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -372,9 +372,6 @@ tests: - class: org.elasticsearch.xpack.eql.EqlRestIT method: testUnicodeChars issue: https://github.com/elastic/elasticsearch/issues/114791 -- class: org.elasticsearch.xpack.rank.rrf.RRFRetrieverBuilderNestedDocsIT - method: testRRFExplainWithNamedRetrievers - issue: https://github.com/elastic/elasticsearch/issues/114820 - class: org.elasticsearch.ingest.geoip.HttpClientTests issue: https://github.com/elastic/elasticsearch/issues/112618 - class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityWithApmTracingRestIT diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java index def26a613775a..edd5e557aadf0 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java @@ -174,7 +174,10 @@ public void testRRFPagination() { ); // this one retrieves docs 2 and 6 due to prefilter StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( - QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2", "doc_3", "doc_6")).boost(20L) + 
QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) ); standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); // this one retrieves docs 2, 3, 6, and 7 @@ -221,7 +224,10 @@ public void testRRFWithAggs() { ); // this one retrieves docs 2 and 6 due to prefilter StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( - QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2", "doc_3", "doc_6")).boost(20L) + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) ); standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); // this one retrieves docs 2, 3, 6, and 7 @@ -273,7 +279,10 @@ public void testRRFWithCollapse() { ); // this one retrieves docs 2 and 6 due to prefilter StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( - QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2", "doc_3", "doc_6")).boost(20L) + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) ); standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); // this one retrieves docs 2, 3, 6, and 7 @@ -327,7 
+336,10 @@ public void testRRFRetrieverWithCollapseAndAggs() { ); // this one retrieves docs 2 and 6 due to prefilter StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( - QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2", "doc_3", "doc_6")).boost(20L) + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) ); standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); // this one retrieves docs 2, 3, 6, and 7 @@ -390,7 +402,10 @@ public void testMultipleRRFRetrievers() { ); // this one retrieves docs 2 and 6 due to prefilter StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( - QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2", "doc_3", "doc_6")).boost(20L) + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) ); standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); // this one retrieves docs 2, 3, 6, and 7 @@ -453,7 +468,10 @@ public void testRRFExplainWithNamedRetrievers() { standard0.retrieverName("my_custom_retriever"); // this one retrieves docs 2 and 6 due to prefilter StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( - QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2", "doc_3", "doc_6")).boost(20L) + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + 
.should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) ); standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); // this one retrieves docs 2, 3, 6, and 7 @@ -509,7 +527,10 @@ public void testRRFExplainWithAnotherNestedRRF() { standard0.retrieverName("my_custom_retriever"); // this one retrieves docs 2 and 6 due to prefilter StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( - QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2", "doc_3", "doc_6")).boost(20L) + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) ); standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); // this one retrieves docs 2, 3, 6, and 7 @@ -577,7 +598,10 @@ public void testRRFInnerRetrieverSearchError() { QueryBuilders.constantScoreQuery(QueryBuilders.rangeQuery(VECTOR_FIELD).gte(10)) ); StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( - QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2", "doc_3", "doc_6")).boost(20L) + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) ); standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); source.retriever( @@ -613,7 +637,10 @@ public void 
extractToSearchSourceBuilder(SearchSourceBuilder searchSourceBuilder } }; StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( - QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2", "doc_3", "doc_6")).boost(20L) + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) ); standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); source.retriever( @@ -646,7 +673,10 @@ public void extractToSearchSourceBuilder(SearchSourceBuilder searchSourceBuilder } }; StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( - QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2", "doc_3", "doc_6")).boost(20L) + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) ); standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); source.retriever( diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java index ea251917cfae2..69c61fe3bca1f 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java @@ -142,7 +142,10 
@@ public void testRRFRetrieverWithNestedQuery() { ); // this one retrieves docs 2 and 6 due to prefilter StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( - QueryBuilders.constantScoreQuery(QueryBuilders.termsQuery(ID_FIELD, "doc_2", "doc_3", "doc_6")).boost(20L) + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) ); standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); // this one retrieves docs 6 From b558cb07c2dae50074dfb97516b44ee944396a0d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 16 Oct 2024 15:39:57 +1100 Subject: [PATCH 130/449] Mute org.elasticsearch.datastreams.logsdb.qa.LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT testTermsQuery #114873 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index d71a6305beed8..710c7f2c7065b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -382,6 +382,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/114839 - class: org.elasticsearch.license.LicensingTests issue: https://github.com/elastic/elasticsearch/issues/114865 +- class: org.elasticsearch.datastreams.logsdb.qa.LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT + method: testTermsQuery + issue: https://github.com/elastic/elasticsearch/issues/114873 # Examples: # From 6a6b70741c3a61cfdb0f9d75de76655fa4910683 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 16 Oct 2024 16:05:15 +1100 Subject: [PATCH 131/449] Mute org.elasticsearch.xpack.enrich.EnrichIT testDeleteIsCaseSensitive #114840 --- 
muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 710c7f2c7065b..0b24bac278fa6 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -385,6 +385,9 @@ tests: - class: org.elasticsearch.datastreams.logsdb.qa.LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT method: testTermsQuery issue: https://github.com/elastic/elasticsearch/issues/114873 +- class: org.elasticsearch.xpack.enrich.EnrichIT + method: testDeleteIsCaseSensitive + issue: https://github.com/elastic/elasticsearch/issues/114840 # Examples: # From ef2260130d4a4bc2339223dce7f22a2e7c045902 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 16 Oct 2024 06:46:53 +0100 Subject: [PATCH 132/449] Remove dead branches for v7 REST API (#114850) In v9 the `getRestApiVersion()` method on `RestRequest`, `XContentBuilder` and `XContentParser` can never return `V_7`, so we can replace all the expressions of the form `$x$.getRestApiVersion() == V_7` with `false`. This commit does that, and then refactors away the resulting dead code using (largely) automated transformations. 
--- .../RestMultiSearchTemplateAction.java | 5 - .../percolator/PercolateQueryBuilder.java | 4 - .../AbstractBulkByQueryRestHandler.java | 7 +- .../reindex/RestUpdateByQueryAction.java | 4 - .../action/DocWriteResponse.java | 4 - .../admin/indices/get/GetIndexResponse.java | 13 +- .../mapping/get/GetFieldMappingsResponse.java | 15 +- .../mapping/get/GetMappingsResponse.java | 15 +- .../indices/rollover/RolloverRequest.java | 37 ++-- .../segments/IndicesSegmentResponse.java | 5 - .../get/GetIndexTemplatesResponse.java | 8 +- .../action/bulk/BulkItemResponse.java | 8 - .../action/explain/ExplainResponse.java | 5 - .../action/get/MultiGetRequest.java | 72 ++++---- .../action/get/MultiGetResponse.java | 4 - .../ingest/WriteableIngestDocument.java | 5 - .../termvectors/MultiTermVectorsResponse.java | 5 - .../termvectors/TermVectorsResponse.java | 4 - .../elasticsearch/index/get/GetResult.java | 7 - .../query/AbstractGeometryQueryBuilder.java | 21 +-- .../index/query/BoolQueryBuilder.java | 5 +- .../query/GeoBoundingBoxQueryBuilder.java | 20 +-- .../index/query/MatchQueryBuilder.java | 15 +- .../index/query/MoreLikeThisQueryBuilder.java | 59 +++---- .../index/query/MultiMatchQueryBuilder.java | 15 +- .../index/shard/IndexingStats.java | 8 - .../elasticsearch/rest/BaseRestHandler.java | 7 - .../admin/cluster/RestNodesStatsAction.java | 6 - .../admin/indices/RestCreateIndexAction.java | 61 +------ .../indices/RestGetFieldMappingAction.java | 20 --- .../indices/RestGetIndexTemplateAction.java | 22 --- .../admin/indices/RestGetIndicesAction.java | 20 --- .../admin/indices/RestGetMappingAction.java | 21 --- .../admin/indices/RestIndicesStatsAction.java | 6 - .../indices/RestPutIndexTemplateAction.java | 36 +--- .../admin/indices/RestPutMappingAction.java | 26 +-- .../admin/indices/RestResizeHandler.java | 11 -- .../indices/RestRolloverIndexAction.java | 14 +- .../indices/RestValidateQueryAction.java | 6 - .../rest/action/cat/RestIndicesAction.java | 7 +- 
.../rest/action/cat/RestShardsAction.java | 8 +- .../rest/action/document/RestBulkAction.java | 8 - .../action/document/RestDeleteAction.java | 4 - .../rest/action/document/RestGetAction.java | 6 - .../action/document/RestGetSourceAction.java | 6 - .../rest/action/document/RestIndexAction.java | 5 - .../action/document/RestMultiGetAction.java | 4 - .../document/RestMultiTermVectorsAction.java | 6 - .../document/RestTermVectorsAction.java | 7 +- .../action/document/RestUpdateAction.java | 4 - .../rest/action/search/RestCountAction.java | 5 - .../rest/action/search/RestExplainAction.java | 5 - .../action/search/RestMultiSearchAction.java | 5 - .../rest/action/search/RestSearchAction.java | 19 +- .../org/elasticsearch/search/SearchHit.java | 4 - .../search/aggregations/InternalOrder.java | 10 -- .../search/builder/SearchSourceBuilder.java | 167 +++++++----------- .../search/sort/GeoDistanceSortBuilder.java | 27 +-- .../action/DocWriteResponseTests.java | 7 - .../get/GetFieldMappingsResponseTests.java | 27 +-- .../rollover/RolloverRequestTests.java | 8 +- .../action/bulk/BulkItemResponseTests.java | 25 --- .../index/get/GetResultTests.java | 53 ------ .../builder/SearchSourceBuilderTests.java | 23 --- .../license/RestGetLicenseAction.java | 10 +- .../protocol/xpack/XPackInfoResponse.java | 22 +-- .../xpack/core/ml/action/CloseJobAction.java | 6 +- .../ml/action/GetOverallBucketsAction.java | 6 +- .../core/ml/action/StopDatafeedAction.java | 6 +- .../rest/action/RestFreezeIndexAction.java | 23 --- .../graph/rest/action/RestGraphAction.java | 6 - .../apikey/RestInvalidateApiKeyAction.java | 39 +--- 72 files changed, 198 insertions(+), 996 deletions(-) diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java index eec6e003f3556..b748a0ced8be9 100644 --- 
a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -65,10 +64,6 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client * Parses a {@link RestRequest} body and returns a {@link MultiSearchTemplateRequest} */ public static MultiSearchTemplateRequest parseRequest(RestRequest restRequest, boolean allowExplicitIndex) throws IOException { - if (restRequest.getRestApiVersion() == RestApiVersion.V_7 && restRequest.hasParam("type")) { - restRequest.param("type"); - } - MultiSearchTemplateRequest multiRequest = new MultiSearchTemplateRequest(); if (restRequest.hasParam("max_concurrent_searches")) { multiRequest.maxConcurrentSearchRequests(restRequest.paramAsInt("max_concurrent_searches", 0)); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java index 0ee93474234ec..6b37b02a945b2 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java @@ -54,7 +54,6 @@ import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.mapper.LuceneDocument; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.mapper.ParsedDocument; import 
org.elasticsearch.index.mapper.SourceToParse; @@ -312,9 +311,6 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep if (indexedDocumentIndex != null) { builder.field(INDEXED_DOCUMENT_FIELD_INDEX.getPreferredName(), indexedDocumentIndex); } - if (builder.getRestApiVersion() == RestApiVersion.V_7) { - builder.field(INDEXED_DOCUMENT_FIELD_TYPE.getPreferredName(), MapperService.SINGLE_MAPPING_NAME); - } if (indexedDocumentId != null) { builder.field(INDEXED_DOCUMENT_FIELD_ID.getPreferredName(), indexedDocumentId); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBulkByQueryRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBulkByQueryRestHandler.java index 48a892f796f92..095d119bf2719 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBulkByQueryRestHandler.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBulkByQueryRestHandler.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.reindex.AbstractBulkByScrollRequest; import org.elasticsearch.index.reindex.BulkByScrollResponse; @@ -28,7 +27,6 @@ import java.io.IOException; import java.util.Map; import java.util.function.Consumer; -import java.util.function.IntConsumer; import java.util.function.Predicate; /** @@ -54,10 +52,7 @@ protected void parseInternalRequest( SearchRequest searchRequest = internal.getSearchRequest(); try (XContentParser parser = extractRequestSpecificFields(restRequest, bodyConsumers)) { - IntConsumer sizeConsumer = restRequest.getRestApiVersion() == RestApiVersion.V_7 - ? 
size -> setMaxDocsFromSearchSize(internal, size) - : size -> failOnSizeSpecified(); - RestSearchAction.parseSearchRequest(searchRequest, restRequest, parser, clusterSupportsFeature, sizeConsumer); + RestSearchAction.parseSearchRequest(searchRequest, restRequest, parser, clusterSupportsFeature, size -> failOnSizeSpecified()); } searchRequest.source().size(restRequest.paramAsInt("scroll_size", searchRequest.source().size())); diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java index 67ea34f504790..632ed73b9b2fa 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java @@ -10,7 +10,6 @@ package org.elasticsearch.reindex; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.reindex.UpdateByQueryAction; import org.elasticsearch.index.reindex.UpdateByQueryRequest; @@ -55,9 +54,6 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client @Override protected UpdateByQueryRequest buildRequest(RestRequest request) throws IOException { - if (request.getRestApiVersion() == RestApiVersion.V_7 && request.hasParam("type")) { - request.param("type"); - } /* * Passing the search request through UpdateByQueryRequest first allows * it to set its own defaults which differ from SearchRequest's diff --git a/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java b/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java index 095ccd71fa266..d47469ddf10d9 100644 --- a/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java +++ b/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java @@ -18,7 +18,6 @@ import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.seqno.SequenceNumbers; @@ -298,9 +297,6 @@ public XContentBuilder innerToXContent(XContentBuilder builder, Params params) t builder.field(_SEQ_NO, getSeqNo()); builder.field(_PRIMARY_TERM, getPrimaryTerm()); } - if (builder.getRestApiVersion() == RestApiVersion.V_7) { - builder.field(MapperService.TYPE_FIELD_NAME, MapperService.SINGLE_MAPPING_NAME); - } return builder; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexResponse.java index c3ed0c675c3c7..23a6b9c8c61a8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexResponse.java @@ -19,7 +19,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.xcontent.ToXContent; @@ -30,9 +29,6 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.rest.BaseRestHandler.DEFAULT_INCLUDE_TYPE_NAME_POLICY; -import static org.elasticsearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; - /** * A response for a get index action. 
*/ @@ -200,14 +196,7 @@ public Iterator toXContentChunked(ToXContent.Params ignore if (indexMappings == null) { builder.startObject("mappings").endObject(); } else { - if (builder.getRestApiVersion() == RestApiVersion.V_7 - && params.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY)) { - builder.startObject("mappings"); - builder.field(MapperService.SINGLE_MAPPING_NAME, indexMappings.sourceAsMap()); - builder.endObject(); - } else { - builder.field("mappings", indexMappings.sourceAsMap()); - } + builder.field("mappings", indexMappings.sourceAsMap()); } builder.startObject("settings"); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java index 4398b33cd798f..01e8fe9787014 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java @@ -15,10 +15,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.ToXContentObject; @@ -31,8 +29,6 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.rest.BaseRestHandler.DEFAULT_INCLUDE_TYPE_NAME_POLICY; - /** * Response object for {@link GetFieldMappingsRequest} API * @@ -91,16 +87,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(indexEntry.getKey()); 
builder.startObject(MAPPINGS.getPreferredName()); if (indexEntry.getValue() != null) { - if (builder.getRestApiVersion() == RestApiVersion.V_7 - && params.paramAsBoolean(BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY)) { - if (indexEntry.getValue().size() > 0) { - builder.startObject(MapperService.SINGLE_MAPPING_NAME); - addFieldMappingsToBuilder(builder, params, indexEntry.getValue()); - builder.endObject(); - } - } else { - addFieldMappingsToBuilder(builder, params, indexEntry.getValue()); - } + addFieldMappingsToBuilder(builder, params, indexEntry.getValue()); } builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponse.java index 37edae05c22af..cff2cf9ec8c78 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponse.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -26,9 +25,6 @@ import java.util.Iterator; import java.util.Map; -import static org.elasticsearch.rest.BaseRestHandler.DEFAULT_INCLUDE_TYPE_NAME_POLICY; -import static org.elasticsearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; - public class GetMappingsResponse extends ActionResponse implements ChunkedToXContentObject { private static final ParseField MAPPINGS = new ParseField("mappings"); @@ -73,16 +69,7 @@ public Iterator toXContentChunked(ToXContent.Params outerParams) { Iterators.single((b, p) -> 
b.startObject()), Iterators.map(getMappings().entrySet().iterator(), indexEntry -> (builder, params) -> { builder.startObject(indexEntry.getKey()); - boolean includeTypeName = params.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - if (builder.getRestApiVersion() == RestApiVersion.V_7 && includeTypeName && indexEntry.getValue() != null) { - builder.startObject(MAPPINGS.getPreferredName()); - - if (indexEntry.getValue() != MappingMetadata.EMPTY_MAPPINGS) { - builder.field(MapperService.SINGLE_MAPPING_NAME, indexEntry.getValue().sourceAsMap()); - } - builder.endObject(); - - } else if (indexEntry.getValue() != null) { + if (indexEntry.getValue() != null) { builder.field(MAPPINGS.getPreferredName(), indexEntry.getValue().sourceAsMap()); } else { builder.startObject(MAPPINGS.getPreferredName()).endObject(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java index b6356e92ad856..1ef9194b51203 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java @@ -42,7 +42,7 @@ */ public class RolloverRequest extends AcknowledgedRequest implements IndicesRequest { - private static final ObjectParser PARSER = new ObjectParser<>("rollover"); + private static final ObjectParser PARSER = new ObjectParser<>("rollover"); private static final ParseField CONDITIONS = new ParseField("conditions"); @@ -57,27 +57,18 @@ public class RolloverRequest extends AcknowledgedRequest implem CreateIndexRequest.SETTINGS, ObjectParser.ValueType.OBJECT ); - PARSER.declareField((parser, request, includeTypeName) -> { - if (includeTypeName) { - // expecting one type only - for (Map.Entry mappingsEntry : parser.map().entrySet()) { - @SuppressWarnings("unchecked") - final Map value = (Map) 
mappingsEntry.getValue(); - request.createIndexRequest.mapping(value); - } - } else { - // a type is not included, add a dummy _doc type - Map mappings = parser.map(); - if (MapperService.isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, mappings)) { - throw new IllegalArgumentException( - "The mapping definition cannot be nested under a type " - + "[" - + MapperService.SINGLE_MAPPING_NAME - + "] unless include_type_name is set to true." - ); - } - request.createIndexRequest.mapping(mappings); + PARSER.declareField((parser, request, context) -> { + // a type is not included, add a dummy _doc type + Map mappings = parser.map(); + if (MapperService.isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, mappings)) { + throw new IllegalArgumentException( + "The mapping definition cannot be nested under a type " + + "[" + + MapperService.SINGLE_MAPPING_NAME + + "] unless include_type_name is set to true." + ); } + request.createIndexRequest.mapping(mappings); }, CreateIndexRequest.MAPPINGS.forRestApiVersion(RestApiVersion.equalTo(RestApiVersion.V_7)), ObjectParser.ValueType.OBJECT); PARSER.declareField((parser, request, context) -> { // a type is not included, add a dummy _doc type @@ -290,8 +281,8 @@ public CreateIndexRequest getCreateIndexRequest() { } // param isTypeIncluded decides how mappings should be parsed from XContent - public void fromXContent(boolean isTypeIncluded, XContentParser parser) throws IOException { - PARSER.parse(parser, this, isTypeIncluded); + public void fromXContent(XContentParser parser) throws IOException { + PARSER.parse(parser, this, null); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java index 1071f120f929e..45d784d301bf1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java @@ -16,10 +16,8 @@ import org.elasticsearch.action.support.broadcast.ChunkedBroadcastResponse; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; @@ -120,9 +118,6 @@ protected Iterator customXContentChunks(ToXContent.Params params) { builder.field(Fields.NUM_DOCS, segment.getNumDocs()); builder.field(Fields.DELETED_DOCS, segment.getDeletedDocs()); builder.humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, segment.getSize()); - if (builder.getRestApiVersion() == RestApiVersion.V_7) { - builder.humanReadableField(Fields.MEMORY_IN_BYTES, Fields.MEMORY, ByteSizeValue.ZERO); - } builder.field(Fields.COMMITTED, segment.isCommitted()); builder.field(Fields.SEARCH, segment.isSearch()); if (segment.getVersion() != null) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesResponse.java index fa3ac8ae720c2..2d854d2c6fa45 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesResponse.java @@ -12,7 +12,6 @@ import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.ToXContentObject; import 
org.elasticsearch.xcontent.XContentBuilder; @@ -22,7 +21,6 @@ import java.util.Objects; import static java.util.Collections.singletonMap; -import static org.elasticsearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; public class GetIndexTemplatesResponse extends ActionResponse implements ToXContentObject { @@ -65,11 +63,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); for (IndexTemplateMetadata indexTemplateMetadata : getIndexTemplates()) { - if (builder.getRestApiVersion() == RestApiVersion.V_7 && params.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, false)) { - IndexTemplateMetadata.Builder.toXContentWithTypes(indexTemplateMetadata, builder, params); - } else { - IndexTemplateMetadata.Builder.toXContent(indexTemplateMetadata, builder, params); - } + IndexTemplateMetadata.Builder.toXContent(indexTemplateMetadata, builder, params); } builder.endObject(); return builder; diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java index c0ceab139ff1b..d5931c85bb2e1 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java @@ -22,7 +22,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; @@ -57,10 +56,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(STATUS, response.status().getStatus()); } else { builder.field(_INDEX, failure.getIndex()); - if (builder.getRestApiVersion() == RestApiVersion.V_7) { - builder.field(MapperService.TYPE_FIELD_NAME, 
MapperService.SINGLE_MAPPING_NAME); - } - builder.field(_ID, failure.getId()); builder.field(STATUS, failure.getStatus().getStatus()); failure.getFailureStoreStatus().toXContent(builder, params); @@ -301,9 +296,6 @@ public void setFailureStoreStatus(IndexDocFailureStoreStatus failureStoreStatus) @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field(INDEX_FIELD, index); - if (builder.getRestApiVersion() == RestApiVersion.V_7) { - builder.field("type", MapperService.SINGLE_MAPPING_NAME); - } if (id != null) { builder.field(ID_FIELD, id); } diff --git a/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java b/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java index caedec6a563d4..b759baad2024c 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java +++ b/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.rest.RestStatus; @@ -138,10 +137,6 @@ public void writeTo(StreamOutput out) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(_INDEX.getPreferredName(), index); - if (builder.getRestApiVersion() == RestApiVersion.V_7) { - builder.field(MapperService.TYPE_FIELD_NAME, MapperService.SINGLE_MAPPING_NAME); - } - builder.field(_ID.getPreferredName(), id); builder.field(MATCHED.getPreferredName(), isMatch()); if (hasExplanation()) { diff --git a/server/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java 
b/server/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java index cc1d0497bd51a..22537f1f51216 100644 --- a/server/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java +++ b/server/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java @@ -26,11 +26,9 @@ import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.SourceLoader; -import org.elasticsearch.rest.action.document.RestMultiGetAction; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; @@ -454,47 +452,41 @@ private static void parseDocuments( index = parser.text(); } else if (ID.match(currentFieldName, parser.getDeprecationHandler())) { id = parser.text(); - } else if (parser.getRestApiVersion() == RestApiVersion.V_7 - && TYPE.match(currentFieldName, parser.getDeprecationHandler())) { - deprecationLogger.compatibleCritical("mget_with_types", RestMultiGetAction.TYPES_DEPRECATION_MESSAGE); - } else if (ROUTING.match(currentFieldName, parser.getDeprecationHandler())) { - routing = parser.text(); - } else if (FIELDS.match(currentFieldName, parser.getDeprecationHandler())) { - throw new ParsingException( - parser.getTokenLocation(), - "Unsupported field [fields] used, expected [stored_fields] instead" - ); - } else if (STORED_FIELDS.match(currentFieldName, parser.getDeprecationHandler())) { - storedFields = new ArrayList<>(); - storedFields.add(parser.text()); - } else if (VERSION.match(currentFieldName, parser.getDeprecationHandler())) { - version = parser.longValue(); - } else if (VERSION_TYPE.match(currentFieldName, parser.getDeprecationHandler())) { - versionType = 
VersionType.fromString(parser.text()); - } else if (SOURCE.match(currentFieldName, parser.getDeprecationHandler())) { - if (parser.isBooleanValue()) { - fetchSourceContext = fetchSourceContext == null - ? FetchSourceContext.of(parser.booleanValue()) - : FetchSourceContext.of( - parser.booleanValue(), - fetchSourceContext.includes(), - fetchSourceContext.excludes() - ); - } else if (token == Token.VALUE_STRING) { - fetchSourceContext = FetchSourceContext.of( - fetchSourceContext == null || fetchSourceContext.fetchSource(), - new String[] { parser.text() }, - fetchSourceContext == null ? Strings.EMPTY_ARRAY : fetchSourceContext.excludes() + } else if (ROUTING.match(currentFieldName, parser.getDeprecationHandler())) { + routing = parser.text(); + } else if (FIELDS.match(currentFieldName, parser.getDeprecationHandler())) { + throw new ParsingException( + parser.getTokenLocation(), + "Unsupported field [fields] used, expected [stored_fields] instead" + ); + } else if (STORED_FIELDS.match(currentFieldName, parser.getDeprecationHandler())) { + storedFields = new ArrayList<>(); + storedFields.add(parser.text()); + } else if (VERSION.match(currentFieldName, parser.getDeprecationHandler())) { + version = parser.longValue(); + } else if (VERSION_TYPE.match(currentFieldName, parser.getDeprecationHandler())) { + versionType = VersionType.fromString(parser.text()); + } else if (SOURCE.match(currentFieldName, parser.getDeprecationHandler())) { + if (parser.isBooleanValue()) { + fetchSourceContext = fetchSourceContext == null + ? FetchSourceContext.of(parser.booleanValue()) + : FetchSourceContext.of( + parser.booleanValue(), + fetchSourceContext.includes(), + fetchSourceContext.excludes() ); - } else { - throw new ElasticsearchParseException("illegal type for _source: [{}]", token); - } - } else { - throw new ElasticsearchParseException( - "failed to parse multi get request. 
unknown field [{}]", - currentFieldName + } else if (token == Token.VALUE_STRING) { + fetchSourceContext = FetchSourceContext.of( + fetchSourceContext == null || fetchSourceContext.fetchSource(), + new String[] { parser.text() }, + fetchSourceContext == null ? Strings.EMPTY_ARRAY : fetchSourceContext.excludes() ); + } else { + throw new ElasticsearchParseException("illegal type for _source: [{}]", token); } + } else { + throw new ElasticsearchParseException("failed to parse multi get request. unknown field [{}]", currentFieldName); + } } else if (token == Token.START_ARRAY) { if (FIELDS.match(currentFieldName, parser.getDeprecationHandler())) { throw new ParsingException( diff --git a/server/src/main/java/org/elasticsearch/action/get/MultiGetResponse.java b/server/src/main/java/org/elasticsearch/action/get/MultiGetResponse.java index fc126b29b3265..08db6dee8e543 100644 --- a/server/src/main/java/org/elasticsearch/action/get/MultiGetResponse.java +++ b/server/src/main/java/org/elasticsearch/action/get/MultiGetResponse.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; @@ -90,9 +89,6 @@ public void writeTo(StreamOutput out) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(INDEX.getPreferredName(), index); - if (builder.getRestApiVersion() == RestApiVersion.V_7) { - builder.field(MapperService.TYPE_FIELD_NAME, MapperService.SINGLE_MAPPING_NAME); - } builder.field(ID.getPreferredName(), id); ElasticsearchException.generateFailureXContent(builder, params, exception, true); builder.endObject(); diff --git 
a/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java b/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java index 6570343452afe..12f542aec71c1 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java @@ -13,8 +13,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.IngestDocument.Metadata; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -127,9 +125,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(key, value.toString()); } } - if (builder.getRestApiVersion() == RestApiVersion.V_7) { - builder.field(MapperService.TYPE_FIELD_NAME, MapperService.SINGLE_MAPPING_NAME); - } builder.field(SOURCE_FIELD, ingestDocument.getSource()); builder.field(INGEST_FIELD, ingestDocument.getIngestMetadata()); builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsResponse.java b/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsResponse.java index 2beeaf1a26f0f..42196fac28528 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsResponse.java @@ -16,8 +16,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.index.mapper.MapperService; import 
org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -110,9 +108,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); Failure failure = response.getFailure(); builder.field(Fields._INDEX, failure.getIndex()); - if (builder.getRestApiVersion() == RestApiVersion.V_7) { - builder.field(Fields._TYPE, MapperService.SINGLE_MAPPING_NAME); - } builder.field(Fields._ID, failure.getId()); ElasticsearchException.generateFailureXContent(builder, params, failure.getCause(), true); builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsResponse.java b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsResponse.java index 2d70af94d53d1..4c4aa7de46f6b 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsResponse.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.xcontent.ToXContentObject; @@ -164,9 +163,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (isArtificial() == false) { builder.field(FieldStrings._ID, id); } - if (builder.getRestApiVersion() == RestApiVersion.V_7) { - builder.field(MapperService.TYPE_FIELD_NAME, MapperService.SINGLE_MAPPING_NAME); - } builder.field(FieldStrings._VERSION, docVersion); builder.field(FieldStrings.FOUND, isExists()); builder.field(FieldStrings.TOOK, tookInMillis); diff --git a/server/src/main/java/org/elasticsearch/index/get/GetResult.java b/server/src/main/java/org/elasticsearch/index/get/GetResult.java index 
402f455d69bc2..109f645f24caf 100644 --- a/server/src/main/java/org/elasticsearch/index/get/GetResult.java +++ b/server/src/main/java/org/elasticsearch/index/get/GetResult.java @@ -20,11 +20,9 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.mapper.IgnoredFieldMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.SourceFieldMapper; -import org.elasticsearch.rest.action.document.RestMultiGetAction; import org.elasticsearch.search.lookup.Source; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -276,9 +274,6 @@ public XContentBuilder toXContentEmbedded(XContentBuilder builder, Params params public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(_INDEX, index); - if (builder.getRestApiVersion() == RestApiVersion.V_7) { - builder.field(MapperService.TYPE_FIELD_NAME, MapperService.SINGLE_MAPPING_NAME); - } builder.field(_ID, id); if (isExists()) { if (version != -1) { @@ -316,8 +311,6 @@ public static GetResult fromXContentEmbedded(XContentParser parser, String index } else if (token.isValue()) { if (_INDEX.equals(currentFieldName)) { index = parser.text(); - } else if (parser.getRestApiVersion() == RestApiVersion.V_7 && MapperService.TYPE_FIELD_NAME.equals(currentFieldName)) { - deprecationLogger.compatibleCritical("mget_with_types", RestMultiGetAction.TYPES_DEPRECATION_MESSAGE); } else if (_ID.equals(currentFieldName)) { id = parser.text(); } else if (_VERSION.equals(currentFieldName)) { diff --git a/server/src/main/java/org/elasticsearch/index/query/AbstractGeometryQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/AbstractGeometryQueryBuilder.java index 
7549873a10bc1..033151da362ef 100644 --- a/server/src/main/java/org/elasticsearch/index/query/AbstractGeometryQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/AbstractGeometryQueryBuilder.java @@ -26,7 +26,6 @@ import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.GeometryCollection; import org.elasticsearch.geometry.ShapeType; @@ -429,9 +428,6 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep GeoJson.toXContent(shape, builder, params); } else { builder.startObject(INDEXED_SHAPE_FIELD.getPreferredName()).field(SHAPE_ID_FIELD.getPreferredName(), indexedShapeId); - if (builder.getRestApiVersion() == RestApiVersion.V_7) { - builder.field(SHAPE_TYPE_FIELD.getPreferredName(), MapperService.SINGLE_MAPPING_NAME); - } if (indexedShapeIndex != null) { builder.field(SHAPE_INDEX_FIELD.getPreferredName(), indexedShapeIndex); } @@ -555,16 +551,13 @@ public static ParsedGeometryQueryParams parsedParamsFromXContent(XContentParser } else if (token.isValue()) { if (SHAPE_ID_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { params.id = parser.text(); - } else if (parser.getRestApiVersion() == RestApiVersion.V_7 - && SHAPE_TYPE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - deprecationLogger.compatibleCritical("geo_share_query_with_types", TYPES_DEPRECATION_MESSAGE); - } else if (SHAPE_INDEX_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - params.index = parser.text(); - } else if (SHAPE_PATH_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - params.shapePath = parser.text(); - } else if (SHAPE_ROUTING_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - params.shapeRouting = parser.text(); - } 
+ } else if (SHAPE_INDEX_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + params.index = parser.text(); + } else if (SHAPE_PATH_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + params.shapePath = parser.text(); + } else if (SHAPE_ROUTING_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + params.shapeRouting = parser.text(); + } } else { throw new ParsingException( parser.getTokenLocation(), diff --git a/server/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java index a7b3b9145d2ca..2401719caaa87 100644 --- a/server/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -238,9 +237,7 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep doXArrayContent(FILTER, filterClauses, builder, params); doXArrayContent(MUST_NOT, mustNotClauses, builder, params); doXArrayContent(SHOULD, shouldClauses, builder, params); - if (builder.getRestApiVersion() == RestApiVersion.V_7) { - builder.field(ADJUST_PURE_NEGATIVE.getPreferredName(), adjustPureNegative); - } else if (adjustPureNegative != ADJUST_PURE_NEGATIVE_DEFAULT) { + if (adjustPureNegative != ADJUST_PURE_NEGATIVE_DEFAULT) { builder.field(ADJUST_PURE_NEGATIVE.getPreferredName(), adjustPureNegative); } if (minimumShouldMatch != null) { diff --git a/server/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java 
b/server/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java index 86413404d9571..22cc68fb3a2f2 100644 --- a/server/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java @@ -358,18 +358,14 @@ public static GeoBoundingBoxQueryBuilder fromXContent(XContentParser parser) thr validationMethod = GeoValidationMethod.fromString(parser.text()); } else if (IGNORE_UNMAPPED_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { ignoreUnmapped = parser.booleanValue(); - } else if (parser.getRestApiVersion() == RestApiVersion.V_7 - && TYPE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - deprecationLogger.compatibleCritical("geo_bounding_box_type", TYPE_PARAMETER_DEPRECATION_MESSAGE); - parser.text(); // ignore value - } else { - throw new ParsingException( - parser.getTokenLocation(), - "failed to parse [{}] query. unexpected field [{}]", - NAME, - currentFieldName - ); - } + } else { + throw new ParsingException( + parser.getTokenLocation(), + "failed to parse [{}] query. 
unexpected field [{}]", + NAME, + currentFieldName + ); + } } } diff --git a/server/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java index a4a76c078cb55..d72b755e7e77a 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java @@ -528,15 +528,12 @@ public static MatchQueryBuilder fromXContent(XContentParser parser) throws IOExc queryName = parser.text(); } else if (GENERATE_SYNONYMS_PHRASE_QUERY.match(currentFieldName, parser.getDeprecationHandler())) { autoGenerateSynonymsPhraseQuery = parser.booleanValue(); - } else if (parser.getRestApiVersion() == RestApiVersion.V_7 - && CUTOFF_FREQUENCY_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - throw new ParsingException(parser.getTokenLocation(), CUTOFF_FREQUENCY_DEPRECATION_MSG); - } else { - throw new ParsingException( - parser.getTokenLocation(), - "[" + NAME + "] query does not support [" + currentFieldName + "]" - ); - } + } else { + throw new ParsingException( + parser.getTokenLocation(), + "[" + NAME + "] query does not support [" + currentFieldName + "]" + ); + } } else { throw new ParsingException( parser.getTokenLocation(), diff --git a/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java index c1e97e7429643..7e644a8800bbd 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java @@ -37,12 +37,10 @@ import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.VersionType; import 
org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; @@ -352,40 +350,32 @@ public static Item parse(XContentParser parser, Item item) throws IOException { } else if (currentFieldName != null) { if (INDEX.match(currentFieldName, parser.getDeprecationHandler())) { item.index = parser.text(); - } else if (parser.getRestApiVersion() == RestApiVersion.V_7 - && TYPE.match(currentFieldName, parser.getDeprecationHandler())) { - deprecationLogger.compatibleCritical("more_like_this_query_with_types", TYPES_DEPRECATION_MESSAGE); - } else if (ID.match(currentFieldName, parser.getDeprecationHandler())) { - item.id = parser.text(); - } else if (DOC.match(currentFieldName, parser.getDeprecationHandler())) { - item.doc = BytesReference.bytes(jsonBuilder().copyCurrentStructure(parser)); - item.xContentType = XContentType.JSON; - } else if (FIELDS.match(currentFieldName, parser.getDeprecationHandler())) { - if (token == XContentParser.Token.START_ARRAY) { - List fields = new ArrayList<>(); - while (parser.nextToken() != XContentParser.Token.END_ARRAY) { - fields.add(parser.text()); - } - item.fields(fields.toArray(new String[fields.size()])); - } else { - throw new ElasticsearchParseException( - "failed to parse More Like This item. 
field [fields] must be an array" - ); + } else if (ID.match(currentFieldName, parser.getDeprecationHandler())) { + item.id = parser.text(); + } else if (DOC.match(currentFieldName, parser.getDeprecationHandler())) { + item.doc = BytesReference.bytes(jsonBuilder().copyCurrentStructure(parser)); + item.xContentType = XContentType.JSON; + } else if (FIELDS.match(currentFieldName, parser.getDeprecationHandler())) { + if (token == XContentParser.Token.START_ARRAY) { + List fields = new ArrayList<>(); + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + fields.add(parser.text()); } - } else if (PER_FIELD_ANALYZER.match(currentFieldName, parser.getDeprecationHandler())) { - item.perFieldAnalyzer(TermVectorsRequest.readPerFieldAnalyzer(parser.map())); - } else if (ROUTING.match(currentFieldName, parser.getDeprecationHandler())) { - item.routing = parser.text(); - } else if (VERSION.match(currentFieldName, parser.getDeprecationHandler())) { - item.version = parser.longValue(); - } else if (VERSION_TYPE.match(currentFieldName, parser.getDeprecationHandler())) { - item.versionType = VersionType.fromString(parser.text()); + item.fields(fields.toArray(new String[fields.size()])); } else { - throw new ElasticsearchParseException( - "failed to parse More Like This item. unknown field [{}]", - currentFieldName - ); + throw new ElasticsearchParseException("failed to parse More Like This item. 
field [fields] must be an array"); } + } else if (PER_FIELD_ANALYZER.match(currentFieldName, parser.getDeprecationHandler())) { + item.perFieldAnalyzer(TermVectorsRequest.readPerFieldAnalyzer(parser.map())); + } else if (ROUTING.match(currentFieldName, parser.getDeprecationHandler())) { + item.routing = parser.text(); + } else if (VERSION.match(currentFieldName, parser.getDeprecationHandler())) { + item.version = parser.longValue(); + } else if (VERSION_TYPE.match(currentFieldName, parser.getDeprecationHandler())) { + item.versionType = VersionType.fromString(parser.text()); + } else { + throw new ElasticsearchParseException("failed to parse More Like This item. unknown field [{}]", currentFieldName); + } } } if (item.id != null && item.doc != null) { @@ -405,9 +395,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (this.index != null) { builder.field(INDEX.getPreferredName(), this.index); } - if (builder.getRestApiVersion() == RestApiVersion.V_7) { - builder.field(TYPE.getPreferredName(), MapperService.SINGLE_MAPPING_NAME); - } if (this.id != null) { builder.field(ID.getPreferredName(), this.id); } diff --git a/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java index 1deba84cce355..a83fb8d1fd419 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java @@ -654,15 +654,12 @@ public static MultiMatchQueryBuilder fromXContent(XContentParser parser) throws autoGenerateSynonymsPhraseQuery = parser.booleanValue(); } else if (FUZZY_TRANSPOSITIONS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { fuzzyTranspositions = parser.booleanValue(); - } else if (parser.getRestApiVersion() == RestApiVersion.V_7 - && CUTOFF_FREQUENCY_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - throw 
new ParsingException(parser.getTokenLocation(), CUTOFF_FREQUENCY_DEPRECATION_MSG); - } else { - throw new ParsingException( - parser.getTokenLocation(), - "[" + NAME + "] query does not support [" + currentFieldName + "]" - ); - } + } else { + throw new ParsingException( + parser.getTokenLocation(), + "[" + NAME + "] query does not support [" + currentFieldName + "]" + ); + } } else { throw new ParsingException( parser.getTokenLocation(), diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexingStats.java b/server/src/main/java/org/elasticsearch/index/shard/IndexingStats.java index b4a8610ded04d..b0a4d333ba77f 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexingStats.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexingStats.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.xcontent.ToXContent; @@ -305,13 +304,6 @@ public Stats getTotal() { public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { builder.startObject(Fields.INDEXING); totalStats.toXContent(builder, params); - if (builder.getRestApiVersion() == RestApiVersion.V_7 && params.param("types") != null) { - builder.startObject(Fields.TYPES); - builder.startObject(MapperService.SINGLE_MAPPING_NAME); - totalStats.toXContent(builder, params); - builder.endObject(); - builder.endObject(); - } builder.endObject(); return builder; } diff --git a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java index 2f7bb80a8d46a..0a99ee777bb76 100644 --- a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +++ 
b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java @@ -47,13 +47,6 @@ */ public abstract class BaseRestHandler implements RestHandler { - /** - * Parameter that controls whether certain REST apis should include type names in their requests or responses. - * Note: This parameter is only available through compatible rest api for {@link RestApiVersion#V_7}. - */ - public static final String INCLUDE_TYPE_NAME_PARAMETER = "include_type_name"; - public static final boolean DEFAULT_INCLUDE_TYPE_NAME_POLICY = false; - public static final Setting MULTI_ALLOW_EXPLICIT_INDEX = Setting.boolSetting( "rest.action.multi.allow_explicit_index", true, diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsAction.java index 933dd4d966fdf..c2848781bc7a7 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -72,11 +71,6 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - if (request.getRestApiVersion() == RestApiVersion.V_7 && request.hasParam("types")) { - deprecationLogger.compatibleCritical("nodes_stats_types", TYPES_DEPRECATION_MESSAGE); - request.param("types"); - } - String[] nodesIds = Strings.splitStringByCommaToArray(request.param("nodeId")); Set metricNames = Strings.tokenizeByCommaToSet(request.param("metric", "_all")); diff --git 
a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java index 8e1a122f98a3a..e30d2f8d5c733 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java @@ -12,11 +12,8 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -25,7 +22,6 @@ import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -39,10 +35,6 @@ @ServerlessScope(Scope.PUBLIC) public class RestCreateIndexAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestCreateIndexAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in create " - + "index requests is deprecated. 
The parameter will be removed in the next major version."; - @Override public List routes() { return List.of(new Route(PUT, "/{index}")); @@ -55,61 +47,10 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - CreateIndexRequest createIndexRequest; - if (request.getRestApiVersion() == RestApiVersion.V_7) { - createIndexRequest = prepareRequestV7(request); - } else { - createIndexRequest = prepareRequest(request); - } + final var createIndexRequest = prepareRequest(request); return channel -> client.admin().indices().create(createIndexRequest, new RestToXContentListener<>(channel)); } - // default scope for testing types in mapping - static CreateIndexRequest prepareRequestV7(RestRequest request) { - CreateIndexRequest createIndexRequest = new CreateIndexRequest(request.param("index")); - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { - request.param(INCLUDE_TYPE_NAME_PARAMETER);// just consume, it is always replaced with _doc - deprecationLogger.compatibleCritical("create_index_with_types", TYPES_DEPRECATION_MESSAGE); - } - - if (request.hasContent()) { - Map sourceAsMap = XContentHelper.convertToMap(request.requiredContent(), false, request.getXContentType()).v2(); - - sourceAsMap = prepareMappingsV7(sourceAsMap, request); - - createIndexRequest.source(sourceAsMap, LoggingDeprecationHandler.INSTANCE); - } - - createIndexRequest.ackTimeout(getAckTimeout(request)); - createIndexRequest.masterNodeTimeout(getMasterNodeTimeout(request)); - createIndexRequest.waitForActiveShards(ActiveShardCount.parseString(request.param("wait_for_active_shards"))); - return createIndexRequest; - } - - static Map prepareMappingsV7(Map source, RestRequest request) { - final boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, false); - - @SuppressWarnings("unchecked") - Map mappings = (Map) source.get("mappings"); - - if (includeTypeName && mappings != null 
&& mappings.size() == 1) { - Map newSource = new HashMap<>(); - newSource.putAll(source); // mappings will be overridden. Aliases, settings stay the same - String typeName = mappings.keySet().iterator().next(); - if (Strings.hasText(typeName) == false) { - throw new IllegalArgumentException("name cannot be empty string"); - } - @SuppressWarnings("unchecked") - Map typedMappings = (Map) mappings.get(typeName); - - // no matter what the type was, replace it with _doc, because the internal representation still uses single type `_doc`. - newSource.put("mappings", Collections.singletonMap(MapperService.SINGLE_MAPPING_NAME, typedMappings)); - return newSource; - } else { - return prepareMappings(source); - } - } - static CreateIndexRequest prepareRequest(RestRequest request) { CreateIndexRequest createIndexRequest = new CreateIndexRequest(request.param("index")); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java index 37391028dbd6e..5f648ca8e77e5 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java @@ -18,7 +18,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -58,25 +57,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC final String[] indices = Strings.splitStringByCommaToArray(request.param("index")); final String[] fields = Strings.splitStringByCommaToArray(request.param("fields")); - if (request.getRestApiVersion() == 
RestApiVersion.V_7) { - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { - deprecationLogger.compatibleCritical("get_field_mapping_with_types", INCLUDE_TYPE_DEPRECATION_MESSAGE); - } - boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - final String[] types = request.paramAsStringArrayOrEmptyIfAll("type"); - if (includeTypeName == false && types.length > 0) { - throw new IllegalArgumentException("Types cannot be specified unless include_type_name is set to true."); - } - - if (request.hasParam("local")) { - request.param("local"); - deprecationLogger.compatibleCritical( - "get_field_mapping_local", - "Use [local] in get field mapping requests is deprecated. The parameter will be removed in the next major version" - ); - } - } - GetFieldMappingsRequest getMappingsRequest = new GetFieldMappingsRequest(); getMappingsRequest.indices(indices).fields(fields).includeDefaults(request.paramAsBoolean("include_defaults", false)); getMappingsRequest.indicesOptions(IndicesOptions.fromRequest(request, getMappingsRequest.indicesOptions())); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndexTemplateAction.java index 25f471183d805..b0b879a6d787c 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndexTemplateAction.java @@ -12,9 +12,7 @@ import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import 
org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -23,7 +21,6 @@ import java.util.List; import java.util.Set; -import static org.elasticsearch.common.util.set.Sets.addToCopy; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.HEAD; import static org.elasticsearch.rest.RestStatus.NOT_FOUND; @@ -34,13 +31,6 @@ * The REST handler for get template and head template APIs. */ public class RestGetIndexTemplateAction extends BaseRestHandler { - - private static final Set COMPATIBLE_RESPONSE_PARAMS = addToCopy(Settings.FORMAT_PARAMS, INCLUDE_TYPE_NAME_PARAMETER); - - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetIndexTemplateAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in get " - + "index template requests is deprecated. The parameter will be removed in the next major version."; - @Override public List routes() { return List.of(new Route(GET, "/_template"), new Route(GET, "/_template/{name}"), new Route(HEAD, "/_template/{name}")); @@ -53,9 +43,6 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - if (request.getRestApiVersion() == RestApiVersion.V_7 && request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { - deprecationLogger.compatibleCritical("get_index_template_include_type_name", TYPES_DEPRECATION_MESSAGE); - } final String[] names = Strings.splitStringByCommaToArray(request.param("name")); final GetIndexTemplatesRequest getIndexTemplatesRequest = new GetIndexTemplatesRequest(names); @@ -75,13 +62,4 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC protected Set responseParams() { return Settings.FORMAT_PARAMS; } - - @Override - protected Set responseParams(RestApiVersion restApiVersion) { - if (restApiVersion 
== RestApiVersion.V_7) { - return COMPATIBLE_RESPONSE_PARAMS; - } else { - return responseParams(); - } - } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java index 3d78fa538bf04..9ca890eaff65b 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java @@ -15,7 +15,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -27,7 +26,6 @@ import java.util.List; import java.util.Set; -import static org.elasticsearch.common.util.set.Sets.addToCopy; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.HEAD; import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; @@ -41,8 +39,6 @@ public class RestGetIndicesAction extends BaseRestHandler { public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using `include_type_name` in get indices requests" + " is deprecated. 
The parameter will be removed in the next major version."; - private static final Set COMPATIBLE_RESPONSE_PARAMS = addToCopy(Settings.FORMAT_PARAMS, INCLUDE_TYPE_NAME_PARAMETER); - @Override public List routes() { return List.of(new Route(GET, "/{index}"), new Route(HEAD, "/{index}")); @@ -55,13 +51,6 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - // starting with 7.0 we don't include types by default in the response to GET requests - if (request.getRestApiVersion() == RestApiVersion.V_7 - && request.hasParam(INCLUDE_TYPE_NAME_PARAMETER) - && request.method().equals(GET)) { - deprecationLogger.compatibleCritical("get_indices_with_types", TYPES_DEPRECATION_MESSAGE); - } - String[] indices = Strings.splitStringByCommaToArray(request.param("index")); final GetIndexRequest getIndexRequest = new GetIndexRequest(); getIndexRequest.indices(indices); @@ -85,13 +74,4 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC protected Set responseParams() { return Settings.FORMAT_PARAMS; } - - @Override - protected Set responseParams(RestApiVersion restApiVersion) { - if (restApiVersion == RestApiVersion.V_7) { - return COMPATIBLE_RESPONSE_PARAMS; - } else { - return responseParams(); - } - } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java index 5f40bea92f818..242bcd399413b 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.core.RestApiVersion; 
import org.elasticsearch.core.TimeValue; import org.elasticsearch.http.HttpChannel; import org.elasticsearch.rest.BaseRestHandler; @@ -28,7 +27,6 @@ import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.GET; -import static org.elasticsearch.rest.RestRequest.Method.HEAD; import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; @ServerlessScope(Scope.PUBLIC) @@ -58,26 +56,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - if (request.getRestApiVersion() == RestApiVersion.V_7) { - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { - request.param(INCLUDE_TYPE_NAME_PARAMETER); - deprecationLogger.compatibleCritical("get_mapping_with_types", INCLUDE_TYPE_DEPRECATION_MSG); - } - final String[] types = request.paramAsStringArrayOrEmptyIfAll("type"); - if (request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY) == false && types.length > 0) { - throw new IllegalArgumentException( - "Types cannot be provided in get mapping requests, unless include_type_name is set to true." - ); - } - if (request.method().equals(HEAD)) { - deprecationLogger.compatibleCritical( - "get_mapping_types_removal", - "Type exists requests are deprecated, as types have been deprecated." 
- ); - } - } final String[] indices = Strings.splitStringByCommaToArray(request.param("index")); - final GetMappingsRequest getMappingsRequest = new GetMappingsRequest(); getMappingsRequest.indices(indices); getMappingsRequest.indicesOptions(IndicesOptions.fromRequest(request, getMappingsRequest.indicesOptions())); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsAction.java index dcd4cda21f969..b5050eb8007a8 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -76,11 +75,6 @@ public boolean allowSystemIndexAccessByDefault() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - if (request.getRestApiVersion() == RestApiVersion.V_7 && request.hasParam("types")) { - deprecationLogger.compatibleCritical("indices_stats_types", TYPES_DEPRECATION_MESSAGE); - request.param("types"); - } - IndicesStatsRequest indicesStatsRequest = new IndicesStatsRequest(); boolean forbidClosedIndices = request.paramAsBoolean("forbid_closed_indices", true); IndicesOptions defaultIndicesOption = forbidClosedIndices diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java index f70d9351e69c9..defec2fefc615 100644 --- 
a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java @@ -21,12 +21,12 @@ import java.io.IOException; import java.util.List; -import java.util.Map; import static java.util.Arrays.asList; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestRequest.Method.PUT; import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; +import static org.elasticsearch.rest.action.admin.indices.RestCreateIndexAction.prepareMappings; public class RestPutIndexTemplateAction extends BaseRestHandler { @@ -53,42 +53,12 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { PutIndexTemplateRequest putRequest = new PutIndexTemplateRequest(request.param("name")); - if (request.getRestApiVersion() == RestApiVersion.V_7 && request.hasParam("template")) { - deprecationLogger.compatibleCritical( - "template_parameter_deprecation", - "Deprecated parameter [template] used, replaced by [index_patterns]" - ); - putRequest.patterns(List.of(request.param("template"))); - } else { - putRequest.patterns(asList(request.paramAsStringArray("index_patterns", Strings.EMPTY_ARRAY))); - } + putRequest.patterns(asList(request.paramAsStringArray("index_patterns", Strings.EMPTY_ARRAY))); putRequest.order(request.paramAsInt("order", putRequest.order())); putRequest.masterNodeTimeout(getMasterNodeTimeout(request)); putRequest.create(request.paramAsBoolean("create", false)); putRequest.cause(request.param("cause", "")); - - Map sourceAsMap = XContentHelper.convertToMap(request.requiredContent(), false, request.getXContentType()).v2(); - if (request.getRestApiVersion() == RestApiVersion.V_7) { - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { - 
deprecationLogger.compatibleCritical("put_index_template_with_types", TYPES_DEPRECATION_MESSAGE); - } - boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - if (includeTypeName) { - sourceAsMap = RestCreateIndexAction.prepareMappingsV7(sourceAsMap, request); - } else { - sourceAsMap = RestCreateIndexAction.prepareMappings(sourceAsMap); - } - } else { - sourceAsMap = RestCreateIndexAction.prepareMappings(sourceAsMap); - } - if (request.getRestApiVersion() == RestApiVersion.V_7 && sourceAsMap.containsKey("template")) { - deprecationLogger.compatibleCritical( - "template_field_deprecation", - "Deprecated field [template] used, replaced by [index_patterns]" - ); - putRequest.patterns(List.of((String) sourceAsMap.remove("template"))); - } - putRequest.source(sourceAsMap); + putRequest.source(prepareMappings(XContentHelper.convertToMap(request.requiredContent(), false, request.getXContentType()).v2())); return channel -> client.admin().indices().putTemplate(putRequest, new RestToXContentListener<>(channel)); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java index 014e761acc388..ee11f7f520463 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java @@ -15,8 +15,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -27,6 +25,7 @@ import java.util.List; import java.util.Map; +import static 
org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME; import static org.elasticsearch.index.mapper.MapperService.isMappingSourceTyped; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestRequest.Method.PUT; @@ -60,27 +59,10 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC PutMappingRequest putMappingRequest = new PutMappingRequest(indices); Map sourceAsMap = XContentHelper.convertToMap(request.requiredContent(), false, request.getXContentType()).v2(); - if (request.getRestApiVersion() == RestApiVersion.V_7) { - final boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { - deprecationLogger.compatibleCritical("put_mapping_with_types", TYPES_DEPRECATION_MESSAGE); - } - final String type = request.param("type"); - if (includeTypeName == false && (type != null || isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, sourceAsMap))) { - throw new IllegalArgumentException( - "Types cannot be provided in put mapping requests, unless the include_type_name parameter is set to true." 
- ); - } - - Map mappingSource = prepareV7Mappings(includeTypeName, sourceAsMap); - putMappingRequest.source(mappingSource); - } else { - if (MapperService.isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, sourceAsMap)) { - throw new IllegalArgumentException("Types cannot be provided in put mapping requests"); - } - putMappingRequest.source(sourceAsMap); + if (isMappingSourceTyped(SINGLE_MAPPING_NAME, sourceAsMap)) { + throw new IllegalArgumentException("Types cannot be provided in put mapping requests"); } - + putMappingRequest.source(sourceAsMap); putMappingRequest.ackTimeout(getAckTimeout(request)); putMappingRequest.masterNodeTimeout(getMasterNodeTimeout(request)); putMappingRequest.indicesOptions(IndicesOptions.fromRequest(request, putMappingRequest.indicesOptions())); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java index c6afc8e041d01..ee1710f39ce41 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java @@ -14,8 +14,6 @@ import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.core.Booleans; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -40,15 +38,6 @@ public abstract class RestResizeHandler extends BaseRestHandler { @Override public final RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - if (request.getRestApiVersion() == RestApiVersion.V_7 && request.hasParam("copy_settings")) { - 
deprecationLogger.compatibleCritical("copy_settings", "parameter [copy_settings] is deprecated and will be removed in 8.0.0"); - - final String rawCopySettings = request.param("copy_settings"); - final boolean copySettings = Booleans.parseBoolean(rawCopySettings); - if (copySettings == false) { - throw new IllegalArgumentException("parameter [copy_settings] can not be explicitly set to [false]"); - } - } final ResizeRequest resizeRequest = new ResizeRequest(request.param("target"), request.param("index")); resizeRequest.setResizeType(getResizeType()); request.applyContentParser(resizeRequest::fromXContent); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java index c4df68098190f..ebae4a36c6d3d 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java @@ -15,7 +15,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -59,9 +58,8 @@ public Set supportedCapabilities() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - final boolean includeTypeName = includeTypeName(request); RolloverRequest rolloverIndexRequest = new RolloverRequest(request.param("index"), request.param("new_index")); - request.applyContentParser(parser -> rolloverIndexRequest.fromXContent(includeTypeName, parser)); + request.applyContentParser(parser -> rolloverIndexRequest.fromXContent(parser)); 
rolloverIndexRequest.dryRun(request.paramAsBoolean("dry_run", false)); rolloverIndexRequest.lazy(request.paramAsBoolean("lazy", false)); rolloverIndexRequest.ackTimeout(getAckTimeout(request)); @@ -83,14 +81,4 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC .rolloverIndex(rolloverIndexRequest, new RestToXContentListener<>(channel)); } - private static boolean includeTypeName(RestRequest request) { - boolean includeTypeName = false; - if (request.getRestApiVersion() == RestApiVersion.V_7) { - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { - deprecationLogger.compatibleCritical("index_rollover_with_types", TYPES_DEPRECATION_MESSAGE); - } - includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - } - return includeTypeName; - } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java index 8784fad3405d0..8bc7e9aa76551 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; @@ -57,11 +56,6 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - if (request.getRestApiVersion() == RestApiVersion.V_7 && request.hasParam("type")) { - deprecationLogger.compatibleCritical("validate_query_with_types", TYPES_DEPRECATION_MESSAGE); - request.param("type"); 
- } - ValidateQueryRequest validateQueryRequest = new ValidateQueryRequest(Strings.splitStringByCommaToArray(request.param("index"))); validateQueryRequest.indicesOptions(IndicesOptions.fromRequest(request, validateQueryRequest.indicesOptions())); validateQueryRequest.explain(request.paramAsBoolean("explain", false)); diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java index f97ea1db7a036..a968ea4520f40 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.rest.RestRequest; @@ -171,11 +170,7 @@ protected Table getTableWithHeader(final RestRequest request) { table.addCell("store.size", "sibling:pri;alias:ss,storeSize;text-align:right;desc:store size of primaries & replicas"); table.addCell("pri.store.size", "text-align:right;desc:store size of primaries"); - if (request.getRestApiVersion() == RestApiVersion.V_7) { - table.addCell("dataset.size", "default:false;text-align:right;desc:total size of dataset"); - } else { - table.addCell("dataset.size", "text-align:right;desc:total size of dataset"); - } + table.addCell("dataset.size", "text-align:right;desc:total size of dataset"); table.addCell("completion.size", "sibling:pri;alias:cs,completionSize;default:false;text-align:right;desc:size of completion"); table.addCell("pri.completion.size", "default:false;text-align:right;desc:size of completion"); diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java 
b/server/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java index 72f786004e0bd..3bee9c5f3b04c 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.Table; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.ListenableFuture; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.bulk.stats.BulkStats; import org.elasticsearch.index.cache.query.QueryCacheStats; @@ -119,12 +118,7 @@ protected Table getTableWithHeader(final RestRequest request) { .addCell("state", "default:true;alias:st;desc:shard state") .addCell("docs", "alias:d,dc;text-align:right;desc:number of docs in shard") .addCell("store", "alias:sto;text-align:right;desc:store size of shard (how much disk it uses)") - .addCell( - "dataset", - request.getRestApiVersion() == RestApiVersion.V_7 - ? 
"default:false;text-align:right;desc:total size of dataset" - : "text-align:right;desc:total size of dataset" - ) + .addCell("dataset", "text-align:right;desc:total size of dataset") .addCell("ip", "default:true;desc:ip of node where it lives") .addCell("id", "default:false;desc:unique id of node where it lives") .addCell("node", "default:true;alias:n;desc:name of node where it lives"); diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java index 0b8e64f5eab4a..03694c7442d4d 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; @@ -91,9 +90,6 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { if (request.isStreamedContent() == false) { - if (request.getRestApiVersion() == RestApiVersion.V_7 && request.hasParam("type")) { - request.param("type"); - } BulkRequest bulkRequest = new BulkRequest(); String defaultIndex = request.param("index"); String defaultRouting = request.param("routing"); @@ -124,10 +120,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC return channel -> client.bulk(bulkRequest, new RestRefCountedChunkedToXContentListener<>(channel)); } else { - if (request.getRestApiVersion() == RestApiVersion.V_7 && request.hasParam("type")) { - request.param("type"); - } - String waitForActiveShards = request.param("wait_for_active_shards"); TimeValue timeout = 
request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT); String refresh = request.param("refresh"); diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestDeleteAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestDeleteAction.java index 3ee1810967153..3c795c635056d 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestDeleteAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestDeleteAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -42,9 +41,6 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - if (request.getRestApiVersion() == RestApiVersion.V_7 && request.hasParam("type")) { - request.param("type"); - } DeleteRequest deleteRequest = new DeleteRequest(request.param("index"), request.param("id")); deleteRequest.routing(request.param("routing")); deleteRequest.timeout(request.paramAsTime("timeout", DeleteRequest.DEFAULT_TIMEOUT)); diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java index cc2b820eb05f2..4e234c4cbcc3d 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.core.RestApiVersion; import 
org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -44,12 +43,7 @@ public List routes() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - if (request.getRestApiVersion() == RestApiVersion.V_7) { - request.param("type"); // consume and ignore the type - } - GetRequest getRequest = new GetRequest(request.param("index"), request.param("id")); - getRequest.refresh(request.paramAsBoolean("refresh", getRequest.refresh())); getRequest.routing(request.param("routing")); getRequest.preference(request.param("preference")); diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java index e6567d7fdf592..a09fcbd0c5273 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; @@ -57,11 +56,6 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - if (request.getRestApiVersion() == RestApiVersion.V_7 && request.hasParam("type")) { - request.param("type"); // consume and ignore the type - deprecationLogger.compatibleCritical("get_source_with_types", TYPES_DEPRECATION_MESSAGE); - } - final GetRequest getRequest = new GetRequest(request.param("index"), request.param("id")); 
getRequest.refresh(request.paramAsBoolean("refresh", getRequest.refresh())); getRequest.routing(request.param("routing")); diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java index 9931c10b38f3f..c2437dcb96fa6 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.metadata.DataStream; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -105,10 +104,6 @@ public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - if (request.getRestApiVersion() == RestApiVersion.V_7) { - request.param("type"); // consume and ignore the type - } - IndexRequest indexRequest = new IndexRequest(request.param("index")); indexRequest.id(request.param("id")); indexRequest.routing(request.param("routing")); diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiGetAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiGetAction.java index 0ceb9ebab7397..12769ef1c73e9 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiGetAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; -import 
org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -55,9 +54,6 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - if (request.getRestApiVersion() == RestApiVersion.V_7 && request.param("type") != null) { - request.param("type"); - } MultiGetRequest multiGetRequest = new MultiGetRequest(); multiGetRequest.refresh(request.paramAsBoolean("refresh", multiGetRequest.refresh())); multiGetRequest.preference(request.param("preference")); diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java index 0e23362e3f5df..65aa1869a41e4 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java @@ -14,9 +14,7 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.DeprecationRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; @@ -50,10 +48,6 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - if (request.getRestApiVersion() == RestApiVersion.V_7 && request.hasParam("type")) { - request.param("type"); - deprecationLogger.compatibleCritical(DeprecationRestHandler.DEPRECATED_ROUTE_KEY, TYPES_DEPRECATION_MESSAGE); - } MultiTermVectorsRequest multiTermVectorsRequest = new 
MultiTermVectorsRequest(); TermVectorsRequest template = new TermVectorsRequest().index(request.param("index")); diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestTermVectorsAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestTermVectorsAction.java index 1fbf35856589b..8e41e1cd09674 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestTermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestTermVectorsAction.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.termvectors.TermVectorsRequest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -55,11 +54,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - if (request.getRestApiVersion() == RestApiVersion.V_7 && request.hasParam("type")) { - request.param("type"); - } - TermVectorsRequest termVectorsRequest; - termVectorsRequest = new TermVectorsRequest(request.param("index"), request.param("id")); + TermVectorsRequest termVectorsRequest = new TermVectorsRequest(request.param("index"), request.param("id")); if (request.hasContentOrSourceParam()) { try (XContentParser parser = request.contentOrSourceParamParser()) { diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java index 682d5b5c55c3f..57b3a89b2303b 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java @@ -16,7 +16,6 @@ import 
org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -45,9 +44,6 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - if (request.getRestApiVersion() == RestApiVersion.V_7 && request.hasParam("type")) { - request.param("type"); - } UpdateRequest updateRequest = new UpdateRequest(request.param("index"), request.param("id")); updateRequest.routing(request.param("routing")); updateRequest.timeout(request.paramAsTime("timeout", updateRequest.timeout())); diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java index 0c3680e09e6bf..23da666a39a7e 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java @@ -15,7 +15,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -57,10 +56,6 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - if (request.getRestApiVersion() == RestApiVersion.V_7 && request.hasParam("type")) { - deprecationLogger.compatibleCritical("count_with_types", TYPES_DEPRECATION_MESSAGE); - request.param("type"); - } SearchRequest countRequest = 
new SearchRequest(Strings.splitStringByCommaToArray(request.param("index"))); countRequest.indicesOptions(IndicesOptions.fromRequest(request, countRequest.indicesOptions())); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(0).trackTotalHits(true); diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java index 4d88d115e1da7..c5dc047933ea6 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.explain.ExplainResponse; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -47,11 +46,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - if (request.getRestApiVersion() == RestApiVersion.V_7 && request.hasParam("type")) { - request.param("type"); - } ExplainRequest explainRequest = new ExplainRequest(request.param("index"), request.param("id")); - explainRequest.parent(request.param("parent")); explainRequest.routing(request.param("routing")); explainRequest.preference(request.param("preference")); diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java index a58904c2649d9..aeb182978e1eb 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java @@ -19,7 +19,6 @@ 
import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Tuple; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.BaseRestHandler; @@ -110,10 +109,6 @@ public static MultiSearchRequest parseRequest( Predicate clusterSupportsFeature, TriFunction extraParamParser ) throws IOException { - if (restRequest.getRestApiVersion() == RestApiVersion.V_7 && restRequest.hasParam("type")) { - restRequest.param("type"); - } - MultiSearchRequest multiRequest = new MultiSearchRequest(); IndicesOptions indicesOptions = IndicesOptions.fromRequest(restRequest, multiRequest.indicesOptions()); multiRequest.indicesOptions(indicesOptions); diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index 4fff9229372ea..af60979dfe169 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.rest.BaseRestHandler; @@ -169,11 +168,6 @@ public static void parseSearchRequest( IntConsumer setSize, @Nullable SearchUsageHolder searchUsageHolder ) throws IOException { - if (request.getRestApiVersion() == RestApiVersion.V_7 && request.hasParam("type")) { - request.param("type"); - deprecationLogger.compatibleCritical("search_with_types", TYPES_DEPRECATION_MESSAGE); - } - if (searchRequest.source() == null) { searchRequest.source(new 
SearchSourceBuilder()); } @@ -250,19 +244,8 @@ private static void parseSearchSource(final SearchSourceBuilder searchSourceBuil searchSourceBuilder.from(request.paramAsInt("from", 0)); } if (request.hasParam("size")) { - int size = request.paramAsInt("size", SearchService.DEFAULT_SIZE); - if (request.getRestApiVersion() == RestApiVersion.V_7 && size == -1) { - // we treat -1 as not-set, but deprecate it to be able to later remove this funny extra treatment - deprecationLogger.compatibleCritical( - "search-api-size-1", - "Using search size of -1 is deprecated and will be removed in future versions. " - + "Instead, don't use the `size` parameter if you don't want to set it explicitly." - ); - } else { - setSize.accept(size); - } + setSize.accept(request.paramAsInt("size", SearchService.DEFAULT_SIZE)); } - if (request.hasParam("explain")) { searchSourceBuilder.explain(request.paramAsBoolean("explain", null)); } diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java index 381b4bc0e9008..1611c95d99df4 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RefCounted; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.SimpleRefCounted; import org.elasticsearch.index.mapper.IgnoredFieldMapper; import org.elasticsearch.index.mapper.MapperService; @@ -816,9 +815,6 @@ public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) t if (index != null) { builder.field(Fields._INDEX, RemoteClusterAware.buildRemoteIndexName(clusterAlias, index)); } - if (builder.getRestApiVersion() == RestApiVersion.V_7 && metaFields.containsKey(MapperService.TYPE_FIELD_NAME) == false) { - builder.field(MapperService.TYPE_FIELD_NAME, 
MapperService.SINGLE_MAPPING_NAME); - } if (id != null) { builder.field(Fields._ID, id); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java index 043fab6f4f122..b2ca4a10dc4b3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.search.aggregations.Aggregator.BucketComparator; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; import org.elasticsearch.search.aggregations.support.AggregationPath; @@ -591,15 +590,6 @@ public static BucketOrder parseOrderParam(XContentParser parser) throws IOExcept if (orderKey == null) { throw new ParsingException(parser.getTokenLocation(), "Must specify at least one field for [order]"); } - // _term and _time order deprecated in 6.0; replaced by _key - if (parser.getRestApiVersion() == RestApiVersion.V_7 && ("_term".equals(orderKey) || "_time".equals(orderKey))) { - deprecationLogger.compatibleCritical( - "_term_and_time_key_removal", - "Deprecated aggregation order key [{}] used, replaced by [_key]", - orderKey - ); - return orderAsc ? KEY_ASC : KEY_DESC; - } return switch (orderKey) { case "_key" -> orderAsc ? KEY_ASC : KEY_DESC; case "_count" -> orderAsc ? 
COUNT_ASC : COUNT_DESC; diff --git a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 9f94ec1452019..6d427aace51dd 100644 --- a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.TimeValue; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.query.BoolQueryBuilder; @@ -1341,17 +1340,7 @@ private SearchSourceBuilder parseXContent( if (FROM_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { from(parser.intValue()); } else if (SIZE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - int parsedSize = parser.intValue(); - if (parser.getRestApiVersion() == RestApiVersion.V_7 && parsedSize == -1) { - // we treat -1 as not-set, but deprecate it to be able to later remove this funny extra treatment - deprecationLogger.compatibleCritical( - "search-api-size-1", - "Using search size of -1 is deprecated and will be removed in future versions. " - + "Instead, don't use the `size` parameter if you don't want to set it explicitly." 
- ); - } else { - size(parsedSize); - } + size(parser.intValue()); } else if (TIMEOUT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { timeout = TimeValue.parseTimeValue(parser.text(), null, TIMEOUT_FIELD.getPreferredName()); } else if (TERMINATE_AFTER_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { @@ -1457,99 +1446,79 @@ private SearchSourceBuilder parseXContent( scriptFields.add(new ScriptField(parser)); } searchUsage.trackSectionUsage(SCRIPT_FIELDS_FIELD.getPreferredName()); - } else if (parser.getRestApiVersion() == RestApiVersion.V_7 - && INDICES_BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - deprecationLogger.compatibleCritical( - "indices_boost_object_format", - "Object format in indices_boost is deprecated, please use array format instead" - ); + } else if (AGGREGATIONS_FIELD.match(currentFieldName, parser.getDeprecationHandler()) + || AGGS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + aggregations = AggregatorFactories.parseAggregators(parser); + if (aggregations.count() > 0) { + searchUsage.trackSectionUsage(AGGS_FIELD.getPreferredName()); + } + } else if (HIGHLIGHT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + highlightBuilder = HighlightBuilder.fromXContent(parser); + if (highlightBuilder.fields().size() > 0) { + searchUsage.trackSectionUsage(HIGHLIGHT_FIELD.getPreferredName()); + } + } else if (SUGGEST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + suggestBuilder = SuggestBuilder.fromXContent(parser); + if (suggestBuilder.getSuggestions().size() > 0) { + searchUsage.trackSectionUsage(SUGGEST_FIELD.getPreferredName()); + } + } else if (SORT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + sorts = new ArrayList<>(SortBuilder.fromXContent(parser)); + } else if (RESCORE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + rescoreBuilders = new ArrayList<>(); + 
rescoreBuilders.add(RescorerBuilder.parseFromXContent(parser, searchUsage::trackRescorerUsage)); + searchUsage.trackSectionUsage(RESCORE_FIELD.getPreferredName()); + } else if (EXT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + extBuilders = new ArrayList<>(); + String extSectionName = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - indexBoosts.add(new IndexBoost(currentFieldName, parser.floatValue())); + extSectionName = parser.currentName(); } else { - throw new ParsingException( - parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + currentFieldName + "].", - parser.getTokenLocation() - ); - } - } - searchUsage.trackSectionUsage(INDICES_BOOST_FIELD.getPreferredName()); - } else if (AGGREGATIONS_FIELD.match(currentFieldName, parser.getDeprecationHandler()) - || AGGS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - aggregations = AggregatorFactories.parseAggregators(parser); - if (aggregations.count() > 0) { - searchUsage.trackSectionUsage(AGGS_FIELD.getPreferredName()); - } - } else if (HIGHLIGHT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - highlightBuilder = HighlightBuilder.fromXContent(parser); - if (highlightBuilder.fields().size() > 0) { - searchUsage.trackSectionUsage(HIGHLIGHT_FIELD.getPreferredName()); - } - } else if (SUGGEST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - suggestBuilder = SuggestBuilder.fromXContent(parser); - if (suggestBuilder.getSuggestions().size() > 0) { - searchUsage.trackSectionUsage(SUGGEST_FIELD.getPreferredName()); - } - } else if (SORT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - sorts = new ArrayList<>(SortBuilder.fromXContent(parser)); - } else if (RESCORE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - rescoreBuilders = new 
ArrayList<>(); - rescoreBuilders.add(RescorerBuilder.parseFromXContent(parser, searchUsage::trackRescorerUsage)); - searchUsage.trackSectionUsage(RESCORE_FIELD.getPreferredName()); - } else if (EXT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - extBuilders = new ArrayList<>(); - String extSectionName = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - extSectionName = parser.currentName(); - } else { - SearchExtBuilder searchExtBuilder = parser.namedObject(SearchExtBuilder.class, extSectionName, null); - if (searchExtBuilder.getWriteableName().equals(extSectionName) == false) { - throw new IllegalStateException( - "The parsed [" - + searchExtBuilder.getClass().getName() - + "] object has a different writeable name compared to the name of the section that " - + " it was parsed from: found [" - + searchExtBuilder.getWriteableName() - + "] expected [" - + extSectionName - + "]" - ); - } - extBuilders.add(searchExtBuilder); + SearchExtBuilder searchExtBuilder = parser.namedObject(SearchExtBuilder.class, extSectionName, null); + if (searchExtBuilder.getWriteableName().equals(extSectionName) == false) { + throw new IllegalStateException( + "The parsed [" + + searchExtBuilder.getClass().getName() + + "] object has a different writeable name compared to the name of the section that " + + " it was parsed from: found [" + + searchExtBuilder.getWriteableName() + + "] expected [" + + extSectionName + + "]" + ); } + extBuilders.add(searchExtBuilder); } - if (extBuilders.size() > 0) { - searchUsage.trackSectionUsage(EXT_FIELD.getPreferredName()); - } - } else if (SLICE.match(currentFieldName, parser.getDeprecationHandler())) { - sliceBuilder = SliceBuilder.fromXContent(parser); - if (sliceBuilder.getField() != null || sliceBuilder.getId() != -1 || sliceBuilder.getMax() != -1) { - searchUsage.trackSectionUsage(SLICE.getPreferredName()); - } - } else if 
(COLLAPSE.match(currentFieldName, parser.getDeprecationHandler())) { - collapse = CollapseBuilder.fromXContent(parser); - if (collapse.getField() != null) { - searchUsage.trackSectionUsage(COLLAPSE.getPreferredName()); - } - } else if (POINT_IN_TIME.match(currentFieldName, parser.getDeprecationHandler())) { - pointInTimeBuilder = PointInTimeBuilder.fromXContent(parser); - searchUsage.trackSectionUsage(POINT_IN_TIME.getPreferredName()); - } else if (RUNTIME_MAPPINGS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - runtimeMappings = parser.map(); - if (runtimeMappings.size() > 0) { - searchUsage.trackSectionUsage(RUNTIME_MAPPINGS_FIELD.getPreferredName()); - } - } else { - throw new ParsingException( - parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + currentFieldName + "].", - parser.getTokenLocation() - ); } + if (extBuilders.size() > 0) { + searchUsage.trackSectionUsage(EXT_FIELD.getPreferredName()); + } + } else if (SLICE.match(currentFieldName, parser.getDeprecationHandler())) { + sliceBuilder = SliceBuilder.fromXContent(parser); + if (sliceBuilder.getField() != null || sliceBuilder.getId() != -1 || sliceBuilder.getMax() != -1) { + searchUsage.trackSectionUsage(SLICE.getPreferredName()); + } + } else if (COLLAPSE.match(currentFieldName, parser.getDeprecationHandler())) { + collapse = CollapseBuilder.fromXContent(parser); + if (collapse.getField() != null) { + searchUsage.trackSectionUsage(COLLAPSE.getPreferredName()); + } + } else if (POINT_IN_TIME.match(currentFieldName, parser.getDeprecationHandler())) { + pointInTimeBuilder = PointInTimeBuilder.fromXContent(parser); + searchUsage.trackSectionUsage(POINT_IN_TIME.getPreferredName()); + } else if (RUNTIME_MAPPINGS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + runtimeMappings = parser.map(); + if (runtimeMappings.size() > 0) { + searchUsage.trackSectionUsage(RUNTIME_MAPPINGS_FIELD.getPreferredName()); + } + } else { + throw new ParsingException( + 
parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + currentFieldName + "].", + parser.getTokenLocation() + ); + } } else if (token == XContentParser.Token.START_ARRAY) { if (STORED_FIELDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { storedFieldsContext = StoredFieldsContext.fromXContent(STORED_FIELDS_FIELD.getPreferredName(), parser); diff --git a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java index 56871764ab0d4..6640f0f858404 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java @@ -31,7 +31,6 @@ import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; @@ -427,18 +426,7 @@ public static GeoDistanceSortBuilder fromXContent(XContentParser parser, String fieldName = currentName; } else if (token == XContentParser.Token.START_OBJECT) { - if (parser.getRestApiVersion() == RestApiVersion.V_7 - && NESTED_FILTER_FIELD.match(currentName, parser.getDeprecationHandler())) { - deprecationLogger.compatibleCritical( - "nested_filter", - "[nested_filter] has been removed in favour of the [nested] parameter" - ); - throw new ParsingException( - parser.getTokenLocation(), - "[nested_filter] has been removed in favour of the [nested] parameter", - currentName - ); - } else if (NESTED_FIELD.match(currentName, parser.getDeprecationHandler())) { + if (NESTED_FIELD.match(currentName, parser.getDeprecationHandler())) { nestedSort = NestedSortBuilder.fromXContent(parser); } else { 
// the json in the format of -> field : { lat : 30, lon : 12 } @@ -454,18 +442,7 @@ public static GeoDistanceSortBuilder fromXContent(XContentParser parser, String geoPoints.add(GeoUtils.parseGeoPoint(parser)); } } else if (token.isValue()) { - if (parser.getRestApiVersion() == RestApiVersion.V_7 - && NESTED_PATH_FIELD.match(currentName, parser.getDeprecationHandler())) { - deprecationLogger.compatibleCritical( - "nested_path", - "[nested_path] has been removed in favour of the [nested] parameter" - ); - throw new ParsingException( - parser.getTokenLocation(), - "[nested_path] has been removed in favour of the [nested] parameter", - currentName - ); - } else if (ORDER_FIELD.match(currentName, parser.getDeprecationHandler())) { + if (ORDER_FIELD.match(currentName, parser.getDeprecationHandler())) { order = SortOrder.fromString(parser.text()); } else if (UNIT_FIELD.match(currentName, parser.getDeprecationHandler())) { unit = DistanceUnit.fromString(parser.text()); diff --git a/server/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java b/server/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java index a9cc6c07e68c8..c5d995710bcec 100644 --- a/server/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java @@ -114,13 +114,6 @@ public void testTypeWhenCompatible() throws IOException { ) { // DocWriteResponse is abstract so we have to sneak a subclass in here to test it. 
}; - try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent, RestApiVersion.V_7)) { - response.toXContent(builder, ToXContent.EMPTY_PARAMS); - - try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { - assertThat(parser.map(), hasEntry(MapperService.TYPE_FIELD_NAME, MapperService.SINGLE_MAPPING_NAME)); - } - } try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent, RestApiVersion.V_8)) { response.toXContent(builder, ToXContent.EMPTY_PARAMS); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java index 3f0a6b40ce60a..7640c1e7af308 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java @@ -17,8 +17,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -63,31 +61,8 @@ public void testToXContentIncludesType() throws Exception { FieldMappingMetadata fieldMappingMetadata = new FieldMappingMetadata("my field", new BytesArray("{}")); mappings.put("index", Collections.singletonMap("field", fieldMappingMetadata)); GetFieldMappingsResponse response = new GetFieldMappingsResponse(mappings); - ToXContent.Params params = new ToXContent.MapParams(Collections.singletonMap(BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER, "true")); + ToXContent.Params params = new 
ToXContent.MapParams(Collections.singletonMap("include_type_name", "true")); - // v7 with include_type_name attaches _doc - try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent, RestApiVersion.V_7)) { - response.toXContent(builder, params); - - try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { - @SuppressWarnings("unchecked") - Map>> index = (Map>>) parser.map() - .get("index"); - assertThat(index.get("mappings"), hasKey(MapperService.SINGLE_MAPPING_NAME)); - assertThat(index.get("mappings").get(MapperService.SINGLE_MAPPING_NAME), hasKey("field")); - } - } - - // v7 with no include_type_name do not attach _doc - try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent, RestApiVersion.V_7)) { - response.toXContent(builder, ToXContent.EMPTY_PARAMS); - - try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { - @SuppressWarnings("unchecked") - Map> index = (Map>) parser.map().get("index"); - assertThat(index.get("mappings"), hasKey("field")); - } - } // v8 does not have _doc, even when include_type_name is present // (although this throws unconsumed parameter exception in RestGetFieldMappingsAction) try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent, RestApiVersion.V_8)) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java index a7fa81eb24a57..67bfa0e37dcf5 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java @@ -66,7 +66,7 @@ public void testConditionsParsing() throws Exception { .endObject() .endObject(); try (var parser = createParser(builder)) { - request.fromXContent(false, 
parser); + request.fromXContent(parser); } Map> conditions = request.getConditions().getConditions(); assertThat(conditions.size(), equalTo(10)); @@ -120,7 +120,7 @@ public void testParsingWithIndexSettings() throws Exception { .endObject() .endObject(); try (var parser = createParser(builder)) { - request.fromXContent(false, parser); + request.fromXContent(parser); } Map> conditions = request.getConditions().getConditions(); assertThat(conditions.size(), equalTo(3)); @@ -143,7 +143,7 @@ public void testTypelessMappingParsing() throws Exception { .endObject(); try (var parser = createParser(builder)) { - request.fromXContent(false, parser); + request.fromXContent(parser); } CreateIndexRequest createIndexRequest = request.getCreateIndexRequest(); String mapping = createIndexRequest.mappings(); @@ -216,7 +216,7 @@ public void testUnknownFields() throws IOException { BytesReference mutated = XContentTestUtils.insertRandomFields(xContentType, BytesReference.bytes(builder), null, random()); expectThrows(XContentParseException.class, () -> { try (var parser = createParser(xContentType.xContent(), mutated)) { - request.fromXContent(false, parser); + request.fromXContent(parser); } }); } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java index 788265d353698..bc7fe7a62a1da 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; @@ -34,10 +33,8 @@ import org.elasticsearch.xcontent.XContentBuilder; 
import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; -import java.util.UUID; import static org.elasticsearch.ElasticsearchExceptionTests.assertDeepEquals; import static org.elasticsearch.ElasticsearchExceptionTests.randomExceptions; @@ -95,28 +92,6 @@ public static void parseInnerToXContent(XContentParser parser, DocWriteResponse. } } - public void testBulkItemResponseShouldContainTypeInV7CompatibilityMode() throws IOException { - BulkItemResponse bulkItemResponse = BulkItemResponse.success( - randomInt(), - DocWriteRequest.OpType.INDEX, - new IndexResponse( - new ShardId(randomAlphaOfLength(8), UUID.randomUUID().toString(), randomInt()), - randomAlphaOfLength(4), - randomNonNegativeLong(), - randomNonNegativeLong(), - randomNonNegativeLong(), - true - ) - ); - XContentBuilder xContentBuilder = bulkItemResponse.toXContent( - XContentBuilder.builder(JsonXContent.jsonXContent, RestApiVersion.V_7), - ToXContent.EMPTY_PARAMS - ); - - String json = BytesReference.bytes(xContentBuilder).utf8ToString(); - assertThat(json, containsString("\"_type\":\"_doc\"")); - } - public void testFailureToString() { Failure failure = new Failure("index", "id", new RuntimeException("test")); String toString = failure.toString(); diff --git a/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java b/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java index 38552d0a58683..e0d94632f06c1 100644 --- a/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java +++ b/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Tuple; import 
org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.IndexFieldMapper; @@ -26,10 +25,8 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.RandomObjects; import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; import java.util.ArrayList; @@ -114,56 +111,6 @@ public void testToXContent() throws IOException { } } - public void testToCompatibleXContent() throws IOException { - { - GetResult getResult = new GetResult( - "index", - "id", - 0, - 1, - 1, - true, - new BytesArray(""" - { "field1" : "value1", "field2":"value2"}"""), - singletonMap("field1", new DocumentField("field1", singletonList("value1"))), - singletonMap("field1", new DocumentField("metafield", singletonList("metavalue"))) - ); - - try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent, RestApiVersion.V_7)) { - getResult.toXContent(builder, ToXContent.EMPTY_PARAMS); - String output = Strings.toString(builder); - assertEquals(XContentHelper.stripWhitespace(""" - { - "_index": "index", - "_type": "_doc", - "_id": "id", - "_version": 1, - "_seq_no": 0, - "_primary_term": 1, - "metafield": "metavalue", - "found": true, - "_source": { - "field1": "value1", - "field2": "value2" - }, - "fields": { - "field1": [ "value1" ] - } - }"""), XContentHelper.stripWhitespace(output)); - } - } - { - GetResult getResult = new GetResult("index", "id", UNASSIGNED_SEQ_NO, 0, 1, false, null, null, null); - - try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent, RestApiVersion.V_7)) { - getResult.toXContent(builder, ToXContent.EMPTY_PARAMS); - String output = Strings.toString(builder); - assertEquals(""" - {"_index":"index","_type":"_doc","_id":"id","found":false}""", output); - } - } - } - public void 
testToAndFromXContentEmbedded() throws Exception { XContentType xContentType = randomFrom(XContentType.values()); Tuple tuple = randomGetResult(xContentType); diff --git a/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index 240a677f4cbfd..380b5189b3efc 100644 --- a/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.MatchAllQueryBuilder; @@ -464,18 +463,6 @@ public void testToXContentWithPointInTime() throws IOException { } public void testParseIndicesBoost() throws IOException { - { - String restContent = """ - { "indices_boost": {"foo": 1.0, "bar": 2.0}}"""; - try (XContentParser parser = createParserWithCompatibilityFor(JsonXContent.jsonXContent, restContent, RestApiVersion.V_7)) { - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().parseXContent(parser, true, nf -> false); - assertEquals(2, searchSourceBuilder.indexBoosts().size()); - assertEquals(new SearchSourceBuilder.IndexBoost("foo", 1.0f), searchSourceBuilder.indexBoosts().get(0)); - assertEquals(new SearchSourceBuilder.IndexBoost("bar", 2.0f), searchSourceBuilder.indexBoosts().get(1)); - assertCriticalWarnings("Object format in indices_boost is deprecated, please use array format instead"); - } - } - { String restContent = """ { @@ -554,16 +541,6 @@ public void testNegativeSizeErrors() throws IOException { ); assertThat(ex.getMessage(), 
containsString(Integer.toString(boundedRandomSize))); } - - restContent = "{\"size\" : -1}"; - try (XContentParser parser = createParserWithCompatibilityFor(JsonXContent.jsonXContent, restContent, RestApiVersion.V_7)) { - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().parseXContent(parser, true, nf -> false); - assertEquals(-1, searchSourceBuilder.size()); - } - assertCriticalWarnings( - "Using search size of -1 is deprecated and will be removed in future versions. Instead, don't use the `size` " - + "parameter if you don't want to set it explicitly." - ); } public void testNegativeTerminateAfter() throws IOException { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetLicenseAction.java index 5bf35dea51af6..baec748141903 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetLicenseAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.Maps; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.protocol.xpack.license.GetLicenseRequest; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -26,7 +27,6 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.core.RestApiVersion.V_7; import static org.elasticsearch.core.RestApiVersion.V_8; import static org.elasticsearch.core.RestApiVersion.onOrAfter; import static org.elasticsearch.rest.RestRequest.Method.GET; @@ -57,18 +57,12 @@ public String getName() { * The licenses are sorted by latest issue_date */ @Override + @UpdateForV9(owner = UpdateForV9.Owner.SECURITY) // remove support for accept_enterprise param public RestChannelConsumer prepareRequest(final 
RestRequest request, final NodeClient client) throws IOException { final Map overrideParams = Maps.newMapWithExpectedSize(2); overrideParams.put(License.REST_VIEW_MODE, "true"); overrideParams.put(License.LICENSE_VERSION_MODE, String.valueOf(License.VERSION_CURRENT)); - if (request.getRestApiVersion() == V_7) { - // Hide enterprise licenses by default, there is an opt-in flag to show them - final boolean hideEnterprise = request.paramAsBoolean("accept_enterprise", false) == false; - final int licenseVersion = hideEnterprise ? License.VERSION_CRYPTO_ALGORITHMS : License.VERSION_CURRENT; - overrideParams.put(License.LICENSE_VERSION_MODE, String.valueOf(licenseVersion)); - overrideParams.put(License.XCONTENT_HIDE_ENTERPRISE, String.valueOf(hideEnterprise)); - } // In 7.x, there was an opt-in flag to show "enterprise" licenses. In 8.0 the flag is deprecated and can only be true // TODO Remove this from 9.0 if (request.hasParam("accept_enterprise")) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java index 2f9b125352e9c..b20f1a9d9ce23 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java @@ -14,8 +14,6 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.license.License; import org.elasticsearch.protocol.xpack.license.LicenseStatus; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -206,24 +204,8 @@ public int hashCode() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field("uid", uid); - - if 
(builder.getRestApiVersion() == RestApiVersion.V_7 && params.paramAsBoolean("accept_enterprise", false) == false) { - if (License.LicenseType.ENTERPRISE.getTypeName().equals(type)) { - builder.field("type", License.LicenseType.PLATINUM.getTypeName()); - } else { - builder.field("type", type); - } - - if (License.OperationMode.ENTERPRISE.description().equals(mode)) { - builder.field("mode", License.OperationMode.PLATINUM.description()); - } else { - builder.field("mode", mode); - } - } else { - builder.field("type", type); - builder.field("mode", mode); - } - + builder.field("type", type); + builder.field("mode", mode); builder.field("status", status.label()); if (expiryDate != BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS) { builder.timestampFieldsFromUnixEpochMillis("expiry_date_in_millis", "expiry_date", expiryDate); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java index 381fb30b65e68..ca81509920ca5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java @@ -179,11 +179,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(Job.ID.getPreferredName(), jobId); builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); builder.field(FORCE.getPreferredName(), force); - if (builder.getRestApiVersion() == RestApiVersion.V_7) { - builder.field(DEPRECATED_ALLOW_NO_JOBS_PARAM, allowNoMatch); - } else { - builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch); - } + builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch); builder.endObject(); return builder; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java index 12565ea49e058..12c8696a50626 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java @@ -243,11 +243,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (end != null) { builder.field(END.getPreferredName(), String.valueOf(end)); } - if (builder.getRestApiVersion() == RestApiVersion.V_7) { - builder.field(DEPRECATED_ALLOW_NO_JOBS_PARAM, allowNoMatch); - } else { - builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch); - } + builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch); builder.endObject(); return builder; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java index 2fd00a5ea3983..cb89cfa8cd0e7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java @@ -169,11 +169,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId); builder.field(TIMEOUT.getPreferredName(), stopTimeout.getStringRep()); builder.field(FORCE.getPreferredName(), force); - if (builder.getRestApiVersion() == RestApiVersion.V_7) { - builder.field(DEPRECATED_ALLOW_NO_DATAFEEDS_PARAM, allowNoMatch); - } else { - builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch); - } + builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch); builder.endObject(); return builder; } diff --git 
a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/rest/action/RestFreezeIndexAction.java b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/rest/action/RestFreezeIndexAction.java index 369752ea5ed75..f07d17fee87f5 100644 --- a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/rest/action/RestFreezeIndexAction.java +++ b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/rest/action/RestFreezeIndexAction.java @@ -6,27 +6,20 @@ */ package org.elasticsearch.xpack.frozen.rest.action; -import org.elasticsearch.action.admin.indices.get.GetIndexRequest; -import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.protocol.xpack.frozen.FreezeRequest; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestResponse; -import org.elasticsearch.rest.action.RestBuilderListener; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.frozen.action.FreezeIndexAction; import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.POST; -import static org.elasticsearch.rest.RestStatus.GONE; import static org.elasticsearch.rest.RestUtils.getAckTimeout; import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; @@ -53,22 +46,6 @@ public List routes() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { - if (request.getRestApiVersion() == RestApiVersion.V_7 && request.path().endsWith("/_freeze")) { - // translate to a get 
indices request, so that we'll 404 on non-existent indices - final GetIndexRequest getIndexRequest = new GetIndexRequest(); - getIndexRequest.indices(Strings.splitStringByCommaToArray(request.param("index"))); - getIndexRequest.masterNodeTimeout(getMasterNodeTimeout(request)); - getIndexRequest.indicesOptions(IndicesOptions.fromRequest(request, getIndexRequest.indicesOptions())); - return channel -> client.admin().indices().getIndex(getIndexRequest, new RestBuilderListener<>(channel) { - @Override - public RestResponse buildResponse(GetIndexResponse getIndexResponse, XContentBuilder builder) throws Exception { - builder.close(); - // but if the index *does* exist, we still just respond with 410 -- there's no such thing as _freeze anymore - return new RestResponse(channel, GONE, new UnsupportedOperationException(FREEZE_REMOVED)); - } - }); - } - final var freezeRequest = new FreezeRequest( getMasterNodeTimeout(request), getAckTimeout(request), diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java index 7dd360f8a2d82..973151e73d8dc 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java @@ -12,7 +12,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.TimeValue; import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest.TermBoost; @@ -78,11 +77,6 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - if 
(request.getRestApiVersion() == RestApiVersion.V_7 && request.hasParam("type")) { - deprecationLogger.compatibleCritical("graph_with_types", TYPES_DEPRECATION_MESSAGE); - request.param("type"); - } - GraphExploreRequest graphRequest = new GraphExploreRequest(Strings.splitStringByCommaToArray(request.param("index"))); graphRequest.indicesOptions(IndicesOptions.fromRequest(request, graphRequest.indicesOptions())); graphRequest.routing(request.param("routing")); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestInvalidateApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestInvalidateApiKeyAction.java index 5615e2bc7915c..ec32b19f7e666 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestInvalidateApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestInvalidateApiKeyAction.java @@ -8,9 +8,7 @@ package org.elasticsearch.xpack.security.rest.action.apikey; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -66,7 +64,7 @@ public List routes() { @Override protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { try (XContentParser parser = request.contentParser()) { - final InvalidateApiKeyRequest invalidateApiKeyRequest = getObjectParser(request).parse(parser, null); + final InvalidateApiKeyRequest invalidateApiKeyRequest = PARSER.parse(parser, null); return channel -> client.execute( InvalidateApiKeyAction.INSTANCE, invalidateApiKeyRequest, @@ -86,41 +84,6 @@ public String getName() { return 
"xpack_security_invalidate_api_key"; } - private static ConstructingObjectParser getObjectParser(RestRequest request) { - if (request.getRestApiVersion() == RestApiVersion.V_7) { - final ConstructingObjectParser objectParser = new ConstructingObjectParser<>( - "invalidate_api_key_v7", - a -> { - final String id = (String) a[5]; - @SuppressWarnings("unchecked") - final List ids = (List) a[4]; - if (id != null && ids != null) { - throw new IllegalArgumentException("Must use either [id] or [ids], not both at the same time"); - } - final String[] idsArray; - if (Strings.hasText(id)) { - idsArray = new String[] { id }; - } else if (ids != null) { - idsArray = ids.toArray(String[]::new); - } else { - idsArray = null; - } - return new InvalidateApiKeyRequest( - (String) a[0], - (String) a[1], - (String) a[2], - (a[3] == null) ? false : (Boolean) a[3], - idsArray - ); - } - ); - initObjectParser(objectParser, true); - return objectParser; - } else { - return PARSER; - } - } - private static void initObjectParser(ConstructingObjectParser objectParser, boolean restCompatMode) { objectParser.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("realm_name")); objectParser.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("username")); From a7e62f56a17165f3d861dd80475944f12a9fdbe4 Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Wed, 16 Oct 2024 09:16:43 +0300 Subject: [PATCH 133/449] Removing tech-preview header and updating documentation for retrievers and RRF (#114810) --- docs/reference/search/retriever.asciidoc | 15 +-- docs/reference/search/rrf.asciidoc | 14 +-- .../retrievers-overview.asciidoc | 105 +++++++----------- .../retriever/CompoundRetrieverBuilder.java | 3 + 4 files changed, 49 insertions(+), 88 deletions(-) diff --git a/docs/reference/search/retriever.asciidoc b/docs/reference/search/retriever.asciidoc index 54836ac33762d..9306d83c79136 100644 --- a/docs/reference/search/retriever.asciidoc +++ 
b/docs/reference/search/retriever.asciidoc @@ -1,8 +1,6 @@ [[retriever]] === Retriever -preview::["This functionality is in technical preview and may be changed or removed in a future release. The syntax will likely change before GA. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."] - A retriever is a specification to describe top documents returned from a search. A retriever replaces other elements of the <> that also return top documents such as <> and @@ -75,7 +73,7 @@ Collapses the top documents by a specified key into a single top document per ke ===== Restrictions When a retriever tree contains a compound retriever (a retriever with two or more child -retrievers) *only* the query element is allowed. +retrievers) the <> parameter is not supported. [discrete] [[standard-retriever-example]] @@ -245,12 +243,6 @@ include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=rrf-rank-window-size] include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=rrf-filter] -===== Restrictions - -An RRF retriever is a compound retriever. Child retrievers may not use -elements that are restricted by having a compound retriever as part of -the retriever tree. - [discrete] [[rrf-retriever-example-hybrid]] ==== Example: Hybrid search @@ -413,10 +405,6 @@ Applies the specified <> to the chil If the child retriever already specifies any filters, then this top-level filter is applied in conjuction with the filter defined in the child retriever. -===== Restrictions - -A text similarity re-ranker retriever is a compound retriever. Child retrievers may not use elements that are restricted by having a compound retriever as part of the retriever tree. 
- [discrete] [[text-similarity-reranker-retriever-example-cohere]] ==== Example: Cohere Rerank @@ -555,4 +543,3 @@ at the top-level and instead are only allowed as elements of specific retrievers * <> * <> * <> -* <> diff --git a/docs/reference/search/rrf.asciidoc b/docs/reference/search/rrf.asciidoc index 2a676e5fba336..edd3b67e3de04 100644 --- a/docs/reference/search/rrf.asciidoc +++ b/docs/reference/search/rrf.asciidoc @@ -1,8 +1,6 @@ [[rrf]] === Reciprocal rank fusion -preview::["This functionality is in technical preview and may be changed or removed in a future release. The syntax will likely change before GA. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."] - https://plg.uwaterloo.ca/~gvcormac/cormacksigir09-rrf.pdf[Reciprocal rank fusion (RRF)] is a method for combining multiple result sets with different relevance indicators into a single result set. RRF requires no tuning, and the different relevance indicators do not have to be related to each other to achieve high-quality results. @@ -95,19 +93,21 @@ The `rrf` retriever supports: * <> * <> +* <> +* <> +* <> +* <> The `rrf` retriever does not currently support: * <> -* <> * <> * <> -* <> -* <> -* <> -* <> Using unsupported features as part of a search with an `rrf` retriever results in an exception. ++ +IMPORTANT: It is best to avoid providing a <> as part of the request, as +RRF creates one internally that is shared by all sub-retrievers to ensure consistent results. 
[[rrf-using-multiple-standard-retrievers]] ==== Reciprocal rank fusion using multiple standard retrievers diff --git a/docs/reference/search/search-your-data/retrievers-overview.asciidoc b/docs/reference/search/search-your-data/retrievers-overview.asciidoc index c0fe7471946f3..9df4026fc6445 100644 --- a/docs/reference/search/search-your-data/retrievers-overview.asciidoc +++ b/docs/reference/search/search-your-data/retrievers-overview.asciidoc @@ -1,9 +1,7 @@ [[retrievers-overview]] === Retrievers -preview::[] - -A retriever is an abstraction that was added to the Search API in *8.14.0*. +A retriever is an abstraction that was added to the Search API in *8.14.0* and was made generally available in *8.16.0*. This abstraction enables the configuration of multi-stage retrieval pipelines within a single `_search` call. This simplifies your search application logic, because you no longer need to configure complex searches via multiple {es} calls or implement additional client-side logic to combine results from different queries. @@ -32,8 +30,7 @@ with different relevance indicators into a single result set. An RRF retriever is a *compound retriever*, where its `filter` element is propagated to its sub retrievers. + -Sub retrievers may not use elements that are restricted by having a compound retriever as part of the retriever tree. -See the <> for detailed examples and information on how to use the RRF retriever. + * <>. Used for <>. Requires first creating a `rerank` task using the <>. @@ -72,82 +69,56 @@ When using compound retrievers, only the query element is allowed, which enforce [[retrievers-overview-example]] ==== Example -The following example demonstrates how using retrievers simplify the composability of queries for RRF ranking. +The following example demonstrates the powerful queries that we can now compose, and how retrievers simplify this process. We can use any combination of retrievers we want, propagating the +results of a nested retriever to its parent. 
In this scenario, we'll make use of all 4 (currently) available retrievers, i.e. `standard`, `knn`, `text_similarity_reranker` and `rrf`. +We'll first combine the results of a `semantic` query using the `standard` retriever, and that of a `knn` search on a dense vector field, using `rrf` to get the top 100 results. +Finally, we'll then rerank the top-50 results of `rrf` using the `text_similarity_reranker` [source,js] ---- GET example-index/_search { "retriever": { - "rrf": { - "retrievers": [ - { - "standard": { - "query": { - "sparse_vector": { - "field": "vector.tokens", - "inference_id": "my-elser-endpoint", - "query": "What blue shoes are on sale?" + "text_similarity_reranker": { + "retriever": { + "rrf": { + "retrievers": [ + { + "standard": { + "query": { + "semantic": { + "field": "inference_field", + "query": "state of the art vector database" + } + } + } + }, + { + "knn": { + "query_vector": [ + 0.54, + ..., + 0.245 + ], + "field": "embedding", + "k": 10, + "num_candidates": 15 } } - } - }, - { - "standard": { - "query": { - "match": { - "text": "blue shoes sale" - } - } - } - } - ] - } - } -} ----- -//NOTCONSOLE - -This example demonstrates how you can combine different retrieval strategies into a single `retriever` pipeline. - -Compare to `RRF` with `sub_searches` approach: - -.*Expand* for example -[%collapsible] -============== - -[source,js] ----- -GET example-index/_search -{ - "sub_searches":[ - { - "query":{ - "match":{ - "text":"blue shoes sale" - } - } - }, - { - "query":{ - "sparse_vector": { - "field": "vector.tokens", - "inference_id": "my-elser-endoint", - "query": "What blue shoes are on sale?" 
- } + ], + "rank_window_size": 100, + "rank_constant": 10 } - } - ], - "rank":{ - "rrf":{ - "rank_window_size":50, - "rank_constant":20 + }, + "rank_window_size": 50, + "field": "description", + "inference_text": "what's the best way to create complex pipelines and retrieve documents?", + "inference_id": "my-awesome-rerank-model" } } } ---- //NOTCONSOLE -============== [discrete] [[retrievers-overview-glossary]] diff --git a/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java index 22bef026523e9..e994c55e43452 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java @@ -194,6 +194,9 @@ public ActionRequestValidationException validate( validationException ); } + for (RetrieverSource innerRetriever : innerRetrievers) { + validationException = innerRetriever.retriever().validate(source, validationException, allowPartialSearchResults); + } return validationException; } From 9082e02d752da38d2376a25081cefc858b4b2db2 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 16 Oct 2024 08:08:50 +0100 Subject: [PATCH 134/449] Retry `S3BlobContainer#getRegister` on all exceptions (#114813) S3 register reads are subject to the regular client retry policy, but in practice we see failures of these reads sometimes for errors that are transient but for which the SDK does not retry. This commit adds another layer of retries to these reads. 
Relates ES-9721 --- docs/changelog/114813.yaml | 5 ++ .../snapshot-restore/repository-s3.asciidoc | 5 ++ .../repositories/s3/S3BlobContainer.java | 52 +++++++++---- .../repositories/s3/S3BlobStore.java | 7 ++ .../repositories/s3/S3Repository.java | 10 +++ .../s3/S3BlobContainerRetriesTests.java | 75 ++++++++++++++++++- 6 files changed, 139 insertions(+), 15 deletions(-) create mode 100644 docs/changelog/114813.yaml diff --git a/docs/changelog/114813.yaml b/docs/changelog/114813.yaml new file mode 100644 index 0000000000000..1595b004178c4 --- /dev/null +++ b/docs/changelog/114813.yaml @@ -0,0 +1,5 @@ +pr: 114813 +summary: Retry `S3BlobContainer#getRegister` on all exceptions +area: Snapshot/Restore +type: enhancement +issues: [] diff --git a/docs/reference/snapshot-restore/repository-s3.asciidoc b/docs/reference/snapshot-restore/repository-s3.asciidoc index b48bb5c4f059a..36f311b1cdd97 100644 --- a/docs/reference/snapshot-restore/repository-s3.asciidoc +++ b/docs/reference/snapshot-restore/repository-s3.asciidoc @@ -343,6 +343,11 @@ include::repository-shared-settings.asciidoc[] will disable retries altogether. Note that if retries are enabled in the Azure client, each of these retries comprises that many client-level retries. +`get_register_retry_delay` + + (<>) Sets the time to wait before trying again if an attempt to read a + <> fails. Defaults to `5s`. + NOTE: The option of defining client settings in the repository settings as documented below is considered deprecated, and will be removed in a future version. 
diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java index 49df078453327..902dcb42fc0cb 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java @@ -40,6 +40,7 @@ import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.ThreadedActionListener; import org.elasticsearch.cluster.service.MasterService; +import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobContainer; @@ -910,21 +911,44 @@ public void compareAndExchangeRegister( @Override public void getRegister(OperationPurpose purpose, String key, ActionListener listener) { ActionListener.completeWith(listener, () -> { - final var getObjectRequest = new GetObjectRequest(blobStore.bucket(), buildKey(key)); - S3BlobStore.configureRequestForMetrics(getObjectRequest, blobStore, Operation.GET_OBJECT, purpose); - try ( - var clientReference = blobStore.clientReference(); - var s3Object = SocketAccess.doPrivileged(() -> clientReference.client().getObject(getObjectRequest)); - var stream = s3Object.getObjectContent() - ) { - return OptionalBytesReference.of(getRegisterUsingConsistentRead(stream, keyPath, key)); - } catch (AmazonS3Exception e) { - logger.trace(() -> Strings.format("[%s]: getRegister failed", key), e); - if (e.getStatusCode() == 404) { - return OptionalBytesReference.EMPTY; - } else { - throw e; + final var backoffPolicy = purpose == OperationPurpose.REPOSITORY_ANALYSIS + ? 
BackoffPolicy.noBackoff() + : BackoffPolicy.constantBackoff(blobStore.getGetRegisterRetryDelay(), blobStore.getMaxRetries()); + final var retryDelayIterator = backoffPolicy.iterator(); + + Exception finalException = null; + while (true) { + final var getObjectRequest = new GetObjectRequest(blobStore.bucket(), buildKey(key)); + S3BlobStore.configureRequestForMetrics(getObjectRequest, blobStore, Operation.GET_OBJECT, purpose); + try ( + var clientReference = blobStore.clientReference(); + var s3Object = SocketAccess.doPrivileged(() -> clientReference.client().getObject(getObjectRequest)); + var stream = s3Object.getObjectContent() + ) { + return OptionalBytesReference.of(getRegisterUsingConsistentRead(stream, keyPath, key)); + } catch (Exception attemptException) { + logger.trace(() -> Strings.format("[%s]: getRegister failed", key), attemptException); + if (attemptException instanceof AmazonS3Exception amazonS3Exception && amazonS3Exception.getStatusCode() == 404) { + return OptionalBytesReference.EMPTY; + } else if (finalException == null) { + finalException = attemptException; + } else if (finalException != attemptException) { + finalException.addSuppressed(attemptException); + } } + if (retryDelayIterator.hasNext()) { + try { + // noinspection BusyWait + Thread.sleep(retryDelayIterator.next().millis()); + continue; + } catch (InterruptedException interruptedException) { + Thread.currentThread().interrupt(); + finalException.addSuppressed(interruptedException); + // fall through and throw the exception + } + } + + throw finalException; } }); } diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java index e2efc926f7e3a..5fb3254df819b 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java @@ -95,6 
+95,8 @@ class S3BlobStore implements BlobStore { private final int bulkDeletionBatchSize; private final BackoffPolicy retryThrottledDeleteBackoffPolicy; + private final TimeValue getRegisterRetryDelay; + S3BlobStore( S3Service service, String bucket, @@ -121,6 +123,7 @@ class S3BlobStore implements BlobStore { this.s3RepositoriesMetrics = s3RepositoriesMetrics; this.bulkDeletionBatchSize = S3Repository.DELETION_BATCH_SIZE_SETTING.get(repositoryMetadata.settings()); this.retryThrottledDeleteBackoffPolicy = retryThrottledDeleteBackoffPolicy; + this.getRegisterRetryDelay = S3Repository.GET_REGISTER_RETRY_DELAY.get(repositoryMetadata.settings()); } RequestMetricCollector getMetricCollector(Operation operation, OperationPurpose purpose) { @@ -468,6 +471,10 @@ public StorageClass getStorageClass() { return storageClass; } + public TimeValue getGetRegisterRetryDelay() { + return getRegisterRetryDelay; + } + public static StorageClass initStorageClass(String storageClass) { if ((storageClass == null) || storageClass.equals("")) { return StorageClass.Standard; diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index 0750f6ab59d57..fde15d5d6e6bc 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -223,6 +223,16 @@ class S3Repository extends MeteredBlobStoreRepository { 0 ); + /** + * Time to wait before trying again if getRegister fails. 
+ */ + static final Setting GET_REGISTER_RETRY_DELAY = Setting.timeSetting( + "get_register_retry_delay", + new TimeValue(5, TimeUnit.SECONDS), + new TimeValue(0, TimeUnit.MILLISECONDS), + Setting.Property.Dynamic + ); + private final S3Service service; private final String bucket; diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java index 76d980c222a96..2eb2ed26153f9 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.OperationPurpose; +import org.elasticsearch.common.blobstore.OptionalBytesReference; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.lucene.store.ByteArrayIndexInput; @@ -191,7 +192,10 @@ protected BlobContainer createBlobContainer( final RepositoryMetadata repositoryMetadata = new RepositoryMetadata( "repository", S3Repository.TYPE, - Settings.builder().put(S3Repository.CLIENT_NAME.getKey(), clientName).build() + Settings.builder() + .put(S3Repository.CLIENT_NAME.getKey(), clientName) + .put(S3Repository.GET_REGISTER_RETRY_DELAY.getKey(), TimeValue.ZERO) + .build() ); final S3BlobStore s3BlobStore = new S3BlobStore( @@ -945,6 +949,75 @@ private Set operationPurposesThatRetryOnDelete() { return Set.of(OperationPurpose.SNAPSHOT_DATA, OperationPurpose.SNAPSHOT_METADATA); } + public void testGetRegisterRetries() { + final var maxRetries = between(0, 3); + final BlobContainer blobContainer = createBlobContainer(maxRetries, null, null, null); + + interface 
FailingHandlerFactory { + void addHandler(String blobName, Integer... responseCodes); + } + + final var requestCounter = new AtomicInteger(); + final FailingHandlerFactory countingFailingHandlerFactory = (blobName, responseCodes) -> httpServer.createContext( + downloadStorageEndpoint(blobContainer, blobName), + exchange -> { + requestCounter.incrementAndGet(); + try (exchange) { + exchange.sendResponseHeaders(randomFrom(responseCodes), -1); + } + } + ); + + countingFailingHandlerFactory.addHandler("test_register_no_internal_retries", HttpStatus.SC_UNPROCESSABLE_ENTITY); + countingFailingHandlerFactory.addHandler( + "test_register_internal_retries", + HttpStatus.SC_INTERNAL_SERVER_ERROR, + HttpStatus.SC_SERVICE_UNAVAILABLE + ); + countingFailingHandlerFactory.addHandler("test_register_not_found", HttpStatus.SC_NOT_FOUND); + + { + final var exceptionWithInternalRetries = safeAwaitFailure( + OptionalBytesReference.class, + l -> blobContainer.getRegister(randomRetryingPurpose(), "test_register_internal_retries", l) + ); + assertThat(exceptionWithInternalRetries, instanceOf(AmazonS3Exception.class)); + assertEquals((maxRetries + 1) * (maxRetries + 1), requestCounter.get()); + assertEquals(maxRetries, exceptionWithInternalRetries.getSuppressed().length); + } + + { + requestCounter.set(0); + final var exceptionWithoutInternalRetries = safeAwaitFailure( + OptionalBytesReference.class, + l -> blobContainer.getRegister(randomRetryingPurpose(), "test_register_no_internal_retries", l) + ); + assertThat(exceptionWithoutInternalRetries, instanceOf(AmazonS3Exception.class)); + assertEquals(maxRetries + 1, requestCounter.get()); + assertEquals(maxRetries, exceptionWithoutInternalRetries.getSuppressed().length); + } + + { + requestCounter.set(0); + final var repoAnalysisException = safeAwaitFailure( + OptionalBytesReference.class, + l -> blobContainer.getRegister(OperationPurpose.REPOSITORY_ANALYSIS, "test_register_no_internal_retries", l) + ); + assertThat(repoAnalysisException, 
instanceOf(AmazonS3Exception.class)); + assertEquals(1, requestCounter.get()); + assertEquals(0, repoAnalysisException.getSuppressed().length); + } + + { + requestCounter.set(0); + final OptionalBytesReference expectEmpty = safeAwait( + l -> blobContainer.getRegister(randomPurpose(), "test_register_not_found", l) + ); + assertEquals(OptionalBytesReference.EMPTY, expectEmpty); + assertEquals(1, requestCounter.get()); + } + } + @Override protected Matcher getMaxRetriesMatcher(int maxRetries) { // some attempts make meaningful progress and do not count towards the max retry limit From 80c163e70e2aa047abced0a11db2e23b0bc16df2 Mon Sep 17 00:00:00 2001 From: Felix Barnsteiner Date: Wed, 16 Oct 2024 09:10:09 +0200 Subject: [PATCH 135/449] OTel mappings: avoid metrics to be rejected when attributes are malformed (#114856) --- docs/changelog/114856.yaml | 5 +++ .../metrics-otel@mappings.yaml | 7 ++-- .../metrics-otel@template.yaml | 11 ------ .../src/main/resources/resources.yaml | 2 +- .../rest-api-spec/test/20_metrics_tests.yml | 34 +++++++++++++++++++ 5 files changed, 42 insertions(+), 17 deletions(-) create mode 100644 docs/changelog/114856.yaml diff --git a/docs/changelog/114856.yaml b/docs/changelog/114856.yaml new file mode 100644 index 0000000000000..da7fae3ee18ea --- /dev/null +++ b/docs/changelog/114856.yaml @@ -0,0 +1,5 @@ +pr: 114856 +summary: "OTel mappings: avoid metrics to be rejected when attributes are malformed" +area: Data streams +type: bug +issues: [] diff --git a/x-pack/plugin/otel-data/src/main/resources/component-templates/metrics-otel@mappings.yaml b/x-pack/plugin/otel-data/src/main/resources/component-templates/metrics-otel@mappings.yaml index 37dd93b7f16d9..d2df9b5629704 100644 --- a/x-pack/plugin/otel-data/src/main/resources/component-templates/metrics-otel@mappings.yaml +++ b/x-pack/plugin/otel-data/src/main/resources/component-templates/metrics-otel@mappings.yaml @@ -3,6 +3,8 @@ _meta: description: Default mappings for the OpenTelemetry metrics 
index template installed by x-pack managed: true template: + settings: + index.mapping.ignore_malformed: true mappings: properties: start_timestamp: @@ -19,27 +21,22 @@ template: - histogram: mapping: type: histogram - ignore_malformed: true - counter_long: mapping: type: long time_series_metric: counter - ignore_malformed: true - gauge_long: mapping: type: long time_series_metric: gauge - ignore_malformed: true - counter_double: mapping: type: double time_series_metric: counter - ignore_malformed: true - gauge_double: mapping: type: double time_series_metric: gauge - ignore_malformed: true - summary: mapping: type: aggregate_metric_double diff --git a/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-otel@template.yaml b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-otel@template.yaml index 3b4c3127bb71c..f8489605ad1bf 100644 --- a/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-otel@template.yaml +++ b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-otel@template.yaml @@ -27,14 +27,3 @@ template: data_stream.type: type: constant_keyword value: metrics - dynamic_templates: - - ecs_ip: - mapping: - type: ip - path_match: [ "ip", "*.ip", "*_ip" ] - match_mapping_type: string - - all_strings_to_keywords: - mapping: - ignore_above: 1024 - type: keyword - match_mapping_type: string diff --git a/x-pack/plugin/otel-data/src/main/resources/resources.yaml b/x-pack/plugin/otel-data/src/main/resources/resources.yaml index 52873287696ab..b2d30c7f85cc4 100644 --- a/x-pack/plugin/otel-data/src/main/resources/resources.yaml +++ b/x-pack/plugin/otel-data/src/main/resources/resources.yaml @@ -1,7 +1,7 @@ # "version" holds the version of the templates and ingest pipelines installed # by xpack-plugin otel-data. This must be increased whenever an existing template is # changed, in order for it to be updated on Elasticsearch upgrade. 
-version: 5 +version: 6 component-templates: - otel@mappings diff --git a/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_metrics_tests.yml b/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_metrics_tests.yml index 81aded9cef3be..1aafe3765813b 100644 --- a/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_metrics_tests.yml +++ b/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_metrics_tests.yml @@ -245,3 +245,37 @@ IP dimensions: - match: { .$idx0name.mappings.properties.metrics.properties.summary.type: 'aggregate_metric_double' } - match: { .$idx0name.mappings.properties.metrics.properties.summary_minmax.type: 'aggregate_metric_double' } - match: { .$idx0name.mappings.properties.metrics.properties.histogram.type: 'histogram' } +--- +Empty IP field: + - do: + bulk: + index: metrics-generic.otel-default + refresh: true + body: + - create: {"dynamic_templates":{"metrics.foo.bar":"counter_long"}} + - "@timestamp": 2024-07-18T14:48:33.467654000Z + resource: + attributes: + host.name: localhost + host.ip: "" + metrics: + foo.bar: 42 + - is_false: errors + + - do: + indices.get_data_stream: + name: metrics-generic.otel-default + - set: { data_streams.0.indices.0.index_name: idx0name } + + - do: + indices.get_mapping: + index: $idx0name + expand_wildcards: hidden + - match: { .$idx0name.mappings.properties.resource.properties.attributes.properties.host\.ip.type: 'ip' } + - do: + search: + index: metrics-generic.otel-default + body: + fields: ["*"] + - length: { hits.hits: 1 } + - match: { hits.hits.0._ignored: ["resource.attributes.host.ip"] } From 64e8659845a7d873c9580a997e422e46718e1346 Mon Sep 17 00:00:00 2001 From: Luke Whiting Date: Wed, 16 Oct 2024 08:58:13 +0100 Subject: [PATCH 136/449] #104411 Add warning headers for ingest pipelines containing special characters (#114837) * Add logs and headers For pipeline creation when name is invalid * Fix YAML tests and add YAML 
test for warnings * Update docs/changelog/114837.yaml * Changelog entry * Changelog entry * Update docs/changelog/114837.yaml * Changelog entry --- docs/changelog/114837.yaml | 5 +++ .../10_pipeline_with_mustache_templates.yml | 4 +-- .../test/ingest/20_combine_processors.yml | 12 +++---- ...ation_warnings_on_invalid_names_ingest.yml | 28 +++++++++++++++ .../org/elasticsearch/common/Strings.java | 1 + .../elasticsearch/ingest/IngestService.java | 21 +++++++++++ .../ingest/IngestServiceTests.java | 36 ++++++++++++++++--- 7 files changed, 95 insertions(+), 12 deletions(-) create mode 100644 docs/changelog/114837.yaml create mode 100644 qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_deprecation_warnings_on_invalid_names_ingest.yml diff --git a/docs/changelog/114837.yaml b/docs/changelog/114837.yaml new file mode 100644 index 0000000000000..313d88f92282c --- /dev/null +++ b/docs/changelog/114837.yaml @@ -0,0 +1,5 @@ +pr: 114837 +summary: Add warning headers for ingest pipelines containing special characters +area: Ingest Node +type: bug +issues: [ 104411 ] diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yml b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yml index a8f7e1e5877c8..cc767dfa56597 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yml @@ -214,7 +214,7 @@ "Test rolling up json object arrays": - do: ingest.put_pipeline: - id: "_id" + id: "pipeline-id" body: > { "processors": [ @@ -237,7 +237,7 @@ index: index: test id: "1" - pipeline: "_id" + pipeline: "pipeline-id" body: { values_flat : [], 
values: [ diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/20_combine_processors.yml b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/20_combine_processors.yml index 9a7444c4ffc6c..ef790843b7bfb 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/20_combine_processors.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/20_combine_processors.yml @@ -2,7 +2,7 @@ "Test with date processor": - do: ingest.put_pipeline: - id: "_id" + id: "pipeline-id" body: > { "processors": [ @@ -44,7 +44,7 @@ index: index: test id: "1" - pipeline: "_id" + pipeline: "pipeline-id" body: { log: "89.160.20.128 - - [08/Sep/2014:02:54:42 +0000] \"GET /presentations/logstash-scale11x/images/ahhh___rage_face_by_samusmmx-d5g5zap.png HTTP/1.1\" 200 175208 \"http://mobile.rivals.com/board_posts.asp?SID=880&mid=198829575&fid=2208&tid=198829575&Team=&TeamId=&SiteId=\" \"Mozilla/5.0 (Linux; Android 4.2.2; VS980 4G Build/JDQ39B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.135 Mobile Safari/537.36\"" } @@ -71,7 +71,7 @@ "Test with date processor and ECS-v1": - do: ingest.put_pipeline: - id: "_id" + id: "pipeline-id" body: > { "processors": [ @@ -102,7 +102,7 @@ index: index: test id: "1" - pipeline: "_id" + pipeline: "pipeline-id" body: { log: "89.160.20.128 - - [08/Sep/2014:02:54:42 +0000] \"GET /presentations/logstash-scale11x/images/ahhh___rage_face_by_samusmmx-d5g5zap.png HTTP/1.1\" 200 175208 \"http://mobile.rivals.com/board_posts.asp?SID=880&mid=198829575&fid=2208&tid=198829575&Team=&TeamId=&SiteId=\" \"Mozilla/5.0 (Linux; Android 4.2.2; VS980 4G Build/JDQ39B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.135 Mobile Safari/537.36\"" } @@ -128,7 +128,7 @@ "Test mutate": - do: ingest.put_pipeline: - id: "_id" + id: "pipeline-id" body: > { "processors": [ @@ 
-188,7 +188,7 @@ index: index: test id: "1" - pipeline: "_id" + pipeline: "pipeline-id" body: { "age" : 33, "eyeColor" : "brown", diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_deprecation_warnings_on_invalid_names_ingest.yml b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_deprecation_warnings_on_invalid_names_ingest.yml new file mode 100644 index 0000000000000..64f5ccc4609ac --- /dev/null +++ b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_deprecation_warnings_on_invalid_names_ingest.yml @@ -0,0 +1,28 @@ +--- +"Test invalid name warnings": + - requires: + cluster_features: [ "ingest.pipeline_name_special_chars_warning" ] + test_runner_features: [ "warnings" ] + reason: verifying deprecation warnings from 9.0 onwards for invalid pipeline names + + - do: + cluster.health: + wait_for_status: green + + - do: + ingest.put_pipeline: + id: "Invalid*-pipeline:id" + body: > + { + "description": "_description", + "processors": [ + { + "set" : { + "field" : "field1", + "value": "_value" + } + }] + } + warnings: + - "Invalid pipeline id: Invalid*-pipeline:id" + - match: { acknowledged: true } diff --git a/server/src/main/java/org/elasticsearch/common/Strings.java b/server/src/main/java/org/elasticsearch/common/Strings.java index 4314d2e16799a..82504b5840792 100644 --- a/server/src/main/java/org/elasticsearch/common/Strings.java +++ b/server/src/main/java/org/elasticsearch/common/Strings.java @@ -285,6 +285,7 @@ private static String changeFirstCharacterCase(String str, boolean capitalize) { static final Set INVALID_CHARS = Set.of('\\', '/', '*', '?', '"', '<', '>', '|', ' ', ','); public static final String INVALID_FILENAME_CHARS = INVALID_CHARS.stream() + .sorted() .map(c -> "'" + c + "'") .collect(Collectors.joining(",", "[", "]")); diff --git 
a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index 0f63d2a8dcc1b..99ff44a3cd135 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -39,6 +39,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.MetadataCreateIndexService; import org.elasticsearch.cluster.metadata.MetadataIndexTemplateService; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; @@ -46,6 +47,8 @@ import org.elasticsearch.common.Priority; import org.elasticsearch.common.TriConsumer; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; @@ -55,7 +58,9 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.env.Environment; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.grok.MatcherWatchdog; import org.elasticsearch.index.IndexSettings; @@ -97,6 +102,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.core.UpdateForV10.Owner.DATA_MANAGEMENT; /** * Holder class for several ingest related services. 
@@ -107,7 +113,10 @@ public class IngestService implements ClusterStateApplier, ReportingService taskQueue; private final ClusterService clusterService; @@ -652,12 +661,24 @@ public IngestMetadata execute(IngestMetadata currentIngestMetadata, Collection ingestInfos, String pipelineId, Map pipelineConfig) throws Exception { if (ingestInfos.isEmpty()) { throw new IllegalStateException("Ingest info is empty"); } + try { + MetadataCreateIndexService.validateIndexOrAliasName( + pipelineId, + (pipelineName, error) -> new IllegalArgumentException( + "Pipeline name [" + pipelineName + "] will be disallowed in a future version for the following reason: " + error + ) + ); + } catch (IllegalArgumentException e) { + deprecationLogger.critical(DeprecationCategory.API, "pipeline_name_special_chars", e.getMessage()); + } + Pipeline pipeline = Pipeline.create(pipelineId, pipelineConfig, processorFactories, scriptService); List exceptions = new ArrayList<>(); for (Processor processor : pipeline.flattenAllProcessors()) { diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java index 3adaf398624de..d83fdbd5dd46b 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.support.ActionTestUtils; +import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.client.internal.Client; @@ -48,6 +49,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Strings; +import 
org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; @@ -424,7 +426,7 @@ public void testDelete() { public void testValidateNoIngestInfo() throws Exception { IngestService ingestService = createWithProcessors(); - PutPipelineRequest putRequest = putJsonPipelineRequest("_id", """ + PutPipelineRequest putRequest = putJsonPipelineRequest("pipeline-id", """ {"processors": [{"set" : {"field": "_field", "value": "_value"}}]}"""); var pipelineConfig = XContentHelper.convertToMap(putRequest.getSource(), false, putRequest.getXContentType()).v2(); @@ -965,7 +967,7 @@ public void testGetPipelines() { public void testValidateProcessorTypeOnAllNodes() throws Exception { IngestService ingestService = createWithProcessors(); - PutPipelineRequest putRequest = putJsonPipelineRequest("_id", """ + PutPipelineRequest putRequest = putJsonPipelineRequest("pipeline-id", """ { "processors": [ { @@ -1009,7 +1011,7 @@ public void testValidateConfigurationExceptions() { // ordinary validation issues happen at processor construction time throw newConfigurationException("fail_validation", tag, "no_property_name", "validation failure reason"); })); - PutPipelineRequest putRequest = putJsonPipelineRequest("_id", """ + PutPipelineRequest putRequest = putJsonPipelineRequest("pipeline-id", """ { "processors": [ { @@ -1043,7 +1045,7 @@ public void extraValidation() throws Exception { } }; })); - PutPipelineRequest putRequest = putJsonPipelineRequest("_id", """ + PutPipelineRequest putRequest = putJsonPipelineRequest("pipeline-id", """ { "processors": [ { @@ -1067,6 +1069,32 @@ public void extraValidation() throws Exception { assertEquals("fail_extra_validation", e.getMetadata("es.processor_type").get(0)); } + public void testValidatePipelineName() throws Exception { + IngestService ingestService = createWithProcessors(); + for (Character badChar : List.of('\\', '/', '*', '?', '"', '<', '>', 
'|', ' ', ',')) { + PutPipelineRequest putRequest = new PutPipelineRequest( + TimeValue.timeValueSeconds(10), + AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, + "_id", + new BytesArray(""" + {"description":"test processor","processors":[{"set":{"field":"_field","value":"_value"}}]}"""), + XContentType.JSON + ); + var pipelineConfig = XContentHelper.convertToMap(putRequest.getSource(), false, putRequest.getXContentType()).v2(); + DiscoveryNode node1 = DiscoveryNodeUtils.create("_node_id1", buildNewFakeTransportAddress(), Map.of(), Set.of()); + Map ingestInfos = new HashMap<>(); + ingestInfos.put(node1, new IngestInfo(List.of(new ProcessorInfo("set")))); + final String name = randomAlphaOfLength(5) + badChar + randomAlphaOfLength(5); + ingestService.validatePipeline(ingestInfos, name, pipelineConfig); + assertCriticalWarnings( + "Pipeline name [" + + name + + "] will be disallowed in a future version for the following reason: must not contain the following characters" + + " [' ','\"','*',',','/','<','>','?','\\','|']" + ); + } + } + public void testExecuteIndexPipelineExistsButFailedParsing() { IngestService ingestService = createWithProcessors( Map.of("mock", (factories, tag, description, config) -> new AbstractProcessor("mock", "description") { From 5271b20b64bbf17284d344f4bf1adea42ea78f1e Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Wed, 16 Oct 2024 11:16:50 +0300 Subject: [PATCH 137/449] [Failure store - selector syntax] Replace failureOptions with selector options internally. (#114812) **Introduction** > In order to make adoption of failure stores simpler for all users, we are introducing a new syntactical feature to index expression resolution: The selector. > > Selectors, denoted with a :: followed by a recognized suffix will allow users to specify which component of an index abstraction they would like to operate on within an API call. 
In this case, an index abstraction is a concrete index, data stream, or alias; Any abstraction that can be resolved to a set of indices/shards. We define a component of an index abstraction to be some searchable unit of the index abstraction. > > To start, we will support two components: data and failures. Concrete indices are their own data components, while the data component for index aliases are all of the indices contained therein. For data streams, the data component corresponds to their backing indices. Data stream aliases mirror this, treating all backing indices of the data streams they correspond to as their data component. > > The failure component is only supported by data streams and data stream aliases. The failure component of these abstractions refer to the data streams' failure stores. Indices and index aliases do not have a failure component. For more details and examples see https://github.com/elastic/elasticsearch/pull/113144. All this work has been cherry picked from there. **Purpose of this PR** This PR is replacing the `FailureStoreOptions` with the `SelectorOptions`, there shouldn't be any perceivable change to the user since we kept the query parameter "failure_store" for now. It will be removed in the next PR which will introduce the parsing of the expressions. 
_The current PR is just a refactoring and does not and should not change any existing behaviour._ --- .../datastreams/DataStreamsSnapshotsIT.java | 4 +- .../IngestFailureStoreMetricsIT.java | 4 +- .../lifecycle/DataStreamLifecycleService.java | 4 +- .../rest/RestGetDataStreamsAction.java | 2 +- .../DataStreamLifecycleServiceTests.java | 20 +- .../org/elasticsearch/TransportVersions.java | 1 + .../admin/indices/get/GetIndexRequest.java | 4 +- .../indices/rollover/RolloverRequest.java | 6 +- .../rollover/TransportRolloverAction.java | 4 +- .../action/bulk/BulkOperation.java | 2 +- .../action/bulk/TransportBulkAction.java | 2 +- .../datastreams/DataStreamsStatsAction.java | 4 +- .../support/IndexComponentSelector.java | 49 +++ .../action/support/IndicesOptions.java | 311 +++++++++++------- .../indices/RestRolloverIndexAction.java | 2 +- .../indices/get/GetIndexRequestTests.java | 5 +- .../MetadataRolloverServiceTests.java | 8 +- .../rollover/RolloverRequestTests.java | 17 +- .../action/support/IndicesOptionsTests.java | 27 +- .../IndexNameExpressionResolverTests.java | 19 +- .../xpack/core/ilm/RolloverStep.java | 2 +- .../core/ilm/WaitForRolloverReadyStep.java | 2 +- .../ilm/WaitForRolloverReadyStepTests.java | 4 +- 23 files changed, 304 insertions(+), 199 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/action/support/IndexComponentSelector.java diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java index 638e4d813a79a..212b869c6d933 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java @@ -138,9 +138,7 @@ public void setup() throws Exception { // Initialize the failure store. 
RolloverRequest rolloverRequest = new RolloverRequest("with-fs", null); rolloverRequest.setIndicesOptions( - IndicesOptions.builder(rolloverRequest.indicesOptions()) - .failureStoreOptions(b -> b.includeRegularIndices(false).includeFailureIndices(true)) - .build() + IndicesOptions.builder(rolloverRequest.indicesOptions()).selectorOptions(IndicesOptions.SelectorOptions.ONLY_FAILURES).build() ); response = client.execute(RolloverAction.INSTANCE, rolloverRequest).get(); assertTrue(response.isAcknowledged()); diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/IngestFailureStoreMetricsIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/IngestFailureStoreMetricsIT.java index b5d06dc33e035..679ad5b000c8f 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/IngestFailureStoreMetricsIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/IngestFailureStoreMetricsIT.java @@ -195,9 +195,7 @@ public void testRejectionFromFailureStore() throws IOException { // Initialize failure store. 
var rolloverRequest = new RolloverRequest(dataStream, null); rolloverRequest.setIndicesOptions( - IndicesOptions.builder(rolloverRequest.indicesOptions()) - .failureStoreOptions(opts -> opts.includeFailureIndices(true).includeRegularIndices(false)) - .build() + IndicesOptions.builder(rolloverRequest.indicesOptions()).selectorOptions(IndicesOptions.SelectorOptions.ONLY_FAILURES).build() ); var rolloverResponse = client().execute(RolloverAction.INSTANCE, rolloverRequest).actionGet(); var failureStoreIndex = rolloverResponse.getNewIndex(); diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java index 878583de4861f..7bbf7137d290e 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java @@ -946,7 +946,7 @@ private Set maybeExecuteForceMerge(ClusterState state, List indice UpdateSettingsRequest updateMergePolicySettingsRequest = new UpdateSettingsRequest(); updateMergePolicySettingsRequest.indicesOptions( IndicesOptions.builder(updateMergePolicySettingsRequest.indicesOptions()) - .failureStoreOptions(new IndicesOptions.FailureStoreOptions(true, true)) + .selectorOptions(IndicesOptions.SelectorOptions.DATA_AND_FAILURE) .build() ); updateMergePolicySettingsRequest.indices(indexName); @@ -1409,7 +1409,7 @@ static RolloverRequest getDefaultRolloverRequest( if (rolloverFailureStore) { rolloverRequest.setIndicesOptions( IndicesOptions.builder(rolloverRequest.indicesOptions()) - .failureStoreOptions(opts -> opts.includeFailureIndices(true).includeRegularIndices(false)) + .selectorOptions(IndicesOptions.SelectorOptions.ONLY_FAILURES) .build() ); } diff --git 
a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/rest/RestGetDataStreamsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/rest/RestGetDataStreamsAction.java index 3456f4b679474..b61e38297397d 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/rest/RestGetDataStreamsAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/rest/RestGetDataStreamsAction.java @@ -43,7 +43,7 @@ public class RestGetDataStreamsAction extends BaseRestHandler { IndicesOptions.GatekeeperOptions.IGNORE_THROTTLED, "verbose" ), - DataStream.isFailureStoreFeatureFlagEnabled() ? Set.of(IndicesOptions.FailureStoreOptions.FAILURE_STORE) : Set.of() + DataStream.isFailureStoreFeatureFlagEnabled() ? Set.of(IndicesOptions.FAILURE_STORE_QUERY_PARAM) : Set.of() ) ); diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java index 0d5ce54c44b56..d6bf80798764d 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java @@ -225,17 +225,11 @@ public void testOperationsExecutedOnce() { assertThat(clientSeenRequests.get(0), instanceOf(RolloverRequest.class)); RolloverRequest rolloverBackingIndexRequest = (RolloverRequest) clientSeenRequests.get(0); assertThat(rolloverBackingIndexRequest.getRolloverTarget(), is(dataStreamName)); - assertThat( - rolloverBackingIndexRequest.indicesOptions().failureStoreOptions(), - equalTo(new IndicesOptions.FailureStoreOptions(true, false)) - ); + assertThat(rolloverBackingIndexRequest.indicesOptions().selectorOptions(), equalTo(IndicesOptions.SelectorOptions.ONLY_DATA)); assertThat(clientSeenRequests.get(1), 
instanceOf(RolloverRequest.class)); RolloverRequest rolloverFailureIndexRequest = (RolloverRequest) clientSeenRequests.get(1); assertThat(rolloverFailureIndexRequest.getRolloverTarget(), is(dataStreamName)); - assertThat( - rolloverFailureIndexRequest.indicesOptions().failureStoreOptions(), - equalTo(new IndicesOptions.FailureStoreOptions(false, true)) - ); + assertThat(rolloverFailureIndexRequest.indicesOptions().selectorOptions(), equalTo(IndicesOptions.SelectorOptions.ONLY_FAILURES)); List deleteRequests = clientSeenRequests.subList(2, 5) .stream() .map(transportRequest -> (DeleteIndexRequest) transportRequest) @@ -1552,17 +1546,11 @@ public void testFailureStoreIsManagedEvenWhenDisabled() { assertThat(clientSeenRequests.get(0), instanceOf(RolloverRequest.class)); RolloverRequest rolloverBackingIndexRequest = (RolloverRequest) clientSeenRequests.get(0); assertThat(rolloverBackingIndexRequest.getRolloverTarget(), is(dataStreamName)); - assertThat( - rolloverBackingIndexRequest.indicesOptions().failureStoreOptions(), - equalTo(new IndicesOptions.FailureStoreOptions(true, false)) - ); + assertThat(rolloverBackingIndexRequest.indicesOptions().selectorOptions(), equalTo(IndicesOptions.SelectorOptions.ONLY_DATA)); assertThat(clientSeenRequests.get(1), instanceOf(RolloverRequest.class)); RolloverRequest rolloverFailureIndexRequest = (RolloverRequest) clientSeenRequests.get(1); assertThat(rolloverFailureIndexRequest.getRolloverTarget(), is(dataStreamName)); - assertThat( - rolloverFailureIndexRequest.indicesOptions().failureStoreOptions(), - equalTo(new IndicesOptions.FailureStoreOptions(false, true)) - ); + assertThat(rolloverFailureIndexRequest.indicesOptions().selectorOptions(), equalTo(IndicesOptions.SelectorOptions.ONLY_FAILURES)); assertThat( ((DeleteIndexRequest) clientSeenRequests.get(2)).indices()[0], is(dataStream.getFailureIndices().getIndices().get(0).getName()) diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java 
b/server/src/main/java/org/elasticsearch/TransportVersions.java index 4038d5a224850..f89c5a65693f2 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -245,6 +245,7 @@ static TransportVersion def(int id) { public static final TransportVersion QUERY_RULE_TEST_API = def(8_769_00_0); public static final TransportVersion ESQL_PER_AGGREGATE_FILTER = def(8_770_00_0); public static final TransportVersion ML_INFERENCE_ATTACH_TO_EXISTSING_DEPLOYMENT = def(8_771_00_0); + public static final TransportVersion CONVERT_FAILURE_STORE_OPTIONS_TO_SELECTOR_OPTIONS_INTERNALLY = def(8_772_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexRequest.java index 7ff7066a15fc2..4c5ee08beb192 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexRequest.java @@ -98,9 +98,7 @@ public GetIndexRequest() { super( DataStream.isFailureStoreFeatureFlagEnabled() ? 
IndicesOptions.builder(IndicesOptions.strictExpandOpen()) - .failureStoreOptions( - IndicesOptions.FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(true) - ) + .selectorOptions(IndicesOptions.SelectorOptions.DATA_AND_FAILURE) .build() : IndicesOptions.strictExpandOpen() ); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java index 1ef9194b51203..fefc41317591b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java @@ -138,8 +138,8 @@ public ActionRequestValidationException validate() { ); } - var failureStoreOptions = indicesOptions.failureStoreOptions(); - if (failureStoreOptions.includeRegularIndices() && failureStoreOptions.includeFailureIndices()) { + var selectors = indicesOptions.selectorOptions().defaultSelectors(); + if (selectors.size() > 1) { validationException = addValidationError( "rollover cannot be applied to both regular and failure indices at the same time", validationException @@ -179,7 +179,7 @@ public IndicesOptions indicesOptions() { * @return true of the rollover request targets the failure store, false otherwise. 
*/ public boolean targetsFailureStore() { - return DataStream.isFailureStoreFeatureFlagEnabled() && indicesOptions.failureStoreOptions().includeFailureIndices(); + return DataStream.isFailureStoreFeatureFlagEnabled() && indicesOptions.includeFailureIndices(); } public void setIndicesOptions(IndicesOptions indicesOptions) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java index d65a66dcc47fb..c5c874f9bcddf 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java @@ -150,7 +150,7 @@ protected ClusterBlockException checkBlock(RolloverRequest request, ClusterState .matchClosed(request.indicesOptions().expandWildcardsClosed()) .build(), IndicesOptions.GatekeeperOptions.DEFAULT, - request.indicesOptions().failureStoreOptions() + request.indicesOptions().selectorOptions() ); return state.blocks() @@ -247,7 +247,7 @@ protected void masterOperation( IndicesOptions.ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS, IndicesOptions.WildcardOptions.builder().matchClosed(true).allowEmptyExpressions(false).build(), IndicesOptions.GatekeeperOptions.DEFAULT, - rolloverRequest.indicesOptions().failureStoreOptions() + rolloverRequest.indicesOptions().selectorOptions() ); IndicesStatsRequest statsRequest = new IndicesStatsRequest().indices(rolloverRequest.getRolloverTarget()) .clear() diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java index f04d07fb690c4..007f274d7f493 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java @@ -212,7 +212,7 @@ private void 
rollOverFailureStores(Runnable runnable) { RolloverRequest rolloverRequest = new RolloverRequest(dataStream, null); rolloverRequest.setIndicesOptions( IndicesOptions.builder(rolloverRequest.indicesOptions()) - .failureStoreOptions(new IndicesOptions.FailureStoreOptions(false, true)) + .selectorOptions(IndicesOptions.SelectorOptions.ONLY_FAILURES) .build() ); // We are executing a lazy rollover because it is an action specialised for this situation, when we want an diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 61adf41a9a276..a3a73415ec4f6 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -418,7 +418,7 @@ private void rollOverDataStreams( if (targetFailureStore) { rolloverRequest.setIndicesOptions( IndicesOptions.builder(rolloverRequest.indicesOptions()) - .failureStoreOptions(new IndicesOptions.FailureStoreOptions(false, true)) + .selectorOptions(IndicesOptions.SelectorOptions.ONLY_FAILURES) .build() ); } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsStatsAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsStatsAction.java index fbb084e8cd121..1c30303915c8e 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsStatsAction.java @@ -61,9 +61,7 @@ public Request() { .allowFailureIndices(true) .build() ) - .failureStoreOptions( - IndicesOptions.FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(true).build() - ) + .selectorOptions(IndicesOptions.SelectorOptions.DATA_AND_FAILURE) .build() ); } diff --git a/server/src/main/java/org/elasticsearch/action/support/IndexComponentSelector.java 
b/server/src/main/java/org/elasticsearch/action/support/IndexComponentSelector.java new file mode 100644 index 0000000000000..65b48db8f5cf3 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/support/IndexComponentSelector.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.action.support; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +/** + * We define as index components the two different sets of indices a data stream could consist of: + * - DATA: represents the backing indices + * - FAILURES: represent the failing indices + * Note: An index is its own DATA component, but it cannot have a FAILURE component. 
+ */ +public enum IndexComponentSelector { + DATA("data"), + FAILURES("failures"); + + private final String key; + + IndexComponentSelector(String key) { + this.key = key; + } + + public String getKey() { + return key; + } + + private static final Map REGISTRY; + + static { + Map registry = new HashMap<>(IndexComponentSelector.values().length); + for (IndexComponentSelector value : IndexComponentSelector.values()) { + registry.put(value.getKey(), value); + } + REGISTRY = Collections.unmodifiableMap(registry); + } + + public static IndexComponentSelector getByKey(String key) { + return REGISTRY.get(key); + } +} diff --git a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java index d3ea063247704..22d019f80837d 100644 --- a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java +++ b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java @@ -31,7 +31,6 @@ import java.util.List; import java.util.Locale; import java.util.Map; -import java.util.function.Consumer; import java.util.stream.Collectors; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; @@ -48,24 +47,36 @@ * @param gatekeeperOptions, applies to all the resolved indices and defines if throttled will be included and if certain type of * aliases or indices are allowed, or they will throw an error. It acts as a gatekeeper when an action * does not support certain options. - * @param failureStoreOptions, applies to all indices already matched and controls the type of indices that will be returned. Currently, - * there are two types, data stream failure indices (only certain data streams have them) and data stream - * backing indices or stand-alone indices. 
+ * @param selectorOptions, applies to all resolved expressions, and it specifies the index component that should be included, if there + * is no index component defined on the expression level. */ public record IndicesOptions( ConcreteTargetOptions concreteTargetOptions, WildcardOptions wildcardOptions, GatekeeperOptions gatekeeperOptions, - FailureStoreOptions failureStoreOptions + SelectorOptions selectorOptions ) implements ToXContentFragment { - public IndicesOptions( - ConcreteTargetOptions concreteTargetOptions, - WildcardOptions wildcardOptions, - GatekeeperOptions gatekeeperOptions - ) { - this(concreteTargetOptions, wildcardOptions, gatekeeperOptions, FailureStoreOptions.DEFAULT); - } + /** + * @deprecated this query param will be replaced by the selector `::` on the expression level + */ + @Deprecated + public static final String FAILURE_STORE_QUERY_PARAM = "failure_store"; + /** + * @deprecated this value will be replaced by the selector `::*` on the expression level + */ + @Deprecated + public static final String INCLUDE_ALL = "include"; + /** + * @deprecated this value will be replaced by the selector `::data` on the expression level + */ + @Deprecated + public static final String INCLUDE_ONLY_REGULAR_INDICES = "exclude"; + /** + * @deprecated this value will be replaced by the selector `::failures` on the expression level + */ + @Deprecated + public static final String INCLUDE_ONLY_FAILURE_INDICES = "only"; public static IndicesOptions.Builder builder() { return new Builder(); @@ -310,7 +321,7 @@ public static Builder builder(WildcardOptions wildcardOptions) { * - The "allow*" flags, which purpose is to enable actions to define certain conditions that need to apply on the concrete indices * they accept. For example, single-index actions will set allowAliasToMultipleIndices to false, while search will not accept a * closed index etc. These options are not configurable by the end-user. 
- * - The ignoreThrottled flag, which is a depricared flag that will filter out frozen indices. + * - The ignoreThrottled flag, which is a deprecated flag that will filter out frozen indices. * @param allowAliasToMultipleIndices, allow aliases to multiple indices, true by default. * @param allowClosedIndices, allow closed indices, true by default. * @param allowFailureIndices, allow failure indices in the response, true by default @@ -408,87 +419,53 @@ public static Builder builder(GatekeeperOptions gatekeeperOptions) { } /** - * Applies to all indices already matched and controls the type of indices that will be returned. There are two types, data stream - * failure indices (only certain data streams have them) and data stream backing indices or stand-alone indices. - * @param includeRegularIndices, when true regular or data stream backing indices will be retrieved. - * @param includeFailureIndices, when true data stream failure indices will be included. + * Defines which selectors should be used by default for an index operation in the event that no selectors are provided. 
*/ - public record FailureStoreOptions(boolean includeRegularIndices, boolean includeFailureIndices) - implements - Writeable, - ToXContentFragment { + public record SelectorOptions(EnumSet defaultSelectors) implements Writeable { - public static final String FAILURE_STORE = "failure_store"; - public static final String INCLUDE_ALL = "include"; - public static final String INCLUDE_ONLY_REGULAR_INDICES = "exclude"; - public static final String INCLUDE_ONLY_FAILURE_INDICES = "only"; - - public static final FailureStoreOptions DEFAULT = new FailureStoreOptions(true, false); - - public static FailureStoreOptions read(StreamInput in) throws IOException { - return new FailureStoreOptions(in.readBoolean(), in.readBoolean()); - } - - public static FailureStoreOptions parseParameters(Object failureStoreValue, FailureStoreOptions defaultOptions) { - if (failureStoreValue == null) { - return defaultOptions; - } - FailureStoreOptions.Builder builder = defaultOptions == null - ? new FailureStoreOptions.Builder() - : new FailureStoreOptions.Builder(defaultOptions); - return switch (failureStoreValue.toString()) { - case INCLUDE_ALL -> builder.includeRegularIndices(true).includeFailureIndices(true).build(); - case INCLUDE_ONLY_REGULAR_INDICES -> builder.includeRegularIndices(true).includeFailureIndices(false).build(); - case INCLUDE_ONLY_FAILURE_INDICES -> builder.includeRegularIndices(false).includeFailureIndices(true).build(); - default -> throw new IllegalArgumentException("No valid " + FAILURE_STORE + " value [" + failureStoreValue + "]"); - }; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.field(FAILURE_STORE, displayValue()); - } + public static final SelectorOptions DATA_AND_FAILURE = new SelectorOptions( + EnumSet.of(IndexComponentSelector.DATA, IndexComponentSelector.FAILURES) + ); + public static final SelectorOptions ONLY_DATA = new 
SelectorOptions(EnumSet.of(IndexComponentSelector.DATA)); + public static final SelectorOptions ONLY_FAILURES = new SelectorOptions(EnumSet.of(IndexComponentSelector.FAILURES)); + /** + * Default instance. Uses
{@code ::data}
    as the default selector if none are present in an index expression. + */ + public static final SelectorOptions DEFAULT = ONLY_DATA; - public String displayValue() { - if (includeRegularIndices && includeFailureIndices) { - return INCLUDE_ALL; - } else if (includeRegularIndices) { - return INCLUDE_ONLY_REGULAR_INDICES; - } - return INCLUDE_ONLY_FAILURE_INDICES; + public static SelectorOptions read(StreamInput in) throws IOException { + return new SelectorOptions(in.readEnumSet(IndexComponentSelector.class)); } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeBoolean(includeRegularIndices); - out.writeBoolean(includeFailureIndices); + out.writeEnumSet(defaultSelectors); } public static class Builder { - private boolean includeRegularIndices; - private boolean includeFailureIndices; + private EnumSet defaultSelectors; public Builder() { this(DEFAULT); } - Builder(FailureStoreOptions options) { - includeRegularIndices = options.includeRegularIndices; - includeFailureIndices = options.includeFailureIndices; + Builder(SelectorOptions options) { + defaultSelectors = EnumSet.copyOf(options.defaultSelectors); } - public Builder includeRegularIndices(boolean includeRegularIndices) { - this.includeRegularIndices = includeRegularIndices; + public Builder setDefaultSelectors(IndexComponentSelector first, IndexComponentSelector... 
remaining) { + defaultSelectors = EnumSet.of(first, remaining); return this; } - public Builder includeFailureIndices(boolean includeFailureIndices) { - this.includeFailureIndices = includeFailureIndices; + public Builder setDefaultSelectors(EnumSet defaultSelectors) { + this.defaultSelectors = EnumSet.copyOf(defaultSelectors); return this; } - public FailureStoreOptions build() { - return new FailureStoreOptions(includeRegularIndices, includeFailureIndices); + public SelectorOptions build() { + assert defaultSelectors.isEmpty() != true : "Default selectors cannot be an empty set"; + return new SelectorOptions(EnumSet.copyOf(defaultSelectors)); } } @@ -496,8 +473,8 @@ public static Builder builder() { return new Builder(); } - public static Builder builder(FailureStoreOptions failureStoreOptions) { - return new Builder(failureStoreOptions); + public static Builder builder(SelectorOptions selectorOptions) { + return new Builder(selectorOptions); } } @@ -550,7 +527,7 @@ private enum Option { ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS, WildcardOptions.DEFAULT, GatekeeperOptions.DEFAULT, - FailureStoreOptions.DEFAULT + SelectorOptions.DEFAULT ); public static final IndicesOptions STRICT_EXPAND_OPEN = IndicesOptions.builder() @@ -570,7 +547,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .failureStoreOptions(FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(false)) + .selectorOptions(SelectorOptions.ONLY_DATA) .build(); public static final IndicesOptions STRICT_EXPAND_OPEN_FAILURE_STORE = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) @@ -589,7 +566,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .failureStoreOptions(FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(true)) + .selectorOptions(SelectorOptions.DATA_AND_FAILURE) .build(); public static final IndicesOptions 
LENIENT_EXPAND_OPEN = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS) @@ -608,7 +585,25 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .failureStoreOptions(FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(false)) + .selectorOptions(SelectorOptions.ONLY_DATA) + .build(); + public static final IndicesOptions LENIENT_EXPAND_OPEN_NO_SELECTORS = IndicesOptions.builder() + .concreteTargetOptions(ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS) + .wildcardOptions( + WildcardOptions.builder() + .matchOpen(true) + .matchClosed(false) + .includeHidden(false) + .allowEmptyExpressions(true) + .resolveAliases(true) + ) + .gatekeeperOptions( + GatekeeperOptions.builder() + .allowAliasToMultipleIndices(true) + .allowClosedIndices(true) + .allowFailureIndices(false) + .ignoreThrottled(false) + ) .build(); public static final IndicesOptions LENIENT_EXPAND_OPEN_HIDDEN = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS) @@ -627,7 +622,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .failureStoreOptions(FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(false)) + .selectorOptions(SelectorOptions.ONLY_DATA) .build(); public static final IndicesOptions LENIENT_EXPAND_OPEN_CLOSED = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS) @@ -646,7 +641,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .failureStoreOptions(FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(false)) + .selectorOptions(SelectorOptions.ONLY_DATA) .build(); public static final IndicesOptions LENIENT_EXPAND_OPEN_CLOSED_HIDDEN = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS) @@ -660,7 +655,20 @@ private enum Option { .allowFailureIndices(true) 
.ignoreThrottled(false) ) - .failureStoreOptions(FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(false)) + .selectorOptions(SelectorOptions.ONLY_DATA) + .build(); + public static final IndicesOptions LENIENT_EXPAND_OPEN_CLOSED_HIDDEN_NO_SELECTOR = IndicesOptions.builder() + .concreteTargetOptions(ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS) + .wildcardOptions( + WildcardOptions.builder().matchOpen(true).matchClosed(true).includeHidden(true).allowEmptyExpressions(true).resolveAliases(true) + ) + .gatekeeperOptions( + GatekeeperOptions.builder() + .allowAliasToMultipleIndices(true) + .allowClosedIndices(true) + .allowFailureIndices(false) + .ignoreThrottled(false) + ) .build(); public static final IndicesOptions STRICT_EXPAND_OPEN_CLOSED = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) @@ -679,7 +687,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .failureStoreOptions(FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(false)) + .selectorOptions(SelectorOptions.ONLY_DATA) .build(); public static final IndicesOptions STRICT_EXPAND_OPEN_CLOSED_HIDDEN = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) @@ -693,7 +701,20 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .failureStoreOptions(FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(false)) + .selectorOptions(SelectorOptions.ONLY_DATA) + .build(); + public static final IndicesOptions STRICT_EXPAND_OPEN_CLOSED_HIDDEN_NO_SELECTORS = IndicesOptions.builder() + .concreteTargetOptions(ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) + .wildcardOptions( + WildcardOptions.builder().matchOpen(true).matchClosed(true).includeHidden(true).allowEmptyExpressions(true).resolveAliases(true) + ) + .gatekeeperOptions( + GatekeeperOptions.builder() + 
.allowAliasToMultipleIndices(true) + .allowClosedIndices(true) + .allowFailureIndices(false) + .ignoreThrottled(false) + ) .build(); public static final IndicesOptions LENIENT_EXPAND_OPEN_CLOSED_FAILURE_STORE = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS) @@ -712,7 +733,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .failureStoreOptions(FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(true)) + .selectorOptions(SelectorOptions.DATA_AND_FAILURE) .build(); public static final IndicesOptions STRICT_EXPAND_OPEN_CLOSED_HIDDEN_FAILURE_STORE = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) @@ -726,7 +747,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .failureStoreOptions(FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(true)) + .selectorOptions(SelectorOptions.DATA_AND_FAILURE) .build(); public static final IndicesOptions STRICT_EXPAND_OPEN_CLOSED_FAILURE_STORE = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) @@ -745,7 +766,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .failureStoreOptions(FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(true)) + .selectorOptions(SelectorOptions.DATA_AND_FAILURE) .build(); public static final IndicesOptions STRICT_EXPAND_OPEN_FORBID_CLOSED = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) @@ -764,7 +785,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .failureStoreOptions(FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(false)) + .selectorOptions(SelectorOptions.ONLY_DATA) .build(); public static final IndicesOptions STRICT_EXPAND_OPEN_HIDDEN_FORBID_CLOSED = 
IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) @@ -783,7 +804,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .failureStoreOptions(FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(false)) + .selectorOptions(SelectorOptions.ONLY_DATA) .build(); public static final IndicesOptions STRICT_EXPAND_OPEN_FORBID_CLOSED_IGNORE_THROTTLED = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) @@ -802,7 +823,7 @@ private enum Option { .allowFailureIndices(true) .allowAliasToMultipleIndices(true) ) - .failureStoreOptions(FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(false)) + .selectorOptions(SelectorOptions.ONLY_DATA) .build(); public static final IndicesOptions STRICT_SINGLE_INDEX_NO_EXPAND_FORBID_CLOSED = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) @@ -821,7 +842,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .failureStoreOptions(FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(false)) + .selectorOptions(SelectorOptions.ONLY_DATA) .build(); public static final IndicesOptions STRICT_NO_EXPAND_FORBID_CLOSED = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) @@ -840,7 +861,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .failureStoreOptions(FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(false)) + .selectorOptions(SelectorOptions.ONLY_DATA) .build(); /** @@ -929,14 +950,14 @@ public boolean ignoreThrottled() { * @return whether regular indices (stand-alone or backing indices) will be included in the response */ public boolean includeRegularIndices() { - return failureStoreOptions().includeRegularIndices(); + return 
selectorOptions().defaultSelectors().contains(IndexComponentSelector.DATA); } /** * @return whether failure indices (only supported by certain data streams) will be included in the response */ public boolean includeFailureIndices() { - return failureStoreOptions().includeFailureIndices(); + return selectorOptions().defaultSelectors().contains(IndexComponentSelector.FAILURES); } public void writeIndicesOptions(StreamOutput out) throws IOException { @@ -977,8 +998,13 @@ public void writeIndicesOptions(StreamOutput out) throws IOException { states.add(WildcardStates.HIDDEN); } out.writeEnumSet(states); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0)) { - failureStoreOptions.writeTo(out); + if (out.getTransportVersion() + .between(TransportVersions.V_8_14_0, TransportVersions.CONVERT_FAILURE_STORE_OPTIONS_TO_SELECTOR_OPTIONS_INTERNALLY)) { + out.writeBoolean(includeRegularIndices()); + out.writeBoolean(includeFailureIndices()); + } + if (out.getTransportVersion().onOrAfter(TransportVersions.CONVERT_FAILURE_STORE_OPTIONS_TO_SELECTOR_OPTIONS_INTERNALLY)) { + out.writeEnumSet(selectorOptions.defaultSelectors); } } @@ -999,16 +1025,30 @@ public static IndicesOptions readIndicesOptions(StreamInput in) throws IOExcepti .allowFailureIndices(allowFailureIndices) .ignoreThrottled(options.contains(Option.IGNORE_THROTTLED)) .build(); - FailureStoreOptions failureStoreOptions = in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) - ? FailureStoreOptions.read(in) - : FailureStoreOptions.DEFAULT; + SelectorOptions selectorOptions = SelectorOptions.DEFAULT; + if (in.getTransportVersion() + .between(TransportVersions.V_8_14_0, TransportVersions.CONVERT_FAILURE_STORE_OPTIONS_TO_SELECTOR_OPTIONS_INTERNALLY)) { + // Reading from an older node, which will be sending two booleans that we must read out and ignore. 
+ var includeData = in.readBoolean(); + var includeFailures = in.readBoolean(); + if (includeData && includeFailures) { + selectorOptions = SelectorOptions.DATA_AND_FAILURE; + } else if (includeData) { + selectorOptions = SelectorOptions.ONLY_DATA; + } else { + selectorOptions = SelectorOptions.ONLY_FAILURES; + } + } + if (in.getTransportVersion().onOrAfter(TransportVersions.CONVERT_FAILURE_STORE_OPTIONS_TO_SELECTOR_OPTIONS_INTERNALLY)) { + selectorOptions = new SelectorOptions(in.readEnumSet(IndexComponentSelector.class)); + } return new IndicesOptions( options.contains(Option.ALLOW_UNAVAILABLE_CONCRETE_TARGETS) ? ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS : ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS, wildcardOptions, gatekeeperOptions, - failureStoreOptions + selectorOptions ); } @@ -1016,7 +1056,7 @@ public static class Builder { private ConcreteTargetOptions concreteTargetOptions; private WildcardOptions wildcardOptions; private GatekeeperOptions gatekeeperOptions; - private FailureStoreOptions failureStoreOptions; + private SelectorOptions selectorOptions; Builder() { this(DEFAULT); @@ -1026,7 +1066,7 @@ public static class Builder { concreteTargetOptions = indicesOptions.concreteTargetOptions; wildcardOptions = indicesOptions.wildcardOptions; gatekeeperOptions = indicesOptions.gatekeeperOptions; - failureStoreOptions = indicesOptions.failureStoreOptions; + selectorOptions = indicesOptions.selectorOptions; } public Builder concreteTargetOptions(ConcreteTargetOptions concreteTargetOptions) { @@ -1054,25 +1094,18 @@ public Builder gatekeeperOptions(GatekeeperOptions.Builder generalOptions) { return this; } - public Builder failureStoreOptions(FailureStoreOptions failureStoreOptions) { - this.failureStoreOptions = failureStoreOptions; + public Builder selectorOptions(SelectorOptions selectorOptions) { + this.selectorOptions = selectorOptions; return this; } - public Builder failureStoreOptions(FailureStoreOptions.Builder failureStoreOptions) { - 
this.failureStoreOptions = failureStoreOptions.build(); - return this; - } - - public Builder failureStoreOptions(Consumer failureStoreOptionsConfig) { - FailureStoreOptions.Builder failureStoreOptionsBuilder = FailureStoreOptions.builder(failureStoreOptions); - failureStoreOptionsConfig.accept(failureStoreOptionsBuilder); - this.failureStoreOptions = failureStoreOptionsBuilder.build(); + public Builder selectorOptions(SelectorOptions.Builder selectorOptions) { + this.selectorOptions = selectorOptions.build(); return this; } public IndicesOptions build() { - return new IndicesOptions(concreteTargetOptions, wildcardOptions, gatekeeperOptions, failureStoreOptions); + return new IndicesOptions(concreteTargetOptions, wildcardOptions, gatekeeperOptions, selectorOptions); } } @@ -1171,11 +1204,12 @@ public static IndicesOptions fromOptions( .allowClosedIndices(forbidClosedIndices == false) .ignoreThrottled(ignoreThrottled) .build(); + final SelectorOptions selectorOptions = SelectorOptions.DEFAULT; return new IndicesOptions( ignoreUnavailable ? ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS : ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS, wildcards, gatekeeperOptions, - FailureStoreOptions.DEFAULT + selectorOptions ); } @@ -1189,9 +1223,7 @@ public static IndicesOptions fromRequest(RestRequest request, IndicesOptions def request.param(ConcreteTargetOptions.IGNORE_UNAVAILABLE), request.param(WildcardOptions.ALLOW_NO_INDICES), request.param(GatekeeperOptions.IGNORE_THROTTLED), - DataStream.isFailureStoreFeatureFlagEnabled() - ? request.param(FailureStoreOptions.FAILURE_STORE) - : FailureStoreOptions.INCLUDE_ONLY_REGULAR_INDICES, + DataStream.isFailureStoreFeatureFlagEnabled() ? request.param(FAILURE_STORE_QUERY_PARAM) : INCLUDE_ONLY_REGULAR_INDICES, defaultSettings ); } @@ -1207,7 +1239,7 @@ public static IndicesOptions fromMap(Map map, IndicesOptions def map.containsKey(GatekeeperOptions.IGNORE_THROTTLED) ? 
map.get(GatekeeperOptions.IGNORE_THROTTLED) : map.get("ignoreThrottled"), - map.containsKey(FailureStoreOptions.FAILURE_STORE) ? map.get(FailureStoreOptions.FAILURE_STORE) : map.get("failureStore"), + map.containsKey(FAILURE_STORE_QUERY_PARAM) ? map.get(FAILURE_STORE_QUERY_PARAM) : map.get("failureStore"), defaultSettings ); } @@ -1235,7 +1267,7 @@ public static boolean isIndicesOptions(String name) { || "ignoreThrottled".equals(name) || WildcardOptions.ALLOW_NO_INDICES.equals(name) || "allowNoIndices".equals(name) - || (DataStream.isFailureStoreFeatureFlagEnabled() && FailureStoreOptions.FAILURE_STORE.equals(name)) + || (DataStream.isFailureStoreFeatureFlagEnabled() && FAILURE_STORE_QUERY_PARAM.equals(name)) || (DataStream.isFailureStoreFeatureFlagEnabled() && "failureStore".equals(name)); } @@ -1267,26 +1299,51 @@ public static IndicesOptions fromParameters( WildcardOptions wildcards = WildcardOptions.parseParameters(wildcardsString, allowNoIndicesString, defaultSettings.wildcardOptions); GatekeeperOptions gatekeeperOptions = GatekeeperOptions.parseParameter(ignoreThrottled, defaultSettings.gatekeeperOptions); - FailureStoreOptions failureStoreOptions = DataStream.isFailureStoreFeatureFlagEnabled() - ? FailureStoreOptions.parseParameters(failureStoreString, defaultSettings.failureStoreOptions) - : FailureStoreOptions.DEFAULT; + SelectorOptions selectorOptions = DataStream.isFailureStoreFeatureFlagEnabled() + ? 
parseFailureStoreParameters(failureStoreString, defaultSettings.selectorOptions) + : SelectorOptions.DEFAULT; // note that allowAliasesToMultipleIndices is not exposed, always true (only for internal use) return IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.fromParameter(ignoreUnavailableString, defaultSettings.concreteTargetOptions)) .wildcardOptions(wildcards) .gatekeeperOptions(gatekeeperOptions) - .failureStoreOptions(failureStoreOptions) + .selectorOptions(selectorOptions) .build(); } + /** + * @deprecated This method parses the query parameter failure_store. This is a deprecated param, and it will be replaced + * the selector suffix, for example `my-data-stream::data` or `my-data-stream::failures` + */ + @Deprecated + private static SelectorOptions parseFailureStoreParameters(Object failureStoreValue, SelectorOptions defaultOptions) { + if (failureStoreValue == null) { + return defaultOptions; + } + return switch (failureStoreValue.toString()) { + case INCLUDE_ALL -> SelectorOptions.DATA_AND_FAILURE; + case INCLUDE_ONLY_REGULAR_INDICES -> SelectorOptions.ONLY_DATA; + case INCLUDE_ONLY_FAILURE_INDICES -> SelectorOptions.ONLY_FAILURES; + default -> throw new IllegalArgumentException("No valid " + FAILURE_STORE_QUERY_PARAM + " value [" + failureStoreValue + "]"); + }; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { concreteTargetOptions.toXContent(builder, params); wildcardOptions.toXContent(builder, params); gatekeeperOptions.toXContent(builder, params); if (DataStream.isFailureStoreFeatureFlagEnabled()) { - failureStoreOptions.toXContent(builder, params); + String displayValue; + if (SelectorOptions.DATA_AND_FAILURE.equals(selectorOptions())) { + displayValue = INCLUDE_ALL; + } else if (SelectorOptions.ONLY_DATA.equals(selectorOptions())) { + displayValue = INCLUDE_ONLY_REGULAR_INDICES; + } else { + displayValue = INCLUDE_ONLY_FAILURE_INDICES; + } + 
builder.field(FAILURE_STORE_QUERY_PARAM, displayValue); } return builder; } @@ -1295,7 +1352,7 @@ public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params par private static final ParseField IGNORE_UNAVAILABLE_FIELD = new ParseField(ConcreteTargetOptions.IGNORE_UNAVAILABLE); private static final ParseField IGNORE_THROTTLED_FIELD = new ParseField(GatekeeperOptions.IGNORE_THROTTLED).withAllDeprecated(); private static final ParseField ALLOW_NO_INDICES_FIELD = new ParseField(WildcardOptions.ALLOW_NO_INDICES); - private static final ParseField FAILURE_STORE_FIELD = new ParseField(FailureStoreOptions.FAILURE_STORE); + private static final ParseField FAILURE_STORE_FIELD = new ParseField(FAILURE_STORE_QUERY_PARAM); public static IndicesOptions fromXContent(XContentParser parser) throws IOException { return fromXContent(parser, null); @@ -1306,7 +1363,7 @@ public static IndicesOptions fromXContent(XContentParser parser, @Nullable Indic WildcardOptions.Builder wildcards = defaults == null ? null : WildcardOptions.builder(defaults.wildcardOptions()); GatekeeperOptions.Builder generalOptions = GatekeeperOptions.builder() .ignoreThrottled(defaults != null && defaults.gatekeeperOptions().ignoreThrottled()); - FailureStoreOptions failureStoreOptions = defaults == null ? FailureStoreOptions.DEFAULT : defaults.failureStoreOptions(); + SelectorOptions selectorOptions = defaults == null ? SelectorOptions.DEFAULT : defaults.selectorOptions(); Boolean allowNoIndices = defaults == null ? null : defaults.allowNoIndices(); Boolean ignoreUnavailable = defaults == null ? null : defaults.ignoreUnavailable(); Token token = parser.currentToken() == Token.START_OBJECT ? 
parser.currentToken() : parser.nextToken(); @@ -1358,7 +1415,7 @@ public static IndicesOptions fromXContent(XContentParser parser, @Nullable Indic generalOptions.ignoreThrottled(parser.booleanValue()); } else if (DataStream.isFailureStoreFeatureFlagEnabled() && FAILURE_STORE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - failureStoreOptions = FailureStoreOptions.parseParameters(parser.text(), failureStoreOptions); + selectorOptions = parseFailureStoreParameters(parser.text(), selectorOptions); } else { throw new ElasticsearchParseException( "could not read indices options. Unexpected index option [" + currentFieldName + "]" @@ -1389,7 +1446,7 @@ public static IndicesOptions fromXContent(XContentParser parser, @Nullable Indic .concreteTargetOptions(new ConcreteTargetOptions(ignoreUnavailable)) .wildcardOptions(wildcards) .gatekeeperOptions(generalOptions) - .failureStoreOptions(failureStoreOptions) + .selectorOptions(selectorOptions) .build(); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java index ebae4a36c6d3d..942844dd1dd16 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java @@ -69,7 +69,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC if (failureStore) { rolloverIndexRequest.setIndicesOptions( IndicesOptions.builder(rolloverIndexRequest.indicesOptions()) - .failureStoreOptions(new IndicesOptions.FailureStoreOptions(false, true)) + .selectorOptions(IndicesOptions.SelectorOptions.ONLY_FAILURES) .build() ); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexRequestTests.java 
index 4cfd9b66306ad..a75b50e3a88f4 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexRequestTests.java @@ -82,9 +82,6 @@ public void testIndicesOptions() { ); assertThat(getIndexRequest.indicesOptions().wildcardOptions(), equalTo(IndicesOptions.strictExpandOpen().wildcardOptions())); assertThat(getIndexRequest.indicesOptions().gatekeeperOptions(), equalTo(IndicesOptions.strictExpandOpen().gatekeeperOptions())); - assertThat( - getIndexRequest.indicesOptions().failureStoreOptions(), - equalTo(IndicesOptions.FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(true).build()) - ); + assertThat(getIndexRequest.indicesOptions().selectorOptions(), equalTo(IndicesOptions.SelectorOptions.DATA_AND_FAILURE)); } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java index 848e46f2b3366..b9fdb13958632 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.support.ActiveShardCount; -import org.elasticsearch.action.support.IndicesOptions.FailureStoreOptions; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.AliasAction; @@ -747,14 +747,14 @@ public void testValidation() throws Exception { final String defaultRolloverIndexName; final boolean 
useDataStream = randomBoolean(); final Metadata.Builder builder = Metadata.builder(); - var failureStoreOptions = FailureStoreOptions.DEFAULT; + var defaultSelectorOptions = IndicesOptions.SelectorOptions.DEFAULT; if (useDataStream) { DataStream dataStream = DataStreamTestHelper.randomInstance() // ensure no replicate data stream .promoteDataStream(); rolloverTarget = dataStream.getName(); if (dataStream.isFailureStoreEnabled() && randomBoolean()) { - failureStoreOptions = new FailureStoreOptions(false, true); + defaultSelectorOptions = IndicesOptions.SelectorOptions.ONLY_FAILURES; sourceIndexName = dataStream.getFailureStoreWriteIndex().getName(); defaultRolloverIndexName = DataStream.getDefaultFailureStoreName( dataStream.getName(), @@ -815,7 +815,7 @@ public void testValidation() throws Exception { true, null, null, - failureStoreOptions.includeFailureIndices() + IndicesOptions.SelectorOptions.ONLY_FAILURES.equals(defaultSelectorOptions) ); newIndexName = newIndexName == null ? defaultRolloverIndexName : newIndexName; diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java index 67bfa0e37dcf5..08e92c833dc85 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.support.IndexComponentSelector; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -33,7 +34,9 @@ import org.junit.Before; import java.io.IOException; +import java.util.EnumSet; import java.util.Map; +import 
java.util.Set; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -176,7 +179,12 @@ public void testSerialize() throws Exception { originalRequest.lazy(randomBoolean()); originalRequest.setIndicesOptions( IndicesOptions.builder(originalRequest.indicesOptions()) - .failureStoreOptions(new IndicesOptions.FailureStoreOptions(randomBoolean(), randomBoolean())) + .selectorOptions( + IndicesOptions.SelectorOptions.builder() + .setDefaultSelectors( + EnumSet.copyOf(randomNonEmptySubsetOf(Set.of(IndexComponentSelector.DATA, IndexComponentSelector.FAILURES))) + ) + ) .build() ); @@ -188,10 +196,7 @@ public void testSerialize() throws Exception { assertThat(cloneRequest.getNewIndexName(), equalTo(originalRequest.getNewIndexName())); assertThat(cloneRequest.getRolloverTarget(), equalTo(originalRequest.getRolloverTarget())); assertThat(cloneRequest.isLazy(), equalTo(originalRequest.isLazy())); - assertThat( - cloneRequest.indicesOptions().failureStoreOptions(), - equalTo(originalRequest.indicesOptions().failureStoreOptions()) - ); + assertThat(cloneRequest.indicesOptions().selectorOptions(), equalTo(originalRequest.indicesOptions().selectorOptions())); for (Map.Entry> entry : cloneRequest.getConditions().getConditions().entrySet()) { Condition condition = originalRequest.getConditions().getConditions().get(entry.getKey()); // here we compare the string representation as there is some information loss when serializing @@ -261,7 +266,7 @@ public void testValidation() { RolloverRequest rolloverRequest = new RolloverRequest("alias-index", "new-index-name"); rolloverRequest.setIndicesOptions( IndicesOptions.builder(rolloverRequest.indicesOptions()) - .failureStoreOptions(new IndicesOptions.FailureStoreOptions(true, true)) + .selectorOptions(IndicesOptions.SelectorOptions.DATA_AND_FAILURE) .build() ); ActionRequestValidationException validationException = rolloverRequest.validate(); diff --git 
a/server/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java b/server/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java index 437899a1fae1c..1784ab863bf1c 100644 --- a/server/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java @@ -10,7 +10,6 @@ package org.elasticsearch.action.support; import org.elasticsearch.action.support.IndicesOptions.ConcreteTargetOptions; -import org.elasticsearch.action.support.IndicesOptions.FailureStoreOptions; import org.elasticsearch.action.support.IndicesOptions.GatekeeperOptions; import org.elasticsearch.action.support.IndicesOptions.WildcardOptions; import org.elasticsearch.common.bytes.BytesReference; @@ -31,9 +30,11 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import static org.hamcrest.CoreMatchers.equalTo; @@ -57,8 +58,12 @@ public void testSerialization() throws Exception { .allowAliasToMultipleIndices(randomBoolean()) .allowClosedIndices(randomBoolean()) ) - .failureStoreOptions( - FailureStoreOptions.builder().includeRegularIndices(randomBoolean()).includeFailureIndices(randomBoolean()) + .selectorOptions( + IndicesOptions.SelectorOptions.builder() + .setDefaultSelectors( + EnumSet.copyOf(randomNonEmptySubsetOf(Set.of(IndexComponentSelector.DATA, IndexComponentSelector.FAILURES))) + ) + .build() ) .build(); @@ -345,9 +350,11 @@ public void testToXContent() throws IOException { randomBoolean() ); GatekeeperOptions gatekeeperOptions = new GatekeeperOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()); - FailureStoreOptions failureStoreOptions = new IndicesOptions.FailureStoreOptions(randomBoolean(), randomBoolean()); + IndicesOptions.SelectorOptions selectorOptions = new IndicesOptions.SelectorOptions( 
+ EnumSet.copyOf(randomNonEmptySubsetOf(Set.of(IndexComponentSelector.DATA, IndexComponentSelector.FAILURES))) + ); - IndicesOptions indicesOptions = new IndicesOptions(concreteTargetOptions, wildcardOptions, gatekeeperOptions, failureStoreOptions); + IndicesOptions indicesOptions = new IndicesOptions(concreteTargetOptions, wildcardOptions, gatekeeperOptions, selectorOptions); XContentType type = randomFrom(XContentType.values()); BytesReference xContentBytes = toXContentBytes(indicesOptions, type); @@ -362,7 +369,15 @@ public void testToXContent() throws IOException { assertThat(map.get("ignore_unavailable"), equalTo(concreteTargetOptions.allowUnavailableTargets())); assertThat(map.get("allow_no_indices"), equalTo(wildcardOptions.allowEmptyExpressions())); assertThat(map.get("ignore_throttled"), equalTo(gatekeeperOptions.ignoreThrottled())); - assertThat(map.get("failure_store"), equalTo(failureStoreOptions.displayValue())); + String displayValue; + if (IndicesOptions.SelectorOptions.DATA_AND_FAILURE.equals(selectorOptions)) { + displayValue = "include"; + } else if (IndicesOptions.SelectorOptions.ONLY_DATA.equals(selectorOptions)) { + displayValue = "exclude"; + } else { + displayValue = "only"; + } + assertThat(map.get("failure_store"), equalTo(displayValue)); } public void testFromXContent() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index bddbe259e0ef3..da19bd68e288a 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -2319,7 +2319,8 @@ public void testIgnoreThrottled() { new IndicesOptions( IndicesOptions.ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS, IndicesOptions.WildcardOptions.DEFAULT, - 
IndicesOptions.GatekeeperOptions.builder().ignoreThrottled(true).build() + IndicesOptions.GatekeeperOptions.builder().ignoreThrottled(true).build(), + IndicesOptions.SelectorOptions.DEFAULT ), "ind*", "test-index" @@ -2757,7 +2758,7 @@ public void testDataStreamsWithFailureStore() { // Test include failure store with an exact data stream name { IndicesOptions indicesOptions = IndicesOptions.builder(IndicesOptions.STRICT_EXPAND_OPEN) - .failureStoreOptions(IndicesOptions.FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(true)) + .selectorOptions(IndicesOptions.SelectorOptions.DATA_AND_FAILURE) .build(); Index[] result = indexNameExpressionResolver.concreteIndices(state, indicesOptions, true, "my-data-stream"); assertThat(result.length, equalTo(4)); @@ -2771,7 +2772,7 @@ public void testDataStreamsWithFailureStore() { // We expect that they will be skipped { IndicesOptions indicesOptions = IndicesOptions.builder(IndicesOptions.STRICT_EXPAND_OPEN) - .failureStoreOptions(IndicesOptions.FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(true)) + .selectorOptions(IndicesOptions.SelectorOptions.DATA_AND_FAILURE) .gatekeeperOptions(IndicesOptions.GatekeeperOptions.builder().allowFailureIndices(false).build()) .concreteTargetOptions(IndicesOptions.ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS) .build(); @@ -2785,7 +2786,7 @@ public void testDataStreamsWithFailureStore() { // We expect an error { IndicesOptions indicesOptions = IndicesOptions.builder(IndicesOptions.STRICT_EXPAND_OPEN) - .failureStoreOptions(IndicesOptions.FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(true)) + .selectorOptions(IndicesOptions.SelectorOptions.DATA_AND_FAILURE) .gatekeeperOptions(IndicesOptions.GatekeeperOptions.builder().allowFailureIndices(false).build()) .build(); FailureIndexNotSupportedException failureIndexNotSupportedException = expectThrows( @@ -2801,7 +2802,7 @@ public void 
testDataStreamsWithFailureStore() { // Test only failure store with an exact data stream name { IndicesOptions indicesOptions = IndicesOptions.builder(IndicesOptions.STRICT_EXPAND_OPEN) - .failureStoreOptions(IndicesOptions.FailureStoreOptions.builder().includeRegularIndices(false).includeFailureIndices(true)) + .selectorOptions(IndicesOptions.SelectorOptions.ONLY_FAILURES) .build(); Index[] result = indexNameExpressionResolver.concreteIndices(state, indicesOptions, true, "my-data-stream"); assertThat(result.length, equalTo(2)); @@ -2828,7 +2829,7 @@ public void testDataStreamsWithFailureStore() { // Test include failure store without any expressions { IndicesOptions indicesOptions = IndicesOptions.builder(IndicesOptions.STRICT_EXPAND_OPEN) - .failureStoreOptions(IndicesOptions.FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(true)) + .selectorOptions(IndicesOptions.SelectorOptions.DATA_AND_FAILURE) .build(); Index[] result = indexNameExpressionResolver.concreteIndices(state, indicesOptions, true); assertThat(result.length, equalTo(5)); @@ -2848,7 +2849,7 @@ public void testDataStreamsWithFailureStore() { // Test only failure store without any expressions { IndicesOptions indicesOptions = IndicesOptions.builder(IndicesOptions.STRICT_EXPAND_OPEN) - .failureStoreOptions(IndicesOptions.FailureStoreOptions.builder().includeRegularIndices(false).includeFailureIndices(true)) + .selectorOptions(IndicesOptions.SelectorOptions.ONLY_FAILURES) .build(); Index[] result = indexNameExpressionResolver.concreteIndices(state, indicesOptions, true); assertThat(result.length, equalTo(2)); @@ -2881,7 +2882,7 @@ public void testDataStreamsWithFailureStore() { // Test include failure store with wildcard expression { IndicesOptions indicesOptions = IndicesOptions.builder(IndicesOptions.STRICT_EXPAND_OPEN) - .failureStoreOptions(IndicesOptions.FailureStoreOptions.builder().includeRegularIndices(true).includeFailureIndices(true)) + 
.selectorOptions(IndicesOptions.SelectorOptions.DATA_AND_FAILURE) .build(); Index[] result = indexNameExpressionResolver.concreteIndices(state, indicesOptions, true, "my-*"); assertThat(result.length, equalTo(5)); @@ -2901,7 +2902,7 @@ public void testDataStreamsWithFailureStore() { // Test only failure store with wildcard expression { IndicesOptions indicesOptions = IndicesOptions.builder(IndicesOptions.STRICT_EXPAND_OPEN) - .failureStoreOptions(IndicesOptions.FailureStoreOptions.builder().includeRegularIndices(false).includeFailureIndices(true)) + .selectorOptions(IndicesOptions.SelectorOptions.ONLY_FAILURES) .build(); Index[] result = indexNameExpressionResolver.concreteIndices(state, indicesOptions, true, "my-*"); assertThat(result.length, equalTo(2)); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/RolloverStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/RolloverStep.java index 3e6c00eeadba4..d648dd1c7edf8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/RolloverStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/RolloverStep.java @@ -127,7 +127,7 @@ public void performAction( if (targetFailureStore) { rolloverRequest.setIndicesOptions( IndicesOptions.builder(rolloverRequest.indicesOptions()) - .failureStoreOptions(opts -> opts.includeFailureIndices(true).includeRegularIndices(false)) + .selectorOptions(IndicesOptions.SelectorOptions.ONLY_FAILURES) .build() ); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStep.java index 7b751994222b1..67f65481ef63e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStep.java @@ -248,7 +248,7 @@ RolloverRequest 
createRolloverRequest( if (targetFailureStore) { rolloverRequest.setIndicesOptions( IndicesOptions.builder(rolloverRequest.indicesOptions()) - .failureStoreOptions(opts -> opts.includeFailureIndices(true).includeRegularIndices(false)) + .selectorOptions(IndicesOptions.SelectorOptions.ONLY_FAILURES) .build() ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStepTests.java index 15958e9396d81..afb17644303bb 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStepTests.java @@ -308,8 +308,8 @@ public void onFailure(Exception e) { verify(indicesClient, Mockito.only()).rolloverIndex(requestCaptor.capture(), Mockito.any()); RolloverRequest request = requestCaptor.getValue(); - assertThat(request.indicesOptions().failureStoreOptions().includeFailureIndices(), equalTo(failureStoreIndex)); - assertThat(request.indicesOptions().failureStoreOptions().includeRegularIndices(), not(equalTo(failureStoreIndex))); + assertThat(request.indicesOptions().includeFailureIndices(), equalTo(failureStoreIndex)); + assertThat(request.indicesOptions().includeRegularIndices(), not(equalTo(failureStoreIndex))); } public void testSkipRolloverIfDataStreamIsAlreadyRolledOver() { From 64f2c42a76ac640f5fcf795d90a5b6c006701e94 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 16 Oct 2024 20:07:28 +1100 Subject: [PATCH 138/449] Mute org.elasticsearch.packaging.test.EnrollmentProcessTests test20DockerAutoFormCluster #114885 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 0b24bac278fa6..6817011d399b2 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -388,6 
+388,9 @@ tests: - class: org.elasticsearch.xpack.enrich.EnrichIT method: testDeleteIsCaseSensitive issue: https://github.com/elastic/elasticsearch/issues/114840 +- class: org.elasticsearch.packaging.test.EnrollmentProcessTests + method: test20DockerAutoFormCluster + issue: https://github.com/elastic/elasticsearch/issues/114885 # Examples: # From ae452becc7f65a677fbd01a2485176b9dbb2ddd4 Mon Sep 17 00:00:00 2001 From: Pooya Salehi Date: Wed, 16 Oct 2024 11:08:17 +0200 Subject: [PATCH 139/449] Document _cat/indices behavior when encountering source only indices (#114884) Closes https://github.com/elastic/elasticsearch/issues/114546 --- docs/reference/cat/indices.asciidoc | 8 ++++++-- .../snapshot-restore/repository-source-only.asciidoc | 3 +++ 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/docs/reference/cat/indices.asciidoc b/docs/reference/cat/indices.asciidoc index cf1cc9f825cb2..b8dda01c2eae0 100644 --- a/docs/reference/cat/indices.asciidoc +++ b/docs/reference/cat/indices.asciidoc @@ -6,8 +6,8 @@ [IMPORTANT] ==== -cat APIs are only intended for human consumption using the command line or {kib} -console. They are _not_ intended for use by applications. For application +cat APIs are only intended for human consumption using the command line or {kib} +console. They are _not_ intended for use by applications. For application consumption, use the <>. ==== @@ -50,6 +50,10 @@ indexing and search. As a result, all document counts include hidden To get an accurate count of {es} documents, use the <> or <> APIs. +Note that information such as document count, deleted document count and store size are not shown for +indices restored from <> since these indices +do not contain the relevant data structures to retrieve this information from. 
+ [[cat-indices-api-path-params]] ==== {api-path-parms-title} diff --git a/docs/reference/snapshot-restore/repository-source-only.asciidoc b/docs/reference/snapshot-restore/repository-source-only.asciidoc index 04e53c42aff9d..3c11d6ca6e59c 100644 --- a/docs/reference/snapshot-restore/repository-source-only.asciidoc +++ b/docs/reference/snapshot-restore/repository-source-only.asciidoc @@ -27,6 +27,9 @@ As a result, indices adopting synthetic source cannot be restored. When you rest * The mapping of the restored index is empty, but the original mapping is available from the types top level `meta` element. + * Information such as document count, deleted document count and store size are not available for such indices + since these indices do not contain the relevant data structures to retrieve this information from. Therefore, + this information is not shown for such indices in APIs such as the <>. ================================================== Before registering a source-only repository, use {kib} or the From 15c1051fb61fdaf2684694f5f82417031fe973f7 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 16 Oct 2024 10:35:22 +0100 Subject: [PATCH 140/449] Inline `MockTransportService#getLocalDiscoNode()` (#114883) This method just delegates to `getLocalNode()`, we may as well call the more widely-used method with the shorter name directly. 
--- .../netty4/SimpleNetty4TransportTests.java | 4 +- ...rossClusterSearchUnavailableClusterIT.java | 4 +- .../search/TransportSearchActionTests.java | 2 +- .../tasks/BanFailureLoggingTests.java | 6 +- .../RemoteClusterAwareClientTests.java | 14 ++-- .../transport/RemoteClusterClientTests.java | 6 +- .../RemoteClusterConnectionTests.java | 22 ++--- .../transport/RemoteClusterServiceTests.java | 84 +++++++++---------- .../transport/TransportActionProxyTests.java | 8 +- .../AbstractIndexRecoveryIntegTestCase.java | 8 +- .../test/transport/MockTransportService.java | 5 -- .../AbstractSimpleTransportTestCase.java | 16 ++-- .../exchange/ExchangeServiceTests.java | 2 +- .../enrich/EnrichPolicyResolverTests.java | 2 +- ...ty4ServerTransportAuthenticationTests.java | 2 +- 15 files changed, 90 insertions(+), 95 deletions(-) diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java index b55f4eccafca8..cad839bed9555 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java @@ -103,7 +103,7 @@ public void testDefaultKeepAliveSettings() throws IOException { MockTransportService serviceD = buildService("TS_D", VersionInformation.CURRENT, TransportVersion.current(), Settings.EMPTY) ) { - try (Transport.Connection connection = openConnection(serviceC, serviceD.getLocalDiscoNode(), TestProfiles.LIGHT_PROFILE)) { + try (Transport.Connection connection = openConnection(serviceC, serviceD.getLocalNode(), TestProfiles.LIGHT_PROFILE)) { assertThat(connection, instanceOf(StubbableTransport.WrappedConnection.class)); Transport.Connection conn = ((StubbableTransport.WrappedConnection) connection).getConnection(); assertThat(conn, 
instanceOf(TcpTransport.NodeChannels.class)); @@ -147,7 +147,7 @@ public void testTransportProfile() { MockTransportService serviceD = buildService("TS_D", VersionInformation.CURRENT, TransportVersion.current(), Settings.EMPTY) ) { - try (Transport.Connection connection = openConnection(serviceC, serviceD.getLocalDiscoNode(), connectionProfile)) { + try (Transport.Connection connection = openConnection(serviceC, serviceD.getLocalNode(), connectionProfile)) { assertThat(connection, instanceOf(StubbableTransport.WrappedConnection.class)); Transport.Connection conn = ((StubbableTransport.WrappedConnection) connection).getConnection(); assertThat(conn, instanceOf(TcpTransport.NodeChannels.class)); diff --git a/qa/ccs-unavailable-clusters/src/javaRestTest/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java b/qa/ccs-unavailable-clusters/src/javaRestTest/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java index 7b42292848395..780f3994ce627 100644 --- a/qa/ccs-unavailable-clusters/src/javaRestTest/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java +++ b/qa/ccs-unavailable-clusters/src/javaRestTest/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java @@ -154,7 +154,7 @@ public void testSearchSkipUnavailable() throws IOException { threadPool ) ) { - DiscoveryNode remoteNode = remoteTransport.getLocalDiscoNode(); + DiscoveryNode remoteNode = remoteTransport.getLocalNode(); updateRemoteClusterSettings(Collections.singletonMap("seeds", remoteNode.getAddress().toString())); @@ -307,7 +307,7 @@ public void testSkipUnavailableDependsOnSeeds() throws IOException { threadPool ) ) { - DiscoveryNode remoteNode = remoteTransport.getLocalDiscoNode(); + DiscoveryNode remoteNode = remoteTransport.getLocalNode(); { // check that skip_unavailable alone cannot be set diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java 
b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java index 06434a0c90518..70682cfd41d82 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java @@ -486,7 +486,7 @@ private MockTransportService[] startTransport( threadPool ); mockTransportServices[i] = remoteSeedTransport; - DiscoveryNode remoteSeedNode = remoteSeedTransport.getLocalDiscoNode(); + DiscoveryNode remoteSeedNode = remoteSeedTransport.getLocalNode(); knownNodes.add(remoteSeedNode); nodes[i] = remoteSeedNode; settingsBuilder.put("cluster.remote.remote" + i + ".seeds", remoteSeedNode.getAddress().toString()); diff --git a/server/src/test/java/org/elasticsearch/tasks/BanFailureLoggingTests.java b/server/src/test/java/org/elasticsearch/tasks/BanFailureLoggingTests.java index 78d76476d06fc..e5cdecd25ef34 100644 --- a/server/src/test/java/org/elasticsearch/tasks/BanFailureLoggingTests.java +++ b/server/src/test/java/org/elasticsearch/tasks/BanFailureLoggingTests.java @@ -157,13 +157,13 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, parentTransportService.addSendBehavior(sendRequestBehavior); - AbstractSimpleTransportTestCase.connectToNode(parentTransportService, childTransportService.getLocalDiscoNode()); + AbstractSimpleTransportTestCase.connectToNode(parentTransportService, childTransportService.getLocalNode()); final CancellableTask parentTask = (CancellableTask) parentTransportService.getTaskManager() .register("transport", "internal:testAction", new ParentRequest()); parentTransportService.sendChildRequest( - childTransportService.getLocalDiscoNode(), + childTransportService.getLocalNode(), "internal:testAction[c]", new EmptyRequest(), parentTask, @@ -172,7 +172,7 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, ); try (MockLog mockLog = 
MockLog.capture(TaskCancellationService.class)) { - for (MockLog.LoggingExpectation expectation : expectations.apply(childTransportService.getLocalDiscoNode())) { + for (MockLog.LoggingExpectation expectation : expectations.apply(childTransportService.getLocalNode())) { mockLog.addExpectation(expectation); } diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareClientTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareClientTests.java index 43dce7d406ba2..d7cf38828b7ba 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareClientTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareClientTests.java @@ -89,7 +89,7 @@ public void testRemoteTaskCancellationOnFailedResponse() throws Exception { ) { remoteTransport.getTaskManager().setTaskCancellationService(new TaskCancellationService(remoteTransport)); Settings.Builder builder = Settings.builder(); - builder.putList("cluster.remote.cluster1.seeds", remoteTransport.getLocalDiscoNode().getAddress().toString()); + builder.putList("cluster.remote.cluster1.seeds", remoteTransport.getLocalNode().getAddress().toString()); try ( MockTransportService localService = MockTransportService.createNewService( builder.build(), @@ -163,11 +163,11 @@ public void testSearchShards() throws Exception { MockTransportService seedTransport = startTransport("seed_node", knownNodes); MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes) ) { - knownNodes.add(seedTransport.getLocalDiscoNode()); - knownNodes.add(discoverableTransport.getLocalDiscoNode()); + knownNodes.add(seedTransport.getLocalNode()); + knownNodes.add(discoverableTransport.getLocalNode()); Collections.shuffle(knownNodes, random()); Settings.Builder builder = Settings.builder(); - builder.putList("cluster.remote.cluster1.seeds", seedTransport.getLocalDiscoNode().getAddress().toString()); + 
builder.putList("cluster.remote.cluster1.seeds", seedTransport.getLocalNode().getAddress().toString()); try ( MockTransportService service = MockTransportService.createNewService( builder.build(), @@ -216,11 +216,11 @@ public void testSearchShardsThreadContextHeader() { MockTransportService seedTransport = startTransport("seed_node", knownNodes); MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes) ) { - knownNodes.add(seedTransport.getLocalDiscoNode()); - knownNodes.add(discoverableTransport.getLocalDiscoNode()); + knownNodes.add(seedTransport.getLocalNode()); + knownNodes.add(discoverableTransport.getLocalNode()); Collections.shuffle(knownNodes, random()); Settings.Builder builder = Settings.builder(); - builder.putList("cluster.remote.cluster1.seeds", seedTransport.getLocalDiscoNode().getAddress().toString()); + builder.putList("cluster.remote.cluster1.seeds", seedTransport.getLocalNode().getAddress().toString()); try ( MockTransportService service = MockTransportService.createNewService( builder.build(), diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java index ff0742c89bba9..0efaef7015649 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java @@ -73,7 +73,7 @@ public void testConnectAndExecuteRequest() throws Exception { remoteSettings ) ) { - DiscoveryNode remoteNode = remoteTransport.getLocalDiscoNode(); + DiscoveryNode remoteNode = remoteTransport.getLocalNode(); Settings localSettings = Settings.builder() .put(onlyRole(DiscoveryNodeRole.REMOTE_CLUSTER_CLIENT_ROLE)) @@ -152,7 +152,7 @@ public void testEnsureWeReconnect() throws Exception { remoteSettings ) ) { - DiscoveryNode remoteNode = remoteTransport.getLocalDiscoNode(); + DiscoveryNode remoteNode = 
remoteTransport.getLocalNode(); Settings localSettings = Settings.builder() .put(onlyRole(DiscoveryNodeRole.REMOTE_CLUSTER_CLIENT_ROLE)) .put("cluster.remote.test.seeds", remoteNode.getAddress().getAddress() + ":" + remoteNode.getAddress().getPort()) @@ -251,7 +251,7 @@ public void testQuicklySkipUnavailableClusters() throws Exception { remoteSettings ) ) { - DiscoveryNode remoteNode = remoteTransport.getLocalDiscoNode(); + DiscoveryNode remoteNode = remoteTransport.getLocalNode(); Settings localSettings = Settings.builder() .put(onlyRole(DiscoveryNodeRole.REMOTE_CLUSTER_CLIENT_ROLE)) diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java index 7a259cf3100f0..21346bb93ef8e 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java @@ -311,11 +311,11 @@ public void testCloseWhileConcurrentlyConnecting() throws IOException, Interrupt TransportVersion.current() ) ) { - DiscoveryNode seedNode = seedTransport.getLocalDiscoNode(); - DiscoveryNode seedNode1 = seedTransport1.getLocalDiscoNode(); - knownNodes.add(seedTransport.getLocalDiscoNode()); - knownNodes.add(discoverableTransport.getLocalDiscoNode()); - knownNodes.add(seedTransport1.getLocalDiscoNode()); + DiscoveryNode seedNode = seedTransport.getLocalNode(); + DiscoveryNode seedNode1 = seedTransport1.getLocalNode(); + knownNodes.add(seedTransport.getLocalNode()); + knownNodes.add(discoverableTransport.getLocalNode()); + knownNodes.add(seedTransport1.getLocalNode()); Collections.shuffle(knownNodes, random()); List seedNodes = addresses(seedNode1, seedNode); Collections.shuffle(seedNodes, random()); @@ -447,9 +447,9 @@ private void doTestGetConnectionInfo(boolean hasClusterCredentials) throws Excep seedTransportSettings ) ) { - DiscoveryNode node1 = 
transport1.getLocalDiscoNode(); - DiscoveryNode node2 = transport3.getLocalDiscoNode(); - DiscoveryNode node3 = transport2.getLocalDiscoNode(); + DiscoveryNode node1 = transport1.getLocalNode(); + DiscoveryNode node2 = transport3.getLocalNode(); + DiscoveryNode node3 = transport2.getLocalNode(); if (hasClusterCredentials) { node1 = node1.withTransportAddress(transport1.boundRemoteAccessAddress().publishAddress()); node2 = node2.withTransportAddress(transport3.boundRemoteAccessAddress().publishAddress()); @@ -645,7 +645,7 @@ private void doTestCollectNodes(boolean hasClusterCredentials) throws Exception seedTransportSettings ) ) { - DiscoveryNode seedNode = seedTransport.getLocalDiscoNode(); + DiscoveryNode seedNode = seedTransport.getLocalNode(); if (hasClusterCredentials) { seedNode = seedNode.withTransportAddress(seedTransport.boundRemoteAccessAddress().publishAddress()); } @@ -725,8 +725,8 @@ public void testNoChannelsExceptREG() throws Exception { TransportVersion.current() ) ) { - DiscoveryNode seedNode = seedTransport.getLocalDiscoNode(); - knownNodes.add(seedTransport.getLocalDiscoNode()); + DiscoveryNode seedNode = seedTransport.getLocalNode(); + knownNodes.add(seedTransport.getLocalNode()); try ( MockTransportService service = MockTransportService.createNewService( Settings.EMPTY, diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java index 11243ba088f8f..3633128c45bfa 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java @@ -141,10 +141,10 @@ public void testGroupClusterIndices() throws IOException { TransportVersion.current() ) ) { - DiscoveryNode cluster1Seed = cluster1Transport.getLocalDiscoNode(); - DiscoveryNode cluster2Seed = cluster2Transport.getLocalDiscoNode(); - 
knownNodes.add(cluster1Transport.getLocalDiscoNode()); - knownNodes.add(cluster2Transport.getLocalDiscoNode()); + DiscoveryNode cluster1Seed = cluster1Transport.getLocalNode(); + DiscoveryNode cluster2Seed = cluster2Transport.getLocalNode(); + knownNodes.add(cluster1Transport.getLocalNode()); + knownNodes.add(cluster2Transport.getLocalNode()); Collections.shuffle(knownNodes, random()); try ( @@ -343,10 +343,10 @@ public void testGroupIndices() throws IOException { TransportVersion.current() ) ) { - DiscoveryNode cluster1Seed = cluster1Transport.getLocalDiscoNode(); - DiscoveryNode cluster2Seed = cluster2Transport.getLocalDiscoNode(); - knownNodes.add(cluster1Transport.getLocalDiscoNode()); - knownNodes.add(cluster2Transport.getLocalDiscoNode()); + DiscoveryNode cluster1Seed = cluster1Transport.getLocalNode(); + DiscoveryNode cluster2Seed = cluster2Transport.getLocalNode(); + knownNodes.add(cluster1Transport.getLocalNode()); + knownNodes.add(cluster2Transport.getLocalNode()); Collections.shuffle(knownNodes, random()); try ( @@ -453,10 +453,10 @@ public void testIncrementallyAddClusters() throws IOException { TransportVersion.current() ) ) { - DiscoveryNode cluster1Seed = cluster1Transport.getLocalDiscoNode(); - DiscoveryNode cluster2Seed = cluster2Transport.getLocalDiscoNode(); - knownNodes.add(cluster1Transport.getLocalDiscoNode()); - knownNodes.add(cluster2Transport.getLocalDiscoNode()); + DiscoveryNode cluster1Seed = cluster1Transport.getLocalNode(); + DiscoveryNode cluster2Seed = cluster2Transport.getLocalNode(); + knownNodes.add(cluster1Transport.getLocalNode()); + knownNodes.add(cluster2Transport.getLocalNode()); Collections.shuffle(knownNodes, random()); try ( @@ -526,8 +526,8 @@ public void testDefaultPingSchedule() throws IOException { TransportVersion.current() ) ) { - DiscoveryNode seedNode = seedTransport.getLocalDiscoNode(); - knownNodes.add(seedTransport.getLocalDiscoNode()); + DiscoveryNode seedNode = seedTransport.getLocalNode(); + 
knownNodes.add(seedTransport.getLocalNode()); TimeValue pingSchedule; Settings.Builder settingsBuilder = Settings.builder(); settingsBuilder.putList("cluster.remote.cluster_1.seeds", seedNode.getAddress().toString()); @@ -582,10 +582,10 @@ public void testCustomPingSchedule() throws IOException { TransportVersion.current() ) ) { - DiscoveryNode cluster1Seed = cluster1Transport.getLocalDiscoNode(); - DiscoveryNode cluster2Seed = cluster2Transport.getLocalDiscoNode(); - knownNodes.add(cluster1Transport.getLocalDiscoNode()); - knownNodes.add(cluster2Transport.getLocalDiscoNode()); + DiscoveryNode cluster1Seed = cluster1Transport.getLocalNode(); + DiscoveryNode cluster2Seed = cluster2Transport.getLocalNode(); + knownNodes.add(cluster1Transport.getLocalNode()); + knownNodes.add(cluster2Transport.getLocalNode()); Collections.shuffle(knownNodes, random()); Settings.Builder settingsBuilder = Settings.builder(); if (randomBoolean()) { @@ -635,8 +635,8 @@ public void testChangeSettings() throws Exception { TransportVersion.current() ) ) { - DiscoveryNode cluster1Seed = cluster1Transport.getLocalDiscoNode(); - knownNodes.add(cluster1Transport.getLocalDiscoNode()); + DiscoveryNode cluster1Seed = cluster1Transport.getLocalNode(); + knownNodes.add(cluster1Transport.getLocalNode()); Collections.shuffle(knownNodes, random()); try ( @@ -716,10 +716,10 @@ public void testRemoteNodeAttribute() throws IOException, InterruptedException { gateway ) ) { - final DiscoveryNode c1N1Node = c1N1.getLocalDiscoNode(); - final DiscoveryNode c1N2Node = c1N2.getLocalDiscoNode(); - final DiscoveryNode c2N1Node = c2N1.getLocalDiscoNode(); - final DiscoveryNode c2N2Node = c2N2.getLocalDiscoNode(); + final DiscoveryNode c1N1Node = c1N1.getLocalNode(); + final DiscoveryNode c1N2Node = c1N2.getLocalNode(); + final DiscoveryNode c2N1Node = c2N1.getLocalNode(); + final DiscoveryNode c2N2Node = c2N2.getLocalNode(); knownNodes.add(c1N1Node); knownNodes.add(c1N2Node); knownNodes.add(c2N1Node); @@ -809,10 
+809,10 @@ public void testRemoteNodeRoles() throws IOException, InterruptedException { data ) ) { - final DiscoveryNode c1N1Node = c1N1.getLocalDiscoNode(); - final DiscoveryNode c1N2Node = c1N2.getLocalDiscoNode(); - final DiscoveryNode c2N1Node = c2N1.getLocalDiscoNode(); - final DiscoveryNode c2N2Node = c2N2.getLocalDiscoNode(); + final DiscoveryNode c1N1Node = c1N1.getLocalNode(); + final DiscoveryNode c1N2Node = c1N2.getLocalNode(); + final DiscoveryNode c2N1Node = c2N1.getLocalNode(); + final DiscoveryNode c2N2Node = c2N2.getLocalNode(); knownNodes.add(c1N1Node); knownNodes.add(c1N2Node); knownNodes.add(c2N1Node); @@ -906,10 +906,10 @@ public void testCollectNodes() throws InterruptedException, IOException { settings ) ) { - final DiscoveryNode c1N1Node = c1N1.getLocalDiscoNode(); - final DiscoveryNode c1N2Node = c1N2.getLocalDiscoNode(); - final DiscoveryNode c2N1Node = c2N1.getLocalDiscoNode(); - final DiscoveryNode c2N2Node = c2N2.getLocalDiscoNode(); + final DiscoveryNode c1N1Node = c1N1.getLocalNode(); + final DiscoveryNode c1N2Node = c1N2.getLocalNode(); + final DiscoveryNode c2N1Node = c2N1.getLocalNode(); + final DiscoveryNode c2N2Node = c2N2.getLocalNode(); knownNodes_c1.add(c1N1Node); knownNodes_c1.add(c1N2Node); knownNodes_c2.add(c2N1Node); @@ -1170,8 +1170,8 @@ public void testReconnectWhenStrategySettingsUpdated() throws Exception { ) ) { - final DiscoveryNode node0 = cluster_node_0.getLocalDiscoNode(); - final DiscoveryNode node1 = cluster_node_1.getLocalDiscoNode(); + final DiscoveryNode node0 = cluster_node_0.getLocalNode(); + final DiscoveryNode node1 = cluster_node_1.getLocalNode(); knownNodes.add(node0); knownNodes.add(node1); Collections.shuffle(knownNodes, random()); @@ -1267,10 +1267,10 @@ public void testSkipUnavailable() { TransportVersion.current() ) ) { - DiscoveryNode seedNode = seedTransport.getLocalDiscoNode(); + DiscoveryNode seedNode = seedTransport.getLocalNode(); knownNodes.add(seedNode); Settings.Builder builder = 
Settings.builder(); - builder.putList("cluster.remote.cluster1.seeds", seedTransport.getLocalDiscoNode().getAddress().toString()); + builder.putList("cluster.remote.cluster1.seeds", seedTransport.getLocalNode().getAddress().toString()); try ( MockTransportService service = MockTransportService.createNewService( builder.build(), @@ -1353,8 +1353,8 @@ public void testUseDifferentTransportProfileForCredentialsProtectedRemoteCluster ); MockTransportService c2 = startTransport("cluster_2", knownNodes, VersionInformation.CURRENT, TransportVersion.current()); ) { - final DiscoveryNode c1Node = c1.getLocalDiscoNode().withTransportAddress(c1.boundRemoteAccessAddress().publishAddress()); - final DiscoveryNode c2Node = c2.getLocalDiscoNode(); + final DiscoveryNode c1Node = c1.getLocalNode().withTransportAddress(c1.boundRemoteAccessAddress().publishAddress()); + final DiscoveryNode c2Node = c2.getLocalNode(); final MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("cluster.remote.cluster_1.credentials", randomAlphaOfLength(10)); @@ -1441,7 +1441,7 @@ public void testUpdateRemoteClusterCredentialsRebuildsConnectionWithCorrectProfi .build() ) ) { - final DiscoveryNode discoNode = c.getLocalDiscoNode().withTransportAddress(c.boundRemoteAccessAddress().publishAddress()); + final DiscoveryNode discoNode = c.getLocalNode().withTransportAddress(c.boundRemoteAccessAddress().publishAddress()); try ( MockTransportService transportService = MockTransportService.createNewService( Settings.EMPTY, @@ -1518,8 +1518,8 @@ public void testUpdateRemoteClusterCredentialsRebuildsMultipleConnectionsDespite .build() ) ) { - final DiscoveryNode c1DiscoNode = c1.getLocalDiscoNode().withTransportAddress(c1.boundRemoteAccessAddress().publishAddress()); - final DiscoveryNode c2DiscoNode = c2.getLocalDiscoNode().withTransportAddress(c2.boundRemoteAccessAddress().publishAddress()); + final DiscoveryNode c1DiscoNode = 
c1.getLocalNode().withTransportAddress(c1.boundRemoteAccessAddress().publishAddress()); + final DiscoveryNode c2DiscoNode = c2.getLocalNode().withTransportAddress(c2.boundRemoteAccessAddress().publishAddress()); try ( MockTransportService transportService = MockTransportService.createNewService( Settings.EMPTY, @@ -1636,7 +1636,7 @@ public void testLogsConnectionResult() throws IOException { assertThatLogger( () -> clusterSettings.applySettings( - Settings.builder().putList("cluster.remote.remote_1.seeds", remote.getLocalDiscoNode().getAddress().toString()).build() + Settings.builder().putList("cluster.remote.remote_1.seeds", remote.getLocalNode().getAddress().toString()).build() ), RemoteClusterService.class, new MockLog.SeenEventExpectation( diff --git a/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java b/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java index 398bb5f2a9106..46585ac382583 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java @@ -85,15 +85,15 @@ public void setUp() throws Exception { threadPool = new TestThreadPool(getClass().getName()); serviceA = buildService(version0, transportVersion0); // this one supports dynamic tracer updates serviceA.taskManager.setTaskCancellationService(new TaskCancellationService(serviceA)); - nodeA = serviceA.getLocalDiscoNode(); + nodeA = serviceA.getLocalNode(); serviceB = buildService(version1, transportVersion1); // this one doesn't support dynamic tracer updates serviceB.taskManager.setTaskCancellationService(new TaskCancellationService(serviceB)); - nodeB = serviceB.getLocalDiscoNode(); + nodeB = serviceB.getLocalNode(); serviceC = buildService(version1, transportVersion1); // this one doesn't support dynamic tracer updates serviceC.taskManager.setTaskCancellationService(new TaskCancellationService(serviceC)); - nodeC = 
serviceC.getLocalDiscoNode(); + nodeC = serviceC.getLocalNode(); serviceD = buildService(version1, transportVersion1); - nodeD = serviceD.getLocalDiscoNode(); + nodeD = serviceD.getLocalNode(); } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/indices/recovery/AbstractIndexRecoveryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/indices/recovery/AbstractIndexRecoveryIntegTestCase.java index 568f386a81fd1..ec85feb200984 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/recovery/AbstractIndexRecoveryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/indices/recovery/AbstractIndexRecoveryIntegTestCase.java @@ -156,11 +156,11 @@ protected void checkTransientErrorsDuringRecoveryAreRetried(String recoveryActio Runnable connectionBreaker = () -> { // Always break connection from source to remote to ensure that actions are retried logger.info("--> closing connections from source node to target node"); - blueTransportService.disconnectFromNode(redTransportService.getLocalDiscoNode()); + blueTransportService.disconnectFromNode(redTransportService.getLocalNode()); if (randomBoolean()) { // Sometimes break connection from remote to source to ensure that recovery is re-established logger.info("--> closing connections from target node to source node"); - redTransportService.disconnectFromNode(blueTransportService.getLocalDiscoNode()); + redTransportService.disconnectFromNode(blueTransportService.getLocalNode()); } }; TransientReceiveRejected handlingBehavior = new TransientReceiveRejected(recoveryActionToBlock, recoveryStarted, connectionBreaker); @@ -258,13 +258,13 @@ public void checkDisconnectsWhileRecovering(String recoveryActionToBlock) throws blueMockTransportService.addRequestHandlingBehavior(recoveryActionToBlock, (handler, request, channel, task) -> { logger.info("--> preventing {} response by closing response channel", recoveryActionToBlock); requestFailed.countDown(); - 
redMockTransportService.disconnectFromNode(blueMockTransportService.getLocalDiscoNode()); + redMockTransportService.disconnectFromNode(blueMockTransportService.getLocalNode()); handler.messageReceived(request, channel, task); }); redMockTransportService.addRequestHandlingBehavior(recoveryActionToBlock, (handler, request, channel, task) -> { logger.info("--> preventing {} response by closing response channel", recoveryActionToBlock); requestFailed.countDown(); - blueMockTransportService.disconnectFromNode(redMockTransportService.getLocalDiscoNode()); + blueMockTransportService.disconnectFromNode(redMockTransportService.getLocalNode()); handler.messageReceived(request, channel, task); }); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index c4e1c6c7a0681..fd376fcd07688 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -830,9 +830,4 @@ protected void doClose() throws IOException { assertTrue(ThreadPool.terminate(testExecutor, 10, TimeUnit.SECONDS)); } } - - public DiscoveryNode getLocalDiscoNode() { - return this.getLocalNode(); - } - } diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 34f67ac78a41c..4595fbf286077 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -668,7 +668,7 @@ public void testVoidMessageCompressed() throws Exception { ) .build(); ConnectionProfile connectionProfile = ConnectionProfile.buildDefaultConnectionProfile(settingsWithCompress); - 
connectToNode(serviceC, serviceA.getLocalDiscoNode(), connectionProfile); + connectToNode(serviceC, serviceA.getLocalNode(), connectionProfile); Future res = submitRequest( serviceC, @@ -725,7 +725,7 @@ public void testHelloWorldCompressed() throws Exception { ) .build(); ConnectionProfile connectionProfile = ConnectionProfile.buildDefaultConnectionProfile(settingsWithCompress); - connectToNode(serviceC, serviceA.getLocalDiscoNode(), connectionProfile); + connectToNode(serviceC, serviceA.getLocalNode(), connectionProfile); Future res = submitRequest( serviceC, @@ -795,8 +795,8 @@ public void testIndexingDataCompression() throws Exception { ) .build(); ConnectionProfile connectionProfile = ConnectionProfile.buildDefaultConnectionProfile(settingsWithCompress); - connectToNode(serviceC, serviceA.getLocalDiscoNode(), connectionProfile); - connectToNode(serviceA, serviceC.getLocalDiscoNode(), connectionProfile); + connectToNode(serviceC, serviceA.getLocalNode(), connectionProfile); + connectToNode(serviceA, serviceC.getLocalNode(), connectionProfile); TransportResponseHandler responseHandler = new TransportResponseHandler<>() { @Override @@ -821,14 +821,14 @@ public void handleException(TransportException exp) { Future compressed = submitRequest( serviceC, - serviceA.getLocalDiscoNode(), + serviceA.getLocalNode(), "internal:sayHello", new StringMessageRequest(text, -1, true), responseHandler ); Future uncompressed = submitRequest( serviceA, - serviceC.getLocalDiscoNode(), + serviceC.getLocalNode(), "internal:sayHello", new StringMessageRequest(text, -1, false), responseHandler @@ -1049,7 +1049,7 @@ public void onAfter() { ignoringRequestHandler ); serviceB = newService; - nodeB = newService.getLocalDiscoNode(); + nodeB = newService.getLocalNode(); connectToNode(serviceB, nodeA); connectToNode(serviceA, nodeB); } else if (serviceA.nodeConnected(nodeB)) { @@ -3419,7 +3419,7 @@ public void sendRequest( ) { final CountDownLatch latch = new CountDownLatch(1); 
serviceC.connectToNode( - serviceA.getLocalDiscoNode(), + serviceA.getLocalNode(), ConnectionProfile.buildDefaultConnectionProfile(Settings.EMPTY), new ActionListener<>() { @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index ab785e739d080..9e07f9c8f5faf 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -441,7 +441,7 @@ public void sendResponse(TransportResponse transportResponse) { PlainActionFuture sourceCompletionFuture = new PlainActionFuture<>(); sourceHandler.addCompletionListener(sourceCompletionFuture); ExchangeSinkHandler sinkHandler = exchange1.createSinkHandler(exchangeId, randomIntBetween(1, 128)); - Transport.Connection connection = node0.getConnection(node1.getLocalDiscoNode()); + Transport.Connection connection = node0.getConnection(node1.getLocalNode()); sourceHandler.addRemoteSink(exchange0.newRemoteSink(task, exchangeId, node0, connection), randomIntBetween(1, 5)); Exception err = expectThrows( Exception.class, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolverTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolverTests.java index ebad8e6e13b8c..05a7486a18068 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolverTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolverTests.java @@ -448,7 +448,7 @@ EnrichResolution resolvePolicies(Collection clusters, Collection policies) { diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransportAuthenticationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransportAuthenticationTests.java index 3d3f96b98d5e5..d294fb50046d6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransportAuthenticationTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransportAuthenticationTests.java @@ -158,7 +158,7 @@ public TransportRequestHandler interceptHandler( } } ); - DiscoveryNode remoteNode = remoteTransportService.getLocalDiscoNode(); + DiscoveryNode remoteNode = remoteTransportService.getLocalNode(); remoteTransportService.registerRequestHandler( RemoteClusterNodesAction.TYPE.name(), EsExecutors.DIRECT_EXECUTOR_SERVICE, From 4fd621833b90f0414982bd309428eac4dd2fa34d Mon Sep 17 00:00:00 2001 From: Ioana Tagirta Date: Wed, 16 Oct 2024 11:54:48 +0200 Subject: [PATCH 141/449] Better DataType string checks (#114863) * Use DataType.isString * Add DataType.stringTypes() * Fix shouldHideSignature check --- .../elasticsearch/xpack/esql/core/type/DataType.java | 6 ++++++ .../xpack/esql/core/type/DataTypeConverter.java | 4 +--- .../expression/function/aggregate/CountDistinct.java | 2 +- .../xpack/esql/expression/function/aggregate/Max.java | 2 +- .../xpack/esql/expression/function/aggregate/Min.java | 2 +- .../function/scalar/conditional/Greatest.java | 6 +----- .../expression/function/scalar/conditional/Least.java | 6 +----- .../scalar/convert/AbstractConvertFunction.java | 5 ++--- .../operator/comparison/InsensitiveEqualsMapper.java | 2 +- .../expression/function/AbstractFunctionTestCase.java | 10 +--------- .../esql/expression/function/TestCaseSupplier.java | 5 ++--- .../esql/expression/function/fulltext/MatchTests.java | 9 ++------- 
.../expression/function/fulltext/QueryStringTests.java | 3 +-- .../function/scalar/convert/ToVersionTests.java | 2 +- .../function/scalar/string/AbstractTrimTests.java | 2 +- .../expression/function/scalar/string/ConcatTests.java | 4 +--- .../function/scalar/string/EndsWithTests.java | 5 ++--- .../expression/function/scalar/string/LocateTests.java | 6 ++---- .../expression/function/scalar/string/RLikeTests.java | 2 +- .../function/scalar/string/StartsWithTests.java | 5 ++--- .../xpack/esql/type/EsqlDataTypeConverterTests.java | 3 +-- 21 files changed, 32 insertions(+), 59 deletions(-) diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java index b23703c6d8b66..cb1a7b2eb6fe0 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java @@ -266,6 +266,8 @@ public enum DataType { .sorted(Comparator.comparing(DataType::typeName)) .toList(); + private static final Collection STRING_TYPES = DataType.types().stream().filter(DataType::isString).toList(); + private static final Map NAME_TO_TYPE = TYPES.stream().collect(toUnmodifiableMap(DataType::typeName, t -> t)); private static final Map ES_TO_TYPE; @@ -292,6 +294,10 @@ public static Collection types() { return TYPES; } + public static Collection stringTypes() { + return STRING_TYPES; + } + /** * Resolve a type from a name. 
This name is sometimes user supplied, * like in the case of {@code ::} and is sometimes the name diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataTypeConverter.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataTypeConverter.java index 78b395503e700..7c91a506697c1 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataTypeConverter.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataTypeConverter.java @@ -30,11 +30,9 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.FLOAT; import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; import static org.elasticsearch.xpack.esql.core.type.DataType.IP; -import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; import static org.elasticsearch.xpack.esql.core.type.DataType.NULL; import static org.elasticsearch.xpack.esql.core.type.DataType.SHORT; -import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; import static org.elasticsearch.xpack.esql.core.type.DataType.isDateTime; @@ -62,7 +60,7 @@ public static Converter converterFor(DataType from, DataType to) { return DefaultConverter.TO_NULL; } // proper converters - if (to == KEYWORD || to == TEXT) { + if (isString(to)) { return conversionToString(from); } if (to == LONG) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java index 2550e5bdcf515..756000dfbb187 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java @@ -209,7 +209,7 @@ public AggregatorFunctionSupplier supplier(List inputChannels) { if (type == DataType.DOUBLE) { return new CountDistinctDoubleAggregatorFunctionSupplier(inputChannels, precision); } - if (type == DataType.KEYWORD || type == DataType.IP || type == DataType.VERSION || type == DataType.TEXT) { + if (DataType.isString(type) || type == DataType.IP || type == DataType.VERSION) { return new CountDistinctBytesRefAggregatorFunctionSupplier(inputChannels, precision); } throw EsqlIllegalArgumentException.illegalDataType(type); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java index 47d74c71d9cc5..6119b2ce58465 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java @@ -128,7 +128,7 @@ public final AggregatorFunctionSupplier supplier(List inputChannels) { if (type == DataType.IP) { return new MaxIpAggregatorFunctionSupplier(inputChannels); } - if (type == DataType.VERSION || type == DataType.KEYWORD || type == DataType.TEXT) { + if (type == DataType.VERSION || DataType.isString(type)) { return new MaxBytesRefAggregatorFunctionSupplier(inputChannels); } throw EsqlIllegalArgumentException.illegalDataType(type); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java index ce69decca8e81..a1492f79da393 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java @@ -128,7 +128,7 @@ public final AggregatorFunctionSupplier supplier(List inputChannels) { if (type == DataType.IP) { return new MinIpAggregatorFunctionSupplier(inputChannels); } - if (type == DataType.VERSION || type == DataType.KEYWORD || type == DataType.TEXT) { + if (type == DataType.VERSION || DataType.isString(type)) { return new MinBytesRefAggregatorFunctionSupplier(inputChannels); } throw EsqlIllegalArgumentException.illegalDataType(type); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java index 9d815d15accdc..d47ebeab4ca6c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java @@ -155,11 +155,7 @@ public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { if (dataType == DataType.LONG || dataType == DataType.DATETIME) { return new GreatestLongEvaluator.Factory(source(), factories); } - if (dataType == DataType.KEYWORD - || dataType == DataType.TEXT - || dataType == DataType.IP - || dataType == DataType.VERSION - || dataType == DataType.UNSUPPORTED) { + if (DataType.isString(dataType) || dataType == DataType.IP || dataType == DataType.VERSION || dataType == DataType.UNSUPPORTED) { return new GreatestBytesRefEvaluator.Factory(source(), factories); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java index 435a14d0fef33..81c1419dcf788 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java @@ -154,11 +154,7 @@ public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { if (dataType == DataType.LONG || dataType == DataType.DATETIME) { return new LeastLongEvaluator.Factory(source(), factories); } - if (dataType == DataType.KEYWORD - || dataType == DataType.TEXT - || dataType == DataType.IP - || dataType == DataType.VERSION - || dataType == DataType.UNSUPPORTED) { + if (DataType.isString(dataType) || dataType == DataType.IP || dataType == DataType.VERSION || dataType == DataType.UNSUPPORTED) { return new LeastBytesRefEvaluator.Factory(source(), factories); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java index 5401fcf188d4a..06815d738e82c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java @@ -48,7 +48,6 @@ public abstract class AbstractConvertFunction extends UnaryScalarFunction { // the numeric types convert functions need to handle; the other numeric types are converted upstream to one of these private static final List NUMERIC_TYPES = List.of(DataType.INTEGER, DataType.LONG, DataType.UNSIGNED_LONG, DataType.DOUBLE); - public static final List STRING_TYPES = DataType.types().stream().filter(DataType::isString).toList(); 
protected AbstractConvertFunction(Source source, Expression field) { super(source, field); @@ -90,9 +89,9 @@ private static String supportedTypesNames(Set types) { NUMERIC_TYPES.forEach(supportTypes::remove); } - if (types.containsAll(STRING_TYPES)) { + if (types.containsAll(DataType.stringTypes())) { supportedTypesNames.add("string"); - STRING_TYPES.forEach(supportTypes::remove); + DataType.stringTypes().forEach(supportTypes::remove); } supportTypes.forEach(t -> supportedTypesNames.add(t.nameUpper().toLowerCase(Locale.ROOT))); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsMapper.java index d11f5c9b68532..f5704239993f9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsMapper.java @@ -34,7 +34,7 @@ public final ExpressionEvaluator.Factory map(InsensitiveEquals bc, Layout layout var leftEval = toEvaluator(bc.left(), layout); var rightEval = toEvaluator(bc.right(), layout); - if (leftType == DataType.KEYWORD || leftType == DataType.TEXT) { + if (DataType.isString(leftType)) { if (bc.right().foldable() && DataType.isString(rightType)) { BytesRef rightVal = BytesRefs.toBytesRef(bc.right().fold()); Automaton automaton = InsensitiveEquals.automaton(rightVal); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index ca9950a4bfe77..84a41ef040c8e 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -1283,13 +1283,6 @@ public void allMemoryReleased() { } } - /** - * All string types (keyword, text, match_only_text, etc). - */ - protected static DataType[] strings() { - return DataType.types().stream().filter(DataType::isString).toArray(DataType[]::new); - } - /** * Validate that we know the types for all the test cases already created * @param suppliers - list of suppliers before adding in the illegal type combinations @@ -1316,10 +1309,9 @@ private static boolean isAggregation() { */ private static boolean shouldHideSignature(List argTypes, DataType returnType) { for (DataType dt : DataType.UNDER_CONSTRUCTION.keySet()) { - if (returnType == dt) { + if (returnType == dt || argTypes.contains(dt)) { return true; } - return argTypes.contains(dt); } return false; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index b3942a71edadb..2ba175657b6c2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -21,7 +21,6 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.AbstractConvertFunction; import org.elasticsearch.xpack.versionfield.Version; import org.hamcrest.Matcher; @@ -91,7 +90,7 @@ public static List stringCases( List lhsSuppliers = new ArrayList<>(); List rhsSuppliers = new ArrayList<>(); List 
suppliers = new ArrayList<>(); - for (DataType type : AbstractConvertFunction.STRING_TYPES) { + for (DataType type : DataType.stringTypes()) { lhsSuppliers.addAll(stringCases(type)); rhsSuppliers.addAll(stringCases(type)); casesCrossProduct( @@ -760,7 +759,7 @@ public static void forUnaryStrings( Function expectedValue, Function> expectedWarnings ) { - for (DataType type : AbstractConvertFunction.STRING_TYPES) { + for (DataType type : DataType.stringTypes()) { unary( suppliers, expectedEvaluatorToString, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java index d37bc89635c1d..967b4d854c325 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; -import java.util.Arrays; import java.util.LinkedList; import java.util.List; import java.util.Set; @@ -40,8 +39,8 @@ public static Iterable parameters() { Set supported = Set.of(DataType.KEYWORD, DataType.TEXT); List> supportedPerPosition = List.of(supported, supported); List suppliers = new LinkedList<>(); - for (DataType fieldType : validStringDataTypes()) { - for (DataType queryType : validStringDataTypes()) { + for (DataType fieldType : DataType.stringTypes()) { + for (DataType queryType : DataType.stringTypes()) { suppliers.add( new TestCaseSupplier( "<" + fieldType + "-ES field, " + queryType + ">", @@ -67,10 +66,6 @@ private static String matchTypeErrorSupplier(boolean includeOrdinal, List validStringDataTypes() { - return Arrays.stream(DataType.values()).filter(DataType::isString).toList(); - } - private static TestCaseSupplier.TestCase testCase( 
DataType fieldType, String field, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringTests.java index 2dfdb05ec8ecc..b4b4ebcaacde6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringTests.java @@ -19,7 +19,6 @@ import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; -import java.util.Arrays; import java.util.LinkedList; import java.util.List; import java.util.function.Supplier; @@ -36,7 +35,7 @@ public QueryStringTests(@Name("TestCase") Supplier te @ParametersFactory public static Iterable parameters() { List suppliers = new LinkedList<>(); - for (DataType strType : Arrays.stream(DataType.values()).filter(DataType::isString).toList()) { + for (DataType strType : DataType.stringTypes()) { suppliers.add( new TestCaseSupplier( "<" + strType + ">", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java index 46a8086f9479c..57f11331818dc 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java @@ -49,7 +49,7 @@ public static Iterable parameters() { ); // But strings that are shaped like versions do parse to valid versions - for (DataType inputType : AbstractConvertFunction.STRING_TYPES) { + for (DataType inputType : DataType.stringTypes()) { TestCaseSupplier.unary( suppliers, read, diff 
--git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AbstractTrimTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AbstractTrimTests.java index f77a892d8682e..d069f7ffe2298 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AbstractTrimTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AbstractTrimTests.java @@ -21,7 +21,7 @@ public abstract class AbstractTrimTests extends AbstractScalarFunctionTestCase { static Iterable parameters(String name, boolean trimLeading, boolean trimTrailing) { List suppliers = new ArrayList<>(); - for (DataType type : strings()) { + for (DataType type : DataType.stringTypes()) { suppliers.add(new TestCaseSupplier("no whitespace/" + type, List.of(type), () -> { String text = randomAlphaOfLength(8); return testCase(name, type, text, text); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java index bbe92ae4a6618..2ad953c9296b7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java @@ -58,9 +58,7 @@ public static Iterable parameters() { if (rhs == DataType.NULL || DataType.isRepresentable(rhs) == false) { continue; } - boolean lhsIsString = lhs == DataType.KEYWORD || lhs == DataType.TEXT; - boolean rhsIsString = rhs == DataType.KEYWORD || rhs == DataType.TEXT; - if (lhsIsString && rhsIsString) { + if (DataType.isString(lhs) && DataType.isString(rhs)) { continue; } diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithTests.java index 6d086e2626cb6..1b2e9c41cb25c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; -import java.util.Arrays; import java.util.LinkedList; import java.util.List; import java.util.function.Supplier; @@ -33,8 +32,8 @@ public EndsWithTests(@Name("TestCase") Supplier testC @ParametersFactory public static Iterable parameters() { List suppliers = new LinkedList<>(); - for (DataType strType : Arrays.stream(DataType.values()).filter(DataType::isString).toList()) { - for (DataType suffixType : Arrays.stream(DataType.values()).filter(DataType::isString).toList()) { + for (DataType strType : DataType.stringTypes()) { + for (DataType suffixType : DataType.stringTypes()) { suppliers.add( new TestCaseSupplier( "<" + strType + ">, empty <" + suffixType + ">", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java index 207125bed2a19..a10f97c45aa04 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java @@ -35,13 +35,11 @@ public LocateTests(@Name("TestCase") Supplier testCas this.testCase = testCaseSupplier.get(); } - private static final DataType[] STRING_TYPES = 
new DataType[] { DataType.KEYWORD, DataType.TEXT }; - @ParametersFactory public static Iterable parameters() { List suppliers = new ArrayList<>(); - for (DataType strType : STRING_TYPES) { - for (DataType substrType : STRING_TYPES) { + for (DataType strType : DataType.stringTypes()) { + for (DataType substrType : DataType.stringTypes()) { suppliers.add( supplier( "", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java index 5a34d850cffe3..dab2fca212ff4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java @@ -69,7 +69,7 @@ static Iterable parameters(Function escapeString, Supp casesForString(cases, "6 bytes, 2 code points", () -> "❗️", false, escapeString, optionalPattern); casesForString(cases, "100 random code points", () -> randomUnicodeOfCodepointLength(100), true, escapeString, optionalPattern); for (DataType type : DataType.types()) { - if (type == DataType.KEYWORD || type == DataType.TEXT || type == DataType.NULL) { + if (DataType.isString(type) || type == DataType.NULL) { continue; } if (DataType.isRepresentable(type) == false) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithTests.java index 8bc8cf3184a75..60ed3b05ad642 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithTests.java @@ -18,7 +18,6 @@ import 
org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; import java.util.function.Supplier; @@ -32,8 +31,8 @@ public StartsWithTests(@Name("TestCase") Supplier tes @ParametersFactory public static Iterable parameters() { List suppliers = new ArrayList<>(); - for (DataType strType : Arrays.stream(DataType.values()).filter(DataType::isString).toList()) { - for (DataType prefixType : Arrays.stream(DataType.values()).filter(DataType::isString).toList()) { + for (DataType strType : DataType.stringTypes()) { + for (DataType prefixType : DataType.stringTypes()) { suppliers.add(new TestCaseSupplier(List.of(strType, prefixType), () -> { String str = randomAlphaOfLength(5); String prefix = randomAlphaOfLength(5); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverterTests.java index 8ad083683f696..babb9fc8c0bd1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverterTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverterTests.java @@ -62,8 +62,7 @@ public void testCommonTypeNull() { } public void testCommonTypeStrings() { - List STRINGS = Arrays.stream(DataType.values()).filter(DataType::isString).toList(); - for (DataType dataType1 : STRINGS) { + for (DataType dataType1 : DataType.stringTypes()) { for (DataType dataType2 : DataType.values()) { if (dataType2 == NULL) { assertEqualsCommonType(dataType1, NULL, dataType1); From f13e495d765efee410ff9ff250a4fec52a2ed4ff Mon Sep 17 00:00:00 2001 From: Jan Kuipers <148754765+jan-elastic@users.noreply.github.com> Date: Wed, 16 Oct 2024 12:06:12 +0200 Subject: [PATCH 142/449] Fix NPE in AdaptiveAllocationsScalerService (#114880) * Fix NPE in AdaptiveAllocationsScalerService * Update docs/changelog/114880.yaml 
* Delete docs/changelog/114880.yaml --- .../adaptiveallocations/AdaptiveAllocationsScalerService.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java index 1c3a73a409dd1..9624d619ff20a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java @@ -433,7 +433,8 @@ private void processDeploymentStats(GetDeploymentStatsAction.Response statsRespo public boolean maybeStartAllocation(TrainedModelAssignment assignment) { if (assignment.getAdaptiveAllocationsSettings() != null && assignment.getAdaptiveAllocationsSettings().getEnabled() == Boolean.TRUE - && assignment.getAdaptiveAllocationsSettings().getMinNumberOfAllocations() == 0) { + && (assignment.getAdaptiveAllocationsSettings().getMinNumberOfAllocations() == null + || assignment.getAdaptiveAllocationsSettings().getMinNumberOfAllocations() == 0)) { // Prevent against a flurry of scale up requests. 
if (deploymentIdsWithInFlightScaleFromZeroRequests.contains(assignment.getDeploymentId()) == false) { From 0fd58394edfe4300f78535ddb5e2e847f243c85e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Iv=C3=A1n=20Cea=20Fontenla?= Date: Wed, 16 Oct 2024 13:19:56 +0200 Subject: [PATCH 143/449] ESQL: Fix MvPercentileTests precision issues (#114844) Fixes https://github.com/elastic/elasticsearch/issues/114588 Fixes https://github.com/elastic/elasticsearch/issues/114587 Fixes https://github.com/elastic/elasticsearch/issues/114586 Fixes https://github.com/elastic/elasticsearch/issues/114585 Fixes https://github.com/elastic/elasticsearch/issues/113008 Fixes https://github.com/elastic/elasticsearch/issues/113007 Fixes https://github.com/elastic/elasticsearch/issues/113006 Fixes https://github.com/elastic/elasticsearch/issues/113005 Fixed the long precision issue by allowing a +/-1 range. Also made a minor refactor to simplify using different matchers for different types. --- .../scalar/multivalue/MvPercentileTests.java | 47 ++++++++++++------- 1 file changed, 30 insertions(+), 17 deletions(-) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileTests.java index 29cc959e6a943..0a419d44e3448 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.MultivalueTestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; import java.math.BigDecimal; import 
java.util.ArrayList; @@ -28,6 +29,7 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; @@ -375,27 +377,25 @@ private static TestCaseSupplier makeSupplier( var values = (List) fieldTypedData.data(); var percentile = ((Number) percentileTypedData.data()).doubleValue(); - var expected = calculatePercentile(values, percentile); + var expectedMatcher = makePercentileMatcher(values, percentile); return new TestCaseSupplier.TestCase( List.of(fieldTypedData, percentileTypedData), evaluatorString(fieldSupplier.type(), percentileSupplier.type()), fieldSupplier.type(), - expected instanceof Double expectedDouble - ? closeTo(expectedDouble, Math.abs(expectedDouble * 0.0000001)) - : equalTo(expected) + expectedMatcher ); } ); } - private static Number calculatePercentile(List rawValues, double percentile) { + private static Matcher makePercentileMatcher(List rawValues, double percentile) { if (rawValues.isEmpty() || percentile < 0 || percentile > 100) { - return null; + return nullValue(); } if (rawValues.size() == 1) { - return rawValues.get(0); + return equalTo(rawValues.get(0)); } int valueCount = rawValues.size(); @@ -407,49 +407,62 @@ private static Number calculatePercentile(List rawValues, double percent if (rawValues.get(0) instanceof Integer) { var values = rawValues.stream().mapToInt(Number::intValue).sorted().toArray(); + int expected; if (percentile == 0) { - return values[0]; + expected = values[0]; } else if (percentile == 100) { - return values[valueCount - 1]; + expected = values[valueCount - 1]; } else { assert lowerIndex >= 0 && upperIndex < valueCount; var difference = (long) values[upperIndex] - values[lowerIndex]; - 
return values[lowerIndex] + (int) (fraction * difference); + expected = values[lowerIndex] + (int) (fraction * difference); } + + return equalTo(expected); } if (rawValues.get(0) instanceof Long) { var values = rawValues.stream().mapToLong(Number::longValue).sorted().toArray(); + long expected; if (percentile == 0) { - return values[0]; + expected = values[0]; } else if (percentile == 100) { - return values[valueCount - 1]; + expected = values[valueCount - 1]; } else { assert lowerIndex >= 0 && upperIndex < valueCount; - return calculatePercentile(fraction, new BigDecimal(values[lowerIndex]), new BigDecimal(values[upperIndex])).longValue(); + expected = calculatePercentile(fraction, BigDecimal.valueOf(values[lowerIndex]), BigDecimal.valueOf(values[upperIndex])) + .longValue(); } + + // Double*bigLong may lose precision, we allow a small range + return anyOf(equalTo(Math.min(expected, expected - 1)), equalTo(expected), equalTo(Math.max(expected, expected + 1))); } if (rawValues.get(0) instanceof Double) { var values = rawValues.stream().mapToDouble(Number::doubleValue).sorted().toArray(); + double expected; if (percentile == 0) { - return values[0]; + expected = values[0]; } else if (percentile == 100) { - return values[valueCount - 1]; + expected = values[valueCount - 1]; } else { assert lowerIndex >= 0 && upperIndex < valueCount; - return calculatePercentile(fraction, new BigDecimal(values[lowerIndex]), new BigDecimal(values[upperIndex])).doubleValue(); + expected = calculatePercentile(fraction, new BigDecimal(values[lowerIndex]), new BigDecimal(values[upperIndex])) + .doubleValue(); } + + return closeTo(expected, Math.abs(expected * 0.0000001)); } throw new IllegalArgumentException("Unsupported type: " + rawValues.get(0).getClass()); } private static BigDecimal calculatePercentile(double fraction, BigDecimal lowerValue, BigDecimal upperValue) { - return lowerValue.add(new BigDecimal(fraction).multiply(upperValue.subtract(lowerValue))); + var difference = 
upperValue.subtract(lowerValue); + return lowerValue.add(new BigDecimal(fraction).multiply(difference)); } private static TestCaseSupplier.TypedData percentileWithType(Number value, DataType type) { From 5faf0cdd90852b36f8978b2e20a53c4f50caa94a Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Wed, 16 Oct 2024 13:30:18 +0100 Subject: [PATCH 144/449] Remove the min_compatible_shard_node option and associated classes (#114713) Any similar functionality in the future should use capabilities instead --- .../SearchWithMinCompatibleSearchNodeIT.java | 144 ----- .../resources/rest-api-spec/api/search.json | 4 - .../elasticsearch/ElasticsearchException.java | 6 - .../org/elasticsearch/TransportVersions.java | 1 + .../search/AbstractSearchAsyncAction.java | 32 +- .../search/CanMatchPreFilterSearchPhase.java | 32 +- .../action/search/SearchRequest.java | 41 +- .../search/VersionMismatchException.java | 27 - .../rest/action/search/RestSearchAction.java | 11 +- .../ExceptionSerializationTests.java | 3 +- .../SearchQueryThenFetchAsyncActionTests.java | 495 ------------------ .../action/search/SearchRequestTests.java | 28 - .../eql/plugin/TransportEqlSearchAction.java | 22 +- .../fleet/rest/RestFleetSearchAction.java | 12 +- .../xpack/ql/plugin/TransportActionUtils.java | 81 --- .../xpack/sql/execution/search/Querier.java | 4 +- .../sql/plugin/TransportSqlQueryAction.java | 23 +- 17 files changed, 29 insertions(+), 937 deletions(-) delete mode 100644 qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/SearchWithMinCompatibleSearchNodeIT.java delete mode 100644 server/src/main/java/org/elasticsearch/action/search/VersionMismatchException.java delete mode 100644 x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plugin/TransportActionUtils.java diff --git a/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/SearchWithMinCompatibleSearchNodeIT.java b/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/SearchWithMinCompatibleSearchNodeIT.java deleted 
file mode 100644 index a391ee5a3bd7b..0000000000000 --- a/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/SearchWithMinCompatibleSearchNodeIT.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ -package org.elasticsearch.backwards; - -import org.apache.http.HttpHost; -import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; -import org.elasticsearch.client.ResponseException; -import org.elasticsearch.client.RestClient; -import org.elasticsearch.core.Strings; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.test.rest.ESRestTestCase; -import org.elasticsearch.test.rest.ObjectPath; -import org.junit.Before; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; - -public class SearchWithMinCompatibleSearchNodeIT extends ESRestTestCase { - - private static final String BWC_NODES_VERSION = System.getProperty("tests.bwc_nodes_version"); - private static final String NEW_NODES_VERSION = System.getProperty("tests.new_nodes_version"); - - private static String index = "test_min_version"; - private static int numShards; - private static int numReplicas = 1; - private static int numDocs; - private static MixedClusterTestNodes nodes; - private static List allNodes; - - @Before - public void prepareTestData() throws IOException { - nodes = MixedClusterTestNodes.buildNodes(client(), BWC_NODES_VERSION); - numShards = 
nodes.size(); - numDocs = randomIntBetween(numShards, 16); - allNodes = new ArrayList<>(); - allNodes.addAll(nodes.getBWCNodes()); - allNodes.addAll(nodes.getNewNodes()); - - if (client().performRequest(new Request("HEAD", "/" + index)).getStatusLine().getStatusCode() == 404) { - createIndex(index, indexSettings(numShards, numReplicas).build()); - for (int i = 0; i < numDocs; i++) { - Request request = new Request("PUT", index + "/_doc/" + i); - request.setJsonEntity("{\"test\": \"test_" + randomAlphaOfLength(2) + "\"}"); - assertOK(client().performRequest(request)); - } - ensureGreen(index); - } - } - - public void testMinVersionAsNewVersion() throws Exception { - try ( - RestClient client = buildClient( - restClientSettings(), - allNodes.stream().map(MixedClusterTestNode::publishAddress).toArray(HttpHost[]::new) - ) - ) { - Request newVersionRequest = new Request( - "POST", - index + "/_search?min_compatible_shard_node=" + NEW_NODES_VERSION + "&ccs_minimize_roundtrips=false" - ); - assertBusy(() -> { - ResponseException responseException = expectThrows(ResponseException.class, () -> client.performRequest(newVersionRequest)); - assertThat( - responseException.getResponse().getStatusLine().getStatusCode(), - equalTo(RestStatus.INTERNAL_SERVER_ERROR.getStatus()) - ); - assertThat(responseException.getMessage(), containsString(""" - {"error":{"root_cause":[],"type":"search_phase_execution_exception\"""")); - assertThat(responseException.getMessage(), containsString(Strings.format(""" - caused_by":{"type":"version_mismatch_exception",\ - "reason":"One of the shards is incompatible with the required minimum version [%s]\"""", NEW_NODES_VERSION))); - }); - } - } - - public void testMinVersionAsOldVersion() throws Exception { - try ( - RestClient client = buildClient( - restClientSettings(), - allNodes.stream().map(MixedClusterTestNode::publishAddress).toArray(HttpHost[]::new) - ) - ) { - Request oldVersionRequest = new Request( - "POST", - index + 
"/_search?min_compatible_shard_node=" + BWC_NODES_VERSION + "&ccs_minimize_roundtrips=false" - ); - oldVersionRequest.setJsonEntity(""" - {"query":{"match_all":{}},"_source":false}"""); - assertBusy(() -> { - Response response = client.performRequest(oldVersionRequest); - ObjectPath responseObject = ObjectPath.createFromResponse(response); - Map shardsResult = responseObject.evaluate("_shards"); - assertThat(shardsResult.get("total"), equalTo(numShards)); - assertThat(shardsResult.get("successful"), equalTo(numShards)); - assertThat(shardsResult.get("failed"), equalTo(0)); - Map hitsResult = responseObject.evaluate("hits.total"); - assertThat(hitsResult.get("value"), equalTo(numDocs)); - assertThat(hitsResult.get("relation"), equalTo("eq")); - }); - } - } - - public void testCcsMinimizeRoundtripsIsFalse() throws Exception { - try ( - RestClient client = buildClient( - restClientSettings(), - allNodes.stream().map(MixedClusterTestNode::publishAddress).toArray(HttpHost[]::new) - ) - ) { - String version = randomBoolean() ? 
NEW_NODES_VERSION : BWC_NODES_VERSION; - - Request request = new Request( - "POST", - index + "/_search?min_compatible_shard_node=" + version + "&ccs_minimize_roundtrips=true" - ); - assertBusy(() -> { - ResponseException responseException = expectThrows(ResponseException.class, () -> client.performRequest(request)); - assertThat(responseException.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); - assertThat(responseException.getMessage(), containsString(""" - {"error":{"root_cause":[{"type":"action_request_validation_exception"\ - """)); - assertThat( - responseException.getMessage(), - containsString( - "\"reason\":\"Validation Failed: 1: " - + "[ccs_minimize_roundtrips] cannot be [true] when setting a minimum compatible shard version;\"" - ) - ); - }); - } - } -} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json index b5dc4d62a2f0f..25b4efd9c4c37 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json @@ -237,10 +237,6 @@ "description":"Indicates whether hits.total should be rendered as an integer or an object in the rest search response", "default":false }, - "min_compatible_shard_node":{ - "type":"string", - "description":"The minimum compatible version that all shards involved in search should have for this request to be successful" - }, "include_named_queries_score":{ "type": "boolean", "description":"Indicates whether hit.matched_queries should be rendered as a map that includes the name of the matched query associated with its score (true) or as an array containing the name of the matched queries (false)", diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index 5d04e31069b1c..4119e12d45f6c 100644 --- 
a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -1819,12 +1819,6 @@ private enum ElasticsearchExceptionHandle { 160, TransportVersions.V_7_10_0 ), - VERSION_MISMATCH_EXCEPTION( - org.elasticsearch.action.search.VersionMismatchException.class, - org.elasticsearch.action.search.VersionMismatchException::new, - 161, - TransportVersions.V_7_12_0 - ), AUTHENTICATION_PROCESSING_ERROR( org.elasticsearch.ElasticsearchAuthenticationProcessingError.class, org.elasticsearch.ElasticsearchAuthenticationProcessingError::new, diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index f89c5a65693f2..d1d423dcc5405 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -246,6 +246,7 @@ static TransportVersion def(int id) { public static final TransportVersion ESQL_PER_AGGREGATE_FILTER = def(8_770_00_0); public static final TransportVersion ML_INFERENCE_ATTACH_TO_EXISTSING_DEPLOYMENT = def(8_771_00_0); public static final TransportVersion CONVERT_FAILURE_STORE_OPTIONS_TO_SELECTOR_OPTIONS_INTERNALLY = def(8_772_00_0); + public static final TransportVersion REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_773_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index caa7453185575..0c585c705dcd0 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -15,7 +15,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.TransportVersion; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.OriginalIndices; @@ -234,15 +233,6 @@ public final void run() { } if (shardsIts.size() > 0) { doCheckNoMissingShards(getName(), request, shardsIts); - Version version = request.minCompatibleShardNode(); - if (version != null && Version.CURRENT.minimumCompatibilityVersion().equals(version) == false) { - if (checkMinimumVersion(shardsIts) == false) { - throw new VersionMismatchException( - "One of the shards is incompatible with the required minimum version [{}]", - request.minCompatibleShardNode() - ); - } - } for (int i = 0; i < shardsIts.size(); i++) { final SearchShardIterator shardRoutings = shardsIts.get(i); assert shardRoutings.skip() == false; @@ -260,21 +250,6 @@ void skipShard(SearchShardIterator iterator) { successfulShardExecution(iterator); } - private boolean checkMinimumVersion(GroupShardsIterator shardsIts) { - for (SearchShardIterator it : shardsIts) { - if (it.getTargetNodeIds().isEmpty() == false) { - boolean isCompatible = it.getTargetNodeIds().stream().anyMatch(nodeId -> { - Transport.Connection conn = getConnection(it.getClusterAlias(), nodeId); - return conn == null || conn.getNode().getVersion().onOrAfter(request.minCompatibleShardNode()); - }); - if (isCompatible == false) { - return false; - } - } - } - 
return true; - } - private static boolean assertExecuteOnStartThread() { // Ensure that the current code has the following stacktrace: // AbstractSearchAsyncAction#start -> AbstractSearchAsyncAction#executePhase -> AbstractSearchAsyncAction#performPhaseOnShard @@ -761,12 +736,7 @@ final void onPhaseDone() { // as a tribute to @kimchy aka. finishHim() @Override public final Transport.Connection getConnection(String clusterAlias, String nodeId) { - Transport.Connection conn = nodeIdToConnection.apply(clusterAlias, nodeId); - Version minVersion = request.minCompatibleShardNode(); - if (minVersion != null && conn != null && conn.getNode().getVersion().before(minVersion)) { - throw new VersionMismatchException("One of the shards is incompatible with the required minimum version [{}]", minVersion); - } - return conn; + return nodeIdToConnection.apply(clusterAlias, nodeId); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java index 8ce2cc7b6b19e..8dcfbf5f070a1 100644 --- a/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java @@ -11,7 +11,6 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.FixedBitSet; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.common.util.Maps; @@ -133,15 +132,6 @@ private static boolean assertSearchCoordinationThread() { public void run() { assert assertSearchCoordinationThread(); checkNoMissingShards(); - Version version = request.minCompatibleShardNode(); - if (version != null && Version.CURRENT.minimumCompatibilityVersion().equals(version) == false) { - if (checkMinimumVersion(shardsIts) == false) { - throw new 
VersionMismatchException( - "One of the shards is incompatible with the required minimum version [{}]", - request.minCompatibleShardNode() - ); - } - } runCoordinatorRewritePhase(); } @@ -378,21 +368,6 @@ public CanMatchNodeRequest.Shard buildShardLevelRequest(SearchShardIterator shar ); } - private boolean checkMinimumVersion(GroupShardsIterator shardsIts) { - for (SearchShardIterator it : shardsIts) { - if (it.getTargetNodeIds().isEmpty() == false) { - boolean isCompatible = it.getTargetNodeIds().stream().anyMatch(nodeId -> { - Transport.Connection conn = getConnection(new SendingTarget(it.getClusterAlias(), nodeId)); - return conn == null || conn.getNode().getVersion().onOrAfter(request.minCompatibleShardNode()); - }); - if (isCompatible == false) { - return false; - } - } - } - return true; - } - @Override public void start() { if (getNumShards() == 0) { @@ -421,12 +396,7 @@ public void onPhaseFailure(String msg, Exception cause) { } public Transport.Connection getConnection(SendingTarget sendingTarget) { - Transport.Connection conn = nodeIdToConnection.apply(sendingTarget.clusterAlias, sendingTarget.nodeId); - Version minVersion = request.minCompatibleShardNode(); - if (minVersion != null && conn != null && conn.getNode().getVersion().before(minVersion)) { - throw new VersionMismatchException("One of the shards is incompatible with the required minimum version [{}]", minVersion); - } - return conn; + return nodeIdToConnection.apply(sendingTarget.clusterAlias, sendingTarget.nodeId); } private int getNumShards() { diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java index 9961c3770fa86..5aec2bcd04b26 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java @@ -92,9 +92,6 @@ public class SearchRequest extends ActionRequest implements IndicesRequest.Repla 
private boolean ccsMinimizeRoundtrips; - @Nullable - private final Version minCompatibleShardNode; - public static final IndicesOptions DEFAULT_INDICES_OPTIONS = IndicesOptions.strictExpandOpenAndForbidClosedIgnoreThrottled(); private IndicesOptions indicesOptions = DEFAULT_INDICES_OPTIONS; @@ -112,15 +109,10 @@ public class SearchRequest extends ActionRequest implements IndicesRequest.Repla private boolean forceSyntheticSource = false; public SearchRequest() { - this((Version) null); - } - - public SearchRequest(Version minCompatibleShardNode) { this.localClusterAlias = null; this.absoluteStartMillis = DEFAULT_ABSOLUTE_START_MILLIS; this.finalReduce = true; - this.minCompatibleShardNode = minCompatibleShardNode; - this.ccsMinimizeRoundtrips = minCompatibleShardNode == null; + this.ccsMinimizeRoundtrips = true; } /** @@ -219,7 +211,6 @@ private SearchRequest( this.localClusterAlias = localClusterAlias; this.absoluteStartMillis = absoluteStartMillis; this.finalReduce = finalReduce; - this.minCompatibleShardNode = searchRequest.minCompatibleShardNode; this.waitForCheckpoints = searchRequest.waitForCheckpoints; this.waitForCheckpointsTimeout = searchRequest.waitForCheckpointsTimeout; this.forceSyntheticSource = searchRequest.forceSyntheticSource; @@ -263,10 +254,8 @@ public SearchRequest(StreamInput in) throws IOException { finalReduce = true; } ccsMinimizeRoundtrips = in.readBoolean(); - if (in.readBoolean()) { - minCompatibleShardNode = Version.readVersion(in); - } else { - minCompatibleShardNode = null; + if (in.getTransportVersion().before(TransportVersions.REMOVE_MIN_COMPATIBLE_SHARD_NODE) && in.readBoolean()) { + Version.readVersion(in); // and drop on the floor } waitForCheckpoints = in.readMap(StreamInput::readLongArray); waitForCheckpointsTimeout = in.readTimeValue(); @@ -302,9 +291,8 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(finalReduce); } out.writeBoolean(ccsMinimizeRoundtrips); - out.writeBoolean(minCompatibleShardNode 
!= null); - if (minCompatibleShardNode != null) { - Version.writeVersion(minCompatibleShardNode, out); + if (out.getTransportVersion().before(TransportVersions.REMOVE_MIN_COMPATIBLE_SHARD_NODE)) { + out.writeBoolean(false); } out.writeMap(waitForCheckpoints, StreamOutput::writeLongArray); out.writeTimeValue(waitForCheckpointsTimeout); @@ -351,14 +339,6 @@ public ActionRequestValidationException validate() { validationException = addValidationError("[preference] cannot be used with point in time", validationException); } } - if (minCompatibleShardNode() != null) { - if (isCcsMinimizeRoundtrips()) { - validationException = addValidationError( - "[ccs_minimize_roundtrips] cannot be [true] when setting a minimum compatible " + "shard version", - validationException - ); - } - } if (pointInTimeBuilder() != null && waitForCheckpoints.isEmpty() == false) { validationException = addValidationError("using [point in time] is not allowed with wait_for_checkpoints", validationException); @@ -401,15 +381,6 @@ long getAbsoluteStartMillis() { return absoluteStartMillis; } - /** - * Returns the minimum compatible shard version the search request needs to run on. If the version is null, then there are no - * restrictions imposed on shards versions part of this search. - */ - @Nullable - public Version minCompatibleShardNode() { - return minCompatibleShardNode; - } - /** * Sets the indices the search will be executed on. 
*/ @@ -818,7 +789,6 @@ public boolean equals(Object o) { && Objects.equals(localClusterAlias, that.localClusterAlias) && absoluteStartMillis == that.absoluteStartMillis && ccsMinimizeRoundtrips == that.ccsMinimizeRoundtrips - && Objects.equals(minCompatibleShardNode, that.minCompatibleShardNode) && forceSyntheticSource == that.forceSyntheticSource; } @@ -840,7 +810,6 @@ public int hashCode() { localClusterAlias, absoluteStartMillis, ccsMinimizeRoundtrips, - minCompatibleShardNode, forceSyntheticSource ); } diff --git a/server/src/main/java/org/elasticsearch/action/search/VersionMismatchException.java b/server/src/main/java/org/elasticsearch/action/search/VersionMismatchException.java deleted file mode 100644 index 69ea4484ae691..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/search/VersionMismatchException.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.action.search; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.io.stream.StreamInput; - -import java.io.IOException; - -public class VersionMismatchException extends ElasticsearchException { - - public VersionMismatchException(String msg, Object... 
args) { - super(msg, args); - } - - public VersionMismatchException(StreamInput in) throws IOException { - super(in); - } - -} diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index af60979dfe169..80a85d3b9b748 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -10,7 +10,6 @@ package org.elasticsearch.rest.action.search; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.TransportSearchAction; @@ -100,12 +99,10 @@ public Set supportedCapabilities() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - SearchRequest searchRequest; - if (request.hasParam("min_compatible_shard_node")) { - searchRequest = new SearchRequest(Version.fromString(request.param("min_compatible_shard_node"))); - } else { - searchRequest = new SearchRequest(); - } + SearchRequest searchRequest = new SearchRequest(); + // access the BwC param, but just drop it + // this might be set by old clients + request.param("min_compatible_shard_node"); /* * We have to pull out the call to `source().size(size)` because * _update_by_query and _delete_by_query uses this same parsing diff --git a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index 31739850e2d35..2c6be01c851e4 100644 --- a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -19,7 +19,6 @@ import 
org.elasticsearch.action.bulk.IndexDocFailureStoreStatus; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.action.search.VersionMismatchException; import org.elasticsearch.action.support.replication.ReplicationOperation; import org.elasticsearch.client.internal.AbstractClientHeadersTestCase; import org.elasticsearch.cluster.action.shard.ShardStateAction; @@ -816,7 +815,7 @@ public void testIds() { ids.put(158, PeerRecoveryNotFound.class); ids.put(159, NodeHealthCheckFailureException.class); ids.put(160, NoSeedNodeLeftException.class); - ids.put(161, VersionMismatchException.class); + ids.put(161, null); // was org.elasticsearch.action.search.VersionMismatchException.class ids.put(162, ElasticsearchAuthenticationProcessingError.class); ids.put(163, RepositoryConflictException.class); ids.put(164, VersionConflictException.class); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java index b63c88f623e21..d279fa5030a8c 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java @@ -13,26 +13,18 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TotalHits; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; -import org.elasticsearch.cluster.node.VersionInformation; import 
org.elasticsearch.cluster.routing.GroupShardsIterator; -import org.elasticsearch.cluster.routing.RecoverySource; -import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.lucene.grouping.TopFieldGroups; import org.elasticsearch.search.DocValueFormat; @@ -47,24 +39,17 @@ import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalAggregationTestCase; -import org.elasticsearch.test.VersionUtils; -import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.transport.Transport; -import java.util.ArrayList; import java.util.Collections; -import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; -import static java.util.Collections.singletonList; -import static org.elasticsearch.test.VersionUtils.allVersions; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; public class SearchQueryThenFetchAsyncActionTests extends ESTestCase { @@ -257,484 +242,4 @@ public void run() { assertThat(((FieldDoc) phase.sortedTopDocs().scoreDocs()[0]).fields[0], equalTo(0)); } } - - public void testMinimumVersionSameAsNewVersion() throws Exception { - 
var newVersion = VersionInformation.CURRENT; - var oldVersion = new VersionInformation( - VersionUtils.randomCompatibleVersion(random(), VersionUtils.getPreviousVersion()), - IndexVersions.MINIMUM_COMPATIBLE, - IndexVersionUtils.randomCompatibleVersion(random()) - ); - testMixedVersionsShardsSearch(newVersion, oldVersion, newVersion.nodeVersion()); - } - - public void testMinimumVersionBetweenNewAndOldVersion() throws Exception { - var oldVersion = new VersionInformation( - VersionUtils.getFirstVersion(), - IndexVersions.MINIMUM_COMPATIBLE, - IndexVersionUtils.randomCompatibleVersion(random()) - ); - - var newVersion = new VersionInformation( - VersionUtils.maxCompatibleVersion(VersionUtils.getFirstVersion()), - IndexVersions.MINIMUM_COMPATIBLE, - IndexVersion.current() - ); - - var minVersion = VersionUtils.randomVersionBetween( - random(), - allVersions().get(allVersions().indexOf(oldVersion.nodeVersion()) + 1), - newVersion.nodeVersion() - ); - - testMixedVersionsShardsSearch(newVersion, oldVersion, minVersion); - } - - private void testMixedVersionsShardsSearch(VersionInformation oldVersion, VersionInformation newVersion, Version minVersion) - throws Exception { - final TransportSearchAction.SearchTimeProvider timeProvider = new TransportSearchAction.SearchTimeProvider( - 0, - System.nanoTime(), - System::nanoTime - ); - int numConcurrent = randomIntBetween(1, 4); - - Map lookup = new ConcurrentHashMap<>(); - DiscoveryNode newVersionNode = DiscoveryNodeUtils.builder("node1").version(newVersion).build(); - DiscoveryNode oldVersionNode = DiscoveryNodeUtils.builder("node2").version(oldVersion).build(); - lookup.put("node1", new SearchAsyncActionTests.MockConnection(newVersionNode)); - lookup.put("node2", new SearchAsyncActionTests.MockConnection(oldVersionNode)); - - OriginalIndices idx = new OriginalIndices(new String[] { "idx" }, SearchRequest.DEFAULT_INDICES_OPTIONS); - ArrayList list = new ArrayList<>(); - ShardRouting routingNewVersionShard = 
ShardRouting.newUnassigned( - new ShardId(new Index("idx", "_na_"), 0), - true, - RecoverySource.EmptyStoreRecoverySource.INSTANCE, - new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foobar"), - ShardRouting.Role.DEFAULT - ); - routingNewVersionShard = routingNewVersionShard.initialize(newVersionNode.getId(), "p0", 0); - routingNewVersionShard.started(); - list.add(new SearchShardIterator(null, new ShardId(new Index("idx", "_na_"), 0), singletonList(routingNewVersionShard), idx)); - - ShardRouting routingOldVersionShard = ShardRouting.newUnassigned( - new ShardId(new Index("idx", "_na_"), 1), - true, - RecoverySource.EmptyStoreRecoverySource.INSTANCE, - new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foobar"), - ShardRouting.Role.DEFAULT - ); - routingOldVersionShard = routingOldVersionShard.initialize(oldVersionNode.getId(), "p1", 0); - routingOldVersionShard.started(); - list.add(new SearchShardIterator(null, new ShardId(new Index("idx", "_na_"), 1), singletonList(routingOldVersionShard), idx)); - - GroupShardsIterator shardsIter = new GroupShardsIterator<>(list); - final SearchRequest searchRequest = new SearchRequest(minVersion); - searchRequest.setMaxConcurrentShardRequests(numConcurrent); - searchRequest.setBatchedReduceSize(2); - searchRequest.source(new SearchSourceBuilder().size(1)); - searchRequest.allowPartialSearchResults(false); - - SearchTransportService searchTransportService = new SearchTransportService(null, null, null); - SearchPhaseController controller = new SearchPhaseController((t, r) -> InternalAggregationTestCase.emptyReduceContextBuilder()); - SearchTask task = new SearchTask(0, "n/a", "n/a", () -> "test", null, Collections.emptyMap()); - try ( - QueryPhaseResultConsumer resultConsumer = new QueryPhaseResultConsumer( - searchRequest, - EsExecutors.DIRECT_EXECUTOR_SERVICE, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - controller, - task::isCancelled, - task.getProgressListener(), - shardsIter.size(), - exc -> {} - ) - 
) { - final List responses = new ArrayList<>(); - SearchQueryThenFetchAsyncAction newSearchAsyncAction = new SearchQueryThenFetchAsyncAction( - logger, - null, - searchTransportService, - (clusterAlias, node) -> lookup.get(node), - Collections.singletonMap("_na_", AliasFilter.EMPTY), - Collections.emptyMap(), - EsExecutors.DIRECT_EXECUTOR_SERVICE, - resultConsumer, - searchRequest, - new ActionListener<>() { - @Override - public void onFailure(Exception e) { - responses.add(e); - } - - public void onResponse(SearchResponse response) { - responses.add(response); - } - - ; - }, - shardsIter, - timeProvider, - new ClusterState.Builder(new ClusterName("test")).build(), - task, - SearchResponse.Clusters.EMPTY, - null - ); - - newSearchAsyncAction.start(); - assertThat(responses, hasSize(1)); - assertThat(responses.get(0), instanceOf(SearchPhaseExecutionException.class)); - SearchPhaseExecutionException e = (SearchPhaseExecutionException) responses.get(0); - assertThat(e.getCause(), instanceOf(VersionMismatchException.class)); - assertThat( - e.getCause().getMessage(), - equalTo("One of the shards is incompatible with the required minimum version [" + minVersion + "]") - ); - } - } - - public void testMinimumVersionSameAsOldVersion() throws Exception { - var newVersion = VersionInformation.CURRENT; - var oldVersion = new VersionInformation( - VersionUtils.randomCompatibleVersion(random(), VersionUtils.getPreviousVersion()), - IndexVersions.MINIMUM_COMPATIBLE, - IndexVersionUtils.randomCompatibleVersion(random()) - ); - Version minVersion = oldVersion.nodeVersion(); - - final TransportSearchAction.SearchTimeProvider timeProvider = new TransportSearchAction.SearchTimeProvider( - 0, - System.nanoTime(), - System::nanoTime - ); - AtomicInteger successfulOps = new AtomicInteger(); - - Map lookup = new ConcurrentHashMap<>(); - DiscoveryNode newVersionNode = DiscoveryNodeUtils.builder("node1").version(newVersion).build(); - DiscoveryNode oldVersionNode = 
DiscoveryNodeUtils.builder("node2").version(oldVersion).build(); - lookup.put("node1", new SearchAsyncActionTests.MockConnection(newVersionNode)); - lookup.put("node2", new SearchAsyncActionTests.MockConnection(oldVersionNode)); - - OriginalIndices idx = new OriginalIndices(new String[] { "idx" }, SearchRequest.DEFAULT_INDICES_OPTIONS); - ArrayList list = new ArrayList<>(); - ShardRouting routingNewVersionShard = ShardRouting.newUnassigned( - new ShardId(new Index("idx", "_na_"), 0), - true, - RecoverySource.EmptyStoreRecoverySource.INSTANCE, - new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foobar"), - ShardRouting.Role.DEFAULT - ); - routingNewVersionShard = routingNewVersionShard.initialize(newVersionNode.getId(), "p0", 0); - routingNewVersionShard.started(); - list.add(new SearchShardIterator(null, new ShardId(new Index("idx", "_na_"), 0), singletonList(routingNewVersionShard), idx)); - - ShardRouting routingOldVersionShard = ShardRouting.newUnassigned( - new ShardId(new Index("idx", "_na_"), 1), - true, - RecoverySource.EmptyStoreRecoverySource.INSTANCE, - new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foobar"), - ShardRouting.Role.DEFAULT - ); - routingOldVersionShard = routingOldVersionShard.initialize(oldVersionNode.getId(), "p1", 0); - routingOldVersionShard.started(); - list.add(new SearchShardIterator(null, new ShardId(new Index("idx", "_na_"), 1), singletonList(routingOldVersionShard), idx)); - - GroupShardsIterator shardsIter = new GroupShardsIterator<>(list); - final SearchRequest searchRequest = new SearchRequest(minVersion); - searchRequest.allowPartialSearchResults(false); - searchRequest.source(new SearchSourceBuilder().size(1).sort(SortBuilders.fieldSort("timestamp"))); - - SearchTransportService searchTransportService = new SearchTransportService(null, null, null) { - @Override - public void sendExecuteQuery( - Transport.Connection connection, - ShardSearchRequest request, - SearchTask task, - ActionListener listener - ) { - 
int shardId = request.shardId().id(); - QuerySearchResult queryResult = new QuerySearchResult( - new ShardSearchContextId("N/A", 123), - new SearchShardTarget("node1", new ShardId("idx", "na", shardId), null), - null - ); - try { - SortField sortField = new SortField("timestamp", SortField.Type.LONG); - if (shardId == 0) { - queryResult.topDocs( - new TopDocsAndMaxScore( - new TopFieldDocs( - new TotalHits(1, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), - new FieldDoc[] { new FieldDoc(randomInt(1000), Float.NaN, new Object[] { shardId }) }, - new SortField[] { sortField } - ), - Float.NaN - ), - new DocValueFormat[] { DocValueFormat.RAW } - ); - } else if (shardId == 1) { - queryResult.topDocs( - new TopDocsAndMaxScore( - new TopFieldDocs( - new TotalHits(1, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), - new FieldDoc[] { new FieldDoc(randomInt(1000), Float.NaN, new Object[] { shardId }) }, - new SortField[] { sortField } - ), - Float.NaN - ), - new DocValueFormat[] { DocValueFormat.RAW } - ); - } - queryResult.from(0); - queryResult.size(1); - successfulOps.incrementAndGet(); - queryResult.incRef(); - new Thread(() -> ActionListener.respondAndRelease(listener, queryResult)).start(); - } finally { - queryResult.decRef(); - } - } - }; - SearchPhaseController controller = new SearchPhaseController((t, r) -> InternalAggregationTestCase.emptyReduceContextBuilder()); - SearchTask task = new SearchTask(0, "n/a", "n/a", () -> "test", null, Collections.emptyMap()); - try ( - QueryPhaseResultConsumer resultConsumer = new QueryPhaseResultConsumer( - searchRequest, - EsExecutors.DIRECT_EXECUTOR_SERVICE, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - controller, - task::isCancelled, - task.getProgressListener(), - shardsIter.size(), - exc -> {} - ) - ) { - CountDownLatch latch = new CountDownLatch(1); - SearchQueryThenFetchAsyncAction action = new SearchQueryThenFetchAsyncAction( - logger, - null, - searchTransportService, - (clusterAlias, node) -> lookup.get(node), - 
Collections.singletonMap("_na_", AliasFilter.EMPTY), - Collections.emptyMap(), - EsExecutors.DIRECT_EXECUTOR_SERVICE, - resultConsumer, - searchRequest, - null, - shardsIter, - timeProvider, - new ClusterState.Builder(new ClusterName("test")).build(), - task, - SearchResponse.Clusters.EMPTY, - null - ) { - @Override - protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { - return new SearchPhase("test") { - @Override - public void run() { - latch.countDown(); - } - }; - } - }; - - action.start(); - latch.await(); - assertThat(successfulOps.get(), equalTo(2)); - SearchPhaseController.ReducedQueryPhase phase = action.results.reduce(); - assertThat(phase.numReducePhases(), greaterThanOrEqualTo(1)); - assertThat(phase.totalHits().value, equalTo(2L)); - assertThat(phase.totalHits().relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); - } - } - - public void testMinimumVersionShardDuringPhaseExecution() throws Exception { - var newVersion = VersionInformation.CURRENT; - var oldVersion = new VersionInformation( - VersionUtils.randomCompatibleVersion(random(), VersionUtils.getPreviousVersion()), - IndexVersions.MINIMUM_COMPATIBLE, - IndexVersionUtils.randomCompatibleVersion(random()) - ); - - Version minVersion = newVersion.nodeVersion(); - - final TransportSearchAction.SearchTimeProvider timeProvider = new TransportSearchAction.SearchTimeProvider( - 0, - System.nanoTime(), - System::nanoTime - ); - AtomicInteger successfulOps = new AtomicInteger(); - - Map lookup = new ConcurrentHashMap<>(); - DiscoveryNode newVersionNode1 = DiscoveryNodeUtils.builder("node1").version(newVersion).build(); - DiscoveryNode newVersionNode2 = DiscoveryNodeUtils.builder("node2").version(newVersion).build(); - DiscoveryNode oldVersionNode = DiscoveryNodeUtils.builder("node3").version(oldVersion).build(); - lookup.put("node1", new SearchAsyncActionTests.MockConnection(newVersionNode1)); - lookup.put("node2", new 
SearchAsyncActionTests.MockConnection(newVersionNode2)); - lookup.put("node3", new SearchAsyncActionTests.MockConnection(oldVersionNode)); - - OriginalIndices idx = new OriginalIndices(new String[] { "idx" }, SearchRequest.DEFAULT_INDICES_OPTIONS); - ArrayList list = new ArrayList<>(); - ShardRouting routingNewVersionShard1 = ShardRouting.newUnassigned( - new ShardId(new Index("idx", "_na_"), 0), - true, - RecoverySource.EmptyStoreRecoverySource.INSTANCE, - new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foobar"), - ShardRouting.Role.DEFAULT - ); - routingNewVersionShard1 = routingNewVersionShard1.initialize(newVersionNode1.getId(), "p0", 0); - routingNewVersionShard1.started(); - list.add(new SearchShardIterator(null, new ShardId(new Index("idx", "_na_"), 0), singletonList(routingNewVersionShard1), idx)); - - ShardRouting routingNewVersionShard2 = ShardRouting.newUnassigned( - new ShardId(new Index("idx", "_na_"), 1), - true, - RecoverySource.EmptyStoreRecoverySource.INSTANCE, - new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foobar"), - ShardRouting.Role.DEFAULT - ); - routingNewVersionShard2 = routingNewVersionShard2.initialize(newVersionNode2.getId(), "p1", 0); - routingNewVersionShard2.started(); - list.add(new SearchShardIterator(null, new ShardId(new Index("idx", "_na_"), 1), singletonList(routingNewVersionShard2), idx)); - - GroupShardsIterator shardsIter = new GroupShardsIterator<>(list); - final SearchRequest searchRequest = new SearchRequest(minVersion); - searchRequest.allowPartialSearchResults(false); - searchRequest.source(new SearchSourceBuilder().size(1).sort(SortBuilders.fieldSort("timestamp"))); - - SearchTransportService searchTransportService = new SearchTransportService(null, null, null) { - @Override - public void sendExecuteQuery( - Transport.Connection connection, - ShardSearchRequest request, - SearchTask task, - ActionListener listener - ) { - int shardId = request.shardId().id(); - QuerySearchResult queryResult = new 
QuerySearchResult( - new ShardSearchContextId("N/A", 123), - new SearchShardTarget("node1", new ShardId("idx", "na", shardId), null), - null - ); - try { - SortField sortField = new SortField("timestamp", SortField.Type.LONG); - if (shardId == 0) { - queryResult.topDocs( - new TopDocsAndMaxScore( - new TopFieldDocs( - new TotalHits(1, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), - new FieldDoc[] { new FieldDoc(randomInt(1000), Float.NaN, new Object[] { shardId }) }, - new SortField[] { sortField } - ), - Float.NaN - ), - new DocValueFormat[] { DocValueFormat.RAW } - ); - } else if (shardId == 1) { - queryResult.topDocs( - new TopDocsAndMaxScore( - new TopFieldDocs( - new TotalHits(1, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), - new FieldDoc[] { new FieldDoc(randomInt(1000), Float.NaN, new Object[] { shardId }) }, - new SortField[] { sortField } - ), - Float.NaN - ), - new DocValueFormat[] { DocValueFormat.RAW } - ); - } - queryResult.from(0); - queryResult.size(1); - successfulOps.incrementAndGet(); - queryResult.incRef(); - new Thread(() -> ActionListener.respondAndRelease(listener, queryResult)).start(); - } finally { - queryResult.decRef(); - } - } - }; - SearchPhaseController controller = new SearchPhaseController((t, r) -> InternalAggregationTestCase.emptyReduceContextBuilder()); - SearchTask task = new SearchTask(0, "n/a", "n/a", () -> "test", null, Collections.emptyMap()); - - CountDownLatch latch = new CountDownLatch(1); - try ( - QueryPhaseResultConsumer resultConsumer = new QueryPhaseResultConsumer( - searchRequest, - EsExecutors.DIRECT_EXECUTOR_SERVICE, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - controller, - task::isCancelled, - task.getProgressListener(), - shardsIter.size(), - exc -> {} - ) - ) { - SearchQueryThenFetchAsyncAction action = new SearchQueryThenFetchAsyncAction( - logger, - null, - searchTransportService, - (clusterAlias, node) -> lookup.get(node), - Collections.singletonMap("_na_", AliasFilter.EMPTY), - 
Collections.emptyMap(), - EsExecutors.DIRECT_EXECUTOR_SERVICE, - resultConsumer, - searchRequest, - null, - shardsIter, - timeProvider, - new ClusterState.Builder(new ClusterName("test")).build(), - task, - SearchResponse.Clusters.EMPTY, - null - ) { - @Override - protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { - return new SearchPhase("test") { - @Override - public void run() { - latch.countDown(); - } - }; - } - }; - ShardRouting routingOldVersionShard = ShardRouting.newUnassigned( - new ShardId(new Index("idx", "_na_"), 2), - true, - RecoverySource.EmptyStoreRecoverySource.INSTANCE, - new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foobar"), - ShardRouting.Role.DEFAULT - ); - SearchShardIterator shardIt = new SearchShardIterator( - null, - new ShardId(new Index("idx", "_na_"), 2), - singletonList(routingOldVersionShard), - idx - ); - routingOldVersionShard = routingOldVersionShard.initialize(oldVersionNode.getId(), "p2", 0); - routingOldVersionShard.started(); - action.start(); - latch.await(); - assertThat(successfulOps.get(), equalTo(2)); - SearchPhaseController.ReducedQueryPhase phase = action.results.reduce(); - assertThat(phase.numReducePhases(), greaterThanOrEqualTo(1)); - assertThat(phase.totalHits().value, equalTo(2L)); - assertThat(phase.totalHits().relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); - - SearchShardTarget searchShardTarget = new SearchShardTarget("node3", shardIt.shardId(), null); - SearchActionListener listener = new SearchActionListener(searchShardTarget, 0) { - @Override - public void onFailure(Exception e) {} - - @Override - protected void innerOnResponse(SearchPhaseResult response) {} - }; - Exception e = expectThrows( - VersionMismatchException.class, - () -> action.executePhaseOnShard(shardIt, searchShardTarget, listener) - ); - assertThat(e.getMessage(), equalTo("One of the shards is incompatible with the required minimum version [" + minVersion + "]")); - } - } 
} diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java index 23c956e6e52f2..3079b6d4b0371 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java @@ -39,7 +39,6 @@ import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; -import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -438,33 +437,6 @@ public QueryBuilder topDocsQuery() { assertEquals(1, validationErrors.validationErrors().size()); assertEquals("using [point in time] is not allowed in a scroll context", validationErrors.validationErrors().get(0)); } - { - // Minimum compatible shard node version with ccs_minimize_roundtrips - SearchRequest searchRequest; - boolean isMinCompatibleShardVersion = randomBoolean(); - if (isMinCompatibleShardVersion) { - searchRequest = new SearchRequest(VersionUtils.randomVersion(random())); - } else { - searchRequest = new SearchRequest(); - } - - boolean shouldSetCcsMinimizeRoundtrips = randomBoolean(); - if (shouldSetCcsMinimizeRoundtrips) { - searchRequest.setCcsMinimizeRoundtrips(true); - } - ActionRequestValidationException validationErrors = searchRequest.validate(); - - if (isMinCompatibleShardVersion && shouldSetCcsMinimizeRoundtrips) { - assertNotNull(validationErrors); - assertEquals(1, validationErrors.validationErrors().size()); - assertEquals( - "[ccs_minimize_roundtrips] cannot be [true] when setting a minimum compatible shard version", - validationErrors.validationErrors().get(0) - ); - } else { - assertNull(validationErrors); - } - } { SearchRequest searchRequest = new SearchRequest().source( new SearchSourceBuilder().rankBuilder(new TestRankBuilder(100)) diff --git 
a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlSearchAction.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlSearchAction.java index 51f92bcda7da4..c0141da2432ce 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlSearchAction.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlSearchAction.java @@ -59,7 +59,6 @@ import static org.elasticsearch.action.ActionListener.wrap; import static org.elasticsearch.transport.RemoteClusterAware.buildRemoteIndexName; import static org.elasticsearch.xpack.core.ClientHelper.ASYNC_SEARCH_ORIGIN; -import static org.elasticsearch.xpack.ql.plugin.TransportActionUtils.executeRequestWithRetryAttempt; public final class TransportEqlSearchAction extends HandledTransportAction implements @@ -236,22 +235,11 @@ public static void operation( new TaskId(nodeId, task.getId()), task ); - executeRequestWithRetryAttempt( - clusterService, - listener::onFailure, - onFailure -> planExecutor.eql( - cfg, - request.query(), - params, - wrap(r -> listener.onResponse(createResponse(r, task.getExecutionId())), onFailure) - ), - node -> transportService.sendRequest( - node, - EqlSearchAction.NAME, - request, - new ActionListenerResponseHandler<>(listener, EqlSearchResponse::new, EsExecutors.DIRECT_EXECUTOR_SERVICE) - ), - log + planExecutor.eql( + cfg, + request.query(), + params, + wrap(r -> listener.onResponse(createResponse(r, task.getExecutionId())), listener::onFailure) ); } } diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java index a6c369734f0e3..a79424b8b7d59 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java +++ 
b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.fleet.rest; -import org.elasticsearch.Version; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.client.internal.node.NodeClient; @@ -57,12 +56,11 @@ public List routes() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - SearchRequest searchRequest; - if (request.hasParam("min_compatible_shard_node")) { - searchRequest = new SearchRequest(Version.fromString(request.param("min_compatible_shard_node"))); - } else { - searchRequest = new SearchRequest(); - } + SearchRequest searchRequest = new SearchRequest(); + // access the BwC param, but just drop it + // this might be set by old clients + request.param("min_compatible_shard_node"); + String[] indices = searchRequest.indices(); if (indices.length > 1) { throw new IllegalArgumentException( diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plugin/TransportActionUtils.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plugin/TransportActionUtils.java deleted file mode 100644 index 6431c83ee1c2e..0000000000000 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plugin/TransportActionUtils.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.ql.plugin; - -import org.apache.logging.log4j.Logger; -import org.elasticsearch.action.search.SearchPhaseExecutionException; -import org.elasticsearch.action.search.VersionMismatchException; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.xpack.ql.util.Holder; - -import java.util.function.Consumer; - -public final class TransportActionUtils { - - /** - * Execute a *QL request and re-try it in case the first request failed with a {@code VersionMismatchException} - * - * @param clusterService The cluster service instance - * @param onFailure On-failure handler in case the request doesn't fail with a {@code VersionMismatchException} - * @param queryRunner *QL query execution code, typically a Plan Executor running the query - * @param retryRequest Re-trial logic - * @param log Log4j logger - */ - public static void executeRequestWithRetryAttempt( - ClusterService clusterService, - Consumer onFailure, - Consumer> queryRunner, - Consumer retryRequest, - Logger log - ) { - - Holder retrySecondTime = new Holder(false); - queryRunner.accept(e -> { - // the search request likely ran on nodes with different versions of ES - // we will retry on a node with an older version that should generate a backwards compatible _search request - if (e instanceof SearchPhaseExecutionException - && ((SearchPhaseExecutionException) e).getCause() instanceof VersionMismatchException) { - if (log.isDebugEnabled()) { - log.debug("Caught exception type [{}] with cause [{}].", e.getClass().getName(), e.getCause()); - } - DiscoveryNode localNode = clusterService.state().nodes().getLocalNode(); - DiscoveryNode candidateNode = null; - for (DiscoveryNode node : clusterService.state().nodes()) { - // find the first node that's older than the current node - if (node != localNode && node.getVersion().before(localNode.getVersion())) { - candidateNode = node; - break; - } - } - 
if (candidateNode != null) { - if (log.isDebugEnabled()) { - log.debug( - "Candidate node to resend the request to: address [{}], id [{}], name [{}], version [{}]", - candidateNode.getAddress(), - candidateNode.getId(), - candidateNode.getName(), - candidateNode.getVersion() - ); - } - // re-send the request to the older node - retryRequest.accept(candidateNode); - } else { - retrySecondTime.set(true); - } - } else { - onFailure.accept(e); - } - }); - if (retrySecondTime.get()) { - if (log.isDebugEnabled()) { - log.debug("No candidate node found, likely all were upgraded in the meantime. Re-trying the original request."); - } - queryRunner.accept(onFailure); - } - } -} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index b2ce91140de76..06293df4f4559 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -10,7 +10,6 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.search.TotalHits; import org.apache.lucene.util.PriorityQueue; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DelegatingActionListener; import org.elasticsearch.action.search.ClosePointInTimeRequest; @@ -101,7 +100,6 @@ import static org.elasticsearch.action.ActionListener.wrap; import static org.elasticsearch.xpack.ql.execution.search.extractor.AbstractFieldHitExtractor.MultiValueSupport.LENIENT; import static org.elasticsearch.xpack.ql.execution.search.extractor.AbstractFieldHitExtractor.MultiValueSupport.NONE; -import static org.elasticsearch.xpack.sql.proto.VersionCompatibility.INTRODUCING_UNSIGNED_LONG; // TODO: add retry/back-off public class Querier { @@ -202,7 +200,7 @@ public static void closePointInTime(Client client, 
BytesReference pointInTimeId, public static SearchRequest prepareRequest(SearchSourceBuilder source, SqlConfiguration cfg, boolean includeFrozen, String... indices) { source.timeout(cfg.requestTimeout()); - SearchRequest searchRequest = new SearchRequest(Version.fromId(INTRODUCING_UNSIGNED_LONG.id)); + SearchRequest searchRequest = new SearchRequest(); if (source.pointInTimeBuilder() == null) { searchRequest.indices(indices); searchRequest.indicesOptions( diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java index 7a76ffe8eb109..41fa66ae36aeb 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java @@ -9,7 +9,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.service.ClusterService; @@ -57,7 +56,6 @@ import static java.util.Collections.unmodifiableList; import static org.elasticsearch.action.ActionListener.wrap; import static org.elasticsearch.xpack.core.ClientHelper.ASYNC_SEARCH_ORIGIN; -import static org.elasticsearch.xpack.ql.plugin.TransportActionUtils.executeRequestWithRetryAttempt; import static org.elasticsearch.xpack.sql.plugin.Transports.clusterName; import static org.elasticsearch.xpack.sql.plugin.Transports.username; import static org.elasticsearch.xpack.sql.proto.Mode.CLI; @@ -161,22 +159,11 @@ public static void operation( ); if (Strings.hasText(request.cursor()) == false) { - executeRequestWithRetryAttempt( - clusterService, - listener::onFailure, - 
onFailure -> planExecutor.sql( - cfg, - request.query(), - request.params(), - wrap(p -> listener.onResponse(createResponseWithSchema(request, p, task)), onFailure) - ), - node -> transportService.sendRequest( - node, - SqlQueryAction.NAME, - request, - new ActionListenerResponseHandler<>(listener, SqlQueryResponse::new, EsExecutors.DIRECT_EXECUTOR_SERVICE) - ), - log + planExecutor.sql( + cfg, + request.query(), + request.params(), + wrap(p -> listener.onResponse(createResponseWithSchema(request, p, task)), listener::onFailure) ); } else { Tuple decoded = Cursors.decodeFromStringWithZone(request.cursor(), planExecutor.writeableRegistry()); From 1a611bd2e3296a19c3b57216084d346577a30ada Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Wed, 16 Oct 2024 14:34:52 +0200 Subject: [PATCH 145/449] Fixes flaky ST_CENTROID_AGG tests (#114892) Even with Kahan summation, we were occasionally getting floating point differences at the 14th decimal point, well beyond anything a GIS use case would care about. 
--- muted-tests.yml | 6 -- .../aggregate/SpatialCentroidTests.java | 56 +++++++++++++++++-- 2 files changed, 50 insertions(+), 12 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 6817011d399b2..a9a4cdcbe079b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -94,12 +94,6 @@ tests: - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/rest-api/watcher/put-watch/line_120} issue: https://github.com/elastic/elasticsearch/issues/99517 -- class: org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroidTests - method: "testAggregateIntermediate {TestCase= #2}" - issue: https://github.com/elastic/elasticsearch/issues/112461 -- class: org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroidTests - method: testAggregateIntermediate {TestCase=} - issue: https://github.com/elastic/elasticsearch/issues/112463 - class: org.elasticsearch.xpack.esql.action.ManyShardsIT method: testRejection issue: https://github.com/elastic/elasticsearch/issues/112406 diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java index b79252c694084..15ea029a05554 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java @@ -22,13 +22,15 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.MultiRowTestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.BaseMatcher; +import org.hamcrest.Description; +import org.hamcrest.Matcher; -import java.nio.ByteOrder; import java.util.List; import 
java.util.function.Supplier; import java.util.stream.Stream; -import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.closeTo; @FunctionName("st_centroid_agg") public class SpatialCentroidTests extends AbstractAggregationTestCase { @@ -74,16 +76,58 @@ private static TestCaseSupplier makeSupplier(TestCaseSupplier.TypedDataSupplier count++; } - var expected = new BytesRef( - WellKnownBinary.toWKB(new Point(xSum.value() / count, ySum.value() / count), ByteOrder.LITTLE_ENDIAN) - ); + var expectedX = xSum.value() / count; + var expectedY = ySum.value() / count; return new TestCaseSupplier.TestCase( List.of(fieldTypedData), "SpatialCentroid[field=Attribute[channel=0]]", fieldTypedData.type(), - equalTo(expected) + centroidMatches(expectedX, expectedY, 1e-14) ); }); } + + @SuppressWarnings("SameParameterValue") + private static Matcher centroidMatches(double x, double y, double error) { + return new TestCentroidMatcher(x, y, error); + } + + private static class TestCentroidMatcher extends BaseMatcher { + private final double x; + private final double y; + private final Matcher mx; + private final Matcher my; + + private TestCentroidMatcher(double x, double y, double error) { + this.x = x; + this.y = y; + this.mx = closeTo(x, error); + this.my = closeTo(y, error); + } + + @Override + public boolean matches(Object item) { + if (item instanceof BytesRef wkb) { + var point = (Point) WellKnownBinary.fromWKB(GeometryValidator.NOOP, false, wkb.bytes, wkb.offset, wkb.length); + return mx.matches(point.getX()) && my.matches(point.getY()); + } + return false; + } + + @Override + public void describeMismatch(Object item, Description description) { + if (item instanceof BytesRef wkb) { + var point = (Point) WellKnownBinary.fromWKB(GeometryValidator.NOOP, false, wkb.bytes, wkb.offset, wkb.length); + description.appendText("was ").appendValue(point); + } else { + description.appendText("was ").appendValue(item); + } + } + + @Override + public void 
describeTo(Description description) { + description.appendValue(" POINT (" + x + " " + y + ")"); + } + } } From 1c0e29294d418c466c45530617946d831f6517dc Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Wed, 16 Oct 2024 14:36:24 +0200 Subject: [PATCH 146/449] Fix ST_CENTROID_AGG when no records are aggregated (#114888) This was returning an invalid result `POINT(NaN NaN)` and now instead returns `null`. --- docs/changelog/114888.yaml | 6 +++ .../spatial/CentroidPointAggregator.java | 14 ++++--- .../src/main/resources/spatial.csv-spec | 41 ++++++++++++++++++- .../xpack/esql/action/EsqlCapabilities.java | 5 +++ 4 files changed, 59 insertions(+), 7 deletions(-) create mode 100644 docs/changelog/114888.yaml diff --git a/docs/changelog/114888.yaml b/docs/changelog/114888.yaml new file mode 100644 index 0000000000000..6b99eb82d10f3 --- /dev/null +++ b/docs/changelog/114888.yaml @@ -0,0 +1,6 @@ +pr: 114888 +summary: Fix ST_CENTROID_AGG when no records are aggregated +area: ES|QL +type: bug +issues: + - 106025 diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/CentroidPointAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/CentroidPointAggregator.java index 1fc2430393c98..c66c960dd8a99 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/CentroidPointAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/CentroidPointAggregator.java @@ -58,7 +58,7 @@ public static void evaluateIntermediate(CentroidState state, DriverContext drive } public static Block evaluateFinal(CentroidState state, DriverContext driverContext) { - return driverContext.blockFactory().newConstantBytesRefBlockWith(state.encodeCentroidResult(), 1); + return state.toBlock(driverContext.blockFactory()); } public static void combineStates(GroupingCentroidState current, int groupId, GroupingCentroidState 
state, int statePosition) { @@ -181,10 +181,14 @@ public void add(double x, double dx, double y, double dy, long count) { this.count += count; } - protected BytesRef encodeCentroidResult() { - double x = xSum.value() / count; - double y = ySum.value() / count; - return encode(x, y); + protected Block toBlock(BlockFactory blockFactory) { + if (count > 0) { + double x = xSum.value() / count; + double y = ySum.value() / count; + return blockFactory.newConstantBytesRefBlockWith(encode(x, y), 1); + } else { + return blockFactory.newConstantNullBlock(1); + } } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index c5ca405005447..c1c4538c7393d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -616,6 +616,42 @@ location:geo_point | city_location:geo_point | count:long POINT (0 0) | POINT (0 0) | 1 ; +airportCityLocationPointIntersectionCentroidGroups +required_capability: st_intersects + +FROM airports_mp +| WHERE ST_INTERSECTS(location, city_location) +| STATS location=ST_CENTROID_AGG(location), city_location=ST_CENTROID_AGG(city_location), count=COUNT() BY country +; + +location:geo_point | city_location:geo_point | count:long | country:k +POINT (0 0) | POINT (0 0) | 1 | Atlantis +; + +airportCityLocationPointIntersectionNullCentroid +required_capability: st_intersects +required_capability: spatial_centroid_no_records + +FROM airports +| WHERE ST_INTERSECTS(location, city_location) +| STATS location=ST_CENTROID_AGG(location), city_location=ST_CENTROID_AGG(city_location), count=COUNT() +; + +location:geo_point | city_location:geo_point | count:long +null | null | 0 +; + +airportCityLocationPointIntersectionNullCentroidGroups +required_capability: st_intersects + +FROM airports +| WHERE ST_INTERSECTS(location, city_location) +| STATS 
location=ST_CENTROID_AGG(location), city_location=ST_CENTROID_AGG(city_location), count=COUNT() BY country +; + +location:geo_point | city_location:geo_point | count:long | country:k +; + ############################################### # Tests for ST_DISJOINT on GEO_POINT type @@ -1948,14 +1984,15 @@ wkt:keyword | pt:cartesian_point cartesianCentroidFromAirportsAfterPointContainsPolygonPredicate required_capability: st_contains_within +required_capability: spatial_centroid_no_records FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; -centroid:cartesian_point | count:long -POINT (NaN NaN) | 0 +centroid:cartesian_point | count:long +null | 0 ; cartesianPointContainsPolygonPredicate diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 842501744979c..18ebbe6d898af 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -180,6 +180,11 @@ public enum Cap { */ SPATIAL_DISTANCE_PUSHDOWN_ENHANCEMENTS, + /** + * Fix for spatial centroid when no records are found. 
+ */ + SPATIAL_CENTROID_NO_RECORDS, + /** * Fix to GROK and DISSECT that allows extracting attributes with the same name as the input * https://github.com/elastic/elasticsearch/issues/110184 From 7ce484d2ad9282c93e41d390f165b00b408c2212 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 16 Oct 2024 23:44:43 +1100 Subject: [PATCH 147/449] Mute org.elasticsearch.test.rest.ClientYamlTestSuiteIT test {yaml=cluster.stats/30_ccs_stats/cross-cluster search stats search} #114902 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index a9a4cdcbe079b..df4c964340993 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -385,6 +385,9 @@ tests: - class: org.elasticsearch.packaging.test.EnrollmentProcessTests method: test20DockerAutoFormCluster issue: https://github.com/elastic/elasticsearch/issues/114885 +- class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT + method: test {yaml=cluster.stats/30_ccs_stats/cross-cluster search stats search} + issue: https://github.com/elastic/elasticsearch/issues/114902 # Examples: # From 8935aad6dbee2fcaf62225fcee4334c8a0e6d674 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 17 Oct 2024 00:00:29 +1100 Subject: [PATCH 148/449] Mute org.elasticsearch.xpack.enrich.EnrichRestIT test {p0=enrich/40_synthetic_source/enrich documents over _bulk} #114825 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index df4c964340993..eb070b59f5c90 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -388,6 +388,9 @@ tests: - class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT method: test {yaml=cluster.stats/30_ccs_stats/cross-cluster search stats search} issue: https://github.com/elastic/elasticsearch/issues/114902 +- class: org.elasticsearch.xpack.enrich.EnrichRestIT + method: test 
{p0=enrich/40_synthetic_source/enrich documents over _bulk} + issue: https://github.com/elastic/elasticsearch/issues/114825 # Examples: # From 8ae5ca468df88049ceeb6c8eda538e4131a325e5 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Wed, 16 Oct 2024 16:13:23 +0300 Subject: [PATCH 149/449] Reset array scope tracking for nested objects (#114891) * Reset array scope tracking for nested objects * update * update * update --- muted-tests.yml | 3 -- .../index/mapper/DocumentParserContext.java | 25 ++++++++++------ .../mapper/IgnoredSourceFieldMapperTests.java | 30 +++++++++++++++++++ 3 files changed, 46 insertions(+), 12 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index eb070b59f5c90..2a3d4eac6d358 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -376,9 +376,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/114839 - class: org.elasticsearch.license.LicensingTests issue: https://github.com/elastic/elasticsearch/issues/114865 -- class: org.elasticsearch.datastreams.logsdb.qa.LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT - method: testTermsQuery - issue: https://github.com/elastic/elasticsearch/issues/114873 - class: org.elasticsearch.xpack.enrich.EnrichIT method: testDeleteIsCaseSensitive issue: https://github.com/elastic/elasticsearch/issues/114840 diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java index 2eec14bd1a8d6..ef87ce52fbabf 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java @@ -111,7 +111,7 @@ public int get() { private final Set ignoredFields; private final List ignoredFieldValues; private final List ignoredFieldsMissingValues; - private final boolean inArrayScopeEnabled; + private boolean inArrayScopeEnabled; private 
boolean inArrayScope; private final Map> dynamicMappers; @@ -376,13 +376,14 @@ public final Collection getIgnoredFieldsMiss * Applies to synthetic source only. */ public final DocumentParserContext maybeCloneForArray(Mapper mapper) throws IOException { - if (canAddIgnoredField() && mapper instanceof ObjectMapper && inArrayScopeEnabled) { - boolean isNested = mapper instanceof NestedObjectMapper; - if ((inArrayScope == false && isNested == false) || (inArrayScope && isNested)) { - DocumentParserContext subcontext = switchParser(parser()); - subcontext.inArrayScope = inArrayScope == false; - return subcontext; - } + if (canAddIgnoredField() + && mapper instanceof ObjectMapper + && mapper instanceof NestedObjectMapper == false + && inArrayScope == false + && inArrayScopeEnabled) { + DocumentParserContext subcontext = switchParser(parser()); + subcontext.inArrayScope = true; + return subcontext; } return this; } @@ -709,12 +710,18 @@ public final DocumentParserContext createNestedContext(NestedObjectMapper nested * Return a new context that has the provided document as the current document. */ public final DocumentParserContext switchDoc(final LuceneDocument document) { - return new Wrapper(this.parent, this) { + DocumentParserContext cloned = new Wrapper(this.parent, this) { @Override public LuceneDocument doc() { return document; } }; + // Disable tracking array scopes for ignored source, as it would be added to the parent doc. + // Nested documents are added to preserve object structure within arrays of objects, so the use + // of ignored source for arrays inside them should be mostly redundant. 
+ cloned.inArrayScope = false; + cloned.inArrayScopeEnabled = false; + return cloned; } /** diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java index 5eac5acdca286..934744ef3ef96 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java @@ -932,6 +932,36 @@ public void testConflictingFieldNameAfterArray() throws IOException { {"path":{"id":0.1,"to":{"id":[1,20,3,10]}}}""", syntheticSource); } + public void testArrayWithNestedObjects() throws IOException { + DocumentMapper documentMapper = createMapperService(syntheticSourceMapping(b -> { + b.startObject("path").startObject("properties"); + { + b.startObject("to").field("type", "nested").startObject("properties"); + { + b.startObject("id").field("type", "integer").field("synthetic_source_keep", "arrays").endObject(); + } + b.endObject().endObject(); + } + b.endObject().endObject(); + })).documentMapper(); + + var syntheticSource = syntheticSource(documentMapper, b -> { + b.startArray("path"); + { + b.startObject().startArray("to"); + { + b.startObject().array("id", 1, 20, 3).endObject(); + b.startObject().field("id", 10).endObject(); + } + b.endArray().endObject(); + b.startObject().startObject("to").field("id", "0.1").endObject().endObject(); + } + b.endArray(); + }); + assertEquals(""" + {"path":{"to":[{"id":[1,20,3]},{"id":10},{"id":0}]}}""", syntheticSource); + } + public void testArrayWithinArray() throws IOException { DocumentMapper documentMapper = createMapperService(syntheticSourceMapping(b -> { b.startObject("path"); From 58b588cbcd470e1a1d7f202a82694034faddf75d Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Wed, 16 Oct 2024 15:16:29 +0200 Subject: [PATCH 150/449] ESQL: adapt to new range in ToDatetimeTests (#114605) Two tests shared 
the same name in `ToDatetimeTests`, so that needed fixing. But then also the ranges in the masked test needed adjusting after the change that added the masking test. Fixes #108093 --- .../function/scalar/convert/ToDatetimeTests.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java index 7799c3c756f23..2852b92ba156e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java @@ -134,9 +134,9 @@ public static Iterable parameters() { "ToDatetimeFromStringEvaluator[field=" + read + "]", List.of( new TestCaseSupplier.TypedDataSupplier( - "", - // millis before "0001-01-01T00:00:00.000Z" - () -> new BytesRef(randomDateString(Long.MIN_VALUE, -62135596800001L)), + "", + // millis before "-9999-12-31T23:59:59.999Z" + () -> new BytesRef(randomDateString(Long.MIN_VALUE, -377736739200000L)), DataType.KEYWORD ) ), @@ -154,8 +154,8 @@ public static Iterable parameters() { "ToDatetimeFromStringEvaluator[field=" + read + "]", List.of( new TestCaseSupplier.TypedDataSupplier( - "", - // millis before "0001-01-01T00:00:00.000Z" + "", + // millis after "9999-12-31T23:59:59.999Z" () -> new BytesRef(randomDateString(253402300800000L, Long.MAX_VALUE)), DataType.KEYWORD ) From 0cd306f34c77d9d35ab3d2b054980334aa1f6e21 Mon Sep 17 00:00:00 2001 From: Jan Kuipers <148754765+jan-elastic@users.noreply.github.com> Date: Wed, 16 Oct 2024 15:25:55 +0200 Subject: [PATCH 151/449] Fix setOnce in EmbeddingRequestChunker (#114900) --- .../xpack/inference/chunking/EmbeddingRequestChunker.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff 
--git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunker.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunker.java index 3ae8dc0550391..c5897f32d6eb8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunker.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunker.java @@ -324,7 +324,7 @@ private ElasticsearchStatusException unexpectedResultTypeException(String got, S public void onFailure(Exception e) { var errorResult = new ErrorChunkedInferenceResults(e); for (var pos : positions) { - errors.setOnce(pos.inputIndex(), errorResult); + errors.set(pos.inputIndex(), errorResult); } if (resultCount.incrementAndGet() == totalNumberOfRequests) { From ccf6ab9ab3ca0fe2157a204e98f34bc8e957bfc0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Wed, 16 Oct 2024 15:47:13 +0200 Subject: [PATCH 152/449] [DOCS] Adds link to tutorial and API docs to trained model autoscaling. (#114904) --- .../inference/service-elser.asciidoc | 20 +++++++++---------- .../semantic-search-semantic-text.asciidoc | 8 +++++--- 2 files changed, 15 insertions(+), 13 deletions(-) diff --git a/docs/reference/inference/service-elser.asciidoc b/docs/reference/inference/service-elser.asciidoc index c7217f38d459b..6afc2a2e3ef65 100644 --- a/docs/reference/inference/service-elser.asciidoc +++ b/docs/reference/inference/service-elser.asciidoc @@ -80,12 +80,13 @@ Must be a power of 2. Max allowed value is 32. [[inference-example-elser]] ==== ELSER service example -The following example shows how to create an {infer} endpoint called -`my-elser-model` to perform a `sparse_embedding` task type. +The following example shows how to create an {infer} endpoint called `my-elser-model` to perform a `sparse_embedding` task type. 
Refer to the {ml-docs}/ml-nlp-elser.html[ELSER model documentation] for more info. -The request below will automatically download the ELSER model if it isn't -already downloaded and then deploy the model. +NOTE: If you want to optimize your ELSER endpoint for ingest, set the number of threads to `1` (`"num_threads": 1`). +If you want to optimize your ELSER endpoint for search, set the number of threads to greater than `1`. + +The request below will automatically download the ELSER model if it isn't already downloaded and then deploy the model. [source,console] ------------------------------------------------------------ @@ -100,7 +101,6 @@ PUT _inference/sparse_embedding/my-elser-model ------------------------------------------------------------ // TEST[skip:TBD] - Example response: [source,console-result] @@ -130,12 +130,12 @@ If using the Python client, you can set the `timeout` parameter to a higher valu [[inference-example-elser-adaptive-allocation]] ==== Setting adaptive allocation for the ELSER service -The following example shows how to create an {infer} endpoint called -`my-elser-model` to perform a `sparse_embedding` task type and configure -adaptive allocations. +NOTE: For more information on how to optimize your ELSER endpoints, refer to {ml-docs}/ml-nlp-elser.html#elser-recommendations[the ELSER recommendations] section in the model documentation. +To learn more about model autoscaling, refer to the {ml-docs}/ml-nlp-auto-scale.html[trained model autoscaling] page. + +The following example shows how to create an {infer} endpoint called `my-elser-model` to perform a `sparse_embedding` task type and configure adaptive allocations. -The request below will automatically download the ELSER model if it isn't -already downloaded and then deploy the model. +The request below will automatically download the ELSER model if it isn't already downloaded and then deploy the model. 
[source,console] ------------------------------------------------------------ diff --git a/docs/reference/search/search-your-data/semantic-search-semantic-text.asciidoc b/docs/reference/search/search-your-data/semantic-search-semantic-text.asciidoc index dbcfbb1b615f9..60692c19c184a 100644 --- a/docs/reference/search/search-your-data/semantic-search-semantic-text.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search-semantic-text.asciidoc @@ -50,7 +50,7 @@ PUT _inference/sparse_embedding/my-elser-endpoint <1> be used and ELSER creates sparse vectors. The `inference_id` is `my-elser-endpoint`. <2> The `elser` service is used in this example. -<3> This setting enables and configures adaptive allocations. +<3> This setting enables and configures {ml-docs}/ml-nlp-auto-scale.html#nlp-model-adaptive-allocations[adaptive allocations]. Adaptive allocations make it possible for ELSER to automatically scale up or down resources based on the current load on the process. [NOTE] @@ -284,6 +284,8 @@ query from the `semantic-embedding` index: [discrete] [[semantic-text-further-examples]] -==== Further examples +==== Further examples and reading -If you want to use `semantic_text` in hybrid search, refer to https://colab.research.google.com/github/elastic/elasticsearch-labs/blob/main/notebooks/search/09-semantic-text.ipynb[this notebook] for a step-by-step guide. \ No newline at end of file +* If you want to use `semantic_text` in hybrid search, refer to https://colab.research.google.com/github/elastic/elasticsearch-labs/blob/main/notebooks/search/09-semantic-text.ipynb[this notebook] for a step-by-step guide. +* For more information on how to optimize your ELSER endpoints, refer to {ml-docs}/ml-nlp-elser.html#elser-recommendations[the ELSER recommendations] section in the model documentation. +* To learn more about model autoscaling, refer to the {ml-docs}/ml-nlp-auto-scale.html[trained model autoscaling] page. 
\ No newline at end of file From ff7ea1073b5dabc2f808cc41f3f981a54a806abf Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 17 Oct 2024 00:58:01 +1100 Subject: [PATCH 153/449] Mute org.elasticsearch.xpack.inference.DefaultEndPointsIT testInferDeploysDefaultElser #114913 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 2a3d4eac6d358..fb48f9e04d5c4 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -388,6 +388,9 @@ tests: - class: org.elasticsearch.xpack.enrich.EnrichRestIT method: test {p0=enrich/40_synthetic_source/enrich documents over _bulk} issue: https://github.com/elastic/elasticsearch/issues/114825 +- class: org.elasticsearch.xpack.inference.DefaultEndPointsIT + method: testInferDeploysDefaultElser + issue: https://github.com/elastic/elasticsearch/issues/114913 # Examples: # From 9bf6e3b0baf4296125f2b8d8ab2726f3a4614e3f Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Wed, 16 Oct 2024 16:12:56 +0200 Subject: [PATCH 154/449] Inject the `host.name` field mapping only if required for `logsdb` index mode (#114573) Here we check for the existence of a `host.name` field in index sort settings when the index mode is `logsdb` and decide to inject the field in the mapping depending on whether it exists or not. By default `host.name` is required for sorting in LogsDB. This reduces the chances for errors at mapping or template composition time as a result of injecting the `host.name` field only if strictly required. A user who wants to override index sort settings without including a `host.name` field would be able to do so without finding an additional `host.name` field in the mappings (injected automatically). If users override the sort settings and a `host.name` field is not included we don't need to inject such field since sorting does not require it anymore. 
As a result of this change we have the following: * the user does not provide any index sorting configuration: we are responsible for injecting the default sort fields and their mapping (for `logsdb`) * the user explicitly provides non-empty index sorting configuration: the user is also responsible for providing correct mappings and we do not modify index sorting or mappings Note also that all sort settings `index.sort.*` are `final` which means doing this check once, when mappings are merged at template composition time, is enough. --- .../metadata/MetadataCreateIndexService.java | 2 +- .../org/elasticsearch/index/IndexMode.java | 71 +- .../index/LogsIndexModeTests.java | 16 +- .../index/mapper/MapperServiceTestCase.java | 8 +- .../test/30_logsdb_default_mapping.yml | 781 ++++++++++++++++++ .../rest-api-spec/test/20_logs_tests.yml | 24 + 6 files changed, 856 insertions(+), 46 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java index 7f2c076281735..29720e98a6e7b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java @@ -1373,7 +1373,7 @@ private static void updateIndexMappingsAndBuildSortOrder( MapperService mapperService = indexService.mapperService(); IndexMode indexMode = indexService.getIndexSettings() != null ? 
indexService.getIndexSettings().getMode() : IndexMode.STANDARD; List allMappings = new ArrayList<>(); - final CompressedXContent defaultMapping = indexMode.getDefaultMapping(); + final CompressedXContent defaultMapping = indexMode.getDefaultMapping(indexService.getIndexSettings()); if (defaultMapping != null) { allMappings.add(defaultMapping); } diff --git a/server/src/main/java/org/elasticsearch/index/IndexMode.java b/server/src/main/java/org/elasticsearch/index/IndexMode.java index 2d9e89223d7a6..5908bc22e21e2 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexMode.java +++ b/server/src/main/java/org/elasticsearch/index/IndexMode.java @@ -75,7 +75,7 @@ public void validateTimestampFieldMapping(boolean isDataStream, MappingLookup ma } @Override - public CompressedXContent getDefaultMapping() { + public CompressedXContent getDefaultMapping(final IndexSettings indexSettings) { return null; } @@ -171,7 +171,7 @@ public void validateTimestampFieldMapping(boolean isDataStream, MappingLookup ma } @Override - public CompressedXContent getDefaultMapping() { + public CompressedXContent getDefaultMapping(final IndexSettings indexSettings) { return DEFAULT_TIME_SERIES_TIMESTAMP_MAPPING; } @@ -249,8 +249,10 @@ public void validateTimestampFieldMapping(boolean isDataStream, MappingLookup ma } @Override - public CompressedXContent getDefaultMapping() { - return DEFAULT_LOGS_TIMESTAMP_MAPPING; + public CompressedXContent getDefaultMapping(final IndexSettings indexSettings) { + return indexSettings != null && indexSettings.getIndexSortConfig().hasPrimarySortOnField(HOST_NAME) + ? 
DEFAULT_LOGS_TIMESTAMP_MAPPING_WITH_HOSTNAME + : DEFAULT_TIME_SERIES_TIMESTAMP_MAPPING; } @Override @@ -308,6 +310,8 @@ public String getDefaultCodec() { } }; + private static final String HOST_NAME = "host.name"; + private static void validateTimeSeriesSettings(Map, Object> settings) { settingRequiresTimeSeries(settings, IndexMetadata.INDEX_ROUTING_PATH); settingRequiresTimeSeries(settings, IndexSettings.TIME_SERIES_START_TIME); @@ -324,48 +328,33 @@ protected static String tsdbMode() { return "[" + IndexSettings.MODE.getKey() + "=time_series]"; } - public static final CompressedXContent DEFAULT_TIME_SERIES_TIMESTAMP_MAPPING; + private static CompressedXContent createDefaultMapping(boolean includeHostName) throws IOException { + return new CompressedXContent((builder, params) -> { + builder.startObject(MapperService.SINGLE_MAPPING_NAME) + .startObject(DataStreamTimestampFieldMapper.NAME) + .field("enabled", true) + .endObject() + .startObject("properties") + .startObject(DataStreamTimestampFieldMapper.DEFAULT_PATH) + .field("type", DateFieldMapper.CONTENT_TYPE) + .endObject(); + + if (includeHostName) { + builder.startObject(HOST_NAME).field("type", KeywordFieldMapper.CONTENT_TYPE).field("ignore_above", 1024).endObject(); + } - static { - try { - DEFAULT_TIME_SERIES_TIMESTAMP_MAPPING = new CompressedXContent( - ((builder, params) -> builder.startObject(MapperService.SINGLE_MAPPING_NAME) - .startObject(DataStreamTimestampFieldMapper.NAME) - .field("enabled", true) - .endObject() - .startObject("properties") - .startObject(DataStreamTimestampFieldMapper.DEFAULT_PATH) - .field("type", DateFieldMapper.CONTENT_TYPE) - .field("ignore_malformed", "false") - .endObject() - .endObject() - .endObject()) - ); - } catch (IOException e) { - throw new AssertionError(e); - } + return builder.endObject().endObject(); + }); } - public static final CompressedXContent DEFAULT_LOGS_TIMESTAMP_MAPPING; + private static final CompressedXContent DEFAULT_TIME_SERIES_TIMESTAMP_MAPPING; + + 
private static final CompressedXContent DEFAULT_LOGS_TIMESTAMP_MAPPING_WITH_HOSTNAME; static { try { - DEFAULT_LOGS_TIMESTAMP_MAPPING = new CompressedXContent( - ((builder, params) -> builder.startObject(MapperService.SINGLE_MAPPING_NAME) - .startObject(DataStreamTimestampFieldMapper.NAME) - .field("enabled", true) - .endObject() - .startObject("properties") - .startObject(DataStreamTimestampFieldMapper.DEFAULT_PATH) - .field("type", DateFieldMapper.CONTENT_TYPE) - .endObject() - .startObject("host.name") - .field("type", KeywordFieldMapper.CONTENT_TYPE) - .field("ignore_above", 1024) - .endObject() - .endObject() - .endObject()) - ); + DEFAULT_TIME_SERIES_TIMESTAMP_MAPPING = createDefaultMapping(false); + DEFAULT_LOGS_TIMESTAMP_MAPPING_WITH_HOSTNAME = createDefaultMapping(true); } catch (IOException e) { throw new AssertionError(e); } @@ -421,7 +410,7 @@ public String getName() { * Get default mapping for this index or {@code null} if there is none. */ @Nullable - public abstract CompressedXContent getDefaultMapping(); + public abstract CompressedXContent getDefaultMapping(IndexSettings indexSettings); /** * Build the {@link FieldMapper} for {@code _id}. 
diff --git a/server/src/test/java/org/elasticsearch/index/LogsIndexModeTests.java b/server/src/test/java/org/elasticsearch/index/LogsIndexModeTests.java index 8a66bb1464a5b..23fc788a89bde 100644 --- a/server/src/test/java/org/elasticsearch/index/LogsIndexModeTests.java +++ b/server/src/test/java/org/elasticsearch/index/LogsIndexModeTests.java @@ -13,14 +13,24 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; public class LogsIndexModeTests extends ESTestCase { public void testLogsIndexModeSetting() { assertThat(IndexSettings.MODE.get(buildSettings()), equalTo(IndexMode.LOGSDB)); } - public void testSortField() { + public void testDefaultHostNameSortField() { + final IndexMetadata metadata = IndexSettingsTests.newIndexMeta("test", buildSettings()); + assertThat(metadata.getIndexMode(), equalTo(IndexMode.LOGSDB)); + final IndexSettings settings = new IndexSettings(metadata, Settings.EMPTY); + assertThat(settings.getIndexSortConfig().hasPrimarySortOnField("host.name"), equalTo(true)); + assertThat(IndexMode.LOGSDB.getDefaultMapping(settings).string(), containsString("host.name")); + } + + public void testCustomSortField() { final Settings sortSettings = Settings.builder() .put(buildSettings()) .put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), "agent_id") @@ -29,7 +39,9 @@ public void testSortField() { assertThat(metadata.getIndexMode(), equalTo(IndexMode.LOGSDB)); final IndexSettings settings = new IndexSettings(metadata, Settings.EMPTY); assertThat(settings.getMode(), equalTo(IndexMode.LOGSDB)); - assertThat("agent_id", equalTo(getIndexSetting(settings, IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey()))); + assertThat(getIndexSetting(settings, IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey()), equalTo("agent_id")); + 
assertThat(settings.getIndexSortConfig().hasPrimarySortOnField("host.name"), equalTo(false)); + assertThat(IndexMode.LOGSDB.getDefaultMapping(settings).string(), not(containsString("host"))); } public void testSortMode() { diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java index a9ee0317ce1ee..8bc2666bcfe3b 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java @@ -302,8 +302,12 @@ public void onRemoval(ShardId shardId, Accountable accountable) {} mapperMetrics ); - if (applyDefaultMapping && indexSettings.getMode().getDefaultMapping() != null) { - mapperService.merge(null, indexSettings.getMode().getDefaultMapping(), MapperService.MergeReason.MAPPING_UPDATE); + if (applyDefaultMapping && indexSettings.getMode().getDefaultMapping(indexSettings) != null) { + mapperService.merge( + null, + indexSettings.getMode().getDefaultMapping(indexSettings), + MapperService.MergeReason.MAPPING_UPDATE + ); } return mapperService; diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/30_logsdb_default_mapping.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/30_logsdb_default_mapping.yml index 52c500c102cee..3f2bca2e4bcd9 100644 --- a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/30_logsdb_default_mapping.yml +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/30_logsdb_default_mapping.yml @@ -280,3 +280,784 @@ create logsdb data stream with timestamp object mapping: - match: { error.type: "illegal_argument_exception" } - match: { error.reason: "composable template [logsdb-index-template] template after composition with component templates [logsdb-mappings] is invalid" } + +--- +create logsdb data stream with custom 
sorting without host.name: + - skip: + features: [ "allowed_warnings" ] + - requires: + cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] + reason: support for normalizer on keyword fields + + - do: + allowed_warnings: + - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + indices.put_index_template: + name: logs-template + body: + index_patterns: [ logs-http-prod ] + priority: 10000 + template: + settings: + index: + sort.field: [ agent.id ] + sort.order: [ desc ] + mode: logsdb + mappings: + properties: + agent.id: + type: keyword + host.hostname: + type: keyword + data_stream: { } + - is_true: acknowledged + + - do: + indices.create_data_stream: + name: logs-http-prod + - is_true: acknowledged + + - do: + indices.get_data_stream: + name: logs-http-prod + + - set: { data_streams.0.indices.0.index_name: backing_index } + - do: + indices.get_mapping: + index: $backing_index + + - match: { .$backing_index.mappings.properties.@timestamp.type: date } + - match: { .$backing_index.mappings.properties.agent.properties.id.type: keyword } + - match: { .$backing_index.mappings.properties.host.properties.hostname.type: keyword } + - match: { .$backing_index.mappings.properties.host.properties.name.type: null } + +--- +create logsdb data stream with custom sorting and host object: + - skip: + features: [ "allowed_warnings" ] + - requires: + cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] + reason: support for normalizer on keyword fields + + - do: + allowed_warnings: + - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + indices.put_index_template: + name: logs-template + body: + 
index_patterns: [ logs-nginx-prod ] + priority: 10000 + template: + settings: + index: + sort.field: [ host.hostname, host.region ] + sort.order: [ desc, desc ] + mode: logsdb + mappings: + properties: + host: + type: object + properties: + ip: + type: ip + hostname: + type: keyword + region: + type: keyword + name: + type: integer + + data_stream: { } + - is_true: acknowledged + + - do: + indices.create_data_stream: + name: logs-nginx-prod + - is_true: acknowledged + + - do: + indices.get_data_stream: + name: logs-nginx-prod + + - set: { data_streams.0.indices.0.index_name: backing_index } + - do: + indices.get_mapping: + index: $backing_index + + - match: { .$backing_index.mappings.properties.@timestamp.type: date } + - match: { .$backing_index.mappings.properties.host.properties.ip.type: ip } + - match: { .$backing_index.mappings.properties.host.properties.hostname.type: keyword } + - match: { .$backing_index.mappings.properties.host.properties.region.type: keyword } + - match: { .$backing_index.mappings.properties.host.properties.name.type: integer } # Overrides LogsDB injected + +--- +create logsdb data stream with custom sorting and dynamically mapped host.name: + - skip: + features: [ "allowed_warnings" ] + - requires: + cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] + reason: support for normalizer on keyword fields + + - do: + allowed_warnings: + - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + indices.put_index_template: + name: logs-template + body: + index_patterns: [ logs-kafka-qa ] + priority: 10000 + template: + settings: + index: + sort.field: [ "agent.id", "@timestamp" ] + sort.order: [ desc, asc ] + mode: logsdb + mappings: + properties: + agent: + type: object + properties: + name: + type: keyword + id: + type: keyword + + data_stream: { } + - 
is_true: acknowledged + + - do: + indices.create_data_stream: + name: logs-kafka-qa + - is_true: acknowledged + + - do: + bulk: + index: logs-kafka-qa + refresh: true + body: + - { "create": { } } + - { "@timestamp": "2022-01-01T00:00:00", agent.name: "foo", agent.id: "foo-568", host: { id: "db8fdcf1-b1e2-444b-8c6a-0466c61dcce4" } } + - { "create": { } } + - { "@timestamp": "2022-01-01T00:01:00", agent.name: "bar", agent.id: "foo-309", host: { id: "35e1ed10-961e-46c7-83ea-4109c913a1d6" } } + + - do: + indices.get_data_stream: + name: logs-kafka-qa + + - set: { data_streams.0.indices.0.index_name: backing_index } + - do: + indices.get_mapping: + index: $backing_index + + - match: { .$backing_index.mappings.properties.@timestamp.type: date } + - match: { .$backing_index.mappings.properties.agent.properties.name.type: keyword } + - match: { .$backing_index.mappings.properties.agent.properties.id.type: keyword } + - match: { .$backing_index.mappings.properties.host.properties.name: null } + - match: { .$backing_index.mappings.properties.host.properties.id.type: text } + +--- +create logsdb data stream with custom sorting and host.name object: + - skip: + features: [ "allowed_warnings" ] + - requires: + cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] + reason: support for normalizer on keyword fields + + - do: + allowed_warnings: + - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + indices.put_index_template: + name: logs-template + body: + index_patterns: [ logs-nginx-qa ] + priority: 10000 + template: + settings: + index: + sort.field: [ "host.name.value", "@timestamp" ] + sort.order: [ desc, desc ] + mode: logsdb + mappings: + properties: + host: + type: object + properties: + name: + type: object + properties: + value: + type: keyword + alias: + type: keyword + + 
data_stream: { } + - is_true: acknowledged + + - do: + indices.create_data_stream: + name: logs-nginx-qa + - is_true: acknowledged + + - do: + indices.get_data_stream: + name: logs-nginx-qa + + - set: { data_streams.0.indices.0.index_name: backing_index } + - do: + indices.get_mapping: + index: $backing_index + + - match: { .$backing_index.mappings.properties.@timestamp.type: date } + - match: { .$backing_index.mappings.properties.host.properties.name.properties.value.type: keyword } + - match: { .$backing_index.mappings.properties.host.properties.name.properties.alias.type: keyword } + +--- +create logsdb data stream with default sorting on malformed host.name: + - skip: + features: [ "allowed_warnings" ] + - requires: + cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] + reason: support for normalizer on keyword fields + + - do: + allowed_warnings: + - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + indices.put_index_template: + name: logs-template + body: + index_patterns: [ logs-win-prod ] + priority: 10000 + template: + settings: + index: + mode: logsdb + mappings: + properties: + agent: + type: object + properties: + name: + type: keyword + id: + type: keyword + + data_stream: { } + - is_true: acknowledged + + - do: + indices.create_data_stream: + name: logs-win-prod + - is_true: acknowledged + + - do: + bulk: + index: logs-win-prod + refresh: true + body: + - { "create": { } } + - { "@timestamp": "2022-01-01T00:00:00", agent.name: "foo", agent.id: "foo-568", host: { name: 192.168.10.12, id: "e70e91cd-bb3f-43f0-909c-2748e7fdfd54" } } + - { "create": { } } + - { "@timestamp": "2022-01-01T00:01:00", agent.name: "bar", agent.id: "foo-309", host: { name: 192.168.15.17, id: "ad2e3edb-2c4b-4f12-83dd-255691ed614c" } } + + - do: + indices.get_data_stream: + name: 
logs-win-prod + + - set: { data_streams.0.indices.0.index_name: backing_index } + - do: + indices.get_mapping: + index: $backing_index + + - match: { .$backing_index.mappings.properties.@timestamp.type: date } + - match: { .$backing_index.mappings.properties.agent.properties.name.type: keyword } + - match: { .$backing_index.mappings.properties.agent.properties.id.type: keyword } + - match: { .$backing_index.mappings.properties.host.properties.name.type: keyword } # LogsDB injected + - match: { .$backing_index.mappings.properties.host.properties.name.ignore_above: 1024 } # LogsDB injected + - match: { .$backing_index.mappings.properties.host.properties.id.type: text } + +--- +create logsdb data stream with custom sorting and host.name date field: + - skip: + features: [ "allowed_warnings" ] + - requires: + cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] + reason: support for normalizer on keyword fields + + - do: + allowed_warnings: + - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + indices.put_index_template: + name: logs-template + body: + index_patterns: [ logs-http-prod ] + priority: 10000 + template: + settings: + index: + sort.field: [ host.name, host.hostname ] + sort.order: [ desc, desc ] + mode: logsdb + mappings: + properties: + host: + type: object + properties: + hostname: + type: keyword + name: + type: date + + data_stream: { } + - is_true: acknowledged + + - do: + indices.create_data_stream: + name: logs-http-prod + - is_true: acknowledged + + - do: + indices.get_data_stream: + name: logs-http-prod + + - set: { data_streams.0.indices.0.index_name: backing_index } + - do: + indices.get_mapping: + index: $backing_index + + - match: { .$backing_index.mappings.properties.@timestamp.type: date } + - match: { 
.$backing_index.mappings.properties.host.properties.hostname.type: keyword } + - match: { .$backing_index.mappings.properties.host.properties.name.type: date } + +--- +create logsdb data stream with custom sorting and missing host.name field mapping: + - skip: + features: [ "allowed_warnings" ] + - requires: + cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] + reason: support for normalizer on keyword fields + + - do: + allowed_warnings: + - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + indices.put_index_template: + name: logs-template + body: + index_patterns: [ logs-http-qa ] + priority: 10000 + template: + settings: + index: + sort.field: [ host.name, host.hostname ] + sort.order: [ desc, desc ] + mode: logsdb + mappings: + properties: + host: + type: object + properties: + hostname: + type: keyword + + data_stream: { } + - is_true: acknowledged + + - do: + indices.create_data_stream: + name: logs-http-qa + - is_true: acknowledged + + - do: + indices.get_data_stream: + name: logs-http-qa + + - set: { data_streams.0.indices.0.index_name: backing_index } + - do: + indices.get_mapping: + index: $backing_index + + - match: { .$backing_index.mappings.properties.@timestamp.type: date } + - match: { .$backing_index.mappings.properties.host.properties.hostname.type: keyword } + - match: { .$backing_index.mappings.properties.host.properties.name.type: keyword } + - match: { .$backing_index.mappings.properties.host.properties.name.ignore_above: 1024 } + +--- +create logsdb data stream with custom sorting and host.name field without doc values: + - skip: + features: [ "allowed_warnings" ] + - requires: + cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] + reason: support for normalizer on keyword fields + + - do: + allowed_warnings: + - "index template 
[logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + indices.put_index_template: + name: logs-template + body: + index_patterns: [ logs-http-dev ] + priority: 10000 + template: + settings: + index: + sort.field: [ "host.name", "@timestamp" ] + sort.order: [ desc, desc ] + mode: logsdb + mappings: + properties: + host: + type: object + properties: + name: + type: keyword + doc_values: false + + data_stream: { } + - is_true: acknowledged + + - do: + catch: bad_request + indices.create_data_stream: + name: logs-http-dev + + - match: { error.type: "illegal_argument_exception" } + - match: { error.reason: "docvalues not found for index sort field:[host.name]" } + +--- +create logsdb data stream with incompatible ignore_above on host.name: + - skip: + features: [ "allowed_warnings" ] + - requires: + cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] + reason: support for normalizer on keyword fields + + - do: + allowed_warnings: + - "index template [logsdb-index-template-ignore-above] has index patterns [logsdb-ignore-above] matching patterns from existing older templates [global]" + indices.put_index_template: + name: logsdb-index-template-ignore-above + body: + index_patterns: [ logsdb-ignore-above ] + priority: 10000 + template: + settings: + index: + sort.field: [ host.name ] + sort.order: [ desc ] + mode: logsdb + mappings: + properties: + host.name: + type: keyword + ignore_above: 128 + data_stream: {} + - is_true: acknowledged + + - do: + indices.create_data_stream: + name: logsdb-ignore-above + - is_true: acknowledged + + - do: + indices.get_data_stream: + name: logsdb-ignore-above + + - set: { data_streams.0.indices.0.index_name: backing_index } + - do: + indices.get_mapping: + index: $backing_index + + - match: { .$backing_index.mappings.properties.@timestamp.type: date } + - match: { 
.$backing_index.mappings.properties.host.properties.name.type: keyword } + - match: { .$backing_index.mappings.properties.host.properties.name.ignore_above: 128 } + +--- +create logsdb data stream with no sorting and host.name as text: + - skip: + features: [ "allowed_warnings" ] + - requires: + cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] + reason: support for normalizer on keyword fields + + - do: + allowed_warnings: + - "index template [logsdb-index-template-non-keyword] has index patterns [logsdb-non-keyword] matching patterns from existing older templates [global]" + indices.put_index_template: + name: logsdb-index-template-non-keyword + body: + index_patterns: [ logsdb-non-keyword ] + priority: 10000 + template: + settings: + mode: logsdb + mappings: + properties: + host.name: + type: text + data_stream: {} + - is_true: acknowledged + + - do: + catch: bad_request + indices.create_data_stream: + name: logsdb-non-keyword + + - match: { error.type: "illegal_argument_exception" } + - match: { error.reason: "docvalues not found for index sort field:[host.name]" } + +--- +create logsdb data stream without index sorting and ignore_above on host.name: + - skip: + features: [ "allowed_warnings" ] + - requires: + cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] + reason: support for normalizer on keyword fields + + - do: + allowed_warnings: + - "index template [logsdb-index-template-ignore-above-override] has index patterns [logsdb-ignore-above-override] matching patterns from existing older templates [global]" + indices.put_index_template: + name: logsdb-index-template-ignore-above-override + body: + index_patterns: [ logsdb-ignore-above-override ] + priority: 10000 + template: + settings: + index: + mode: logsdb + mappings: + properties: + host.name: + type: keyword + ignore_above: 128 + data_stream: {} + - is_true: acknowledged + + - do: + indices.create_data_stream: + name: logsdb-ignore-above-override + - is_true: 
acknowledged + + - do: + indices.get_data_stream: + name: logsdb-ignore-above-override + + - set: { data_streams.0.indices.0.index_name: backing_index } + - do: + indices.get_mapping: + index: $backing_index + + - match: { .$backing_index.mappings.properties.@timestamp.type: date } + - match: { .$backing_index.mappings.properties.host.properties.name.type: keyword } + - match: { .$backing_index.mappings.properties.host.properties.name.ignore_above: 128 } + +--- +create logsdb data stream with host.name as alias and sorting on it: + - skip: + features: [ "allowed_warnings" ] + - requires: + cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] + reason: support for normalizer on keyword fields + + - do: + allowed_warnings: + - "index template [logsdb-index-template-alias] has index patterns [logsdb-alias] matching patterns from existing older templates [global]" + indices.put_index_template: + name: logsdb-index-template-alias + body: + index_patterns: [ logsdb-alias ] + template: + settings: + index: + sort.field: [ host.name ] + sort.order: [ desc ] + mode: logsdb + mappings: + properties: + host.name: + type: alias + path: host.hostname + host.hostname: + type: + keyword + data_stream: {} + - do: + catch: bad_request + indices.create_data_stream: + name: logsdb-alias + + - match: { error.type: "illegal_argument_exception" } + - match: { error.reason: "Cannot use alias [host.name] as an index sort field" } + +--- +create logsdb data stream with multi-fields on host.name: + - skip: + features: [ "allowed_warnings" ] + - requires: + cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] + reason: support for normalizer on keyword fields + + - do: + allowed_warnings: + - "index template [logsdb-index-template-multi-fields] has index patterns [logsdb-multi-fields] matching patterns from existing older templates [global]" + indices.put_index_template: + name: logsdb-index-template-multi-fields + body: + index_patterns: [ logsdb-multi-fields ] + 
template: + settings: + index: + sort.field: [ host.name.keyword ] + sort.order: [ asc ] + mode: logsdb + mappings: + properties: + host.name: + type: "text" + fields: + keyword: + type: "keyword" + data_stream: {} + + - do: + indices.create_data_stream: + name: logsdb-multi-fields + - is_true: acknowledged + + - do: + indices.get_data_stream: + name: logsdb-multi-fields + + - set: { data_streams.0.indices.0.index_name: backing_index } + - do: + indices.get_mapping: + index: $backing_index + + - match: { .$backing_index.mappings.properties.@timestamp.type: date } + - match: { .$backing_index.mappings.properties.host.properties.name.fields.keyword.type: keyword } + +--- +create logsdb data stream with multi-fields on host.name and no sorting: + - skip: + features: [ "allowed_warnings" ] + - requires: + cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] + reason: support for normalizer on keyword fields + + - do: + allowed_warnings: + - "index template [ logsdb-no-sort-multi-fields-template ] has index patterns [logsdb-no-sort-multi-fields] matching patterns from existing older templates [global]" + indices.put_index_template: + name: logsdb-no-sort-multi-fields-template + body: + index_patterns: [ logsdb-no-sort-multi-fields ] + template: + settings: + mode: logsdb + mappings: + properties: + host.name: + type: text + fields: + keyword: + type: keyword + data_stream: {} + + - do: + catch: bad_request + indices.create_data_stream: + name: logsdb-no-sort-multi-fields + + - match: { error.type: "illegal_argument_exception" } + - match: { error.reason: "docvalues not found for index sort field:[host.name]" } + +--- +create logsdb data stream with custom empty sorting: + - skip: + features: [ "allowed_warnings" ] + - requires: + cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] + reason: support for normalizer on keyword fields + + - do: + allowed_warnings: + - "index template [logs-template] has index patterns [logs-*-*] matching 
patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + indices.put_index_template: + name: logs-template + body: + index_patterns: [ logs-http-empty ] + priority: 10000 + template: + settings: + index: + sort.field: [ ] + sort.order: [ ] + mode: logsdb + mappings: + properties: + hostname: + type: keyword + data_stream: { } + - is_true: acknowledged + + - do: + indices.create_data_stream: + name: logs-http-empty + - is_true: acknowledged + + - do: + indices.get_data_stream: + name: logs-http-empty + + - set: { data_streams.0.indices.0.index_name: backing_index } + - do: + indices.get_mapping: + index: $backing_index + + - match: { .$backing_index.mappings.properties.@timestamp.type: date } + - match: { .$backing_index.mappings.properties.host.properties.name.type: keyword } + - match: { .$backing_index.mappings.properties.host.properties.name.ignore_above: 1024 } + +--- +create logsdb data stream with custom sorting on timestamp: + - skip: + features: [ "allowed_warnings" ] + - requires: + cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] + reason: support for normalizer on keyword fields + + - do: + allowed_warnings: + - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + indices.put_index_template: + name: logs-template + body: + index_patterns: [ logs-http-dev ] + priority: 10000 + template: + settings: + index: + sort.field: [ "@timestamp" ] + sort.order: [ "asc" ] + mode: logsdb + mappings: + properties: + hostname: + type: keyword + data_stream: { } + - is_true: acknowledged + + - do: + indices.create_data_stream: + name: logs-http-dev + - is_true: acknowledged + + - do: + indices.get_data_stream: + name: logs-http-dev + + - set: { 
data_streams.0.indices.0.index_name: backing_index } + - do: + indices.get_mapping: + index: $backing_index + + - match: { .$backing_index.mappings.properties.@timestamp.type: date } diff --git a/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_logs_tests.yml b/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_logs_tests.yml index be4de6dca6c76..6bc0cee78be4f 100644 --- a/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_logs_tests.yml +++ b/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_logs_tests.yml @@ -163,3 +163,27 @@ Structured log body: fields: ["event.dataset"] - length: { hits.hits: 1 } - match: { hits.hits.0.fields.event\.dataset: ["generic.otel"] } +--- +host.name pass-through: + - do: + bulk: + index: logs-generic.otel-default + refresh: true + body: + - create: {} + - "@timestamp": 2024-07-18T14:48:33.467654000Z + resource: + attributes: + host.name: localhost + - is_false: errors + - do: + search: + index: logs-generic.otel-default + body: + query: + term: + host.name: localhost + fields: [ "*" ] + - length: { hits.hits: 1 } + - match: { hits.hits.0.fields.resource\.attributes\.host\.name: [ "localhost" ] } + - match: { hits.hits.0.fields.host\.name: [ "localhost" ] } From f6a1e36d6be56a5d480765ad2d5f72f4adcaef5b Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Wed, 16 Oct 2024 16:17:41 +0200 Subject: [PATCH 155/449] Replace usages of `_source.mode` in documentation (#114743) We will deprecate the `_source.mode` mapping level configuration in favor of the index-level `index.mapping.source.mode` setting. As a result, we go through the documentation and update it to reflect the introduction of the setting. 
--- docs/plugins/mapper-annotated-text.asciidoc | 20 ++++++- .../mapping/fields/synthetic-source.asciidoc | 28 ++++++---- .../types/aggregate-metric-double.asciidoc | 10 +++- docs/reference/mapping/types/boolean.asciidoc | 10 +++- docs/reference/mapping/types/date.asciidoc | 12 ++++- .../mapping/types/date_nanos.asciidoc | 10 +++- .../mapping/types/flattened.asciidoc | 30 +++++++++-- .../mapping/types/geo-point.asciidoc | 10 +++- docs/reference/mapping/types/ip.asciidoc | 10 +++- docs/reference/mapping/types/keyword.asciidoc | 30 +++++++++-- docs/reference/mapping/types/numeric.asciidoc | 20 ++++++- docs/reference/mapping/types/range.asciidoc | 54 ++++++++++++++++--- docs/reference/mapping/types/text.asciidoc | 22 ++++++-- docs/reference/mapping/types/version.asciidoc | 19 ++++--- .../reference/mapping/types/wildcard.asciidoc | 10 +++- 15 files changed, 252 insertions(+), 43 deletions(-) diff --git a/docs/plugins/mapper-annotated-text.asciidoc b/docs/plugins/mapper-annotated-text.asciidoc index e4141e98a2285..9b6eccd136696 100644 --- a/docs/plugins/mapper-annotated-text.asciidoc +++ b/docs/plugins/mapper-annotated-text.asciidoc @@ -167,8 +167,16 @@ duplicates removed. So: ---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "text": { "type": "annotated_text", @@ -215,8 +223,16 @@ are preserved. 
---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "text": { "type": "annotated_text", "store": true } } diff --git a/docs/reference/mapping/fields/synthetic-source.asciidoc b/docs/reference/mapping/fields/synthetic-source.asciidoc index 902b6c26611e5..f8666e2993d6a 100644 --- a/docs/reference/mapping/fields/synthetic-source.asciidoc +++ b/docs/reference/mapping/fields/synthetic-source.asciidoc @@ -2,7 +2,7 @@ ==== Synthetic `_source` IMPORTANT: Synthetic `_source` is Generally Available only for TSDB indices -(indices that have `index.mode` set to `time_series`). For other indices +(indices that have `index.mode` set to `time_series`). For other indices, synthetic `_source` is in technical preview. Features in technical preview may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA @@ -11,15 +11,19 @@ of official GA features. Though very handy to have around, the source field takes up a significant amount of space on disk. Instead of storing source documents on disk exactly as you send them, Elasticsearch can reconstruct source content on the fly upon retrieval. -Enable this by setting `mode: synthetic` in `_source`: +Enable this by using the value `synthetic` for the index setting `index.mapping.source.mode`: [source,console,id=enable-synthetic-source-example] ---- PUT idx { - "mappings": { - "_source": { - "mode": "synthetic" + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } } } } @@ -38,7 +42,7 @@ properties when used with synthetic `_source`. <> construct synthetic `_source` using existing data, most commonly <> and <>. For these field types, no additional space is needed to store the contents of `_source` field. 
Due to the storage layout of <>, the -generated `_source` field undergoes <> compared to original document. +generated `_source` field undergoes <> compared to the original document. For all other field types, the original value of the field is stored as is, in the same way as the `_source` field in non-synthetic mode. In this case there are no modifications and field data in `_source` is the same as in the original @@ -227,10 +231,16 @@ For instance: ---- PUT idx_keep { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { - "mode": "synthetic" - }, "properties": { "path": { "type": "object", diff --git a/docs/reference/mapping/types/aggregate-metric-double.asciidoc b/docs/reference/mapping/types/aggregate-metric-double.asciidoc index 8e14fba976360..8a4ddffc30bbd 100644 --- a/docs/reference/mapping/types/aggregate-metric-double.asciidoc +++ b/docs/reference/mapping/types/aggregate-metric-double.asciidoc @@ -267,8 +267,16 @@ For example: ---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "agg_metric": { "type": "aggregate_metric_double", diff --git a/docs/reference/mapping/types/boolean.asciidoc b/docs/reference/mapping/types/boolean.asciidoc index 32f3d13edf581..494c41021dd2a 100644 --- a/docs/reference/mapping/types/boolean.asciidoc +++ b/docs/reference/mapping/types/boolean.asciidoc @@ -249,8 +249,16 @@ Synthetic source always sorts `boolean` fields. 
For example: ---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "bool": { "type": "boolean" } } diff --git a/docs/reference/mapping/types/date.asciidoc b/docs/reference/mapping/types/date.asciidoc index ca2c23f932fc3..53b17a669ae75 100644 --- a/docs/reference/mapping/types/date.asciidoc +++ b/docs/reference/mapping/types/date.asciidoc @@ -130,7 +130,7 @@ The following parameters are accepted by `date` fields: <>:: If `true`, malformed numbers are ignored. If `false` (default), malformed - numbers throw an exception and reject the whole document. Note that this + numbers throw an exception and reject the whole document. Note that this cannot be set if the `script` parameter is used. <>:: @@ -248,8 +248,16 @@ Synthetic source always sorts `date` fields. For example: ---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "date": { "type": "date" } } diff --git a/docs/reference/mapping/types/date_nanos.asciidoc b/docs/reference/mapping/types/date_nanos.asciidoc index 1a3b390b1690c..e9ec85c470ecf 100644 --- a/docs/reference/mapping/types/date_nanos.asciidoc +++ b/docs/reference/mapping/types/date_nanos.asciidoc @@ -160,8 +160,16 @@ Synthetic source always sorts `date_nanos` fields. 
For example: ---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "date": { "type": "date_nanos" } } diff --git a/docs/reference/mapping/types/flattened.asciidoc b/docs/reference/mapping/types/flattened.asciidoc index 0a72ebc98ecef..af6ef3e739d0f 100644 --- a/docs/reference/mapping/types/flattened.asciidoc +++ b/docs/reference/mapping/types/flattened.asciidoc @@ -334,8 +334,16 @@ For example: ---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "flattened": { "type": "flattened" } } @@ -367,8 +375,16 @@ For example: ---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "flattened": { "type": "flattened" } } @@ -407,8 +423,16 @@ For example: ---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "flattened": { "type": "flattened" } } diff --git a/docs/reference/mapping/types/geo-point.asciidoc b/docs/reference/mapping/types/geo-point.asciidoc index 6db05188dfb98..9ba8ea6e46782 100644 --- a/docs/reference/mapping/types/geo-point.asciidoc +++ b/docs/reference/mapping/types/geo-point.asciidoc @@ -229,8 +229,16 @@ longitude) and reduces them to their stored precision. 
For example: ---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "point": { "type": "geo_point" } } diff --git a/docs/reference/mapping/types/ip.asciidoc b/docs/reference/mapping/types/ip.asciidoc index f068916478a78..f85dd78ecbd4a 100644 --- a/docs/reference/mapping/types/ip.asciidoc +++ b/docs/reference/mapping/types/ip.asciidoc @@ -170,8 +170,16 @@ Synthetic source always sorts `ip` fields and removes duplicates. For example: ---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "ip": { "type": "ip" } } diff --git a/docs/reference/mapping/types/keyword.asciidoc b/docs/reference/mapping/types/keyword.asciidoc index a4be7026dffcd..b94216042427f 100644 --- a/docs/reference/mapping/types/keyword.asciidoc +++ b/docs/reference/mapping/types/keyword.asciidoc @@ -188,8 +188,16 @@ For example: ---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "kwd": { "type": "keyword" } } @@ -218,8 +226,16 @@ are preserved. 
For example: ---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "kwd": { "type": "keyword", "store": true } } @@ -248,8 +264,16 @@ For example: ---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "kwd": { "type": "keyword", "ignore_above": 3 } } diff --git a/docs/reference/mapping/types/numeric.asciidoc b/docs/reference/mapping/types/numeric.asciidoc index d1e1c037e571e..5bfa1bc7c1240 100644 --- a/docs/reference/mapping/types/numeric.asciidoc +++ b/docs/reference/mapping/types/numeric.asciidoc @@ -259,8 +259,16 @@ Synthetic source always sorts numeric fields. For example: ---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "long": { "type": "long" } } @@ -287,8 +295,16 @@ Scaled floats will always apply their scaling factor so: ---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "f": { "type": "scaled_float", "scaling_factor": 0.01 } } diff --git a/docs/reference/mapping/types/range.asciidoc b/docs/reference/mapping/types/range.asciidoc index 14c5b6098acbe..04341f68c630a 100644 --- a/docs/reference/mapping/types/range.asciidoc +++ b/docs/reference/mapping/types/range.asciidoc @@ -249,13 +249,21 @@ of official GA features. `range` fields support <> in their default configuration. Synthetic `_source` cannot be used with <> disabled. -Synthetic source always sorts values and removes duplicates for all `range` fields except `ip_range` . Ranges are sorted by their lower bound and then by upper bound. 
For example: +Synthetic source always sorts values and removes duplicates for all `range` fields except `ip_range`. Ranges are sorted by their lower bound and then by upper bound. For example: [source,console,id=synthetic-source-range-sorting-example] ---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "my_range": { "type": "long_range" } } @@ -316,8 +324,16 @@ For example: ---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "my_range": { "type": "ip_range" } } @@ -352,13 +368,21 @@ Will become: // TEST[s/^/{"_source":/ s/\n$/}/] [[range-synthetic-source-inclusive]] -Range field vales are always represented as inclusive on both sides with bounds adjusted accordingly. Default values for range bounds are represented as `null`. This is true even if range bound was explicitly provided. For example: +Range field values are always represented as inclusive on both sides with bounds adjusted accordingly. Default values for range bounds are represented as `null`. This is true even if range bound was explicitly provided. For example: [source,console,id=synthetic-source-range-normalization-example] ---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "my_range": { "type": "long_range" } } @@ -394,8 +418,16 @@ Default values for range bounds are represented as `null` in synthetic source. 
T ---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "my_range": { "type": "integer_range" } } @@ -429,8 +461,16 @@ Will become: ---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "my_range": { "type": "date_range" } } diff --git a/docs/reference/mapping/types/text.asciidoc b/docs/reference/mapping/types/text.asciidoc index c33af69df5607..ca69c93e8f1a8 100644 --- a/docs/reference/mapping/types/text.asciidoc +++ b/docs/reference/mapping/types/text.asciidoc @@ -177,15 +177,23 @@ a <> sub-field that supports synthetic `_source` or if the `text` field sets `store` to `true`. Either way, it may not have <>. -If using a sub-`keyword` field then the values are sorted in the same way as +If using a sub-`keyword` field, then the values are sorted in the same way as a `keyword` field's values are sorted. By default, that means sorted with duplicates removed. So: [source,console,id=synthetic-source-text-example-default] ---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "text": { "type": "text", @@ -233,8 +241,16 @@ are preserved. 
---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "text": { "type": "text", "store": true } } diff --git a/docs/reference/mapping/types/version.asciidoc b/docs/reference/mapping/types/version.asciidoc index 8da0fcae80fcd..1600451432bd8 100644 --- a/docs/reference/mapping/types/version.asciidoc +++ b/docs/reference/mapping/types/version.asciidoc @@ -63,16 +63,15 @@ The following parameters are accepted by `version` fields: [discrete] ==== Limitations -This field type isn't optimized for heavy wildcard, regex or fuzzy searches. While those -type of queries work in this field, you should consider using a regular `keyword` field if -you strongly rely on these kind of queries. - +This field type isn't optimized for heavy wildcard, regex, or fuzzy searches. While those +types of queries work in this field, you should consider using a regular `keyword` field if +you strongly rely on these kinds of queries. [[version-synthetic-source]] ==== Synthetic `_source` IMPORTANT: Synthetic `_source` is Generally Available only for TSDB indices -(indices that have `index.mode` set to `time_series`). For other indices +(indices that have `index.mode` set to `time_series`). For other indices, synthetic `_source` is in technical preview. Features in technical preview may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA @@ -86,8 +85,16 @@ Synthetic source always sorts `version` fields and removes duplicates. 
For examp ---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "versions": { "type": "version" } } diff --git a/docs/reference/mapping/types/wildcard.asciidoc b/docs/reference/mapping/types/wildcard.asciidoc index 79fc953051d54..89a3109a37164 100644 --- a/docs/reference/mapping/types/wildcard.asciidoc +++ b/docs/reference/mapping/types/wildcard.asciidoc @@ -141,8 +141,16 @@ Synthetic source always sorts `wildcard` fields. For example: ---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "card": { "type": "wildcard" } } From c76fd004d92be03e120bcf1bcbbe30e05addf717 Mon Sep 17 00:00:00 2001 From: Stef Nestor <26751266+stefnestor@users.noreply.github.com> Date: Wed, 16 Oct 2024 09:29:41 -0600 Subject: [PATCH 156/449] (Doc+) link video for resolving shards too large (#114915) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * (Doc+) link video for resolving shards too large 👋 howdy, team (cc: @anniegale9538 )! Playing forward https://github.com/elastic/elasticsearch/pull/111254, [this video](https://www.youtube.com/watch?v=sHyNYnwbYro) demonstrates an example resolving shards too large via reindex under [this section](https://www.elastic.co/guide/en/elasticsearch/reference/master/size-your-shards.html#shard-size-recommendation) as it's a top support ask. 
--------- Co-authored-by: Liam Thompson <32779855+leemthompo@users.noreply.github.com> --- docs/reference/how-to/size-your-shards.asciidoc | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/reference/how-to/size-your-shards.asciidoc b/docs/reference/how-to/size-your-shards.asciidoc index 5f67014d5bb4a..19848fb0338fe 100644 --- a/docs/reference/how-to/size-your-shards.asciidoc +++ b/docs/reference/how-to/size-your-shards.asciidoc @@ -208,6 +208,7 @@ index can be <>. You may then consider setting <> against the destination index for the source index's name to point to it for continuity. +See this https://www.youtube.com/watch?v=sHyNYnwbYro[fixing shard sizes video] for an example troubleshooting walkthrough. [discrete] [[shard-count-recommendation]] From bd754f798fd2c5bc55b24706e3c275e241ba13ca Mon Sep 17 00:00:00 2001 From: Stef Nestor <26751266+stefnestor@users.noreply.github.com> Date: Wed, 16 Oct 2024 09:29:57 -0600 Subject: [PATCH 157/449] (Doc+) Cross-link max shards (#114670) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * (Doc+) Cross-link max shards 👋 It appears we have two docs of similar content about max open shards. This one contains the error users search (so is what we linked the error to in https://github.com/elastic/elasticsearch/pull/110993) but the other I believe is a placeholder doc for the health api code. Should maybe consolidate some day but in the mean time at least cross-link. --------- Co-authored-by: Liam Thompson <32779855+leemthompo@users.noreply.github.com> --- docs/reference/how-to/size-your-shards.asciidoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/reference/how-to/size-your-shards.asciidoc b/docs/reference/how-to/size-your-shards.asciidoc index 19848fb0338fe..8770ec373bb18 100644 --- a/docs/reference/how-to/size-your-shards.asciidoc +++ b/docs/reference/how-to/size-your-shards.asciidoc @@ -572,6 +572,8 @@ PUT _cluster/settings } ---- +For more information, see <>. 
+ [discrete] [[troubleshooting-max-docs-limit]] ==== Number of documents in the shard cannot exceed [2147483519] From 1b2ffa2651ec813f60045b4e7c2ec42e023aa4e8 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Wed, 16 Oct 2024 12:05:54 -0400 Subject: [PATCH 158/449] Fix this log level (#114921) @masseyke noticed this in his review of https://github.com/elastic/elasticsearch/pull/114847. I fixed it in the backport to `8.x` via https://github.com/elastic/elasticsearch/pull/114872, but this PR is needed to get the same fix into `main`. --- .../elasticsearch/ingest/geoip/EnterpriseGeoIpDownloader.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloader.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloader.java index f4ae440d171d3..e04014ff693be 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloader.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloader.java @@ -617,7 +617,7 @@ public Checksum checksum() throws IOException { } @SuppressWarnings("unchecked") String md5 = ((Map) checksums.get("checksums")).get("md5"); - logger.info("checksum was [{}]", md5); + logger.trace("checksum was [{}]", md5); var matcher = MD5_CHECKSUM_PATTERN.matcher(md5); boolean match = matcher.matches(); From 0c480861700443c93a720a992423aa85cb1d974a Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Wed, 16 Oct 2024 10:50:42 -0600 Subject: [PATCH 159/449] Reenable incremental bulk tests (#114922) These tests should be fixed and can be unmuted. The associated github issues have already been closed. 
--- muted-tests.yml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index fb48f9e04d5c4..69cef9acc8cb9 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -259,12 +259,6 @@ tests: - class: org.elasticsearch.xpack.esql.expression.function.aggregate.AvgTests method: "testFold {TestCase= #7}" issue: https://github.com/elastic/elasticsearch/issues/114175 -- class: org.elasticsearch.action.bulk.IncrementalBulkIT - method: testMultipleBulkPartsWithBackoff - issue: https://github.com/elastic/elasticsearch/issues/114181 -- class: org.elasticsearch.action.bulk.IncrementalBulkIT - method: testIncrementalBulkLowWatermarkBackOff - issue: https://github.com/elastic/elasticsearch/issues/114182 - class: org.elasticsearch.xpack.ilm.ExplainLifecycleIT method: testStepInfoPreservedOnAutoRetry issue: https://github.com/elastic/elasticsearch/issues/114220 From 4ca8ef54e53e23670458a0aee7a90b274c8a8cdc Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 17 Oct 2024 04:00:08 +1100 Subject: [PATCH 160/449] Add 8.16 to branches.json --- branches.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/branches.json b/branches.json index e464d6179f2ba..e81d511a88458 100644 --- a/branches.json +++ b/branches.json @@ -4,6 +4,9 @@ { "branch": "main" }, + { + "branch": "8.16" + }, { "branch": "8.x" }, From 2aec12c17383de5da35664d9160904b668944364 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Wed, 16 Oct 2024 12:32:03 -0400 Subject: [PATCH 161/449] Bump 8.x to version 8.17.0 --- .backportrc.json | 4 +-- .buildkite/pipelines/intake.template.yml | 1 + .buildkite/pipelines/intake.yml | 2 +- .buildkite/pipelines/periodic-packaging.yml | 16 ++++++++++ .buildkite/pipelines/periodic.yml | 25 +++++++++++++-- .ci/bwcVersions | 1 + .ci/snapshotBwcVersions | 1 + docs/reference/migration/index.asciidoc | 2 ++ .../reference/migration/migrate_8_17.asciidoc | 20 ++++++++++++ 
docs/reference/release-notes.asciidoc | 2 ++ docs/reference/release-notes/8.17.0.asciidoc | 8 +++++ .../release-notes/highlights.asciidoc | 31 +++---------------- .../main/java/org/elasticsearch/Version.java | 1 + 13 files changed, 81 insertions(+), 33 deletions(-) create mode 100644 docs/reference/migration/migrate_8_17.asciidoc create mode 100644 docs/reference/release-notes/8.17.0.asciidoc diff --git a/.backportrc.json b/.backportrc.json index d2e92817c026b..03f3f892f9227 100644 --- a/.backportrc.json +++ b/.backportrc.json @@ -1,10 +1,10 @@ { "upstream" : "elastic/elasticsearch", - "targetBranchChoices" : [ "main", "8.x", "8.15", "8.14", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ], + "targetBranchChoices" : [ "main", "8.x", "8.16", "8.15", "8.14", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ], "targetPRLabels" : [ "backport" ], "branchLabelMapping" : { "^v9.0.0$" : "main", - "^v8.16.0$" : "8.x", + "^v8.17.0$" : "8.x", "^v(\\d+).(\\d+).\\d+(?:-(?:alpha|beta|rc)\\d+)?$" : "$1.$2" } } diff --git a/.buildkite/pipelines/intake.template.yml b/.buildkite/pipelines/intake.template.yml index f530f237113a9..57412bbe908bc 100644 --- a/.buildkite/pipelines/intake.template.yml +++ b/.buildkite/pipelines/intake.template.yml @@ -75,6 +75,7 @@ steps: - trigger: elasticsearch-dra-workflow label: Trigger DRA snapshot workflow async: true + branches: "main 8.* 7.17" build: branch: "$BUILDKITE_BRANCH" commit: "$BUILDKITE_COMMIT" diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 1bb13c4c10966..1ddb3e82920cd 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -56,7 +56,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["8.15.3", "8.16.0", "9.0.0"] + BWC_VERSION: ["8.15.3", "8.16.0", "8.17.0", "9.0.0"] agents: provider: gcp image: 
family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index b29747c60617e..03368e7e4a9c0 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -304,6 +304,22 @@ steps: env: BWC_VERSION: 8.16.0 + - label: "{{matrix.image}} / 8.17.0 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.17.0 + timeout_in_minutes: 300 + matrix: + setup: + image: + - rocky-8 + - ubuntu-2004 + agents: + provider: gcp + image: family/elasticsearch-{{matrix.image}} + machineType: custom-16-32768 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 8.17.0 + - label: "{{matrix.image}} / 9.0.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v9.0.0 timeout_in_minutes: 300 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index cbca7f820c7b7..d572dd104d215 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -325,6 +325,25 @@ steps: - signal_reason: agent_stop limit: 3 + - label: 8.17.0 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.17.0#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.17.0 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + - label: 9.0.0 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v9.0.0#bwcTest timeout_in_minutes: 300 @@ -410,7 +429,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk21 - BWC_VERSION: ["8.15.3", "8.16.0", "9.0.0"] + BWC_VERSION: ["8.15.3", "8.16.0", "8.17.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -452,7 +471,7 @@ 
steps: ES_RUNTIME_JAVA: - openjdk21 - openjdk23 - BWC_VERSION: ["8.15.3", "8.16.0", "9.0.0"] + BWC_VERSION: ["8.15.3", "8.16.0", "8.17.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -554,7 +573,7 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n2-standard-8 buildDirectory: /dev/shm/bk - if: build.branch == "main" || build.branch == "7.17" + if: build.branch == "main" || build.branch == "8.x" || build.branch == "7.17" - label: check-branch-consistency command: .ci/scripts/run-gradle.sh branchConsistency timeout_in_minutes: 15 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index de0505c61a251..cd1f7d1ae269f 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -16,4 +16,5 @@ BWC_VERSION: - "8.14.3" - "8.15.3" - "8.16.0" + - "8.17.0" - "9.0.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 24f58abc72493..67ebf0c51ab1f 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,4 +1,5 @@ BWC_VERSION: - "8.15.3" - "8.16.0" + - "8.17.0" - "9.0.0" diff --git a/docs/reference/migration/index.asciidoc b/docs/reference/migration/index.asciidoc index 0690f60495c97..719588cb4b0d0 100644 --- a/docs/reference/migration/index.asciidoc +++ b/docs/reference/migration/index.asciidoc @@ -1,5 +1,6 @@ include::migration_intro.asciidoc[] +* <> * <> * <> * <> @@ -18,6 +19,7 @@ include::migration_intro.asciidoc[] * <> * <> +include::migrate_8_17.asciidoc[] include::migrate_8_16.asciidoc[] include::migrate_8_15.asciidoc[] include::migrate_8_14.asciidoc[] diff --git a/docs/reference/migration/migrate_8_17.asciidoc b/docs/reference/migration/migrate_8_17.asciidoc new file mode 100644 index 0000000000000..15bc6431c60ba --- /dev/null +++ b/docs/reference/migration/migrate_8_17.asciidoc @@ -0,0 +1,20 @@ +[[migrating-8.17]] +== Migrating to 8.17 +++++ +8.17 +++++ + +This section discusses the changes that you need to be aware of when migrating +your application to {es} 8.17. + +See also <> and <>. 
+ +coming::[8.17.0] + + +[discrete] +[[breaking-changes-8.17]] +=== Breaking changes + +There are no breaking changes in {es} 8.17. + diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index 6f32b55c49af8..c912b0e62b94d 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -6,6 +6,7 @@ This section summarizes the changes in each release. +* <> * <> * <> * <> @@ -72,6 +73,7 @@ This section summarizes the changes in each release. -- +include::release-notes/8.17.0.asciidoc[] include::release-notes/8.16.0.asciidoc[] include::release-notes/8.15.1.asciidoc[] include::release-notes/8.15.0.asciidoc[] diff --git a/docs/reference/release-notes/8.17.0.asciidoc b/docs/reference/release-notes/8.17.0.asciidoc new file mode 100644 index 0000000000000..59962fd83e9b7 --- /dev/null +++ b/docs/reference/release-notes/8.17.0.asciidoc @@ -0,0 +1,8 @@ +[[release-notes-8.17.0]] +== {es} version 8.17.0 + +coming[8.17.0] + +Also see <>. 
+ + diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc index 1e0018f590ac0..81d46b5773877 100644 --- a/docs/reference/release-notes/highlights.asciidoc +++ b/docs/reference/release-notes/highlights.asciidoc @@ -11,7 +11,8 @@ For detailed information about this release, see the <> and // Add previous release to the list Other versions: -{ref-bare}/8.15/release-highlights.html[8.15] +{ref-bare}/8.16/release-highlights.html[8.16] +| {ref-bare}/8.15/release-highlights.html[8.15] | {ref-bare}/8.14/release-highlights.html[8.14] | {ref-bare}/8.13/release-highlights.html[8.13] | {ref-bare}/8.12/release-highlights.html[8.12] @@ -30,6 +31,8 @@ Other versions: endif::[] +// The notable-highlights tag marks entries that +// should be featured in the Stack Installation and Upgrade Guide: // tag::notable-highlights[] [discrete] @@ -97,29 +100,3 @@ ZStandard offers ~12% lower storage usage and a ~14% higher indexing throughput // end::notable-highlights[] -[discrete] -[[esql_multi_value_fields_supported_in_geospatial_predicates]] -=== ESQL: Multi-value fields supported in Geospatial predicates -Supporting multi-value fields in `WHERE` predicates is a challenge due to not knowing whether `ALL` or `ANY` -of the values in the field should pass the predicate. -For example, should the field `age:[10,30]` pass the predicate `WHERE age>20` or not? -This ambiguity does not exist with the spatial predicates -`ST_INTERSECTS` and `ST_DISJOINT`, because the choice between `ANY` or `ALL` -is implied by the predicate itself. -Consider a predicate checking a field named `location` against a test geometry named `shape`: - -* `ST_INTERSECTS(field, shape)` - true if `ANY` value can intersect the shape -* `ST_DISJOINT(field, shape)` - true only if `ALL` values are disjoint from the shape - -This works even if the shape argument is itself a complex or compound geometry. 
- -Similar logic exists for `ST_CONTAINS` and `ST_WITHIN` predicates, but these are not as easily solved -with `ANY` or `ALL`, because a collection of geometries contains another collection if each of the contained -geometries is within at least one of the containing geometries. Evaluating this requires that the multi-value -field is first combined into a single geometry before performing the predicate check. - -* `ST_CONTAINS(field, shape)` - true if the combined geometry contains the shape -* `ST_WITHIN(field, shape)` - true if the combined geometry is within the shape - -{es-pull}112063[#112063] - diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 4b19d4b428526..48bf08ddfc028 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -187,6 +187,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_15_2 = new Version(8_15_02_99); public static final Version V_8_15_3 = new Version(8_15_03_99); public static final Version V_8_16_0 = new Version(8_16_00_99); + public static final Version V_8_17_0 = new Version(8_17_00_99); public static final Version V_9_0_0 = new Version(9_00_00_99); public static final Version CURRENT = V_9_0_0; From 9770ab7ac2da950b916743507abf8f9e73e084c7 Mon Sep 17 00:00:00 2001 From: Stef Nestor <26751266+stefnestor@users.noreply.github.com> Date: Wed, 16 Oct 2024 11:10:59 -0600 Subject: [PATCH 162/449] (Doc+) troubleshoot ILM videos (#114528) This links to our 6 newest [Support Troubleshooting](https://www.youtube.com/playlist?list=PL_mJOmq4zsHbQlfEMEh_30_LuV_hZp-3d) videos which are about resolving general ILM Health & the top five ILM rollover errors to the existing [Troubleshooting ILM errors](https://www.elastic.co/guide/en/elasticsearch/reference/master/index-lifecycle-error-handling.html). 
It side quests to link the watermark error to [its troubleshooting doc](https://www.elastic.co/guide/en/elasticsearch/reference/master/fix-watermark-errors.html). --- docs/reference/ilm/error-handling.asciidoc | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/docs/reference/ilm/error-handling.asciidoc b/docs/reference/ilm/error-handling.asciidoc index f810afc6c2b5f..e8df44653e9c5 100644 --- a/docs/reference/ilm/error-handling.asciidoc +++ b/docs/reference/ilm/error-handling.asciidoc @@ -8,6 +8,9 @@ When this happens, {ilm-init} moves the index to an `ERROR` step. If {ilm-init} cannot resolve the error automatically, execution is halted until you resolve the underlying issues with the policy, index, or cluster. +See this https://www.youtube.com/watch?v=VCIqkji3IwY[{ilm-init} health video] +for example troubleshooting walkthrough. + For example, you might have a `shrink-index` policy that shrinks an index to four shards once it is at least five days old: @@ -183,6 +186,8 @@ The rollover action then manages setting and updating the alias to Do not explicitly configure this same alias in the aliases section of an index template. +See this https://www.youtube.com/watch?v=Ww5POq4zZtY[resolving `duplicate alias` video] for an example troubleshooting walkthrough. + [discrete] ==== index.lifecycle.rollover_alias [x] does not point to index [y] @@ -191,6 +196,8 @@ Either the index is using the wrong alias or the alias does not exist. Check the `index.lifecycle.rollover_alias` <>. To see what aliases are configured, use <>. +See this https://www.youtube.com/watch?v=NKSe67x7aw8[resolving `not point to index` video] for an example troubleshooting walkthrough. + [discrete] ==== Setting [index.lifecycle.rollover_alias] for index [y] is empty or not defined @@ -198,6 +205,8 @@ The `index.lifecycle.rollover_alias` setting must be configured for the rollover Update the index settings to set `index.lifecycle.rollover_alias`. 
+See this https://www.youtube.com/watch?v=LRpMC2GS_FQ[resolving `empty or not defined` video] for an example troubleshooting walkthrough. + [discrete] ==== Alias [x] has more than one write index [y,z] @@ -205,6 +214,8 @@ Only one index can be designated as the write index for a particular alias. Use the <> API to set `is_write_index:false` for all but one index. +See this https://www.youtube.com/watch?v=jCUvZCT5Hm4[resolving `more than one write index` video] for an example troubleshooting walkthrough. + [discrete] ==== index name [x] does not match pattern ^.*-\d+ @@ -214,6 +225,8 @@ For example, `my-index` does not match the pattern requirement. Append a numeric value to the index name, for example `my-index-000001`. +See this https://www.youtube.com/watch?v=9sp1zF6iL00[resolving `does not match pattern` video] for an example troubleshooting walkthrough. + [discrete] ==== CircuitBreakingException: [x] data too large, data for [y] @@ -227,8 +240,7 @@ For more information, see <>. This indicates that the cluster is running out of disk space. This can happen when you don't have {ilm} set up to roll over from hot to warm nodes. - -Consider adding nodes, upgrading your hardware, or deleting unneeded indices. +For more information, see <>. 
[discrete] ==== security_exception: action [] is unauthorized for user [] with roles [], this action is granted by the index privileges [manage_follow_index,manage,all] From f99927e2d42cb4af5b03bd969357dc118df158c0 Mon Sep 17 00:00:00 2001 From: Stanislav Malyshev Date: Wed, 16 Oct 2024 11:49:08 -0600 Subject: [PATCH 163/449] Make ESQL EnrichPolicyResolver try to do proper connection before sending requests (#114870) * Make ESQL EnrichPolicyResolver try to do proper connection before sending requests * Make encureConnected be !skipUnavailable --- .../esql/enrich/EnrichPolicyResolver.java | 45 ++++++++++--------- .../enrich/EnrichPolicyResolverTests.java | 4 +- 2 files changed, 27 insertions(+), 22 deletions(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java index 447df09942ca8..e67c406e26929 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java @@ -25,6 +25,7 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteClusterAware; +import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportRequest; @@ -72,12 +73,14 @@ public class EnrichPolicyResolver { private final IndexResolver indexResolver; private final TransportService transportService; private final ThreadPool threadPool; + private final RemoteClusterService remoteClusterService; public EnrichPolicyResolver(ClusterService clusterService, TransportService transportService, IndexResolver indexResolver) { this.clusterService = clusterService; this.transportService = transportService; 
this.indexResolver = indexResolver; this.threadPool = transportService.getThreadPool(); + this.remoteClusterService = transportService.getRemoteClusterService(); transportService.registerRequestHandler( RESOLVE_ACTION_NAME, threadPool.executor(ThreadPool.Names.SEARCH), @@ -257,22 +260,21 @@ private void lookupPolicies( // remote clusters if (remotePolicies.isEmpty() == false) { for (String cluster : remoteClusters) { - final Transport.Connection connection; - try { - connection = getRemoteConnection(cluster); - } catch (Exception e) { - refs.acquire().onFailure(e); - return; - } - transportService.sendRequest( - connection, - RESOLVE_ACTION_NAME, - new LookupRequest(cluster, remotePolicies), - TransportRequestOptions.EMPTY, - new ActionListenerResponseHandler<>( - refs.acquire(resp -> lookupResponses.put(cluster, resp)), - LookupResponse::new, - threadPool.executor(ThreadPool.Names.SEARCH) + ActionListener lookupListener = refs.acquire(resp -> lookupResponses.put(cluster, resp)); + getRemoteConnection( + cluster, + lookupListener.delegateFailureAndWrap( + (delegate, connection) -> transportService.sendRequest( + connection, + RESOLVE_ACTION_NAME, + new LookupRequest(cluster, remotePolicies), + TransportRequestOptions.EMPTY, + new ActionListenerResponseHandler<>( + delegate, + LookupResponse::new, + threadPool.executor(ThreadPool.Names.SEARCH) + ) + ) ) ); } @@ -389,13 +391,16 @@ protected Map availablePolicies() { return metadata == null ? 
Map.of() : metadata.getPolicies(); } - protected Transport.Connection getRemoteConnection(String cluster) { - return transportService.getRemoteClusterService().getConnection(cluster); + protected void getRemoteConnection(String cluster, ActionListener listener) { + remoteClusterService.maybeEnsureConnectedAndGetConnection( + cluster, + remoteClusterService.isSkipUnavailable(cluster) == false, + listener + ); } public Map> groupIndicesPerCluster(String[] indices) { - return transportService.getRemoteClusterService() - .groupIndices(SearchRequest.DEFAULT_INDICES_OPTIONS, indices) + return remoteClusterService.groupIndices(SearchRequest.DEFAULT_INDICES_OPTIONS, indices) .entrySet() .stream() .collect(Collectors.toMap(Map.Entry::getKey, e -> Arrays.asList(e.getValue().indices()))); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolverTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolverTests.java index 05a7486a18068..39170f1a305df 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolverTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolverTests.java @@ -446,9 +446,9 @@ EnrichResolution resolvePolicies(Collection clusters, Collection listener) { assertThat("Must only called on the local cluster", cluster, equalTo(LOCAL_CLUSTER_GROUP_KEY)); - return transports.get("").getConnection(transports.get(remoteCluster).getLocalNode()); + listener.onResponse(transports.get("").getConnection(transports.get(remoteCluster).getLocalNode())); } static ClusterService mockClusterService(Map policies) { From 8b8796908ac0c7a73566adb4647476b66656119c Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Wed, 16 Oct 2024 19:51:18 +0200 Subject: [PATCH 164/449] Enhance empty queue conditional in slicing logic (#114911) With recent changes in Lucene 9.12 around not forking execution when not necessary 
(see https://github.com/apache/lucene/pull/13472), we have removed the search worker thread pool in #111099. The worker thread pool had unlimited queue, and we feared that we could have much more queueing on the search thread pool if we execute segment level searches on the same thread pool as the shard level searches, because every shard search would take up to a thread per slice when executing the query phase. We have then introduced an additional conditional to stop parallelizing when there is a queue. That is perhaps a bit extreme, as it's a decision made when creating the searcher, while a queue may no longer be there once the search is executing. This has caused some benchmark regressions, given that having a queue may be a transient scenario, especially with short-lived segment searches being queued up. We may end up disabling inter-segment concurrency more aggressively than we would want, penalizing requests that do benefit from concurrency. At the same time, we do want to have some kind of protection against rejections of shard searches that would be caused by excessive slicing. When the queue is above a certain size, we can turn off the slicing and effectively disable inter-segment concurrency. With this commit we set that threshold to be the number of threads in the search pool.
--- .../search/DefaultSearchContext.java | 2 +- .../search/DefaultSearchContextTests.java | 209 ++++++++++++------ 2 files changed, 148 insertions(+), 63 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java index 1521b17a81766..8ac35f7c40caa 100644 --- a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java @@ -291,7 +291,7 @@ static int determineMaximumNumberOfSlices( ToLongFunction fieldCardinality ) { return executor instanceof ThreadPoolExecutor tpe - && tpe.getQueue().isEmpty() + && tpe.getQueue().size() <= tpe.getMaximumPoolSize() && isParallelCollectionSupportedForResults(resultsType, request.source(), fieldCardinality, enableQueryPhaseParallelCollection) ? tpe.getMaximumPoolSize() : 1; diff --git a/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java b/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java index 0e4945e8bb8d1..a474c1dc38c50 100644 --- a/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java +++ b/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java @@ -81,6 +81,7 @@ import java.util.UUID; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; +import java.util.concurrent.ThreadPoolExecutor; import java.util.function.Function; import java.util.function.Supplier; import java.util.function.ToLongFunction; @@ -507,10 +508,10 @@ public void testNewIdLoaderWithTsdbAndRoutingPathMatch() throws Exception { } } - public void testDetermineMaximumNumberOfSlices() { + private static ShardSearchRequest createParallelRequest() { IndexShard indexShard = mock(IndexShard.class); when(indexShard.shardId()).thenReturn(new ShardId("index", "uuid", 0)); - ShardSearchRequest parallelReq = new ShardSearchRequest( + return new 
ShardSearchRequest( OriginalIndices.NONE, new SearchRequest().allowPartialSearchResults(randomBoolean()), indexShard.shardId(), @@ -521,69 +522,74 @@ public void testDetermineMaximumNumberOfSlices() { System.currentTimeMillis(), null ); - ShardSearchRequest singleSliceReq = new ShardSearchRequest( - OriginalIndices.NONE, - new SearchRequest().allowPartialSearchResults(randomBoolean()) - .source(new SearchSourceBuilder().sort(SortBuilders.fieldSort(FieldSortBuilder.DOC_FIELD_NAME))), - indexShard.shardId(), - 0, - 1, - AliasFilter.EMPTY, - 1f, - System.currentTimeMillis(), - null - ); - int executorPoolSize = randomIntBetween(1, 100); - ExecutorService threadPoolExecutor = EsExecutors.newFixed( - "test", - executorPoolSize, - 0, - Thread::new, - new ThreadContext(Settings.EMPTY), - EsExecutors.TaskTrackingConfig.DO_NOT_TRACK - ); - ExecutorService notThreadPoolExecutor = Executors.newWorkStealingPool(); - ToLongFunction fieldCardinality = name -> -1; + } + + public void testDetermineMaximumNumberOfSlicesNoExecutor() { + ToLongFunction fieldCardinality = name -> { throw new UnsupportedOperationException(); }; assertEquals( - executorPoolSize, + 1, DefaultSearchContext.determineMaximumNumberOfSlices( - threadPoolExecutor, - parallelReq, + null, + createParallelRequest(), SearchService.ResultsType.DFS, - true, + randomBoolean(), fieldCardinality ) ); assertEquals( - executorPoolSize, + 1, DefaultSearchContext.determineMaximumNumberOfSlices( - threadPoolExecutor, - singleSliceReq, - SearchService.ResultsType.DFS, - true, + null, + createParallelRequest(), + SearchService.ResultsType.QUERY, + randomBoolean(), fieldCardinality ) ); + } + + public void testDetermineMaximumNumberOfSlicesNotThreadPoolExecutor() { + ExecutorService notThreadPoolExecutor = Executors.newWorkStealingPool(); + ToLongFunction fieldCardinality = name -> { throw new UnsupportedOperationException(); }; assertEquals( 1, - DefaultSearchContext.determineMaximumNumberOfSlices(null, parallelReq, 
SearchService.ResultsType.DFS, true, fieldCardinality) + DefaultSearchContext.determineMaximumNumberOfSlices( + notThreadPoolExecutor, + createParallelRequest(), + SearchService.ResultsType.DFS, + randomBoolean(), + fieldCardinality + ) ); assertEquals( - executorPoolSize, + 1, DefaultSearchContext.determineMaximumNumberOfSlices( - threadPoolExecutor, - parallelReq, + notThreadPoolExecutor, + createParallelRequest(), SearchService.ResultsType.QUERY, - true, + randomBoolean(), fieldCardinality ) ); + } + + public void testDetermineMaximumNumberOfSlicesEnableQueryPhaseParallelCollection() { + int executorPoolSize = randomIntBetween(1, 100); + ThreadPoolExecutor threadPoolExecutor = EsExecutors.newFixed( + "test", + executorPoolSize, + 0, + Thread::new, + new ThreadContext(Settings.EMPTY), + EsExecutors.TaskTrackingConfig.DO_NOT_TRACK + ); + ToLongFunction fieldCardinality = name -> -1; assertEquals( - 1, + executorPoolSize, DefaultSearchContext.determineMaximumNumberOfSlices( threadPoolExecutor, - singleSliceReq, + createParallelRequest(), SearchService.ResultsType.QUERY, true, fieldCardinality @@ -592,54 +598,133 @@ public void testDetermineMaximumNumberOfSlices() { assertEquals( 1, DefaultSearchContext.determineMaximumNumberOfSlices( - notThreadPoolExecutor, - parallelReq, - SearchService.ResultsType.DFS, - true, + threadPoolExecutor, + createParallelRequest(), + SearchService.ResultsType.QUERY, + false, fieldCardinality ) ); - assertEquals( executorPoolSize, DefaultSearchContext.determineMaximumNumberOfSlices( threadPoolExecutor, - parallelReq, + createParallelRequest(), SearchService.ResultsType.DFS, - false, + randomBoolean(), fieldCardinality ) ); - assertEquals( + } + + public void testDetermineMaximumNumberOfSlicesSingleSortByField() { + IndexShard indexShard = mock(IndexShard.class); + when(indexShard.shardId()).thenReturn(new ShardId("index", "uuid", 0)); + ShardSearchRequest singleSliceReq = new ShardSearchRequest( + OriginalIndices.NONE, + new 
SearchRequest().allowPartialSearchResults(randomBoolean()) + .source(new SearchSourceBuilder().sort(SortBuilders.fieldSort(FieldSortBuilder.DOC_FIELD_NAME))), + indexShard.shardId(), + 0, 1, - DefaultSearchContext.determineMaximumNumberOfSlices(null, parallelReq, SearchService.ResultsType.DFS, false, fieldCardinality) + AliasFilter.EMPTY, + 1f, + System.currentTimeMillis(), + null ); + ToLongFunction fieldCardinality = name -> { throw new UnsupportedOperationException(); }; + int executorPoolSize = randomIntBetween(1, 100); + ThreadPoolExecutor threadPoolExecutor = EsExecutors.newFixed( + "test", + executorPoolSize, + 0, + Thread::new, + new ThreadContext(Settings.EMPTY), + EsExecutors.TaskTrackingConfig.DO_NOT_TRACK + ); + // DFS concurrency does not rely on slices, hence it kicks in regardless of the request (supportsParallelCollection is not called) assertEquals( - 1, + executorPoolSize, DefaultSearchContext.determineMaximumNumberOfSlices( threadPoolExecutor, - parallelReq, - SearchService.ResultsType.QUERY, - false, + singleSliceReq, + SearchService.ResultsType.DFS, + true, fieldCardinality ) ); - assertEquals( - 1, - DefaultSearchContext.determineMaximumNumberOfSlices(null, parallelReq, SearchService.ResultsType.QUERY, false, fieldCardinality) - ); assertEquals( 1, DefaultSearchContext.determineMaximumNumberOfSlices( - notThreadPoolExecutor, - parallelReq, - SearchService.ResultsType.DFS, - false, + threadPoolExecutor, + singleSliceReq, + SearchService.ResultsType.QUERY, + true, fieldCardinality ) ); } + public void testDetermineMaximumNumberOfSlicesWithQueue() { + int executorPoolSize = randomIntBetween(1, 100); + ThreadPoolExecutor threadPoolExecutor = EsExecutors.newFixed( + "test", + executorPoolSize, + 1000, + Thread::new, + new ThreadContext(Settings.EMPTY), + EsExecutors.TaskTrackingConfig.DO_NOT_TRACK + ); + ToLongFunction fieldCardinality = name -> { throw new UnsupportedOperationException(); }; + + for (int i = 0; i < executorPoolSize; i++) { + 
assertTrue(threadPoolExecutor.getQueue().offer(() -> {})); + assertEquals( + executorPoolSize, + DefaultSearchContext.determineMaximumNumberOfSlices( + threadPoolExecutor, + createParallelRequest(), + SearchService.ResultsType.DFS, + true, + fieldCardinality + ) + ); + assertEquals( + executorPoolSize, + DefaultSearchContext.determineMaximumNumberOfSlices( + threadPoolExecutor, + createParallelRequest(), + SearchService.ResultsType.QUERY, + true, + fieldCardinality + ) + ); + } + for (int i = 0; i < 100; i++) { + assertTrue(threadPoolExecutor.getQueue().offer(() -> {})); + assertEquals( + 1, + DefaultSearchContext.determineMaximumNumberOfSlices( + threadPoolExecutor, + createParallelRequest(), + SearchService.ResultsType.DFS, + true, + fieldCardinality + ) + ); + assertEquals( + 1, + DefaultSearchContext.determineMaximumNumberOfSlices( + threadPoolExecutor, + createParallelRequest(), + SearchService.ResultsType.QUERY, + true, + fieldCardinality + ) + ); + } + } + public void testIsParallelCollectionSupportedForResults() { SearchSourceBuilder searchSourceBuilderOrNull = randomBoolean() ? 
null : new SearchSourceBuilder(); ToLongFunction fieldCardinality = name -> -1; From 33ea3116c9295012ceddbdddb5310918973a4753 Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Wed, 16 Oct 2024 13:06:06 -0500 Subject: [PATCH 165/449] Reducing error-level stack trace logging for normal events in GeoIpDownloader (#114924) --- docs/changelog/114924.yaml | 5 +++ .../ingest/geoip/GeoIpDownloader.java | 12 +++++-- .../ingest/geoip/GeoIpDownloaderTests.java | 34 ++++++------------- 3 files changed, 25 insertions(+), 26 deletions(-) create mode 100644 docs/changelog/114924.yaml diff --git a/docs/changelog/114924.yaml b/docs/changelog/114924.yaml new file mode 100644 index 0000000000000..536f446ef790d --- /dev/null +++ b/docs/changelog/114924.yaml @@ -0,0 +1,5 @@ +pr: 114924 +summary: Reducing error-level stack trace logging for normal events in `GeoIpDownloader` +area: Ingest Node +type: bug +issues: [] diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java index dcaa8f6f2fb03..ae562d3c7359a 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java @@ -11,7 +11,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; @@ -139,11 +138,18 @@ void updateDatabases() throws IOException { if (geoipIndex != null) { logger.trace("The {} index is not null", GeoIpDownloader.DATABASES_INDEX); if (clusterState.getRoutingTable().index(geoipIndex.getWriteIndex()).allPrimaryShardsActive() == false) { - throw new ElasticsearchException("not all primary shards of [" 
+ DATABASES_INDEX + "] index are active"); + logger.debug( + "Not updating geoip database because not all primary shards of the [" + DATABASES_INDEX + "] index are active." + ); + return; } var blockException = clusterState.blocks().indexBlockedException(ClusterBlockLevel.WRITE, geoipIndex.getWriteIndex().getName()); if (blockException != null) { - throw blockException; + logger.debug( + "Not updating geoip database because there is a write block on the " + geoipIndex.getWriteIndex().getName() + " index", + blockException + ); + return; } } if (eagerDownloadSupplier.get() || atLeastOneGeoipProcessorSupplier.get()) { diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTests.java index e73f0a36cc632..5698328792787 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.ingest.geoip; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; @@ -25,11 +24,9 @@ import org.elasticsearch.action.index.TransportIndexAction; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.ReferenceDocs; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.reindex.BulkByScrollResponse; @@ -583,37 +580,28 @@ void 
processDatabase(Map databaseInfo) { assertFalse(it.hasNext()); } - public void testUpdateDatabasesWriteBlock() { + public void testUpdateDatabasesWriteBlock() throws IOException { + /* + * Here we make sure that we bail out before making an httpClient request if there is write block on the .geoip_databases index + */ ClusterState state = createClusterState(new PersistentTasksCustomMetadata(1L, Map.of())); var geoIpIndex = state.getMetadata().getIndicesLookup().get(GeoIpDownloader.DATABASES_INDEX).getWriteIndex().getName(); state = ClusterState.builder(state) .blocks(new ClusterBlocks.Builder().addIndexBlock(geoIpIndex, IndexMetadata.INDEX_READ_ONLY_ALLOW_DELETE_BLOCK)) .build(); when(clusterService.state()).thenReturn(state); - var e = expectThrows(ClusterBlockException.class, () -> geoIpDownloader.updateDatabases()); - assertThat( - e.getMessage(), - equalTo( - "index [" - + geoIpIndex - + "] blocked by: [TOO_MANY_REQUESTS/12/disk usage exceeded flood-stage watermark, " - + "index has read-only-allow-delete block; for more information, see " - + ReferenceDocs.FLOOD_STAGE_WATERMARK - + "];" - ) - ); + geoIpDownloader.updateDatabases(); verifyNoInteractions(httpClient); } - public void testUpdateDatabasesIndexNotReady() { + public void testUpdateDatabasesIndexNotReady() throws IOException { + /* + * Here we make sure that we bail out before making an httpClient request if there are unallocated shards on the .geoip_databases + * index + */ ClusterState state = createClusterState(new PersistentTasksCustomMetadata(1L, Map.of()), true); - var geoIpIndex = state.getMetadata().getIndicesLookup().get(GeoIpDownloader.DATABASES_INDEX).getWriteIndex().getName(); - state = ClusterState.builder(state) - .blocks(new ClusterBlocks.Builder().addIndexBlock(geoIpIndex, IndexMetadata.INDEX_READ_ONLY_ALLOW_DELETE_BLOCK)) - .build(); when(clusterService.state()).thenReturn(state); - var e = expectThrows(ElasticsearchException.class, () -> geoIpDownloader.updateDatabases()); - 
assertThat(e.getMessage(), equalTo("not all primary shards of [.geoip_databases] index are active")); + geoIpDownloader.updateDatabases(); verifyNoInteractions(httpClient); } From e14418489685689d91c42c3477063af122a17b45 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Wed, 16 Oct 2024 12:18:35 -0600 Subject: [PATCH 166/449] Standardize error code when bulk body is invalid (#114869) Currently the incremental and non-incremental bulk variations will return different error codes when the json body provided is invalid. This commit ensures both version return status code 400. Additionally, this renames the incremental rest tests to bulk tests and ensures that all tests work with both bulk api versions. We set these tests to randomize which version of the api we test each run. --- docs/changelog/114869.yaml | 5 +++ ...ementalBulkRestIT.java => BulkRestIT.java} | 42 ++++++++++++++---- .../rest/action/document/RestBulkAction.java | 43 ++++++++++++------- 3 files changed, 65 insertions(+), 25 deletions(-) create mode 100644 docs/changelog/114869.yaml rename qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/{IncrementalBulkRestIT.java => BulkRestIT.java} (81%) diff --git a/docs/changelog/114869.yaml b/docs/changelog/114869.yaml new file mode 100644 index 0000000000000..755418e7ce4d9 --- /dev/null +++ b/docs/changelog/114869.yaml @@ -0,0 +1,5 @@ +pr: 114869 +summary: Standardize error code when bulk body is invalid +area: CRUD +type: bug +issues: [] diff --git a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/IncrementalBulkRestIT.java b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/BulkRestIT.java similarity index 81% rename from qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/IncrementalBulkRestIT.java rename to qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/BulkRestIT.java index da05011696274..369d0824bdb28 100644 --- 
a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/IncrementalBulkRestIT.java +++ b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/BulkRestIT.java @@ -9,6 +9,8 @@ package org.elasticsearch.http; +import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.entity.ContentType; import org.elasticsearch.action.bulk.IncrementalBulkService; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; @@ -19,24 +21,30 @@ import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.List; import java.util.Map; +import static org.elasticsearch.rest.RestStatus.BAD_REQUEST; import static org.elasticsearch.rest.RestStatus.OK; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.equalTo; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE, supportsDedicatedMasters = false, numDataNodes = 2, numClientNodes = 0) -public class IncrementalBulkRestIT extends HttpSmokeTestCase { +public class BulkRestIT extends HttpSmokeTestCase { @Override protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { return Settings.builder() .put(super.nodeSettings(nodeOrdinal, otherSettings)) - .put(IncrementalBulkService.INCREMENTAL_BULK.getKey(), true) + .put(IncrementalBulkService.INCREMENTAL_BULK.getKey(), seventyFivePercentOfTheTime()) .build(); } + private static boolean seventyFivePercentOfTheTime() { + return (randomBoolean() && randomBoolean()) == false; + } + public void testBulkUriMatchingDoesNotMatchBulkCapabilitiesApi() throws IOException { Request request = new Request("GET", "/_capabilities?method=GET&path=%2F_bulk&capabilities=failure_store_status&pretty"); Response response = getRestClient().performRequest(request); @@ -51,6 +59,26 @@ public void testBulkMissingBody() throws IOException { assertThat(responseException.getMessage(), containsString("request body is required")); 
} + public void testBulkInvalidIndexNameString() throws IOException { + Request request = new Request("POST", "/_bulk"); + + byte[] bytes1 = "{\"create\":{\"_index\":\"".getBytes(StandardCharsets.UTF_8); + byte[] bytes2 = new byte[] { (byte) 0xfe, (byte) 0xfe, (byte) 0xff, (byte) 0xff }; + byte[] bytes3 = "\",\"_id\":\"1\"}}\n{\"field\":1}\n\r\n".getBytes(StandardCharsets.UTF_8); + byte[] bulkBody = new byte[bytes1.length + bytes2.length + bytes3.length]; + System.arraycopy(bytes1, 0, bulkBody, 0, bytes1.length); + System.arraycopy(bytes2, 0, bulkBody, bytes1.length, bytes2.length); + System.arraycopy(bytes3, 0, bulkBody, bytes1.length + bytes2.length, bytes3.length); + + request.setEntity(new ByteArrayEntity(bulkBody, ContentType.APPLICATION_JSON)); + + ResponseException responseException = expectThrows(ResponseException.class, () -> getRestClient().performRequest(request)); + assertThat(responseException.getResponse().getStatusLine().getStatusCode(), equalTo(BAD_REQUEST.getStatus())); + assertThat(responseException.getMessage(), containsString("could not parse bulk request body")); + assertThat(responseException.getMessage(), containsString("json_parse_exception")); + assertThat(responseException.getMessage(), containsString("Invalid UTF-8")); + } + public void testBulkRequestBodyImproperlyTerminated() throws IOException { Request request = new Request(randomBoolean() ? "POST" : "PUT", "/_bulk"); // missing final line of the bulk body. 
cannot process @@ -61,10 +89,10 @@ public void testBulkRequestBodyImproperlyTerminated() throws IOException { ); ResponseException responseException = expectThrows(ResponseException.class, () -> getRestClient().performRequest(request)); assertEquals(400, responseException.getResponse().getStatusLine().getStatusCode()); - assertThat(responseException.getMessage(), containsString("could not parse bulk request body")); + assertThat(responseException.getMessage(), containsString("The bulk request must be terminated by a newline")); } - public void testIncrementalBulk() throws IOException { + public void testBulkRequest() throws IOException { Request createRequest = new Request("PUT", "/index_name"); createRequest.setJsonEntity(""" { @@ -81,7 +109,6 @@ public void testIncrementalBulk() throws IOException { Request firstBulkRequest = new Request("POST", "/index_name/_bulk"); - // index documents for the rollup job String bulkBody = "{\"index\":{\"_index\":\"index_name\",\"_id\":\"1\"}}\n" + "{\"field\":1}\n" + "{\"index\":{\"_index\":\"index_name\",\"_id\":\"2\"}}\n" @@ -113,7 +140,6 @@ public void testBulkWithIncrementalDisabled() throws IOException { Request firstBulkRequest = new Request("POST", "/index_name/_bulk"); - // index documents for the rollup job String bulkBody = "{\"index\":{\"_index\":\"index_name\",\"_id\":\"1\"}}\n" + "{\"field\":1}\n" + "{\"index\":{\"_index\":\"index_name\",\"_id\":\"2\"}}\n" @@ -137,7 +163,7 @@ public void testBulkWithIncrementalDisabled() throws IOException { } } - public void testIncrementalMalformed() throws IOException { + public void testMalformedActionLineBulk() throws IOException { Request createRequest = new Request("PUT", "/index_name"); createRequest.setJsonEntity(""" { @@ -154,7 +180,6 @@ public void testIncrementalMalformed() throws IOException { Request bulkRequest = new Request("POST", "/index_name/_bulk"); - // index documents for the rollup job final StringBuilder bulk = new StringBuilder(); 
bulk.append("{\"index\":{\"_index\":\"index_name\"}}\n"); bulk.append("{\"field\":1}\n"); @@ -170,7 +195,6 @@ public void testIncrementalMalformed() throws IOException { private static void sendLargeBulk() throws IOException { Request bulkRequest = new Request("POST", "/index_name/_bulk"); - // index documents for the rollup job final StringBuilder bulk = new StringBuilder(); bulk.append("{\"delete\":{\"_index\":\"index_name\",\"_id\":\"1\"}}\n"); int updates = 0; diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java index 03694c7442d4d..1e80e6de60d65 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java @@ -104,19 +104,23 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC boolean defaultRequireDataStream = request.paramAsBoolean(DocWriteRequest.REQUIRE_DATA_STREAM, false); bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT)); bulkRequest.setRefreshPolicy(request.param("refresh")); - bulkRequest.add( - request.requiredContent(), - defaultIndex, - defaultRouting, - defaultFetchSourceContext, - defaultPipeline, - defaultRequireAlias, - defaultRequireDataStream, - defaultListExecutedPipelines, - allowExplicitIndex, - request.getXContentType(), - request.getRestApiVersion() - ); + try { + bulkRequest.add( + request.requiredContent(), + defaultIndex, + defaultRouting, + defaultFetchSourceContext, + defaultPipeline, + defaultRequireAlias, + defaultRequireDataStream, + defaultListExecutedPipelines, + allowExplicitIndex, + request.getXContentType(), + request.getRestApiVersion() + ); + } catch (Exception e) { + return channel -> new RestToXContentListener<>(channel).onFailure(parseFailureException(e)); + } return channel -> client.bulk(bulkRequest, new 
RestRefCountedChunkedToXContentListener<>(channel)); } else { @@ -127,6 +131,15 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC } } + private static Exception parseFailureException(Exception e) { + if (e instanceof IllegalArgumentException) { + return e; + } else { + // TODO: Maybe improve in follow-up to be XContentParseException and include line number and column + return new ElasticsearchParseException("could not parse bulk request body", e); + } + } + static class ChunkHandler implements BaseRestHandler.RequestBodyChunkConsumer { private final boolean allowExplicitIndex; @@ -219,9 +232,7 @@ public void handleChunk(RestChannel channel, ReleasableBytesReference chunk, boo } catch (Exception e) { shortCircuit(); - new RestToXContentListener<>(channel).onFailure( - new ElasticsearchParseException("could not parse bulk request body", e) - ); + new RestToXContentListener<>(channel).onFailure(parseFailureException(e)); return; } } From e79127ba2a0b47f8681bf6e41514ad3af2cc793c Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Wed, 16 Oct 2024 21:56:13 +0300 Subject: [PATCH 167/449] Adding deprecation warnings for rank and sub_searches (#114854) --- docs/changelog/114854.yaml | 10 +++ ...ulkByScrollParallelizationHelperTests.java | 4 +- .../search/builder/SearchSourceBuilder.java | 6 +- .../search/AbstractSearchTestCase.java | 1 - .../vectors/KnnSearchRequestParserTests.java | 1 - .../search/RandomSearchRequestGenerator.java | 17 ------ .../xpack/rank/rrf/RRFRankBuilder.java | 7 ++- .../xpack/rank/rrf/RRFRankDoc.java | 2 +- .../xpack/rank/rrf/RRFRetrieverBuilder.java | 7 ++- .../rrf/RRFRetrieverBuilderParsingTests.java | 2 +- .../rest-api-spec/test/rrf/100_rank_rrf.yml | 25 ++++++++ .../test/rrf/150_rank_rrf_pagination.yml | 61 +++++++++++++++++++ .../test/rrf/200_rank_rrf_script.yml | 21 +++++++ .../test/rrf/550_rrf_sub_searches_explain.yml | 38 ++++++++++++ .../test/rrf/600_rrf_retriever_profile.yml | 7 +++ 15 files changed, 
178 insertions(+), 31 deletions(-) create mode 100644 docs/changelog/114854.yaml diff --git a/docs/changelog/114854.yaml b/docs/changelog/114854.yaml new file mode 100644 index 0000000000000..144a10ba85043 --- /dev/null +++ b/docs/changelog/114854.yaml @@ -0,0 +1,10 @@ +pr: 114854 +summary: Adding deprecation warnings for rrf using rank and `sub_searches` +area: Search +type: deprecation +issues: [] +deprecation: + title: Adding deprecation warnings for rrf using rank and `sub_searches` + area: REST API + details: Search API parameter `sub_searches` will no longer be supported and will be removed in future releases. Similarly, `rrf` can only be used through the specified `retriever` and no longer through the `rank` parameter + impact: Requests specifying rrf through `rank` and/or `sub_searches` elements will be disallowed in a future version. Users should instead utilize the new `retriever` parameter. diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/BulkByScrollParallelizationHelperTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/BulkByScrollParallelizationHelperTests.java index a6e28477f8582..ebb4471566fbd 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/BulkByScrollParallelizationHelperTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/BulkByScrollParallelizationHelperTests.java @@ -15,8 +15,8 @@ import org.elasticsearch.test.ESTestCase; import java.io.IOException; +import java.util.Collections; -import static java.util.Collections.emptyList; import static org.elasticsearch.reindex.BulkByScrollParallelizationHelper.sliceIntoSubRequests; import static org.elasticsearch.search.RandomSearchRequestGenerator.randomSearchRequest; import static org.elasticsearch.search.RandomSearchRequestGenerator.randomSearchSourceBuilder; @@ -24,7 +24,7 @@ public class BulkByScrollParallelizationHelperTests extends ESTestCase { public void testSliceIntoSubRequests() throws IOException { SearchRequest
searchRequest = randomSearchRequest( - () -> randomSearchSourceBuilder(() -> null, () -> null, () -> null, () -> null, () -> emptyList(), () -> null, () -> null) + () -> randomSearchSourceBuilder(() -> null, () -> null, () -> null, Collections::emptyList, () -> null, () -> null) ); if (searchRequest.source() != null) { // Clear the slice builder if there is one set. We can't call sliceIntoSubRequests if it is. diff --git a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 6d427aace51dd..6ceb02f0e797f 100644 --- a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -25,6 +25,7 @@ import org.elasticsearch.core.Booleans; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -98,10 +99,11 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R public static final ParseField TIMEOUT_FIELD = new ParseField("timeout"); public static final ParseField TERMINATE_AFTER_FIELD = new ParseField("terminate_after"); public static final ParseField QUERY_FIELD = new ParseField("query"); - public static final ParseField SUB_SEARCHES_FIELD = new ParseField("sub_searches"); + @UpdateForV10(owner = UpdateForV10.Owner.SEARCH_RELEVANCE) // remove [sub_searches] and [rank] support in 10.0 + public static final ParseField SUB_SEARCHES_FIELD = new ParseField("sub_searches").withAllDeprecated("retriever"); + public static final ParseField RANK_FIELD = new ParseField("rank").withAllDeprecated("retriever"); public static final ParseField POST_FILTER_FIELD = new ParseField("post_filter"); public static final 
ParseField KNN_FIELD = new ParseField("knn"); - public static final ParseField RANK_FIELD = new ParseField("rank"); public static final ParseField MIN_SCORE_FIELD = new ParseField("min_score"); public static final ParseField VERSION_FIELD = new ParseField("version"); public static final ParseField SEQ_NO_PRIMARY_TERM_FIELD = new ParseField("seq_no_primary_term"); diff --git a/server/src/test/java/org/elasticsearch/search/AbstractSearchTestCase.java b/server/src/test/java/org/elasticsearch/search/AbstractSearchTestCase.java index b716f11b5fffb..88c83df3e20fc 100644 --- a/server/src/test/java/org/elasticsearch/search/AbstractSearchTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/AbstractSearchTestCase.java @@ -89,7 +89,6 @@ protected SearchSourceBuilder createSearchSourceBuilder() { return RandomSearchRequestGenerator.randomSearchSourceBuilder( HighlightBuilderTests::randomHighlighterBuilder, SuggestBuilderTests::randomSuggestBuilder, - TestRankBuilder::randomRankBuilder, QueryRescorerBuilderTests::randomRescoreBuilder, randomExtBuilders, CollapseBuilderTests::randomCollapseBuilder, diff --git a/server/src/test/java/org/elasticsearch/search/vectors/KnnSearchRequestParserTests.java b/server/src/test/java/org/elasticsearch/search/vectors/KnnSearchRequestParserTests.java index d9fe421bafb46..4e4d2158a9574 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/KnnSearchRequestParserTests.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/KnnSearchRequestParserTests.java @@ -74,7 +74,6 @@ public void testParseSearchRequest() throws IOException { () -> null, () -> null, () -> null, - () -> null, Collections::emptyList, () -> null, () -> null diff --git a/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java b/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java index b59f1a5e5f029..363d34ca3ff86 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java @@ -23,13 +23,11 @@ import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.builder.PointInTimeBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.builder.SubSearchSourceBuilder; import org.elasticsearch.search.collapse.CollapseBuilder; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.FieldAndFormat; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.rank.RankBuilder; import org.elasticsearch.search.rescore.RescorerBuilder; import org.elasticsearch.search.searchafter.SearchAfterBuilder; import org.elasticsearch.search.slice.SliceBuilder; @@ -122,7 +120,6 @@ public static SearchRequest randomSearchRequest(Supplier ra public static SearchSourceBuilder randomSearchSourceBuilder( Supplier randomHighlightBuilder, Supplier randomSuggestBuilder, - Supplier rankContextBuilderSupplier, Supplier> randomRescoreBuilder, Supplier> randomExtBuilders, Supplier randomCollapseBuilder, @@ -250,17 +247,6 @@ public static SearchSourceBuilder randomSearchSourceBuilder( } if (randomBoolean()) { builder.query(QueryBuilders.termQuery(randomAlphaOfLengthBetween(5, 20), randomAlphaOfLengthBetween(5, 20))); - } else if (randomBoolean()) { - builder.subSearches( - List.of( - new SubSearchSourceBuilder( - QueryBuilders.termQuery(randomAlphaOfLengthBetween(5, 20), randomAlphaOfLengthBetween(5, 20)) - ), - new SubSearchSourceBuilder( - QueryBuilders.termQuery(randomAlphaOfLengthBetween(5, 20), randomAlphaOfLengthBetween(5, 20)) - ) - ) - ); } if (randomBoolean()) { builder.postFilter(QueryBuilders.termQuery(randomAlphaOfLengthBetween(5, 20), 
randomAlphaOfLengthBetween(5, 20))); @@ -354,9 +340,6 @@ public static SearchSourceBuilder randomSearchSourceBuilder( if (randomBoolean()) { builder.suggest(randomSuggestBuilder.get()); } - if (randomBoolean()) { - builder.rankBuilder(rankContextBuilderSupplier.get()); - } if (randomBoolean()) { int numRescores = randomIntBetween(1, 5); for (int i = 0; i < numRescores; i++) { diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankBuilder.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankBuilder.java index 10aff2f4d68cd..fb20f834937d3 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankBuilder.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankBuilder.java @@ -36,16 +36,17 @@ /** * The builder to support RRF. Adds user-defined parameters for window size and rank constant. + * + * @deprecated RRF support is provided through the retriever framework. Please use {@link RRFRetrieverBuilder} instead */ +@Deprecated public class RRFRankBuilder extends RankBuilder { - public static final int DEFAULT_RANK_CONSTANT = 60; - public static final ParseField RANK_CONSTANT_FIELD = new ParseField("rank_constant"); static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(RRFRankPlugin.NAME, args -> { int windowSize = args[0] == null ? DEFAULT_RANK_WINDOW_SIZE : (int) args[0]; - int rankConstant = args[1] == null ? DEFAULT_RANK_CONSTANT : (int) args[1]; + int rankConstant = args[1] == null ?
RRFRetrieverBuilder.DEFAULT_RANK_CONSTANT : (int) args[1]; return new RRFRankBuilder(windowSize, rankConstant); }); diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankDoc.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankDoc.java index 272df248e53e9..4cd10801b298c 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankDoc.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankDoc.java @@ -19,7 +19,7 @@ import java.util.Arrays; import java.util.Objects; -import static org.elasticsearch.xpack.rank.rrf.RRFRankBuilder.DEFAULT_RANK_CONSTANT; +import static org.elasticsearch.xpack.rank.rrf.RRFRetrieverBuilder.DEFAULT_RANK_CONSTANT; /** * {@code RRFRankDoc} supports additional ranking information diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java index 12c43a2f169f8..c3c9f19cde6ef 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.search.rank.RankBuilder; import org.elasticsearch.search.rank.RankDoc; import org.elasticsearch.search.retriever.CompoundRetrieverBuilder; import org.elasticsearch.search.retriever.RetrieverBuilder; @@ -31,7 +32,6 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; -import static org.elasticsearch.xpack.rank.rrf.RRFRankPlugin.NAME; /** * An rrf retriever is used to represent an rrf rank element, but @@ 
-50,6 +50,7 @@ public final class RRFRetrieverBuilder extends CompoundRetrieverBuilder PARSER = new ConstructingObjectParser<>( NAME, @@ -57,8 +58,8 @@ public final class RRFRetrieverBuilder extends CompoundRetrieverBuilder { List childRetrievers = (List) args[0]; List innerRetrievers = childRetrievers.stream().map(r -> new RetrieverSource(r, null)).toList(); - int rankWindowSize = args[1] == null ? RRFRankBuilder.DEFAULT_RANK_WINDOW_SIZE : (int) args[1]; - int rankConstant = args[2] == null ? RRFRankBuilder.DEFAULT_RANK_CONSTANT : (int) args[2]; + int rankWindowSize = args[1] == null ? RankBuilder.DEFAULT_RANK_WINDOW_SIZE : (int) args[1]; + int rankConstant = args[2] == null ? DEFAULT_RANK_CONSTANT : (int) args[2]; return new RRFRetrieverBuilder(innerRetrievers, rankWindowSize, rankConstant); } ); diff --git a/x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderParsingTests.java b/x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderParsingTests.java index d324effe41c22..cae758457a2ac 100644 --- a/x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderParsingTests.java +++ b/x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderParsingTests.java @@ -41,7 +41,7 @@ public static RRFRetrieverBuilder createRandomRRFRetrieverBuilder() { if (randomBoolean()) { rankWindowSize = randomIntBetween(1, 10000); } - int rankConstant = RRFRankBuilder.DEFAULT_RANK_CONSTANT; + int rankConstant = RRFRetrieverBuilder.DEFAULT_RANK_CONSTANT; if (randomBoolean()) { rankConstant = randomIntBetween(1, 1000000); } diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/100_rank_rrf.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/100_rank_rrf.yml index 647540644ce9e..a5c346b386999 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/100_rank_rrf.yml +++ 
b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/100_rank_rrf.yml @@ -2,6 +2,8 @@ setup: - requires: cluster_features: "gte_v8.8.0" reason: 'rank added in 8.8' + - skip: + features: "warnings" - do: indices.create: @@ -59,7 +61,14 @@ setup: --- "Simple rank with bm25 search and kNN search": + - requires: + cluster_features: ["gte_v8.16.0"] + reason: "deprecation added in 8.16" + test_runner_features: warnings + - do: + warnings: + - "Deprecated field [rank] used, replaced by [retriever]" search: index: test body: @@ -94,7 +103,15 @@ setup: --- "Simple rank with multiple bm25 sub searches": + - requires: + cluster_features: ["gte_v8.16.0"] + reason: "deprecation added in 8.16" + test_runner_features: warnings + - do: + warnings: + - "Deprecated field [rank] used, replaced by [retriever]" + - Deprecated field [sub_searches] used, replaced by [retriever] search: index: test body: @@ -135,7 +152,15 @@ setup: --- "Simple rank with multiple bm25 sub_searches and a knn search": + - requires: + cluster_features: ["gte_v8.16.0"] + reason: "deprecation added in 8.16" + test_runner_features: warnings + - do: + warnings: + - "Deprecated field [rank] used, replaced by [retriever]" + - Deprecated field [sub_searches] used, replaced by [retriever] search: index: test body: diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/150_rank_rrf_pagination.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/150_rank_rrf_pagination.yml index b4893bfec0849..94a1457a7acc8 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/150_rank_rrf_pagination.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/150_rank_rrf_pagination.yml @@ -63,7 +63,15 @@ setup: --- "Standard pagination within rank_window_size": # this test retrieves the same results from two queries, and applies a simple pagination skipping the first result + - requires: + 
cluster_features: [ "gte_v8.16.0" ] + reason: "deprecation added in 8.16" + test_runner_features: warnings + - do: + warnings: + - "Deprecated field [rank] used, replaced by [retriever]" + - Deprecated field [sub_searches] used, replaced by [retriever] search: index: test body: @@ -170,7 +178,15 @@ setup: --- "Standard pagination outside rank_window_size": # in this example, from starts *after* rank_window_size so, we expect 0 results to be returned + - requires: + cluster_features: [ "gte_v8.16.0" ] + reason: "deprecation added in 8.16" + test_runner_features: warnings + - do: + warnings: + - "Deprecated field [rank] used, replaced by [retriever]" + - Deprecated field [sub_searches] used, replaced by [retriever] search: index: test body: @@ -274,7 +290,15 @@ setup: --- "Standard pagination partially outside rank_window_size": # in this example we have that from starts *within* rank_window_size, but "from + size" goes over + - requires: + cluster_features: [ "gte_v8.16.0" ] + reason: "deprecation added in 8.16" + test_runner_features: warnings + - do: + warnings: + - "Deprecated field [rank] used, replaced by [retriever]" + - Deprecated field [sub_searches] used, replaced by [retriever] search: index: test body: @@ -384,7 +408,15 @@ setup: # queryA has a result set of [1, 2, 3, 4] and # queryB has a result set of [4, 3, 1, 2] # so for rank_constant=10, the expected order is [1, 4, 3, 2] + - requires: + cluster_features: ["gte_v8.16.0"] + reason: "deprecation added in 8.16" + test_runner_features: warnings + - do: + warnings: + - "Deprecated field [rank] used, replaced by [retriever]" + - Deprecated field [sub_searches] used, replaced by [retriever] search: index: test body: @@ -488,6 +520,9 @@ setup: - match: { hits.hits.1._id: "4" } - do: + warnings: + - "Deprecated field [rank] used, replaced by [retriever]" + - Deprecated field [sub_searches] used, replaced by [retriever] search: index: test body: @@ -597,7 +632,15 @@ setup: # queryA has a result set of [5, 1] 
and # queryB has a result set of [4, 3, 1, 2] # so for rank_constant=10, the expected order is [1, 5, 4, 3, 2] + - requires: + cluster_features: ["gte_v8.16.0"] + reason: "deprecation added in 8.16" + test_runner_features: warnings + - do: + warnings: + - "Deprecated field [rank] used, replaced by [retriever]" + - Deprecated field [sub_searches] used, replaced by [retriever] search: index: test body: @@ -685,6 +728,9 @@ setup: - match: { hits.hits.1._id: "5" } - do: + warnings: + - "Deprecated field [rank] used, replaced by [retriever]" + - Deprecated field [sub_searches] used, replaced by [retriever] search: index: test body: @@ -772,6 +818,9 @@ setup: - match: { hits.hits.1._id: "3" } - do: + warnings: + - "Deprecated field [rank] used, replaced by [retriever]" + - Deprecated field [sub_searches] used, replaced by [retriever] search: index: test body: @@ -867,7 +916,15 @@ setup: # queryB has a result set of [4, 3] # so for rank_constant=10, the expected order is [5, 4, 1, 3], # and the rank_window_size-sized result set that we'd paginate over is [5, 4] + - requires: + cluster_features: ["gte_v8.16.0"] + reason: "deprecation added in 8.16" + test_runner_features: warnings + - do: + warnings: + - "Deprecated field [rank] used, replaced by [retriever]" + - Deprecated field [sub_searches] used, replaced by [retriever] search: index: test body: @@ -955,6 +1012,10 @@ setup: - match: { hits.hits.1._id: "4" } - do: + warnings: + - "Deprecated field [rank] used, replaced by [retriever]" + - Deprecated field [sub_searches] used, replaced by [retriever] + search: index: test body: diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/200_rank_rrf_script.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/200_rank_rrf_script.yml index bca39dea4ae57..36e70581f39f2 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/200_rank_rrf_script.yml +++ 
b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/200_rank_rrf_script.yml @@ -67,7 +67,14 @@ setup: --- "RRF using single knn and single BM25 with a scripted metric aggregation": + - requires: + cluster_features: ["gte_v8.16.0"] + reason: "deprecation added in 8.16" + test_runner_features: warnings + - do: + warnings: + - "Deprecated field [rank] used, replaced by [retriever]" search: index: test body: @@ -140,7 +147,14 @@ setup: --- "RRF using multi-knn only with a scripted metric aggregation": + - requires: + cluster_features: ["gte_v8.16.0"] + reason: "deprecation added in 8.16" + test_runner_features: warnings + - do: + warnings: + - "Deprecated field [rank] used, replaced by [retriever]" search: index: test body: @@ -195,7 +209,14 @@ setup: --- "RRF using multi-knn and single BM25 with a scripted metric aggregation": + - requires: + cluster_features: ["gte_v8.16.0"] + reason: "deprecation added in 8.16" + test_runner_features: warnings + - do: + warnings: + - "Deprecated field [rank] used, replaced by [retriever]" search: index: test body: diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/550_rrf_sub_searches_explain.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/550_rrf_sub_searches_explain.yml index 5718cd3455526..1b74ffee62a11 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/550_rrf_sub_searches_explain.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/550_rrf_sub_searches_explain.yml @@ -75,7 +75,14 @@ setup: --- "using a top level knn and query": + - requires: + cluster_features: ["gte_v8.16.0"] + reason: "deprecation added in 8.16" + test_runner_features: warnings + - do: + warnings: + - "Deprecated field [rank] used, replaced by [retriever]" search: index: test body: @@ -129,7 +136,15 @@ setup: --- "using sub_searches": + - requires: + cluster_features: [ "gte_v8.16.0" ] + reason: "deprecation 
added in 8.16" + test_runner_features: warnings + - do: + warnings: + - "Deprecated field [rank] used, replaced by [retriever]" + - Deprecated field [sub_searches] used, replaced by [retriever] search: index: test body: @@ -194,7 +209,14 @@ setup: --- "using named top level knn and query": + - requires: + cluster_features: ["gte_v8.16.0"] + reason: "deprecation added in 8.16" + test_runner_features: warnings + - do: + warnings: + - "Deprecated field [rank] used, replaced by [retriever]" search: index: test body: @@ -251,7 +273,15 @@ setup: --- "using named sub_searches": + - requires: + cluster_features: ["gte_v8.16.0"] + reason: "deprecation added in 8.16" + test_runner_features: warnings + - do: + warnings: + - "Deprecated field [rank] used, replaced by [retriever]" + - Deprecated field [sub_searches] used, replaced by [retriever] search: index: test body: @@ -320,7 +350,15 @@ setup: --- "using a mix of named and unnamed queries": + - requires: + cluster_features: ["gte_v8.16.0"] + reason: "deprecation added in 8.16" + test_runner_features: warnings + - do: + warnings: + - "Deprecated field [rank] used, replaced by [retriever]" + - Deprecated field [sub_searches] used, replaced by [retriever] search: index: test body: diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/600_rrf_retriever_profile.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/600_rrf_retriever_profile.yml index e34885419c7f7..a9ddb4f902929 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/600_rrf_retriever_profile.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/600_rrf_retriever_profile.yml @@ -172,7 +172,14 @@ setup: --- "using query and dfs knn search": + - requires: + cluster_features: ["gte_v8.16.0"] + reason: "deprecation added in 8.16" + test_runner_features: warnings + - do: + warnings: + - "Deprecated field [rank] used, replaced by [retriever]" search: 
index: test body: From d7aa33e2e470c785905d5594efb16566de9dd9e2 Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Wed, 16 Oct 2024 21:59:13 +0300 Subject: [PATCH 168/449] Fixing number of shards for random_rerank_retriever tests to ensure score validation (#114877) --- muted-tests.yml | 3 --- .../test/inference/80_random_rerank_retriever.yml | 2 ++ 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 69cef9acc8cb9..2e623fa94e06a 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -80,9 +80,6 @@ tests: - class: org.elasticsearch.xpack.restart.CoreFullClusterRestartIT method: testSnapshotRestore {cluster=UPGRADED} issue: https://github.com/elastic/elasticsearch/issues/111799 -- class: org.elasticsearch.xpack.inference.InferenceRestIT - method: test {p0=inference/80_random_rerank_retriever/Random rerank retriever predictably shuffles results} - issue: https://github.com/elastic/elasticsearch/issues/111999 - class: org.elasticsearch.smoketest.SmokeTestMultiNodeClientYamlTestSuiteIT issue: https://github.com/elastic/elasticsearch/issues/112147 - class: org.elasticsearch.smoketest.WatcherYamlRestIT diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/80_random_rerank_retriever.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/80_random_rerank_retriever.yml index d33f57f763db8..47d87583fffb2 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/80_random_rerank_retriever.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/80_random_rerank_retriever.yml @@ -8,6 +8,8 @@ setup: indices.create: index: test-index body: + settings: + number_of_shards: 1 mappings: properties: text: From 2748a965e29cc2789e67135f97345420aa5fb532 Mon Sep 17 00:00:00 2001 From: Fang Xing <155562079+fang-xing-esql@users.noreply.github.com> Date: Wed, 16 Oct 2024 15:40:18 -0400 Subject: [PATCH 
169/449] [ES|QL] Make named parameter for identifier and pattern available only under snapshot (#114784) * make named parameter for identifier and pattern snapshot --- docs/changelog/114784.yaml | 5 + .../xpack/esql/qa/rest/RestEsqlTestCase.java | 5 + .../esql/src/main/antlr/EsqlBaseLexer.g4 | 16 +- .../esql/src/main/antlr/EsqlBaseParser.g4 | 4 +- .../xpack/esql/action/EsqlCapabilities.java | 2 +- .../xpack/esql/action/RequestXContent.java | 3 +- .../xpack/esql/parser/EsqlBaseLexer.interp | 2 +- .../xpack/esql/parser/EsqlBaseLexer.java | 689 +++++++------ .../xpack/esql/parser/EsqlBaseParser.interp | 2 +- .../xpack/esql/parser/EsqlBaseParser.java | 947 +++++++++--------- .../esql/action/EsqlQueryRequestTests.java | 99 +- .../xpack/esql/analysis/AnalyzerTests.java | 17 + .../esql/parser/StatementParserTests.java | 32 +- 13 files changed, 1005 insertions(+), 818 deletions(-) create mode 100644 docs/changelog/114784.yaml diff --git a/docs/changelog/114784.yaml b/docs/changelog/114784.yaml new file mode 100644 index 0000000000000..24ebe8b5fc09a --- /dev/null +++ b/docs/changelog/114784.yaml @@ -0,0 +1,5 @@ +pr: 114784 +summary: "[ES|QL] make named parameter for identifier and pattern snapshot" +area: ES|QL +type: bug +issues: [] diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index 4fa6ac3009654..e3199649a91be 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -32,6 +32,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.junit.After; import 
org.junit.Assert; import org.junit.Before; @@ -670,6 +671,10 @@ public void testErrorMessageForArrayValuesInParams() throws IOException { } public void testNamedParamsForIdentifierAndIdentifierPatterns() throws IOException { + assumeTrue( + "named parameters for identifiers and patterns require snapshot build", + EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES.isEnabled() + ); bulkLoadTestData(10); // positive var query = requestObjectBuilder().query( diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index 051e83129d12d..ffab261d3c174 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -305,8 +305,8 @@ mode PROJECT_MODE; PROJECT_PIPE : PIPE -> type(PIPE), popMode; PROJECT_DOT: DOT -> type(DOT); PROJECT_COMMA : COMMA -> type(COMMA); -PROJECT_PARAM : PARAM -> type(PARAM); -PROJECT_NAMED_OR_POSITIONAL_PARAM : NAMED_OR_POSITIONAL_PARAM -> type(NAMED_OR_POSITIONAL_PARAM); +PROJECT_PARAM : {this.isDevVersion()}? PARAM -> type(PARAM); +PROJECT_NAMED_OR_POSITIONAL_PARAM : {this.isDevVersion()}? NAMED_OR_POSITIONAL_PARAM -> type(NAMED_OR_POSITIONAL_PARAM); fragment UNQUOTED_ID_BODY_WITH_PATTERN : (LETTER | DIGIT | UNDERSCORE | ASTERISK) @@ -340,8 +340,8 @@ RENAME_PIPE : PIPE -> type(PIPE), popMode; RENAME_ASSIGN : ASSIGN -> type(ASSIGN); RENAME_COMMA : COMMA -> type(COMMA); RENAME_DOT: DOT -> type(DOT); -RENAME_PARAM : PARAM -> type(PARAM); -RENAME_NAMED_OR_POSITIONAL_PARAM : NAMED_OR_POSITIONAL_PARAM -> type(NAMED_OR_POSITIONAL_PARAM); +RENAME_PARAM : {this.isDevVersion()}? PARAM -> type(PARAM); +RENAME_NAMED_OR_POSITIONAL_PARAM : {this.isDevVersion()}? 
NAMED_OR_POSITIONAL_PARAM -> type(NAMED_OR_POSITIONAL_PARAM); AS : 'as'; @@ -413,8 +413,8 @@ ENRICH_FIELD_QUOTED_IDENTIFIER : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER) ; -ENRICH_FIELD_PARAM : PARAM -> type(PARAM); -ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM : NAMED_OR_POSITIONAL_PARAM -> type(NAMED_OR_POSITIONAL_PARAM); +ENRICH_FIELD_PARAM : {this.isDevVersion()}? PARAM -> type(PARAM); +ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM : {this.isDevVersion()}? NAMED_OR_POSITIONAL_PARAM -> type(NAMED_OR_POSITIONAL_PARAM); ENRICH_FIELD_LINE_COMMENT : LINE_COMMENT -> channel(HIDDEN) @@ -431,8 +431,8 @@ ENRICH_FIELD_WS mode MVEXPAND_MODE; MVEXPAND_PIPE : PIPE -> type(PIPE), popMode; MVEXPAND_DOT: DOT -> type(DOT); -MVEXPAND_PARAM : PARAM -> type(PARAM); -MVEXPAND_NAMED_OR_POSITIONAL_PARAM : NAMED_OR_POSITIONAL_PARAM -> type(NAMED_OR_POSITIONAL_PARAM); +MVEXPAND_PARAM : {this.isDevVersion()}? PARAM -> type(PARAM); +MVEXPAND_NAMED_OR_POSITIONAL_PARAM : {this.isDevVersion()}? NAMED_OR_POSITIONAL_PARAM -> type(NAMED_OR_POSITIONAL_PARAM); MVEXPAND_QUOTED_IDENTIFIER : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER) diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index 7d489417ab4ca..f9f994f4ab329 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -195,7 +195,7 @@ identifier identifierPattern : ID_PATTERN - | parameter + | {this.isDevVersion()}? parameter ; constant @@ -218,7 +218,7 @@ parameter identifierOrParameter : identifier - | parameter + | {this.isDevVersion()}? 
parameter ; limitCommand diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 18ebbe6d898af..c94791964fb90 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -375,7 +375,7 @@ public enum Cap { /** * Support named parameters for field names. */ - NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES, + NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES(true), /** * Fix sorting not allowed on _source and counters. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java index 7224aa049093d..71aface993ab9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java @@ -184,7 +184,8 @@ private static QueryParams parseParams(XContentParser p) throws IOException { String paramName = entry.getKey(); checkParamNameValidity(paramName, errors, loc); - if (entry.getValue() instanceof Map values) {// parameter specified as key:value pairs + if (EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES.isEnabled() + && entry.getValue() instanceof Map values) {// parameter specified as key:value pairs Map paramElements = Maps.newMapWithExpectedSize(2); for (Object keyName : values.keySet()) { ParamParsingKey paramType = checkParamValueKeysValidity(keyName.toString(), errors, loc); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index 1a0105b9951d2..2566da379af73 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -466,4 +466,4 @@ METRICS_MODE CLOSING_METRICS_MODE atn: -[4, 0, 120, 1471, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 
7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 
1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 4, 19, 578, 8, 19, 11, 19, 12, 19, 579, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 588, 8, 20, 10, 20, 12, 20, 591, 9, 20, 1, 20, 3, 20, 594, 8, 20, 1, 20, 3, 20, 597, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 606, 8, 21, 10, 21, 12, 21, 609, 9, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 4, 22, 617, 8, 22, 11, 22, 12, 22, 618, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 28, 1, 28, 3, 28, 638, 8, 28, 1, 28, 4, 28, 641, 8, 28, 11, 28, 12, 28, 642, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 3, 31, 652, 8, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 659, 8, 33, 1, 34, 1, 34, 1, 34, 5, 34, 664, 8, 34, 10, 34, 12, 34, 667, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 675, 8, 34, 10, 34, 12, 34, 678, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 3, 34, 685, 8, 34, 1, 34, 3, 34, 688, 8, 34, 3, 34, 690, 8, 34, 1, 35, 4, 35, 693, 8, 35, 11, 35, 12, 35, 694, 1, 36, 4, 36, 698, 8, 36, 11, 36, 12, 36, 699, 1, 36, 1, 36, 5, 36, 704, 8, 36, 10, 36, 12, 36, 707, 9, 36, 1, 36, 1, 36, 4, 36, 711, 8, 36, 11, 36, 12, 36, 712, 1, 36, 4, 36, 716, 8, 36, 11, 36, 12, 36, 717, 1, 36, 1, 36, 5, 36, 722, 8, 36, 10, 36, 12, 36, 725, 9, 36, 3, 36, 727, 8, 36, 1, 36, 1, 36, 1, 36, 1, 36, 4, 36, 733, 8, 36, 11, 36, 12, 36, 734, 1, 36, 1, 36, 3, 36, 739, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 
46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 3, 74, 871, 8, 74, 1, 74, 5, 74, 874, 8, 74, 10, 74, 12, 74, 877, 9, 74, 1, 74, 1, 74, 4, 74, 881, 8, 74, 11, 74, 12, 74, 882, 3, 74, 885, 8, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 5, 77, 899, 8, 77, 10, 77, 12, 77, 902, 9, 77, 1, 77, 1, 77, 3, 77, 906, 8, 77, 1, 77, 4, 77, 909, 8, 77, 11, 77, 12, 77, 910, 3, 77, 913, 8, 77, 1, 78, 1, 78, 4, 78, 917, 8, 78, 11, 78, 12, 78, 918, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 3, 95, 996, 8, 95, 1, 96, 4, 96, 999, 8, 96, 11, 96, 12, 96, 1000, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 
1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 3, 107, 1048, 8, 107, 1, 108, 1, 108, 3, 108, 1052, 8, 108, 1, 108, 5, 108, 1055, 8, 108, 10, 108, 12, 108, 1058, 9, 108, 1, 108, 1, 108, 3, 108, 1062, 8, 108, 1, 108, 4, 108, 1065, 8, 108, 11, 108, 12, 108, 1066, 3, 108, 1069, 8, 108, 1, 109, 1, 109, 4, 109, 1073, 8, 109, 11, 109, 12, 109, 1074, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 129, 4, 129, 1158, 8, 129, 11, 129, 12, 129, 1159, 1, 129, 1, 129, 3, 129, 1164, 8, 129, 1, 129, 4, 129, 1167, 8, 129, 11, 129, 12, 129, 1168, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 
156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 4, 162, 1308, 8, 162, 11, 162, 12, 162, 1309, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 197, 2, 607, 676, 0, 198, 15, 1, 17, 2, 19, 3, 21, 4, 23, 5, 25, 6, 27, 7, 29, 8, 31, 9, 33, 10, 35, 11, 37, 12, 39, 13, 41, 14, 43, 15, 45, 16, 47, 17, 49, 18, 51, 19, 53, 20, 55, 21, 57, 22, 59, 23, 61, 24, 63, 0, 65, 0, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 0, 81, 0, 83, 25, 85, 26, 87, 27, 89, 28, 91, 29, 93, 30, 95, 31, 97, 32, 99, 33, 101, 34, 103, 35, 105, 36, 107, 37, 109, 38, 111, 39, 113, 40, 115, 41, 117, 42, 119, 43, 121, 44, 123, 
45, 125, 46, 127, 47, 129, 48, 131, 49, 133, 50, 135, 51, 137, 52, 139, 53, 141, 54, 143, 55, 145, 56, 147, 57, 149, 58, 151, 59, 153, 60, 155, 61, 157, 62, 159, 63, 161, 0, 163, 64, 165, 65, 167, 66, 169, 67, 171, 0, 173, 68, 175, 69, 177, 70, 179, 71, 181, 0, 183, 0, 185, 72, 187, 73, 189, 74, 191, 0, 193, 0, 195, 0, 197, 0, 199, 0, 201, 0, 203, 75, 205, 0, 207, 76, 209, 0, 211, 0, 213, 77, 215, 78, 217, 79, 219, 0, 221, 0, 223, 0, 225, 0, 227, 0, 229, 0, 231, 0, 233, 80, 235, 81, 237, 82, 239, 83, 241, 0, 243, 0, 245, 0, 247, 0, 249, 0, 251, 0, 253, 84, 255, 0, 257, 85, 259, 86, 261, 87, 263, 0, 265, 0, 267, 88, 269, 89, 271, 0, 273, 90, 275, 0, 277, 91, 279, 92, 281, 93, 283, 0, 285, 0, 287, 0, 289, 0, 291, 0, 293, 0, 295, 0, 297, 0, 299, 0, 301, 94, 303, 95, 305, 96, 307, 0, 309, 0, 311, 0, 313, 0, 315, 0, 317, 0, 319, 97, 321, 98, 323, 99, 325, 0, 327, 100, 329, 101, 331, 102, 333, 103, 335, 0, 337, 104, 339, 105, 341, 106, 343, 107, 345, 108, 347, 0, 349, 0, 351, 0, 353, 0, 355, 0, 357, 0, 359, 0, 361, 109, 363, 110, 365, 111, 367, 0, 369, 0, 371, 0, 373, 0, 375, 112, 377, 113, 379, 114, 381, 0, 383, 0, 385, 0, 387, 115, 389, 116, 391, 117, 393, 0, 395, 0, 397, 118, 399, 119, 401, 120, 403, 0, 405, 0, 407, 0, 409, 0, 15, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 35, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 
114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1499, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 1, 61, 1, 0, 0, 0, 1, 83, 1, 0, 0, 0, 1, 85, 1, 0, 0, 0, 1, 87, 1, 0, 0, 0, 1, 89, 1, 0, 0, 0, 1, 91, 1, 0, 0, 0, 1, 93, 1, 0, 0, 0, 1, 95, 1, 0, 0, 0, 1, 97, 1, 0, 0, 0, 1, 99, 1, 0, 0, 0, 1, 101, 1, 0, 0, 0, 1, 103, 1, 0, 0, 0, 1, 105, 1, 0, 0, 0, 1, 107, 1, 0, 0, 0, 1, 109, 1, 0, 0, 0, 1, 111, 1, 0, 0, 0, 1, 113, 1, 0, 0, 0, 1, 115, 1, 0, 0, 0, 1, 117, 1, 0, 0, 0, 1, 119, 1, 0, 0, 0, 1, 121, 1, 0, 0, 0, 1, 123, 1, 0, 0, 0, 1, 125, 1, 0, 0, 0, 1, 127, 1, 0, 0, 0, 1, 129, 1, 0, 0, 0, 1, 131, 1, 0, 0, 0, 1, 133, 1, 0, 0, 0, 1, 135, 1, 0, 0, 0, 1, 137, 1, 0, 0, 0, 1, 139, 1, 0, 0, 0, 1, 141, 1, 0, 0, 0, 1, 143, 1, 0, 0, 0, 1, 145, 1, 0, 0, 0, 1, 147, 1, 0, 0, 0, 1, 149, 1, 0, 0, 0, 1, 151, 1, 0, 0, 0, 1, 153, 1, 0, 0, 0, 1, 155, 1, 0, 0, 0, 1, 157, 1, 0, 0, 0, 1, 159, 1, 0, 0, 0, 1, 161, 1, 0, 0, 0, 1, 163, 1, 0, 0, 0, 1, 165, 1, 0, 0, 0, 1, 167, 1, 0, 0, 0, 1, 169, 1, 0, 0, 0, 1, 173, 1, 0, 0, 0, 1, 175, 1, 0, 0, 0, 1, 177, 1, 0, 0, 0, 1, 179, 1, 0, 0, 0, 2, 181, 1, 0, 0, 0, 2, 183, 1, 0, 0, 0, 2, 185, 1, 0, 0, 0, 2, 187, 1, 0, 0, 0, 2, 189, 1, 0, 0, 0, 3, 191, 1, 0, 0, 0, 3, 193, 1, 0, 0, 0, 3, 195, 1, 0, 0, 0, 3, 197, 1, 0, 0, 0, 3, 199, 1, 0, 0, 0, 3, 201, 1, 0, 0, 0, 3, 203, 1, 
0, 0, 0, 3, 207, 1, 0, 0, 0, 3, 209, 1, 0, 0, 0, 3, 211, 1, 0, 0, 0, 3, 213, 1, 0, 0, 0, 3, 215, 1, 0, 0, 0, 3, 217, 1, 0, 0, 0, 4, 219, 1, 0, 0, 0, 4, 221, 1, 0, 0, 0, 4, 223, 1, 0, 0, 0, 4, 225, 1, 0, 0, 0, 4, 227, 1, 0, 0, 0, 4, 233, 1, 0, 0, 0, 4, 235, 1, 0, 0, 0, 4, 237, 1, 0, 0, 0, 4, 239, 1, 0, 0, 0, 5, 241, 1, 0, 0, 0, 5, 243, 1, 0, 0, 0, 5, 245, 1, 0, 0, 0, 5, 247, 1, 0, 0, 0, 5, 249, 1, 0, 0, 0, 5, 251, 1, 0, 0, 0, 5, 253, 1, 0, 0, 0, 5, 255, 1, 0, 0, 0, 5, 257, 1, 0, 0, 0, 5, 259, 1, 0, 0, 0, 5, 261, 1, 0, 0, 0, 6, 263, 1, 0, 0, 0, 6, 265, 1, 0, 0, 0, 6, 267, 1, 0, 0, 0, 6, 269, 1, 0, 0, 0, 6, 273, 1, 0, 0, 0, 6, 275, 1, 0, 0, 0, 6, 277, 1, 0, 0, 0, 6, 279, 1, 0, 0, 0, 6, 281, 1, 0, 0, 0, 7, 283, 1, 0, 0, 0, 7, 285, 1, 0, 0, 0, 7, 287, 1, 0, 0, 0, 7, 289, 1, 0, 0, 0, 7, 291, 1, 0, 0, 0, 7, 293, 1, 0, 0, 0, 7, 295, 1, 0, 0, 0, 7, 297, 1, 0, 0, 0, 7, 299, 1, 0, 0, 0, 7, 301, 1, 0, 0, 0, 7, 303, 1, 0, 0, 0, 7, 305, 1, 0, 0, 0, 8, 307, 1, 0, 0, 0, 8, 309, 1, 0, 0, 0, 8, 311, 1, 0, 0, 0, 8, 313, 1, 0, 0, 0, 8, 315, 1, 0, 0, 0, 8, 317, 1, 0, 0, 0, 8, 319, 1, 0, 0, 0, 8, 321, 1, 0, 0, 0, 8, 323, 1, 0, 0, 0, 9, 325, 1, 0, 0, 0, 9, 327, 1, 0, 0, 0, 9, 329, 1, 0, 0, 0, 9, 331, 1, 0, 0, 0, 9, 333, 1, 0, 0, 0, 10, 335, 1, 0, 0, 0, 10, 337, 1, 0, 0, 0, 10, 339, 1, 0, 0, 0, 10, 341, 1, 0, 0, 0, 10, 343, 1, 0, 0, 0, 10, 345, 1, 0, 0, 0, 11, 347, 1, 0, 0, 0, 11, 349, 1, 0, 0, 0, 11, 351, 1, 0, 0, 0, 11, 353, 1, 0, 0, 0, 11, 355, 1, 0, 0, 0, 11, 357, 1, 0, 0, 0, 11, 359, 1, 0, 0, 0, 11, 361, 1, 0, 0, 0, 11, 363, 1, 0, 0, 0, 11, 365, 1, 0, 0, 0, 12, 367, 1, 0, 0, 0, 12, 369, 1, 0, 0, 0, 12, 371, 1, 0, 0, 0, 12, 373, 1, 0, 0, 0, 12, 375, 1, 0, 0, 0, 12, 377, 1, 0, 0, 0, 12, 379, 1, 0, 0, 0, 13, 381, 1, 0, 0, 0, 13, 383, 1, 0, 0, 0, 13, 385, 1, 0, 0, 0, 13, 387, 1, 0, 0, 0, 13, 389, 1, 0, 0, 0, 13, 391, 1, 0, 0, 0, 14, 393, 1, 0, 0, 0, 14, 395, 1, 0, 0, 0, 14, 397, 1, 0, 0, 0, 14, 399, 1, 0, 0, 0, 14, 401, 1, 0, 0, 0, 14, 403, 1, 0, 0, 0, 14, 405, 1, 0, 0, 0, 14, 407, 1, 0, 
0, 0, 14, 409, 1, 0, 0, 0, 15, 411, 1, 0, 0, 0, 17, 421, 1, 0, 0, 0, 19, 428, 1, 0, 0, 0, 21, 437, 1, 0, 0, 0, 23, 444, 1, 0, 0, 0, 25, 454, 1, 0, 0, 0, 27, 461, 1, 0, 0, 0, 29, 468, 1, 0, 0, 0, 31, 475, 1, 0, 0, 0, 33, 483, 1, 0, 0, 0, 35, 495, 1, 0, 0, 0, 37, 504, 1, 0, 0, 0, 39, 510, 1, 0, 0, 0, 41, 517, 1, 0, 0, 0, 43, 524, 1, 0, 0, 0, 45, 532, 1, 0, 0, 0, 47, 540, 1, 0, 0, 0, 49, 555, 1, 0, 0, 0, 51, 565, 1, 0, 0, 0, 53, 577, 1, 0, 0, 0, 55, 583, 1, 0, 0, 0, 57, 600, 1, 0, 0, 0, 59, 616, 1, 0, 0, 0, 61, 622, 1, 0, 0, 0, 63, 626, 1, 0, 0, 0, 65, 628, 1, 0, 0, 0, 67, 630, 1, 0, 0, 0, 69, 633, 1, 0, 0, 0, 71, 635, 1, 0, 0, 0, 73, 644, 1, 0, 0, 0, 75, 646, 1, 0, 0, 0, 77, 651, 1, 0, 0, 0, 79, 653, 1, 0, 0, 0, 81, 658, 1, 0, 0, 0, 83, 689, 1, 0, 0, 0, 85, 692, 1, 0, 0, 0, 87, 738, 1, 0, 0, 0, 89, 740, 1, 0, 0, 0, 91, 743, 1, 0, 0, 0, 93, 747, 1, 0, 0, 0, 95, 751, 1, 0, 0, 0, 97, 753, 1, 0, 0, 0, 99, 756, 1, 0, 0, 0, 101, 758, 1, 0, 0, 0, 103, 763, 1, 0, 0, 0, 105, 765, 1, 0, 0, 0, 107, 771, 1, 0, 0, 0, 109, 777, 1, 0, 0, 0, 111, 780, 1, 0, 0, 0, 113, 783, 1, 0, 0, 0, 115, 788, 1, 0, 0, 0, 117, 793, 1, 0, 0, 0, 119, 795, 1, 0, 0, 0, 121, 799, 1, 0, 0, 0, 123, 804, 1, 0, 0, 0, 125, 810, 1, 0, 0, 0, 127, 813, 1, 0, 0, 0, 129, 815, 1, 0, 0, 0, 131, 821, 1, 0, 0, 0, 133, 823, 1, 0, 0, 0, 135, 828, 1, 0, 0, 0, 137, 831, 1, 0, 0, 0, 139, 834, 1, 0, 0, 0, 141, 837, 1, 0, 0, 0, 143, 839, 1, 0, 0, 0, 145, 842, 1, 0, 0, 0, 147, 844, 1, 0, 0, 0, 149, 847, 1, 0, 0, 0, 151, 849, 1, 0, 0, 0, 153, 851, 1, 0, 0, 0, 155, 853, 1, 0, 0, 0, 157, 855, 1, 0, 0, 0, 159, 857, 1, 0, 0, 0, 161, 863, 1, 0, 0, 0, 163, 884, 1, 0, 0, 0, 165, 886, 1, 0, 0, 0, 167, 891, 1, 0, 0, 0, 169, 912, 1, 0, 0, 0, 171, 914, 1, 0, 0, 0, 173, 922, 1, 0, 0, 0, 175, 924, 1, 0, 0, 0, 177, 928, 1, 0, 0, 0, 179, 932, 1, 0, 0, 0, 181, 936, 1, 0, 0, 0, 183, 941, 1, 0, 0, 0, 185, 946, 1, 0, 0, 0, 187, 950, 1, 0, 0, 0, 189, 954, 1, 0, 0, 0, 191, 958, 1, 0, 0, 0, 193, 963, 1, 0, 0, 0, 195, 967, 1, 0, 0, 0, 197, 971, 1, 
0, 0, 0, 199, 975, 1, 0, 0, 0, 201, 979, 1, 0, 0, 0, 203, 983, 1, 0, 0, 0, 205, 995, 1, 0, 0, 0, 207, 998, 1, 0, 0, 0, 209, 1002, 1, 0, 0, 0, 211, 1006, 1, 0, 0, 0, 213, 1010, 1, 0, 0, 0, 215, 1014, 1, 0, 0, 0, 217, 1018, 1, 0, 0, 0, 219, 1022, 1, 0, 0, 0, 221, 1027, 1, 0, 0, 0, 223, 1031, 1, 0, 0, 0, 225, 1035, 1, 0, 0, 0, 227, 1039, 1, 0, 0, 0, 229, 1047, 1, 0, 0, 0, 231, 1068, 1, 0, 0, 0, 233, 1072, 1, 0, 0, 0, 235, 1076, 1, 0, 0, 0, 237, 1080, 1, 0, 0, 0, 239, 1084, 1, 0, 0, 0, 241, 1088, 1, 0, 0, 0, 243, 1093, 1, 0, 0, 0, 245, 1097, 1, 0, 0, 0, 247, 1101, 1, 0, 0, 0, 249, 1105, 1, 0, 0, 0, 251, 1109, 1, 0, 0, 0, 253, 1113, 1, 0, 0, 0, 255, 1116, 1, 0, 0, 0, 257, 1120, 1, 0, 0, 0, 259, 1124, 1, 0, 0, 0, 261, 1128, 1, 0, 0, 0, 263, 1132, 1, 0, 0, 0, 265, 1137, 1, 0, 0, 0, 267, 1142, 1, 0, 0, 0, 269, 1147, 1, 0, 0, 0, 271, 1154, 1, 0, 0, 0, 273, 1163, 1, 0, 0, 0, 275, 1170, 1, 0, 0, 0, 277, 1174, 1, 0, 0, 0, 279, 1178, 1, 0, 0, 0, 281, 1182, 1, 0, 0, 0, 283, 1186, 1, 0, 0, 0, 285, 1192, 1, 0, 0, 0, 287, 1196, 1, 0, 0, 0, 289, 1200, 1, 0, 0, 0, 291, 1204, 1, 0, 0, 0, 293, 1208, 1, 0, 0, 0, 295, 1212, 1, 0, 0, 0, 297, 1216, 1, 0, 0, 0, 299, 1220, 1, 0, 0, 0, 301, 1224, 1, 0, 0, 0, 303, 1228, 1, 0, 0, 0, 305, 1232, 1, 0, 0, 0, 307, 1236, 1, 0, 0, 0, 309, 1241, 1, 0, 0, 0, 311, 1245, 1, 0, 0, 0, 313, 1249, 1, 0, 0, 0, 315, 1253, 1, 0, 0, 0, 317, 1257, 1, 0, 0, 0, 319, 1261, 1, 0, 0, 0, 321, 1265, 1, 0, 0, 0, 323, 1269, 1, 0, 0, 0, 325, 1273, 1, 0, 0, 0, 327, 1278, 1, 0, 0, 0, 329, 1283, 1, 0, 0, 0, 331, 1287, 1, 0, 0, 0, 333, 1291, 1, 0, 0, 0, 335, 1295, 1, 0, 0, 0, 337, 1300, 1, 0, 0, 0, 339, 1307, 1, 0, 0, 0, 341, 1311, 1, 0, 0, 0, 343, 1315, 1, 0, 0, 0, 345, 1319, 1, 0, 0, 0, 347, 1323, 1, 0, 0, 0, 349, 1328, 1, 0, 0, 0, 351, 1332, 1, 0, 0, 0, 353, 1336, 1, 0, 0, 0, 355, 1340, 1, 0, 0, 0, 357, 1345, 1, 0, 0, 0, 359, 1349, 1, 0, 0, 0, 361, 1353, 1, 0, 0, 0, 363, 1357, 1, 0, 0, 0, 365, 1361, 1, 0, 0, 0, 367, 1365, 1, 0, 0, 0, 369, 1371, 1, 0, 0, 0, 371, 1375, 1, 0, 
0, 0, 373, 1379, 1, 0, 0, 0, 375, 1383, 1, 0, 0, 0, 377, 1387, 1, 0, 0, 0, 379, 1391, 1, 0, 0, 0, 381, 1395, 1, 0, 0, 0, 383, 1400, 1, 0, 0, 0, 385, 1406, 1, 0, 0, 0, 387, 1412, 1, 0, 0, 0, 389, 1416, 1, 0, 0, 0, 391, 1420, 1, 0, 0, 0, 393, 1424, 1, 0, 0, 0, 395, 1430, 1, 0, 0, 0, 397, 1436, 1, 0, 0, 0, 399, 1440, 1, 0, 0, 0, 401, 1444, 1, 0, 0, 0, 403, 1448, 1, 0, 0, 0, 405, 1454, 1, 0, 0, 0, 407, 1460, 1, 0, 0, 0, 409, 1466, 1, 0, 0, 0, 411, 412, 7, 0, 0, 0, 412, 413, 7, 1, 0, 0, 413, 414, 7, 2, 0, 0, 414, 415, 7, 2, 0, 0, 415, 416, 7, 3, 0, 0, 416, 417, 7, 4, 0, 0, 417, 418, 7, 5, 0, 0, 418, 419, 1, 0, 0, 0, 419, 420, 6, 0, 0, 0, 420, 16, 1, 0, 0, 0, 421, 422, 7, 0, 0, 0, 422, 423, 7, 6, 0, 0, 423, 424, 7, 7, 0, 0, 424, 425, 7, 8, 0, 0, 425, 426, 1, 0, 0, 0, 426, 427, 6, 1, 1, 0, 427, 18, 1, 0, 0, 0, 428, 429, 7, 3, 0, 0, 429, 430, 7, 9, 0, 0, 430, 431, 7, 6, 0, 0, 431, 432, 7, 1, 0, 0, 432, 433, 7, 4, 0, 0, 433, 434, 7, 10, 0, 0, 434, 435, 1, 0, 0, 0, 435, 436, 6, 2, 2, 0, 436, 20, 1, 0, 0, 0, 437, 438, 7, 3, 0, 0, 438, 439, 7, 11, 0, 0, 439, 440, 7, 12, 0, 0, 440, 441, 7, 13, 0, 0, 441, 442, 1, 0, 0, 0, 442, 443, 6, 3, 0, 0, 443, 22, 1, 0, 0, 0, 444, 445, 7, 3, 0, 0, 445, 446, 7, 14, 0, 0, 446, 447, 7, 8, 0, 0, 447, 448, 7, 13, 0, 0, 448, 449, 7, 12, 0, 0, 449, 450, 7, 1, 0, 0, 450, 451, 7, 9, 0, 0, 451, 452, 1, 0, 0, 0, 452, 453, 6, 4, 3, 0, 453, 24, 1, 0, 0, 0, 454, 455, 7, 15, 0, 0, 455, 456, 7, 6, 0, 0, 456, 457, 7, 7, 0, 0, 457, 458, 7, 16, 0, 0, 458, 459, 1, 0, 0, 0, 459, 460, 6, 5, 4, 0, 460, 26, 1, 0, 0, 0, 461, 462, 7, 17, 0, 0, 462, 463, 7, 6, 0, 0, 463, 464, 7, 7, 0, 0, 464, 465, 7, 18, 0, 0, 465, 466, 1, 0, 0, 0, 466, 467, 6, 6, 0, 0, 467, 28, 1, 0, 0, 0, 468, 469, 7, 18, 0, 0, 469, 470, 7, 3, 0, 0, 470, 471, 7, 3, 0, 0, 471, 472, 7, 8, 0, 0, 472, 473, 1, 0, 0, 0, 473, 474, 6, 7, 1, 0, 474, 30, 1, 0, 0, 0, 475, 476, 7, 13, 0, 0, 476, 477, 7, 1, 0, 0, 477, 478, 7, 16, 0, 0, 478, 479, 7, 1, 0, 0, 479, 480, 7, 5, 0, 0, 480, 481, 1, 0, 0, 0, 481, 482, 
6, 8, 0, 0, 482, 32, 1, 0, 0, 0, 483, 484, 7, 16, 0, 0, 484, 485, 7, 11, 0, 0, 485, 486, 5, 95, 0, 0, 486, 487, 7, 3, 0, 0, 487, 488, 7, 14, 0, 0, 488, 489, 7, 8, 0, 0, 489, 490, 7, 12, 0, 0, 490, 491, 7, 9, 0, 0, 491, 492, 7, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 494, 6, 9, 5, 0, 494, 34, 1, 0, 0, 0, 495, 496, 7, 6, 0, 0, 496, 497, 7, 3, 0, 0, 497, 498, 7, 9, 0, 0, 498, 499, 7, 12, 0, 0, 499, 500, 7, 16, 0, 0, 500, 501, 7, 3, 0, 0, 501, 502, 1, 0, 0, 0, 502, 503, 6, 10, 6, 0, 503, 36, 1, 0, 0, 0, 504, 505, 7, 6, 0, 0, 505, 506, 7, 7, 0, 0, 506, 507, 7, 19, 0, 0, 507, 508, 1, 0, 0, 0, 508, 509, 6, 11, 0, 0, 509, 38, 1, 0, 0, 0, 510, 511, 7, 2, 0, 0, 511, 512, 7, 10, 0, 0, 512, 513, 7, 7, 0, 0, 513, 514, 7, 19, 0, 0, 514, 515, 1, 0, 0, 0, 515, 516, 6, 12, 7, 0, 516, 40, 1, 0, 0, 0, 517, 518, 7, 2, 0, 0, 518, 519, 7, 7, 0, 0, 519, 520, 7, 6, 0, 0, 520, 521, 7, 5, 0, 0, 521, 522, 1, 0, 0, 0, 522, 523, 6, 13, 0, 0, 523, 42, 1, 0, 0, 0, 524, 525, 7, 2, 0, 0, 525, 526, 7, 5, 0, 0, 526, 527, 7, 12, 0, 0, 527, 528, 7, 5, 0, 0, 528, 529, 7, 2, 0, 0, 529, 530, 1, 0, 0, 0, 530, 531, 6, 14, 0, 0, 531, 44, 1, 0, 0, 0, 532, 533, 7, 19, 0, 0, 533, 534, 7, 10, 0, 0, 534, 535, 7, 3, 0, 0, 535, 536, 7, 6, 0, 0, 536, 537, 7, 3, 0, 0, 537, 538, 1, 0, 0, 0, 538, 539, 6, 15, 0, 0, 539, 46, 1, 0, 0, 0, 540, 541, 4, 16, 0, 0, 541, 542, 7, 1, 0, 0, 542, 543, 7, 9, 0, 0, 543, 544, 7, 13, 0, 0, 544, 545, 7, 1, 0, 0, 545, 546, 7, 9, 0, 0, 546, 547, 7, 3, 0, 0, 547, 548, 7, 2, 0, 0, 548, 549, 7, 5, 0, 0, 549, 550, 7, 12, 0, 0, 550, 551, 7, 5, 0, 0, 551, 552, 7, 2, 0, 0, 552, 553, 1, 0, 0, 0, 553, 554, 6, 16, 0, 0, 554, 48, 1, 0, 0, 0, 555, 556, 4, 17, 1, 0, 556, 557, 7, 13, 0, 0, 557, 558, 7, 7, 0, 0, 558, 559, 7, 7, 0, 0, 559, 560, 7, 18, 0, 0, 560, 561, 7, 20, 0, 0, 561, 562, 7, 8, 0, 0, 562, 563, 1, 0, 0, 0, 563, 564, 6, 17, 8, 0, 564, 50, 1, 0, 0, 0, 565, 566, 4, 18, 2, 0, 566, 567, 7, 16, 0, 0, 567, 568, 7, 3, 0, 0, 568, 569, 7, 5, 0, 0, 569, 570, 7, 6, 0, 0, 570, 571, 7, 1, 0, 0, 571, 572, 
7, 4, 0, 0, 572, 573, 7, 2, 0, 0, 573, 574, 1, 0, 0, 0, 574, 575, 6, 18, 9, 0, 575, 52, 1, 0, 0, 0, 576, 578, 8, 21, 0, 0, 577, 576, 1, 0, 0, 0, 578, 579, 1, 0, 0, 0, 579, 577, 1, 0, 0, 0, 579, 580, 1, 0, 0, 0, 580, 581, 1, 0, 0, 0, 581, 582, 6, 19, 0, 0, 582, 54, 1, 0, 0, 0, 583, 584, 5, 47, 0, 0, 584, 585, 5, 47, 0, 0, 585, 589, 1, 0, 0, 0, 586, 588, 8, 22, 0, 0, 587, 586, 1, 0, 0, 0, 588, 591, 1, 0, 0, 0, 589, 587, 1, 0, 0, 0, 589, 590, 1, 0, 0, 0, 590, 593, 1, 0, 0, 0, 591, 589, 1, 0, 0, 0, 592, 594, 5, 13, 0, 0, 593, 592, 1, 0, 0, 0, 593, 594, 1, 0, 0, 0, 594, 596, 1, 0, 0, 0, 595, 597, 5, 10, 0, 0, 596, 595, 1, 0, 0, 0, 596, 597, 1, 0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 599, 6, 20, 10, 0, 599, 56, 1, 0, 0, 0, 600, 601, 5, 47, 0, 0, 601, 602, 5, 42, 0, 0, 602, 607, 1, 0, 0, 0, 603, 606, 3, 57, 21, 0, 604, 606, 9, 0, 0, 0, 605, 603, 1, 0, 0, 0, 605, 604, 1, 0, 0, 0, 606, 609, 1, 0, 0, 0, 607, 608, 1, 0, 0, 0, 607, 605, 1, 0, 0, 0, 608, 610, 1, 0, 0, 0, 609, 607, 1, 0, 0, 0, 610, 611, 5, 42, 0, 0, 611, 612, 5, 47, 0, 0, 612, 613, 1, 0, 0, 0, 613, 614, 6, 21, 10, 0, 614, 58, 1, 0, 0, 0, 615, 617, 7, 23, 0, 0, 616, 615, 1, 0, 0, 0, 617, 618, 1, 0, 0, 0, 618, 616, 1, 0, 0, 0, 618, 619, 1, 0, 0, 0, 619, 620, 1, 0, 0, 0, 620, 621, 6, 22, 10, 0, 621, 60, 1, 0, 0, 0, 622, 623, 5, 124, 0, 0, 623, 624, 1, 0, 0, 0, 624, 625, 6, 23, 11, 0, 625, 62, 1, 0, 0, 0, 626, 627, 7, 24, 0, 0, 627, 64, 1, 0, 0, 0, 628, 629, 7, 25, 0, 0, 629, 66, 1, 0, 0, 0, 630, 631, 5, 92, 0, 0, 631, 632, 7, 26, 0, 0, 632, 68, 1, 0, 0, 0, 633, 634, 8, 27, 0, 0, 634, 70, 1, 0, 0, 0, 635, 637, 7, 3, 0, 0, 636, 638, 7, 28, 0, 0, 637, 636, 1, 0, 0, 0, 637, 638, 1, 0, 0, 0, 638, 640, 1, 0, 0, 0, 639, 641, 3, 63, 24, 0, 640, 639, 1, 0, 0, 0, 641, 642, 1, 0, 0, 0, 642, 640, 1, 0, 0, 0, 642, 643, 1, 0, 0, 0, 643, 72, 1, 0, 0, 0, 644, 645, 5, 64, 0, 0, 645, 74, 1, 0, 0, 0, 646, 647, 5, 96, 0, 0, 647, 76, 1, 0, 0, 0, 648, 652, 8, 29, 0, 0, 649, 650, 5, 96, 0, 0, 650, 652, 5, 96, 0, 0, 651, 648, 1, 0, 0, 0, 651, 
649, 1, 0, 0, 0, 652, 78, 1, 0, 0, 0, 653, 654, 5, 95, 0, 0, 654, 80, 1, 0, 0, 0, 655, 659, 3, 65, 25, 0, 656, 659, 3, 63, 24, 0, 657, 659, 3, 79, 32, 0, 658, 655, 1, 0, 0, 0, 658, 656, 1, 0, 0, 0, 658, 657, 1, 0, 0, 0, 659, 82, 1, 0, 0, 0, 660, 665, 5, 34, 0, 0, 661, 664, 3, 67, 26, 0, 662, 664, 3, 69, 27, 0, 663, 661, 1, 0, 0, 0, 663, 662, 1, 0, 0, 0, 664, 667, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 665, 666, 1, 0, 0, 0, 666, 668, 1, 0, 0, 0, 667, 665, 1, 0, 0, 0, 668, 690, 5, 34, 0, 0, 669, 670, 5, 34, 0, 0, 670, 671, 5, 34, 0, 0, 671, 672, 5, 34, 0, 0, 672, 676, 1, 0, 0, 0, 673, 675, 8, 22, 0, 0, 674, 673, 1, 0, 0, 0, 675, 678, 1, 0, 0, 0, 676, 677, 1, 0, 0, 0, 676, 674, 1, 0, 0, 0, 677, 679, 1, 0, 0, 0, 678, 676, 1, 0, 0, 0, 679, 680, 5, 34, 0, 0, 680, 681, 5, 34, 0, 0, 681, 682, 5, 34, 0, 0, 682, 684, 1, 0, 0, 0, 683, 685, 5, 34, 0, 0, 684, 683, 1, 0, 0, 0, 684, 685, 1, 0, 0, 0, 685, 687, 1, 0, 0, 0, 686, 688, 5, 34, 0, 0, 687, 686, 1, 0, 0, 0, 687, 688, 1, 0, 0, 0, 688, 690, 1, 0, 0, 0, 689, 660, 1, 0, 0, 0, 689, 669, 1, 0, 0, 0, 690, 84, 1, 0, 0, 0, 691, 693, 3, 63, 24, 0, 692, 691, 1, 0, 0, 0, 693, 694, 1, 0, 0, 0, 694, 692, 1, 0, 0, 0, 694, 695, 1, 0, 0, 0, 695, 86, 1, 0, 0, 0, 696, 698, 3, 63, 24, 0, 697, 696, 1, 0, 0, 0, 698, 699, 1, 0, 0, 0, 699, 697, 1, 0, 0, 0, 699, 700, 1, 0, 0, 0, 700, 701, 1, 0, 0, 0, 701, 705, 3, 103, 44, 0, 702, 704, 3, 63, 24, 0, 703, 702, 1, 0, 0, 0, 704, 707, 1, 0, 0, 0, 705, 703, 1, 0, 0, 0, 705, 706, 1, 0, 0, 0, 706, 739, 1, 0, 0, 0, 707, 705, 1, 0, 0, 0, 708, 710, 3, 103, 44, 0, 709, 711, 3, 63, 24, 0, 710, 709, 1, 0, 0, 0, 711, 712, 1, 0, 0, 0, 712, 710, 1, 0, 0, 0, 712, 713, 1, 0, 0, 0, 713, 739, 1, 0, 0, 0, 714, 716, 3, 63, 24, 0, 715, 714, 1, 0, 0, 0, 716, 717, 1, 0, 0, 0, 717, 715, 1, 0, 0, 0, 717, 718, 1, 0, 0, 0, 718, 726, 1, 0, 0, 0, 719, 723, 3, 103, 44, 0, 720, 722, 3, 63, 24, 0, 721, 720, 1, 0, 0, 0, 722, 725, 1, 0, 0, 0, 723, 721, 1, 0, 0, 0, 723, 724, 1, 0, 0, 0, 724, 727, 1, 0, 0, 0, 725, 723, 1, 0, 0, 0, 726, 
719, 1, 0, 0, 0, 726, 727, 1, 0, 0, 0, 727, 728, 1, 0, 0, 0, 728, 729, 3, 71, 28, 0, 729, 739, 1, 0, 0, 0, 730, 732, 3, 103, 44, 0, 731, 733, 3, 63, 24, 0, 732, 731, 1, 0, 0, 0, 733, 734, 1, 0, 0, 0, 734, 732, 1, 0, 0, 0, 734, 735, 1, 0, 0, 0, 735, 736, 1, 0, 0, 0, 736, 737, 3, 71, 28, 0, 737, 739, 1, 0, 0, 0, 738, 697, 1, 0, 0, 0, 738, 708, 1, 0, 0, 0, 738, 715, 1, 0, 0, 0, 738, 730, 1, 0, 0, 0, 739, 88, 1, 0, 0, 0, 740, 741, 7, 30, 0, 0, 741, 742, 7, 31, 0, 0, 742, 90, 1, 0, 0, 0, 743, 744, 7, 12, 0, 0, 744, 745, 7, 9, 0, 0, 745, 746, 7, 0, 0, 0, 746, 92, 1, 0, 0, 0, 747, 748, 7, 12, 0, 0, 748, 749, 7, 2, 0, 0, 749, 750, 7, 4, 0, 0, 750, 94, 1, 0, 0, 0, 751, 752, 5, 61, 0, 0, 752, 96, 1, 0, 0, 0, 753, 754, 5, 58, 0, 0, 754, 755, 5, 58, 0, 0, 755, 98, 1, 0, 0, 0, 756, 757, 5, 44, 0, 0, 757, 100, 1, 0, 0, 0, 758, 759, 7, 0, 0, 0, 759, 760, 7, 3, 0, 0, 760, 761, 7, 2, 0, 0, 761, 762, 7, 4, 0, 0, 762, 102, 1, 0, 0, 0, 763, 764, 5, 46, 0, 0, 764, 104, 1, 0, 0, 0, 765, 766, 7, 15, 0, 0, 766, 767, 7, 12, 0, 0, 767, 768, 7, 13, 0, 0, 768, 769, 7, 2, 0, 0, 769, 770, 7, 3, 0, 0, 770, 106, 1, 0, 0, 0, 771, 772, 7, 15, 0, 0, 772, 773, 7, 1, 0, 0, 773, 774, 7, 6, 0, 0, 774, 775, 7, 2, 0, 0, 775, 776, 7, 5, 0, 0, 776, 108, 1, 0, 0, 0, 777, 778, 7, 1, 0, 0, 778, 779, 7, 9, 0, 0, 779, 110, 1, 0, 0, 0, 780, 781, 7, 1, 0, 0, 781, 782, 7, 2, 0, 0, 782, 112, 1, 0, 0, 0, 783, 784, 7, 13, 0, 0, 784, 785, 7, 12, 0, 0, 785, 786, 7, 2, 0, 0, 786, 787, 7, 5, 0, 0, 787, 114, 1, 0, 0, 0, 788, 789, 7, 13, 0, 0, 789, 790, 7, 1, 0, 0, 790, 791, 7, 18, 0, 0, 791, 792, 7, 3, 0, 0, 792, 116, 1, 0, 0, 0, 793, 794, 5, 40, 0, 0, 794, 118, 1, 0, 0, 0, 795, 796, 7, 9, 0, 0, 796, 797, 7, 7, 0, 0, 797, 798, 7, 5, 0, 0, 798, 120, 1, 0, 0, 0, 799, 800, 7, 9, 0, 0, 800, 801, 7, 20, 0, 0, 801, 802, 7, 13, 0, 0, 802, 803, 7, 13, 0, 0, 803, 122, 1, 0, 0, 0, 804, 805, 7, 9, 0, 0, 805, 806, 7, 20, 0, 0, 806, 807, 7, 13, 0, 0, 807, 808, 7, 13, 0, 0, 808, 809, 7, 2, 0, 0, 809, 124, 1, 0, 0, 0, 810, 811, 7, 7, 0, 
0, 811, 812, 7, 6, 0, 0, 812, 126, 1, 0, 0, 0, 813, 814, 5, 63, 0, 0, 814, 128, 1, 0, 0, 0, 815, 816, 7, 6, 0, 0, 816, 817, 7, 13, 0, 0, 817, 818, 7, 1, 0, 0, 818, 819, 7, 18, 0, 0, 819, 820, 7, 3, 0, 0, 820, 130, 1, 0, 0, 0, 821, 822, 5, 41, 0, 0, 822, 132, 1, 0, 0, 0, 823, 824, 7, 5, 0, 0, 824, 825, 7, 6, 0, 0, 825, 826, 7, 20, 0, 0, 826, 827, 7, 3, 0, 0, 827, 134, 1, 0, 0, 0, 828, 829, 5, 61, 0, 0, 829, 830, 5, 61, 0, 0, 830, 136, 1, 0, 0, 0, 831, 832, 5, 61, 0, 0, 832, 833, 5, 126, 0, 0, 833, 138, 1, 0, 0, 0, 834, 835, 5, 33, 0, 0, 835, 836, 5, 61, 0, 0, 836, 140, 1, 0, 0, 0, 837, 838, 5, 60, 0, 0, 838, 142, 1, 0, 0, 0, 839, 840, 5, 60, 0, 0, 840, 841, 5, 61, 0, 0, 841, 144, 1, 0, 0, 0, 842, 843, 5, 62, 0, 0, 843, 146, 1, 0, 0, 0, 844, 845, 5, 62, 0, 0, 845, 846, 5, 61, 0, 0, 846, 148, 1, 0, 0, 0, 847, 848, 5, 43, 0, 0, 848, 150, 1, 0, 0, 0, 849, 850, 5, 45, 0, 0, 850, 152, 1, 0, 0, 0, 851, 852, 5, 42, 0, 0, 852, 154, 1, 0, 0, 0, 853, 854, 5, 47, 0, 0, 854, 156, 1, 0, 0, 0, 855, 856, 5, 37, 0, 0, 856, 158, 1, 0, 0, 0, 857, 858, 7, 16, 0, 0, 858, 859, 7, 12, 0, 0, 859, 860, 7, 5, 0, 0, 860, 861, 7, 4, 0, 0, 861, 862, 7, 10, 0, 0, 862, 160, 1, 0, 0, 0, 863, 864, 3, 45, 15, 0, 864, 865, 1, 0, 0, 0, 865, 866, 6, 73, 12, 0, 866, 162, 1, 0, 0, 0, 867, 870, 3, 127, 56, 0, 868, 871, 3, 65, 25, 0, 869, 871, 3, 79, 32, 0, 870, 868, 1, 0, 0, 0, 870, 869, 1, 0, 0, 0, 871, 875, 1, 0, 0, 0, 872, 874, 3, 81, 33, 0, 873, 872, 1, 0, 0, 0, 874, 877, 1, 0, 0, 0, 875, 873, 1, 0, 0, 0, 875, 876, 1, 0, 0, 0, 876, 885, 1, 0, 0, 0, 877, 875, 1, 0, 0, 0, 878, 880, 3, 127, 56, 0, 879, 881, 3, 63, 24, 0, 880, 879, 1, 0, 0, 0, 881, 882, 1, 0, 0, 0, 882, 880, 1, 0, 0, 0, 882, 883, 1, 0, 0, 0, 883, 885, 1, 0, 0, 0, 884, 867, 1, 0, 0, 0, 884, 878, 1, 0, 0, 0, 885, 164, 1, 0, 0, 0, 886, 887, 5, 91, 0, 0, 887, 888, 1, 0, 0, 0, 888, 889, 6, 75, 0, 0, 889, 890, 6, 75, 0, 0, 890, 166, 1, 0, 0, 0, 891, 892, 5, 93, 0, 0, 892, 893, 1, 0, 0, 0, 893, 894, 6, 76, 11, 0, 894, 895, 6, 76, 11, 0, 895, 
168, 1, 0, 0, 0, 896, 900, 3, 65, 25, 0, 897, 899, 3, 81, 33, 0, 898, 897, 1, 0, 0, 0, 899, 902, 1, 0, 0, 0, 900, 898, 1, 0, 0, 0, 900, 901, 1, 0, 0, 0, 901, 913, 1, 0, 0, 0, 902, 900, 1, 0, 0, 0, 903, 906, 3, 79, 32, 0, 904, 906, 3, 73, 29, 0, 905, 903, 1, 0, 0, 0, 905, 904, 1, 0, 0, 0, 906, 908, 1, 0, 0, 0, 907, 909, 3, 81, 33, 0, 908, 907, 1, 0, 0, 0, 909, 910, 1, 0, 0, 0, 910, 908, 1, 0, 0, 0, 910, 911, 1, 0, 0, 0, 911, 913, 1, 0, 0, 0, 912, 896, 1, 0, 0, 0, 912, 905, 1, 0, 0, 0, 913, 170, 1, 0, 0, 0, 914, 916, 3, 75, 30, 0, 915, 917, 3, 77, 31, 0, 916, 915, 1, 0, 0, 0, 917, 918, 1, 0, 0, 0, 918, 916, 1, 0, 0, 0, 918, 919, 1, 0, 0, 0, 919, 920, 1, 0, 0, 0, 920, 921, 3, 75, 30, 0, 921, 172, 1, 0, 0, 0, 922, 923, 3, 171, 78, 0, 923, 174, 1, 0, 0, 0, 924, 925, 3, 55, 20, 0, 925, 926, 1, 0, 0, 0, 926, 927, 6, 80, 10, 0, 927, 176, 1, 0, 0, 0, 928, 929, 3, 57, 21, 0, 929, 930, 1, 0, 0, 0, 930, 931, 6, 81, 10, 0, 931, 178, 1, 0, 0, 0, 932, 933, 3, 59, 22, 0, 933, 934, 1, 0, 0, 0, 934, 935, 6, 82, 10, 0, 935, 180, 1, 0, 0, 0, 936, 937, 3, 165, 75, 0, 937, 938, 1, 0, 0, 0, 938, 939, 6, 83, 13, 0, 939, 940, 6, 83, 14, 0, 940, 182, 1, 0, 0, 0, 941, 942, 3, 61, 23, 0, 942, 943, 1, 0, 0, 0, 943, 944, 6, 84, 15, 0, 944, 945, 6, 84, 11, 0, 945, 184, 1, 0, 0, 0, 946, 947, 3, 59, 22, 0, 947, 948, 1, 0, 0, 0, 948, 949, 6, 85, 10, 0, 949, 186, 1, 0, 0, 0, 950, 951, 3, 55, 20, 0, 951, 952, 1, 0, 0, 0, 952, 953, 6, 86, 10, 0, 953, 188, 1, 0, 0, 0, 954, 955, 3, 57, 21, 0, 955, 956, 1, 0, 0, 0, 956, 957, 6, 87, 10, 0, 957, 190, 1, 0, 0, 0, 958, 959, 3, 61, 23, 0, 959, 960, 1, 0, 0, 0, 960, 961, 6, 88, 15, 0, 961, 962, 6, 88, 11, 0, 962, 192, 1, 0, 0, 0, 963, 964, 3, 165, 75, 0, 964, 965, 1, 0, 0, 0, 965, 966, 6, 89, 13, 0, 966, 194, 1, 0, 0, 0, 967, 968, 3, 167, 76, 0, 968, 969, 1, 0, 0, 0, 969, 970, 6, 90, 16, 0, 970, 196, 1, 0, 0, 0, 971, 972, 3, 337, 161, 0, 972, 973, 1, 0, 0, 0, 973, 974, 6, 91, 17, 0, 974, 198, 1, 0, 0, 0, 975, 976, 3, 99, 42, 0, 976, 977, 1, 0, 0, 0, 977, 978, 
6, 92, 18, 0, 978, 200, 1, 0, 0, 0, 979, 980, 3, 95, 40, 0, 980, 981, 1, 0, 0, 0, 981, 982, 6, 93, 19, 0, 982, 202, 1, 0, 0, 0, 983, 984, 7, 16, 0, 0, 984, 985, 7, 3, 0, 0, 985, 986, 7, 5, 0, 0, 986, 987, 7, 12, 0, 0, 987, 988, 7, 0, 0, 0, 988, 989, 7, 12, 0, 0, 989, 990, 7, 5, 0, 0, 990, 991, 7, 12, 0, 0, 991, 204, 1, 0, 0, 0, 992, 996, 8, 32, 0, 0, 993, 994, 5, 47, 0, 0, 994, 996, 8, 33, 0, 0, 995, 992, 1, 0, 0, 0, 995, 993, 1, 0, 0, 0, 996, 206, 1, 0, 0, 0, 997, 999, 3, 205, 95, 0, 998, 997, 1, 0, 0, 0, 999, 1000, 1, 0, 0, 0, 1000, 998, 1, 0, 0, 0, 1000, 1001, 1, 0, 0, 0, 1001, 208, 1, 0, 0, 0, 1002, 1003, 3, 207, 96, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1005, 6, 97, 20, 0, 1005, 210, 1, 0, 0, 0, 1006, 1007, 3, 83, 34, 0, 1007, 1008, 1, 0, 0, 0, 1008, 1009, 6, 98, 21, 0, 1009, 212, 1, 0, 0, 0, 1010, 1011, 3, 55, 20, 0, 1011, 1012, 1, 0, 0, 0, 1012, 1013, 6, 99, 10, 0, 1013, 214, 1, 0, 0, 0, 1014, 1015, 3, 57, 21, 0, 1015, 1016, 1, 0, 0, 0, 1016, 1017, 6, 100, 10, 0, 1017, 216, 1, 0, 0, 0, 1018, 1019, 3, 59, 22, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 6, 101, 10, 0, 1021, 218, 1, 0, 0, 0, 1022, 1023, 3, 61, 23, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1025, 6, 102, 15, 0, 1025, 1026, 6, 102, 11, 0, 1026, 220, 1, 0, 0, 0, 1027, 1028, 3, 103, 44, 0, 1028, 1029, 1, 0, 0, 0, 1029, 1030, 6, 103, 22, 0, 1030, 222, 1, 0, 0, 0, 1031, 1032, 3, 99, 42, 0, 1032, 1033, 1, 0, 0, 0, 1033, 1034, 6, 104, 18, 0, 1034, 224, 1, 0, 0, 0, 1035, 1036, 3, 127, 56, 0, 1036, 1037, 1, 0, 0, 0, 1037, 1038, 6, 105, 23, 0, 1038, 226, 1, 0, 0, 0, 1039, 1040, 3, 163, 74, 0, 1040, 1041, 1, 0, 0, 0, 1041, 1042, 6, 106, 24, 0, 1042, 228, 1, 0, 0, 0, 1043, 1048, 3, 65, 25, 0, 1044, 1048, 3, 63, 24, 0, 1045, 1048, 3, 79, 32, 0, 1046, 1048, 3, 153, 69, 0, 1047, 1043, 1, 0, 0, 0, 1047, 1044, 1, 0, 0, 0, 1047, 1045, 1, 0, 0, 0, 1047, 1046, 1, 0, 0, 0, 1048, 230, 1, 0, 0, 0, 1049, 1052, 3, 65, 25, 0, 1050, 1052, 3, 153, 69, 0, 1051, 1049, 1, 0, 0, 0, 1051, 1050, 1, 0, 0, 0, 1052, 1056, 1, 0, 0, 0, 1053, 1055, 3, 
229, 107, 0, 1054, 1053, 1, 0, 0, 0, 1055, 1058, 1, 0, 0, 0, 1056, 1054, 1, 0, 0, 0, 1056, 1057, 1, 0, 0, 0, 1057, 1069, 1, 0, 0, 0, 1058, 1056, 1, 0, 0, 0, 1059, 1062, 3, 79, 32, 0, 1060, 1062, 3, 73, 29, 0, 1061, 1059, 1, 0, 0, 0, 1061, 1060, 1, 0, 0, 0, 1062, 1064, 1, 0, 0, 0, 1063, 1065, 3, 229, 107, 0, 1064, 1063, 1, 0, 0, 0, 1065, 1066, 1, 0, 0, 0, 1066, 1064, 1, 0, 0, 0, 1066, 1067, 1, 0, 0, 0, 1067, 1069, 1, 0, 0, 0, 1068, 1051, 1, 0, 0, 0, 1068, 1061, 1, 0, 0, 0, 1069, 232, 1, 0, 0, 0, 1070, 1073, 3, 231, 108, 0, 1071, 1073, 3, 171, 78, 0, 1072, 1070, 1, 0, 0, 0, 1072, 1071, 1, 0, 0, 0, 1073, 1074, 1, 0, 0, 0, 1074, 1072, 1, 0, 0, 0, 1074, 1075, 1, 0, 0, 0, 1075, 234, 1, 0, 0, 0, 1076, 1077, 3, 55, 20, 0, 1077, 1078, 1, 0, 0, 0, 1078, 1079, 6, 110, 10, 0, 1079, 236, 1, 0, 0, 0, 1080, 1081, 3, 57, 21, 0, 1081, 1082, 1, 0, 0, 0, 1082, 1083, 6, 111, 10, 0, 1083, 238, 1, 0, 0, 0, 1084, 1085, 3, 59, 22, 0, 1085, 1086, 1, 0, 0, 0, 1086, 1087, 6, 112, 10, 0, 1087, 240, 1, 0, 0, 0, 1088, 1089, 3, 61, 23, 0, 1089, 1090, 1, 0, 0, 0, 1090, 1091, 6, 113, 15, 0, 1091, 1092, 6, 113, 11, 0, 1092, 242, 1, 0, 0, 0, 1093, 1094, 3, 95, 40, 0, 1094, 1095, 1, 0, 0, 0, 1095, 1096, 6, 114, 19, 0, 1096, 244, 1, 0, 0, 0, 1097, 1098, 3, 99, 42, 0, 1098, 1099, 1, 0, 0, 0, 1099, 1100, 6, 115, 18, 0, 1100, 246, 1, 0, 0, 0, 1101, 1102, 3, 103, 44, 0, 1102, 1103, 1, 0, 0, 0, 1103, 1104, 6, 116, 22, 0, 1104, 248, 1, 0, 0, 0, 1105, 1106, 3, 127, 56, 0, 1106, 1107, 1, 0, 0, 0, 1107, 1108, 6, 117, 23, 0, 1108, 250, 1, 0, 0, 0, 1109, 1110, 3, 163, 74, 0, 1110, 1111, 1, 0, 0, 0, 1111, 1112, 6, 118, 24, 0, 1112, 252, 1, 0, 0, 0, 1113, 1114, 7, 12, 0, 0, 1114, 1115, 7, 2, 0, 0, 1115, 254, 1, 0, 0, 0, 1116, 1117, 3, 233, 109, 0, 1117, 1118, 1, 0, 0, 0, 1118, 1119, 6, 120, 25, 0, 1119, 256, 1, 0, 0, 0, 1120, 1121, 3, 55, 20, 0, 1121, 1122, 1, 0, 0, 0, 1122, 1123, 6, 121, 10, 0, 1123, 258, 1, 0, 0, 0, 1124, 1125, 3, 57, 21, 0, 1125, 1126, 1, 0, 0, 0, 1126, 1127, 6, 122, 10, 0, 1127, 260, 1, 0, 0, 
0, 1128, 1129, 3, 59, 22, 0, 1129, 1130, 1, 0, 0, 0, 1130, 1131, 6, 123, 10, 0, 1131, 262, 1, 0, 0, 0, 1132, 1133, 3, 61, 23, 0, 1133, 1134, 1, 0, 0, 0, 1134, 1135, 6, 124, 15, 0, 1135, 1136, 6, 124, 11, 0, 1136, 264, 1, 0, 0, 0, 1137, 1138, 3, 165, 75, 0, 1138, 1139, 1, 0, 0, 0, 1139, 1140, 6, 125, 13, 0, 1140, 1141, 6, 125, 26, 0, 1141, 266, 1, 0, 0, 0, 1142, 1143, 7, 7, 0, 0, 1143, 1144, 7, 9, 0, 0, 1144, 1145, 1, 0, 0, 0, 1145, 1146, 6, 126, 27, 0, 1146, 268, 1, 0, 0, 0, 1147, 1148, 7, 19, 0, 0, 1148, 1149, 7, 1, 0, 0, 1149, 1150, 7, 5, 0, 0, 1150, 1151, 7, 10, 0, 0, 1151, 1152, 1, 0, 0, 0, 1152, 1153, 6, 127, 27, 0, 1153, 270, 1, 0, 0, 0, 1154, 1155, 8, 34, 0, 0, 1155, 272, 1, 0, 0, 0, 1156, 1158, 3, 271, 128, 0, 1157, 1156, 1, 0, 0, 0, 1158, 1159, 1, 0, 0, 0, 1159, 1157, 1, 0, 0, 0, 1159, 1160, 1, 0, 0, 0, 1160, 1161, 1, 0, 0, 0, 1161, 1162, 3, 337, 161, 0, 1162, 1164, 1, 0, 0, 0, 1163, 1157, 1, 0, 0, 0, 1163, 1164, 1, 0, 0, 0, 1164, 1166, 1, 0, 0, 0, 1165, 1167, 3, 271, 128, 0, 1166, 1165, 1, 0, 0, 0, 1167, 1168, 1, 0, 0, 0, 1168, 1166, 1, 0, 0, 0, 1168, 1169, 1, 0, 0, 0, 1169, 274, 1, 0, 0, 0, 1170, 1171, 3, 273, 129, 0, 1171, 1172, 1, 0, 0, 0, 1172, 1173, 6, 130, 28, 0, 1173, 276, 1, 0, 0, 0, 1174, 1175, 3, 55, 20, 0, 1175, 1176, 1, 0, 0, 0, 1176, 1177, 6, 131, 10, 0, 1177, 278, 1, 0, 0, 0, 1178, 1179, 3, 57, 21, 0, 1179, 1180, 1, 0, 0, 0, 1180, 1181, 6, 132, 10, 0, 1181, 280, 1, 0, 0, 0, 1182, 1183, 3, 59, 22, 0, 1183, 1184, 1, 0, 0, 0, 1184, 1185, 6, 133, 10, 0, 1185, 282, 1, 0, 0, 0, 1186, 1187, 3, 61, 23, 0, 1187, 1188, 1, 0, 0, 0, 1188, 1189, 6, 134, 15, 0, 1189, 1190, 6, 134, 11, 0, 1190, 1191, 6, 134, 11, 0, 1191, 284, 1, 0, 0, 0, 1192, 1193, 3, 95, 40, 0, 1193, 1194, 1, 0, 0, 0, 1194, 1195, 6, 135, 19, 0, 1195, 286, 1, 0, 0, 0, 1196, 1197, 3, 99, 42, 0, 1197, 1198, 1, 0, 0, 0, 1198, 1199, 6, 136, 18, 0, 1199, 288, 1, 0, 0, 0, 1200, 1201, 3, 103, 44, 0, 1201, 1202, 1, 0, 0, 0, 1202, 1203, 6, 137, 22, 0, 1203, 290, 1, 0, 0, 0, 1204, 1205, 3, 269, 
127, 0, 1205, 1206, 1, 0, 0, 0, 1206, 1207, 6, 138, 29, 0, 1207, 292, 1, 0, 0, 0, 1208, 1209, 3, 233, 109, 0, 1209, 1210, 1, 0, 0, 0, 1210, 1211, 6, 139, 25, 0, 1211, 294, 1, 0, 0, 0, 1212, 1213, 3, 173, 79, 0, 1213, 1214, 1, 0, 0, 0, 1214, 1215, 6, 140, 30, 0, 1215, 296, 1, 0, 0, 0, 1216, 1217, 3, 127, 56, 0, 1217, 1218, 1, 0, 0, 0, 1218, 1219, 6, 141, 23, 0, 1219, 298, 1, 0, 0, 0, 1220, 1221, 3, 163, 74, 0, 1221, 1222, 1, 0, 0, 0, 1222, 1223, 6, 142, 24, 0, 1223, 300, 1, 0, 0, 0, 1224, 1225, 3, 55, 20, 0, 1225, 1226, 1, 0, 0, 0, 1226, 1227, 6, 143, 10, 0, 1227, 302, 1, 0, 0, 0, 1228, 1229, 3, 57, 21, 0, 1229, 1230, 1, 0, 0, 0, 1230, 1231, 6, 144, 10, 0, 1231, 304, 1, 0, 0, 0, 1232, 1233, 3, 59, 22, 0, 1233, 1234, 1, 0, 0, 0, 1234, 1235, 6, 145, 10, 0, 1235, 306, 1, 0, 0, 0, 1236, 1237, 3, 61, 23, 0, 1237, 1238, 1, 0, 0, 0, 1238, 1239, 6, 146, 15, 0, 1239, 1240, 6, 146, 11, 0, 1240, 308, 1, 0, 0, 0, 1241, 1242, 3, 103, 44, 0, 1242, 1243, 1, 0, 0, 0, 1243, 1244, 6, 147, 22, 0, 1244, 310, 1, 0, 0, 0, 1245, 1246, 3, 127, 56, 0, 1246, 1247, 1, 0, 0, 0, 1247, 1248, 6, 148, 23, 0, 1248, 312, 1, 0, 0, 0, 1249, 1250, 3, 163, 74, 0, 1250, 1251, 1, 0, 0, 0, 1251, 1252, 6, 149, 24, 0, 1252, 314, 1, 0, 0, 0, 1253, 1254, 3, 173, 79, 0, 1254, 1255, 1, 0, 0, 0, 1255, 1256, 6, 150, 30, 0, 1256, 316, 1, 0, 0, 0, 1257, 1258, 3, 169, 77, 0, 1258, 1259, 1, 0, 0, 0, 1259, 1260, 6, 151, 31, 0, 1260, 318, 1, 0, 0, 0, 1261, 1262, 3, 55, 20, 0, 1262, 1263, 1, 0, 0, 0, 1263, 1264, 6, 152, 10, 0, 1264, 320, 1, 0, 0, 0, 1265, 1266, 3, 57, 21, 0, 1266, 1267, 1, 0, 0, 0, 1267, 1268, 6, 153, 10, 0, 1268, 322, 1, 0, 0, 0, 1269, 1270, 3, 59, 22, 0, 1270, 1271, 1, 0, 0, 0, 1271, 1272, 6, 154, 10, 0, 1272, 324, 1, 0, 0, 0, 1273, 1274, 3, 61, 23, 0, 1274, 1275, 1, 0, 0, 0, 1275, 1276, 6, 155, 15, 0, 1276, 1277, 6, 155, 11, 0, 1277, 326, 1, 0, 0, 0, 1278, 1279, 7, 1, 0, 0, 1279, 1280, 7, 9, 0, 0, 1280, 1281, 7, 15, 0, 0, 1281, 1282, 7, 7, 0, 0, 1282, 328, 1, 0, 0, 0, 1283, 1284, 3, 55, 20, 0, 1284, 
1285, 1, 0, 0, 0, 1285, 1286, 6, 157, 10, 0, 1286, 330, 1, 0, 0, 0, 1287, 1288, 3, 57, 21, 0, 1288, 1289, 1, 0, 0, 0, 1289, 1290, 6, 158, 10, 0, 1290, 332, 1, 0, 0, 0, 1291, 1292, 3, 59, 22, 0, 1292, 1293, 1, 0, 0, 0, 1293, 1294, 6, 159, 10, 0, 1294, 334, 1, 0, 0, 0, 1295, 1296, 3, 167, 76, 0, 1296, 1297, 1, 0, 0, 0, 1297, 1298, 6, 160, 16, 0, 1298, 1299, 6, 160, 11, 0, 1299, 336, 1, 0, 0, 0, 1300, 1301, 5, 58, 0, 0, 1301, 338, 1, 0, 0, 0, 1302, 1308, 3, 73, 29, 0, 1303, 1308, 3, 63, 24, 0, 1304, 1308, 3, 103, 44, 0, 1305, 1308, 3, 65, 25, 0, 1306, 1308, 3, 79, 32, 0, 1307, 1302, 1, 0, 0, 0, 1307, 1303, 1, 0, 0, 0, 1307, 1304, 1, 0, 0, 0, 1307, 1305, 1, 0, 0, 0, 1307, 1306, 1, 0, 0, 0, 1308, 1309, 1, 0, 0, 0, 1309, 1307, 1, 0, 0, 0, 1309, 1310, 1, 0, 0, 0, 1310, 340, 1, 0, 0, 0, 1311, 1312, 3, 55, 20, 0, 1312, 1313, 1, 0, 0, 0, 1313, 1314, 6, 163, 10, 0, 1314, 342, 1, 0, 0, 0, 1315, 1316, 3, 57, 21, 0, 1316, 1317, 1, 0, 0, 0, 1317, 1318, 6, 164, 10, 0, 1318, 344, 1, 0, 0, 0, 1319, 1320, 3, 59, 22, 0, 1320, 1321, 1, 0, 0, 0, 1321, 1322, 6, 165, 10, 0, 1322, 346, 1, 0, 0, 0, 1323, 1324, 3, 61, 23, 0, 1324, 1325, 1, 0, 0, 0, 1325, 1326, 6, 166, 15, 0, 1326, 1327, 6, 166, 11, 0, 1327, 348, 1, 0, 0, 0, 1328, 1329, 3, 337, 161, 0, 1329, 1330, 1, 0, 0, 0, 1330, 1331, 6, 167, 17, 0, 1331, 350, 1, 0, 0, 0, 1332, 1333, 3, 99, 42, 0, 1333, 1334, 1, 0, 0, 0, 1334, 1335, 6, 168, 18, 0, 1335, 352, 1, 0, 0, 0, 1336, 1337, 3, 103, 44, 0, 1337, 1338, 1, 0, 0, 0, 1338, 1339, 6, 169, 22, 0, 1339, 354, 1, 0, 0, 0, 1340, 1341, 3, 267, 126, 0, 1341, 1342, 1, 0, 0, 0, 1342, 1343, 6, 170, 32, 0, 1343, 1344, 6, 170, 33, 0, 1344, 356, 1, 0, 0, 0, 1345, 1346, 3, 207, 96, 0, 1346, 1347, 1, 0, 0, 0, 1347, 1348, 6, 171, 20, 0, 1348, 358, 1, 0, 0, 0, 1349, 1350, 3, 83, 34, 0, 1350, 1351, 1, 0, 0, 0, 1351, 1352, 6, 172, 21, 0, 1352, 360, 1, 0, 0, 0, 1353, 1354, 3, 55, 20, 0, 1354, 1355, 1, 0, 0, 0, 1355, 1356, 6, 173, 10, 0, 1356, 362, 1, 0, 0, 0, 1357, 1358, 3, 57, 21, 0, 1358, 1359, 1, 0, 0, 0, 
1359, 1360, 6, 174, 10, 0, 1360, 364, 1, 0, 0, 0, 1361, 1362, 3, 59, 22, 0, 1362, 1363, 1, 0, 0, 0, 1363, 1364, 6, 175, 10, 0, 1364, 366, 1, 0, 0, 0, 1365, 1366, 3, 61, 23, 0, 1366, 1367, 1, 0, 0, 0, 1367, 1368, 6, 176, 15, 0, 1368, 1369, 6, 176, 11, 0, 1369, 1370, 6, 176, 11, 0, 1370, 368, 1, 0, 0, 0, 1371, 1372, 3, 99, 42, 0, 1372, 1373, 1, 0, 0, 0, 1373, 1374, 6, 177, 18, 0, 1374, 370, 1, 0, 0, 0, 1375, 1376, 3, 103, 44, 0, 1376, 1377, 1, 0, 0, 0, 1377, 1378, 6, 178, 22, 0, 1378, 372, 1, 0, 0, 0, 1379, 1380, 3, 233, 109, 0, 1380, 1381, 1, 0, 0, 0, 1381, 1382, 6, 179, 25, 0, 1382, 374, 1, 0, 0, 0, 1383, 1384, 3, 55, 20, 0, 1384, 1385, 1, 0, 0, 0, 1385, 1386, 6, 180, 10, 0, 1386, 376, 1, 0, 0, 0, 1387, 1388, 3, 57, 21, 0, 1388, 1389, 1, 0, 0, 0, 1389, 1390, 6, 181, 10, 0, 1390, 378, 1, 0, 0, 0, 1391, 1392, 3, 59, 22, 0, 1392, 1393, 1, 0, 0, 0, 1393, 1394, 6, 182, 10, 0, 1394, 380, 1, 0, 0, 0, 1395, 1396, 3, 61, 23, 0, 1396, 1397, 1, 0, 0, 0, 1397, 1398, 6, 183, 15, 0, 1398, 1399, 6, 183, 11, 0, 1399, 382, 1, 0, 0, 0, 1400, 1401, 3, 207, 96, 0, 1401, 1402, 1, 0, 0, 0, 1402, 1403, 6, 184, 20, 0, 1403, 1404, 6, 184, 11, 0, 1404, 1405, 6, 184, 34, 0, 1405, 384, 1, 0, 0, 0, 1406, 1407, 3, 83, 34, 0, 1407, 1408, 1, 0, 0, 0, 1408, 1409, 6, 185, 21, 0, 1409, 1410, 6, 185, 11, 0, 1410, 1411, 6, 185, 34, 0, 1411, 386, 1, 0, 0, 0, 1412, 1413, 3, 55, 20, 0, 1413, 1414, 1, 0, 0, 0, 1414, 1415, 6, 186, 10, 0, 1415, 388, 1, 0, 0, 0, 1416, 1417, 3, 57, 21, 0, 1417, 1418, 1, 0, 0, 0, 1418, 1419, 6, 187, 10, 0, 1419, 390, 1, 0, 0, 0, 1420, 1421, 3, 59, 22, 0, 1421, 1422, 1, 0, 0, 0, 1422, 1423, 6, 188, 10, 0, 1423, 392, 1, 0, 0, 0, 1424, 1425, 3, 337, 161, 0, 1425, 1426, 1, 0, 0, 0, 1426, 1427, 6, 189, 17, 0, 1427, 1428, 6, 189, 11, 0, 1428, 1429, 6, 189, 9, 0, 1429, 394, 1, 0, 0, 0, 1430, 1431, 3, 99, 42, 0, 1431, 1432, 1, 0, 0, 0, 1432, 1433, 6, 190, 18, 0, 1433, 1434, 6, 190, 11, 0, 1434, 1435, 6, 190, 9, 0, 1435, 396, 1, 0, 0, 0, 1436, 1437, 3, 55, 20, 0, 1437, 1438, 1, 0, 0, 
0, 1438, 1439, 6, 191, 10, 0, 1439, 398, 1, 0, 0, 0, 1440, 1441, 3, 57, 21, 0, 1441, 1442, 1, 0, 0, 0, 1442, 1443, 6, 192, 10, 0, 1443, 400, 1, 0, 0, 0, 1444, 1445, 3, 59, 22, 0, 1445, 1446, 1, 0, 0, 0, 1446, 1447, 6, 193, 10, 0, 1447, 402, 1, 0, 0, 0, 1448, 1449, 3, 173, 79, 0, 1449, 1450, 1, 0, 0, 0, 1450, 1451, 6, 194, 11, 0, 1451, 1452, 6, 194, 0, 0, 1452, 1453, 6, 194, 30, 0, 1453, 404, 1, 0, 0, 0, 1454, 1455, 3, 169, 77, 0, 1455, 1456, 1, 0, 0, 0, 1456, 1457, 6, 195, 11, 0, 1457, 1458, 6, 195, 0, 0, 1458, 1459, 6, 195, 31, 0, 1459, 406, 1, 0, 0, 0, 1460, 1461, 3, 89, 37, 0, 1461, 1462, 1, 0, 0, 0, 1462, 1463, 6, 196, 11, 0, 1463, 1464, 6, 196, 0, 0, 1464, 1465, 6, 196, 35, 0, 1465, 408, 1, 0, 0, 0, 1466, 1467, 3, 61, 23, 0, 1467, 1468, 1, 0, 0, 0, 1468, 1469, 6, 197, 15, 0, 1469, 1470, 6, 197, 11, 0, 1470, 410, 1, 0, 0, 0, 65, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 579, 589, 593, 596, 605, 607, 618, 637, 642, 651, 658, 663, 665, 676, 684, 687, 689, 694, 699, 705, 712, 717, 723, 726, 734, 738, 870, 875, 882, 884, 900, 905, 910, 912, 918, 995, 1000, 1047, 1051, 1056, 1061, 1066, 1068, 1072, 1074, 1159, 1163, 1168, 1307, 1309, 36, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 16, 0, 7, 65, 0, 5, 0, 0, 7, 24, 0, 7, 66, 0, 7, 104, 0, 7, 33, 0, 7, 31, 0, 7, 76, 0, 7, 25, 0, 7, 35, 0, 7, 47, 0, 7, 64, 0, 7, 80, 0, 5, 10, 0, 5, 7, 0, 7, 90, 0, 7, 89, 0, 7, 68, 0, 7, 67, 0, 7, 88, 0, 5, 12, 0, 5, 14, 0, 7, 28, 0] \ No newline at end of file +[4, 0, 120, 1479, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 
25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 
7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 4, 19, 578, 8, 19, 11, 19, 12, 19, 579, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 588, 8, 20, 10, 20, 12, 20, 591, 9, 20, 1, 20, 3, 20, 594, 8, 20, 1, 20, 3, 20, 597, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 606, 8, 21, 10, 21, 12, 21, 609, 9, 21, 1, 21, 1, 21, 1, 21, 1, 21, 
1, 21, 1, 22, 4, 22, 617, 8, 22, 11, 22, 12, 22, 618, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 28, 1, 28, 3, 28, 638, 8, 28, 1, 28, 4, 28, 641, 8, 28, 11, 28, 12, 28, 642, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 3, 31, 652, 8, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 659, 8, 33, 1, 34, 1, 34, 1, 34, 5, 34, 664, 8, 34, 10, 34, 12, 34, 667, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 675, 8, 34, 10, 34, 12, 34, 678, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 3, 34, 685, 8, 34, 1, 34, 3, 34, 688, 8, 34, 3, 34, 690, 8, 34, 1, 35, 4, 35, 693, 8, 35, 11, 35, 12, 35, 694, 1, 36, 4, 36, 698, 8, 36, 11, 36, 12, 36, 699, 1, 36, 1, 36, 5, 36, 704, 8, 36, 10, 36, 12, 36, 707, 9, 36, 1, 36, 1, 36, 4, 36, 711, 8, 36, 11, 36, 12, 36, 712, 1, 36, 4, 36, 716, 8, 36, 11, 36, 12, 36, 717, 1, 36, 1, 36, 5, 36, 722, 8, 36, 10, 36, 12, 36, 725, 9, 36, 3, 36, 727, 8, 36, 1, 36, 1, 36, 1, 36, 1, 36, 4, 36, 733, 8, 36, 11, 36, 12, 36, 734, 1, 36, 1, 36, 3, 36, 739, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 3, 74, 871, 8, 74, 1, 74, 5, 74, 
874, 8, 74, 10, 74, 12, 74, 877, 9, 74, 1, 74, 1, 74, 4, 74, 881, 8, 74, 11, 74, 12, 74, 882, 3, 74, 885, 8, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 5, 77, 899, 8, 77, 10, 77, 12, 77, 902, 9, 77, 1, 77, 1, 77, 3, 77, 906, 8, 77, 1, 77, 4, 77, 909, 8, 77, 11, 77, 12, 77, 910, 3, 77, 913, 8, 77, 1, 78, 1, 78, 4, 78, 917, 8, 78, 11, 78, 12, 78, 918, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 3, 95, 996, 8, 95, 1, 96, 4, 96, 999, 8, 96, 11, 96, 12, 96, 1000, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 3, 107, 1050, 8, 107, 1, 108, 1, 108, 3, 108, 1054, 8, 108, 1, 108, 5, 108, 1057, 8, 108, 10, 108, 12, 108, 1060, 9, 108, 1, 108, 1, 108, 3, 108, 1064, 8, 108, 1, 108, 4, 108, 1067, 8, 108, 11, 108, 12, 108, 1068, 3, 108, 1071, 8, 108, 1, 109, 1, 109, 4, 109, 1075, 8, 109, 11, 109, 12, 109, 1076, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 120, 1, 
120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 129, 4, 129, 1162, 8, 129, 11, 129, 12, 129, 1163, 1, 129, 1, 129, 3, 129, 1168, 8, 129, 1, 129, 4, 129, 1171, 8, 129, 11, 129, 12, 129, 1172, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 4, 162, 1316, 8, 162, 11, 162, 12, 162, 1317, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 
174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 197, 2, 607, 676, 0, 198, 15, 1, 17, 2, 19, 3, 21, 4, 23, 5, 25, 6, 27, 7, 29, 8, 31, 9, 33, 10, 35, 11, 37, 12, 39, 13, 41, 14, 43, 15, 45, 16, 47, 17, 49, 18, 51, 19, 53, 20, 55, 21, 57, 22, 59, 23, 61, 24, 63, 0, 65, 0, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 0, 81, 0, 83, 25, 85, 26, 87, 27, 89, 28, 91, 29, 93, 30, 95, 31, 97, 32, 99, 33, 101, 34, 103, 35, 105, 36, 107, 37, 109, 38, 111, 39, 113, 40, 115, 41, 117, 42, 119, 43, 121, 44, 123, 45, 125, 46, 127, 47, 129, 48, 131, 49, 133, 50, 135, 51, 137, 52, 139, 53, 141, 54, 143, 55, 145, 56, 147, 57, 149, 58, 151, 59, 153, 60, 155, 61, 157, 62, 159, 63, 161, 0, 163, 64, 165, 65, 167, 66, 169, 67, 171, 0, 173, 68, 175, 69, 177, 70, 179, 71, 181, 0, 183, 0, 185, 72, 187, 73, 189, 74, 191, 0, 193, 0, 195, 0, 197, 0, 199, 0, 201, 0, 203, 75, 205, 0, 207, 76, 209, 0, 211, 0, 213, 77, 215, 78, 217, 79, 219, 0, 221, 0, 223, 0, 225, 0, 227, 0, 229, 0, 231, 0, 233, 80, 235, 81, 237, 82, 239, 83, 241, 0, 243, 0, 245, 0, 247, 0, 249, 0, 251, 0, 253, 84, 255, 0, 257, 85, 259, 86, 261, 87, 263, 0, 265, 0, 267, 88, 269, 89, 271, 0, 
273, 90, 275, 0, 277, 91, 279, 92, 281, 93, 283, 0, 285, 0, 287, 0, 289, 0, 291, 0, 293, 0, 295, 0, 297, 0, 299, 0, 301, 94, 303, 95, 305, 96, 307, 0, 309, 0, 311, 0, 313, 0, 315, 0, 317, 0, 319, 97, 321, 98, 323, 99, 325, 0, 327, 100, 329, 101, 331, 102, 333, 103, 335, 0, 337, 104, 339, 105, 341, 106, 343, 107, 345, 108, 347, 0, 349, 0, 351, 0, 353, 0, 355, 0, 357, 0, 359, 0, 361, 109, 363, 110, 365, 111, 367, 0, 369, 0, 371, 0, 373, 0, 375, 112, 377, 113, 379, 114, 381, 0, 383, 0, 385, 0, 387, 115, 389, 116, 391, 117, 393, 0, 395, 0, 397, 118, 399, 119, 401, 120, 403, 0, 405, 0, 407, 0, 409, 0, 15, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 35, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1507, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 
0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 1, 61, 1, 0, 0, 0, 1, 83, 1, 0, 0, 0, 1, 85, 1, 0, 0, 0, 1, 87, 1, 0, 0, 0, 1, 89, 1, 0, 0, 0, 1, 91, 1, 0, 0, 0, 1, 93, 1, 0, 0, 0, 1, 95, 1, 0, 0, 0, 1, 97, 1, 0, 0, 0, 1, 99, 1, 0, 0, 0, 1, 101, 1, 0, 0, 0, 1, 103, 1, 0, 0, 0, 1, 105, 1, 0, 0, 0, 1, 107, 1, 0, 0, 0, 1, 109, 1, 0, 0, 0, 1, 111, 1, 0, 0, 0, 1, 113, 1, 0, 0, 0, 1, 115, 1, 0, 0, 0, 1, 117, 1, 0, 0, 0, 1, 119, 1, 0, 0, 0, 1, 121, 1, 0, 0, 0, 1, 123, 1, 0, 0, 0, 1, 125, 1, 0, 0, 0, 1, 127, 1, 0, 0, 0, 1, 129, 1, 0, 0, 0, 1, 131, 1, 0, 0, 0, 1, 133, 1, 0, 0, 0, 1, 135, 1, 0, 0, 0, 1, 137, 1, 0, 0, 0, 1, 139, 1, 0, 0, 0, 1, 141, 1, 0, 0, 0, 1, 143, 1, 0, 0, 0, 1, 145, 1, 0, 0, 0, 1, 147, 1, 0, 0, 0, 1, 149, 1, 0, 0, 0, 1, 151, 1, 0, 0, 0, 1, 153, 1, 0, 0, 0, 1, 155, 1, 0, 0, 0, 1, 157, 1, 0, 0, 0, 1, 159, 1, 0, 0, 0, 1, 161, 1, 0, 0, 0, 1, 163, 1, 0, 0, 0, 1, 165, 1, 0, 0, 0, 1, 167, 1, 0, 0, 0, 1, 169, 1, 0, 0, 0, 1, 173, 1, 0, 0, 0, 1, 175, 1, 0, 0, 0, 1, 177, 1, 0, 0, 0, 1, 179, 1, 0, 0, 0, 2, 181, 1, 0, 0, 0, 2, 183, 1, 0, 0, 0, 2, 185, 1, 0, 0, 0, 2, 187, 1, 0, 0, 0, 2, 189, 1, 0, 0, 0, 3, 191, 1, 0, 0, 0, 3, 193, 1, 0, 0, 0, 3, 195, 1, 0, 0, 0, 3, 197, 1, 0, 0, 0, 3, 199, 1, 0, 0, 0, 3, 201, 1, 0, 0, 0, 3, 203, 1, 0, 0, 0, 3, 207, 1, 0, 0, 0, 3, 209, 1, 0, 0, 0, 3, 211, 1, 0, 0, 0, 3, 213, 1, 0, 0, 0, 3, 215, 1, 0, 0, 0, 3, 217, 1, 0, 0, 0, 4, 219, 1, 0, 0, 0, 4, 221, 1, 0, 0, 0, 4, 223, 1, 0, 0, 0, 4, 225, 1, 0, 0, 0, 4, 227, 1, 0, 0, 0, 4, 233, 1, 0, 0, 0, 4, 235, 1, 0, 0, 0, 4, 237, 1, 0, 0, 0, 4, 239, 1, 0, 0, 0, 5, 241, 1, 0, 0, 0, 5, 243, 1, 0, 0, 0, 5, 245, 1, 0, 0, 0, 5, 247, 1, 0, 0, 0, 5, 249, 1, 0, 0, 0, 5, 251, 1, 0, 0, 0, 5, 253, 1, 0, 0, 0, 5, 255, 1, 0, 0, 0, 5, 257, 1, 0, 0, 0, 5, 259, 1, 0, 0, 0, 5, 261, 1, 0, 0, 0, 6, 263, 1, 0, 0, 0, 6, 265, 1, 0, 0, 0, 6, 267, 1, 0, 0, 0, 6, 269, 1, 0, 0, 0, 6, 273, 1, 0, 0, 0, 6, 275, 1, 
0, 0, 0, 6, 277, 1, 0, 0, 0, 6, 279, 1, 0, 0, 0, 6, 281, 1, 0, 0, 0, 7, 283, 1, 0, 0, 0, 7, 285, 1, 0, 0, 0, 7, 287, 1, 0, 0, 0, 7, 289, 1, 0, 0, 0, 7, 291, 1, 0, 0, 0, 7, 293, 1, 0, 0, 0, 7, 295, 1, 0, 0, 0, 7, 297, 1, 0, 0, 0, 7, 299, 1, 0, 0, 0, 7, 301, 1, 0, 0, 0, 7, 303, 1, 0, 0, 0, 7, 305, 1, 0, 0, 0, 8, 307, 1, 0, 0, 0, 8, 309, 1, 0, 0, 0, 8, 311, 1, 0, 0, 0, 8, 313, 1, 0, 0, 0, 8, 315, 1, 0, 0, 0, 8, 317, 1, 0, 0, 0, 8, 319, 1, 0, 0, 0, 8, 321, 1, 0, 0, 0, 8, 323, 1, 0, 0, 0, 9, 325, 1, 0, 0, 0, 9, 327, 1, 0, 0, 0, 9, 329, 1, 0, 0, 0, 9, 331, 1, 0, 0, 0, 9, 333, 1, 0, 0, 0, 10, 335, 1, 0, 0, 0, 10, 337, 1, 0, 0, 0, 10, 339, 1, 0, 0, 0, 10, 341, 1, 0, 0, 0, 10, 343, 1, 0, 0, 0, 10, 345, 1, 0, 0, 0, 11, 347, 1, 0, 0, 0, 11, 349, 1, 0, 0, 0, 11, 351, 1, 0, 0, 0, 11, 353, 1, 0, 0, 0, 11, 355, 1, 0, 0, 0, 11, 357, 1, 0, 0, 0, 11, 359, 1, 0, 0, 0, 11, 361, 1, 0, 0, 0, 11, 363, 1, 0, 0, 0, 11, 365, 1, 0, 0, 0, 12, 367, 1, 0, 0, 0, 12, 369, 1, 0, 0, 0, 12, 371, 1, 0, 0, 0, 12, 373, 1, 0, 0, 0, 12, 375, 1, 0, 0, 0, 12, 377, 1, 0, 0, 0, 12, 379, 1, 0, 0, 0, 13, 381, 1, 0, 0, 0, 13, 383, 1, 0, 0, 0, 13, 385, 1, 0, 0, 0, 13, 387, 1, 0, 0, 0, 13, 389, 1, 0, 0, 0, 13, 391, 1, 0, 0, 0, 14, 393, 1, 0, 0, 0, 14, 395, 1, 0, 0, 0, 14, 397, 1, 0, 0, 0, 14, 399, 1, 0, 0, 0, 14, 401, 1, 0, 0, 0, 14, 403, 1, 0, 0, 0, 14, 405, 1, 0, 0, 0, 14, 407, 1, 0, 0, 0, 14, 409, 1, 0, 0, 0, 15, 411, 1, 0, 0, 0, 17, 421, 1, 0, 0, 0, 19, 428, 1, 0, 0, 0, 21, 437, 1, 0, 0, 0, 23, 444, 1, 0, 0, 0, 25, 454, 1, 0, 0, 0, 27, 461, 1, 0, 0, 0, 29, 468, 1, 0, 0, 0, 31, 475, 1, 0, 0, 0, 33, 483, 1, 0, 0, 0, 35, 495, 1, 0, 0, 0, 37, 504, 1, 0, 0, 0, 39, 510, 1, 0, 0, 0, 41, 517, 1, 0, 0, 0, 43, 524, 1, 0, 0, 0, 45, 532, 1, 0, 0, 0, 47, 540, 1, 0, 0, 0, 49, 555, 1, 0, 0, 0, 51, 565, 1, 0, 0, 0, 53, 577, 1, 0, 0, 0, 55, 583, 1, 0, 0, 0, 57, 600, 1, 0, 0, 0, 59, 616, 1, 0, 0, 0, 61, 622, 1, 0, 0, 0, 63, 626, 1, 0, 0, 0, 65, 628, 1, 0, 0, 0, 67, 630, 1, 0, 0, 0, 69, 633, 1, 0, 0, 0, 71, 635, 1, 0, 0, 0, 73, 
644, 1, 0, 0, 0, 75, 646, 1, 0, 0, 0, 77, 651, 1, 0, 0, 0, 79, 653, 1, 0, 0, 0, 81, 658, 1, 0, 0, 0, 83, 689, 1, 0, 0, 0, 85, 692, 1, 0, 0, 0, 87, 738, 1, 0, 0, 0, 89, 740, 1, 0, 0, 0, 91, 743, 1, 0, 0, 0, 93, 747, 1, 0, 0, 0, 95, 751, 1, 0, 0, 0, 97, 753, 1, 0, 0, 0, 99, 756, 1, 0, 0, 0, 101, 758, 1, 0, 0, 0, 103, 763, 1, 0, 0, 0, 105, 765, 1, 0, 0, 0, 107, 771, 1, 0, 0, 0, 109, 777, 1, 0, 0, 0, 111, 780, 1, 0, 0, 0, 113, 783, 1, 0, 0, 0, 115, 788, 1, 0, 0, 0, 117, 793, 1, 0, 0, 0, 119, 795, 1, 0, 0, 0, 121, 799, 1, 0, 0, 0, 123, 804, 1, 0, 0, 0, 125, 810, 1, 0, 0, 0, 127, 813, 1, 0, 0, 0, 129, 815, 1, 0, 0, 0, 131, 821, 1, 0, 0, 0, 133, 823, 1, 0, 0, 0, 135, 828, 1, 0, 0, 0, 137, 831, 1, 0, 0, 0, 139, 834, 1, 0, 0, 0, 141, 837, 1, 0, 0, 0, 143, 839, 1, 0, 0, 0, 145, 842, 1, 0, 0, 0, 147, 844, 1, 0, 0, 0, 149, 847, 1, 0, 0, 0, 151, 849, 1, 0, 0, 0, 153, 851, 1, 0, 0, 0, 155, 853, 1, 0, 0, 0, 157, 855, 1, 0, 0, 0, 159, 857, 1, 0, 0, 0, 161, 863, 1, 0, 0, 0, 163, 884, 1, 0, 0, 0, 165, 886, 1, 0, 0, 0, 167, 891, 1, 0, 0, 0, 169, 912, 1, 0, 0, 0, 171, 914, 1, 0, 0, 0, 173, 922, 1, 0, 0, 0, 175, 924, 1, 0, 0, 0, 177, 928, 1, 0, 0, 0, 179, 932, 1, 0, 0, 0, 181, 936, 1, 0, 0, 0, 183, 941, 1, 0, 0, 0, 185, 946, 1, 0, 0, 0, 187, 950, 1, 0, 0, 0, 189, 954, 1, 0, 0, 0, 191, 958, 1, 0, 0, 0, 193, 963, 1, 0, 0, 0, 195, 967, 1, 0, 0, 0, 197, 971, 1, 0, 0, 0, 199, 975, 1, 0, 0, 0, 201, 979, 1, 0, 0, 0, 203, 983, 1, 0, 0, 0, 205, 995, 1, 0, 0, 0, 207, 998, 1, 0, 0, 0, 209, 1002, 1, 0, 0, 0, 211, 1006, 1, 0, 0, 0, 213, 1010, 1, 0, 0, 0, 215, 1014, 1, 0, 0, 0, 217, 1018, 1, 0, 0, 0, 219, 1022, 1, 0, 0, 0, 221, 1027, 1, 0, 0, 0, 223, 1031, 1, 0, 0, 0, 225, 1035, 1, 0, 0, 0, 227, 1040, 1, 0, 0, 0, 229, 1049, 1, 0, 0, 0, 231, 1070, 1, 0, 0, 0, 233, 1074, 1, 0, 0, 0, 235, 1078, 1, 0, 0, 0, 237, 1082, 1, 0, 0, 0, 239, 1086, 1, 0, 0, 0, 241, 1090, 1, 0, 0, 0, 243, 1095, 1, 0, 0, 0, 245, 1099, 1, 0, 0, 0, 247, 1103, 1, 0, 0, 0, 249, 1107, 1, 0, 0, 0, 251, 1112, 1, 0, 0, 0, 253, 1117, 1, 
0, 0, 0, 255, 1120, 1, 0, 0, 0, 257, 1124, 1, 0, 0, 0, 259, 1128, 1, 0, 0, 0, 261, 1132, 1, 0, 0, 0, 263, 1136, 1, 0, 0, 0, 265, 1141, 1, 0, 0, 0, 267, 1146, 1, 0, 0, 0, 269, 1151, 1, 0, 0, 0, 271, 1158, 1, 0, 0, 0, 273, 1167, 1, 0, 0, 0, 275, 1174, 1, 0, 0, 0, 277, 1178, 1, 0, 0, 0, 279, 1182, 1, 0, 0, 0, 281, 1186, 1, 0, 0, 0, 283, 1190, 1, 0, 0, 0, 285, 1196, 1, 0, 0, 0, 287, 1200, 1, 0, 0, 0, 289, 1204, 1, 0, 0, 0, 291, 1208, 1, 0, 0, 0, 293, 1212, 1, 0, 0, 0, 295, 1216, 1, 0, 0, 0, 297, 1220, 1, 0, 0, 0, 299, 1225, 1, 0, 0, 0, 301, 1230, 1, 0, 0, 0, 303, 1234, 1, 0, 0, 0, 305, 1238, 1, 0, 0, 0, 307, 1242, 1, 0, 0, 0, 309, 1247, 1, 0, 0, 0, 311, 1251, 1, 0, 0, 0, 313, 1256, 1, 0, 0, 0, 315, 1261, 1, 0, 0, 0, 317, 1265, 1, 0, 0, 0, 319, 1269, 1, 0, 0, 0, 321, 1273, 1, 0, 0, 0, 323, 1277, 1, 0, 0, 0, 325, 1281, 1, 0, 0, 0, 327, 1286, 1, 0, 0, 0, 329, 1291, 1, 0, 0, 0, 331, 1295, 1, 0, 0, 0, 333, 1299, 1, 0, 0, 0, 335, 1303, 1, 0, 0, 0, 337, 1308, 1, 0, 0, 0, 339, 1315, 1, 0, 0, 0, 341, 1319, 1, 0, 0, 0, 343, 1323, 1, 0, 0, 0, 345, 1327, 1, 0, 0, 0, 347, 1331, 1, 0, 0, 0, 349, 1336, 1, 0, 0, 0, 351, 1340, 1, 0, 0, 0, 353, 1344, 1, 0, 0, 0, 355, 1348, 1, 0, 0, 0, 357, 1353, 1, 0, 0, 0, 359, 1357, 1, 0, 0, 0, 361, 1361, 1, 0, 0, 0, 363, 1365, 1, 0, 0, 0, 365, 1369, 1, 0, 0, 0, 367, 1373, 1, 0, 0, 0, 369, 1379, 1, 0, 0, 0, 371, 1383, 1, 0, 0, 0, 373, 1387, 1, 0, 0, 0, 375, 1391, 1, 0, 0, 0, 377, 1395, 1, 0, 0, 0, 379, 1399, 1, 0, 0, 0, 381, 1403, 1, 0, 0, 0, 383, 1408, 1, 0, 0, 0, 385, 1414, 1, 0, 0, 0, 387, 1420, 1, 0, 0, 0, 389, 1424, 1, 0, 0, 0, 391, 1428, 1, 0, 0, 0, 393, 1432, 1, 0, 0, 0, 395, 1438, 1, 0, 0, 0, 397, 1444, 1, 0, 0, 0, 399, 1448, 1, 0, 0, 0, 401, 1452, 1, 0, 0, 0, 403, 1456, 1, 0, 0, 0, 405, 1462, 1, 0, 0, 0, 407, 1468, 1, 0, 0, 0, 409, 1474, 1, 0, 0, 0, 411, 412, 7, 0, 0, 0, 412, 413, 7, 1, 0, 0, 413, 414, 7, 2, 0, 0, 414, 415, 7, 2, 0, 0, 415, 416, 7, 3, 0, 0, 416, 417, 7, 4, 0, 0, 417, 418, 7, 5, 0, 0, 418, 419, 1, 0, 0, 0, 419, 420, 6, 0, 0, 
0, 420, 16, 1, 0, 0, 0, 421, 422, 7, 0, 0, 0, 422, 423, 7, 6, 0, 0, 423, 424, 7, 7, 0, 0, 424, 425, 7, 8, 0, 0, 425, 426, 1, 0, 0, 0, 426, 427, 6, 1, 1, 0, 427, 18, 1, 0, 0, 0, 428, 429, 7, 3, 0, 0, 429, 430, 7, 9, 0, 0, 430, 431, 7, 6, 0, 0, 431, 432, 7, 1, 0, 0, 432, 433, 7, 4, 0, 0, 433, 434, 7, 10, 0, 0, 434, 435, 1, 0, 0, 0, 435, 436, 6, 2, 2, 0, 436, 20, 1, 0, 0, 0, 437, 438, 7, 3, 0, 0, 438, 439, 7, 11, 0, 0, 439, 440, 7, 12, 0, 0, 440, 441, 7, 13, 0, 0, 441, 442, 1, 0, 0, 0, 442, 443, 6, 3, 0, 0, 443, 22, 1, 0, 0, 0, 444, 445, 7, 3, 0, 0, 445, 446, 7, 14, 0, 0, 446, 447, 7, 8, 0, 0, 447, 448, 7, 13, 0, 0, 448, 449, 7, 12, 0, 0, 449, 450, 7, 1, 0, 0, 450, 451, 7, 9, 0, 0, 451, 452, 1, 0, 0, 0, 452, 453, 6, 4, 3, 0, 453, 24, 1, 0, 0, 0, 454, 455, 7, 15, 0, 0, 455, 456, 7, 6, 0, 0, 456, 457, 7, 7, 0, 0, 457, 458, 7, 16, 0, 0, 458, 459, 1, 0, 0, 0, 459, 460, 6, 5, 4, 0, 460, 26, 1, 0, 0, 0, 461, 462, 7, 17, 0, 0, 462, 463, 7, 6, 0, 0, 463, 464, 7, 7, 0, 0, 464, 465, 7, 18, 0, 0, 465, 466, 1, 0, 0, 0, 466, 467, 6, 6, 0, 0, 467, 28, 1, 0, 0, 0, 468, 469, 7, 18, 0, 0, 469, 470, 7, 3, 0, 0, 470, 471, 7, 3, 0, 0, 471, 472, 7, 8, 0, 0, 472, 473, 1, 0, 0, 0, 473, 474, 6, 7, 1, 0, 474, 30, 1, 0, 0, 0, 475, 476, 7, 13, 0, 0, 476, 477, 7, 1, 0, 0, 477, 478, 7, 16, 0, 0, 478, 479, 7, 1, 0, 0, 479, 480, 7, 5, 0, 0, 480, 481, 1, 0, 0, 0, 481, 482, 6, 8, 0, 0, 482, 32, 1, 0, 0, 0, 483, 484, 7, 16, 0, 0, 484, 485, 7, 11, 0, 0, 485, 486, 5, 95, 0, 0, 486, 487, 7, 3, 0, 0, 487, 488, 7, 14, 0, 0, 488, 489, 7, 8, 0, 0, 489, 490, 7, 12, 0, 0, 490, 491, 7, 9, 0, 0, 491, 492, 7, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 494, 6, 9, 5, 0, 494, 34, 1, 0, 0, 0, 495, 496, 7, 6, 0, 0, 496, 497, 7, 3, 0, 0, 497, 498, 7, 9, 0, 0, 498, 499, 7, 12, 0, 0, 499, 500, 7, 16, 0, 0, 500, 501, 7, 3, 0, 0, 501, 502, 1, 0, 0, 0, 502, 503, 6, 10, 6, 0, 503, 36, 1, 0, 0, 0, 504, 505, 7, 6, 0, 0, 505, 506, 7, 7, 0, 0, 506, 507, 7, 19, 0, 0, 507, 508, 1, 0, 0, 0, 508, 509, 6, 11, 0, 0, 509, 38, 1, 0, 0, 0, 510, 
511, 7, 2, 0, 0, 511, 512, 7, 10, 0, 0, 512, 513, 7, 7, 0, 0, 513, 514, 7, 19, 0, 0, 514, 515, 1, 0, 0, 0, 515, 516, 6, 12, 7, 0, 516, 40, 1, 0, 0, 0, 517, 518, 7, 2, 0, 0, 518, 519, 7, 7, 0, 0, 519, 520, 7, 6, 0, 0, 520, 521, 7, 5, 0, 0, 521, 522, 1, 0, 0, 0, 522, 523, 6, 13, 0, 0, 523, 42, 1, 0, 0, 0, 524, 525, 7, 2, 0, 0, 525, 526, 7, 5, 0, 0, 526, 527, 7, 12, 0, 0, 527, 528, 7, 5, 0, 0, 528, 529, 7, 2, 0, 0, 529, 530, 1, 0, 0, 0, 530, 531, 6, 14, 0, 0, 531, 44, 1, 0, 0, 0, 532, 533, 7, 19, 0, 0, 533, 534, 7, 10, 0, 0, 534, 535, 7, 3, 0, 0, 535, 536, 7, 6, 0, 0, 536, 537, 7, 3, 0, 0, 537, 538, 1, 0, 0, 0, 538, 539, 6, 15, 0, 0, 539, 46, 1, 0, 0, 0, 540, 541, 4, 16, 0, 0, 541, 542, 7, 1, 0, 0, 542, 543, 7, 9, 0, 0, 543, 544, 7, 13, 0, 0, 544, 545, 7, 1, 0, 0, 545, 546, 7, 9, 0, 0, 546, 547, 7, 3, 0, 0, 547, 548, 7, 2, 0, 0, 548, 549, 7, 5, 0, 0, 549, 550, 7, 12, 0, 0, 550, 551, 7, 5, 0, 0, 551, 552, 7, 2, 0, 0, 552, 553, 1, 0, 0, 0, 553, 554, 6, 16, 0, 0, 554, 48, 1, 0, 0, 0, 555, 556, 4, 17, 1, 0, 556, 557, 7, 13, 0, 0, 557, 558, 7, 7, 0, 0, 558, 559, 7, 7, 0, 0, 559, 560, 7, 18, 0, 0, 560, 561, 7, 20, 0, 0, 561, 562, 7, 8, 0, 0, 562, 563, 1, 0, 0, 0, 563, 564, 6, 17, 8, 0, 564, 50, 1, 0, 0, 0, 565, 566, 4, 18, 2, 0, 566, 567, 7, 16, 0, 0, 567, 568, 7, 3, 0, 0, 568, 569, 7, 5, 0, 0, 569, 570, 7, 6, 0, 0, 570, 571, 7, 1, 0, 0, 571, 572, 7, 4, 0, 0, 572, 573, 7, 2, 0, 0, 573, 574, 1, 0, 0, 0, 574, 575, 6, 18, 9, 0, 575, 52, 1, 0, 0, 0, 576, 578, 8, 21, 0, 0, 577, 576, 1, 0, 0, 0, 578, 579, 1, 0, 0, 0, 579, 577, 1, 0, 0, 0, 579, 580, 1, 0, 0, 0, 580, 581, 1, 0, 0, 0, 581, 582, 6, 19, 0, 0, 582, 54, 1, 0, 0, 0, 583, 584, 5, 47, 0, 0, 584, 585, 5, 47, 0, 0, 585, 589, 1, 0, 0, 0, 586, 588, 8, 22, 0, 0, 587, 586, 1, 0, 0, 0, 588, 591, 1, 0, 0, 0, 589, 587, 1, 0, 0, 0, 589, 590, 1, 0, 0, 0, 590, 593, 1, 0, 0, 0, 591, 589, 1, 0, 0, 0, 592, 594, 5, 13, 0, 0, 593, 592, 1, 0, 0, 0, 593, 594, 1, 0, 0, 0, 594, 596, 1, 0, 0, 0, 595, 597, 5, 10, 0, 0, 596, 595, 1, 0, 0, 0, 596, 
597, 1, 0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 599, 6, 20, 10, 0, 599, 56, 1, 0, 0, 0, 600, 601, 5, 47, 0, 0, 601, 602, 5, 42, 0, 0, 602, 607, 1, 0, 0, 0, 603, 606, 3, 57, 21, 0, 604, 606, 9, 0, 0, 0, 605, 603, 1, 0, 0, 0, 605, 604, 1, 0, 0, 0, 606, 609, 1, 0, 0, 0, 607, 608, 1, 0, 0, 0, 607, 605, 1, 0, 0, 0, 608, 610, 1, 0, 0, 0, 609, 607, 1, 0, 0, 0, 610, 611, 5, 42, 0, 0, 611, 612, 5, 47, 0, 0, 612, 613, 1, 0, 0, 0, 613, 614, 6, 21, 10, 0, 614, 58, 1, 0, 0, 0, 615, 617, 7, 23, 0, 0, 616, 615, 1, 0, 0, 0, 617, 618, 1, 0, 0, 0, 618, 616, 1, 0, 0, 0, 618, 619, 1, 0, 0, 0, 619, 620, 1, 0, 0, 0, 620, 621, 6, 22, 10, 0, 621, 60, 1, 0, 0, 0, 622, 623, 5, 124, 0, 0, 623, 624, 1, 0, 0, 0, 624, 625, 6, 23, 11, 0, 625, 62, 1, 0, 0, 0, 626, 627, 7, 24, 0, 0, 627, 64, 1, 0, 0, 0, 628, 629, 7, 25, 0, 0, 629, 66, 1, 0, 0, 0, 630, 631, 5, 92, 0, 0, 631, 632, 7, 26, 0, 0, 632, 68, 1, 0, 0, 0, 633, 634, 8, 27, 0, 0, 634, 70, 1, 0, 0, 0, 635, 637, 7, 3, 0, 0, 636, 638, 7, 28, 0, 0, 637, 636, 1, 0, 0, 0, 637, 638, 1, 0, 0, 0, 638, 640, 1, 0, 0, 0, 639, 641, 3, 63, 24, 0, 640, 639, 1, 0, 0, 0, 641, 642, 1, 0, 0, 0, 642, 640, 1, 0, 0, 0, 642, 643, 1, 0, 0, 0, 643, 72, 1, 0, 0, 0, 644, 645, 5, 64, 0, 0, 645, 74, 1, 0, 0, 0, 646, 647, 5, 96, 0, 0, 647, 76, 1, 0, 0, 0, 648, 652, 8, 29, 0, 0, 649, 650, 5, 96, 0, 0, 650, 652, 5, 96, 0, 0, 651, 648, 1, 0, 0, 0, 651, 649, 1, 0, 0, 0, 652, 78, 1, 0, 0, 0, 653, 654, 5, 95, 0, 0, 654, 80, 1, 0, 0, 0, 655, 659, 3, 65, 25, 0, 656, 659, 3, 63, 24, 0, 657, 659, 3, 79, 32, 0, 658, 655, 1, 0, 0, 0, 658, 656, 1, 0, 0, 0, 658, 657, 1, 0, 0, 0, 659, 82, 1, 0, 0, 0, 660, 665, 5, 34, 0, 0, 661, 664, 3, 67, 26, 0, 662, 664, 3, 69, 27, 0, 663, 661, 1, 0, 0, 0, 663, 662, 1, 0, 0, 0, 664, 667, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 665, 666, 1, 0, 0, 0, 666, 668, 1, 0, 0, 0, 667, 665, 1, 0, 0, 0, 668, 690, 5, 34, 0, 0, 669, 670, 5, 34, 0, 0, 670, 671, 5, 34, 0, 0, 671, 672, 5, 34, 0, 0, 672, 676, 1, 0, 0, 0, 673, 675, 8, 22, 0, 0, 674, 673, 1, 0, 0, 0, 675, 678, 1, 
0, 0, 0, 676, 677, 1, 0, 0, 0, 676, 674, 1, 0, 0, 0, 677, 679, 1, 0, 0, 0, 678, 676, 1, 0, 0, 0, 679, 680, 5, 34, 0, 0, 680, 681, 5, 34, 0, 0, 681, 682, 5, 34, 0, 0, 682, 684, 1, 0, 0, 0, 683, 685, 5, 34, 0, 0, 684, 683, 1, 0, 0, 0, 684, 685, 1, 0, 0, 0, 685, 687, 1, 0, 0, 0, 686, 688, 5, 34, 0, 0, 687, 686, 1, 0, 0, 0, 687, 688, 1, 0, 0, 0, 688, 690, 1, 0, 0, 0, 689, 660, 1, 0, 0, 0, 689, 669, 1, 0, 0, 0, 690, 84, 1, 0, 0, 0, 691, 693, 3, 63, 24, 0, 692, 691, 1, 0, 0, 0, 693, 694, 1, 0, 0, 0, 694, 692, 1, 0, 0, 0, 694, 695, 1, 0, 0, 0, 695, 86, 1, 0, 0, 0, 696, 698, 3, 63, 24, 0, 697, 696, 1, 0, 0, 0, 698, 699, 1, 0, 0, 0, 699, 697, 1, 0, 0, 0, 699, 700, 1, 0, 0, 0, 700, 701, 1, 0, 0, 0, 701, 705, 3, 103, 44, 0, 702, 704, 3, 63, 24, 0, 703, 702, 1, 0, 0, 0, 704, 707, 1, 0, 0, 0, 705, 703, 1, 0, 0, 0, 705, 706, 1, 0, 0, 0, 706, 739, 1, 0, 0, 0, 707, 705, 1, 0, 0, 0, 708, 710, 3, 103, 44, 0, 709, 711, 3, 63, 24, 0, 710, 709, 1, 0, 0, 0, 711, 712, 1, 0, 0, 0, 712, 710, 1, 0, 0, 0, 712, 713, 1, 0, 0, 0, 713, 739, 1, 0, 0, 0, 714, 716, 3, 63, 24, 0, 715, 714, 1, 0, 0, 0, 716, 717, 1, 0, 0, 0, 717, 715, 1, 0, 0, 0, 717, 718, 1, 0, 0, 0, 718, 726, 1, 0, 0, 0, 719, 723, 3, 103, 44, 0, 720, 722, 3, 63, 24, 0, 721, 720, 1, 0, 0, 0, 722, 725, 1, 0, 0, 0, 723, 721, 1, 0, 0, 0, 723, 724, 1, 0, 0, 0, 724, 727, 1, 0, 0, 0, 725, 723, 1, 0, 0, 0, 726, 719, 1, 0, 0, 0, 726, 727, 1, 0, 0, 0, 727, 728, 1, 0, 0, 0, 728, 729, 3, 71, 28, 0, 729, 739, 1, 0, 0, 0, 730, 732, 3, 103, 44, 0, 731, 733, 3, 63, 24, 0, 732, 731, 1, 0, 0, 0, 733, 734, 1, 0, 0, 0, 734, 732, 1, 0, 0, 0, 734, 735, 1, 0, 0, 0, 735, 736, 1, 0, 0, 0, 736, 737, 3, 71, 28, 0, 737, 739, 1, 0, 0, 0, 738, 697, 1, 0, 0, 0, 738, 708, 1, 0, 0, 0, 738, 715, 1, 0, 0, 0, 738, 730, 1, 0, 0, 0, 739, 88, 1, 0, 0, 0, 740, 741, 7, 30, 0, 0, 741, 742, 7, 31, 0, 0, 742, 90, 1, 0, 0, 0, 743, 744, 7, 12, 0, 0, 744, 745, 7, 9, 0, 0, 745, 746, 7, 0, 0, 0, 746, 92, 1, 0, 0, 0, 747, 748, 7, 12, 0, 0, 748, 749, 7, 2, 0, 0, 749, 750, 7, 4, 0, 
0, 750, 94, 1, 0, 0, 0, 751, 752, 5, 61, 0, 0, 752, 96, 1, 0, 0, 0, 753, 754, 5, 58, 0, 0, 754, 755, 5, 58, 0, 0, 755, 98, 1, 0, 0, 0, 756, 757, 5, 44, 0, 0, 757, 100, 1, 0, 0, 0, 758, 759, 7, 0, 0, 0, 759, 760, 7, 3, 0, 0, 760, 761, 7, 2, 0, 0, 761, 762, 7, 4, 0, 0, 762, 102, 1, 0, 0, 0, 763, 764, 5, 46, 0, 0, 764, 104, 1, 0, 0, 0, 765, 766, 7, 15, 0, 0, 766, 767, 7, 12, 0, 0, 767, 768, 7, 13, 0, 0, 768, 769, 7, 2, 0, 0, 769, 770, 7, 3, 0, 0, 770, 106, 1, 0, 0, 0, 771, 772, 7, 15, 0, 0, 772, 773, 7, 1, 0, 0, 773, 774, 7, 6, 0, 0, 774, 775, 7, 2, 0, 0, 775, 776, 7, 5, 0, 0, 776, 108, 1, 0, 0, 0, 777, 778, 7, 1, 0, 0, 778, 779, 7, 9, 0, 0, 779, 110, 1, 0, 0, 0, 780, 781, 7, 1, 0, 0, 781, 782, 7, 2, 0, 0, 782, 112, 1, 0, 0, 0, 783, 784, 7, 13, 0, 0, 784, 785, 7, 12, 0, 0, 785, 786, 7, 2, 0, 0, 786, 787, 7, 5, 0, 0, 787, 114, 1, 0, 0, 0, 788, 789, 7, 13, 0, 0, 789, 790, 7, 1, 0, 0, 790, 791, 7, 18, 0, 0, 791, 792, 7, 3, 0, 0, 792, 116, 1, 0, 0, 0, 793, 794, 5, 40, 0, 0, 794, 118, 1, 0, 0, 0, 795, 796, 7, 9, 0, 0, 796, 797, 7, 7, 0, 0, 797, 798, 7, 5, 0, 0, 798, 120, 1, 0, 0, 0, 799, 800, 7, 9, 0, 0, 800, 801, 7, 20, 0, 0, 801, 802, 7, 13, 0, 0, 802, 803, 7, 13, 0, 0, 803, 122, 1, 0, 0, 0, 804, 805, 7, 9, 0, 0, 805, 806, 7, 20, 0, 0, 806, 807, 7, 13, 0, 0, 807, 808, 7, 13, 0, 0, 808, 809, 7, 2, 0, 0, 809, 124, 1, 0, 0, 0, 810, 811, 7, 7, 0, 0, 811, 812, 7, 6, 0, 0, 812, 126, 1, 0, 0, 0, 813, 814, 5, 63, 0, 0, 814, 128, 1, 0, 0, 0, 815, 816, 7, 6, 0, 0, 816, 817, 7, 13, 0, 0, 817, 818, 7, 1, 0, 0, 818, 819, 7, 18, 0, 0, 819, 820, 7, 3, 0, 0, 820, 130, 1, 0, 0, 0, 821, 822, 5, 41, 0, 0, 822, 132, 1, 0, 0, 0, 823, 824, 7, 5, 0, 0, 824, 825, 7, 6, 0, 0, 825, 826, 7, 20, 0, 0, 826, 827, 7, 3, 0, 0, 827, 134, 1, 0, 0, 0, 828, 829, 5, 61, 0, 0, 829, 830, 5, 61, 0, 0, 830, 136, 1, 0, 0, 0, 831, 832, 5, 61, 0, 0, 832, 833, 5, 126, 0, 0, 833, 138, 1, 0, 0, 0, 834, 835, 5, 33, 0, 0, 835, 836, 5, 61, 0, 0, 836, 140, 1, 0, 0, 0, 837, 838, 5, 60, 0, 0, 838, 142, 1, 0, 0, 0, 839, 
840, 5, 60, 0, 0, 840, 841, 5, 61, 0, 0, 841, 144, 1, 0, 0, 0, 842, 843, 5, 62, 0, 0, 843, 146, 1, 0, 0, 0, 844, 845, 5, 62, 0, 0, 845, 846, 5, 61, 0, 0, 846, 148, 1, 0, 0, 0, 847, 848, 5, 43, 0, 0, 848, 150, 1, 0, 0, 0, 849, 850, 5, 45, 0, 0, 850, 152, 1, 0, 0, 0, 851, 852, 5, 42, 0, 0, 852, 154, 1, 0, 0, 0, 853, 854, 5, 47, 0, 0, 854, 156, 1, 0, 0, 0, 855, 856, 5, 37, 0, 0, 856, 158, 1, 0, 0, 0, 857, 858, 7, 16, 0, 0, 858, 859, 7, 12, 0, 0, 859, 860, 7, 5, 0, 0, 860, 861, 7, 4, 0, 0, 861, 862, 7, 10, 0, 0, 862, 160, 1, 0, 0, 0, 863, 864, 3, 45, 15, 0, 864, 865, 1, 0, 0, 0, 865, 866, 6, 73, 12, 0, 866, 162, 1, 0, 0, 0, 867, 870, 3, 127, 56, 0, 868, 871, 3, 65, 25, 0, 869, 871, 3, 79, 32, 0, 870, 868, 1, 0, 0, 0, 870, 869, 1, 0, 0, 0, 871, 875, 1, 0, 0, 0, 872, 874, 3, 81, 33, 0, 873, 872, 1, 0, 0, 0, 874, 877, 1, 0, 0, 0, 875, 873, 1, 0, 0, 0, 875, 876, 1, 0, 0, 0, 876, 885, 1, 0, 0, 0, 877, 875, 1, 0, 0, 0, 878, 880, 3, 127, 56, 0, 879, 881, 3, 63, 24, 0, 880, 879, 1, 0, 0, 0, 881, 882, 1, 0, 0, 0, 882, 880, 1, 0, 0, 0, 882, 883, 1, 0, 0, 0, 883, 885, 1, 0, 0, 0, 884, 867, 1, 0, 0, 0, 884, 878, 1, 0, 0, 0, 885, 164, 1, 0, 0, 0, 886, 887, 5, 91, 0, 0, 887, 888, 1, 0, 0, 0, 888, 889, 6, 75, 0, 0, 889, 890, 6, 75, 0, 0, 890, 166, 1, 0, 0, 0, 891, 892, 5, 93, 0, 0, 892, 893, 1, 0, 0, 0, 893, 894, 6, 76, 11, 0, 894, 895, 6, 76, 11, 0, 895, 168, 1, 0, 0, 0, 896, 900, 3, 65, 25, 0, 897, 899, 3, 81, 33, 0, 898, 897, 1, 0, 0, 0, 899, 902, 1, 0, 0, 0, 900, 898, 1, 0, 0, 0, 900, 901, 1, 0, 0, 0, 901, 913, 1, 0, 0, 0, 902, 900, 1, 0, 0, 0, 903, 906, 3, 79, 32, 0, 904, 906, 3, 73, 29, 0, 905, 903, 1, 0, 0, 0, 905, 904, 1, 0, 0, 0, 906, 908, 1, 0, 0, 0, 907, 909, 3, 81, 33, 0, 908, 907, 1, 0, 0, 0, 909, 910, 1, 0, 0, 0, 910, 908, 1, 0, 0, 0, 910, 911, 1, 0, 0, 0, 911, 913, 1, 0, 0, 0, 912, 896, 1, 0, 0, 0, 912, 905, 1, 0, 0, 0, 913, 170, 1, 0, 0, 0, 914, 916, 3, 75, 30, 0, 915, 917, 3, 77, 31, 0, 916, 915, 1, 0, 0, 0, 917, 918, 1, 0, 0, 0, 918, 916, 1, 0, 0, 0, 918, 919, 1, 0, 
0, 0, 919, 920, 1, 0, 0, 0, 920, 921, 3, 75, 30, 0, 921, 172, 1, 0, 0, 0, 922, 923, 3, 171, 78, 0, 923, 174, 1, 0, 0, 0, 924, 925, 3, 55, 20, 0, 925, 926, 1, 0, 0, 0, 926, 927, 6, 80, 10, 0, 927, 176, 1, 0, 0, 0, 928, 929, 3, 57, 21, 0, 929, 930, 1, 0, 0, 0, 930, 931, 6, 81, 10, 0, 931, 178, 1, 0, 0, 0, 932, 933, 3, 59, 22, 0, 933, 934, 1, 0, 0, 0, 934, 935, 6, 82, 10, 0, 935, 180, 1, 0, 0, 0, 936, 937, 3, 165, 75, 0, 937, 938, 1, 0, 0, 0, 938, 939, 6, 83, 13, 0, 939, 940, 6, 83, 14, 0, 940, 182, 1, 0, 0, 0, 941, 942, 3, 61, 23, 0, 942, 943, 1, 0, 0, 0, 943, 944, 6, 84, 15, 0, 944, 945, 6, 84, 11, 0, 945, 184, 1, 0, 0, 0, 946, 947, 3, 59, 22, 0, 947, 948, 1, 0, 0, 0, 948, 949, 6, 85, 10, 0, 949, 186, 1, 0, 0, 0, 950, 951, 3, 55, 20, 0, 951, 952, 1, 0, 0, 0, 952, 953, 6, 86, 10, 0, 953, 188, 1, 0, 0, 0, 954, 955, 3, 57, 21, 0, 955, 956, 1, 0, 0, 0, 956, 957, 6, 87, 10, 0, 957, 190, 1, 0, 0, 0, 958, 959, 3, 61, 23, 0, 959, 960, 1, 0, 0, 0, 960, 961, 6, 88, 15, 0, 961, 962, 6, 88, 11, 0, 962, 192, 1, 0, 0, 0, 963, 964, 3, 165, 75, 0, 964, 965, 1, 0, 0, 0, 965, 966, 6, 89, 13, 0, 966, 194, 1, 0, 0, 0, 967, 968, 3, 167, 76, 0, 968, 969, 1, 0, 0, 0, 969, 970, 6, 90, 16, 0, 970, 196, 1, 0, 0, 0, 971, 972, 3, 337, 161, 0, 972, 973, 1, 0, 0, 0, 973, 974, 6, 91, 17, 0, 974, 198, 1, 0, 0, 0, 975, 976, 3, 99, 42, 0, 976, 977, 1, 0, 0, 0, 977, 978, 6, 92, 18, 0, 978, 200, 1, 0, 0, 0, 979, 980, 3, 95, 40, 0, 980, 981, 1, 0, 0, 0, 981, 982, 6, 93, 19, 0, 982, 202, 1, 0, 0, 0, 983, 984, 7, 16, 0, 0, 984, 985, 7, 3, 0, 0, 985, 986, 7, 5, 0, 0, 986, 987, 7, 12, 0, 0, 987, 988, 7, 0, 0, 0, 988, 989, 7, 12, 0, 0, 989, 990, 7, 5, 0, 0, 990, 991, 7, 12, 0, 0, 991, 204, 1, 0, 0, 0, 992, 996, 8, 32, 0, 0, 993, 994, 5, 47, 0, 0, 994, 996, 8, 33, 0, 0, 995, 992, 1, 0, 0, 0, 995, 993, 1, 0, 0, 0, 996, 206, 1, 0, 0, 0, 997, 999, 3, 205, 95, 0, 998, 997, 1, 0, 0, 0, 999, 1000, 1, 0, 0, 0, 1000, 998, 1, 0, 0, 0, 1000, 1001, 1, 0, 0, 0, 1001, 208, 1, 0, 0, 0, 1002, 1003, 3, 207, 96, 0, 1003, 
1004, 1, 0, 0, 0, 1004, 1005, 6, 97, 20, 0, 1005, 210, 1, 0, 0, 0, 1006, 1007, 3, 83, 34, 0, 1007, 1008, 1, 0, 0, 0, 1008, 1009, 6, 98, 21, 0, 1009, 212, 1, 0, 0, 0, 1010, 1011, 3, 55, 20, 0, 1011, 1012, 1, 0, 0, 0, 1012, 1013, 6, 99, 10, 0, 1013, 214, 1, 0, 0, 0, 1014, 1015, 3, 57, 21, 0, 1015, 1016, 1, 0, 0, 0, 1016, 1017, 6, 100, 10, 0, 1017, 216, 1, 0, 0, 0, 1018, 1019, 3, 59, 22, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 6, 101, 10, 0, 1021, 218, 1, 0, 0, 0, 1022, 1023, 3, 61, 23, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1025, 6, 102, 15, 0, 1025, 1026, 6, 102, 11, 0, 1026, 220, 1, 0, 0, 0, 1027, 1028, 3, 103, 44, 0, 1028, 1029, 1, 0, 0, 0, 1029, 1030, 6, 103, 22, 0, 1030, 222, 1, 0, 0, 0, 1031, 1032, 3, 99, 42, 0, 1032, 1033, 1, 0, 0, 0, 1033, 1034, 6, 104, 18, 0, 1034, 224, 1, 0, 0, 0, 1035, 1036, 4, 105, 3, 0, 1036, 1037, 3, 127, 56, 0, 1037, 1038, 1, 0, 0, 0, 1038, 1039, 6, 105, 23, 0, 1039, 226, 1, 0, 0, 0, 1040, 1041, 4, 106, 4, 0, 1041, 1042, 3, 163, 74, 0, 1042, 1043, 1, 0, 0, 0, 1043, 1044, 6, 106, 24, 0, 1044, 228, 1, 0, 0, 0, 1045, 1050, 3, 65, 25, 0, 1046, 1050, 3, 63, 24, 0, 1047, 1050, 3, 79, 32, 0, 1048, 1050, 3, 153, 69, 0, 1049, 1045, 1, 0, 0, 0, 1049, 1046, 1, 0, 0, 0, 1049, 1047, 1, 0, 0, 0, 1049, 1048, 1, 0, 0, 0, 1050, 230, 1, 0, 0, 0, 1051, 1054, 3, 65, 25, 0, 1052, 1054, 3, 153, 69, 0, 1053, 1051, 1, 0, 0, 0, 1053, 1052, 1, 0, 0, 0, 1054, 1058, 1, 0, 0, 0, 1055, 1057, 3, 229, 107, 0, 1056, 1055, 1, 0, 0, 0, 1057, 1060, 1, 0, 0, 0, 1058, 1056, 1, 0, 0, 0, 1058, 1059, 1, 0, 0, 0, 1059, 1071, 1, 0, 0, 0, 1060, 1058, 1, 0, 0, 0, 1061, 1064, 3, 79, 32, 0, 1062, 1064, 3, 73, 29, 0, 1063, 1061, 1, 0, 0, 0, 1063, 1062, 1, 0, 0, 0, 1064, 1066, 1, 0, 0, 0, 1065, 1067, 3, 229, 107, 0, 1066, 1065, 1, 0, 0, 0, 1067, 1068, 1, 0, 0, 0, 1068, 1066, 1, 0, 0, 0, 1068, 1069, 1, 0, 0, 0, 1069, 1071, 1, 0, 0, 0, 1070, 1053, 1, 0, 0, 0, 1070, 1063, 1, 0, 0, 0, 1071, 232, 1, 0, 0, 0, 1072, 1075, 3, 231, 108, 0, 1073, 1075, 3, 171, 78, 0, 1074, 1072, 1, 0, 0, 0, 1074, 
1073, 1, 0, 0, 0, 1075, 1076, 1, 0, 0, 0, 1076, 1074, 1, 0, 0, 0, 1076, 1077, 1, 0, 0, 0, 1077, 234, 1, 0, 0, 0, 1078, 1079, 3, 55, 20, 0, 1079, 1080, 1, 0, 0, 0, 1080, 1081, 6, 110, 10, 0, 1081, 236, 1, 0, 0, 0, 1082, 1083, 3, 57, 21, 0, 1083, 1084, 1, 0, 0, 0, 1084, 1085, 6, 111, 10, 0, 1085, 238, 1, 0, 0, 0, 1086, 1087, 3, 59, 22, 0, 1087, 1088, 1, 0, 0, 0, 1088, 1089, 6, 112, 10, 0, 1089, 240, 1, 0, 0, 0, 1090, 1091, 3, 61, 23, 0, 1091, 1092, 1, 0, 0, 0, 1092, 1093, 6, 113, 15, 0, 1093, 1094, 6, 113, 11, 0, 1094, 242, 1, 0, 0, 0, 1095, 1096, 3, 95, 40, 0, 1096, 1097, 1, 0, 0, 0, 1097, 1098, 6, 114, 19, 0, 1098, 244, 1, 0, 0, 0, 1099, 1100, 3, 99, 42, 0, 1100, 1101, 1, 0, 0, 0, 1101, 1102, 6, 115, 18, 0, 1102, 246, 1, 0, 0, 0, 1103, 1104, 3, 103, 44, 0, 1104, 1105, 1, 0, 0, 0, 1105, 1106, 6, 116, 22, 0, 1106, 248, 1, 0, 0, 0, 1107, 1108, 4, 117, 5, 0, 1108, 1109, 3, 127, 56, 0, 1109, 1110, 1, 0, 0, 0, 1110, 1111, 6, 117, 23, 0, 1111, 250, 1, 0, 0, 0, 1112, 1113, 4, 118, 6, 0, 1113, 1114, 3, 163, 74, 0, 1114, 1115, 1, 0, 0, 0, 1115, 1116, 6, 118, 24, 0, 1116, 252, 1, 0, 0, 0, 1117, 1118, 7, 12, 0, 0, 1118, 1119, 7, 2, 0, 0, 1119, 254, 1, 0, 0, 0, 1120, 1121, 3, 233, 109, 0, 1121, 1122, 1, 0, 0, 0, 1122, 1123, 6, 120, 25, 0, 1123, 256, 1, 0, 0, 0, 1124, 1125, 3, 55, 20, 0, 1125, 1126, 1, 0, 0, 0, 1126, 1127, 6, 121, 10, 0, 1127, 258, 1, 0, 0, 0, 1128, 1129, 3, 57, 21, 0, 1129, 1130, 1, 0, 0, 0, 1130, 1131, 6, 122, 10, 0, 1131, 260, 1, 0, 0, 0, 1132, 1133, 3, 59, 22, 0, 1133, 1134, 1, 0, 0, 0, 1134, 1135, 6, 123, 10, 0, 1135, 262, 1, 0, 0, 0, 1136, 1137, 3, 61, 23, 0, 1137, 1138, 1, 0, 0, 0, 1138, 1139, 6, 124, 15, 0, 1139, 1140, 6, 124, 11, 0, 1140, 264, 1, 0, 0, 0, 1141, 1142, 3, 165, 75, 0, 1142, 1143, 1, 0, 0, 0, 1143, 1144, 6, 125, 13, 0, 1144, 1145, 6, 125, 26, 0, 1145, 266, 1, 0, 0, 0, 1146, 1147, 7, 7, 0, 0, 1147, 1148, 7, 9, 0, 0, 1148, 1149, 1, 0, 0, 0, 1149, 1150, 6, 126, 27, 0, 1150, 268, 1, 0, 0, 0, 1151, 1152, 7, 19, 0, 0, 1152, 1153, 7, 1, 0, 0, 
1153, 1154, 7, 5, 0, 0, 1154, 1155, 7, 10, 0, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1157, 6, 127, 27, 0, 1157, 270, 1, 0, 0, 0, 1158, 1159, 8, 34, 0, 0, 1159, 272, 1, 0, 0, 0, 1160, 1162, 3, 271, 128, 0, 1161, 1160, 1, 0, 0, 0, 1162, 1163, 1, 0, 0, 0, 1163, 1161, 1, 0, 0, 0, 1163, 1164, 1, 0, 0, 0, 1164, 1165, 1, 0, 0, 0, 1165, 1166, 3, 337, 161, 0, 1166, 1168, 1, 0, 0, 0, 1167, 1161, 1, 0, 0, 0, 1167, 1168, 1, 0, 0, 0, 1168, 1170, 1, 0, 0, 0, 1169, 1171, 3, 271, 128, 0, 1170, 1169, 1, 0, 0, 0, 1171, 1172, 1, 0, 0, 0, 1172, 1170, 1, 0, 0, 0, 1172, 1173, 1, 0, 0, 0, 1173, 274, 1, 0, 0, 0, 1174, 1175, 3, 273, 129, 0, 1175, 1176, 1, 0, 0, 0, 1176, 1177, 6, 130, 28, 0, 1177, 276, 1, 0, 0, 0, 1178, 1179, 3, 55, 20, 0, 1179, 1180, 1, 0, 0, 0, 1180, 1181, 6, 131, 10, 0, 1181, 278, 1, 0, 0, 0, 1182, 1183, 3, 57, 21, 0, 1183, 1184, 1, 0, 0, 0, 1184, 1185, 6, 132, 10, 0, 1185, 280, 1, 0, 0, 0, 1186, 1187, 3, 59, 22, 0, 1187, 1188, 1, 0, 0, 0, 1188, 1189, 6, 133, 10, 0, 1189, 282, 1, 0, 0, 0, 1190, 1191, 3, 61, 23, 0, 1191, 1192, 1, 0, 0, 0, 1192, 1193, 6, 134, 15, 0, 1193, 1194, 6, 134, 11, 0, 1194, 1195, 6, 134, 11, 0, 1195, 284, 1, 0, 0, 0, 1196, 1197, 3, 95, 40, 0, 1197, 1198, 1, 0, 0, 0, 1198, 1199, 6, 135, 19, 0, 1199, 286, 1, 0, 0, 0, 1200, 1201, 3, 99, 42, 0, 1201, 1202, 1, 0, 0, 0, 1202, 1203, 6, 136, 18, 0, 1203, 288, 1, 0, 0, 0, 1204, 1205, 3, 103, 44, 0, 1205, 1206, 1, 0, 0, 0, 1206, 1207, 6, 137, 22, 0, 1207, 290, 1, 0, 0, 0, 1208, 1209, 3, 269, 127, 0, 1209, 1210, 1, 0, 0, 0, 1210, 1211, 6, 138, 29, 0, 1211, 292, 1, 0, 0, 0, 1212, 1213, 3, 233, 109, 0, 1213, 1214, 1, 0, 0, 0, 1214, 1215, 6, 139, 25, 0, 1215, 294, 1, 0, 0, 0, 1216, 1217, 3, 173, 79, 0, 1217, 1218, 1, 0, 0, 0, 1218, 1219, 6, 140, 30, 0, 1219, 296, 1, 0, 0, 0, 1220, 1221, 4, 141, 7, 0, 1221, 1222, 3, 127, 56, 0, 1222, 1223, 1, 0, 0, 0, 1223, 1224, 6, 141, 23, 0, 1224, 298, 1, 0, 0, 0, 1225, 1226, 4, 142, 8, 0, 1226, 1227, 3, 163, 74, 0, 1227, 1228, 1, 0, 0, 0, 1228, 1229, 6, 142, 24, 0, 1229, 300, 1, 0, 
0, 0, 1230, 1231, 3, 55, 20, 0, 1231, 1232, 1, 0, 0, 0, 1232, 1233, 6, 143, 10, 0, 1233, 302, 1, 0, 0, 0, 1234, 1235, 3, 57, 21, 0, 1235, 1236, 1, 0, 0, 0, 1236, 1237, 6, 144, 10, 0, 1237, 304, 1, 0, 0, 0, 1238, 1239, 3, 59, 22, 0, 1239, 1240, 1, 0, 0, 0, 1240, 1241, 6, 145, 10, 0, 1241, 306, 1, 0, 0, 0, 1242, 1243, 3, 61, 23, 0, 1243, 1244, 1, 0, 0, 0, 1244, 1245, 6, 146, 15, 0, 1245, 1246, 6, 146, 11, 0, 1246, 308, 1, 0, 0, 0, 1247, 1248, 3, 103, 44, 0, 1248, 1249, 1, 0, 0, 0, 1249, 1250, 6, 147, 22, 0, 1250, 310, 1, 0, 0, 0, 1251, 1252, 4, 148, 9, 0, 1252, 1253, 3, 127, 56, 0, 1253, 1254, 1, 0, 0, 0, 1254, 1255, 6, 148, 23, 0, 1255, 312, 1, 0, 0, 0, 1256, 1257, 4, 149, 10, 0, 1257, 1258, 3, 163, 74, 0, 1258, 1259, 1, 0, 0, 0, 1259, 1260, 6, 149, 24, 0, 1260, 314, 1, 0, 0, 0, 1261, 1262, 3, 173, 79, 0, 1262, 1263, 1, 0, 0, 0, 1263, 1264, 6, 150, 30, 0, 1264, 316, 1, 0, 0, 0, 1265, 1266, 3, 169, 77, 0, 1266, 1267, 1, 0, 0, 0, 1267, 1268, 6, 151, 31, 0, 1268, 318, 1, 0, 0, 0, 1269, 1270, 3, 55, 20, 0, 1270, 1271, 1, 0, 0, 0, 1271, 1272, 6, 152, 10, 0, 1272, 320, 1, 0, 0, 0, 1273, 1274, 3, 57, 21, 0, 1274, 1275, 1, 0, 0, 0, 1275, 1276, 6, 153, 10, 0, 1276, 322, 1, 0, 0, 0, 1277, 1278, 3, 59, 22, 0, 1278, 1279, 1, 0, 0, 0, 1279, 1280, 6, 154, 10, 0, 1280, 324, 1, 0, 0, 0, 1281, 1282, 3, 61, 23, 0, 1282, 1283, 1, 0, 0, 0, 1283, 1284, 6, 155, 15, 0, 1284, 1285, 6, 155, 11, 0, 1285, 326, 1, 0, 0, 0, 1286, 1287, 7, 1, 0, 0, 1287, 1288, 7, 9, 0, 0, 1288, 1289, 7, 15, 0, 0, 1289, 1290, 7, 7, 0, 0, 1290, 328, 1, 0, 0, 0, 1291, 1292, 3, 55, 20, 0, 1292, 1293, 1, 0, 0, 0, 1293, 1294, 6, 157, 10, 0, 1294, 330, 1, 0, 0, 0, 1295, 1296, 3, 57, 21, 0, 1296, 1297, 1, 0, 0, 0, 1297, 1298, 6, 158, 10, 0, 1298, 332, 1, 0, 0, 0, 1299, 1300, 3, 59, 22, 0, 1300, 1301, 1, 0, 0, 0, 1301, 1302, 6, 159, 10, 0, 1302, 334, 1, 0, 0, 0, 1303, 1304, 3, 167, 76, 0, 1304, 1305, 1, 0, 0, 0, 1305, 1306, 6, 160, 16, 0, 1306, 1307, 6, 160, 11, 0, 1307, 336, 1, 0, 0, 0, 1308, 1309, 5, 58, 0, 0, 1309, 
338, 1, 0, 0, 0, 1310, 1316, 3, 73, 29, 0, 1311, 1316, 3, 63, 24, 0, 1312, 1316, 3, 103, 44, 0, 1313, 1316, 3, 65, 25, 0, 1314, 1316, 3, 79, 32, 0, 1315, 1310, 1, 0, 0, 0, 1315, 1311, 1, 0, 0, 0, 1315, 1312, 1, 0, 0, 0, 1315, 1313, 1, 0, 0, 0, 1315, 1314, 1, 0, 0, 0, 1316, 1317, 1, 0, 0, 0, 1317, 1315, 1, 0, 0, 0, 1317, 1318, 1, 0, 0, 0, 1318, 340, 1, 0, 0, 0, 1319, 1320, 3, 55, 20, 0, 1320, 1321, 1, 0, 0, 0, 1321, 1322, 6, 163, 10, 0, 1322, 342, 1, 0, 0, 0, 1323, 1324, 3, 57, 21, 0, 1324, 1325, 1, 0, 0, 0, 1325, 1326, 6, 164, 10, 0, 1326, 344, 1, 0, 0, 0, 1327, 1328, 3, 59, 22, 0, 1328, 1329, 1, 0, 0, 0, 1329, 1330, 6, 165, 10, 0, 1330, 346, 1, 0, 0, 0, 1331, 1332, 3, 61, 23, 0, 1332, 1333, 1, 0, 0, 0, 1333, 1334, 6, 166, 15, 0, 1334, 1335, 6, 166, 11, 0, 1335, 348, 1, 0, 0, 0, 1336, 1337, 3, 337, 161, 0, 1337, 1338, 1, 0, 0, 0, 1338, 1339, 6, 167, 17, 0, 1339, 350, 1, 0, 0, 0, 1340, 1341, 3, 99, 42, 0, 1341, 1342, 1, 0, 0, 0, 1342, 1343, 6, 168, 18, 0, 1343, 352, 1, 0, 0, 0, 1344, 1345, 3, 103, 44, 0, 1345, 1346, 1, 0, 0, 0, 1346, 1347, 6, 169, 22, 0, 1347, 354, 1, 0, 0, 0, 1348, 1349, 3, 267, 126, 0, 1349, 1350, 1, 0, 0, 0, 1350, 1351, 6, 170, 32, 0, 1351, 1352, 6, 170, 33, 0, 1352, 356, 1, 0, 0, 0, 1353, 1354, 3, 207, 96, 0, 1354, 1355, 1, 0, 0, 0, 1355, 1356, 6, 171, 20, 0, 1356, 358, 1, 0, 0, 0, 1357, 1358, 3, 83, 34, 0, 1358, 1359, 1, 0, 0, 0, 1359, 1360, 6, 172, 21, 0, 1360, 360, 1, 0, 0, 0, 1361, 1362, 3, 55, 20, 0, 1362, 1363, 1, 0, 0, 0, 1363, 1364, 6, 173, 10, 0, 1364, 362, 1, 0, 0, 0, 1365, 1366, 3, 57, 21, 0, 1366, 1367, 1, 0, 0, 0, 1367, 1368, 6, 174, 10, 0, 1368, 364, 1, 0, 0, 0, 1369, 1370, 3, 59, 22, 0, 1370, 1371, 1, 0, 0, 0, 1371, 1372, 6, 175, 10, 0, 1372, 366, 1, 0, 0, 0, 1373, 1374, 3, 61, 23, 0, 1374, 1375, 1, 0, 0, 0, 1375, 1376, 6, 176, 15, 0, 1376, 1377, 6, 176, 11, 0, 1377, 1378, 6, 176, 11, 0, 1378, 368, 1, 0, 0, 0, 1379, 1380, 3, 99, 42, 0, 1380, 1381, 1, 0, 0, 0, 1381, 1382, 6, 177, 18, 0, 1382, 370, 1, 0, 0, 0, 1383, 1384, 3, 103, 
44, 0, 1384, 1385, 1, 0, 0, 0, 1385, 1386, 6, 178, 22, 0, 1386, 372, 1, 0, 0, 0, 1387, 1388, 3, 233, 109, 0, 1388, 1389, 1, 0, 0, 0, 1389, 1390, 6, 179, 25, 0, 1390, 374, 1, 0, 0, 0, 1391, 1392, 3, 55, 20, 0, 1392, 1393, 1, 0, 0, 0, 1393, 1394, 6, 180, 10, 0, 1394, 376, 1, 0, 0, 0, 1395, 1396, 3, 57, 21, 0, 1396, 1397, 1, 0, 0, 0, 1397, 1398, 6, 181, 10, 0, 1398, 378, 1, 0, 0, 0, 1399, 1400, 3, 59, 22, 0, 1400, 1401, 1, 0, 0, 0, 1401, 1402, 6, 182, 10, 0, 1402, 380, 1, 0, 0, 0, 1403, 1404, 3, 61, 23, 0, 1404, 1405, 1, 0, 0, 0, 1405, 1406, 6, 183, 15, 0, 1406, 1407, 6, 183, 11, 0, 1407, 382, 1, 0, 0, 0, 1408, 1409, 3, 207, 96, 0, 1409, 1410, 1, 0, 0, 0, 1410, 1411, 6, 184, 20, 0, 1411, 1412, 6, 184, 11, 0, 1412, 1413, 6, 184, 34, 0, 1413, 384, 1, 0, 0, 0, 1414, 1415, 3, 83, 34, 0, 1415, 1416, 1, 0, 0, 0, 1416, 1417, 6, 185, 21, 0, 1417, 1418, 6, 185, 11, 0, 1418, 1419, 6, 185, 34, 0, 1419, 386, 1, 0, 0, 0, 1420, 1421, 3, 55, 20, 0, 1421, 1422, 1, 0, 0, 0, 1422, 1423, 6, 186, 10, 0, 1423, 388, 1, 0, 0, 0, 1424, 1425, 3, 57, 21, 0, 1425, 1426, 1, 0, 0, 0, 1426, 1427, 6, 187, 10, 0, 1427, 390, 1, 0, 0, 0, 1428, 1429, 3, 59, 22, 0, 1429, 1430, 1, 0, 0, 0, 1430, 1431, 6, 188, 10, 0, 1431, 392, 1, 0, 0, 0, 1432, 1433, 3, 337, 161, 0, 1433, 1434, 1, 0, 0, 0, 1434, 1435, 6, 189, 17, 0, 1435, 1436, 6, 189, 11, 0, 1436, 1437, 6, 189, 9, 0, 1437, 394, 1, 0, 0, 0, 1438, 1439, 3, 99, 42, 0, 1439, 1440, 1, 0, 0, 0, 1440, 1441, 6, 190, 18, 0, 1441, 1442, 6, 190, 11, 0, 1442, 1443, 6, 190, 9, 0, 1443, 396, 1, 0, 0, 0, 1444, 1445, 3, 55, 20, 0, 1445, 1446, 1, 0, 0, 0, 1446, 1447, 6, 191, 10, 0, 1447, 398, 1, 0, 0, 0, 1448, 1449, 3, 57, 21, 0, 1449, 1450, 1, 0, 0, 0, 1450, 1451, 6, 192, 10, 0, 1451, 400, 1, 0, 0, 0, 1452, 1453, 3, 59, 22, 0, 1453, 1454, 1, 0, 0, 0, 1454, 1455, 6, 193, 10, 0, 1455, 402, 1, 0, 0, 0, 1456, 1457, 3, 173, 79, 0, 1457, 1458, 1, 0, 0, 0, 1458, 1459, 6, 194, 11, 0, 1459, 1460, 6, 194, 0, 0, 1460, 1461, 6, 194, 30, 0, 1461, 404, 1, 0, 0, 0, 1462, 1463, 3, 
169, 77, 0, 1463, 1464, 1, 0, 0, 0, 1464, 1465, 6, 195, 11, 0, 1465, 1466, 6, 195, 0, 0, 1466, 1467, 6, 195, 31, 0, 1467, 406, 1, 0, 0, 0, 1468, 1469, 3, 89, 37, 0, 1469, 1470, 1, 0, 0, 0, 1470, 1471, 6, 196, 11, 0, 1471, 1472, 6, 196, 0, 0, 1472, 1473, 6, 196, 35, 0, 1473, 408, 1, 0, 0, 0, 1474, 1475, 3, 61, 23, 0, 1475, 1476, 1, 0, 0, 0, 1476, 1477, 6, 197, 15, 0, 1477, 1478, 6, 197, 11, 0, 1478, 410, 1, 0, 0, 0, 65, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 579, 589, 593, 596, 605, 607, 618, 637, 642, 651, 658, 663, 665, 676, 684, 687, 689, 694, 699, 705, 712, 717, 723, 726, 734, 738, 870, 875, 882, 884, 900, 905, 910, 912, 918, 995, 1000, 1049, 1053, 1058, 1063, 1068, 1070, 1074, 1076, 1163, 1167, 1172, 1315, 1317, 36, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 16, 0, 7, 65, 0, 5, 0, 0, 7, 24, 0, 7, 66, 0, 7, 104, 0, 7, 33, 0, 7, 31, 0, 7, 76, 0, 7, 25, 0, 7, 35, 0, 7, 47, 0, 7, 64, 0, 7, 80, 0, 5, 10, 0, 5, 7, 0, 7, 90, 0, 7, 89, 0, 7, 68, 0, 7, 67, 0, 7, 88, 0, 5, 12, 0, 5, 14, 0, 7, 28, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index 305126ddfae2d..cef4bc5378aaa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -227,6 +227,22 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return DEV_LOOKUP_sempred((RuleContext)_localctx, predIndex); case 18: return DEV_METRICS_sempred((RuleContext)_localctx, predIndex); + case 105: + return PROJECT_PARAM_sempred((RuleContext)_localctx, predIndex); + case 106: + return PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); + case 117: + return 
RENAME_PARAM_sempred((RuleContext)_localctx, predIndex); + case 118: + return RENAME_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); + case 141: + return ENRICH_FIELD_PARAM_sempred((RuleContext)_localctx, predIndex); + case 142: + return ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); + case 148: + return MVEXPAND_PARAM_sempred((RuleContext)_localctx, predIndex); + case 149: + return MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); } return true; } @@ -251,9 +267,65 @@ private boolean DEV_METRICS_sempred(RuleContext _localctx, int predIndex) { } return true; } + private boolean PROJECT_PARAM_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 3: + return this.isDevVersion(); + } + return true; + } + private boolean PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 4: + return this.isDevVersion(); + } + return true; + } + private boolean RENAME_PARAM_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 5: + return this.isDevVersion(); + } + return true; + } + private boolean RENAME_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 6: + return this.isDevVersion(); + } + return true; + } + private boolean ENRICH_FIELD_PARAM_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 7: + return this.isDevVersion(); + } + return true; + } + private boolean ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 8: + return this.isDevVersion(); + } + return true; + } + private boolean MVEXPAND_PARAM_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 9: + return this.isDevVersion(); + } + return true; + } + private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + switch 
(predIndex) { + case 10: + return this.isDevVersion(); + } + return true; + } public static final String _serializedATN = - "\u0004\u0000x\u05bf\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ + "\u0004\u0000x\u05c7\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ @@ -387,35 +459,36 @@ private boolean DEV_METRICS_sempred(RuleContext _localctx, int predIndex) { "b\u0001b\u0001b\u0001c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001"+ "d\u0001e\u0001e\u0001e\u0001e\u0001f\u0001f\u0001f\u0001f\u0001f\u0001"+ "g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001h\u0001i\u0001i\u0001"+ - "i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001k\u0001k\u0003"+ - "k\u0418\bk\u0001l\u0001l\u0003l\u041c\bl\u0001l\u0005l\u041f\bl\nl\fl"+ - "\u0422\tl\u0001l\u0001l\u0003l\u0426\bl\u0001l\u0004l\u0429\bl\u000bl"+ - "\fl\u042a\u0003l\u042d\bl\u0001m\u0001m\u0004m\u0431\bm\u000bm\fm\u0432"+ - "\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001"+ - "p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001"+ - "r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001t\u0001"+ - "u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001v\u0001w\u0001w\u0001"+ - "w\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001y\u0001y\u0001z\u0001"+ - "z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001"+ - "|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001"+ - "~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001"+ - "\u007f\u0001\u007f\u0001\u0080\u0001\u0080\u0001\u0081\u0004\u0081\u0486"+ - "\b\u0081\u000b\u0081\f\u0081\u0487\u0001\u0081\u0001\u0081\u0003\u0081"+ - "\u048c\b\u0081\u0001\u0081\u0004\u0081\u048f\b\u0081\u000b\u0081\f\u0081"+ - 
"\u0490\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0083\u0001"+ - "\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001"+ - "\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086\u0001"+ - "\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087\u0001"+ - "\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001"+ - "\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001"+ - "\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001"+ - "\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001"+ - "\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001"+ - "\u008e\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001"+ - "\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001"+ - "\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001"+ - "\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001"+ - "\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001"+ + "i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001"+ + "k\u0001k\u0003k\u041a\bk\u0001l\u0001l\u0003l\u041e\bl\u0001l\u0005l\u0421"+ + "\bl\nl\fl\u0424\tl\u0001l\u0001l\u0003l\u0428\bl\u0001l\u0004l\u042b\b"+ + "l\u000bl\fl\u042c\u0003l\u042f\bl\u0001m\u0001m\u0004m\u0433\bm\u000b"+ + "m\fm\u0434\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001"+ + "p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001q\u0001r\u0001"+ + "r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001"+ + "t\u0001u\u0001u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001v\u0001"+ + "v\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001"+ + "y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001"+ + "|\u0001|\u0001|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001}\u0001"+ + 
"~\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f\u0001"+ + "\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080\u0001\u0080\u0001"+ + "\u0081\u0004\u0081\u048a\b\u0081\u000b\u0081\f\u0081\u048b\u0001\u0081"+ + "\u0001\u0081\u0003\u0081\u0490\b\u0081\u0001\u0081\u0004\u0081\u0493\b"+ + "\u0081\u000b\u0081\f\u0081\u0494\u0001\u0082\u0001\u0082\u0001\u0082\u0001"+ + "\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001"+ + "\u0084\u0001\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001"+ + "\u0085\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001"+ + "\u0086\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001"+ + "\u0088\u0001\u0088\u0001\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001"+ + "\u0089\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001"+ + "\u008b\u0001\u008b\u0001\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001"+ + "\u008c\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001"+ + "\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008f\u0001"+ + "\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090\u0001\u0090\u0001"+ + "\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0092\u0001"+ + "\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001\u0093\u0001"+ + "\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001"+ + "\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001"+ "\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001"+ "\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0098\u0001"+ "\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u009a\u0001\u009a\u0001"+ @@ -425,7 +498,7 @@ private boolean DEV_METRICS_sempred(RuleContext _localctx, int predIndex) { "\u009e\u0001\u009e\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0001"+ "\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a1\u0001"+ 
"\u00a1\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0004"+ - "\u00a2\u051c\b\u00a2\u000b\u00a2\f\u00a2\u051d\u0001\u00a3\u0001\u00a3"+ + "\u00a2\u0524\b\u00a2\u000b\u00a2\f\u00a2\u0525\u0001\u00a3\u0001\u00a3"+ "\u0001\u00a3\u0001\u00a3\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4"+ "\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6\u0001\u00a6"+ "\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7"+ @@ -486,7 +559,7 @@ private boolean DEV_METRICS_sempred(RuleContext _localctx, int predIndex) { "\r \u0001\u000009\u0002\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004\u0000"+ "\n\n\r\r\"\"\\\\\u0002\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002\u0000"+ "YYyy\u000b\u0000\t\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u000b\u0000"+ - "\t\n\r\r \"#,,//::<<>?\\\\||\u05db\u0000\u000f\u0001\u0000\u0000\u0000"+ + "\t\n\r\r \"#,,//::<<>?\\\\||\u05e3\u0000\u000f\u0001\u0000\u0000\u0000"+ "\u0000\u0011\u0001\u0000\u0000\u0000\u0000\u0013\u0001\u0000\u0000\u0000"+ "\u0000\u0015\u0001\u0000\u0000\u0000\u0000\u0017\u0001\u0000\u0000\u0000"+ "\u0000\u0019\u0001\u0000\u0000\u0000\u0000\u001b\u0001\u0000\u0000\u0000"+ @@ -623,53 +696,53 @@ private boolean DEV_METRICS_sempred(RuleContext _localctx, int predIndex) { "\u0000\u00d5\u03f2\u0001\u0000\u0000\u0000\u00d7\u03f6\u0001\u0000\u0000"+ "\u0000\u00d9\u03fa\u0001\u0000\u0000\u0000\u00db\u03fe\u0001\u0000\u0000"+ "\u0000\u00dd\u0403\u0001\u0000\u0000\u0000\u00df\u0407\u0001\u0000\u0000"+ - "\u0000\u00e1\u040b\u0001\u0000\u0000\u0000\u00e3\u040f\u0001\u0000\u0000"+ - "\u0000\u00e5\u0417\u0001\u0000\u0000\u0000\u00e7\u042c\u0001\u0000\u0000"+ - "\u0000\u00e9\u0430\u0001\u0000\u0000\u0000\u00eb\u0434\u0001\u0000\u0000"+ - "\u0000\u00ed\u0438\u0001\u0000\u0000\u0000\u00ef\u043c\u0001\u0000\u0000"+ - "\u0000\u00f1\u0440\u0001\u0000\u0000\u0000\u00f3\u0445\u0001\u0000\u0000"+ - "\u0000\u00f5\u0449\u0001\u0000\u0000\u0000\u00f7\u044d\u0001\u0000\u0000"+ - 
"\u0000\u00f9\u0451\u0001\u0000\u0000\u0000\u00fb\u0455\u0001\u0000\u0000"+ - "\u0000\u00fd\u0459\u0001\u0000\u0000\u0000\u00ff\u045c\u0001\u0000\u0000"+ - "\u0000\u0101\u0460\u0001\u0000\u0000\u0000\u0103\u0464\u0001\u0000\u0000"+ - "\u0000\u0105\u0468\u0001\u0000\u0000\u0000\u0107\u046c\u0001\u0000\u0000"+ - "\u0000\u0109\u0471\u0001\u0000\u0000\u0000\u010b\u0476\u0001\u0000\u0000"+ - "\u0000\u010d\u047b\u0001\u0000\u0000\u0000\u010f\u0482\u0001\u0000\u0000"+ - "\u0000\u0111\u048b\u0001\u0000\u0000\u0000\u0113\u0492\u0001\u0000\u0000"+ - "\u0000\u0115\u0496\u0001\u0000\u0000\u0000\u0117\u049a\u0001\u0000\u0000"+ - "\u0000\u0119\u049e\u0001\u0000\u0000\u0000\u011b\u04a2\u0001\u0000\u0000"+ - "\u0000\u011d\u04a8\u0001\u0000\u0000\u0000\u011f\u04ac\u0001\u0000\u0000"+ - "\u0000\u0121\u04b0\u0001\u0000\u0000\u0000\u0123\u04b4\u0001\u0000\u0000"+ - "\u0000\u0125\u04b8\u0001\u0000\u0000\u0000\u0127\u04bc\u0001\u0000\u0000"+ - "\u0000\u0129\u04c0\u0001\u0000\u0000\u0000\u012b\u04c4\u0001\u0000\u0000"+ - "\u0000\u012d\u04c8\u0001\u0000\u0000\u0000\u012f\u04cc\u0001\u0000\u0000"+ - "\u0000\u0131\u04d0\u0001\u0000\u0000\u0000\u0133\u04d4\u0001\u0000\u0000"+ - "\u0000\u0135\u04d9\u0001\u0000\u0000\u0000\u0137\u04dd\u0001\u0000\u0000"+ - "\u0000\u0139\u04e1\u0001\u0000\u0000\u0000\u013b\u04e5\u0001\u0000\u0000"+ - "\u0000\u013d\u04e9\u0001\u0000\u0000\u0000\u013f\u04ed\u0001\u0000\u0000"+ - "\u0000\u0141\u04f1\u0001\u0000\u0000\u0000\u0143\u04f5\u0001\u0000\u0000"+ - "\u0000\u0145\u04f9\u0001\u0000\u0000\u0000\u0147\u04fe\u0001\u0000\u0000"+ - "\u0000\u0149\u0503\u0001\u0000\u0000\u0000\u014b\u0507\u0001\u0000\u0000"+ - "\u0000\u014d\u050b\u0001\u0000\u0000\u0000\u014f\u050f\u0001\u0000\u0000"+ - "\u0000\u0151\u0514\u0001\u0000\u0000\u0000\u0153\u051b\u0001\u0000\u0000"+ - "\u0000\u0155\u051f\u0001\u0000\u0000\u0000\u0157\u0523\u0001\u0000\u0000"+ - "\u0000\u0159\u0527\u0001\u0000\u0000\u0000\u015b\u052b\u0001\u0000\u0000"+ - 
"\u0000\u015d\u0530\u0001\u0000\u0000\u0000\u015f\u0534\u0001\u0000\u0000"+ - "\u0000\u0161\u0538\u0001\u0000\u0000\u0000\u0163\u053c\u0001\u0000\u0000"+ - "\u0000\u0165\u0541\u0001\u0000\u0000\u0000\u0167\u0545\u0001\u0000\u0000"+ - "\u0000\u0169\u0549\u0001\u0000\u0000\u0000\u016b\u054d\u0001\u0000\u0000"+ - "\u0000\u016d\u0551\u0001\u0000\u0000\u0000\u016f\u0555\u0001\u0000\u0000"+ - "\u0000\u0171\u055b\u0001\u0000\u0000\u0000\u0173\u055f\u0001\u0000\u0000"+ - "\u0000\u0175\u0563\u0001\u0000\u0000\u0000\u0177\u0567\u0001\u0000\u0000"+ - "\u0000\u0179\u056b\u0001\u0000\u0000\u0000\u017b\u056f\u0001\u0000\u0000"+ - "\u0000\u017d\u0573\u0001\u0000\u0000\u0000\u017f\u0578\u0001\u0000\u0000"+ - "\u0000\u0181\u057e\u0001\u0000\u0000\u0000\u0183\u0584\u0001\u0000\u0000"+ - "\u0000\u0185\u0588\u0001\u0000\u0000\u0000\u0187\u058c\u0001\u0000\u0000"+ - "\u0000\u0189\u0590\u0001\u0000\u0000\u0000\u018b\u0596\u0001\u0000\u0000"+ - "\u0000\u018d\u059c\u0001\u0000\u0000\u0000\u018f\u05a0\u0001\u0000\u0000"+ - "\u0000\u0191\u05a4\u0001\u0000\u0000\u0000\u0193\u05a8\u0001\u0000\u0000"+ - "\u0000\u0195\u05ae\u0001\u0000\u0000\u0000\u0197\u05b4\u0001\u0000\u0000"+ - "\u0000\u0199\u05ba\u0001\u0000\u0000\u0000\u019b\u019c\u0007\u0000\u0000"+ + "\u0000\u00e1\u040b\u0001\u0000\u0000\u0000\u00e3\u0410\u0001\u0000\u0000"+ + "\u0000\u00e5\u0419\u0001\u0000\u0000\u0000\u00e7\u042e\u0001\u0000\u0000"+ + "\u0000\u00e9\u0432\u0001\u0000\u0000\u0000\u00eb\u0436\u0001\u0000\u0000"+ + "\u0000\u00ed\u043a\u0001\u0000\u0000\u0000\u00ef\u043e\u0001\u0000\u0000"+ + "\u0000\u00f1\u0442\u0001\u0000\u0000\u0000\u00f3\u0447\u0001\u0000\u0000"+ + "\u0000\u00f5\u044b\u0001\u0000\u0000\u0000\u00f7\u044f\u0001\u0000\u0000"+ + "\u0000\u00f9\u0453\u0001\u0000\u0000\u0000\u00fb\u0458\u0001\u0000\u0000"+ + "\u0000\u00fd\u045d\u0001\u0000\u0000\u0000\u00ff\u0460\u0001\u0000\u0000"+ + "\u0000\u0101\u0464\u0001\u0000\u0000\u0000\u0103\u0468\u0001\u0000\u0000"+ + 
"\u0000\u0105\u046c\u0001\u0000\u0000\u0000\u0107\u0470\u0001\u0000\u0000"+ + "\u0000\u0109\u0475\u0001\u0000\u0000\u0000\u010b\u047a\u0001\u0000\u0000"+ + "\u0000\u010d\u047f\u0001\u0000\u0000\u0000\u010f\u0486\u0001\u0000\u0000"+ + "\u0000\u0111\u048f\u0001\u0000\u0000\u0000\u0113\u0496\u0001\u0000\u0000"+ + "\u0000\u0115\u049a\u0001\u0000\u0000\u0000\u0117\u049e\u0001\u0000\u0000"+ + "\u0000\u0119\u04a2\u0001\u0000\u0000\u0000\u011b\u04a6\u0001\u0000\u0000"+ + "\u0000\u011d\u04ac\u0001\u0000\u0000\u0000\u011f\u04b0\u0001\u0000\u0000"+ + "\u0000\u0121\u04b4\u0001\u0000\u0000\u0000\u0123\u04b8\u0001\u0000\u0000"+ + "\u0000\u0125\u04bc\u0001\u0000\u0000\u0000\u0127\u04c0\u0001\u0000\u0000"+ + "\u0000\u0129\u04c4\u0001\u0000\u0000\u0000\u012b\u04c9\u0001\u0000\u0000"+ + "\u0000\u012d\u04ce\u0001\u0000\u0000\u0000\u012f\u04d2\u0001\u0000\u0000"+ + "\u0000\u0131\u04d6\u0001\u0000\u0000\u0000\u0133\u04da\u0001\u0000\u0000"+ + "\u0000\u0135\u04df\u0001\u0000\u0000\u0000\u0137\u04e3\u0001\u0000\u0000"+ + "\u0000\u0139\u04e8\u0001\u0000\u0000\u0000\u013b\u04ed\u0001\u0000\u0000"+ + "\u0000\u013d\u04f1\u0001\u0000\u0000\u0000\u013f\u04f5\u0001\u0000\u0000"+ + "\u0000\u0141\u04f9\u0001\u0000\u0000\u0000\u0143\u04fd\u0001\u0000\u0000"+ + "\u0000\u0145\u0501\u0001\u0000\u0000\u0000\u0147\u0506\u0001\u0000\u0000"+ + "\u0000\u0149\u050b\u0001\u0000\u0000\u0000\u014b\u050f\u0001\u0000\u0000"+ + "\u0000\u014d\u0513\u0001\u0000\u0000\u0000\u014f\u0517\u0001\u0000\u0000"+ + "\u0000\u0151\u051c\u0001\u0000\u0000\u0000\u0153\u0523\u0001\u0000\u0000"+ + "\u0000\u0155\u0527\u0001\u0000\u0000\u0000\u0157\u052b\u0001\u0000\u0000"+ + "\u0000\u0159\u052f\u0001\u0000\u0000\u0000\u015b\u0533\u0001\u0000\u0000"+ + "\u0000\u015d\u0538\u0001\u0000\u0000\u0000\u015f\u053c\u0001\u0000\u0000"+ + "\u0000\u0161\u0540\u0001\u0000\u0000\u0000\u0163\u0544\u0001\u0000\u0000"+ + "\u0000\u0165\u0549\u0001\u0000\u0000\u0000\u0167\u054d\u0001\u0000\u0000"+ + 
"\u0000\u0169\u0551\u0001\u0000\u0000\u0000\u016b\u0555\u0001\u0000\u0000"+ + "\u0000\u016d\u0559\u0001\u0000\u0000\u0000\u016f\u055d\u0001\u0000\u0000"+ + "\u0000\u0171\u0563\u0001\u0000\u0000\u0000\u0173\u0567\u0001\u0000\u0000"+ + "\u0000\u0175\u056b\u0001\u0000\u0000\u0000\u0177\u056f\u0001\u0000\u0000"+ + "\u0000\u0179\u0573\u0001\u0000\u0000\u0000\u017b\u0577\u0001\u0000\u0000"+ + "\u0000\u017d\u057b\u0001\u0000\u0000\u0000\u017f\u0580\u0001\u0000\u0000"+ + "\u0000\u0181\u0586\u0001\u0000\u0000\u0000\u0183\u058c\u0001\u0000\u0000"+ + "\u0000\u0185\u0590\u0001\u0000\u0000\u0000\u0187\u0594\u0001\u0000\u0000"+ + "\u0000\u0189\u0598\u0001\u0000\u0000\u0000\u018b\u059e\u0001\u0000\u0000"+ + "\u0000\u018d\u05a4\u0001\u0000\u0000\u0000\u018f\u05a8\u0001\u0000\u0000"+ + "\u0000\u0191\u05ac\u0001\u0000\u0000\u0000\u0193\u05b0\u0001\u0000\u0000"+ + "\u0000\u0195\u05b6\u0001\u0000\u0000\u0000\u0197\u05bc\u0001\u0000\u0000"+ + "\u0000\u0199\u05c2\u0001\u0000\u0000\u0000\u019b\u019c\u0007\u0000\u0000"+ "\u0000\u019c\u019d\u0007\u0001\u0000\u0000\u019d\u019e\u0007\u0002\u0000"+ "\u0000\u019e\u019f\u0007\u0002\u0000\u0000\u019f\u01a0\u0007\u0003\u0000"+ "\u0000\u01a0\u01a1\u0007\u0004\u0000\u0000\u01a1\u01a2\u0007\u0005\u0000"+ @@ -981,233 +1054,237 @@ private boolean DEV_METRICS_sempred(RuleContext _localctx, int predIndex) { "\u0000\u0000\u0403\u0404\u0003g,\u0000\u0404\u0405\u0001\u0000\u0000\u0000"+ "\u0405\u0406\u0006g\u0016\u0000\u0406\u00de\u0001\u0000\u0000\u0000\u0407"+ "\u0408\u0003c*\u0000\u0408\u0409\u0001\u0000\u0000\u0000\u0409\u040a\u0006"+ - "h\u0012\u0000\u040a\u00e0\u0001\u0000\u0000\u0000\u040b\u040c\u0003\u007f"+ - "8\u0000\u040c\u040d\u0001\u0000\u0000\u0000\u040d\u040e\u0006i\u0017\u0000"+ - "\u040e\u00e2\u0001\u0000\u0000\u0000\u040f\u0410\u0003\u00a3J\u0000\u0410"+ - "\u0411\u0001\u0000\u0000\u0000\u0411\u0412\u0006j\u0018\u0000\u0412\u00e4"+ - "\u0001\u0000\u0000\u0000\u0413\u0418\u0003A\u0019\u0000\u0414\u0418\u0003"+ - 
"?\u0018\u0000\u0415\u0418\u0003O \u0000\u0416\u0418\u0003\u0099E\u0000"+ - "\u0417\u0413\u0001\u0000\u0000\u0000\u0417\u0414\u0001\u0000\u0000\u0000"+ - "\u0417\u0415\u0001\u0000\u0000\u0000\u0417\u0416\u0001\u0000\u0000\u0000"+ - "\u0418\u00e6\u0001\u0000\u0000\u0000\u0419\u041c\u0003A\u0019\u0000\u041a"+ - "\u041c\u0003\u0099E\u0000\u041b\u0419\u0001\u0000\u0000\u0000\u041b\u041a"+ - "\u0001\u0000\u0000\u0000\u041c\u0420\u0001\u0000\u0000\u0000\u041d\u041f"+ - "\u0003\u00e5k\u0000\u041e\u041d\u0001\u0000\u0000\u0000\u041f\u0422\u0001"+ - "\u0000\u0000\u0000\u0420\u041e\u0001\u0000\u0000\u0000\u0420\u0421\u0001"+ - "\u0000\u0000\u0000\u0421\u042d\u0001\u0000\u0000\u0000\u0422\u0420\u0001"+ - "\u0000\u0000\u0000\u0423\u0426\u0003O \u0000\u0424\u0426\u0003I\u001d"+ - "\u0000\u0425\u0423\u0001\u0000\u0000\u0000\u0425\u0424\u0001\u0000\u0000"+ - "\u0000\u0426\u0428\u0001\u0000\u0000\u0000\u0427\u0429\u0003\u00e5k\u0000"+ - "\u0428\u0427\u0001\u0000\u0000\u0000\u0429\u042a\u0001\u0000\u0000\u0000"+ - "\u042a\u0428\u0001\u0000\u0000\u0000\u042a\u042b\u0001\u0000\u0000\u0000"+ - "\u042b\u042d\u0001\u0000\u0000\u0000\u042c\u041b\u0001\u0000\u0000\u0000"+ - "\u042c\u0425\u0001\u0000\u0000\u0000\u042d\u00e8\u0001\u0000\u0000\u0000"+ - "\u042e\u0431\u0003\u00e7l\u0000\u042f\u0431\u0003\u00abN\u0000\u0430\u042e"+ - "\u0001\u0000\u0000\u0000\u0430\u042f\u0001\u0000\u0000\u0000\u0431\u0432"+ - "\u0001\u0000\u0000\u0000\u0432\u0430\u0001\u0000\u0000\u0000\u0432\u0433"+ - "\u0001\u0000\u0000\u0000\u0433\u00ea\u0001\u0000\u0000\u0000\u0434\u0435"+ - "\u00037\u0014\u0000\u0435\u0436\u0001\u0000\u0000\u0000\u0436\u0437\u0006"+ - "n\n\u0000\u0437\u00ec\u0001\u0000\u0000\u0000\u0438\u0439\u00039\u0015"+ - "\u0000\u0439\u043a\u0001\u0000\u0000\u0000\u043a\u043b\u0006o\n\u0000"+ - "\u043b\u00ee\u0001\u0000\u0000\u0000\u043c\u043d\u0003;\u0016\u0000\u043d"+ - "\u043e\u0001\u0000\u0000\u0000\u043e\u043f\u0006p\n\u0000\u043f\u00f0"+ - 
"\u0001\u0000\u0000\u0000\u0440\u0441\u0003=\u0017\u0000\u0441\u0442\u0001"+ - "\u0000\u0000\u0000\u0442\u0443\u0006q\u000f\u0000\u0443\u0444\u0006q\u000b"+ - "\u0000\u0444\u00f2\u0001\u0000\u0000\u0000\u0445\u0446\u0003_(\u0000\u0446"+ - "\u0447\u0001\u0000\u0000\u0000\u0447\u0448\u0006r\u0013\u0000\u0448\u00f4"+ - "\u0001\u0000\u0000\u0000\u0449\u044a\u0003c*\u0000\u044a\u044b\u0001\u0000"+ - "\u0000\u0000\u044b\u044c\u0006s\u0012\u0000\u044c\u00f6\u0001\u0000\u0000"+ - "\u0000\u044d\u044e\u0003g,\u0000\u044e\u044f\u0001\u0000\u0000\u0000\u044f"+ - "\u0450\u0006t\u0016\u0000\u0450\u00f8\u0001\u0000\u0000\u0000\u0451\u0452"+ - "\u0003\u007f8\u0000\u0452\u0453\u0001\u0000\u0000\u0000\u0453\u0454\u0006"+ - "u\u0017\u0000\u0454\u00fa\u0001\u0000\u0000\u0000\u0455\u0456\u0003\u00a3"+ - "J\u0000\u0456\u0457\u0001\u0000\u0000\u0000\u0457\u0458\u0006v\u0018\u0000"+ - "\u0458\u00fc\u0001\u0000\u0000\u0000\u0459\u045a\u0007\f\u0000\u0000\u045a"+ - "\u045b\u0007\u0002\u0000\u0000\u045b\u00fe\u0001\u0000\u0000\u0000\u045c"+ - "\u045d\u0003\u00e9m\u0000\u045d\u045e\u0001\u0000\u0000\u0000\u045e\u045f"+ - "\u0006x\u0019\u0000\u045f\u0100\u0001\u0000\u0000\u0000\u0460\u0461\u0003"+ - "7\u0014\u0000\u0461\u0462\u0001\u0000\u0000\u0000\u0462\u0463\u0006y\n"+ - "\u0000\u0463\u0102\u0001\u0000\u0000\u0000\u0464\u0465\u00039\u0015\u0000"+ - "\u0465\u0466\u0001\u0000\u0000\u0000\u0466\u0467\u0006z\n\u0000\u0467"+ - "\u0104\u0001\u0000\u0000\u0000\u0468\u0469\u0003;\u0016\u0000\u0469\u046a"+ - "\u0001\u0000\u0000\u0000\u046a\u046b\u0006{\n\u0000\u046b\u0106\u0001"+ - "\u0000\u0000\u0000\u046c\u046d\u0003=\u0017\u0000\u046d\u046e\u0001\u0000"+ - "\u0000\u0000\u046e\u046f\u0006|\u000f\u0000\u046f\u0470\u0006|\u000b\u0000"+ - "\u0470\u0108\u0001\u0000\u0000\u0000\u0471\u0472\u0003\u00a5K\u0000\u0472"+ - "\u0473\u0001\u0000\u0000\u0000\u0473\u0474\u0006}\r\u0000\u0474\u0475"+ - "\u0006}\u001a\u0000\u0475\u010a\u0001\u0000\u0000\u0000\u0476\u0477\u0007"+ - 
"\u0007\u0000\u0000\u0477\u0478\u0007\t\u0000\u0000\u0478\u0479\u0001\u0000"+ - "\u0000\u0000\u0479\u047a\u0006~\u001b\u0000\u047a\u010c\u0001\u0000\u0000"+ - "\u0000\u047b\u047c\u0007\u0013\u0000\u0000\u047c\u047d\u0007\u0001\u0000"+ - "\u0000\u047d\u047e\u0007\u0005\u0000\u0000\u047e\u047f\u0007\n\u0000\u0000"+ - "\u047f\u0480\u0001\u0000\u0000\u0000\u0480\u0481\u0006\u007f\u001b\u0000"+ - "\u0481\u010e\u0001\u0000\u0000\u0000\u0482\u0483\b\"\u0000\u0000\u0483"+ - "\u0110\u0001\u0000\u0000\u0000\u0484\u0486\u0003\u010f\u0080\u0000\u0485"+ - "\u0484\u0001\u0000\u0000\u0000\u0486\u0487\u0001\u0000\u0000\u0000\u0487"+ - "\u0485\u0001\u0000\u0000\u0000\u0487\u0488\u0001\u0000\u0000\u0000\u0488"+ - "\u0489\u0001\u0000\u0000\u0000\u0489\u048a\u0003\u0151\u00a1\u0000\u048a"+ - "\u048c\u0001\u0000\u0000\u0000\u048b\u0485\u0001\u0000\u0000\u0000\u048b"+ - "\u048c\u0001\u0000\u0000\u0000\u048c\u048e\u0001\u0000\u0000\u0000\u048d"+ - "\u048f\u0003\u010f\u0080\u0000\u048e\u048d\u0001\u0000\u0000\u0000\u048f"+ - "\u0490\u0001\u0000\u0000\u0000\u0490\u048e\u0001\u0000\u0000\u0000\u0490"+ - "\u0491\u0001\u0000\u0000\u0000\u0491\u0112\u0001\u0000\u0000\u0000\u0492"+ - "\u0493\u0003\u0111\u0081\u0000\u0493\u0494\u0001\u0000\u0000\u0000\u0494"+ - "\u0495\u0006\u0082\u001c\u0000\u0495\u0114\u0001\u0000\u0000\u0000\u0496"+ - "\u0497\u00037\u0014\u0000\u0497\u0498\u0001\u0000\u0000\u0000\u0498\u0499"+ - "\u0006\u0083\n\u0000\u0499\u0116\u0001\u0000\u0000\u0000\u049a\u049b\u0003"+ - "9\u0015\u0000\u049b\u049c\u0001\u0000\u0000\u0000\u049c\u049d\u0006\u0084"+ - "\n\u0000\u049d\u0118\u0001\u0000\u0000\u0000\u049e\u049f\u0003;\u0016"+ - "\u0000\u049f\u04a0\u0001\u0000\u0000\u0000\u04a0\u04a1\u0006\u0085\n\u0000"+ - "\u04a1\u011a\u0001\u0000\u0000\u0000\u04a2\u04a3\u0003=\u0017\u0000\u04a3"+ - "\u04a4\u0001\u0000\u0000\u0000\u04a4\u04a5\u0006\u0086\u000f\u0000\u04a5"+ - "\u04a6\u0006\u0086\u000b\u0000\u04a6\u04a7\u0006\u0086\u000b\u0000\u04a7"+ - 
"\u011c\u0001\u0000\u0000\u0000\u04a8\u04a9\u0003_(\u0000\u04a9\u04aa\u0001"+ - "\u0000\u0000\u0000\u04aa\u04ab\u0006\u0087\u0013\u0000\u04ab\u011e\u0001"+ - "\u0000\u0000\u0000\u04ac\u04ad\u0003c*\u0000\u04ad\u04ae\u0001\u0000\u0000"+ - "\u0000\u04ae\u04af\u0006\u0088\u0012\u0000\u04af\u0120\u0001\u0000\u0000"+ - "\u0000\u04b0\u04b1\u0003g,\u0000\u04b1\u04b2\u0001\u0000\u0000\u0000\u04b2"+ - "\u04b3\u0006\u0089\u0016\u0000\u04b3\u0122\u0001\u0000\u0000\u0000\u04b4"+ - "\u04b5\u0003\u010d\u007f\u0000\u04b5\u04b6\u0001\u0000\u0000\u0000\u04b6"+ - "\u04b7\u0006\u008a\u001d\u0000\u04b7\u0124\u0001\u0000\u0000\u0000\u04b8"+ - "\u04b9\u0003\u00e9m\u0000\u04b9\u04ba\u0001\u0000\u0000\u0000\u04ba\u04bb"+ - "\u0006\u008b\u0019\u0000\u04bb\u0126\u0001\u0000\u0000\u0000\u04bc\u04bd"+ - "\u0003\u00adO\u0000\u04bd\u04be\u0001\u0000\u0000\u0000\u04be\u04bf\u0006"+ - "\u008c\u001e\u0000\u04bf\u0128\u0001\u0000\u0000\u0000\u04c0\u04c1\u0003"+ - "\u007f8\u0000\u04c1\u04c2\u0001\u0000\u0000\u0000\u04c2\u04c3\u0006\u008d"+ - "\u0017\u0000\u04c3\u012a\u0001\u0000\u0000\u0000\u04c4\u04c5\u0003\u00a3"+ - "J\u0000\u04c5\u04c6\u0001\u0000\u0000\u0000\u04c6\u04c7\u0006\u008e\u0018"+ - "\u0000\u04c7\u012c\u0001\u0000\u0000\u0000\u04c8\u04c9\u00037\u0014\u0000"+ - "\u04c9\u04ca\u0001\u0000\u0000\u0000\u04ca\u04cb\u0006\u008f\n\u0000\u04cb"+ - "\u012e\u0001\u0000\u0000\u0000\u04cc\u04cd\u00039\u0015\u0000\u04cd\u04ce"+ - "\u0001\u0000\u0000\u0000\u04ce\u04cf\u0006\u0090\n\u0000\u04cf\u0130\u0001"+ - "\u0000\u0000\u0000\u04d0\u04d1\u0003;\u0016\u0000\u04d1\u04d2\u0001\u0000"+ - "\u0000\u0000\u04d2\u04d3\u0006\u0091\n\u0000\u04d3\u0132\u0001\u0000\u0000"+ - "\u0000\u04d4\u04d5\u0003=\u0017\u0000\u04d5\u04d6\u0001\u0000\u0000\u0000"+ - "\u04d6\u04d7\u0006\u0092\u000f\u0000\u04d7\u04d8\u0006\u0092\u000b\u0000"+ - "\u04d8\u0134\u0001\u0000\u0000\u0000\u04d9\u04da\u0003g,\u0000\u04da\u04db"+ - "\u0001\u0000\u0000\u0000\u04db\u04dc\u0006\u0093\u0016\u0000\u04dc\u0136"+ - 
"\u0001\u0000\u0000\u0000\u04dd\u04de\u0003\u007f8\u0000\u04de\u04df\u0001"+ - "\u0000\u0000\u0000\u04df\u04e0\u0006\u0094\u0017\u0000\u04e0\u0138\u0001"+ - "\u0000\u0000\u0000\u04e1\u04e2\u0003\u00a3J\u0000\u04e2\u04e3\u0001\u0000"+ - "\u0000\u0000\u04e3\u04e4\u0006\u0095\u0018\u0000\u04e4\u013a\u0001\u0000"+ - "\u0000\u0000\u04e5\u04e6\u0003\u00adO\u0000\u04e6\u04e7\u0001\u0000\u0000"+ - "\u0000\u04e7\u04e8\u0006\u0096\u001e\u0000\u04e8\u013c\u0001\u0000\u0000"+ - "\u0000\u04e9\u04ea\u0003\u00a9M\u0000\u04ea\u04eb\u0001\u0000\u0000\u0000"+ - "\u04eb\u04ec\u0006\u0097\u001f\u0000\u04ec\u013e\u0001\u0000\u0000\u0000"+ - "\u04ed\u04ee\u00037\u0014\u0000\u04ee\u04ef\u0001\u0000\u0000\u0000\u04ef"+ - "\u04f0\u0006\u0098\n\u0000\u04f0\u0140\u0001\u0000\u0000\u0000\u04f1\u04f2"+ - "\u00039\u0015\u0000\u04f2\u04f3\u0001\u0000\u0000\u0000\u04f3\u04f4\u0006"+ - "\u0099\n\u0000\u04f4\u0142\u0001\u0000\u0000\u0000\u04f5\u04f6\u0003;"+ - "\u0016\u0000\u04f6\u04f7\u0001\u0000\u0000\u0000\u04f7\u04f8\u0006\u009a"+ - "\n\u0000\u04f8\u0144\u0001\u0000\u0000\u0000\u04f9\u04fa\u0003=\u0017"+ - "\u0000\u04fa\u04fb\u0001\u0000\u0000\u0000\u04fb\u04fc\u0006\u009b\u000f"+ - "\u0000\u04fc\u04fd\u0006\u009b\u000b\u0000\u04fd\u0146\u0001\u0000\u0000"+ - "\u0000\u04fe\u04ff\u0007\u0001\u0000\u0000\u04ff\u0500\u0007\t\u0000\u0000"+ - "\u0500\u0501\u0007\u000f\u0000\u0000\u0501\u0502\u0007\u0007\u0000\u0000"+ - "\u0502\u0148\u0001\u0000\u0000\u0000\u0503\u0504\u00037\u0014\u0000\u0504"+ - "\u0505\u0001\u0000\u0000\u0000\u0505\u0506\u0006\u009d\n\u0000\u0506\u014a"+ - "\u0001\u0000\u0000\u0000\u0507\u0508\u00039\u0015\u0000\u0508\u0509\u0001"+ - "\u0000\u0000\u0000\u0509\u050a\u0006\u009e\n\u0000\u050a\u014c\u0001\u0000"+ - "\u0000\u0000\u050b\u050c\u0003;\u0016\u0000\u050c\u050d\u0001\u0000\u0000"+ - "\u0000\u050d\u050e\u0006\u009f\n\u0000\u050e\u014e\u0001\u0000\u0000\u0000"+ - "\u050f\u0510\u0003\u00a7L\u0000\u0510\u0511\u0001\u0000\u0000\u0000\u0511"+ - 
"\u0512\u0006\u00a0\u0010\u0000\u0512\u0513\u0006\u00a0\u000b\u0000\u0513"+ - "\u0150\u0001\u0000\u0000\u0000\u0514\u0515\u0005:\u0000\u0000\u0515\u0152"+ - "\u0001\u0000\u0000\u0000\u0516\u051c\u0003I\u001d\u0000\u0517\u051c\u0003"+ - "?\u0018\u0000\u0518\u051c\u0003g,\u0000\u0519\u051c\u0003A\u0019\u0000"+ - "\u051a\u051c\u0003O \u0000\u051b\u0516\u0001\u0000\u0000\u0000\u051b\u0517"+ - "\u0001\u0000\u0000\u0000\u051b\u0518\u0001\u0000\u0000\u0000\u051b\u0519"+ - "\u0001\u0000\u0000\u0000\u051b\u051a\u0001\u0000\u0000\u0000\u051c\u051d"+ - "\u0001\u0000\u0000\u0000\u051d\u051b\u0001\u0000\u0000\u0000\u051d\u051e"+ - "\u0001\u0000\u0000\u0000\u051e\u0154\u0001\u0000\u0000\u0000\u051f\u0520"+ - "\u00037\u0014\u0000\u0520\u0521\u0001\u0000\u0000\u0000\u0521\u0522\u0006"+ - "\u00a3\n\u0000\u0522\u0156\u0001\u0000\u0000\u0000\u0523\u0524\u00039"+ - "\u0015\u0000\u0524\u0525\u0001\u0000\u0000\u0000\u0525\u0526\u0006\u00a4"+ - "\n\u0000\u0526\u0158\u0001\u0000\u0000\u0000\u0527\u0528\u0003;\u0016"+ - "\u0000\u0528\u0529\u0001\u0000\u0000\u0000\u0529\u052a\u0006\u00a5\n\u0000"+ - "\u052a\u015a\u0001\u0000\u0000\u0000\u052b\u052c\u0003=\u0017\u0000\u052c"+ - "\u052d\u0001\u0000\u0000\u0000\u052d\u052e\u0006\u00a6\u000f\u0000\u052e"+ - "\u052f\u0006\u00a6\u000b\u0000\u052f\u015c\u0001\u0000\u0000\u0000\u0530"+ - "\u0531\u0003\u0151\u00a1\u0000\u0531\u0532\u0001\u0000\u0000\u0000\u0532"+ - "\u0533\u0006\u00a7\u0011\u0000\u0533\u015e\u0001\u0000\u0000\u0000\u0534"+ - "\u0535\u0003c*\u0000\u0535\u0536\u0001\u0000\u0000\u0000\u0536\u0537\u0006"+ - "\u00a8\u0012\u0000\u0537\u0160\u0001\u0000\u0000\u0000\u0538\u0539\u0003"+ - "g,\u0000\u0539\u053a\u0001\u0000\u0000\u0000\u053a\u053b\u0006\u00a9\u0016"+ - "\u0000\u053b\u0162\u0001\u0000\u0000\u0000\u053c\u053d\u0003\u010b~\u0000"+ - "\u053d\u053e\u0001\u0000\u0000\u0000\u053e\u053f\u0006\u00aa \u0000\u053f"+ - "\u0540\u0006\u00aa!\u0000\u0540\u0164\u0001\u0000\u0000\u0000\u0541\u0542"+ - 
"\u0003\u00cf`\u0000\u0542\u0543\u0001\u0000\u0000\u0000\u0543\u0544\u0006"+ - "\u00ab\u0014\u0000\u0544\u0166\u0001\u0000\u0000\u0000\u0545\u0546\u0003"+ - "S\"\u0000\u0546\u0547\u0001\u0000\u0000\u0000\u0547\u0548\u0006\u00ac"+ - "\u0015\u0000\u0548\u0168\u0001\u0000\u0000\u0000\u0549\u054a\u00037\u0014"+ - "\u0000\u054a\u054b\u0001\u0000\u0000\u0000\u054b\u054c\u0006\u00ad\n\u0000"+ - "\u054c\u016a\u0001\u0000\u0000\u0000\u054d\u054e\u00039\u0015\u0000\u054e"+ - "\u054f\u0001\u0000\u0000\u0000\u054f\u0550\u0006\u00ae\n\u0000\u0550\u016c"+ - "\u0001\u0000\u0000\u0000\u0551\u0552\u0003;\u0016\u0000\u0552\u0553\u0001"+ - "\u0000\u0000\u0000\u0553\u0554\u0006\u00af\n\u0000\u0554\u016e\u0001\u0000"+ - "\u0000\u0000\u0555\u0556\u0003=\u0017\u0000\u0556\u0557\u0001\u0000\u0000"+ - "\u0000\u0557\u0558\u0006\u00b0\u000f\u0000\u0558\u0559\u0006\u00b0\u000b"+ - "\u0000\u0559\u055a\u0006\u00b0\u000b\u0000\u055a\u0170\u0001\u0000\u0000"+ - "\u0000\u055b\u055c\u0003c*\u0000\u055c\u055d\u0001\u0000\u0000\u0000\u055d"+ - "\u055e\u0006\u00b1\u0012\u0000\u055e\u0172\u0001\u0000\u0000\u0000\u055f"+ - "\u0560\u0003g,\u0000\u0560\u0561\u0001\u0000\u0000\u0000\u0561\u0562\u0006"+ - "\u00b2\u0016\u0000\u0562\u0174\u0001\u0000\u0000\u0000\u0563\u0564\u0003"+ - "\u00e9m\u0000\u0564\u0565\u0001\u0000\u0000\u0000\u0565\u0566\u0006\u00b3"+ - "\u0019\u0000\u0566\u0176\u0001\u0000\u0000\u0000\u0567\u0568\u00037\u0014"+ - "\u0000\u0568\u0569\u0001\u0000\u0000\u0000\u0569\u056a\u0006\u00b4\n\u0000"+ - "\u056a\u0178\u0001\u0000\u0000\u0000\u056b\u056c\u00039\u0015\u0000\u056c"+ - "\u056d\u0001\u0000\u0000\u0000\u056d\u056e\u0006\u00b5\n\u0000\u056e\u017a"+ - "\u0001\u0000\u0000\u0000\u056f\u0570\u0003;\u0016\u0000\u0570\u0571\u0001"+ - "\u0000\u0000\u0000\u0571\u0572\u0006\u00b6\n\u0000\u0572\u017c\u0001\u0000"+ - "\u0000\u0000\u0573\u0574\u0003=\u0017\u0000\u0574\u0575\u0001\u0000\u0000"+ - "\u0000\u0575\u0576\u0006\u00b7\u000f\u0000\u0576\u0577\u0006\u00b7\u000b"+ - 
"\u0000\u0577\u017e\u0001\u0000\u0000\u0000\u0578\u0579\u0003\u00cf`\u0000"+ - "\u0579\u057a\u0001\u0000\u0000\u0000\u057a\u057b\u0006\u00b8\u0014\u0000"+ - "\u057b\u057c\u0006\u00b8\u000b\u0000\u057c\u057d\u0006\u00b8\"\u0000\u057d"+ - "\u0180\u0001\u0000\u0000\u0000\u057e\u057f\u0003S\"\u0000\u057f\u0580"+ - "\u0001\u0000\u0000\u0000\u0580\u0581\u0006\u00b9\u0015\u0000\u0581\u0582"+ - "\u0006\u00b9\u000b\u0000\u0582\u0583\u0006\u00b9\"\u0000\u0583\u0182\u0001"+ - "\u0000\u0000\u0000\u0584\u0585\u00037\u0014\u0000\u0585\u0586\u0001\u0000"+ - "\u0000\u0000\u0586\u0587\u0006\u00ba\n\u0000\u0587\u0184\u0001\u0000\u0000"+ - "\u0000\u0588\u0589\u00039\u0015\u0000\u0589\u058a\u0001\u0000\u0000\u0000"+ - "\u058a\u058b\u0006\u00bb\n\u0000\u058b\u0186\u0001\u0000\u0000\u0000\u058c"+ - "\u058d\u0003;\u0016\u0000\u058d\u058e\u0001\u0000\u0000\u0000\u058e\u058f"+ - "\u0006\u00bc\n\u0000\u058f\u0188\u0001\u0000\u0000\u0000\u0590\u0591\u0003"+ - "\u0151\u00a1\u0000\u0591\u0592\u0001\u0000\u0000\u0000\u0592\u0593\u0006"+ - "\u00bd\u0011\u0000\u0593\u0594\u0006\u00bd\u000b\u0000\u0594\u0595\u0006"+ - "\u00bd\t\u0000\u0595\u018a\u0001\u0000\u0000\u0000\u0596\u0597\u0003c"+ - "*\u0000\u0597\u0598\u0001\u0000\u0000\u0000\u0598\u0599\u0006\u00be\u0012"+ - "\u0000\u0599\u059a\u0006\u00be\u000b\u0000\u059a\u059b\u0006\u00be\t\u0000"+ - "\u059b\u018c\u0001\u0000\u0000\u0000\u059c\u059d\u00037\u0014\u0000\u059d"+ - "\u059e\u0001\u0000\u0000\u0000\u059e\u059f\u0006\u00bf\n\u0000\u059f\u018e"+ - "\u0001\u0000\u0000\u0000\u05a0\u05a1\u00039\u0015\u0000\u05a1\u05a2\u0001"+ - "\u0000\u0000\u0000\u05a2\u05a3\u0006\u00c0\n\u0000\u05a3\u0190\u0001\u0000"+ - "\u0000\u0000\u05a4\u05a5\u0003;\u0016\u0000\u05a5\u05a6\u0001\u0000\u0000"+ - "\u0000\u05a6\u05a7\u0006\u00c1\n\u0000\u05a7\u0192\u0001\u0000\u0000\u0000"+ - "\u05a8\u05a9\u0003\u00adO\u0000\u05a9\u05aa\u0001\u0000\u0000\u0000\u05aa"+ - "\u05ab\u0006\u00c2\u000b\u0000\u05ab\u05ac\u0006\u00c2\u0000\u0000\u05ac"+ - 
"\u05ad\u0006\u00c2\u001e\u0000\u05ad\u0194\u0001\u0000\u0000\u0000\u05ae"+ - "\u05af\u0003\u00a9M\u0000\u05af\u05b0\u0001\u0000\u0000\u0000\u05b0\u05b1"+ - "\u0006\u00c3\u000b\u0000\u05b1\u05b2\u0006\u00c3\u0000\u0000\u05b2\u05b3"+ - "\u0006\u00c3\u001f\u0000\u05b3\u0196\u0001\u0000\u0000\u0000\u05b4\u05b5"+ - "\u0003Y%\u0000\u05b5\u05b6\u0001\u0000\u0000\u0000\u05b6\u05b7\u0006\u00c4"+ - "\u000b\u0000\u05b7\u05b8\u0006\u00c4\u0000\u0000\u05b8\u05b9\u0006\u00c4"+ - "#\u0000\u05b9\u0198\u0001\u0000\u0000\u0000\u05ba\u05bb\u0003=\u0017\u0000"+ - "\u05bb\u05bc\u0001\u0000\u0000\u0000\u05bc\u05bd\u0006\u00c5\u000f\u0000"+ - "\u05bd\u05be\u0006\u00c5\u000b\u0000\u05be\u019a\u0001\u0000\u0000\u0000"+ - "A\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e"+ - "\u0243\u024d\u0251\u0254\u025d\u025f\u026a\u027d\u0282\u028b\u0292\u0297"+ - "\u0299\u02a4\u02ac\u02af\u02b1\u02b6\u02bb\u02c1\u02c8\u02cd\u02d3\u02d6"+ - "\u02de\u02e2\u0366\u036b\u0372\u0374\u0384\u0389\u038e\u0390\u0396\u03e3"+ - "\u03e8\u0417\u041b\u0420\u0425\u042a\u042c\u0430\u0432\u0487\u048b\u0490"+ - "\u051b\u051d$\u0005\u0001\u0000\u0005\u0004\u0000\u0005\u0006\u0000\u0005"+ - "\u0002\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t"+ - "\u0000\u0005\u000b\u0000\u0005\r\u0000\u0000\u0001\u0000\u0004\u0000\u0000"+ - "\u0007\u0010\u0000\u0007A\u0000\u0005\u0000\u0000\u0007\u0018\u0000\u0007"+ - "B\u0000\u0007h\u0000\u0007!\u0000\u0007\u001f\u0000\u0007L\u0000\u0007"+ - "\u0019\u0000\u0007#\u0000\u0007/\u0000\u0007@\u0000\u0007P\u0000\u0005"+ - "\n\u0000\u0005\u0007\u0000\u0007Z\u0000\u0007Y\u0000\u0007D\u0000\u0007"+ - "C\u0000\u0007X\u0000\u0005\f\u0000\u0005\u000e\u0000\u0007\u001c\u0000"; + "h\u0012\u0000\u040a\u00e0\u0001\u0000\u0000\u0000\u040b\u040c\u0004i\u0003"+ + "\u0000\u040c\u040d\u0003\u007f8\u0000\u040d\u040e\u0001\u0000\u0000\u0000"+ + "\u040e\u040f\u0006i\u0017\u0000\u040f\u00e2\u0001\u0000\u0000\u0000\u0410"+ + 
"\u0411\u0004j\u0004\u0000\u0411\u0412\u0003\u00a3J\u0000\u0412\u0413\u0001"+ + "\u0000\u0000\u0000\u0413\u0414\u0006j\u0018\u0000\u0414\u00e4\u0001\u0000"+ + "\u0000\u0000\u0415\u041a\u0003A\u0019\u0000\u0416\u041a\u0003?\u0018\u0000"+ + "\u0417\u041a\u0003O \u0000\u0418\u041a\u0003\u0099E\u0000\u0419\u0415"+ + "\u0001\u0000\u0000\u0000\u0419\u0416\u0001\u0000\u0000\u0000\u0419\u0417"+ + "\u0001\u0000\u0000\u0000\u0419\u0418\u0001\u0000\u0000\u0000\u041a\u00e6"+ + "\u0001\u0000\u0000\u0000\u041b\u041e\u0003A\u0019\u0000\u041c\u041e\u0003"+ + "\u0099E\u0000\u041d\u041b\u0001\u0000\u0000\u0000\u041d\u041c\u0001\u0000"+ + "\u0000\u0000\u041e\u0422\u0001\u0000\u0000\u0000\u041f\u0421\u0003\u00e5"+ + "k\u0000\u0420\u041f\u0001\u0000\u0000\u0000\u0421\u0424\u0001\u0000\u0000"+ + "\u0000\u0422\u0420\u0001\u0000\u0000\u0000\u0422\u0423\u0001\u0000\u0000"+ + "\u0000\u0423\u042f\u0001\u0000\u0000\u0000\u0424\u0422\u0001\u0000\u0000"+ + "\u0000\u0425\u0428\u0003O \u0000\u0426\u0428\u0003I\u001d\u0000\u0427"+ + "\u0425\u0001\u0000\u0000\u0000\u0427\u0426\u0001\u0000\u0000\u0000\u0428"+ + "\u042a\u0001\u0000\u0000\u0000\u0429\u042b\u0003\u00e5k\u0000\u042a\u0429"+ + "\u0001\u0000\u0000\u0000\u042b\u042c\u0001\u0000\u0000\u0000\u042c\u042a"+ + "\u0001\u0000\u0000\u0000\u042c\u042d\u0001\u0000\u0000\u0000\u042d\u042f"+ + "\u0001\u0000\u0000\u0000\u042e\u041d\u0001\u0000\u0000\u0000\u042e\u0427"+ + "\u0001\u0000\u0000\u0000\u042f\u00e8\u0001\u0000\u0000\u0000\u0430\u0433"+ + "\u0003\u00e7l\u0000\u0431\u0433\u0003\u00abN\u0000\u0432\u0430\u0001\u0000"+ + "\u0000\u0000\u0432\u0431\u0001\u0000\u0000\u0000\u0433\u0434\u0001\u0000"+ + "\u0000\u0000\u0434\u0432\u0001\u0000\u0000\u0000\u0434\u0435\u0001\u0000"+ + "\u0000\u0000\u0435\u00ea\u0001\u0000\u0000\u0000\u0436\u0437\u00037\u0014"+ + "\u0000\u0437\u0438\u0001\u0000\u0000\u0000\u0438\u0439\u0006n\n\u0000"+ + "\u0439\u00ec\u0001\u0000\u0000\u0000\u043a\u043b\u00039\u0015\u0000\u043b"+ + 
"\u043c\u0001\u0000\u0000\u0000\u043c\u043d\u0006o\n\u0000\u043d\u00ee"+ + "\u0001\u0000\u0000\u0000\u043e\u043f\u0003;\u0016\u0000\u043f\u0440\u0001"+ + "\u0000\u0000\u0000\u0440\u0441\u0006p\n\u0000\u0441\u00f0\u0001\u0000"+ + "\u0000\u0000\u0442\u0443\u0003=\u0017\u0000\u0443\u0444\u0001\u0000\u0000"+ + "\u0000\u0444\u0445\u0006q\u000f\u0000\u0445\u0446\u0006q\u000b\u0000\u0446"+ + "\u00f2\u0001\u0000\u0000\u0000\u0447\u0448\u0003_(\u0000\u0448\u0449\u0001"+ + "\u0000\u0000\u0000\u0449\u044a\u0006r\u0013\u0000\u044a\u00f4\u0001\u0000"+ + "\u0000\u0000\u044b\u044c\u0003c*\u0000\u044c\u044d\u0001\u0000\u0000\u0000"+ + "\u044d\u044e\u0006s\u0012\u0000\u044e\u00f6\u0001\u0000\u0000\u0000\u044f"+ + "\u0450\u0003g,\u0000\u0450\u0451\u0001\u0000\u0000\u0000\u0451\u0452\u0006"+ + "t\u0016\u0000\u0452\u00f8\u0001\u0000\u0000\u0000\u0453\u0454\u0004u\u0005"+ + "\u0000\u0454\u0455\u0003\u007f8\u0000\u0455\u0456\u0001\u0000\u0000\u0000"+ + "\u0456\u0457\u0006u\u0017\u0000\u0457\u00fa\u0001\u0000\u0000\u0000\u0458"+ + "\u0459\u0004v\u0006\u0000\u0459\u045a\u0003\u00a3J\u0000\u045a\u045b\u0001"+ + "\u0000\u0000\u0000\u045b\u045c\u0006v\u0018\u0000\u045c\u00fc\u0001\u0000"+ + "\u0000\u0000\u045d\u045e\u0007\f\u0000\u0000\u045e\u045f\u0007\u0002\u0000"+ + "\u0000\u045f\u00fe\u0001\u0000\u0000\u0000\u0460\u0461\u0003\u00e9m\u0000"+ + "\u0461\u0462\u0001\u0000\u0000\u0000\u0462\u0463\u0006x\u0019\u0000\u0463"+ + "\u0100\u0001\u0000\u0000\u0000\u0464\u0465\u00037\u0014\u0000\u0465\u0466"+ + "\u0001\u0000\u0000\u0000\u0466\u0467\u0006y\n\u0000\u0467\u0102\u0001"+ + "\u0000\u0000\u0000\u0468\u0469\u00039\u0015\u0000\u0469\u046a\u0001\u0000"+ + "\u0000\u0000\u046a\u046b\u0006z\n\u0000\u046b\u0104\u0001\u0000\u0000"+ + "\u0000\u046c\u046d\u0003;\u0016\u0000\u046d\u046e\u0001\u0000\u0000\u0000"+ + "\u046e\u046f\u0006{\n\u0000\u046f\u0106\u0001\u0000\u0000\u0000\u0470"+ + "\u0471\u0003=\u0017\u0000\u0471\u0472\u0001\u0000\u0000\u0000\u0472\u0473"+ + 
"\u0006|\u000f\u0000\u0473\u0474\u0006|\u000b\u0000\u0474\u0108\u0001\u0000"+ + "\u0000\u0000\u0475\u0476\u0003\u00a5K\u0000\u0476\u0477\u0001\u0000\u0000"+ + "\u0000\u0477\u0478\u0006}\r\u0000\u0478\u0479\u0006}\u001a\u0000\u0479"+ + "\u010a\u0001\u0000\u0000\u0000\u047a\u047b\u0007\u0007\u0000\u0000\u047b"+ + "\u047c\u0007\t\u0000\u0000\u047c\u047d\u0001\u0000\u0000\u0000\u047d\u047e"+ + "\u0006~\u001b\u0000\u047e\u010c\u0001\u0000\u0000\u0000\u047f\u0480\u0007"+ + "\u0013\u0000\u0000\u0480\u0481\u0007\u0001\u0000\u0000\u0481\u0482\u0007"+ + "\u0005\u0000\u0000\u0482\u0483\u0007\n\u0000\u0000\u0483\u0484\u0001\u0000"+ + "\u0000\u0000\u0484\u0485\u0006\u007f\u001b\u0000\u0485\u010e\u0001\u0000"+ + "\u0000\u0000\u0486\u0487\b\"\u0000\u0000\u0487\u0110\u0001\u0000\u0000"+ + "\u0000\u0488\u048a\u0003\u010f\u0080\u0000\u0489\u0488\u0001\u0000\u0000"+ + "\u0000\u048a\u048b\u0001\u0000\u0000\u0000\u048b\u0489\u0001\u0000\u0000"+ + "\u0000\u048b\u048c\u0001\u0000\u0000\u0000\u048c\u048d\u0001\u0000\u0000"+ + "\u0000\u048d\u048e\u0003\u0151\u00a1\u0000\u048e\u0490\u0001\u0000\u0000"+ + "\u0000\u048f\u0489\u0001\u0000\u0000\u0000\u048f\u0490\u0001\u0000\u0000"+ + "\u0000\u0490\u0492\u0001\u0000\u0000\u0000\u0491\u0493\u0003\u010f\u0080"+ + "\u0000\u0492\u0491\u0001\u0000\u0000\u0000\u0493\u0494\u0001\u0000\u0000"+ + "\u0000\u0494\u0492\u0001\u0000\u0000\u0000\u0494\u0495\u0001\u0000\u0000"+ + "\u0000\u0495\u0112\u0001\u0000\u0000\u0000\u0496\u0497\u0003\u0111\u0081"+ + "\u0000\u0497\u0498\u0001\u0000\u0000\u0000\u0498\u0499\u0006\u0082\u001c"+ + "\u0000\u0499\u0114\u0001\u0000\u0000\u0000\u049a\u049b\u00037\u0014\u0000"+ + "\u049b\u049c\u0001\u0000\u0000\u0000\u049c\u049d\u0006\u0083\n\u0000\u049d"+ + "\u0116\u0001\u0000\u0000\u0000\u049e\u049f\u00039\u0015\u0000\u049f\u04a0"+ + "\u0001\u0000\u0000\u0000\u04a0\u04a1\u0006\u0084\n\u0000\u04a1\u0118\u0001"+ + "\u0000\u0000\u0000\u04a2\u04a3\u0003;\u0016\u0000\u04a3\u04a4\u0001\u0000"+ + 
"\u0000\u0000\u04a4\u04a5\u0006\u0085\n\u0000\u04a5\u011a\u0001\u0000\u0000"+ + "\u0000\u04a6\u04a7\u0003=\u0017\u0000\u04a7\u04a8\u0001\u0000\u0000\u0000"+ + "\u04a8\u04a9\u0006\u0086\u000f\u0000\u04a9\u04aa\u0006\u0086\u000b\u0000"+ + "\u04aa\u04ab\u0006\u0086\u000b\u0000\u04ab\u011c\u0001\u0000\u0000\u0000"+ + "\u04ac\u04ad\u0003_(\u0000\u04ad\u04ae\u0001\u0000\u0000\u0000\u04ae\u04af"+ + "\u0006\u0087\u0013\u0000\u04af\u011e\u0001\u0000\u0000\u0000\u04b0\u04b1"+ + "\u0003c*\u0000\u04b1\u04b2\u0001\u0000\u0000\u0000\u04b2\u04b3\u0006\u0088"+ + "\u0012\u0000\u04b3\u0120\u0001\u0000\u0000\u0000\u04b4\u04b5\u0003g,\u0000"+ + "\u04b5\u04b6\u0001\u0000\u0000\u0000\u04b6\u04b7\u0006\u0089\u0016\u0000"+ + "\u04b7\u0122\u0001\u0000\u0000\u0000\u04b8\u04b9\u0003\u010d\u007f\u0000"+ + "\u04b9\u04ba\u0001\u0000\u0000\u0000\u04ba\u04bb\u0006\u008a\u001d\u0000"+ + "\u04bb\u0124\u0001\u0000\u0000\u0000\u04bc\u04bd\u0003\u00e9m\u0000\u04bd"+ + "\u04be\u0001\u0000\u0000\u0000\u04be\u04bf\u0006\u008b\u0019\u0000\u04bf"+ + "\u0126\u0001\u0000\u0000\u0000\u04c0\u04c1\u0003\u00adO\u0000\u04c1\u04c2"+ + "\u0001\u0000\u0000\u0000\u04c2\u04c3\u0006\u008c\u001e\u0000\u04c3\u0128"+ + "\u0001\u0000\u0000\u0000\u04c4\u04c5\u0004\u008d\u0007\u0000\u04c5\u04c6"+ + "\u0003\u007f8\u0000\u04c6\u04c7\u0001\u0000\u0000\u0000\u04c7\u04c8\u0006"+ + "\u008d\u0017\u0000\u04c8\u012a\u0001\u0000\u0000\u0000\u04c9\u04ca\u0004"+ + "\u008e\b\u0000\u04ca\u04cb\u0003\u00a3J\u0000\u04cb\u04cc\u0001\u0000"+ + "\u0000\u0000\u04cc\u04cd\u0006\u008e\u0018\u0000\u04cd\u012c\u0001\u0000"+ + "\u0000\u0000\u04ce\u04cf\u00037\u0014\u0000\u04cf\u04d0\u0001\u0000\u0000"+ + "\u0000\u04d0\u04d1\u0006\u008f\n\u0000\u04d1\u012e\u0001\u0000\u0000\u0000"+ + "\u04d2\u04d3\u00039\u0015\u0000\u04d3\u04d4\u0001\u0000\u0000\u0000\u04d4"+ + "\u04d5\u0006\u0090\n\u0000\u04d5\u0130\u0001\u0000\u0000\u0000\u04d6\u04d7"+ + "\u0003;\u0016\u0000\u04d7\u04d8\u0001\u0000\u0000\u0000\u04d8\u04d9\u0006"+ + 
"\u0091\n\u0000\u04d9\u0132\u0001\u0000\u0000\u0000\u04da\u04db\u0003="+ + "\u0017\u0000\u04db\u04dc\u0001\u0000\u0000\u0000\u04dc\u04dd\u0006\u0092"+ + "\u000f\u0000\u04dd\u04de\u0006\u0092\u000b\u0000\u04de\u0134\u0001\u0000"+ + "\u0000\u0000\u04df\u04e0\u0003g,\u0000\u04e0\u04e1\u0001\u0000\u0000\u0000"+ + "\u04e1\u04e2\u0006\u0093\u0016\u0000\u04e2\u0136\u0001\u0000\u0000\u0000"+ + "\u04e3\u04e4\u0004\u0094\t\u0000\u04e4\u04e5\u0003\u007f8\u0000\u04e5"+ + "\u04e6\u0001\u0000\u0000\u0000\u04e6\u04e7\u0006\u0094\u0017\u0000\u04e7"+ + "\u0138\u0001\u0000\u0000\u0000\u04e8\u04e9\u0004\u0095\n\u0000\u04e9\u04ea"+ + "\u0003\u00a3J\u0000\u04ea\u04eb\u0001\u0000\u0000\u0000\u04eb\u04ec\u0006"+ + "\u0095\u0018\u0000\u04ec\u013a\u0001\u0000\u0000\u0000\u04ed\u04ee\u0003"+ + "\u00adO\u0000\u04ee\u04ef\u0001\u0000\u0000\u0000\u04ef\u04f0\u0006\u0096"+ + "\u001e\u0000\u04f0\u013c\u0001\u0000\u0000\u0000\u04f1\u04f2\u0003\u00a9"+ + "M\u0000\u04f2\u04f3\u0001\u0000\u0000\u0000\u04f3\u04f4\u0006\u0097\u001f"+ + "\u0000\u04f4\u013e\u0001\u0000\u0000\u0000\u04f5\u04f6\u00037\u0014\u0000"+ + "\u04f6\u04f7\u0001\u0000\u0000\u0000\u04f7\u04f8\u0006\u0098\n\u0000\u04f8"+ + "\u0140\u0001\u0000\u0000\u0000\u04f9\u04fa\u00039\u0015\u0000\u04fa\u04fb"+ + "\u0001\u0000\u0000\u0000\u04fb\u04fc\u0006\u0099\n\u0000\u04fc\u0142\u0001"+ + "\u0000\u0000\u0000\u04fd\u04fe\u0003;\u0016\u0000\u04fe\u04ff\u0001\u0000"+ + "\u0000\u0000\u04ff\u0500\u0006\u009a\n\u0000\u0500\u0144\u0001\u0000\u0000"+ + "\u0000\u0501\u0502\u0003=\u0017\u0000\u0502\u0503\u0001\u0000\u0000\u0000"+ + "\u0503\u0504\u0006\u009b\u000f\u0000\u0504\u0505\u0006\u009b\u000b\u0000"+ + "\u0505\u0146\u0001\u0000\u0000\u0000\u0506\u0507\u0007\u0001\u0000\u0000"+ + "\u0507\u0508\u0007\t\u0000\u0000\u0508\u0509\u0007\u000f\u0000\u0000\u0509"+ + "\u050a\u0007\u0007\u0000\u0000\u050a\u0148\u0001\u0000\u0000\u0000\u050b"+ + "\u050c\u00037\u0014\u0000\u050c\u050d\u0001\u0000\u0000\u0000\u050d\u050e"+ + 
"\u0006\u009d\n\u0000\u050e\u014a\u0001\u0000\u0000\u0000\u050f\u0510\u0003"+ + "9\u0015\u0000\u0510\u0511\u0001\u0000\u0000\u0000\u0511\u0512\u0006\u009e"+ + "\n\u0000\u0512\u014c\u0001\u0000\u0000\u0000\u0513\u0514\u0003;\u0016"+ + "\u0000\u0514\u0515\u0001\u0000\u0000\u0000\u0515\u0516\u0006\u009f\n\u0000"+ + "\u0516\u014e\u0001\u0000\u0000\u0000\u0517\u0518\u0003\u00a7L\u0000\u0518"+ + "\u0519\u0001\u0000\u0000\u0000\u0519\u051a\u0006\u00a0\u0010\u0000\u051a"+ + "\u051b\u0006\u00a0\u000b\u0000\u051b\u0150\u0001\u0000\u0000\u0000\u051c"+ + "\u051d\u0005:\u0000\u0000\u051d\u0152\u0001\u0000\u0000\u0000\u051e\u0524"+ + "\u0003I\u001d\u0000\u051f\u0524\u0003?\u0018\u0000\u0520\u0524\u0003g"+ + ",\u0000\u0521\u0524\u0003A\u0019\u0000\u0522\u0524\u0003O \u0000\u0523"+ + "\u051e\u0001\u0000\u0000\u0000\u0523\u051f\u0001\u0000\u0000\u0000\u0523"+ + "\u0520\u0001\u0000\u0000\u0000\u0523\u0521\u0001\u0000\u0000\u0000\u0523"+ + "\u0522\u0001\u0000\u0000\u0000\u0524\u0525\u0001\u0000\u0000\u0000\u0525"+ + "\u0523\u0001\u0000\u0000\u0000\u0525\u0526\u0001\u0000\u0000\u0000\u0526"+ + "\u0154\u0001\u0000\u0000\u0000\u0527\u0528\u00037\u0014\u0000\u0528\u0529"+ + "\u0001\u0000\u0000\u0000\u0529\u052a\u0006\u00a3\n\u0000\u052a\u0156\u0001"+ + "\u0000\u0000\u0000\u052b\u052c\u00039\u0015\u0000\u052c\u052d\u0001\u0000"+ + "\u0000\u0000\u052d\u052e\u0006\u00a4\n\u0000\u052e\u0158\u0001\u0000\u0000"+ + "\u0000\u052f\u0530\u0003;\u0016\u0000\u0530\u0531\u0001\u0000\u0000\u0000"+ + "\u0531\u0532\u0006\u00a5\n\u0000\u0532\u015a\u0001\u0000\u0000\u0000\u0533"+ + "\u0534\u0003=\u0017\u0000\u0534\u0535\u0001\u0000\u0000\u0000\u0535\u0536"+ + "\u0006\u00a6\u000f\u0000\u0536\u0537\u0006\u00a6\u000b\u0000\u0537\u015c"+ + "\u0001\u0000\u0000\u0000\u0538\u0539\u0003\u0151\u00a1\u0000\u0539\u053a"+ + "\u0001\u0000\u0000\u0000\u053a\u053b\u0006\u00a7\u0011\u0000\u053b\u015e"+ + "\u0001\u0000\u0000\u0000\u053c\u053d\u0003c*\u0000\u053d\u053e\u0001\u0000"+ + 
"\u0000\u0000\u053e\u053f\u0006\u00a8\u0012\u0000\u053f\u0160\u0001\u0000"+ + "\u0000\u0000\u0540\u0541\u0003g,\u0000\u0541\u0542\u0001\u0000\u0000\u0000"+ + "\u0542\u0543\u0006\u00a9\u0016\u0000\u0543\u0162\u0001\u0000\u0000\u0000"+ + "\u0544\u0545\u0003\u010b~\u0000\u0545\u0546\u0001\u0000\u0000\u0000\u0546"+ + "\u0547\u0006\u00aa \u0000\u0547\u0548\u0006\u00aa!\u0000\u0548\u0164\u0001"+ + "\u0000\u0000\u0000\u0549\u054a\u0003\u00cf`\u0000\u054a\u054b\u0001\u0000"+ + "\u0000\u0000\u054b\u054c\u0006\u00ab\u0014\u0000\u054c\u0166\u0001\u0000"+ + "\u0000\u0000\u054d\u054e\u0003S\"\u0000\u054e\u054f\u0001\u0000\u0000"+ + "\u0000\u054f\u0550\u0006\u00ac\u0015\u0000\u0550\u0168\u0001\u0000\u0000"+ + "\u0000\u0551\u0552\u00037\u0014\u0000\u0552\u0553\u0001\u0000\u0000\u0000"+ + "\u0553\u0554\u0006\u00ad\n\u0000\u0554\u016a\u0001\u0000\u0000\u0000\u0555"+ + "\u0556\u00039\u0015\u0000\u0556\u0557\u0001\u0000\u0000\u0000\u0557\u0558"+ + "\u0006\u00ae\n\u0000\u0558\u016c\u0001\u0000\u0000\u0000\u0559\u055a\u0003"+ + ";\u0016\u0000\u055a\u055b\u0001\u0000\u0000\u0000\u055b\u055c\u0006\u00af"+ + "\n\u0000\u055c\u016e\u0001\u0000\u0000\u0000\u055d\u055e\u0003=\u0017"+ + "\u0000\u055e\u055f\u0001\u0000\u0000\u0000\u055f\u0560\u0006\u00b0\u000f"+ + "\u0000\u0560\u0561\u0006\u00b0\u000b\u0000\u0561\u0562\u0006\u00b0\u000b"+ + "\u0000\u0562\u0170\u0001\u0000\u0000\u0000\u0563\u0564\u0003c*\u0000\u0564"+ + "\u0565\u0001\u0000\u0000\u0000\u0565\u0566\u0006\u00b1\u0012\u0000\u0566"+ + "\u0172\u0001\u0000\u0000\u0000\u0567\u0568\u0003g,\u0000\u0568\u0569\u0001"+ + "\u0000\u0000\u0000\u0569\u056a\u0006\u00b2\u0016\u0000\u056a\u0174\u0001"+ + "\u0000\u0000\u0000\u056b\u056c\u0003\u00e9m\u0000\u056c\u056d\u0001\u0000"+ + "\u0000\u0000\u056d\u056e\u0006\u00b3\u0019\u0000\u056e\u0176\u0001\u0000"+ + "\u0000\u0000\u056f\u0570\u00037\u0014\u0000\u0570\u0571\u0001\u0000\u0000"+ + "\u0000\u0571\u0572\u0006\u00b4\n\u0000\u0572\u0178\u0001\u0000\u0000\u0000"+ + 
"\u0573\u0574\u00039\u0015\u0000\u0574\u0575\u0001\u0000\u0000\u0000\u0575"+ + "\u0576\u0006\u00b5\n\u0000\u0576\u017a\u0001\u0000\u0000\u0000\u0577\u0578"+ + "\u0003;\u0016\u0000\u0578\u0579\u0001\u0000\u0000\u0000\u0579\u057a\u0006"+ + "\u00b6\n\u0000\u057a\u017c\u0001\u0000\u0000\u0000\u057b\u057c\u0003="+ + "\u0017\u0000\u057c\u057d\u0001\u0000\u0000\u0000\u057d\u057e\u0006\u00b7"+ + "\u000f\u0000\u057e\u057f\u0006\u00b7\u000b\u0000\u057f\u017e\u0001\u0000"+ + "\u0000\u0000\u0580\u0581\u0003\u00cf`\u0000\u0581\u0582\u0001\u0000\u0000"+ + "\u0000\u0582\u0583\u0006\u00b8\u0014\u0000\u0583\u0584\u0006\u00b8\u000b"+ + "\u0000\u0584\u0585\u0006\u00b8\"\u0000\u0585\u0180\u0001\u0000\u0000\u0000"+ + "\u0586\u0587\u0003S\"\u0000\u0587\u0588\u0001\u0000\u0000\u0000\u0588"+ + "\u0589\u0006\u00b9\u0015\u0000\u0589\u058a\u0006\u00b9\u000b\u0000\u058a"+ + "\u058b\u0006\u00b9\"\u0000\u058b\u0182\u0001\u0000\u0000\u0000\u058c\u058d"+ + "\u00037\u0014\u0000\u058d\u058e\u0001\u0000\u0000\u0000\u058e\u058f\u0006"+ + "\u00ba\n\u0000\u058f\u0184\u0001\u0000\u0000\u0000\u0590\u0591\u00039"+ + "\u0015\u0000\u0591\u0592\u0001\u0000\u0000\u0000\u0592\u0593\u0006\u00bb"+ + "\n\u0000\u0593\u0186\u0001\u0000\u0000\u0000\u0594\u0595\u0003;\u0016"+ + "\u0000\u0595\u0596\u0001\u0000\u0000\u0000\u0596\u0597\u0006\u00bc\n\u0000"+ + "\u0597\u0188\u0001\u0000\u0000\u0000\u0598\u0599\u0003\u0151\u00a1\u0000"+ + "\u0599\u059a\u0001\u0000\u0000\u0000\u059a\u059b\u0006\u00bd\u0011\u0000"+ + "\u059b\u059c\u0006\u00bd\u000b\u0000\u059c\u059d\u0006\u00bd\t\u0000\u059d"+ + "\u018a\u0001\u0000\u0000\u0000\u059e\u059f\u0003c*\u0000\u059f\u05a0\u0001"+ + "\u0000\u0000\u0000\u05a0\u05a1\u0006\u00be\u0012\u0000\u05a1\u05a2\u0006"+ + "\u00be\u000b\u0000\u05a2\u05a3\u0006\u00be\t\u0000\u05a3\u018c\u0001\u0000"+ + "\u0000\u0000\u05a4\u05a5\u00037\u0014\u0000\u05a5\u05a6\u0001\u0000\u0000"+ + "\u0000\u05a6\u05a7\u0006\u00bf\n\u0000\u05a7\u018e\u0001\u0000\u0000\u0000"+ + 
"\u05a8\u05a9\u00039\u0015\u0000\u05a9\u05aa\u0001\u0000\u0000\u0000\u05aa"+ + "\u05ab\u0006\u00c0\n\u0000\u05ab\u0190\u0001\u0000\u0000\u0000\u05ac\u05ad"+ + "\u0003;\u0016\u0000\u05ad\u05ae\u0001\u0000\u0000\u0000\u05ae\u05af\u0006"+ + "\u00c1\n\u0000\u05af\u0192\u0001\u0000\u0000\u0000\u05b0\u05b1\u0003\u00ad"+ + "O\u0000\u05b1\u05b2\u0001\u0000\u0000\u0000\u05b2\u05b3\u0006\u00c2\u000b"+ + "\u0000\u05b3\u05b4\u0006\u00c2\u0000\u0000\u05b4\u05b5\u0006\u00c2\u001e"+ + "\u0000\u05b5\u0194\u0001\u0000\u0000\u0000\u05b6\u05b7\u0003\u00a9M\u0000"+ + "\u05b7\u05b8\u0001\u0000\u0000\u0000\u05b8\u05b9\u0006\u00c3\u000b\u0000"+ + "\u05b9\u05ba\u0006\u00c3\u0000\u0000\u05ba\u05bb\u0006\u00c3\u001f\u0000"+ + "\u05bb\u0196\u0001\u0000\u0000\u0000\u05bc\u05bd\u0003Y%\u0000\u05bd\u05be"+ + "\u0001\u0000\u0000\u0000\u05be\u05bf\u0006\u00c4\u000b\u0000\u05bf\u05c0"+ + "\u0006\u00c4\u0000\u0000\u05c0\u05c1\u0006\u00c4#\u0000\u05c1\u0198\u0001"+ + "\u0000\u0000\u0000\u05c2\u05c3\u0003=\u0017\u0000\u05c3\u05c4\u0001\u0000"+ + "\u0000\u0000\u05c4\u05c5\u0006\u00c5\u000f\u0000\u05c5\u05c6\u0006\u00c5"+ + "\u000b\u0000\u05c6\u019a\u0001\u0000\u0000\u0000A\u0000\u0001\u0002\u0003"+ + "\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u0243\u024d\u0251\u0254"+ + "\u025d\u025f\u026a\u027d\u0282\u028b\u0292\u0297\u0299\u02a4\u02ac\u02af"+ + "\u02b1\u02b6\u02bb\u02c1\u02c8\u02cd\u02d3\u02d6\u02de\u02e2\u0366\u036b"+ + "\u0372\u0374\u0384\u0389\u038e\u0390\u0396\u03e3\u03e8\u0419\u041d\u0422"+ + "\u0427\u042c\u042e\u0432\u0434\u048b\u048f\u0494\u0523\u0525$\u0005\u0001"+ + "\u0000\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0002\u0000\u0005\u0003"+ + "\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0005\u000b\u0000"+ + "\u0005\r\u0000\u0000\u0001\u0000\u0004\u0000\u0000\u0007\u0010\u0000\u0007"+ + "A\u0000\u0005\u0000\u0000\u0007\u0018\u0000\u0007B\u0000\u0007h\u0000"+ + "\u0007!\u0000\u0007\u001f\u0000\u0007L\u0000\u0007\u0019\u0000\u0007#"+ + 
"\u0000\u0007/\u0000\u0007@\u0000\u0007P\u0000\u0005\n\u0000\u0005\u0007"+ + "\u0000\u0007Z\u0000\u0007Y\u0000\u0007D\u0000\u0007C\u0000\u0007X\u0000"+ + "\u0005\f\u0000\u0005\u000e\u0000\u0007\u001c\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index 22d8ec32f3d92..b52d842e79fb2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -310,4 +310,4 @@ inlinestatsCommand atn: -[4, 1, 120, 603, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 134, 8, 1, 10, 1, 12, 1, 137, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 145, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 163, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 175, 8, 5, 1, 
5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 182, 8, 5, 10, 5, 12, 5, 185, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 192, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 198, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 206, 8, 5, 10, 5, 12, 5, 209, 9, 5, 1, 6, 1, 6, 3, 6, 213, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 220, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 225, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 236, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 242, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 250, 8, 9, 10, 9, 12, 9, 253, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 263, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 268, 8, 10, 10, 10, 12, 10, 271, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 279, 8, 11, 10, 11, 12, 11, 282, 9, 11, 3, 11, 284, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 3, 12, 290, 8, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 5, 15, 300, 8, 15, 10, 15, 12, 15, 303, 9, 15, 1, 16, 1, 16, 1, 16, 3, 16, 308, 8, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 316, 8, 17, 10, 17, 12, 17, 319, 9, 17, 1, 17, 3, 17, 322, 8, 17, 1, 18, 1, 18, 1, 18, 3, 18, 327, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 337, 8, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 343, 8, 22, 10, 22, 12, 22, 346, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 356, 8, 24, 10, 24, 12, 24, 359, 9, 24, 1, 24, 3, 24, 362, 8, 24, 1, 24, 1, 24, 3, 24, 366, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 373, 8, 26, 1, 26, 1, 26, 3, 26, 377, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 382, 8, 27, 10, 27, 12, 27, 385, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 390, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 395, 8, 29, 10, 29, 12, 29, 398, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 403, 8, 30, 10, 30, 12, 30, 406, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 411, 8, 31, 10, 31, 12, 31, 414, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 3, 33, 420, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 435, 8, 34, 
10, 34, 12, 34, 438, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 446, 8, 34, 10, 34, 12, 34, 449, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 457, 8, 34, 10, 34, 12, 34, 460, 9, 34, 1, 34, 1, 34, 3, 34, 464, 8, 34, 1, 35, 1, 35, 3, 35, 468, 8, 35, 1, 36, 1, 36, 3, 36, 472, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 481, 8, 38, 10, 38, 12, 38, 484, 9, 38, 1, 39, 1, 39, 3, 39, 488, 8, 39, 1, 39, 1, 39, 3, 39, 492, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 504, 8, 42, 10, 42, 12, 42, 507, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 517, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 529, 8, 47, 10, 47, 12, 47, 532, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 542, 8, 50, 1, 51, 3, 51, 545, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 550, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 572, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 578, 8, 58, 10, 58, 12, 58, 581, 9, 58, 3, 58, 583, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 588, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 601, 8, 61, 1, 61, 0, 4, 2, 10, 18, 20, 62, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 0, 8, 1, 0, 58, 59, 1, 0, 60, 62, 2, 0, 25, 25, 76, 76, 1, 0, 67, 68, 2, 0, 30, 30, 34, 34, 2, 0, 37, 37, 40, 40, 2, 0, 36, 36, 50, 50, 2, 0, 51, 51, 53, 57, 629, 0, 124, 1, 0, 0, 0, 2, 127, 1, 0, 0, 0, 4, 144, 1, 0, 0, 0, 6, 162, 1, 0, 0, 0, 8, 164, 1, 0, 0, 0, 10, 197, 1, 0, 0, 0, 12, 224, 1, 0, 0, 0, 14, 226, 1, 0, 0, 0, 16, 235, 1, 0, 0, 0, 18, 241, 1, 0, 0, 0, 20, 262, 1, 0, 0, 0, 22, 272, 1, 0, 0, 
0, 24, 289, 1, 0, 0, 0, 26, 291, 1, 0, 0, 0, 28, 293, 1, 0, 0, 0, 30, 296, 1, 0, 0, 0, 32, 307, 1, 0, 0, 0, 34, 311, 1, 0, 0, 0, 36, 326, 1, 0, 0, 0, 38, 330, 1, 0, 0, 0, 40, 332, 1, 0, 0, 0, 42, 336, 1, 0, 0, 0, 44, 338, 1, 0, 0, 0, 46, 347, 1, 0, 0, 0, 48, 351, 1, 0, 0, 0, 50, 367, 1, 0, 0, 0, 52, 370, 1, 0, 0, 0, 54, 378, 1, 0, 0, 0, 56, 386, 1, 0, 0, 0, 58, 391, 1, 0, 0, 0, 60, 399, 1, 0, 0, 0, 62, 407, 1, 0, 0, 0, 64, 415, 1, 0, 0, 0, 66, 419, 1, 0, 0, 0, 68, 463, 1, 0, 0, 0, 70, 467, 1, 0, 0, 0, 72, 471, 1, 0, 0, 0, 74, 473, 1, 0, 0, 0, 76, 476, 1, 0, 0, 0, 78, 485, 1, 0, 0, 0, 80, 493, 1, 0, 0, 0, 82, 496, 1, 0, 0, 0, 84, 499, 1, 0, 0, 0, 86, 508, 1, 0, 0, 0, 88, 512, 1, 0, 0, 0, 90, 518, 1, 0, 0, 0, 92, 522, 1, 0, 0, 0, 94, 525, 1, 0, 0, 0, 96, 533, 1, 0, 0, 0, 98, 537, 1, 0, 0, 0, 100, 541, 1, 0, 0, 0, 102, 544, 1, 0, 0, 0, 104, 549, 1, 0, 0, 0, 106, 553, 1, 0, 0, 0, 108, 555, 1, 0, 0, 0, 110, 557, 1, 0, 0, 0, 112, 560, 1, 0, 0, 0, 114, 564, 1, 0, 0, 0, 116, 567, 1, 0, 0, 0, 118, 587, 1, 0, 0, 0, 120, 591, 1, 0, 0, 0, 122, 596, 1, 0, 0, 0, 124, 125, 3, 2, 1, 0, 125, 126, 5, 0, 0, 1, 126, 1, 1, 0, 0, 0, 127, 128, 6, 1, -1, 0, 128, 129, 3, 4, 2, 0, 129, 135, 1, 0, 0, 0, 130, 131, 10, 1, 0, 0, 131, 132, 5, 24, 0, 0, 132, 134, 3, 6, 3, 0, 133, 130, 1, 0, 0, 0, 134, 137, 1, 0, 0, 0, 135, 133, 1, 0, 0, 0, 135, 136, 1, 0, 0, 0, 136, 3, 1, 0, 0, 0, 137, 135, 1, 0, 0, 0, 138, 145, 3, 110, 55, 0, 139, 145, 3, 34, 17, 0, 140, 145, 3, 28, 14, 0, 141, 145, 3, 114, 57, 0, 142, 143, 4, 2, 1, 0, 143, 145, 3, 48, 24, 0, 144, 138, 1, 0, 0, 0, 144, 139, 1, 0, 0, 0, 144, 140, 1, 0, 0, 0, 144, 141, 1, 0, 0, 0, 144, 142, 1, 0, 0, 0, 145, 5, 1, 0, 0, 0, 146, 163, 3, 50, 25, 0, 147, 163, 3, 8, 4, 0, 148, 163, 3, 80, 40, 0, 149, 163, 3, 74, 37, 0, 150, 163, 3, 52, 26, 0, 151, 163, 3, 76, 38, 0, 152, 163, 3, 82, 41, 0, 153, 163, 3, 84, 42, 0, 154, 163, 3, 88, 44, 0, 155, 163, 3, 90, 45, 0, 156, 163, 3, 116, 58, 0, 157, 163, 3, 92, 46, 0, 158, 159, 4, 3, 2, 0, 159, 163, 3, 122, 61, 
0, 160, 161, 4, 3, 3, 0, 161, 163, 3, 120, 60, 0, 162, 146, 1, 0, 0, 0, 162, 147, 1, 0, 0, 0, 162, 148, 1, 0, 0, 0, 162, 149, 1, 0, 0, 0, 162, 150, 1, 0, 0, 0, 162, 151, 1, 0, 0, 0, 162, 152, 1, 0, 0, 0, 162, 153, 1, 0, 0, 0, 162, 154, 1, 0, 0, 0, 162, 155, 1, 0, 0, 0, 162, 156, 1, 0, 0, 0, 162, 157, 1, 0, 0, 0, 162, 158, 1, 0, 0, 0, 162, 160, 1, 0, 0, 0, 163, 7, 1, 0, 0, 0, 164, 165, 5, 16, 0, 0, 165, 166, 3, 10, 5, 0, 166, 9, 1, 0, 0, 0, 167, 168, 6, 5, -1, 0, 168, 169, 5, 43, 0, 0, 169, 198, 3, 10, 5, 8, 170, 198, 3, 16, 8, 0, 171, 198, 3, 12, 6, 0, 172, 174, 3, 16, 8, 0, 173, 175, 5, 43, 0, 0, 174, 173, 1, 0, 0, 0, 174, 175, 1, 0, 0, 0, 175, 176, 1, 0, 0, 0, 176, 177, 5, 38, 0, 0, 177, 178, 5, 42, 0, 0, 178, 183, 3, 16, 8, 0, 179, 180, 5, 33, 0, 0, 180, 182, 3, 16, 8, 0, 181, 179, 1, 0, 0, 0, 182, 185, 1, 0, 0, 0, 183, 181, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 186, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 186, 187, 5, 49, 0, 0, 187, 198, 1, 0, 0, 0, 188, 189, 3, 16, 8, 0, 189, 191, 5, 39, 0, 0, 190, 192, 5, 43, 0, 0, 191, 190, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 193, 1, 0, 0, 0, 193, 194, 5, 44, 0, 0, 194, 198, 1, 0, 0, 0, 195, 196, 4, 5, 4, 0, 196, 198, 3, 14, 7, 0, 197, 167, 1, 0, 0, 0, 197, 170, 1, 0, 0, 0, 197, 171, 1, 0, 0, 0, 197, 172, 1, 0, 0, 0, 197, 188, 1, 0, 0, 0, 197, 195, 1, 0, 0, 0, 198, 207, 1, 0, 0, 0, 199, 200, 10, 5, 0, 0, 200, 201, 5, 29, 0, 0, 201, 206, 3, 10, 5, 6, 202, 203, 10, 4, 0, 0, 203, 204, 5, 46, 0, 0, 204, 206, 3, 10, 5, 5, 205, 199, 1, 0, 0, 0, 205, 202, 1, 0, 0, 0, 206, 209, 1, 0, 0, 0, 207, 205, 1, 0, 0, 0, 207, 208, 1, 0, 0, 0, 208, 11, 1, 0, 0, 0, 209, 207, 1, 0, 0, 0, 210, 212, 3, 16, 8, 0, 211, 213, 5, 43, 0, 0, 212, 211, 1, 0, 0, 0, 212, 213, 1, 0, 0, 0, 213, 214, 1, 0, 0, 0, 214, 215, 5, 41, 0, 0, 215, 216, 3, 106, 53, 0, 216, 225, 1, 0, 0, 0, 217, 219, 3, 16, 8, 0, 218, 220, 5, 43, 0, 0, 219, 218, 1, 0, 0, 0, 219, 220, 1, 0, 0, 0, 220, 221, 1, 0, 0, 0, 221, 222, 5, 48, 0, 0, 222, 223, 3, 106, 53, 0, 223, 225, 1, 0, 0, 0, 
224, 210, 1, 0, 0, 0, 224, 217, 1, 0, 0, 0, 225, 13, 1, 0, 0, 0, 226, 227, 3, 16, 8, 0, 227, 228, 5, 63, 0, 0, 228, 229, 3, 106, 53, 0, 229, 15, 1, 0, 0, 0, 230, 236, 3, 18, 9, 0, 231, 232, 3, 18, 9, 0, 232, 233, 3, 108, 54, 0, 233, 234, 3, 18, 9, 0, 234, 236, 1, 0, 0, 0, 235, 230, 1, 0, 0, 0, 235, 231, 1, 0, 0, 0, 236, 17, 1, 0, 0, 0, 237, 238, 6, 9, -1, 0, 238, 242, 3, 20, 10, 0, 239, 240, 7, 0, 0, 0, 240, 242, 3, 18, 9, 3, 241, 237, 1, 0, 0, 0, 241, 239, 1, 0, 0, 0, 242, 251, 1, 0, 0, 0, 243, 244, 10, 2, 0, 0, 244, 245, 7, 1, 0, 0, 245, 250, 3, 18, 9, 3, 246, 247, 10, 1, 0, 0, 247, 248, 7, 0, 0, 0, 248, 250, 3, 18, 9, 2, 249, 243, 1, 0, 0, 0, 249, 246, 1, 0, 0, 0, 250, 253, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 251, 252, 1, 0, 0, 0, 252, 19, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 254, 255, 6, 10, -1, 0, 255, 263, 3, 68, 34, 0, 256, 263, 3, 58, 29, 0, 257, 263, 3, 22, 11, 0, 258, 259, 5, 42, 0, 0, 259, 260, 3, 10, 5, 0, 260, 261, 5, 49, 0, 0, 261, 263, 1, 0, 0, 0, 262, 254, 1, 0, 0, 0, 262, 256, 1, 0, 0, 0, 262, 257, 1, 0, 0, 0, 262, 258, 1, 0, 0, 0, 263, 269, 1, 0, 0, 0, 264, 265, 10, 1, 0, 0, 265, 266, 5, 32, 0, 0, 266, 268, 3, 26, 13, 0, 267, 264, 1, 0, 0, 0, 268, 271, 1, 0, 0, 0, 269, 267, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 21, 1, 0, 0, 0, 271, 269, 1, 0, 0, 0, 272, 273, 3, 24, 12, 0, 273, 283, 5, 42, 0, 0, 274, 284, 5, 60, 0, 0, 275, 280, 3, 10, 5, 0, 276, 277, 5, 33, 0, 0, 277, 279, 3, 10, 5, 0, 278, 276, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 284, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 283, 274, 1, 0, 0, 0, 283, 275, 1, 0, 0, 0, 283, 284, 1, 0, 0, 0, 284, 285, 1, 0, 0, 0, 285, 286, 5, 49, 0, 0, 286, 23, 1, 0, 0, 0, 287, 290, 5, 63, 0, 0, 288, 290, 3, 72, 36, 0, 289, 287, 1, 0, 0, 0, 289, 288, 1, 0, 0, 0, 290, 25, 1, 0, 0, 0, 291, 292, 3, 64, 32, 0, 292, 27, 1, 0, 0, 0, 293, 294, 5, 12, 0, 0, 294, 295, 3, 30, 15, 0, 295, 29, 1, 0, 0, 0, 296, 301, 3, 32, 16, 0, 297, 298, 5, 33, 0, 0, 298, 300, 3, 32, 16, 0, 299, 297, 1, 0, 
0, 0, 300, 303, 1, 0, 0, 0, 301, 299, 1, 0, 0, 0, 301, 302, 1, 0, 0, 0, 302, 31, 1, 0, 0, 0, 303, 301, 1, 0, 0, 0, 304, 305, 3, 58, 29, 0, 305, 306, 5, 31, 0, 0, 306, 308, 1, 0, 0, 0, 307, 304, 1, 0, 0, 0, 307, 308, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 310, 3, 10, 5, 0, 310, 33, 1, 0, 0, 0, 311, 312, 5, 6, 0, 0, 312, 317, 3, 36, 18, 0, 313, 314, 5, 33, 0, 0, 314, 316, 3, 36, 18, 0, 315, 313, 1, 0, 0, 0, 316, 319, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 317, 318, 1, 0, 0, 0, 318, 321, 1, 0, 0, 0, 319, 317, 1, 0, 0, 0, 320, 322, 3, 42, 21, 0, 321, 320, 1, 0, 0, 0, 321, 322, 1, 0, 0, 0, 322, 35, 1, 0, 0, 0, 323, 324, 3, 38, 19, 0, 324, 325, 5, 104, 0, 0, 325, 327, 1, 0, 0, 0, 326, 323, 1, 0, 0, 0, 326, 327, 1, 0, 0, 0, 327, 328, 1, 0, 0, 0, 328, 329, 3, 40, 20, 0, 329, 37, 1, 0, 0, 0, 330, 331, 5, 76, 0, 0, 331, 39, 1, 0, 0, 0, 332, 333, 7, 2, 0, 0, 333, 41, 1, 0, 0, 0, 334, 337, 3, 44, 22, 0, 335, 337, 3, 46, 23, 0, 336, 334, 1, 0, 0, 0, 336, 335, 1, 0, 0, 0, 337, 43, 1, 0, 0, 0, 338, 339, 5, 75, 0, 0, 339, 344, 5, 76, 0, 0, 340, 341, 5, 33, 0, 0, 341, 343, 5, 76, 0, 0, 342, 340, 1, 0, 0, 0, 343, 346, 1, 0, 0, 0, 344, 342, 1, 0, 0, 0, 344, 345, 1, 0, 0, 0, 345, 45, 1, 0, 0, 0, 346, 344, 1, 0, 0, 0, 347, 348, 5, 65, 0, 0, 348, 349, 3, 44, 22, 0, 349, 350, 5, 66, 0, 0, 350, 47, 1, 0, 0, 0, 351, 352, 5, 19, 0, 0, 352, 357, 3, 36, 18, 0, 353, 354, 5, 33, 0, 0, 354, 356, 3, 36, 18, 0, 355, 353, 1, 0, 0, 0, 356, 359, 1, 0, 0, 0, 357, 355, 1, 0, 0, 0, 357, 358, 1, 0, 0, 0, 358, 361, 1, 0, 0, 0, 359, 357, 1, 0, 0, 0, 360, 362, 3, 54, 27, 0, 361, 360, 1, 0, 0, 0, 361, 362, 1, 0, 0, 0, 362, 365, 1, 0, 0, 0, 363, 364, 5, 28, 0, 0, 364, 366, 3, 30, 15, 0, 365, 363, 1, 0, 0, 0, 365, 366, 1, 0, 0, 0, 366, 49, 1, 0, 0, 0, 367, 368, 5, 4, 0, 0, 368, 369, 3, 30, 15, 0, 369, 51, 1, 0, 0, 0, 370, 372, 5, 15, 0, 0, 371, 373, 3, 54, 27, 0, 372, 371, 1, 0, 0, 0, 372, 373, 1, 0, 0, 0, 373, 376, 1, 0, 0, 0, 374, 375, 5, 28, 0, 0, 375, 377, 3, 30, 15, 0, 376, 374, 1, 0, 0, 0, 376, 377, 1, 0, 0, 
0, 377, 53, 1, 0, 0, 0, 378, 383, 3, 56, 28, 0, 379, 380, 5, 33, 0, 0, 380, 382, 3, 56, 28, 0, 381, 379, 1, 0, 0, 0, 382, 385, 1, 0, 0, 0, 383, 381, 1, 0, 0, 0, 383, 384, 1, 0, 0, 0, 384, 55, 1, 0, 0, 0, 385, 383, 1, 0, 0, 0, 386, 389, 3, 32, 16, 0, 387, 388, 5, 16, 0, 0, 388, 390, 3, 10, 5, 0, 389, 387, 1, 0, 0, 0, 389, 390, 1, 0, 0, 0, 390, 57, 1, 0, 0, 0, 391, 396, 3, 72, 36, 0, 392, 393, 5, 35, 0, 0, 393, 395, 3, 72, 36, 0, 394, 392, 1, 0, 0, 0, 395, 398, 1, 0, 0, 0, 396, 394, 1, 0, 0, 0, 396, 397, 1, 0, 0, 0, 397, 59, 1, 0, 0, 0, 398, 396, 1, 0, 0, 0, 399, 404, 3, 66, 33, 0, 400, 401, 5, 35, 0, 0, 401, 403, 3, 66, 33, 0, 402, 400, 1, 0, 0, 0, 403, 406, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 404, 405, 1, 0, 0, 0, 405, 61, 1, 0, 0, 0, 406, 404, 1, 0, 0, 0, 407, 412, 3, 60, 30, 0, 408, 409, 5, 33, 0, 0, 409, 411, 3, 60, 30, 0, 410, 408, 1, 0, 0, 0, 411, 414, 1, 0, 0, 0, 412, 410, 1, 0, 0, 0, 412, 413, 1, 0, 0, 0, 413, 63, 1, 0, 0, 0, 414, 412, 1, 0, 0, 0, 415, 416, 7, 3, 0, 0, 416, 65, 1, 0, 0, 0, 417, 420, 5, 80, 0, 0, 418, 420, 3, 70, 35, 0, 419, 417, 1, 0, 0, 0, 419, 418, 1, 0, 0, 0, 420, 67, 1, 0, 0, 0, 421, 464, 5, 44, 0, 0, 422, 423, 3, 104, 52, 0, 423, 424, 5, 67, 0, 0, 424, 464, 1, 0, 0, 0, 425, 464, 3, 102, 51, 0, 426, 464, 3, 104, 52, 0, 427, 464, 3, 98, 49, 0, 428, 464, 3, 70, 35, 0, 429, 464, 3, 106, 53, 0, 430, 431, 5, 65, 0, 0, 431, 436, 3, 100, 50, 0, 432, 433, 5, 33, 0, 0, 433, 435, 3, 100, 50, 0, 434, 432, 1, 0, 0, 0, 435, 438, 1, 0, 0, 0, 436, 434, 1, 0, 0, 0, 436, 437, 1, 0, 0, 0, 437, 439, 1, 0, 0, 0, 438, 436, 1, 0, 0, 0, 439, 440, 5, 66, 0, 0, 440, 464, 1, 0, 0, 0, 441, 442, 5, 65, 0, 0, 442, 447, 3, 98, 49, 0, 443, 444, 5, 33, 0, 0, 444, 446, 3, 98, 49, 0, 445, 443, 1, 0, 0, 0, 446, 449, 1, 0, 0, 0, 447, 445, 1, 0, 0, 0, 447, 448, 1, 0, 0, 0, 448, 450, 1, 0, 0, 0, 449, 447, 1, 0, 0, 0, 450, 451, 5, 66, 0, 0, 451, 464, 1, 0, 0, 0, 452, 453, 5, 65, 0, 0, 453, 458, 3, 106, 53, 0, 454, 455, 5, 33, 0, 0, 455, 457, 3, 106, 53, 0, 456, 454, 1, 0, 0, 0, 
457, 460, 1, 0, 0, 0, 458, 456, 1, 0, 0, 0, 458, 459, 1, 0, 0, 0, 459, 461, 1, 0, 0, 0, 460, 458, 1, 0, 0, 0, 461, 462, 5, 66, 0, 0, 462, 464, 1, 0, 0, 0, 463, 421, 1, 0, 0, 0, 463, 422, 1, 0, 0, 0, 463, 425, 1, 0, 0, 0, 463, 426, 1, 0, 0, 0, 463, 427, 1, 0, 0, 0, 463, 428, 1, 0, 0, 0, 463, 429, 1, 0, 0, 0, 463, 430, 1, 0, 0, 0, 463, 441, 1, 0, 0, 0, 463, 452, 1, 0, 0, 0, 464, 69, 1, 0, 0, 0, 465, 468, 5, 47, 0, 0, 466, 468, 5, 64, 0, 0, 467, 465, 1, 0, 0, 0, 467, 466, 1, 0, 0, 0, 468, 71, 1, 0, 0, 0, 469, 472, 3, 64, 32, 0, 470, 472, 3, 70, 35, 0, 471, 469, 1, 0, 0, 0, 471, 470, 1, 0, 0, 0, 472, 73, 1, 0, 0, 0, 473, 474, 5, 9, 0, 0, 474, 475, 5, 26, 0, 0, 475, 75, 1, 0, 0, 0, 476, 477, 5, 14, 0, 0, 477, 482, 3, 78, 39, 0, 478, 479, 5, 33, 0, 0, 479, 481, 3, 78, 39, 0, 480, 478, 1, 0, 0, 0, 481, 484, 1, 0, 0, 0, 482, 480, 1, 0, 0, 0, 482, 483, 1, 0, 0, 0, 483, 77, 1, 0, 0, 0, 484, 482, 1, 0, 0, 0, 485, 487, 3, 10, 5, 0, 486, 488, 7, 4, 0, 0, 487, 486, 1, 0, 0, 0, 487, 488, 1, 0, 0, 0, 488, 491, 1, 0, 0, 0, 489, 490, 5, 45, 0, 0, 490, 492, 7, 5, 0, 0, 491, 489, 1, 0, 0, 0, 491, 492, 1, 0, 0, 0, 492, 79, 1, 0, 0, 0, 493, 494, 5, 8, 0, 0, 494, 495, 3, 62, 31, 0, 495, 81, 1, 0, 0, 0, 496, 497, 5, 2, 0, 0, 497, 498, 3, 62, 31, 0, 498, 83, 1, 0, 0, 0, 499, 500, 5, 11, 0, 0, 500, 505, 3, 86, 43, 0, 501, 502, 5, 33, 0, 0, 502, 504, 3, 86, 43, 0, 503, 501, 1, 0, 0, 0, 504, 507, 1, 0, 0, 0, 505, 503, 1, 0, 0, 0, 505, 506, 1, 0, 0, 0, 506, 85, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 508, 509, 3, 60, 30, 0, 509, 510, 5, 84, 0, 0, 510, 511, 3, 60, 30, 0, 511, 87, 1, 0, 0, 0, 512, 513, 5, 1, 0, 0, 513, 514, 3, 20, 10, 0, 514, 516, 3, 106, 53, 0, 515, 517, 3, 94, 47, 0, 516, 515, 1, 0, 0, 0, 516, 517, 1, 0, 0, 0, 517, 89, 1, 0, 0, 0, 518, 519, 5, 7, 0, 0, 519, 520, 3, 20, 10, 0, 520, 521, 3, 106, 53, 0, 521, 91, 1, 0, 0, 0, 522, 523, 5, 10, 0, 0, 523, 524, 3, 58, 29, 0, 524, 93, 1, 0, 0, 0, 525, 530, 3, 96, 48, 0, 526, 527, 5, 33, 0, 0, 527, 529, 3, 96, 48, 0, 528, 526, 1, 0, 0, 0, 
529, 532, 1, 0, 0, 0, 530, 528, 1, 0, 0, 0, 530, 531, 1, 0, 0, 0, 531, 95, 1, 0, 0, 0, 532, 530, 1, 0, 0, 0, 533, 534, 3, 64, 32, 0, 534, 535, 5, 31, 0, 0, 535, 536, 3, 68, 34, 0, 536, 97, 1, 0, 0, 0, 537, 538, 7, 6, 0, 0, 538, 99, 1, 0, 0, 0, 539, 542, 3, 102, 51, 0, 540, 542, 3, 104, 52, 0, 541, 539, 1, 0, 0, 0, 541, 540, 1, 0, 0, 0, 542, 101, 1, 0, 0, 0, 543, 545, 7, 0, 0, 0, 544, 543, 1, 0, 0, 0, 544, 545, 1, 0, 0, 0, 545, 546, 1, 0, 0, 0, 546, 547, 5, 27, 0, 0, 547, 103, 1, 0, 0, 0, 548, 550, 7, 0, 0, 0, 549, 548, 1, 0, 0, 0, 549, 550, 1, 0, 0, 0, 550, 551, 1, 0, 0, 0, 551, 552, 5, 26, 0, 0, 552, 105, 1, 0, 0, 0, 553, 554, 5, 25, 0, 0, 554, 107, 1, 0, 0, 0, 555, 556, 7, 7, 0, 0, 556, 109, 1, 0, 0, 0, 557, 558, 5, 5, 0, 0, 558, 559, 3, 112, 56, 0, 559, 111, 1, 0, 0, 0, 560, 561, 5, 65, 0, 0, 561, 562, 3, 2, 1, 0, 562, 563, 5, 66, 0, 0, 563, 113, 1, 0, 0, 0, 564, 565, 5, 13, 0, 0, 565, 566, 5, 100, 0, 0, 566, 115, 1, 0, 0, 0, 567, 568, 5, 3, 0, 0, 568, 571, 5, 90, 0, 0, 569, 570, 5, 88, 0, 0, 570, 572, 3, 60, 30, 0, 571, 569, 1, 0, 0, 0, 571, 572, 1, 0, 0, 0, 572, 582, 1, 0, 0, 0, 573, 574, 5, 89, 0, 0, 574, 579, 3, 118, 59, 0, 575, 576, 5, 33, 0, 0, 576, 578, 3, 118, 59, 0, 577, 575, 1, 0, 0, 0, 578, 581, 1, 0, 0, 0, 579, 577, 1, 0, 0, 0, 579, 580, 1, 0, 0, 0, 580, 583, 1, 0, 0, 0, 581, 579, 1, 0, 0, 0, 582, 573, 1, 0, 0, 0, 582, 583, 1, 0, 0, 0, 583, 117, 1, 0, 0, 0, 584, 585, 3, 60, 30, 0, 585, 586, 5, 31, 0, 0, 586, 588, 1, 0, 0, 0, 587, 584, 1, 0, 0, 0, 587, 588, 1, 0, 0, 0, 588, 589, 1, 0, 0, 0, 589, 590, 3, 60, 30, 0, 590, 119, 1, 0, 0, 0, 591, 592, 5, 18, 0, 0, 592, 593, 3, 36, 18, 0, 593, 594, 5, 88, 0, 0, 594, 595, 3, 62, 31, 0, 595, 121, 1, 0, 0, 0, 596, 597, 5, 17, 0, 0, 597, 600, 3, 54, 27, 0, 598, 599, 5, 28, 0, 0, 599, 601, 3, 30, 15, 0, 600, 598, 1, 0, 0, 0, 600, 601, 1, 0, 0, 0, 601, 123, 1, 0, 0, 0, 59, 135, 144, 162, 174, 183, 191, 197, 205, 207, 212, 219, 224, 235, 241, 249, 251, 262, 269, 280, 283, 289, 301, 307, 317, 321, 326, 336, 344, 
357, 361, 365, 372, 376, 383, 389, 396, 404, 412, 419, 436, 447, 458, 463, 467, 471, 482, 487, 491, 505, 516, 530, 541, 544, 549, 571, 579, 582, 587, 600] \ No newline at end of file +[4, 1, 120, 605, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 134, 8, 1, 10, 1, 12, 1, 137, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 145, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 163, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 175, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 182, 8, 5, 10, 5, 12, 5, 185, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 192, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 198, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 206, 8, 5, 10, 5, 12, 5, 209, 9, 5, 1, 6, 1, 6, 3, 6, 213, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 220, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 225, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 236, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 242, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 250, 8, 9, 10, 9, 12, 9, 253, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 263, 8, 
10, 1, 10, 1, 10, 1, 10, 5, 10, 268, 8, 10, 10, 10, 12, 10, 271, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 279, 8, 11, 10, 11, 12, 11, 282, 9, 11, 3, 11, 284, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 3, 12, 290, 8, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 5, 15, 300, 8, 15, 10, 15, 12, 15, 303, 9, 15, 1, 16, 1, 16, 1, 16, 3, 16, 308, 8, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 316, 8, 17, 10, 17, 12, 17, 319, 9, 17, 1, 17, 3, 17, 322, 8, 17, 1, 18, 1, 18, 1, 18, 3, 18, 327, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 337, 8, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 343, 8, 22, 10, 22, 12, 22, 346, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 356, 8, 24, 10, 24, 12, 24, 359, 9, 24, 1, 24, 3, 24, 362, 8, 24, 1, 24, 1, 24, 3, 24, 366, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 373, 8, 26, 1, 26, 1, 26, 3, 26, 377, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 382, 8, 27, 10, 27, 12, 27, 385, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 390, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 395, 8, 29, 10, 29, 12, 29, 398, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 403, 8, 30, 10, 30, 12, 30, 406, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 411, 8, 31, 10, 31, 12, 31, 414, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 421, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 436, 8, 34, 10, 34, 12, 34, 439, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 447, 8, 34, 10, 34, 12, 34, 450, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 458, 8, 34, 10, 34, 12, 34, 461, 9, 34, 1, 34, 1, 34, 3, 34, 465, 8, 34, 1, 35, 1, 35, 3, 35, 469, 8, 35, 1, 36, 1, 36, 1, 36, 3, 36, 474, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 483, 8, 38, 10, 38, 12, 38, 486, 9, 38, 1, 39, 1, 39, 3, 39, 490, 8, 39, 1, 39, 1, 39, 3, 39, 494, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 506, 8, 42, 10, 42, 12, 42, 509, 9, 42, 1, 43, 
1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 519, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 531, 8, 47, 10, 47, 12, 47, 534, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 544, 8, 50, 1, 51, 3, 51, 547, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 552, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 574, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 580, 8, 58, 10, 58, 12, 58, 583, 9, 58, 3, 58, 585, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 590, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 603, 8, 61, 1, 61, 0, 4, 2, 10, 18, 20, 62, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 0, 8, 1, 0, 58, 59, 1, 0, 60, 62, 2, 0, 25, 25, 76, 76, 1, 0, 67, 68, 2, 0, 30, 30, 34, 34, 2, 0, 37, 37, 40, 40, 2, 0, 36, 36, 50, 50, 2, 0, 51, 51, 53, 57, 631, 0, 124, 1, 0, 0, 0, 2, 127, 1, 0, 0, 0, 4, 144, 1, 0, 0, 0, 6, 162, 1, 0, 0, 0, 8, 164, 1, 0, 0, 0, 10, 197, 1, 0, 0, 0, 12, 224, 1, 0, 0, 0, 14, 226, 1, 0, 0, 0, 16, 235, 1, 0, 0, 0, 18, 241, 1, 0, 0, 0, 20, 262, 1, 0, 0, 0, 22, 272, 1, 0, 0, 0, 24, 289, 1, 0, 0, 0, 26, 291, 1, 0, 0, 0, 28, 293, 1, 0, 0, 0, 30, 296, 1, 0, 0, 0, 32, 307, 1, 0, 0, 0, 34, 311, 1, 0, 0, 0, 36, 326, 1, 0, 0, 0, 38, 330, 1, 0, 0, 0, 40, 332, 1, 0, 0, 0, 42, 336, 1, 0, 0, 0, 44, 338, 1, 0, 0, 0, 46, 347, 1, 0, 0, 0, 48, 351, 1, 0, 0, 0, 50, 367, 1, 0, 0, 0, 52, 370, 1, 0, 0, 0, 54, 378, 1, 0, 0, 0, 56, 386, 1, 0, 0, 0, 58, 391, 1, 0, 0, 0, 60, 399, 1, 0, 0, 0, 62, 407, 1, 0, 0, 0, 64, 415, 1, 0, 0, 0, 66, 420, 1, 0, 0, 0, 68, 464, 1, 0, 0, 0, 70, 468, 1, 0, 0, 0, 72, 473, 1, 0, 0, 0, 74, 475, 1, 0, 0, 0, 76, 478, 1, 0, 0, 0, 78, 487, 1, 0, 0, 0, 
80, 495, 1, 0, 0, 0, 82, 498, 1, 0, 0, 0, 84, 501, 1, 0, 0, 0, 86, 510, 1, 0, 0, 0, 88, 514, 1, 0, 0, 0, 90, 520, 1, 0, 0, 0, 92, 524, 1, 0, 0, 0, 94, 527, 1, 0, 0, 0, 96, 535, 1, 0, 0, 0, 98, 539, 1, 0, 0, 0, 100, 543, 1, 0, 0, 0, 102, 546, 1, 0, 0, 0, 104, 551, 1, 0, 0, 0, 106, 555, 1, 0, 0, 0, 108, 557, 1, 0, 0, 0, 110, 559, 1, 0, 0, 0, 112, 562, 1, 0, 0, 0, 114, 566, 1, 0, 0, 0, 116, 569, 1, 0, 0, 0, 118, 589, 1, 0, 0, 0, 120, 593, 1, 0, 0, 0, 122, 598, 1, 0, 0, 0, 124, 125, 3, 2, 1, 0, 125, 126, 5, 0, 0, 1, 126, 1, 1, 0, 0, 0, 127, 128, 6, 1, -1, 0, 128, 129, 3, 4, 2, 0, 129, 135, 1, 0, 0, 0, 130, 131, 10, 1, 0, 0, 131, 132, 5, 24, 0, 0, 132, 134, 3, 6, 3, 0, 133, 130, 1, 0, 0, 0, 134, 137, 1, 0, 0, 0, 135, 133, 1, 0, 0, 0, 135, 136, 1, 0, 0, 0, 136, 3, 1, 0, 0, 0, 137, 135, 1, 0, 0, 0, 138, 145, 3, 110, 55, 0, 139, 145, 3, 34, 17, 0, 140, 145, 3, 28, 14, 0, 141, 145, 3, 114, 57, 0, 142, 143, 4, 2, 1, 0, 143, 145, 3, 48, 24, 0, 144, 138, 1, 0, 0, 0, 144, 139, 1, 0, 0, 0, 144, 140, 1, 0, 0, 0, 144, 141, 1, 0, 0, 0, 144, 142, 1, 0, 0, 0, 145, 5, 1, 0, 0, 0, 146, 163, 3, 50, 25, 0, 147, 163, 3, 8, 4, 0, 148, 163, 3, 80, 40, 0, 149, 163, 3, 74, 37, 0, 150, 163, 3, 52, 26, 0, 151, 163, 3, 76, 38, 0, 152, 163, 3, 82, 41, 0, 153, 163, 3, 84, 42, 0, 154, 163, 3, 88, 44, 0, 155, 163, 3, 90, 45, 0, 156, 163, 3, 116, 58, 0, 157, 163, 3, 92, 46, 0, 158, 159, 4, 3, 2, 0, 159, 163, 3, 122, 61, 0, 160, 161, 4, 3, 3, 0, 161, 163, 3, 120, 60, 0, 162, 146, 1, 0, 0, 0, 162, 147, 1, 0, 0, 0, 162, 148, 1, 0, 0, 0, 162, 149, 1, 0, 0, 0, 162, 150, 1, 0, 0, 0, 162, 151, 1, 0, 0, 0, 162, 152, 1, 0, 0, 0, 162, 153, 1, 0, 0, 0, 162, 154, 1, 0, 0, 0, 162, 155, 1, 0, 0, 0, 162, 156, 1, 0, 0, 0, 162, 157, 1, 0, 0, 0, 162, 158, 1, 0, 0, 0, 162, 160, 1, 0, 0, 0, 163, 7, 1, 0, 0, 0, 164, 165, 5, 16, 0, 0, 165, 166, 3, 10, 5, 0, 166, 9, 1, 0, 0, 0, 167, 168, 6, 5, -1, 0, 168, 169, 5, 43, 0, 0, 169, 198, 3, 10, 5, 8, 170, 198, 3, 16, 8, 0, 171, 198, 3, 12, 6, 0, 172, 174, 3, 16, 8, 0, 173, 175, 
5, 43, 0, 0, 174, 173, 1, 0, 0, 0, 174, 175, 1, 0, 0, 0, 175, 176, 1, 0, 0, 0, 176, 177, 5, 38, 0, 0, 177, 178, 5, 42, 0, 0, 178, 183, 3, 16, 8, 0, 179, 180, 5, 33, 0, 0, 180, 182, 3, 16, 8, 0, 181, 179, 1, 0, 0, 0, 182, 185, 1, 0, 0, 0, 183, 181, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 186, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 186, 187, 5, 49, 0, 0, 187, 198, 1, 0, 0, 0, 188, 189, 3, 16, 8, 0, 189, 191, 5, 39, 0, 0, 190, 192, 5, 43, 0, 0, 191, 190, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 193, 1, 0, 0, 0, 193, 194, 5, 44, 0, 0, 194, 198, 1, 0, 0, 0, 195, 196, 4, 5, 4, 0, 196, 198, 3, 14, 7, 0, 197, 167, 1, 0, 0, 0, 197, 170, 1, 0, 0, 0, 197, 171, 1, 0, 0, 0, 197, 172, 1, 0, 0, 0, 197, 188, 1, 0, 0, 0, 197, 195, 1, 0, 0, 0, 198, 207, 1, 0, 0, 0, 199, 200, 10, 5, 0, 0, 200, 201, 5, 29, 0, 0, 201, 206, 3, 10, 5, 6, 202, 203, 10, 4, 0, 0, 203, 204, 5, 46, 0, 0, 204, 206, 3, 10, 5, 5, 205, 199, 1, 0, 0, 0, 205, 202, 1, 0, 0, 0, 206, 209, 1, 0, 0, 0, 207, 205, 1, 0, 0, 0, 207, 208, 1, 0, 0, 0, 208, 11, 1, 0, 0, 0, 209, 207, 1, 0, 0, 0, 210, 212, 3, 16, 8, 0, 211, 213, 5, 43, 0, 0, 212, 211, 1, 0, 0, 0, 212, 213, 1, 0, 0, 0, 213, 214, 1, 0, 0, 0, 214, 215, 5, 41, 0, 0, 215, 216, 3, 106, 53, 0, 216, 225, 1, 0, 0, 0, 217, 219, 3, 16, 8, 0, 218, 220, 5, 43, 0, 0, 219, 218, 1, 0, 0, 0, 219, 220, 1, 0, 0, 0, 220, 221, 1, 0, 0, 0, 221, 222, 5, 48, 0, 0, 222, 223, 3, 106, 53, 0, 223, 225, 1, 0, 0, 0, 224, 210, 1, 0, 0, 0, 224, 217, 1, 0, 0, 0, 225, 13, 1, 0, 0, 0, 226, 227, 3, 16, 8, 0, 227, 228, 5, 63, 0, 0, 228, 229, 3, 106, 53, 0, 229, 15, 1, 0, 0, 0, 230, 236, 3, 18, 9, 0, 231, 232, 3, 18, 9, 0, 232, 233, 3, 108, 54, 0, 233, 234, 3, 18, 9, 0, 234, 236, 1, 0, 0, 0, 235, 230, 1, 0, 0, 0, 235, 231, 1, 0, 0, 0, 236, 17, 1, 0, 0, 0, 237, 238, 6, 9, -1, 0, 238, 242, 3, 20, 10, 0, 239, 240, 7, 0, 0, 0, 240, 242, 3, 18, 9, 3, 241, 237, 1, 0, 0, 0, 241, 239, 1, 0, 0, 0, 242, 251, 1, 0, 0, 0, 243, 244, 10, 2, 0, 0, 244, 245, 7, 1, 0, 0, 245, 250, 3, 18, 9, 3, 246, 247, 10, 1, 0, 0, 247, 
248, 7, 0, 0, 0, 248, 250, 3, 18, 9, 2, 249, 243, 1, 0, 0, 0, 249, 246, 1, 0, 0, 0, 250, 253, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 251, 252, 1, 0, 0, 0, 252, 19, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 254, 255, 6, 10, -1, 0, 255, 263, 3, 68, 34, 0, 256, 263, 3, 58, 29, 0, 257, 263, 3, 22, 11, 0, 258, 259, 5, 42, 0, 0, 259, 260, 3, 10, 5, 0, 260, 261, 5, 49, 0, 0, 261, 263, 1, 0, 0, 0, 262, 254, 1, 0, 0, 0, 262, 256, 1, 0, 0, 0, 262, 257, 1, 0, 0, 0, 262, 258, 1, 0, 0, 0, 263, 269, 1, 0, 0, 0, 264, 265, 10, 1, 0, 0, 265, 266, 5, 32, 0, 0, 266, 268, 3, 26, 13, 0, 267, 264, 1, 0, 0, 0, 268, 271, 1, 0, 0, 0, 269, 267, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 21, 1, 0, 0, 0, 271, 269, 1, 0, 0, 0, 272, 273, 3, 24, 12, 0, 273, 283, 5, 42, 0, 0, 274, 284, 5, 60, 0, 0, 275, 280, 3, 10, 5, 0, 276, 277, 5, 33, 0, 0, 277, 279, 3, 10, 5, 0, 278, 276, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 284, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 283, 274, 1, 0, 0, 0, 283, 275, 1, 0, 0, 0, 283, 284, 1, 0, 0, 0, 284, 285, 1, 0, 0, 0, 285, 286, 5, 49, 0, 0, 286, 23, 1, 0, 0, 0, 287, 290, 5, 63, 0, 0, 288, 290, 3, 72, 36, 0, 289, 287, 1, 0, 0, 0, 289, 288, 1, 0, 0, 0, 290, 25, 1, 0, 0, 0, 291, 292, 3, 64, 32, 0, 292, 27, 1, 0, 0, 0, 293, 294, 5, 12, 0, 0, 294, 295, 3, 30, 15, 0, 295, 29, 1, 0, 0, 0, 296, 301, 3, 32, 16, 0, 297, 298, 5, 33, 0, 0, 298, 300, 3, 32, 16, 0, 299, 297, 1, 0, 0, 0, 300, 303, 1, 0, 0, 0, 301, 299, 1, 0, 0, 0, 301, 302, 1, 0, 0, 0, 302, 31, 1, 0, 0, 0, 303, 301, 1, 0, 0, 0, 304, 305, 3, 58, 29, 0, 305, 306, 5, 31, 0, 0, 306, 308, 1, 0, 0, 0, 307, 304, 1, 0, 0, 0, 307, 308, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 310, 3, 10, 5, 0, 310, 33, 1, 0, 0, 0, 311, 312, 5, 6, 0, 0, 312, 317, 3, 36, 18, 0, 313, 314, 5, 33, 0, 0, 314, 316, 3, 36, 18, 0, 315, 313, 1, 0, 0, 0, 316, 319, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 317, 318, 1, 0, 0, 0, 318, 321, 1, 0, 0, 0, 319, 317, 1, 0, 0, 0, 320, 322, 3, 42, 21, 0, 321, 320, 1, 0, 0, 0, 321, 322, 1, 0, 0, 0, 322, 
35, 1, 0, 0, 0, 323, 324, 3, 38, 19, 0, 324, 325, 5, 104, 0, 0, 325, 327, 1, 0, 0, 0, 326, 323, 1, 0, 0, 0, 326, 327, 1, 0, 0, 0, 327, 328, 1, 0, 0, 0, 328, 329, 3, 40, 20, 0, 329, 37, 1, 0, 0, 0, 330, 331, 5, 76, 0, 0, 331, 39, 1, 0, 0, 0, 332, 333, 7, 2, 0, 0, 333, 41, 1, 0, 0, 0, 334, 337, 3, 44, 22, 0, 335, 337, 3, 46, 23, 0, 336, 334, 1, 0, 0, 0, 336, 335, 1, 0, 0, 0, 337, 43, 1, 0, 0, 0, 338, 339, 5, 75, 0, 0, 339, 344, 5, 76, 0, 0, 340, 341, 5, 33, 0, 0, 341, 343, 5, 76, 0, 0, 342, 340, 1, 0, 0, 0, 343, 346, 1, 0, 0, 0, 344, 342, 1, 0, 0, 0, 344, 345, 1, 0, 0, 0, 345, 45, 1, 0, 0, 0, 346, 344, 1, 0, 0, 0, 347, 348, 5, 65, 0, 0, 348, 349, 3, 44, 22, 0, 349, 350, 5, 66, 0, 0, 350, 47, 1, 0, 0, 0, 351, 352, 5, 19, 0, 0, 352, 357, 3, 36, 18, 0, 353, 354, 5, 33, 0, 0, 354, 356, 3, 36, 18, 0, 355, 353, 1, 0, 0, 0, 356, 359, 1, 0, 0, 0, 357, 355, 1, 0, 0, 0, 357, 358, 1, 0, 0, 0, 358, 361, 1, 0, 0, 0, 359, 357, 1, 0, 0, 0, 360, 362, 3, 54, 27, 0, 361, 360, 1, 0, 0, 0, 361, 362, 1, 0, 0, 0, 362, 365, 1, 0, 0, 0, 363, 364, 5, 28, 0, 0, 364, 366, 3, 30, 15, 0, 365, 363, 1, 0, 0, 0, 365, 366, 1, 0, 0, 0, 366, 49, 1, 0, 0, 0, 367, 368, 5, 4, 0, 0, 368, 369, 3, 30, 15, 0, 369, 51, 1, 0, 0, 0, 370, 372, 5, 15, 0, 0, 371, 373, 3, 54, 27, 0, 372, 371, 1, 0, 0, 0, 372, 373, 1, 0, 0, 0, 373, 376, 1, 0, 0, 0, 374, 375, 5, 28, 0, 0, 375, 377, 3, 30, 15, 0, 376, 374, 1, 0, 0, 0, 376, 377, 1, 0, 0, 0, 377, 53, 1, 0, 0, 0, 378, 383, 3, 56, 28, 0, 379, 380, 5, 33, 0, 0, 380, 382, 3, 56, 28, 0, 381, 379, 1, 0, 0, 0, 382, 385, 1, 0, 0, 0, 383, 381, 1, 0, 0, 0, 383, 384, 1, 0, 0, 0, 384, 55, 1, 0, 0, 0, 385, 383, 1, 0, 0, 0, 386, 389, 3, 32, 16, 0, 387, 388, 5, 16, 0, 0, 388, 390, 3, 10, 5, 0, 389, 387, 1, 0, 0, 0, 389, 390, 1, 0, 0, 0, 390, 57, 1, 0, 0, 0, 391, 396, 3, 72, 36, 0, 392, 393, 5, 35, 0, 0, 393, 395, 3, 72, 36, 0, 394, 392, 1, 0, 0, 0, 395, 398, 1, 0, 0, 0, 396, 394, 1, 0, 0, 0, 396, 397, 1, 0, 0, 0, 397, 59, 1, 0, 0, 0, 398, 396, 1, 0, 0, 0, 399, 404, 3, 66, 33, 0, 400, 
401, 5, 35, 0, 0, 401, 403, 3, 66, 33, 0, 402, 400, 1, 0, 0, 0, 403, 406, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 404, 405, 1, 0, 0, 0, 405, 61, 1, 0, 0, 0, 406, 404, 1, 0, 0, 0, 407, 412, 3, 60, 30, 0, 408, 409, 5, 33, 0, 0, 409, 411, 3, 60, 30, 0, 410, 408, 1, 0, 0, 0, 411, 414, 1, 0, 0, 0, 412, 410, 1, 0, 0, 0, 412, 413, 1, 0, 0, 0, 413, 63, 1, 0, 0, 0, 414, 412, 1, 0, 0, 0, 415, 416, 7, 3, 0, 0, 416, 65, 1, 0, 0, 0, 417, 421, 5, 80, 0, 0, 418, 419, 4, 33, 10, 0, 419, 421, 3, 70, 35, 0, 420, 417, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 421, 67, 1, 0, 0, 0, 422, 465, 5, 44, 0, 0, 423, 424, 3, 104, 52, 0, 424, 425, 5, 67, 0, 0, 425, 465, 1, 0, 0, 0, 426, 465, 3, 102, 51, 0, 427, 465, 3, 104, 52, 0, 428, 465, 3, 98, 49, 0, 429, 465, 3, 70, 35, 0, 430, 465, 3, 106, 53, 0, 431, 432, 5, 65, 0, 0, 432, 437, 3, 100, 50, 0, 433, 434, 5, 33, 0, 0, 434, 436, 3, 100, 50, 0, 435, 433, 1, 0, 0, 0, 436, 439, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 437, 438, 1, 0, 0, 0, 438, 440, 1, 0, 0, 0, 439, 437, 1, 0, 0, 0, 440, 441, 5, 66, 0, 0, 441, 465, 1, 0, 0, 0, 442, 443, 5, 65, 0, 0, 443, 448, 3, 98, 49, 0, 444, 445, 5, 33, 0, 0, 445, 447, 3, 98, 49, 0, 446, 444, 1, 0, 0, 0, 447, 450, 1, 0, 0, 0, 448, 446, 1, 0, 0, 0, 448, 449, 1, 0, 0, 0, 449, 451, 1, 0, 0, 0, 450, 448, 1, 0, 0, 0, 451, 452, 5, 66, 0, 0, 452, 465, 1, 0, 0, 0, 453, 454, 5, 65, 0, 0, 454, 459, 3, 106, 53, 0, 455, 456, 5, 33, 0, 0, 456, 458, 3, 106, 53, 0, 457, 455, 1, 0, 0, 0, 458, 461, 1, 0, 0, 0, 459, 457, 1, 0, 0, 0, 459, 460, 1, 0, 0, 0, 460, 462, 1, 0, 0, 0, 461, 459, 1, 0, 0, 0, 462, 463, 5, 66, 0, 0, 463, 465, 1, 0, 0, 0, 464, 422, 1, 0, 0, 0, 464, 423, 1, 0, 0, 0, 464, 426, 1, 0, 0, 0, 464, 427, 1, 0, 0, 0, 464, 428, 1, 0, 0, 0, 464, 429, 1, 0, 0, 0, 464, 430, 1, 0, 0, 0, 464, 431, 1, 0, 0, 0, 464, 442, 1, 0, 0, 0, 464, 453, 1, 0, 0, 0, 465, 69, 1, 0, 0, 0, 466, 469, 5, 47, 0, 0, 467, 469, 5, 64, 0, 0, 468, 466, 1, 0, 0, 0, 468, 467, 1, 0, 0, 0, 469, 71, 1, 0, 0, 0, 470, 474, 3, 64, 32, 0, 471, 472, 4, 36, 11, 0, 472, 474, 3, 
70, 35, 0, 473, 470, 1, 0, 0, 0, 473, 471, 1, 0, 0, 0, 474, 73, 1, 0, 0, 0, 475, 476, 5, 9, 0, 0, 476, 477, 5, 26, 0, 0, 477, 75, 1, 0, 0, 0, 478, 479, 5, 14, 0, 0, 479, 484, 3, 78, 39, 0, 480, 481, 5, 33, 0, 0, 481, 483, 3, 78, 39, 0, 482, 480, 1, 0, 0, 0, 483, 486, 1, 0, 0, 0, 484, 482, 1, 0, 0, 0, 484, 485, 1, 0, 0, 0, 485, 77, 1, 0, 0, 0, 486, 484, 1, 0, 0, 0, 487, 489, 3, 10, 5, 0, 488, 490, 7, 4, 0, 0, 489, 488, 1, 0, 0, 0, 489, 490, 1, 0, 0, 0, 490, 493, 1, 0, 0, 0, 491, 492, 5, 45, 0, 0, 492, 494, 7, 5, 0, 0, 493, 491, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 79, 1, 0, 0, 0, 495, 496, 5, 8, 0, 0, 496, 497, 3, 62, 31, 0, 497, 81, 1, 0, 0, 0, 498, 499, 5, 2, 0, 0, 499, 500, 3, 62, 31, 0, 500, 83, 1, 0, 0, 0, 501, 502, 5, 11, 0, 0, 502, 507, 3, 86, 43, 0, 503, 504, 5, 33, 0, 0, 504, 506, 3, 86, 43, 0, 505, 503, 1, 0, 0, 0, 506, 509, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 507, 508, 1, 0, 0, 0, 508, 85, 1, 0, 0, 0, 509, 507, 1, 0, 0, 0, 510, 511, 3, 60, 30, 0, 511, 512, 5, 84, 0, 0, 512, 513, 3, 60, 30, 0, 513, 87, 1, 0, 0, 0, 514, 515, 5, 1, 0, 0, 515, 516, 3, 20, 10, 0, 516, 518, 3, 106, 53, 0, 517, 519, 3, 94, 47, 0, 518, 517, 1, 0, 0, 0, 518, 519, 1, 0, 0, 0, 519, 89, 1, 0, 0, 0, 520, 521, 5, 7, 0, 0, 521, 522, 3, 20, 10, 0, 522, 523, 3, 106, 53, 0, 523, 91, 1, 0, 0, 0, 524, 525, 5, 10, 0, 0, 525, 526, 3, 58, 29, 0, 526, 93, 1, 0, 0, 0, 527, 532, 3, 96, 48, 0, 528, 529, 5, 33, 0, 0, 529, 531, 3, 96, 48, 0, 530, 528, 1, 0, 0, 0, 531, 534, 1, 0, 0, 0, 532, 530, 1, 0, 0, 0, 532, 533, 1, 0, 0, 0, 533, 95, 1, 0, 0, 0, 534, 532, 1, 0, 0, 0, 535, 536, 3, 64, 32, 0, 536, 537, 5, 31, 0, 0, 537, 538, 3, 68, 34, 0, 538, 97, 1, 0, 0, 0, 539, 540, 7, 6, 0, 0, 540, 99, 1, 0, 0, 0, 541, 544, 3, 102, 51, 0, 542, 544, 3, 104, 52, 0, 543, 541, 1, 0, 0, 0, 543, 542, 1, 0, 0, 0, 544, 101, 1, 0, 0, 0, 545, 547, 7, 0, 0, 0, 546, 545, 1, 0, 0, 0, 546, 547, 1, 0, 0, 0, 547, 548, 1, 0, 0, 0, 548, 549, 5, 27, 0, 0, 549, 103, 1, 0, 0, 0, 550, 552, 7, 0, 0, 0, 551, 550, 1, 0, 0, 0, 551, 552, 
1, 0, 0, 0, 552, 553, 1, 0, 0, 0, 553, 554, 5, 26, 0, 0, 554, 105, 1, 0, 0, 0, 555, 556, 5, 25, 0, 0, 556, 107, 1, 0, 0, 0, 557, 558, 7, 7, 0, 0, 558, 109, 1, 0, 0, 0, 559, 560, 5, 5, 0, 0, 560, 561, 3, 112, 56, 0, 561, 111, 1, 0, 0, 0, 562, 563, 5, 65, 0, 0, 563, 564, 3, 2, 1, 0, 564, 565, 5, 66, 0, 0, 565, 113, 1, 0, 0, 0, 566, 567, 5, 13, 0, 0, 567, 568, 5, 100, 0, 0, 568, 115, 1, 0, 0, 0, 569, 570, 5, 3, 0, 0, 570, 573, 5, 90, 0, 0, 571, 572, 5, 88, 0, 0, 572, 574, 3, 60, 30, 0, 573, 571, 1, 0, 0, 0, 573, 574, 1, 0, 0, 0, 574, 584, 1, 0, 0, 0, 575, 576, 5, 89, 0, 0, 576, 581, 3, 118, 59, 0, 577, 578, 5, 33, 0, 0, 578, 580, 3, 118, 59, 0, 579, 577, 1, 0, 0, 0, 580, 583, 1, 0, 0, 0, 581, 579, 1, 0, 0, 0, 581, 582, 1, 0, 0, 0, 582, 585, 1, 0, 0, 0, 583, 581, 1, 0, 0, 0, 584, 575, 1, 0, 0, 0, 584, 585, 1, 0, 0, 0, 585, 117, 1, 0, 0, 0, 586, 587, 3, 60, 30, 0, 587, 588, 5, 31, 0, 0, 588, 590, 1, 0, 0, 0, 589, 586, 1, 0, 0, 0, 589, 590, 1, 0, 0, 0, 590, 591, 1, 0, 0, 0, 591, 592, 3, 60, 30, 0, 592, 119, 1, 0, 0, 0, 593, 594, 5, 18, 0, 0, 594, 595, 3, 36, 18, 0, 595, 596, 5, 88, 0, 0, 596, 597, 3, 62, 31, 0, 597, 121, 1, 0, 0, 0, 598, 599, 5, 17, 0, 0, 599, 602, 3, 54, 27, 0, 600, 601, 5, 28, 0, 0, 601, 603, 3, 30, 15, 0, 602, 600, 1, 0, 0, 0, 602, 603, 1, 0, 0, 0, 603, 123, 1, 0, 0, 0, 59, 135, 144, 162, 174, 183, 191, 197, 205, 207, 212, 219, 224, 235, 241, 249, 251, 262, 269, 280, 283, 289, 301, 307, 317, 321, 326, 336, 344, 357, 361, 365, 372, 376, 383, 389, 396, 404, 412, 420, 437, 448, 459, 464, 468, 473, 484, 489, 493, 507, 518, 532, 543, 546, 551, 573, 581, 584, 589, 602] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index af5c03a27592b..9f8197830640c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -1886,26 +1886,21 @@ public final FunctionNameContext functionName() throws RecognitionException { try { setState(289); _errHandler.sync(this); - switch (_input.LA(1)) { - case MATCH: + switch ( getInterpreter().adaptivePredict(_input,20,_ctx) ) { + case 1: enterOuterAlt(_localctx, 1); { setState(287); match(MATCH); } break; - case PARAM: - case NAMED_OR_POSITIONAL_PARAM: - case UNQUOTED_IDENTIFIER: - case QUOTED_IDENTIFIER: + case 2: enterOuterAlt(_localctx, 2); { setState(288); identifierOrParameter(); } break; - default: - throw new NoViableAltException(this); } } catch (RecognitionException re) { @@ -3260,26 +3255,25 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); enterRule(_localctx, 66, RULE_identifierPattern); try { - setState(419); + setState(420); _errHandler.sync(this); - switch (_input.LA(1)) { - case ID_PATTERN: + switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { + case 1: enterOuterAlt(_localctx, 1); { setState(417); match(ID_PATTERN); } break; - case PARAM: - case NAMED_OR_POSITIONAL_PARAM: + case 2: enterOuterAlt(_localctx, 2); { setState(418); + if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); + setState(419); parameter(); } break; - default: - throw new NoViableAltException(this); } } catch (RecognitionException re) { @@ -3549,14 +3543,14 @@ public final ConstantContext constant() throws RecognitionException { enterRule(_localctx, 68, RULE_constant); int _la; try { - setState(463); + setState(464); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,42,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(421); + setState(422); match(NULL); } break; @@ -3564,9 +3558,9 @@ public final ConstantContext constant() 
throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(422); - integerValue(); setState(423); + integerValue(); + setState(424); match(UNQUOTED_IDENTIFIER); } break; @@ -3574,7 +3568,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(425); + setState(426); decimalValue(); } break; @@ -3582,7 +3576,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(426); + setState(427); integerValue(); } break; @@ -3590,7 +3584,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(427); + setState(428); booleanValue(); } break; @@ -3598,7 +3592,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParameterContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(428); + setState(429); parameter(); } break; @@ -3606,7 +3600,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(429); + setState(430); string(); } break; @@ -3614,27 +3608,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(430); - match(OPENING_BRACKET); setState(431); + match(OPENING_BRACKET); + setState(432); numericValue(); - setState(436); + setState(437); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(432); - match(COMMA); setState(433); + match(COMMA); + setState(434); numericValue(); } } - setState(438); + setState(439); _errHandler.sync(this); _la = _input.LA(1); } - setState(439); + 
setState(440); match(CLOSING_BRACKET); } break; @@ -3642,27 +3636,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(441); - match(OPENING_BRACKET); setState(442); + match(OPENING_BRACKET); + setState(443); booleanValue(); - setState(447); + setState(448); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(443); - match(COMMA); setState(444); + match(COMMA); + setState(445); booleanValue(); } } - setState(449); + setState(450); _errHandler.sync(this); _la = _input.LA(1); } - setState(450); + setState(451); match(CLOSING_BRACKET); } break; @@ -3670,27 +3664,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(452); - match(OPENING_BRACKET); setState(453); + match(OPENING_BRACKET); + setState(454); string(); - setState(458); + setState(459); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(454); - match(COMMA); setState(455); + match(COMMA); + setState(456); string(); } } - setState(460); + setState(461); _errHandler.sync(this); _la = _input.LA(1); } - setState(461); + setState(462); match(CLOSING_BRACKET); } break; @@ -3764,14 +3758,14 @@ public final ParameterContext parameter() throws RecognitionException { ParameterContext _localctx = new ParameterContext(_ctx, getState()); enterRule(_localctx, 70, RULE_parameter); try { - setState(467); + setState(468); _errHandler.sync(this); switch (_input.LA(1)) { case PARAM: _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(465); + setState(466); match(PARAM); } break; @@ -3779,7 +3773,7 @@ public final ParameterContext parameter() throws RecognitionException { _localctx = new InputNamedOrPositionalParamContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(466); + setState(467); 
match(NAMED_OR_POSITIONAL_PARAM); } break; @@ -3830,27 +3824,25 @@ public final IdentifierOrParameterContext identifierOrParameter() throws Recogni IdentifierOrParameterContext _localctx = new IdentifierOrParameterContext(_ctx, getState()); enterRule(_localctx, 72, RULE_identifierOrParameter); try { - setState(471); + setState(473); _errHandler.sync(this); - switch (_input.LA(1)) { - case UNQUOTED_IDENTIFIER: - case QUOTED_IDENTIFIER: + switch ( getInterpreter().adaptivePredict(_input,44,_ctx) ) { + case 1: enterOuterAlt(_localctx, 1); { - setState(469); + setState(470); identifier(); } break; - case PARAM: - case NAMED_OR_POSITIONAL_PARAM: + case 2: enterOuterAlt(_localctx, 2); { - setState(470); + setState(471); + if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); + setState(472); parameter(); } break; - default: - throw new NoViableAltException(this); } } catch (RecognitionException re) { @@ -3894,9 +3886,9 @@ public final LimitCommandContext limitCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(473); + setState(475); match(LIMIT); - setState(474); + setState(476); match(INTEGER_LITERAL); } } @@ -3951,25 +3943,25 @@ public final SortCommandContext sortCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(476); + setState(478); match(SORT); - setState(477); + setState(479); orderExpression(); - setState(482); + setState(484); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,45,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(478); + setState(480); match(COMMA); - setState(479); + setState(481); orderExpression(); } } } - setState(484); + setState(486); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,45,_ctx); } @@ -4025,14 +4017,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio try { 
enterOuterAlt(_localctx, 1); { - setState(485); - booleanExpression(0); setState(487); + booleanExpression(0); + setState(489); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { case 1: { - setState(486); + setState(488); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -4046,14 +4038,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(491); + setState(493); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { case 1: { - setState(489); + setState(491); match(NULLS); - setState(490); + setState(492); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -4112,9 +4104,9 @@ public final KeepCommandContext keepCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(493); + setState(495); match(KEEP); - setState(494); + setState(496); qualifiedNamePatterns(); } } @@ -4161,9 +4153,9 @@ public final DropCommandContext dropCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(496); + setState(498); match(DROP); - setState(497); + setState(499); qualifiedNamePatterns(); } } @@ -4218,25 +4210,25 @@ public final RenameCommandContext renameCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(499); + setState(501); match(RENAME); - setState(500); + setState(502); renameClause(); - setState(505); + setState(507); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,48,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(501); + setState(503); match(COMMA); - setState(502); + setState(504); renameClause(); } } } - setState(507); + setState(509); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,48,_ctx); } @@ 
-4290,11 +4282,11 @@ public final RenameClauseContext renameClause() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(508); + setState(510); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(509); + setState(511); match(AS); - setState(510); + setState(512); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -4347,18 +4339,18 @@ public final DissectCommandContext dissectCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(512); + setState(514); match(DISSECT); - setState(513); + setState(515); primaryExpression(0); - setState(514); - string(); setState(516); + string(); + setState(518); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,49,_ctx) ) { case 1: { - setState(515); + setState(517); commandOptions(); } break; @@ -4411,11 +4403,11 @@ public final GrokCommandContext grokCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(518); + setState(520); match(GROK); - setState(519); + setState(521); primaryExpression(0); - setState(520); + setState(522); string(); } } @@ -4462,9 +4454,9 @@ public final MvExpandCommandContext mvExpandCommand() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(522); + setState(524); match(MV_EXPAND); - setState(523); + setState(525); qualifiedName(); } } @@ -4518,23 +4510,23 @@ public final CommandOptionsContext commandOptions() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(525); + setState(527); commandOption(); - setState(530); + setState(532); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,50,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(526); + setState(528); match(COMMA); - setState(527); + setState(529); commandOption(); } } } - setState(532); + setState(534); _errHandler.sync(this); _alt = 
getInterpreter().adaptivePredict(_input,50,_ctx); } @@ -4586,11 +4578,11 @@ public final CommandOptionContext commandOption() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(533); + setState(535); identifier(); - setState(534); + setState(536); match(ASSIGN); - setState(535); + setState(537); constant(); } } @@ -4636,7 +4628,7 @@ public final BooleanValueContext booleanValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(537); + setState(539); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4691,20 +4683,20 @@ public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); enterRule(_localctx, 100, RULE_numericValue); try { - setState(541); + setState(543); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,51,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(539); + setState(541); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(540); + setState(542); integerValue(); } break; @@ -4753,12 +4745,12 @@ public final DecimalValueContext decimalValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(544); + setState(546); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(543); + setState(545); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4771,7 +4763,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(546); + setState(548); match(DECIMAL_LITERAL); } } @@ -4818,12 +4810,12 @@ public final IntegerValueContext integerValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(549); + setState(551); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(548); + setState(550); _la = _input.LA(1); if ( 
!(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4836,7 +4828,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(551); + setState(553); match(INTEGER_LITERAL); } } @@ -4880,7 +4872,7 @@ public final StringContext string() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(553); + setState(555); match(QUOTED_STRING); } } @@ -4930,7 +4922,7 @@ public final ComparisonOperatorContext comparisonOperator() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(555); + setState(557); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 281474976710656000L) != 0)) ) { _errHandler.recoverInline(this); @@ -4985,9 +4977,9 @@ public final ExplainCommandContext explainCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(557); + setState(559); match(EXPLAIN); - setState(558); + setState(560); subqueryExpression(); } } @@ -5035,11 +5027,11 @@ public final SubqueryExpressionContext subqueryExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(560); + setState(562); match(OPENING_BRACKET); - setState(561); + setState(563); query(0); - setState(562); + setState(564); match(CLOSING_BRACKET); } } @@ -5096,9 +5088,9 @@ public final ShowCommandContext showCommand() throws RecognitionException { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(564); + setState(566); match(SHOW); - setState(565); + setState(567); match(INFO); } } @@ -5161,46 +5153,46 @@ public final EnrichCommandContext enrichCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(567); + setState(569); match(ENRICH); - setState(568); + setState(570); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(571); + setState(573); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,54,_ctx) ) { case 1: { - setState(569); + 
setState(571); match(ON); - setState(570); + setState(572); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(582); + setState(584); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,56,_ctx) ) { case 1: { - setState(573); + setState(575); match(WITH); - setState(574); + setState(576); enrichWithClause(); - setState(579); + setState(581); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,55,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(575); + setState(577); match(COMMA); - setState(576); + setState(578); enrichWithClause(); } } } - setState(581); + setState(583); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,55,_ctx); } @@ -5257,19 +5249,19 @@ public final EnrichWithClauseContext enrichWithClause() throws RecognitionExcept try { enterOuterAlt(_localctx, 1); { - setState(587); + setState(589); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,57,_ctx) ) { case 1: { - setState(584); + setState(586); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(585); + setState(587); match(ASSIGN); } break; } - setState(589); + setState(591); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -5322,13 +5314,13 @@ public final LookupCommandContext lookupCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(591); + setState(593); match(DEV_LOOKUP); - setState(592); + setState(594); ((LookupCommandContext)_localctx).tableName = indexPattern(); - setState(593); + setState(595); match(ON); - setState(594); + setState(596); ((LookupCommandContext)_localctx).matchFields = qualifiedNamePatterns(); } } @@ -5381,18 +5373,18 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(596); + setState(598); 
match(DEV_INLINESTATS); - setState(597); + setState(599); ((InlinestatsCommandContext)_localctx).stats = aggFields(); - setState(600); + setState(602); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,58,_ctx) ) { case 1: { - setState(598); + setState(600); match(BY); - setState(599); + setState(601); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -5424,6 +5416,10 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return operatorExpression_sempred((OperatorExpressionContext)_localctx, predIndex); case 10: return primaryExpression_sempred((PrimaryExpressionContext)_localctx, predIndex); + case 33: + return identifierPattern_sempred((IdentifierPatternContext)_localctx, predIndex); + case 36: + return identifierOrParameter_sempred((IdentifierOrParameterContext)_localctx, predIndex); } return true; } @@ -5477,9 +5473,23 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in } return true; } + private boolean identifierPattern_sempred(IdentifierPatternContext _localctx, int predIndex) { + switch (predIndex) { + case 10: + return this.isDevVersion(); + } + return true; + } + private boolean identifierOrParameter_sempred(IdentifierOrParameterContext _localctx, int predIndex) { + switch (predIndex) { + case 11: + return this.isDevVersion(); + } + return true; + } public static final String _serializedATN = - "\u0004\u0001x\u025b\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0004\u0001x\u025d\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ @@ -5540,326 +5550,327 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "\u0001\u001d\u0005\u001d\u018b\b\u001d\n\u001d\f\u001d\u018e\t\u001d\u0001"+ 
"\u001e\u0001\u001e\u0001\u001e\u0005\u001e\u0193\b\u001e\n\u001e\f\u001e"+ "\u0196\t\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u019b\b"+ - "\u001f\n\u001f\f\u001f\u019e\t\u001f\u0001 \u0001 \u0001!\u0001!\u0003"+ - "!\u01a4\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ - "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01b3\b\"\n\"\f\"\u01b6"+ - "\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01be\b\""+ - "\n\"\f\"\u01c1\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005"+ - "\"\u01c9\b\"\n\"\f\"\u01cc\t\"\u0001\"\u0001\"\u0003\"\u01d0\b\"\u0001"+ - "#\u0001#\u0003#\u01d4\b#\u0001$\u0001$\u0003$\u01d8\b$\u0001%\u0001%\u0001"+ - "%\u0001&\u0001&\u0001&\u0001&\u0005&\u01e1\b&\n&\f&\u01e4\t&\u0001\'\u0001"+ - "\'\u0003\'\u01e8\b\'\u0001\'\u0001\'\u0003\'\u01ec\b\'\u0001(\u0001(\u0001"+ - "(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001*\u0005*\u01f8\b*\n*"+ - "\f*\u01fb\t*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001,\u0003"+ - ",\u0205\b,\u0001-\u0001-\u0001-\u0001-\u0001.\u0001.\u0001.\u0001/\u0001"+ - "/\u0001/\u0005/\u0211\b/\n/\f/\u0214\t/\u00010\u00010\u00010\u00010\u0001"+ - "1\u00011\u00012\u00012\u00032\u021e\b2\u00013\u00033\u0221\b3\u00013\u0001"+ - "3\u00014\u00034\u0226\b4\u00014\u00014\u00015\u00015\u00016\u00016\u0001"+ - "7\u00017\u00017\u00018\u00018\u00018\u00018\u00019\u00019\u00019\u0001"+ - ":\u0001:\u0001:\u0001:\u0003:\u023c\b:\u0001:\u0001:\u0001:\u0001:\u0005"+ - ":\u0242\b:\n:\f:\u0245\t:\u0003:\u0247\b:\u0001;\u0001;\u0001;\u0003;"+ - "\u024c\b;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001=\u0001"+ - "=\u0001=\u0001=\u0003=\u0259\b=\u0001=\u0000\u0004\u0002\n\u0012\u0014"+ - ">\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a"+ - "\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfhjlnprtvxz\u0000\b\u0001"+ - "\u0000:;\u0001\u0000<>\u0002\u0000\u0019\u0019LL\u0001\u0000CD\u0002\u0000"+ - 
"\u001e\u001e\"\"\u0002\u0000%%((\u0002\u0000$$22\u0002\u00003359\u0275"+ - "\u0000|\u0001\u0000\u0000\u0000\u0002\u007f\u0001\u0000\u0000\u0000\u0004"+ - "\u0090\u0001\u0000\u0000\u0000\u0006\u00a2\u0001\u0000\u0000\u0000\b\u00a4"+ - "\u0001\u0000\u0000\u0000\n\u00c5\u0001\u0000\u0000\u0000\f\u00e0\u0001"+ - "\u0000\u0000\u0000\u000e\u00e2\u0001\u0000\u0000\u0000\u0010\u00eb\u0001"+ - "\u0000\u0000\u0000\u0012\u00f1\u0001\u0000\u0000\u0000\u0014\u0106\u0001"+ - "\u0000\u0000\u0000\u0016\u0110\u0001\u0000\u0000\u0000\u0018\u0121\u0001"+ - "\u0000\u0000\u0000\u001a\u0123\u0001\u0000\u0000\u0000\u001c\u0125\u0001"+ - "\u0000\u0000\u0000\u001e\u0128\u0001\u0000\u0000\u0000 \u0133\u0001\u0000"+ - "\u0000\u0000\"\u0137\u0001\u0000\u0000\u0000$\u0146\u0001\u0000\u0000"+ - "\u0000&\u014a\u0001\u0000\u0000\u0000(\u014c\u0001\u0000\u0000\u0000*"+ - "\u0150\u0001\u0000\u0000\u0000,\u0152\u0001\u0000\u0000\u0000.\u015b\u0001"+ - "\u0000\u0000\u00000\u015f\u0001\u0000\u0000\u00002\u016f\u0001\u0000\u0000"+ - "\u00004\u0172\u0001\u0000\u0000\u00006\u017a\u0001\u0000\u0000\u00008"+ - "\u0182\u0001\u0000\u0000\u0000:\u0187\u0001\u0000\u0000\u0000<\u018f\u0001"+ - "\u0000\u0000\u0000>\u0197\u0001\u0000\u0000\u0000@\u019f\u0001\u0000\u0000"+ - "\u0000B\u01a3\u0001\u0000\u0000\u0000D\u01cf\u0001\u0000\u0000\u0000F"+ - "\u01d3\u0001\u0000\u0000\u0000H\u01d7\u0001\u0000\u0000\u0000J\u01d9\u0001"+ - "\u0000\u0000\u0000L\u01dc\u0001\u0000\u0000\u0000N\u01e5\u0001\u0000\u0000"+ - "\u0000P\u01ed\u0001\u0000\u0000\u0000R\u01f0\u0001\u0000\u0000\u0000T"+ - "\u01f3\u0001\u0000\u0000\u0000V\u01fc\u0001\u0000\u0000\u0000X\u0200\u0001"+ - "\u0000\u0000\u0000Z\u0206\u0001\u0000\u0000\u0000\\\u020a\u0001\u0000"+ - "\u0000\u0000^\u020d\u0001\u0000\u0000\u0000`\u0215\u0001\u0000\u0000\u0000"+ - "b\u0219\u0001\u0000\u0000\u0000d\u021d\u0001\u0000\u0000\u0000f\u0220"+ - "\u0001\u0000\u0000\u0000h\u0225\u0001\u0000\u0000\u0000j\u0229\u0001\u0000"+ - 
"\u0000\u0000l\u022b\u0001\u0000\u0000\u0000n\u022d\u0001\u0000\u0000\u0000"+ - "p\u0230\u0001\u0000\u0000\u0000r\u0234\u0001\u0000\u0000\u0000t\u0237"+ - "\u0001\u0000\u0000\u0000v\u024b\u0001\u0000\u0000\u0000x\u024f\u0001\u0000"+ - "\u0000\u0000z\u0254\u0001\u0000\u0000\u0000|}\u0003\u0002\u0001\u0000"+ - "}~\u0005\u0000\u0000\u0001~\u0001\u0001\u0000\u0000\u0000\u007f\u0080"+ - "\u0006\u0001\uffff\uffff\u0000\u0080\u0081\u0003\u0004\u0002\u0000\u0081"+ - "\u0087\u0001\u0000\u0000\u0000\u0082\u0083\n\u0001\u0000\u0000\u0083\u0084"+ - "\u0005\u0018\u0000\u0000\u0084\u0086\u0003\u0006\u0003\u0000\u0085\u0082"+ - "\u0001\u0000\u0000\u0000\u0086\u0089\u0001\u0000\u0000\u0000\u0087\u0085"+ - "\u0001\u0000\u0000\u0000\u0087\u0088\u0001\u0000\u0000\u0000\u0088\u0003"+ - "\u0001\u0000\u0000\u0000\u0089\u0087\u0001\u0000\u0000\u0000\u008a\u0091"+ - "\u0003n7\u0000\u008b\u0091\u0003\"\u0011\u0000\u008c\u0091\u0003\u001c"+ - "\u000e\u0000\u008d\u0091\u0003r9\u0000\u008e\u008f\u0004\u0002\u0001\u0000"+ - "\u008f\u0091\u00030\u0018\u0000\u0090\u008a\u0001\u0000\u0000\u0000\u0090"+ - "\u008b\u0001\u0000\u0000\u0000\u0090\u008c\u0001\u0000\u0000\u0000\u0090"+ - "\u008d\u0001\u0000\u0000\u0000\u0090\u008e\u0001\u0000\u0000\u0000\u0091"+ - "\u0005\u0001\u0000\u0000\u0000\u0092\u00a3\u00032\u0019\u0000\u0093\u00a3"+ - "\u0003\b\u0004\u0000\u0094\u00a3\u0003P(\u0000\u0095\u00a3\u0003J%\u0000"+ - "\u0096\u00a3\u00034\u001a\u0000\u0097\u00a3\u0003L&\u0000\u0098\u00a3"+ - "\u0003R)\u0000\u0099\u00a3\u0003T*\u0000\u009a\u00a3\u0003X,\u0000\u009b"+ - "\u00a3\u0003Z-\u0000\u009c\u00a3\u0003t:\u0000\u009d\u00a3\u0003\\.\u0000"+ - "\u009e\u009f\u0004\u0003\u0002\u0000\u009f\u00a3\u0003z=\u0000\u00a0\u00a1"+ - "\u0004\u0003\u0003\u0000\u00a1\u00a3\u0003x<\u0000\u00a2\u0092\u0001\u0000"+ - "\u0000\u0000\u00a2\u0093\u0001\u0000\u0000\u0000\u00a2\u0094\u0001\u0000"+ - "\u0000\u0000\u00a2\u0095\u0001\u0000\u0000\u0000\u00a2\u0096\u0001\u0000"+ - 
"\u0000\u0000\u00a2\u0097\u0001\u0000\u0000\u0000\u00a2\u0098\u0001\u0000"+ - "\u0000\u0000\u00a2\u0099\u0001\u0000\u0000\u0000\u00a2\u009a\u0001\u0000"+ - "\u0000\u0000\u00a2\u009b\u0001\u0000\u0000\u0000\u00a2\u009c\u0001\u0000"+ - "\u0000\u0000\u00a2\u009d\u0001\u0000\u0000\u0000\u00a2\u009e\u0001\u0000"+ - "\u0000\u0000\u00a2\u00a0\u0001\u0000\u0000\u0000\u00a3\u0007\u0001\u0000"+ - "\u0000\u0000\u00a4\u00a5\u0005\u0010\u0000\u0000\u00a5\u00a6\u0003\n\u0005"+ - "\u0000\u00a6\t\u0001\u0000\u0000\u0000\u00a7\u00a8\u0006\u0005\uffff\uffff"+ - "\u0000\u00a8\u00a9\u0005+\u0000\u0000\u00a9\u00c6\u0003\n\u0005\b\u00aa"+ - "\u00c6\u0003\u0010\b\u0000\u00ab\u00c6\u0003\f\u0006\u0000\u00ac\u00ae"+ - "\u0003\u0010\b\u0000\u00ad\u00af\u0005+\u0000\u0000\u00ae\u00ad\u0001"+ - "\u0000\u0000\u0000\u00ae\u00af\u0001\u0000\u0000\u0000\u00af\u00b0\u0001"+ - "\u0000\u0000\u0000\u00b0\u00b1\u0005&\u0000\u0000\u00b1\u00b2\u0005*\u0000"+ - "\u0000\u00b2\u00b7\u0003\u0010\b\u0000\u00b3\u00b4\u0005!\u0000\u0000"+ - "\u00b4\u00b6\u0003\u0010\b\u0000\u00b5\u00b3\u0001\u0000\u0000\u0000\u00b6"+ - "\u00b9\u0001\u0000\u0000\u0000\u00b7\u00b5\u0001\u0000\u0000\u0000\u00b7"+ - "\u00b8\u0001\u0000\u0000\u0000\u00b8\u00ba\u0001\u0000\u0000\u0000\u00b9"+ - "\u00b7\u0001\u0000\u0000\u0000\u00ba\u00bb\u00051\u0000\u0000\u00bb\u00c6"+ - "\u0001\u0000\u0000\u0000\u00bc\u00bd\u0003\u0010\b\u0000\u00bd\u00bf\u0005"+ - "\'\u0000\u0000\u00be\u00c0\u0005+\u0000\u0000\u00bf\u00be\u0001\u0000"+ - "\u0000\u0000\u00bf\u00c0\u0001\u0000\u0000\u0000\u00c0\u00c1\u0001\u0000"+ - "\u0000\u0000\u00c1\u00c2\u0005,\u0000\u0000\u00c2\u00c6\u0001\u0000\u0000"+ - "\u0000\u00c3\u00c4\u0004\u0005\u0004\u0000\u00c4\u00c6\u0003\u000e\u0007"+ - "\u0000\u00c5\u00a7\u0001\u0000\u0000\u0000\u00c5\u00aa\u0001\u0000\u0000"+ - "\u0000\u00c5\u00ab\u0001\u0000\u0000\u0000\u00c5\u00ac\u0001\u0000\u0000"+ - "\u0000\u00c5\u00bc\u0001\u0000\u0000\u0000\u00c5\u00c3\u0001\u0000\u0000"+ - 
"\u0000\u00c6\u00cf\u0001\u0000\u0000\u0000\u00c7\u00c8\n\u0005\u0000\u0000"+ - "\u00c8\u00c9\u0005\u001d\u0000\u0000\u00c9\u00ce\u0003\n\u0005\u0006\u00ca"+ - "\u00cb\n\u0004\u0000\u0000\u00cb\u00cc\u0005.\u0000\u0000\u00cc\u00ce"+ - "\u0003\n\u0005\u0005\u00cd\u00c7\u0001\u0000\u0000\u0000\u00cd\u00ca\u0001"+ - "\u0000\u0000\u0000\u00ce\u00d1\u0001\u0000\u0000\u0000\u00cf\u00cd\u0001"+ - "\u0000\u0000\u0000\u00cf\u00d0\u0001\u0000\u0000\u0000\u00d0\u000b\u0001"+ - "\u0000\u0000\u0000\u00d1\u00cf\u0001\u0000\u0000\u0000\u00d2\u00d4\u0003"+ - "\u0010\b\u0000\u00d3\u00d5\u0005+\u0000\u0000\u00d4\u00d3\u0001\u0000"+ - "\u0000\u0000\u00d4\u00d5\u0001\u0000\u0000\u0000\u00d5\u00d6\u0001\u0000"+ - "\u0000\u0000\u00d6\u00d7\u0005)\u0000\u0000\u00d7\u00d8\u0003j5\u0000"+ - "\u00d8\u00e1\u0001\u0000\u0000\u0000\u00d9\u00db\u0003\u0010\b\u0000\u00da"+ - "\u00dc\u0005+\u0000\u0000\u00db\u00da\u0001\u0000\u0000\u0000\u00db\u00dc"+ - "\u0001\u0000\u0000\u0000\u00dc\u00dd\u0001\u0000\u0000\u0000\u00dd\u00de"+ - "\u00050\u0000\u0000\u00de\u00df\u0003j5\u0000\u00df\u00e1\u0001\u0000"+ - "\u0000\u0000\u00e0\u00d2\u0001\u0000\u0000\u0000\u00e0\u00d9\u0001\u0000"+ - "\u0000\u0000\u00e1\r\u0001\u0000\u0000\u0000\u00e2\u00e3\u0003\u0010\b"+ - "\u0000\u00e3\u00e4\u0005?\u0000\u0000\u00e4\u00e5\u0003j5\u0000\u00e5"+ - "\u000f\u0001\u0000\u0000\u0000\u00e6\u00ec\u0003\u0012\t\u0000\u00e7\u00e8"+ - "\u0003\u0012\t\u0000\u00e8\u00e9\u0003l6\u0000\u00e9\u00ea\u0003\u0012"+ - "\t\u0000\u00ea\u00ec\u0001\u0000\u0000\u0000\u00eb\u00e6\u0001\u0000\u0000"+ - "\u0000\u00eb\u00e7\u0001\u0000\u0000\u0000\u00ec\u0011\u0001\u0000\u0000"+ - "\u0000\u00ed\u00ee\u0006\t\uffff\uffff\u0000\u00ee\u00f2\u0003\u0014\n"+ - "\u0000\u00ef\u00f0\u0007\u0000\u0000\u0000\u00f0\u00f2\u0003\u0012\t\u0003"+ - "\u00f1\u00ed\u0001\u0000\u0000\u0000\u00f1\u00ef\u0001\u0000\u0000\u0000"+ - "\u00f2\u00fb\u0001\u0000\u0000\u0000\u00f3\u00f4\n\u0002\u0000\u0000\u00f4"+ - 
"\u00f5\u0007\u0001\u0000\u0000\u00f5\u00fa\u0003\u0012\t\u0003\u00f6\u00f7"+ - "\n\u0001\u0000\u0000\u00f7\u00f8\u0007\u0000\u0000\u0000\u00f8\u00fa\u0003"+ - "\u0012\t\u0002\u00f9\u00f3\u0001\u0000\u0000\u0000\u00f9\u00f6\u0001\u0000"+ - "\u0000\u0000\u00fa\u00fd\u0001\u0000\u0000\u0000\u00fb\u00f9\u0001\u0000"+ - "\u0000\u0000\u00fb\u00fc\u0001\u0000\u0000\u0000\u00fc\u0013\u0001\u0000"+ - "\u0000\u0000\u00fd\u00fb\u0001\u0000\u0000\u0000\u00fe\u00ff\u0006\n\uffff"+ - "\uffff\u0000\u00ff\u0107\u0003D\"\u0000\u0100\u0107\u0003:\u001d\u0000"+ - "\u0101\u0107\u0003\u0016\u000b\u0000\u0102\u0103\u0005*\u0000\u0000\u0103"+ - "\u0104\u0003\n\u0005\u0000\u0104\u0105\u00051\u0000\u0000\u0105\u0107"+ - "\u0001\u0000\u0000\u0000\u0106\u00fe\u0001\u0000\u0000\u0000\u0106\u0100"+ - "\u0001\u0000\u0000\u0000\u0106\u0101\u0001\u0000\u0000\u0000\u0106\u0102"+ - "\u0001\u0000\u0000\u0000\u0107\u010d\u0001\u0000\u0000\u0000\u0108\u0109"+ - "\n\u0001\u0000\u0000\u0109\u010a\u0005 \u0000\u0000\u010a\u010c\u0003"+ - "\u001a\r\u0000\u010b\u0108\u0001\u0000\u0000\u0000\u010c\u010f\u0001\u0000"+ - "\u0000\u0000\u010d\u010b\u0001\u0000\u0000\u0000\u010d\u010e\u0001\u0000"+ - "\u0000\u0000\u010e\u0015\u0001\u0000\u0000\u0000\u010f\u010d\u0001\u0000"+ - "\u0000\u0000\u0110\u0111\u0003\u0018\f\u0000\u0111\u011b\u0005*\u0000"+ - "\u0000\u0112\u011c\u0005<\u0000\u0000\u0113\u0118\u0003\n\u0005\u0000"+ - "\u0114\u0115\u0005!\u0000\u0000\u0115\u0117\u0003\n\u0005\u0000\u0116"+ - "\u0114\u0001\u0000\u0000\u0000\u0117\u011a\u0001\u0000\u0000\u0000\u0118"+ - "\u0116\u0001\u0000\u0000\u0000\u0118\u0119\u0001\u0000\u0000\u0000\u0119"+ - "\u011c\u0001\u0000\u0000\u0000\u011a\u0118\u0001\u0000\u0000\u0000\u011b"+ - "\u0112\u0001\u0000\u0000\u0000\u011b\u0113\u0001\u0000\u0000\u0000\u011b"+ - "\u011c\u0001\u0000\u0000\u0000\u011c\u011d\u0001\u0000\u0000\u0000\u011d"+ - "\u011e\u00051\u0000\u0000\u011e\u0017\u0001\u0000\u0000\u0000\u011f\u0122"+ - 
"\u0005?\u0000\u0000\u0120\u0122\u0003H$\u0000\u0121\u011f\u0001\u0000"+ - "\u0000\u0000\u0121\u0120\u0001\u0000\u0000\u0000\u0122\u0019\u0001\u0000"+ - "\u0000\u0000\u0123\u0124\u0003@ \u0000\u0124\u001b\u0001\u0000\u0000\u0000"+ - "\u0125\u0126\u0005\f\u0000\u0000\u0126\u0127\u0003\u001e\u000f\u0000\u0127"+ - "\u001d\u0001\u0000\u0000\u0000\u0128\u012d\u0003 \u0010\u0000\u0129\u012a"+ - "\u0005!\u0000\u0000\u012a\u012c\u0003 \u0010\u0000\u012b\u0129\u0001\u0000"+ - "\u0000\u0000\u012c\u012f\u0001\u0000\u0000\u0000\u012d\u012b\u0001\u0000"+ - "\u0000\u0000\u012d\u012e\u0001\u0000\u0000\u0000\u012e\u001f\u0001\u0000"+ - "\u0000\u0000\u012f\u012d\u0001\u0000\u0000\u0000\u0130\u0131\u0003:\u001d"+ - "\u0000\u0131\u0132\u0005\u001f\u0000\u0000\u0132\u0134\u0001\u0000\u0000"+ - "\u0000\u0133\u0130\u0001\u0000\u0000\u0000\u0133\u0134\u0001\u0000\u0000"+ - "\u0000\u0134\u0135\u0001\u0000\u0000\u0000\u0135\u0136\u0003\n\u0005\u0000"+ - "\u0136!\u0001\u0000\u0000\u0000\u0137\u0138\u0005\u0006\u0000\u0000\u0138"+ - "\u013d\u0003$\u0012\u0000\u0139\u013a\u0005!\u0000\u0000\u013a\u013c\u0003"+ - "$\u0012\u0000\u013b\u0139\u0001\u0000\u0000\u0000\u013c\u013f\u0001\u0000"+ - "\u0000\u0000\u013d\u013b\u0001\u0000\u0000\u0000\u013d\u013e\u0001\u0000"+ - "\u0000\u0000\u013e\u0141\u0001\u0000\u0000\u0000\u013f\u013d\u0001\u0000"+ - "\u0000\u0000\u0140\u0142\u0003*\u0015\u0000\u0141\u0140\u0001\u0000\u0000"+ - "\u0000\u0141\u0142\u0001\u0000\u0000\u0000\u0142#\u0001\u0000\u0000\u0000"+ - "\u0143\u0144\u0003&\u0013\u0000\u0144\u0145\u0005h\u0000\u0000\u0145\u0147"+ - "\u0001\u0000\u0000\u0000\u0146\u0143\u0001\u0000\u0000\u0000\u0146\u0147"+ - "\u0001\u0000\u0000\u0000\u0147\u0148\u0001\u0000\u0000\u0000\u0148\u0149"+ - "\u0003(\u0014\u0000\u0149%\u0001\u0000\u0000\u0000\u014a\u014b\u0005L"+ - "\u0000\u0000\u014b\'\u0001\u0000\u0000\u0000\u014c\u014d\u0007\u0002\u0000"+ - "\u0000\u014d)\u0001\u0000\u0000\u0000\u014e\u0151\u0003,\u0016\u0000\u014f"+ - 
"\u0151\u0003.\u0017\u0000\u0150\u014e\u0001\u0000\u0000\u0000\u0150\u014f"+ - "\u0001\u0000\u0000\u0000\u0151+\u0001\u0000\u0000\u0000\u0152\u0153\u0005"+ - "K\u0000\u0000\u0153\u0158\u0005L\u0000\u0000\u0154\u0155\u0005!\u0000"+ - "\u0000\u0155\u0157\u0005L\u0000\u0000\u0156\u0154\u0001\u0000\u0000\u0000"+ - "\u0157\u015a\u0001\u0000\u0000\u0000\u0158\u0156\u0001\u0000\u0000\u0000"+ - "\u0158\u0159\u0001\u0000\u0000\u0000\u0159-\u0001\u0000\u0000\u0000\u015a"+ - "\u0158\u0001\u0000\u0000\u0000\u015b\u015c\u0005A\u0000\u0000\u015c\u015d"+ - "\u0003,\u0016\u0000\u015d\u015e\u0005B\u0000\u0000\u015e/\u0001\u0000"+ - "\u0000\u0000\u015f\u0160\u0005\u0013\u0000\u0000\u0160\u0165\u0003$\u0012"+ - "\u0000\u0161\u0162\u0005!\u0000\u0000\u0162\u0164\u0003$\u0012\u0000\u0163"+ - "\u0161\u0001\u0000\u0000\u0000\u0164\u0167\u0001\u0000\u0000\u0000\u0165"+ - "\u0163\u0001\u0000\u0000\u0000\u0165\u0166\u0001\u0000\u0000\u0000\u0166"+ - "\u0169\u0001\u0000\u0000\u0000\u0167\u0165\u0001\u0000\u0000\u0000\u0168"+ - "\u016a\u00036\u001b\u0000\u0169\u0168\u0001\u0000\u0000\u0000\u0169\u016a"+ - "\u0001\u0000\u0000\u0000\u016a\u016d\u0001\u0000\u0000\u0000\u016b\u016c"+ - "\u0005\u001c\u0000\u0000\u016c\u016e\u0003\u001e\u000f\u0000\u016d\u016b"+ - "\u0001\u0000\u0000\u0000\u016d\u016e\u0001\u0000\u0000\u0000\u016e1\u0001"+ - "\u0000\u0000\u0000\u016f\u0170\u0005\u0004\u0000\u0000\u0170\u0171\u0003"+ - "\u001e\u000f\u0000\u01713\u0001\u0000\u0000\u0000\u0172\u0174\u0005\u000f"+ - "\u0000\u0000\u0173\u0175\u00036\u001b\u0000\u0174\u0173\u0001\u0000\u0000"+ - "\u0000\u0174\u0175\u0001\u0000\u0000\u0000\u0175\u0178\u0001\u0000\u0000"+ - "\u0000\u0176\u0177\u0005\u001c\u0000\u0000\u0177\u0179\u0003\u001e\u000f"+ - "\u0000\u0178\u0176\u0001\u0000\u0000\u0000\u0178\u0179\u0001\u0000\u0000"+ - "\u0000\u01795\u0001\u0000\u0000\u0000\u017a\u017f\u00038\u001c\u0000\u017b"+ - "\u017c\u0005!\u0000\u0000\u017c\u017e\u00038\u001c\u0000\u017d\u017b\u0001"+ - 
"\u0000\u0000\u0000\u017e\u0181\u0001\u0000\u0000\u0000\u017f\u017d\u0001"+ - "\u0000\u0000\u0000\u017f\u0180\u0001\u0000\u0000\u0000\u01807\u0001\u0000"+ - "\u0000\u0000\u0181\u017f\u0001\u0000\u0000\u0000\u0182\u0185\u0003 \u0010"+ - "\u0000\u0183\u0184\u0005\u0010\u0000\u0000\u0184\u0186\u0003\n\u0005\u0000"+ - "\u0185\u0183\u0001\u0000\u0000\u0000\u0185\u0186\u0001\u0000\u0000\u0000"+ - "\u01869\u0001\u0000\u0000\u0000\u0187\u018c\u0003H$\u0000\u0188\u0189"+ - "\u0005#\u0000\u0000\u0189\u018b\u0003H$\u0000\u018a\u0188\u0001\u0000"+ - "\u0000\u0000\u018b\u018e\u0001\u0000\u0000\u0000\u018c\u018a\u0001\u0000"+ - "\u0000\u0000\u018c\u018d\u0001\u0000\u0000\u0000\u018d;\u0001\u0000\u0000"+ - "\u0000\u018e\u018c\u0001\u0000\u0000\u0000\u018f\u0194\u0003B!\u0000\u0190"+ - "\u0191\u0005#\u0000\u0000\u0191\u0193\u0003B!\u0000\u0192\u0190\u0001"+ - "\u0000\u0000\u0000\u0193\u0196\u0001\u0000\u0000\u0000\u0194\u0192\u0001"+ - "\u0000\u0000\u0000\u0194\u0195\u0001\u0000\u0000\u0000\u0195=\u0001\u0000"+ - "\u0000\u0000\u0196\u0194\u0001\u0000\u0000\u0000\u0197\u019c\u0003<\u001e"+ - "\u0000\u0198\u0199\u0005!\u0000\u0000\u0199\u019b\u0003<\u001e\u0000\u019a"+ - "\u0198\u0001\u0000\u0000\u0000\u019b\u019e\u0001\u0000\u0000\u0000\u019c"+ - "\u019a\u0001\u0000\u0000\u0000\u019c\u019d\u0001\u0000\u0000\u0000\u019d"+ - "?\u0001\u0000\u0000\u0000\u019e\u019c\u0001\u0000\u0000\u0000\u019f\u01a0"+ - "\u0007\u0003\u0000\u0000\u01a0A\u0001\u0000\u0000\u0000\u01a1\u01a4\u0005"+ - "P\u0000\u0000\u01a2\u01a4\u0003F#\u0000\u01a3\u01a1\u0001\u0000\u0000"+ - "\u0000\u01a3\u01a2\u0001\u0000\u0000\u0000\u01a4C\u0001\u0000\u0000\u0000"+ - "\u01a5\u01d0\u0005,\u0000\u0000\u01a6\u01a7\u0003h4\u0000\u01a7\u01a8"+ - "\u0005C\u0000\u0000\u01a8\u01d0\u0001\u0000\u0000\u0000\u01a9\u01d0\u0003"+ - "f3\u0000\u01aa\u01d0\u0003h4\u0000\u01ab\u01d0\u0003b1\u0000\u01ac\u01d0"+ - "\u0003F#\u0000\u01ad\u01d0\u0003j5\u0000\u01ae\u01af\u0005A\u0000\u0000"+ - 
"\u01af\u01b4\u0003d2\u0000\u01b0\u01b1\u0005!\u0000\u0000\u01b1\u01b3"+ - "\u0003d2\u0000\u01b2\u01b0\u0001\u0000\u0000\u0000\u01b3\u01b6\u0001\u0000"+ - "\u0000\u0000\u01b4\u01b2\u0001\u0000\u0000\u0000\u01b4\u01b5\u0001\u0000"+ - "\u0000\u0000\u01b5\u01b7\u0001\u0000\u0000\u0000\u01b6\u01b4\u0001\u0000"+ - "\u0000\u0000\u01b7\u01b8\u0005B\u0000\u0000\u01b8\u01d0\u0001\u0000\u0000"+ - "\u0000\u01b9\u01ba\u0005A\u0000\u0000\u01ba\u01bf\u0003b1\u0000\u01bb"+ - "\u01bc\u0005!\u0000\u0000\u01bc\u01be\u0003b1\u0000\u01bd\u01bb\u0001"+ - "\u0000\u0000\u0000\u01be\u01c1\u0001\u0000\u0000\u0000\u01bf\u01bd\u0001"+ - "\u0000\u0000\u0000\u01bf\u01c0\u0001\u0000\u0000\u0000\u01c0\u01c2\u0001"+ - "\u0000\u0000\u0000\u01c1\u01bf\u0001\u0000\u0000\u0000\u01c2\u01c3\u0005"+ - "B\u0000\u0000\u01c3\u01d0\u0001\u0000\u0000\u0000\u01c4\u01c5\u0005A\u0000"+ - "\u0000\u01c5\u01ca\u0003j5\u0000\u01c6\u01c7\u0005!\u0000\u0000\u01c7"+ - "\u01c9\u0003j5\u0000\u01c8\u01c6\u0001\u0000\u0000\u0000\u01c9\u01cc\u0001"+ - "\u0000\u0000\u0000\u01ca\u01c8\u0001\u0000\u0000\u0000\u01ca\u01cb\u0001"+ - "\u0000\u0000\u0000\u01cb\u01cd\u0001\u0000\u0000\u0000\u01cc\u01ca\u0001"+ - "\u0000\u0000\u0000\u01cd\u01ce\u0005B\u0000\u0000\u01ce\u01d0\u0001\u0000"+ - "\u0000\u0000\u01cf\u01a5\u0001\u0000\u0000\u0000\u01cf\u01a6\u0001\u0000"+ - "\u0000\u0000\u01cf\u01a9\u0001\u0000\u0000\u0000\u01cf\u01aa\u0001\u0000"+ - "\u0000\u0000\u01cf\u01ab\u0001\u0000\u0000\u0000\u01cf\u01ac\u0001\u0000"+ - "\u0000\u0000\u01cf\u01ad\u0001\u0000\u0000\u0000\u01cf\u01ae\u0001\u0000"+ - "\u0000\u0000\u01cf\u01b9\u0001\u0000\u0000\u0000\u01cf\u01c4\u0001\u0000"+ - "\u0000\u0000\u01d0E\u0001\u0000\u0000\u0000\u01d1\u01d4\u0005/\u0000\u0000"+ - "\u01d2\u01d4\u0005@\u0000\u0000\u01d3\u01d1\u0001\u0000\u0000\u0000\u01d3"+ - "\u01d2\u0001\u0000\u0000\u0000\u01d4G\u0001\u0000\u0000\u0000\u01d5\u01d8"+ - "\u0003@ \u0000\u01d6\u01d8\u0003F#\u0000\u01d7\u01d5\u0001\u0000\u0000"+ - 
"\u0000\u01d7\u01d6\u0001\u0000\u0000\u0000\u01d8I\u0001\u0000\u0000\u0000"+ - "\u01d9\u01da\u0005\t\u0000\u0000\u01da\u01db\u0005\u001a\u0000\u0000\u01db"+ - "K\u0001\u0000\u0000\u0000\u01dc\u01dd\u0005\u000e\u0000\u0000\u01dd\u01e2"+ - "\u0003N\'\u0000\u01de\u01df\u0005!\u0000\u0000\u01df\u01e1\u0003N\'\u0000"+ - "\u01e0\u01de\u0001\u0000\u0000\u0000\u01e1\u01e4\u0001\u0000\u0000\u0000"+ - "\u01e2\u01e0\u0001\u0000\u0000\u0000\u01e2\u01e3\u0001\u0000\u0000\u0000"+ - "\u01e3M\u0001\u0000\u0000\u0000\u01e4\u01e2\u0001\u0000\u0000\u0000\u01e5"+ - "\u01e7\u0003\n\u0005\u0000\u01e6\u01e8\u0007\u0004\u0000\u0000\u01e7\u01e6"+ - "\u0001\u0000\u0000\u0000\u01e7\u01e8\u0001\u0000\u0000\u0000\u01e8\u01eb"+ - "\u0001\u0000\u0000\u0000\u01e9\u01ea\u0005-\u0000\u0000\u01ea\u01ec\u0007"+ - "\u0005\u0000\u0000\u01eb\u01e9\u0001\u0000\u0000\u0000\u01eb\u01ec\u0001"+ - "\u0000\u0000\u0000\u01ecO\u0001\u0000\u0000\u0000\u01ed\u01ee\u0005\b"+ - "\u0000\u0000\u01ee\u01ef\u0003>\u001f\u0000\u01efQ\u0001\u0000\u0000\u0000"+ - "\u01f0\u01f1\u0005\u0002\u0000\u0000\u01f1\u01f2\u0003>\u001f\u0000\u01f2"+ - "S\u0001\u0000\u0000\u0000\u01f3\u01f4\u0005\u000b\u0000\u0000\u01f4\u01f9"+ - "\u0003V+\u0000\u01f5\u01f6\u0005!\u0000\u0000\u01f6\u01f8\u0003V+\u0000"+ - "\u01f7\u01f5\u0001\u0000\u0000\u0000\u01f8\u01fb\u0001\u0000\u0000\u0000"+ - "\u01f9\u01f7\u0001\u0000\u0000\u0000\u01f9\u01fa\u0001\u0000\u0000\u0000"+ - "\u01faU\u0001\u0000\u0000\u0000\u01fb\u01f9\u0001\u0000\u0000\u0000\u01fc"+ - "\u01fd\u0003<\u001e\u0000\u01fd\u01fe\u0005T\u0000\u0000\u01fe\u01ff\u0003"+ - "<\u001e\u0000\u01ffW\u0001\u0000\u0000\u0000\u0200\u0201\u0005\u0001\u0000"+ - "\u0000\u0201\u0202\u0003\u0014\n\u0000\u0202\u0204\u0003j5\u0000\u0203"+ - "\u0205\u0003^/\u0000\u0204\u0203\u0001\u0000\u0000\u0000\u0204\u0205\u0001"+ - "\u0000\u0000\u0000\u0205Y\u0001\u0000\u0000\u0000\u0206\u0207\u0005\u0007"+ - "\u0000\u0000\u0207\u0208\u0003\u0014\n\u0000\u0208\u0209\u0003j5\u0000"+ - 
"\u0209[\u0001\u0000\u0000\u0000\u020a\u020b\u0005\n\u0000\u0000\u020b"+ - "\u020c\u0003:\u001d\u0000\u020c]\u0001\u0000\u0000\u0000\u020d\u0212\u0003"+ - "`0\u0000\u020e\u020f\u0005!\u0000\u0000\u020f\u0211\u0003`0\u0000\u0210"+ - "\u020e\u0001\u0000\u0000\u0000\u0211\u0214\u0001\u0000\u0000\u0000\u0212"+ - "\u0210\u0001\u0000\u0000\u0000\u0212\u0213\u0001\u0000\u0000\u0000\u0213"+ - "_\u0001\u0000\u0000\u0000\u0214\u0212\u0001\u0000\u0000\u0000\u0215\u0216"+ - "\u0003@ \u0000\u0216\u0217\u0005\u001f\u0000\u0000\u0217\u0218\u0003D"+ - "\"\u0000\u0218a\u0001\u0000\u0000\u0000\u0219\u021a\u0007\u0006\u0000"+ - "\u0000\u021ac\u0001\u0000\u0000\u0000\u021b\u021e\u0003f3\u0000\u021c"+ - "\u021e\u0003h4\u0000\u021d\u021b\u0001\u0000\u0000\u0000\u021d\u021c\u0001"+ - "\u0000\u0000\u0000\u021ee\u0001\u0000\u0000\u0000\u021f\u0221\u0007\u0000"+ - "\u0000\u0000\u0220\u021f\u0001\u0000\u0000\u0000\u0220\u0221\u0001\u0000"+ - "\u0000\u0000\u0221\u0222\u0001\u0000\u0000\u0000\u0222\u0223\u0005\u001b"+ - "\u0000\u0000\u0223g\u0001\u0000\u0000\u0000\u0224\u0226\u0007\u0000\u0000"+ - "\u0000\u0225\u0224\u0001\u0000\u0000\u0000\u0225\u0226\u0001\u0000\u0000"+ - "\u0000\u0226\u0227\u0001\u0000\u0000\u0000\u0227\u0228\u0005\u001a\u0000"+ - "\u0000\u0228i\u0001\u0000\u0000\u0000\u0229\u022a\u0005\u0019\u0000\u0000"+ - "\u022ak\u0001\u0000\u0000\u0000\u022b\u022c\u0007\u0007\u0000\u0000\u022c"+ - "m\u0001\u0000\u0000\u0000\u022d\u022e\u0005\u0005\u0000\u0000\u022e\u022f"+ - "\u0003p8\u0000\u022fo\u0001\u0000\u0000\u0000\u0230\u0231\u0005A\u0000"+ - "\u0000\u0231\u0232\u0003\u0002\u0001\u0000\u0232\u0233\u0005B\u0000\u0000"+ - "\u0233q\u0001\u0000\u0000\u0000\u0234\u0235\u0005\r\u0000\u0000\u0235"+ - "\u0236\u0005d\u0000\u0000\u0236s\u0001\u0000\u0000\u0000\u0237\u0238\u0005"+ - "\u0003\u0000\u0000\u0238\u023b\u0005Z\u0000\u0000\u0239\u023a\u0005X\u0000"+ - "\u0000\u023a\u023c\u0003<\u001e\u0000\u023b\u0239\u0001\u0000\u0000\u0000"+ - 
"\u023b\u023c\u0001\u0000\u0000\u0000\u023c\u0246\u0001\u0000\u0000\u0000"+ - "\u023d\u023e\u0005Y\u0000\u0000\u023e\u0243\u0003v;\u0000\u023f\u0240"+ - "\u0005!\u0000\u0000\u0240\u0242\u0003v;\u0000\u0241\u023f\u0001\u0000"+ - "\u0000\u0000\u0242\u0245\u0001\u0000\u0000\u0000\u0243\u0241\u0001\u0000"+ - "\u0000\u0000\u0243\u0244\u0001\u0000\u0000\u0000\u0244\u0247\u0001\u0000"+ - "\u0000\u0000\u0245\u0243\u0001\u0000\u0000\u0000\u0246\u023d\u0001\u0000"+ - "\u0000\u0000\u0246\u0247\u0001\u0000\u0000\u0000\u0247u\u0001\u0000\u0000"+ - "\u0000\u0248\u0249\u0003<\u001e\u0000\u0249\u024a\u0005\u001f\u0000\u0000"+ - "\u024a\u024c\u0001\u0000\u0000\u0000\u024b\u0248\u0001\u0000\u0000\u0000"+ - "\u024b\u024c\u0001\u0000\u0000\u0000\u024c\u024d\u0001\u0000\u0000\u0000"+ - "\u024d\u024e\u0003<\u001e\u0000\u024ew\u0001\u0000\u0000\u0000\u024f\u0250"+ - "\u0005\u0012\u0000\u0000\u0250\u0251\u0003$\u0012\u0000\u0251\u0252\u0005"+ - "X\u0000\u0000\u0252\u0253\u0003>\u001f\u0000\u0253y\u0001\u0000\u0000"+ - "\u0000\u0254\u0255\u0005\u0011\u0000\u0000\u0255\u0258\u00036\u001b\u0000"+ - "\u0256\u0257\u0005\u001c\u0000\u0000\u0257\u0259\u0003\u001e\u000f\u0000"+ - "\u0258\u0256\u0001\u0000\u0000\u0000\u0258\u0259\u0001\u0000\u0000\u0000"+ - "\u0259{\u0001\u0000\u0000\u0000;\u0087\u0090\u00a2\u00ae\u00b7\u00bf\u00c5"+ - "\u00cd\u00cf\u00d4\u00db\u00e0\u00eb\u00f1\u00f9\u00fb\u0106\u010d\u0118"+ - "\u011b\u0121\u012d\u0133\u013d\u0141\u0146\u0150\u0158\u0165\u0169\u016d"+ - "\u0174\u0178\u017f\u0185\u018c\u0194\u019c\u01a3\u01b4\u01bf\u01ca\u01cf"+ - "\u01d3\u01d7\u01e2\u01e7\u01eb\u01f9\u0204\u0212\u021d\u0220\u0225\u023b"+ - "\u0243\u0246\u024b\u0258"; + "\u001f\n\u001f\f\u001f\u019e\t\u001f\u0001 \u0001 \u0001!\u0001!\u0001"+ + "!\u0003!\u01a5\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ + "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01b4\b\"\n"+ + "\"\f\"\u01b7\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005"+ + 
"\"\u01bf\b\"\n\"\f\"\u01c2\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\""+ + "\u0001\"\u0005\"\u01ca\b\"\n\"\f\"\u01cd\t\"\u0001\"\u0001\"\u0003\"\u01d1"+ + "\b\"\u0001#\u0001#\u0003#\u01d5\b#\u0001$\u0001$\u0001$\u0003$\u01da\b"+ + "$\u0001%\u0001%\u0001%\u0001&\u0001&\u0001&\u0001&\u0005&\u01e3\b&\n&"+ + "\f&\u01e6\t&\u0001\'\u0001\'\u0003\'\u01ea\b\'\u0001\'\u0001\'\u0003\'"+ + "\u01ee\b\'\u0001(\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001"+ + "*\u0001*\u0005*\u01fa\b*\n*\f*\u01fd\t*\u0001+\u0001+\u0001+\u0001+\u0001"+ + ",\u0001,\u0001,\u0001,\u0003,\u0207\b,\u0001-\u0001-\u0001-\u0001-\u0001"+ + ".\u0001.\u0001.\u0001/\u0001/\u0001/\u0005/\u0213\b/\n/\f/\u0216\t/\u0001"+ + "0\u00010\u00010\u00010\u00011\u00011\u00012\u00012\u00032\u0220\b2\u0001"+ + "3\u00033\u0223\b3\u00013\u00013\u00014\u00034\u0228\b4\u00014\u00014\u0001"+ + "5\u00015\u00016\u00016\u00017\u00017\u00017\u00018\u00018\u00018\u0001"+ + "8\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0003:\u023e\b:\u0001"+ + ":\u0001:\u0001:\u0001:\u0005:\u0244\b:\n:\f:\u0247\t:\u0003:\u0249\b:"+ + "\u0001;\u0001;\u0001;\u0003;\u024e\b;\u0001;\u0001;\u0001<\u0001<\u0001"+ + "<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001=\u0003=\u025b\b=\u0001=\u0000"+ + "\u0004\u0002\n\u0012\u0014>\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ + "\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPR"+ + "TVXZ\\^`bdfhjlnprtvxz\u0000\b\u0001\u0000:;\u0001\u0000<>\u0002\u0000"+ + "\u0019\u0019LL\u0001\u0000CD\u0002\u0000\u001e\u001e\"\"\u0002\u0000%"+ + "%((\u0002\u0000$$22\u0002\u00003359\u0277\u0000|\u0001\u0000\u0000\u0000"+ + "\u0002\u007f\u0001\u0000\u0000\u0000\u0004\u0090\u0001\u0000\u0000\u0000"+ + "\u0006\u00a2\u0001\u0000\u0000\u0000\b\u00a4\u0001\u0000\u0000\u0000\n"+ + "\u00c5\u0001\u0000\u0000\u0000\f\u00e0\u0001\u0000\u0000\u0000\u000e\u00e2"+ + "\u0001\u0000\u0000\u0000\u0010\u00eb\u0001\u0000\u0000\u0000\u0012\u00f1"+ + 
"\u0001\u0000\u0000\u0000\u0014\u0106\u0001\u0000\u0000\u0000\u0016\u0110"+ + "\u0001\u0000\u0000\u0000\u0018\u0121\u0001\u0000\u0000\u0000\u001a\u0123"+ + "\u0001\u0000\u0000\u0000\u001c\u0125\u0001\u0000\u0000\u0000\u001e\u0128"+ + "\u0001\u0000\u0000\u0000 \u0133\u0001\u0000\u0000\u0000\"\u0137\u0001"+ + "\u0000\u0000\u0000$\u0146\u0001\u0000\u0000\u0000&\u014a\u0001\u0000\u0000"+ + "\u0000(\u014c\u0001\u0000\u0000\u0000*\u0150\u0001\u0000\u0000\u0000,"+ + "\u0152\u0001\u0000\u0000\u0000.\u015b\u0001\u0000\u0000\u00000\u015f\u0001"+ + "\u0000\u0000\u00002\u016f\u0001\u0000\u0000\u00004\u0172\u0001\u0000\u0000"+ + "\u00006\u017a\u0001\u0000\u0000\u00008\u0182\u0001\u0000\u0000\u0000:"+ + "\u0187\u0001\u0000\u0000\u0000<\u018f\u0001\u0000\u0000\u0000>\u0197\u0001"+ + "\u0000\u0000\u0000@\u019f\u0001\u0000\u0000\u0000B\u01a4\u0001\u0000\u0000"+ + "\u0000D\u01d0\u0001\u0000\u0000\u0000F\u01d4\u0001\u0000\u0000\u0000H"+ + "\u01d9\u0001\u0000\u0000\u0000J\u01db\u0001\u0000\u0000\u0000L\u01de\u0001"+ + "\u0000\u0000\u0000N\u01e7\u0001\u0000\u0000\u0000P\u01ef\u0001\u0000\u0000"+ + "\u0000R\u01f2\u0001\u0000\u0000\u0000T\u01f5\u0001\u0000\u0000\u0000V"+ + "\u01fe\u0001\u0000\u0000\u0000X\u0202\u0001\u0000\u0000\u0000Z\u0208\u0001"+ + "\u0000\u0000\u0000\\\u020c\u0001\u0000\u0000\u0000^\u020f\u0001\u0000"+ + "\u0000\u0000`\u0217\u0001\u0000\u0000\u0000b\u021b\u0001\u0000\u0000\u0000"+ + "d\u021f\u0001\u0000\u0000\u0000f\u0222\u0001\u0000\u0000\u0000h\u0227"+ + "\u0001\u0000\u0000\u0000j\u022b\u0001\u0000\u0000\u0000l\u022d\u0001\u0000"+ + "\u0000\u0000n\u022f\u0001\u0000\u0000\u0000p\u0232\u0001\u0000\u0000\u0000"+ + "r\u0236\u0001\u0000\u0000\u0000t\u0239\u0001\u0000\u0000\u0000v\u024d"+ + "\u0001\u0000\u0000\u0000x\u0251\u0001\u0000\u0000\u0000z\u0256\u0001\u0000"+ + "\u0000\u0000|}\u0003\u0002\u0001\u0000}~\u0005\u0000\u0000\u0001~\u0001"+ + "\u0001\u0000\u0000\u0000\u007f\u0080\u0006\u0001\uffff\uffff\u0000\u0080"+ + 
"\u0081\u0003\u0004\u0002\u0000\u0081\u0087\u0001\u0000\u0000\u0000\u0082"+ + "\u0083\n\u0001\u0000\u0000\u0083\u0084\u0005\u0018\u0000\u0000\u0084\u0086"+ + "\u0003\u0006\u0003\u0000\u0085\u0082\u0001\u0000\u0000\u0000\u0086\u0089"+ + "\u0001\u0000\u0000\u0000\u0087\u0085\u0001\u0000\u0000\u0000\u0087\u0088"+ + "\u0001\u0000\u0000\u0000\u0088\u0003\u0001\u0000\u0000\u0000\u0089\u0087"+ + "\u0001\u0000\u0000\u0000\u008a\u0091\u0003n7\u0000\u008b\u0091\u0003\""+ + "\u0011\u0000\u008c\u0091\u0003\u001c\u000e\u0000\u008d\u0091\u0003r9\u0000"+ + "\u008e\u008f\u0004\u0002\u0001\u0000\u008f\u0091\u00030\u0018\u0000\u0090"+ + "\u008a\u0001\u0000\u0000\u0000\u0090\u008b\u0001\u0000\u0000\u0000\u0090"+ + "\u008c\u0001\u0000\u0000\u0000\u0090\u008d\u0001\u0000\u0000\u0000\u0090"+ + "\u008e\u0001\u0000\u0000\u0000\u0091\u0005\u0001\u0000\u0000\u0000\u0092"+ + "\u00a3\u00032\u0019\u0000\u0093\u00a3\u0003\b\u0004\u0000\u0094\u00a3"+ + "\u0003P(\u0000\u0095\u00a3\u0003J%\u0000\u0096\u00a3\u00034\u001a\u0000"+ + "\u0097\u00a3\u0003L&\u0000\u0098\u00a3\u0003R)\u0000\u0099\u00a3\u0003"+ + "T*\u0000\u009a\u00a3\u0003X,\u0000\u009b\u00a3\u0003Z-\u0000\u009c\u00a3"+ + "\u0003t:\u0000\u009d\u00a3\u0003\\.\u0000\u009e\u009f\u0004\u0003\u0002"+ + "\u0000\u009f\u00a3\u0003z=\u0000\u00a0\u00a1\u0004\u0003\u0003\u0000\u00a1"+ + "\u00a3\u0003x<\u0000\u00a2\u0092\u0001\u0000\u0000\u0000\u00a2\u0093\u0001"+ + "\u0000\u0000\u0000\u00a2\u0094\u0001\u0000\u0000\u0000\u00a2\u0095\u0001"+ + "\u0000\u0000\u0000\u00a2\u0096\u0001\u0000\u0000\u0000\u00a2\u0097\u0001"+ + "\u0000\u0000\u0000\u00a2\u0098\u0001\u0000\u0000\u0000\u00a2\u0099\u0001"+ + "\u0000\u0000\u0000\u00a2\u009a\u0001\u0000\u0000\u0000\u00a2\u009b\u0001"+ + "\u0000\u0000\u0000\u00a2\u009c\u0001\u0000\u0000\u0000\u00a2\u009d\u0001"+ + "\u0000\u0000\u0000\u00a2\u009e\u0001\u0000\u0000\u0000\u00a2\u00a0\u0001"+ + "\u0000\u0000\u0000\u00a3\u0007\u0001\u0000\u0000\u0000\u00a4\u00a5\u0005"+ + 
"\u0010\u0000\u0000\u00a5\u00a6\u0003\n\u0005\u0000\u00a6\t\u0001\u0000"+ + "\u0000\u0000\u00a7\u00a8\u0006\u0005\uffff\uffff\u0000\u00a8\u00a9\u0005"+ + "+\u0000\u0000\u00a9\u00c6\u0003\n\u0005\b\u00aa\u00c6\u0003\u0010\b\u0000"+ + "\u00ab\u00c6\u0003\f\u0006\u0000\u00ac\u00ae\u0003\u0010\b\u0000\u00ad"+ + "\u00af\u0005+\u0000\u0000\u00ae\u00ad\u0001\u0000\u0000\u0000\u00ae\u00af"+ + "\u0001\u0000\u0000\u0000\u00af\u00b0\u0001\u0000\u0000\u0000\u00b0\u00b1"+ + "\u0005&\u0000\u0000\u00b1\u00b2\u0005*\u0000\u0000\u00b2\u00b7\u0003\u0010"+ + "\b\u0000\u00b3\u00b4\u0005!\u0000\u0000\u00b4\u00b6\u0003\u0010\b\u0000"+ + "\u00b5\u00b3\u0001\u0000\u0000\u0000\u00b6\u00b9\u0001\u0000\u0000\u0000"+ + "\u00b7\u00b5\u0001\u0000\u0000\u0000\u00b7\u00b8\u0001\u0000\u0000\u0000"+ + "\u00b8\u00ba\u0001\u0000\u0000\u0000\u00b9\u00b7\u0001\u0000\u0000\u0000"+ + "\u00ba\u00bb\u00051\u0000\u0000\u00bb\u00c6\u0001\u0000\u0000\u0000\u00bc"+ + "\u00bd\u0003\u0010\b\u0000\u00bd\u00bf\u0005\'\u0000\u0000\u00be\u00c0"+ + "\u0005+\u0000\u0000\u00bf\u00be\u0001\u0000\u0000\u0000\u00bf\u00c0\u0001"+ + "\u0000\u0000\u0000\u00c0\u00c1\u0001\u0000\u0000\u0000\u00c1\u00c2\u0005"+ + ",\u0000\u0000\u00c2\u00c6\u0001\u0000\u0000\u0000\u00c3\u00c4\u0004\u0005"+ + "\u0004\u0000\u00c4\u00c6\u0003\u000e\u0007\u0000\u00c5\u00a7\u0001\u0000"+ + "\u0000\u0000\u00c5\u00aa\u0001\u0000\u0000\u0000\u00c5\u00ab\u0001\u0000"+ + "\u0000\u0000\u00c5\u00ac\u0001\u0000\u0000\u0000\u00c5\u00bc\u0001\u0000"+ + "\u0000\u0000\u00c5\u00c3\u0001\u0000\u0000\u0000\u00c6\u00cf\u0001\u0000"+ + "\u0000\u0000\u00c7\u00c8\n\u0005\u0000\u0000\u00c8\u00c9\u0005\u001d\u0000"+ + "\u0000\u00c9\u00ce\u0003\n\u0005\u0006\u00ca\u00cb\n\u0004\u0000\u0000"+ + "\u00cb\u00cc\u0005.\u0000\u0000\u00cc\u00ce\u0003\n\u0005\u0005\u00cd"+ + "\u00c7\u0001\u0000\u0000\u0000\u00cd\u00ca\u0001\u0000\u0000\u0000\u00ce"+ + "\u00d1\u0001\u0000\u0000\u0000\u00cf\u00cd\u0001\u0000\u0000\u0000\u00cf"+ + 
"\u00d0\u0001\u0000\u0000\u0000\u00d0\u000b\u0001\u0000\u0000\u0000\u00d1"+ + "\u00cf\u0001\u0000\u0000\u0000\u00d2\u00d4\u0003\u0010\b\u0000\u00d3\u00d5"+ + "\u0005+\u0000\u0000\u00d4\u00d3\u0001\u0000\u0000\u0000\u00d4\u00d5\u0001"+ + "\u0000\u0000\u0000\u00d5\u00d6\u0001\u0000\u0000\u0000\u00d6\u00d7\u0005"+ + ")\u0000\u0000\u00d7\u00d8\u0003j5\u0000\u00d8\u00e1\u0001\u0000\u0000"+ + "\u0000\u00d9\u00db\u0003\u0010\b\u0000\u00da\u00dc\u0005+\u0000\u0000"+ + "\u00db\u00da\u0001\u0000\u0000\u0000\u00db\u00dc\u0001\u0000\u0000\u0000"+ + "\u00dc\u00dd\u0001\u0000\u0000\u0000\u00dd\u00de\u00050\u0000\u0000\u00de"+ + "\u00df\u0003j5\u0000\u00df\u00e1\u0001\u0000\u0000\u0000\u00e0\u00d2\u0001"+ + "\u0000\u0000\u0000\u00e0\u00d9\u0001\u0000\u0000\u0000\u00e1\r\u0001\u0000"+ + "\u0000\u0000\u00e2\u00e3\u0003\u0010\b\u0000\u00e3\u00e4\u0005?\u0000"+ + "\u0000\u00e4\u00e5\u0003j5\u0000\u00e5\u000f\u0001\u0000\u0000\u0000\u00e6"+ + "\u00ec\u0003\u0012\t\u0000\u00e7\u00e8\u0003\u0012\t\u0000\u00e8\u00e9"+ + "\u0003l6\u0000\u00e9\u00ea\u0003\u0012\t\u0000\u00ea\u00ec\u0001\u0000"+ + "\u0000\u0000\u00eb\u00e6\u0001\u0000\u0000\u0000\u00eb\u00e7\u0001\u0000"+ + "\u0000\u0000\u00ec\u0011\u0001\u0000\u0000\u0000\u00ed\u00ee\u0006\t\uffff"+ + "\uffff\u0000\u00ee\u00f2\u0003\u0014\n\u0000\u00ef\u00f0\u0007\u0000\u0000"+ + "\u0000\u00f0\u00f2\u0003\u0012\t\u0003\u00f1\u00ed\u0001\u0000\u0000\u0000"+ + "\u00f1\u00ef\u0001\u0000\u0000\u0000\u00f2\u00fb\u0001\u0000\u0000\u0000"+ + "\u00f3\u00f4\n\u0002\u0000\u0000\u00f4\u00f5\u0007\u0001\u0000\u0000\u00f5"+ + "\u00fa\u0003\u0012\t\u0003\u00f6\u00f7\n\u0001\u0000\u0000\u00f7\u00f8"+ + "\u0007\u0000\u0000\u0000\u00f8\u00fa\u0003\u0012\t\u0002\u00f9\u00f3\u0001"+ + "\u0000\u0000\u0000\u00f9\u00f6\u0001\u0000\u0000\u0000\u00fa\u00fd\u0001"+ + "\u0000\u0000\u0000\u00fb\u00f9\u0001\u0000\u0000\u0000\u00fb\u00fc\u0001"+ + "\u0000\u0000\u0000\u00fc\u0013\u0001\u0000\u0000\u0000\u00fd\u00fb\u0001"+ + 
"\u0000\u0000\u0000\u00fe\u00ff\u0006\n\uffff\uffff\u0000\u00ff\u0107\u0003"+ + "D\"\u0000\u0100\u0107\u0003:\u001d\u0000\u0101\u0107\u0003\u0016\u000b"+ + "\u0000\u0102\u0103\u0005*\u0000\u0000\u0103\u0104\u0003\n\u0005\u0000"+ + "\u0104\u0105\u00051\u0000\u0000\u0105\u0107\u0001\u0000\u0000\u0000\u0106"+ + "\u00fe\u0001\u0000\u0000\u0000\u0106\u0100\u0001\u0000\u0000\u0000\u0106"+ + "\u0101\u0001\u0000\u0000\u0000\u0106\u0102\u0001\u0000\u0000\u0000\u0107"+ + "\u010d\u0001\u0000\u0000\u0000\u0108\u0109\n\u0001\u0000\u0000\u0109\u010a"+ + "\u0005 \u0000\u0000\u010a\u010c\u0003\u001a\r\u0000\u010b\u0108\u0001"+ + "\u0000\u0000\u0000\u010c\u010f\u0001\u0000\u0000\u0000\u010d\u010b\u0001"+ + "\u0000\u0000\u0000\u010d\u010e\u0001\u0000\u0000\u0000\u010e\u0015\u0001"+ + "\u0000\u0000\u0000\u010f\u010d\u0001\u0000\u0000\u0000\u0110\u0111\u0003"+ + "\u0018\f\u0000\u0111\u011b\u0005*\u0000\u0000\u0112\u011c\u0005<\u0000"+ + "\u0000\u0113\u0118\u0003\n\u0005\u0000\u0114\u0115\u0005!\u0000\u0000"+ + "\u0115\u0117\u0003\n\u0005\u0000\u0116\u0114\u0001\u0000\u0000\u0000\u0117"+ + "\u011a\u0001\u0000\u0000\u0000\u0118\u0116\u0001\u0000\u0000\u0000\u0118"+ + "\u0119\u0001\u0000\u0000\u0000\u0119\u011c\u0001\u0000\u0000\u0000\u011a"+ + "\u0118\u0001\u0000\u0000\u0000\u011b\u0112\u0001\u0000\u0000\u0000\u011b"+ + "\u0113\u0001\u0000\u0000\u0000\u011b\u011c\u0001\u0000\u0000\u0000\u011c"+ + "\u011d\u0001\u0000\u0000\u0000\u011d\u011e\u00051\u0000\u0000\u011e\u0017"+ + "\u0001\u0000\u0000\u0000\u011f\u0122\u0005?\u0000\u0000\u0120\u0122\u0003"+ + "H$\u0000\u0121\u011f\u0001\u0000\u0000\u0000\u0121\u0120\u0001\u0000\u0000"+ + "\u0000\u0122\u0019\u0001\u0000\u0000\u0000\u0123\u0124\u0003@ \u0000\u0124"+ + "\u001b\u0001\u0000\u0000\u0000\u0125\u0126\u0005\f\u0000\u0000\u0126\u0127"+ + "\u0003\u001e\u000f\u0000\u0127\u001d\u0001\u0000\u0000\u0000\u0128\u012d"+ + "\u0003 \u0010\u0000\u0129\u012a\u0005!\u0000\u0000\u012a\u012c\u0003 "+ + 
"\u0010\u0000\u012b\u0129\u0001\u0000\u0000\u0000\u012c\u012f\u0001\u0000"+ + "\u0000\u0000\u012d\u012b\u0001\u0000\u0000\u0000\u012d\u012e\u0001\u0000"+ + "\u0000\u0000\u012e\u001f\u0001\u0000\u0000\u0000\u012f\u012d\u0001\u0000"+ + "\u0000\u0000\u0130\u0131\u0003:\u001d\u0000\u0131\u0132\u0005\u001f\u0000"+ + "\u0000\u0132\u0134\u0001\u0000\u0000\u0000\u0133\u0130\u0001\u0000\u0000"+ + "\u0000\u0133\u0134\u0001\u0000\u0000\u0000\u0134\u0135\u0001\u0000\u0000"+ + "\u0000\u0135\u0136\u0003\n\u0005\u0000\u0136!\u0001\u0000\u0000\u0000"+ + "\u0137\u0138\u0005\u0006\u0000\u0000\u0138\u013d\u0003$\u0012\u0000\u0139"+ + "\u013a\u0005!\u0000\u0000\u013a\u013c\u0003$\u0012\u0000\u013b\u0139\u0001"+ + "\u0000\u0000\u0000\u013c\u013f\u0001\u0000\u0000\u0000\u013d\u013b\u0001"+ + "\u0000\u0000\u0000\u013d\u013e\u0001\u0000\u0000\u0000\u013e\u0141\u0001"+ + "\u0000\u0000\u0000\u013f\u013d\u0001\u0000\u0000\u0000\u0140\u0142\u0003"+ + "*\u0015\u0000\u0141\u0140\u0001\u0000\u0000\u0000\u0141\u0142\u0001\u0000"+ + "\u0000\u0000\u0142#\u0001\u0000\u0000\u0000\u0143\u0144\u0003&\u0013\u0000"+ + "\u0144\u0145\u0005h\u0000\u0000\u0145\u0147\u0001\u0000\u0000\u0000\u0146"+ + "\u0143\u0001\u0000\u0000\u0000\u0146\u0147\u0001\u0000\u0000\u0000\u0147"+ + "\u0148\u0001\u0000\u0000\u0000\u0148\u0149\u0003(\u0014\u0000\u0149%\u0001"+ + "\u0000\u0000\u0000\u014a\u014b\u0005L\u0000\u0000\u014b\'\u0001\u0000"+ + "\u0000\u0000\u014c\u014d\u0007\u0002\u0000\u0000\u014d)\u0001\u0000\u0000"+ + "\u0000\u014e\u0151\u0003,\u0016\u0000\u014f\u0151\u0003.\u0017\u0000\u0150"+ + "\u014e\u0001\u0000\u0000\u0000\u0150\u014f\u0001\u0000\u0000\u0000\u0151"+ + "+\u0001\u0000\u0000\u0000\u0152\u0153\u0005K\u0000\u0000\u0153\u0158\u0005"+ + "L\u0000\u0000\u0154\u0155\u0005!\u0000\u0000\u0155\u0157\u0005L\u0000"+ + "\u0000\u0156\u0154\u0001\u0000\u0000\u0000\u0157\u015a\u0001\u0000\u0000"+ + "\u0000\u0158\u0156\u0001\u0000\u0000\u0000\u0158\u0159\u0001\u0000\u0000"+ + 
"\u0000\u0159-\u0001\u0000\u0000\u0000\u015a\u0158\u0001\u0000\u0000\u0000"+ + "\u015b\u015c\u0005A\u0000\u0000\u015c\u015d\u0003,\u0016\u0000\u015d\u015e"+ + "\u0005B\u0000\u0000\u015e/\u0001\u0000\u0000\u0000\u015f\u0160\u0005\u0013"+ + "\u0000\u0000\u0160\u0165\u0003$\u0012\u0000\u0161\u0162\u0005!\u0000\u0000"+ + "\u0162\u0164\u0003$\u0012\u0000\u0163\u0161\u0001\u0000\u0000\u0000\u0164"+ + "\u0167\u0001\u0000\u0000\u0000\u0165\u0163\u0001\u0000\u0000\u0000\u0165"+ + "\u0166\u0001\u0000\u0000\u0000\u0166\u0169\u0001\u0000\u0000\u0000\u0167"+ + "\u0165\u0001\u0000\u0000\u0000\u0168\u016a\u00036\u001b\u0000\u0169\u0168"+ + "\u0001\u0000\u0000\u0000\u0169\u016a\u0001\u0000\u0000\u0000\u016a\u016d"+ + "\u0001\u0000\u0000\u0000\u016b\u016c\u0005\u001c\u0000\u0000\u016c\u016e"+ + "\u0003\u001e\u000f\u0000\u016d\u016b\u0001\u0000\u0000\u0000\u016d\u016e"+ + "\u0001\u0000\u0000\u0000\u016e1\u0001\u0000\u0000\u0000\u016f\u0170\u0005"+ + "\u0004\u0000\u0000\u0170\u0171\u0003\u001e\u000f\u0000\u01713\u0001\u0000"+ + "\u0000\u0000\u0172\u0174\u0005\u000f\u0000\u0000\u0173\u0175\u00036\u001b"+ + "\u0000\u0174\u0173\u0001\u0000\u0000\u0000\u0174\u0175\u0001\u0000\u0000"+ + "\u0000\u0175\u0178\u0001\u0000\u0000\u0000\u0176\u0177\u0005\u001c\u0000"+ + "\u0000\u0177\u0179\u0003\u001e\u000f\u0000\u0178\u0176\u0001\u0000\u0000"+ + "\u0000\u0178\u0179\u0001\u0000\u0000\u0000\u01795\u0001\u0000\u0000\u0000"+ + "\u017a\u017f\u00038\u001c\u0000\u017b\u017c\u0005!\u0000\u0000\u017c\u017e"+ + "\u00038\u001c\u0000\u017d\u017b\u0001\u0000\u0000\u0000\u017e\u0181\u0001"+ + "\u0000\u0000\u0000\u017f\u017d\u0001\u0000\u0000\u0000\u017f\u0180\u0001"+ + "\u0000\u0000\u0000\u01807\u0001\u0000\u0000\u0000\u0181\u017f\u0001\u0000"+ + "\u0000\u0000\u0182\u0185\u0003 \u0010\u0000\u0183\u0184\u0005\u0010\u0000"+ + "\u0000\u0184\u0186\u0003\n\u0005\u0000\u0185\u0183\u0001\u0000\u0000\u0000"+ + "\u0185\u0186\u0001\u0000\u0000\u0000\u01869\u0001\u0000\u0000\u0000\u0187"+ + 
"\u018c\u0003H$\u0000\u0188\u0189\u0005#\u0000\u0000\u0189\u018b\u0003"+ + "H$\u0000\u018a\u0188\u0001\u0000\u0000\u0000\u018b\u018e\u0001\u0000\u0000"+ + "\u0000\u018c\u018a\u0001\u0000\u0000\u0000\u018c\u018d\u0001\u0000\u0000"+ + "\u0000\u018d;\u0001\u0000\u0000\u0000\u018e\u018c\u0001\u0000\u0000\u0000"+ + "\u018f\u0194\u0003B!\u0000\u0190\u0191\u0005#\u0000\u0000\u0191\u0193"+ + "\u0003B!\u0000\u0192\u0190\u0001\u0000\u0000\u0000\u0193\u0196\u0001\u0000"+ + "\u0000\u0000\u0194\u0192\u0001\u0000\u0000\u0000\u0194\u0195\u0001\u0000"+ + "\u0000\u0000\u0195=\u0001\u0000\u0000\u0000\u0196\u0194\u0001\u0000\u0000"+ + "\u0000\u0197\u019c\u0003<\u001e\u0000\u0198\u0199\u0005!\u0000\u0000\u0199"+ + "\u019b\u0003<\u001e\u0000\u019a\u0198\u0001\u0000\u0000\u0000\u019b\u019e"+ + "\u0001\u0000\u0000\u0000\u019c\u019a\u0001\u0000\u0000\u0000\u019c\u019d"+ + "\u0001\u0000\u0000\u0000\u019d?\u0001\u0000\u0000\u0000\u019e\u019c\u0001"+ + "\u0000\u0000\u0000\u019f\u01a0\u0007\u0003\u0000\u0000\u01a0A\u0001\u0000"+ + "\u0000\u0000\u01a1\u01a5\u0005P\u0000\u0000\u01a2\u01a3\u0004!\n\u0000"+ + "\u01a3\u01a5\u0003F#\u0000\u01a4\u01a1\u0001\u0000\u0000\u0000\u01a4\u01a2"+ + "\u0001\u0000\u0000\u0000\u01a5C\u0001\u0000\u0000\u0000\u01a6\u01d1\u0005"+ + ",\u0000\u0000\u01a7\u01a8\u0003h4\u0000\u01a8\u01a9\u0005C\u0000\u0000"+ + "\u01a9\u01d1\u0001\u0000\u0000\u0000\u01aa\u01d1\u0003f3\u0000\u01ab\u01d1"+ + "\u0003h4\u0000\u01ac\u01d1\u0003b1\u0000\u01ad\u01d1\u0003F#\u0000\u01ae"+ + "\u01d1\u0003j5\u0000\u01af\u01b0\u0005A\u0000\u0000\u01b0\u01b5\u0003"+ + "d2\u0000\u01b1\u01b2\u0005!\u0000\u0000\u01b2\u01b4\u0003d2\u0000\u01b3"+ + "\u01b1\u0001\u0000\u0000\u0000\u01b4\u01b7\u0001\u0000\u0000\u0000\u01b5"+ + "\u01b3\u0001\u0000\u0000\u0000\u01b5\u01b6\u0001\u0000\u0000\u0000\u01b6"+ + "\u01b8\u0001\u0000\u0000\u0000\u01b7\u01b5\u0001\u0000\u0000\u0000\u01b8"+ + "\u01b9\u0005B\u0000\u0000\u01b9\u01d1\u0001\u0000\u0000\u0000\u01ba\u01bb"+ + 
"\u0005A\u0000\u0000\u01bb\u01c0\u0003b1\u0000\u01bc\u01bd\u0005!\u0000"+ + "\u0000\u01bd\u01bf\u0003b1\u0000\u01be\u01bc\u0001\u0000\u0000\u0000\u01bf"+ + "\u01c2\u0001\u0000\u0000\u0000\u01c0\u01be\u0001\u0000\u0000\u0000\u01c0"+ + "\u01c1\u0001\u0000\u0000\u0000\u01c1\u01c3\u0001\u0000\u0000\u0000\u01c2"+ + "\u01c0\u0001\u0000\u0000\u0000\u01c3\u01c4\u0005B\u0000\u0000\u01c4\u01d1"+ + "\u0001\u0000\u0000\u0000\u01c5\u01c6\u0005A\u0000\u0000\u01c6\u01cb\u0003"+ + "j5\u0000\u01c7\u01c8\u0005!\u0000\u0000\u01c8\u01ca\u0003j5\u0000\u01c9"+ + "\u01c7\u0001\u0000\u0000\u0000\u01ca\u01cd\u0001\u0000\u0000\u0000\u01cb"+ + "\u01c9\u0001\u0000\u0000\u0000\u01cb\u01cc\u0001\u0000\u0000\u0000\u01cc"+ + "\u01ce\u0001\u0000\u0000\u0000\u01cd\u01cb\u0001\u0000\u0000\u0000\u01ce"+ + "\u01cf\u0005B\u0000\u0000\u01cf\u01d1\u0001\u0000\u0000\u0000\u01d0\u01a6"+ + "\u0001\u0000\u0000\u0000\u01d0\u01a7\u0001\u0000\u0000\u0000\u01d0\u01aa"+ + "\u0001\u0000\u0000\u0000\u01d0\u01ab\u0001\u0000\u0000\u0000\u01d0\u01ac"+ + "\u0001\u0000\u0000\u0000\u01d0\u01ad\u0001\u0000\u0000\u0000\u01d0\u01ae"+ + "\u0001\u0000\u0000\u0000\u01d0\u01af\u0001\u0000\u0000\u0000\u01d0\u01ba"+ + "\u0001\u0000\u0000\u0000\u01d0\u01c5\u0001\u0000\u0000\u0000\u01d1E\u0001"+ + "\u0000\u0000\u0000\u01d2\u01d5\u0005/\u0000\u0000\u01d3\u01d5\u0005@\u0000"+ + "\u0000\u01d4\u01d2\u0001\u0000\u0000\u0000\u01d4\u01d3\u0001\u0000\u0000"+ + "\u0000\u01d5G\u0001\u0000\u0000\u0000\u01d6\u01da\u0003@ \u0000\u01d7"+ + "\u01d8\u0004$\u000b\u0000\u01d8\u01da\u0003F#\u0000\u01d9\u01d6\u0001"+ + "\u0000\u0000\u0000\u01d9\u01d7\u0001\u0000\u0000\u0000\u01daI\u0001\u0000"+ + "\u0000\u0000\u01db\u01dc\u0005\t\u0000\u0000\u01dc\u01dd\u0005\u001a\u0000"+ + "\u0000\u01ddK\u0001\u0000\u0000\u0000\u01de\u01df\u0005\u000e\u0000\u0000"+ + "\u01df\u01e4\u0003N\'\u0000\u01e0\u01e1\u0005!\u0000\u0000\u01e1\u01e3"+ + "\u0003N\'\u0000\u01e2\u01e0\u0001\u0000\u0000\u0000\u01e3\u01e6\u0001"+ + 
"\u0000\u0000\u0000\u01e4\u01e2\u0001\u0000\u0000\u0000\u01e4\u01e5\u0001"+ + "\u0000\u0000\u0000\u01e5M\u0001\u0000\u0000\u0000\u01e6\u01e4\u0001\u0000"+ + "\u0000\u0000\u01e7\u01e9\u0003\n\u0005\u0000\u01e8\u01ea\u0007\u0004\u0000"+ + "\u0000\u01e9\u01e8\u0001\u0000\u0000\u0000\u01e9\u01ea\u0001\u0000\u0000"+ + "\u0000\u01ea\u01ed\u0001\u0000\u0000\u0000\u01eb\u01ec\u0005-\u0000\u0000"+ + "\u01ec\u01ee\u0007\u0005\u0000\u0000\u01ed\u01eb\u0001\u0000\u0000\u0000"+ + "\u01ed\u01ee\u0001\u0000\u0000\u0000\u01eeO\u0001\u0000\u0000\u0000\u01ef"+ + "\u01f0\u0005\b\u0000\u0000\u01f0\u01f1\u0003>\u001f\u0000\u01f1Q\u0001"+ + "\u0000\u0000\u0000\u01f2\u01f3\u0005\u0002\u0000\u0000\u01f3\u01f4\u0003"+ + ">\u001f\u0000\u01f4S\u0001\u0000\u0000\u0000\u01f5\u01f6\u0005\u000b\u0000"+ + "\u0000\u01f6\u01fb\u0003V+\u0000\u01f7\u01f8\u0005!\u0000\u0000\u01f8"+ + "\u01fa\u0003V+\u0000\u01f9\u01f7\u0001\u0000\u0000\u0000\u01fa\u01fd\u0001"+ + "\u0000\u0000\u0000\u01fb\u01f9\u0001\u0000\u0000\u0000\u01fb\u01fc\u0001"+ + "\u0000\u0000\u0000\u01fcU\u0001\u0000\u0000\u0000\u01fd\u01fb\u0001\u0000"+ + "\u0000\u0000\u01fe\u01ff\u0003<\u001e\u0000\u01ff\u0200\u0005T\u0000\u0000"+ + "\u0200\u0201\u0003<\u001e\u0000\u0201W\u0001\u0000\u0000\u0000\u0202\u0203"+ + "\u0005\u0001\u0000\u0000\u0203\u0204\u0003\u0014\n\u0000\u0204\u0206\u0003"+ + "j5\u0000\u0205\u0207\u0003^/\u0000\u0206\u0205\u0001\u0000\u0000\u0000"+ + "\u0206\u0207\u0001\u0000\u0000\u0000\u0207Y\u0001\u0000\u0000\u0000\u0208"+ + "\u0209\u0005\u0007\u0000\u0000\u0209\u020a\u0003\u0014\n\u0000\u020a\u020b"+ + "\u0003j5\u0000\u020b[\u0001\u0000\u0000\u0000\u020c\u020d\u0005\n\u0000"+ + "\u0000\u020d\u020e\u0003:\u001d\u0000\u020e]\u0001\u0000\u0000\u0000\u020f"+ + "\u0214\u0003`0\u0000\u0210\u0211\u0005!\u0000\u0000\u0211\u0213\u0003"+ + "`0\u0000\u0212\u0210\u0001\u0000\u0000\u0000\u0213\u0216\u0001\u0000\u0000"+ + "\u0000\u0214\u0212\u0001\u0000\u0000\u0000\u0214\u0215\u0001\u0000\u0000"+ + 
"\u0000\u0215_\u0001\u0000\u0000\u0000\u0216\u0214\u0001\u0000\u0000\u0000"+ + "\u0217\u0218\u0003@ \u0000\u0218\u0219\u0005\u001f\u0000\u0000\u0219\u021a"+ + "\u0003D\"\u0000\u021aa\u0001\u0000\u0000\u0000\u021b\u021c\u0007\u0006"+ + "\u0000\u0000\u021cc\u0001\u0000\u0000\u0000\u021d\u0220\u0003f3\u0000"+ + "\u021e\u0220\u0003h4\u0000\u021f\u021d\u0001\u0000\u0000\u0000\u021f\u021e"+ + "\u0001\u0000\u0000\u0000\u0220e\u0001\u0000\u0000\u0000\u0221\u0223\u0007"+ + "\u0000\u0000\u0000\u0222\u0221\u0001\u0000\u0000\u0000\u0222\u0223\u0001"+ + "\u0000\u0000\u0000\u0223\u0224\u0001\u0000\u0000\u0000\u0224\u0225\u0005"+ + "\u001b\u0000\u0000\u0225g\u0001\u0000\u0000\u0000\u0226\u0228\u0007\u0000"+ + "\u0000\u0000\u0227\u0226\u0001\u0000\u0000\u0000\u0227\u0228\u0001\u0000"+ + "\u0000\u0000\u0228\u0229\u0001\u0000\u0000\u0000\u0229\u022a\u0005\u001a"+ + "\u0000\u0000\u022ai\u0001\u0000\u0000\u0000\u022b\u022c\u0005\u0019\u0000"+ + "\u0000\u022ck\u0001\u0000\u0000\u0000\u022d\u022e\u0007\u0007\u0000\u0000"+ + "\u022em\u0001\u0000\u0000\u0000\u022f\u0230\u0005\u0005\u0000\u0000\u0230"+ + "\u0231\u0003p8\u0000\u0231o\u0001\u0000\u0000\u0000\u0232\u0233\u0005"+ + "A\u0000\u0000\u0233\u0234\u0003\u0002\u0001\u0000\u0234\u0235\u0005B\u0000"+ + "\u0000\u0235q\u0001\u0000\u0000\u0000\u0236\u0237\u0005\r\u0000\u0000"+ + "\u0237\u0238\u0005d\u0000\u0000\u0238s\u0001\u0000\u0000\u0000\u0239\u023a"+ + "\u0005\u0003\u0000\u0000\u023a\u023d\u0005Z\u0000\u0000\u023b\u023c\u0005"+ + "X\u0000\u0000\u023c\u023e\u0003<\u001e\u0000\u023d\u023b\u0001\u0000\u0000"+ + "\u0000\u023d\u023e\u0001\u0000\u0000\u0000\u023e\u0248\u0001\u0000\u0000"+ + "\u0000\u023f\u0240\u0005Y\u0000\u0000\u0240\u0245\u0003v;\u0000\u0241"+ + "\u0242\u0005!\u0000\u0000\u0242\u0244\u0003v;\u0000\u0243\u0241\u0001"+ + "\u0000\u0000\u0000\u0244\u0247\u0001\u0000\u0000\u0000\u0245\u0243\u0001"+ + "\u0000\u0000\u0000\u0245\u0246\u0001\u0000\u0000\u0000\u0246\u0249\u0001"+ + 
"\u0000\u0000\u0000\u0247\u0245\u0001\u0000\u0000\u0000\u0248\u023f\u0001"+ + "\u0000\u0000\u0000\u0248\u0249\u0001\u0000\u0000\u0000\u0249u\u0001\u0000"+ + "\u0000\u0000\u024a\u024b\u0003<\u001e\u0000\u024b\u024c\u0005\u001f\u0000"+ + "\u0000\u024c\u024e\u0001\u0000\u0000\u0000\u024d\u024a\u0001\u0000\u0000"+ + "\u0000\u024d\u024e\u0001\u0000\u0000\u0000\u024e\u024f\u0001\u0000\u0000"+ + "\u0000\u024f\u0250\u0003<\u001e\u0000\u0250w\u0001\u0000\u0000\u0000\u0251"+ + "\u0252\u0005\u0012\u0000\u0000\u0252\u0253\u0003$\u0012\u0000\u0253\u0254"+ + "\u0005X\u0000\u0000\u0254\u0255\u0003>\u001f\u0000\u0255y\u0001\u0000"+ + "\u0000\u0000\u0256\u0257\u0005\u0011\u0000\u0000\u0257\u025a\u00036\u001b"+ + "\u0000\u0258\u0259\u0005\u001c\u0000\u0000\u0259\u025b\u0003\u001e\u000f"+ + "\u0000\u025a\u0258\u0001\u0000\u0000\u0000\u025a\u025b\u0001\u0000\u0000"+ + "\u0000\u025b{\u0001\u0000\u0000\u0000;\u0087\u0090\u00a2\u00ae\u00b7\u00bf"+ + "\u00c5\u00cd\u00cf\u00d4\u00db\u00e0\u00eb\u00f1\u00f9\u00fb\u0106\u010d"+ + "\u0118\u011b\u0121\u012d\u0133\u013d\u0141\u0146\u0150\u0158\u0165\u0169"+ + "\u016d\u0174\u0178\u017f\u0185\u018c\u0194\u019c\u01a4\u01b5\u01c0\u01cb"+ + "\u01d0\u01d4\u01d9\u01e4\u01e9\u01ed\u01fb\u0206\u0214\u021f\u0222\u0227"+ + "\u023d\u0245\u0248\u024d\u025a"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java index 0123186ced1aa..7deaff6ebe6bb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java @@ -99,6 +99,60 @@ public void testNamedParams() throws IOException { Locale locale = randomLocale(random()); QueryBuilder filter = randomQueryBuilder(); 
+ String paramsString = """ + ,"params":[ {"n1" : "8.15.0"}, { "n2" : 0.05}, {"n3" : -799810013}, + {"n4" : "127.0.0.1"}, {"n5" : "esql"}, {"n_6" : null}, {"n7_" : false}, + {"_n1" : "8.15.0"}, { "__n2" : 0.05}, {"__3" : -799810013}, + {"__4n" : "127.0.0.1"}, {"_n5" : "esql"}, {"_n6" : null}, {"_n7" : false}] }"""; + + List params = List.of( + paramAsConstant("n1", "8.15.0"), + paramAsConstant("n2", 0.05), + paramAsConstant("n3", -799810013), + paramAsConstant("n4", "127.0.0.1"), + paramAsConstant("n5", "esql"), + paramAsConstant("n_6", null), + paramAsConstant("n7_", false), + paramAsConstant("_n1", "8.15.0"), + paramAsConstant("__n2", 0.05), + paramAsConstant("__3", -799810013), + paramAsConstant("__4n", "127.0.0.1"), + paramAsConstant("_n5", "esql"), + paramAsConstant("_n6", null), + paramAsConstant("_n7", false) + ); + String json = String.format(Locale.ROOT, """ + { + "query": "%s", + "columnar": %s, + "locale": "%s", + "filter": %s + %s""", query, columnar, locale.toLanguageTag(), filter, paramsString); + + EsqlQueryRequest request = parseEsqlQueryRequestSync(json); + + assertEquals(query, request.query()); + assertEquals(columnar, request.columnar()); + assertEquals(locale.toLanguageTag(), request.locale().toLanguageTag()); + assertEquals(locale, request.locale()); + assertEquals(filter, request.filter()); + assertEquals(params.size(), request.params().size()); + + for (int i = 0; i < request.params().size(); i++) { + assertEquals(params.get(i), request.params().get(i + 1)); + } + } + + public void testNamedParamsForIdentifiersPatterns() throws IOException { + assumeTrue( + "named parameters for identifiers and patterns require snapshot build", + EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES.isEnabled() + ); + String query = randomAlphaOfLengthBetween(1, 100); + boolean columnar = randomBoolean(); + Locale locale = randomLocale(random()); + QueryBuilder filter = randomQueryBuilder(); + String paramsString = """ ,"params":[ {"n1" : 
{"value" : "f1", "kind" : "Identifier"}}, {"n2" : {"value" : "f1*", "Kind" : "identifier"}}, @@ -106,14 +160,7 @@ public void testNamedParams() throws IOException { {"n4" : {"value" : "*", "kind" : "pattern"}}, {"n5" : {"value" : "esql", "kind" : "Value"}}, {"n_6" : {"value" : "null", "kind" : "identifier"}}, - {"n7_" : {"value" : "f.1.1"}}, - {"_n1" : "8.15.0"}, - { "__n2" : 0.05}, - {"__3" : -799810013}, - {"__4n" : "127.0.0.1"}, - {"_n5" : "esql"}, - {"_n6" : null}, - {"_n7" : false}] }"""; + {"n7_" : {"value" : "f.1.1"}}] }"""; List params = List.of( paramAsIdentifier("n1", "f1"), @@ -122,14 +169,7 @@ public void testNamedParams() throws IOException { paramAsPattern("n4", "*"), paramAsConstant("n5", "esql"), paramAsIdentifier("n_6", "null"), - paramAsConstant("n7_", "f.1.1"), - paramAsConstant("_n1", "8.15.0"), - paramAsConstant("__n2", 0.05), - paramAsConstant("__3", -799810013), - paramAsConstant("__4n", "127.0.0.1"), - paramAsConstant("_n5", "esql"), - paramAsConstant("_n6", null), - paramAsConstant("_n7", false) + paramAsConstant("n7_", "f.1.1") ); String json = String.format(Locale.ROOT, """ { @@ -217,9 +257,20 @@ public void testInvalidParams() throws IOException { e2.getCause().getMessage(), containsString("Params cannot contain both named and unnamed parameters; got [{1:v1}, {1x:v1}] and [{1}, {2}]") ); + } + + public void testInvalidParamsForIdentifiersPatterns() throws IOException { + assumeTrue( + "named parameters for identifiers and patterns require snapshot build", + EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES.isEnabled() + ); + String query = randomAlphaOfLengthBetween(1, 100); + boolean columnar = randomBoolean(); + Locale locale = randomLocale(random()); + QueryBuilder filter = randomQueryBuilder(); // invalid named parameter for identifier and identifier pattern - String paramsString3 = """ + String paramsString1 = """ "params":[ {"n1" : {"v" : "v1"}}, {"n2" : {"value" : "v2", "type" : "identifier"}}, {"n3" : {"value" : 
"v3", "kind" : "id" }}, {"n4" : {"value" : "v4", "kind" : true}}, {"n5" : {"value" : "v5", "kind" : ["identifier", "pattern"]}}, {"n6" : {"value" : "v6", "kind" : 0}}, @@ -227,18 +278,18 @@ public void testInvalidParams() throws IOException { {"n9" : {"kind" : "identifier"}}, {"n10" : {"v" : "v10", "kind" : "identifier"}}, {"n11" : {"value" : "v11", "kind" : "pattern"}}, {"n12" : {"value" : ["x", "y"], "kind" : "identifier"}}, {"n13" : {"value" : "v13", "kind" : "identifier", "type" : "pattern"}}, {"n14" : {"v" : "v14", "kind" : "value"}}]"""; - String json3 = String.format(Locale.ROOT, """ + String json1 = String.format(Locale.ROOT, """ { %s "query": "%s", "columnar": %s, "locale": "%s", "filter": %s - }""", paramsString3, query, columnar, locale.toLanguageTag(), filter); + }""", paramsString1, query, columnar, locale.toLanguageTag(), filter); - Exception e3 = expectThrows(XContentParseException.class, () -> parseEsqlQueryRequestSync(json3)); + Exception e1 = expectThrows(XContentParseException.class, () -> parseEsqlQueryRequestSync(json1)); assertThat( - e3.getCause().getMessage(), + e1.getCause().getMessage(), containsString( "Failed to parse params: [2:16] [v] is not a valid param attribute, a valid attribute is any of VALUE, KIND; " + "[2:39] [type] is not a valid param attribute, a valid attribute is any of VALUE, KIND; " @@ -252,10 +303,12 @@ public void testInvalidParams() throws IOException { + "[6:1] [null] is not a valid value for IDENTIFIER parameter, a valid value for IDENTIFIER parameter is a string; " + "[6:35] [v] is not a valid param attribute, a valid attribute is any of VALUE, KIND; " + "[6:35] [n10={v=v10, kind=identifier}] does not have a value specified; " - + "[6:35] [null] is not a valid value for IDENTIFIER parameter, a valid value for IDENTIFIER parameter is a string; " + + "[6:35] [null] is not a valid value for IDENTIFIER parameter, " + + "a valid value for IDENTIFIER parameter is a string; " + "[7:1] [v11] is not a valid value for 
PATTERN parameter, " + "a valid value for PATTERN parameter is a string and contains *; " - + "[7:50] [[x, y]] is not a valid value for IDENTIFIER parameter, a valid value for IDENTIFIER parameter is a string; " + + "[7:50] [[x, y]] is not a valid value for IDENTIFIER parameter," + + " a valid value for IDENTIFIER parameter is a string; " + "[7:50] n12={kind=identifier, value=[x, y]} is not supported as a parameter; " + "[8:1] [type] is not a valid param attribute, a valid attribute is any of VALUE, KIND; " + "[8:73] [v] is not a valid param attribute, a valid attribute is any of VALUE, KIND; " diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index d365ee3bb2e51..3048686efbe44 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.LoadMapping; import org.elasticsearch.xpack.esql.VerificationException; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expressions; @@ -2084,6 +2085,10 @@ public void testCoalesceWithMixedNumericTypes() { } public void testNamedParamsForIdentifiers() { + assumeTrue( + "named parameters for identifiers and patterns require snapshot build", + EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES.isEnabled() + ); assertProjectionWithMapping( """ from test @@ -2174,6 +2179,10 @@ public void testNamedParamsForIdentifiers() { } public void testInvalidNamedParamsForIdentifiers() { + assumeTrue( + "named parameters for identifiers and patterns require snapshot build", + 
EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES.isEnabled() + ); // missing field assertError( """ @@ -2243,6 +2252,10 @@ public void testInvalidNamedParamsForIdentifiers() { } public void testNamedParamsForIdentifierPatterns() { + assumeTrue( + "named parameters for identifiers and patterns require snapshot build", + EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES.isEnabled() + ); assertProjectionWithMapping( """ from test @@ -2273,6 +2286,10 @@ public void testNamedParamsForIdentifierPatterns() { } public void testInvalidNamedParamsForIdentifierPatterns() { + assumeTrue( + "named parameters for identifiers and patterns require snapshot build", + EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES.isEnabled() + ); // missing pattern assertError( """ diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index c797f426d2ae5..0aeb25ac5c375 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.Build; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.core.capabilities.UnresolvedException; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; @@ -984,11 +985,14 @@ public void testDeprecatedIsNullFunction() { "row x = is_null(f)", "line 1:10: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" ); - expectError( - "from test | eval x = ?fn1(f)", - List.of(paramAsIdentifier("fn1", "IS_NULL")), - "line 1:23: is_null function is not supported 
anymore, please use 'is null'/'is not null' predicates instead" - ); + + if (Build.current().isSnapshot()) { + expectError( + "from test | eval x = ?fn1(f)", + List.of(paramAsIdentifier("fn1", "IS_NULL")), + "line 1:23: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" + ); + } } public void testMetadataFieldOnOtherSources() { @@ -1661,6 +1665,10 @@ public void testIntervalParam() { } public void testParamForIdentifier() { + assumeTrue( + "named parameters for identifiers and patterns require snapshot build", + EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES.isEnabled() + ); // field names can appear in eval/where/stats/sort/keep/drop/rename/dissect/grok/enrich/mvexpand // eval, where assertEquals( @@ -1918,6 +1926,10 @@ public void testParamForIdentifier() { } public void testParamForIdentifierPattern() { + assumeTrue( + "named parameters for identifiers and patterns require snapshot build", + EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES.isEnabled() + ); // name patterns can appear in keep and drop // all patterns LogicalPlan plan = statement( @@ -2007,6 +2019,10 @@ public void testParamForIdentifierPattern() { } public void testParamInInvalidPosition() { + assumeTrue( + "named parameters for identifiers and patterns require snapshot build", + EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES.isEnabled() + ); // param for pattern is not supported in eval/where/stats/sort/rename/dissect/grok/enrich/mvexpand // where/stats/sort/dissect/grok are covered in RestEsqlTestCase List invalidParamPositions = List.of("eval ?f1 = 1", "stats x = ?f1(*)", "mv_expand ?f1", "rename ?f1 as ?f2"); @@ -2058,6 +2074,10 @@ public void testParamInInvalidPosition() { } public void testMissingParam() { + assumeTrue( + "named parameters for identifiers and patterns require snapshot build", + EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES.isEnabled() + ); // cover all 
processing commands eval/where/stats/sort/rename/dissect/grok/enrich/mvexpand/keep/drop String error = "Unknown query parameter [f1], did you mean [f4]?"; String errorMvExpandFunctionNameCommandOption = "Query parameter [?f1] is null or undefined, cannot be used as an identifier"; @@ -2139,13 +2159,11 @@ public void testIdPatternUnquoted() throws Exception { public void testIdPatternQuoted() throws Exception { var string = "`escaped string`"; - List temp = breakIntoFragments(string); assertThat(breakIntoFragments(string), contains(string)); } public void testIdPatternQuotedWithDoubleBackticks() throws Exception { var string = "`escaped``string`"; - List temp = breakIntoFragments(string); assertThat(breakIntoFragments(string), contains(string)); } From 64ae0aea45036be62aec076717816a27aa006437 Mon Sep 17 00:00:00 2001 From: Fang Xing <155562079+fang-xing-esql@users.noreply.github.com> Date: Wed, 16 Oct 2024 15:46:12 -0400 Subject: [PATCH 170/449] [ES|QL] Skip validating remote cluster index names in parser (#114271) * skip validating remote cluster index names in parser --- docs/changelog/114271.yaml | 5 + .../xpack/esql/parser/IdentifierBuilder.java | 17 +- .../parser/AbstractStatementParserTests.java | 11 +- .../esql/parser/StatementParserTests.java | 289 ++++++------------ 4 files changed, 119 insertions(+), 203 deletions(-) create mode 100644 docs/changelog/114271.yaml diff --git a/docs/changelog/114271.yaml b/docs/changelog/114271.yaml new file mode 100644 index 0000000000000..7b47b922ff811 --- /dev/null +++ b/docs/changelog/114271.yaml @@ -0,0 +1,5 @@ +pr: 114271 +summary: "[ES|QL] Skip validating remote cluster index names in parser" +area: ES|QL +type: bug +issues: [] diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java index 1872fa6e8f1f0..ae2379318474b 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java @@ -21,6 +21,7 @@ import java.util.List; import static org.elasticsearch.transport.RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR; +import static org.elasticsearch.transport.RemoteClusterAware.isRemoteIndexName; import static org.elasticsearch.xpack.esql.core.util.StringUtils.EXCLUSION; import static org.elasticsearch.xpack.esql.core.util.StringUtils.WILDCARD; import static org.elasticsearch.xpack.esql.parser.ParserUtils.source; @@ -61,11 +62,14 @@ public String visitIndexPattern(List ctx) { Holder hasSeenStar = new Holder<>(false); ctx.forEach(c -> { String indexPattern = visitIndexString(c.indexString()); - hasSeenStar.set(indexPattern.contains(WILDCARD) || hasSeenStar.get()); - validateIndexPattern(indexPattern, c, hasSeenStar.get()); - patterns.add( - c.clusterString() != null ? c.clusterString().getText() + REMOTE_CLUSTER_INDEX_SEPARATOR + indexPattern : indexPattern - ); + String clusterString = c.clusterString() != null ? c.clusterString().getText() : null; + // skip validating index on remote cluster, because the behavior of remote cluster is not consistent with local cluster + // For example, invalid#index is an invalid index name, however FROM *:invalid#index does not return an error + if (clusterString == null) { + hasSeenStar.set(indexPattern.contains(WILDCARD) || hasSeenStar.get()); + validateIndexPattern(indexPattern, c, hasSeenStar.get()); + } + patterns.add(clusterString != null ? 
clusterString + REMOTE_CLUSTER_INDEX_SEPARATOR + indexPattern : indexPattern); }); return Strings.collectionToDelimitedString(patterns, ","); } @@ -75,6 +79,9 @@ private static void validateIndexPattern(String indexPattern, EsqlBaseParser.Ind String[] indices = indexPattern.split(","); boolean hasExclusion = false; for (String index : indices) { + if (isRemoteIndexName(index)) { // skip the validation if there is remote cluster + continue; + } hasSeenStar = index.contains(WILDCARD) || hasSeenStar; index = index.replace(WILDCARD, "").strip(); if (index.isBlank()) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/AbstractStatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/AbstractStatementParserTests.java index a1bcdec2b7c5c..e6fef186721a0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/AbstractStatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/AbstractStatementParserTests.java @@ -144,8 +144,15 @@ void expectError(String query, List params, String errorMessage) { assertThat(e.getMessage(), containsString(errorMessage)); } - void expectInvalidIndexNameErrorWithLineNumber(String query, String arg, String lineNumber, String indexString) { - expectError(LoggerMessageFormat.format(null, query, arg), lineNumber + "Invalid index name [" + indexString); + void expectInvalidIndexNameErrorWithLineNumber(String query, String indexString, String lineNumber) { + if ((indexString.contains("|") || indexString.contains(" ")) == false) { + expectInvalidIndexNameErrorWithLineNumber(query, indexString, lineNumber, indexString); + } + expectInvalidIndexNameErrorWithLineNumber(query, "\"" + indexString + "\"", lineNumber, indexString); + } + + void expectInvalidIndexNameErrorWithLineNumber(String query, String indexString, String lineNumber, String error) { + expectError(LoggerMessageFormat.format(null, query, indexString), 
lineNumber + "Invalid index name [" + error); } void expectDateMathErrorWithLineNumber(String query, String arg, String lineNumber, String error) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 0aeb25ac5c375..094d301875d8e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -59,6 +59,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.UnresolvedRelation; +import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -425,7 +426,12 @@ public void testInlineStatsWithoutGroups() { } public void testStringAsIndexPattern() { - for (String command : List.of("FROM", "METRICS")) { + List commands = new ArrayList<>(); + commands.add("FROM"); + if (Build.current().isSnapshot()) { + commands.add("METRICS"); + } + for (String command : commands) { assertStringAsIndexPattern("foo", command + " \"foo\""); assertStringAsIndexPattern("foo,test-*", command + """ "foo","test-*" @@ -457,15 +463,23 @@ public void testStringAsIndexPattern() { assertStringAsIndexPattern("`backtick`,``multiple`back``ticks```", command + " `backtick`, ``multiple`back``ticks```"); assertStringAsIndexPattern("test,metadata,metaata,.metadata", command + " test,\"metadata\", metaata, .metadata"); assertStringAsIndexPattern(".dot", command + " .dot"); - assertStringAsIndexPattern("cluster:index", command + " cluster:index"); - assertStringAsIndexPattern("cluster:.index", command + " cluster:.index"); - assertStringAsIndexPattern("cluster*:index*", command + " cluster*:index*"); - assertStringAsIndexPattern("cluster*:*", command + " cluster*:*"); - assertStringAsIndexPattern("*:index*", command + " 
*:index*"); - assertStringAsIndexPattern("*:*", command + " *:*"); + assertStringAsIndexPattern("cluster:index|pattern", command + " cluster:\"index|pattern\""); + assertStringAsIndexPattern("*:index|pattern", command + " \"*:index|pattern\""); + clusterAndIndexAsIndexPattern(command, "cluster:index"); + clusterAndIndexAsIndexPattern(command, "cluster:.index"); + clusterAndIndexAsIndexPattern(command, "cluster*:index*"); + clusterAndIndexAsIndexPattern(command, "cluster*:*"); + clusterAndIndexAsIndexPattern(command, "cluster*:*"); + clusterAndIndexAsIndexPattern(command, "*:index*"); + clusterAndIndexAsIndexPattern(command, "*:*"); } } + private void clusterAndIndexAsIndexPattern(String command, String clusterAndIndex) { + assertStringAsIndexPattern(clusterAndIndex, command + " " + clusterAndIndex); + assertStringAsIndexPattern(clusterAndIndex, command + " \"" + clusterAndIndex + "\""); + } + public void testStringAsLookupIndexPattern() { assumeTrue("requires snapshot build", Build.current().isSnapshot()); assertStringAsLookupIndexPattern("foo", "ROW x = 1 | LOOKUP \"foo\" ON j"); @@ -487,221 +501,104 @@ public void testStringAsLookupIndexPattern() { assertStringAsLookupIndexPattern("`backtick`", "ROW x = 1 | LOOKUP `backtick` ON j"); assertStringAsLookupIndexPattern("``multiple`back``ticks```", "ROW x = 1 | LOOKUP ``multiple`back``ticks``` ON j"); assertStringAsLookupIndexPattern(".dot", "ROW x = 1 | LOOKUP .dot ON j"); - assertStringAsLookupIndexPattern("cluster:index", "ROW x = 1 | LOOKUP cluster:index ON j"); - assertStringAsLookupIndexPattern("cluster:.index", "ROW x = 1 | LOOKUP cluster:.index ON j"); - assertStringAsLookupIndexPattern("cluster*:index*", "ROW x = 1 | LOOKUP cluster*:index* ON j"); - assertStringAsLookupIndexPattern("cluster*:*", "ROW x = 1 | LOOKUP cluster*:* ON j"); - assertStringAsLookupIndexPattern("*:index*", "ROW x = 1 | LOOKUP *:index* ON j"); - assertStringAsLookupIndexPattern("*:*", "ROW x = 1 | LOOKUP *:* ON j"); + 
clusterAndIndexAsLookupIndexPattern("cluster:index"); + clusterAndIndexAsLookupIndexPattern("cluster:.index"); + clusterAndIndexAsLookupIndexPattern("cluster*:index*"); + clusterAndIndexAsLookupIndexPattern("cluster*:*"); + clusterAndIndexAsLookupIndexPattern("*:index*"); + clusterAndIndexAsLookupIndexPattern("*:*"); + } + + private void clusterAndIndexAsLookupIndexPattern(String clusterAndIndex) { + assertStringAsLookupIndexPattern(clusterAndIndex, "ROW x = 1 | LOOKUP " + clusterAndIndex + " ON j"); + assertStringAsLookupIndexPattern(clusterAndIndex, "ROW x = 1 | LOOKUP \"" + clusterAndIndex + "\"" + " ON j"); } public void testInvalidCharacterInIndexPattern() { Map commands = new HashMap<>(); - commands.put("FROM {}", "line 1:8: "); + commands.put("FROM {}", "line 1:7: "); if (Build.current().isSnapshot()) { - commands.put("METRICS {}", "line 1:11: "); - commands.put("ROW x = 1 | LOOKUP {} ON j", "line 1:22: "); + commands.put("METRICS {}", "line 1:10: "); + commands.put("ROW x = 1 | LOOKUP {} ON j", "line 1:21: "); } - List clusterStrings = List.of(" ", " *:", " cluster:"); String lineNumber; for (String command : commands.keySet()) { lineNumber = commands.get(command); - for (String clusterString : clusterStrings) { - expectInvalidIndexNameErrorWithLineNumber(command, clusterString + "\"index|pattern\"", lineNumber, "index|pattern"); - expectInvalidIndexNameErrorWithLineNumber(command, clusterString + "\"index pattern\"", lineNumber, "index pattern"); - expectInvalidIndexNameErrorWithLineNumber(command, clusterString + "\"index#pattern\"", lineNumber, "index#pattern"); - expectInvalidIndexNameErrorWithLineNumber(command, clusterString + "index#pattern", lineNumber, "index#pattern"); - expectInvalidIndexNameErrorWithLineNumber(command, clusterString + "\"index?pattern\"", lineNumber, "index?pattern"); - expectInvalidIndexNameErrorWithLineNumber(command, clusterString + "index?pattern", lineNumber, "index?pattern"); - 
expectInvalidIndexNameErrorWithLineNumber(command, clusterString + "\"index>pattern\"", lineNumber, "index>pattern"); - expectInvalidIndexNameErrorWithLineNumber(command, clusterString + "index>pattern", lineNumber, "index>pattern"); - expectInvalidIndexNameErrorWithLineNumber(command, clusterString + "\"index\"", - lineNumber, - "-logstash-" - ); - expectInvalidIndexNameErrorWithLineNumber( - command, - clusterString + "--", - lineNumber, - "-" - ); - expectInvalidIndexNameErrorWithLineNumber(command, clusterString + "\"\"", lineNumber, "logstash#"); - expectInvalidIndexNameErrorWithLineNumber(command, clusterString + "", lineNumber, "logstash#"); - expectInvalidIndexNameErrorWithLineNumber( - command, - clusterString + "\"+\"", - lineNumber, - "+" - ); - expectInvalidIndexNameErrorWithLineNumber( - command, - clusterString + "+", - lineNumber, - "+" - ); - expectInvalidIndexNameErrorWithLineNumber( - command, - clusterString + "\"_\"", - lineNumber, - "_" - ); - expectInvalidIndexNameErrorWithLineNumber( - command, - clusterString + "_", - lineNumber, - "_" - ); - expectInvalidIndexNameErrorWithLineNumber(command, clusterString + "\"<>\"", lineNumber, ">", lineNumber, ">>\"", lineNumber, ">>", lineNumber, "\"", - lineNumber, - "logstash- " - ); - } + expectInvalidIndexNameErrorWithLineNumber(command, "index|pattern", lineNumber); + expectInvalidIndexNameErrorWithLineNumber(command, "index pattern", lineNumber); + expectInvalidIndexNameErrorWithLineNumber(command, "index#pattern", lineNumber); + expectInvalidIndexNameErrorWithLineNumber(command, "index?pattern", lineNumber); + expectInvalidIndexNameErrorWithLineNumber(command, "index>pattern", lineNumber); + expectInvalidIndexNameErrorWithLineNumber(command, "index", lineNumber); + expectInvalidIndexNameErrorWithLineNumber(command, "_", lineNumber); + expectInvalidIndexNameErrorWithLineNumber(command, "index\\pattern", lineNumber, "index\\pattern"); + expectInvalidIndexNameErrorWithLineNumber(command, 
"\"index\\\\pattern\"", lineNumber, "index\\pattern"); + expectInvalidIndexNameErrorWithLineNumber(command, "\"--indexpattern\"", lineNumber, "-indexpattern"); + expectInvalidIndexNameErrorWithLineNumber(command, "--indexpattern", lineNumber, "-indexpattern"); + expectInvalidIndexNameErrorWithLineNumber(command, "<--logstash-{now/M{yyyy.MM}}>", lineNumber, "-logstash-"); + expectInvalidIndexNameErrorWithLineNumber( + command, + "\"--\"", + lineNumber, + "-" + ); + expectInvalidIndexNameErrorWithLineNumber(command, "", lineNumber, "logstash#"); + expectInvalidIndexNameErrorWithLineNumber(command, "\"\"", lineNumber, "logstash#"); + expectInvalidIndexNameErrorWithLineNumber(command, "<>", lineNumber, ">\"", lineNumber, ">>", lineNumber, ">>\"", lineNumber, "\"", lineNumber, "logstash- "); } - // comma separated indices + // comma separated indices, with exclusions // Invalid index names after removing exclusion fail, when there is no index name with wildcard before it for (String command : commands.keySet()) { if (command.contains("LOOKUP")) { continue; } - for (String clusterString : clusterStrings) { - lineNumber = command.contains("FROM") - ? 
"line 1:" + (22 + clusterString.length() - 1) + ": " - : "line 1:" + (25 + clusterString.length() - 1) + ": "; - expectInvalidIndexNameErrorWithLineNumber( - command, - clusterString + "indexpattern, --indexpattern", - lineNumber, - "-indexpattern" - ); - expectInvalidIndexNameErrorWithLineNumber( - command, - clusterString + "indexpattern, \"--indexpattern\"", - lineNumber, - "-indexpattern" - ); - expectInvalidIndexNameErrorWithLineNumber( - command, - clusterString + "\"indexpattern, --indexpattern\"", - commands.get(command), - "-indexpattern" - ); - assertEquals( - unresolvedRelation(clusterString.strip() + "indexpattern,-indexpattern"), - statement(command, clusterString + "indexpattern, -indexpattern") - ); - assertEquals( - unresolvedRelation(clusterString.strip() + "indexpattern,-indexpattern"), - statement(command, clusterString + "indexpattern, \"-indexpattern\"") - ); - assertEquals( - unresolvedRelation(clusterString.strip() + "indexpattern, -indexpattern"), - statement(command, clusterString + "\"indexpattern, -indexpattern\"") - ); - } + + lineNumber = command.contains("FROM") ? "line 1:21: " : "line 1:24: "; + expectInvalidIndexNameErrorWithLineNumber(command, "indexpattern, --indexpattern", lineNumber, "-indexpattern"); + expectInvalidIndexNameErrorWithLineNumber(command, "indexpattern, \"--indexpattern\"", lineNumber, "-indexpattern"); + expectInvalidIndexNameErrorWithLineNumber(command, "\"indexpattern, --indexpattern\"", commands.get(command), "-indexpattern"); + clustersAndIndices(command, "indexpattern", "-indexpattern"); } // Invalid index names, except invalid DateMath, are ignored if there is an index name with wildcard before it + String dateMathError = "unit [D] not supported for date math [/D]"; for (String command : commands.keySet()) { if (command.contains("LOOKUP")) { continue; } - for (String clusterString : clusterStrings) { - lineNumber = command.contains("FROM") - ? 
"line 1:" + (11 + clusterString.length() - 1) + ": " - : "line 1:" + (14 + clusterString.length() - 1) + ": "; - assertEquals( - unresolvedRelation(clusterString.strip() + "*,-index#pattern"), - statement(command, clusterString + "*, \"-index#pattern\"") - ); - assertEquals( - unresolvedRelation(clusterString.strip() + "*,-index#pattern"), - statement(command, clusterString + "*, -index#pattern") - ); - assertEquals( - unresolvedRelation(clusterString.strip() + "*, -index#pattern"), - statement(command, clusterString + "\"*, -index#pattern\"") - ); - assertEquals( - unresolvedRelation(clusterString.strip() + "index*,-index#pattern"), - statement(command, clusterString + "index*, \"-index#pattern\"") - ); - assertEquals( - unresolvedRelation(clusterString.strip() + "index*,-index#pattern"), - statement(command, clusterString + "index*, -index#pattern") - ); - assertEquals( - unresolvedRelation(clusterString.strip() + "index*, -index#pattern"), - statement(command, clusterString + "\"index*, -index#pattern\"") - ); - assertEquals( - unresolvedRelation(clusterString.strip() + "*,-<--logstash-{now/M{yyyy.MM}}>"), - statement(command, clusterString + "*, \"-<--logstash-{now/M{yyyy.MM}}>\"") - ); - assertEquals( - unresolvedRelation(clusterString.strip() + "*,-<--logstash-{now/M{yyyy.MM}}>"), - statement(command, clusterString + "*, -<--logstash-{now/M{yyyy.MM}}>") - ); - assertEquals( - unresolvedRelation(clusterString.strip() + "*, -<--logstash-{now/M{yyyy.MM}}>"), - statement(command, clusterString + "\"*, -<--logstash-{now/M{yyyy.MM}}>\"") - ); - assertEquals( - unresolvedRelation(clusterString.strip() + "index*,-<--logstash#-{now/M{yyyy.MM}}>"), - statement(command, clusterString + "index*, \"-<--logstash#-{now/M{yyyy.MM}}>\"") - ); - assertEquals( - unresolvedRelation(clusterString.strip() + "index*,-<--logstash#-{now/M{yyyy.MM}}>"), - statement(command, clusterString + "index*, -<--logstash#-{now/M{yyyy.MM}}>") - ); - assertEquals( - 
unresolvedRelation(clusterString.strip() + "index*, -<--logstash#-{now/M{yyyy.MM}}>"), - statement(command, clusterString + "\"index*, -<--logstash#-{now/M{yyyy.MM}}>\"") - ); - expectDateMathErrorWithLineNumber( - command, - clusterString + "*, \"-<-logstash-{now/D}>\"", - lineNumber, - "unit [D] not supported for date math [/D]" - ); - expectDateMathErrorWithLineNumber( - command, - clusterString + "*, -<-logstash-{now/D}>", - lineNumber, - "unit [D] not supported for date math [/D]" - ); - expectDateMathErrorWithLineNumber( - command, - clusterString + "\"*, -<-logstash-{now/D}>\"", - commands.get(command), - "unit [D] not supported for date math [/D]" - ); - } + lineNumber = command.contains("FROM") ? "line 1:10: " : "line 1:13: "; + clustersAndIndices(command, "*", "-index#pattern"); + clustersAndIndices(command, "index*", "-index#pattern"); + clustersAndIndices(command, "*", "-<--logstash-{now/M{yyyy.MM}}>"); + clustersAndIndices(command, "index*", "-<--logstash#-{now/M{yyyy.MM}}>"); + expectDateMathErrorWithLineNumber(command, "*, \"-<-logstash-{now/D}>\"", lineNumber, dateMathError); + expectDateMathErrorWithLineNumber(command, "*, -<-logstash-{now/D}>", lineNumber, dateMathError); + expectDateMathErrorWithLineNumber(command, "\"*, -<-logstash-{now/D}>\"", commands.get(command), dateMathError); } } + private void clustersAndIndices(String command, String indexString1, String indexString2) { + assertEquals(unresolvedRelation(indexString1 + "," + indexString2), statement(command, indexString1 + ", " + indexString2)); + assertEquals( + unresolvedRelation(indexString1 + "," + indexString2), + statement(command, indexString1 + ", \"" + indexString2 + "\"") + ); + assertEquals( + unresolvedRelation(indexString1 + ", " + indexString2), + statement(command, "\"" + indexString1 + ", " + indexString2 + "\"") + ); + } + public void testInvalidQuotingAsFromIndexPattern() { expectError("FROM \"foo", ": token recognition error at: '\"foo'"); expectError("FROM \"foo | 
LIMIT 1", ": token recognition error at: '\"foo | LIMIT 1'"); From 32ddbb3449d19a0970b96eefe960d4ab006357fc Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Thu, 17 Oct 2024 08:36:50 +0100 Subject: [PATCH 171/449] Squash transport versions into 8.15 (#114827) --- .../ingest/geoip/GeoIpTaskState.java | 3 +- .../search/rank/FieldBasedRerankerIT.java | 2 +- .../MockedRequestActionBasedRerankerIT.java | 2 +- .../elasticsearch/ElasticsearchException.java | 2 +- .../org/elasticsearch/TransportVersions.java | 78 +------------------ .../TransportGetAllocationStatsAction.java | 4 +- .../node/stats/TransportNodesStatsAction.java | 6 +- .../cleanup/CleanupRepositoryRequest.java | 4 +- .../delete/DeleteSnapshotRequest.java | 4 +- .../delete/TransportDeleteSnapshotAction.java | 3 +- .../indices/create/CreateIndexRequest.java | 4 +- .../segments/IndicesSegmentsRequest.java | 4 +- .../admin/indices/stats/CommonStats.java | 3 +- .../action/bulk/BulkShardRequest.java | 4 +- .../ExplainIndexDataStreamLifecycle.java | 4 +- .../GetDataStreamLifecycleAction.java | 4 +- .../action/ingest/SimulateIndexResponse.java | 4 +- .../support/master/MasterNodeRequest.java | 4 +- .../elasticsearch/cluster/ClusterState.java | 4 +- .../action/shard/ShardStateAction.java | 6 +- .../cluster/coordination/JoinStatus.java | 4 +- .../cluster/metadata/DataStream.java | 4 +- .../cluster/metadata/IndexMetadata.java | 24 +++--- .../metadata/MetadataCreateIndexService.java | 2 +- .../index/stats/IndexingPressureStats.java | 4 +- .../org/elasticsearch/ingest/IngestStats.java | 6 +- .../search/builder/PointInTimeBuilder.java | 4 +- .../search/fetch/ShardFetchSearchRequest.java | 4 +- .../rank/feature/RankFeatureShardResult.java | 2 +- .../search/vectors/ExactKnnQueryBuilder.java | 4 +- .../vectors/KnnScoreDocQueryBuilder.java | 4 +- .../search/vectors/KnnSearchBuilder.java | 4 +- .../search/vectors/KnnVectorQueryBuilder.java | 4 +- .../elasticsearch/TransportVersionTests.java | 2 +- 
.../NodesStatsRequestParametersTests.java | 2 +- .../indices/close/CloseIndexRequestTests.java | 4 +- ...dStartedClusterStateTaskExecutorTests.java | 2 +- .../action/shard/ShardStateActionTests.java | 4 +- .../ClusterSerializationTests.java | 4 +- .../rank/RankFeatureShardPhaseTests.java | 2 +- .../ClusterStateCreationUtils.java | 10 +-- .../action/GetAutoscalingCapacityAction.java | 4 +- .../DataStreamFeatureSetUsage.java | 6 +- .../core/enrich/action/EnrichStatsAction.java | 6 +- .../action/DeleteInferenceEndpointAction.java | 8 +- .../inference/results/RankedDocsResults.java | 6 +- .../core/ml/action/InferModelAction.java | 4 +- .../inference/TrainedModelCacheMetadata.java | 2 +- .../autodetect/state/ModelSizeStats.java | 4 +- .../core/rollup/RollupFeatureSetUsage.java | 6 +- .../settings/GetSecuritySettingsAction.java | 4 +- .../UpdateSecuritySettingsAction.java | 4 +- .../user/GetUserPrivilegesResponse.java | 10 ++- .../core/security/authc/Authentication.java | 2 +- .../core/security/authz/RoleDescriptor.java | 10 ++- .../security/authz/RoleMappingMetadata.java | 2 +- .../permission/RemoteClusterPermissions.java | 4 +- .../support/SecurityMigrationTaskParams.java | 6 +- .../actions/put/GetWatcherSettingsAction.java | 4 +- .../put/UpdateWatcherSettingsAction.java | 4 +- .../results/RankedDocsResultsTests.java | 5 +- ...TrainedModelsStatsActionResponseTests.java | 2 +- .../action/InferModelActionRequestTests.java | 2 +- .../user/GetUserPrivilegesResponseTests.java | 3 +- .../security/authc/AuthenticationTests.java | 3 +- .../security/authz/RoleDescriptorTests.java | 14 ++-- .../RemoteClusterPermissionsTests.java | 6 +- .../xpack/application/rules/QueryRule.java | 4 +- .../application/rules/RuleQueryBuilder.java | 4 +- ...RuleActionResponseBWCSerializingTests.java | 4 +- ...esetActionResponseBWCSerializingTests.java | 2 +- ...yRuleActionRequestBWCSerializingTests.java | 4 +- ...lesetActionRequestBWCSerializingTests.java | 2 +- 
.../compute/data/AbstractArrayBlock.java | 3 +- ...AbstractPageMappingToIteratorOperator.java | 2 +- .../esql/enrich/ResolvedEnrichPolicy.java | 4 +- .../function/UnsupportedAttribute.java | 6 +- .../xpack/esql/io/stream/PlanStreamInput.java | 4 +- .../esql/io/stream/PlanStreamOutput.java | 4 +- .../xpack/esql/plan/logical/Aggregate.java | 4 +- .../xpack/esql/plan/logical/EsRelation.java | 6 +- .../xpack/esql/plugin/EsqlFeatures.java | 1 - .../xpack/esql/session/Configuration.java | 4 +- .../queries/SemanticQueryBuilder.java | 2 +- .../TextSimilarityRankBuilder.java | 2 +- .../AmazonBedrockSecretSettings.java | 4 +- .../amazonbedrock/AmazonBedrockService.java | 4 +- .../AmazonBedrockServiceSettings.java | 4 +- ...azonBedrockChatCompletionTaskSettings.java | 4 +- .../services/anthropic/AnthropicService.java | 2 +- .../AnthropicChatCompletionTaskSettings.java | 2 +- .../azureaistudio/AzureAiStudioService.java | 2 +- ...AiStudioChatCompletionServiceSettings.java | 2 +- ...zureAiStudioEmbeddingsServiceSettings.java | 2 +- .../azureopenai/AzureOpenAiService.java | 2 +- .../AzureOpenAiCompletionServiceSettings.java | 2 +- .../AzureOpenAiCompletionTaskSettings.java | 2 +- .../AzureOpenAiEmbeddingsServiceSettings.java | 4 +- .../services/cohere/CohereService.java | 2 +- .../cohere/CohereServiceSettings.java | 4 +- .../CohereCompletionServiceSettings.java | 2 +- .../rerank/CohereRerankServiceSettings.java | 4 +- ...dInternalTextEmbeddingServiceSettings.java | 4 +- .../googleaistudio/GoogleAiStudioService.java | 2 +- ...ogleAiStudioCompletionServiceSettings.java | 2 +- ...ogleAiStudioEmbeddingsServiceSettings.java | 2 +- .../GoogleVertexAiSecretSettings.java | 2 +- .../googlevertexai/GoogleVertexAiService.java | 2 +- ...ogleVertexAiEmbeddingsServiceSettings.java | 2 +- .../GoogleVertexAiEmbeddingsTaskSettings.java | 2 +- .../GoogleVertexAiRerankServiceSettings.java | 2 +- .../GoogleVertexAiRerankTaskSettings.java | 2 +- .../huggingface/HuggingFaceService.java | 2 +- 
.../HuggingFaceServiceSettings.java | 4 +- .../HuggingFaceElserServiceSettings.java | 4 +- .../services/mistral/MistralService.java | 4 +- .../MistralEmbeddingsServiceSettings.java | 4 +- .../services/openai/OpenAiService.java | 2 +- .../OpenAiChatCompletionServiceSettings.java | 4 +- .../OpenAiEmbeddingsServiceSettings.java | 4 +- .../CohereRerankServiceSettingsTests.java | 2 +- .../ml/queries/SparseVectorQueryBuilder.java | 2 +- .../xpack/security/authc/ApiKeyService.java | 2 +- .../authz/store/NativeRolesStore.java | 35 +++++---- .../security/authc/ApiKeyServiceTests.java | 2 +- .../authz/store/NativeRolesStoreTests.java | 5 +- .../RolesBackwardsCompatibilityIT.java | 11 +-- 127 files changed, 254 insertions(+), 345 deletions(-) diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java index 09ed10568ce8d..47ca79e3cb3b9 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java @@ -44,8 +44,7 @@ public class GeoIpTaskState implements PersistentTaskState, VersionedNamedWriteable { private static boolean includeSha256(TransportVersion version) { - return version.isPatchFrom(TransportVersions.ENTERPRISE_GEOIP_DOWNLOADER_BACKPORT_8_15) - || version.onOrAfter(TransportVersions.ENTERPRISE_GEOIP_DOWNLOADER); + return version.isPatchFrom(TransportVersions.V_8_15_0) || version.onOrAfter(TransportVersions.ENTERPRISE_GEOIP_DOWNLOADER); } private static final ParseField DATABASES = new ParseField("databases"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/rank/FieldBasedRerankerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/rank/FieldBasedRerankerIT.java index 027b223e8c4ef..c8c1f50444c1d 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/rank/FieldBasedRerankerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/rank/FieldBasedRerankerIT.java @@ -234,7 +234,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.RANK_FEATURE_PHASE_ADDED; + return TransportVersions.V_8_15_0; } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/rank/MockedRequestActionBasedRerankerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/rank/MockedRequestActionBasedRerankerIT.java index ce9ce7b9cf4cf..dbdd6e8c50027 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/rank/MockedRequestActionBasedRerankerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/rank/MockedRequestActionBasedRerankerIT.java @@ -424,7 +424,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.RANK_FEATURE_PHASE_ADDED; + return TransportVersions.V_8_15_0; } } diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index 4119e12d45f6c..32198ba7584be 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -1917,7 +1917,7 @@ private enum ElasticsearchExceptionHandle { ResourceAlreadyUploadedException.class, ResourceAlreadyUploadedException::new, 181, - TransportVersions.ADD_RESOURCE_ALREADY_UPLOADED_EXCEPTION + TransportVersions.V_8_15_0 ), INGEST_PIPELINE_EXCEPTION( org.elasticsearch.ingest.IngestPipelineException.class, diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index d1d423dcc5405..dcf6f7aebdc65 100644 --- 
a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -102,80 +102,8 @@ static TransportVersion def(int id) { public static final TransportVersion V_8_13_0 = def(8_595_00_0); public static final TransportVersion V_8_13_4 = def(8_595_00_1); public static final TransportVersion V_8_14_0 = def(8_636_00_1); - // 8.15.0+ - public static final TransportVersion WATERMARK_THRESHOLDS_STATS = def(8_637_00_0); - public static final TransportVersion ENRICH_CACHE_ADDITIONAL_STATS = def(8_638_00_0); - public static final TransportVersion ML_INFERENCE_RATE_LIMIT_SETTINGS_ADDED = def(8_639_00_0); - public static final TransportVersion ML_TRAINED_MODEL_CACHE_METADATA_ADDED = def(8_640_00_0); - public static final TransportVersion TOP_LEVEL_KNN_SUPPORT_QUERY_NAME = def(8_641_00_0); - public static final TransportVersion INDEX_SEGMENTS_VECTOR_FORMATS = def(8_642_00_0); - public static final TransportVersion ADD_RESOURCE_ALREADY_UPLOADED_EXCEPTION = def(8_643_00_0); - public static final TransportVersion ESQL_MV_ORDERING_SORTED_ASCENDING = def(8_644_00_0); - public static final TransportVersion ESQL_PAGE_MAPPING_TO_ITERATOR = def(8_645_00_0); - public static final TransportVersion BINARY_PIT_ID = def(8_646_00_0); - public static final TransportVersion SECURITY_ROLE_MAPPINGS_IN_CLUSTER_STATE = def(8_647_00_0); - public static final TransportVersion ESQL_REQUEST_TABLES = def(8_648_00_0); - public static final TransportVersion ROLE_REMOTE_CLUSTER_PRIVS = def(8_649_00_0); - public static final TransportVersion NO_GLOBAL_RETENTION_FOR_SYSTEM_DATA_STREAMS = def(8_650_00_0); - public static final TransportVersion SHUTDOWN_REQUEST_TIMEOUTS_FIX = def(8_651_00_0); - public static final TransportVersion INDEXING_PRESSURE_REQUEST_REJECTIONS_COUNT = def(8_652_00_0); - public static final TransportVersion ROLLUP_USAGE = def(8_653_00_0); - public static final TransportVersion SECURITY_ROLE_DESCRIPTION = def(8_654_00_0); 
- public static final TransportVersion ML_INFERENCE_AZURE_OPENAI_COMPLETIONS = def(8_655_00_0); - public static final TransportVersion JOIN_STATUS_AGE_SERIALIZATION = def(8_656_00_0); - public static final TransportVersion ML_RERANK_DOC_OPTIONAL = def(8_657_00_0); - public static final TransportVersion FAILURE_STORE_FIELD_PARITY = def(8_658_00_0); - public static final TransportVersion ML_INFERENCE_AZURE_AI_STUDIO = def(8_659_00_0); - public static final TransportVersion ML_INFERENCE_COHERE_COMPLETION_ADDED = def(8_660_00_0); - public static final TransportVersion ESQL_REMOVE_ES_SOURCE_OPTIONS = def(8_661_00_0); - public static final TransportVersion NODE_STATS_INGEST_BYTES = def(8_662_00_0); - public static final TransportVersion SEMANTIC_QUERY = def(8_663_00_0); - public static final TransportVersion GET_AUTOSCALING_CAPACITY_UNUSED_TIMEOUT = def(8_664_00_0); - public static final TransportVersion SIMULATE_VALIDATES_MAPPINGS = def(8_665_00_0); - public static final TransportVersion RULE_QUERY_RENAME = def(8_666_00_0); - public static final TransportVersion SPARSE_VECTOR_QUERY_ADDED = def(8_667_00_0); - public static final TransportVersion ESQL_ADD_INDEX_MODE_TO_SOURCE = def(8_668_00_0); - public static final TransportVersion GET_SHUTDOWN_STATUS_TIMEOUT = def(8_669_00_0); - public static final TransportVersion FAILURE_STORE_TELEMETRY = def(8_670_00_0); - public static final TransportVersion ADD_METADATA_FLATTENED_TO_ROLES = def(8_671_00_0); - public static final TransportVersion ML_INFERENCE_GOOGLE_AI_STUDIO_COMPLETION_ADDED = def(8_672_00_0); - public static final TransportVersion WATCHER_REQUEST_TIMEOUTS = def(8_673_00_0); - public static final TransportVersion ML_INFERENCE_ENHANCE_DELETE_ENDPOINT = def(8_674_00_0); - public static final TransportVersion ML_INFERENCE_GOOGLE_AI_STUDIO_EMBEDDINGS_ADDED = def(8_675_00_0); - public static final TransportVersion ADD_MISTRAL_EMBEDDINGS_INFERENCE = def(8_676_00_0); - public static final TransportVersion 
ML_CHUNK_INFERENCE_OPTION = def(8_677_00_0); - public static final TransportVersion RANK_FEATURE_PHASE_ADDED = def(8_678_00_0); - public static final TransportVersion RANK_DOC_IN_SHARD_FETCH_REQUEST = def(8_679_00_0); - public static final TransportVersion SECURITY_SETTINGS_REQUEST_TIMEOUTS = def(8_680_00_0); - public static final TransportVersion QUERY_RULE_CRUD_API_PUT = def(8_681_00_0); - public static final TransportVersion DROP_UNUSED_NODES_REQUESTS = def(8_682_00_0); - public static final TransportVersion QUERY_RULE_CRUD_API_GET_DELETE = def(8_683_00_0); - public static final TransportVersion MORE_LIGHTER_NODES_REQUESTS = def(8_684_00_0); - public static final TransportVersion DROP_UNUSED_NODES_IDS = def(8_685_00_0); - public static final TransportVersion DELETE_SNAPSHOTS_ASYNC_ADDED = def(8_686_00_0); - public static final TransportVersion VERSION_SUPPORTING_SPARSE_VECTOR_STATS = def(8_687_00_0); - public static final TransportVersion ML_AD_OUTPUT_MEMORY_ALLOCATOR_FIELD = def(8_688_00_0); - public static final TransportVersion FAILURE_STORE_LAZY_CREATION = def(8_689_00_0); - public static final TransportVersion SNAPSHOT_REQUEST_TIMEOUTS = def(8_690_00_0); - public static final TransportVersion INDEX_METADATA_MAPPINGS_UPDATED_VERSION = def(8_691_00_0); - public static final TransportVersion ML_INFERENCE_ELAND_SETTINGS_ADDED = def(8_692_00_0); - public static final TransportVersion ML_ANTHROPIC_INTEGRATION_ADDED = def(8_693_00_0); - public static final TransportVersion ML_INFERENCE_GOOGLE_VERTEX_AI_EMBEDDINGS_ADDED = def(8_694_00_0); - public static final TransportVersion EVENT_INGESTED_RANGE_IN_CLUSTER_STATE = def(8_695_00_0); - public static final TransportVersion ESQL_ADD_AGGREGATE_TYPE = def(8_696_00_0); - public static final TransportVersion SECURITY_MIGRATIONS_MIGRATION_NEEDED_ADDED = def(8_697_00_0); - public static final TransportVersion K_FOR_KNN_QUERY_ADDED = def(8_698_00_0); - public static final TransportVersion TEXT_SIMILARITY_RERANKER_RETRIEVER = 
def(8_699_00_0); - public static final TransportVersion ML_INFERENCE_GOOGLE_VERTEX_AI_RERANKING_ADDED = def(8_700_00_0); - public static final TransportVersion VERSIONED_MASTER_NODE_REQUESTS = def(8_701_00_0); - public static final TransportVersion ML_INFERENCE_AMAZON_BEDROCK_ADDED = def(8_702_00_0); - public static final TransportVersion ENTERPRISE_GEOIP_DOWNLOADER_BACKPORT_8_15 = def(8_702_00_1); - public static final TransportVersion FIX_VECTOR_SIMILARITY_INNER_HITS_BACKPORT_8_15 = def(8_702_00_2); - /** - * we made a single backport for ESQL_ES_FIELD_CACHED_SERIALIZATION and ESQL_ATTRIBUTE_CACHED_SERIALIZATION - * with only one TransportVersion entry - */ - public static final TransportVersion ESQL_ATTRIBUTE_CACHED_SERIALIZATION_8_15 = def(8_702_00_3); + public static final TransportVersion V_8_15_0 = def(8_702_00_2); + public static final TransportVersion V_8_15_2 = def(8_702_00_3); public static final TransportVersion ML_INFERENCE_DONT_DELETE_WHEN_SEMANTIC_TEXT_EXISTS = def(8_703_00_0); public static final TransportVersion INFERENCE_ADAPTIVE_ALLOCATIONS = def(8_704_00_0); public static final TransportVersion INDEX_REQUEST_UPDATE_BY_SCRIPT_ORIGIN = def(8_705_00_0); @@ -313,7 +241,7 @@ static TransportVersion def(int id) { * Reference to the minimum transport version that can be used with CCS. * This should be the transport version used by the previous minor release. 
*/ - public static final TransportVersion MINIMUM_CCS_VERSION = FIX_VECTOR_SIMILARITY_INNER_HITS_BACKPORT_8_15; + public static final TransportVersion MINIMUM_CCS_VERSION = V_8_15_0; static final NavigableMap VERSION_IDS = getAllVersionIds(TransportVersions.class); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java index 259a244bff919..e14f229f17acf 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java @@ -156,7 +156,7 @@ public Response(Map nodeAllocationStats, DiskThresh public Response(StreamInput in) throws IOException { super(in); this.nodeAllocationStats = in.readImmutableMap(StreamInput::readString, NodeAllocationStats::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.WATERMARK_THRESHOLDS_STATS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { this.diskThresholdSettings = in.readOptionalWriteable(DiskThresholdSettings::readFrom); } else { this.diskThresholdSettings = null; @@ -166,7 +166,7 @@ public Response(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { out.writeMap(nodeAllocationStats, StreamOutput::writeString, StreamOutput::writeWriteable); - if (out.getTransportVersion().onOrAfter(TransportVersions.WATERMARK_THRESHOLDS_STATS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeOptionalWriteable(diskThresholdSettings); } else { assert diskThresholdSettings == null; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java index a4fda469da3a2..83769430d4142 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java @@ -186,8 +186,7 @@ public static class NodeStatsRequest extends TransportRequest { public NodeStatsRequest(StreamInput in) throws IOException { super(in); this.nodesStatsRequestParameters = new NodesStatsRequestParameters(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0) - && in.getTransportVersion().before(TransportVersions.DROP_UNUSED_NODES_IDS)) { + if (in.getTransportVersion().between(TransportVersions.V_8_13_0, TransportVersions.V_8_15_0)) { in.readStringArray(); // formerly nodeIds, now unused } } @@ -214,8 +213,7 @@ public String getDescription() { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); nodesStatsRequestParameters.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0) - && out.getTransportVersion().before(TransportVersions.DROP_UNUSED_NODES_IDS)) { + if (out.getTransportVersion().between(TransportVersions.V_8_13_0, TransportVersions.V_8_15_0)) { out.writeStringArray(Strings.EMPTY_ARRAY); // formerly nodeIds, now unused } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryRequest.java index 39fcb9fd53ac6..cdcf4bdad7b1a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryRequest.java @@ -29,7 +29,7 @@ public CleanupRepositoryRequest(TimeValue masterNodeTimeout, 
TimeValue ackTimeou } public static CleanupRepositoryRequest readFrom(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(TransportVersions.SNAPSHOT_REQUEST_TIMEOUTS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { return new CleanupRepositoryRequest(in); } else { return new CleanupRepositoryRequest(TimeValue.THIRTY_SECONDS, TimeValue.THIRTY_SECONDS, in); @@ -48,7 +48,7 @@ public CleanupRepositoryRequest(TimeValue masterNodeTimeout, TimeValue ackTimeou @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.SNAPSHOT_REQUEST_TIMEOUTS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { super.writeTo(out); } out.writeString(repository); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequest.java index 7d3c31a011acc..ab073f83e14da 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequest.java @@ -51,7 +51,7 @@ public DeleteSnapshotRequest(StreamInput in) throws IOException { super(in); repository = in.readString(); snapshots = in.readStringArray(); - if (in.getTransportVersion().onOrAfter(TransportVersions.DELETE_SNAPSHOTS_ASYNC_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { waitForCompletion = in.readBoolean(); } } @@ -61,7 +61,7 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(repository); out.writeStringArray(snapshots); - if (out.getTransportVersion().onOrAfter(TransportVersions.DELETE_SNAPSHOTS_ASYNC_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { 
out.writeBoolean(waitForCompletion); } else { assert waitForCompletion : "Using wait_for_completion parameter when it should have been disallowed"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java index b28c2a8e570ea..1dfd83aee0407 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java @@ -64,8 +64,7 @@ protected ClusterBlockException checkBlock(DeleteSnapshotRequest request, Cluste @Override protected void doExecute(Task task, DeleteSnapshotRequest request, ActionListener listener) { - if (clusterService.state().getMinTransportVersion().before(TransportVersions.DELETE_SNAPSHOTS_ASYNC_ADDED) - && request.waitForCompletion() == false) { + if (clusterService.state().getMinTransportVersion().before(TransportVersions.V_8_15_0) && request.waitForCompletion() == false) { throw new UnsupportedOperationException("wait_for_completion parameter is not supported by all nodes in this cluster"); } super.doExecute(task, request, listener); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java index f4ac1a3fe907b..c233ed57b748e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java @@ -113,7 +113,7 @@ public CreateIndexRequest(StreamInput in) throws IOException { } else { requireDataStream = false; } - if (in.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_LAZY_CREATION)) { + if 
(in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { initializeFailureStore = in.readBoolean(); } else { initializeFailureStore = true; @@ -518,7 +518,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(this.requireDataStream); } - if (out.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_LAZY_CREATION)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeBoolean(this.initializeFailureStore); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsRequest.java index 26447910cecfc..bdacfb0ab642d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsRequest.java @@ -34,7 +34,7 @@ public IndicesSegmentsRequest(StreamInput in) throws IOException { if (in.getTransportVersion().before(TransportVersions.V_8_0_0)) { in.readBoolean(); // old 'verbose' option, since removed } - if (in.getTransportVersion().onOrAfter(TransportVersions.INDEX_SEGMENTS_VECTOR_FORMATS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { this.includeVectorFormatsInfo = in.readBoolean(); } } @@ -59,7 +59,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().before(TransportVersions.V_8_0_0)) { out.writeBoolean(false); } - if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_SEGMENTS_VECTOR_FORMATS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeBoolean(includeVectorFormatsInfo); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java index ddfe89cce01bf..1351674fffbfa 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java @@ -47,12 +47,11 @@ import java.io.IOException; import java.util.Objects; -import static org.elasticsearch.TransportVersions.VERSION_SUPPORTING_SPARSE_VECTOR_STATS; - public class CommonStats implements Writeable, ToXContentFragment { private static final TransportVersion VERSION_SUPPORTING_NODE_MAPPINGS = TransportVersions.V_8_5_0; private static final TransportVersion VERSION_SUPPORTING_DENSE_VECTOR_STATS = TransportVersions.V_8_10_X; + private static final TransportVersion VERSION_SUPPORTING_SPARSE_VECTOR_STATS = TransportVersions.V_8_15_0; @Nullable public DocsStats docs; diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java index 8d8f60cc3ba7b..406ba9723b427 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java @@ -43,7 +43,7 @@ public final class BulkShardRequest extends ReplicatedWriteRequest i.readOptionalWriteable(inpt -> new BulkItemRequest(shardId, inpt)), BulkItemRequest[]::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_VALIDATES_MAPPINGS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { isSimulated = in.readBoolean(); } else { isSimulated = false; @@ -132,7 +132,7 @@ public void writeTo(StreamOutput out) throws IOException { } super.writeTo(out); out.writeArray((o, item) -> o.writeOptional(BulkItemRequest.THIN_WRITER, item), items); - if (out.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_VALIDATES_MAPPINGS)) { + if 
(out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeBoolean(isSimulated); } } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainIndexDataStreamLifecycle.java b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainIndexDataStreamLifecycle.java index 94c294435acd3..f9bd135968246 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainIndexDataStreamLifecycle.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainIndexDataStreamLifecycle.java @@ -82,7 +82,7 @@ public ExplainIndexDataStreamLifecycle( public ExplainIndexDataStreamLifecycle(StreamInput in) throws IOException { this.index = in.readString(); this.managedByLifecycle = in.readBoolean(); - if (in.getTransportVersion().onOrAfter(TransportVersions.NO_GLOBAL_RETENTION_FOR_SYSTEM_DATA_STREAMS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { this.isInternalDataStream = in.readBoolean(); } else { this.isInternalDataStream = false; @@ -165,7 +165,7 @@ public XContentBuilder toXContent( public void writeTo(StreamOutput out) throws IOException { out.writeString(index); out.writeBoolean(managedByLifecycle); - if (out.getTransportVersion().onOrAfter(TransportVersions.NO_GLOBAL_RETENTION_FOR_SYSTEM_DATA_STREAMS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeBoolean(isInternalDataStream); } if (managedByLifecycle) { diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/GetDataStreamLifecycleAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/GetDataStreamLifecycleAction.java index bd628c88a1b1e..a43d29501a7ee 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/GetDataStreamLifecycleAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/GetDataStreamLifecycleAction.java @@ -173,7 +173,7 @@ public 
record DataStreamLifecycle( this( in.readString(), in.readOptionalWriteable(org.elasticsearch.cluster.metadata.DataStreamLifecycle::new), - in.getTransportVersion().onOrAfter(TransportVersions.NO_GLOBAL_RETENTION_FOR_SYSTEM_DATA_STREAMS) && in.readBoolean() + in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0) && in.readBoolean() ); } @@ -181,7 +181,7 @@ public record DataStreamLifecycle( public void writeTo(StreamOutput out) throws IOException { out.writeString(dataStreamName); out.writeOptionalWriteable(lifecycle); - if (out.getTransportVersion().onOrAfter(TransportVersions.NO_GLOBAL_RETENTION_FOR_SYSTEM_DATA_STREAMS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeBoolean(isInternalDataStream); } } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulateIndexResponse.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulateIndexResponse.java index 1b930d5e8db3f..9d883cb075ede 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulateIndexResponse.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulateIndexResponse.java @@ -42,7 +42,7 @@ public SimulateIndexResponse(StreamInput in) throws IOException { this.source = in.readBytesReference(); this.sourceXContentType = XContentType.valueOf(in.readString()); setShardInfo(ShardInfo.EMPTY); - if (in.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_VALIDATES_MAPPINGS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { this.exception = in.readException(); } else { this.exception = null; @@ -102,7 +102,7 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeBytesReference(source); out.writeString(sourceXContentType.name()); - if (out.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_VALIDATES_MAPPINGS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeException(exception); } } diff --git 
a/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java b/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java index 2566e109b70e4..8d29025c1f0be 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java @@ -78,7 +78,7 @@ protected MasterNodeRequest(TimeValue masterNodeTimeout) { protected MasterNodeRequest(StreamInput in) throws IOException { super(in); masterNodeTimeout = in.readTimeValue(); - if (in.getTransportVersion().onOrAfter(TransportVersions.VERSIONED_MASTER_NODE_REQUESTS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { masterTerm = in.readVLong(); } else { masterTerm = 0L; @@ -92,7 +92,7 @@ public void writeTo(StreamOutput out) throws IOException { assert masterTerm <= newMasterTerm : masterTerm + " vs " + newMasterTerm; super.writeTo(out); out.writeTimeValue(masterNodeTimeout); - if (out.getTransportVersion().onOrAfter(TransportVersions.VERSIONED_MASTER_NODE_REQUESTS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeVLong(newMasterTerm); } // else no protection against routing loops in older versions } diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java index 64df6e77326e4..f7cad013554c6 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -241,9 +241,7 @@ public ClusterState( } private boolean assertEventIngestedIsUnknownInMixedClusters(Metadata metadata, CompatibilityVersions compatibilityVersions) { - if (compatibilityVersions.transportVersion().before(TransportVersions.EVENT_INGESTED_RANGE_IN_CLUSTER_STATE) - && metadata != null - && metadata.indices() != null) { + if 
(compatibilityVersions.transportVersion().before(TransportVersions.V_8_15_0) && metadata != null && metadata.indices() != null) { for (IndexMetadata indexMetadata : metadata.indices().values()) { assert indexMetadata.getEventIngestedRange() == IndexLongFieldRange.UNKNOWN : "event.ingested range should be UNKNOWN but is " diff --git a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index 4b84854315a82..bd438d66549aa 100644 --- a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -722,7 +722,7 @@ public ClusterState execute(BatchExecutionContext batchE */ IndexLongFieldRange newEventIngestedMillisRange = IndexLongFieldRange.UNKNOWN; TransportVersion minTransportVersion = batchExecutionContext.initialState().getMinTransportVersion(); - if (minTransportVersion.onOrAfter(TransportVersions.EVENT_INGESTED_RANGE_IN_CLUSTER_STATE)) { + if (minTransportVersion.onOrAfter(TransportVersions.V_8_15_0)) { newEventIngestedMillisRange = currentEventIngestedMillisRange.extendWithShardRange( startedShardEntry.shardId.id(), indexMetadata.getNumberOfShards(), @@ -827,7 +827,7 @@ public static class StartedShardEntry extends TransportRequest { primaryTerm = in.readVLong(); this.message = in.readString(); this.timestampRange = ShardLongFieldRange.readFrom(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.EVENT_INGESTED_RANGE_IN_CLUSTER_STATE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { this.eventIngestedRange = ShardLongFieldRange.readFrom(in); } else { this.eventIngestedRange = ShardLongFieldRange.UNKNOWN; @@ -858,7 +858,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVLong(primaryTerm); out.writeString(message); timestampRange.writeTo(out); - if 
(out.getTransportVersion().onOrAfter(TransportVersions.EVENT_INGESTED_RANGE_IN_CLUSTER_STATE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { eventIngestedRange.writeTo(out); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinStatus.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinStatus.java index 5182e01899c6c..89083848d488d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinStatus.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinStatus.java @@ -25,7 +25,7 @@ public JoinStatus(StreamInput in) throws IOException { new DiscoveryNode(in), in.readLong(), in.readString(), - in.getTransportVersion().onOrAfter(TransportVersions.JOIN_STATUS_AGE_SERIALIZATION) + in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0) ? in.readTimeValue() : new TimeValue(in.readLong(), TimeUnit.valueOf(in.readString())) ); @@ -36,7 +36,7 @@ public void writeTo(StreamOutput out) throws IOException { remoteNode.writeTo(out); out.writeLong(term); out.writeString(message); - if (out.getTransportVersion().onOrAfter(TransportVersions.JOIN_STATUS_AGE_SERIALIZATION)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeTimeValue(age); } else { out.writeLong(age.duration()); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index dd4a52fd9beda..bedf65e1a9c8b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -211,7 +211,7 @@ public static DataStream read(StreamInput in) throws IOException { if (in.getTransportVersion().onOrAfter(DataStream.ADDED_AUTO_SHARDING_EVENT_VERSION)) { backingIndicesBuilder.setAutoShardingEvent(in.readOptionalWriteable(DataStreamAutoShardingEvent::new)); } - if 
(in.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_FIELD_PARITY)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { failureIndicesBuilder.setRolloverOnWrite(in.readBoolean()) .setAutoShardingEvent(in.readOptionalWriteable(DataStreamAutoShardingEvent::new)); } @@ -1089,7 +1089,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(DataStream.ADDED_AUTO_SHARDING_EVENT_VERSION)) { out.writeOptionalWriteable(backingIndices.autoShardingEvent); } - if (out.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_FIELD_PARITY)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeBoolean(failureIndices.rolloverOnWrite); out.writeOptionalWriteable(failureIndices.autoShardingEvent); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java index 23e8e49aa16db..6456240c2317e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java @@ -41,6 +41,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.gateway.MetadataStateFormat; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexMode; @@ -945,9 +946,9 @@ public IndexMetadata withTimestampRanges( if (timestampRange.equals(this.timestampRange) && eventIngestedRange.equals(this.eventIngestedRange)) { return this; } + @UpdateForV9(owner = UpdateForV9.Owner.SEARCH_FOUNDATIONS) // remove this check when 8.15 is no longer communicable IndexLongFieldRange allowedEventIngestedRange = eventIngestedRange; - // remove this check when the EVENT_INGESTED_RANGE_IN_CLUSTER_STATE version is removed - if 
(minClusterTransportVersion.before(TransportVersions.EVENT_INGESTED_RANGE_IN_CLUSTER_STATE)) { + if (minClusterTransportVersion.before(TransportVersions.V_8_15_0)) { allowedEventIngestedRange = IndexLongFieldRange.UNKNOWN; } return new IndexMetadata( @@ -1644,7 +1645,7 @@ private static class IndexMetadataDiff implements Diff { DiffableUtils.getStringKeySerializer(), ROLLOVER_INFO_DIFF_VALUE_READER ); - if (in.getTransportVersion().onOrAfter(TransportVersions.INDEX_METADATA_MAPPINGS_UPDATED_VERSION)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { mappingsUpdatedVersion = IndexVersion.readVersion(in); } else { mappingsUpdatedVersion = IndexVersions.ZERO; @@ -1664,7 +1665,7 @@ private static class IndexMetadataDiff implements Diff { indexWriteLoadForecast = null; shardSizeInBytesForecast = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.EVENT_INGESTED_RANGE_IN_CLUSTER_STATE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { eventIngestedRange = IndexLongFieldRange.readFrom(in); } else { eventIngestedRange = IndexLongFieldRange.UNKNOWN; @@ -1698,7 +1699,7 @@ public void writeTo(StreamOutput out) throws IOException { customData.writeTo(out); inSyncAllocationIds.writeTo(out); rolloverInfos.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_METADATA_MAPPINGS_UPDATED_VERSION)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { IndexVersion.writeVersion(mappingsUpdatedVersion, out); } if (out.getTransportVersion().onOrAfter(SYSTEM_INDEX_FLAG_ADDED)) { @@ -1710,7 +1711,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalDouble(indexWriteLoadForecast); out.writeOptionalLong(shardSizeInBytesForecast); } - if (out.getTransportVersion().onOrAfter(TransportVersions.EVENT_INGESTED_RANGE_IN_CLUSTER_STATE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { eventIngestedRange.writeTo(out); } else { assert 
eventIngestedRange == IndexLongFieldRange.UNKNOWN @@ -1809,7 +1810,7 @@ public static IndexMetadata readFrom(StreamInput in, @Nullable Function DiffableUtils.StringSetValueSerializer.getInstance().write(v, o) ); out.writeCollection(rolloverInfos.values()); - if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_METADATA_MAPPINGS_UPDATED_VERSION)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { IndexVersion.writeVersion(mappingsUpdatedVersion, out); } if (out.getTransportVersion().onOrAfter(SYSTEM_INDEX_FLAG_ADDED)) { @@ -1879,7 +1880,7 @@ public void writeTo(StreamOutput out, boolean mappingsAsHash) throws IOException out.writeOptionalDouble(writeLoadForecast); out.writeOptionalLong(shardSizeInBytesForecast); } - if (out.getTransportVersion().onOrAfter(TransportVersions.EVENT_INGESTED_RANGE_IN_CLUSTER_STATE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { eventIngestedRange.writeTo(out); } else { assert eventIngestedRange == IndexLongFieldRange.UNKNOWN @@ -2205,8 +2206,7 @@ public Builder eventIngestedRange(IndexLongFieldRange eventIngestedRange, Transp + minClusterTransportVersion + "; eventIngestedRange = " + eventIngestedRange; - if (minClusterTransportVersion != null - && minClusterTransportVersion.before(TransportVersions.EVENT_INGESTED_RANGE_IN_CLUSTER_STATE)) { + if (minClusterTransportVersion != null && minClusterTransportVersion.before(TransportVersions.V_8_15_0)) { this.eventIngestedRange = IndexLongFieldRange.UNKNOWN; } else { this.eventIngestedRange = eventIngestedRange; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java index 29720e98a6e7b..321719475c1f8 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java @@ 
-1306,7 +1306,7 @@ static IndexMetadata buildIndexMetadata( ) { IndexMetadata.Builder indexMetadataBuilder = createIndexMetadataBuilder(indexName, sourceMetadata, indexSettings, routingNumShards); indexMetadataBuilder.system(isSystem); - if (minClusterTransportVersion.before(TransportVersions.EVENT_INGESTED_RANGE_IN_CLUSTER_STATE)) { + if (minClusterTransportVersion.before(TransportVersions.V_8_15_0)) { // promote to UNKNOWN for older versions since they don't know how to handle event.ingested in cluster state indexMetadataBuilder.eventIngestedRange(IndexLongFieldRange.UNKNOWN, minClusterTransportVersion); } diff --git a/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java b/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java index c755862d9e828..b5197274dd519 100644 --- a/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java +++ b/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java @@ -80,7 +80,7 @@ public IndexingPressureStats(StreamInput in) throws IOException { primaryDocumentRejections = -1L; } - if (in.getTransportVersion().onOrAfter(TransportVersions.INDEXING_PRESSURE_REQUEST_REJECTIONS_COUNT)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { totalCoordinatingRequests = in.readVLong(); } else { totalCoordinatingRequests = -1L; @@ -157,7 +157,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVLong(primaryDocumentRejections); } - if (out.getTransportVersion().onOrAfter(TransportVersions.INDEXING_PRESSURE_REQUEST_REJECTIONS_COUNT)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeVLong(totalCoordinatingRequests); } } diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestStats.java b/server/src/main/java/org/elasticsearch/ingest/IngestStats.java index a7f0a16b9025d..39df4e53423a2 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestStats.java +++ 
b/server/src/main/java/org/elasticsearch/ingest/IngestStats.java @@ -65,9 +65,7 @@ public static IngestStats read(StreamInput in) throws IOException { for (var i = 0; i < size; i++) { var pipelineId = in.readString(); var pipelineStat = new Stats(in); - var byteStat = in.getTransportVersion().onOrAfter(TransportVersions.NODE_STATS_INGEST_BYTES) - ? new ByteStats(in) - : new ByteStats(0, 0); + var byteStat = in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0) ? new ByteStats(in) : new ByteStats(0, 0); pipelineStats.add(new PipelineStat(pipelineId, pipelineStat, byteStat)); int processorsSize = in.readVInt(); var processorStatsPerPipeline = new ArrayList(processorsSize); @@ -90,7 +88,7 @@ public void writeTo(StreamOutput out) throws IOException { for (PipelineStat pipelineStat : pipelineStats) { out.writeString(pipelineStat.pipelineId()); pipelineStat.stats().writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.NODE_STATS_INGEST_BYTES)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { pipelineStat.byteStats().writeTo(out); } List processorStatsForPipeline = processorStats.get(pipelineStat.pipelineId()); diff --git a/server/src/main/java/org/elasticsearch/search/builder/PointInTimeBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/PointInTimeBuilder.java index 0d11ce15b6878..eecc840e8602c 100644 --- a/server/src/main/java/org/elasticsearch/search/builder/PointInTimeBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/builder/PointInTimeBuilder.java @@ -63,7 +63,7 @@ public PointInTimeBuilder(BytesReference pitID) { } public PointInTimeBuilder(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(TransportVersions.BINARY_PIT_ID)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { encodedId = in.readBytesReference(); } else { encodedId = new BytesArray(Base64.getUrlDecoder().decode(in.readString())); @@ -73,7 +73,7 @@ public 
PointInTimeBuilder(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.BINARY_PIT_ID)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeBytesReference(encodedId); } else { out.writeString(Base64.getUrlEncoder().encodeToString(BytesReference.toBytes(encodedId))); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java b/server/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java index efde60f4a0d29..534c764c07b7b 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java @@ -61,7 +61,7 @@ public ShardFetchSearchRequest(StreamInput in) throws IOException { shardSearchRequest = in.readOptionalWriteable(ShardSearchRequest::new); rescoreDocIds = new RescoreDocIds(in); aggregatedDfs = in.readOptionalWriteable(AggregatedDfs::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.RANK_DOC_IN_SHARD_FETCH_REQUEST)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { this.rankDocs = in.readOptionalWriteable(RankDocShardInfo::new); } else { this.rankDocs = null; @@ -75,7 +75,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(shardSearchRequest); rescoreDocIds.writeTo(out); out.writeOptionalWriteable(aggregatedDfs); - if (out.getTransportVersion().onOrAfter(TransportVersions.RANK_DOC_IN_SHARD_FETCH_REQUEST)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeOptionalWriteable(rankDocs); } } diff --git a/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureShardResult.java b/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureShardResult.java index eb8d03bd1406f..13cf9f339d7d4 100644 --- 
a/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureShardResult.java +++ b/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureShardResult.java @@ -43,7 +43,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.RANK_FEATURE_PHASE_ADDED; + return TransportVersions.V_8_15_0; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java index 30a32e9a615de..c8670a8dfeec2 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java @@ -56,7 +56,7 @@ public ExactKnnQueryBuilder(StreamInput in) throws IOException { } this.field = in.readString(); if (in.getTransportVersion().onOrAfter(TransportVersions.FIX_VECTOR_SIMILARITY_INNER_HITS) - || in.getTransportVersion().isPatchFrom(TransportVersions.FIX_VECTOR_SIMILARITY_INNER_HITS_BACKPORT_8_15)) { + || in.getTransportVersion().isPatchFrom(TransportVersions.V_8_15_0)) { this.vectorSimilarity = in.readOptionalFloat(); } else { this.vectorSimilarity = null; @@ -89,7 +89,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { } out.writeString(field); if (out.getTransportVersion().onOrAfter(TransportVersions.FIX_VECTOR_SIMILARITY_INNER_HITS) - || out.getTransportVersion().isPatchFrom(TransportVersions.FIX_VECTOR_SIMILARITY_INNER_HITS_BACKPORT_8_15)) { + || out.getTransportVersion().isPatchFrom(TransportVersions.V_8_15_0)) { out.writeOptionalFloat(vectorSimilarity); } } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java index 15052fdad3818..f52addefc8b1c 100644 --- 
a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java @@ -72,7 +72,7 @@ public KnnScoreDocQueryBuilder(StreamInput in) throws IOException { this.queryVector = null; } if (in.getTransportVersion().onOrAfter(TransportVersions.FIX_VECTOR_SIMILARITY_INNER_HITS) - || in.getTransportVersion().isPatchFrom(TransportVersions.FIX_VECTOR_SIMILARITY_INNER_HITS_BACKPORT_8_15)) { + || in.getTransportVersion().isPatchFrom(TransportVersions.V_8_15_0)) { this.vectorSimilarity = in.readOptionalFloat(); } else { this.vectorSimilarity = null; @@ -117,7 +117,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { } } if (out.getTransportVersion().onOrAfter(TransportVersions.FIX_VECTOR_SIMILARITY_INNER_HITS) - || out.getTransportVersion().isPatchFrom(TransportVersions.FIX_VECTOR_SIMILARITY_INNER_HITS_BACKPORT_8_15)) { + || out.getTransportVersion().isPatchFrom(TransportVersions.V_8_15_0)) { out.writeOptionalFloat(vectorSimilarity); } } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnSearchBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnSearchBuilder.java index a30aa0efc8bb3..8ce8fc07f3acd 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/KnnSearchBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnSearchBuilder.java @@ -261,7 +261,7 @@ public KnnSearchBuilder(StreamInput in) throws IOException { } this.filterQueries = in.readNamedWriteableCollectionAsList(QueryBuilder.class); this.boost = in.readFloat(); - if (in.getTransportVersion().onOrAfter(TransportVersions.TOP_LEVEL_KNN_SUPPORT_QUERY_NAME)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { this.queryName = in.readOptionalString(); } else { this.queryName = null; @@ -501,7 +501,7 @@ public void writeTo(StreamOutput out) throws IOException { } out.writeNamedWriteableCollection(filterQueries); 
out.writeFloat(boost); - if (out.getTransportVersion().onOrAfter(TransportVersions.TOP_LEVEL_KNN_SUPPORT_QUERY_NAME)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeOptionalString(queryName); } if (out.getTransportVersion().before(TransportVersions.V_8_7_0) && queryVectorBuilder != null) { diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java index 9af1968b55d14..deb7e6bd035b8 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java @@ -192,7 +192,7 @@ private KnnVectorQueryBuilder( public KnnVectorQueryBuilder(StreamInput in) throws IOException { super(in); this.fieldName = in.readString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.K_FOR_KNN_QUERY_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { this.k = in.readOptionalVInt(); } else { this.k = null; @@ -279,7 +279,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { throw new IllegalStateException("missing a rewriteAndFetch?"); } out.writeString(fieldName); - if (out.getTransportVersion().onOrAfter(TransportVersions.K_FOR_KNN_QUERY_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeOptionalVInt(k); } if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { diff --git a/server/src/test/java/org/elasticsearch/TransportVersionTests.java b/server/src/test/java/org/elasticsearch/TransportVersionTests.java index e3ab7463ad941..6c2cc5c1f4cc0 100644 --- a/server/src/test/java/org/elasticsearch/TransportVersionTests.java +++ b/server/src/test/java/org/elasticsearch/TransportVersionTests.java @@ -211,7 +211,7 @@ public void testDenseTransportVersions() { Set missingVersions = new TreeSet<>(); TransportVersion previous = null; 
for (var tv : TransportVersions.getAllVersions()) { - if (tv.before(TransportVersions.V_8_14_0)) { + if (tv.before(TransportVersions.V_8_15_2)) { continue; } if (previous == null) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequestParametersTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequestParametersTests.java index e7661bf3848f5..f37b1d1b41712 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequestParametersTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequestParametersTests.java @@ -23,7 +23,7 @@ public class NodesStatsRequestParametersTests extends ESTestCase { public void testReadWriteMetricSet() { - for (var version : List.of(TransportVersions.VERSIONED_MASTER_NODE_REQUESTS, TransportVersions.NODES_STATS_ENUM_SET)) { + for (var version : List.of(TransportVersions.V_8_15_0, TransportVersions.NODES_STATS_ENUM_SET)) { var randSet = randomSubsetOf(Metric.ALL); var metricsOut = randSet.isEmpty() ? 
EnumSet.noneOf(Metric.class) : EnumSet.copyOf(randSet); try { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequestTests.java index 733ef1c2d5935..9957acee36b9d 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequestTests.java @@ -50,7 +50,7 @@ public void testBwcSerialization() throws Exception { in.setTransportVersion(out.getTransportVersion()); assertEquals(request.getParentTask(), TaskId.readFromStream(in)); assertEquals(request.masterNodeTimeout(), in.readTimeValue()); - if (in.getTransportVersion().onOrAfter(TransportVersions.VERSIONED_MASTER_NODE_REQUESTS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { assertEquals(request.masterTerm(), in.readVLong()); } assertEquals(request.ackTimeout(), in.readTimeValue()); @@ -79,7 +79,7 @@ public void testBwcSerialization() throws Exception { out.setTransportVersion(version); sample.getParentTask().writeTo(out); out.writeTimeValue(sample.masterNodeTimeout()); - if (out.getTransportVersion().onOrAfter(TransportVersions.VERSIONED_MASTER_NODE_REQUESTS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeVLong(sample.masterTerm()); } out.writeTimeValue(sample.ackTimeout()); diff --git a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStartedClusterStateTaskExecutorTests.java b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStartedClusterStateTaskExecutorTests.java index d1734dbf45f04..ca7376a43d718 100644 --- a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStartedClusterStateTaskExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStartedClusterStateTaskExecutorTests.java @@ -424,7 +424,7 @@ public 
void testExpandsTimestampRangeForPrimary() throws Exception { } final var eventIngestedRange = resultingState.metadata().index(indexName).getEventIngestedRange(); - if (clusterState.getMinTransportVersion().before(TransportVersions.EVENT_INGESTED_RANGE_IN_CLUSTER_STATE)) { + if (clusterState.getMinTransportVersion().before(TransportVersions.V_8_15_0)) { assertThat(eventIngestedRange, sameInstance(IndexLongFieldRange.UNKNOWN)); } else { if (shardEventIngestedRange == ShardLongFieldRange.UNKNOWN) { diff --git a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java index a1536e17a15de..3c680d891ff13 100644 --- a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java @@ -599,7 +599,7 @@ public void testStartedShardEntrySerialization() throws Exception { assertThat(deserialized.primaryTerm, equalTo(primaryTerm)); assertThat(deserialized.message, equalTo(message)); assertThat(deserialized.timestampRange, equalTo(timestampRange)); - if (version.before(TransportVersions.EVENT_INGESTED_RANGE_IN_CLUSTER_STATE)) { + if (version.before(TransportVersions.V_8_15_0)) { assertThat(deserialized.eventIngestedRange, equalTo(ShardLongFieldRange.UNKNOWN)); } else { assertThat(deserialized.eventIngestedRange, equalTo(eventIngestedRange)); @@ -615,7 +615,7 @@ public void testStartedShardEntrySerializationWithOlderTransportVersion() throws final TransportVersion version = randomFrom( getFirstVersion(), getPreviousVersion(TransportVersions.MINIMUM_COMPATIBLE), - getPreviousVersion(TransportVersions.EVENT_INGESTED_RANGE_IN_CLUSTER_STATE) + getPreviousVersion(TransportVersions.V_8_15_0) ); final ShardLongFieldRange timestampRange = ShardLongFieldRangeWireTests.randomRange(); final ShardLongFieldRange eventIngestedRange = 
ShardLongFieldRangeWireTests.randomRange(); diff --git a/server/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java b/server/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java index ca5b9295adfd7..d8f3c4f1af48e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java @@ -70,7 +70,7 @@ public void testClusterStateSerialization() throws Exception { .settings(settings(IndexVersion.current())) .numberOfShards(10) .numberOfReplicas(1) - .eventIngestedRange(eventIngestedRangeInput, TransportVersions.EVENT_INGESTED_RANGE_IN_CLUSTER_STATE); + .eventIngestedRange(eventIngestedRangeInput, TransportVersions.V_8_15_0); ClusterStateTestRecord result = createAndSerializeClusterState(indexMetadataBuilder, TransportVersion.current()); @@ -90,7 +90,7 @@ public void testClusterStateSerializationWithTimestampRangesWithOlderTransportVe TransportVersion versionBeforeEventIngestedInClusterState = randomFrom( TransportVersions.V_7_0_0, TransportVersions.V_8_0_0, - TransportVersions.ML_INFERENCE_GOOGLE_VERTEX_AI_EMBEDDINGS_ADDED // version before EVENT_INGESTED_RANGE_IN_CLUSTER_STATE + TransportVersionUtils.getPreviousVersion(TransportVersions.V_8_15_0) ); { IndexLongFieldRange eventIngestedRangeInput = randomFrom( diff --git a/server/src/test/java/org/elasticsearch/search/rank/RankFeatureShardPhaseTests.java b/server/src/test/java/org/elasticsearch/search/rank/RankFeatureShardPhaseTests.java index 5862e1bd1329f..6250d1679fda3 100644 --- a/server/src/test/java/org/elasticsearch/search/rank/RankFeatureShardPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/rank/RankFeatureShardPhaseTests.java @@ -196,7 +196,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.RANK_FEATURE_PHASE_ADDED; + 
return TransportVersions.V_8_15_0; } }; } diff --git a/test/framework/src/main/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java b/test/framework/src/main/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java index 6a0b4f2dd2c1b..a11291e846ec1 100644 --- a/test/framework/src/main/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java @@ -151,10 +151,7 @@ public static ClusterState state( .settings(indexSettings(IndexVersion.current(), 1, numberOfReplicas).put(SETTING_CREATION_DATE, System.currentTimeMillis())) .primaryTerm(0, primaryTerm) .timestampRange(timeFieldRange) - .eventIngestedRange( - timeFieldRange, - timeFieldRange == IndexLongFieldRange.UNKNOWN ? null : TransportVersions.EVENT_INGESTED_RANGE_IN_CLUSTER_STATE - ) + .eventIngestedRange(timeFieldRange, timeFieldRange == IndexLongFieldRange.UNKNOWN ? 
null : TransportVersions.V_8_15_0) .build(); IndexShardRoutingTable.Builder indexShardRoutingBuilder = new IndexShardRoutingTable.Builder(shardId); @@ -287,10 +284,7 @@ public static ClusterState state(final int numberOfNodes, final String[] indices .settings( indexSettings(IndexVersion.current(), numberOfPrimaries, 0).put(SETTING_CREATION_DATE, System.currentTimeMillis()) ) - .eventIngestedRange( - IndexLongFieldRange.UNKNOWN, - randomFrom(TransportVersions.V_8_0_0, TransportVersions.EVENT_INGESTED_RANGE_IN_CLUSTER_STATE) - ) + .eventIngestedRange(IndexLongFieldRange.UNKNOWN, randomFrom(TransportVersions.V_8_0_0, TransportVersions.V_8_15_0)) .build(); IndexRoutingTable.Builder indexRoutingTable = IndexRoutingTable.builder(indexMetadata.getIndex()); diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/GetAutoscalingCapacityAction.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/GetAutoscalingCapacityAction.java index 81b53487f9f15..836c8b155cb09 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/GetAutoscalingCapacityAction.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/GetAutoscalingCapacityAction.java @@ -47,7 +47,7 @@ public Request(TimeValue masterNodeTimeout) { public Request(final StreamInput in) throws IOException { super(in); - if (in.getTransportVersion().before(TransportVersions.GET_AUTOSCALING_CAPACITY_UNUSED_TIMEOUT)) { + if (in.getTransportVersion().before(TransportVersions.V_8_15_0)) { in.readTimeValue(); // unused } } @@ -55,7 +55,7 @@ public Request(final StreamInput in) throws IOException { @Override public void writeTo(final StreamOutput out) throws IOException { super.writeTo(out); - if (out.getTransportVersion().before(TransportVersions.GET_AUTOSCALING_CAPACITY_UNUSED_TIMEOUT)) { + if (out.getTransportVersion().before(TransportVersions.V_8_15_0)) { 
out.writeTimeValue(AcknowledgedRequest.DEFAULT_ACK_TIMEOUT); // unused } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datastreams/DataStreamFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datastreams/DataStreamFeatureSetUsage.java index 63fcd3dc4e798..e267c94e06892 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datastreams/DataStreamFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datastreams/DataStreamFeatureSetUsage.java @@ -91,8 +91,8 @@ public DataStreamStats(StreamInput in) throws IOException { this( in.readVLong(), in.readVLong(), - in.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_TELEMETRY) ? in.readVLong() : 0, - in.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_TELEMETRY) ? in.readVLong() : 0 + in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0) ? in.readVLong() : 0, + in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0) ? 
in.readVLong() : 0 ); } @@ -100,7 +100,7 @@ public DataStreamStats(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { out.writeVLong(this.totalDataStreamCount); out.writeVLong(this.indicesBehindDataStream); - if (out.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_TELEMETRY)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeVLong(this.failureStoreEnabledDataStreamCount); out.writeVLong(this.failureStoreIndicesCount); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java index fc9087d97bd79..0457de6edcc9f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java @@ -207,8 +207,8 @@ public CacheStats(StreamInput in) throws IOException { in.readVLong(), in.readVLong(), in.readVLong(), - in.getTransportVersion().onOrAfter(TransportVersions.ENRICH_CACHE_ADDITIONAL_STATS) ? in.readLong() : -1, - in.getTransportVersion().onOrAfter(TransportVersions.ENRICH_CACHE_ADDITIONAL_STATS) ? in.readLong() : -1, + in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0) ? in.readLong() : -1, + in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0) ? in.readLong() : -1, in.getTransportVersion().onOrAfter(TransportVersions.ENRICH_CACHE_STATS_SIZE_ADDED) ? 
in.readLong() : -1 ); } @@ -233,7 +233,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVLong(hits); out.writeVLong(misses); out.writeVLong(evictions); - if (out.getTransportVersion().onOrAfter(TransportVersions.ENRICH_CACHE_ADDITIONAL_STATS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeLong(hitsTimeInMillis); out.writeLong(missesTimeInMillis); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceEndpointAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceEndpointAction.java index e9d612751e48f..226fe3630b387 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceEndpointAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceEndpointAction.java @@ -50,7 +50,7 @@ public Request(StreamInput in) throws IOException { super(in); this.inferenceEndpointId = in.readString(); this.taskType = TaskType.fromStream(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_ENHANCE_DELETE_ENDPOINT)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { this.forceDelete = Boolean.TRUE.equals(in.readOptionalBoolean()); this.dryRun = Boolean.TRUE.equals(in.readOptionalBoolean()); } else { @@ -80,7 +80,7 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(inferenceEndpointId); taskType.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_ENHANCE_DELETE_ENDPOINT)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeOptionalBoolean(forceDelete); out.writeOptionalBoolean(dryRun); } @@ -121,7 +121,7 @@ public Response(boolean acknowledged, Set pipelineIds, Set seman public Response(StreamInput in) throws IOException { super(in); - if 
(in.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_ENHANCE_DELETE_ENDPOINT)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { pipelineIds = in.readCollectionAsSet(StreamInput::readString); } else { pipelineIds = Set.of(); @@ -140,7 +140,7 @@ public Response(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_ENHANCE_DELETE_ENDPOINT)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeCollection(pipelineIds, StreamOutput::writeString); } if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_DONT_DELETE_WHEN_SEMANTIC_TEXT_EXISTS)) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java index 6ebf15bf34937..e331cdbc59358 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java @@ -31,8 +31,6 @@ import java.util.Objects; import java.util.stream.Collectors; -import static org.elasticsearch.TransportVersions.ML_RERANK_DOC_OPTIONAL; - public class RankedDocsResults implements InferenceServiceResults { public static final String NAME = "rerank_service_results"; public static final String RERANK = TaskType.RERANK.toString(); @@ -114,7 +112,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } public static RankedDoc of(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(ML_RERANK_DOC_OPTIONAL)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { return new RankedDoc(in.readInt(), in.readFloat(), in.readOptionalString()); } else if 
(in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0)) { return new RankedDoc(in.readInt(), in.readFloat(), in.readString()); @@ -125,7 +123,7 @@ public static RankedDoc of(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(ML_RERANK_DOC_OPTIONAL)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeInt(index); out.writeFloat(relevanceScore); out.writeOptionalString(text); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferModelAction.java index e6b580f62fdd3..05d9496e434c3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferModelAction.java @@ -198,7 +198,7 @@ public Request(StreamInput in) throws IOException { } else { prefixType = TrainedModelPrefixStrings.PrefixType.NONE; } - if (in.getTransportVersion().onOrAfter(TransportVersions.ML_CHUNK_INFERENCE_OPTION)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { chunked = in.readBoolean(); } else { chunked = false; @@ -285,7 +285,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { out.writeEnum(prefixType); } - if (out.getTransportVersion().onOrAfter(TransportVersions.ML_CHUNK_INFERENCE_OPTION)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeBoolean(chunked); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelCacheMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelCacheMetadata.java index 35c6bf96a09e1..3c8dfecc32e2a 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelCacheMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelCacheMetadata.java @@ -86,7 +86,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_TRAINED_MODEL_CACHE_METADATA_ADDED; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStats.java index a95ee13f57913..3a9123445e697 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStats.java @@ -264,7 +264,7 @@ public ModelSizeStats(StreamInput in) throws IOException { } else { assignmentMemoryBasis = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.ML_AD_OUTPUT_MEMORY_ALLOCATOR_FIELD)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { outputMemoryAllocatorBytes = in.readOptionalVLong(); } else { outputMemoryAllocatorBytes = null; @@ -306,7 +306,7 @@ public void writeTo(StreamOutput out) throws IOException { } else { out.writeBoolean(false); } - if (out.getTransportVersion().onOrAfter(TransportVersions.ML_AD_OUTPUT_MEMORY_ALLOCATOR_FIELD)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeOptionalVLong(outputMemoryAllocatorBytes); } out.writeVLong(categorizedDocCount); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupFeatureSetUsage.java index a198c0570cd91..6b4ee3c1fdd16 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupFeatureSetUsage.java @@ -16,15 +16,13 @@ import java.io.IOException; -import static org.elasticsearch.TransportVersions.ROLLUP_USAGE; - public class RollupFeatureSetUsage extends XPackFeatureSet.Usage { private final int numberOfRollupJobs; public RollupFeatureSetUsage(StreamInput input) throws IOException { super(input); - this.numberOfRollupJobs = input.getTransportVersion().onOrAfter(ROLLUP_USAGE) ? input.readVInt() : 0; + this.numberOfRollupJobs = input.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0) ? input.readVInt() : 0; } public RollupFeatureSetUsage(int numberOfRollupJobs) { @@ -39,7 +37,7 @@ public int getNumberOfRollupJobs() { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - if (out.getTransportVersion().onOrAfter(ROLLUP_USAGE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeVInt(numberOfRollupJobs); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/GetSecuritySettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/GetSecuritySettingsAction.java index 093dfabd0bf65..88bc8a46532b5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/GetSecuritySettingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/GetSecuritySettingsAction.java @@ -42,7 +42,7 @@ public Request(TimeValue masterNodeTimeout) { @UpdateForV9(owner = UpdateForV9.Owner.SECURITY) // no need for bwc any more, this can be inlined public static Request readFrom(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(TransportVersions.SECURITY_SETTINGS_REQUEST_TIMEOUTS)) { + if 
(in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { return new Request(in); } else { return new Request(TimeValue.THIRTY_SECONDS); @@ -55,7 +55,7 @@ private Request(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.SECURITY_SETTINGS_REQUEST_TIMEOUTS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { super.writeTo(out); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java index 316f162729356..c3bd1c51d113c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java @@ -118,7 +118,7 @@ public Request( @UpdateForV9(owner = UpdateForV9.Owner.SECURITY) // no need for bwc any more, this can be inlined public static Request readFrom(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(TransportVersions.SECURITY_SETTINGS_REQUEST_TIMEOUTS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { return new Request(in); } else { return new Request(TimeValue.THIRTY_SECONDS, TimeValue.THIRTY_SECONDS, in); @@ -141,7 +141,7 @@ private Request(TimeValue masterNodeTimeout, TimeValue ackTimeout, StreamInput i @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.SECURITY_SETTINGS_REQUEST_TIMEOUTS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { super.writeTo(out); } out.writeGenericMap(this.mainIndexSettings); diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesResponse.java index c5cbe50ef1575..de351cd59c690 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesResponse.java @@ -30,6 +30,8 @@ import java.util.TreeSet; import java.util.stream.Collectors; +import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_REMOTE_CLUSTER_PRIVS; + /** * Response for a {@link GetUserPrivilegesRequest} */ @@ -55,7 +57,7 @@ public GetUserPrivilegesResponse(StreamInput in) throws IOException { } else { remoteIndex = Set.of(); } - if (in.getTransportVersion().onOrAfter(TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS)) { + if (in.getTransportVersion().onOrAfter(ROLE_REMOTE_CLUSTER_PRIVS)) { remoteClusterPermissions = new RemoteClusterPermissions(in); } else { remoteClusterPermissions = RemoteClusterPermissions.NONE; @@ -134,14 +136,14 @@ public void writeTo(StreamOutput out) throws IOException { + "]" ); } - if (out.getTransportVersion().onOrAfter(TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS)) { + if (out.getTransportVersion().onOrAfter(ROLE_REMOTE_CLUSTER_PRIVS)) { remoteClusterPermissions.writeTo(out); } else if (hasRemoteClusterPrivileges()) { throw new IllegalArgumentException( "versions of Elasticsearch before [" - + TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS + + ROLE_REMOTE_CLUSTER_PRIVS.toReleaseVersion() + "] can't handle remote cluster privileges and attempted to send to [" - + out.getTransportVersion() + + out.getTransportVersion().toReleaseVersion() + "]" ); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java index 3e3b1178b372c..04dda75692208 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java @@ -54,7 +54,6 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; -import static org.elasticsearch.TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS; import static org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; @@ -77,6 +76,7 @@ import static org.elasticsearch.xpack.core.security.authc.AuthenticationField.FALLBACK_REALM_NAME; import static org.elasticsearch.xpack.core.security.authc.AuthenticationField.FALLBACK_REALM_TYPE; import static org.elasticsearch.xpack.core.security.authc.RealmDomain.REALM_DOMAIN_PARSER; +import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_REMOTE_CLUSTER_PRIVS; /** * The Authentication class encapsulates identity information created after successful authentication diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java index 1a8839fa0fa4a..8d069caf0496f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java @@ -50,6 +50,7 @@ import java.util.Objects; import static org.elasticsearch.common.xcontent.XContentHelper.createParserNotCompressed; +import static 
org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_REMOTE_CLUSTER_PRIVS; /** * A holder for a Role that contains user-readable information about the Role @@ -58,6 +59,7 @@ public class RoleDescriptor implements ToXContentObject, Writeable { public static final TransportVersion WORKFLOWS_RESTRICTION_VERSION = TransportVersions.V_8_9_X; + public static final TransportVersion SECURITY_ROLE_DESCRIPTION = TransportVersions.V_8_15_0; public static final String ROLE_TYPE = "role"; @@ -220,12 +222,12 @@ public RoleDescriptor(StreamInput in) throws IOException { } else { this.restriction = Restriction.NONE; } - if (in.getTransportVersion().onOrAfter(TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS)) { + if (in.getTransportVersion().onOrAfter(ROLE_REMOTE_CLUSTER_PRIVS)) { this.remoteClusterPermissions = new RemoteClusterPermissions(in); } else { this.remoteClusterPermissions = RemoteClusterPermissions.NONE; } - if (in.getTransportVersion().onOrAfter(TransportVersions.SECURITY_ROLE_DESCRIPTION)) { + if (in.getTransportVersion().onOrAfter(SECURITY_ROLE_DESCRIPTION)) { this.description = in.readOptionalString(); } else { this.description = ""; @@ -485,10 +487,10 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(WORKFLOWS_RESTRICTION_VERSION)) { restriction.writeTo(out); } - if (out.getTransportVersion().onOrAfter(TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS)) { + if (out.getTransportVersion().onOrAfter(ROLE_REMOTE_CLUSTER_PRIVS)) { remoteClusterPermissions.writeTo(out); } - if (out.getTransportVersion().onOrAfter(TransportVersions.SECURITY_ROLE_DESCRIPTION)) { + if (out.getTransportVersion().onOrAfter(SECURITY_ROLE_DESCRIPTION)) { out.writeOptionalString(description); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleMappingMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleMappingMetadata.java index 
da6ff6ad24c34..b38b33e082382 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleMappingMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleMappingMetadata.java @@ -108,7 +108,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.SECURITY_ROLE_MAPPINGS_IN_CLUSTER_STATE; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/RemoteClusterPermissions.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/RemoteClusterPermissions.java index 2960c5aaa53e7..0d8880c33720b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/RemoteClusterPermissions.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/RemoteClusterPermissions.java @@ -61,13 +61,15 @@ */ public class RemoteClusterPermissions implements NamedWriteable, ToXContentObject { + public static final TransportVersion ROLE_REMOTE_CLUSTER_PRIVS = TransportVersions.V_8_15_0; + public static final String NAME = "remote_cluster_permissions"; private static final Logger logger = LogManager.getLogger(RemoteClusterPermissions.class); private final List remoteClusterPermissionGroups; // package private non-final for testing static Map> allowedRemoteClusterPermissions = Map.of( - TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS, + ROLE_REMOTE_CLUSTER_PRIVS, Set.of(ClusterPrivilegeResolver.MONITOR_ENRICH.name()) ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/SecurityMigrationTaskParams.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/SecurityMigrationTaskParams.java index 14cc4d3d6f5b9..e69a058914634 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/SecurityMigrationTaskParams.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/SecurityMigrationTaskParams.java @@ -48,7 +48,7 @@ public SecurityMigrationTaskParams(int migrationVersion, boolean migrationNeeded public SecurityMigrationTaskParams(StreamInput in) throws IOException { this.migrationVersion = in.readInt(); - if (in.getTransportVersion().onOrAfter(TransportVersions.SECURITY_MIGRATIONS_MIGRATION_NEEDED_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { this.migrationNeeded = in.readBoolean(); } else { this.migrationNeeded = true; @@ -58,7 +58,7 @@ public SecurityMigrationTaskParams(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { out.writeInt(migrationVersion); - if (out.getTransportVersion().onOrAfter(TransportVersions.SECURITY_MIGRATIONS_MIGRATION_NEEDED_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeBoolean(migrationNeeded); } } @@ -70,7 +70,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ADD_METADATA_FLATTENED_TO_ROLES; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/GetWatcherSettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/GetWatcherSettingsAction.java index f1d046f09b0f7..c6a22ec289be9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/GetWatcherSettingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/GetWatcherSettingsAction.java @@ -37,7 +37,7 @@ public Request(TimeValue masterNodeTimeout) { } public static Request 
readFrom(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(TransportVersions.WATCHER_REQUEST_TIMEOUTS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { return new Request(in); } else { return new Request(TimeValue.THIRTY_SECONDS); @@ -50,7 +50,7 @@ private Request(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.WATCHER_REQUEST_TIMEOUTS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { super.writeTo(out); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java index 19cc83cda2dbb..42fc7c196bbcf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java @@ -47,7 +47,7 @@ public Request(TimeValue masterNodeTimeout, TimeValue ackTimeout, Map { @Override @@ -47,7 +46,7 @@ protected RankedDocsResults mutateInstance(RankedDocsResults instance) throws IO @Override protected RankedDocsResults mutateInstanceForVersion(RankedDocsResults instance, TransportVersion fromVersion) { - if (fromVersion.onOrAfter(ML_RERANK_DOC_OPTIONAL)) { + if (fromVersion.onOrAfter(TransportVersions.V_8_15_0)) { return instance; } else { var compatibleDocs = rankedDocsNullStringToEmpty(instance.getRankedDocs()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsActionResponseTests.java index 
d60bbc6cc7713..1b9d4ebb100b2 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsActionResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsActionResponseTests.java @@ -562,7 +562,7 @@ protected Response mutateInstanceForVersion(Response instance, TransportVersion RESULTS_FIELD ) ); - } else if (version.before(TransportVersions.NODE_STATS_INGEST_BYTES)) { + } else if (version.before(TransportVersions.V_8_15_0)) { // added ByteStats to IngestStats.PipelineStat return new Response( new QueryPage<>( diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java index 2e4689de787b3..05266e58964c9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java @@ -252,7 +252,7 @@ protected Request mutateInstanceForVersion(Request instance, TransportVersion ve r.setHighPriority(instance.isHighPriority()); r.setPrefixType(TrainedModelPrefixStrings.PrefixType.NONE); return r; - } else if (version.before(TransportVersions.ML_CHUNK_INFERENCE_OPTION)) { + } else if (version.before(TransportVersions.V_8_15_0)) { var r = new Request( instance.getId(), adjustedUpdate, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesResponseTests.java index 437f58449b4de..ab62073c12d80 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesResponseTests.java +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesResponseTests.java @@ -43,6 +43,7 @@ import java.util.stream.Collectors; import static java.util.Collections.emptySet; +import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_REMOTE_CLUSTER_PRIVS; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -69,7 +70,7 @@ public void testSerialization() throws IOException { public void testSerializationForCurrentVersion() throws Exception { final TransportVersion version = TransportVersionUtils.randomCompatibleVersion(random()); final boolean canIncludeRemoteIndices = version.onOrAfter(TransportVersions.V_8_8_0); - final boolean canIncludeRemoteCluster = version.onOrAfter(TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS); + final boolean canIncludeRemoteCluster = version.onOrAfter(ROLE_REMOTE_CLUSTER_PRIVS); final GetUserPrivilegesResponse original = randomResponse(canIncludeRemoteIndices, canIncludeRemoteCluster); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTests.java index 3d4d6106a7eaf..66e246d1c8a50 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTests.java @@ -44,6 +44,7 @@ import static java.util.Map.entry; import static org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper.randomCrossClusterAccessSubjectInfo; import static org.elasticsearch.xpack.core.security.authc.CrossClusterAccessSubjectInfoTests.randomRoleDescriptorsIntersection; +import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_REMOTE_CLUSTER_PRIVS; import static 
org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -1108,7 +1109,7 @@ public void testMaybeRewriteMetadataForApiKeyRoleDescriptorsWithRemoteCluster() final Authentication original = AuthenticationTestHelper.builder() .apiKey() .metadata(metadata) - .transportVersion(TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS) + .transportVersion(ROLE_REMOTE_CLUSTER_PRIVS) .build(); // pick a version before that of the authentication instance to force a rewrite diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java index 8e1bc7af1bdc8..94430a4ed5bba 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java @@ -45,10 +45,12 @@ import java.util.Map; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptor.SECURITY_ROLE_DESCRIPTION; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptor.WORKFLOWS_RESTRICTION_VERSION; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomIndicesPrivileges; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomIndicesPrivilegesBuilder; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRemoteClusterPermissions; +import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_REMOTE_CLUSTER_PRIVS; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -589,9 +591,9 @@ public void 
testParsingFieldPermissionsUsesCache() throws IOException { public void testSerializationForCurrentVersion() throws Exception { final TransportVersion version = TransportVersionUtils.randomCompatibleVersion(random()); final boolean canIncludeRemoteIndices = version.onOrAfter(TransportVersions.V_8_8_0); - final boolean canIncludeRemoteClusters = version.onOrAfter(TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS); + final boolean canIncludeRemoteClusters = version.onOrAfter(ROLE_REMOTE_CLUSTER_PRIVS); final boolean canIncludeWorkflows = version.onOrAfter(WORKFLOWS_RESTRICTION_VERSION); - final boolean canIncludeDescription = version.onOrAfter(TransportVersions.SECURITY_ROLE_DESCRIPTION); + final boolean canIncludeDescription = version.onOrAfter(SECURITY_ROLE_DESCRIPTION); logger.info("Testing serialization with version {}", version); BytesStreamOutput output = new BytesStreamOutput(); output.setTransportVersion(version); @@ -667,9 +669,7 @@ public void testSerializationWithRemoteIndicesWithElderVersion() throws IOExcept } public void testSerializationWithRemoteClusterWithElderVersion() throws IOException { - final TransportVersion versionBeforeRemoteCluster = TransportVersionUtils.getPreviousVersion( - TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS - ); + final TransportVersion versionBeforeRemoteCluster = TransportVersionUtils.getPreviousVersion(ROLE_REMOTE_CLUSTER_PRIVS); final TransportVersion version = TransportVersionUtils.randomVersionBetween( random(), TransportVersions.V_7_17_0, @@ -815,9 +815,7 @@ public void testParseRoleWithRestrictionWhenAllowRestrictionIsTrue() throws IOEx } public void testSerializationWithDescriptionAndUnsupportedVersions() throws IOException { - final TransportVersion versionBeforeRoleDescription = TransportVersionUtils.getPreviousVersion( - TransportVersions.SECURITY_ROLE_DESCRIPTION - ); + final TransportVersion versionBeforeRoleDescription = TransportVersionUtils.getPreviousVersion(SECURITY_ROLE_DESCRIPTION); final TransportVersion version 
= TransportVersionUtils.randomVersionBetween( random(), TransportVersions.V_7_17_0, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/RemoteClusterPermissionsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/RemoteClusterPermissionsTests.java index 394455879bbdf..5b5a895f12ae8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/RemoteClusterPermissionsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/RemoteClusterPermissionsTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.security.authz.permission; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; @@ -29,6 +28,7 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_REMOTE_CLUSTER_PRIVS; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -163,11 +163,11 @@ public void testMonitorEnrichPerVersion() { // test monitor_enrich before, after and on monitor enrich version String[] privileges = randomBoolean() ? 
new String[] { "monitor_enrich" } : new String[] { "monitor_enrich", "foo", "bar" }; String[] before = new RemoteClusterPermissions().addGroup(new RemoteClusterPermissionGroup(privileges, new String[] { "*" })) - .privilegeNames("*", TransportVersionUtils.getPreviousVersion(TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS)); + .privilegeNames("*", TransportVersionUtils.getPreviousVersion(ROLE_REMOTE_CLUSTER_PRIVS)); // empty set since monitor_enrich is not allowed in the before version assertThat(Set.of(before), equalTo(Collections.emptySet())); String[] on = new RemoteClusterPermissions().addGroup(new RemoteClusterPermissionGroup(privileges, new String[] { "*" })) - .privilegeNames("*", TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS); + .privilegeNames("*", ROLE_REMOTE_CLUSTER_PRIVS); // only monitor_enrich since the other values are not allowed assertThat(Set.of(on), equalTo(Set.of("monitor_enrich"))); String[] after = new RemoteClusterPermissions().addGroup(new RemoteClusterPermissionGroup(privileges, new String[] { "*" })) diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRule.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRule.java index c14bb8e9a4ec9..e1734268fbf46 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRule.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRule.java @@ -130,7 +130,7 @@ public QueryRule(StreamInput in) throws IOException { this.criteria = in.readCollectionAsList(QueryRuleCriteria::new); this.actions = in.readGenericMap(); - if (in.getTransportVersion().onOrAfter(TransportVersions.QUERY_RULE_CRUD_API_PUT)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { this.priority = in.readOptionalVInt(); } else { this.priority = null; @@ -175,7 +175,7 @@ public void writeTo(StreamOutput out) throws IOException { 
out.writeString(type.toString()); out.writeCollection(criteria); out.writeGenericMap(actions); - if (out.getTransportVersion().onOrAfter(TransportVersions.QUERY_RULE_CRUD_API_PUT)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeOptionalVInt(priority); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilder.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilder.java index a3703a5005979..3e573929731fb 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilder.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilder.java @@ -88,7 +88,7 @@ public RuleQueryBuilder(StreamInput in) throws IOException { super(in); organicQuery = in.readNamedWriteable(QueryBuilder.class); matchCriteria = in.readGenericMap(); - if (in.getTransportVersion().onOrAfter(TransportVersions.RULE_QUERY_RENAME)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { rulesetIds = in.readStringCollectionAsList(); } else { rulesetIds = List.of(in.readString()); @@ -144,7 +144,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeNamedWriteable(organicQuery); out.writeGenericMap(matchCriteria); - if (out.getTransportVersion().onOrAfter(TransportVersions.RULE_QUERY_RENAME)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeStringCollection(rulesetIds); } else { out.writeString(rulesetIds.get(0)); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/GetQueryRuleActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/GetQueryRuleActionResponseBWCSerializingTests.java index b60747ce47321..1f1449d46264b 100644 --- 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/GetQueryRuleActionResponseBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/GetQueryRuleActionResponseBWCSerializingTests.java @@ -52,8 +52,6 @@ protected GetQueryRuleAction.Response mutateInstanceForVersion(GetQueryRuleActio @Override protected List bwcVersions() { - return getAllBWCVersions().stream() - .filter(v -> v.onOrAfter(TransportVersions.QUERY_RULE_CRUD_API_GET_DELETE)) - .collect(Collectors.toList()); + return getAllBWCVersions().stream().filter(v -> v.onOrAfter(TransportVersions.V_8_15_0)).collect(Collectors.toList()); } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/GetQueryRulesetActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/GetQueryRulesetActionResponseBWCSerializingTests.java index ca58e42ba6248..8bfbcd5ec471b 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/GetQueryRulesetActionResponseBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/GetQueryRulesetActionResponseBWCSerializingTests.java @@ -61,7 +61,7 @@ protected GetQueryRulesetAction.Response mutateInstanceForVersion(GetQueryRulese rules.add(new QueryRule(rule.id(), rule.type(), newCriteria, rule.actions(), null)); } return new GetQueryRulesetAction.Response(new QueryRuleset(instance.queryRuleset().id(), rules)); - } else if (version.before(TransportVersions.QUERY_RULE_CRUD_API_PUT)) { + } else if (version.before(TransportVersions.V_8_15_0)) { List rules = new ArrayList<>(); for (QueryRule rule : instance.queryRuleset().rules()) { rules.add(new QueryRule(rule.id(), rule.type(), rule.criteria(), rule.actions(), null)); diff --git 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/PutQueryRuleActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/PutQueryRuleActionRequestBWCSerializingTests.java index 5b42a899feac8..b60b8718e8549 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/PutQueryRuleActionRequestBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/PutQueryRuleActionRequestBWCSerializingTests.java @@ -55,8 +55,6 @@ protected PutQueryRuleAction.Request mutateInstanceForVersion(PutQueryRuleAction @Override protected List bwcVersions() { - return getAllBWCVersions().stream() - .filter(v -> v.onOrAfter(TransportVersions.QUERY_RULE_CRUD_API_PUT)) - .collect(Collectors.toList()); + return getAllBWCVersions().stream().filter(v -> v.onOrAfter(TransportVersions.V_8_15_0)).collect(Collectors.toList()); } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/PutQueryRulesetActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/PutQueryRulesetActionRequestBWCSerializingTests.java index f9b47f5bb2cd2..3b6195cc8f495 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/PutQueryRulesetActionRequestBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/PutQueryRulesetActionRequestBWCSerializingTests.java @@ -63,7 +63,7 @@ protected PutQueryRulesetAction.Request mutateInstanceForVersion(PutQueryRuleset rules.add(new QueryRule(rule.id(), rule.type(), newCriteria, rule.actions(), null)); } return new PutQueryRulesetAction.Request(new QueryRuleset(instance.queryRuleset().id(), rules)); - } else if 
(version.before(TransportVersions.QUERY_RULE_CRUD_API_PUT)) { + } else if (version.before(TransportVersions.V_8_15_0)) { List rules = new ArrayList<>(); for (QueryRule rule : instance.queryRuleset().rules()) { rules.add(new QueryRule(rule.id(), rule.type(), rule.criteria(), rule.actions(), null)); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractArrayBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractArrayBlock.java index c14289ff2366b..ab125bb868388 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractArrayBlock.java @@ -191,8 +191,7 @@ void writeSubFields(StreamOutput out) throws IOException { if (nullsMask != null) { out.writeLongArray(nullsMask.toLongArray()); } - if (out.getTransportVersion().before(TransportVersions.ESQL_MV_ORDERING_SORTED_ASCENDING) - && mvOrdering == MvOrdering.SORTED_ASCENDING) { + if (out.getTransportVersion().before(TransportVersions.V_8_15_0) && mvOrdering == MvOrdering.SORTED_ASCENDING) { out.writeEnum(MvOrdering.UNORDERED); } else { out.writeEnum(mvOrdering); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingToIteratorOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingToIteratorOperator.java index 4fb4053b0c0f4..32492af157fe6 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingToIteratorOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingToIteratorOperator.java @@ -231,7 +231,7 @@ public String toString() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ESQL_PAGE_MAPPING_TO_ITERATOR; + return TransportVersions.V_8_15_0; } } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/ResolvedEnrichPolicy.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/ResolvedEnrichPolicy.java index 1ee9039ccccb3..e891089aa55b5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/ResolvedEnrichPolicy.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/ResolvedEnrichPolicy.java @@ -36,7 +36,7 @@ public ResolvedEnrichPolicy(StreamInput in) throws IOException { private static Reader getEsFieldReader(StreamInput in) { if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ES_FIELD_CACHED_SERIALIZATION) - || in.getTransportVersion().isPatchFrom(TransportVersions.ESQL_ATTRIBUTE_CACHED_SERIALIZATION_8_15)) { + || in.getTransportVersion().isPatchFrom(TransportVersions.V_8_15_2)) { return EsField::readFrom; } return EsField::new; @@ -57,7 +57,7 @@ public void writeTo(StreamOutput out) throws IOException { (o, v) -> { var field = new EsField(v.getName(), v.getDataType(), v.getProperties(), v.isAggregatable(), v.isAlias()); if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ES_FIELD_CACHED_SERIALIZATION) - || out.getTransportVersion().isPatchFrom(TransportVersions.ESQL_ATTRIBUTE_CACHED_SERIALIZATION_8_15)) { + || out.getTransportVersion().isPatchFrom(TransportVersions.V_8_15_2)) { field.writeTo(o); } else { field.writeContent(o); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java index 08c249662c7d2..2c709de7717ce 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java @@ -78,9 +78,7 @@ private UnsupportedAttribute(StreamInput in) throws 
IOException { Source.readFrom((PlanStreamInput) in), ((PlanStreamInput) in).readCachedString(), in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ES_FIELD_CACHED_SERIALIZATION) - || in.getTransportVersion().isPatchFrom(TransportVersions.ESQL_ATTRIBUTE_CACHED_SERIALIZATION_8_15) - ? EsField.readFrom(in) - : new UnsupportedEsField(in), + || in.getTransportVersion().isPatchFrom(TransportVersions.V_8_15_2) ? EsField.readFrom(in) : new UnsupportedEsField(in), in.readOptionalString(), NameId.readFrom((PlanStreamInput) in) ); @@ -92,7 +90,7 @@ public void writeTo(StreamOutput out) throws IOException { Source.EMPTY.writeTo(out); ((PlanStreamOutput) out).writeCachedString(name()); if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ES_FIELD_CACHED_SERIALIZATION) - || out.getTransportVersion().isPatchFrom(TransportVersions.ESQL_ATTRIBUTE_CACHED_SERIALIZATION_8_15)) { + || out.getTransportVersion().isPatchFrom(TransportVersions.V_8_15_2)) { field().writeTo(out); } else { field().writeContent(out); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java index c832f64363048..9003cbec12d1e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java @@ -180,7 +180,7 @@ public NameId mapNameId(long l) { @SuppressWarnings("unchecked") public A readAttributeWithCache(CheckedFunction constructor) throws IOException { if (getTransportVersion().onOrAfter(TransportVersions.ESQL_ATTRIBUTE_CACHED_SERIALIZATION) - || getTransportVersion().isPatchFrom(TransportVersions.ESQL_ATTRIBUTE_CACHED_SERIALIZATION_8_15)) { + || getTransportVersion().isPatchFrom(TransportVersions.V_8_15_2)) { // it's safe to cast to int, since the max value for this is {@link 
PlanStreamOutput#MAX_SERIALIZED_ATTRIBUTES} int cacheId = Math.toIntExact(readZLong()); if (cacheId < 0) { @@ -220,7 +220,7 @@ private void cacheAttribute(int id, Attribute attr) { @SuppressWarnings("unchecked") public A readEsFieldWithCache() throws IOException { if (getTransportVersion().onOrAfter(TransportVersions.ESQL_ES_FIELD_CACHED_SERIALIZATION) - || getTransportVersion().isPatchFrom(TransportVersions.ESQL_ATTRIBUTE_CACHED_SERIALIZATION_8_15)) { + || getTransportVersion().isPatchFrom(TransportVersions.V_8_15_2)) { // it's safe to cast to int, since the max value for this is {@link PlanStreamOutput#MAX_SERIALIZED_ATTRIBUTES} int cacheId = Math.toIntExact(readZLong()); if (cacheId < 0) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java index 5e31a57ed669b..b633b10122eb3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java @@ -153,7 +153,7 @@ public void writeCachedBlock(Block block) throws IOException { @Override public boolean writeAttributeCacheHeader(Attribute attribute) throws IOException { if (getTransportVersion().onOrAfter(TransportVersions.ESQL_ATTRIBUTE_CACHED_SERIALIZATION) - || getTransportVersion().isPatchFrom(TransportVersions.ESQL_ATTRIBUTE_CACHED_SERIALIZATION_8_15)) { + || getTransportVersion().isPatchFrom(TransportVersions.V_8_15_2)) { Integer cacheId = attributeIdFromCache(attribute); if (cacheId != null) { writeZLong(cacheId); @@ -185,7 +185,7 @@ private int cacheAttribute(Attribute attr) { @Override public boolean writeEsFieldCacheHeader(EsField field) throws IOException { if (getTransportVersion().onOrAfter(TransportVersions.ESQL_ES_FIELD_CACHED_SERIALIZATION) - || 
getTransportVersion().isPatchFrom(TransportVersions.ESQL_ATTRIBUTE_CACHED_SERIALIZATION_8_15)) { + || getTransportVersion().isPatchFrom(TransportVersions.V_8_15_2)) { Integer cacheId = esFieldIdFromCache(field); if (cacheId != null) { writeZLong(cacheId); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java index 3b7240dcd693b..b2f314a0e8294 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java @@ -40,7 +40,7 @@ public enum AggregateType { METRICS; static void writeType(StreamOutput out, AggregateType type) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ADD_AGGREGATE_TYPE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeString(type.name()); } else if (type != STANDARD) { throw new IllegalStateException("cluster is not ready to support aggregate type [" + type + "]"); @@ -48,7 +48,7 @@ static void writeType(StreamOutput out, AggregateType type) throws IOException { } static AggregateType readType(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ADD_AGGREGATE_TYPE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { return AggregateType.valueOf(in.readString()); } else { return STANDARD; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java index 0043362f23b87..951fc7ad1cf29 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java @@ -87,7 +87,7 @@ public 
void writeTo(StreamOutput out) throws IOException { } private static boolean supportingEsSourceOptions(TransportVersion version) { - return version.onOrAfter(TransportVersions.V_8_14_0) && version.before(TransportVersions.ESQL_REMOVE_ES_SOURCE_OPTIONS); + return version.between(TransportVersions.V_8_14_0, TransportVersions.V_8_15_0); } @Override @@ -180,7 +180,7 @@ public String nodeString() { } public static IndexMode readIndexMode(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ADD_INDEX_MODE_TO_SOURCE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { return IndexMode.fromString(in.readString()); } else { return IndexMode.STANDARD; @@ -188,7 +188,7 @@ public static IndexMode readIndexMode(StreamInput in) throws IOException { } public static void writeIndexMode(StreamOutput out, IndexMode indexMode) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ADD_INDEX_MODE_TO_SOURCE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeString(indexMode.getName()); } else if (indexMode != IndexMode.STANDARD) { throw new IllegalStateException("not ready to support index mode [" + indexMode + "]"); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index 4b6a38a3e8762..266f07d22eaf5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -134,7 +134,6 @@ public class EsqlFeatures implements FeatureSpecification { /** * Blocks can be labelled with {@link org.elasticsearch.compute.data.Block.MvOrdering#SORTED_ASCENDING} for optimizations. - * C.f. {@link org.elasticsearch.TransportVersions#ESQL_MV_ORDERING_SORTED_ASCENDING}. 
*/ public static final NodeFeature MV_ORDERING_SORTED_ASCENDING = new NodeFeature("esql.mv_ordering_sorted_ascending"); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/Configuration.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/Configuration.java index b904f07bcb9c3..4ec2746b24ee4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/Configuration.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/Configuration.java @@ -96,7 +96,7 @@ public Configuration(BlockStreamInput in) throws IOException { } else { this.profile = false; } - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_REQUEST_TABLES)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { this.tables = in.readImmutableMap(i1 -> i1.readImmutableMap(i2 -> new Column((BlockStreamInput) i2))); } else { this.tables = Map.of(); @@ -124,7 +124,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { out.writeBoolean(profile); } - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_REQUEST_TABLES)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeMap(tables, (o1, columns) -> o1.writeMap(columns, StreamOutput::writeWriteable)); } if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_CCS_EXECUTION_INFO)) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java index a33a49cc52d94..478f2e6a21868 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java @@ -132,7 +132,7 @@ public String 
getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.SEMANTIC_QUERY; + return TransportVersions.V_8_15_0; } public static SemanticQueryBuilder fromXContent(XContentParser parser) throws IOException { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankBuilder.java index fd0ad220faa3b..e4093a91c2359 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankBuilder.java @@ -98,7 +98,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.TEXT_SIMILARITY_RERANKER_RETRIEVER; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockSecretSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockSecretSettings.java index 30a7dc9ad5a2e..0ca71d47eb1b6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockSecretSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockSecretSettings.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference.services.amazonbedrock; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -22,7 +23,6 @@ import 
java.util.Map; import java.util.Objects; -import static org.elasticsearch.TransportVersions.ML_INFERENCE_AMAZON_BEDROCK_ADDED; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredSecureString; import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.ACCESS_KEY_FIELD; import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.SECRET_KEY_FIELD; @@ -76,7 +76,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return ML_INFERENCE_AMAZON_BEDROCK_ADDED; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java index e1ed23a318e6c..96dd6d2b3690f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java @@ -9,6 +9,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.Strings; import org.elasticsearch.core.IOUtils; @@ -46,7 +47,6 @@ import java.util.Map; import java.util.Set; -import static org.elasticsearch.TransportVersions.ML_INFERENCE_AMAZON_BEDROCK_ADDED; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrDefaultEmpty; @@ -265,7 +265,7 @@ private static 
AmazonBedrockModel createModel( @Override public TransportVersion getMinimalSupportedVersion() { - return ML_INFERENCE_AMAZON_BEDROCK_ADDED; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceSettings.java index b572df4f1ee05..f11f5818f635c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceSettings.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference.services.amazonbedrock; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -24,7 +25,6 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.TransportVersions.ML_INFERENCE_AMAZON_BEDROCK_ADDED; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredEnum; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.MODEL_FIELD; @@ -101,7 +101,7 @@ protected AmazonBedrockServiceSettings( @Override public TransportVersion getMinimalSupportedVersion() { - return ML_INFERENCE_AMAZON_BEDROCK_ADDED; + return TransportVersions.V_8_15_0; } public String region() { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionTaskSettings.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionTaskSettings.java index c3db1465863e4..65c227bbbd412 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionTaskSettings.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference.services.amazonbedrock.completion; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -21,7 +22,6 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.TransportVersions.ML_INFERENCE_AMAZON_BEDROCK_ADDED; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalDoubleInRange; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalPositiveInteger; import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.MAX_NEW_TOKENS_FIELD; @@ -141,7 +141,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return ML_INFERENCE_AMAZON_BEDROCK_ADDED; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicService.java index 8d6f026e12ac5..c925053c38116 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicService.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicService.java @@ -198,7 +198,7 @@ protected void doChunkedInfer( @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_ANTHROPIC_INTEGRATION_ADDED; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/completion/AnthropicChatCompletionTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/completion/AnthropicChatCompletionTaskSettings.java index e8a6ca638c916..305502d624826 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/completion/AnthropicChatCompletionTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/completion/AnthropicChatCompletionTaskSettings.java @@ -167,7 +167,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_ANTHROPIC_INTEGRATION_ADDED; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java index ba36febc3c162..5525fff6b1a7c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java @@ -214,7 +214,7 @@ public String name() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_AZURE_AI_STUDIO; + return TransportVersions.V_8_15_0; } @Override diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/completion/AzureAiStudioChatCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/completion/AzureAiStudioChatCompletionServiceSettings.java index 2f8422be5ed90..ecc8f7fe8e920 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/completion/AzureAiStudioChatCompletionServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/completion/AzureAiStudioChatCompletionServiceSettings.java @@ -80,7 +80,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_AZURE_AI_STUDIO; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsServiceSettings.java index 8977ba8b12836..6ae1f773853d1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsServiceSettings.java @@ -166,7 +166,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_AZURE_AI_STUDIO; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java index e1c230b98a2f7..cd657113d7b61 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java @@ -324,7 +324,7 @@ private AzureOpenAiEmbeddingsModel updateModelWithEmbeddingDetails(AzureOpenAiEm @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_RATE_LIMIT_SETTINGS_ADDED; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java index 16f58574a8068..eb7eed2cedb41 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java @@ -168,7 +168,7 @@ protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder buil @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_AZURE_OPENAI_COMPLETIONS; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettings.java index 3008a543b8fea..7f90b866a2eab 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettings.java @@ -88,7 +88,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_AZURE_OPENAI_COMPLETIONS; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettings.java index 941a4bdeeb41a..9e8ca478987ec 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettings.java @@ -185,7 +185,7 @@ public AzureOpenAiEmbeddingsServiceSettings(StreamInput in) throws IOException { maxInputTokens = in.readOptionalVInt(); similarity = in.readOptionalEnum(SimilarityMeasure.class); - if (in.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_RATE_LIMIT_SETTINGS_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { rateLimitSettings = new RateLimitSettings(in); } else { rateLimitSettings = DEFAULT_RATE_LIMIT_SETTINGS; @@ -303,7 +303,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalVInt(maxInputTokens); out.writeOptionalEnum(SimilarityMeasure.translateSimilarity(similarity, out.getTransportVersion())); - if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_RATE_LIMIT_SETTINGS_ADDED)) { + if 
(out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { rateLimitSettings.writeTo(out); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java index 9538588700e75..ce0fa0a885a20 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java @@ -325,7 +325,7 @@ static SimilarityMeasure defaultSimilarity() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_RATE_LIMIT_SETTINGS_ADDED; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java index 7f28459e9b8be..a1943b339a561 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java @@ -124,7 +124,7 @@ public CohereServiceSettings(StreamInput in) throws IOException { maxInputTokens = in.readOptionalVInt(); modelId = in.readOptionalString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_RATE_LIMIT_SETTINGS_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { rateLimitSettings = new RateLimitSettings(in); } else { rateLimitSettings = DEFAULT_RATE_LIMIT_SETTINGS; @@ -219,7 +219,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalVInt(maxInputTokens); out.writeOptionalString(modelId); - if 
(out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_RATE_LIMIT_SETTINGS_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { rateLimitSettings.writeTo(out); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/completion/CohereCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/completion/CohereCompletionServiceSettings.java index ba9e81b461f9f..be241f3aaa7fc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/completion/CohereCompletionServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/completion/CohereCompletionServiceSettings.java @@ -114,7 +114,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_COHERE_COMPLETION_ADDED; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java index 1132dff34ed6e..a3d2483a068e2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java @@ -101,7 +101,7 @@ public CohereRerankServiceSettings(StreamInput in) throws IOException { this.modelId = in.readOptionalString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_RATE_LIMIT_SETTINGS_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { this.rateLimitSettings = new RateLimitSettings(in); } else 
{ this.rateLimitSettings = DEFAULT_RATE_LIMIT_SETTINGS; @@ -172,7 +172,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(modelId); - if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_RATE_LIMIT_SETTINGS_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { rateLimitSettings.writeTo(out); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettings.java index 381c97969e79f..133be5e2b7623 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettings.java @@ -139,7 +139,7 @@ public CustomElandInternalTextEmbeddingServiceSettings( public CustomElandInternalTextEmbeddingServiceSettings(StreamInput in) throws IOException { super(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_ELAND_SETTINGS_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { dimensions = in.readOptionalVInt(); similarityMeasure = in.readEnum(SimilarityMeasure.class); elementType = in.readEnum(DenseVectorFieldMapper.ElementType.class); @@ -197,7 +197,7 @@ public String getWriteableName() { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_ELAND_SETTINGS_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeOptionalVInt(dimensions); out.writeEnum(similarityMeasure); out.writeEnum(elementType); diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java index c685441271194..f583caeac8ee3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java @@ -213,7 +213,7 @@ public Model parsePersistedConfig(String inferenceEntityId, TaskType taskType, M @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_GOOGLE_AI_STUDIO_COMPLETION_ADDED; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionServiceSettings.java index 7c0b812ee213b..676fec13d1ddc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionServiceSettings.java @@ -102,7 +102,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_GOOGLE_AI_STUDIO_COMPLETION_ADDED; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/embeddings/GoogleAiStudioEmbeddingsServiceSettings.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/embeddings/GoogleAiStudioEmbeddingsServiceSettings.java index 7608f48d0638d..2ddf8efcbc112 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/embeddings/GoogleAiStudioEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/embeddings/GoogleAiStudioEmbeddingsServiceSettings.java @@ -154,7 +154,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_GOOGLE_AI_STUDIO_EMBEDDINGS_ADDED; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiSecretSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiSecretSettings.java index 20dbadb9b3eae..44e16fa058506 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiSecretSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiSecretSettings.java @@ -82,7 +82,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_GOOGLE_VERTEX_AI_EMBEDDINGS_ADDED; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java index ece23d4226fcd..36fb183f6de70 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java @@ -151,7 +151,7 @@ public Model parsePersistedConfig(String inferenceEntityId, TaskType taskType, M @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_GOOGLE_VERTEX_AI_EMBEDDINGS_ADDED; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsServiceSettings.java index 097ce6240439b..32c3008a802c2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsServiceSettings.java @@ -215,7 +215,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_GOOGLE_VERTEX_AI_EMBEDDINGS_ADDED; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsTaskSettings.java index b7242100178a3..dcdbbda33575f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsTaskSettings.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsTaskSettings.java @@ -78,7 +78,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_GOOGLE_VERTEX_AI_EMBEDDINGS_ADDED; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankServiceSettings.java index 431f704c10091..d123d14b67f0e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankServiceSettings.java @@ -99,7 +99,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_GOOGLE_VERTEX_AI_RERANKING_ADDED; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankTaskSettings.java index 64bec7e6cfeef..0bc3df581d7b2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankTaskSettings.java @@ -75,7 +75,7 @@ public String getWriteableName() { @Override public TransportVersion 
getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_GOOGLE_VERTEX_AI_RERANKING_ADDED; + return TransportVersions.V_8_15_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java index 14af29bab9078..752d1dd605cd7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java @@ -144,6 +144,6 @@ public String name() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_RATE_LIMIT_SETTINGS_ADDED; + return TransportVersions.V_8_15_0; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java index c136e73a0c452..7429153835ee3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java @@ -120,7 +120,7 @@ public HuggingFaceServiceSettings(StreamInput in) throws IOException { maxInputTokens = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_RATE_LIMIT_SETTINGS_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { rateLimitSettings = new RateLimitSettings(in); } else { rateLimitSettings = DEFAULT_RATE_LIMIT_SETTINGS; @@ -171,7 +171,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalVInt(maxInputTokens); } - if 
(out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_RATE_LIMIT_SETTINGS_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { rateLimitSettings.writeTo(out); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java index b48e71867ea6d..be5f595d2c0fb 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java @@ -76,7 +76,7 @@ public HuggingFaceElserServiceSettings(String url) { public HuggingFaceElserServiceSettings(StreamInput in) throws IOException { uri = createUri(in.readString()); - if (in.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_RATE_LIMIT_SETTINGS_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { rateLimitSettings = new RateLimitSettings(in); } else { rateLimitSettings = DEFAULT_RATE_LIMIT_SETTINGS; @@ -134,7 +134,7 @@ public TransportVersion getMinimalSupportedVersion() { public void writeTo(StreamOutput out) throws IOException { out.writeString(uri.toString()); - if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_RATE_LIMIT_SETTINGS_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { rateLimitSettings.writeTo(out); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java index 8af817d06fefa..8ae9b91b599d9 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java @@ -9,6 +9,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; @@ -41,7 +42,6 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.TransportVersions.ADD_MISTRAL_EMBEDDINGS_INFERENCE; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrDefaultEmpty; @@ -206,7 +206,7 @@ public Model parsePersistedConfig(String modelId, TaskType taskType, Map PARSER = new ConstructingObjectParser<>(NAME, a -> { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index 90566e25b4ea5..c4cf3127b897c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -139,7 +139,6 @@ import java.util.stream.Collectors; import static org.elasticsearch.TransportVersions.ADD_MANAGE_ROLES_PRIVILEGE; -import static org.elasticsearch.TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS; import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING; import static 
org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY; @@ -148,6 +147,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptor.WORKFLOWS_RESTRICTION_VERSION; +import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_REMOTE_CLUSTER_PRIVS; import static org.elasticsearch.xpack.security.Security.SECURITY_CRYPTO_THREAD_POOL_NAME; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.Availability.PRIMARY_SHARDS; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.Availability.SEARCH_SHARDS; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index 9ddda193dba39..7c242fb07b681 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -81,7 +81,6 @@ import java.util.function.BiConsumer; import java.util.function.Supplier; -import static org.elasticsearch.TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS; import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.index.query.QueryBuilders.existsQuery; import static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING; @@ -91,6 +90,8 @@ import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.security.SecurityField.DOCUMENT_LEVEL_SECURITY_FEATURE; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ROLE_TYPE; +import static 
org.elasticsearch.xpack.core.security.authz.RoleDescriptor.SECURITY_ROLE_DESCRIPTION; +import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_REMOTE_CLUSTER_PRIVS; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.Availability.PRIMARY_SHARDS; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.Availability.SEARCH_SHARDS; import static org.elasticsearch.xpack.security.support.SecurityMigrations.ROLE_METADATA_FLATTENED_MIGRATION_VERSION; @@ -468,24 +469,25 @@ private Exception validateRoleDescriptor(RoleDescriptor role) { } else if (role.hasRemoteClusterPermissions() && clusterService.state().getMinTransportVersion().before(ROLE_REMOTE_CLUSTER_PRIVS)) { return new IllegalStateException( - "all nodes must have version [" + ROLE_REMOTE_CLUSTER_PRIVS + "] or higher to support remote cluster privileges" + "all nodes must have version [" + + ROLE_REMOTE_CLUSTER_PRIVS.toReleaseVersion() + + "] or higher to support remote cluster privileges" ); - } else if (role.hasDescription() - && clusterService.state().getMinTransportVersion().before(TransportVersions.SECURITY_ROLE_DESCRIPTION)) { + } else if (role.hasDescription() && clusterService.state().getMinTransportVersion().before(SECURITY_ROLE_DESCRIPTION)) { + return new IllegalStateException( + "all nodes must have version [" + + SECURITY_ROLE_DESCRIPTION.toReleaseVersion() + + "] or higher to support specifying role description" + ); + } else if (Arrays.stream(role.getConditionalClusterPrivileges()) + .anyMatch(privilege -> privilege instanceof ConfigurableClusterPrivileges.ManageRolesPrivilege) + && clusterService.state().getMinTransportVersion().before(TransportVersions.ADD_MANAGE_ROLES_PRIVILEGE)) { return new IllegalStateException( "all nodes must have version [" - + TransportVersions.SECURITY_ROLE_DESCRIPTION.toReleaseVersion() - + "] or higher to support specifying role description" + + 
TransportVersions.ADD_MANAGE_ROLES_PRIVILEGE.toReleaseVersion() + + "] or higher to support the manage roles privilege" ); - } else if (Arrays.stream(role.getConditionalClusterPrivileges()) - .anyMatch(privilege -> privilege instanceof ConfigurableClusterPrivileges.ManageRolesPrivilege) - && clusterService.state().getMinTransportVersion().before(TransportVersions.ADD_MANAGE_ROLES_PRIVILEGE)) { - return new IllegalStateException( - "all nodes must have version [" - + TransportVersions.ADD_MANAGE_ROLES_PRIVILEGE.toReleaseVersion() - + "] or higher to support the manage roles privilege" - ); - } + } try { DLSRoleQueryValidator.validateQueryField(role.getIndicesPrivileges(), xContentRegistry); } catch (ElasticsearchException | IllegalArgumentException e) { @@ -645,8 +647,7 @@ XContentBuilder createRoleXContentBuilder(RoleDescriptor role) throws IOExceptio && clusterService.state().getMinTransportVersion().onOrAfter(ROLE_REMOTE_CLUSTER_PRIVS)) { builder.array(RoleDescriptor.Fields.REMOTE_CLUSTER.getPreferredName(), RemoteClusterPermissions.NONE); } - if (role.hasDescription() == false - && clusterService.state().getMinTransportVersion().onOrAfter(TransportVersions.SECURITY_ROLE_DESCRIPTION)) { + if (role.hasDescription() == false && clusterService.state().getMinTransportVersion().onOrAfter(SECURITY_ROLE_DESCRIPTION)) { builder.field(RoleDescriptor.Fields.DESCRIPTION.getPreferredName(), ""); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index fa6eb307933ec..996291c52c71f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -158,7 +158,6 @@ import java.util.stream.IntStream; import java.util.stream.LongStream; -import static 
org.elasticsearch.TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; import static org.elasticsearch.test.ActionListenerUtils.anyActionListener; @@ -170,6 +169,7 @@ import static org.elasticsearch.xpack.core.security.authc.AuthenticationField.API_KEY_METADATA_KEY; import static org.elasticsearch.xpack.core.security.authc.AuthenticationField.API_KEY_TYPE_KEY; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptor.WORKFLOWS_RESTRICTION_VERSION; +import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_REMOTE_CLUSTER_PRIVS; import static org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR; import static org.elasticsearch.xpack.core.security.test.TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7; import static org.elasticsearch.xpack.security.Security.SECURITY_CRYPTO_THREAD_POOL_NAME; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java index bfa358d0b7d6e..2b8a77d63588a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java @@ -98,6 +98,7 @@ import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomClusterPrivileges; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRemoteIndicesPrivileges; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRoleDescriptorMetadata; +import static 
org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_REMOTE_CLUSTER_PRIVS; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.contains; @@ -488,7 +489,7 @@ enum TEST_MODE { switch (testMode) { case REMOTE_CLUSTER_PRIVS -> { transportVersionBeforeAdvancedRemoteClusterSecurity = TransportVersionUtils.getPreviousVersion( - TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS + ROLE_REMOTE_CLUSTER_PRIVS ); remoteIndicesPrivileges = null; } @@ -549,7 +550,7 @@ enum TEST_MODE { containsString( "all nodes must have version [" + (TEST_MODE.REMOTE_CLUSTER_PRIVS.equals(testMode) - ? TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS + ? ROLE_REMOTE_CLUSTER_PRIVS.toReleaseVersion() : TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY.toReleaseVersion()) + "] or higher to support remote " + (remoteIndicesPrivileges != null ? "indices" : "cluster") diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RolesBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RolesBackwardsCompatibilityIT.java index 650779cfbc85d..ea1b2cdac5a1f 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RolesBackwardsCompatibilityIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RolesBackwardsCompatibilityIT.java @@ -27,6 +27,7 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptor.SECURITY_ROLE_DESCRIPTION; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomApplicationPrivileges; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomIndicesPrivileges; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomManageRolesPrivileges; @@ -43,8 +44,8 @@ public 
class RolesBackwardsCompatibilityIT extends AbstractUpgradeTestCase { public void testRolesWithDescription() throws Exception { assumeTrue( - "The role description is supported after transport version: " + TransportVersions.SECURITY_ROLE_DESCRIPTION, - minimumTransportVersion().before(TransportVersions.SECURITY_ROLE_DESCRIPTION) + "The role description is supported after transport version: " + SECURITY_ROLE_DESCRIPTION, + minimumTransportVersion().before(SECURITY_ROLE_DESCRIPTION) ); switch (CLUSTER_TYPE) { case OLD -> { @@ -75,7 +76,7 @@ public void testRolesWithDescription() throws Exception { } case MIXED -> { try { - this.createClientsByVersion(TransportVersions.SECURITY_ROLE_DESCRIPTION); + this.createClientsByVersion(SECURITY_ROLE_DESCRIPTION); // succeed when role description is not provided final String initialRole = randomRoleDescriptorSerialized(); createRole(client(), "my-valid-mixed-role", initialRole); @@ -116,7 +117,7 @@ public void testRolesWithDescription() throws Exception { e.getMessage(), containsString( "all nodes must have version [" - + TransportVersions.SECURITY_ROLE_DESCRIPTION.toReleaseVersion() + + SECURITY_ROLE_DESCRIPTION.toReleaseVersion() + "] or higher to support specifying role description" ) ); @@ -130,7 +131,7 @@ public void testRolesWithDescription() throws Exception { e.getMessage(), containsString( "all nodes must have version [" - + TransportVersions.SECURITY_ROLE_DESCRIPTION.toReleaseVersion() + + SECURITY_ROLE_DESCRIPTION.toReleaseVersion() + "] or higher to support specifying role description" ) ); From 7a8f9b1579ca20fe3262f42b03f69f7ce1f1e8bb Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Thu, 17 Oct 2024 10:36:31 +0200 Subject: [PATCH 172/449] Add diagnostic output to dra workflow scripts (#114973) --- .buildkite/scripts/dra-workflow.sh | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.buildkite/scripts/dra-workflow.sh b/.buildkite/scripts/dra-workflow.sh index ecfb8088072a0..81b8225e443a4 100755 --- 
a/.buildkite/scripts/dra-workflow.sh +++ b/.buildkite/scripts/dra-workflow.sh @@ -22,6 +22,7 @@ if [[ "$BRANCH" == "main" ]]; then fi ES_VERSION=$(grep elasticsearch build-tools-internal/version.properties | sed "s/elasticsearch *= *//g") +echo "ES_VERSION=$ES_VERSION" VERSION_SUFFIX="" if [[ "$WORKFLOW" == "snapshot" ]]; then @@ -29,7 +30,10 @@ if [[ "$WORKFLOW" == "snapshot" ]]; then fi BEATS_BUILD_ID="$(./.ci/scripts/resolve-dra-manifest.sh beats "$RM_BRANCH" "$ES_VERSION" "$WORKFLOW")" +echo "BEATS_BUILD_ID=$BEATS_BUILD_ID" + ML_CPP_BUILD_ID="$(./.ci/scripts/resolve-dra-manifest.sh ml-cpp "$RM_BRANCH" "$ES_VERSION" "$WORKFLOW")" +echo "ML_CPP_BUILD_ID=$ML_CPP_BUILD_ID" LICENSE_KEY_ARG="" BUILD_SNAPSHOT_ARG="" From 9b466087eb87b16a39362761c7f524fa1ba5523a Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 17 Oct 2024 10:40:13 +0200 Subject: [PATCH 173/449] Reduce the number of SFM singletons. (#114969) This remove all recovery source specific SFM singletons. Whether recovery source is enabled can be checked via `DocumentParserContext`. This reduces the number of SFM instances by half. 
--- .../elasticsearch/index/IndexSettings.java | 11 ++ .../index/mapper/SourceFieldMapper.java | 155 +++--------------- .../mapper/DynamicFieldsBuilderTests.java | 2 +- .../query/SearchExecutionContextTests.java | 2 +- 4 files changed, 37 insertions(+), 133 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java index 347b44a22e7c0..25e9c1e3701fb 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -30,6 +30,7 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.translog.Translog; +import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.ingest.IngestService; import org.elasticsearch.node.Node; @@ -830,6 +831,7 @@ private void setRetentionLeaseMillis(final TimeValue retentionLease) { private volatile boolean skipIgnoredSourceRead; private volatile boolean syntheticSourceSecondDocParsingPassEnabled; private final SourceFieldMapper.Mode indexMappingSourceMode; + private final boolean recoverySourceEnabled; /** * The maximum number of refresh listeners allows on this shard. 
@@ -992,6 +994,7 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti skipIgnoredSourceRead = scopedSettings.get(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING); syntheticSourceSecondDocParsingPassEnabled = scopedSettings.get(SYNTHETIC_SOURCE_SECOND_DOC_PARSING_PASS_SETTING); indexMappingSourceMode = scopedSettings.get(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING); + recoverySourceEnabled = RecoverySettings.INDICES_RECOVERY_SOURCE_ENABLED_SETTING.get(nodeSettings); scopedSettings.addSettingsUpdateConsumer( MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING, @@ -1687,6 +1690,14 @@ public SourceFieldMapper.Mode getIndexMappingSourceMode() { return indexMappingSourceMode; } + /** + * @return Whether recovery source should be enabled if needed. + * Note that this is a node setting, and this setting is not sourced from index settings. + */ + public boolean isRecoverySourceEnabled() { + return recoverySourceEnabled; + } + /** * The bounds for {@code @timestamp} on this index or * {@code null} if there are no bounds. 
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index ea1ffdb7c019f..dd09dc6ea0c5c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -39,8 +39,6 @@ import java.util.List; import java.util.Locale; -import static org.elasticsearch.indices.recovery.RecoverySettings.INDICES_RECOVERY_SOURCE_ENABLED_SETTING; - public class SourceFieldMapper extends MetadataFieldMapper { public static final NodeFeature SYNTHETIC_SOURCE_FALLBACK = new NodeFeature("mapper.source.synthetic_source_fallback"); public static final NodeFeature SYNTHETIC_SOURCE_STORED_FIELDS_ADVANCE_FIX = new NodeFeature( @@ -84,8 +82,7 @@ public enum Mode { Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, - null, - true + null ); private static final SourceFieldMapper DEFAULT_DISABLED = new SourceFieldMapper( @@ -93,17 +90,7 @@ public enum Mode { Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, - null, - true - ); - - private static final SourceFieldMapper DEFAULT_DISABLED_NO_RECOVERY_SOURCE = new SourceFieldMapper( - Mode.DISABLED, - Explicit.IMPLICIT_TRUE, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - null, - false + null ); private static final SourceFieldMapper DEFAULT_SYNTHETIC = new SourceFieldMapper( @@ -111,26 +98,7 @@ public enum Mode { Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, - null, - true - ); - - private static final SourceFieldMapper DEFAULT_SYNTHETIC_NO_RECOVERY_SOURCE = new SourceFieldMapper( - Mode.SYNTHETIC, - Explicit.IMPLICIT_TRUE, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - null, - false - ); - - private static final SourceFieldMapper DEFAULT_NO_RECOVERY_SOURCE = new SourceFieldMapper( - null, - Explicit.IMPLICIT_TRUE, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - null, - false + null ); 
private static final SourceFieldMapper TSDB_DEFAULT = new SourceFieldMapper( @@ -138,8 +106,7 @@ public enum Mode { Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, - IndexMode.TIME_SERIES, - true + IndexMode.TIME_SERIES ); private static final SourceFieldMapper TSDB_DEFAULT_STORED = new SourceFieldMapper( @@ -147,26 +114,7 @@ public enum Mode { Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, - IndexMode.TIME_SERIES, - true - ); - - private static final SourceFieldMapper TSDB_DEFAULT_NO_RECOVERY_SOURCE = new SourceFieldMapper( - Mode.SYNTHETIC, - Explicit.IMPLICIT_TRUE, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - IndexMode.TIME_SERIES, - false - ); - - private static final SourceFieldMapper TSDB_DEFAULT_NO_RECOVERY_SOURCE_STORED = new SourceFieldMapper( - Mode.STORED, - Explicit.IMPLICIT_TRUE, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - IndexMode.TIME_SERIES, - false + IndexMode.TIME_SERIES ); private static final SourceFieldMapper LOGSDB_DEFAULT = new SourceFieldMapper( @@ -174,8 +122,7 @@ public enum Mode { Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, - IndexMode.LOGSDB, - true + IndexMode.LOGSDB ); private static final SourceFieldMapper LOGSDB_DEFAULT_STORED = new SourceFieldMapper( @@ -183,26 +130,7 @@ public enum Mode { Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, - IndexMode.LOGSDB, - true - ); - - private static final SourceFieldMapper LOGSDB_DEFAULT_NO_RECOVERY_SOURCE = new SourceFieldMapper( - Mode.SYNTHETIC, - Explicit.IMPLICIT_TRUE, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - IndexMode.LOGSDB, - false - ); - - private static final SourceFieldMapper LOGSDB_DEFAULT_NO_RECOVERY_SOURCE_STORED = new SourceFieldMapper( - Mode.STORED, - Explicit.IMPLICIT_TRUE, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - IndexMode.LOGSDB, - false + IndexMode.LOGSDB ); /* @@ -214,17 +142,7 @@ public enum Mode { Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, - 
IndexMode.TIME_SERIES, - true - ); - - private static final SourceFieldMapper TSDB_LEGACY_DEFAULT_NO_RECOVERY_SOURCE = new SourceFieldMapper( - null, - Explicit.IMPLICIT_TRUE, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - IndexMode.TIME_SERIES, - false + IndexMode.TIME_SERIES ); public static class Defaults { @@ -285,20 +203,12 @@ public static class Builder extends MetadataFieldMapper.Builder { private final boolean supportsNonDefaultParameterValues; - private final boolean enableRecoverySource; - - public Builder( - IndexMode indexMode, - final Settings settings, - boolean supportsCheckForNonDefaultParams, - boolean enableRecoverySource - ) { + public Builder(IndexMode indexMode, final Settings settings, boolean supportsCheckForNonDefaultParams) { super(Defaults.NAME); this.settings = settings; this.indexMode = indexMode; this.supportsNonDefaultParameterValues = supportsCheckForNonDefaultParams == false || settings.getAsBoolean(LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, true); - this.enableRecoverySource = enableRecoverySource; } public Builder setSynthetic() { @@ -333,7 +243,7 @@ public SourceFieldMapper build() { ? INDEX_MAPPER_SOURCE_MODE_SETTING.get(settings) : mode.get(); if (isDefault(sourceMode)) { - return resolveSourceMode(indexMode, sourceMode == null ? Mode.STORED : sourceMode, enableRecoverySource); + return resolveSourceMode(indexMode, sourceMode == null ? 
Mode.STORED : sourceMode); } if (supportsNonDefaultParameterValues == false) { @@ -364,8 +274,7 @@ public SourceFieldMapper build() { enabled.get(), includes.getValue().toArray(Strings.EMPTY_ARRAY), excludes.getValue().toArray(Strings.EMPTY_ARRAY), - indexMode, - enableRecoverySource + indexMode ); if (indexMode != null) { indexMode.validateSourceFieldMapper(sourceFieldMapper); @@ -375,16 +284,16 @@ public SourceFieldMapper build() { } - private static SourceFieldMapper resolveSourceMode(final IndexMode indexMode, final Mode sourceMode, boolean enableRecoverySource) { + private static SourceFieldMapper resolveSourceMode(final IndexMode indexMode, final Mode sourceMode) { switch (indexMode) { case STANDARD: switch (sourceMode) { case SYNTHETIC: - return enableRecoverySource ? DEFAULT_SYNTHETIC : DEFAULT_SYNTHETIC_NO_RECOVERY_SOURCE; + return DEFAULT_SYNTHETIC; case STORED: - return enableRecoverySource ? DEFAULT : DEFAULT_NO_RECOVERY_SOURCE; + return DEFAULT; case DISABLED: - return enableRecoverySource ? DEFAULT_DISABLED : DEFAULT_DISABLED_NO_RECOVERY_SOURCE; + return DEFAULT_DISABLED; default: throw new IllegalArgumentException("Unsupported source mode: " + sourceMode); } @@ -392,15 +301,9 @@ private static SourceFieldMapper resolveSourceMode(final IndexMode indexMode, fi case LOGSDB: switch (sourceMode) { case SYNTHETIC: - return enableRecoverySource - ? (indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT : LOGSDB_DEFAULT) - : (indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT_NO_RECOVERY_SOURCE : LOGSDB_DEFAULT_NO_RECOVERY_SOURCE); + return indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT : LOGSDB_DEFAULT; case STORED: - return enableRecoverySource - ? (indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT_STORED : LOGSDB_DEFAULT_STORED) - : (indexMode == IndexMode.TIME_SERIES - ? TSDB_DEFAULT_NO_RECOVERY_SOURCE_STORED - : LOGSDB_DEFAULT_NO_RECOVERY_SOURCE_STORED); + return indexMode == IndexMode.TIME_SERIES ? 
TSDB_DEFAULT_STORED : LOGSDB_DEFAULT_STORED; case DISABLED: throw new IllegalArgumentException("_source can not be disabled in index using [" + indexMode + "] index mode"); default: @@ -413,21 +316,19 @@ private static SourceFieldMapper resolveSourceMode(final IndexMode indexMode, fi public static final TypeParser PARSER = new ConfigurableTypeParser(c -> { final IndexMode indexMode = c.getIndexSettings().getMode(); - boolean enableRecoverySource = INDICES_RECOVERY_SOURCE_ENABLED_SETTING.get(c.getSettings()); final Mode settingSourceMode = INDEX_MAPPER_SOURCE_MODE_SETTING.get(c.getSettings()); if (indexMode.isSyntheticSourceEnabled()) { if (indexMode == IndexMode.TIME_SERIES && c.getIndexSettings().getIndexVersionCreated().before(IndexVersions.V_8_7_0)) { - return enableRecoverySource ? TSDB_LEGACY_DEFAULT : TSDB_LEGACY_DEFAULT_NO_RECOVERY_SOURCE; + return TSDB_LEGACY_DEFAULT; } } - return resolveSourceMode(indexMode, settingSourceMode == null ? Mode.STORED : settingSourceMode, enableRecoverySource); + return resolveSourceMode(indexMode, settingSourceMode == null ? 
Mode.STORED : settingSourceMode); }, c -> new Builder( c.getIndexSettings().getMode(), c.getSettings(), - c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), - INDICES_RECOVERY_SOURCE_ENABLED_SETTING.get(c.getSettings()) + c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK) ) ); @@ -480,16 +381,8 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { private final SourceFilter sourceFilter; private final IndexMode indexMode; - private final boolean enableRecoverySource; - - private SourceFieldMapper( - Mode mode, - Explicit enabled, - String[] includes, - String[] excludes, - IndexMode indexMode, - boolean enableRecoverySource - ) { + + private SourceFieldMapper(Mode mode, Explicit enabled, String[] includes, String[] excludes, IndexMode indexMode) { super(new SourceFieldType((enabled.explicit() && enabled.value()) || (enabled.explicit() == false && mode != Mode.DISABLED))); assert enabled.explicit() == false || mode == null; this.mode = mode; @@ -502,7 +395,6 @@ private SourceFieldMapper( } this.complete = stored() && sourceFilter == null; this.indexMode = indexMode; - this.enableRecoverySource = enableRecoverySource; } private static SourceFilter buildSourceFilter(String[] includes, String[] excludes) { @@ -547,6 +439,7 @@ public void preParse(DocumentParserContext context) throws IOException { context.doc().add(new StoredField(fieldType().name(), ref.bytes, ref.offset, ref.length)); } + boolean enableRecoverySource = context.indexSettings().isRecoverySourceEnabled(); if (enableRecoverySource && originalSource != null && adaptedSource != originalSource) { // if we omitted source or modified it we add the _recovery_source to ensure we have it for ops based recovery BytesRef ref = originalSource.toBytesRef(); @@ -575,7 +468,7 @@ protected String contentType() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(indexMode, Settings.EMPTY, false, 
enableRecoverySource).init(this); + return new Builder(indexMode, Settings.EMPTY, false).init(this); } /** diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java index 358ad07664870..399740e6200e6 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java @@ -69,7 +69,7 @@ public void testCreateDynamicStringFieldAsKeywordForDimension() throws IOExcepti XContentParser parser = createParser(JsonXContent.jsonXContent, source); SourceToParse sourceToParse = new SourceToParse("test", new BytesArray(source), XContentType.JSON); - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false, true).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Optional.empty()).add( new PassThroughObjectMapper.Builder("labels").setPriority(0).setContainsDimensions().dynamic(ObjectMapper.Dynamic.TRUE) ).build(MapperBuilderContext.root(false, false)); diff --git a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java index 658176b1d31dd..fdc18264e2299 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java @@ -384,7 +384,7 @@ public void testSearchRequestRuntimeFieldsAndMultifieldDetection() { public void testSyntheticSourceSearchLookup() throws IOException { // Build a mapping using synthetic source - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false, 
true).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Optional.empty()).add( new KeywordFieldMapper.Builder("cat", IndexVersion.current()).ignoreAbove(100) ).build(MapperBuilderContext.root(true, false)); From 5cace0f0473dc8922e335bb1d2cd60fa3e2c417a Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Thu, 17 Oct 2024 04:43:05 -0400 Subject: [PATCH 174/449] [ML] Temporarily ignore inference index (#114928) Until we can figure out where in the tests the index is being created, temporarily ignore deleting it along with the other system indices. Relates #114748 --- muted-tests.yml | 39 ------------------- .../test/rest/ESRestTestCase.java | 2 + 2 files changed, 2 insertions(+), 39 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 2e623fa94e06a..4cabd2a912512 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -306,57 +306,18 @@ tests: - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/rest-api/usage/line_38} issue: https://github.com/elastic/elasticsearch/issues/113694 -- class: org.elasticsearch.xpack.enrich.EnrichRestIT - method: test {p0=enrich/10_basic/Test using the deprecated elasticsearch_version field results in a warning} - issue: https://github.com/elastic/elasticsearch/issues/114748 - class: org.elasticsearch.xpack.eql.EqlRestIT method: testIndexWildcardPatterns issue: https://github.com/elastic/elasticsearch/issues/114749 -- class: org.elasticsearch.xpack.eql.EqlRestIT - method: testBadRequests - issue: https://github.com/elastic/elasticsearch/issues/114752 -- class: org.elasticsearch.xpack.enrich.EnrichRestIT - method: test {p0=enrich/20_standard_index/enrich stats REST response structure} - issue: https://github.com/elastic/elasticsearch/issues/114753 -- class: org.elasticsearch.xpack.enrich.EnrichRestIT - method: test 
{p0=enrich/30_tsdb_index/enrich documents over _bulk} - issue: https://github.com/elastic/elasticsearch/issues/114761 -- class: org.elasticsearch.xpack.enrich.EnrichRestIT - method: test {p0=enrich/20_standard_index/enrich documents over _bulk via an alias} - issue: https://github.com/elastic/elasticsearch/issues/114763 -- class: org.elasticsearch.xpack.enrich.EnrichRestIT - method: test {p0=enrich/10_basic/Test enrich crud apis} - issue: https://github.com/elastic/elasticsearch/issues/114766 -- class: org.elasticsearch.xpack.enrich.EnrichRestIT - method: test {p0=enrich/20_standard_index/enrich documents over _bulk} - issue: https://github.com/elastic/elasticsearch/issues/114768 -- class: org.elasticsearch.xpack.enrich.EnrichRestIT - method: test {p0=enrich/50_data_stream/enrich documents over _bulk via a data stream} - issue: https://github.com/elastic/elasticsearch/issues/114769 -- class: org.elasticsearch.xpack.eql.EqlRestValidationIT - method: testDefaultIndicesOptions - issue: https://github.com/elastic/elasticsearch/issues/114771 - class: org.elasticsearch.xpack.enrich.EnrichIT method: testEnrichSpecialTypes issue: https://github.com/elastic/elasticsearch/issues/114773 - class: org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT method: testEveryActionIsEitherOperatorOnlyOrNonOperator issue: https://github.com/elastic/elasticsearch/issues/102992 -- class: org.elasticsearch.xpack.enrich.EnrichIT - method: testDeleteExistingPipeline - issue: https://github.com/elastic/elasticsearch/issues/114775 - class: org.elasticsearch.xpack.inference.rest.ServerSentEventsRestActionListenerTests method: testNoStream issue: https://github.com/elastic/elasticsearch/issues/114788 -- class: org.elasticsearch.xpack.eql.EqlRestValidationIT - method: testAllowNoIndicesOption - issue: https://github.com/elastic/elasticsearch/issues/114789 -- class: org.elasticsearch.xpack.eql.EqlStatsIT - method: testEqlRestUsage - issue: 
https://github.com/elastic/elasticsearch/issues/114790 -- class: org.elasticsearch.xpack.eql.EqlRestIT - method: testUnicodeChars - issue: https://github.com/elastic/elasticsearch/issues/114791 - class: org.elasticsearch.ingest.geoip.HttpClientTests issue: https://github.com/elastic/elasticsearch/issues/112618 - class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityWithApmTracingRestIT diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index d17016f850300..e5b23158d4fd4 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -1121,6 +1121,8 @@ protected static void wipeAllIndices(boolean preserveSecurityIndices) throws IOE if (preserveSecurityIndices) { indexPatterns.add("-.security-*"); } + // always preserve inference index + indexPatterns.add("-.inference"); final Request deleteRequest = new Request("DELETE", Strings.collectionToCommaDelimitedString(indexPatterns)); deleteRequest.addParameter("expand_wildcards", "open,closed,hidden"); final Response response = adminClient().performRequest(deleteRequest); From 8e26d18029b66de8e6fa65b6fa1de350ac6ed498 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Thu, 17 Oct 2024 11:35:30 +0200 Subject: [PATCH 175/449] [DOCS] Adds Update inference API reference docs (#114803) * [DOCS] Adds Update inference API reference docs. * [DOCS] Includes update inference API docs in index. 
--- .../inference/inference-apis.asciidoc | 2 + .../inference/update-inference.asciidoc | 87 +++++++++++++++++++ 2 files changed, 89 insertions(+) create mode 100644 docs/reference/inference/update-inference.asciidoc diff --git a/docs/reference/inference/inference-apis.asciidoc b/docs/reference/inference/inference-apis.asciidoc index 8fdf8aecc2ae5..5cb03d950f68c 100644 --- a/docs/reference/inference/inference-apis.asciidoc +++ b/docs/reference/inference/inference-apis.asciidoc @@ -21,6 +21,7 @@ the following APIs to manage {infer} models and perform {infer}: * <> * <> * <> +* <> [[inference-landscape]] .A representation of the Elastic inference landscape @@ -39,6 +40,7 @@ include::delete-inference.asciidoc[] include::get-inference.asciidoc[] include::post-inference.asciidoc[] include::put-inference.asciidoc[] +include::update-inference.asciidoc[] include::service-alibabacloud-ai-search.asciidoc[] include::service-amazon-bedrock.asciidoc[] include::service-anthropic.asciidoc[] diff --git a/docs/reference/inference/update-inference.asciidoc b/docs/reference/inference/update-inference.asciidoc new file mode 100644 index 0000000000000..166b002ea45f5 --- /dev/null +++ b/docs/reference/inference/update-inference.asciidoc @@ -0,0 +1,87 @@ +[role="xpack"] +[[update-inference-api]] +=== Update inference API + +experimental[] + +Updates an {infer} endpoint. + +IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI or Hugging Face. +For built-in models and models uploaded through Eland, the {infer} APIs offer an alternative way to use and manage trained models. +However, if you do not plan to use the {infer} APIs to use these models or if you want to use non-NLP models, use the <>. 
+ + +[discrete] +[[update-inference-api-request]] +==== {api-request-title} + +`POST _inference//_update` + +`POST _inference///_update` + + +[discrete] +[[update-inference-api-prereqs]] +==== {api-prereq-title} + +* Requires the `manage_inference` <> (the built-in inference_admin role grants this privilege) +* Requires an existing {infer} endpoint, created by using the <> + + +[discrete] +[[update-inference-api-desc]] +==== {api-description-title} + +The update inference API enables you to update the task_settings, secrets, and/or num_allocations of an existing {infer} endpoint. + +To use the update API, you can modify `task_settings`, secrets (within `service_settings`), or `num_allocations`, depending on the specific endpoint service and task_type you've created. +To view the updatable `task_settings`, the field names of secrets (specific to each service), and the services where `num_allocations` is applicable (only for the `elasticsearch` service), refer to the following list of services available through the {infer} API. +You will find the available task types next to each service name. +Click the links to review the service configuration details: + +* <> (`completion`, `rerank`, `sparse_embedding`, `text_embedding`) +* <> (`completion`, `text_embedding`) +* <> (`completion`) +* <> (`completion`, `text_embedding`) +* <> (`completion`, `text_embedding`) +* <> (`completion`, `rerank`, `text_embedding`) +* <> (`rerank`, `sparse_embedding`, `text_embedding` - this service is for built-in models and models uploaded through Eland) +* <> (`sparse_embedding`) +* <> (`completion`, `text_embedding`) +* <> (`rerank`, `text_embedding`) +* <> (`text_embedding`) +* <> (`text_embedding`) +* <> (`completion`, `text_embedding`) + + +[discrete] +[[update-inference-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) +The unique identifier of the {infer} endpoint. + + +``:: +(Optional, string) +The type of {infer} task that the model performs. 
+Refer to the service list in the <> for the available task types. + + +[discrete] +[[update-inference-api-example]] +==== {api-examples-title} + +The following example shows how to update an API key of an {infer} endpoint called `my-inference-endpoint`: + +[source,console] +------------------------------------------------------------ +POST _inference/my-inference-endpoint/_update +{ + "service_settings": { + "api_key": "" + } +} +------------------------------------------------------------ +// TEST[skip:TBD] From f95d3f8d430641e2bff22cbdc05f7d6cd9616443 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Thu, 17 Oct 2024 12:58:52 +0200 Subject: [PATCH 176/449] [DOCS] Updates adaptive allocations reference docs. (#114986) --- docs/reference/ml/ml-shared.asciidoc | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/reference/ml/ml-shared.asciidoc b/docs/reference/ml/ml-shared.asciidoc index ef19fbf4e267d..d01047eac9815 100644 --- a/docs/reference/ml/ml-shared.asciidoc +++ b/docs/reference/ml/ml-shared.asciidoc @@ -3,7 +3,6 @@ Adaptive allocations configuration object. If enabled, the number of allocations of the model is set based on the current load the process gets. When the load is high, a new model allocation is automatically created (respecting the value of `max_number_of_allocations` if it's set). When the load is low, a model allocation is automatically removed (respecting the value of `min_number_of_allocations` if it's set). -The number of model allocations cannot be scaled down to less than `1` this way. If `adaptive_allocations` is enabled, do not set the number of allocations manually. 
end::adaptive-allocation[] From 12062cb76629dcc506168a519e50eda7eed09a9e Mon Sep 17 00:00:00 2001 From: Jan Kuipers <148754765+jan-elastic@users.noreply.github.com> Date: Thu, 17 Oct 2024 13:56:01 +0200 Subject: [PATCH 177/449] Fix ml autoscaling for zero allocations (#114982) * Fix estimated memory usage for a model with zero allocations. * Ignore number of threads of models with zero allocations in autoscaling decisions. * Add some long overdue comments. * Another estimateMemoryUsageBytes fix --- .../StartTrainedModelDeploymentAction.java | 6 + .../MlAutoscalingDeciderService.java | 4 + .../MlAutoscalingResourceTracker.java | 109 ++++++------ .../MlAutoscalingResourceTrackerTests.java | 167 ++++++++++++++++++ 4 files changed, 232 insertions(+), 54 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java index 34ebdcb7f9f9f..ca789fee7b744 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java @@ -623,6 +623,9 @@ public String getDeploymentId() { * @return the estimated memory (in bytes) required for the model deployment to run */ public long estimateMemoryUsageBytes() { + if (numberOfAllocations == 0) { + return 0; + } // We already take into account 2x the model bytes. If the cache size is larger than the model bytes, then // we need to take it into account when returning the estimate. if (cacheSize != null && cacheSize.getBytes() > modelBytes) { @@ -796,6 +799,9 @@ public static long estimateMemoryUsageBytes( long perAllocationMemoryBytes, int numberOfAllocations ) { + if (numberOfAllocations == 0) { + return 0; + } // While loading the model in the process we need twice the model size. 
// 1. If ELSER v1 or v2 then 2004MB diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderService.java index 18d974473251b..fee3d729f8dfd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderService.java @@ -30,6 +30,10 @@ import static org.elasticsearch.core.Strings.format; +/** + * This handles ML autoscaling just for classic cloud. + * For serverless, see: {@link MlAutoscalingResourceTracker}. + */ public final class MlAutoscalingDeciderService implements AutoscalingDeciderService, LocalNodeMasterListener { private static final Logger logger = LogManager.getLogger(MlAutoscalingDeciderService.class); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTracker.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTracker.java index 9a9fbfa0340a9..6f14e2649a394 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTracker.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTracker.java @@ -48,7 +48,8 @@ import static org.elasticsearch.xpack.ml.job.JobNodeSelector.AWAITING_LAZY_ASSIGNMENT; /** - * backend for new kubernetes based autoscaler. + * This handles ML autoscaling just for serverless. + * For classic cloud, see: {@link MlAutoscalingDeciderService}. 
*/ public final class MlAutoscalingResourceTracker { private static final Logger logger = LogManager.getLogger(MlAutoscalingResourceTracker.class); @@ -242,72 +243,72 @@ static void getMemoryAndProcessors( final int numMissingProcessors = numMissingAllocations * numberOfThreadsPerAllocation; int numExistingProcessorsToBeUsed = Math.min(numMissingProcessors, numberOfAvailableProcessors); + if (numberOfRequestedAllocations == 0) { + continue; + } if (assignment.getNodeRoutingTable().isEmpty() == false && assignment.getNodeRoutingTable().values().stream().allMatch(r -> r.getState().consumesMemory() == false)) { // Ignore states that don't consume memory, for example all allocations are failed or stopped // if the node routing table is empty, then it will match the above condition, but it needs to be handled in the next branch continue; + } + + if (assignment.getNodeRoutingTable().isEmpty() == false) { + // if the routing table is non-empty, this is an existing model + existingModelMemoryBytes += estimatedMemoryUsage; } else { + // only increase memory requirements for new models + extraPerNodeModelMemoryBytes += Math.max(extraPerNodeModelMemoryBytes, estimatedMemoryUsage); + extraModelMemoryInBytes += estimatedMemoryUsage; + } - if (assignment.getNodeRoutingTable().isEmpty() == false) { - // if the routing table is non-empty, this is an existing model - existingModelMemoryBytes += estimatedMemoryUsage; - } else { - // only increase memory requirements for new models - extraPerNodeModelMemoryBytes += Math.max(extraPerNodeModelMemoryBytes, estimatedMemoryUsage); - extraModelMemoryInBytes += estimatedMemoryUsage; + // if not low priority, check processor requirements. 
+ if (Priority.LOW.equals(modelAssignment.getValue().getTaskParams().getPriority()) == false) { + if (numMissingProcessors > numberOfAvailableProcessors) { + // as assignments can be placed on different nodes, we only need numberOfThreadsPerAllocation here + extraProcessors += numMissingProcessors - numExistingProcessorsToBeUsed; + extraPerNodeProcessors = Math.max(extraPerNodeProcessors, 1); // if extra processors >0, we need at least 1 + // extraPerNodeProcessors } - - // if not low priority, check processor requirements. - if (Priority.LOW.equals(modelAssignment.getValue().getTaskParams().getPriority()) == false) { - if (numMissingProcessors > numberOfAvailableProcessors) { - // as assignments can be placed on different nodes, we only need numberOfThreadsPerAllocation here - extraProcessors += numMissingProcessors - numExistingProcessorsToBeUsed; - extraPerNodeProcessors = Math.max(extraPerNodeProcessors, 1); // if extra processors >0, we need at least 1 - // extraPerNodeProcessors - } - if (perNodeAvailableProcessors < numberOfThreadsPerAllocation) { - extraPerNodeProcessors = Math.max(extraPerNodeProcessors, numberOfThreadsPerAllocation); - } - numberOfAvailableProcessors -= numExistingProcessorsToBeUsed; + if (perNodeAvailableProcessors < numberOfThreadsPerAllocation) { + extraPerNodeProcessors = Math.max(extraPerNodeProcessors, numberOfThreadsPerAllocation); } + numberOfAvailableProcessors -= numExistingProcessorsToBeUsed; + } + + if (extraProcessors > 0 || extraPerNodeProcessors > 0 || extraModelMemoryInBytes > 0 || extraPerNodeModelMemoryBytes > 0) { + logger.info( + () -> format( + "trained model [%s] assigned to [%s], waiting for [%d] allocations to start due to missing hardware", + modelAssignment.getKey(), + Strings.arrayToCommaDelimitedString(modelAssignment.getValue().getStartedNodes()), + numMissingAllocations + ) + ); + } - if (extraProcessors > 0 || extraPerNodeProcessors > 0 || extraModelMemoryInBytes > 0 || extraPerNodeModelMemoryBytes > 0) { - 
logger.info( - () -> format( - "trained model [%s] assigned to [%s], waiting for [%d] allocations to start due to missing hardware", - modelAssignment.getKey(), - Strings.arrayToCommaDelimitedString(modelAssignment.getValue().getStartedNodes()), - numMissingAllocations + for (String node : modelAssignment.getValue().getNodeRoutingTable().keySet()) { + sumOfCurrentlyExistingAndUsedProcessors += modelAssignment.getValue().getNodeRoutingTable().get(node).getTargetAllocations() + * numberOfThreadsPerAllocation; + + jobRequirementsByNode.computeIfAbsent(node, k -> new ArrayList<>()) + .add( + MlJobRequirements.of( + estimatedMemoryUsage, + Priority.LOW.equals(modelAssignment.getValue().getTaskParams().getPriority()) + ? 0 + : modelAssignment.getValue().getNodeRoutingTable().get(node).getTargetAllocations() + * numberOfThreadsPerAllocation ) ); - } - - for (String node : modelAssignment.getValue().getNodeRoutingTable().keySet()) { - sumOfCurrentlyExistingAndUsedProcessors += modelAssignment.getValue() - .getNodeRoutingTable() - .get(node) - .getTargetAllocations() * numberOfThreadsPerAllocation; - - jobRequirementsByNode.computeIfAbsent(node, k -> new ArrayList<>()) - .add( - MlJobRequirements.of( - estimatedMemoryUsage, - Priority.LOW.equals(modelAssignment.getValue().getTaskParams().getPriority()) - ? 
0 - : modelAssignment.getValue().getNodeRoutingTable().get(node).getTargetAllocations() - * numberOfThreadsPerAllocation - ) - ); - } - - // min(3, max(number of allocations over all deployed models) - // the minimum number of nodes is equal to the number of allocations, up to 3 - // if the number of allocations is greater than 3, then wantedMinNodes is still 3 - // in theory this should help availability for 2-3 allocations - // the planner should split over all available nodes - minNodes = Math.min(3, Math.max(minNodes, numberOfRequestedAllocations)); } + + // min(3, max(number of allocations over all deployed models) + // the minimum number of nodes is equal to the number of allocations, up to 3 + // if the number of allocations is greater than 3, then wantedMinNodes is still 3 + // in theory this should help availability for 2-3 allocations + // the planner should split over all available nodes + minNodes = Math.min(3, Math.max(minNodes, numberOfRequestedAllocations)); } // dummy autoscaling entity diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerTests.java index 3674dda3934bd..729bb708cc46f 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.core.ml.action.OpenJobAction; import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.autoscaling.MlAutoscalingStats; +import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentState; import org.elasticsearch.xpack.core.ml.inference.assignment.Priority; import 
org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; @@ -1800,6 +1801,172 @@ public void testGetMemoryAndProcessorsScaleDownNotPreventedByDummyEntityAsMemory ); } + public void testGetMemoryAndProcessorsScaleDownForModelWithZeroAllocations() throws InterruptedException { + long memory = 1000000000; + Map nodeAttr = Map.of( + MachineLearning.MACHINE_MEMORY_NODE_ATTR, + Long.toString(memory), + MachineLearning.MAX_JVM_SIZE_NODE_ATTR, + "400000000", + MachineLearning.ML_CONFIG_VERSION_NODE_ATTR, + "7.2.0", + MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, + "2.0" + ); + + MlAutoscalingContext mlAutoscalingContext = new MlAutoscalingContext( + List.of(), + List.of(), + List.of(), + Map.of( + "model-with-zero-allocations", + TrainedModelAssignment.Builder.empty( + new StartTrainedModelDeploymentAction.TaskParams( + "model-with-zero-allocations", + "model-with-zero-allocations-deployment", + 400, + 0, + 2, + 100, + null, + Priority.NORMAL, + 0L, + 0L + ), + new AdaptiveAllocationsSettings(true, 0, 4) + ).build() + ), + List.of( + DiscoveryNodeUtils.builder("ml-node-1") + .name("ml-node-name-1") + .address(new TransportAddress(InetAddress.getLoopbackAddress(), 9300)) + .attributes(nodeAttr) + .roles(Set.of(DiscoveryNodeRole.ML_ROLE)) + .build() + ), + PersistentTasksCustomMetadata.builder().build() + ); + MlMemoryTracker mockTracker = mock(MlMemoryTracker.class); + + this.assertAsync( + listener -> MlAutoscalingResourceTracker.getMemoryAndProcessors( + mlAutoscalingContext, + mockTracker, + Map.of("ml-node-1", memory), + 600000000, + 2, + MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE, + MlDummyAutoscalingEntity.of(0, 0), + 1, + listener + ), + stats -> { + assertEquals(memory, stats.currentPerNodeMemoryBytes()); + assertEquals(0, stats.currentTotalModelMemoryBytes()); + assertEquals(0, stats.currentTotalProcessorsInUse()); + assertEquals(1, stats.currentTotalNodes()); + assertEquals(0, stats.wantedMinNodes()); + assertEquals(0, 
stats.wantedExtraPerNodeNodeProcessors()); + assertEquals(0, stats.wantedExtraProcessors()); + assertEquals(0, stats.wantedExtraModelMemoryBytes()); + assertEquals(0, stats.wantedExtraPerNodeMemoryBytes()); + assertEquals(memory, stats.unwantedNodeMemoryBytesToRemove()); + assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.currentPerNodeMemoryOverheadBytes()); + } + ); + } + + public void testGetMemoryAndProcessorsIgnoreThreadsOfModelWithZeroAllocations() throws InterruptedException { + long memory = 1000000000; + Map nodeAttr = Map.of( + MachineLearning.MACHINE_MEMORY_NODE_ATTR, + Long.toString(memory), + MachineLearning.MAX_JVM_SIZE_NODE_ATTR, + "400000000", + MachineLearning.ML_CONFIG_VERSION_NODE_ATTR, + "7.2.0", + MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, + "2.0" + ); + + MlAutoscalingContext mlAutoscalingContext = new MlAutoscalingContext( + List.of(), + List.of(), + List.of(), + Map.of( + "model-with-one-allocation", + TrainedModelAssignment.Builder.empty( + new StartTrainedModelDeploymentAction.TaskParams( + "model-with-one-allocation", + "model-with-one-allocation-deployment", + 400, + 1, + 2, + 100, + null, + Priority.NORMAL, + 0L, + 0L + ), + null + ).addRoutingEntry("ml-node-1", new RoutingInfo(1, 1, RoutingState.STARTED, "")).build(), + "model-with-zero-allocations", + TrainedModelAssignment.Builder.empty( + new StartTrainedModelDeploymentAction.TaskParams( + "model-with-zero-allocations", + "model-with-zero-allocations-deployment", + 400, + 0, + 4, + 100, + null, + Priority.NORMAL, + 0L, + 0L + ), + new AdaptiveAllocationsSettings(true, 0, 4) + ).build() + ), + List.of( + DiscoveryNodeUtils.builder("ml-node-1") + .name("ml-node-name-1") + .address(new TransportAddress(InetAddress.getLoopbackAddress(), 9300)) + .attributes(nodeAttr) + .roles(Set.of(DiscoveryNodeRole.ML_ROLE)) + .build() + ), + PersistentTasksCustomMetadata.builder().build() + ); + MlMemoryTracker mockTracker = mock(MlMemoryTracker.class); + + 
this.assertAsync( + listener -> MlAutoscalingResourceTracker.getMemoryAndProcessors( + mlAutoscalingContext, + mockTracker, + Map.of("ml-node-1", memory), + 600000000, + 2, + MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE, + MlDummyAutoscalingEntity.of(0, 0), + 1, + listener + ), + stats -> { + assertEquals(memory, stats.currentPerNodeMemoryBytes()); + assertEquals(251659040, stats.currentTotalModelMemoryBytes()); + assertEquals(2, stats.currentTotalProcessorsInUse()); + assertEquals(1, stats.currentTotalNodes()); + assertEquals(1, stats.wantedMinNodes()); + assertEquals(0, stats.wantedExtraPerNodeNodeProcessors()); + assertEquals(0, stats.wantedExtraProcessors()); + assertEquals(0, stats.wantedExtraModelMemoryBytes()); + assertEquals(0, stats.wantedExtraPerNodeMemoryBytes()); + assertEquals(0, stats.unwantedNodeMemoryBytesToRemove()); + assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.currentPerNodeMemoryOverheadBytes()); + } + ); + } + private void assertAsync(Consumer> function, Consumer furtherTests) throws InterruptedException { CountDownLatch latch = new CountDownLatch(1); AtomicBoolean listenerCalled = new AtomicBoolean(false); From 03013555286c5444a300bbcc0f7b3ef4180cb363 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 17 Oct 2024 23:12:22 +1100 Subject: [PATCH 178/449] Mute org.elasticsearch.upgrades.MultiVersionRepositoryAccessIT testUpgradeMovesRepoToNewMetaVersion #114994 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 4cabd2a912512..850c5afd1ede9 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -343,6 +343,9 @@ tests: - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT method: testInferDeploysDefaultElser issue: https://github.com/elastic/elasticsearch/issues/114913 +- class: org.elasticsearch.upgrades.MultiVersionRepositoryAccessIT + method: 
testUpgradeMovesRepoToNewMetaVersion + issue: https://github.com/elastic/elasticsearch/issues/114994 # Examples: # From 979710150c133840ef0852edaa4aee02c144fdb2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 17 Oct 2024 23:12:30 +1100 Subject: [PATCH 179/449] Mute org.elasticsearch.upgrades.MultiVersionRepositoryAccessIT testReadOnlyRepo #114997 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 850c5afd1ede9..4d73351991744 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -346,6 +346,9 @@ tests: - class: org.elasticsearch.upgrades.MultiVersionRepositoryAccessIT method: testUpgradeMovesRepoToNewMetaVersion issue: https://github.com/elastic/elasticsearch/issues/114994 +- class: org.elasticsearch.upgrades.MultiVersionRepositoryAccessIT + method: testReadOnlyRepo + issue: https://github.com/elastic/elasticsearch/issues/114997 # Examples: # From f29ebd3d380295d719eda913363d1b1aa6e8beb7 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 17 Oct 2024 23:12:32 +1100 Subject: [PATCH 180/449] Mute org.elasticsearch.upgrades.MultiVersionRepositoryAccessIT testCreateAndRestoreSnapshot #114998 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 4d73351991744..78f8b76aaff64 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -349,6 +349,9 @@ tests: - class: org.elasticsearch.upgrades.MultiVersionRepositoryAccessIT method: testReadOnlyRepo issue: https://github.com/elastic/elasticsearch/issues/114997 +- class: org.elasticsearch.upgrades.MultiVersionRepositoryAccessIT + method: testCreateAndRestoreSnapshot + issue: https://github.com/elastic/elasticsearch/issues/114998 # Examples: # From 8d0e63057f17f9fa1fd39945da7423037e9a635b Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Thu, 17 Oct 2024 15:33:40 +0300 
Subject: [PATCH 181/449] Updating text_similarity_reranker tests to account for the test_reranking_service (#114945) --- .../70_text_similarity_rank_retriever.yml | 8 ++++-- ...ith_text_similarity_reranker_retriever.yml | 28 +++++++++---------- 2 files changed, 19 insertions(+), 17 deletions(-) diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_text_similarity_rank_retriever.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_text_similarity_rank_retriever.yml index 9a4d7f4416164..2980d42d22c3e 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_text_similarity_rank_retriever.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_text_similarity_rank_retriever.yml @@ -1,4 +1,8 @@ setup: + - skip: + features: + - close_to + - contains - requires: cluster_features: "text_similarity_reranker_retriever_supported" reason: semantic reranking introduced in 8.15.0 @@ -206,8 +210,8 @@ setup: size: 10 explain: true - - match: { hits.hits.0._id: "doc_2" } - - match: { hits.hits.1._id: "doc_1" } + - contains: { hits.hits: { _id: "doc_2" } } + - contains: { hits.hits: { _id: "doc_1" } } - close_to: { hits.hits.0._explanation.value: { value: 0.4, error: 0.000001 } } - match: {hits.hits.0._explanation.description: "/text_similarity_reranker.match.using.inference.endpoint:.\\[my-rerank-model\\].on.document.field:.\\[text\\].*/" } diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/800_rrf_with_text_similarity_reranker_retriever.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/800_rrf_with_text_similarity_reranker_retriever.yml index 105efcec8bc65..d9db1fe387625 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/800_rrf_with_text_similarity_reranker_retriever.yml +++ 
b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/800_rrf_with_text_similarity_reranker_retriever.yml @@ -1,4 +1,9 @@ setup: + - skip: + features: + - close_to + - contains + - requires: cluster_features: ['rrf_retriever_composition_supported', 'text_similarity_reranker_retriever_supported'] reason: need to have support for rrf and semantic reranking composition @@ -125,9 +130,6 @@ setup: term: { topic: "science" } - }, - "sort": { - "integer": "asc" } } }, @@ -152,8 +154,8 @@ setup: - match: { hits.total.value: 3 } - length: { hits.hits: 2 } - - match: { hits.hits.0._id: "doc_1" } - - match: { hits.hits.1._id: "doc_3" } + - contains: { hits.hits: { _id: "doc_1" } } + - contains: { hits.hits: { _id: "doc_3" } } - match: { aggregations.topics.buckets.0.key: "science" } - match: { aggregations.topics.buckets.0.doc_count: 2 } @@ -304,11 +306,8 @@ setup: standard: { query: { term: { - topic: "science" + subtopic: "astronomy" } - }, - "sort": { - "integer": "asc" } } }, @@ -327,14 +326,13 @@ setup: - match: { hits.hits.0._id: "doc_2" } - match: { hits.hits.1._id: "doc_1" } - - match: { hits.hits.2._id: "doc_3" } - - close_to: { hits.hits.0._explanation.value: { value: 0.6666667, error: 0.000001 } } - - match: {hits.hits.0._explanation.description: "/rrf.score:.\\[0.6666667\\].*/" } + - close_to: { hits.hits.0._explanation.value: { value: 0.833333, error: 0.0001 } } + - match: {hits.hits.0._explanation.description: "/rrf.score:.\\[0.8333334\\].*/" } - match: {hits.hits.0._explanation.details.0.value: 2} - match: {hits.hits.0._explanation.details.0.description: "/rrf.score:.\\[0.33333334\\].*/" } - match: {hits.hits.0._explanation.details.0.details.0.details.0.description: "/ConstantScore.*/" } - - match: {hits.hits.0._explanation.details.1.value: 2} - - match: {hits.hits.0._explanation.details.1.description: "/rrf.score:.\\[0.33333334\\].*/" } + - match: {hits.hits.0._explanation.details.1.value: 1} + - match: 
{hits.hits.0._explanation.details.1.description: "/rrf.score:.\\[0.5\\].*/" } - match: {hits.hits.0._explanation.details.1.details.0.description: "/text_similarity_reranker.match.using.inference.endpoint:.\\[my-rerank-model\\].on.document.field:.\\[text\\].*/" } - - match: {hits.hits.0._explanation.details.1.details.0.details.0.description: "/weight.*science.*/" } + - match: {hits.hits.0._explanation.details.1.details.0.details.0.description: "/weight.*astronomy.*/" } From caa16b4de0f18dd0e2f8725fe029dba5be6815a2 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Thu, 17 Oct 2024 14:39:59 +0200 Subject: [PATCH 182/449] ESQL: Remove parent from FieldAttribute (#112881) To avoid serializing unnecessary data, remove FieldAttribute.parent, and instead only keep a String FieldAttribute.parentName. --- docs/changelog/112881.yaml | 5 + .../org/elasticsearch/TransportVersions.java | 1 + .../xpack/esql/core/expression/Alias.java | 7 +- .../xpack/esql/core/expression/Attribute.java | 7 +- .../esql/core/expression/FieldAttribute.java | 97 ++++++++++++------- .../core/expression/MetadataAttribute.java | 7 +- .../esql/core/expression/NamedExpression.java | 5 +- .../core/expression/ReferenceAttribute.java | 10 +- .../esql/core/expression/TypedAttribute.java | 10 +- .../core/expression/UnresolvedAttribute.java | 3 +- .../xpack/esql/core/type/DataType.java | 8 +- .../xpack/esql/core/type/DateEsField.java | 9 +- .../xpack/esql/core/type/EsField.java | 9 +- .../esql/core/type/InvalidMappedField.java | 9 +- .../xpack/esql/core/type/KeywordEsField.java | 8 +- .../esql/core/type/MultiTypeEsField.java | 9 +- .../xpack/esql/core/type/TextEsField.java | 8 +- .../esql/core/type/UnsupportedEsField.java | 13 +-- .../xpack/esql/core/util/PlanStreamInput.java | 10 ++ .../esql/core/util/PlanStreamOutput.java | 12 +++ .../expression/FieldAttributeTestUtils.java | 6 +- .../xpack/esql/analysis/Analyzer.java | 12 +-- .../function/UnsupportedAttribute.java | 12 ++- 
.../xpack/esql/io/stream/PlanStreamInput.java | 14 ++- .../esql/io/stream/PlanStreamOutput.java | 18 +++- .../xpack/esql/plan/logical/EsRelation.java | 7 +- .../function/FieldAttributeTests.java | 10 +- .../esql/index/EsIndexSerializationTests.java | 47 +++++++++ .../esql/io/stream/PlanStreamOutputTests.java | 17 +--- .../ExchangeSinkExecSerializationTests.java | 67 +++++++++++-- 30 files changed, 323 insertions(+), 134 deletions(-) create mode 100644 docs/changelog/112881.yaml diff --git a/docs/changelog/112881.yaml b/docs/changelog/112881.yaml new file mode 100644 index 0000000000000..a8a0d542f8201 --- /dev/null +++ b/docs/changelog/112881.yaml @@ -0,0 +1,5 @@ +pr: 112881 +summary: "ESQL: Remove parent from `FieldAttribute`" +area: ES|QL +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index dcf6f7aebdc65..bf61752a1d771 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -175,6 +175,7 @@ static TransportVersion def(int id) { public static final TransportVersion ML_INFERENCE_ATTACH_TO_EXISTSING_DEPLOYMENT = def(8_771_00_0); public static final TransportVersion CONVERT_FAILURE_STORE_OPTIONS_TO_SELECTOR_OPTIONS_INTERNALLY = def(8_772_00_0); public static final TransportVersion REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_773_00_0); + public static final TransportVersion ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED = def(8_774_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Alias.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Alias.java index e33f9b1c20527..1f7d03ba9d905 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Alias.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Alias.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -42,11 +43,11 @@ public Alias(Source source, String name, Expression child) { this(source, name, child, null); } - public Alias(Source source, String name, Expression child, NameId id) { + public Alias(Source source, String name, Expression child, @Nullable NameId id) { this(source, name, child, id, false); } - public Alias(Source source, String name, Expression child, NameId id, boolean synthetic) { + public Alias(Source source, String name, Expression child, @Nullable NameId id, boolean synthetic) { super(source, name, singletonList(child), id, synthetic); this.child = child; } @@ -55,7 +56,7 @@ public Alias(Source source, String name, Expression child, NameId id, boolean sy /** * Old constructor from when this had a qualifier string. Still needed to not break serialization. 
*/ - private Alias(Source source, String name, String qualifier, Expression child, NameId id, boolean synthetic) { + private Alias(Source source, String name, String qualifier, Expression child, @Nullable NameId id, boolean synthetic) { this(source, name, child, id, synthetic); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java index 05c414298fd33..45f42a754910d 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.core.expression; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -41,15 +42,15 @@ public static List getNamedWriteables() { // can the attr be null - typically used in JOINs private final Nullability nullability; - public Attribute(Source source, String name, NameId id) { + public Attribute(Source source, String name, @Nullable NameId id) { this(source, name, Nullability.TRUE, id); } - public Attribute(Source source, String name, Nullability nullability, NameId id) { + public Attribute(Source source, String name, Nullability nullability, @Nullable NameId id) { this(source, name, nullability, id, false); } - public Attribute(Source source, String name, Nullability nullability, NameId id, boolean synthetic) { + public Attribute(Source source, String name, Nullability nullability, @Nullable NameId id, boolean synthetic) { super(source, name, emptyList(), id, synthetic); this.nullability = nullability; } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FieldAttribute.java 
b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FieldAttribute.java index 767d2f45f90e4..4076acdb7e7b8 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FieldAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FieldAttribute.java @@ -6,21 +6,25 @@ */ package org.elasticsearch.xpack.esql.core.expression; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; -import org.elasticsearch.xpack.esql.core.util.StringUtils; import java.io.IOException; import java.util.Objects; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; + /** * Attribute for an ES field. 
* To differentiate between the different type of fields this class offers: @@ -37,32 +41,31 @@ public class FieldAttribute extends TypedAttribute { FieldAttribute::readFrom ); - private final FieldAttribute parent; - private final String path; + private final String parentName; private final EsField field; public FieldAttribute(Source source, String name, EsField field) { this(source, null, name, field); } - public FieldAttribute(Source source, FieldAttribute parent, String name, EsField field) { - this(source, parent, name, field, Nullability.TRUE, null, false); + public FieldAttribute(Source source, @Nullable String parentName, String name, EsField field) { + this(source, parentName, name, field, Nullability.TRUE, null, false); } - public FieldAttribute(Source source, FieldAttribute parent, String name, EsField field, boolean synthetic) { - this(source, parent, name, field, Nullability.TRUE, null, synthetic); + public FieldAttribute(Source source, @Nullable String parentName, String name, EsField field, boolean synthetic) { + this(source, parentName, name, field, Nullability.TRUE, null, synthetic); } public FieldAttribute( Source source, - FieldAttribute parent, + @Nullable String parentName, String name, EsField field, Nullability nullability, - NameId id, + @Nullable NameId id, boolean synthetic ) { - this(source, parent, name, field.getDataType(), field, nullability, id, synthetic); + this(source, parentName, name, field.getDataType(), field, nullability, id, synthetic); } /** @@ -71,17 +74,16 @@ public FieldAttribute( */ FieldAttribute( Source source, - FieldAttribute parent, + @Nullable String parentName, String name, DataType type, EsField field, Nullability nullability, - NameId id, + @Nullable NameId id, boolean synthetic ) { super(source, name, type, nullability, id, synthetic); - this.path = parent != null ? 
parent.name() : StringUtils.EMPTY; - this.parent = parent; + this.parentName = parentName; this.field = field; } @@ -91,16 +93,16 @@ public FieldAttribute( */ private FieldAttribute( Source source, - FieldAttribute parent, + @Nullable String parentName, String name, DataType type, EsField field, - String qualifier, + @Nullable String qualifier, Nullability nullability, - NameId id, + @Nullable NameId id, boolean synthetic ) { - this(source, parent, name, type, field, nullability, id, synthetic); + this(source, parentName, name, type, field, nullability, id, synthetic); } private FieldAttribute(StreamInput in) throws IOException { @@ -114,8 +116,8 @@ private FieldAttribute(StreamInput in) throws IOException { */ this( Source.readFrom((StreamInput & PlanStreamInput) in), - in.readOptionalWriteable(FieldAttribute::readFrom), - ((PlanStreamInput) in).readCachedString(), + readParentName(in), + readCachedStringWithVersionCheck(in), DataType.readFrom(in), EsField.readFrom(in), in.readOptionalString(), @@ -129,8 +131,8 @@ private FieldAttribute(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { if (((PlanStreamOutput) out).writeAttributeCacheHeader(this)) { Source.EMPTY.writeTo(out); - out.writeOptionalWriteable(parent); - ((PlanStreamOutput) out).writeCachedString(name()); + writeParentName(out); + writeCachedStringWithVersionCheck(out, name()); dataType().writeTo(out); field.writeTo(out); // We used to write the qualifier here. We can still do if needed in the future. 
@@ -145,6 +147,26 @@ public static FieldAttribute readFrom(StreamInput in) throws IOException { return ((PlanStreamInput) in).readAttributeWithCache(FieldAttribute::new); } + private void writeParentName(StreamOutput out) throws IOException { + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED)) { + ((PlanStreamOutput) out).writeOptionalCachedString(parentName); + } else { + // Previous versions only used the parent field attribute to retrieve the parent's name, so we can use just any + // fake FieldAttribute here as long as the name is correct. + FieldAttribute fakeParent = parentName() == null ? null : new FieldAttribute(Source.EMPTY, parentName(), field()); + out.writeOptionalWriteable(fakeParent); + } + } + + private static String readParentName(StreamInput in) throws IOException { + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED)) { + return ((PlanStreamInput) in).readOptionalCachedString(); + } + + FieldAttribute parent = in.readOptionalWriteable(FieldAttribute::readFrom); + return parent == null ? null : parent.name(); + } + @Override public String getWriteableName() { return ENTRY.name; @@ -152,15 +174,22 @@ public String getWriteableName() { @Override protected NodeInfo info() { - return NodeInfo.create(this, FieldAttribute::new, parent, name(), dataType(), field, (String) null, nullable(), id(), synthetic()); - } - - public FieldAttribute parent() { - return parent; + return NodeInfo.create( + this, + FieldAttribute::new, + parentName, + name(), + dataType(), + field, + (String) null, + nullable(), + id(), + synthetic() + ); } - public String path() { - return path; + public String parentName() { + return parentName; } /** @@ -174,7 +203,7 @@ public String fieldName() { if ((synthetic() || name().startsWith(SYNTHETIC_ATTRIBUTE_NAME_PREFIX)) == false) { return name(); } - return Strings.hasText(path) ? path + "." 
+ field.getName() : field.getName(); + return Strings.hasText(parentName) ? parentName + "." + field.getName() : field.getName(); } public EsField.Exact getExactInfo() { @@ -190,13 +219,13 @@ public FieldAttribute exactAttribute() { } private FieldAttribute innerField(EsField type) { - return new FieldAttribute(source(), this, name() + "." + type.getName(), type, nullable(), id(), synthetic()); + return new FieldAttribute(source(), name(), name() + "." + type.getName(), type, nullable(), id(), synthetic()); } @Override protected Attribute clone(Source source, String name, DataType type, Nullability nullability, NameId id, boolean synthetic) { // Ignore `type`, this must be the same as the field's type. - return new FieldAttribute(source, parent, name, field, nullability, id, synthetic); + return new FieldAttribute(source, parentName, name, field, nullability, id, synthetic); } @Override @@ -206,13 +235,13 @@ public Attribute withDataType(DataType type) { @Override public int hashCode() { - return Objects.hash(super.hashCode(), path, field); + return Objects.hash(super.hashCode(), parentName, field); } @Override public boolean equals(Object obj) { return super.equals(obj) - && Objects.equals(path, ((FieldAttribute) obj).path) + && Objects.equals(parentName, ((FieldAttribute) obj).parentName) && Objects.equals(field, ((FieldAttribute) obj).field); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java index 539c55ba341cf..3641812cd6cad 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.IgnoredFieldMapper; @@ -59,7 +60,7 @@ public MetadataAttribute( String name, DataType dataType, Nullability nullability, - NameId id, + @Nullable NameId id, boolean synthetic, boolean searchable ) { @@ -79,9 +80,9 @@ private MetadataAttribute( Source source, String name, DataType dataType, - String qualifier, + @Nullable String qualifier, Nullability nullability, - NameId id, + @Nullable NameId id, boolean synthetic, boolean searchable ) { diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java index ba467910bed0d..3b018f09e5ebd 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java @@ -8,6 +8,7 @@ import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.Source; import java.util.ArrayList; @@ -32,11 +33,11 @@ public static List getNamedWriteables() { private final NameId id; private final boolean synthetic; - public NamedExpression(Source source, String name, List children, NameId id) { + public NamedExpression(Source source, String name, List children, @Nullable NameId id) { this(source, name, children, id, false); } - public NamedExpression(Source source, String name, List children, NameId id, boolean synthetic) { + public NamedExpression(Source source, String name, List children, @Nullable NameId id, boolean 
synthetic) { super(source, children); this.name = name; this.id = id == null ? new NameId() : id; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ReferenceAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ReferenceAttribute.java index 504e1eae8d880..3626c5d26f235 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ReferenceAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ReferenceAttribute.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -31,7 +32,14 @@ public ReferenceAttribute(Source source, String name, DataType dataType) { this(source, name, dataType, Nullability.FALSE, null, false); } - public ReferenceAttribute(Source source, String name, DataType dataType, Nullability nullability, NameId id, boolean synthetic) { + public ReferenceAttribute( + Source source, + String name, + DataType dataType, + Nullability nullability, + @Nullable NameId id, + boolean synthetic + ) { super(source, name, dataType, nullability, id, synthetic); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypedAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypedAttribute.java index 0350abef99992..f8a041110798c 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypedAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypedAttribute.java @@ -6,6 +6,7 
@@ */ package org.elasticsearch.xpack.esql.core.expression; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -15,7 +16,14 @@ public abstract class TypedAttribute extends Attribute { private final DataType dataType; - protected TypedAttribute(Source source, String name, DataType dataType, Nullability nullability, NameId id, boolean synthetic) { + protected TypedAttribute( + Source source, + String name, + DataType dataType, + Nullability nullability, + @Nullable NameId id, + boolean synthetic + ) { super(source, name, nullability, id, synthetic); this.dataType = dataType; } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/UnresolvedAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/UnresolvedAttribute.java index d8a35adcbffde..a971a15a23c86 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/UnresolvedAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/UnresolvedAttribute.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.core.expression; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.capabilities.Unresolvable; import org.elasticsearch.xpack.esql.core.capabilities.UnresolvedException; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -33,7 +34,7 @@ public UnresolvedAttribute(Source source, String name, String unresolvedMessage) } @SuppressWarnings("this-escape") - public UnresolvedAttribute(Source source, String name, NameId id, String unresolvedMessage, Object resolutionMetadata) { + public UnresolvedAttribute(Source source, String name, @Nullable NameId id, String unresolvedMessage, Object resolutionMetadata) { super(source, name, id); this.customMessage = 
unresolvedMessage != null; this.unresolvedMsg = unresolvedMessage == null ? errorMessage(name(), null) : unresolvedMessage; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java index cb1a7b2eb6fe0..12699ca3ee720 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java @@ -14,8 +14,6 @@ import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; import org.elasticsearch.xpack.esql.core.plugin.EsqlCorePlugin; -import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.math.BigInteger; @@ -32,6 +30,8 @@ import static java.util.stream.Collectors.toMap; import static java.util.stream.Collectors.toUnmodifiableMap; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; public enum DataType { /** @@ -535,12 +535,12 @@ public DataType counter() { } public void writeTo(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeCachedString(typeName); + writeCachedStringWithVersionCheck(out, typeName); } public static DataType readFrom(StreamInput in) throws IOException { // TODO: Use our normal enum serialization pattern - return readFrom(((PlanStreamInput) in).readCachedString()); + return readFrom(readCachedStringWithVersionCheck(in)); } /** diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DateEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DateEsField.java index 
7c4b98c5af84e..3a81ec2a6f17d 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DateEsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DateEsField.java @@ -8,12 +8,13 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.Map; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; + /** * Information about a field in an ES index with the {@code date} type */ @@ -28,12 +29,12 @@ private DateEsField(String name, DataType dataType, Map propert } protected DateEsField(StreamInput in) throws IOException { - this(((PlanStreamInput) in).readCachedString(), DataType.DATETIME, in.readImmutableMap(EsField::readFrom), in.readBoolean()); + this(readCachedStringWithVersionCheck(in), DataType.DATETIME, in.readImmutableMap(EsField::readFrom), in.readBoolean()); } @Override public void writeContent(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeCachedString(getName()); + writeCachedStringWithVersionCheck(out, getName()); out.writeMap(getProperties(), (o, x) -> x.writeTo(out)); out.writeBoolean(isAggregatable()); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/EsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/EsField.java index 6235176d82de6..47dadcbb11de2 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/EsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/EsField.java @@ -18,6 +18,9 @@ import java.util.Map; import 
java.util.Objects; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; + /** * Information about a field in an ES index. */ @@ -60,7 +63,7 @@ public EsField(String name, DataType esDataType, Map properties } public EsField(StreamInput in) throws IOException { - this.name = ((PlanStreamInput) in).readCachedString(); + this.name = readCachedStringWithVersionCheck(in); this.esDataType = readDataType(in); this.properties = in.readImmutableMap(EsField::readFrom); this.aggregatable = in.readBoolean(); @@ -68,7 +71,7 @@ public EsField(StreamInput in) throws IOException { } private DataType readDataType(StreamInput in) throws IOException { - String name = ((PlanStreamInput) in).readCachedString(); + String name = readCachedStringWithVersionCheck(in); if (in.getTransportVersion().before(TransportVersions.ESQL_NESTED_UNSUPPORTED) && name.equalsIgnoreCase("NESTED")) { /* * The "nested" data type existed in older versions of ESQL but was @@ -98,7 +101,7 @@ public void writeTo(StreamOutput out) throws IOException { * This needs to be overridden by subclasses for specific serialization */ public void writeContent(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeCachedString(name); + writeCachedStringWithVersionCheck(out, name); esDataType.writeTo(out); out.writeMap(properties, (o, x) -> x.writeTo(out)); out.writeBoolean(aggregatable); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/InvalidMappedField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/InvalidMappedField.java index 40825af56ccfe..f83e4652ebebd 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/InvalidMappedField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/InvalidMappedField.java @@ 
-10,8 +10,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; -import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.Map; @@ -20,6 +18,9 @@ import java.util.TreeMap; import java.util.stream.Collectors; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; + /** * Representation of field mapped differently across indices. * Used during mapping discovery only. @@ -54,7 +55,7 @@ private InvalidMappedField(String name, String errorMessage, Map types() { @@ -63,7 +64,7 @@ public Set types() { @Override public void writeContent(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeCachedString(getName()); + writeCachedStringWithVersionCheck(out, getName()); out.writeString(errorMessage); out.writeMap(getProperties(), (o, x) -> x.writeTo(out)); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/KeywordEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/KeywordEsField.java index 48995bafec451..8b88884a0ce17 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/KeywordEsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/KeywordEsField.java @@ -8,8 +8,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.Collections; @@ -17,6 +15,8 @@ import java.util.Objects; import 
static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; /** * Information about a field in an ES index with the {@code keyword} type. @@ -61,7 +61,7 @@ protected KeywordEsField( public KeywordEsField(StreamInput in) throws IOException { this( - ((PlanStreamInput) in).readCachedString(), + readCachedStringWithVersionCheck(in), KEYWORD, in.readImmutableMap(EsField::readFrom), in.readBoolean(), @@ -73,7 +73,7 @@ public KeywordEsField(StreamInput in) throws IOException { @Override public void writeContent(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeCachedString(getName()); + writeCachedStringWithVersionCheck(out, getName()); out.writeMap(getProperties(), (o, x) -> x.writeTo(out)); out.writeBoolean(isAggregatable()); out.writeInt(precision); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/MultiTypeEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/MultiTypeEsField.java index 522cb682c0943..0d7f9ee425d6a 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/MultiTypeEsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/MultiTypeEsField.java @@ -10,8 +10,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.HashMap; @@ -19,6 +17,9 @@ import java.util.Objects; import java.util.Set; +import static 
org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; + /** * During IndexResolution it could occur that the same field is mapped to different types in different indices. * The class MultiTypeEfField.UnresolvedField holds that information and allows for later resolution of the field @@ -39,7 +40,7 @@ public MultiTypeEsField(String name, DataType dataType, boolean aggregatable, Ma protected MultiTypeEsField(StreamInput in) throws IOException { this( - ((PlanStreamInput) in).readCachedString(), + readCachedStringWithVersionCheck(in), DataType.readFrom(in), in.readBoolean(), in.readImmutableMap(i -> i.readNamedWriteable(Expression.class)) @@ -48,7 +49,7 @@ protected MultiTypeEsField(StreamInput in) throws IOException { @Override public void writeContent(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeCachedString(getName()); + writeCachedStringWithVersionCheck(out, getName()); getDataType().writeTo(out); out.writeBoolean(isAggregatable()); out.writeMap(getIndexToConversionExpressions(), (o, v) -> out.writeNamedWriteable(v)); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/TextEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/TextEsField.java index c6c494ef289bb..ed0d32a7696eb 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/TextEsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/TextEsField.java @@ -10,8 +10,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Tuple; import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; -import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import 
java.util.Map; @@ -19,6 +17,8 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; /** * Information about a field in an es index with the {@code text} type. @@ -34,12 +34,12 @@ public TextEsField(String name, Map properties, boolean hasDocV } protected TextEsField(StreamInput in) throws IOException { - this(((PlanStreamInput) in).readCachedString(), in.readImmutableMap(EsField::readFrom), in.readBoolean(), in.readBoolean()); + this(readCachedStringWithVersionCheck(in), in.readImmutableMap(EsField::readFrom), in.readBoolean(), in.readBoolean()); } @Override public void writeContent(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeCachedString(getName()); + writeCachedStringWithVersionCheck(out, getName()); out.writeMap(getProperties(), (o, x) -> x.writeTo(out)); out.writeBoolean(isAggregatable()); out.writeBoolean(isAlias()); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/UnsupportedEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/UnsupportedEsField.java index 980620cb98847..02ce741243c20 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/UnsupportedEsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/UnsupportedEsField.java @@ -8,14 +8,15 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.Map; import java.util.Objects; import java.util.TreeMap; +import 
static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; + /** * Information about a field in an ES index that cannot be supported by ESQL. * All the subfields (properties) of an unsupported type are also be unsupported. @@ -37,8 +38,8 @@ public UnsupportedEsField(String name, String originalType, String inherited, Ma public UnsupportedEsField(StreamInput in) throws IOException { this( - ((PlanStreamInput) in).readCachedString(), - ((PlanStreamInput) in).readCachedString(), + readCachedStringWithVersionCheck(in), + readCachedStringWithVersionCheck(in), in.readOptionalString(), in.readImmutableMap(EsField::readFrom) ); @@ -46,8 +47,8 @@ public UnsupportedEsField(StreamInput in) throws IOException { @Override public void writeContent(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeCachedString(getName()); - ((PlanStreamOutput) out).writeCachedString(getOriginalType()); + writeCachedStringWithVersionCheck(out, getName()); + writeCachedStringWithVersionCheck(out, getOriginalType()); out.writeOptionalString(getInherited()); out.writeMap(getProperties(), (o, x) -> x.writeTo(out)); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java index 826b0cbfa3498..e8ccae3429001 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.core.util; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.xpack.esql.core.expression.Attribute; @@ 
-49,4 +50,13 @@ public interface PlanStreamInput { A readEsFieldWithCache() throws IOException; String readCachedString() throws IOException; + + static String readCachedStringWithVersionCheck(StreamInput planStreamInput) throws IOException { + if (planStreamInput.getTransportVersion().before(TransportVersions.ESQL_CACHED_STRING_SERIALIZATION)) { + return planStreamInput.readString(); + } + return ((PlanStreamInput) planStreamInput).readCachedString(); + } + + String readOptionalCachedString() throws IOException; } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java index e4797411c3796..fb4af33d2fd60 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.esql.core.util; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.type.EsField; @@ -33,4 +35,14 @@ public interface PlanStreamOutput { boolean writeEsFieldCacheHeader(EsField field) throws IOException; void writeCachedString(String field) throws IOException; + + static void writeCachedStringWithVersionCheck(StreamOutput planStreamOutput, String string) throws IOException { + if (planStreamOutput.getTransportVersion().before(TransportVersions.ESQL_CACHED_STRING_SERIALIZATION)) { + planStreamOutput.writeString(string); + } else { + ((PlanStreamOutput) planStreamOutput).writeCachedString(string); + } + } + + void writeOptionalCachedString(String str) throws IOException; } diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/FieldAttributeTestUtils.java 
b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/FieldAttributeTestUtils.java index 1662b7f973c9d..c7e5056ed0267 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/FieldAttributeTestUtils.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/FieldAttributeTestUtils.java @@ -12,9 +12,9 @@ import org.elasticsearch.xpack.esql.core.type.EsField; public class FieldAttributeTestUtils { - public static final FieldAttribute newFieldAttributeWithType( + public static FieldAttribute newFieldAttributeWithType( Source source, - FieldAttribute parent, + String parentName, String name, DataType type, EsField field, @@ -22,6 +22,6 @@ public static final FieldAttribute newFieldAttributeWithType( NameId id, boolean synthetic ) { - return new FieldAttribute(source, parent, name, type, field, nullability, id, synthetic); + return new FieldAttribute(source, parentName, name, type, field, nullability, id, synthetic); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index fe7b945a9b3c1..b18f58b0a43cb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -228,13 +228,13 @@ public static List mappingAsAttributes(Source source, Map list, Source source, FieldAttribute parent, Map mapping) { + private static void mappingAsAttributes(List list, Source source, String parentName, Map mapping) { for (Map.Entry entry : mapping.entrySet()) { String name = entry.getKey(); EsField t = entry.getValue(); if (t != null) { - name = parent == null ? name : parent.fieldName() + "." + name; + name = parentName == null ? name : parentName + "." 
+ name; var fieldProperties = t.getProperties(); var type = t.getDataType().widenSmallNumeric(); // due to a bug also copy the field since the Attribute hierarchy extracts the data type @@ -245,14 +245,14 @@ private static void mappingAsAttributes(List list, Source source, Fie FieldAttribute attribute = t instanceof UnsupportedEsField uef ? new UnsupportedAttribute(source, name, uef) - : new FieldAttribute(source, parent, name, t); + : new FieldAttribute(source, parentName, name, t); // primitive branch if (DataType.isPrimitive(type)) { list.add(attribute); } // allow compound object even if they are unknown if (fieldProperties.isEmpty() == false) { - mappingAsAttributes(list, source, attribute, fieldProperties); + mappingAsAttributes(list, source, attribute.name(), fieldProperties); } } } @@ -1252,7 +1252,7 @@ private Expression createIfDoesNotAlreadyExist( // NOTE: The name has to start with $$ to not break bwc with 8.15 - in that version, this is how we had to mark this as // synthetic to work around a bug. 
String unionTypedFieldName = Attribute.rawTemporaryName(fa.name(), "converted_to", resolvedField.getDataType().typeName()); - FieldAttribute unionFieldAttribute = new FieldAttribute(fa.source(), fa.parent(), unionTypedFieldName, resolvedField, true); + FieldAttribute unionFieldAttribute = new FieldAttribute(fa.source(), fa.parentName(), unionTypedFieldName, resolvedField, true); int existingIndex = unionFieldAttributes.indexOf(unionFieldAttribute); if (existingIndex >= 0) { // Do not generate multiple name/type combinations with different IDs @@ -1281,7 +1281,7 @@ private Expression typeSpecificConvert(AbstractConvertFunction convert, Source s FieldAttribute originalFieldAttr = (FieldAttribute) convert.field(); FieldAttribute resolvedAttr = new FieldAttribute( source, - originalFieldAttr.parent(), + originalFieldAttr.parentName(), originalFieldAttr.name(), field, originalFieldAttr.nullable(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java index 2c709de7717ce..d372eddb961ae 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.capabilities.Unresolvable; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -29,6 +30,9 @@ import java.io.IOException; import java.util.Objects; +import static 
org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; + /** * Unsupported attribute meaning an attribute that has been found yet cannot be used (hence why UnresolvedAttribute * cannot be used) expect in special conditions (currently only in projections to allow it to flow through @@ -63,11 +67,11 @@ public UnsupportedAttribute(Source source, String name, UnsupportedEsField field this(source, name, field, null); } - public UnsupportedAttribute(Source source, String name, UnsupportedEsField field, String customMessage) { + public UnsupportedAttribute(Source source, String name, UnsupportedEsField field, @Nullable String customMessage) { this(source, name, field, customMessage, null); } - public UnsupportedAttribute(Source source, String name, UnsupportedEsField field, String customMessage, NameId id) { + public UnsupportedAttribute(Source source, String name, UnsupportedEsField field, @Nullable String customMessage, @Nullable NameId id) { super(source, null, name, field, Nullability.TRUE, id, false); this.hasCustomMessage = customMessage != null; this.message = customMessage == null ? errorMessage(name(), field) : customMessage; @@ -76,7 +80,7 @@ public UnsupportedAttribute(Source source, String name, UnsupportedEsField field private UnsupportedAttribute(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readCachedString(), + readCachedStringWithVersionCheck(in), in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ES_FIELD_CACHED_SERIALIZATION) || in.getTransportVersion().isPatchFrom(TransportVersions.V_8_15_2) ? 
EsField.readFrom(in) : new UnsupportedEsField(in), in.readOptionalString(), @@ -88,7 +92,7 @@ private UnsupportedAttribute(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { if (((PlanStreamOutput) out).writeAttributeCacheHeader(this)) { Source.EMPTY.writeTo(out); - ((PlanStreamOutput) out).writeCachedString(name()); + writeCachedStringWithVersionCheck(out, name()); if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ES_FIELD_CACHED_SERIALIZATION) || out.getTransportVersion().isPatchFrom(TransportVersions.V_8_15_2)) { field().writeTo(out); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java index 9003cbec12d1e..1e1cc3b86a9d5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java @@ -37,6 +37,8 @@ import java.util.Map; import java.util.function.LongFunction; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; + /** * A customized stream input used to deserialize ESQL physical plan fragments. Complements stream * input with methods that read plan nodes, Attributes, Expressions, etc. 
@@ -224,7 +226,7 @@ public A readEsFieldWithCache() throws IOException { // it's safe to cast to int, since the max value for this is {@link PlanStreamOutput#MAX_SERIALIZED_ATTRIBUTES} int cacheId = Math.toIntExact(readZLong()); if (cacheId < 0) { - String className = readCachedString(); + String className = readCachedStringWithVersionCheck(this); Writeable.Reader reader = EsField.getReader(className); cacheId = -1 - cacheId; EsField result = reader.read(this); @@ -234,7 +236,7 @@ public A readEsFieldWithCache() throws IOException { return (A) esFieldFromCache(cacheId); } } else { - String className = readCachedString(); + String className = readCachedStringWithVersionCheck(this); Writeable.Reader reader = EsField.getReader(className); return (A) reader.read(this); } @@ -245,9 +247,6 @@ public A readEsFieldWithCache() throws IOException { */ @Override public String readCachedString() throws IOException { - if (getTransportVersion().before(TransportVersions.ESQL_CACHED_STRING_SERIALIZATION)) { - return readString(); - } int cacheId = Math.toIntExact(readZLong()); if (cacheId < 0) { String string = readString(); @@ -259,6 +258,11 @@ public String readCachedString() throws IOException { } } + @Override + public String readOptionalCachedString() throws IOException { + return readBoolean() ? 
readCachedString() : null; + } + private EsField esFieldFromCache(int id) throws IOException { EsField field = esFieldsCache[id]; if (field == null) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java index b633b10122eb3..615c4266620c7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java @@ -30,6 +30,8 @@ import java.util.IdentityHashMap; import java.util.Map; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; + /** * A customized stream output used to serialize ESQL physical plan fragments. Complements stream * output with methods that write plan nodes, Attributes, Expressions, etc. @@ -195,7 +197,7 @@ public boolean writeEsFieldCacheHeader(EsField field) throws IOException { cacheId = cacheEsField(field); writeZLong(-1 - cacheId); } - writeCachedString(field.getWriteableName()); + writeCachedStringWithVersionCheck(this, field.getWriteableName()); return true; } @@ -207,10 +209,6 @@ public boolean writeEsFieldCacheHeader(EsField field) throws IOException { */ @Override public void writeCachedString(String string) throws IOException { - if (getTransportVersion().before(TransportVersions.ESQL_CACHED_STRING_SERIALIZATION)) { - writeString(string); - return; - } Integer cacheId = stringCache.get(string); if (cacheId != null) { writeZLong(cacheId); @@ -226,6 +224,16 @@ public void writeCachedString(String string) throws IOException { writeString(string); } + @Override + public void writeOptionalCachedString(String str) throws IOException { + if (str == null) { + writeBoolean(false); + } else { + writeBoolean(true); + writeCachedString(str); + } + } + private Integer esFieldIdFromCache(EsField field) { return 
cachedEsFields.get(field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java index 951fc7ad1cf29..eb72009638396 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java @@ -112,7 +112,12 @@ private static List flatten(Source source, Map mappi EsField t = entry.getValue(); if (t != null) { - FieldAttribute f = new FieldAttribute(source, parent, parent != null ? parent.name() + "." + name : name, t); + FieldAttribute f = new FieldAttribute( + source, + parent != null ? parent.name() : null, + parent != null ? parent.name() + "." + name : name, + t + ); list.add(f); // object or nested if (t.getProperties().isEmpty() == false) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FieldAttributeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FieldAttributeTests.java index e8f0333791844..6b2040f58f84c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FieldAttributeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FieldAttributeTests.java @@ -20,7 +20,7 @@ public class FieldAttributeTests extends AbstractAttributeTestCase { public static FieldAttribute createFieldAttribute(int maxDepth, boolean onlyRepresentable) { Source source = Source.EMPTY; - FieldAttribute parent = maxDepth == 0 || randomBoolean() ? null : createFieldAttribute(maxDepth - 1, onlyRepresentable); + String parentName = maxDepth == 0 || randomBoolean() ? null : randomAlphaOfLength(3); String name = randomAlphaOfLength(5); DataType type = onlyRepresentable ? 
randomValueOtherThanMany(t -> false == DataType.isRepresentable(t), () -> randomFrom(DataType.types())) @@ -28,7 +28,7 @@ public static FieldAttribute createFieldAttribute(int maxDepth, boolean onlyRepr EsField field = AbstractEsFieldTypeTests.randomAnyEsField(maxDepth); Nullability nullability = randomFrom(Nullability.values()); boolean synthetic = randomBoolean(); - return newFieldAttributeWithType(source, parent, name, type, field, nullability, new NameId(), synthetic); + return newFieldAttributeWithType(source, parentName, name, type, field, nullability, new NameId(), synthetic); } @Override @@ -39,20 +39,20 @@ protected FieldAttribute create() { @Override protected FieldAttribute mutate(FieldAttribute instance) { Source source = instance.source(); - FieldAttribute parent = instance.parent(); + String parentName = instance.parentName(); String name = instance.name(); DataType type = instance.dataType(); EsField field = instance.field(); Nullability nullability = instance.nullable(); boolean synthetic = instance.synthetic(); switch (between(0, 5)) { - case 0 -> parent = randomValueOtherThan(parent, () -> randomBoolean() ? null : createFieldAttribute(2, false)); + case 0 -> parentName = randomValueOtherThan(parentName, () -> randomBoolean() ? 
null : randomAlphaOfLength(2)); case 1 -> name = randomAlphaOfLength(name.length() + 1); case 2 -> type = randomValueOtherThan(type, () -> randomFrom(DataType.types())); case 3 -> field = randomValueOtherThan(field, () -> AbstractEsFieldTypeTests.randomAnyEsField(3)); case 4 -> nullability = randomValueOtherThan(nullability, () -> randomFrom(Nullability.values())); case 5 -> synthetic = false == synthetic; } - return newFieldAttributeWithType(source, parent, name, type, field, nullability, new NameId(), synthetic); + return newFieldAttributeWithType(source, parentName, name, type, field, nullability, new NameId(), synthetic); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/index/EsIndexSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/index/EsIndexSerializationTests.java index 687b83370f571..82dd5a88ffaf1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/index/EsIndexSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/index/EsIndexSerializationTests.java @@ -182,4 +182,51 @@ private void testManyTypeConflicts(boolean withParent, ByteSizeValue expected) t assertThat(ByteSizeValue.ofBytes(out.bytes().length()), byteSizeEquals(expected)); } } + + public static EsIndex deeplyNestedIndex(int depth, int childrenPerLevel) { + String rootFieldName = "root"; + Map fields = Map.of(rootFieldName, fieldWithRecursiveChildren(depth, childrenPerLevel, rootFieldName)); + + return new EsIndex("deeply-nested", fields); + } + + private static EsField fieldWithRecursiveChildren(int depth, int childrenPerLevel, String name) { + assert depth >= 1; + + Map children = new TreeMap<>(); + String childName; + if (depth == 1) { + for (int i = 0; i < childrenPerLevel; i++) { + childName = "leaf" + i; + children.put(childName, new EsField(childName, DataType.KEYWORD, Map.of(), true)); + } + } else { + for (int i = 0; i < childrenPerLevel; i++) { + childName = 
"level" + depth + "child" + i; + children.put(childName, fieldWithRecursiveChildren(depth - 1, childrenPerLevel, childName)); + } + } + + return new EsField(name, DataType.OBJECT, children, false); + } + + /** + * Test de-/serialization and size on the wire for an index that has multiple levels of children: + * A single root with 9 children, each of which has 9 children etc. 6 levels deep. + */ + public void testDeeplyNestedFields() throws IOException { + ByteSizeValue expectedSize = ByteSizeValue.ofBytes(9425494); + /* + * History: + * 9425494b - string serialization #112929 + */ + + int depth = 6; + int childrenPerLevel = 9; + + try (BytesStreamOutput out = new BytesStreamOutput(); var pso = new PlanStreamOutput(out, null)) { + deeplyNestedIndex(depth, childrenPerLevel).writeTo(pso); + assertThat(ByteSizeValue.ofBytes(out.bytes().length()), byteSizeEquals(expectedSize)); + } + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java index 33252b9dbaaa3..d3e1710a715af 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.esql.Column; import org.elasticsearch.xpack.esql.core.InvalidArgumentException; import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; @@ -44,7 +43,6 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.nullValue; import static 
org.hamcrest.Matchers.sameInstance; public class PlanStreamOutputTests extends ESTestCase { @@ -118,26 +116,13 @@ public void testWriteAttributeMultipleTimes() throws IOException { for (int i = 0; i < occurrences; i++) { planStream.writeNamedWriteable(attribute); } - int depth = 0; - Attribute parent = attribute; - while (parent != null) { - depth++; - parent = parent instanceof FieldAttribute f ? f.parent() : null; - } - assertThat(planStream.cachedAttributes.size(), is(depth)); + assertThat(planStream.cachedAttributes.size(), is(1)); try (PlanStreamInput in = new PlanStreamInput(out.bytes().streamInput(), REGISTRY, configuration)) { Attribute first = in.readNamedWriteable(Attribute.class); for (int i = 1; i < occurrences; i++) { Attribute next = in.readNamedWriteable(Attribute.class); assertThat(first, sameInstance(next)); } - for (int i = 0; i < depth; i++) { - assertThat(first, equalTo(attribute)); - first = first instanceof FieldAttribute f ? f.parent() : null; - attribute = attribute instanceof FieldAttribute f ? f.parent() : null; - } - assertThat(first, is(nullValue())); - assertThat(attribute, is(nullValue())); } } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSinkExecSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSinkExecSerializationTests.java index 1f52795dbacd7..5989c0de6b61d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSinkExecSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSinkExecSerializationTests.java @@ -80,20 +80,67 @@ public void testManyTypeConflicts() throws IOException { * See {@link #testManyTypeConflicts(boolean, ByteSizeValue)} for more. 
*/ public void testManyTypeConflictsWithParent() throws IOException { - testManyTypeConflicts(true, ByteSizeValue.ofBytes(2774214)); + testManyTypeConflicts(true, ByteSizeValue.ofBytes(2774192)); /* * History: * 2 gb+ - start * 43.3mb - Cache attribute subclasses #111447 * 5.6mb - shorten error messages for UnsupportedAttributes #111973 * 3.1mb - cache EsFields #112008 - * 2.6mb - string serialization #112929 + * 2774214b - string serialization #112929 + * 2774192b - remove field attribute #112881 */ } + private void testManyTypeConflicts(boolean withParent, ByteSizeValue expected) throws IOException { + EsIndex index = EsIndexSerializationTests.indexWithManyConflicts(withParent); + testSerializePlanWithIndex(index, expected); + } + + /** + * Test the size of serializing a plan like + * FROM index | LIMIT 10 + * with a single root field that has many children, grandchildren etc. + */ + public void testDeeplyNestedFields() throws IOException { + ByteSizeValue expected = ByteSizeValue.ofBytes(47252411); + /* + * History: + * 48223371b - string serialization #112929 + * 47252411b - remove field attribute #112881 + */ + + int depth = 6; + int childrenPerLevel = 8; + + EsIndex index = EsIndexSerializationTests.deeplyNestedIndex(depth, childrenPerLevel); + testSerializePlanWithIndex(index, expected); + } + /** - * Test the size of serializing a plan with many conflicts. Callers of - * this method intentionally use a very precise size for the serialized + * Test the size of serializing a plan like + * FROM index | LIMIT 10 | KEEP one_single_field + * with a single root field that has many children, grandchildren etc. 
+ */ + public void testDeeplyNestedFieldsKeepOnlyOne() throws IOException { + ByteSizeValue expected = ByteSizeValue.ofBytes(9425806); + /* + * History: + * 9426058b - string serialization #112929 + * 9425806b - remove field attribute #112881 + */ + + int depth = 6; + int childrenPerLevel = 9; + + EsIndex index = EsIndexSerializationTests.deeplyNestedIndex(depth, childrenPerLevel); + testSerializePlanWithIndex(index, expected, false); + } + + /** + * Test the size of serializing the physical plan that will be sent to a data node. + * The plan corresponds to `FROM index | LIMIT 10`. + * Callers of this method intentionally use a very precise size for the serialized * data so a programmer making changes has to think when this size changes. *

    * In general, shrinking the over the wire size is great and the precise @@ -108,10 +155,14 @@ public void testManyTypeConflictsWithParent() throws IOException { * ESQL impossible to use at all for big mappings with many conflicts. *

    */ - private void testManyTypeConflicts(boolean withParent, ByteSizeValue expected) throws IOException { - EsIndex index = EsIndexSerializationTests.indexWithManyConflicts(withParent); - List attributes = Analyzer.mappingAsAttributes(randomSource(), index.mapping()); - EsRelation relation = new EsRelation(randomSource(), index, attributes, IndexMode.STANDARD); + private void testSerializePlanWithIndex(EsIndex index, ByteSizeValue expected) throws IOException { + testSerializePlanWithIndex(index, expected, true); + } + + private void testSerializePlanWithIndex(EsIndex index, ByteSizeValue expected, boolean keepAllFields) throws IOException { + List allAttributes = Analyzer.mappingAsAttributes(randomSource(), index.mapping()); + List keepAttributes = keepAllFields ? allAttributes : List.of(allAttributes.get(0)); + EsRelation relation = new EsRelation(randomSource(), index, keepAttributes, IndexMode.STANDARD); Limit limit = new Limit(randomSource(), new Literal(randomSource(), 10, DataType.INTEGER), relation); Project project = new Project(randomSource(), limit, limit.output()); FragmentExec fragmentExec = new FragmentExec(project); From cfdc65482b24c69c1b7efad59cb0efac96947beb Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Thu, 17 Oct 2024 15:41:56 +0300 Subject: [PATCH 183/449] Fix allowed warnings (#114991) Fixes #114961 --- .../test/30_logsdb_default_mapping.yml | 32 +++++++++---------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/30_logsdb_default_mapping.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/30_logsdb_default_mapping.yml index 3f2bca2e4bcd9..2f320c2cad966 100644 --- a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/30_logsdb_default_mapping.yml +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/30_logsdb_default_mapping.yml @@ -291,7 +291,7 @@ 
create logsdb data stream with custom sorting without host.name: - do: allowed_warnings: - - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + - "index template [logs-template] has index patterns [logs-http-prod] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" indices.put_index_template: name: logs-template body: @@ -341,7 +341,7 @@ create logsdb data stream with custom sorting and host object: - do: allowed_warnings: - - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + - "index template [logs-template] has index patterns [logs-nginx-prod] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" indices.put_index_template: name: logs-template body: @@ -400,7 +400,7 @@ create logsdb data stream with custom sorting and dynamically mapped host.name: - do: allowed_warnings: - - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + - "index template [logs-template] has index patterns [logs-kafka-qa] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" indices.put_index_template: name: logs-template body: @@ -465,7 +465,7 @@ create logsdb data stream with custom sorting and host.name object: - do: 
allowed_warnings: - - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + - "index template [logs-template] has index patterns [logs-nginx-qa] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" indices.put_index_template: name: logs-template body: @@ -521,7 +521,7 @@ create logsdb data stream with default sorting on malformed host.name: - do: allowed_warnings: - - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + - "index template [logs-template] has index patterns [logs-win-prod] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" indices.put_index_template: name: logs-template body: @@ -585,7 +585,7 @@ create logsdb data stream with custom sorting and host.name date field: - do: allowed_warnings: - - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + - "index template [logs-template] has index patterns [logs-http-prod] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" indices.put_index_template: name: logs-template body: @@ -638,7 +638,7 @@ create logsdb data stream with custom sorting and missing host.name field mappin - do: allowed_warnings: - - "index template [logs-template] has index patterns 
[logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + - "index template [logs-template] has index patterns [logs-http-qa] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" indices.put_index_template: name: logs-template body: @@ -690,7 +690,7 @@ create logsdb data stream with custom sorting and host.name field without doc va - do: allowed_warnings: - - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + - "index template [logs-template] has index patterns [logs-http-dev] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" indices.put_index_template: name: logs-template body: @@ -732,7 +732,7 @@ create logsdb data stream with incompatible ignore_above on host.name: - do: allowed_warnings: - - "index template [logsdb-index-template-ignore-above] has index patterns [logsdb-ignore-above] matching patterns from existing older templates [global]" + - "index template [logsdb-index-template-ignore-above] has index patterns [logsdb-ignore-above] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logsdb-index-template-ignore-above] will take precedence during new index creation" indices.put_index_template: name: logsdb-index-template-ignore-above body: @@ -780,7 +780,7 @@ create logsdb data stream with no sorting and host.name as text: - do: allowed_warnings: - - "index template [logsdb-index-template-non-keyword] has index patterns [logsdb-non-keyword] matching patterns from existing older templates 
[global]" + - "index template [logsdb-index-template-non-keyword] has index patterns [logsdb-non-keyword] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logsdb-index-template-non-keyword] will take precedence during new index creation" indices.put_index_template: name: logsdb-index-template-non-keyword body: @@ -814,7 +814,7 @@ create logsdb data stream without index sorting and ignore_above on host.name: - do: allowed_warnings: - - "index template [logsdb-index-template-ignore-above-override] has index patterns [logsdb-ignore-above-override] matching patterns from existing older templates [global]" + - "index template [logsdb-index-template-ignore-above-override] has index patterns [logsdb-ignore-above-override] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logsdb-index-template-ignore-above-override] will take precedence during new index creation" indices.put_index_template: name: logsdb-index-template-ignore-above-override body: @@ -860,7 +860,7 @@ create logsdb data stream with host.name as alias and sorting on it: - do: allowed_warnings: - - "index template [logsdb-index-template-alias] has index patterns [logsdb-alias] matching patterns from existing older templates [global]" + - "index template [logsdb-index-template-alias] has index patterns [logsdb-alias] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logsdb-index-template-alias] will take precedence during new index creation" indices.put_index_template: name: logsdb-index-template-alias body: @@ -898,7 +898,7 @@ create logsdb data stream with multi-fields on host.name: - do: allowed_warnings: - - "index template [logsdb-index-template-multi-fields] has index patterns [logsdb-multi-fields] matching patterns from existing older templates [global]" + - "index template [logsdb-index-template-multi-fields] has index patterns 
[logsdb-multi-fields] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logsdb-index-template-multi-fields] will take precedence during new index creation" indices.put_index_template: name: logsdb-index-template-multi-fields body: @@ -945,7 +945,7 @@ create logsdb data stream with multi-fields on host.name and no sorting: - do: allowed_warnings: - - "index template [ logsdb-no-sort-multi-fields-template ] has index patterns [logsdb-no-sort-multi-fields] matching patterns from existing older templates [global]" + - "index template [logsdb-no-sort-multi-fields-template] has index patterns [logsdb-no-sort-multi-fields] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logsdb-no-sort-multi-fields-template] will take precedence during new index creation" indices.put_index_template: name: logsdb-no-sort-multi-fields-template body: @@ -980,7 +980,7 @@ create logsdb data stream with custom empty sorting: - do: allowed_warnings: - - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + - "index template [logs-template] has index patterns [logs-http-empty] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" indices.put_index_template: name: logs-template body: @@ -1027,7 +1027,7 @@ create logsdb data stream with custom sorting on timestamp: - do: allowed_warnings: - - "index template [logs-template] has index patterns [logs-*-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" + - "index template [logs-template] has index patterns [logs-http-dev] matching patterns from 
existing older templates [global] with patterns (global => [*]); this template [logs-template] will take precedence during new index creation" indices.put_index_template: name: logs-template body: From 17ecb66a0653bc561f6e44a3e688942e304664d5 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Thu, 17 Oct 2024 14:59:34 +0200 Subject: [PATCH 184/449] Revert "ESQL: Remove parent from FieldAttribute (#112881)" (#115006) This reverts commit caa16b4de0f18dd0e2f8725fe029dba5be6815a2. --- docs/changelog/112881.yaml | 5 - .../org/elasticsearch/TransportVersions.java | 1 - .../xpack/esql/core/expression/Alias.java | 7 +- .../xpack/esql/core/expression/Attribute.java | 7 +- .../esql/core/expression/FieldAttribute.java | 97 +++++++------------ .../core/expression/MetadataAttribute.java | 7 +- .../esql/core/expression/NamedExpression.java | 5 +- .../core/expression/ReferenceAttribute.java | 10 +- .../esql/core/expression/TypedAttribute.java | 10 +- .../core/expression/UnresolvedAttribute.java | 3 +- .../xpack/esql/core/type/DataType.java | 8 +- .../xpack/esql/core/type/DateEsField.java | 9 +- .../xpack/esql/core/type/EsField.java | 9 +- .../esql/core/type/InvalidMappedField.java | 9 +- .../xpack/esql/core/type/KeywordEsField.java | 8 +- .../esql/core/type/MultiTypeEsField.java | 9 +- .../xpack/esql/core/type/TextEsField.java | 8 +- .../esql/core/type/UnsupportedEsField.java | 13 ++- .../xpack/esql/core/util/PlanStreamInput.java | 10 -- .../esql/core/util/PlanStreamOutput.java | 12 --- .../expression/FieldAttributeTestUtils.java | 6 +- .../xpack/esql/analysis/Analyzer.java | 12 +-- .../function/UnsupportedAttribute.java | 12 +-- .../xpack/esql/io/stream/PlanStreamInput.java | 14 +-- .../esql/io/stream/PlanStreamOutput.java | 18 +--- .../xpack/esql/plan/logical/EsRelation.java | 7 +- .../function/FieldAttributeTests.java | 10 +- .../esql/index/EsIndexSerializationTests.java | 47 --------- .../esql/io/stream/PlanStreamOutputTests.java | 17 +++- 
.../ExchangeSinkExecSerializationTests.java | 67 ++----------- 30 files changed, 134 insertions(+), 323 deletions(-) delete mode 100644 docs/changelog/112881.yaml diff --git a/docs/changelog/112881.yaml b/docs/changelog/112881.yaml deleted file mode 100644 index a8a0d542f8201..0000000000000 --- a/docs/changelog/112881.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 112881 -summary: "ESQL: Remove parent from `FieldAttribute`" -area: ES|QL -type: enhancement -issues: [] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index bf61752a1d771..dcf6f7aebdc65 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -175,7 +175,6 @@ static TransportVersion def(int id) { public static final TransportVersion ML_INFERENCE_ATTACH_TO_EXISTSING_DEPLOYMENT = def(8_771_00_0); public static final TransportVersion CONVERT_FAILURE_STORE_OPTIONS_TO_SELECTOR_OPTIONS_INTERNALLY = def(8_772_00_0); public static final TransportVersion REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_773_00_0); - public static final TransportVersion ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED = def(8_774_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Alias.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Alias.java index 1f7d03ba9d905..e33f9b1c20527 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Alias.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Alias.java @@ -9,7 +9,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -43,11 +42,11 @@ public Alias(Source source, String name, Expression child) { this(source, name, child, null); } - public Alias(Source source, String name, Expression child, @Nullable NameId id) { + public Alias(Source source, String name, Expression child, NameId id) { this(source, name, child, id, false); } - public Alias(Source source, String name, Expression child, @Nullable NameId id, boolean synthetic) { + public Alias(Source source, String name, Expression child, NameId id, boolean synthetic) { super(source, name, singletonList(child), id, synthetic); this.child = child; } @@ -56,7 +55,7 @@ public Alias(Source source, String name, Expression child, @Nullable NameId id, /** * Old constructor from when this had a qualifier string. Still needed to not break serialization. 
*/ - private Alias(Source source, String name, String qualifier, Expression child, @Nullable NameId id, boolean synthetic) { + private Alias(Source source, String name, String qualifier, Expression child, NameId id, boolean synthetic) { this(source, name, child, id, synthetic); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java index 45f42a754910d..05c414298fd33 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.core.expression; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -42,15 +41,15 @@ public static List getNamedWriteables() { // can the attr be null - typically used in JOINs private final Nullability nullability; - public Attribute(Source source, String name, @Nullable NameId id) { + public Attribute(Source source, String name, NameId id) { this(source, name, Nullability.TRUE, id); } - public Attribute(Source source, String name, Nullability nullability, @Nullable NameId id) { + public Attribute(Source source, String name, Nullability nullability, NameId id) { this(source, name, nullability, id, false); } - public Attribute(Source source, String name, Nullability nullability, @Nullable NameId id, boolean synthetic) { + public Attribute(Source source, String name, Nullability nullability, NameId id, boolean synthetic) { super(source, name, emptyList(), id, synthetic); this.nullability = nullability; } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FieldAttribute.java 
b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FieldAttribute.java index 4076acdb7e7b8..767d2f45f90e4 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FieldAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FieldAttribute.java @@ -6,25 +6,21 @@ */ package org.elasticsearch.xpack.esql.core.expression; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; +import org.elasticsearch.xpack.esql.core.util.StringUtils; import java.io.IOException; import java.util.Objects; -import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; -import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; - /** * Attribute for an ES field. 
* To differentiate between the different type of fields this class offers: @@ -41,31 +37,32 @@ public class FieldAttribute extends TypedAttribute { FieldAttribute::readFrom ); - private final String parentName; + private final FieldAttribute parent; + private final String path; private final EsField field; public FieldAttribute(Source source, String name, EsField field) { this(source, null, name, field); } - public FieldAttribute(Source source, @Nullable String parentName, String name, EsField field) { - this(source, parentName, name, field, Nullability.TRUE, null, false); + public FieldAttribute(Source source, FieldAttribute parent, String name, EsField field) { + this(source, parent, name, field, Nullability.TRUE, null, false); } - public FieldAttribute(Source source, @Nullable String parentName, String name, EsField field, boolean synthetic) { - this(source, parentName, name, field, Nullability.TRUE, null, synthetic); + public FieldAttribute(Source source, FieldAttribute parent, String name, EsField field, boolean synthetic) { + this(source, parent, name, field, Nullability.TRUE, null, synthetic); } public FieldAttribute( Source source, - @Nullable String parentName, + FieldAttribute parent, String name, EsField field, Nullability nullability, - @Nullable NameId id, + NameId id, boolean synthetic ) { - this(source, parentName, name, field.getDataType(), field, nullability, id, synthetic); + this(source, parent, name, field.getDataType(), field, nullability, id, synthetic); } /** @@ -74,16 +71,17 @@ public FieldAttribute( */ FieldAttribute( Source source, - @Nullable String parentName, + FieldAttribute parent, String name, DataType type, EsField field, Nullability nullability, - @Nullable NameId id, + NameId id, boolean synthetic ) { super(source, name, type, nullability, id, synthetic); - this.parentName = parentName; + this.path = parent != null ? 
parent.name() : StringUtils.EMPTY; + this.parent = parent; this.field = field; } @@ -93,16 +91,16 @@ public FieldAttribute( */ private FieldAttribute( Source source, - @Nullable String parentName, + FieldAttribute parent, String name, DataType type, EsField field, - @Nullable String qualifier, + String qualifier, Nullability nullability, - @Nullable NameId id, + NameId id, boolean synthetic ) { - this(source, parentName, name, type, field, nullability, id, synthetic); + this(source, parent, name, type, field, nullability, id, synthetic); } private FieldAttribute(StreamInput in) throws IOException { @@ -116,8 +114,8 @@ private FieldAttribute(StreamInput in) throws IOException { */ this( Source.readFrom((StreamInput & PlanStreamInput) in), - readParentName(in), - readCachedStringWithVersionCheck(in), + in.readOptionalWriteable(FieldAttribute::readFrom), + ((PlanStreamInput) in).readCachedString(), DataType.readFrom(in), EsField.readFrom(in), in.readOptionalString(), @@ -131,8 +129,8 @@ private FieldAttribute(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { if (((PlanStreamOutput) out).writeAttributeCacheHeader(this)) { Source.EMPTY.writeTo(out); - writeParentName(out); - writeCachedStringWithVersionCheck(out, name()); + out.writeOptionalWriteable(parent); + ((PlanStreamOutput) out).writeCachedString(name()); dataType().writeTo(out); field.writeTo(out); // We used to write the qualifier here. We can still do if needed in the future. 
@@ -147,26 +145,6 @@ public static FieldAttribute readFrom(StreamInput in) throws IOException { return ((PlanStreamInput) in).readAttributeWithCache(FieldAttribute::new); } - private void writeParentName(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED)) { - ((PlanStreamOutput) out).writeOptionalCachedString(parentName); - } else { - // Previous versions only used the parent field attribute to retrieve the parent's name, so we can use just any - // fake FieldAttribute here as long as the name is correct. - FieldAttribute fakeParent = parentName() == null ? null : new FieldAttribute(Source.EMPTY, parentName(), field()); - out.writeOptionalWriteable(fakeParent); - } - } - - private static String readParentName(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED)) { - return ((PlanStreamInput) in).readOptionalCachedString(); - } - - FieldAttribute parent = in.readOptionalWriteable(FieldAttribute::readFrom); - return parent == null ? null : parent.name(); - } - @Override public String getWriteableName() { return ENTRY.name; @@ -174,22 +152,15 @@ public String getWriteableName() { @Override protected NodeInfo info() { - return NodeInfo.create( - this, - FieldAttribute::new, - parentName, - name(), - dataType(), - field, - (String) null, - nullable(), - id(), - synthetic() - ); + return NodeInfo.create(this, FieldAttribute::new, parent, name(), dataType(), field, (String) null, nullable(), id(), synthetic()); + } + + public FieldAttribute parent() { + return parent; } - public String parentName() { - return parentName; + public String path() { + return path; } /** @@ -203,7 +174,7 @@ public String fieldName() { if ((synthetic() || name().startsWith(SYNTHETIC_ATTRIBUTE_NAME_PREFIX)) == false) { return name(); } - return Strings.hasText(parentName) ? parentName + "." 
+ field.getName() : field.getName(); + return Strings.hasText(path) ? path + "." + field.getName() : field.getName(); } public EsField.Exact getExactInfo() { @@ -219,13 +190,13 @@ public FieldAttribute exactAttribute() { } private FieldAttribute innerField(EsField type) { - return new FieldAttribute(source(), name(), name() + "." + type.getName(), type, nullable(), id(), synthetic()); + return new FieldAttribute(source(), this, name() + "." + type.getName(), type, nullable(), id(), synthetic()); } @Override protected Attribute clone(Source source, String name, DataType type, Nullability nullability, NameId id, boolean synthetic) { // Ignore `type`, this must be the same as the field's type. - return new FieldAttribute(source, parentName, name, field, nullability, id, synthetic); + return new FieldAttribute(source, parent, name, field, nullability, id, synthetic); } @Override @@ -235,13 +206,13 @@ public Attribute withDataType(DataType type) { @Override public int hashCode() { - return Objects.hash(super.hashCode(), parentName, field); + return Objects.hash(super.hashCode(), path, field); } @Override public boolean equals(Object obj) { return super.equals(obj) - && Objects.equals(parentName, ((FieldAttribute) obj).parentName) + && Objects.equals(path, ((FieldAttribute) obj).path) && Objects.equals(field, ((FieldAttribute) obj).field); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java index 3641812cd6cad..539c55ba341cf 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.IgnoredFieldMapper; @@ -60,7 +59,7 @@ public MetadataAttribute( String name, DataType dataType, Nullability nullability, - @Nullable NameId id, + NameId id, boolean synthetic, boolean searchable ) { @@ -80,9 +79,9 @@ private MetadataAttribute( Source source, String name, DataType dataType, - @Nullable String qualifier, + String qualifier, Nullability nullability, - @Nullable NameId id, + NameId id, boolean synthetic, boolean searchable ) { diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java index 3b018f09e5ebd..ba467910bed0d 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java @@ -8,7 +8,6 @@ import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.Source; import java.util.ArrayList; @@ -33,11 +32,11 @@ public static List getNamedWriteables() { private final NameId id; private final boolean synthetic; - public NamedExpression(Source source, String name, List children, @Nullable NameId id) { + public NamedExpression(Source source, String name, List children, NameId id) { this(source, name, children, id, false); } - public NamedExpression(Source source, String name, List children, @Nullable NameId id, boolean synthetic) { + public NamedExpression(Source source, String name, List children, NameId id, boolean synthetic) { super(source, children); this.name = name; 
this.id = id == null ? new NameId() : id; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ReferenceAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ReferenceAttribute.java index 3626c5d26f235..504e1eae8d880 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ReferenceAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ReferenceAttribute.java @@ -9,7 +9,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -32,14 +31,7 @@ public ReferenceAttribute(Source source, String name, DataType dataType) { this(source, name, dataType, Nullability.FALSE, null, false); } - public ReferenceAttribute( - Source source, - String name, - DataType dataType, - Nullability nullability, - @Nullable NameId id, - boolean synthetic - ) { + public ReferenceAttribute(Source source, String name, DataType dataType, Nullability nullability, NameId id, boolean synthetic) { super(source, name, dataType, nullability, id, synthetic); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypedAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypedAttribute.java index f8a041110798c..0350abef99992 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypedAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypedAttribute.java @@ -6,7 +6,6 @@ */ package 
org.elasticsearch.xpack.esql.core.expression; -import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -16,14 +15,7 @@ public abstract class TypedAttribute extends Attribute { private final DataType dataType; - protected TypedAttribute( - Source source, - String name, - DataType dataType, - Nullability nullability, - @Nullable NameId id, - boolean synthetic - ) { + protected TypedAttribute(Source source, String name, DataType dataType, Nullability nullability, NameId id, boolean synthetic) { super(source, name, nullability, id, synthetic); this.dataType = dataType; } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/UnresolvedAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/UnresolvedAttribute.java index a971a15a23c86..d8a35adcbffde 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/UnresolvedAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/UnresolvedAttribute.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.core.expression; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.capabilities.Unresolvable; import org.elasticsearch.xpack.esql.core.capabilities.UnresolvedException; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -34,7 +33,7 @@ public UnresolvedAttribute(Source source, String name, String unresolvedMessage) } @SuppressWarnings("this-escape") - public UnresolvedAttribute(Source source, String name, @Nullable NameId id, String unresolvedMessage, Object resolutionMetadata) { + public UnresolvedAttribute(Source source, String name, NameId id, String unresolvedMessage, Object resolutionMetadata) { super(source, name, id); this.customMessage = unresolvedMessage != 
null; this.unresolvedMsg = unresolvedMessage == null ? errorMessage(name(), null) : unresolvedMessage; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java index 12699ca3ee720..cb1a7b2eb6fe0 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java @@ -14,6 +14,8 @@ import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; import org.elasticsearch.xpack.esql.core.plugin.EsqlCorePlugin; +import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; +import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.math.BigInteger; @@ -30,8 +32,6 @@ import static java.util.stream.Collectors.toMap; import static java.util.stream.Collectors.toUnmodifiableMap; -import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; -import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; public enum DataType { /** @@ -535,12 +535,12 @@ public DataType counter() { } public void writeTo(StreamOutput out) throws IOException { - writeCachedStringWithVersionCheck(out, typeName); + ((PlanStreamOutput) out).writeCachedString(typeName); } public static DataType readFrom(StreamInput in) throws IOException { // TODO: Use our normal enum serialization pattern - return readFrom(readCachedStringWithVersionCheck(in)); + return readFrom(((PlanStreamInput) in).readCachedString()); } /** diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DateEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DateEsField.java index 3a81ec2a6f17d..7c4b98c5af84e 100644 
--- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DateEsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DateEsField.java @@ -8,13 +8,12 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; +import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.Map; -import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; -import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; - /** * Information about a field in an ES index with the {@code date} type */ @@ -29,12 +28,12 @@ private DateEsField(String name, DataType dataType, Map propert } protected DateEsField(StreamInput in) throws IOException { - this(readCachedStringWithVersionCheck(in), DataType.DATETIME, in.readImmutableMap(EsField::readFrom), in.readBoolean()); + this(((PlanStreamInput) in).readCachedString(), DataType.DATETIME, in.readImmutableMap(EsField::readFrom), in.readBoolean()); } @Override public void writeContent(StreamOutput out) throws IOException { - writeCachedStringWithVersionCheck(out, getName()); + ((PlanStreamOutput) out).writeCachedString(getName()); out.writeMap(getProperties(), (o, x) -> x.writeTo(out)); out.writeBoolean(isAggregatable()); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/EsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/EsField.java index 47dadcbb11de2..6235176d82de6 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/EsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/EsField.java @@ -18,9 +18,6 @@ import java.util.Map; import java.util.Objects; -import static 
org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; -import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; - /** * Information about a field in an ES index. */ @@ -63,7 +60,7 @@ public EsField(String name, DataType esDataType, Map properties } public EsField(StreamInput in) throws IOException { - this.name = readCachedStringWithVersionCheck(in); + this.name = ((PlanStreamInput) in).readCachedString(); this.esDataType = readDataType(in); this.properties = in.readImmutableMap(EsField::readFrom); this.aggregatable = in.readBoolean(); @@ -71,7 +68,7 @@ public EsField(StreamInput in) throws IOException { } private DataType readDataType(StreamInput in) throws IOException { - String name = readCachedStringWithVersionCheck(in); + String name = ((PlanStreamInput) in).readCachedString(); if (in.getTransportVersion().before(TransportVersions.ESQL_NESTED_UNSUPPORTED) && name.equalsIgnoreCase("NESTED")) { /* * The "nested" data type existed in older versions of ESQL but was @@ -101,7 +98,7 @@ public void writeTo(StreamOutput out) throws IOException { * This needs to be overridden by subclasses for specific serialization */ public void writeContent(StreamOutput out) throws IOException { - writeCachedStringWithVersionCheck(out, name); + ((PlanStreamOutput) out).writeCachedString(name); esDataType.writeTo(out); out.writeMap(properties, (o, x) -> x.writeTo(out)); out.writeBoolean(aggregatable); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/InvalidMappedField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/InvalidMappedField.java index f83e4652ebebd..40825af56ccfe 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/InvalidMappedField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/InvalidMappedField.java @@ -10,6 +10,8 @@ import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; +import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; +import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.Map; @@ -18,9 +20,6 @@ import java.util.TreeMap; import java.util.stream.Collectors; -import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; -import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; - /** * Representation of field mapped differently across indices. * Used during mapping discovery only. @@ -55,7 +54,7 @@ private InvalidMappedField(String name, String errorMessage, Map types() { @@ -64,7 +63,7 @@ public Set types() { @Override public void writeContent(StreamOutput out) throws IOException { - writeCachedStringWithVersionCheck(out, getName()); + ((PlanStreamOutput) out).writeCachedString(getName()); out.writeString(errorMessage); out.writeMap(getProperties(), (o, x) -> x.writeTo(out)); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/KeywordEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/KeywordEsField.java index 8b88884a0ce17..48995bafec451 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/KeywordEsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/KeywordEsField.java @@ -8,6 +8,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; +import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.Collections; @@ -15,8 +17,6 @@ import java.util.Objects; import static 
org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; -import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; -import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; /** * Information about a field in an ES index with the {@code keyword} type. @@ -61,7 +61,7 @@ protected KeywordEsField( public KeywordEsField(StreamInput in) throws IOException { this( - readCachedStringWithVersionCheck(in), + ((PlanStreamInput) in).readCachedString(), KEYWORD, in.readImmutableMap(EsField::readFrom), in.readBoolean(), @@ -73,7 +73,7 @@ public KeywordEsField(StreamInput in) throws IOException { @Override public void writeContent(StreamOutput out) throws IOException { - writeCachedStringWithVersionCheck(out, getName()); + ((PlanStreamOutput) out).writeCachedString(getName()); out.writeMap(getProperties(), (o, x) -> x.writeTo(out)); out.writeBoolean(isAggregatable()); out.writeInt(precision); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/MultiTypeEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/MultiTypeEsField.java index 0d7f9ee425d6a..522cb682c0943 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/MultiTypeEsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/MultiTypeEsField.java @@ -10,6 +10,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; +import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.HashMap; @@ -17,9 +19,6 @@ import java.util.Objects; import java.util.Set; -import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; 
-import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; - /** * During IndexResolution it could occur that the same field is mapped to different types in different indices. * The class MultiTypeEfField.UnresolvedField holds that information and allows for later resolution of the field @@ -40,7 +39,7 @@ public MultiTypeEsField(String name, DataType dataType, boolean aggregatable, Ma protected MultiTypeEsField(StreamInput in) throws IOException { this( - readCachedStringWithVersionCheck(in), + ((PlanStreamInput) in).readCachedString(), DataType.readFrom(in), in.readBoolean(), in.readImmutableMap(i -> i.readNamedWriteable(Expression.class)) @@ -49,7 +48,7 @@ protected MultiTypeEsField(StreamInput in) throws IOException { @Override public void writeContent(StreamOutput out) throws IOException { - writeCachedStringWithVersionCheck(out, getName()); + ((PlanStreamOutput) out).writeCachedString(getName()); getDataType().writeTo(out); out.writeBoolean(isAggregatable()); out.writeMap(getIndexToConversionExpressions(), (o, v) -> out.writeNamedWriteable(v)); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/TextEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/TextEsField.java index ed0d32a7696eb..c6c494ef289bb 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/TextEsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/TextEsField.java @@ -10,6 +10,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Tuple; import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; +import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; +import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.Map; @@ -17,8 +19,6 @@ import static 
org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; -import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; -import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; /** * Information about a field in an es index with the {@code text} type. @@ -34,12 +34,12 @@ public TextEsField(String name, Map properties, boolean hasDocV } protected TextEsField(StreamInput in) throws IOException { - this(readCachedStringWithVersionCheck(in), in.readImmutableMap(EsField::readFrom), in.readBoolean(), in.readBoolean()); + this(((PlanStreamInput) in).readCachedString(), in.readImmutableMap(EsField::readFrom), in.readBoolean(), in.readBoolean()); } @Override public void writeContent(StreamOutput out) throws IOException { - writeCachedStringWithVersionCheck(out, getName()); + ((PlanStreamOutput) out).writeCachedString(getName()); out.writeMap(getProperties(), (o, x) -> x.writeTo(out)); out.writeBoolean(isAggregatable()); out.writeBoolean(isAlias()); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/UnsupportedEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/UnsupportedEsField.java index 02ce741243c20..980620cb98847 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/UnsupportedEsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/UnsupportedEsField.java @@ -8,15 +8,14 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; +import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.Map; import java.util.Objects; import java.util.TreeMap; -import static 
org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; -import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; - /** * Information about a field in an ES index that cannot be supported by ESQL. * All the subfields (properties) of an unsupported type are also be unsupported. @@ -38,8 +37,8 @@ public UnsupportedEsField(String name, String originalType, String inherited, Ma public UnsupportedEsField(StreamInput in) throws IOException { this( - readCachedStringWithVersionCheck(in), - readCachedStringWithVersionCheck(in), + ((PlanStreamInput) in).readCachedString(), + ((PlanStreamInput) in).readCachedString(), in.readOptionalString(), in.readImmutableMap(EsField::readFrom) ); @@ -47,8 +46,8 @@ public UnsupportedEsField(StreamInput in) throws IOException { @Override public void writeContent(StreamOutput out) throws IOException { - writeCachedStringWithVersionCheck(out, getName()); - writeCachedStringWithVersionCheck(out, getOriginalType()); + ((PlanStreamOutput) out).writeCachedString(getName()); + ((PlanStreamOutput) out).writeCachedString(getOriginalType()); out.writeOptionalString(getInherited()); out.writeMap(getProperties(), (o, x) -> x.writeTo(out)); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java index e8ccae3429001..826b0cbfa3498 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.core.util; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.xpack.esql.core.expression.Attribute; @@ -50,13 
+49,4 @@ public interface PlanStreamInput {
    A readEsFieldWithCache() throws IOException; String readCachedString() throws IOException; - - static String readCachedStringWithVersionCheck(StreamInput planStreamInput) throws IOException { - if (planStreamInput.getTransportVersion().before(TransportVersions.ESQL_CACHED_STRING_SERIALIZATION)) { - return planStreamInput.readString(); - } - return ((PlanStreamInput) planStreamInput).readCachedString(); - } - - String readOptionalCachedString() throws IOException; } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java index fb4af33d2fd60..e4797411c3796 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java @@ -7,8 +7,6 @@ package org.elasticsearch.xpack.esql.core.util; -import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.type.EsField; @@ -35,14 +33,4 @@ public interface PlanStreamOutput { boolean writeEsFieldCacheHeader(EsField field) throws IOException; void writeCachedString(String field) throws IOException; - - static void writeCachedStringWithVersionCheck(StreamOutput planStreamOutput, String string) throws IOException { - if (planStreamOutput.getTransportVersion().before(TransportVersions.ESQL_CACHED_STRING_SERIALIZATION)) { - planStreamOutput.writeString(string); - } else { - ((PlanStreamOutput) planStreamOutput).writeCachedString(string); - } - } - - void writeOptionalCachedString(String str) throws IOException; } diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/FieldAttributeTestUtils.java 
b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/FieldAttributeTestUtils.java index c7e5056ed0267..1662b7f973c9d 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/FieldAttributeTestUtils.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/FieldAttributeTestUtils.java @@ -12,9 +12,9 @@ import org.elasticsearch.xpack.esql.core.type.EsField; public class FieldAttributeTestUtils { - public static FieldAttribute newFieldAttributeWithType( + public static final FieldAttribute newFieldAttributeWithType( Source source, - String parentName, + FieldAttribute parent, String name, DataType type, EsField field, @@ -22,6 +22,6 @@ public static FieldAttribute newFieldAttributeWithType( NameId id, boolean synthetic ) { - return new FieldAttribute(source, parentName, name, type, field, nullability, id, synthetic); + return new FieldAttribute(source, parent, name, type, field, nullability, id, synthetic); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index b18f58b0a43cb..fe7b945a9b3c1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -228,13 +228,13 @@ public static List mappingAsAttributes(Source source, Map list, Source source, String parentName, Map mapping) { + private static void mappingAsAttributes(List list, Source source, FieldAttribute parent, Map mapping) { for (Map.Entry entry : mapping.entrySet()) { String name = entry.getKey(); EsField t = entry.getValue(); if (t != null) { - name = parentName == null ? name : parentName + "." + name; + name = parent == null ? name : parent.fieldName() + "." 
+ name; var fieldProperties = t.getProperties(); var type = t.getDataType().widenSmallNumeric(); // due to a bug also copy the field since the Attribute hierarchy extracts the data type @@ -245,14 +245,14 @@ private static void mappingAsAttributes(List list, Source source, Str FieldAttribute attribute = t instanceof UnsupportedEsField uef ? new UnsupportedAttribute(source, name, uef) - : new FieldAttribute(source, parentName, name, t); + : new FieldAttribute(source, parent, name, t); // primitive branch if (DataType.isPrimitive(type)) { list.add(attribute); } // allow compound object even if they are unknown if (fieldProperties.isEmpty() == false) { - mappingAsAttributes(list, source, attribute.name(), fieldProperties); + mappingAsAttributes(list, source, attribute, fieldProperties); } } } @@ -1252,7 +1252,7 @@ private Expression createIfDoesNotAlreadyExist( // NOTE: The name has to start with $$ to not break bwc with 8.15 - in that version, this is how we had to mark this as // synthetic to work around a bug. 
String unionTypedFieldName = Attribute.rawTemporaryName(fa.name(), "converted_to", resolvedField.getDataType().typeName()); - FieldAttribute unionFieldAttribute = new FieldAttribute(fa.source(), fa.parentName(), unionTypedFieldName, resolvedField, true); + FieldAttribute unionFieldAttribute = new FieldAttribute(fa.source(), fa.parent(), unionTypedFieldName, resolvedField, true); int existingIndex = unionFieldAttributes.indexOf(unionFieldAttribute); if (existingIndex >= 0) { // Do not generate multiple name/type combinations with different IDs @@ -1281,7 +1281,7 @@ private Expression typeSpecificConvert(AbstractConvertFunction convert, Source s FieldAttribute originalFieldAttr = (FieldAttribute) convert.field(); FieldAttribute resolvedAttr = new FieldAttribute( source, - originalFieldAttr.parentName(), + originalFieldAttr.parent(), originalFieldAttr.name(), field, originalFieldAttr.nullable(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java index d372eddb961ae..2c709de7717ce 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.capabilities.Unresolvable; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -30,9 +29,6 @@ import java.io.IOException; import java.util.Objects; -import static 
org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; -import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; - /** * Unsupported attribute meaning an attribute that has been found yet cannot be used (hence why UnresolvedAttribute * cannot be used) expect in special conditions (currently only in projections to allow it to flow through @@ -67,11 +63,11 @@ public UnsupportedAttribute(Source source, String name, UnsupportedEsField field this(source, name, field, null); } - public UnsupportedAttribute(Source source, String name, UnsupportedEsField field, @Nullable String customMessage) { + public UnsupportedAttribute(Source source, String name, UnsupportedEsField field, String customMessage) { this(source, name, field, customMessage, null); } - public UnsupportedAttribute(Source source, String name, UnsupportedEsField field, @Nullable String customMessage, @Nullable NameId id) { + public UnsupportedAttribute(Source source, String name, UnsupportedEsField field, String customMessage, NameId id) { super(source, null, name, field, Nullability.TRUE, id, false); this.hasCustomMessage = customMessage != null; this.message = customMessage == null ? errorMessage(name(), field) : customMessage; @@ -80,7 +76,7 @@ public UnsupportedAttribute(Source source, String name, UnsupportedEsField field private UnsupportedAttribute(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - readCachedStringWithVersionCheck(in), + ((PlanStreamInput) in).readCachedString(), in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ES_FIELD_CACHED_SERIALIZATION) || in.getTransportVersion().isPatchFrom(TransportVersions.V_8_15_2) ? 
EsField.readFrom(in) : new UnsupportedEsField(in), in.readOptionalString(), @@ -92,7 +88,7 @@ private UnsupportedAttribute(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { if (((PlanStreamOutput) out).writeAttributeCacheHeader(this)) { Source.EMPTY.writeTo(out); - writeCachedStringWithVersionCheck(out, name()); + ((PlanStreamOutput) out).writeCachedString(name()); if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ES_FIELD_CACHED_SERIALIZATION) || out.getTransportVersion().isPatchFrom(TransportVersions.V_8_15_2)) { field().writeTo(out); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java index 1e1cc3b86a9d5..9003cbec12d1e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java @@ -37,8 +37,6 @@ import java.util.Map; import java.util.function.LongFunction; -import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; - /** * A customized stream input used to deserialize ESQL physical plan fragments. Complements stream * input with methods that read plan nodes, Attributes, Expressions, etc. 
@@ -226,7 +224,7 @@ public A readEsFieldWithCache() throws IOException { // it's safe to cast to int, since the max value for this is {@link PlanStreamOutput#MAX_SERIALIZED_ATTRIBUTES} int cacheId = Math.toIntExact(readZLong()); if (cacheId < 0) { - String className = readCachedStringWithVersionCheck(this); + String className = readCachedString(); Writeable.Reader reader = EsField.getReader(className); cacheId = -1 - cacheId; EsField result = reader.read(this); @@ -236,7 +234,7 @@ public A readEsFieldWithCache() throws IOException { return (A) esFieldFromCache(cacheId); } } else { - String className = readCachedStringWithVersionCheck(this); + String className = readCachedString(); Writeable.Reader reader = EsField.getReader(className); return (A) reader.read(this); } @@ -247,6 +245,9 @@ public A readEsFieldWithCache() throws IOException { */ @Override public String readCachedString() throws IOException { + if (getTransportVersion().before(TransportVersions.ESQL_CACHED_STRING_SERIALIZATION)) { + return readString(); + } int cacheId = Math.toIntExact(readZLong()); if (cacheId < 0) { String string = readString(); @@ -258,11 +259,6 @@ public String readCachedString() throws IOException { } } - @Override - public String readOptionalCachedString() throws IOException { - return readBoolean() ? 
readCachedString() : null; - } - private EsField esFieldFromCache(int id) throws IOException { EsField field = esFieldsCache[id]; if (field == null) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java index 615c4266620c7..b633b10122eb3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java @@ -30,8 +30,6 @@ import java.util.IdentityHashMap; import java.util.Map; -import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; - /** * A customized stream output used to serialize ESQL physical plan fragments. Complements stream * output with methods that write plan nodes, Attributes, Expressions, etc. @@ -197,7 +195,7 @@ public boolean writeEsFieldCacheHeader(EsField field) throws IOException { cacheId = cacheEsField(field); writeZLong(-1 - cacheId); } - writeCachedStringWithVersionCheck(this, field.getWriteableName()); + writeCachedString(field.getWriteableName()); return true; } @@ -209,6 +207,10 @@ public boolean writeEsFieldCacheHeader(EsField field) throws IOException { */ @Override public void writeCachedString(String string) throws IOException { + if (getTransportVersion().before(TransportVersions.ESQL_CACHED_STRING_SERIALIZATION)) { + writeString(string); + return; + } Integer cacheId = stringCache.get(string); if (cacheId != null) { writeZLong(cacheId); @@ -224,16 +226,6 @@ public void writeCachedString(String string) throws IOException { writeString(string); } - @Override - public void writeOptionalCachedString(String str) throws IOException { - if (str == null) { - writeBoolean(false); - } else { - writeBoolean(true); - writeCachedString(str); - } - } - private Integer esFieldIdFromCache(EsField field) { return 
cachedEsFields.get(field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java index eb72009638396..951fc7ad1cf29 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java @@ -112,12 +112,7 @@ private static List flatten(Source source, Map mappi EsField t = entry.getValue(); if (t != null) { - FieldAttribute f = new FieldAttribute( - source, - parent != null ? parent.name() : null, - parent != null ? parent.name() + "." + name : name, - t - ); + FieldAttribute f = new FieldAttribute(source, parent, parent != null ? parent.name() + "." + name : name, t); list.add(f); // object or nested if (t.getProperties().isEmpty() == false) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FieldAttributeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FieldAttributeTests.java index 6b2040f58f84c..e8f0333791844 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FieldAttributeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FieldAttributeTests.java @@ -20,7 +20,7 @@ public class FieldAttributeTests extends AbstractAttributeTestCase { public static FieldAttribute createFieldAttribute(int maxDepth, boolean onlyRepresentable) { Source source = Source.EMPTY; - String parentName = maxDepth == 0 || randomBoolean() ? null : randomAlphaOfLength(3); + FieldAttribute parent = maxDepth == 0 || randomBoolean() ? null : createFieldAttribute(maxDepth - 1, onlyRepresentable); String name = randomAlphaOfLength(5); DataType type = onlyRepresentable ? 
randomValueOtherThanMany(t -> false == DataType.isRepresentable(t), () -> randomFrom(DataType.types())) @@ -28,7 +28,7 @@ public static FieldAttribute createFieldAttribute(int maxDepth, boolean onlyRepr EsField field = AbstractEsFieldTypeTests.randomAnyEsField(maxDepth); Nullability nullability = randomFrom(Nullability.values()); boolean synthetic = randomBoolean(); - return newFieldAttributeWithType(source, parentName, name, type, field, nullability, new NameId(), synthetic); + return newFieldAttributeWithType(source, parent, name, type, field, nullability, new NameId(), synthetic); } @Override @@ -39,20 +39,20 @@ protected FieldAttribute create() { @Override protected FieldAttribute mutate(FieldAttribute instance) { Source source = instance.source(); - String parentName = instance.parentName(); + FieldAttribute parent = instance.parent(); String name = instance.name(); DataType type = instance.dataType(); EsField field = instance.field(); Nullability nullability = instance.nullable(); boolean synthetic = instance.synthetic(); switch (between(0, 5)) { - case 0 -> parentName = randomValueOtherThan(parentName, () -> randomBoolean() ? null : randomAlphaOfLength(2)); + case 0 -> parent = randomValueOtherThan(parent, () -> randomBoolean() ? 
null : createFieldAttribute(2, false)); case 1 -> name = randomAlphaOfLength(name.length() + 1); case 2 -> type = randomValueOtherThan(type, () -> randomFrom(DataType.types())); case 3 -> field = randomValueOtherThan(field, () -> AbstractEsFieldTypeTests.randomAnyEsField(3)); case 4 -> nullability = randomValueOtherThan(nullability, () -> randomFrom(Nullability.values())); case 5 -> synthetic = false == synthetic; } - return newFieldAttributeWithType(source, parentName, name, type, field, nullability, new NameId(), synthetic); + return newFieldAttributeWithType(source, parent, name, type, field, nullability, new NameId(), synthetic); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/index/EsIndexSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/index/EsIndexSerializationTests.java index 82dd5a88ffaf1..687b83370f571 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/index/EsIndexSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/index/EsIndexSerializationTests.java @@ -182,51 +182,4 @@ private void testManyTypeConflicts(boolean withParent, ByteSizeValue expected) t assertThat(ByteSizeValue.ofBytes(out.bytes().length()), byteSizeEquals(expected)); } } - - public static EsIndex deeplyNestedIndex(int depth, int childrenPerLevel) { - String rootFieldName = "root"; - Map fields = Map.of(rootFieldName, fieldWithRecursiveChildren(depth, childrenPerLevel, rootFieldName)); - - return new EsIndex("deeply-nested", fields); - } - - private static EsField fieldWithRecursiveChildren(int depth, int childrenPerLevel, String name) { - assert depth >= 1; - - Map children = new TreeMap<>(); - String childName; - if (depth == 1) { - for (int i = 0; i < childrenPerLevel; i++) { - childName = "leaf" + i; - children.put(childName, new EsField(childName, DataType.KEYWORD, Map.of(), true)); - } - } else { - for (int i = 0; i < childrenPerLevel; i++) { - 
childName = "level" + depth + "child" + i; - children.put(childName, fieldWithRecursiveChildren(depth - 1, childrenPerLevel, childName)); - } - } - - return new EsField(name, DataType.OBJECT, children, false); - } - - /** - * Test de-/serialization and size on the wire for an index that has multiple levels of children: - * A single root with 9 children, each of which has 9 children etc. 6 levels deep. - */ - public void testDeeplyNestedFields() throws IOException { - ByteSizeValue expectedSize = ByteSizeValue.ofBytes(9425494); - /* - * History: - * 9425494b - string serialization #112929 - */ - - int depth = 6; - int childrenPerLevel = 9; - - try (BytesStreamOutput out = new BytesStreamOutput(); var pso = new PlanStreamOutput(out, null)) { - deeplyNestedIndex(depth, childrenPerLevel).writeTo(pso); - assertThat(ByteSizeValue.ofBytes(out.bytes().length()), byteSizeEquals(expectedSize)); - } - } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java index d3e1710a715af..33252b9dbaaa3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.esql.Column; import org.elasticsearch.xpack.esql.core.InvalidArgumentException; import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; @@ -43,6 +44,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; 
import static org.hamcrest.Matchers.sameInstance; public class PlanStreamOutputTests extends ESTestCase { @@ -116,13 +118,26 @@ public void testWriteAttributeMultipleTimes() throws IOException { for (int i = 0; i < occurrences; i++) { planStream.writeNamedWriteable(attribute); } - assertThat(planStream.cachedAttributes.size(), is(1)); + int depth = 0; + Attribute parent = attribute; + while (parent != null) { + depth++; + parent = parent instanceof FieldAttribute f ? f.parent() : null; + } + assertThat(planStream.cachedAttributes.size(), is(depth)); try (PlanStreamInput in = new PlanStreamInput(out.bytes().streamInput(), REGISTRY, configuration)) { Attribute first = in.readNamedWriteable(Attribute.class); for (int i = 1; i < occurrences; i++) { Attribute next = in.readNamedWriteable(Attribute.class); assertThat(first, sameInstance(next)); } + for (int i = 0; i < depth; i++) { + assertThat(first, equalTo(attribute)); + first = first instanceof FieldAttribute f ? f.parent() : null; + attribute = attribute instanceof FieldAttribute f ? f.parent() : null; + } + assertThat(first, is(nullValue())); + assertThat(attribute, is(nullValue())); } } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSinkExecSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSinkExecSerializationTests.java index 5989c0de6b61d..1f52795dbacd7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSinkExecSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSinkExecSerializationTests.java @@ -80,67 +80,20 @@ public void testManyTypeConflicts() throws IOException { * See {@link #testManyTypeConflicts(boolean, ByteSizeValue)} for more. 
*/ public void testManyTypeConflictsWithParent() throws IOException { - testManyTypeConflicts(true, ByteSizeValue.ofBytes(2774192)); + testManyTypeConflicts(true, ByteSizeValue.ofBytes(2774214)); /* * History: * 2 gb+ - start * 43.3mb - Cache attribute subclasses #111447 * 5.6mb - shorten error messages for UnsupportedAttributes #111973 * 3.1mb - cache EsFields #112008 - * 2774214b - string serialization #112929 - * 2774192b - remove field attribute #112881 + * 2.6mb - string serialization #112929 */ } - private void testManyTypeConflicts(boolean withParent, ByteSizeValue expected) throws IOException { - EsIndex index = EsIndexSerializationTests.indexWithManyConflicts(withParent); - testSerializePlanWithIndex(index, expected); - } - - /** - * Test the size of serializing a plan like - * FROM index | LIMIT 10 - * with a single root field that has many children, grandchildren etc. - */ - public void testDeeplyNestedFields() throws IOException { - ByteSizeValue expected = ByteSizeValue.ofBytes(47252411); - /* - * History: - * 48223371b - string serialization #112929 - * 47252411b - remove field attribute #112881 - */ - - int depth = 6; - int childrenPerLevel = 8; - - EsIndex index = EsIndexSerializationTests.deeplyNestedIndex(depth, childrenPerLevel); - testSerializePlanWithIndex(index, expected); - } - /** - * Test the size of serializing a plan like - * FROM index | LIMIT 10 | KEEP one_single_field - * with a single root field that has many children, grandchildren etc. 
- */ - public void testDeeplyNestedFieldsKeepOnlyOne() throws IOException { - ByteSizeValue expected = ByteSizeValue.ofBytes(9425806); - /* - * History: - * 9426058b - string serialization #112929 - * 9425806b - remove field attribute #112881 - */ - - int depth = 6; - int childrenPerLevel = 9; - - EsIndex index = EsIndexSerializationTests.deeplyNestedIndex(depth, childrenPerLevel); - testSerializePlanWithIndex(index, expected, false); - } - - /** - * Test the size of serializing the physical plan that will be sent to a data node. - * The plan corresponds to `FROM index | LIMIT 10`. - * Callers of this method intentionally use a very precise size for the serialized + * Test the size of serializing a plan with many conflicts. Callers of + * this method intentionally use a very precise size for the serialized * data so a programmer making changes has to think when this size changes. *

    * In general, shrinking the over the wire size is great and the precise @@ -155,14 +108,10 @@ public void testDeeplyNestedFieldsKeepOnlyOne() throws IOException { * ESQL impossible to use at all for big mappings with many conflicts. *

    */ - private void testSerializePlanWithIndex(EsIndex index, ByteSizeValue expected) throws IOException { - testSerializePlanWithIndex(index, expected, true); - } - - private void testSerializePlanWithIndex(EsIndex index, ByteSizeValue expected, boolean keepAllFields) throws IOException { - List allAttributes = Analyzer.mappingAsAttributes(randomSource(), index.mapping()); - List keepAttributes = keepAllFields ? allAttributes : List.of(allAttributes.get(0)); - EsRelation relation = new EsRelation(randomSource(), index, keepAttributes, IndexMode.STANDARD); + private void testManyTypeConflicts(boolean withParent, ByteSizeValue expected) throws IOException { + EsIndex index = EsIndexSerializationTests.indexWithManyConflicts(withParent); + List attributes = Analyzer.mappingAsAttributes(randomSource(), index.mapping()); + EsRelation relation = new EsRelation(randomSource(), index, attributes, IndexMode.STANDARD); Limit limit = new Limit(randomSource(), new Literal(randomSource(), 10, DataType.INTEGER), relation); Project project = new Project(randomSource(), limit, limit.output()); FragmentExec fragmentExec = new FragmentExec(project); From 6d039c2b9dfb1dfc1eaac86ecff37b3db6521908 Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Thu, 17 Oct 2024 15:36:23 +0200 Subject: [PATCH 185/449] Make inline sort alias name a prefix match in testPushTopNInlineDistanceToSource (#114984) * Make inline sort alias name a prefix match This reduces flakiness of this test. 
* Added comments on prefix check for variable name --- .../optimizer/PhysicalPlanOptimizerTests.java | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 114aed68761fe..964039268e30d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -4769,12 +4769,24 @@ public void testPushTopNInlineDistanceToSource() { var exchange = asRemoteExchange(topN.child()); project = as(exchange.child(), ProjectExec.class); - assertThat(names(project.projections()), contains("abbrev", "name", "location", "country", "city", "$$order_by$0$0")); + // Depending on what is run before this test, the synthetic name could have variable suffixes, so we must only assert on the prefix + assertThat( + names(project.projections()), + contains( + equalTo("abbrev"), + equalTo("name"), + equalTo("location"), + equalTo("country"), + equalTo("city"), + startsWith("$$order_by$0$") + ) + ); var extract = as(project.child(), FieldExtractExec.class); assertThat(names(extract.attributesToExtract()), contains("abbrev", "name", "country", "city")); var evalExec = as(extract.child(), EvalExec.class); var alias = as(evalExec.fields().get(0), Alias.class); - assertThat(alias.name(), is("$$order_by$0$0")); + assertThat(alias.name(), startsWith("$$order_by$0$")); + var aliasName = alias.name(); // We need this name to know what to assert on later when comparing the Order to the Sort var stDistance = as(alias.child(), StDistance.class); assertThat(stDistance.left().toString(), startsWith("location")); extract = as(evalExec.child(), FieldExtractExec.class); @@ -4784,7 +4796,7 @@ public void 
testPushTopNInlineDistanceToSource() { // Assert that the TopN(distance) is pushed down as geo-sort(location) assertThat(source.limit(), is(topN.limit())); Set orderSet = orderAsSet(topN.order()); - Set sortsSet = sortsAsSet(source.sorts(), Map.of("location", "$$order_by$0$0")); + Set sortsSet = sortsAsSet(source.sorts(), Map.of("location", aliasName)); assertThat(orderSet, is(sortsSet)); // Fine-grained checks on the pushed down sort From 588929a0a790a488dc127f2c701f58f89e82f936 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Thu, 17 Oct 2024 15:22:47 +0100 Subject: [PATCH 186/449] Revert change to transport protocol (#115009) Add a new version for reverting the change - in v9, this just adds a boolean. This is so the transport protocol is the same between 8.x and main, for ease of backports. When the versions are collapsed, this can be removed. --- .../org/elasticsearch/TransportVersions.java | 1 + .../action/search/SearchRequest.java | 10 +++++++--- .../action/search/SearchRequestTests.java | 20 +++++++++++++++++++ 3 files changed, 28 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index dcf6f7aebdc65..acddc517012cf 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -175,6 +175,7 @@ static TransportVersion def(int id) { public static final TransportVersion ML_INFERENCE_ATTACH_TO_EXISTSING_DEPLOYMENT = def(8_771_00_0); public static final TransportVersion CONVERT_FAILURE_STORE_OPTIONS_TO_SELECTOR_OPTIONS_INTERNALLY = def(8_772_00_0); public static final TransportVersion REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_773_00_0); + public static final TransportVersion REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_774_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java index 5aec2bcd04b26..2e1d58e042f09 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.mapper.SourceLoader; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.Rewriteable; @@ -254,8 +255,10 @@ public SearchRequest(StreamInput in) throws IOException { finalReduce = true; } ccsMinimizeRoundtrips = in.readBoolean(); - if (in.getTransportVersion().before(TransportVersions.REMOVE_MIN_COMPATIBLE_SHARD_NODE) && in.readBoolean()) { - Version.readVersion(in); // and drop on the floor + if ((in.getTransportVersion().before(TransportVersions.REMOVE_MIN_COMPATIBLE_SHARD_NODE) + || in.getTransportVersion().onOrAfter(TransportVersions.REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE)) && in.readBoolean()) { + @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // this can be removed (again) when the v9 transport version can diverge + Version v = Version.readVersion(in); // and drop on the floor } waitForCheckpoints = in.readMap(StreamInput::readLongArray); waitForCheckpointsTimeout = in.readTimeValue(); @@ -291,7 +294,8 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(finalReduce); } out.writeBoolean(ccsMinimizeRoundtrips); - if (out.getTransportVersion().before(TransportVersions.REMOVE_MIN_COMPATIBLE_SHARD_NODE)) { + if (out.getTransportVersion().before(TransportVersions.REMOVE_MIN_COMPATIBLE_SHARD_NODE) + || out.getTransportVersion().onOrAfter(TransportVersions.REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE)) { 
out.writeBoolean(false); } out.writeMap(waitForCheckpoints, StreamOutput::writeLongArray); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java index 3079b6d4b0371..c6ca97fd5694a 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java @@ -16,9 +16,12 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.ArrayUtils; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.TermQueryBuilder; @@ -102,6 +105,23 @@ public void testSerialization() throws Exception { assertNotSame(deserializedRequest, searchRequest); } + @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // this can be removed when the affected transport version constants are collapsed + public void testSerializationConstants() throws Exception { + SearchRequest searchRequest = createSearchRequest(); + + // something serialized with previous version to remove, should read correctly with the reversion + try (BytesStreamOutput output = new BytesStreamOutput()) { + output.setTransportVersion(TransportVersionUtils.getPreviousVersion(TransportVersions.REMOVE_MIN_COMPATIBLE_SHARD_NODE)); + searchRequest.writeTo(output); + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { + 
in.setTransportVersion(TransportVersions.REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE); + SearchRequest copiedRequest = new SearchRequest(in); + assertEquals(copiedRequest, searchRequest); + assertEquals(copiedRequest.hashCode(), searchRequest.hashCode()); + } + } + } + public void testSerializationMultiKNN() throws Exception { SearchRequest searchRequest = createSearchRequest(); if (searchRequest.source() == null) { From d1f26ab6f72aaec58bdd4a26c42febd34229a79b Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Thu, 17 Oct 2024 16:23:33 +0200 Subject: [PATCH 187/449] [DOCS] Update local data extraction version info (#115001) --- .../connector/docs/connectors-content-extraction.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/connector/docs/connectors-content-extraction.asciidoc b/docs/reference/connector/docs/connectors-content-extraction.asciidoc index b785d62f0f553..5d2a9550a7c3c 100644 --- a/docs/reference/connector/docs/connectors-content-extraction.asciidoc +++ b/docs/reference/connector/docs/connectors-content-extraction.asciidoc @@ -90,7 +90,7 @@ include::_connectors-list-local-content-extraction.asciidoc[] Self-hosted content extraction is handled by a *separate* extraction service. The versions for the extraction service do not align with the Elastic stack. -For version `8.11.x`, you should use extraction service version `0.3.x`. +For versions after `8.11.x` (including {version}), you should use extraction service version `0.3.x`. 
You can run the service with the following command: From 831d55fcc8354a1798c243f90bc5b0998158d2ae Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Thu, 17 Oct 2024 10:25:37 -0400 Subject: [PATCH 188/449] Add missing preventDeletionLock.remove in corner case (#115010) --- .../elasticsearch/xpack/inference/registry/ModelRegistry.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java index 33a97f1e91621..260d4e663dafd 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java @@ -397,6 +397,7 @@ public void updateModelTransaction(Model newModel, Model existingModel, ActionLi logger.error( format("Failed to update inference endpoint [%s] due to [%s]", inferenceEntityId, configResponse.buildFailureMessage()) ); + preventDeletionLock.remove(inferenceEntityId); // Since none of our updates succeeded at this point, we can simply return. 
finalListener.onFailure( new ElasticsearchStatusException( From b8688b38c8728540fc91ff6833b793c222ac0654 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 17 Oct 2024 14:52:28 +0000 Subject: [PATCH 189/449] Bump versions after 8.15.3 release --- .buildkite/pipelines/intake.yml | 2 +- .buildkite/pipelines/periodic-packaging.yml | 6 +++--- .buildkite/pipelines/periodic.yml | 10 +++++----- .ci/bwcVersions | 2 +- .ci/snapshotBwcVersions | 2 +- server/src/main/java/org/elasticsearch/Version.java | 1 + .../resources/org/elasticsearch/TransportVersions.csv | 1 + .../org/elasticsearch/index/IndexVersions.csv | 1 + 8 files changed, 14 insertions(+), 11 deletions(-) diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 1ddb3e82920cd..37ea49e3a6d95 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -56,7 +56,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["8.15.3", "8.16.0", "8.17.0", "9.0.0"] + BWC_VERSION: ["8.15.4", "8.16.0", "8.17.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 03368e7e4a9c0..8819a5f7f493f 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -272,8 +272,8 @@ steps: env: BWC_VERSION: 8.14.3 - - label: "{{matrix.image}} / 8.15.3 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.15.3 + - label: "{{matrix.image}} / 8.15.4 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.15.4 timeout_in_minutes: 300 matrix: setup: @@ -286,7 +286,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.15.3 + BWC_VERSION: 8.15.4 - label: "{{matrix.image}} / 8.16.0 / packaging-tests-upgrade" command: 
./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.16.0 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index d572dd104d215..7b6a6ea72fe83 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -287,8 +287,8 @@ steps: - signal_reason: agent_stop limit: 3 - - label: 8.15.3 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.15.3#bwcTest + - label: 8.15.4 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.15.4#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -297,7 +297,7 @@ steps: buildDirectory: /dev/shm/bk preemptible: true env: - BWC_VERSION: 8.15.3 + BWC_VERSION: 8.15.4 retry: automatic: - exit_status: "-1" @@ -429,7 +429,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk21 - BWC_VERSION: ["8.15.3", "8.16.0", "8.17.0", "9.0.0"] + BWC_VERSION: ["8.15.4", "8.16.0", "8.17.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -471,7 +471,7 @@ steps: ES_RUNTIME_JAVA: - openjdk21 - openjdk23 - BWC_VERSION: ["8.15.3", "8.16.0", "8.17.0", "9.0.0"] + BWC_VERSION: ["8.15.4", "8.16.0", "8.17.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index cd1f7d1ae269f..2e77631450825 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -14,7 +14,7 @@ BWC_VERSION: - "8.12.2" - "8.13.4" - "8.14.3" - - "8.15.3" + - "8.15.4" - "8.16.0" - "8.17.0" - "9.0.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 67ebf0c51ab1f..c6edc709a8ceb 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,5 +1,5 @@ BWC_VERSION: - - "8.15.3" + - "8.15.4" - "8.16.0" - "8.17.0" - "9.0.0" diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 48bf08ddfc028..5e4df05c10182 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ 
b/server/src/main/java/org/elasticsearch/Version.java @@ -186,6 +186,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_15_1 = new Version(8_15_01_99); public static final Version V_8_15_2 = new Version(8_15_02_99); public static final Version V_8_15_3 = new Version(8_15_03_99); + public static final Version V_8_15_4 = new Version(8_15_04_99); public static final Version V_8_16_0 = new Version(8_16_00_99); public static final Version V_8_17_0 = new Version(8_17_00_99); public static final Version V_9_0_0 = new Version(9_00_00_99); diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index 44c752def351e..b0ef5b780e775 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -130,3 +130,4 @@ 8.15.0,8702002 8.15.1,8702002 8.15.2,8702003 +8.15.3,8702003 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index 971940041f9b1..e3681cc975988 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -130,3 +130,4 @@ 8.15.0,8512000 8.15.1,8512000 8.15.2,8512000 +8.15.3,8512000 From 8670dd799ef97b69298021b440a0cdf6441dadd1 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 17 Oct 2024 14:54:19 +0000 Subject: [PATCH 190/449] Prune changelogs after 8.15.3 release --- docs/changelog/111684.yaml | 5 ----- docs/changelog/112761.yaml | 6 ------ docs/changelog/113123.yaml | 6 ------ docs/changelog/113129.yaml | 6 ------ docs/changelog/113266.yaml | 5 ----- docs/changelog/113437.yaml | 6 ------ docs/changelog/113697.yaml | 6 ------ docs/changelog/113699.yaml | 5 ----- docs/changelog/113846.yaml | 6 ------ docs/changelog/113869.yaml | 5 
----- docs/changelog/113961.yaml | 5 ----- docs/changelog/114116.yaml | 5 ----- docs/changelog/114264.yaml | 5 ----- docs/changelog/114337.yaml | 5 ----- 14 files changed, 76 deletions(-) delete mode 100644 docs/changelog/111684.yaml delete mode 100644 docs/changelog/112761.yaml delete mode 100644 docs/changelog/113123.yaml delete mode 100644 docs/changelog/113129.yaml delete mode 100644 docs/changelog/113266.yaml delete mode 100644 docs/changelog/113437.yaml delete mode 100644 docs/changelog/113697.yaml delete mode 100644 docs/changelog/113699.yaml delete mode 100644 docs/changelog/113846.yaml delete mode 100644 docs/changelog/113869.yaml delete mode 100644 docs/changelog/113961.yaml delete mode 100644 docs/changelog/114116.yaml delete mode 100644 docs/changelog/114264.yaml delete mode 100644 docs/changelog/114337.yaml diff --git a/docs/changelog/111684.yaml b/docs/changelog/111684.yaml deleted file mode 100644 index 32edb5723cb0a..0000000000000 --- a/docs/changelog/111684.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 111684 -summary: Write downloaded model parts async -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/112761.yaml b/docs/changelog/112761.yaml deleted file mode 100644 index fe63f38f365a4..0000000000000 --- a/docs/changelog/112761.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 112761 -summary: Fix collapse interaction with stored fields -area: Search -type: bug -issues: - - 112646 diff --git a/docs/changelog/113123.yaml b/docs/changelog/113123.yaml deleted file mode 100644 index 43008eaa80f43..0000000000000 --- a/docs/changelog/113123.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 113123 -summary: "ES|QL: Skip CASE function from `InferIsNotNull` rule checks" -area: ES|QL -type: bug -issues: - - 112704 diff --git a/docs/changelog/113129.yaml b/docs/changelog/113129.yaml deleted file mode 100644 index d88d86387ac10..0000000000000 --- a/docs/changelog/113129.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 113129 -summary: Fix `needsScore` 
computation in `GlobalOrdCardinalityAggregator` -area: Aggregations -type: bug -issues: - - 112975 diff --git a/docs/changelog/113266.yaml b/docs/changelog/113266.yaml deleted file mode 100644 index d423387d45738..0000000000000 --- a/docs/changelog/113266.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 113266 -summary: "[M] Fix error message formatting" -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/113437.yaml b/docs/changelog/113437.yaml deleted file mode 100644 index 98831958e63f8..0000000000000 --- a/docs/changelog/113437.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 113437 -summary: Fix check on E5 model platform compatibility -area: Machine Learning -type: bug -issues: - - 113577 diff --git a/docs/changelog/113697.yaml b/docs/changelog/113697.yaml deleted file mode 100644 index 1362e01fcc89b..0000000000000 --- a/docs/changelog/113697.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 113697 -summary: Handle parsing ingest processors where definition is not a object -area: Machine Learning -type: bug -issues: - - 113615 diff --git a/docs/changelog/113699.yaml b/docs/changelog/113699.yaml deleted file mode 100644 index 3876c8147e7eb..0000000000000 --- a/docs/changelog/113699.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 113699 -summary: "[ESQL] Fix init value in max float aggregation" -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/113846.yaml b/docs/changelog/113846.yaml deleted file mode 100644 index 5fdd56e98d706..0000000000000 --- a/docs/changelog/113846.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 113846 -summary: Don't validate internal stats if they are empty -area: Aggregations -type: bug -issues: - - 113811 diff --git a/docs/changelog/113869.yaml b/docs/changelog/113869.yaml deleted file mode 100644 index f1cd1ec423966..0000000000000 --- a/docs/changelog/113869.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 113869 -summary: Upgrade protobufer to 3.25.5 -area: Snapshot/Restore -type: upgrade -issues: [] diff --git a/docs/changelog/113961.yaml 
b/docs/changelog/113961.yaml deleted file mode 100644 index 24cb1f45f029e..0000000000000 --- a/docs/changelog/113961.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 113961 -summary: "[ESQL] Support datetime data type in Least and Greatest functions" -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/114116.yaml b/docs/changelog/114116.yaml deleted file mode 100644 index 8d1c9e162ae23..0000000000000 --- a/docs/changelog/114116.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 114116 -summary: "ES|QL: Ensure minimum capacity for `PlanStreamInput` caches" -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/114264.yaml b/docs/changelog/114264.yaml deleted file mode 100644 index fe421f6422830..0000000000000 --- a/docs/changelog/114264.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 114264 -summary: "Fix analyzed wildcard query in simple_query_string when disjunctions is empty" -area: Search -type: bug -issues: [114185] diff --git a/docs/changelog/114337.yaml b/docs/changelog/114337.yaml deleted file mode 100644 index ec55be8bb179b..0000000000000 --- a/docs/changelog/114337.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 114337 -summary: "Enables cluster state role mapper, to include ECK operator-defined role mappings in role resolution" -area: Authentication -type: bug -issues: [] From 1ebe1b3f15414ed4d98e3e029fafae92c458a756 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Thu, 17 Oct 2024 16:59:42 +0200 Subject: [PATCH 191/449] Don't normalize coordinates in GeoTileUtils (#114929) The main users of this class use as input latitudes and longitudes read from doc values. These coordinates are always on bounds so there is no point to try to normalise them, more over when this piece of code is in the hot path for aggregations. 
--- .../bucket/geogrid/GeoTileUtils.java | 18 +++++----- .../search/DocValueFormatTests.java | 5 +-- .../bucket/geogrid/GeoTileUtilsTests.java | 34 ++++++++++--------- 3 files changed, 29 insertions(+), 28 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtils.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtils.java index 89240a94f14ce..da16d8d79fef4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtils.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtils.java @@ -21,8 +21,6 @@ import java.io.IOException; import java.util.Locale; -import static org.elasticsearch.common.geo.GeoUtils.normalizeLat; -import static org.elasticsearch.common.geo.GeoUtils.normalizeLon; import static org.elasticsearch.common.geo.GeoUtils.quantizeLat; /** @@ -113,15 +111,13 @@ public static int checkPrecisionRange(int precision) { * Calculates the x-coordinate in the tile grid for the specified longitude given * the number of tile columns for a pre-determined zoom-level. * - * @param longitude the longitude to use when determining the tile x-coordinate + * @param longitude the longitude to use when determining the tile x-coordinate. Longitude is in degrees + * and must be between -180 and 180 degrees. * @param tiles the number of tiles per row for a pre-determined zoom-level */ public static int getXTile(double longitude, int tiles) { - // normalizeLon treats this as 180, which is not friendly for tile mapping - if (longitude == -180) { - return 0; - } - final double xTile = (normalizeLon(longitude) + 180.0) / 360.0 * tiles; + assert longitude >= -180 && longitude <= 180 : "Longitude must be between -180 and 180 degrees"; + final double xTile = (longitude + 180.0) / 360.0 * tiles; // Edge values may generate invalid values, and need to be clipped. 
return Math.max(0, Math.min(tiles - 1, (int) Math.floor(xTile))); } @@ -130,11 +126,13 @@ public static int getXTile(double longitude, int tiles) { * Calculates the y-coordinate in the tile grid for the specified longitude given * the number of tile rows for pre-determined zoom-level. * - * @param latitude the latitude to use when determining the tile y-coordinate + * @param latitude the latitude to use when determining the tile y-coordinate. Latitude is in degrees + * and must be between -90 and 90 degrees. * @param tiles the number of tiles per column for a pre-determined zoom-level */ public static int getYTile(double latitude, int tiles) { - final double latSin = SloppyMath.cos(PI_DIV_2 - Math.toRadians(normalizeLat(latitude))); + assert latitude >= -90 && latitude <= 90 : "Latitude must be between -90 and 90 degrees"; + final double latSin = SloppyMath.cos(PI_DIV_2 - Math.toRadians(latitude)); final double yTile = (0.5 - (ESSloppyMath.log((1.0 + latSin) / (1.0 - latSin)) / PI_TIMES_4)) * tiles; // Edge values may generate invalid values, and need to be clipped. // For example, polar regions (above/below lat 85.05112878) get normalized. 
diff --git a/server/src/test/java/org/elasticsearch/search/DocValueFormatTests.java b/server/src/test/java/org/elasticsearch/search/DocValueFormatTests.java index 6b42dbbb39c9f..5371893993318 100644 --- a/server/src/test/java/org/elasticsearch/search/DocValueFormatTests.java +++ b/server/src/test/java/org/elasticsearch/search/DocValueFormatTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -173,8 +174,8 @@ public void testGeoTileFormat() { assertEquals("29/536869420/0", DocValueFormat.GEOTILE.format(longEncode(179.999, 89.999, 29))); assertEquals("29/1491/536870911", DocValueFormat.GEOTILE.format(longEncode(-179.999, -89.999, 29))); assertEquals("2/2/1", DocValueFormat.GEOTILE.format(longEncode(1, 1, 2))); - assertEquals("1/1/0", DocValueFormat.GEOTILE.format(longEncode(13, 95, 1))); - assertEquals("1/1/1", DocValueFormat.GEOTILE.format(longEncode(13, -95, 1))); + assertEquals("1/1/0", DocValueFormat.GEOTILE.format(longEncode(13, GeoUtils.normalizeLat(95), 1))); + assertEquals("1/1/1", DocValueFormat.GEOTILE.format(longEncode(13, GeoUtils.normalizeLat(-95), 1))); } public void testRawParse() { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtilsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtilsTests.java index b056cc1fcc988..975c1af3dc3d9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtilsTests.java @@ -16,6 +16,8 @@ import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; +import static 
org.elasticsearch.common.geo.GeoUtils.normalizeLat; +import static org.elasticsearch.common.geo.GeoUtils.normalizeLon; import static org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileUtils.MAX_ZOOM; import static org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileUtils.checkPrecisionRange; import static org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileUtils.hashToGeoPoint; @@ -53,20 +55,20 @@ public void testLongEncode() { assertEquals(0x77FFFF4580000000L, longEncode(179.999, 89.999, 29)); assertEquals(0x740000BA7FFFFFFFL, longEncode(-179.999, -89.999, 29)); assertEquals(0x0800000040000001L, longEncode(1, 1, 2)); - assertEquals(0x0C00000060000000L, longEncode(-20, 100, 3)); + assertEquals(0x0C00000060000000L, longEncode(-20, normalizeLat(100), 3)); assertEquals(0x71127D27C8ACA67AL, longEncode(13, -15, 28)); assertEquals(0x4C0077776003A9ACL, longEncode(-12, 15, 19)); - assertEquals(0x140000024000000EL, longEncode(-328.231870, 16.064082, 5)); - assertEquals(0x6436F96B60000000L, longEncode(-590.769588, 89.549167, 25)); - assertEquals(0x6411BD6BA0A98359L, longEncode(999.787079, 51.830093, 25)); - assertEquals(0x751BD6BBCA983596L, longEncode(999.787079, 51.830093, 29)); - assertEquals(0x77CF880A20000000L, longEncode(-557.039740, -632.103969, 29)); + assertEquals(0x140000024000000EL, longEncode(normalizeLon(-328.231870), 16.064082, 5)); + assertEquals(0x6436F96B60000000L, longEncode(normalizeLon(-590.769588), 89.549167, 25)); + assertEquals(0x6411BD6BA0A98359L, longEncode(normalizeLon(999.787079), 51.830093, 25)); + assertEquals(0x751BD6BBCA983596L, longEncode(normalizeLon(999.787079), 51.830093, 29)); + assertEquals(0x77CF880A20000000L, longEncode(normalizeLon(-557.039740), normalizeLat(-632.103969), 29)); assertEquals(0x7624FA4FA0000000L, longEncode(13, 88, 29)); assertEquals(0x7624FA4FBFFFFFFFL, longEncode(13, -88, 29)); assertEquals(0x0400000020000000L, longEncode(13, 89, 1)); assertEquals(0x0400000020000001L, longEncode(13, -89, 1)); - 
assertEquals(0x0400000020000000L, longEncode(13, 95, 1)); - assertEquals(0x0400000020000001L, longEncode(13, -95, 1)); + assertEquals(0x0400000020000000L, longEncode(13, normalizeLat(95), 1)); + assertEquals(0x0400000020000001L, longEncode(13, normalizeLat(-95), 1)); expectThrows(IllegalArgumentException.class, () -> longEncode(0, 0, -1)); expectThrows(IllegalArgumentException.class, () -> longEncode(-1, 0, MAX_ZOOM + 1)); @@ -78,20 +80,20 @@ public void testLongEncodeFromString() { assertEquals(0x77FFFF4580000000L, longEncode(stringEncode(longEncode(179.999, 89.999, 29)))); assertEquals(0x740000BA7FFFFFFFL, longEncode(stringEncode(longEncode(-179.999, -89.999, 29)))); assertEquals(0x0800000040000001L, longEncode(stringEncode(longEncode(1, 1, 2)))); - assertEquals(0x0C00000060000000L, longEncode(stringEncode(longEncode(-20, 100, 3)))); + assertEquals(0x0C00000060000000L, longEncode(stringEncode(longEncode(-20, normalizeLat(100), 3)))); assertEquals(0x71127D27C8ACA67AL, longEncode(stringEncode(longEncode(13, -15, 28)))); assertEquals(0x4C0077776003A9ACL, longEncode(stringEncode(longEncode(-12, 15, 19)))); - assertEquals(0x140000024000000EL, longEncode(stringEncode(longEncode(-328.231870, 16.064082, 5)))); - assertEquals(0x6436F96B60000000L, longEncode(stringEncode(longEncode(-590.769588, 89.549167, 25)))); - assertEquals(0x6411BD6BA0A98359L, longEncode(stringEncode(longEncode(999.787079, 51.830093, 25)))); - assertEquals(0x751BD6BBCA983596L, longEncode(stringEncode(longEncode(999.787079, 51.830093, 29)))); - assertEquals(0x77CF880A20000000L, longEncode(stringEncode(longEncode(-557.039740, -632.103969, 29)))); + assertEquals(0x140000024000000EL, longEncode(stringEncode(longEncode(normalizeLon(-328.231870), 16.064082, 5)))); + assertEquals(0x6436F96B60000000L, longEncode(stringEncode(longEncode(normalizeLon(-590.769588), 89.549167, 25)))); + assertEquals(0x6411BD6BA0A98359L, longEncode(stringEncode(longEncode(normalizeLon(999.787079), 51.830093, 25)))); + 
assertEquals(0x751BD6BBCA983596L, longEncode(stringEncode(longEncode(normalizeLon(999.787079), 51.830093, 29)))); + assertEquals(0x77CF880A20000000L, longEncode(stringEncode(longEncode(normalizeLon(-557.039740), normalizeLat(-632.103969), 29)))); assertEquals(0x7624FA4FA0000000L, longEncode(stringEncode(longEncode(13, 88, 29)))); assertEquals(0x7624FA4FBFFFFFFFL, longEncode(stringEncode(longEncode(13, -88, 29)))); assertEquals(0x0400000020000000L, longEncode(stringEncode(longEncode(13, 89, 1)))); assertEquals(0x0400000020000001L, longEncode(stringEncode(longEncode(13, -89, 1)))); - assertEquals(0x0400000020000000L, longEncode(stringEncode(longEncode(13, 95, 1)))); - assertEquals(0x0400000020000001L, longEncode(stringEncode(longEncode(13, -95, 1)))); + assertEquals(0x0400000020000000L, longEncode(stringEncode(longEncode(13, normalizeLat(95), 1)))); + assertEquals(0x0400000020000001L, longEncode(stringEncode(longEncode(13, normalizeLat(-95), 1)))); expectThrows(IllegalArgumentException.class, () -> longEncode("12/asdf/1")); expectThrows(IllegalArgumentException.class, () -> longEncode("foo")); From 441eea7d24369e2cbe5066eb8b3a64ab43000044 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Thu, 17 Oct 2024 08:32:10 -0700 Subject: [PATCH 192/449] Move logsdb tests to logsdb plugin (#114952) --- .../logsdb/LogsIndexModeCustomSettingsIT.java | 10 ++++------ .../LogsIndexModeDisabledRestTestIT.java | 10 ++++------ .../LogsIndexModeEnabledRestTestIT.java | 10 ++++------ .../logsdb/LogsIndexModeRestTestIT.java | 10 ++++------ .../logsdb/qa/AbstractChallengeRestTest.java | 10 ++++------ .../logsdb/qa/DataGenerationHelper.java | 10 ++++------ ...ndexedIntoStandardModeChallengeRestIT.java | 10 ++++------ ...bVersusReindexedLogsDbChallengeRestIT.java | 10 ++++------ .../logsdb/qa/ReindexChallengeRestIT.java | 10 ++++------ ...ardVersusLogsIndexModeChallengeRestIT.java | 14 ++++++------- ...ogsIndexModeRandomDataChallengeRestIT.java | 10 ++++------ 
...ndomDataDynamicMappingChallengeRestIT.java | 10 ++++------ ...ardReindexedIntoLogsDbChallengeRestIT.java | 10 ++++------ .../logsdb/qa/matchers/ArrayEqualMatcher.java | 14 ++++++------- .../qa/matchers/GenericEqualsMatcher.java | 12 +++++------ .../logsdb/qa/matchers/ListEqualMatcher.java | 14 ++++++------- .../logsdb/qa/matchers/MatchResult.java | 10 ++++------ .../xpack}/logsdb/qa/matchers/Matcher.java | 12 +++++------ .../xpack}/logsdb/qa/matchers/Messages.java | 10 ++++------ .../logsdb/qa/matchers/ObjectMatcher.java | 12 +++++------ .../matchers/source/DynamicFieldMatcher.java | 16 +++++++-------- .../matchers/source/FieldSpecificMatcher.java | 16 +++++++-------- .../qa/matchers/source/MappingTransforms.java | 10 ++++------ .../qa/matchers/source/SourceMatcher.java | 20 +++++++++---------- .../qa/matchers/source/SourceTransforms.java | 10 ++++------ 25 files changed, 120 insertions(+), 170 deletions(-) rename {modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/LogsIndexModeCustomSettingsIT.java (97%) rename {modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/LogsIndexModeDisabledRestTestIT.java (90%) rename {modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/LogsIndexModeEnabledRestTestIT.java (96%) rename {modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/LogsIndexModeRestTestIT.java (92%) rename {modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/qa/AbstractChallengeRestTest.java (96%) rename 
{modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/qa/DataGenerationHelper.java (89%) rename {modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/qa/LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT.java (84%) rename {modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/qa/LogsDbVersusReindexedLogsDbChallengeRestIT.java (84%) rename {modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/qa/ReindexChallengeRestIT.java (76%) rename {modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java (97%) rename {modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java (82%) rename {modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/qa/StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT.java (63%) rename {modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/qa/StandardVersusStandardReindexedIntoLogsDbChallengeRestIT.java (74%) rename {modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/qa/matchers/ArrayEqualMatcher.java (83%) rename 
{modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/qa/matchers/GenericEqualsMatcher.java (85%) rename {modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/qa/matchers/ListEqualMatcher.java (83%) rename {modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/qa/matchers/MatchResult.java (73%) rename {modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/qa/matchers/Matcher.java (90%) rename {modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/qa/matchers/Messages.java (80%) rename {modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/qa/matchers/ObjectMatcher.java (68%) rename {modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/qa/matchers/source/DynamicFieldMatcher.java (80%) rename {modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/qa/matchers/source/FieldSpecificMatcher.java (92%) rename {modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/qa/matchers/source/MappingTransforms.java (88%) rename {modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/qa/matchers/source/SourceMatcher.java (90%) 
rename {modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams => x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack}/logsdb/qa/matchers/source/SourceTransforms.java (86%) diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeCustomSettingsIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java similarity index 97% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeCustomSettingsIT.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java index ab78f48b6cddf..c5ccee1d36b72 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeCustomSettingsIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb; +package org.elasticsearch.xpack.logsdb; import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeDisabledRestTestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeDisabledRestTestIT.java similarity index 90% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeDisabledRestTestIT.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeDisabledRestTestIT.java index 123ca3b806153..40aab696dc9c4 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeDisabledRestTestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeDisabledRestTestIT.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb; +package org.elasticsearch.xpack.logsdb; import org.elasticsearch.client.RestClient; import org.elasticsearch.index.IndexMode; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeEnabledRestTestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeEnabledRestTestIT.java similarity index 96% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeEnabledRestTestIT.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeEnabledRestTestIT.java index a024a2c0f303c..63094852c3626 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeEnabledRestTestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeEnabledRestTestIT.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb; +package org.elasticsearch.xpack.logsdb; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeRestTestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java similarity index 92% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeRestTestIT.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java index 22ac2b6d7d239..dbee5d1d2de8c 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeRestTestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb; +package org.elasticsearch.xpack.logsdb; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/AbstractChallengeRestTest.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/AbstractChallengeRestTest.java similarity index 96% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/AbstractChallengeRestTest.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/AbstractChallengeRestTest.java index 6464b4e966823..60c7d07115ef2 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/AbstractChallengeRestTest.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/AbstractChallengeRestTest.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb.qa; +package org.elasticsearch.xpack.logsdb.qa; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/DataGenerationHelper.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/DataGenerationHelper.java similarity index 89% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/DataGenerationHelper.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/DataGenerationHelper.java index d07e29c6b6b31..c03e8aea9c2ac 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/DataGenerationHelper.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/DataGenerationHelper.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb.qa; +package org.elasticsearch.xpack.logsdb.qa; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.Mapper; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT.java similarity index 84% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT.java index e1cafc40f706f..0329f7723a108 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb.qa; +package org.elasticsearch.xpack.logsdb.qa; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/LogsDbVersusReindexedLogsDbChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusReindexedLogsDbChallengeRestIT.java similarity index 84% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/LogsDbVersusReindexedLogsDbChallengeRestIT.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusReindexedLogsDbChallengeRestIT.java index dd80917b5f080..1c425cf30907b 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/LogsDbVersusReindexedLogsDbChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusReindexedLogsDbChallengeRestIT.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb.qa; +package org.elasticsearch.xpack.logsdb.qa; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/ReindexChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/ReindexChallengeRestIT.java similarity index 76% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/ReindexChallengeRestIT.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/ReindexChallengeRestIT.java index b48dce9ca4c57..83344b688ff8c 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/ReindexChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/ReindexChallengeRestIT.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb.qa; +package org.elasticsearch.xpack.logsdb.qa; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java similarity index 97% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java index 4c896e1f262b2..dd7806fc9c8fa 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb.qa; +package org.elasticsearch.xpack.logsdb.qa; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; @@ -18,8 +16,6 @@ import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.FormatNames; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.datastreams.logsdb.qa.matchers.MatchResult; -import org.elasticsearch.datastreams.logsdb.qa.matchers.Matcher; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.aggregations.AggregationBuilders; @@ -31,6 +27,8 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.logsdb.qa.matchers.MatchResult; +import org.elasticsearch.xpack.logsdb.qa.matchers.Matcher; import org.hamcrest.Matchers; import java.io.IOException; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java similarity index 82% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java index 6a20626634499..3b141908f45b1 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ -package org.elasticsearch.datastreams.logsdb.qa; +package org.elasticsearch.xpack.logsdb.qa; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT.java similarity index 63% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT.java index 6b0e4d4d0b34d..c1f97823b963a 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ -package org.elasticsearch.datastreams.logsdb.qa; +package org.elasticsearch.xpack.logsdb.qa; import org.elasticsearch.common.settings.Settings; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusStandardReindexedIntoLogsDbChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusStandardReindexedIntoLogsDbChallengeRestIT.java similarity index 74% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusStandardReindexedIntoLogsDbChallengeRestIT.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusStandardReindexedIntoLogsDbChallengeRestIT.java index d6cfebed1445a..5adf44f10be45 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusStandardReindexedIntoLogsDbChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusStandardReindexedIntoLogsDbChallengeRestIT.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ -package org.elasticsearch.datastreams.logsdb.qa; +package org.elasticsearch.xpack.logsdb.qa; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xcontent.XContentBuilder; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ArrayEqualMatcher.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ArrayEqualMatcher.java similarity index 83% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ArrayEqualMatcher.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ArrayEqualMatcher.java index b8f68a3c17494..b98ad65ac4d4f 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ArrayEqualMatcher.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ArrayEqualMatcher.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
+ * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ -package org.elasticsearch.datastreams.logsdb.qa.matchers; +package org.elasticsearch.xpack.logsdb.qa.matchers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xcontent.XContentBuilder; @@ -15,8 +13,8 @@ import java.util.Arrays; import java.util.List; -import static org.elasticsearch.datastreams.logsdb.qa.matchers.Messages.formatErrorMessage; -import static org.elasticsearch.datastreams.logsdb.qa.matchers.Messages.prettyPrintArrays; +import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.formatErrorMessage; +import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.prettyPrintArrays; class ArrayEqualMatcher extends GenericEqualsMatcher { ArrayEqualMatcher( diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/GenericEqualsMatcher.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/GenericEqualsMatcher.java similarity index 85% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/GenericEqualsMatcher.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/GenericEqualsMatcher.java index 3edc9aeec6da8..933c7eb86f65a 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/GenericEqualsMatcher.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/GenericEqualsMatcher.java @@ -1,20 +1,18 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ -package org.elasticsearch.datastreams.logsdb.qa.matchers; +package org.elasticsearch.xpack.logsdb.qa.matchers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xcontent.XContentBuilder; import java.util.List; -import static org.elasticsearch.datastreams.logsdb.qa.matchers.Messages.formatErrorMessage; +import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.formatErrorMessage; public class GenericEqualsMatcher extends Matcher { protected final XContentBuilder actualMappings; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ListEqualMatcher.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ListEqualMatcher.java similarity index 83% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ListEqualMatcher.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ListEqualMatcher.java index 00e22e9714283..447aa21b932c2 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ListEqualMatcher.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ListEqualMatcher.java @@ -1,21 +1,19 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ -package org.elasticsearch.datastreams.logsdb.qa.matchers; +package org.elasticsearch.xpack.logsdb.qa.matchers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xcontent.XContentBuilder; import java.util.List; -import static org.elasticsearch.datastreams.logsdb.qa.matchers.Messages.formatErrorMessage; -import static org.elasticsearch.datastreams.logsdb.qa.matchers.Messages.prettyPrintCollections; +import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.formatErrorMessage; +import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.prettyPrintCollections; public class ListEqualMatcher extends GenericEqualsMatcher> { public ListEqualMatcher( diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/MatchResult.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/MatchResult.java similarity index 73% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/MatchResult.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/MatchResult.java index 4334209bcfa30..a890a0375ef03 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/MatchResult.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/MatchResult.java @@ -1,13 +1,11 @@ /* * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ -package org.elasticsearch.datastreams.logsdb.qa.matchers; +package org.elasticsearch.xpack.logsdb.qa.matchers; import java.util.Objects; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/Matcher.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/Matcher.java similarity index 90% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/Matcher.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/Matcher.java index 313bf3b3d2392..e08e401c19530 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/Matcher.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/Matcher.java @@ -1,17 +1,15 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
+ * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ -package org.elasticsearch.datastreams.logsdb.qa.matchers; +package org.elasticsearch.xpack.logsdb.qa.matchers; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.datastreams.logsdb.qa.matchers.source.SourceMatcher; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.logsdb.qa.matchers.source.SourceMatcher; import java.util.List; import java.util.Map; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/Messages.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/Messages.java similarity index 80% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/Messages.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/Messages.java index ddd8212c093eb..122e3b2d6261c 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/Messages.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/Messages.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb.qa.matchers; +package org.elasticsearch.xpack.logsdb.qa.matchers; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ObjectMatcher.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ObjectMatcher.java similarity index 68% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ObjectMatcher.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ObjectMatcher.java index d071419f5aa6a..f2f08b1dfac14 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ObjectMatcher.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ObjectMatcher.java @@ -1,18 +1,16 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb.qa.matchers; +package org.elasticsearch.xpack.logsdb.qa.matchers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xcontent.XContentBuilder; -import static org.elasticsearch.datastreams.logsdb.qa.matchers.Messages.formatErrorMessage; +import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.formatErrorMessage; public class ObjectMatcher extends GenericEqualsMatcher { ObjectMatcher( diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/DynamicFieldMatcher.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/DynamicFieldMatcher.java similarity index 80% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/DynamicFieldMatcher.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/DynamicFieldMatcher.java index 5d7ab12f06d7b..d6812c41f7611 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/DynamicFieldMatcher.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/DynamicFieldMatcher.java @@ -1,17 +1,15 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb.qa.matchers.source; +package org.elasticsearch.xpack.logsdb.qa.matchers.source; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.datastreams.logsdb.qa.matchers.MatchResult; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.logsdb.qa.matchers.MatchResult; import java.util.List; import java.util.Objects; @@ -20,8 +18,8 @@ import java.util.function.Function; import java.util.stream.Collectors; -import static org.elasticsearch.datastreams.logsdb.qa.matchers.Messages.formatErrorMessage; -import static org.elasticsearch.datastreams.logsdb.qa.matchers.Messages.prettyPrintCollections; +import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.formatErrorMessage; +import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.prettyPrintCollections; class DynamicFieldMatcher { private final XContentBuilder actualMappings; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/FieldSpecificMatcher.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/FieldSpecificMatcher.java similarity index 92% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/FieldSpecificMatcher.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/FieldSpecificMatcher.java index 96423125410a7..0c970f1b5fd9a 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/FieldSpecificMatcher.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/FieldSpecificMatcher.java @@ -1,18 +1,16 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ -package org.elasticsearch.datastreams.logsdb.qa.matchers.source; +package org.elasticsearch.xpack.logsdb.qa.matchers.source; import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.datastreams.logsdb.qa.matchers.MatchResult; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.logsdb.qa.matchers.MatchResult; import java.math.BigInteger; import java.util.List; @@ -22,8 +20,8 @@ import java.util.function.Function; import java.util.stream.Collectors; -import static org.elasticsearch.datastreams.logsdb.qa.matchers.Messages.formatErrorMessage; -import static org.elasticsearch.datastreams.logsdb.qa.matchers.Messages.prettyPrintCollections; +import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.formatErrorMessage; +import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.prettyPrintCollections; interface FieldSpecificMatcher { MatchResult match(List actual, List expected, Map actualMapping, Map expectedMapping); diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/MappingTransforms.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/MappingTransforms.java similarity index 88% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/MappingTransforms.java rename to 
x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/MappingTransforms.java index ef5775eec3703..dbe73e3c2a4c2 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/MappingTransforms.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/MappingTransforms.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb.qa.matchers.source; +package org.elasticsearch.xpack.logsdb.qa.matchers.source; import java.util.ArrayList; import java.util.HashMap; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/SourceMatcher.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/SourceMatcher.java similarity index 90% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/SourceMatcher.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/SourceMatcher.java index 0d12fabe081fa..cd2bb361d065d 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/SourceMatcher.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/SourceMatcher.java @@ -1,29 +1,27 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ -package org.elasticsearch.datastreams.logsdb.qa.matchers.source; +package org.elasticsearch.xpack.logsdb.qa.matchers.source; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.datastreams.logsdb.qa.matchers.GenericEqualsMatcher; -import org.elasticsearch.datastreams.logsdb.qa.matchers.ListEqualMatcher; -import org.elasticsearch.datastreams.logsdb.qa.matchers.MatchResult; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.logsdb.qa.matchers.GenericEqualsMatcher; +import org.elasticsearch.xpack.logsdb.qa.matchers.ListEqualMatcher; +import org.elasticsearch.xpack.logsdb.qa.matchers.MatchResult; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; -import static org.elasticsearch.datastreams.logsdb.qa.matchers.Messages.formatErrorMessage; -import static org.elasticsearch.datastreams.logsdb.qa.matchers.Messages.prettyPrintCollections; +import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.formatErrorMessage; +import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.prettyPrintCollections; public class SourceMatcher extends GenericEqualsMatcher>> { private final Map actualNormalizedMapping; diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/SourceTransforms.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/SourceTransforms.java similarity index 86% rename from modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/SourceTransforms.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/SourceTransforms.java index 23e3f090cafc3..c21383d411212 100644 --- 
a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/source/SourceTransforms.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/SourceTransforms.java @@ -1,13 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ -package org.elasticsearch.datastreams.logsdb.qa.matchers.source; +package org.elasticsearch.xpack.logsdb.qa.matchers.source; import java.util.ArrayList; import java.util.Collections; From 73ca4f52b27e1cbc73e8a95eec2ce81c3621c6de Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Thu, 17 Oct 2024 17:36:30 +0200 Subject: [PATCH 193/449] Reapply "ESQL: Remove parent from FieldAttribute (#112881)" (#115006) (#115007) This reverts commit 17ecb66a0653bc561f6e44a3e688942e304664d5 and reapplies https://github.com/elastic/elasticsearch/pull/112881 once the previous, non-backported transport version bump is dealt with. 
--- docs/changelog/112881.yaml | 5 + .../org/elasticsearch/TransportVersions.java | 1 + .../xpack/esql/core/expression/Alias.java | 7 +- .../xpack/esql/core/expression/Attribute.java | 7 +- .../esql/core/expression/FieldAttribute.java | 97 ++++++++++++------- .../core/expression/MetadataAttribute.java | 7 +- .../esql/core/expression/NamedExpression.java | 5 +- .../core/expression/ReferenceAttribute.java | 10 +- .../esql/core/expression/TypedAttribute.java | 10 +- .../core/expression/UnresolvedAttribute.java | 3 +- .../xpack/esql/core/type/DataType.java | 8 +- .../xpack/esql/core/type/DateEsField.java | 9 +- .../xpack/esql/core/type/EsField.java | 9 +- .../esql/core/type/InvalidMappedField.java | 9 +- .../xpack/esql/core/type/KeywordEsField.java | 8 +- .../esql/core/type/MultiTypeEsField.java | 9 +- .../xpack/esql/core/type/TextEsField.java | 8 +- .../esql/core/type/UnsupportedEsField.java | 13 +-- .../xpack/esql/core/util/PlanStreamInput.java | 10 ++ .../esql/core/util/PlanStreamOutput.java | 12 +++ .../expression/FieldAttributeTestUtils.java | 6 +- .../xpack/esql/analysis/Analyzer.java | 12 +-- .../function/UnsupportedAttribute.java | 12 ++- .../xpack/esql/io/stream/PlanStreamInput.java | 14 ++- .../esql/io/stream/PlanStreamOutput.java | 18 +++- .../xpack/esql/plan/logical/EsRelation.java | 7 +- .../function/FieldAttributeTests.java | 10 +- .../esql/index/EsIndexSerializationTests.java | 47 +++++++++ .../esql/io/stream/PlanStreamOutputTests.java | 17 +--- .../ExchangeSinkExecSerializationTests.java | 67 +++++++++++-- 30 files changed, 323 insertions(+), 134 deletions(-) create mode 100644 docs/changelog/112881.yaml diff --git a/docs/changelog/112881.yaml b/docs/changelog/112881.yaml new file mode 100644 index 0000000000000..a8a0d542f8201 --- /dev/null +++ b/docs/changelog/112881.yaml @@ -0,0 +1,5 @@ +pr: 112881 +summary: "ESQL: Remove parent from `FieldAttribute`" +area: ES|QL +type: enhancement +issues: [] diff --git 
a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index acddc517012cf..d85990b4ede8c 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -176,6 +176,7 @@ static TransportVersion def(int id) { public static final TransportVersion CONVERT_FAILURE_STORE_OPTIONS_TO_SELECTOR_OPTIONS_INTERNALLY = def(8_772_00_0); public static final TransportVersion REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_773_00_0); public static final TransportVersion REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_774_00_0); + public static final TransportVersion ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED = def(8_775_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Alias.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Alias.java index e33f9b1c20527..1f7d03ba9d905 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Alias.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Alias.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -42,11 +43,11 @@ public Alias(Source source, String name, Expression child) { this(source, name, child, null); } - public Alias(Source source, String name, Expression child, NameId id) { + public Alias(Source source, String name, Expression child, @Nullable NameId id) { this(source, name, child, id, false); } - public Alias(Source source, String 
name, Expression child, NameId id, boolean synthetic) { + public Alias(Source source, String name, Expression child, @Nullable NameId id, boolean synthetic) { super(source, name, singletonList(child), id, synthetic); this.child = child; } @@ -55,7 +56,7 @@ public Alias(Source source, String name, Expression child, NameId id, boolean sy /** * Old constructor from when this had a qualifier string. Still needed to not break serialization. */ - private Alias(Source source, String name, String qualifier, Expression child, NameId id, boolean synthetic) { + private Alias(Source source, String name, String qualifier, Expression child, @Nullable NameId id, boolean synthetic) { this(source, name, child, id, synthetic); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java index 05c414298fd33..45f42a754910d 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.core.expression; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -41,15 +42,15 @@ public static List getNamedWriteables() { // can the attr be null - typically used in JOINs private final Nullability nullability; - public Attribute(Source source, String name, NameId id) { + public Attribute(Source source, String name, @Nullable NameId id) { this(source, name, Nullability.TRUE, id); } - public Attribute(Source source, String name, Nullability nullability, NameId id) { + public Attribute(Source source, String name, Nullability nullability, @Nullable NameId id) { this(source, name, 
nullability, id, false); } - public Attribute(Source source, String name, Nullability nullability, NameId id, boolean synthetic) { + public Attribute(Source source, String name, Nullability nullability, @Nullable NameId id, boolean synthetic) { super(source, name, emptyList(), id, synthetic); this.nullability = nullability; } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FieldAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FieldAttribute.java index 767d2f45f90e4..4076acdb7e7b8 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FieldAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FieldAttribute.java @@ -6,21 +6,25 @@ */ package org.elasticsearch.xpack.esql.core.expression; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; -import org.elasticsearch.xpack.esql.core.util.StringUtils; import java.io.IOException; import java.util.Objects; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; + /** * Attribute for an ES field. 
* To differentiate between the different type of fields this class offers: @@ -37,32 +41,31 @@ public class FieldAttribute extends TypedAttribute { FieldAttribute::readFrom ); - private final FieldAttribute parent; - private final String path; + private final String parentName; private final EsField field; public FieldAttribute(Source source, String name, EsField field) { this(source, null, name, field); } - public FieldAttribute(Source source, FieldAttribute parent, String name, EsField field) { - this(source, parent, name, field, Nullability.TRUE, null, false); + public FieldAttribute(Source source, @Nullable String parentName, String name, EsField field) { + this(source, parentName, name, field, Nullability.TRUE, null, false); } - public FieldAttribute(Source source, FieldAttribute parent, String name, EsField field, boolean synthetic) { - this(source, parent, name, field, Nullability.TRUE, null, synthetic); + public FieldAttribute(Source source, @Nullable String parentName, String name, EsField field, boolean synthetic) { + this(source, parentName, name, field, Nullability.TRUE, null, synthetic); } public FieldAttribute( Source source, - FieldAttribute parent, + @Nullable String parentName, String name, EsField field, Nullability nullability, - NameId id, + @Nullable NameId id, boolean synthetic ) { - this(source, parent, name, field.getDataType(), field, nullability, id, synthetic); + this(source, parentName, name, field.getDataType(), field, nullability, id, synthetic); } /** @@ -71,17 +74,16 @@ public FieldAttribute( */ FieldAttribute( Source source, - FieldAttribute parent, + @Nullable String parentName, String name, DataType type, EsField field, Nullability nullability, - NameId id, + @Nullable NameId id, boolean synthetic ) { super(source, name, type, nullability, id, synthetic); - this.path = parent != null ? 
parent.name() : StringUtils.EMPTY; - this.parent = parent; + this.parentName = parentName; this.field = field; } @@ -91,16 +93,16 @@ public FieldAttribute( */ private FieldAttribute( Source source, - FieldAttribute parent, + @Nullable String parentName, String name, DataType type, EsField field, - String qualifier, + @Nullable String qualifier, Nullability nullability, - NameId id, + @Nullable NameId id, boolean synthetic ) { - this(source, parent, name, type, field, nullability, id, synthetic); + this(source, parentName, name, type, field, nullability, id, synthetic); } private FieldAttribute(StreamInput in) throws IOException { @@ -114,8 +116,8 @@ private FieldAttribute(StreamInput in) throws IOException { */ this( Source.readFrom((StreamInput & PlanStreamInput) in), - in.readOptionalWriteable(FieldAttribute::readFrom), - ((PlanStreamInput) in).readCachedString(), + readParentName(in), + readCachedStringWithVersionCheck(in), DataType.readFrom(in), EsField.readFrom(in), in.readOptionalString(), @@ -129,8 +131,8 @@ private FieldAttribute(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { if (((PlanStreamOutput) out).writeAttributeCacheHeader(this)) { Source.EMPTY.writeTo(out); - out.writeOptionalWriteable(parent); - ((PlanStreamOutput) out).writeCachedString(name()); + writeParentName(out); + writeCachedStringWithVersionCheck(out, name()); dataType().writeTo(out); field.writeTo(out); // We used to write the qualifier here. We can still do if needed in the future. 
@@ -145,6 +147,26 @@ public static FieldAttribute readFrom(StreamInput in) throws IOException { return ((PlanStreamInput) in).readAttributeWithCache(FieldAttribute::new); } + private void writeParentName(StreamOutput out) throws IOException { + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED)) { + ((PlanStreamOutput) out).writeOptionalCachedString(parentName); + } else { + // Previous versions only used the parent field attribute to retrieve the parent's name, so we can use just any + // fake FieldAttribute here as long as the name is correct. + FieldAttribute fakeParent = parentName() == null ? null : new FieldAttribute(Source.EMPTY, parentName(), field()); + out.writeOptionalWriteable(fakeParent); + } + } + + private static String readParentName(StreamInput in) throws IOException { + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED)) { + return ((PlanStreamInput) in).readOptionalCachedString(); + } + + FieldAttribute parent = in.readOptionalWriteable(FieldAttribute::readFrom); + return parent == null ? null : parent.name(); + } + @Override public String getWriteableName() { return ENTRY.name; @@ -152,15 +174,22 @@ public String getWriteableName() { @Override protected NodeInfo info() { - return NodeInfo.create(this, FieldAttribute::new, parent, name(), dataType(), field, (String) null, nullable(), id(), synthetic()); - } - - public FieldAttribute parent() { - return parent; + return NodeInfo.create( + this, + FieldAttribute::new, + parentName, + name(), + dataType(), + field, + (String) null, + nullable(), + id(), + synthetic() + ); } - public String path() { - return path; + public String parentName() { + return parentName; } /** @@ -174,7 +203,7 @@ public String fieldName() { if ((synthetic() || name().startsWith(SYNTHETIC_ATTRIBUTE_NAME_PREFIX)) == false) { return name(); } - return Strings.hasText(path) ? path + "." 
+ field.getName() : field.getName(); + return Strings.hasText(parentName) ? parentName + "." + field.getName() : field.getName(); } public EsField.Exact getExactInfo() { @@ -190,13 +219,13 @@ public FieldAttribute exactAttribute() { } private FieldAttribute innerField(EsField type) { - return new FieldAttribute(source(), this, name() + "." + type.getName(), type, nullable(), id(), synthetic()); + return new FieldAttribute(source(), name(), name() + "." + type.getName(), type, nullable(), id(), synthetic()); } @Override protected Attribute clone(Source source, String name, DataType type, Nullability nullability, NameId id, boolean synthetic) { // Ignore `type`, this must be the same as the field's type. - return new FieldAttribute(source, parent, name, field, nullability, id, synthetic); + return new FieldAttribute(source, parentName, name, field, nullability, id, synthetic); } @Override @@ -206,13 +235,13 @@ public Attribute withDataType(DataType type) { @Override public int hashCode() { - return Objects.hash(super.hashCode(), path, field); + return Objects.hash(super.hashCode(), parentName, field); } @Override public boolean equals(Object obj) { return super.equals(obj) - && Objects.equals(path, ((FieldAttribute) obj).path) + && Objects.equals(parentName, ((FieldAttribute) obj).parentName) && Objects.equals(field, ((FieldAttribute) obj).field); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java index 539c55ba341cf..3641812cd6cad 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.IgnoredFieldMapper; @@ -59,7 +60,7 @@ public MetadataAttribute( String name, DataType dataType, Nullability nullability, - NameId id, + @Nullable NameId id, boolean synthetic, boolean searchable ) { @@ -79,9 +80,9 @@ private MetadataAttribute( Source source, String name, DataType dataType, - String qualifier, + @Nullable String qualifier, Nullability nullability, - NameId id, + @Nullable NameId id, boolean synthetic, boolean searchable ) { diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java index ba467910bed0d..3b018f09e5ebd 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java @@ -8,6 +8,7 @@ import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.Source; import java.util.ArrayList; @@ -32,11 +33,11 @@ public static List getNamedWriteables() { private final NameId id; private final boolean synthetic; - public NamedExpression(Source source, String name, List children, NameId id) { + public NamedExpression(Source source, String name, List children, @Nullable NameId id) { this(source, name, children, id, false); } - public NamedExpression(Source source, String name, List children, NameId id, boolean synthetic) { + public NamedExpression(Source source, String name, List children, @Nullable NameId id, boolean 
synthetic) { super(source, children); this.name = name; this.id = id == null ? new NameId() : id; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ReferenceAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ReferenceAttribute.java index 504e1eae8d880..3626c5d26f235 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ReferenceAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ReferenceAttribute.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -31,7 +32,14 @@ public ReferenceAttribute(Source source, String name, DataType dataType) { this(source, name, dataType, Nullability.FALSE, null, false); } - public ReferenceAttribute(Source source, String name, DataType dataType, Nullability nullability, NameId id, boolean synthetic) { + public ReferenceAttribute( + Source source, + String name, + DataType dataType, + Nullability nullability, + @Nullable NameId id, + boolean synthetic + ) { super(source, name, dataType, nullability, id, synthetic); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypedAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypedAttribute.java index 0350abef99992..f8a041110798c 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypedAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypedAttribute.java @@ -6,6 +6,7 
@@ */ package org.elasticsearch.xpack.esql.core.expression; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -15,7 +16,14 @@ public abstract class TypedAttribute extends Attribute { private final DataType dataType; - protected TypedAttribute(Source source, String name, DataType dataType, Nullability nullability, NameId id, boolean synthetic) { + protected TypedAttribute( + Source source, + String name, + DataType dataType, + Nullability nullability, + @Nullable NameId id, + boolean synthetic + ) { super(source, name, nullability, id, synthetic); this.dataType = dataType; } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/UnresolvedAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/UnresolvedAttribute.java index d8a35adcbffde..a971a15a23c86 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/UnresolvedAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/UnresolvedAttribute.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.core.expression; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.capabilities.Unresolvable; import org.elasticsearch.xpack.esql.core.capabilities.UnresolvedException; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -33,7 +34,7 @@ public UnresolvedAttribute(Source source, String name, String unresolvedMessage) } @SuppressWarnings("this-escape") - public UnresolvedAttribute(Source source, String name, NameId id, String unresolvedMessage, Object resolutionMetadata) { + public UnresolvedAttribute(Source source, String name, @Nullable NameId id, String unresolvedMessage, Object resolutionMetadata) { super(source, name, id); this.customMessage = 
unresolvedMessage != null; this.unresolvedMsg = unresolvedMessage == null ? errorMessage(name(), null) : unresolvedMessage; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java index cb1a7b2eb6fe0..12699ca3ee720 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java @@ -14,8 +14,6 @@ import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; import org.elasticsearch.xpack.esql.core.plugin.EsqlCorePlugin; -import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.math.BigInteger; @@ -32,6 +30,8 @@ import static java.util.stream.Collectors.toMap; import static java.util.stream.Collectors.toUnmodifiableMap; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; public enum DataType { /** @@ -535,12 +535,12 @@ public DataType counter() { } public void writeTo(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeCachedString(typeName); + writeCachedStringWithVersionCheck(out, typeName); } public static DataType readFrom(StreamInput in) throws IOException { // TODO: Use our normal enum serialization pattern - return readFrom(((PlanStreamInput) in).readCachedString()); + return readFrom(readCachedStringWithVersionCheck(in)); } /** diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DateEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DateEsField.java index 
7c4b98c5af84e..3a81ec2a6f17d 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DateEsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DateEsField.java @@ -8,12 +8,13 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.Map; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; + /** * Information about a field in an ES index with the {@code date} type */ @@ -28,12 +29,12 @@ private DateEsField(String name, DataType dataType, Map propert } protected DateEsField(StreamInput in) throws IOException { - this(((PlanStreamInput) in).readCachedString(), DataType.DATETIME, in.readImmutableMap(EsField::readFrom), in.readBoolean()); + this(readCachedStringWithVersionCheck(in), DataType.DATETIME, in.readImmutableMap(EsField::readFrom), in.readBoolean()); } @Override public void writeContent(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeCachedString(getName()); + writeCachedStringWithVersionCheck(out, getName()); out.writeMap(getProperties(), (o, x) -> x.writeTo(out)); out.writeBoolean(isAggregatable()); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/EsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/EsField.java index 6235176d82de6..47dadcbb11de2 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/EsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/EsField.java @@ -18,6 +18,9 @@ import java.util.Map; import 
java.util.Objects; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; + /** * Information about a field in an ES index. */ @@ -60,7 +63,7 @@ public EsField(String name, DataType esDataType, Map properties } public EsField(StreamInput in) throws IOException { - this.name = ((PlanStreamInput) in).readCachedString(); + this.name = readCachedStringWithVersionCheck(in); this.esDataType = readDataType(in); this.properties = in.readImmutableMap(EsField::readFrom); this.aggregatable = in.readBoolean(); @@ -68,7 +71,7 @@ public EsField(StreamInput in) throws IOException { } private DataType readDataType(StreamInput in) throws IOException { - String name = ((PlanStreamInput) in).readCachedString(); + String name = readCachedStringWithVersionCheck(in); if (in.getTransportVersion().before(TransportVersions.ESQL_NESTED_UNSUPPORTED) && name.equalsIgnoreCase("NESTED")) { /* * The "nested" data type existed in older versions of ESQL but was @@ -98,7 +101,7 @@ public void writeTo(StreamOutput out) throws IOException { * This needs to be overridden by subclasses for specific serialization */ public void writeContent(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeCachedString(name); + writeCachedStringWithVersionCheck(out, name); esDataType.writeTo(out); out.writeMap(properties, (o, x) -> x.writeTo(out)); out.writeBoolean(aggregatable); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/InvalidMappedField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/InvalidMappedField.java index 40825af56ccfe..f83e4652ebebd 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/InvalidMappedField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/InvalidMappedField.java @@ 
-10,8 +10,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; -import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.Map; @@ -20,6 +18,9 @@ import java.util.TreeMap; import java.util.stream.Collectors; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; + /** * Representation of field mapped differently across indices. * Used during mapping discovery only. @@ -54,7 +55,7 @@ private InvalidMappedField(String name, String errorMessage, Map types() { @@ -63,7 +64,7 @@ public Set types() { @Override public void writeContent(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeCachedString(getName()); + writeCachedStringWithVersionCheck(out, getName()); out.writeString(errorMessage); out.writeMap(getProperties(), (o, x) -> x.writeTo(out)); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/KeywordEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/KeywordEsField.java index 48995bafec451..8b88884a0ce17 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/KeywordEsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/KeywordEsField.java @@ -8,8 +8,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.Collections; @@ -17,6 +15,8 @@ import java.util.Objects; import 
static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; /** * Information about a field in an ES index with the {@code keyword} type. @@ -61,7 +61,7 @@ protected KeywordEsField( public KeywordEsField(StreamInput in) throws IOException { this( - ((PlanStreamInput) in).readCachedString(), + readCachedStringWithVersionCheck(in), KEYWORD, in.readImmutableMap(EsField::readFrom), in.readBoolean(), @@ -73,7 +73,7 @@ public KeywordEsField(StreamInput in) throws IOException { @Override public void writeContent(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeCachedString(getName()); + writeCachedStringWithVersionCheck(out, getName()); out.writeMap(getProperties(), (o, x) -> x.writeTo(out)); out.writeBoolean(isAggregatable()); out.writeInt(precision); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/MultiTypeEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/MultiTypeEsField.java index 522cb682c0943..0d7f9ee425d6a 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/MultiTypeEsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/MultiTypeEsField.java @@ -10,8 +10,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.HashMap; @@ -19,6 +17,9 @@ import java.util.Objects; import java.util.Set; +import static 
org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; + /** * During IndexResolution it could occur that the same field is mapped to different types in different indices. * The class MultiTypeEfField.UnresolvedField holds that information and allows for later resolution of the field @@ -39,7 +40,7 @@ public MultiTypeEsField(String name, DataType dataType, boolean aggregatable, Ma protected MultiTypeEsField(StreamInput in) throws IOException { this( - ((PlanStreamInput) in).readCachedString(), + readCachedStringWithVersionCheck(in), DataType.readFrom(in), in.readBoolean(), in.readImmutableMap(i -> i.readNamedWriteable(Expression.class)) @@ -48,7 +49,7 @@ protected MultiTypeEsField(StreamInput in) throws IOException { @Override public void writeContent(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeCachedString(getName()); + writeCachedStringWithVersionCheck(out, getName()); getDataType().writeTo(out); out.writeBoolean(isAggregatable()); out.writeMap(getIndexToConversionExpressions(), (o, v) -> out.writeNamedWriteable(v)); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/TextEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/TextEsField.java index c6c494ef289bb..ed0d32a7696eb 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/TextEsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/TextEsField.java @@ -10,8 +10,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Tuple; import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; -import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import 
java.util.Map; @@ -19,6 +17,8 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; /** * Information about a field in an es index with the {@code text} type. @@ -34,12 +34,12 @@ public TextEsField(String name, Map properties, boolean hasDocV } protected TextEsField(StreamInput in) throws IOException { - this(((PlanStreamInput) in).readCachedString(), in.readImmutableMap(EsField::readFrom), in.readBoolean(), in.readBoolean()); + this(readCachedStringWithVersionCheck(in), in.readImmutableMap(EsField::readFrom), in.readBoolean(), in.readBoolean()); } @Override public void writeContent(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeCachedString(getName()); + writeCachedStringWithVersionCheck(out, getName()); out.writeMap(getProperties(), (o, x) -> x.writeTo(out)); out.writeBoolean(isAggregatable()); out.writeBoolean(isAlias()); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/UnsupportedEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/UnsupportedEsField.java index 980620cb98847..02ce741243c20 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/UnsupportedEsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/UnsupportedEsField.java @@ -8,14 +8,15 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.Map; import java.util.Objects; import java.util.TreeMap; +import 
static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; + /** * Information about a field in an ES index that cannot be supported by ESQL. * All the subfields (properties) of an unsupported type are also be unsupported. @@ -37,8 +38,8 @@ public UnsupportedEsField(String name, String originalType, String inherited, Ma public UnsupportedEsField(StreamInput in) throws IOException { this( - ((PlanStreamInput) in).readCachedString(), - ((PlanStreamInput) in).readCachedString(), + readCachedStringWithVersionCheck(in), + readCachedStringWithVersionCheck(in), in.readOptionalString(), in.readImmutableMap(EsField::readFrom) ); @@ -46,8 +47,8 @@ public UnsupportedEsField(StreamInput in) throws IOException { @Override public void writeContent(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeCachedString(getName()); - ((PlanStreamOutput) out).writeCachedString(getOriginalType()); + writeCachedStringWithVersionCheck(out, getName()); + writeCachedStringWithVersionCheck(out, getOriginalType()); out.writeOptionalString(getInherited()); out.writeMap(getProperties(), (o, x) -> x.writeTo(out)); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java index 826b0cbfa3498..e8ccae3429001 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.core.util; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.xpack.esql.core.expression.Attribute; @@ 
-49,4 +50,13 @@ public interface PlanStreamInput { A readEsFieldWithCache() throws IOException; String readCachedString() throws IOException; + + static String readCachedStringWithVersionCheck(StreamInput planStreamInput) throws IOException { + if (planStreamInput.getTransportVersion().before(TransportVersions.ESQL_CACHED_STRING_SERIALIZATION)) { + return planStreamInput.readString(); + } + return ((PlanStreamInput) planStreamInput).readCachedString(); + } + + String readOptionalCachedString() throws IOException; } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java index e4797411c3796..fb4af33d2fd60 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.esql.core.util; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.type.EsField; @@ -33,4 +35,14 @@ public interface PlanStreamOutput { boolean writeEsFieldCacheHeader(EsField field) throws IOException; void writeCachedString(String field) throws IOException; + + static void writeCachedStringWithVersionCheck(StreamOutput planStreamOutput, String string) throws IOException { + if (planStreamOutput.getTransportVersion().before(TransportVersions.ESQL_CACHED_STRING_SERIALIZATION)) { + planStreamOutput.writeString(string); + } else { + ((PlanStreamOutput) planStreamOutput).writeCachedString(string); + } + } + + void writeOptionalCachedString(String str) throws IOException; } diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/FieldAttributeTestUtils.java 
b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/FieldAttributeTestUtils.java index 1662b7f973c9d..c7e5056ed0267 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/FieldAttributeTestUtils.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/FieldAttributeTestUtils.java @@ -12,9 +12,9 @@ import org.elasticsearch.xpack.esql.core.type.EsField; public class FieldAttributeTestUtils { - public static final FieldAttribute newFieldAttributeWithType( + public static FieldAttribute newFieldAttributeWithType( Source source, - FieldAttribute parent, + String parentName, String name, DataType type, EsField field, @@ -22,6 +22,6 @@ public static final FieldAttribute newFieldAttributeWithType( NameId id, boolean synthetic ) { - return new FieldAttribute(source, parent, name, type, field, nullability, id, synthetic); + return new FieldAttribute(source, parentName, name, type, field, nullability, id, synthetic); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index fe7b945a9b3c1..b18f58b0a43cb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -228,13 +228,13 @@ public static List mappingAsAttributes(Source source, Map list, Source source, FieldAttribute parent, Map mapping) { + private static void mappingAsAttributes(List list, Source source, String parentName, Map mapping) { for (Map.Entry entry : mapping.entrySet()) { String name = entry.getKey(); EsField t = entry.getValue(); if (t != null) { - name = parent == null ? name : parent.fieldName() + "." + name; + name = parentName == null ? name : parentName + "." 
+ name; var fieldProperties = t.getProperties(); var type = t.getDataType().widenSmallNumeric(); // due to a bug also copy the field since the Attribute hierarchy extracts the data type @@ -245,14 +245,14 @@ private static void mappingAsAttributes(List list, Source source, Fie FieldAttribute attribute = t instanceof UnsupportedEsField uef ? new UnsupportedAttribute(source, name, uef) - : new FieldAttribute(source, parent, name, t); + : new FieldAttribute(source, parentName, name, t); // primitive branch if (DataType.isPrimitive(type)) { list.add(attribute); } // allow compound object even if they are unknown if (fieldProperties.isEmpty() == false) { - mappingAsAttributes(list, source, attribute, fieldProperties); + mappingAsAttributes(list, source, attribute.name(), fieldProperties); } } } @@ -1252,7 +1252,7 @@ private Expression createIfDoesNotAlreadyExist( // NOTE: The name has to start with $$ to not break bwc with 8.15 - in that version, this is how we had to mark this as // synthetic to work around a bug. 
String unionTypedFieldName = Attribute.rawTemporaryName(fa.name(), "converted_to", resolvedField.getDataType().typeName()); - FieldAttribute unionFieldAttribute = new FieldAttribute(fa.source(), fa.parent(), unionTypedFieldName, resolvedField, true); + FieldAttribute unionFieldAttribute = new FieldAttribute(fa.source(), fa.parentName(), unionTypedFieldName, resolvedField, true); int existingIndex = unionFieldAttributes.indexOf(unionFieldAttribute); if (existingIndex >= 0) { // Do not generate multiple name/type combinations with different IDs @@ -1281,7 +1281,7 @@ private Expression typeSpecificConvert(AbstractConvertFunction convert, Source s FieldAttribute originalFieldAttr = (FieldAttribute) convert.field(); FieldAttribute resolvedAttr = new FieldAttribute( source, - originalFieldAttr.parent(), + originalFieldAttr.parentName(), originalFieldAttr.name(), field, originalFieldAttr.nullable(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java index 2c709de7717ce..d372eddb961ae 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.capabilities.Unresolvable; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -29,6 +30,9 @@ import java.io.IOException; import java.util.Objects; +import static 
org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; + /** * Unsupported attribute meaning an attribute that has been found yet cannot be used (hence why UnresolvedAttribute * cannot be used) expect in special conditions (currently only in projections to allow it to flow through @@ -63,11 +67,11 @@ public UnsupportedAttribute(Source source, String name, UnsupportedEsField field this(source, name, field, null); } - public UnsupportedAttribute(Source source, String name, UnsupportedEsField field, String customMessage) { + public UnsupportedAttribute(Source source, String name, UnsupportedEsField field, @Nullable String customMessage) { this(source, name, field, customMessage, null); } - public UnsupportedAttribute(Source source, String name, UnsupportedEsField field, String customMessage, NameId id) { + public UnsupportedAttribute(Source source, String name, UnsupportedEsField field, @Nullable String customMessage, @Nullable NameId id) { super(source, null, name, field, Nullability.TRUE, id, false); this.hasCustomMessage = customMessage != null; this.message = customMessage == null ? errorMessage(name(), field) : customMessage; @@ -76,7 +80,7 @@ public UnsupportedAttribute(Source source, String name, UnsupportedEsField field private UnsupportedAttribute(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readCachedString(), + readCachedStringWithVersionCheck(in), in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ES_FIELD_CACHED_SERIALIZATION) || in.getTransportVersion().isPatchFrom(TransportVersions.V_8_15_2) ? 
EsField.readFrom(in) : new UnsupportedEsField(in), in.readOptionalString(), @@ -88,7 +92,7 @@ private UnsupportedAttribute(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { if (((PlanStreamOutput) out).writeAttributeCacheHeader(this)) { Source.EMPTY.writeTo(out); - ((PlanStreamOutput) out).writeCachedString(name()); + writeCachedStringWithVersionCheck(out, name()); if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ES_FIELD_CACHED_SERIALIZATION) || out.getTransportVersion().isPatchFrom(TransportVersions.V_8_15_2)) { field().writeTo(out); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java index 9003cbec12d1e..1e1cc3b86a9d5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java @@ -37,6 +37,8 @@ import java.util.Map; import java.util.function.LongFunction; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; + /** * A customized stream input used to deserialize ESQL physical plan fragments. Complements stream * input with methods that read plan nodes, Attributes, Expressions, etc. 
@@ -224,7 +226,7 @@ public A readEsFieldWithCache() throws IOException { // it's safe to cast to int, since the max value for this is {@link PlanStreamOutput#MAX_SERIALIZED_ATTRIBUTES} int cacheId = Math.toIntExact(readZLong()); if (cacheId < 0) { - String className = readCachedString(); + String className = readCachedStringWithVersionCheck(this); Writeable.Reader reader = EsField.getReader(className); cacheId = -1 - cacheId; EsField result = reader.read(this); @@ -234,7 +236,7 @@ public A readEsFieldWithCache() throws IOException { return (A) esFieldFromCache(cacheId); } } else { - String className = readCachedString(); + String className = readCachedStringWithVersionCheck(this); Writeable.Reader reader = EsField.getReader(className); return (A) reader.read(this); } @@ -245,9 +247,6 @@ public A readEsFieldWithCache() throws IOException { */ @Override public String readCachedString() throws IOException { - if (getTransportVersion().before(TransportVersions.ESQL_CACHED_STRING_SERIALIZATION)) { - return readString(); - } int cacheId = Math.toIntExact(readZLong()); if (cacheId < 0) { String string = readString(); @@ -259,6 +258,11 @@ public String readCachedString() throws IOException { } } + @Override + public String readOptionalCachedString() throws IOException { + return readBoolean() ? 
readCachedString() : null; + } + private EsField esFieldFromCache(int id) throws IOException { EsField field = esFieldsCache[id]; if (field == null) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java index b633b10122eb3..615c4266620c7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java @@ -30,6 +30,8 @@ import java.util.IdentityHashMap; import java.util.Map; +import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; + /** * A customized stream output used to serialize ESQL physical plan fragments. Complements stream * output with methods that write plan nodes, Attributes, Expressions, etc. @@ -195,7 +197,7 @@ public boolean writeEsFieldCacheHeader(EsField field) throws IOException { cacheId = cacheEsField(field); writeZLong(-1 - cacheId); } - writeCachedString(field.getWriteableName()); + writeCachedStringWithVersionCheck(this, field.getWriteableName()); return true; } @@ -207,10 +209,6 @@ public boolean writeEsFieldCacheHeader(EsField field) throws IOException { */ @Override public void writeCachedString(String string) throws IOException { - if (getTransportVersion().before(TransportVersions.ESQL_CACHED_STRING_SERIALIZATION)) { - writeString(string); - return; - } Integer cacheId = stringCache.get(string); if (cacheId != null) { writeZLong(cacheId); @@ -226,6 +224,16 @@ public void writeCachedString(String string) throws IOException { writeString(string); } + @Override + public void writeOptionalCachedString(String str) throws IOException { + if (str == null) { + writeBoolean(false); + } else { + writeBoolean(true); + writeCachedString(str); + } + } + private Integer esFieldIdFromCache(EsField field) { return 
cachedEsFields.get(field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java index 951fc7ad1cf29..eb72009638396 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java @@ -112,7 +112,12 @@ private static List flatten(Source source, Map mappi EsField t = entry.getValue(); if (t != null) { - FieldAttribute f = new FieldAttribute(source, parent, parent != null ? parent.name() + "." + name : name, t); + FieldAttribute f = new FieldAttribute( + source, + parent != null ? parent.name() : null, + parent != null ? parent.name() + "." + name : name, + t + ); list.add(f); // object or nested if (t.getProperties().isEmpty() == false) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FieldAttributeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FieldAttributeTests.java index e8f0333791844..6b2040f58f84c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FieldAttributeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FieldAttributeTests.java @@ -20,7 +20,7 @@ public class FieldAttributeTests extends AbstractAttributeTestCase { public static FieldAttribute createFieldAttribute(int maxDepth, boolean onlyRepresentable) { Source source = Source.EMPTY; - FieldAttribute parent = maxDepth == 0 || randomBoolean() ? null : createFieldAttribute(maxDepth - 1, onlyRepresentable); + String parentName = maxDepth == 0 || randomBoolean() ? null : randomAlphaOfLength(3); String name = randomAlphaOfLength(5); DataType type = onlyRepresentable ? 
randomValueOtherThanMany(t -> false == DataType.isRepresentable(t), () -> randomFrom(DataType.types())) @@ -28,7 +28,7 @@ public static FieldAttribute createFieldAttribute(int maxDepth, boolean onlyRepr EsField field = AbstractEsFieldTypeTests.randomAnyEsField(maxDepth); Nullability nullability = randomFrom(Nullability.values()); boolean synthetic = randomBoolean(); - return newFieldAttributeWithType(source, parent, name, type, field, nullability, new NameId(), synthetic); + return newFieldAttributeWithType(source, parentName, name, type, field, nullability, new NameId(), synthetic); } @Override @@ -39,20 +39,20 @@ protected FieldAttribute create() { @Override protected FieldAttribute mutate(FieldAttribute instance) { Source source = instance.source(); - FieldAttribute parent = instance.parent(); + String parentName = instance.parentName(); String name = instance.name(); DataType type = instance.dataType(); EsField field = instance.field(); Nullability nullability = instance.nullable(); boolean synthetic = instance.synthetic(); switch (between(0, 5)) { - case 0 -> parent = randomValueOtherThan(parent, () -> randomBoolean() ? null : createFieldAttribute(2, false)); + case 0 -> parentName = randomValueOtherThan(parentName, () -> randomBoolean() ? 
null : randomAlphaOfLength(2)); case 1 -> name = randomAlphaOfLength(name.length() + 1); case 2 -> type = randomValueOtherThan(type, () -> randomFrom(DataType.types())); case 3 -> field = randomValueOtherThan(field, () -> AbstractEsFieldTypeTests.randomAnyEsField(3)); case 4 -> nullability = randomValueOtherThan(nullability, () -> randomFrom(Nullability.values())); case 5 -> synthetic = false == synthetic; } - return newFieldAttributeWithType(source, parent, name, type, field, nullability, new NameId(), synthetic); + return newFieldAttributeWithType(source, parentName, name, type, field, nullability, new NameId(), synthetic); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/index/EsIndexSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/index/EsIndexSerializationTests.java index 687b83370f571..82dd5a88ffaf1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/index/EsIndexSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/index/EsIndexSerializationTests.java @@ -182,4 +182,51 @@ private void testManyTypeConflicts(boolean withParent, ByteSizeValue expected) t assertThat(ByteSizeValue.ofBytes(out.bytes().length()), byteSizeEquals(expected)); } } + + public static EsIndex deeplyNestedIndex(int depth, int childrenPerLevel) { + String rootFieldName = "root"; + Map fields = Map.of(rootFieldName, fieldWithRecursiveChildren(depth, childrenPerLevel, rootFieldName)); + + return new EsIndex("deeply-nested", fields); + } + + private static EsField fieldWithRecursiveChildren(int depth, int childrenPerLevel, String name) { + assert depth >= 1; + + Map children = new TreeMap<>(); + String childName; + if (depth == 1) { + for (int i = 0; i < childrenPerLevel; i++) { + childName = "leaf" + i; + children.put(childName, new EsField(childName, DataType.KEYWORD, Map.of(), true)); + } + } else { + for (int i = 0; i < childrenPerLevel; i++) { + childName = 
"level" + depth + "child" + i; + children.put(childName, fieldWithRecursiveChildren(depth - 1, childrenPerLevel, childName)); + } + } + + return new EsField(name, DataType.OBJECT, children, false); + } + + /** + * Test de-/serialization and size on the wire for an index that has multiple levels of children: + * A single root with 9 children, each of which has 9 children etc. 6 levels deep. + */ + public void testDeeplyNestedFields() throws IOException { + ByteSizeValue expectedSize = ByteSizeValue.ofBytes(9425494); + /* + * History: + * 9425494b - string serialization #112929 + */ + + int depth = 6; + int childrenPerLevel = 9; + + try (BytesStreamOutput out = new BytesStreamOutput(); var pso = new PlanStreamOutput(out, null)) { + deeplyNestedIndex(depth, childrenPerLevel).writeTo(pso); + assertThat(ByteSizeValue.ofBytes(out.bytes().length()), byteSizeEquals(expectedSize)); + } + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java index 33252b9dbaaa3..d3e1710a715af 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.esql.Column; import org.elasticsearch.xpack.esql.core.InvalidArgumentException; import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; @@ -44,7 +43,6 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.nullValue; import static 
org.hamcrest.Matchers.sameInstance; public class PlanStreamOutputTests extends ESTestCase { @@ -118,26 +116,13 @@ public void testWriteAttributeMultipleTimes() throws IOException { for (int i = 0; i < occurrences; i++) { planStream.writeNamedWriteable(attribute); } - int depth = 0; - Attribute parent = attribute; - while (parent != null) { - depth++; - parent = parent instanceof FieldAttribute f ? f.parent() : null; - } - assertThat(planStream.cachedAttributes.size(), is(depth)); + assertThat(planStream.cachedAttributes.size(), is(1)); try (PlanStreamInput in = new PlanStreamInput(out.bytes().streamInput(), REGISTRY, configuration)) { Attribute first = in.readNamedWriteable(Attribute.class); for (int i = 1; i < occurrences; i++) { Attribute next = in.readNamedWriteable(Attribute.class); assertThat(first, sameInstance(next)); } - for (int i = 0; i < depth; i++) { - assertThat(first, equalTo(attribute)); - first = first instanceof FieldAttribute f ? f.parent() : null; - attribute = attribute instanceof FieldAttribute f ? f.parent() : null; - } - assertThat(first, is(nullValue())); - assertThat(attribute, is(nullValue())); } } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSinkExecSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSinkExecSerializationTests.java index 1f52795dbacd7..5989c0de6b61d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSinkExecSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSinkExecSerializationTests.java @@ -80,20 +80,67 @@ public void testManyTypeConflicts() throws IOException { * See {@link #testManyTypeConflicts(boolean, ByteSizeValue)} for more. 
*/ public void testManyTypeConflictsWithParent() throws IOException { - testManyTypeConflicts(true, ByteSizeValue.ofBytes(2774214)); + testManyTypeConflicts(true, ByteSizeValue.ofBytes(2774192)); /* * History: * 2 gb+ - start * 43.3mb - Cache attribute subclasses #111447 * 5.6mb - shorten error messages for UnsupportedAttributes #111973 * 3.1mb - cache EsFields #112008 - * 2.6mb - string serialization #112929 + * 2774214b - string serialization #112929 + * 2774192b - remove field attribute #112881 */ } + private void testManyTypeConflicts(boolean withParent, ByteSizeValue expected) throws IOException { + EsIndex index = EsIndexSerializationTests.indexWithManyConflicts(withParent); + testSerializePlanWithIndex(index, expected); + } + + /** + * Test the size of serializing a plan like + * FROM index | LIMIT 10 + * with a single root field that has many children, grandchildren etc. + */ + public void testDeeplyNestedFields() throws IOException { + ByteSizeValue expected = ByteSizeValue.ofBytes(47252411); + /* + * History: + * 48223371b - string serialization #112929 + * 47252411b - remove field attribute #112881 + */ + + int depth = 6; + int childrenPerLevel = 8; + + EsIndex index = EsIndexSerializationTests.deeplyNestedIndex(depth, childrenPerLevel); + testSerializePlanWithIndex(index, expected); + } + /** - * Test the size of serializing a plan with many conflicts. Callers of - * this method intentionally use a very precise size for the serialized + * Test the size of serializing a plan like + * FROM index | LIMIT 10 | KEEP one_single_field + * with a single root field that has many children, grandchildren etc. 
+ */ + public void testDeeplyNestedFieldsKeepOnlyOne() throws IOException { + ByteSizeValue expected = ByteSizeValue.ofBytes(9425806); + /* + * History: + * 9426058b - string serialization #112929 + * 9425806b - remove field attribute #112881 + */ + + int depth = 6; + int childrenPerLevel = 9; + + EsIndex index = EsIndexSerializationTests.deeplyNestedIndex(depth, childrenPerLevel); + testSerializePlanWithIndex(index, expected, false); + } + + /** + * Test the size of serializing the physical plan that will be sent to a data node. + * The plan corresponds to `FROM index | LIMIT 10`. + * Callers of this method intentionally use a very precise size for the serialized * data so a programmer making changes has to think when this size changes. *

    * In general, shrinking the over the wire size is great and the precise @@ -108,10 +155,14 @@ public void testManyTypeConflictsWithParent() throws IOException { * ESQL impossible to use at all for big mappings with many conflicts. *

    */ - private void testManyTypeConflicts(boolean withParent, ByteSizeValue expected) throws IOException { - EsIndex index = EsIndexSerializationTests.indexWithManyConflicts(withParent); - List attributes = Analyzer.mappingAsAttributes(randomSource(), index.mapping()); - EsRelation relation = new EsRelation(randomSource(), index, attributes, IndexMode.STANDARD); + private void testSerializePlanWithIndex(EsIndex index, ByteSizeValue expected) throws IOException { + testSerializePlanWithIndex(index, expected, true); + } + + private void testSerializePlanWithIndex(EsIndex index, ByteSizeValue expected, boolean keepAllFields) throws IOException { + List allAttributes = Analyzer.mappingAsAttributes(randomSource(), index.mapping()); + List keepAttributes = keepAllFields ? allAttributes : List.of(allAttributes.get(0)); + EsRelation relation = new EsRelation(randomSource(), index, keepAttributes, IndexMode.STANDARD); Limit limit = new Limit(randomSource(), new Literal(randomSource(), 10, DataType.INTEGER), relation); Project project = new Project(randomSource(), limit, limit.output()); FragmentExec fragmentExec = new FragmentExec(project); From d3fceaddefcc32c71321768d05f268bce2374634 Mon Sep 17 00:00:00 2001 From: Iraklis Psaroudakis Date: Thu, 17 Oct 2024 20:11:15 +0300 Subject: [PATCH 194/449] Revert fast refresh using search shards (#115019) As this induces ES-8275 and makes fleet time outs for some APIs. 
Relates ES-9573 --- .../refresh/TransportShardRefreshAction.java | 32 +++++++++++------- ...ansportUnpromotableShardRefreshAction.java | 15 --------- .../action/get/TransportGetAction.java | 4 ++- .../get/TransportShardMultiGetAction.java | 4 ++- .../support/replication/PostWriteRefresh.java | 9 +++-- .../cluster/routing/OperationRouting.java | 9 +---- .../index/cache/bitset/BitsetFilterCache.java | 7 +++- .../routing/IndexRoutingTableTests.java | 24 +++++--------- .../cache/bitset/BitSetFilterCacheTests.java | 33 ++++++++++++++----- 9 files changed, 74 insertions(+), 63 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java index cb667400240f0..7857e9a22e9b9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java @@ -23,6 +23,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.injection.guice.Inject; @@ -119,18 +120,27 @@ public void onPrimaryOperationComplete( ActionListener listener ) { assert replicaRequest.primaryRefreshResult.refreshed() : "primary has not refreshed"; - UnpromotableShardRefreshRequest unpromotableReplicaRequest = new UnpromotableShardRefreshRequest( - indexShardRoutingTable, - replicaRequest.primaryRefreshResult.primaryTerm(), - replicaRequest.primaryRefreshResult.generation(), - false - ); - transportService.sendRequest( - transportService.getLocalNode(), - TransportUnpromotableShardRefreshAction.NAME, - 
unpromotableReplicaRequest, - new ActionListenerResponseHandler<>(listener.safeMap(r -> null), in -> ActionResponse.Empty.INSTANCE, refreshExecutor) + boolean fastRefresh = IndexSettings.INDEX_FAST_REFRESH_SETTING.get( + clusterService.state().metadata().index(indexShardRoutingTable.shardId().getIndex()).getSettings() ); + + // Indices marked with fast refresh do not rely on refreshing the unpromotables + if (fastRefresh) { + listener.onResponse(null); + } else { + UnpromotableShardRefreshRequest unpromotableReplicaRequest = new UnpromotableShardRefreshRequest( + indexShardRoutingTable, + replicaRequest.primaryRefreshResult.primaryTerm(), + replicaRequest.primaryRefreshResult.generation(), + false + ); + transportService.sendRequest( + transportService.getLocalNode(), + TransportUnpromotableShardRefreshAction.NAME, + unpromotableReplicaRequest, + new ActionListenerResponseHandler<>(listener.safeMap(r -> null), in -> ActionResponse.Empty.INSTANCE, refreshExecutor) + ); + } } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java index f91a983d47885..6c24ec2d17604 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java @@ -24,9 +24,6 @@ import java.util.List; -import static org.elasticsearch.TransportVersions.FAST_REFRESH_RCO; -import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; - public class TransportUnpromotableShardRefreshAction extends TransportBroadcastUnpromotableAction< UnpromotableShardRefreshRequest, ActionResponse.Empty> { @@ -76,18 +73,6 @@ protected void unpromotableShardOperation( return; } - // During an upgrade to FAST_REFRESH_RCO, we expect search 
shards to be first upgraded before the primary is upgraded. Thus, - // when the primary is upgraded, and starts to deliver unpromotable refreshes, we expect the search shards to be upgraded already. - // Note that the fast refresh setting is final. - // TODO: remove assertion (ES-9563) - assert INDEX_FAST_REFRESH_SETTING.get(shard.indexSettings().getSettings()) == false - || transportService.getLocalNodeConnection().getTransportVersion().onOrAfter(FAST_REFRESH_RCO) - : "attempted to refresh a fast refresh search shard " - + shard - + " on transport version " - + transportService.getLocalNodeConnection().getTransportVersion() - + " (before FAST_REFRESH_RCO)"; - ActionListener.run(responseListener, listener -> { shard.waitForPrimaryTermAndGeneration( request.getPrimaryTerm(), diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java index fb4b3907d2bfd..9e535344c9589 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java @@ -126,10 +126,12 @@ protected void asyncShardOperation(GetRequest request, ShardId shardId, ActionLi IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); IndexShard indexShard = indexService.getShard(shardId.id()); if (indexShard.routingEntry().isPromotableToPrimary() == false) { + // TODO: Re-evaluate assertion (ES-8227) + // assert indexShard.indexSettings().isFastRefresh() == false + // : "a search shard should not receive a TransportGetAction for an index with fast refresh"; handleGetOnUnpromotableShard(request, indexShard, listener); return; } - // TODO: adapt assertion to assert only that it is not stateless (ES-9563) assert DiscoveryNode.isStateless(clusterService.getSettings()) == false || indexShard.indexSettings().isFastRefresh() : "in Stateless a promotable to primary shard can receive a 
TransportGetAction only if an index has the fast refresh setting"; if (request.realtime()) { // we are not tied to a refresh cycle here anyway diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java index 633e7ef6793ab..34b3ae50e0b51 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java @@ -124,10 +124,12 @@ protected void asyncShardOperation(MultiGetShardRequest request, ShardId shardId IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); IndexShard indexShard = indexService.getShard(shardId.id()); if (indexShard.routingEntry().isPromotableToPrimary() == false) { + // TODO: Re-evaluate assertion (ES-8227) + // assert indexShard.indexSettings().isFastRefresh() == false + // : "a search shard should not receive a TransportShardMultiGetAction for an index with fast refresh"; handleMultiGetOnUnpromotableShard(request, indexShard, listener); return; } - // TODO: adapt assertion to assert only that it is not stateless (ES-9563) assert DiscoveryNode.isStateless(clusterService.getSettings()) == false || indexShard.indexSettings().isFastRefresh() : "in Stateless a promotable to primary shard can receive a TransportShardMultiGetAction only if an index has " + "the fast refresh setting"; diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java b/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java index 7414aeeb2c405..683c3589c893d 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import 
org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.translog.Translog; @@ -52,7 +53,9 @@ public void refreshShard( case WAIT_UNTIL -> waitUntil(indexShard, location, new ActionListener<>() { @Override public void onResponse(Boolean forced) { - if (location != null && indexShard.routingEntry().isSearchable() == false) { + // Fast refresh indices do not depend on the unpromotables being refreshed + boolean fastRefresh = IndexSettings.INDEX_FAST_REFRESH_SETTING.get(indexShard.indexSettings().getSettings()); + if (location != null && (indexShard.routingEntry().isSearchable() == false && fastRefresh == false)) { refreshUnpromotables(indexShard, location, listener, forced, postWriteRefreshTimeout); } else { listener.onResponse(forced); @@ -65,7 +68,9 @@ public void onFailure(Exception e) { } }); case IMMEDIATE -> immediate(indexShard, listener.delegateFailureAndWrap((l, r) -> { - if (indexShard.getReplicationGroup().getRoutingTable().unpromotableShards().size() > 0) { + // Fast refresh indices do not depend on the unpromotables being refreshed + boolean fastRefresh = IndexSettings.INDEX_FAST_REFRESH_SETTING.get(indexShard.indexSettings().getSettings()); + if (indexShard.getReplicationGroup().getRoutingTable().unpromotableShards().size() > 0 && fastRefresh == false) { sendUnpromotableRequests(indexShard, r.generation(), true, l, postWriteRefreshTimeout); } else { l.onResponse(true); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java index 9120e25b443d7..f7812d284f2af 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java @@ -32,7 +32,6 @@ import 
java.util.Set; import java.util.stream.Collectors; -import static org.elasticsearch.TransportVersions.FAST_REFRESH_RCO; import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; public class OperationRouting { @@ -306,14 +305,8 @@ public ShardId shardId(ClusterState clusterState, String index, String id, @Null } public static boolean canSearchShard(ShardRouting shardRouting, ClusterState clusterState) { - // TODO: remove if and always return isSearchable (ES-9563) if (INDEX_FAST_REFRESH_SETTING.get(clusterState.metadata().index(shardRouting.index()).getSettings())) { - // Until all the cluster is upgraded, we send searches/gets to the primary (even if it has been upgraded) to execute locally. - if (clusterState.getMinTransportVersion().onOrAfter(FAST_REFRESH_RCO)) { - return shardRouting.isSearchable(); - } else { - return shardRouting.isPromotableToPrimary(); - } + return shardRouting.isPromotableToPrimary(); } else { return shardRouting.isSearchable(); } diff --git a/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java b/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java index 5792cafb91b77..5277999271984 100644 --- a/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java +++ b/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java @@ -58,6 +58,8 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; +import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; + /** * This is a cache for {@link BitDocIdSet} based filters and is unbounded by size or time. *

    @@ -103,7 +105,10 @@ static boolean shouldLoadRandomAccessFiltersEagerly(IndexSettings settings) { boolean loadFiltersEagerlySetting = settings.getValue(INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING); boolean isStateless = DiscoveryNode.isStateless(settings.getNodeSettings()); if (isStateless) { - return loadFiltersEagerlySetting && DiscoveryNode.hasRole(settings.getNodeSettings(), DiscoveryNodeRole.SEARCH_ROLE); + return loadFiltersEagerlySetting + && (DiscoveryNode.hasRole(settings.getNodeSettings(), DiscoveryNodeRole.SEARCH_ROLE) + || (DiscoveryNode.hasRole(settings.getNodeSettings(), DiscoveryNodeRole.INDEX_ROLE) + && INDEX_FAST_REFRESH_SETTING.get(settings.getSettings()))); } else { return loadFiltersEagerlySetting; } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java index 6a7f4bb27a324..21b30557cafea 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.cluster.routing; -import org.elasticsearch.TransportVersion; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; @@ -20,7 +19,6 @@ import java.util.List; -import static org.elasticsearch.TransportVersions.FAST_REFRESH_RCO; import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; @@ -29,22 +27,16 @@ public class IndexRoutingTableTests extends ESTestCase { public void testReadyForSearch() { - innerReadyForSearch(false, false); - innerReadyForSearch(false, true); - innerReadyForSearch(true, false); - innerReadyForSearch(true, true); + innerReadyForSearch(false); + innerReadyForSearch(true); } - // TODO: remove if 
(fastRefresh && beforeFastRefreshRCO) branches (ES-9563) - private void innerReadyForSearch(boolean fastRefresh, boolean beforeFastRefreshRCO) { + private void innerReadyForSearch(boolean fastRefresh) { Index index = new Index(randomIdentifier(), UUIDs.randomBase64UUID()); ClusterState clusterState = mock(ClusterState.class, Mockito.RETURNS_DEEP_STUBS); when(clusterState.metadata().index(any(Index.class)).getSettings()).thenReturn( Settings.builder().put(INDEX_FAST_REFRESH_SETTING.getKey(), fastRefresh).build() ); - when(clusterState.getMinTransportVersion()).thenReturn( - beforeFastRefreshRCO ? TransportVersion.fromId(FAST_REFRESH_RCO.id() - 1_00_0) : TransportVersion.current() - ); // 2 primaries that are search and index ShardId p1 = new ShardId(index, 0); IndexShardRoutingTable shardTable1 = new IndexShardRoutingTable( @@ -63,7 +55,7 @@ private void innerReadyForSearch(boolean fastRefresh, boolean beforeFastRefreshR shardTable1 = new IndexShardRoutingTable(p1, List.of(getShard(p1, true, ShardRoutingState.STARTED, ShardRouting.Role.INDEX_ONLY))); shardTable2 = new IndexShardRoutingTable(p2, List.of(getShard(p2, true, ShardRoutingState.STARTED, ShardRouting.Role.INDEX_ONLY))); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - if (fastRefresh && beforeFastRefreshRCO) { + if (fastRefresh) { assertTrue(indexRoutingTable.readyForSearch(clusterState)); } else { assertFalse(indexRoutingTable.readyForSearch(clusterState)); @@ -99,7 +91,7 @@ private void innerReadyForSearch(boolean fastRefresh, boolean beforeFastRefreshR ) ); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - if (fastRefresh && beforeFastRefreshRCO) { + if (fastRefresh) { assertTrue(indexRoutingTable.readyForSearch(clusterState)); } else { assertFalse(indexRoutingTable.readyForSearch(clusterState)); @@ -126,6 +118,8 @@ private void innerReadyForSearch(boolean fastRefresh, boolean 
beforeFastRefreshR assertTrue(indexRoutingTable.readyForSearch(clusterState)); // 2 unassigned primaries that are index only with some replicas that are all available + // Fast refresh indices do not support replicas so this can not practically happen. If we add support we will want to ensure + // that readyForSearch allows for searching replicas when the index shard is not available. shardTable1 = new IndexShardRoutingTable( p1, List.of( @@ -143,8 +137,8 @@ private void innerReadyForSearch(boolean fastRefresh, boolean beforeFastRefreshR ) ); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - if (fastRefresh && beforeFastRefreshRCO) { - assertFalse(indexRoutingTable.readyForSearch(clusterState)); + if (fastRefresh) { + assertFalse(indexRoutingTable.readyForSearch(clusterState)); // if we support replicas for fast refreshes this needs to change } else { assertTrue(indexRoutingTable.readyForSearch(clusterState)); } diff --git a/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java b/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java index b12cd256eebcc..d7d5c886e0741 100644 --- a/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java +++ b/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java @@ -48,6 +48,7 @@ import java.util.concurrent.atomic.AtomicLong; import static org.elasticsearch.cluster.node.DiscoveryNode.STATELESS_ENABLED_SETTING_NAME; +import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; import static org.elasticsearch.index.cache.bitset.BitsetFilterCache.INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -272,21 +273,35 @@ public void testShouldLoadRandomAccessFiltersEagerly() { for (var hasIndexRole : values) { for (var loadFiltersEagerly : 
values) { for (var isStateless : values) { - boolean result = BitsetFilterCache.shouldLoadRandomAccessFiltersEagerly( - bitsetFilterCacheSettings(isStateless, hasIndexRole, loadFiltersEagerly) - ); - if (isStateless) { - assertEquals(loadFiltersEagerly && hasIndexRole == false, result); - } else { - assertEquals(loadFiltersEagerly, result); + for (var fastRefresh : values) { + if (isStateless == false && fastRefresh) { + // fast refresh is only relevant for stateless indices + continue; + } + + boolean result = BitsetFilterCache.shouldLoadRandomAccessFiltersEagerly( + bitsetFilterCacheSettings(isStateless, hasIndexRole, loadFiltersEagerly, fastRefresh) + ); + if (isStateless) { + assertEquals(loadFiltersEagerly && ((hasIndexRole && fastRefresh) || hasIndexRole == false), result); + } else { + assertEquals(loadFiltersEagerly, result); + } } } } } } - private IndexSettings bitsetFilterCacheSettings(boolean isStateless, boolean hasIndexRole, boolean loadFiltersEagerly) { - var indexSettingsBuilder = Settings.builder().put(INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING.getKey(), loadFiltersEagerly); + private IndexSettings bitsetFilterCacheSettings( + boolean isStateless, + boolean hasIndexRole, + boolean loadFiltersEagerly, + boolean fastRefresh + ) { + var indexSettingsBuilder = Settings.builder() + .put(INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING.getKey(), loadFiltersEagerly) + .put(INDEX_FAST_REFRESH_SETTING.getKey(), fastRefresh); var nodeSettingsBuilder = Settings.builder() .putList( From f99321b325e742dee383817afbc4055e7b7eaf6f Mon Sep 17 00:00:00 2001 From: Mike Pellegrini Date: Thu, 17 Oct 2024 14:19:14 -0400 Subject: [PATCH 195/449] Support semantic_text in object fields (#114601) --- docs/changelog/114601.yaml | 6 + .../xpack/inference/InferenceFeatures.java | 4 + .../ShardBulkInferenceActionFilter.java | 2 +- .../mapper/SemanticTextFieldMapper.java | 131 +++++++ .../mapper/SemanticTextFieldMapperTests.java | 330 ++++++++++++++++++ 
.../10_semantic_text_field_mapping.yml | 28 ++ .../inference/30_semantic_text_inference.yml | 42 +++ 7 files changed, 542 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/114601.yaml diff --git a/docs/changelog/114601.yaml b/docs/changelog/114601.yaml new file mode 100644 index 0000000000000..d2f563d62a639 --- /dev/null +++ b/docs/changelog/114601.yaml @@ -0,0 +1,6 @@ +pr: 114601 +summary: Support semantic_text in object fields +area: Vector Search +type: bug +issues: + - 114401 diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java index 87b7be717d31b..216b5c984eca5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java @@ -36,4 +36,8 @@ public Set getFeatures() { return Set.copyOf(features); } + @Override + public Set getTestFeatures() { + return Set.of(SemanticTextFieldMapper.SEMANTIC_TEXT_IN_OBJECT_FIELD_FIX); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java index a4eb94c2674d1..1a6e4760fe125 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java @@ -397,7 +397,7 @@ private void applyInferenceResponses(BulkItemRequest item, FieldInferenceRespons ), indexRequest.getContentType() ); - newDocMap.put(fieldName, result); + SemanticTextFieldMapper.insertValue(fieldName, newDocMap, result); } indexRequest.source(newDocMap, 
indexRequest.getContentType()); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index a5702b38ea3f2..ce0b3a099d472 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -32,6 +32,7 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MapperMergeContext; +import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.NestedObjectMapper; import org.elasticsearch.index.mapper.ObjectMapper; import org.elasticsearch.index.mapper.SimpleMappedFieldType; @@ -86,6 +87,7 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFieldMapper { public static final NodeFeature SEMANTIC_TEXT_SEARCH_INFERENCE_ID = new NodeFeature("semantic_text.search_inference_id"); public static final NodeFeature SEMANTIC_TEXT_DEFAULT_ELSER_2 = new NodeFeature("semantic_text.default_elser_2"); + public static final NodeFeature SEMANTIC_TEXT_IN_OBJECT_FIELD_FIX = new NodeFeature("semantic_text.in_object_field_fix"); public static final String CONTENT_TYPE = "semantic_text"; public static final String DEFAULT_ELSER_2_INFERENCE_ID = DEFAULT_ELSER_ID; @@ -398,6 +400,25 @@ public Object getOriginalValue(Map sourceAsMap) { return XContentMapValues.extractValue(TEXT_FIELD, fieldValueMap); } + @Override + protected void doValidate(MappingLookup mappers) { + int parentPathIndex = fullPath().lastIndexOf(leafName()); + if (parentPathIndex > 0) { + // Check that the parent object field allows subobjects. + // Subtract one from the parent path index to omit the trailing dot delimiter. 
+ ObjectMapper parentMapper = mappers.objectMappers().get(fullPath().substring(0, parentPathIndex - 1)); + if (parentMapper == null) { + throw new IllegalStateException(CONTENT_TYPE + " field [" + fullPath() + "] does not have a parent object mapper"); + } + + if (parentMapper.subobjects() == ObjectMapper.Subobjects.DISABLED) { + throw new IllegalArgumentException( + CONTENT_TYPE + " field [" + fullPath() + "] cannot be in an object field with subobjects disabled" + ); + } + } + } + public static class SemanticTextFieldType extends SimpleMappedFieldType { private final String inferenceId; private final String searchInferenceId; @@ -592,6 +613,116 @@ private String generateInvalidQueryInferenceResultsMessage(StringBuilder baseMes } } + /** + *

    + * Insert or replace the path's value in the map with the provided new value. The map will be modified in-place. + * If the complete path does not exist in the map, it will be added to the deepest (sub-)map possible. + *

    + *

    + * For example, given the map: + *

    + *
    +     * {
    +     *   "path1": {
    +     *     "path2": {
    +     *       "key1": "value1"
    +     *     }
    +     *   }
    +     * }
    +     * 
    + *

    + * And the caller wanted to insert {@code "path1.path2.path3.key2": "value2"}, the method would emit the modified map: + *

    + *
    +     * {
    +     *   "path1": {
    +     *     "path2": {
    +     *       "key1": "value1",
    +     *       "path3.key2": "value2"
    +     *     }
    +     *   }
    +     * }
    +     * 
    + * + * @param path the value's path in the map. + * @param map the map to search and modify in-place. + * @param newValue the new value to assign to the path. + * + * @throws IllegalArgumentException If either the path cannot be fully traversed or there is ambiguity about where to insert the new + * value. + */ + public static void insertValue(String path, Map map, Object newValue) { + String[] pathElements = path.split("\\."); + if (pathElements.length == 0) { + return; + } + + List suffixMaps = extractSuffixMaps(pathElements, 0, map); + if (suffixMaps.isEmpty()) { + // This should never happen. Throw in case it does for some reason. + throw new IllegalStateException("extractSuffixMaps returned an empty suffix map list"); + } else if (suffixMaps.size() == 1) { + SuffixMap suffixMap = suffixMaps.getFirst(); + suffixMap.map().put(suffixMap.suffix(), newValue); + } else { + throw new IllegalArgumentException( + "Path [" + path + "] could be inserted in " + suffixMaps.size() + " distinct ways, it is ambiguous which one to use" + ); + } + } + + private record SuffixMap(String suffix, Map map) {} + + private static List extractSuffixMaps(String[] pathElements, int index, Object currentValue) { + if (currentValue instanceof List valueList) { + List suffixMaps = new ArrayList<>(valueList.size()); + for (Object o : valueList) { + suffixMaps.addAll(extractSuffixMaps(pathElements, index, o)); + } + + return suffixMaps; + } else if (currentValue instanceof Map) { + @SuppressWarnings("unchecked") + Map map = (Map) currentValue; + List suffixMaps = new ArrayList<>(map.size()); + + String key = pathElements[index]; + while (index < pathElements.length) { + if (map.containsKey(key)) { + if (index + 1 == pathElements.length) { + // We found the complete path + suffixMaps.add(new SuffixMap(key, map)); + } else { + // We've matched that path partially, keep traversing to try to match it fully + suffixMaps.addAll(extractSuffixMaps(pathElements, index + 1, map.get(key))); + } + } 
+ + if (++index < pathElements.length) { + key += "." + pathElements[index]; + } + } + + if (suffixMaps.isEmpty()) { + // We checked for all remaining elements in the path, and they do not exist. This means we found a leaf map that we should + // add the value to. + suffixMaps.add(new SuffixMap(key, map)); + } + + return suffixMaps; + } else { + throw new IllegalArgumentException( + "Path [" + + String.join(".", Arrays.copyOfRange(pathElements, 0, index)) + + "] has value [" + + currentValue + + "] of type [" + + currentValue.getClass().getSimpleName() + + "], which cannot be traversed into further" + ); + } + } + private static ObjectMapper createInferenceField( MapperBuilderContext context, IndexVersion indexVersionCreated, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java index 7c8d1bbf9fb4d..8416d58cb1328 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java @@ -57,6 +57,8 @@ import org.elasticsearch.search.NestedDocuments; import org.elasticsearch.search.SearchHit; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.inference.DefaultElserFeatureFlag; @@ -65,12 +67,16 @@ import org.junit.AssumptionViolatedException; import java.io.IOException; +import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import 
java.util.function.BiConsumer; +import java.util.stream.Stream; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.CHUNKED_EMBEDDINGS_FIELD; @@ -795,6 +801,266 @@ public void testExistsQueryDenseVector() throws IOException { assertThat(existsQuery, instanceOf(ESToParentBlockJoinQuery.class)); } + public void testInsertValueMapTraversal() throws IOException { + { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject().field("test", "value").endObject(); + + Map map = toSourceMap(Strings.toString(builder)); + SemanticTextFieldMapper.insertValue("test", map, "value2"); + assertThat(getMapValue(map, "test"), equalTo("value2")); + SemanticTextFieldMapper.insertValue("something.else", map, "something_else_value"); + assertThat(getMapValue(map, "something\\.else"), equalTo("something_else_value")); + } + { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + builder.startObject("path1").startObject("path2").field("test", "value").endObject().endObject(); + builder.endObject(); + + Map map = toSourceMap(Strings.toString(builder)); + SemanticTextFieldMapper.insertValue("path1.path2.test", map, "value2"); + assertThat(getMapValue(map, "path1.path2.test"), equalTo("value2")); + SemanticTextFieldMapper.insertValue("path1.path2.test_me", map, "test_me_value"); + assertThat(getMapValue(map, "path1.path2.test_me"), equalTo("test_me_value")); + SemanticTextFieldMapper.insertValue("path1.non_path2.test", map, "test_value"); + assertThat(getMapValue(map, "path1.non_path2\\.test"), equalTo("test_value")); + + SemanticTextFieldMapper.insertValue("path1.path2", map, Map.of("path3", "bar")); + assertThat(getMapValue(map, "path1.path2"), equalTo(Map.of("path3", "bar"))); + + SemanticTextFieldMapper.insertValue("path1", map, "baz"); + assertThat(getMapValue(map, "path1"), equalTo("baz")); + + SemanticTextFieldMapper.insertValue("path3.path4", map, Map.of("test", "foo")); + 
assertThat(getMapValue(map, "path3\\.path4"), equalTo(Map.of("test", "foo"))); + } + { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + builder.startObject("path1").array("test", "value1", "value2").endObject(); + builder.endObject(); + Map map = toSourceMap(Strings.toString(builder)); + + SemanticTextFieldMapper.insertValue("path1.test", map, List.of("value3", "value4", "value5")); + assertThat(getMapValue(map, "path1.test"), equalTo(List.of("value3", "value4", "value5"))); + + SemanticTextFieldMapper.insertValue("path2.test", map, List.of("value6", "value7", "value8")); + assertThat(getMapValue(map, "path2\\.test"), equalTo(List.of("value6", "value7", "value8"))); + } + } + + public void testInsertValueListTraversal() throws IOException { + { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + { + builder.startObject("path1"); + { + builder.startArray("path2"); + builder.startObject().field("test", "value1").endObject(); + builder.endArray(); + } + builder.endObject(); + } + { + builder.startObject("path3"); + { + builder.startArray("path4"); + builder.startObject().field("test", "value1").endObject(); + builder.endArray(); + } + builder.endObject(); + } + builder.endObject(); + Map map = toSourceMap(Strings.toString(builder)); + + SemanticTextFieldMapper.insertValue("path1.path2.test", map, "value2"); + assertThat(getMapValue(map, "path1.path2.test"), equalTo("value2")); + SemanticTextFieldMapper.insertValue("path1.path2.test2", map, "value3"); + assertThat(getMapValue(map, "path1.path2.test2"), equalTo("value3")); + assertThat(getMapValue(map, "path1.path2"), equalTo(List.of(Map.of("test", "value2", "test2", "value3")))); + + SemanticTextFieldMapper.insertValue("path3.path4.test", map, "value4"); + assertThat(getMapValue(map, "path3.path4.test"), equalTo("value4")); + } + { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + { + builder.startObject("path1"); + { + 
builder.startArray("path2"); + builder.startArray(); + builder.startObject().field("test", "value1").endObject(); + builder.endArray(); + builder.endArray(); + } + builder.endObject(); + } + builder.endObject(); + Map map = toSourceMap(Strings.toString(builder)); + + SemanticTextFieldMapper.insertValue("path1.path2.test", map, "value2"); + assertThat(getMapValue(map, "path1.path2.test"), equalTo("value2")); + SemanticTextFieldMapper.insertValue("path1.path2.test2", map, "value3"); + assertThat(getMapValue(map, "path1.path2.test2"), equalTo("value3")); + assertThat(getMapValue(map, "path1.path2"), equalTo(List.of(List.of(Map.of("test", "value2", "test2", "value3"))))); + } + } + + public void testInsertValueFieldsWithDots() throws IOException { + { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject().field("xxx.yyy", "value1").endObject(); + Map map = toSourceMap(Strings.toString(builder)); + + SemanticTextFieldMapper.insertValue("xxx.yyy", map, "value2"); + assertThat(getMapValue(map, "xxx\\.yyy"), equalTo("value2")); + + SemanticTextFieldMapper.insertValue("xxx", map, "value3"); + assertThat(getMapValue(map, "xxx"), equalTo("value3")); + } + { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + { + builder.startObject("path1.path2"); + { + builder.startObject("path3.path4"); + builder.field("test", "value1"); + builder.endObject(); + } + builder.endObject(); + } + builder.endObject(); + Map map = toSourceMap(Strings.toString(builder)); + + SemanticTextFieldMapper.insertValue("path1.path2.path3.path4.test", map, "value2"); + assertThat(getMapValue(map, "path1\\.path2.path3\\.path4.test"), equalTo("value2")); + + SemanticTextFieldMapper.insertValue("path1.path2.path3.path4.test2", map, "value3"); + assertThat(getMapValue(map, "path1\\.path2.path3\\.path4.test2"), equalTo("value3")); + assertThat(getMapValue(map, "path1\\.path2.path3\\.path4"), equalTo(Map.of("test", "value2", "test2", "value3"))); + } + } + + public void 
testInsertValueAmbiguousPath() throws IOException { + // Mixed dotted object notation + { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + { + builder.startObject("path1.path2"); + { + builder.startObject("path3"); + builder.field("test1", "value1"); + builder.endObject(); + } + builder.endObject(); + } + { + builder.startObject("path1"); + { + builder.startObject("path2.path3"); + builder.field("test2", "value2"); + builder.endObject(); + } + builder.endObject(); + } + builder.endObject(); + Map map = toSourceMap(Strings.toString(builder)); + final Map originalMap = Collections.unmodifiableMap(toSourceMap(Strings.toString(builder))); + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> SemanticTextFieldMapper.insertValue("path1.path2.path3.test1", map, "value3") + ); + assertThat( + ex.getMessage(), + equalTo("Path [path1.path2.path3.test1] could be inserted in 2 distinct ways, it is ambiguous which one to use") + ); + + ex = assertThrows( + IllegalArgumentException.class, + () -> SemanticTextFieldMapper.insertValue("path1.path2.path3.test3", map, "value4") + ); + assertThat( + ex.getMessage(), + equalTo("Path [path1.path2.path3.test3] could be inserted in 2 distinct ways, it is ambiguous which one to use") + ); + + assertThat(map, equalTo(originalMap)); + } + + // traversal through lists + { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + { + builder.startObject("path1.path2"); + { + builder.startArray("path3"); + builder.startObject().field("test1", "value1").endObject(); + builder.endArray(); + } + builder.endObject(); + } + { + builder.startObject("path1"); + { + builder.startArray("path2.path3"); + builder.startObject().field("test2", "value2").endObject(); + builder.endArray(); + } + builder.endObject(); + } + builder.endObject(); + Map map = toSourceMap(Strings.toString(builder)); + final Map originalMap = 
Collections.unmodifiableMap(toSourceMap(Strings.toString(builder))); + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> SemanticTextFieldMapper.insertValue("path1.path2.path3.test1", map, "value3") + ); + assertThat( + ex.getMessage(), + equalTo("Path [path1.path2.path3.test1] could be inserted in 2 distinct ways, it is ambiguous which one to use") + ); + + ex = assertThrows( + IllegalArgumentException.class, + () -> SemanticTextFieldMapper.insertValue("path1.path2.path3.test3", map, "value4") + ); + assertThat( + ex.getMessage(), + equalTo("Path [path1.path2.path3.test3] could be inserted in 2 distinct ways, it is ambiguous which one to use") + ); + + assertThat(map, equalTo(originalMap)); + } + } + + public void testInsertValueCannotTraversePath() throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + { + builder.startObject("path1"); + { + builder.startArray("path2"); + builder.startArray(); + builder.startObject().field("test", "value1").endObject(); + builder.endArray(); + builder.endArray(); + } + builder.endObject(); + } + builder.endObject(); + Map map = toSourceMap(Strings.toString(builder)); + final Map originalMap = Collections.unmodifiableMap(toSourceMap(Strings.toString(builder))); + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> SemanticTextFieldMapper.insertValue("path1.path2.test.test2", map, "value2") + ); + assertThat( + ex.getMessage(), + equalTo("Path [path1.path2.test] has value [value1] of type [String], which cannot be traversed into further") + ); + + assertThat(map, equalTo(originalMap)); + } + @Override protected void assertExistsQuery(MappedFieldType fieldType, Query query, LuceneDocument fields) { // Until a doc is indexed, the query is rewritten as match no docs @@ -875,4 +1141,68 @@ private static void assertSparseFeatures(LuceneDocument doc, String fieldName, i } assertThat(count, equalTo(expectedCount)); } + + 
private Map toSourceMap(String source) throws IOException { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { + return parser.map(); + } + } + + private static Object getMapValue(Map map, String key) { + // Split the path on unescaped "." chars and then unescape the escaped "." chars + final String[] pathElements = Arrays.stream(key.split("(? k.replace("\\.", ".")).toArray(String[]::new); + + Object value = null; + Object nextLayer = map; + for (int i = 0; i < pathElements.length; i++) { + if (nextLayer instanceof Map nextMap) { + value = nextMap.get(pathElements[i]); + } else if (nextLayer instanceof List nextList) { + final String pathElement = pathElements[i]; + List values = nextList.stream().flatMap(v -> { + Stream.Builder streamBuilder = Stream.builder(); + if (v instanceof List innerList) { + traverseList(innerList, streamBuilder); + } else { + streamBuilder.add(v); + } + return streamBuilder.build(); + }).filter(v -> v instanceof Map).map(v -> ((Map) v).get(pathElement)).filter(Objects::nonNull).toList(); + + if (values.isEmpty()) { + return null; + } else if (values.size() > 1) { + throw new AssertionError("List " + nextList + " contains multiple values for [" + pathElement + "]"); + } else { + value = values.getFirst(); + } + } else if (nextLayer == null) { + break; + } else { + throw new AssertionError( + "Path [" + + String.join(".", Arrays.copyOfRange(pathElements, 0, i)) + + "] has value [" + + value + + "] of type [" + + value.getClass().getSimpleName() + + "], which cannot be traversed into further" + ); + } + + nextLayer = value; + } + + return value; + } + + private static void traverseList(List list, Stream.Builder streamBuilder) { + for (Object value : list) { + if (value instanceof List innerList) { + traverseList(innerList, streamBuilder); + } else { + streamBuilder.add(value); + } + } + } } diff --git 
a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml index 3f907ae1de6cd..71fb1fd95989f 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml @@ -343,3 +343,31 @@ setup: inference_id: sparse-inference-id another_field: type: keyword + +--- +"Cannot be in an object field with subobjects disabled": + - requires: + cluster_features: "semantic_text.in_object_field_fix" + reason: object field fix added in 8.16.0 & 8.15.4 + + - do: + catch: bad_request + indices.create: + index: test-subobjects-index + body: + mappings: + properties: + level_1: + type: object + properties: + level_2: + type: object + subobjects: false + properties: + sparse_field: + type: semantic_text + inference_id: sparse-inference-id + + - match: { error.type: illegal_argument_exception } + - match: { error.reason: "semantic_text field [level_1.level_2.sparse_field] cannot be in an object field with + subobjects disabled" } diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml index 1795d754d2a9c..b4ba527a5bf45 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml @@ -578,3 +578,45 @@ setup: - match: { _source.sparse_field.text: "inference test" } - exists: _source.sparse_field.inference.chunks.0.embeddings - match: { _source.sparse_field.inference.chunks.0.text: 
"inference test" } + +--- +"Can be used inside an object field": + - requires: + cluster_features: "semantic_text.in_object_field_fix" + reason: object field fix added in 8.16.0 & 8.15.4 + + - do: + indices.create: + index: test-in-object-index + body: + mappings: + properties: + level_1: + properties: + sparse_field: + type: semantic_text + inference_id: sparse-inference-id + dense_field: + type: semantic_text + inference_id: dense-inference-id + + - do: + index: + index: test-in-object-index + id: doc_1 + body: + level_1: + sparse_field: "inference test" + dense_field: "another inference test" + + - do: + get: + index: test-in-object-index + id: doc_1 + + - match: { _source.level_1.sparse_field.text: "inference test" } + - exists: _source.level_1.sparse_field.inference.chunks.0.embeddings + - match: { _source.level_1.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.level_1.dense_field.text: "another inference test" } + - exists: _source.level_1.dense_field.inference.chunks.0.embeddings + - match: { _source.level_1.dense_field.inference.chunks.0.text: "another inference test" } From 5bf446ea2e10aac093f0e02dd123db39466b6c56 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Thu, 17 Oct 2024 15:53:48 -0600 Subject: [PATCH 196/449] Reconcile differences between Stateful and Stateless dot-prefix validation (#114946) This commit makes the dot prefix deprecation match the existing changes to validation for the SLO and SLA UIs. 
Relates to #112571 --- .../validation/DotPrefixValidator.java | 25 +++- .../validation/DotPrefixValidatorTests.java | 116 ++++++++++++++++++ 2 files changed, 137 insertions(+), 4 deletions(-) create mode 100644 modules/dot-prefix-validation/src/test/java/org/elasticsearch/validation/DotPrefixValidatorTests.java diff --git a/modules/dot-prefix-validation/src/main/java/org/elasticsearch/validation/DotPrefixValidator.java b/modules/dot-prefix-validation/src/main/java/org/elasticsearch/validation/DotPrefixValidator.java index e2c75a6401187..fc8d701b953f6 100644 --- a/modules/dot-prefix-validation/src/main/java/org/elasticsearch/validation/DotPrefixValidator.java +++ b/modules/dot-prefix-validation/src/main/java/org/elasticsearch/validation/DotPrefixValidator.java @@ -56,6 +56,7 @@ public abstract class DotPrefixValidator implements MappedActionFil * * .elastic-connectors-* is used by enterprise search * .ml-* is used by ML + * .slo-observability-* is used by Observability */ private static Set IGNORED_INDEX_NAMES = Set.of( ".elastic-connectors-v1", @@ -63,7 +64,11 @@ public abstract class DotPrefixValidator implements MappedActionFil ".ml-state", ".ml-anomalies-unrelated" ); - private static Set IGNORED_INDEX_PATTERNS = Set.of(Pattern.compile("\\.ml-state-\\d+")); + private static Set IGNORED_INDEX_PATTERNS = Set.of( + Pattern.compile("\\.ml-state-\\d+"), + Pattern.compile("\\.slo-observability\\.sli-v\\d+.*"), + Pattern.compile("\\.slo-observability\\.summary-v\\d+.*") + ); DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(DotPrefixValidator.class); @@ -99,10 +104,11 @@ void validateIndices(@Nullable Set indices) { if (Strings.hasLength(index)) { char c = getFirstChar(index); if (c == '.') { - if (IGNORED_INDEX_NAMES.contains(index)) { + final String strippedName = stripDateMath(index); + if (IGNORED_INDEX_NAMES.contains(strippedName)) { return; } - if (IGNORED_INDEX_PATTERNS.stream().anyMatch(p -> p.matcher(index).matches())) { + if 
(IGNORED_INDEX_PATTERNS.stream().anyMatch(p -> p.matcher(strippedName).matches())) { return; } deprecationLogger.warn( @@ -132,7 +138,18 @@ private static char getFirstChar(String index) { return c; } - private boolean isInternalRequest() { + private static String stripDateMath(String index) { + char c = index.charAt(0); + if (c == '<') { + assert index.charAt(index.length() - 1) == '>' + : "expected index name with date math to start with < and end with >, how did this pass request validation? " + index; + return index.substring(1, index.length() - 1); + } else { + return index; + } + } + + boolean isInternalRequest() { final String actionOrigin = threadContext.getTransient(ThreadContext.ACTION_ORIGIN_TRANSIENT_NAME); final boolean isSystemContext = threadContext.isSystemContext(); final boolean isInternalOrigin = Optional.ofNullable(actionOrigin).map(Strings::hasText).orElse(false); diff --git a/modules/dot-prefix-validation/src/test/java/org/elasticsearch/validation/DotPrefixValidatorTests.java b/modules/dot-prefix-validation/src/test/java/org/elasticsearch/validation/DotPrefixValidatorTests.java new file mode 100644 index 0000000000000..9adb33d51f510 --- /dev/null +++ b/modules/dot-prefix-validation/src/test/java/org/elasticsearch/validation/DotPrefixValidatorTests.java @@ -0,0 +1,116 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.validation; + +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.BeforeClass; + +import java.util.HashSet; +import java.util.Set; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class DotPrefixValidatorTests extends ESTestCase { + private final OperatorValidator opV = new OperatorValidator<>(); + private final NonOperatorValidator nonOpV = new NonOperatorValidator<>(); + private static final Set> settings; + + private static ClusterService clusterService; + private static ClusterSettings clusterSettings; + + static { + Set> cSettings = new HashSet<>(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + cSettings.add(DotPrefixValidator.VALIDATE_DOT_PREFIXES); + settings = cSettings; + } + + @BeforeClass + public static void beforeClass() { + clusterService = mock(ClusterService.class); + clusterSettings = new ClusterSettings(Settings.EMPTY, Sets.newHashSet(DotPrefixValidator.VALIDATE_DOT_PREFIXES)); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); + when(clusterService.getSettings()).thenReturn(Settings.EMPTY); + when(clusterService.threadPool()).thenReturn(mock(ThreadPool.class)); + } + + public void testValidation() { + + nonOpV.validateIndices(Set.of("regular")); + opV.validateIndices(Set.of("regular")); + assertFails(Set.of(".regular")); + opV.validateIndices(Set.of(".regular")); + assertFails(Set.of("first", ".second")); + assertFails(Set.of("<.regular-{MM-yy-dd}>")); + + // Test ignored names + nonOpV.validateIndices(Set.of(".elastic-connectors-v1")); + 
nonOpV.validateIndices(Set.of(".elastic-connectors-sync-jobs-v1")); + nonOpV.validateIndices(Set.of(".ml-state")); + nonOpV.validateIndices(Set.of(".ml-anomalies-unrelated")); + + // Test ignored patterns + nonOpV.validateIndices(Set.of(".ml-state-21309")); + nonOpV.validateIndices(Set.of(">.ml-state-21309>")); + nonOpV.validateIndices(Set.of(".slo-observability.sli-v2")); + nonOpV.validateIndices(Set.of(".slo-observability.sli-v2.3")); + nonOpV.validateIndices(Set.of(".slo-observability.sli-v2.3-2024-01-01")); + nonOpV.validateIndices(Set.of("<.slo-observability.sli-v3.3.{2024-10-16||/M{yyyy-MM-dd|UTC}}>")); + nonOpV.validateIndices(Set.of(".slo-observability.summary-v2")); + nonOpV.validateIndices(Set.of(".slo-observability.summary-v2.3")); + nonOpV.validateIndices(Set.of(".slo-observability.summary-v2.3-2024-01-01")); + nonOpV.validateIndices(Set.of("<.slo-observability.summary-v3.3.{2024-10-16||/M{yyyy-MM-dd|UTC}}>")); + } + + private void assertFails(Set indices) { + nonOpV.validateIndices(indices); + assertWarnings( + "Index [" + + indices.stream().filter(i -> i.startsWith(".") || i.startsWith("<.")).toList().getFirst() + + "] name begins with a dot (.), which is deprecated, and will not be allowed in a future Elasticsearch version." 
+ ); + } + + private class NonOperatorValidator extends DotPrefixValidator { + + private NonOperatorValidator() { + super(new ThreadContext(Settings.EMPTY), clusterService); + } + + @Override + protected Set getIndicesFromRequest(Object request) { + return Set.of(); + } + + @Override + public String actionName() { + return ""; + } + + @Override + boolean isInternalRequest() { + return false; + } + } + + private class OperatorValidator extends NonOperatorValidator { + @Override + boolean isInternalRequest() { + return true; + } + } +} From c62a96c8abea380fece088b94a6ab472b3f8efb9 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 18 Oct 2024 07:49:00 +0200 Subject: [PATCH 197/449] Include ignored source as part of loading field values in ValueSourceReaderOperator via BlockSourceReader. (#114903) Currently, in compute engine when loading source if source mode is synthetic, the synthetic source loader is already used. But the ignored_source field isn't always marked as a required source field, causing the source to potentially miss a lot of fields. This change includes the _ignored_source field as a required stored field and allows keyword fields without doc values or stored fields to be used in the case of synthetic source. Relying on synthetic source to get the values (because a field doesn't have stored fields / doc values) is slow. In case of synthetic source we already keep ignored fields/values in a special place, named ignored source. Long term in case of synthetic source we should only load ignored source in case a field has no doc values or stored field. Like is being explored in #114886 Thereby avoiding synthesizing the complete _source in order to get only one field.
--- .../extras/MatchOnlyTextFieldMapper.java | 3 +- .../mapper/extras/ScaledFloatFieldMapper.java | 3 +- .../mapper/AbstractGeometryFieldMapper.java | 3 +- .../index/mapper/BlockSourceReader.java | 47 +-- .../index/mapper/BooleanFieldMapper.java | 2 +- .../index/mapper/DateFieldMapper.java | 3 +- .../index/mapper/KeywordFieldMapper.java | 12 +- .../index/mapper/NumberFieldMapper.java | 65 +++- .../index/mapper/TextFieldMapper.java | 9 +- .../index/mapper/BlockSourceReaderTests.java | 2 +- .../index/mapper/MapperTestCase.java | 7 +- .../ValueSourceReaderTypeConversionTests.java | 6 +- .../ValuesSourceReaderOperatorTests.java | 7 +- x-pack/plugin/logsdb/build.gradle | 2 +- ...ardVersusLogsIndexModeChallengeRestIT.java | 15 + ..._esql_synthetic_source_disabled_fields.yml | 305 ++++++++++++++++++ .../test/51_esql_synthetic_source.yml | 177 ++++++++++ .../unsignedlong/UnsignedLongFieldMapper.java | 3 +- 18 files changed, 613 insertions(+), 58 deletions(-) create mode 100644 x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/50_esql_synthetic_source_disabled_fields.yml create mode 100644 x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/51_esql_synthetic_source.yml diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java index 5904169308fab..cd252fcff2376 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java @@ -364,7 +364,8 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { SourceValueFetcher fetcher = SourceValueFetcher.toString(blContext.sourcePaths(name())); // MatchOnlyText never has norms, so we have to use the field names field BlockSourceReader.LeafIteratorLookup lookup = 
BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()); - return new BlockSourceReader.BytesRefsBlockLoader(fetcher, lookup); + var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); + return new BlockSourceReader.BytesRefsBlockLoader(fetcher, lookup, sourceMode); } @Override diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java index b845545133e19..1f647cb977cf5 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java @@ -319,7 +319,8 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { BlockSourceReader.LeafIteratorLookup lookup = isStored() || isIndexed() ? BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) : BlockSourceReader.lookupMatchingAll(); - return new BlockSourceReader.DoublesBlockLoader(valueFetcher, lookup); + var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); + return new BlockSourceReader.DoublesBlockLoader(valueFetcher, lookup, sourceMode); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java index c38b5beeb55a0..3512989c115ee 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java @@ -189,7 +189,8 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { protected BlockLoader blockLoaderFromSource(BlockLoaderContext blContext) { ValueFetcher fetcher = valueFetcher(blContext.sourcePaths(name()), nullValue, GeometryFormatterFactory.WKB); // TODO consider 
optimization using BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) - return new BlockSourceReader.GeometriesBlockLoader(fetcher, BlockSourceReader.lookupMatchingAll()); + var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); + return new BlockSourceReader.GeometriesBlockLoader(fetcher, BlockSourceReader.lookupMatchingAll(), sourceMode); } protected abstract Object nullValueAsSource(T nullValue); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java b/server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java index 19a1cce746172..105943c732a5e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java @@ -22,6 +22,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Set; /** * Loads values from {@code _source}. This whole process is very slow and cast-tastic, @@ -29,6 +30,14 @@ * slow. */ public abstract class BlockSourceReader implements BlockLoader.RowStrideReader { + + // _ignored_source is needed when source mode is synthetic. 
+ static final StoredFieldsSpec NEEDS_SOURCE_AND_IGNORED_SOURCE = new StoredFieldsSpec( + true, + false, + Set.of(IgnoredSourceFieldMapper.NAME) + ); + private final ValueFetcher fetcher; private final List ignoredValues = new ArrayList<>(); private final DocIdSetIterator iter; @@ -91,10 +100,12 @@ public interface LeafIteratorLookup { private abstract static class SourceBlockLoader implements BlockLoader { protected final ValueFetcher fetcher; private final LeafIteratorLookup lookup; + private final SourceFieldMapper.Mode sourceMode; - private SourceBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup) { + private SourceBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup, SourceFieldMapper.Mode sourceMode) { this.fetcher = fetcher; this.lookup = lookup; + this.sourceMode = sourceMode; } @Override @@ -104,7 +115,7 @@ public final ColumnAtATimeReader columnAtATimeReader(LeafReaderContext context) @Override public final StoredFieldsSpec rowStrideStoredFieldSpec() { - return StoredFieldsSpec.NEEDS_SOURCE; + return sourceMode == SourceFieldMapper.Mode.SYNTHETIC ? NEEDS_SOURCE_AND_IGNORED_SOURCE : StoredFieldsSpec.NEEDS_SOURCE; } @Override @@ -140,8 +151,8 @@ public final String toString() { * Load {@code boolean}s from {@code _source}. */ public static class BooleansBlockLoader extends SourceBlockLoader { - public BooleansBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup) { - super(fetcher, lookup); + public BooleansBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup, SourceFieldMapper.Mode sourceMode) { + super(fetcher, lookup, sourceMode); } @Override @@ -180,8 +191,8 @@ public String toString() { * Load {@link BytesRef}s from {@code _source}. 
*/ public static class BytesRefsBlockLoader extends SourceBlockLoader { - public BytesRefsBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup) { - super(fetcher, lookup); + public BytesRefsBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup, SourceFieldMapper.Mode sourceMode) { + super(fetcher, lookup, sourceMode); } @Override @@ -191,7 +202,7 @@ public final Builder builder(BlockFactory factory, int expectedCount) { @Override protected RowStrideReader rowStrideReader(LeafReaderContext context, DocIdSetIterator iter) throws IOException { - return new BytesRefs(fetcher, iter); + return new BytesRefs(fetcher, iter, null); } @Override @@ -201,8 +212,8 @@ protected String name() { } public static class GeometriesBlockLoader extends SourceBlockLoader { - public GeometriesBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup) { - super(fetcher, lookup); + public GeometriesBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup, SourceFieldMapper.Mode sourceMode) { + super(fetcher, lookup, sourceMode); } @Override @@ -212,7 +223,7 @@ public final Builder builder(BlockFactory factory, int expectedCount) { @Override protected RowStrideReader rowStrideReader(LeafReaderContext context, DocIdSetIterator iter) { - return new Geometries(fetcher, iter); + return new Geometries(fetcher, iter, null); } @Override @@ -224,7 +235,7 @@ protected String name() { private static class BytesRefs extends BlockSourceReader { private final BytesRef scratch = new BytesRef(); - BytesRefs(ValueFetcher fetcher, DocIdSetIterator iter) { + BytesRefs(ValueFetcher fetcher, DocIdSetIterator iter, SourceFieldMapper.Mode sourceMode) { super(fetcher, iter); } @@ -241,7 +252,7 @@ public String toString() { private static class Geometries extends BlockSourceReader { - Geometries(ValueFetcher fetcher, DocIdSetIterator iter) { + Geometries(ValueFetcher fetcher, DocIdSetIterator iter, SourceFieldMapper.Mode sourceMode) { super(fetcher, iter); } @@ -264,8 +275,8 @@ public String 
toString() { * Load {@code double}s from {@code _source}. */ public static class DoublesBlockLoader extends SourceBlockLoader { - public DoublesBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup) { - super(fetcher, lookup); + public DoublesBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup, SourceFieldMapper.Mode sourceMode) { + super(fetcher, lookup, sourceMode); } @Override @@ -304,8 +315,8 @@ public String toString() { * Load {@code int}s from {@code _source}. */ public static class IntsBlockLoader extends SourceBlockLoader { - public IntsBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup) { - super(fetcher, lookup); + public IntsBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup, SourceFieldMapper.Mode sourceMode) { + super(fetcher, lookup, sourceMode); } @Override @@ -344,8 +355,8 @@ public String toString() { * Load {@code long}s from {@code _source}. */ public static class LongsBlockLoader extends SourceBlockLoader { - public LongsBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup) { - super(fetcher, lookup); + public LongsBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup, SourceFieldMapper.Mode sourceMode) { + super(fetcher, lookup, sourceMode); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index 5aaaf7dce83c9..c2bf9e18bfeec 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -314,7 +314,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { BlockSourceReader.LeafIteratorLookup lookup = isIndexed() || isStored() ? 
BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) : BlockSourceReader.lookupMatchingAll(); - return new BlockSourceReader.BooleansBlockLoader(fetcher, lookup); + return new BlockSourceReader.BooleansBlockLoader(fetcher, lookup, blContext.indexSettings().getIndexMappingSourceMode()); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 7be5ee2200b5c..57572dea8ac0f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -792,7 +792,8 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { BlockSourceReader.LeafIteratorLookup lookup = isStored() || isIndexed() ? BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) : BlockSourceReader.lookupMatchingAll(); - return new BlockSourceReader.LongsBlockLoader(sourceValueFetcher(blContext.sourcePaths(name())), lookup); + var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); + return new BlockSourceReader.LongsBlockLoader(sourceValueFetcher(blContext.sourcePaths(name())), lookup, sourceMode); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 529ff19bfffd7..1ff9fd2f699c9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -632,18 +632,12 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { if (hasDocValues()) { return new BlockDocValuesReader.BytesRefsFromOrdsBlockLoader(name()); } - if (isSyntheticSource) { - if (false == isStored()) { - throw new IllegalStateException( - "keyword field [" - + name() - + "] is only supported in synthetic _source index if it 
creates doc values or stored fields" - ); - } + if (isStored()) { return new BlockStoredFieldsReader.BytesFromBytesRefsBlockLoader(name()); } SourceValueFetcher fetcher = sourceValueFetcher(blContext.sourcePaths(name())); - return new BlockSourceReader.BytesRefsBlockLoader(fetcher, sourceBlockLoaderLookup(blContext)); + var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); + return new BlockSourceReader.BytesRefsBlockLoader(fetcher, sourceBlockLoaderLookup(blContext), sourceMode); } private BlockSourceReader.LeafIteratorLookup sourceBlockLoaderLookup(BlockLoaderContext blContext) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 8cc67cc481b9b..2e815554dc829 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -461,8 +461,12 @@ BlockLoader blockLoaderFromDocValues(String fieldName) { } @Override - BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) { - return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup); + BlockLoader blockLoaderFromSource( + SourceValueFetcher sourceValueFetcher, + BlockSourceReader.LeafIteratorLookup lookup, + SourceFieldMapper.Mode sourceMode + ) { + return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup, sourceMode); } }, FLOAT("float", NumericType.FLOAT) { @@ -645,8 +649,12 @@ BlockLoader blockLoaderFromDocValues(String fieldName) { } @Override - BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) { - return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup); + BlockLoader blockLoaderFromSource( + SourceValueFetcher sourceValueFetcher, + BlockSourceReader.LeafIteratorLookup lookup, + 
SourceFieldMapper.Mode sourceMode + ) { + return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup, sourceMode); } }, DOUBLE("double", NumericType.DOUBLE) { @@ -795,8 +803,12 @@ BlockLoader blockLoaderFromDocValues(String fieldName) { } @Override - BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) { - return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup); + BlockLoader blockLoaderFromSource( + SourceValueFetcher sourceValueFetcher, + BlockSourceReader.LeafIteratorLookup lookup, + SourceFieldMapper.Mode sourceMode + ) { + return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup, sourceMode); } }, BYTE("byte", NumericType.BYTE) { @@ -908,8 +920,12 @@ BlockLoader blockLoaderFromDocValues(String fieldName) { } @Override - BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) { - return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup); + BlockLoader blockLoaderFromSource( + SourceValueFetcher sourceValueFetcher, + BlockSourceReader.LeafIteratorLookup lookup, + SourceFieldMapper.Mode sourceMode + ) { + return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup, sourceMode); } private boolean isOutOfRange(Object value) { @@ -1021,8 +1037,12 @@ BlockLoader blockLoaderFromDocValues(String fieldName) { } @Override - BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) { - return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup); + BlockLoader blockLoaderFromSource( + SourceValueFetcher sourceValueFetcher, + BlockSourceReader.LeafIteratorLookup lookup, + SourceFieldMapper.Mode sourceMode + ) { + return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup, sourceMode); } private boolean isOutOfRange(Object value) { @@ -1208,8 +1228,12 @@ BlockLoader 
blockLoaderFromDocValues(String fieldName) { } @Override - BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) { - return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup); + BlockLoader blockLoaderFromSource( + SourceValueFetcher sourceValueFetcher, + BlockSourceReader.LeafIteratorLookup lookup, + SourceFieldMapper.Mode sourceMode + ) { + return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup, sourceMode); } }, LONG("long", NumericType.LONG) { @@ -1355,8 +1379,12 @@ BlockLoader blockLoaderFromDocValues(String fieldName) { } @Override - BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) { - return new BlockSourceReader.LongsBlockLoader(sourceValueFetcher, lookup); + BlockLoader blockLoaderFromSource( + SourceValueFetcher sourceValueFetcher, + BlockSourceReader.LeafIteratorLookup lookup, + SourceFieldMapper.Mode sourceMode + ) { + return new BlockSourceReader.LongsBlockLoader(sourceValueFetcher, lookup, sourceMode); } private boolean isOutOfRange(Object value) { @@ -1634,7 +1662,11 @@ protected void writeValue(XContentBuilder b, long value) throws IOException { abstract BlockLoader blockLoaderFromDocValues(String fieldName); - abstract BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup); + abstract BlockLoader blockLoaderFromSource( + SourceValueFetcher sourceValueFetcher, + BlockSourceReader.LeafIteratorLookup lookup, + SourceFieldMapper.Mode sourceMode + ); } public static class NumberFieldType extends SimpleMappedFieldType { @@ -1773,7 +1805,8 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { BlockSourceReader.LeafIteratorLookup lookup = isStored() || isIndexed() ? 
BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) : BlockSourceReader.lookupMatchingAll(); - return type.blockLoaderFromSource(sourceValueFetcher(blContext.sourcePaths(name())), lookup); + var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); + return type.blockLoaderFromSource(sourceValueFetcher(blContext.sourcePaths(name())), lookup, sourceMode); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 2c55fc35db57d..0a3911a73a2fc 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -1012,17 +1012,20 @@ protected String delegatingTo() { if (isStored()) { return new BlockStoredFieldsReader.BytesFromStringsBlockLoader(name()); } - if (isSyntheticSource) { + if (isSyntheticSource && syntheticSourceDelegate == null) { /* * When we're in synthetic source mode we don't currently * support text fields that are not stored and are not children * of perfect keyword fields. We'd have to load from the parent - * field and then convert the result to a string. + * field and then convert the result to a string. In this case, + * even if we would synthesize the source, the current field + * would be missing. 
*/ return null; } SourceValueFetcher fetcher = SourceValueFetcher.toString(blContext.sourcePaths(name())); - return new BlockSourceReader.BytesRefsBlockLoader(fetcher, blockReaderDisiLookup(blContext)); + var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); + return new BlockSourceReader.BytesRefsBlockLoader(fetcher, blockReaderDisiLookup(blContext), sourceMode); } /** diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BlockSourceReaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BlockSourceReaderTests.java index 357ada3ad656d..286be8d12570d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BlockSourceReaderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BlockSourceReaderTests.java @@ -51,7 +51,7 @@ public void testEmptyArray() throws IOException { private void loadBlock(LeafReaderContext ctx, Consumer test) throws IOException { ValueFetcher valueFetcher = SourceValueFetcher.toString(Set.of("field")); BlockSourceReader.LeafIteratorLookup lookup = BlockSourceReader.lookupFromNorms("field"); - BlockLoader loader = new BlockSourceReader.BytesRefsBlockLoader(valueFetcher, lookup); + BlockLoader loader = new BlockSourceReader.BytesRefsBlockLoader(valueFetcher, lookup, null); assertThat(loader.columnAtATimeReader(ctx), nullValue()); BlockLoader.RowStrideReader reader = loader.rowStrideReader(ctx); assertThat(loader.rowStrideStoredFieldSpec(), equalTo(StoredFieldsSpec.NEEDS_SOURCE)); diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java index d8298d49d3ebc..7669ada750c14 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java @@ -1321,12 +1321,15 @@ private BlockLoader getBlockLoader(boolean columnReader) { return 
mapper.fieldType(loaderFieldName).blockLoader(new MappedFieldType.BlockLoaderContext() { @Override public String indexName() { - throw new UnsupportedOperationException(); + return "test_index"; } @Override public IndexSettings indexSettings() { - throw new UnsupportedOperationException(); + var imd = IndexMetadata.builder(indexName()) + .settings(MapperTestCase.indexSettings(IndexVersion.current(), 1, 1).put(Settings.EMPTY)) + .build(); + return new IndexSettings(imd, Settings.EMPTY); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java index ccc3dea78adc8..f6d81af7c14e5 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java @@ -26,6 +26,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.NoopCircuitBreaker; @@ -546,7 +547,10 @@ public String indexName() { @Override public IndexSettings indexSettings() { - throw new UnsupportedOperationException(); + var imd = IndexMetadata.builder("test_index") + .settings(ValueSourceReaderTypeConversionTests.indexSettings(IndexVersion.current(), 1, 1).put(Settings.EMPTY)) + .build(); + return new IndexSettings(imd, Settings.EMPTY); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java index 848415c4490fa..c8dd6f87be5fc 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java @@ -24,9 +24,11 @@ import org.apache.lucene.tests.mockfile.HandleLimitFS; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; @@ -500,7 +502,10 @@ public String indexName() { @Override public IndexSettings indexSettings() { - throw new UnsupportedOperationException(); + var imd = IndexMetadata.builder("test_index") + .settings(ValueSourceReaderTypeConversionTests.indexSettings(IndexVersion.current(), 1, 1).put(Settings.EMPTY)) + .build(); + return new IndexSettings(imd, Settings.EMPTY); } @Override diff --git a/x-pack/plugin/logsdb/build.gradle b/x-pack/plugin/logsdb/build.gradle index 929d7dad2f5e6..60578f832d153 100644 --- a/x-pack/plugin/logsdb/build.gradle +++ b/x-pack/plugin/logsdb/build.gradle @@ -25,7 +25,7 @@ base { restResources { restApi { - include 'bulk', 'search', '_common', 'indices', 'index', 'cluster', 'data_stream', 'ingest', 'cat', 'capabilities' + include 'bulk', 'search', '_common', 'indices', 'index', 'cluster', 'data_stream', 'ingest', 'cat', 'capabilities', 'esql.query' } } diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java 
b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java index dd7806fc9c8fa..8d7a813b206d8 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java @@ -301,6 +301,21 @@ public void testEsqlTermsAggregation() throws IOException { assertTrue(matchResult.getMessage(), matchResult.isMatch()); } + public void testEsqlTermsAggregationByMethod() throws IOException { + int numberOfDocuments = ESTestCase.randomIntBetween(100, 200); + final List documents = generateDocuments(numberOfDocuments); + + indexDocuments(documents); + + final String query = "FROM $index | STATS count(*) BY method | SORT method | LIMIT " + numberOfDocuments; + final MatchResult matchResult = Matcher.mappings(getContenderMappings(), getBaselineMappings()) + .settings(getContenderSettings(), getBaselineSettings()) + .expected(getEsqlStatsResults(esqlBaseline(query))) + .ignoringSort(true) + .isEqualTo(getEsqlStatsResults(esqlContender(query))); + assertTrue(matchResult.getMessage(), matchResult.isMatch()); + } + public void testFieldCaps() throws IOException { int numberOfDocuments = ESTestCase.randomIntBetween(20, 50); final List documents = generateDocuments(numberOfDocuments); diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/50_esql_synthetic_source_disabled_fields.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/50_esql_synthetic_source_disabled_fields.yml new file mode 100644 index 0000000000000..68597afda6c78 --- /dev/null +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/50_esql_synthetic_source_disabled_fields.yml @@ -0,0 +1,305 @@ +--- +setup: + - requires: + test_runner_features: allowed_warnings_regex + + - do: + indices.create: + index: 
my-index + body: + settings: + index: + mode: logsdb + mappings: + properties: + "@timestamp": + type: date + host.name: + type: keyword + agent_id: + type: keyword + doc_values: false + store: false + process_id: + type: integer + doc_values: false + store: false + http_method: + type: keyword + doc_values: false + store: false + is_https: + type: boolean + doc_values: false + store: false + location: + type: geo_point + doc_values: false + store: false + message: + type: text + store: false + fields: + raw: + type: keyword + + - do: + bulk: + index: my-index + refresh: true + body: + - { "index": { } } + - { "@timestamp": "2024-02-12T10:30:00Z", "host.name": "foo", "agent_id": "darth-vader", "process_id": 101, "http_method": "GET", "is_https": false, "location": {"lat" : 40.7128, "lon" : -74.0060}, "message": "No, I am your father." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:31:00Z", "host.name": "bar", "agent_id": "yoda", "process_id": 102, "http_method": "PUT", "is_https": false, "location": {"lat" : 40.7128, "lon" : -74.0060}, "message": "Do. Or do not. There is no try." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:32:00Z", "host.name": "foo", "agent_id": "obi-wan", "process_id": 103, "http_method": "GET", "is_https": false, "location": {"lat" : 40.7128, "lon" : -74.0060}, "message": "May the force be with you." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:33:00Z", "host.name": "baz", "agent_id": "darth-vader", "process_id": 102, "http_method": "POST", "is_https": true, "location": {"lat" : 40.7128, "lon" : -74.0060}, "message": "I find your lack of faith disturbing." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:34:00Z", "host.name": "baz", "agent_id": "yoda", "process_id": 104, "http_method": "POST", "is_https": false, "location": {"lat" : 40.7128, "lon" : -74.0060}, "message": "Wars not make one great." 
} + - { "index": { } } + - { "@timestamp": "2024-02-12T10:35:00Z", "host.name": "foo", "agent_id": "obi-wan", "process_id": 105, "http_method": "GET", "is_https": false, "location": {"lat" : 40.7128, "lon" : -74.0060}, "message": "That's no moon. It's a space station." } + +--- +teardown: + - do: + indices.delete: + index: my-index + +--- +"Simple from": + - do: + esql.query: + body: + query: 'FROM my-index | SORT host.name, @timestamp | LIMIT 1' + + - match: {columns.0.name: "@timestamp"} + - match: {columns.0.type: "date"} + - match: {columns.1.name: "agent_id"} + - match: {columns.1.type: "keyword"} + - match: {columns.2.name: "host.name"} + - match: {columns.2.type: "keyword"} + - match: {columns.3.name: "http_method" } + - match: {columns.3.type: "keyword" } + - match: {columns.4.name: "is_https"} + - match: {columns.4.type: "boolean"} + - match: {columns.5.name: "location"} + - match: {columns.5.type: "geo_point"} + - match: {columns.6.name: "message"} + - match: {columns.6.type: "text"} + - match: {columns.7.name: "message.raw"} + - match: {columns.7.type: "keyword"} + - match: {columns.8.name: "process_id"} + - match: {columns.8.type: "integer"} + + - match: {values.0.0: "2024-02-12T10:31:00.000Z"} + - match: {values.0.1: "yoda"} + - match: {values.0.2: "bar"} + - match: {values.0.3: "PUT"} + - match: {values.0.4: false} + - match: {values.0.5: "POINT (-74.006 40.7128)"} + - match: {values.0.6: "Do. Or do not. There is no try."} + - match: {values.0.7: "Do. Or do not. 
There is no try."} + - match: {values.0.8: 102} + +--- +"Simple from geo point": + - do: + esql.query: + body: + query: 'FROM my-index | SORT host.name, @timestamp | KEEP location | LIMIT 10' + + - match: {columns.0.name: "location"} + - match: {columns.0.type: "geo_point"} + + - match: {values.0.0: "POINT (-74.006 40.7128)"} + - match: {values.1.0: "POINT (-74.006 40.7128)"} + - match: {values.2.0: "POINT (-74.006 40.7128)"} + - match: {values.3.0: "POINT (-74.006 40.7128)"} + - match: {values.4.0: "POINT (-74.006 40.7128)"} + - match: {values.5.0: "POINT (-74.006 40.7128)"} + +--- +"Simple from number fields": + - do: + esql.query: + body: + query: 'FROM my-index | SORT host.name, @timestamp | KEEP process_id | LIMIT 10' + + - match: {columns.0.name: "process_id"} + - match: {columns.0.type: "integer"} + + - match: {values.0.0: 102} + - match: {values.1.0: 102} + - match: {values.2.0: 104} + - match: {values.3.0: 101} + - match: {values.4.0: 103} + - match: {values.5.0: 105} + +--- +"Simple from keyword fields": + - do: + esql.query: + body: + query: 'FROM my-index | SORT host.name, @timestamp | KEEP agent_id, http_method | LIMIT 10' + + - match: {columns.0.name: "agent_id"} + - match: {columns.0.type: "keyword"} + - match: {columns.1.name: "http_method"} + - match: {columns.1.type: "keyword"} + + - match: {values.0.0: "yoda"} + - match: {values.0.1: "PUT"} + - match: {values.1.0: "darth-vader"} + - match: {values.1.1: "POST"} + - match: {values.2.0: "yoda"} + - match: {values.2.1: "POST"} + - match: {values.3.0: "darth-vader"} + - match: {values.3.1: "GET"} + - match: {values.4.0: "obi-wan"} + - match: {values.4.1: "GET"} + - match: {values.5.0: "obi-wan"} + - match: {values.5.1: "GET"} + +--- +"Simple from boolean fields": + - do: + esql.query: + body: + query: 'FROM my-index | SORT host.name, @timestamp | KEEP is_https | LIMIT 10' + + - match: {columns.0.name: "is_https"} + - match: {columns.0.type: "boolean"} + + - match: {values.0.0: false} + - match: 
{values.1.0: true} + - match: {values.2.0: false} + - match: {values.3.0: false} + - match: {values.4.0: false} + - match: {values.5.0: false} + +--- +"Simple from text fields": + - do: + esql.query: + body: + query: 'FROM my-index | SORT host.name, @timestamp | KEEP message | LIMIT 10' + + - match: {columns.0.name: "message"} + - match: {columns.0.type: "text"} + + - match: {values.0.0: "Do. Or do not. There is no try."} + - match: {values.1.0: "I find your lack of faith disturbing."} + - match: {values.2.0: "Wars not make one great."} + - match: {values.3.0: "No, I am your father."} + - match: {values.4.0: "May the force be with you."} + - match: {values.5.0: "That's no moon. It's a space station."} + +--- +"message field without keyword multi-field": + - do: + indices.create: + index: my-index2 + body: + settings: + index: + mode: logsdb + mappings: + properties: + "@timestamp": + type: date + host.name: + type: keyword + agent_id: + type: keyword + doc_values: false + store: false + process_id: + type: integer + doc_values: false + store: false + http_method: + type: keyword + doc_values: false + store: false + is_https: + type: boolean + doc_values: false + store: false + location: + type: geo_point + doc_values: false + store: false + message: + type: text + store: false + + - do: + bulk: + index: my-index2 + refresh: true + body: + - { "index": { } } + - { "@timestamp": "2024-02-12T10:30:00Z", "host.name": "foo", "agent_id": "darth-vader", "process_id": 101, "http_method": "GET", "is_https": false, "location": { "lat": 40.7128, "lon": -74.0060 }, "message": "No, I am your father." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:31:00Z", "host.name": "bar", "agent_id": "yoda", "process_id": 102, "http_method": "PUT", "is_https": false, "location": { "lat": 40.7128, "lon": -74.0060 }, "message": "Do. Or do not. There is no try." 
} + - { "index": { } } + - { "@timestamp": "2024-02-12T10:32:00Z", "host.name": "foo", "agent_id": "obi-wan", "process_id": 103, "http_method": "GET", "is_https": false, "location": { "lat": 40.7128, "lon": -74.0060 }, "message": "May the force be with you." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:33:00Z", "host.name": "baz", "agent_id": "darth-vader", "process_id": 102, "http_method": "POST", "is_https": true, "location": { "lat": 40.7128, "lon": -74.0060 }, "message": "I find your lack of faith disturbing." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:34:00Z", "host.name": "baz", "agent_id": "yoda", "process_id": 104, "http_method": "POST", "is_https": false, "location": { "lat": 40.7128, "lon": -74.0060 }, "message": "Wars not make one great." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:35:00Z", "host.name": "foo", "agent_id": "obi-wan", "process_id": 105, "http_method": "GET", "is_https": false, "location": { "lat": 40.7128, "lon": -74.0060 }, "message": "That's no moon. It's a space station." 
} + + - do: + allowed_warnings_regex: + - "Field \\[.*\\] cannot be retrieved, it is unsupported or not indexed; returning null" + esql.query: + body: + query: 'FROM my-index2 | SORT host.name, @timestamp | LIMIT 1' + + - match: {columns.0.name: "@timestamp"} + - match: {columns.0.type: "date"} + - match: {columns.1.name: "agent_id"} + - match: {columns.1.type: "keyword"} + - match: {columns.2.name: "host.name"} + - match: {columns.2.type: "keyword"} + - match: {columns.3.name: "http_method" } + - match: {columns.3.type: "keyword" } + - match: {columns.4.name: "is_https"} + - match: {columns.4.type: "boolean"} + - match: {columns.5.name: "location"} + - match: {columns.5.type: "geo_point"} + - match: {columns.6.name: "message"} + - match: {columns.6.type: "text"} + - match: {columns.7.name: "process_id"} + - match: {columns.7.type: "integer"} + + - match: {values.0.0: "2024-02-12T10:31:00.000Z"} + - match: {values.0.1: "yoda"} + - match: {values.0.2: "bar"} + - match: {values.0.3: "PUT"} + - match: {values.0.4: false} + - match: {values.0.5: "POINT (-74.006 40.7128)"} + - match: {values.0.6: null} # null is expected, because text fields aren't stored in ignored source + - match: {values.0.7: 102} + + - do: + allowed_warnings_regex: + - "Field \\[.*\\] cannot be retrieved, it is unsupported or not indexed; returning null" + esql.query: + body: + query: 'FROM my-index2 | SORT host.name, @timestamp | KEEP message | LIMIT 10' + + - match: {columns.0.name: "message"} + - match: {columns.0.type: "text"} + + # null is expected, because text fields aren't stored in ignored source + - match: {values.0.0: null} + - match: {values.1.0: null} + - match: {values.2.0: null} + - match: {values.3.0: null} + - match: {values.4.0: null} + - match: {values.5.0: null} diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/51_esql_synthetic_source.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/51_esql_synthetic_source.yml new file mode 
100644 index 0000000000000..7e305bda4ef4e --- /dev/null +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/51_esql_synthetic_source.yml @@ -0,0 +1,177 @@ +--- +setup: + - do: + indices.create: + index: my-index + body: + settings: + index: + mode: logsdb + mappings: + properties: + "@timestamp": + type: date + host.name: + type: keyword + agent_id: + type: keyword + process_id: + type: integer + http_method: + type: keyword + is_https: + type: boolean + location: + type: geo_point + message: + type: text + + - do: + bulk: + index: my-index + refresh: true + body: + - { "index": { } } + - { "@timestamp": "2024-02-12T10:30:00Z", "host.name": "foo", "agent_id": "darth-vader", "process_id": 101, "http_method": "GET", "is_https": false, "location": {"lat" : 40.7128, "lon" : -74.0060}, "message": "No, I am your father." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:31:00Z", "host.name": "bar", "agent_id": "yoda", "process_id": 102, "http_method": "PUT", "is_https": false, "location": {"lat" : 40.7128, "lon" : -74.0060}, "message": "Do. Or do not. There is no try." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:32:00Z", "host.name": "foo", "agent_id": "obi-wan", "process_id": 103, "http_method": "GET", "is_https": false, "location": {"lat" : 40.7128, "lon" : -74.0060}, "message": "May the force be with you." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:33:00Z", "host.name": "baz", "agent_id": "darth-vader", "process_id": 102, "http_method": "POST", "is_https": true, "location": {"lat" : 40.7128, "lon" : -74.0060}, "message": "I find your lack of faith disturbing." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:34:00Z", "host.name": "baz", "agent_id": "yoda", "process_id": 104, "http_method": "POST", "is_https": false, "location": {"lat" : 40.7128, "lon" : -74.0060}, "message": "Wars not make one great." 
} + - { "index": { } } + - { "@timestamp": "2024-02-12T10:35:00Z", "host.name": "foo", "agent_id": "obi-wan", "process_id": 105, "http_method": "GET", "is_https": false, "location": {"lat" : 40.7128, "lon" : -74.0060}, "message": "That's no moon. It's a space station." } + +--- +teardown: + - do: + indices.delete: + index: my-index + +--- +"Simple from": + - do: + esql.query: + body: + query: 'FROM my-index | SORT host.name, @timestamp | LIMIT 1' + + - match: {columns.0.name: "@timestamp"} + - match: {columns.0.type: "date"} + - match: {columns.1.name: "agent_id"} + - match: {columns.1.type: "keyword"} + - match: {columns.2.name: "host.name"} + - match: {columns.2.type: "keyword"} + - match: {columns.3.name: "http_method" } + - match: {columns.3.type: "keyword" } + - match: {columns.4.name: "is_https"} + - match: {columns.4.type: "boolean"} + - match: {columns.5.name: "location"} + - match: {columns.5.type: "geo_point"} + - match: {columns.6.name: "message"} + - match: {columns.6.type: "text"} + - match: {columns.7.name: "process_id"} + - match: {columns.7.type: "integer"} + + - match: {values.0.0: "2024-02-12T10:31:00.000Z"} + - match: {values.0.1: "yoda"} + - match: {values.0.2: "bar"} + - match: {values.0.3: "PUT"} + - match: {values.0.4: false} + - match: {values.0.5: "POINT (-74.00600004941225 40.712799984030426)"} + - match: {values.0.6: "Do. Or do not. 
There is no try."} + - match: {values.0.7: 102} + +--- +"Simple from geo point": + - do: + esql.query: + body: + query: 'FROM my-index | SORT host.name, @timestamp | KEEP location | LIMIT 10' + + - match: {columns.0.name: "location"} + - match: {columns.0.type: "geo_point"} + + - match: {values.0.0: "POINT (-74.00600004941225 40.712799984030426)"} + - match: {values.1.0: "POINT (-74.00600004941225 40.712799984030426)"} + - match: {values.2.0: "POINT (-74.00600004941225 40.712799984030426)"} + - match: {values.3.0: "POINT (-74.00600004941225 40.712799984030426)"} + - match: {values.4.0: "POINT (-74.00600004941225 40.712799984030426)"} + - match: {values.5.0: "POINT (-74.00600004941225 40.712799984030426)"} + +--- +"Simple from number fields": + - do: + esql.query: + body: + query: 'FROM my-index | SORT host.name, @timestamp | KEEP process_id | LIMIT 10' + + - match: {columns.0.name: "process_id"} + - match: {columns.0.type: "integer"} + + - match: {values.0.0: 102} + - match: {values.1.0: 102} + - match: {values.2.0: 104} + - match: {values.3.0: 101} + - match: {values.4.0: 103} + - match: {values.5.0: 105} + +--- +"Simple from keyword fields": + - do: + esql.query: + body: + query: 'FROM my-index | SORT host.name, @timestamp | KEEP agent_id, http_method | LIMIT 10' + + - match: {columns.0.name: "agent_id"} + - match: {columns.0.type: "keyword"} + - match: {columns.1.name: "http_method"} + - match: {columns.1.type: "keyword"} + + - match: {values.0.0: "yoda"} + - match: {values.0.1: "PUT"} + - match: {values.1.0: "darth-vader"} + - match: {values.1.1: "POST"} + - match: {values.2.0: "yoda"} + - match: {values.2.1: "POST"} + - match: {values.3.0: "darth-vader"} + - match: {values.3.1: "GET"} + - match: {values.4.0: "obi-wan"} + - match: {values.4.1: "GET"} + - match: {values.5.0: "obi-wan"} + - match: {values.5.1: "GET"} + +--- +"Simple from boolean fields": + - do: + esql.query: + body: + query: 'FROM my-index | SORT host.name, @timestamp | KEEP is_https | LIMIT 10' 
+ + - match: {columns.0.name: "is_https"} + - match: {columns.0.type: "boolean"} + + - match: {values.0.0: false} + - match: {values.1.0: true} + - match: {values.2.0: false} + - match: {values.3.0: false} + - match: {values.4.0: false} + - match: {values.5.0: false} + +--- +"Simple from text fields": + - do: + esql.query: + body: + query: 'FROM my-index | SORT host.name, @timestamp | KEEP message | LIMIT 10' + + - match: {columns.0.name: "message"} + - match: {columns.0.type: "text"} + + - match: {values.0.0: "Do. Or do not. There is no try."} + - match: {values.1.0: "I find your lack of faith disturbing."} + - match: {values.2.0: "Wars not make one great."} + - match: {values.3.0: "No, I am your father."} + - match: {values.4.0: "May the force be with you."} + - match: {values.5.0: "That's no moon. It's a space station."} diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java index 5b04225cee105..303b94ec655dc 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java +++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java @@ -339,7 +339,8 @@ protected Object parseSourceValue(Object value) { BlockSourceReader.LeafIteratorLookup lookup = isStored() || isIndexed() ? 
BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) : BlockSourceReader.lookupMatchingAll(); - return new BlockSourceReader.LongsBlockLoader(valueFetcher, lookup); + var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); + return new BlockSourceReader.LongsBlockLoader(valueFetcher, lookup, sourceMode); } @Override From 0a8a32343d9691df25a21e4c1d7766c62c61cdbb Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 18 Oct 2024 17:36:39 +1100 Subject: [PATCH 198/449] Mute org.elasticsearch.index.mapper.TextFieldMapperTests testBlockLoaderFromRowStrideReaderWithSyntheticSource #115066 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 78f8b76aaff64..1d9d8edb04c14 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -352,6 +352,9 @@ tests: - class: org.elasticsearch.upgrades.MultiVersionRepositoryAccessIT method: testCreateAndRestoreSnapshot issue: https://github.com/elastic/elasticsearch/issues/114998 +- class: org.elasticsearch.index.mapper.TextFieldMapperTests + method: testBlockLoaderFromRowStrideReaderWithSyntheticSource + issue: https://github.com/elastic/elasticsearch/issues/115066 # Examples: # From 5e381a3a89fb734ade6a1a299881ef1be4df7019 Mon Sep 17 00:00:00 2001 From: Matteo Piergiovanni <134913285+piergm@users.noreply.github.com> Date: Fri, 18 Oct 2024 08:44:39 +0200 Subject: [PATCH 199/449] Bool query early termination should also consider must_not clauses (#115031) * Bool query early termination should also consider must_not clauses * Update docs/changelog/115031.yaml --- docs/changelog/115031.yaml | 5 +++++ .../org/elasticsearch/index/query/BoolQueryBuilder.java | 1 + .../elasticsearch/index/query/BoolQueryBuilderTests.java | 6 ++++++ 3 files changed, 12 insertions(+) create mode 100644 docs/changelog/115031.yaml diff --git a/docs/changelog/115031.yaml b/docs/changelog/115031.yaml new file mode 
100644 index 0000000000000..d8d6e1a3f8166 --- /dev/null +++ b/docs/changelog/115031.yaml @@ -0,0 +1,5 @@ +pr: 115031 +summary: Bool query early termination should also consider `must_not` clauses +area: Search +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java index 2401719caaa87..5329dbf01975a 100644 --- a/server/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java @@ -356,6 +356,7 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws if (mustClauses.size() == 0 && filterClauses.size() == 0 && shouldClauses.size() > 0 + && mustNotClauses.size() == 0 && newBuilder.shouldClauses.stream().allMatch(b -> b instanceof MatchNoneQueryBuilder)) { return new MatchNoneQueryBuilder("The \"" + getName() + "\" query was rewritten to a \"match_none\" query."); } diff --git a/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java index 25d4c1008ba7d..0fa8f70525e8a 100644 --- a/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java @@ -449,6 +449,12 @@ public void testRewriteWithMatchNone() throws IOException { rewritten = Rewriteable.rewrite(boolQueryBuilder, createSearchExecutionContext()); assertNotEquals(new MatchNoneQueryBuilder(), rewritten); + boolQueryBuilder = new BoolQueryBuilder(); + boolQueryBuilder.should(new WrapperQueryBuilder(new MatchNoneQueryBuilder().toString())); + boolQueryBuilder.mustNot(new TermQueryBuilder(TEXT_FIELD_NAME, "bar")); + rewritten = Rewriteable.rewrite(boolQueryBuilder, createSearchExecutionContext()); + assertNotEquals(new MatchNoneQueryBuilder(), rewritten); + boolQueryBuilder = 
new BoolQueryBuilder(); boolQueryBuilder.filter(new TermQueryBuilder(TEXT_FIELD_NAME, "bar")); boolQueryBuilder.mustNot(new WrapperQueryBuilder(new WrapperQueryBuilder(new MatchAllQueryBuilder().toString()).toString())); From f718c564fbbdf4b297c1083b5eb6a6d46b42a50d Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 18 Oct 2024 07:51:40 +0100 Subject: [PATCH 200/449] Reword docs on snapshot repo backup (#115062) Because of #93575 it's not sufficient to mark repositories with `readonly: true` while taking a backup. The only safe way to avoid writes is to completely unregister them. --- .../snapshot-restore/register-repository.asciidoc | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/docs/reference/snapshot-restore/register-repository.asciidoc b/docs/reference/snapshot-restore/register-repository.asciidoc index 2147ad3c684f3..6c1319c2c71b1 100644 --- a/docs/reference/snapshot-restore/register-repository.asciidoc +++ b/docs/reference/snapshot-restore/register-repository.asciidoc @@ -248,10 +248,11 @@ that you have an archive copy of its contents that you can use to recreate the repository in its current state at a later date. You must ensure that {es} does not write to the repository while you are taking -the backup of its contents. You can do this by unregistering it, or registering -it with `readonly: true`, on all your clusters. If {es} writes any data to the -repository during the backup then the contents of the backup may not be -consistent and it may not be possible to recover any data from it in future. +the backup of its contents. If {es} writes any data to the repository during +the backup then the contents of the backup may not be consistent and it may not +be possible to recover any data from it in future. Prevent writes to the +repository by unregistering the repository from the cluster which has write +access to it. 
Alternatively, if your repository supports it, you may take an atomic snapshot of the underlying filesystem and then take a backup of this filesystem From 09d4217420435eef4904b673d3e0b1d0a31ce4cb Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 18 Oct 2024 08:06:42 +0100 Subject: [PATCH 201/449] Reduce scope of `AmazonS3Reference` (#114989) It's possible that the client config, particularly its credentials, might change in the middle of a long-running operation such as a large multipart upload. Prior to this commit we would hold onto the same `AmazonS3` instance for the entire operation, but really there's no need to do so, we can obtain a potentially-fresher instance for each API call. --- .../repositories/s3/S3BlobContainer.java | 162 +++++++++--------- .../s3/S3BlobStoreContainerTests.java | 31 +++- 2 files changed, 108 insertions(+), 85 deletions(-) diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java index 902dcb42fc0cb..9757d3af861a9 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java @@ -160,23 +160,22 @@ public void writeMetadataBlob( ) throws IOException { assert purpose != OperationPurpose.SNAPSHOT_DATA && BlobContainer.assertPurposeConsistency(purpose, blobName) : purpose; final String absoluteBlobKey = buildKey(blobName); - try ( - AmazonS3Reference clientReference = blobStore.clientReference(); - ChunkedBlobOutputStream out = new ChunkedBlobOutputStream<>(blobStore.bigArrays(), blobStore.bufferSizeInBytes()) { + try (ChunkedBlobOutputStream out = new ChunkedBlobOutputStream<>(blobStore.bigArrays(), blobStore.bufferSizeInBytes()) { - private final SetOnce uploadId = new SetOnce<>(); + private final SetOnce uploadId = new SetOnce<>(); - @Override - protected 
void flushBuffer() throws IOException { - flushBuffer(false); - } + @Override + protected void flushBuffer() throws IOException { + flushBuffer(false); + } - private void flushBuffer(boolean lastPart) throws IOException { - if (buffer.size() == 0) { - return; - } - if (flushedBytes == 0L) { - assert lastPart == false : "use single part upload if there's only a single part"; + private void flushBuffer(boolean lastPart) throws IOException { + if (buffer.size() == 0) { + return; + } + if (flushedBytes == 0L) { + assert lastPart == false : "use single part upload if there's only a single part"; + try (AmazonS3Reference clientReference = blobStore.clientReference()) { uploadId.set( SocketAccess.doPrivileged( () -> clientReference.client() @@ -184,51 +183,54 @@ private void flushBuffer(boolean lastPart) throws IOException { .getUploadId() ) ); - if (Strings.isEmpty(uploadId.get())) { - throw new IOException("Failed to initialize multipart upload " + absoluteBlobKey); - } } - assert lastPart == false || successful : "must only write last part if successful"; - final UploadPartRequest uploadRequest = createPartUploadRequest( - purpose, - buffer.bytes().streamInput(), - uploadId.get(), - parts.size() + 1, - absoluteBlobKey, - buffer.size(), - lastPart - ); - final UploadPartResult uploadResponse = SocketAccess.doPrivileged( - () -> clientReference.client().uploadPart(uploadRequest) - ); - finishPart(uploadResponse.getPartETag()); + if (Strings.isEmpty(uploadId.get())) { + throw new IOException("Failed to initialize multipart upload " + absoluteBlobKey); + } } + assert lastPart == false || successful : "must only write last part if successful"; + final UploadPartRequest uploadRequest = createPartUploadRequest( + purpose, + buffer.bytes().streamInput(), + uploadId.get(), + parts.size() + 1, + absoluteBlobKey, + buffer.size(), + lastPart + ); + final UploadPartResult uploadResponse; + try (AmazonS3Reference clientReference = blobStore.clientReference()) { + uploadResponse = 
SocketAccess.doPrivileged(() -> clientReference.client().uploadPart(uploadRequest)); + } + finishPart(uploadResponse.getPartETag()); + } - @Override - protected void onCompletion() throws IOException { - if (flushedBytes == 0L) { - writeBlob(purpose, blobName, buffer.bytes(), failIfAlreadyExists); - } else { - flushBuffer(true); - final CompleteMultipartUploadRequest complRequest = new CompleteMultipartUploadRequest( - blobStore.bucket(), - absoluteBlobKey, - uploadId.get(), - parts - ); - S3BlobStore.configureRequestForMetrics(complRequest, blobStore, Operation.PUT_MULTIPART_OBJECT, purpose); + @Override + protected void onCompletion() throws IOException { + if (flushedBytes == 0L) { + writeBlob(purpose, blobName, buffer.bytes(), failIfAlreadyExists); + } else { + flushBuffer(true); + final CompleteMultipartUploadRequest complRequest = new CompleteMultipartUploadRequest( + blobStore.bucket(), + absoluteBlobKey, + uploadId.get(), + parts + ); + S3BlobStore.configureRequestForMetrics(complRequest, blobStore, Operation.PUT_MULTIPART_OBJECT, purpose); + try (AmazonS3Reference clientReference = blobStore.clientReference()) { SocketAccess.doPrivilegedVoid(() -> clientReference.client().completeMultipartUpload(complRequest)); } } + } - @Override - protected void onFailure() { - if (Strings.hasText(uploadId.get())) { - abortMultiPartUpload(purpose, uploadId.get(), absoluteBlobKey); - } + @Override + protected void onFailure() { + if (Strings.hasText(uploadId.get())) { + abortMultiPartUpload(purpose, uploadId.get(), absoluteBlobKey); } } - ) { + }) { writer.accept(out); out.markSuccess(); } @@ -360,12 +362,9 @@ public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator listBlobsByPrefix(OperationPurpose purpose, @Nullable String blobNamePrefix) throws IOException { - try (AmazonS3Reference clientReference = blobStore.clientReference()) { - return executeListing( - purpose, - clientReference, - listObjectsRequest(purpose, blobNamePrefix == null ? 
keyPath : buildKey(blobNamePrefix)) - ).stream() + try { + return executeListing(purpose, listObjectsRequest(purpose, blobNamePrefix == null ? keyPath : buildKey(blobNamePrefix))) + .stream() .flatMap(listing -> listing.getObjectSummaries().stream()) .map(summary -> new BlobMetadata(summary.getKey().substring(keyPath.length()), summary.getSize())) .collect(Collectors.toMap(BlobMetadata::name, Function.identity())); @@ -381,8 +380,8 @@ public Map listBlobs(OperationPurpose purpose) throws IOEx @Override public Map children(OperationPurpose purpose) throws IOException { - try (AmazonS3Reference clientReference = blobStore.clientReference()) { - return executeListing(purpose, clientReference, listObjectsRequest(purpose, keyPath)).stream().flatMap(listing -> { + try { + return executeListing(purpose, listObjectsRequest(purpose, keyPath)).stream().flatMap(listing -> { assert listing.getObjectSummaries().stream().noneMatch(s -> { for (String commonPrefix : listing.getCommonPrefixes()) { if (s.getKey().substring(keyPath.length()).startsWith(commonPrefix)) { @@ -403,21 +402,19 @@ public Map children(OperationPurpose purpose) throws IOEx } } - private List executeListing( - OperationPurpose purpose, - AmazonS3Reference clientReference, - ListObjectsRequest listObjectsRequest - ) { + private List executeListing(OperationPurpose purpose, ListObjectsRequest listObjectsRequest) { final List results = new ArrayList<>(); ObjectListing prevListing = null; while (true) { ObjectListing list; - if (prevListing != null) { - final var listNextBatchOfObjectsRequest = new ListNextBatchOfObjectsRequest(prevListing); - S3BlobStore.configureRequestForMetrics(listNextBatchOfObjectsRequest, blobStore, Operation.LIST_OBJECTS, purpose); - list = SocketAccess.doPrivileged(() -> clientReference.client().listNextBatchOfObjects(listNextBatchOfObjectsRequest)); - } else { - list = SocketAccess.doPrivileged(() -> clientReference.client().listObjects(listObjectsRequest)); + try (AmazonS3Reference 
clientReference = blobStore.clientReference()) { + if (prevListing != null) { + final var listNextBatchOfObjectsRequest = new ListNextBatchOfObjectsRequest(prevListing); + S3BlobStore.configureRequestForMetrics(listNextBatchOfObjectsRequest, blobStore, Operation.LIST_OBJECTS, purpose); + list = SocketAccess.doPrivileged(() -> clientReference.client().listNextBatchOfObjects(listNextBatchOfObjectsRequest)); + } else { + list = SocketAccess.doPrivileged(() -> clientReference.client().listObjects(listObjectsRequest)); + } } results.add(list); if (list.isTruncated()) { @@ -504,13 +501,14 @@ void executeMultipartUpload( final SetOnce uploadId = new SetOnce<>(); final String bucketName = s3BlobStore.bucket(); boolean success = false; - try (AmazonS3Reference clientReference = s3BlobStore.clientReference()) { - - uploadId.set( - SocketAccess.doPrivileged( - () -> clientReference.client().initiateMultipartUpload(initiateMultiPartUpload(purpose, blobName)).getUploadId() - ) - ); + try { + try (AmazonS3Reference clientReference = s3BlobStore.clientReference()) { + uploadId.set( + SocketAccess.doPrivileged( + () -> clientReference.client().initiateMultipartUpload(initiateMultiPartUpload(purpose, blobName)).getUploadId() + ) + ); + } if (Strings.isEmpty(uploadId.get())) { throw new IOException("Failed to initialize multipart upload " + blobName); } @@ -531,8 +529,12 @@ void executeMultipartUpload( ); bytesCount += uploadRequest.getPartSize(); - final UploadPartResult uploadResponse = SocketAccess.doPrivileged(() -> clientReference.client().uploadPart(uploadRequest)); - parts.add(uploadResponse.getPartETag()); + try (AmazonS3Reference clientReference = s3BlobStore.clientReference()) { + final UploadPartResult uploadResponse = SocketAccess.doPrivileged( + () -> clientReference.client().uploadPart(uploadRequest) + ); + parts.add(uploadResponse.getPartETag()); + } } if (bytesCount != blobSize) { @@ -548,7 +550,9 @@ void executeMultipartUpload( parts ); 
S3BlobStore.configureRequestForMetrics(complRequest, blobStore, Operation.PUT_MULTIPART_OBJECT, purpose); - SocketAccess.doPrivilegedVoid(() -> clientReference.client().completeMultipartUpload(complRequest)); + try (AmazonS3Reference clientReference = s3BlobStore.clientReference()) { + SocketAccess.doPrivilegedVoid(() -> clientReference.client().completeMultipartUpload(complRequest)); + } success = true; } catch (final AmazonClientException e) { diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java index f52b3f4b53a62..58bb11874fbe6 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java @@ -114,9 +114,7 @@ public void testExecuteSingleUpload() throws IOException { when(blobStore.getCannedACL()).thenReturn(cannedAccessControlList); } - final AmazonS3 client = mock(AmazonS3.class); - final AmazonS3Reference clientReference = new AmazonS3Reference(client); - when(blobStore.clientReference()).thenReturn(clientReference); + final AmazonS3 client = configureMockClient(blobStore); final ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(PutObjectRequest.class); when(client.putObject(argumentCaptor.capture())).thenReturn(new PutObjectResult()); @@ -187,9 +185,7 @@ public void testExecuteMultipartUpload() throws IOException { when(blobStore.getCannedACL()).thenReturn(cannedAccessControlList); } - final AmazonS3 client = mock(AmazonS3.class); - final AmazonS3Reference clientReference = new AmazonS3Reference(client); - when(blobStore.clientReference()).thenReturn(clientReference); + final AmazonS3 client = configureMockClient(blobStore); final ArgumentCaptor initArgCaptor = ArgumentCaptor.forClass(InitiateMultipartUploadRequest.class); final 
InitiateMultipartUploadResult initResult = new InitiateMultipartUploadResult(); @@ -260,6 +256,8 @@ public void testExecuteMultipartUpload() throws IOException { final List actualETags = compRequest.getPartETags().stream().map(PartETag::getETag).collect(Collectors.toList()); assertEquals(expectedEtags, actualETags); + + closeMockClient(blobStore); } public void testExecuteMultipartUploadAborted() { @@ -356,6 +354,27 @@ public void testExecuteMultipartUploadAborted() { assertEquals(blobName, abortRequest.getKey()); assertEquals(uploadId, abortRequest.getUploadId()); } + + closeMockClient(blobStore); + } + + private static AmazonS3 configureMockClient(S3BlobStore blobStore) { + final AmazonS3 client = mock(AmazonS3.class); + try (AmazonS3Reference clientReference = new AmazonS3Reference(client)) { + clientReference.mustIncRef(); // held by the mock, ultimately released in closeMockClient + when(blobStore.clientReference()).then(invocation -> { + clientReference.mustIncRef(); + return clientReference; + }); + } + return client; + } + + private static void closeMockClient(S3BlobStore blobStore) { + final var finalClientReference = blobStore.clientReference(); + assertFalse(finalClientReference.decRef()); + assertTrue(finalClientReference.decRef()); + assertFalse(finalClientReference.hasReferences()); } public void testNumberOfMultipartsWithZeroPartSize() { From 9a8de1c6b77731c45ba017bfc9c490812c3ca7ee Mon Sep 17 00:00:00 2001 From: matthewabbott Date: Fri, 18 Oct 2024 00:09:27 -0700 Subject: [PATCH 202/449] Add link to MAX_RETRY allocation explain docs (#113657) --- docs/reference/cluster/allocation-explain.asciidoc | 11 +++++++---- .../allocation/decider/MaxRetryAllocationDecider.java | 5 ++++- .../java/org/elasticsearch/common/ReferenceDocs.java | 1 + .../org/elasticsearch/common/reference-docs-links.txt | 1 + 4 files changed, 13 insertions(+), 5 deletions(-) diff --git a/docs/reference/cluster/allocation-explain.asciidoc 
b/docs/reference/cluster/allocation-explain.asciidoc index 6aa0c6110277c..bbbea192f0f86 100644 --- a/docs/reference/cluster/allocation-explain.asciidoc +++ b/docs/reference/cluster/allocation-explain.asciidoc @@ -159,6 +159,7 @@ node. <5> The decider which led to the `no` decision for the node. <6> An explanation as to why the decider returned a `no` decision, with a helpful hint pointing to the setting that led to the decision. In this example, a newly created index has <> that requires that it only be allocated to a node named `nonexistent_node`, which does not exist, so the index is unable to allocate. +[[maximum-number-of-retries-exceeded]] ====== Maximum number of retries exceeded The following response contains an allocation explanation for an unassigned @@ -195,7 +196,7 @@ primary shard that has reached the maximum number of allocation retry attempts. { "decider": "max_retry", "decision" : "NO", - "explanation": "shard has exceeded the maximum number of retries [5] on failed allocation attempts - manually call [/_cluster/reroute?retry_failed=true] to retry, [unassigned_info[[reason=ALLOCATION_FAILED], at[2024-07-30T21:04:12.166Z], failed_attempts[5], failed_nodes[[mEKjwwzLT1yJVb8UxT6anw]], delayed=false, details[failed shard on node [mEKjwwzLT1yJVb8UxT6anw]: failed recovery, failure RecoveryFailedException], allocation_status[deciders_no]]]" + "explanation": "shard has exceeded the maximum number of retries [5] on failed allocation attempts - manually call [POST /_cluster/reroute?retry_failed] to retry, [unassigned_info[[reason=ALLOCATION_FAILED], at[2024-07-30T21:04:12.166Z], failed_attempts[5], failed_nodes[[mEKjwwzLT1yJVb8UxT6anw]], delayed=false, details[failed shard on node [mEKjwwzLT1yJVb8UxT6anw]: failed recovery, failure RecoveryFailedException], allocation_status[deciders_no]]]" } ] } @@ -203,9 +204,11 @@ primary shard that has reached the maximum number of allocation retry attempts. 
} ---- // NOTCONSOLE - -If decider message indicates a transient allocation issue, use -the <> API to retry allocation. +When Elasticsearch is unable to allocate a shard, it will attempt to retry allocation up to +the maximum number of retries allowed. After this, Elasticsearch will stop attempting to +allocate the shard in order to prevent infinite retries which may impact cluster +performance. Run the <> API to retry allocation, which +will allocate the shard if the issue preventing allocation has been resolved. [[no-valid-shard-copy]] ====== No valid shard copy diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/MaxRetryAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/MaxRetryAllocationDecider.java index a55522ff14c83..0ab842276efc4 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/MaxRetryAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/MaxRetryAllocationDecider.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.common.ReferenceDocs; import org.elasticsearch.common.settings.Setting; /** @@ -72,9 +73,11 @@ private static Decision debugDecision(Decision decision, UnassignedInfo info, in return Decision.single( Decision.Type.NO, NAME, - "shard has exceeded the maximum number of retries [%d] on failed allocation attempts - manually call [%s] to retry, [%s]", + "shard has exceeded the maximum number of retries [%d] on failed allocation attempts - " + + "manually call [%s] to retry, and for more information, see [%s] [%s]", maxRetries, RETRY_FAILED_API, + ReferenceDocs.ALLOCATION_EXPLAIN_MAX_RETRY, info.toString() ); } else { diff --git a/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java 
b/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java index ae8de474daf93..43acda1e1ec2d 100644 --- a/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java +++ b/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java @@ -82,6 +82,7 @@ public enum ReferenceDocs { FORMING_SINGLE_NODE_CLUSTERS, CIRCUIT_BREAKER_ERRORS, ALLOCATION_EXPLAIN_NO_COPIES, + ALLOCATION_EXPLAIN_MAX_RETRY, // this comment keeps the ';' on the next line so every entry above has a trailing ',' which makes the diff for adding new links cleaner ; diff --git a/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt b/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt index b2a4c7992d1fe..3b0816aabf4aa 100644 --- a/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt +++ b/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt @@ -44,3 +44,4 @@ X_OPAQUE_ID api-conventions. FORMING_SINGLE_NODE_CLUSTERS modules-discovery-bootstrap-cluster.html#modules-discovery-bootstrap-cluster-joining CIRCUIT_BREAKER_ERRORS circuit-breaker-errors.html ALLOCATION_EXPLAIN_NO_COPIES cluster-allocation-explain.html#no-valid-shard-copy +ALLOCATION_EXPLAIN_MAX_RETRY cluster-allocation-explain.html#maximum-number-of-retries-exceeded From 0d5f85c6ad4e560ae83f3ad0b7f790c942981090 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 18 Oct 2024 18:42:29 +1100 Subject: [PATCH 203/449] Mute org.elasticsearch.index.mapper.TextFieldMapperTests testBlockLoaderFromColumnReaderWithSyntheticSource #115073 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 1d9d8edb04c14..106a595b8ab25 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -355,6 +355,9 @@ tests: - class: org.elasticsearch.index.mapper.TextFieldMapperTests method: testBlockLoaderFromRowStrideReaderWithSyntheticSource issue: 
https://github.com/elastic/elasticsearch/issues/115066 +- class: org.elasticsearch.index.mapper.TextFieldMapperTests + method: testBlockLoaderFromColumnReaderWithSyntheticSource + issue: https://github.com/elastic/elasticsearch/issues/115073 # Examples: # From 2446140a1cee68e1ca177be0b0ddb86a1b27aa78 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 18 Oct 2024 18:42:42 +1100 Subject: [PATCH 204/449] Mute org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapperTests testBlockLoaderFromColumnReaderWithSyntheticSource #115074 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 106a595b8ab25..ff827763a73b4 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -358,6 +358,9 @@ tests: - class: org.elasticsearch.index.mapper.TextFieldMapperTests method: testBlockLoaderFromColumnReaderWithSyntheticSource issue: https://github.com/elastic/elasticsearch/issues/115073 +- class: org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapperTests + method: testBlockLoaderFromColumnReaderWithSyntheticSource + issue: https://github.com/elastic/elasticsearch/issues/115074 # Examples: # From 077998320701fd1838d69c24e4ab6d988ed5cf30 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 18 Oct 2024 18:50:46 +1100 Subject: [PATCH 205/449] Mute org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapperTests testBlockLoaderFromRowStrideReaderWithSyntheticSource #115076 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index ff827763a73b4..dcd70ad3fb83c 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -361,6 +361,9 @@ tests: - class: org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapperTests method: testBlockLoaderFromColumnReaderWithSyntheticSource issue: 
https://github.com/elastic/elasticsearch/issues/115074 +- class: org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapperTests + method: testBlockLoaderFromRowStrideReaderWithSyntheticSource + issue: https://github.com/elastic/elasticsearch/issues/115076 # Examples: # From 6f608803302230a24c2d81fc9c4b863eb3513cb8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aur=C3=A9lien=20FOUCRET?= Date: Fri, 18 Oct 2024 09:58:26 +0200 Subject: [PATCH 206/449] [KQL Query] Create the ANTLR parser (#114927) --- x-pack/plugin/kql/build.gradle | 113 ++ x-pack/plugin/kql/src/main/antlr/KqlBase.g4 | 145 ++ .../plugin/kql/src/main/antlr/KqlBase.tokens | 21 + .../kql/src/main/antlr/KqlBaseLexer.tokens | 21 + .../elasticsearch/xpack/kql/KqlPlugin.java | 16 + .../xpack/kql/parser/KqlAstBuilder.java | 29 + .../xpack/kql/parser/KqlBase.interp | 52 + .../xpack/kql/parser/KqlBaseBaseListener.java | 252 ++++ .../xpack/kql/parser/KqlBaseBaseVisitor.java | 142 ++ .../xpack/kql/parser/KqlBaseLexer.interp | 71 + .../xpack/kql/parser/KqlBaseLexer.java | 245 ++++ .../xpack/kql/parser/KqlBaseListener.java | 194 +++ .../xpack/kql/parser/KqlBaseParser.java | 1203 +++++++++++++++++ .../xpack/kql/parser/KqlBaseVisitor.java | 126 ++ .../xpack/kql/parser/KqlParser.java | 77 ++ .../xpack/kql/parser/KqlParsingException.java | 59 + .../xpack/kql/parser/KqlParserTests.java | 115 ++ .../kql/src/test/resources/supported-queries | 111 ++ .../src/test/resources/unsupported-queries | 41 + 19 files changed, 3033 insertions(+) create mode 100644 x-pack/plugin/kql/build.gradle create mode 100644 x-pack/plugin/kql/src/main/antlr/KqlBase.g4 create mode 100644 x-pack/plugin/kql/src/main/antlr/KqlBase.tokens create mode 100644 x-pack/plugin/kql/src/main/antlr/KqlBaseLexer.tokens create mode 100644 x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/KqlPlugin.java create mode 100644 x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlAstBuilder.java create mode 100644 
x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp create mode 100644 x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseListener.java create mode 100644 x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseVisitor.java create mode 100644 x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseLexer.interp create mode 100644 x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseLexer.java create mode 100644 x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseListener.java create mode 100644 x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java create mode 100644 x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseVisitor.java create mode 100644 x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParser.java create mode 100644 x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParsingException.java create mode 100644 x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlParserTests.java create mode 100644 x-pack/plugin/kql/src/test/resources/supported-queries create mode 100644 x-pack/plugin/kql/src/test/resources/unsupported-queries diff --git a/x-pack/plugin/kql/build.gradle b/x-pack/plugin/kql/build.gradle new file mode 100644 index 0000000000000..d1c949834b021 --- /dev/null +++ b/x-pack/plugin/kql/build.gradle @@ -0,0 +1,113 @@ +import org.elasticsearch.gradle.internal.info.BuildParams + +apply plugin: 'elasticsearch.internal-es-plugin' +apply plugin: 'elasticsearch.internal-cluster-test' +apply plugin: 'elasticsearch.publish' + +esplugin { + name 'x-pack-kql' + description 'Elasticsearch Expanded Pack Plugin - KQL query' + classname 'org.elasticsearch.xpack.kql.KqlPlugin' + extendedPlugins = ['x-pack-core'] +} +base { + archivesName = 'x-pack-kql' +} + +dependencies { + compileOnly project(path: xpackModule('core')) 
+ compileOnly "org.antlr:antlr4-runtime:${versions.antlr4}" + + testImplementation "org.antlr:antlr4-runtime:${versions.antlr4}" + testImplementation project(':test:framework') + testImplementation(testArtifact(project(xpackModule('core')))) +} + +/**************************************************************** + * Enable QA/rest integration tests for snapshot builds only * + * TODO: Enable for all builds upon this feature release * + ****************************************************************/ +if (BuildParams.isSnapshotBuild()) { + addQaCheckDependencies(project) +} + +/********************************** + * KQL parser configuration * + **********************************/ +configurations { + regenerate +} + +dependencies { + regenerate "org.antlr:antlr4:${versions.antlr4}" +} + +String grammarPath = 'src/main/antlr' +String outputPath = 'src/main/java/org/elasticsearch/xpack/kql/parser' + +pluginManager.withPlugin('com.diffplug.spotless') { + spotless { + java { + // for some reason "${outputPath}/KqlBaser*.java" does not match the same files... 
+ targetExclude "src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase*.java" + } + } +} +tasks.named('checkstyleMain').configure { + exclude { it.file.toString().contains("src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase") } +} + +tasks.register("cleanGenerated", Delete) { + delete fileTree(grammarPath) { + include '*.tokens' + } + delete fileTree(outputPath) { + include 'KqlBase*.java' + include 'KqlBase*.interp' + } +} + +tasks.register("regenParser", JavaExec) { + dependsOn "cleanGenerated" + mainClass = 'org.antlr.v4.Tool' + classpath = configurations.regenerate + systemProperty 'file.encoding', 'UTF-8' + systemProperty 'user.language', 'en' + systemProperty 'user.country', 'US' + systemProperty 'user.variant', '' + args '-Werror', + '-package', 'org.elasticsearch.xpack.kql.parser', + '-listener', + '-visitor', + '-o', outputPath, + "${file(grammarPath)}/KqlBase.g4" +} + +tasks.register("regen") { + dependsOn "regenParser" + doLast { + // moves token files to grammar directory for use with IDE's + ant.move(file: "${outputPath}/KqlBase.tokens", toDir: grammarPath) + ant.move(file: "${outputPath}/KqlBaseLexer.tokens", toDir: grammarPath) + // make the generated classes package private + ant.replaceregexp(match: 'public ((interface|class) \\QKqlBase\\E\\w+)', + replace: '\\1', + encoding: 'UTF-8') { + fileset(dir: outputPath, includes: 'KqlBase*.java') + } + // nuke timestamps/filenames in generated files + ant.replaceregexp(match: '\\Q// Generated from \\E.*', + replace: '\\/\\/ ANTLR GENERATED CODE: DO NOT EDIT', + encoding: 'UTF-8') { + fileset(dir: outputPath, includes: 'KqlBase*.java') + } + // remove tabs in antlr generated files + ant.replaceregexp(match: '\t', flags: 'g', replace: ' ', encoding: 'UTF-8') { + fileset(dir: outputPath, includes: 'KqlBase*.java') + } + // fix line endings + ant.fixcrlf(srcdir: outputPath, eol: 'lf') { + patternset(includes: 'KqlBase*.java') + } + } +} diff --git a/x-pack/plugin/kql/src/main/antlr/KqlBase.g4 
b/x-pack/plugin/kql/src/main/antlr/KqlBase.g4 new file mode 100644 index 0000000000000..cffa2db9f959a --- /dev/null +++ b/x-pack/plugin/kql/src/main/antlr/KqlBase.g4 @@ -0,0 +1,145 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +grammar KqlBase; + + +@header { +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +} + +options { + caseInsensitive=true; +} + +topLevelQuery + : query? EOF + ; + +query + : query (AND | OR) query #booleanQuery + | NOT subQuery=simpleQuery #notQuery + | simpleQuery #defaultQuery + ; + +simpleQuery + : nestedQuery + | expression + | parenthesizedQuery + ; + +expression + : fieldTermQuery + | fieldRangeQuery + ; + +nestedQuery + : fieldName COLON LEFT_CURLY_BRACKET query RIGHT_CURLY_BRACKET + ; + +parenthesizedQuery: + LEFT_PARENTHESIS query RIGHT_PARENTHESIS; + +fieldRangeQuery + : fieldName operator=OP_COMPARE rangeQueryValue + ; + +fieldTermQuery + : (fieldName COLON)? 
termQueryValue + ; + +fieldName + : wildcardExpression + | unquotedLiteralExpression + | quotedStringExpression + ; + +rangeQueryValue + : unquotedLiteralExpression + | quotedStringExpression + ; + +termQueryValue + : wildcardExpression + | quotedStringExpression + | termValue=unquotedLiteralExpression + | groupingTermExpression; + +groupingTermExpression + : LEFT_PARENTHESIS unquotedLiteralExpression RIGHT_PARENTHESIS + ; + +unquotedLiteralExpression + : UNQUOTED_LITERAL+ + ; + +quotedStringExpression + : QUOTED_STRING + ; + +wildcardExpression + : WILDCARD +; + + +DEFAULT_SKIP: WHITESPACE -> skip; + +AND: 'and'; +OR: 'or'; +NOT: 'not'; + +COLON: ':'; +OP_COMPARE: OP_LESS | OP_MORE | OP_LESS_EQ | OP_MORE_EQ; + +LEFT_PARENTHESIS: '('; +RIGHT_PARENTHESIS: ')'; +LEFT_CURLY_BRACKET: '{'; +RIGHT_CURLY_BRACKET: '}'; + +UNQUOTED_LITERAL: WILDCARD* UNQUOTED_LITERAL_CHAR+ WILDCARD*; + +QUOTED_STRING: '"'QUOTED_CHAR*'"'; + +WILDCARD: WILDCARD_CHAR+; + +fragment WILDCARD_CHAR: '*'; +fragment OP_LESS: '<'; +fragment OP_LESS_EQ: '<='; +fragment OP_MORE: '>'; +fragment OP_MORE_EQ: '>='; + +fragment UNQUOTED_LITERAL_CHAR + : ESCAPED_WHITESPACE + | ESCAPED_SPECIAL_CHAR + | ESCAPE_UNICODE_SEQUENCE + | '\\' (AND | OR | NOT) + | WILDCARD_CHAR UNQUOTED_LITERAL_CHAR + | NON_SPECIAL_CHAR + ; + +fragment QUOTED_CHAR + : ESCAPED_WHITESPACE + | ESCAPE_UNICODE_SEQUENCE + | ESCAPED_QUOTE + | ~["] + ; + +fragment WHITESPACE: [ \t\n\r\u3000]; +fragment ESCAPED_WHITESPACE: '\\r' | '\\t' | '\\n'; +fragment NON_SPECIAL_CHAR: ~[ \\():<>"*{}]; +fragment ESCAPED_SPECIAL_CHAR: '\\'[ \\():<>"*{}]; + +fragment ESCAPED_QUOTE: '\\"'; + +fragment ESCAPE_UNICODE_SEQUENCE: '\\' UNICODE_SEQUENCE; +fragment UNICODE_SEQUENCE: 'u' HEX_DIGIT HEX_DIGIT HEX_DIGIT HEX_DIGIT; +fragment HEX_DIGIT: [0-9a-f]; diff --git a/x-pack/plugin/kql/src/main/antlr/KqlBase.tokens b/x-pack/plugin/kql/src/main/antlr/KqlBase.tokens new file mode 100644 index 0000000000000..268ae0613b9f0 --- /dev/null +++ 
b/x-pack/plugin/kql/src/main/antlr/KqlBase.tokens @@ -0,0 +1,21 @@ +DEFAULT_SKIP=1 +AND=2 +OR=3 +NOT=4 +COLON=5 +OP_COMPARE=6 +LEFT_PARENTHESIS=7 +RIGHT_PARENTHESIS=8 +LEFT_CURLY_BRACKET=9 +RIGHT_CURLY_BRACKET=10 +UNQUOTED_LITERAL=11 +QUOTED_STRING=12 +WILDCARD=13 +'and'=2 +'or'=3 +'not'=4 +':'=5 +'('=7 +')'=8 +'{'=9 +'}'=10 diff --git a/x-pack/plugin/kql/src/main/antlr/KqlBaseLexer.tokens b/x-pack/plugin/kql/src/main/antlr/KqlBaseLexer.tokens new file mode 100644 index 0000000000000..268ae0613b9f0 --- /dev/null +++ b/x-pack/plugin/kql/src/main/antlr/KqlBaseLexer.tokens @@ -0,0 +1,21 @@ +DEFAULT_SKIP=1 +AND=2 +OR=3 +NOT=4 +COLON=5 +OP_COMPARE=6 +LEFT_PARENTHESIS=7 +RIGHT_PARENTHESIS=8 +LEFT_CURLY_BRACKET=9 +RIGHT_CURLY_BRACKET=10 +UNQUOTED_LITERAL=11 +QUOTED_STRING=12 +WILDCARD=13 +'and'=2 +'or'=3 +'not'=4 +':'=5 +'('=7 +')'=8 +'{'=9 +'}'=10 diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/KqlPlugin.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/KqlPlugin.java new file mode 100644 index 0000000000000..4734924b23618 --- /dev/null +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/KqlPlugin.java @@ -0,0 +1,16 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.kql; + +import org.elasticsearch.plugins.ExtensiblePlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.SearchPlugin; + +public class KqlPlugin extends Plugin implements SearchPlugin, ExtensiblePlugin { + +} diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlAstBuilder.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlAstBuilder.java new file mode 100644 index 0000000000000..4ee7bdc1c7f21 --- /dev/null +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlAstBuilder.java @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.kql.parser; + +import org.antlr.v4.runtime.ParserRuleContext; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.SearchExecutionContext; + +class KqlAstBuilder extends KqlBaseBaseVisitor { + private final SearchExecutionContext searchExecutionContext; + + KqlAstBuilder(SearchExecutionContext searchExecutionContext) { + this.searchExecutionContext = searchExecutionContext; + } + + public QueryBuilder toQueryBuilder(ParserRuleContext ctx) { + if (ctx instanceof KqlBaseParser.TopLevelQueryContext topLeveQueryContext) { + return new MatchAllQueryBuilder(); + } + + throw new IllegalArgumentException("context should be of type TopLevelQueryContext"); + } +} diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp new file mode 100644 index 0000000000000..1954195b52363 --- /dev/null +++ 
b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp @@ -0,0 +1,52 @@ +token literal names: +null +null +'and' +'or' +'not' +':' +null +'(' +')' +'{' +'}' +null +null +null + +token symbolic names: +null +DEFAULT_SKIP +AND +OR +NOT +COLON +OP_COMPARE +LEFT_PARENTHESIS +RIGHT_PARENTHESIS +LEFT_CURLY_BRACKET +RIGHT_CURLY_BRACKET +UNQUOTED_LITERAL +QUOTED_STRING +WILDCARD + +rule names: +topLevelQuery +query +simpleQuery +expression +nestedQuery +parenthesizedQuery +fieldRangeQuery +fieldTermQuery +fieldName +rangeQueryValue +termQueryValue +groupingTermExpression +unquotedLiteralExpression +quotedStringExpression +wildcardExpression + + +atn: +[4, 1, 13, 108, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 1, 0, 3, 0, 32, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 40, 8, 1, 1, 1, 1, 1, 1, 1, 5, 1, 45, 8, 1, 10, 1, 12, 1, 48, 9, 1, 1, 2, 1, 2, 1, 2, 3, 2, 53, 8, 2, 1, 3, 1, 3, 3, 3, 57, 8, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 3, 7, 76, 8, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 3, 8, 83, 8, 8, 1, 9, 1, 9, 3, 9, 87, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 93, 8, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 4, 12, 100, 8, 12, 11, 12, 12, 12, 101, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 0, 1, 2, 15, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 0, 1, 1, 0, 2, 3, 106, 0, 31, 1, 0, 0, 0, 2, 39, 1, 0, 0, 0, 4, 52, 1, 0, 0, 0, 6, 56, 1, 0, 0, 0, 8, 58, 1, 0, 0, 0, 10, 64, 1, 0, 0, 0, 12, 68, 1, 0, 0, 0, 14, 75, 1, 0, 0, 0, 16, 82, 1, 0, 0, 0, 18, 86, 1, 0, 0, 0, 20, 92, 1, 0, 0, 0, 22, 94, 1, 0, 0, 0, 24, 99, 1, 0, 0, 0, 26, 103, 1, 0, 0, 0, 28, 105, 1, 0, 0, 0, 30, 32, 3, 2, 1, 0, 31, 30, 1, 0, 0, 0, 31, 32, 1, 0, 0, 0, 32, 33, 1, 0, 0, 0, 33, 34, 5, 0, 0, 1, 34, 1, 1, 0, 0, 0, 35, 36, 6, 1, -1, 0, 36, 37, 5, 4, 0, 0, 37, 40, 3, 4, 2, 0, 
38, 40, 3, 4, 2, 0, 39, 35, 1, 0, 0, 0, 39, 38, 1, 0, 0, 0, 40, 46, 1, 0, 0, 0, 41, 42, 10, 3, 0, 0, 42, 43, 7, 0, 0, 0, 43, 45, 3, 2, 1, 4, 44, 41, 1, 0, 0, 0, 45, 48, 1, 0, 0, 0, 46, 44, 1, 0, 0, 0, 46, 47, 1, 0, 0, 0, 47, 3, 1, 0, 0, 0, 48, 46, 1, 0, 0, 0, 49, 53, 3, 8, 4, 0, 50, 53, 3, 6, 3, 0, 51, 53, 3, 10, 5, 0, 52, 49, 1, 0, 0, 0, 52, 50, 1, 0, 0, 0, 52, 51, 1, 0, 0, 0, 53, 5, 1, 0, 0, 0, 54, 57, 3, 14, 7, 0, 55, 57, 3, 12, 6, 0, 56, 54, 1, 0, 0, 0, 56, 55, 1, 0, 0, 0, 57, 7, 1, 0, 0, 0, 58, 59, 3, 16, 8, 0, 59, 60, 5, 5, 0, 0, 60, 61, 5, 9, 0, 0, 61, 62, 3, 2, 1, 0, 62, 63, 5, 10, 0, 0, 63, 9, 1, 0, 0, 0, 64, 65, 5, 7, 0, 0, 65, 66, 3, 2, 1, 0, 66, 67, 5, 8, 0, 0, 67, 11, 1, 0, 0, 0, 68, 69, 3, 16, 8, 0, 69, 70, 5, 6, 0, 0, 70, 71, 3, 18, 9, 0, 71, 13, 1, 0, 0, 0, 72, 73, 3, 16, 8, 0, 73, 74, 5, 5, 0, 0, 74, 76, 1, 0, 0, 0, 75, 72, 1, 0, 0, 0, 75, 76, 1, 0, 0, 0, 76, 77, 1, 0, 0, 0, 77, 78, 3, 20, 10, 0, 78, 15, 1, 0, 0, 0, 79, 83, 3, 28, 14, 0, 80, 83, 3, 24, 12, 0, 81, 83, 3, 26, 13, 0, 82, 79, 1, 0, 0, 0, 82, 80, 1, 0, 0, 0, 82, 81, 1, 0, 0, 0, 83, 17, 1, 0, 0, 0, 84, 87, 3, 24, 12, 0, 85, 87, 3, 26, 13, 0, 86, 84, 1, 0, 0, 0, 86, 85, 1, 0, 0, 0, 87, 19, 1, 0, 0, 0, 88, 93, 3, 28, 14, 0, 89, 93, 3, 26, 13, 0, 90, 93, 3, 24, 12, 0, 91, 93, 3, 22, 11, 0, 92, 88, 1, 0, 0, 0, 92, 89, 1, 0, 0, 0, 92, 90, 1, 0, 0, 0, 92, 91, 1, 0, 0, 0, 93, 21, 1, 0, 0, 0, 94, 95, 5, 7, 0, 0, 95, 96, 3, 24, 12, 0, 96, 97, 5, 8, 0, 0, 97, 23, 1, 0, 0, 0, 98, 100, 5, 11, 0, 0, 99, 98, 1, 0, 0, 0, 100, 101, 1, 0, 0, 0, 101, 99, 1, 0, 0, 0, 101, 102, 1, 0, 0, 0, 102, 25, 1, 0, 0, 0, 103, 104, 5, 12, 0, 0, 104, 27, 1, 0, 0, 0, 105, 106, 5, 13, 0, 0, 106, 29, 1, 0, 0, 0, 10, 31, 39, 46, 52, 56, 75, 82, 86, 92, 101] \ No newline at end of file diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseListener.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseListener.java new file mode 100644 index 
0000000000000..1b4282b5dbbea --- /dev/null +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseListener.java @@ -0,0 +1,252 @@ +// ANTLR GENERATED CODE: DO NOT EDIT +package org.elasticsearch.xpack.kql.parser; + +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + + +import org.antlr.v4.runtime.ParserRuleContext; +import org.antlr.v4.runtime.tree.ErrorNode; +import org.antlr.v4.runtime.tree.TerminalNode; + +/** + * This class provides an empty implementation of {@link KqlBaseListener}, + * which can be extended to create a listener which only needs to handle a subset + * of the available methods. + */ +@SuppressWarnings("CheckReturnValue") +class KqlBaseBaseListener implements KqlBaseListener { + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterTopLevelQuery(KqlBaseParser.TopLevelQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitTopLevelQuery(KqlBaseParser.TopLevelQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterNotQuery(KqlBaseParser.NotQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitNotQuery(KqlBaseParser.NotQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterBooleanQuery(KqlBaseParser.BooleanQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitBooleanQuery(KqlBaseParser.BooleanQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterDefaultQuery(KqlBaseParser.DefaultQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitDefaultQuery(KqlBaseParser.DefaultQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterSimpleQuery(KqlBaseParser.SimpleQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitSimpleQuery(KqlBaseParser.SimpleQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterExpression(KqlBaseParser.ExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitExpression(KqlBaseParser.ExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterNestedQuery(KqlBaseParser.NestedQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitNestedQuery(KqlBaseParser.NestedQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterFieldRangeQuery(KqlBaseParser.FieldRangeQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitFieldRangeQuery(KqlBaseParser.FieldRangeQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterFieldTermQuery(KqlBaseParser.FieldTermQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitFieldTermQuery(KqlBaseParser.FieldTermQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterFieldName(KqlBaseParser.FieldNameContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitFieldName(KqlBaseParser.FieldNameContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterRangeQueryValue(KqlBaseParser.RangeQueryValueContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitRangeQueryValue(KqlBaseParser.RangeQueryValueContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterTermQueryValue(KqlBaseParser.TermQueryValueContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitTermQueryValue(KqlBaseParser.TermQueryValueContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterGroupingTermExpression(KqlBaseParser.GroupingTermExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitGroupingTermExpression(KqlBaseParser.GroupingTermExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterUnquotedLiteralExpression(KqlBaseParser.UnquotedLiteralExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitUnquotedLiteralExpression(KqlBaseParser.UnquotedLiteralExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterQuotedStringExpression(KqlBaseParser.QuotedStringExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitQuotedStringExpression(KqlBaseParser.QuotedStringExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterWildcardExpression(KqlBaseParser.WildcardExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitWildcardExpression(KqlBaseParser.WildcardExpressionContext ctx) { } + + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterEveryRule(ParserRuleContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitEveryRule(ParserRuleContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void visitTerminal(TerminalNode node) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void visitErrorNode(ErrorNode node) { } +} diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseVisitor.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseVisitor.java new file mode 100644 index 0000000000000..09cd668804154 --- /dev/null +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseVisitor.java @@ -0,0 +1,142 @@ +// ANTLR GENERATED CODE: DO NOT EDIT +package org.elasticsearch.xpack.kql.parser; + +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import org.antlr.v4.runtime.tree.AbstractParseTreeVisitor; + +/** + * This class provides an empty implementation of {@link KqlBaseVisitor}, + * which can be extended to create a visitor which only needs to handle a subset + * of the available methods. + * + * @param The return type of the visit operation. Use {@link Void} for + * operations with no return type. + */ +@SuppressWarnings("CheckReturnValue") +class KqlBaseBaseVisitor extends AbstractParseTreeVisitor implements KqlBaseVisitor { + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitTopLevelQuery(KqlBaseParser.TopLevelQueryContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitNotQuery(KqlBaseParser.NotQueryContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitBooleanQuery(KqlBaseParser.BooleanQueryContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitDefaultQuery(KqlBaseParser.DefaultQueryContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitSimpleQuery(KqlBaseParser.SimpleQueryContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitExpression(KqlBaseParser.ExpressionContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitNestedQuery(KqlBaseParser.NestedQueryContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitFieldRangeQuery(KqlBaseParser.FieldRangeQueryContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitFieldTermQuery(KqlBaseParser.FieldTermQueryContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitFieldName(KqlBaseParser.FieldNameContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitRangeQueryValue(KqlBaseParser.RangeQueryValueContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitTermQueryValue(KqlBaseParser.TermQueryValueContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitGroupingTermExpression(KqlBaseParser.GroupingTermExpressionContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitUnquotedLiteralExpression(KqlBaseParser.UnquotedLiteralExpressionContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitQuotedStringExpression(KqlBaseParser.QuotedStringExpressionContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitWildcardExpression(KqlBaseParser.WildcardExpressionContext ctx) { return visitChildren(ctx); } +} diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseLexer.interp b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseLexer.interp new file mode 100644 index 0000000000000..d178df5fcbc88 --- /dev/null +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseLexer.interp @@ -0,0 +1,71 @@ +token literal names: +null +null +'and' +'or' +'not' +':' +null +'(' +')' +'{' +'}' +null +null +null + +token symbolic names: +null +DEFAULT_SKIP +AND +OR +NOT +COLON +OP_COMPARE +LEFT_PARENTHESIS +RIGHT_PARENTHESIS +LEFT_CURLY_BRACKET +RIGHT_CURLY_BRACKET +UNQUOTED_LITERAL +QUOTED_STRING +WILDCARD + +rule names: +DEFAULT_SKIP +AND +OR +NOT +COLON +OP_COMPARE +LEFT_PARENTHESIS +RIGHT_PARENTHESIS +LEFT_CURLY_BRACKET +RIGHT_CURLY_BRACKET +UNQUOTED_LITERAL +QUOTED_STRING +WILDCARD +WILDCARD_CHAR +OP_LESS +OP_LESS_EQ +OP_MORE +OP_MORE_EQ +UNQUOTED_LITERAL_CHAR +QUOTED_CHAR +WHITESPACE +ESCAPED_WHITESPACE +NON_SPECIAL_CHAR +ESCAPED_SPECIAL_CHAR +ESCAPED_QUOTE +ESCAPE_UNICODE_SEQUENCE +UNICODE_SEQUENCE +HEX_DIGIT + +channel names: +DEFAULT_TOKEN_CHANNEL +HIDDEN + +mode names: +DEFAULT_MODE + +atn: +[4, 0, 13, 181, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 79, 8, 5, 1, 6, 1, 6, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 10, 5, 10, 90, 8, 10, 10, 10, 12, 10, 93, 9, 10, 1, 10, 4, 10, 96, 8, 10, 11, 10, 12, 10, 97, 1, 10, 5, 
10, 101, 8, 10, 10, 10, 12, 10, 104, 9, 10, 1, 11, 1, 11, 5, 11, 108, 8, 11, 10, 11, 12, 11, 111, 9, 11, 1, 11, 1, 11, 1, 12, 4, 12, 116, 8, 12, 11, 12, 12, 12, 117, 1, 13, 1, 13, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 139, 8, 18, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 145, 8, 18, 1, 19, 1, 19, 1, 19, 1, 19, 3, 19, 151, 8, 19, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 3, 21, 161, 8, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 0, 0, 28, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 0, 29, 0, 31, 0, 33, 0, 35, 0, 37, 0, 39, 0, 41, 0, 43, 0, 45, 0, 47, 0, 49, 0, 51, 0, 53, 0, 55, 0, 1, 0, 11, 2, 0, 65, 65, 97, 97, 2, 0, 78, 78, 110, 110, 2, 0, 68, 68, 100, 100, 2, 0, 79, 79, 111, 111, 2, 0, 82, 82, 114, 114, 2, 0, 84, 84, 116, 116, 1, 0, 34, 34, 4, 0, 9, 10, 13, 13, 32, 32, 12288, 12288, 9, 0, 32, 32, 34, 34, 40, 42, 58, 58, 60, 60, 62, 62, 92, 92, 123, 123, 125, 125, 2, 0, 85, 85, 117, 117, 3, 0, 48, 57, 65, 70, 97, 102, 185, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 1, 57, 1, 0, 0, 0, 3, 61, 1, 0, 0, 0, 5, 65, 1, 0, 0, 0, 7, 68, 1, 0, 0, 0, 9, 72, 1, 0, 0, 0, 11, 78, 1, 0, 0, 0, 13, 80, 1, 0, 0, 0, 15, 82, 1, 0, 0, 0, 17, 84, 1, 0, 0, 0, 19, 86, 1, 0, 0, 0, 21, 91, 1, 0, 0, 0, 23, 105, 1, 0, 0, 0, 25, 115, 1, 0, 0, 0, 27, 119, 1, 0, 0, 0, 29, 121, 1, 0, 0, 0, 31, 123, 1, 0, 0, 0, 33, 126, 1, 0, 0, 0, 35, 128, 1, 0, 0, 0, 37, 144, 1, 0, 0, 0, 39, 150, 1, 0, 0, 0, 41, 152, 1, 0, 0, 0, 43, 160, 1, 0, 0, 0, 45, 162, 1, 0, 0, 0, 47, 164, 1, 0, 0, 0, 49, 167, 1, 0, 0, 0, 51, 170, 1, 0, 0, 0, 53, 173, 1, 0, 0, 0, 55, 179, 1, 0, 0, 0, 57, 58, 
3, 41, 20, 0, 58, 59, 1, 0, 0, 0, 59, 60, 6, 0, 0, 0, 60, 2, 1, 0, 0, 0, 61, 62, 7, 0, 0, 0, 62, 63, 7, 1, 0, 0, 63, 64, 7, 2, 0, 0, 64, 4, 1, 0, 0, 0, 65, 66, 7, 3, 0, 0, 66, 67, 7, 4, 0, 0, 67, 6, 1, 0, 0, 0, 68, 69, 7, 1, 0, 0, 69, 70, 7, 3, 0, 0, 70, 71, 7, 5, 0, 0, 71, 8, 1, 0, 0, 0, 72, 73, 5, 58, 0, 0, 73, 10, 1, 0, 0, 0, 74, 79, 3, 29, 14, 0, 75, 79, 3, 33, 16, 0, 76, 79, 3, 31, 15, 0, 77, 79, 3, 35, 17, 0, 78, 74, 1, 0, 0, 0, 78, 75, 1, 0, 0, 0, 78, 76, 1, 0, 0, 0, 78, 77, 1, 0, 0, 0, 79, 12, 1, 0, 0, 0, 80, 81, 5, 40, 0, 0, 81, 14, 1, 0, 0, 0, 82, 83, 5, 41, 0, 0, 83, 16, 1, 0, 0, 0, 84, 85, 5, 123, 0, 0, 85, 18, 1, 0, 0, 0, 86, 87, 5, 125, 0, 0, 87, 20, 1, 0, 0, 0, 88, 90, 3, 25, 12, 0, 89, 88, 1, 0, 0, 0, 90, 93, 1, 0, 0, 0, 91, 89, 1, 0, 0, 0, 91, 92, 1, 0, 0, 0, 92, 95, 1, 0, 0, 0, 93, 91, 1, 0, 0, 0, 94, 96, 3, 37, 18, 0, 95, 94, 1, 0, 0, 0, 96, 97, 1, 0, 0, 0, 97, 95, 1, 0, 0, 0, 97, 98, 1, 0, 0, 0, 98, 102, 1, 0, 0, 0, 99, 101, 3, 25, 12, 0, 100, 99, 1, 0, 0, 0, 101, 104, 1, 0, 0, 0, 102, 100, 1, 0, 0, 0, 102, 103, 1, 0, 0, 0, 103, 22, 1, 0, 0, 0, 104, 102, 1, 0, 0, 0, 105, 109, 5, 34, 0, 0, 106, 108, 3, 39, 19, 0, 107, 106, 1, 0, 0, 0, 108, 111, 1, 0, 0, 0, 109, 107, 1, 0, 0, 0, 109, 110, 1, 0, 0, 0, 110, 112, 1, 0, 0, 0, 111, 109, 1, 0, 0, 0, 112, 113, 5, 34, 0, 0, 113, 24, 1, 0, 0, 0, 114, 116, 3, 27, 13, 0, 115, 114, 1, 0, 0, 0, 116, 117, 1, 0, 0, 0, 117, 115, 1, 0, 0, 0, 117, 118, 1, 0, 0, 0, 118, 26, 1, 0, 0, 0, 119, 120, 5, 42, 0, 0, 120, 28, 1, 0, 0, 0, 121, 122, 5, 60, 0, 0, 122, 30, 1, 0, 0, 0, 123, 124, 5, 60, 0, 0, 124, 125, 5, 61, 0, 0, 125, 32, 1, 0, 0, 0, 126, 127, 5, 62, 0, 0, 127, 34, 1, 0, 0, 0, 128, 129, 5, 62, 0, 0, 129, 130, 5, 61, 0, 0, 130, 36, 1, 0, 0, 0, 131, 145, 3, 43, 21, 0, 132, 145, 3, 47, 23, 0, 133, 145, 3, 51, 25, 0, 134, 138, 5, 92, 0, 0, 135, 139, 3, 3, 1, 0, 136, 139, 3, 5, 2, 0, 137, 139, 3, 7, 3, 0, 138, 135, 1, 0, 0, 0, 138, 136, 1, 0, 0, 0, 138, 137, 1, 0, 0, 0, 139, 145, 1, 0, 0, 0, 140, 141, 3, 27, 13, 0, 
141, 142, 3, 37, 18, 0, 142, 145, 1, 0, 0, 0, 143, 145, 3, 45, 22, 0, 144, 131, 1, 0, 0, 0, 144, 132, 1, 0, 0, 0, 144, 133, 1, 0, 0, 0, 144, 134, 1, 0, 0, 0, 144, 140, 1, 0, 0, 0, 144, 143, 1, 0, 0, 0, 145, 38, 1, 0, 0, 0, 146, 151, 3, 43, 21, 0, 147, 151, 3, 51, 25, 0, 148, 151, 3, 49, 24, 0, 149, 151, 8, 6, 0, 0, 150, 146, 1, 0, 0, 0, 150, 147, 1, 0, 0, 0, 150, 148, 1, 0, 0, 0, 150, 149, 1, 0, 0, 0, 151, 40, 1, 0, 0, 0, 152, 153, 7, 7, 0, 0, 153, 42, 1, 0, 0, 0, 154, 155, 5, 92, 0, 0, 155, 161, 7, 4, 0, 0, 156, 157, 5, 92, 0, 0, 157, 161, 7, 5, 0, 0, 158, 159, 5, 92, 0, 0, 159, 161, 7, 1, 0, 0, 160, 154, 1, 0, 0, 0, 160, 156, 1, 0, 0, 0, 160, 158, 1, 0, 0, 0, 161, 44, 1, 0, 0, 0, 162, 163, 8, 8, 0, 0, 163, 46, 1, 0, 0, 0, 164, 165, 5, 92, 0, 0, 165, 166, 7, 8, 0, 0, 166, 48, 1, 0, 0, 0, 167, 168, 5, 92, 0, 0, 168, 169, 5, 34, 0, 0, 169, 50, 1, 0, 0, 0, 170, 171, 5, 92, 0, 0, 171, 172, 3, 53, 26, 0, 172, 52, 1, 0, 0, 0, 173, 174, 7, 9, 0, 0, 174, 175, 3, 55, 27, 0, 175, 176, 3, 55, 27, 0, 176, 177, 3, 55, 27, 0, 177, 178, 3, 55, 27, 0, 178, 54, 1, 0, 0, 0, 179, 180, 7, 10, 0, 0, 180, 56, 1, 0, 0, 0, 11, 0, 78, 91, 97, 102, 109, 117, 138, 144, 150, 160, 1, 6, 0, 0] \ No newline at end of file diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseLexer.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseLexer.java new file mode 100644 index 0000000000000..b397a412d5e8e --- /dev/null +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseLexer.java @@ -0,0 +1,245 @@ +// ANTLR GENERATED CODE: DO NOT EDIT +package org.elasticsearch.xpack.kql.parser; + +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import org.antlr.v4.runtime.Lexer; +import org.antlr.v4.runtime.CharStream; +import org.antlr.v4.runtime.Token; +import org.antlr.v4.runtime.TokenStream; +import org.antlr.v4.runtime.*; +import org.antlr.v4.runtime.atn.*; +import org.antlr.v4.runtime.dfa.DFA; +import org.antlr.v4.runtime.misc.*; + +@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast", "CheckReturnValue", "this-escape"}) +class KqlBaseLexer extends Lexer { + static { RuntimeMetaData.checkVersion("4.13.1", RuntimeMetaData.VERSION); } + + protected static final DFA[] _decisionToDFA; + protected static final PredictionContextCache _sharedContextCache = + new PredictionContextCache(); + public static final int + DEFAULT_SKIP=1, AND=2, OR=3, NOT=4, COLON=5, OP_COMPARE=6, LEFT_PARENTHESIS=7, + RIGHT_PARENTHESIS=8, LEFT_CURLY_BRACKET=9, RIGHT_CURLY_BRACKET=10, UNQUOTED_LITERAL=11, + QUOTED_STRING=12, WILDCARD=13; + public static String[] channelNames = { + "DEFAULT_TOKEN_CHANNEL", "HIDDEN" + }; + + public static String[] modeNames = { + "DEFAULT_MODE" + }; + + private static String[] makeRuleNames() { + return new String[] { + "DEFAULT_SKIP", "AND", "OR", "NOT", "COLON", "OP_COMPARE", "LEFT_PARENTHESIS", + "RIGHT_PARENTHESIS", "LEFT_CURLY_BRACKET", "RIGHT_CURLY_BRACKET", "UNQUOTED_LITERAL", + "QUOTED_STRING", "WILDCARD", "WILDCARD_CHAR", "OP_LESS", "OP_LESS_EQ", + "OP_MORE", "OP_MORE_EQ", "UNQUOTED_LITERAL_CHAR", "QUOTED_CHAR", "WHITESPACE", + "ESCAPED_WHITESPACE", "NON_SPECIAL_CHAR", "ESCAPED_SPECIAL_CHAR", "ESCAPED_QUOTE", + "ESCAPE_UNICODE_SEQUENCE", "UNICODE_SEQUENCE", "HEX_DIGIT" + }; + } + public static final String[] ruleNames = makeRuleNames(); + + private static String[] makeLiteralNames() { + return new String[] { + null, null, "'and'", "'or'", "'not'", "':'", null, "'('", "')'", "'{'", + "'}'" + }; + } + private static final String[] _LITERAL_NAMES = makeLiteralNames(); + private static String[] makeSymbolicNames() { + return new String[] { + null, "DEFAULT_SKIP", "AND", 
"OR", "NOT", "COLON", "OP_COMPARE", "LEFT_PARENTHESIS", + "RIGHT_PARENTHESIS", "LEFT_CURLY_BRACKET", "RIGHT_CURLY_BRACKET", "UNQUOTED_LITERAL", + "QUOTED_STRING", "WILDCARD" + }; + } + private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); + public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); + + /** + * @deprecated Use {@link #VOCABULARY} instead. + */ + @Deprecated + public static final String[] tokenNames; + static { + tokenNames = new String[_SYMBOLIC_NAMES.length]; + for (int i = 0; i < tokenNames.length; i++) { + tokenNames[i] = VOCABULARY.getLiteralName(i); + if (tokenNames[i] == null) { + tokenNames[i] = VOCABULARY.getSymbolicName(i); + } + + if (tokenNames[i] == null) { + tokenNames[i] = ""; + } + } + } + + @Override + @Deprecated + public String[] getTokenNames() { + return tokenNames; + } + + @Override + + public Vocabulary getVocabulary() { + return VOCABULARY; + } + + + public KqlBaseLexer(CharStream input) { + super(input); + _interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache); + } + + @Override + public String getGrammarFileName() { return "KqlBase.g4"; } + + @Override + public String[] getRuleNames() { return ruleNames; } + + @Override + public String getSerializedATN() { return _serializedATN; } + + @Override + public String[] getChannelNames() { return channelNames; } + + @Override + public String[] getModeNames() { return modeNames; } + + @Override + public ATN getATN() { return _ATN; } + + public static final String _serializedATN = + "\u0004\u0000\r\u00b5\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002\u0001"+ + "\u0007\u0001\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004"+ + "\u0007\u0004\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007"+ + "\u0007\u0007\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b"+ + "\u0007\u000b\u0002\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0002"+ + 
"\u000f\u0007\u000f\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011\u0002"+ + "\u0012\u0007\u0012\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014\u0002"+ + "\u0015\u0007\u0015\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017\u0002"+ + "\u0018\u0007\u0018\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a\u0002"+ + "\u001b\u0007\u001b\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001"+ + "\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005O\b"+ + "\u0005\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\b\u0001\b"+ + "\u0001\t\u0001\t\u0001\n\u0005\nZ\b\n\n\n\f\n]\t\n\u0001\n\u0004\n`\b"+ + "\n\u000b\n\f\na\u0001\n\u0005\ne\b\n\n\n\f\nh\t\n\u0001\u000b\u0001\u000b"+ + "\u0005\u000bl\b\u000b\n\u000b\f\u000bo\t\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\f\u0004\ft\b\f\u000b\f\f\fu\u0001\r\u0001\r\u0001\u000e\u0001\u000e"+ + "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0011"+ + "\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012"+ + "\u0001\u0012\u0001\u0012\u0001\u0012\u0003\u0012\u008b\b\u0012\u0001\u0012"+ + "\u0001\u0012\u0001\u0012\u0001\u0012\u0003\u0012\u0091\b\u0012\u0001\u0013"+ + "\u0001\u0013\u0001\u0013\u0001\u0013\u0003\u0013\u0097\b\u0013\u0001\u0014"+ + "\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015"+ + "\u0001\u0015\u0003\u0015\u00a1\b\u0015\u0001\u0016\u0001\u0016\u0001\u0017"+ + "\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0019"+ + "\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a"+ + "\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0000\u0000\u001c\u0001"+ + "\u0001\u0003\u0002\u0005\u0003\u0007\u0004\t\u0005\u000b\u0006\r\u0007"+ + "\u000f\b\u0011\t\u0013\n\u0015\u000b\u0017\f\u0019\r\u001b\u0000\u001d"+ + 
"\u0000\u001f\u0000!\u0000#\u0000%\u0000\'\u0000)\u0000+\u0000-\u0000/"+ + "\u00001\u00003\u00005\u00007\u0000\u0001\u0000\u000b\u0002\u0000AAaa\u0002"+ + "\u0000NNnn\u0002\u0000DDdd\u0002\u0000OOoo\u0002\u0000RRrr\u0002\u0000"+ + "TTtt\u0001\u0000\"\"\u0004\u0000\t\n\r\r \u3000\u3000\t\u0000 \"\"("+ + "*::<<>>\\\\{{}}\u0002\u0000UUuu\u0003\u000009AFaf\u00b9\u0000\u0001\u0001"+ + "\u0000\u0000\u0000\u0000\u0003\u0001\u0000\u0000\u0000\u0000\u0005\u0001"+ + "\u0000\u0000\u0000\u0000\u0007\u0001\u0000\u0000\u0000\u0000\t\u0001\u0000"+ + "\u0000\u0000\u0000\u000b\u0001\u0000\u0000\u0000\u0000\r\u0001\u0000\u0000"+ + "\u0000\u0000\u000f\u0001\u0000\u0000\u0000\u0000\u0011\u0001\u0000\u0000"+ + "\u0000\u0000\u0013\u0001\u0000\u0000\u0000\u0000\u0015\u0001\u0000\u0000"+ + "\u0000\u0000\u0017\u0001\u0000\u0000\u0000\u0000\u0019\u0001\u0000\u0000"+ + "\u0000\u00019\u0001\u0000\u0000\u0000\u0003=\u0001\u0000\u0000\u0000\u0005"+ + "A\u0001\u0000\u0000\u0000\u0007D\u0001\u0000\u0000\u0000\tH\u0001\u0000"+ + "\u0000\u0000\u000bN\u0001\u0000\u0000\u0000\rP\u0001\u0000\u0000\u0000"+ + "\u000fR\u0001\u0000\u0000\u0000\u0011T\u0001\u0000\u0000\u0000\u0013V"+ + "\u0001\u0000\u0000\u0000\u0015[\u0001\u0000\u0000\u0000\u0017i\u0001\u0000"+ + "\u0000\u0000\u0019s\u0001\u0000\u0000\u0000\u001bw\u0001\u0000\u0000\u0000"+ + "\u001dy\u0001\u0000\u0000\u0000\u001f{\u0001\u0000\u0000\u0000!~\u0001"+ + "\u0000\u0000\u0000#\u0080\u0001\u0000\u0000\u0000%\u0090\u0001\u0000\u0000"+ + "\u0000\'\u0096\u0001\u0000\u0000\u0000)\u0098\u0001\u0000\u0000\u0000"+ + "+\u00a0\u0001\u0000\u0000\u0000-\u00a2\u0001\u0000\u0000\u0000/\u00a4"+ + "\u0001\u0000\u0000\u00001\u00a7\u0001\u0000\u0000\u00003\u00aa\u0001\u0000"+ + "\u0000\u00005\u00ad\u0001\u0000\u0000\u00007\u00b3\u0001\u0000\u0000\u0000"+ + "9:\u0003)\u0014\u0000:;\u0001\u0000\u0000\u0000;<\u0006\u0000\u0000\u0000"+ + "<\u0002\u0001\u0000\u0000\u0000=>\u0007\u0000\u0000\u0000>?\u0007\u0001"+ + 
"\u0000\u0000?@\u0007\u0002\u0000\u0000@\u0004\u0001\u0000\u0000\u0000"+ + "AB\u0007\u0003\u0000\u0000BC\u0007\u0004\u0000\u0000C\u0006\u0001\u0000"+ + "\u0000\u0000DE\u0007\u0001\u0000\u0000EF\u0007\u0003\u0000\u0000FG\u0007"+ + "\u0005\u0000\u0000G\b\u0001\u0000\u0000\u0000HI\u0005:\u0000\u0000I\n"+ + "\u0001\u0000\u0000\u0000JO\u0003\u001d\u000e\u0000KO\u0003!\u0010\u0000"+ + "LO\u0003\u001f\u000f\u0000MO\u0003#\u0011\u0000NJ\u0001\u0000\u0000\u0000"+ + "NK\u0001\u0000\u0000\u0000NL\u0001\u0000\u0000\u0000NM\u0001\u0000\u0000"+ + "\u0000O\f\u0001\u0000\u0000\u0000PQ\u0005(\u0000\u0000Q\u000e\u0001\u0000"+ + "\u0000\u0000RS\u0005)\u0000\u0000S\u0010\u0001\u0000\u0000\u0000TU\u0005"+ + "{\u0000\u0000U\u0012\u0001\u0000\u0000\u0000VW\u0005}\u0000\u0000W\u0014"+ + "\u0001\u0000\u0000\u0000XZ\u0003\u0019\f\u0000YX\u0001\u0000\u0000\u0000"+ + "Z]\u0001\u0000\u0000\u0000[Y\u0001\u0000\u0000\u0000[\\\u0001\u0000\u0000"+ + "\u0000\\_\u0001\u0000\u0000\u0000][\u0001\u0000\u0000\u0000^`\u0003%\u0012"+ + "\u0000_^\u0001\u0000\u0000\u0000`a\u0001\u0000\u0000\u0000a_\u0001\u0000"+ + "\u0000\u0000ab\u0001\u0000\u0000\u0000bf\u0001\u0000\u0000\u0000ce\u0003"+ + "\u0019\f\u0000dc\u0001\u0000\u0000\u0000eh\u0001\u0000\u0000\u0000fd\u0001"+ + "\u0000\u0000\u0000fg\u0001\u0000\u0000\u0000g\u0016\u0001\u0000\u0000"+ + "\u0000hf\u0001\u0000\u0000\u0000im\u0005\"\u0000\u0000jl\u0003\'\u0013"+ + "\u0000kj\u0001\u0000\u0000\u0000lo\u0001\u0000\u0000\u0000mk\u0001\u0000"+ + "\u0000\u0000mn\u0001\u0000\u0000\u0000np\u0001\u0000\u0000\u0000om\u0001"+ + "\u0000\u0000\u0000pq\u0005\"\u0000\u0000q\u0018\u0001\u0000\u0000\u0000"+ + "rt\u0003\u001b\r\u0000sr\u0001\u0000\u0000\u0000tu\u0001\u0000\u0000\u0000"+ + "us\u0001\u0000\u0000\u0000uv\u0001\u0000\u0000\u0000v\u001a\u0001\u0000"+ + "\u0000\u0000wx\u0005*\u0000\u0000x\u001c\u0001\u0000\u0000\u0000yz\u0005"+ + "<\u0000\u0000z\u001e\u0001\u0000\u0000\u0000{|\u0005<\u0000\u0000|}\u0005"+ + "=\u0000\u0000} 
\u0001\u0000\u0000\u0000~\u007f\u0005>\u0000\u0000\u007f"+ + "\"\u0001\u0000\u0000\u0000\u0080\u0081\u0005>\u0000\u0000\u0081\u0082"+ + "\u0005=\u0000\u0000\u0082$\u0001\u0000\u0000\u0000\u0083\u0091\u0003+"+ + "\u0015\u0000\u0084\u0091\u0003/\u0017\u0000\u0085\u0091\u00033\u0019\u0000"+ + "\u0086\u008a\u0005\\\u0000\u0000\u0087\u008b\u0003\u0003\u0001\u0000\u0088"+ + "\u008b\u0003\u0005\u0002\u0000\u0089\u008b\u0003\u0007\u0003\u0000\u008a"+ + "\u0087\u0001\u0000\u0000\u0000\u008a\u0088\u0001\u0000\u0000\u0000\u008a"+ + "\u0089\u0001\u0000\u0000\u0000\u008b\u0091\u0001\u0000\u0000\u0000\u008c"+ + "\u008d\u0003\u001b\r\u0000\u008d\u008e\u0003%\u0012\u0000\u008e\u0091"+ + "\u0001\u0000\u0000\u0000\u008f\u0091\u0003-\u0016\u0000\u0090\u0083\u0001"+ + "\u0000\u0000\u0000\u0090\u0084\u0001\u0000\u0000\u0000\u0090\u0085\u0001"+ + "\u0000\u0000\u0000\u0090\u0086\u0001\u0000\u0000\u0000\u0090\u008c\u0001"+ + "\u0000\u0000\u0000\u0090\u008f\u0001\u0000\u0000\u0000\u0091&\u0001\u0000"+ + "\u0000\u0000\u0092\u0097\u0003+\u0015\u0000\u0093\u0097\u00033\u0019\u0000"+ + "\u0094\u0097\u00031\u0018\u0000\u0095\u0097\b\u0006\u0000\u0000\u0096"+ + "\u0092\u0001\u0000\u0000\u0000\u0096\u0093\u0001\u0000\u0000\u0000\u0096"+ + "\u0094\u0001\u0000\u0000\u0000\u0096\u0095\u0001\u0000\u0000\u0000\u0097"+ + "(\u0001\u0000\u0000\u0000\u0098\u0099\u0007\u0007\u0000\u0000\u0099*\u0001"+ + "\u0000\u0000\u0000\u009a\u009b\u0005\\\u0000\u0000\u009b\u00a1\u0007\u0004"+ + "\u0000\u0000\u009c\u009d\u0005\\\u0000\u0000\u009d\u00a1\u0007\u0005\u0000"+ + "\u0000\u009e\u009f\u0005\\\u0000\u0000\u009f\u00a1\u0007\u0001\u0000\u0000"+ + "\u00a0\u009a\u0001\u0000\u0000\u0000\u00a0\u009c\u0001\u0000\u0000\u0000"+ + "\u00a0\u009e\u0001\u0000\u0000\u0000\u00a1,\u0001\u0000\u0000\u0000\u00a2"+ + "\u00a3\b\b\u0000\u0000\u00a3.\u0001\u0000\u0000\u0000\u00a4\u00a5\u0005"+ + "\\\u0000\u0000\u00a5\u00a6\u0007\b\u0000\u0000\u00a60\u0001\u0000\u0000"+ + 
"\u0000\u00a7\u00a8\u0005\\\u0000\u0000\u00a8\u00a9\u0005\"\u0000\u0000"+ + "\u00a92\u0001\u0000\u0000\u0000\u00aa\u00ab\u0005\\\u0000\u0000\u00ab"+ + "\u00ac\u00035\u001a\u0000\u00ac4\u0001\u0000\u0000\u0000\u00ad\u00ae\u0007"+ + "\t\u0000\u0000\u00ae\u00af\u00037\u001b\u0000\u00af\u00b0\u00037\u001b"+ + "\u0000\u00b0\u00b1\u00037\u001b\u0000\u00b1\u00b2\u00037\u001b\u0000\u00b2"+ + "6\u0001\u0000\u0000\u0000\u00b3\u00b4\u0007\n\u0000\u0000\u00b48\u0001"+ + "\u0000\u0000\u0000\u000b\u0000N[afmu\u008a\u0090\u0096\u00a0\u0001\u0006"+ + "\u0000\u0000"; + public static final ATN _ATN = + new ATNDeserializer().deserialize(_serializedATN.toCharArray()); + static { + _decisionToDFA = new DFA[_ATN.getNumberOfDecisions()]; + for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) { + _decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i); + } + } +} diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseListener.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseListener.java new file mode 100644 index 0000000000000..bce2044fa8175 --- /dev/null +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseListener.java @@ -0,0 +1,194 @@ +// ANTLR GENERATED CODE: DO NOT EDIT +package org.elasticsearch.xpack.kql.parser; + +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import org.antlr.v4.runtime.tree.ParseTreeListener; + +/** + * This interface defines a complete listener for a parse tree produced by + * {@link KqlBaseParser}. + */ +interface KqlBaseListener extends ParseTreeListener { + /** + * Enter a parse tree produced by {@link KqlBaseParser#topLevelQuery}. 
+ * @param ctx the parse tree + */ + void enterTopLevelQuery(KqlBaseParser.TopLevelQueryContext ctx); + /** + * Exit a parse tree produced by {@link KqlBaseParser#topLevelQuery}. + * @param ctx the parse tree + */ + void exitTopLevelQuery(KqlBaseParser.TopLevelQueryContext ctx); + /** + * Enter a parse tree produced by the {@code notQuery} + * labeled alternative in {@link KqlBaseParser#query}. + * @param ctx the parse tree + */ + void enterNotQuery(KqlBaseParser.NotQueryContext ctx); + /** + * Exit a parse tree produced by the {@code notQuery} + * labeled alternative in {@link KqlBaseParser#query}. + * @param ctx the parse tree + */ + void exitNotQuery(KqlBaseParser.NotQueryContext ctx); + /** + * Enter a parse tree produced by the {@code booleanQuery} + * labeled alternative in {@link KqlBaseParser#query}. + * @param ctx the parse tree + */ + void enterBooleanQuery(KqlBaseParser.BooleanQueryContext ctx); + /** + * Exit a parse tree produced by the {@code booleanQuery} + * labeled alternative in {@link KqlBaseParser#query}. + * @param ctx the parse tree + */ + void exitBooleanQuery(KqlBaseParser.BooleanQueryContext ctx); + /** + * Enter a parse tree produced by the {@code defaultQuery} + * labeled alternative in {@link KqlBaseParser#query}. + * @param ctx the parse tree + */ + void enterDefaultQuery(KqlBaseParser.DefaultQueryContext ctx); + /** + * Exit a parse tree produced by the {@code defaultQuery} + * labeled alternative in {@link KqlBaseParser#query}. + * @param ctx the parse tree + */ + void exitDefaultQuery(KqlBaseParser.DefaultQueryContext ctx); + /** + * Enter a parse tree produced by {@link KqlBaseParser#simpleQuery}. + * @param ctx the parse tree + */ + void enterSimpleQuery(KqlBaseParser.SimpleQueryContext ctx); + /** + * Exit a parse tree produced by {@link KqlBaseParser#simpleQuery}. 
+ * @param ctx the parse tree + */ + void exitSimpleQuery(KqlBaseParser.SimpleQueryContext ctx); + /** + * Enter a parse tree produced by {@link KqlBaseParser#expression}. + * @param ctx the parse tree + */ + void enterExpression(KqlBaseParser.ExpressionContext ctx); + /** + * Exit a parse tree produced by {@link KqlBaseParser#expression}. + * @param ctx the parse tree + */ + void exitExpression(KqlBaseParser.ExpressionContext ctx); + /** + * Enter a parse tree produced by {@link KqlBaseParser#nestedQuery}. + * @param ctx the parse tree + */ + void enterNestedQuery(KqlBaseParser.NestedQueryContext ctx); + /** + * Exit a parse tree produced by {@link KqlBaseParser#nestedQuery}. + * @param ctx the parse tree + */ + void exitNestedQuery(KqlBaseParser.NestedQueryContext ctx); + /** + * Enter a parse tree produced by {@link KqlBaseParser#parenthesizedQuery}. + * @param ctx the parse tree + */ + void enterParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx); + /** + * Exit a parse tree produced by {@link KqlBaseParser#parenthesizedQuery}. + * @param ctx the parse tree + */ + void exitParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx); + /** + * Enter a parse tree produced by {@link KqlBaseParser#fieldRangeQuery}. + * @param ctx the parse tree + */ + void enterFieldRangeQuery(KqlBaseParser.FieldRangeQueryContext ctx); + /** + * Exit a parse tree produced by {@link KqlBaseParser#fieldRangeQuery}. + * @param ctx the parse tree + */ + void exitFieldRangeQuery(KqlBaseParser.FieldRangeQueryContext ctx); + /** + * Enter a parse tree produced by {@link KqlBaseParser#fieldTermQuery}. + * @param ctx the parse tree + */ + void enterFieldTermQuery(KqlBaseParser.FieldTermQueryContext ctx); + /** + * Exit a parse tree produced by {@link KqlBaseParser#fieldTermQuery}. + * @param ctx the parse tree + */ + void exitFieldTermQuery(KqlBaseParser.FieldTermQueryContext ctx); + /** + * Enter a parse tree produced by {@link KqlBaseParser#fieldName}. 
+ * @param ctx the parse tree + */ + void enterFieldName(KqlBaseParser.FieldNameContext ctx); + /** + * Exit a parse tree produced by {@link KqlBaseParser#fieldName}. + * @param ctx the parse tree + */ + void exitFieldName(KqlBaseParser.FieldNameContext ctx); + /** + * Enter a parse tree produced by {@link KqlBaseParser#rangeQueryValue}. + * @param ctx the parse tree + */ + void enterRangeQueryValue(KqlBaseParser.RangeQueryValueContext ctx); + /** + * Exit a parse tree produced by {@link KqlBaseParser#rangeQueryValue}. + * @param ctx the parse tree + */ + void exitRangeQueryValue(KqlBaseParser.RangeQueryValueContext ctx); + /** + * Enter a parse tree produced by {@link KqlBaseParser#termQueryValue}. + * @param ctx the parse tree + */ + void enterTermQueryValue(KqlBaseParser.TermQueryValueContext ctx); + /** + * Exit a parse tree produced by {@link KqlBaseParser#termQueryValue}. + * @param ctx the parse tree + */ + void exitTermQueryValue(KqlBaseParser.TermQueryValueContext ctx); + /** + * Enter a parse tree produced by {@link KqlBaseParser#groupingTermExpression}. + * @param ctx the parse tree + */ + void enterGroupingTermExpression(KqlBaseParser.GroupingTermExpressionContext ctx); + /** + * Exit a parse tree produced by {@link KqlBaseParser#groupingTermExpression}. + * @param ctx the parse tree + */ + void exitGroupingTermExpression(KqlBaseParser.GroupingTermExpressionContext ctx); + /** + * Enter a parse tree produced by {@link KqlBaseParser#unquotedLiteralExpression}. + * @param ctx the parse tree + */ + void enterUnquotedLiteralExpression(KqlBaseParser.UnquotedLiteralExpressionContext ctx); + /** + * Exit a parse tree produced by {@link KqlBaseParser#unquotedLiteralExpression}. + * @param ctx the parse tree + */ + void exitUnquotedLiteralExpression(KqlBaseParser.UnquotedLiteralExpressionContext ctx); + /** + * Enter a parse tree produced by {@link KqlBaseParser#quotedStringExpression}. 
+ * @param ctx the parse tree + */ + void enterQuotedStringExpression(KqlBaseParser.QuotedStringExpressionContext ctx); + /** + * Exit a parse tree produced by {@link KqlBaseParser#quotedStringExpression}. + * @param ctx the parse tree + */ + void exitQuotedStringExpression(KqlBaseParser.QuotedStringExpressionContext ctx); + /** + * Enter a parse tree produced by {@link KqlBaseParser#wildcardExpression}. + * @param ctx the parse tree + */ + void enterWildcardExpression(KqlBaseParser.WildcardExpressionContext ctx); + /** + * Exit a parse tree produced by {@link KqlBaseParser#wildcardExpression}. + * @param ctx the parse tree + */ + void exitWildcardExpression(KqlBaseParser.WildcardExpressionContext ctx); +} diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java new file mode 100644 index 0000000000000..3bd9cc4104d2c --- /dev/null +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java @@ -0,0 +1,1203 @@ +// ANTLR GENERATED CODE: DO NOT EDIT +package org.elasticsearch.xpack.kql.parser; + +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import org.antlr.v4.runtime.atn.*; +import org.antlr.v4.runtime.dfa.DFA; +import org.antlr.v4.runtime.*; +import org.antlr.v4.runtime.misc.*; +import org.antlr.v4.runtime.tree.*; +import java.util.List; +import java.util.Iterator; +import java.util.ArrayList; + +@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast", "CheckReturnValue"}) +class KqlBaseParser extends Parser { + static { RuntimeMetaData.checkVersion("4.13.1", RuntimeMetaData.VERSION); } + + protected static final DFA[] _decisionToDFA; + protected static final PredictionContextCache _sharedContextCache = + new PredictionContextCache(); + public static final int + DEFAULT_SKIP=1, AND=2, OR=3, NOT=4, COLON=5, OP_COMPARE=6, LEFT_PARENTHESIS=7, + RIGHT_PARENTHESIS=8, LEFT_CURLY_BRACKET=9, RIGHT_CURLY_BRACKET=10, UNQUOTED_LITERAL=11, + QUOTED_STRING=12, WILDCARD=13; + public static final int + RULE_topLevelQuery = 0, RULE_query = 1, RULE_simpleQuery = 2, RULE_expression = 3, + RULE_nestedQuery = 4, RULE_parenthesizedQuery = 5, RULE_fieldRangeQuery = 6, + RULE_fieldTermQuery = 7, RULE_fieldName = 8, RULE_rangeQueryValue = 9, + RULE_termQueryValue = 10, RULE_groupingTermExpression = 11, RULE_unquotedLiteralExpression = 12, + RULE_quotedStringExpression = 13, RULE_wildcardExpression = 14; + private static String[] makeRuleNames() { + return new String[] { + "topLevelQuery", "query", "simpleQuery", "expression", "nestedQuery", + "parenthesizedQuery", "fieldRangeQuery", "fieldTermQuery", "fieldName", + "rangeQueryValue", "termQueryValue", "groupingTermExpression", "unquotedLiteralExpression", + "quotedStringExpression", "wildcardExpression" + }; + } + public static final String[] ruleNames = makeRuleNames(); + + private static String[] makeLiteralNames() { + return new String[] { + null, null, "'and'", "'or'", "'not'", "':'", null, "'('", "')'", "'{'", + "'}'" + }; + } + private static final String[] _LITERAL_NAMES = makeLiteralNames(); + private static String[] makeSymbolicNames() { + 
return new String[] { + null, "DEFAULT_SKIP", "AND", "OR", "NOT", "COLON", "OP_COMPARE", "LEFT_PARENTHESIS", + "RIGHT_PARENTHESIS", "LEFT_CURLY_BRACKET", "RIGHT_CURLY_BRACKET", "UNQUOTED_LITERAL", + "QUOTED_STRING", "WILDCARD" + }; + } + private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); + public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); + + /** + * @deprecated Use {@link #VOCABULARY} instead. + */ + @Deprecated + public static final String[] tokenNames; + static { + tokenNames = new String[_SYMBOLIC_NAMES.length]; + for (int i = 0; i < tokenNames.length; i++) { + tokenNames[i] = VOCABULARY.getLiteralName(i); + if (tokenNames[i] == null) { + tokenNames[i] = VOCABULARY.getSymbolicName(i); + } + + if (tokenNames[i] == null) { + tokenNames[i] = ""; + } + } + } + + @Override + @Deprecated + public String[] getTokenNames() { + return tokenNames; + } + + @Override + + public Vocabulary getVocabulary() { + return VOCABULARY; + } + + @Override + public String getGrammarFileName() { return "KqlBase.g4"; } + + @Override + public String[] getRuleNames() { return ruleNames; } + + @Override + public String getSerializedATN() { return _serializedATN; } + + @Override + public ATN getATN() { return _ATN; } + + public KqlBaseParser(TokenStream input) { + super(input); + _interp = new ParserATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache); + } + + @SuppressWarnings("CheckReturnValue") + public static class TopLevelQueryContext extends ParserRuleContext { + public TerminalNode EOF() { return getToken(KqlBaseParser.EOF, 0); } + public QueryContext query() { + return getRuleContext(QueryContext.class,0); + } + public TopLevelQueryContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_topLevelQuery; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) 
((KqlBaseListener)listener).enterTopLevelQuery(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitTopLevelQuery(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitTopLevelQuery(this); + else return visitor.visitChildren(this); + } + } + + public final TopLevelQueryContext topLevelQuery() throws RecognitionException { + TopLevelQueryContext _localctx = new TopLevelQueryContext(_ctx, getState()); + enterRule(_localctx, 0, RULE_topLevelQuery); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(31); + _errHandler.sync(this); + _la = _input.LA(1); + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 14480L) != 0)) { + { + setState(30); + query(0); + } + } + + setState(33); + match(EOF); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class QueryContext extends ParserRuleContext { + public QueryContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_query; } + + public QueryContext() { } + public void copyFrom(QueryContext ctx) { + super.copyFrom(ctx); + } + } + @SuppressWarnings("CheckReturnValue") + public static class NotQueryContext extends QueryContext { + public SimpleQueryContext subQuery; + public TerminalNode NOT() { return getToken(KqlBaseParser.NOT, 0); } + public SimpleQueryContext simpleQuery() { + return getRuleContext(SimpleQueryContext.class,0); + } + public NotQueryContext(QueryContext ctx) { copyFrom(ctx); } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) 
((KqlBaseListener)listener).enterNotQuery(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitNotQuery(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitNotQuery(this); + else return visitor.visitChildren(this); + } + } + @SuppressWarnings("CheckReturnValue") + public static class BooleanQueryContext extends QueryContext { + public List query() { + return getRuleContexts(QueryContext.class); + } + public QueryContext query(int i) { + return getRuleContext(QueryContext.class,i); + } + public TerminalNode AND() { return getToken(KqlBaseParser.AND, 0); } + public TerminalNode OR() { return getToken(KqlBaseParser.OR, 0); } + public BooleanQueryContext(QueryContext ctx) { copyFrom(ctx); } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterBooleanQuery(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitBooleanQuery(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitBooleanQuery(this); + else return visitor.visitChildren(this); + } + } + @SuppressWarnings("CheckReturnValue") + public static class DefaultQueryContext extends QueryContext { + public SimpleQueryContext simpleQuery() { + return getRuleContext(SimpleQueryContext.class,0); + } + public DefaultQueryContext(QueryContext ctx) { copyFrom(ctx); } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterDefaultQuery(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) 
((KqlBaseListener)listener).exitDefaultQuery(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitDefaultQuery(this); + else return visitor.visitChildren(this); + } + } + + public final QueryContext query() throws RecognitionException { + return query(0); + } + + private QueryContext query(int _p) throws RecognitionException { + ParserRuleContext _parentctx = _ctx; + int _parentState = getState(); + QueryContext _localctx = new QueryContext(_ctx, _parentState); + QueryContext _prevctx = _localctx; + int _startState = 2; + enterRecursionRule(_localctx, 2, RULE_query, _p); + int _la; + try { + int _alt; + enterOuterAlt(_localctx, 1); + { + setState(39); + _errHandler.sync(this); + switch (_input.LA(1)) { + case NOT: + { + _localctx = new NotQueryContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + + setState(36); + match(NOT); + setState(37); + ((NotQueryContext)_localctx).subQuery = simpleQuery(); + } + break; + case LEFT_PARENTHESIS: + case UNQUOTED_LITERAL: + case QUOTED_STRING: + case WILDCARD: + { + _localctx = new DefaultQueryContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + setState(38); + simpleQuery(); + } + break; + default: + throw new NoViableAltException(this); + } + _ctx.stop = _input.LT(-1); + setState(46); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,2,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { + if ( _parseListeners!=null ) triggerExitRuleEvent(); + _prevctx = _localctx; + { + { + _localctx = new BooleanQueryContext(new QueryContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_query); + setState(41); + if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); + setState(42); + _la = _input.LA(1); + if ( !(_la==AND || _la==OR) ) { + _errHandler.recoverInline(this); + } 
+ else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); + consume(); + } + setState(43); + query(4); + } + } + } + setState(48); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,2,_ctx); + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + unrollRecursionContexts(_parentctx); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class SimpleQueryContext extends ParserRuleContext { + public NestedQueryContext nestedQuery() { + return getRuleContext(NestedQueryContext.class,0); + } + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class,0); + } + public ParenthesizedQueryContext parenthesizedQuery() { + return getRuleContext(ParenthesizedQueryContext.class,0); + } + public SimpleQueryContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_simpleQuery; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterSimpleQuery(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitSimpleQuery(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitSimpleQuery(this); + else return visitor.visitChildren(this); + } + } + + public final SimpleQueryContext simpleQuery() throws RecognitionException { + SimpleQueryContext _localctx = new SimpleQueryContext(_ctx, getState()); + enterRule(_localctx, 4, RULE_simpleQuery); + try { + setState(52); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,3,_ctx) ) { + case 1: + enterOuterAlt(_localctx, 1); + { + 
setState(49); + nestedQuery(); + } + break; + case 2: + enterOuterAlt(_localctx, 2); + { + setState(50); + expression(); + } + break; + case 3: + enterOuterAlt(_localctx, 3); + { + setState(51); + parenthesizedQuery(); + } + break; + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class ExpressionContext extends ParserRuleContext { + public FieldTermQueryContext fieldTermQuery() { + return getRuleContext(FieldTermQueryContext.class,0); + } + public FieldRangeQueryContext fieldRangeQuery() { + return getRuleContext(FieldRangeQueryContext.class,0); + } + public ExpressionContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_expression; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterExpression(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitExpression(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitExpression(this); + else return visitor.visitChildren(this); + } + } + + public final ExpressionContext expression() throws RecognitionException { + ExpressionContext _localctx = new ExpressionContext(_ctx, getState()); + enterRule(_localctx, 6, RULE_expression); + try { + setState(56); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,4,_ctx) ) { + case 1: + enterOuterAlt(_localctx, 1); + { + setState(54); + fieldTermQuery(); + } + break; + case 2: + enterOuterAlt(_localctx, 2); + { + setState(55); + fieldRangeQuery(); + } + break; + } + } + catch 
(RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class NestedQueryContext extends ParserRuleContext { + public FieldNameContext fieldName() { + return getRuleContext(FieldNameContext.class,0); + } + public TerminalNode COLON() { return getToken(KqlBaseParser.COLON, 0); } + public TerminalNode LEFT_CURLY_BRACKET() { return getToken(KqlBaseParser.LEFT_CURLY_BRACKET, 0); } + public QueryContext query() { + return getRuleContext(QueryContext.class,0); + } + public TerminalNode RIGHT_CURLY_BRACKET() { return getToken(KqlBaseParser.RIGHT_CURLY_BRACKET, 0); } + public NestedQueryContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_nestedQuery; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterNestedQuery(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitNestedQuery(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitNestedQuery(this); + else return visitor.visitChildren(this); + } + } + + public final NestedQueryContext nestedQuery() throws RecognitionException { + NestedQueryContext _localctx = new NestedQueryContext(_ctx, getState()); + enterRule(_localctx, 8, RULE_nestedQuery); + try { + enterOuterAlt(_localctx, 1); + { + setState(58); + fieldName(); + setState(59); + match(COLON); + setState(60); + match(LEFT_CURLY_BRACKET); + setState(61); + query(0); + setState(62); + match(RIGHT_CURLY_BRACKET); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, 
re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class ParenthesizedQueryContext extends ParserRuleContext { + public TerminalNode LEFT_PARENTHESIS() { return getToken(KqlBaseParser.LEFT_PARENTHESIS, 0); } + public QueryContext query() { + return getRuleContext(QueryContext.class,0); + } + public TerminalNode RIGHT_PARENTHESIS() { return getToken(KqlBaseParser.RIGHT_PARENTHESIS, 0); } + public ParenthesizedQueryContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_parenthesizedQuery; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterParenthesizedQuery(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitParenthesizedQuery(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitParenthesizedQuery(this); + else return visitor.visitChildren(this); + } + } + + public final ParenthesizedQueryContext parenthesizedQuery() throws RecognitionException { + ParenthesizedQueryContext _localctx = new ParenthesizedQueryContext(_ctx, getState()); + enterRule(_localctx, 10, RULE_parenthesizedQuery); + try { + enterOuterAlt(_localctx, 1); + { + setState(64); + match(LEFT_PARENTHESIS); + setState(65); + query(0); + setState(66); + match(RIGHT_PARENTHESIS); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class FieldRangeQueryContext extends ParserRuleContext { + public Token operator; + public 
FieldNameContext fieldName() { + return getRuleContext(FieldNameContext.class,0); + } + public RangeQueryValueContext rangeQueryValue() { + return getRuleContext(RangeQueryValueContext.class,0); + } + public TerminalNode OP_COMPARE() { return getToken(KqlBaseParser.OP_COMPARE, 0); } + public FieldRangeQueryContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_fieldRangeQuery; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterFieldRangeQuery(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitFieldRangeQuery(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitFieldRangeQuery(this); + else return visitor.visitChildren(this); + } + } + + public final FieldRangeQueryContext fieldRangeQuery() throws RecognitionException { + FieldRangeQueryContext _localctx = new FieldRangeQueryContext(_ctx, getState()); + enterRule(_localctx, 12, RULE_fieldRangeQuery); + try { + enterOuterAlt(_localctx, 1); + { + setState(68); + fieldName(); + setState(69); + ((FieldRangeQueryContext)_localctx).operator = match(OP_COMPARE); + setState(70); + rangeQueryValue(); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class FieldTermQueryContext extends ParserRuleContext { + public TermQueryValueContext termQueryValue() { + return getRuleContext(TermQueryValueContext.class,0); + } + public FieldNameContext fieldName() { + return getRuleContext(FieldNameContext.class,0); + } + public TerminalNode COLON() { return 
getToken(KqlBaseParser.COLON, 0); } + public FieldTermQueryContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_fieldTermQuery; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterFieldTermQuery(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitFieldTermQuery(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitFieldTermQuery(this); + else return visitor.visitChildren(this); + } + } + + public final FieldTermQueryContext fieldTermQuery() throws RecognitionException { + FieldTermQueryContext _localctx = new FieldTermQueryContext(_ctx, getState()); + enterRule(_localctx, 14, RULE_fieldTermQuery); + try { + enterOuterAlt(_localctx, 1); + { + setState(75); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,5,_ctx) ) { + case 1: + { + setState(72); + fieldName(); + setState(73); + match(COLON); + } + break; + } + setState(77); + termQueryValue(); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class FieldNameContext extends ParserRuleContext { + public WildcardExpressionContext wildcardExpression() { + return getRuleContext(WildcardExpressionContext.class,0); + } + public UnquotedLiteralExpressionContext unquotedLiteralExpression() { + return getRuleContext(UnquotedLiteralExpressionContext.class,0); + } + public QuotedStringExpressionContext quotedStringExpression() { + return getRuleContext(QuotedStringExpressionContext.class,0); + } + public 
FieldNameContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_fieldName; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterFieldName(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitFieldName(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitFieldName(this); + else return visitor.visitChildren(this); + } + } + + public final FieldNameContext fieldName() throws RecognitionException { + FieldNameContext _localctx = new FieldNameContext(_ctx, getState()); + enterRule(_localctx, 16, RULE_fieldName); + try { + setState(82); + _errHandler.sync(this); + switch (_input.LA(1)) { + case WILDCARD: + enterOuterAlt(_localctx, 1); + { + setState(79); + wildcardExpression(); + } + break; + case UNQUOTED_LITERAL: + enterOuterAlt(_localctx, 2); + { + setState(80); + unquotedLiteralExpression(); + } + break; + case QUOTED_STRING: + enterOuterAlt(_localctx, 3); + { + setState(81); + quotedStringExpression(); + } + break; + default: + throw new NoViableAltException(this); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class RangeQueryValueContext extends ParserRuleContext { + public UnquotedLiteralExpressionContext unquotedLiteralExpression() { + return getRuleContext(UnquotedLiteralExpressionContext.class,0); + } + public QuotedStringExpressionContext quotedStringExpression() { + return getRuleContext(QuotedStringExpressionContext.class,0); + } + public RangeQueryValueContext(ParserRuleContext 
parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_rangeQueryValue; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterRangeQueryValue(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitRangeQueryValue(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitRangeQueryValue(this); + else return visitor.visitChildren(this); + } + } + + public final RangeQueryValueContext rangeQueryValue() throws RecognitionException { + RangeQueryValueContext _localctx = new RangeQueryValueContext(_ctx, getState()); + enterRule(_localctx, 18, RULE_rangeQueryValue); + try { + setState(86); + _errHandler.sync(this); + switch (_input.LA(1)) { + case UNQUOTED_LITERAL: + enterOuterAlt(_localctx, 1); + { + setState(84); + unquotedLiteralExpression(); + } + break; + case QUOTED_STRING: + enterOuterAlt(_localctx, 2); + { + setState(85); + quotedStringExpression(); + } + break; + default: + throw new NoViableAltException(this); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class TermQueryValueContext extends ParserRuleContext { + public UnquotedLiteralExpressionContext termValue; + public WildcardExpressionContext wildcardExpression() { + return getRuleContext(WildcardExpressionContext.class,0); + } + public QuotedStringExpressionContext quotedStringExpression() { + return getRuleContext(QuotedStringExpressionContext.class,0); + } + public UnquotedLiteralExpressionContext unquotedLiteralExpression() { + return 
getRuleContext(UnquotedLiteralExpressionContext.class,0); + } + public GroupingTermExpressionContext groupingTermExpression() { + return getRuleContext(GroupingTermExpressionContext.class,0); + } + public TermQueryValueContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_termQueryValue; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterTermQueryValue(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitTermQueryValue(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitTermQueryValue(this); + else return visitor.visitChildren(this); + } + } + + public final TermQueryValueContext termQueryValue() throws RecognitionException { + TermQueryValueContext _localctx = new TermQueryValueContext(_ctx, getState()); + enterRule(_localctx, 20, RULE_termQueryValue); + try { + setState(92); + _errHandler.sync(this); + switch (_input.LA(1)) { + case WILDCARD: + enterOuterAlt(_localctx, 1); + { + setState(88); + wildcardExpression(); + } + break; + case QUOTED_STRING: + enterOuterAlt(_localctx, 2); + { + setState(89); + quotedStringExpression(); + } + break; + case UNQUOTED_LITERAL: + enterOuterAlt(_localctx, 3); + { + setState(90); + ((TermQueryValueContext)_localctx).termValue = unquotedLiteralExpression(); + } + break; + case LEFT_PARENTHESIS: + enterOuterAlt(_localctx, 4); + { + setState(91); + groupingTermExpression(); + } + break; + default: + throw new NoViableAltException(this); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + 
@SuppressWarnings("CheckReturnValue") + public static class GroupingTermExpressionContext extends ParserRuleContext { + public TerminalNode LEFT_PARENTHESIS() { return getToken(KqlBaseParser.LEFT_PARENTHESIS, 0); } + public UnquotedLiteralExpressionContext unquotedLiteralExpression() { + return getRuleContext(UnquotedLiteralExpressionContext.class,0); + } + public TerminalNode RIGHT_PARENTHESIS() { return getToken(KqlBaseParser.RIGHT_PARENTHESIS, 0); } + public GroupingTermExpressionContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_groupingTermExpression; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterGroupingTermExpression(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitGroupingTermExpression(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitGroupingTermExpression(this); + else return visitor.visitChildren(this); + } + } + + public final GroupingTermExpressionContext groupingTermExpression() throws RecognitionException { + GroupingTermExpressionContext _localctx = new GroupingTermExpressionContext(_ctx, getState()); + enterRule(_localctx, 22, RULE_groupingTermExpression); + try { + enterOuterAlt(_localctx, 1); + { + setState(94); + match(LEFT_PARENTHESIS); + setState(95); + unquotedLiteralExpression(); + setState(96); + match(RIGHT_PARENTHESIS); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class UnquotedLiteralExpressionContext extends ParserRuleContext { + public 
List UNQUOTED_LITERAL() { return getTokens(KqlBaseParser.UNQUOTED_LITERAL); } + public TerminalNode UNQUOTED_LITERAL(int i) { + return getToken(KqlBaseParser.UNQUOTED_LITERAL, i); + } + public UnquotedLiteralExpressionContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_unquotedLiteralExpression; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterUnquotedLiteralExpression(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitUnquotedLiteralExpression(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitUnquotedLiteralExpression(this); + else return visitor.visitChildren(this); + } + } + + public final UnquotedLiteralExpressionContext unquotedLiteralExpression() throws RecognitionException { + UnquotedLiteralExpressionContext _localctx = new UnquotedLiteralExpressionContext(_ctx, getState()); + enterRule(_localctx, 24, RULE_unquotedLiteralExpression); + try { + int _alt; + enterOuterAlt(_localctx, 1); + { + setState(99); + _errHandler.sync(this); + _alt = 1; + do { + switch (_alt) { + case 1: + { + { + setState(98); + match(UNQUOTED_LITERAL); + } + } + break; + default: + throw new NoViableAltException(this); + } + setState(101); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,9,_ctx); + } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class QuotedStringExpressionContext extends 
ParserRuleContext { + public TerminalNode QUOTED_STRING() { return getToken(KqlBaseParser.QUOTED_STRING, 0); } + public QuotedStringExpressionContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_quotedStringExpression; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterQuotedStringExpression(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitQuotedStringExpression(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitQuotedStringExpression(this); + else return visitor.visitChildren(this); + } + } + + public final QuotedStringExpressionContext quotedStringExpression() throws RecognitionException { + QuotedStringExpressionContext _localctx = new QuotedStringExpressionContext(_ctx, getState()); + enterRule(_localctx, 26, RULE_quotedStringExpression); + try { + enterOuterAlt(_localctx, 1); + { + setState(103); + match(QUOTED_STRING); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class WildcardExpressionContext extends ParserRuleContext { + public TerminalNode WILDCARD() { return getToken(KqlBaseParser.WILDCARD, 0); } + public WildcardExpressionContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_wildcardExpression; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterWildcardExpression(this); + } + 
@Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitWildcardExpression(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitWildcardExpression(this); + else return visitor.visitChildren(this); + } + } + + public final WildcardExpressionContext wildcardExpression() throws RecognitionException { + WildcardExpressionContext _localctx = new WildcardExpressionContext(_ctx, getState()); + enterRule(_localctx, 28, RULE_wildcardExpression); + try { + enterOuterAlt(_localctx, 1); + { + setState(105); + match(WILDCARD); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { + switch (ruleIndex) { + case 1: + return query_sempred((QueryContext)_localctx, predIndex); + } + return true; + } + private boolean query_sempred(QueryContext _localctx, int predIndex) { + switch (predIndex) { + case 0: + return precpred(_ctx, 3); + } + return true; + } + + public static final String _serializedATN = + "\u0004\u0001\rl\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ + "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ + "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ + "\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0001\u0000\u0003\u0000"+ + " \b\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0003\u0001(\b\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0005\u0001-\b\u0001\n\u0001\f\u00010\t\u0001\u0001\u0002\u0001\u0002"+ + "\u0001\u0002\u0003\u00025\b\u0002\u0001\u0003\u0001\u0003\u0003\u0003"+ + 
"9\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+ + "\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006"+ + "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007"+ + "\u0003\u0007L\b\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b"+ + "\u0003\bS\b\b\u0001\t\u0001\t\u0003\tW\b\t\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0003\n]\b\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ + "\f\u0004\fd\b\f\u000b\f\f\fe\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001"+ + "\u000e\u0000\u0001\u0002\u000f\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ + "\u0012\u0014\u0016\u0018\u001a\u001c\u0000\u0001\u0001\u0000\u0002\u0003"+ + "j\u0000\u001f\u0001\u0000\u0000\u0000\u0002\'\u0001\u0000\u0000\u0000"+ + "\u00044\u0001\u0000\u0000\u0000\u00068\u0001\u0000\u0000\u0000\b:\u0001"+ + "\u0000\u0000\u0000\n@\u0001\u0000\u0000\u0000\fD\u0001\u0000\u0000\u0000"+ + "\u000eK\u0001\u0000\u0000\u0000\u0010R\u0001\u0000\u0000\u0000\u0012V"+ + "\u0001\u0000\u0000\u0000\u0014\\\u0001\u0000\u0000\u0000\u0016^\u0001"+ + "\u0000\u0000\u0000\u0018c\u0001\u0000\u0000\u0000\u001ag\u0001\u0000\u0000"+ + "\u0000\u001ci\u0001\u0000\u0000\u0000\u001e \u0003\u0002\u0001\u0000\u001f"+ + "\u001e\u0001\u0000\u0000\u0000\u001f \u0001\u0000\u0000\u0000 !\u0001"+ + "\u0000\u0000\u0000!\"\u0005\u0000\u0000\u0001\"\u0001\u0001\u0000\u0000"+ + "\u0000#$\u0006\u0001\uffff\uffff\u0000$%\u0005\u0004\u0000\u0000%(\u0003"+ + "\u0004\u0002\u0000&(\u0003\u0004\u0002\u0000\'#\u0001\u0000\u0000\u0000"+ + "\'&\u0001\u0000\u0000\u0000(.\u0001\u0000\u0000\u0000)*\n\u0003\u0000"+ + "\u0000*+\u0007\u0000\u0000\u0000+-\u0003\u0002\u0001\u0004,)\u0001\u0000"+ + "\u0000\u0000-0\u0001\u0000\u0000\u0000.,\u0001\u0000\u0000\u0000./\u0001"+ + "\u0000\u0000\u0000/\u0003\u0001\u0000\u0000\u00000.\u0001\u0000\u0000"+ + "\u000015\u0003\b\u0004\u000025\u0003\u0006\u0003\u000035\u0003\n\u0005"+ + "\u000041\u0001\u0000\u0000\u000042\u0001\u0000\u0000\u000043\u0001\u0000"+ 
+ "\u0000\u00005\u0005\u0001\u0000\u0000\u000069\u0003\u000e\u0007\u0000"+ + "79\u0003\f\u0006\u000086\u0001\u0000\u0000\u000087\u0001\u0000\u0000\u0000"+ + "9\u0007\u0001\u0000\u0000\u0000:;\u0003\u0010\b\u0000;<\u0005\u0005\u0000"+ + "\u0000<=\u0005\t\u0000\u0000=>\u0003\u0002\u0001\u0000>?\u0005\n\u0000"+ + "\u0000?\t\u0001\u0000\u0000\u0000@A\u0005\u0007\u0000\u0000AB\u0003\u0002"+ + "\u0001\u0000BC\u0005\b\u0000\u0000C\u000b\u0001\u0000\u0000\u0000DE\u0003"+ + "\u0010\b\u0000EF\u0005\u0006\u0000\u0000FG\u0003\u0012\t\u0000G\r\u0001"+ + "\u0000\u0000\u0000HI\u0003\u0010\b\u0000IJ\u0005\u0005\u0000\u0000JL\u0001"+ + "\u0000\u0000\u0000KH\u0001\u0000\u0000\u0000KL\u0001\u0000\u0000\u0000"+ + "LM\u0001\u0000\u0000\u0000MN\u0003\u0014\n\u0000N\u000f\u0001\u0000\u0000"+ + "\u0000OS\u0003\u001c\u000e\u0000PS\u0003\u0018\f\u0000QS\u0003\u001a\r"+ + "\u0000RO\u0001\u0000\u0000\u0000RP\u0001\u0000\u0000\u0000RQ\u0001\u0000"+ + "\u0000\u0000S\u0011\u0001\u0000\u0000\u0000TW\u0003\u0018\f\u0000UW\u0003"+ + "\u001a\r\u0000VT\u0001\u0000\u0000\u0000VU\u0001\u0000\u0000\u0000W\u0013"+ + "\u0001\u0000\u0000\u0000X]\u0003\u001c\u000e\u0000Y]\u0003\u001a\r\u0000"+ + "Z]\u0003\u0018\f\u0000[]\u0003\u0016\u000b\u0000\\X\u0001\u0000\u0000"+ + "\u0000\\Y\u0001\u0000\u0000\u0000\\Z\u0001\u0000\u0000\u0000\\[\u0001"+ + "\u0000\u0000\u0000]\u0015\u0001\u0000\u0000\u0000^_\u0005\u0007\u0000"+ + "\u0000_`\u0003\u0018\f\u0000`a\u0005\b\u0000\u0000a\u0017\u0001\u0000"+ + "\u0000\u0000bd\u0005\u000b\u0000\u0000cb\u0001\u0000\u0000\u0000de\u0001"+ + "\u0000\u0000\u0000ec\u0001\u0000\u0000\u0000ef\u0001\u0000\u0000\u0000"+ + "f\u0019\u0001\u0000\u0000\u0000gh\u0005\f\u0000\u0000h\u001b\u0001\u0000"+ + "\u0000\u0000ij\u0005\r\u0000\u0000j\u001d\u0001\u0000\u0000\u0000\n\u001f"+ + "\'.48KRV\\e"; + public static final ATN _ATN = + new ATNDeserializer().deserialize(_serializedATN.toCharArray()); + static { + _decisionToDFA = new DFA[_ATN.getNumberOfDecisions()]; + for (int i = 0; i < 
_ATN.getNumberOfDecisions(); i++) { + _decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i); + } + } +} diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseVisitor.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseVisitor.java new file mode 100644 index 0000000000000..55fa21f0e899d --- /dev/null +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseVisitor.java @@ -0,0 +1,126 @@ +// ANTLR GENERATED CODE: DO NOT EDIT +package org.elasticsearch.xpack.kql.parser; + +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import org.antlr.v4.runtime.tree.ParseTreeVisitor; + +/** + * This interface defines a complete generic visitor for a parse tree produced + * by {@link KqlBaseParser}. + * + * @param The return type of the visit operation. Use {@link Void} for + * operations with no return type. + */ +interface KqlBaseVisitor extends ParseTreeVisitor { + /** + * Visit a parse tree produced by {@link KqlBaseParser#topLevelQuery}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitTopLevelQuery(KqlBaseParser.TopLevelQueryContext ctx); + /** + * Visit a parse tree produced by the {@code notQuery} + * labeled alternative in {@link KqlBaseParser#query}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitNotQuery(KqlBaseParser.NotQueryContext ctx); + /** + * Visit a parse tree produced by the {@code booleanQuery} + * labeled alternative in {@link KqlBaseParser#query}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitBooleanQuery(KqlBaseParser.BooleanQueryContext ctx); + /** + * Visit a parse tree produced by the {@code defaultQuery} + * labeled alternative in {@link KqlBaseParser#query}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitDefaultQuery(KqlBaseParser.DefaultQueryContext ctx); + /** + * Visit a parse tree produced by {@link KqlBaseParser#simpleQuery}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitSimpleQuery(KqlBaseParser.SimpleQueryContext ctx); + /** + * Visit a parse tree produced by {@link KqlBaseParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExpression(KqlBaseParser.ExpressionContext ctx); + /** + * Visit a parse tree produced by {@link KqlBaseParser#nestedQuery}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitNestedQuery(KqlBaseParser.NestedQueryContext ctx); + /** + * Visit a parse tree produced by {@link KqlBaseParser#parenthesizedQuery}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx); + /** + * Visit a parse tree produced by {@link KqlBaseParser#fieldRangeQuery}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitFieldRangeQuery(KqlBaseParser.FieldRangeQueryContext ctx); + /** + * Visit a parse tree produced by {@link KqlBaseParser#fieldTermQuery}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitFieldTermQuery(KqlBaseParser.FieldTermQueryContext ctx); + /** + * Visit a parse tree produced by {@link KqlBaseParser#fieldName}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitFieldName(KqlBaseParser.FieldNameContext ctx); + /** + * Visit a parse tree produced by {@link KqlBaseParser#rangeQueryValue}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitRangeQueryValue(KqlBaseParser.RangeQueryValueContext ctx); + /** + * Visit a parse tree produced by {@link KqlBaseParser#termQueryValue}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitTermQueryValue(KqlBaseParser.TermQueryValueContext ctx); + /** + * Visit a parse tree produced by {@link KqlBaseParser#groupingTermExpression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitGroupingTermExpression(KqlBaseParser.GroupingTermExpressionContext ctx); + /** + * Visit a parse tree produced by {@link KqlBaseParser#unquotedLiteralExpression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitUnquotedLiteralExpression(KqlBaseParser.UnquotedLiteralExpressionContext ctx); + /** + * Visit a parse tree produced by {@link KqlBaseParser#quotedStringExpression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitQuotedStringExpression(KqlBaseParser.QuotedStringExpressionContext ctx); + /** + * Visit a parse tree produced by {@link KqlBaseParser#wildcardExpression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitWildcardExpression(KqlBaseParser.WildcardExpressionContext ctx); +} diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParser.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParser.java new file mode 100644 index 0000000000000..41bda7524c653 --- /dev/null +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParser.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.kql.parser; + +import org.antlr.v4.runtime.BaseErrorListener; +import org.antlr.v4.runtime.CharStreams; +import org.antlr.v4.runtime.CommonTokenStream; +import org.antlr.v4.runtime.ParserRuleContext; +import org.antlr.v4.runtime.RecognitionException; +import org.antlr.v4.runtime.Recognizer; +import org.antlr.v4.runtime.atn.PredictionMode; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; + +import java.util.function.BiFunction; +import java.util.function.Function; + +public class KqlParser { + private static final Logger log = LogManager.getLogger(KqlParser.class); + + public QueryBuilder parseKqlQuery(String kqlQuery, SearchExecutionContext searchExecutionContext) { + if (log.isDebugEnabled()) { + log.debug("Parsing KQL query: {}", kqlQuery); + } + + return invokeParser(kqlQuery, searchExecutionContext, KqlBaseParser::topLevelQuery, KqlAstBuilder::toQueryBuilder); + } + + private T invokeParser( + String kqlQuery, + SearchExecutionContext searchExecutionContext, + Function parseFunction, + BiFunction visitor + ) { + KqlBaseLexer lexer = new KqlBaseLexer(CharStreams.fromString(kqlQuery)); + + lexer.removeErrorListeners(); + lexer.addErrorListener(ERROR_LISTENER); + + CommonTokenStream tokenStream = new CommonTokenStream(lexer); + KqlBaseParser parser = new KqlBaseParser(tokenStream); + + parser.removeErrorListeners(); + parser.addErrorListener(ERROR_LISTENER); + + parser.getInterpreter().setPredictionMode(PredictionMode.SLL); + + ParserRuleContext tree = parseFunction.apply(parser); + + if (log.isTraceEnabled()) { + log.trace("Parse tree: {}", tree.toStringTree()); + } + + return visitor.apply(new KqlAstBuilder(searchExecutionContext), tree); + } + + private static final BaseErrorListener ERROR_LISTENER = new BaseErrorListener() { + @Override + public void syntaxError( + 
Recognizer recognizer, + Object offendingSymbol, + int line, + int charPositionInLine, + String message, + RecognitionException e + ) { + throw new KqlParsingException(message, line, charPositionInLine, e); + } + }; +} diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParsingException.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParsingException.java new file mode 100644 index 0000000000000..e6ac9020174f2 --- /dev/null +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParsingException.java @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.kql.parser; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.rest.RestStatus; + +import static org.elasticsearch.common.logging.LoggerMessageFormat.format; + +public class KqlParsingException extends ElasticsearchException { + + private final int line; + private final int charPositionInLine; + + public KqlParsingException(String message, Exception cause, int line, int charPositionInLine) { + super(message, cause); + this.line = line; + this.charPositionInLine = charPositionInLine; + } + + public KqlParsingException(String message, int line, int charPositionInLine, Object... args) { + super(message, args); + this.line = line; + this.charPositionInLine = charPositionInLine; + } + + public KqlParsingException(String message, Throwable cause, int line, int charPositionInLine, Object... 
args) { + super(message, cause, args); + this.line = line; + this.charPositionInLine = charPositionInLine; + } + + public int getLineNumber() { + return line; + } + + public int getColumnNumber() { + return charPositionInLine + 1; + } + + public String getErrorMessage() { + return super.getMessage(); + } + + @Override + public String getMessage() { + return format("line {}:{}: {}", getLineNumber(), getColumnNumber(), getErrorMessage()); + } + + @Override + public RestStatus status() { + return RestStatus.BAD_REQUEST; + } +} diff --git a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlParserTests.java b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlParserTests.java new file mode 100644 index 0000000000000..58b162409412d --- /dev/null +++ b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlParserTests.java @@ -0,0 +1,115 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.kql.parser; + +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.test.AbstractBuilderTestCase; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.net.URL; +import java.net.URLConnection; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.isA; + +public class KqlParserTests extends AbstractBuilderTestCase { + + public void testEmptyQueryParsing() { + KqlParser parser = new KqlParser(); + SearchExecutionContext searchExecutionContext = createSearchExecutionContext(); + assertThat(parser.parseKqlQuery("", searchExecutionContext), isA(MatchAllQueryBuilder.class)); + } + + public void testSupportedQueries() throws Exception { + KqlParser parser = new KqlParser(); + SearchExecutionContext searchExecutionContext = createSearchExecutionContext(); + + for (String query : readQueries("/supported-queries")) { + try { + parser.parseKqlQuery(query, searchExecutionContext); + } catch (Throwable e) { + throw new AssertionError("Unexpected error during query parsing [ " + query + "]", e); + } + } + } + + public void testUnsupportedQueries() throws Exception { + KqlParser parser = new KqlParser(); + SearchExecutionContext searchExecutionContext = createSearchExecutionContext(); + + for (String query : readQueries("/unsupported-queries")) { + assertThrows( + "Was expecting a KqlParsingException exception to be thrown while parsing query [" + query + "]", + KqlParsingException.class, + () -> parser.parseKqlQuery(query, searchExecutionContext) + ); + } + } + + public void testSyntaxErrorsHandling() { + KqlParser parser = new KqlParser(); + 
SearchExecutionContext searchExecutionContext = createSearchExecutionContext(); + + { + KqlParsingException e = assertThrows( + KqlParsingException.class, + () -> parser.parseKqlQuery("foo: \"bar", searchExecutionContext) + ); + assertThat(e.getLineNumber(), equalTo(1)); + assertThat(e.getColumnNumber(), equalTo(6)); + assertThat(e.getMessage(), equalTo("line 1:6: token recognition error at: '\"bar'")); + } + + { + KqlParsingException e = assertThrows( + KqlParsingException.class, + () -> parser.parseKqlQuery("foo: (bar baz AND qux", searchExecutionContext) + ); + assertThat(e.getLineNumber(), equalTo(1)); + assertThat(e.getColumnNumber(), equalTo(15)); + assertThat(e.getMessage(), equalTo("line 1:15: missing ')' at 'AND'")); + } + } + + private static List readQueries(String source) throws Exception { + URL url = KqlParserTests.class.getResource(source); + Objects.requireNonNull(source, "Cannot find resource " + url); + + List queries = new ArrayList<>(); + + try (BufferedReader reader = new BufferedReader(new InputStreamReader(readFromJarUrl(url), StandardCharsets.UTF_8))) { + String line; + + while ((line = reader.readLine()) != null) { + String query = line.trim(); + // ignore comments + if (query.isEmpty() == false && query.startsWith("//") == false) { + queries.add(line.trim()); + } + } + } + return queries; + } + + @SuppressForbidden(reason = "test reads from jar") + private static InputStream readFromJarUrl(URL source) throws IOException { + URLConnection con = source.openConnection(); + // do not to cache files (to avoid keeping file handles around) + con.setUseCaches(false); + return con.getInputStream(); + } +} diff --git a/x-pack/plugin/kql/src/test/resources/supported-queries b/x-pack/plugin/kql/src/test/resources/supported-queries new file mode 100644 index 0000000000000..d750f16149112 --- /dev/null +++ b/x-pack/plugin/kql/src/test/resources/supported-queries @@ -0,0 +1,111 @@ +// Match all queries +* +*:* +(*:*) + +// Queries with no field +200 +foo 
+foo bar +(foo bar) +foo* +*foo +f*oo +"foo bar" + +// Queries with all fields + *:200 + *:foo + *:foo bar + *:foo* + *:f*oo + *: *foo + *:"foo bar" + +// Querying a field +foo_field:200 +foo_field:foo +foo_field:foo bar +foo_field:(foo bar) +foo_field:foo* +foo_field: f*oo +foo_field: *foo +foo_field:"foo bar" +foo_field.subfield:foo +foo_*_field:foo +foo_field:* +foo_*:* + +// Range queries +foo_field<200 +foo_field=200 +foo_field>=foo +foo_field>"foo bar" +foo_field<=foo +foo_field>=foo + +// Boolean queries +NOT foo +NOT foo bar +NOT foo_field:foo +NOT foo_fieldbar +(foo_field:foo) AND (foo_field:foo bar) +foo_field:foo OR foo_field:foo bar +NOT(foo_field:foo OR foo_field:foo bar) +NOT(foo_field:foo AND foo_field:foo bar) +NOT foo_field:foo AND NOT foo_field:foo bar +(NOT foo_field:foo) AND (NOT foo_field:foo bar) +NOT(foo_field:foo) AND NOT(foo_field:foo bar) +foo_field:foo AND foo_field:foo bar AND foo bar +foo_field:foo AND foo_field:foo bar OR foo bar +foo_field:foo OR foo_field:foo bar OR foo bar +foo_field:foo OR foo_field:foo bar AND foo bar +foo_field:foo AND (foo_field:foo bar OR foo bar) +foo_field:foo AND (foo_field:foo bar OR foo bar) +foo_field:foo OR (foo_field:foo bar OR foo bar) + +// Nested queries +nested_field: { NOT foo } +nested_field: { NOT foo bar } +nested_field: { NOT foo_field:foo } +nested_field: { foo_field:foo AND foo_field:foo bar } +nested_field: { foo_fieldbar } +nested_field: { (foo_field:foo) AND (foo_field:foo bar) } +nested_field: { foo_field:foo OR foo_field:foo bar } +nested_field: { NOT(foo_field:foo OR foo_field:foo bar) } +nested_field: { NOT(foo_field:foo AND foo_field:foo bar) } +nested_field: { NOT foo_field:foo AND NOT foo_field:foo bar } +nested_field: { (NOT foo_field:foo) AND (NOT foo_field:foo bar) } +nested_field: { NOT(foo_field:foo) AND NOT(foo_field:foo bar) } +nested_field: { foo_field:foo AND foo_field:foo bar AND foo bar } +nested_field: { foo_field:foo AND foo_field:foo bar OR foo bar } +nested_field: { 
foo_field:foo OR foo_field:foo bar OR foo bar } +nested_field: { foo_field:foo OR foo_field:foo bar AND foo bar } +nested_field: { foo_field:foo AND (foo_field:foo bar OR foo bar) } +nested_field: { foo_field:foo AND (foo_field:foo bar OR foo bar) } +nested_field: { foo_field:foo OR (foo_field:foo bar OR foo bar) } +nested_field: { sub_nested_field : { foo_field:foo } AND foo_field:foo bar } + +// Queries with escape sequences +foo_field : (foo\(bar\)) +foo_field : foo\:bar +foo_field : (foo \and bar) +foo_field : (foo \or bar) +foo_field : foo \not bar +foo_field : foo \{bar\} +foo_field : foo \(bar\) +foo_field : foo \\ bar +foo_field : foo \"bar\" + +foo_field : "foo and bar" +foo_field : "foo not bar" +foo_field : "foo or bar" +foo_field : "foo : bar" +foo_field : "foo { bar }" +foo_field : "foo (bar)" +foo_field : "foo \\ bar" +foo_field : "foo \"bar\"" diff --git a/x-pack/plugin/kql/src/test/resources/unsupported-queries b/x-pack/plugin/kql/src/test/resources/unsupported-queries new file mode 100644 index 0000000000000..545b03576b331 --- /dev/null +++ b/x-pack/plugin/kql/src/test/resources/unsupported-queries @@ -0,0 +1,41 @@ + +// Incomplete expressions +foo_field : +foo_field < +foo_field > +foo_field >= +foo_field <= + +// Parentheses mismatch +foo_field: (foo bar +foo_field: foo bar) +NOT foo_field:foo OR foo_field:foo bar) +NOT (foo_field:foo AND) foo_field:foo bar + +// Quotes mismatch +foo_field: "foo bar +foo_field: foo bar" + + +// Invalid boolean queries +foo AND +AND foo +foo OR +OR foo +NOT foo: + +// Can't nest grouping terms parentheses +foo_field:(foo (bar)) + +// Bad syntax for nested fields: +nested_field { foo: bar } + +// Missing escape sequences: +foo_field: foo:bar +foo_field: (foo and bar) +foo_field: (foo or bar) +foo_field: foo not bar +foo_field: foo { bar } +foo_field: foo (bar) +foo_field: foo "bar" +foo_field: "foo "bar"" From 5dec36e9fba460654faa447b09b1ddd6b6589920 Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Fri, 18 Oct 
2024 12:16:33 +0300 Subject: [PATCH 207/449] Replace IntermittentLongGCDisruption with blocking cluster state updates (#115075) In JDK 23 `Thread.resume` has been removed this means that we cannot use `IntermittentLongGCDisruption` that depends on it. We simulate the master node disruption with a `CyclicBarrier` that blocks cluster state updates. Closes: https://github.com/elastic/elasticsearch/issues/115045 The backport will close: https://github.com/elastic/elasticsearch/issues/112634 --- .../LazyRolloverDuringDisruptionIT.java | 55 ++++++++++++------- 1 file changed, 36 insertions(+), 19 deletions(-) diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/LazyRolloverDuringDisruptionIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/LazyRolloverDuringDisruptionIT.java index 83d34571a1597..00dfd5c65b126 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/LazyRolloverDuringDisruptionIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/LazyRolloverDuringDisruptionIT.java @@ -18,17 +18,19 @@ import org.elasticsearch.action.datastreams.GetDataStreamAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.disruption.IntermittentLongGCDisruption; -import org.elasticsearch.test.disruption.SingleNodeDisruption; import org.elasticsearch.xcontent.XContentType; import java.util.Collection; import java.util.List; import 
java.util.concurrent.CountDownLatch; +import java.util.concurrent.CyclicBarrier; import java.util.concurrent.ExecutionException; import static org.hamcrest.Matchers.equalTo; @@ -43,7 +45,7 @@ protected Collection> nodePlugins() { } public void testRolloverIsExecutedOnce() throws ExecutionException, InterruptedException { - String masterNode = internalCluster().startMasterOnlyNode(); + internalCluster().startMasterOnlyNode(); internalCluster().startDataOnlyNodes(3); ensureStableCluster(4); @@ -51,7 +53,7 @@ public void testRolloverIsExecutedOnce() throws ExecutionException, InterruptedE createDataStream(dataStreamName); // Mark it to lazy rollover - new RolloverRequestBuilder(client()).setRolloverTarget(dataStreamName).lazy(true).execute().get(); + safeGet(new RolloverRequestBuilder(client()).setRolloverTarget(dataStreamName).lazy(true).execute()); // Verify that the data stream is marked for rollover and that it has currently one index DataStream dataStream = getDataStream(dataStreamName); @@ -59,9 +61,22 @@ public void testRolloverIsExecutedOnce() throws ExecutionException, InterruptedE assertThat(dataStream.getBackingIndices().getIndices().size(), equalTo(1)); // Introduce a disruption to the master node that should delay the rollover execution - SingleNodeDisruption masterNodeDisruption = new IntermittentLongGCDisruption(random(), masterNode, 100, 200, 30000, 60000); - internalCluster().setDisruptionScheme(masterNodeDisruption); - masterNodeDisruption.startDisrupting(); + final var barrier = new CyclicBarrier(2); + internalCluster().getCurrentMasterNodeInstance(ClusterService.class) + .submitUnbatchedStateUpdateTask("block", new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) { + safeAwait(barrier); + safeAwait(barrier); + return currentState; + } + + @Override + public void onFailure(Exception e) { + fail(e); + } + }); + safeAwait(barrier); // Start indexing operations int docs = randomIntBetween(5, 10); @@ -84,10 
+99,10 @@ public void onFailure(Exception e) { } // End the disruption so that all pending tasks will complete - masterNodeDisruption.stopDisrupting(); + safeAwait(barrier); // Wait for all the indexing requests to be processed successfully - countDownLatch.await(); + safeAwait(countDownLatch); // Verify that the rollover has happened once dataStream = getDataStream(dataStreamName); @@ -96,10 +111,12 @@ public void onFailure(Exception e) { } private DataStream getDataStream(String dataStreamName) { - return client().execute( - GetDataStreamAction.INSTANCE, - new GetDataStreamAction.Request(TEST_REQUEST_TIMEOUT, new String[] { dataStreamName }) - ).actionGet().getDataStreams().get(0).getDataStream(); + return safeGet( + client().execute( + GetDataStreamAction.INSTANCE, + new GetDataStreamAction.Request(TEST_REQUEST_TIMEOUT, new String[] { dataStreamName }) + ) + ).getDataStreams().get(0).getDataStream(); } private void createDataStream(String dataStreamName) throws InterruptedException, ExecutionException { @@ -111,10 +128,9 @@ private void createDataStream(String dataStreamName) throws InterruptedException .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) .build() ); - final AcknowledgedResponse putComposableTemplateResponse = client().execute( - TransportPutComposableIndexTemplateAction.TYPE, - putComposableTemplateRequest - ).actionGet(); + final AcknowledgedResponse putComposableTemplateResponse = safeGet( + client().execute(TransportPutComposableIndexTemplateAction.TYPE, putComposableTemplateRequest) + ); assertThat(putComposableTemplateResponse.isAcknowledged(), is(true)); final CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request( @@ -122,8 +138,9 @@ private void createDataStream(String dataStreamName) throws InterruptedException TEST_REQUEST_TIMEOUT, dataStreamName ); - final AcknowledgedResponse createDataStreamResponse = client().execute(CreateDataStreamAction.INSTANCE, 
createDataStreamRequest) - .get(); + final AcknowledgedResponse createDataStreamResponse = safeGet( + client().execute(CreateDataStreamAction.INSTANCE, createDataStreamRequest) + ); assertThat(createDataStreamResponse.isAcknowledged(), is(true)); } } From 3bb20e39ab1bc094ff412602e3217be08f230631 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Fri, 18 Oct 2024 11:17:18 +0200 Subject: [PATCH 208/449] ES|QL: Fix stats by constant expression (#114899) --- docs/changelog/114899.yaml | 5 ++++ .../src/main/resources/stats.csv-spec | 29 +++++++++++++++++++ .../xpack/esql/action/EsqlCapabilities.java | 7 ++++- .../xpack/esql/plan/logical/Aggregate.java | 9 +++++- 4 files changed, 48 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/114899.yaml diff --git a/docs/changelog/114899.yaml b/docs/changelog/114899.yaml new file mode 100644 index 0000000000000..399aa5cf35409 --- /dev/null +++ b/docs/changelog/114899.yaml @@ -0,0 +1,5 @@ +pr: 114899 +summary: "ES|QL: Fix stats by constant expression" +area: ES|QL +type: bug +issues: [] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 496a747fd9c2b..ac4351413129e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -2473,3 +2473,32 @@ c2:l |c2_f:l |m2:i |m2_f:i |c:l 1 |0 |2 |2 |19 1 |1 |5 |5 |21 ; + + +statsByConstantExpressionNoAggs +required_capability: fix_stats_by_foldable_expression +FROM employees | eval x = [1,2,3], y = 5 + 6 | stats by y+1 +; + +y+1:integer +12 +; + + +statsByConstantExpressionNoAggsWithAlias +required_capability: fix_stats_by_foldable_expression +FROM employees | eval x = [1,2,3], y = 5 + 6 | stats by yy = y+1 +; + +yy:integer +12 +; + +statsByConstantExpression +required_capability: fix_stats_by_foldable_expression +FROM employees | eval x = [1,2,3], y = 5 + 6 | stats m = 
max(y) by y+1 +; + +m:integer | y+1:integer +11 | 12 +; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index c94791964fb90..b31fc005a0a5d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -385,7 +385,12 @@ public enum Cap { /** * Allow filter per individual aggregation. */ - PER_AGG_FILTERING; + PER_AGG_FILTERING, + + /** + * Fix for https://github.com/elastic/elasticsearch/issues/114714 + */ + FIX_STATS_BY_FOLDABLE_EXPRESSION; private final boolean snapshotOnly; private final FeatureFlag featureFlag; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java index b2f314a0e8294..e1632db4f79a2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; +import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -157,7 +158,13 @@ protected AttributeSet computeReferences() { } public static AttributeSet computeReferences(List aggregates, List groupings) { - return Expressions.references(groupings).combine(Expressions.references(aggregates)); + AttributeSet result = 
Expressions.references(groupings).combine(Expressions.references(aggregates)); + for (Expression grouping : groupings) { + if (grouping instanceof Alias) { + result.remove(((Alias) grouping).toAttribute()); + } + } + return result; } @Override From 4434f841e2c0c1430800fb7502cb9bf3efa6b87a Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 18 Oct 2024 12:11:13 +0100 Subject: [PATCH 209/449] Handle remaining refs to `RestApiVersion#V_7` (#114881) Removes several more references to the now-unused `RestApiVersion#V_7` constant, and decorates all remaining references with an `@UpdateForV9` annotation so that they all have clear owners. --- .../percolator/PercolateQueryBuilder.java | 10 ------- .../indices/rollover/RolloverRequest.java | 16 +---------- .../action/bulk/BulkRequestParser.java | 18 ++++-------- .../metadata/IndexTemplateMetadata.java | 7 +---- .../index/query/CommonTermsQueryBuilder.java | 2 ++ .../query/GeoBoundingBoxQueryBuilder.java | 2 -- .../index/query/MatchQueryBuilder.java | 7 +---- .../index/query/MultiMatchQueryBuilder.java | 7 +---- .../index/query/TypeQueryV7Builder.java | 2 ++ .../index/reindex/ReindexRequest.java | 18 ++++-------- .../elasticsearch/indices/TermsLookup.java | 3 -- .../cluster/RestClusterRerouteAction.java | 27 +++++++++--------- .../admin/indices/RestGetAliasesAction.java | 2 ++ .../indices/RestPutIndexTemplateAction.java | 7 ----- .../rest/action/cat/RestAliasAction.java | 3 ++ .../bucket/histogram/DateIntervalWrapper.java | 28 ------------------- .../search/fetch/subphase/FieldAndFormat.java | 16 +---------- .../search/sort/SortBuilder.java | 3 ++ .../org/elasticsearch/license/License.java | 2 ++ .../license/RestGetLicenseAction.java | 4 +-- .../xpack/core/ml/action/CloseJobAction.java | 4 +++ .../ml/action/GetOverallBucketsAction.java | 3 ++ .../action/GetTrainedModelsStatsAction.java | 4 +-- .../core/ml/action/StopDatafeedAction.java | 3 ++ .../core/ml/inference/TrainedModelConfig.java | 10 ------- 
.../core/rest/action/RestXPackInfoAction.java | 7 ++--- .../ml/rest/cat/RestCatDatafeedsAction.java | 2 ++ .../xpack/ml/rest/cat/RestCatJobsAction.java | 2 ++ .../datafeeds/RestGetDatafeedStatsAction.java | 2 ++ .../datafeeds/RestGetDatafeedsAction.java | 2 ++ .../datafeeds/RestStopDatafeedAction.java | 2 ++ .../xpack/ml/rest/job/RestCloseJobAction.java | 2 ++ .../ml/rest/job/RestGetJobStatsAction.java | 2 ++ .../xpack/ml/rest/job/RestGetJobsAction.java | 2 ++ .../results/RestGetOverallBucketsAction.java | 2 ++ 35 files changed, 76 insertions(+), 157 deletions(-) diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java index 6b37b02a945b2..85af5b120f6fd 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java @@ -46,7 +46,6 @@ import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.fielddata.FieldDataContext; @@ -84,7 +83,6 @@ import java.util.function.BiConsumer; import java.util.function.Supplier; -import static org.elasticsearch.core.RestApiVersion.equalTo; import static org.elasticsearch.search.SearchService.ALLOW_EXPENSIVE_QUERIES; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; @@ -368,14 +366,6 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep DOCUMENTS_FIELD.getPreferredName(), INDEXED_DOCUMENT_FIELD_ID.getPreferredName() ); - 
PARSER.declareString( - deprecateAndIgnoreType("percolate_with_type", TYPE_DEPRECATION_MESSAGE), - INDEXED_DOCUMENT_FIELD_TYPE.forRestApiVersion(equalTo(RestApiVersion.V_7)) - ); - PARSER.declareString( - deprecateAndIgnoreType("percolate_with_document_type", DOCUMENT_TYPE_DEPRECATION_MESSAGE), - DOCUMENT_TYPE_FIELD.forRestApiVersion(equalTo(RestApiVersion.V_7)) - ); } private static BiConsumer deprecateAndIgnoreType(String key, String message) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java index fefc41317591b..5a7f330be50c0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java @@ -18,7 +18,6 @@ import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; @@ -57,19 +56,6 @@ public class RolloverRequest extends AcknowledgedRequest implem CreateIndexRequest.SETTINGS, ObjectParser.ValueType.OBJECT ); - PARSER.declareField((parser, request, context) -> { - // a type is not included, add a dummy _doc type - Map mappings = parser.map(); - if (MapperService.isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, mappings)) { - throw new IllegalArgumentException( - "The mapping definition cannot be nested under a type " - + "[" - + MapperService.SINGLE_MAPPING_NAME - + "] unless include_type_name is set to true." 
- ); - } - request.createIndexRequest.mapping(mappings); - }, CreateIndexRequest.MAPPINGS.forRestApiVersion(RestApiVersion.equalTo(RestApiVersion.V_7)), ObjectParser.ValueType.OBJECT); PARSER.declareField((parser, request, context) -> { // a type is not included, add a dummy _doc type Map mappings = parser.map(); @@ -78,7 +64,7 @@ public class RolloverRequest extends AcknowledgedRequest implem throw new IllegalArgumentException("The mapping definition cannot be nested under a type"); } request.createIndexRequest.mapping(mappings); - }, CreateIndexRequest.MAPPINGS.forRestApiVersion(RestApiVersion.onOrAfter(RestApiVersion.V_8)), ObjectParser.ValueType.OBJECT); + }, CreateIndexRequest.MAPPINGS, ObjectParser.ValueType.OBJECT); PARSER.declareField( (parser, request, context) -> request.createIndexRequest.aliases(parser.map()), diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestParser.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestParser.java index cd74989e5df7b..4c475bee985ab 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestParser.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestParser.java @@ -22,7 +22,6 @@ import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.seqno.SequenceNumbers; -import org.elasticsearch.rest.action.document.RestBulkAction; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContent; @@ -271,18 +270,11 @@ public int incrementalParse( } index = stringDeduplicator.computeIfAbsent(parser.text(), Function.identity()); } else if (TYPE.match(currentFieldName, parser.getDeprecationHandler())) { - if (parser.getRestApiVersion().matches(RestApiVersion.equalTo(RestApiVersion.V_7))) { - // for bigger bulks, deprecation throttling might not be enough - if (deprecateOrErrorOnType && typesDeprecationLogged == 
false) { - deprecationLogger.compatibleCritical("bulk_with_types", RestBulkAction.TYPES_DEPRECATION_MESSAGE); - typesDeprecationLogged = true; - } - } else if (parser.getRestApiVersion().matches(RestApiVersion.onOrAfter(RestApiVersion.V_8)) - && deprecateOrErrorOnType) { - throw new IllegalArgumentException( - "Action/metadata line [" + line + "] contains an unknown parameter [" + currentFieldName + "]" - ); - } + if (deprecateOrErrorOnType) { + throw new IllegalArgumentException( + "Action/metadata line [" + line + "] contains an unknown parameter [" + currentFieldName + "]" + ); + } type = stringDeduplicator.computeIfAbsent(parser.text(), Function.identity()); } else if (ID.match(currentFieldName, parser.getDeprecationHandler())) { id = parser.text(); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetadata.java index 6ddcd6a45e4b6..1379489182b53 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetadata.java @@ -35,9 +35,6 @@ import java.util.Objects; import java.util.Set; -import static org.elasticsearch.core.RestApiVersion.V_8; -import static org.elasticsearch.core.RestApiVersion.onOrAfter; - public class IndexTemplateMetadata implements SimpleDiffable { private final String name; @@ -379,9 +376,7 @@ private static void toInnerXContent( indexTemplateMetadata.settings().toXContent(builder, params); builder.endObject(); - if (builder.getRestApiVersion().matches(onOrAfter(V_8))) { - includeTypeName &= (params.paramAsBoolean("reduce_mappings", false) == false); - } + includeTypeName &= (params.paramAsBoolean("reduce_mappings", false) == false); CompressedXContent m = indexTemplateMetadata.mappings(); if (m != null) { diff --git a/server/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java 
b/server/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java index 263c6bd35bcca..0b9663d9112fa 100644 --- a/server/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -27,6 +28,7 @@ public class CommonTermsQueryBuilder extends AbstractQueryBuilder { - private static final String CUTOFF_FREQUENCY_DEPRECATION_MSG = "cutoff_freqency is not supported. " - + "The [match] query can skip block of documents efficiently if the total number of hits is not tracked"; - public static final ParseField CUTOFF_FREQUENCY_FIELD = new ParseField("cutoff_frequency").withAllDeprecated( - CUTOFF_FREQUENCY_DEPRECATION_MSG - ).forRestApiVersion(RestApiVersion.equalTo(RestApiVersion.V_7)); + public static final ParseField ZERO_TERMS_QUERY_FIELD = new ParseField("zero_terms_query"); public static final ParseField LENIENT_FIELD = new ParseField("lenient"); public static final ParseField FUZZY_TRANSPOSITIONS_FIELD = new ParseField("fuzzy_transpositions"); diff --git a/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java index a83fb8d1fd419..17e651ab24696 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java @@ -21,7 +21,6 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.Fuzziness; import 
org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.query.support.QueryParsers; import org.elasticsearch.index.search.MatchQueryParser; import org.elasticsearch.index.search.MultiMatchQueryParser; @@ -45,11 +44,7 @@ public final class MultiMatchQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "multi_match"; - private static final String CUTOFF_FREQUENCY_DEPRECATION_MSG = "cutoff_freqency is not supported." - + " The [multi_match] query can skip block of documents efficiently if the total number of hits is not tracked"; - private static final ParseField CUTOFF_FREQUENCY_FIELD = new ParseField("cutoff_frequency").withAllDeprecated( - CUTOFF_FREQUENCY_DEPRECATION_MSG - ).forRestApiVersion(RestApiVersion.equalTo(RestApiVersion.V_7)); + public static final MultiMatchQueryBuilder.Type DEFAULT_TYPE = MultiMatchQueryBuilder.Type.BEST_FIELDS; public static final Operator DEFAULT_OPERATOR = Operator.OR; public static final int DEFAULT_PHRASE_SLOP = MatchQueryParser.DEFAULT_PHRASE_SLOP; diff --git a/server/src/main/java/org/elasticsearch/index/query/TypeQueryV7Builder.java b/server/src/main/java/org/elasticsearch/index/query/TypeQueryV7Builder.java index f707684cb46e5..c9aae0195acf7 100644 --- a/server/src/main/java/org/elasticsearch/index/query/TypeQueryV7Builder.java +++ b/server/src/main/java/org/elasticsearch/index/query/TypeQueryV7Builder.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -26,6 +27,7 @@ import java.io.IOException; +@UpdateForV9(owner = UpdateForV9.Owner.SEARCH_RELEVANCE) // v7 REST API no longer exists: eliminate ref to 
RestApiVersion.V_7 public class TypeQueryV7Builder extends AbstractQueryBuilder { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(TypeQueryV7Builder.class); public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Type queries are deprecated, " diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java b/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java index b7ec486df612a..f9d027e0c9c1c 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java @@ -20,8 +20,8 @@ import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.query.QueryBuilder; @@ -354,20 +354,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws ObjectParser.ValueType.OBJECT ); - PARSER.declareInt( - ReindexRequest::setMaxDocsValidateIdentical, - new ParseField("max_docs", "size").forRestApiVersion(RestApiVersion.equalTo(RestApiVersion.V_7)) - ); + PARSER.declareInt(ReindexRequest::setMaxDocsValidateIdentical, new ParseField("max_docs")); - PARSER.declareInt( - ReindexRequest::setMaxDocsValidateIdentical, - new ParseField("max_docs").forRestApiVersion(RestApiVersion.onOrAfter(RestApiVersion.V_8)) - ); // avoid silently accepting an ignored size. 
- PARSER.declareInt( - (r, s) -> failOnSizeSpecified(), - new ParseField("size").forRestApiVersion(RestApiVersion.onOrAfter(RestApiVersion.V_8)) - ); + PARSER.declareInt((r, s) -> failOnSizeSpecified(), new ParseField("size")); PARSER.declareField((p, v, c) -> v.setScript(Script.parse(p)), new ParseField("script"), ObjectParser.ValueType.OBJECT); PARSER.declareString(ReindexRequest::setConflicts, new ParseField("conflicts")); @@ -509,6 +499,8 @@ static void setMaxDocsValidateIdentical(AbstractBulkByScrollRequest request, } } + @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_INDEXING) + // do we still need this ref to [max_docs] or can we remove the field entirely so it's rejected with the default message? private static void failOnSizeSpecified() { throw new IllegalArgumentException("invalid parameter [size], use [max_docs] instead"); } diff --git a/server/src/main/java/org/elasticsearch/indices/TermsLookup.java b/server/src/main/java/org/elasticsearch/indices/TermsLookup.java index b1c74c606df64..0bab521a984e2 100644 --- a/server/src/main/java/org/elasticsearch/indices/TermsLookup.java +++ b/server/src/main/java/org/elasticsearch/indices/TermsLookup.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -25,7 +24,6 @@ import java.io.IOException; import java.util.Objects; -import static org.elasticsearch.core.RestApiVersion.equalTo; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; /** @@ -109,7 +107,6 @@ public TermsLookup routing(String routing) { PARSER.declareString(constructorArg(), new ParseField("id")); PARSER.declareString(constructorArg(), new ParseField("path")); 
PARSER.declareString(TermsLookup::routing, new ParseField("routing")); - PARSER.declareString((termLookup, type) -> {}, new ParseField("type").forRestApiVersion(equalTo(RestApiVersion.V_7))); } public static TermsLookup parseTermsLookup(XContentParser parser) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterRerouteAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterRerouteAction.java index fada07d60b74e..970733b36f7f8 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterRerouteAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterRerouteAction.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -95,20 +96,19 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC request.params().put("explain", Boolean.TRUE.toString()); } - switch (request.getRestApiVersion()) { - case V_9 -> { - // always avoid returning the cluster state by forcing `?metric=none`; emit a warning if `?metric` is even present - if (request.hasParam("metric")) { - deprecationLogger.critical(DeprecationCategory.API, "cluster-reroute-metric-param", METRIC_DEPRECATION_MESSAGE); - } - request.params().put("metric", "none"); + if (request.getRestApiVersion().matches(RestApiVersion.onOrAfter(RestApiVersion.V_9))) { + // always avoid returning the cluster state by forcing `?metric=none`; emit a warning if `?metric` is even present + if (request.hasParam("metric")) { + deprecationLogger.critical(DeprecationCategory.API, "cluster-reroute-metric-param", METRIC_DEPRECATION_MESSAGE); } - case 
V_8, V_7 -> { - // by default, return everything but metadata - final String metric = request.param("metric"); - if (metric == null) { - request.params().put("metric", V8_DEFAULT_METRICS); - } + request.params().put("metric", "none"); + } else { + assert request.getRestApiVersion().matches(RestApiVersion.equalTo(RestApiVersion.V_8)); + @UpdateForV10(owner = UpdateForV10.Owner.DISTRIBUTED_COORDINATION) // forbid this parameter in the v10 API + // by default, return everything but metadata + final String metric = request.param("metric"); + if (metric == null) { + request.params().put("metric", V8_DEFAULT_METRICS); } } @@ -117,6 +117,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC clusterRerouteRequest, new RestRefCountedChunkedToXContentListener<>(channel) ); + } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAliasesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAliasesAction.java index 79e5c574d93bf..7780ae08ac0ff 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAliasesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAliasesAction.java @@ -199,6 +199,8 @@ static RestResponse buildRestResponse( } @Override + @UpdateForV9(owner = UpdateForV9.Owner.DATA_MANAGEMENT) + // v7 REST API no longer exists: eliminate ref to RestApiVersion.V_7; reject local parameter in v9 too? public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { // The TransportGetAliasesAction was improved do the same post processing as is happening here. // We can't remove this logic yet to support mixed clusters. 
We should be able to remove this logic here diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java index defec2fefc615..592679a2a02a0 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java @@ -12,9 +12,7 @@ import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -30,12 +28,7 @@ public class RestPutIndexTemplateAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestPutIndexTemplateAction.class); public static final String DEPRECATION_WARNING = "Legacy index templates are deprecated in favor of composable templates."; - private static final RestApiVersion DEPRECATION_VERSION = RestApiVersion.V_8; - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" - + " Specifying include_type_name in put index template requests is deprecated." 
- + " The parameter will be removed in the next major version."; @Override public List routes() { diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestAliasAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestAliasAction.java index 0957821ef5531..191746b421c98 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestAliasAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestAliasAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.Scope; @@ -51,6 +52,8 @@ public boolean allowSystemIndexAccessByDefault() { } @Override + @UpdateForV9(owner = UpdateForV9.Owner.DATA_MANAGEMENT) + // v7 REST API no longer exists: eliminate ref to RestApiVersion.V_7; reject local parameter in v9 too? protected RestChannelConsumer doCatRequest(final RestRequest request, final NodeClient client) { final GetAliasesRequest getAliasesRequest = request.hasParam("alias") ? 
new GetAliasesRequest(Strings.commaDelimitedListToStringArray(request.param("alias"))) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapper.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapper.java index 6f03f9e5952cf..6886b0e22c20a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapper.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapper.java @@ -15,22 +15,17 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.time.ZoneId; import java.util.Objects; -import static org.elasticsearch.core.RestApiVersion.equalTo; - /** * A class that handles all the parsing, bwc and deprecations surrounding date histogram intervals. * @@ -91,29 +86,6 @@ public String getPreferredName() { private IntervalTypeEnum intervalType = IntervalTypeEnum.NONE; public static > void declareIntervalFields(ObjectParser parser) { - /* - REST version compatibility. When in V_7 compatibility mode, continue to parse the old style interval parameter, - but immediately adapt it into either fixed or calendar interval. 
- */ - parser.declareField((wrapper, interval) -> { - DEPRECATION_LOGGER.warn(DeprecationCategory.AGGREGATIONS, "date-interval-getter", DEPRECATION_TEXT); - if (interval instanceof Long) { - wrapper.fixedInterval(new DateHistogramInterval(interval + "ms")); - } else { - if (interval != null && DateHistogramAggregationBuilder.DATE_FIELD_UNITS.containsKey(interval.toString())) { - wrapper.calendarInterval((DateHistogramInterval) interval); - } else { - wrapper.fixedInterval((DateHistogramInterval) interval); - } - } - }, p -> { - if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { - return p.longValue(); - } else { - return new DateHistogramInterval(p.text()); - } - }, Histogram.INTERVAL_FIELD.forRestApiVersion(equalTo(RestApiVersion.V_7)), ObjectParser.ValueType.LONG); - parser.declareField( DateIntervalConsumer::calendarInterval, p -> new DateHistogramInterval(p.text()), diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldAndFormat.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldAndFormat.java index 719729f6787ae..f623b3040f1c5 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldAndFormat.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldAndFormat.java @@ -15,9 +15,7 @@ import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContent; @@ -27,9 +25,6 @@ import java.io.IOException; import java.util.Objects; -import static org.elasticsearch.core.RestApiVersion.equalTo; -import static org.elasticsearch.core.RestApiVersion.onOrAfter; - /** * Wrapper around a field name and the format 
that should be used to * display values of this field. @@ -49,16 +44,7 @@ public final class FieldAndFormat implements Writeable, ToXContentObject { static { PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD_FIELD); - PARSER.declareStringOrNull( - ConstructingObjectParser.optionalConstructorArg(), - FORMAT_FIELD.forRestApiVersion(onOrAfter(RestApiVersion.V_8)) - ); - PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - ignoreUseFieldMappingStringParser(), - FORMAT_FIELD.forRestApiVersion(equalTo(RestApiVersion.V_7)), - ObjectParser.ValueType.STRING_OR_NULL - ); + PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), FORMAT_FIELD); PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), INCLUDE_UNMAPPED_FIELD); } diff --git a/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java index 9f943e63ef1e6..0ac3b42dd5b10 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.mapper.NestedObjectMapper; import org.elasticsearch.index.query.QueryBuilder; @@ -50,6 +51,8 @@ public abstract class SortBuilder> // parse fields common to more than one SortBuilder public static final ParseField ORDER_FIELD = new ParseField("order"); + + @UpdateForV9(owner = UpdateForV9.Owner.SEARCH_FOUNDATIONS) // v7 REST API no longer exists: eliminate ref to RestApiVersion.V_7 public static final ParseField NESTED_FILTER_FIELD = new ParseField("nested_filter").withAllDeprecated() 
.forRestApiVersion(RestApiVersion.equalTo(RestApiVersion.V_7)); public static final ParseField NESTED_PATH_FIELD = new ParseField("nested_path").withAllDeprecated() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java index 0d1a007db0d39..f280dcf9b3edf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -145,6 +146,7 @@ static boolean isEnterprise(String typeName) { * XContent param name to map the "enterprise" license type to "platinum" * for backwards compatibility with older clients */ + @UpdateForV9(owner = UpdateForV9.Owner.SECURITY) // v7 REST API no longer exists: eliminate ref to RestApiVersion.V_7 public static final String XCONTENT_HIDE_ENTERPRISE = "hide_enterprise"; public static final Comparator LATEST_ISSUE_DATE_FIRST = Comparator.comparing(License::issueDate).reversed(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetLicenseAction.java index baec748141903..92e46e3ea6564 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetLicenseAction.java @@ -27,8 +27,6 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.core.RestApiVersion.V_8; -import static org.elasticsearch.core.RestApiVersion.onOrAfter; 
import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestStatus.NOT_FOUND; import static org.elasticsearch.rest.RestStatus.OK; @@ -72,7 +70,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC "Including [accept_enterprise] in get license requests is deprecated." + " The parameter will be removed in the next major version" ); - if (request.paramAsBoolean("accept_enterprise", true) == false && request.getRestApiVersion().matches(onOrAfter(V_8))) { + if (request.paramAsBoolean("accept_enterprise", true) == false) { throw new IllegalArgumentException("The [accept_enterprise] parameters may not be false"); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java index ca81509920ca5..bddae0417e467 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.tasks.Task; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -43,7 +44,10 @@ public static class Request extends BaseTasksRequest implements ToXCont public static final ParseField TIMEOUT = new ParseField("timeout"); public static final ParseField FORCE = new ParseField("force"); + + @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) // v7 REST API no longer exists: eliminate forRestApiVersion public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match").forRestApiVersion(onOrAfter(RestApiVersion.V_8)); + @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) // 
v7 REST API no longer exists: eliminate ref to RestApiVersion.V_7 public static final ParseField ALLOW_NO_MATCH_V7 = new ParseField("allow_no_match", DEPRECATED_ALLOW_NO_JOBS_PARAM) .forRestApiVersion(equalTo(RestApiVersion.V_7)); public static final ObjectParser PARSER = new ObjectParser<>(NAME, Request::new); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java index 12c8696a50626..47bc6df5f6536 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -67,7 +68,9 @@ public static class Request extends ActionRequest implements ToXContentObject { public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim"); public static final ParseField START = new ParseField("start"); public static final ParseField END = new ParseField("end"); + @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) // v7 REST API no longer exists: eliminate forRestApiVersion public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match").forRestApiVersion(onOrAfter(RestApiVersion.V_8)); + @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) // v7 REST API no longer exists: eliminate ref to RestApiVersion.V_7 public static final ParseField ALLOW_NO_MATCH_V7 = new ParseField("allow_no_match", DEPRECATED_ALLOW_NO_JOBS_PARAM) .forRestApiVersion(equalTo(RestApiVersion.V_7)); diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java index c61c62ed23a4d..8ad275e29b09a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ChunkedToXContent; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.ingest.IngestStats; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; @@ -34,7 +33,6 @@ import java.util.Objects; import java.util.Set; -import static org.elasticsearch.core.RestApiVersion.onOrAfter; import static org.elasticsearch.core.Strings.format; public class GetTrainedModelsStatsAction extends ActionType { @@ -172,7 +170,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (this.inferenceStats != null) { builder.field(INFERENCE_STATS.getPreferredName(), this.inferenceStats); } - if (deploymentStats != null && builder.getRestApiVersion().matches(onOrAfter(RestApiVersion.V_8))) { + if (deploymentStats != null) { builder.field(DEPLOYMENT_STATS.getPreferredName(), this.deploymentStats); } builder.endObject(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java index cb89cfa8cd0e7..bd4aac7ccad89 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java @@ 
-15,6 +15,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.tasks.Task; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -46,7 +47,9 @@ public static class Request extends BaseTasksRequest implements ToXCont public static final ParseField TIMEOUT = new ParseField("timeout"); public static final ParseField FORCE = new ParseField("force"); + @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) // v7 REST API no longer exists: eliminate forRestApiVersion public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match").forRestApiVersion(onOrAfter(RestApiVersion.V_8)); + @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) // v7 REST API no longer exists: eliminate ref to RestApiVersion.V_7 public static final ParseField ALLOW_NO_MATCH_V7 = new ParseField("allow_no_match", DEPRECATED_ALLOW_NO_DATAFEEDS_PARAM) .forRestApiVersion(equalTo(RestApiVersion.V_7)); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java index 5ae19f6db6bb4..3fb4bb7fa598b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.License; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ObjectParser; @@ -514,15 +513,6 @@ public XContentBuilder toXContent(XContentBuilder 
builder, Params params) throws CREATE_TIME.getPreferredName() + "_string", createTime.toEpochMilli() ); - // If we are NOT storing the model, we should return the deprecated field name - if (params.paramAsBoolean(ToXContentParams.FOR_INTERNAL_STORAGE, false) == false - && builder.getRestApiVersion().matches(RestApiVersion.equalTo(RestApiVersion.V_7))) { - builder.humanReadableField( - DEPRECATED_ESTIMATED_HEAP_MEMORY_USAGE_BYTES.getPreferredName(), - ESTIMATED_HEAP_MEMORY_USAGE_HUMAN, - ByteSizeValue.ofBytes(modelSize) - ); - } builder.humanReadableField(MODEL_SIZE_BYTES.getPreferredName(), MODEL_SIZE_HUMAN, ByteSizeValue.ofBytes(modelSize)); builder.field(ESTIMATED_OPERATIONS.getPreferredName(), estimatedOperations); builder.field(LICENSE_LEVEL.getPreferredName(), licenseLevel.description()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/action/RestXPackInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/action/RestXPackInfoAction.java index cfa2efc53ce96..4e558e2a394dc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/action/RestXPackInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/action/RestXPackInfoAction.java @@ -9,7 +9,7 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.protocol.xpack.XPackInfoRequest; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -22,7 +22,6 @@ import java.util.EnumSet; import java.util.List; -import static org.elasticsearch.core.RestApiVersion.onOrAfter; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.HEAD; @@ -42,6 +41,7 @@ public String getName() 
{ } @Override + @UpdateForV9(owner = UpdateForV9.Owner.SECURITY) // accept_enterprise parameter no longer supported? public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { // we piggyback verbosity on "human" output @@ -56,8 +56,7 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client "Including [accept_enterprise] in get license requests is deprecated." + " The parameter will be removed in the next major version" ); - if (request.paramAsBoolean("accept_enterprise", true) == false - && request.getRestApiVersion().matches(onOrAfter(RestApiVersion.V_8))) { + if (request.paramAsBoolean("accept_enterprise", true) == false) { throw new IllegalArgumentException("The [accept_enterprise] parameters may not be false"); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatDatafeedsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatDatafeedsAction.java index b93089eabda2d..205bb4f68a62c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatDatafeedsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatDatafeedsAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.Table; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.Scope; @@ -51,6 +52,7 @@ protected RestChannelConsumer doCatRequest(RestRequest restRequest, NodeClient c if (Strings.isNullOrEmpty(datafeedId)) { datafeedId = GetDatafeedsStatsAction.ALL; } + @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) // v7 REST API no longer exists: eliminate ref to RestApiVersion.V_7 Request request = new Request(datafeedId); checkAndSetDeprecatedParam( DEPRECATED_ALLOW_NO_DATAFEEDS_PARAM, diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatJobsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatJobsAction.java index cb02990da74c9..b27819bceee44 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatJobsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatJobsAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.Scope; @@ -56,6 +57,7 @@ protected RestChannelConsumer doCatRequest(RestRequest restRequest, NodeClient c if (Strings.isNullOrEmpty(jobId)) { jobId = Metadata.ALL; } + @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) // v7 REST API no longer exists: eliminate ref to RestApiVersion.V_7 Request request = new Request(jobId); checkAndSetDeprecatedParam( DEPRECATED_ALLOW_NO_JOBS_PARAM, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedStatsAction.java index eb930edffe055..8c85c055fca3b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedStatsAction.java @@ -9,6 +9,7 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -47,6 +48,7 @@ protected RestChannelConsumer 
prepareRequest(RestRequest restRequest, NodeClient if (Strings.isNullOrEmpty(datafeedId)) { datafeedId = GetDatafeedsStatsAction.ALL; } + @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) // v7 REST API no longer exists: eliminate ref to RestApiVersion.V_7 Request request = new Request(datafeedId); checkAndSetDeprecatedParam( DEPRECATED_ALLOW_NO_DATAFEEDS_PARAM, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedsAction.java index 6b3a857cdbb9b..fd0681f68a3a5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedsAction.java @@ -8,6 +8,7 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -50,6 +51,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient if (datafeedId == null) { datafeedId = GetDatafeedsAction.ALL; } + @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) // v7 REST API no longer exists: eliminate ref to RestApiVersion.V_7 Request request = new Request(datafeedId); checkAndSetDeprecatedParam( DEPRECATED_ALLOW_NO_DATAFEEDS_PARAM, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStopDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStopDatafeedAction.java index bdbdc18a0d9cb..8235e2785cc37 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStopDatafeedAction.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStopDatafeedAction.java @@ -9,6 +9,7 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -48,6 +49,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { String datafeedId = restRequest.param(DatafeedConfig.ID.getPreferredName()); + @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) // v7 REST API no longer exists: eliminate ref to RestApiVersion.V_7 Request request; if (restRequest.hasContentOrSourceParam()) { XContentParser parser = restRequest.contentOrSourceParamParser(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestCloseJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestCloseJobAction.java index 0986a6a2400db..f98a2f5a933ae 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestCloseJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestCloseJobAction.java @@ -9,6 +9,7 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -42,6 +43,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) // v7 REST API no longer exists: eliminate ref to RestApiVersion.V_7 
Request request; if (restRequest.hasContentOrSourceParam()) { request = Request.parseRequest(restRequest.param(Job.ID.getPreferredName()), restRequest.contentParser()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobStatsAction.java index 1ecc0ff0cefa3..2899faabdc40f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobStatsAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.Strings; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -51,6 +52,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient if (Strings.isNullOrEmpty(jobId)) { jobId = Metadata.ALL; } + @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) // v7 REST API no longer exists: eliminate ref to RestApiVersion.V_7 Request request = new Request(jobId); checkAndSetDeprecatedParam( DEPRECATED_ALLOW_NO_JOBS_PARAM, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobsAction.java index e6f4325024c3c..ae8d234d1d8bd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobsAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.Strings; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV9; import 
org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -51,6 +52,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient if (Strings.isNullOrEmpty(jobId)) { jobId = Metadata.ALL; } + @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) // v7 REST API no longer exists: eliminate ref to RestApiVersion.V_7 Request request = new Request(jobId); checkAndSetDeprecatedParam( DEPRECATED_ALLOW_NO_JOBS_PARAM, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetOverallBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetOverallBucketsAction.java index e73e5b32d3a1c..2700e01cb9f6b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetOverallBucketsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetOverallBucketsAction.java @@ -8,6 +8,7 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -47,6 +48,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { String jobId = restRequest.param(Job.ID.getPreferredName()); + @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) // v7 REST API no longer exists: eliminate ref to RestApiVersion.V_7 final Request request; if (restRequest.hasContentOrSourceParam()) { XContentParser parser = restRequest.contentOrSourceParamParser(); From 39949c1454564974f169bf6f89e4c201bff06441 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Fri, 18 Oct 2024 13:19:24 +0200 Subject: [PATCH 210/449] [DOCS] Modifies inference landscape 
image. (#115090) --- .../inference/images/inference-landscape.jpg | Bin 0 -> 84460 bytes .../inference/images/inference-landscape.png | Bin 96237 -> 0 bytes .../reference/inference/inference-apis.asciidoc | 2 +- 3 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 docs/reference/inference/images/inference-landscape.jpg delete mode 100644 docs/reference/inference/images/inference-landscape.png diff --git a/docs/reference/inference/images/inference-landscape.jpg b/docs/reference/inference/images/inference-landscape.jpg new file mode 100644 index 0000000000000000000000000000000000000000..d66d67763cab532265a93c77b64050c7a119a7bc GIT binary patch literal 84460 zcmeFZ2UwHMwl5yKf=H7hNR!?b=`8}%1dK@U0wNuxccLgDT|huVh*G6SdQYSmL8MD3 zq4$IuAcPygeRgx7=iYt)?VSI8z9B>2Cz-tSt~G1c`mHsy=HlnY0)YCyx|TYCfB*o{ z#Qy;<5C9*w0H>z_fUYh;5C8yN0uU2014!^!_$B})0+zpBs}t}8i2nGT5CDjB0ucX4 zn+N#!Uj=`+KYsI$-rGGSp;=W_+6@HnVfsyGtGY>Bxzkr~Gq?ELbteomSHFXV5 zt^4{0hDOFFre=0e?4LR~Iyrm$`1<(=1O~l)6%iTrIyxpfl^63{e#1!W6a6vFS-Z- zM1LU*|NlRwiv~{@Au%x#G1)J=2nhXu5l%x)!g-sNR#}hC#*^-*_=`)|RFXbbv|i@A z^9V(6>or8qz%7B`LH{D{56b>M!ovSAQT8{&{!Z64KovmvMBSV_3K0Qb zn22ZqK*0HQb)_`_6On2MwnL9I9`I@rHbN^O+zrKE8c(&z+fj+BNI6f2?~meC61P$> z0NI6m^kr(IPmKk|!Ij3iAV`nie|%l@Irn|kpP#Dy$%ns)%b$GslMjFDhd(tU?oW;Q zf5$?mT1(5lU&n# zbSo!@AMj`We}xA`7ejxt{=Y`Nh%SczWc_~?>qq`%{eKnfNB?csmz#IaNuuB{Bnrnv z3_q*dI5)H=!5lnxat@y=g*PO|%+nnVMzXP+pX5k4W^IMf>x|?ATN>-#W5^!nmaw(K z0)JpHmtZmtt#4pWQzdonyWC2{4%0qFy-E6nfQ-e5KGkY}Q>L+b^6CTsT;Ke)saf?i zo^g;`Wz&!E-nFmizzG)sn&v00IA)Bd^5cFZ5e?Ino~yaJd3W78PH!-Ob74=Mf({l3 ztjv9d^GIyoB3%6QT$QxIbea{$D@hicJ|0mI}2Z`7n+fUVwqVN0OItdY^Yx# zBbF0LpU8(SucfM~fdNz5QLnIu7l53ya`1vA9AjKQWCr%I1*bu9Nq zj>!B15D~O}?H`|sB=P12;1wSddQR`=eE}dSy8yH$VpPHeF91VE6F?O6JXUDJG50K( z4qJTzKpBeR?h5~f?!P_twcm-dYEomL{s~zwya0q3f%gdPdf>l3cJ6Ov9ZvD%<~l9_ zB0wy?_66V#1)JLM&;75w)i^rhJ8`-IEEHQ~pPTH7{qs}v{|i|yelfth3g9=;1%Twy z1z?HrzvXan{_)&jmO>2i^1*^1;-YjA<}4p^0B3Ct(|^+#|Bkcb=T^7bVT^#nPIYzB 
zgG?)*B3zQP-S?5j!VWlTPA{6PP22Zz9D}8XTFUpIEwc!&XGA}qOblgTMGrW+r>0wtuAJ)Gw0XoroUh=l!7=?t4N3+}8NzVW#6l;~f$=H4Y~pFPi-Bu~^BR9n z%o3BH8YidnQyHRU&|Q84t!`deAi8dw3eiG=pIfi2TaUakRaUb#+tpZ!hwP^IXC(=X zvYmmeabX+*>5EsJLKmP>b#4U##-KOT8;wiWo9;{OVHEn48tR@Hzw-8WHfq%I%j%^O z1w{we())m?dhfZpFj=P+ihK0zJ2Js{7iJ{OB1KylJw#np~&^;vk6lglL#HsEQCSIog; z=9#3cBKbor1LOtZdXe=0lc1`9V?GGN|ND#TpPJ*pl_KY|CJ(WxRx@^`rm z!umXgng?Va#iE)#0)4;?(^o?ZDD7ofabe0z;Vp78@1O4E)ZUB`fXaDV$6Yf!xpnZ= zKOF1`zUnenhrKy>s|wNLx#vPiX#=-gAUY6{QhKhj8{{G{? zIgdHn0V1|8vF~a5-`HpU6#RV1jS}jrxd2QtBKV@*;0)rsEgK|(8u7-zg4h3t1*;*G9u_XU7X11kw`8$4Mo#~Y)9 z#S1`m?zz_B1z_!JA&#(pN~7VRg?|5E+u)et2nQ^B0*uo4ya03z;qq2500(6Hcq?qP z%{6ms6?2mNdz%XYr8TMw7qAJ(2-IHykd7CCM|iV+N>qdg6km>#c0$F}LWcg6R*_JQ z=>?BAFLoON!0(@1LbTz)r8P{d|fh+ z3%EQIwMTys(lnfdJHuE|H5E;0wnCS} zd!+sr_lvqelcivcV`p)rkV_(yX>+(8EYb6EKd*R<%T&{S_6fZ4W<8_%ck) z+gc3ILhkAPuO%hO`tPD5_UEwwIY0j!C{hI3wjnA_KiYnDJ9I~4J(sW*>*(d14fZY4&G|ljAI_^sa*s7 zo0Z^V?BDXNi|guY8+|{VMjXUawp4paRLZvHaLn|BJ`$E0e+SlFt?+b1%_@l4aB9yp zI7?+HsDb*6?nI`-UNDkPmW=j_PrS2RTNtTRNcO1Qz}??n(#f~?{uXkh5(`4Rt#&B} zvVIKYu0d^A!~LeZ?$iyC2Wbfqr>wb53r40FPCLoDmA`))E+b;2X{WXz_z^&GVt9HM zH^@{$5BDp3Ae2UNVq0rn5^C;I6i&{c(~OR|rgs+~1ink+urZZd8=r6G!k8Bu%o6Bs z&++6n;?eet1JY_C_6}(?`?Qyz=UlYmpvYl|M$7!;yPtJlI`+DjtCN`lPM}}mj7l;A zQB5y|eBk^L{N?xN^gk&GC_WK{~niN&X(Q7+~Ma(-- z8NDZ**)UE!dIO(L4m6Sgs&o+D_q7x@>kjb{>8#GXw6bsf%`1q+bzzn46ZCsS$o(?Z zAVMsn**DiIed733ZJ<9WDuyRbNZ#i)$HVEsz-+}l(p4hKF2|!c0f`Hs$Yxnj@!tGz zXm?h{N8?(=K-<7oQvO(KlE+CN1(0%P=Zj zi5JuNf6X0x`%##Ek9}{KcV|FY6Ed&{52i@%)44yTg-}gRGhNEcP*$R)z#lV4*Vz}SO|Ez zX(7E$6pHkSgi!k`BV_VZa16MVt5L4{JdekbNg}TbZ*WLeF%0@8B!Kd42t3ek5n?Vd}$#u8=%Gtoicc1x?12yGsYt^>M3HFltOqPRR94 z#Ka3<_Dh{^-UwZ3B+17HhhsPbAI*vPZwX6s-c7A|9))_(}rgR(A#U59AuaUJb zN}>?;uHN5-q#;Jt>!?YS)JF7{#=vlb-^(nB|D1E9Rd%@cS&vBS+b>S}Cas2Dn!*}H zTzqq-<`&adchjYk^H4p~+(}=2Y&MvlsfBqtvN5E0SK$UT*gJZjwrEMX`MJcpq z`_E~M;r~eZAC-y@-Ju^Mg;s=gB~s;nt_|46s=6`GHjd0}i_^0{dG+S&X_(|uK^EAd z_^0_sP)Lbl&8U`yIA>p@42h=FD`wQUGZwc8U5gJ|lto%bL03KpiOUGKqe&~__AO@q 
zm|*XP&111Z1m&Y$jW>1rH$X5h+)g(+aJFoJ3TWA9Bx>F9anyYGvUM6B*C*HIC9lO4 zY;A=K;8pr+WUI9#?uV7gsJ*Tje*Ts-Q`&XvgpDa^tX2�^#XgJLzY!n**T&)^r%T zg@ATsT_TL5_pB!eW=RCH>oJiyBvgTH^>R`kLcc6*akB=!k1z}vN)!hKc|Jf#=*!vUn5Y_Jk<1^ zo*MnTKeX&GP3t*NjRi@EH#EgPVb?Iy z`cbB6rP6HXnuNYV@m?bHZyv%>QBoQ^lXtX1%=?gNDY4QJ6->U}qN1wmh{s}s+Hm<{ zk?5nn;lZumBPSm;W1n9w=v!|XrTw7C9d;*=JBPGqro$3#>iMB=P7!k5$rY1qL5>Y(MVZ*%10}JF<0XJ-Oe`%vOTbwv;|&?2^kf5-5{^+J}S$Yp00-*Xrd z{n!*oo4=6szbIYv zTw`Z=3IBtaGP_o-XEIJkwbpc!)ALj=O6+c;i)yuu%o2aU+;s7 zJcZM9NS@j$mhYlDiiNPP?QWRMci7Dk1JnheeV2@iSrfe>6(8*IBz!6Wwe2sb`le%> zQ+mhau?wQM(ozf|{mN&Tdh?C*9C0S{8hlE8eHZw%th$!A+6c=1&aYNGo5F#`Z{M=8 zi;`lYX7H<9m~|xD+|r5JI=VVDbFk{pH#awqhMEL#HZAgSQqEeG_-IRRZMZLMR$}9h zWDE1~6w9}!Ge?ekKqq&~P44$yQWYFh-czL(&T^r7T9UJ)w>0?p_cTA3y=V&8BzpnL z4-rAu<7nnWZ0o8qsXS0DM+HW32rHV>^R1vlymz%)O_NlQDbYiAdbr+F=pLjCNU21G z0e7kFAiK^wgW{aibv7kVpEw1Lmk9LOLJ2>vCPyYZvi&&cse)CGx{dFG>SI>O&MMoG z_gsU-lPS&0gWKllP`lyPj?Ib)^GnsUa->O%by4#4`L?@p$$_@IK9WVnc;NN?Z?zrG z7v%$CaI7;UZ2;6)HK`;Nem<~Qd=KI{9bcx;7p%&ce3V(8R#Y}}*L%KdH`bR)-vFb9 z%#4JXqH+mb6be_Hhx1qFEVA>-dT<{cGER+PeCYT#3=j!#-+e7`R$mAQU){JOGk-NF zPqq01;3uo>d}iB>yWOTbJ<=~AB#2&daxKWz*B&c(@p+^Wh!lpxQWgB9aBW#B z+8VO*Q>RQ)5$1S*S>MCg(l}YBuE^BA#lNQB=u^g+0J$ zd(=4BiH!z&7PPLd6-wJs<{v#pDS`<1k3oGqw z6D$T41BgD}AUETO-opFU4+;J^Dv_yS*95_fEp;{TjP92q0?13cu&TT%ORi`B9r(2) z*mL4A-{B#%A*j*YhIOsNS$P6vMF6-Pd^z9E8sU4;7@5ivn9j!1;?3;}=@7|N$hG^p zJ_XCfc%bjJL+GkiOB^4d2Z&CjYxU#n7*Y}{dRjCp)->)fmb78LNpn%~4n3?BDoA=q z$1@?xSZQ zQTC|Yd_+VtUfeKy_=stTBQ=ofM$C21)0f7Zd+TivQ{qxHkg3-s}Xo+}*vWl%#g7hmVAg zY~!a_ugc_9J0Vp0hV%2#TO^h_P3XWhp=-NVZsoC0nv;#R0=b_iE8}s&rGJDke}yss zv-eB9wYBw3_0c?R37_EapbBWJHV+b)?W4R(X7`VUKE_jlXAIu^dX@ne zVSJRLAqq3rDRjmPaAi#XBGs#Olp};*l*JS^+kUAmIljv+cp@7U-|dPt<2x4u$39N`2#hYT~qb>&HVVNnodtx~F+BPtIHa zWHU*>GKb(-8Ga5EZ}rr|{9Lc;sR90-3ceWsx7=Q6D$vpT0K$e98ljraG#KSmdS6>- z#Qq&t0>z^=C;ox7eAI47>23O`*J`E7exN$_hYmkzYYsn9Z-3CYv~cFxSRWAU%$0** z{;8*7rAEJY{co=6FVMsZh+|WRW-2L$Jj7I>3N=TYZ4iT3*O{S_2PKXL^%)y`n&tk@ 
zvDYb>i--x^Tl{e(XtBh+;hh?>SSe;;)DNbC5;Lp!c0TsKqQh?F8aDduRORd5NiXz)B}g}5A=Q0Bz{l@vo7zqNwJ6~2Q)4nT5a}0<`qr9J7$@d8SN`^B z=$p&6p31a__c!(S*)CDR$+A9X%Eq)=6CvvEXK{4h`c|KO@!(dP#&m9O+I$d5qbB?rznfsEgc^j< zk{J7uqH9U*jAQ!|qI~1oW}|OGPLkA;`R7NE%QlE!#`vzh%yDLZHndxPW*$pIpjQ1R zSA`ID>y;bR_KQy4&=Myzlei84w4-qSTA-3)Y~X=q$CGoR0i|mpy4LIg1PUHyJhg8l zj~~|9tRU&l<%`_LVqr?CIJzCj#+LAnBSktK%{kebup@Y~7BczOjUihlkZChf%zX|v zcJ&h?k#Yk$>uV|4hcdI`?yiu&K%uNg6O4`=RRf+ljWn3c57<^>8-Lm@D51u5b zU46=$w{Y&F{&zt03)~>W02hGV5M4}B0d&^ayP)n8YB}FRjBL!?;61c-6fUb=SC^y{dZ`cB7zR zKKS9NFH72CZ$nZb-rTzE1Yuq-ggEF9`R-on-P8SqiPEnM7&JUbdt#`u#%ax>>n|p=cUV&PJn2j>zB?2CM0jO4jFP;Z~1)`z?{N$lm zE&xw^i)2Rg1#gZf<;8XRVLNV0X_ST9U#-X+3_5i+t{n_y`Zk^Sc^EoIFas1!n5(<9p%a6uy3;mIu0f1HT_aYH5+{*oCb2#HM`D@F4t@ie**brD@3?)_ zV>TAl_FVRq?s&R`NJ^9D_@D=i$`f0Y+O{TtS}nfogOYyu#jcU+dZn?VSyiV?GW&c< zwmPTowj|sc*_o8p{>ign-`u~OV3edHYx9Oay0zzq?Ykb~0}(pVNJ64b;neicH_Ej! z*EBWV8}3SdI9|+nnuItfYey@{F=ROvYH-eMx{#|don}3EK!1iK_)!y+5xYkjtkv}`$Dw>2&3WwZT)$JKXbbAz4f{zu1*k9xnD=jZ3nMCl(tKz_XO z;qkB=r&Q8tHpL#=1mG|da0v+Yax-a8K}H=_z8 z4k_ds*mfl76sdh9|3suE!1MRPs(72n82bLLOBoAa7B$?dSA-N+30;RoD88 z%xf914i(o#j6%!GxZfcqvY*7`mo_Vz!olQr5^H(HG)X-Y)&Cf=CqTffrFX(uS#?6U z!Z~kQ~|cYY&ZXoM#GFQxyqBKpXUIJlquLeD?ifT zwUCSvE@`|Mh6f*-#lc^4VuTh365qj{%}?C1Tsa|1wO*B&Z|2n_%Yx6;@^n`pOdkFG zn6Qwsw!OVjU6(LE$sl8+mLQ=QVW9UMumvc;v$!CJHb&g=x&U-HG>=avo=a+*^jp6l z@RvL_pv_NSDAI5e7&vX{V^5W^5sSuS9E+QO^9BFGFH{OZvv13yrrX)L$FO3dSF04q zBgB=#JO!DO9n?)ZJ$G&r-mZBexvj*6mWo;wA?)sCV`GfBDs(R%mNU;bY zIgT1Dfr^CsdS#2J=#Pr}>Rz4E^;~-EvPk~{)VAaFwRcI7E~e8URL4+b;&k+!IR_4` z0l$Vb`*^&_zLnv0NFidZHF1zBLSZ~6GEUJHtG!wBL@C6M%WL&xkr3WLe8G1UUApw3DobAC2fqs{ya|!@Bm=t&EI2zIcq`06MCgd z+aA;wodQ~qq-_#;r~OQ*-X|)Ogry8!_7+O`aFUAmKT3r0aa6!RR>79pvX;ZrabvyG zFM})^H*fV-%Cr&((HQX;WIot@CJUeRc$6VV>>NhY3~!B_TIqhcbZJ>lL03z zC*Gp=rG?lKZ!WS=5p~XSz4fR=)s)q~w9CGm)S7{KzSWx0794Zfh=xP}YT;;J)B1X) zJELP!=XWf#t=DveCBw?H^RL`{hQ`=jQVaPAJ#K;D)P& z)8#%h6BCtVM!KG772QnV=ne>LTvmY7Gs-K}kI z49E=@I?pFeYn`>Epjm1)_<0p>*BKN34awMf8y~>ZI?v@n8&sM*n;rNiBD`rs<6WPQ 
z>b{8v&F_zexy@S%T72-QOBY>Tv!_Qn#ERj>0$z5^Gu4QYbsd}Wq(6^up0a#(s|)>& z?&Q0hQyvvjCv{({rfRc``aWSXl>WeSSudVR$5VcibY_N8=D^T1V*;3FGfp)`%sRLtM|1sx*OWdzfeI?5ULk-!KMA6EYkqhm3!3*uXJnIwH>d|`&r zYr^WJOQY@%4>z7Ex**{ZKpNPcO|$OskO%f?Qm6wm`1OQoU1No2b&0uoo-J?nTh^Zu zwTIuAiO0a?y~soY>>UgmIV;p5ofudckZzIyKuDE!cuoVSl^o&b(NsWkid@U=F2$&#fxoe8H|y_l7YO{@odPw zox^vO(_IRAmwQ;XI-f~FXqEV|nm=&VPN)8_S(M>uwYfT*606Csa$ZZ3r~1OqlYsgt zucIrW#UHx2sbFi+Y#8AtOgIC~9KhbiT(VhbMKhk(M|uB=K}N9C_F^_rLX2d>lQ?nR zT4jvS*K4G)9={;m^^dMGJrcr_X33{M90In5ufyoZ+$4#)A|0E*CKrGk`zEct)l1O(^z*~(*4SYBE!5!yo$nEc5AdN$tx0Hx8@V~V<&>prs1xOOlY0zO zBJIvbIT6tbbA?4z`*s{+=1NCmv#8rUZMKo06_yRhP{W)?6w}l@ z1ziP8_83a7ph8bc-=`t@sKw4Nn2|OqxxPfvMTZn$nzJDbwY{EKe(h9!O&NQ@*JdIU ztw0vM*Q_L!(hB2gATy381y@0zEN%m+P8>MT=Auwg!`gkJP5y7{pqg}0d{HJ1bEL~N%{f+l>r_UFz_85=%)3Wj{J9n=|7 zBHUznxaoHa%Lm3%1n??S=HDFD=aq_?o6SGT;GYi+E9T-hR8-X&kpxcyyBTmCbs4)| zr+#H5^D(q*9u0|l+C6nWc;6_^+CgM_F;PtE@KodKi%;enF7&W0Ct8yV7eh^&dso_t zy9lwb67uXMZx2i496w?a@;d%XQLvYCCxVGkHSq(%*V$~r&^K9Ji6blz*eO-9^0#W> zM+Q;aX9E59*xco`h}i)fE+3eE?L^0okT^}EsbV6ykc~DQ^(BrtLyjJ`Xpp^RF0|B{e<_s#GTK9W6-Fhf1CG`=<3zy~|QaW+Lf* z`dx0NC0GR|9i?;~YmK=-cePfE5~<5*9AzxH({(Vd>?WusiuBiN>7AX(IFngt)(T%4 zoiu;vVVT#E*r%H!f4-9B86`6FVWq%Lsdc5UA=uH({NXRpR)qQ)5wT2PIDm}lyMUmJ zS7^ozHu?)FQUDcmx~rCa8?C2cyIBzEegSZ!V>NIy8D&3IpZ9d7Z1^5nUWHWrkTKXH z7M*4#Ju0Cnb^heFlfT*YO#W=mr&`cWb1I?b6KlTZB{(Tu2Z>6I;A}~J6s110*z_vp z#<~k_Peg*&6-JI^t-+$ALw3plf8)lOGsEGnT~Q9amnPCd)3=V>6i*gZOiSbYxo(Hh zb*3WKMq6uEVa`KIaOqSpgw1sun#4666xaM1z13xuJQw1yt-668uPy*z62HPUmL@MPzCtVKmh9XkGe{tY?U9&66Brvt9|`Qvl7E0Mdp#!X24h(t<|*x;(@NHU zZyfkg-9+>*Ux98Y#0>$VoHW;UaL6}R_V5aq%}JP()Y4NYY>XgyegoibMzelIV-ed^ z_DQQlg+Q?D6Ec>oM($Z9?Fzu?R<8nmgxW?0&*<)`LYjPabBv!Fv z99|;^uJaY6nWf0Ok94G!$(st7h_=zIu)AkLpuaWSd*r?jHfIOo1{%Mo+M&4~q1@6^ zKYkNCx*-iM!?3re1pT%Bb{C2ni=GNK<}`or8bggoT5AG%)0ISt zmPM}ln_zrt zXSyTTpAa7ln!AUVv?);J&W3oRTi|gX)P>;+Vm{V7@ln`UyU4{syu5znPn&S_Yaz_(dS<bilf60YnBwc=IRw$ex4Z* zqiP*AF2|-S=+_8VJWJWi)YZn#CW8ie8 zFzQCd5_6CN+|}S%qOpQltHrk(wvGD;mKo2COcstw7KiF3+u*S%$6uG**@XhQw(amR 
z66p$DTs^zfHU~QQIb$84$J=N0ZkzUHNfrpfhWKU%LlW|5h#K97@V1ZdW|6`wqfwne ze3QM329Tdc6FSaR>iV%U`;;Xhx1r$@-+R4#$*)^4u|EIiM*Jc>t3*odN`O11-eJ8& z3IE}BakFi=MY`mm6b{Qt#gmmPpZ=^n`2)kUBr}Zy!^QK zw6!|7&@40>((mHIsgQhDwot}YM2Y%K+#~QMSrnQ58}y9w*o3k?Sc(?(t3Mlb7%R*uWvi?={^e3!{GYOx#r=` z_S=@&8z@kum~UpNhx%xkuxUHlk#|2w6ez23)`_Jz%JD}!`Q%xiEnKt4l0|dE4c1v^ zYChlkX3nr*Tkwg0JK-J8M$v?{@gpZ2*V-d5;ac>ihZ+L`|1WF6|u ztaS2*nb$eJo?Tv)%#@A{k@j@D3Q#D+A<_{Xp_OE(F#}5Vg_q*hx3TT*4FVe5r4_gU~WZH;YlG9HKl@!zzCtlUo!b zGhVI2i!yjO9CFK@O`_RyMLtzLq(tC#;~V&Gd63yTbJdRnOW*@JwTOtCmS|!Y$4~cO zMQ*+&avBUtaB4J;I$$MdtQ(TpG{3Yg>r)$X^D21i5^2zJ?j*$|`-d=^mkZ6-j7o&? z_RAb@_f34I#=^dD_4oS}-I|b<*>4^=4?G)NhDm}Vld()_`_-5{ReW+A?fN6nJMd{Q zNyDj3??etWFH))1LK*0|Hv8oOW1R-Z4EJ~&muKa}mu+feWQo2Anuveqc%!M*G31S4 zC%2l@`aI`26>_W(;Lb2N$<_cYx!dM_pq4v*o_@zjE@ zbuzT)KvKUAM}(ShhrDJd5W{l0k4r0YkJPYOJ~i}zI!fqd?PLTI_Lu9ICYt7jt!3UH z-eE&(ZEY_Fe||+*)lq8y%7LbVYvlo`&r4Qfv0%ObsIsP%XRA`=U01_PuPr6h?%vds z2Q!}C_n{7D-CKw7!-f+UWC13Ws@>AE8PuXA%+8YJHyoI=Ycr-k18F+)sCq1h+UXMd ziI3nhHOm)ZHPFVf?vhkgn|sqvQsIm3jhh3Moeg!b>(404N!Qz{no{5~$Me*juqo=(m6n8V{1*NceF7uI*3t>{r*6~6R4(7Z{j8P!Bl z^Voo8-)@~d0tyC#uBlq7So)%ZnuS>klZ0ViMUU!f?bPaYCHwj`9#1-ltlq3bg(YLq zuVbEWkpalfW@i45>YLkf%u<3?fGKg%yyp(~w zT(I(!mNRZC4Ik1LF<*N%`&vPk0;`I_$E3%>ao? 
zCjI@ZvHY;rD*UdievZFK`u~>TPZaY5OMn<4dW7aJvZrf?2{)RzXg&21mrk-VxmB!A zf1jV&J7a<8JX?uPYavwV>+M*Ks}D<&m_+95o%bHX_DLU-u6%VjVCkV4@nB2cdHZNt zWmzL$4JFjd)tYWa)|jH#Tm^&IjqVgSGzGfnH`Jub=Gt*N03_~9Jtrj<3P!TG?OqMh z(!G87UUXYb!X>idOUsvc(=*e(H<{*Lo(K5K*vz%C#mGMN%58InJ6J~d}@kpg?neHXsfg1e7$QKF^NqcgG!qAoJ=J;WE*s;*;N&VEEQb4@ zdC;0l<$ktYM>m@Vj6&HqYM)t+L`2Hp ztyj#Yi9R(4H(p(=zxtw~GkBR^A+`elT8%}uxAow}o~5jIVQ}N;>nF9-v3kqsy|As| z{efE{!l;kcDBrYRER^uwgQT;O)IE`b;=m|&;Lf0;>`DuBjGtp z;|y&ftw7Fd=$o^+jGN1U<4bx{cc-J7?!(eE&sJ!ThY};kXun$uPOamuRy6IjoiDD& zI~}<7Q;91>*rtt<$NR@bj-Hk>CqH+9aRkr-N`R5Js2P_CJhT=9rLI(rEW?tv+^lp~ zzeT-xk@8%Nuv!d8jA6im7*4{EWE5Pk>l#f7g@2L%@^HB8UEBTEyTWo4l|veWKV|h6 z*(5M@3s9DMQ|G#5XJ+5S)ORDWv4UC2(kj!$6FVXPlL>{m1K!%LFd7V(PI{ZTQ*EL% z?6P^pux<2F7sM(0)0@V{RpHVHAoPui0zFVFRqBoOsAl?47(0`R!&wn~Ml0^7gdmzPy$9`Ie$OK?jtlY1HiukNM#u}Z;FHld7&3G#PpgLsyVt0h zk7NsC<`}d6O%~&|>eX zJTW+3NI2YD+Dl-5G$4q*7P@O1`ueoN95<_bKxh2pbQe?^tNW#P8!eK#&z{2 zANP+jm{l`O@FF}sJ$W8OI^{u&ky9Dg%1a-ZjgnPT28JnssXY2%qd6^G>92*BzZ(qe z=ZnSAVyq#)Jj3?~mp_SzJM`f9ZI>)0((@+_KGb&UM4HRPXDrlb$CA(kLHzg(M2?p| zT%XEwyMSa$ECE~9u$w=&^BWXmKTq^!fsS7mTn+v1@-6e|@sRL@LsW=9l2SxtmT>Jm zBMxUgs&Tu=lOjWsPwVJy;6$IE(>YrFS+ZQQP&_`_&KK?A5o5(c^kwfca5xd;;K6{; zxz|UNouB;5^dG^|0@vh#7!qsTT@lf4do@ou>ie7s{&aytB-Poz2ksiUT|S@=7Xx_> z+#P^p@QMDH*$-IRv4qfHCkLe4Rp!*s|9(>-BC8pH2!I->gZ+4A9?OrnjPPiFjzKa60Hz{yWu@DINEp*_y&fz;snOfV|Oq&%yO>1~HXBHWM|4?@v3wsdSB{V9OPSY8n!|cLWa9)VT zW|hNR>3tbjW-`{FO6tO2e;@YAl2#dUI|yZ)lGhf}?0DEtJ@q>F2Z5P{o;8VVpO<%L zQ*b8U34cU@k8F^=4l%~uwppCKiRO*)@&PfJh@G+(Qn7qpzqSyH8GH~(k1&E?^97U5 zX&~m~GAuGk)=ud39$G(W;EMmoK`a_dg|PNAaxckUH`~r!KOM-Et{pti$s&#c6V1=T z;DShqgNlrB7o;W+#u)s79g)@u_R}gKzmqDzkw`J(8!u$?%EHO`jb)d*s_IQR*;+uE zU=KdU)_ynQSlSJoqY;WY$>fd~U0;YxnJIXtYX{syl&bix18K0z_Uy;Eufa>=CMqkg zvsyYKKPVKEowW8{06g*h_q+CQOc!J9|EWU!r-u3eO8w(gNlKl}6nGa6o5P21iJLjm zlLF}m9v9-}RL5^)9xtcCzvR@q_{#YA_h(`uVzo`?+wSta)H?MX&gme9#|f`^?~>*t z#pTRKJ7rH)sP;4CVGGOYcD>~={tcX1b4oj$^q@%cY`*?9mN3J&Lc!gMimLjE^>m^! 
zgdab37q0W5d7FHuR%1Oz&hHpxW_@b7eV1w`&Sx=Yt|j>X&_NmbfW$fTcNU>1XBBtD zCE1@7L4t;%ERslwle&p;Bcw79C*mP{0f;ekIv-8m4d%6Qnl{42^pF=He*q`s_z-i9 zh6@^>_+U7^Gp&hMN-#Ybc)+>Osc8EKd`)-8Z$oI(v3#cKcE@8+nob-0Z<6N{;{%ep zAoXfGt&%&4Z9Zh`Zj>)EDzS5#0fnoK>NBq;+hTn6j7=bHZFYj}#JCFYVa;~&)}->r z9R&Oohjh>iO@yc+5~(m%x+$7yUI=Nf6F+B87EwZ|{L7u&RwRLEA{e>r{W!X@!^EpQ zoAmkLB)CSLl~fj|V90FW6gy)@mmd&w(Ey-sVD9>E(7Spqx1td}3kU2SNDSlhOJvk* zF6E(+&n)cGct{|J`8y;qf};}uYC*w!gm`0^Mc?($P4KAU%I0rY`0r|DTeRCj%@cz9 z4*O@^@sFMS3+|}OsUO5RSpO54!Q+mQ|0(Vm`b7@(H{9{hz5b3n*2?evvT48Jj^F$J z4R`n$U;j@i{=Mhlafh5x+#h)DpK!Pt$qpOonr2bbhx3*T+K!%8da5p6KeYIH~oGqc~;FS z)N)Fvex$Y(@7bKt-Ngrc2rdyn-lGf&UG#`#6TsM&Sz1gF?$SH;VH4wYAi+;#OfUxn_k`yn2t6G*OB8bFVH3bTZ2eK`X7* z_6F?&6k^|H{>Xg{X)g<6^-+?-+(xZ-?^6s?uiwrbBNkb1xMfieUe9^FI!i$D1gJTalco@8sR;M^CBGga#q)6y_%v5jlsPU!kyN?Gy0 zcm;pX&i|uk=YMz2>CMK2hD3&NJw1kTjB?i8!!bO5VjD!FyiXhKX{fO^T|C%Tgl6nu zbXh<9j6VeDdCXRb(3xDwEmX^rS&4~O$;NHgZ@$I%Cmygj5bTkS(2Fr9=~;~3rY%~& zBndtX@~eXi9b7f_Q;^sFfpt2&YjMep1xgt}3EEvw)iG}TI5?xGkmt|t`}%kyFWE5}s4)S+*7|1mi(_a2hI-G*00i3Bx3fJ-blTJ6ZWawCwv)`4 zcm!%4dJJ(_OT-EP0v~AnTH(Ky|3&uy+(E8HOR>=CFww-L^p~w#0pN;^+t+Kp{?z7_ zZ}EQ$tAmC^;Y%nH9NyE*83_F!?0t7!6W_LP6crT#r78$fLFphUptRTk5fSN1i%16n z=|~$D0jZI$2!!4t(gi8fyL3YDy(ZKUlJLggIp<#WHqY;K?z!*1%O8ltOlI$_z4mH* zt?ydYx%NJ(*|J`WRbd?tCMMCh*!}Hb^H2YNAy7|EV>UvIGtF3ZK3g}k#jY0kQ0QS! 
zootzNkCW^&7g%=;XyL(7LY#W2+JdyyjJ;0mfJpffr;i#~$7)FqO|+3HV=hl-aM0`B z2M44!7YoLKJW)ar5xOWq^3>gVP;xKfE|++v2uNWA6Y!tkDm40!o}*v81EVR`9?dU} zIc{EMr+3C*bL`onOODPft!`HE!=iJxJtAK?xbosI+a%0vqvG!UD6st5{Q{@WPR$JLg4P zKuat(%2~r2iy+AXKEPs?;=C?w_dL*eIfPU(ll{Kdw@+tTqPz!tDbYbcIVR^8qhVxy|QegsnvMN zu3HK?J(qT9;?KG|UeZ40AI4R{7n3rSuYr)p?v8DwLP<1!SKEyEj}k<>k(lywYM{MG ze-GjY0#DIukJP2C6=B#;=~ z8Z6Aaz}wDwnI6R6{P|6?1Ob%C=!Wp4J;t6#_IS0$;l_Of&==4*hkp?I9+9^|$fg1E5(dHCC%)A0mv8>-IFezC(|XH#!>nJVL4?p>zWJ{Mxo{(!s`p6@uH`?0#HGMwZ-`V5 z23f;UxL9(h@CtWeO7nG)eScZ)+qT%|vD)F%!Xj3VPr+P4LEYi!!@Q2}mes0qb^C`F z!-wvt9FxvS@F9i@H!CQP8;IFGpsb6w=2cFyKVg%CrQc|E*Q}mY5FJC*yx2r~YZeq1 zj}LCO6n5OTVbhQHPds=A=9vS3VgeL!RPm|zUo7iR?3_Ml={s|bs%wr(_=&lz46CI! zR^xu1r&H>h+yWv#6-5gg>;X8?0lh5&-JSr^^#VYblad*uMe=fjt=87>L71`ueU%;f zkkzv39z;ko*B1-JnT)<3-M9iuZFhbct_Oy?XOsWFaG6c{&j*Fq*#aiz{>Qc9aVPV9|f70are2mzKk|5yWk87d$e>Exp3$FY}^MR1!0BkImOK;-QN0x93e2iV7`lFP|4XFG89Tj@QUW!H8J9i(2YlPkVE1pUyefVJ96?wxV(sqw(Rd z^wqhgH+@KEY>E7xV%oCiD5Lo{-Pl?BVr-&26%)Wlbwo^Kj=?XX z&k2yr@33pAQ7=^D6+E|;E~urZ`z<2RrR;Hl=rXs9bZ&b$m@f5h;n!pLKYI<^`^WWL z$YuodKPg>lCsVg@AMd?h{#w-fy7t~=84mbJoTq;SO&67!<3sBcw6GJN?f zNp%!w(B?6^z6c`nzCw`bHwx%cz2MpKl`C6xdyp{bRWQVO>AOvh)LJE+_~UT+%Gfpm zKMIV2w!NntpENA9hh)TBg z$MKrJQh6FX?_G`qO^8TfJVc5bXv^9r9f5s6!awl=wA|gSDlmeeaaJB4kJz8^K81gi zCFs@R{fy&hy*W#wbQ|rCf|bGV(A{)On~@bQwSQqwu%g`S22NoQA}MS|IIj5J))gYJ zvf7!?0LGL-yBHt&2XA^TfKF!Ujxp&B*|;cSqo)GH^@k1J06kVQo16u1J~_~A%~<#g z<5OpJkyNI^?L_@%biGR*Wb?$oSh?q1FM;6Ekwv7#pqa;nO8a9=y_Z5M?kI)>Hf<2SdT#qN6e4(`w^zt%h#cQ|m*D;y zOx$>eqU@g^;p-^9ZGJjXL9Q%s+me9-kQS(*#hd#ih&r$2?*hZZ{tVyA6BSRZS^pAPFro*-+igUBD%*b323tPhN#BkhYlxi;6 zgG8GH)&FJ%vpIY-2=z~^mH&6G_@8FfdoTm&o3BJ77yugV{Jz1$f81anUOK{cs%zIT+5z#C0t1yHotqJAU2u0_uk35?|Nl%4 z_3)3o0t9H6CBvAh%~0s|ep~Uo-V0ze3eH3<{a+2Wxa4_f2ZS*zGD%C?)Z%27G>zv& z@=qBXN{=Ev|As&P5BLq;`~BWm|Dcfje-w)Ud;d;0h8na&d4LdBYYYpF zG+-Sl>&HDvVh6ebC>({8??HxS!9v!>#WpYKnu6=aJ;)PDQ4rW5A$y==1+x1*u&k(B+@XH9=%RiVF$vmWnz1*}F$94gPZglj@HxkLrBm?L^$GWaSqFUm}B 
z#vYCjvB7r~yO|s?*?;IiBSAVk&HiE(w*T(=&nD4&L0)6QCmnKFzFxbws{U7+!dOUx zV5X^i#soD^R*F-{Eq?uWu&Sr)TfK*u2IRSXua0EX-wTO>G2y$mG(gM>1ikA(>q6r= z3u^~2U&r#ee|`{mrYUox+46Mkj-TLiRETaf)^W_ytDNs6PsX5k7iwwHYGC}vDm!@y z?LnCr(~6iVjIC}OSfeA0N%k=vUp{`gC^y$%mWJvxi#+R6eYj*}t7A{?E!9wE^x8F3 zy%eTwh54P2p~@gDfOCG%Qq>^`t~|&ako>>?4Z=tFF`_r1dGQ8?^rh*|1RH$&!TpE+ zLlS%isq^>rM)*yT$Y{V6<~z_<1$fuPF$C)}V(XH+($xeT83PTQU6(Ogf2by&a|7j7 zVR=ijs`>mLMCpqak=7di`0Hn;)8Ecj=~)ZTyh#pGH2QK%seqwlO^%pbejRAP1v>DM zgai^Z&twGg5HFZ8F1DlTOX}T&Jan=my)*^JPVGV1zHQ~LxPhh|Wf-=Z^qK)o;b(zj z`r&6Q=FWp*h?h#}rfwxtEM}Al=>(l6Hqy;y;~{*?k1vId|1caXEH#06bG!n7APM}m zEWu--t$0(BAfiz(fIkWvmSc2wK((POhy)eu31;%k!g-Wr@gCK{1{v5%4}#bsFF@KZ zAaT!7f$)u!IpBz#V0OG$6K)zz4Wt0e1p>>};}M(WVAb>7kqFWO=o$zsJ=+8OamF1# zeHqs*Y3KqqCD?3-Qh;y}HpSZO)|L+ZIOSg{1nXvH2B*ga#~lJ1Br|uBs$it%hbhbc z_-*Xh3h01T2~gkMxLSb$-Pm_7kGG}Lex5Vp$M3T^ZN~Z25}ey#fgjlk7wOp~GQ0vy>tWgas;vfVM z3hYu20&^dIcLC75no6kz0?zc{&g|RtkyDl1iD+21b6u2n1B@}V_dvJPh<5UPipe&< zF3(X=)Dd*569={UyYCb#^;&!le-KM!-m=-&KRE&OCWzZlFwbSa@6)6jv;5Po_#h z$pZ!WuYObzTfI-}Db(_t@_jMZXK>+KpJ5cgl+w_vkq9MXbUC}J`&nd-#^rumQ-c!E zZ&49<2mBc_$Pb$M7dB!gDsRw*{Oz6^0uWaP65AM+IiYFYxlJe5c zos9;W4`YZ`1G>W_!-*oV9jBFyuB#jAKEDoG_gx=7jf3f8pl>Zp1tzqBJh&$PQ0$6~ z#(|&??-QAcC#CK6@}j6Nso zYcwEfmU#0~Z%4R0H|EqwI@`3mEyg^LFkFlO9HgY`%>DAPgF%#{tWf6MEh-_oku3-7 zj~*HdDd}as;p-h-x0+^Y!M)dY^6$yx|3^PdpA`!tE!cwjwx$ zIzVbTuY7W;D2sjYx!~IQ#zy*8C07;9Lt?VT#fh2T2~uSa2LEVS$CadNd3C3~uG$hn z3;F9a2bzzI@l070SUhWcQ0j@(&grJhDUxD#!})V*$)$ty2Kj1CIUCyL*D%DCO6cJ^ z%uu~`hRFzLad$TFi=lJlp@lL-Qi8qZ?{FblcvXpKgm3BNaRj}IlJo=%la^{O>6=kc zlIY1qD=S{_z=~RAsDAbUVJf~jCyK(OM%~j?VfdcS0H4$gt^%W&RvyV^be@`K`*xeZ zyI{xWu=sdExu0`H!Yxd+XV%bdhdwJd<8`(tH+DWTUTdhkj!G}J#rjp@eHUY-F!jE6 zJU27uRuA?yCg(?fk-J#JrX|NQ%=Y}u^{>!-g#LJc|F|$ zVUgYR>e+5qwpi+F*Z}gtw72?AM$g)at(?Oz^J=Fo_(`zY;C270)8#F@3k_NpQR+7@oXSQM0a7G;MJ&d=nJ7evHj^LDc4Sb-GDY?j`OA}tz&dV=y_LIF} z@aawRLTlXv*xA|&%zG!y$R(6TR;{7FxgpDRbWVY1??v`Ew;PYQ+~A0M;(%g|FKWIf zPwmli@qL}8a?x!V6%2OJ1C?7Ue|E3UD2yc>Nb$GkRuO{IAv2$$RrqMy57M)KqcQRVsISj 
zjkC4lo_!W1Z_So)Vr)FrLtBS_^R(+n%G-1~R~h*d8sdYM?yYFDf5|=jQT;oFW%}J; z5rzJl*Xo7fY2^7<7^yrC3@Fzc^!|LU=Sw<7giZ+;6Hl1J7Y>`NKL{hmsuuu}_1V8G zXMx`=srz@NoB6cBK;HQF+w$|SVoY!^Jx0dD2+YV)8?w7^gU_pR1dNupNri0=9rjD` zn&4$|y%YK*IeWy%IY>>Hk_{X0yLE|(Bnfdz2)u(tXy*d~e{jHfrj zaYE1;)^9op*7!1S5*>A&0G@sowwel7{glAQZxEh>RR+52Bs_=$;Fmx&Z-mVWMUYN- z?|A1$=+~#B^uVM@GOQoT)1_^90Y>6S;2y$GSIlXE?fc3Tz}hD8oIrC;5q|Glrgo^!g9RDl5Bw9)F?FEUhc<8da>gF~jUVd0&fn@OmEJYTUVS99kgYy2^APZaEf%bjJm}pV$Dli> zB(8&*@E@VXlVKfVa?1X_OLg?jX`BvMVLvimajU<;(p{j0lT_oq8((A+dt(3ce;G9% z0VUiun%YWHFAsx=`;=-&FLQN+&E z7E!YTY*}FOy8l0{c>T{7z5m(I!TuRlLV5NU`tt41F7^ofr7kU>V@?cLKOPGpiZpl3 zYk)%Pm-!jVg&oiMH=n=p;0}951uJIlJkXwH4^{?) zOIJ6CkG7ho&Qe}+tB`iR`ETxm8CU2I*qnL^n2VkdmI~XsJt$i9?oD%)8jUYM_iye3 z)#h`^4IUj}OcTJCNTq1)++I}ug0$*@pZxh2Y`kDY5l^38f#HMUhN0k`<2KWE+KT2- zZ-2Rk8NhQ8kE7u@1|-1}k=9Fki0|Q_9o*2Uf&P38XW>Rm@-#;gC=^MevfYaSr<+-X z?J}MRtWf#s)~_U7u&)S}Dv&SUBK`(_J2u5eo1pvT{oh4iYOn^ve6FmN?{C{?kwS%- zP|oFghx|6qV!YUbX?ELaj7Q;u<>cx+HpR4=$jY9DOb)-4j-gqYIf`+>2~n#Fc-n<* zd38JuTe%Nhe#~CUQ@^eD0+lUw&p?)th46L3!?P5Du$@X0#(F~3n$+TgCo zoFfg1v-+|GGx1XmSxHN?P<_V2wqk#`#EZ)YMRRVO(ylz_yN242WX@p)-pt4sjMpf? 
zmjQ-@~wAB>=3?54**A*))MIOFrK{#j$_ zcRvUF?V{ZO5~cLtEsajjN#-(-#Q0nahBvr4F_5W z4Jj6^@HW`HQqgA(2a}6lmbj8|x5^m)rGVM|uB?oM1m`jwjuRhStzJFUFKeDSe_4+E zCF_hGP3!qhBR#wwii{Y*dqMHoqH`K^N^zO0G0f#?N=5N?RPG{*R`iSu=Uh;$_iOV3 zc?ZKih~8Yq1~mw`QNaZ^^!ed}K26$2Ct6(wzQyVXR>JMVch`{PvOivEhAKHQ?BD;9 zB>VLFfm!|pe`uV`&o4NnS$+20=32YJ+QCdE>ApuG9CQ!55j!!wBULE{=72 z=#OOA3|09;!6vXvEe$56xS@zUikO5SFS71&?%&_1&yOte-@qTurN$Vm@$Ep~#KIoC z_iP_t9kaE3&6OY(7Xgj9w?kXyjPL=Bz{W7AoH#T;yHrq`({OqYt}NRY7$^~VsZa21 z2$c&E^LU?1F}2$pFkAAive4I4dOfs8KE0yjWLgX}V3@YlkE6nro5ca=$fA_ES&>3nDATW zw|@CUK`p=_^T=xa4k%1!MSs^C_#b}`x4qQ_#;3j~?xEH}3ZVPcu&c}8Nx&bn2Z7Im z74+Hu0@$Z|J|sFs0*M7^@BwqR(QlymquJj+w|T?YE||bJ$H_^rdGaz-uvbi$Ui)&U zp8jvluRO5#dsQg=ED`_mPq7!Fh?ppG6MK)(f^dG)?``(gxn%Qbsk>R7=h-d3iiaM& zd-f#QcKa?$92bD8uZGd)f3|BoB{Mp=mYJi#z@S<=MHc9P?Lx`r9<}QucdJjyePi&G z;Mvl{Qr8O5&5v4O&tj=5`)~0*3t5^!8(d=juIT`GLh*w6Vq|N_MgZ()7hiq=H+@ll zL4dyR)zil_G}WRXr?ocnFy5mQl=}iE{6CKT-7xc?dG2>B1^(Bt*Ho!K;G&;0D67X2 z!nZu7UjAg^_5EPk3i@93sY&D}lPu9Qt0JT~G;HQ=8YYit7plCPZ3f-O0G`l3d<5~x zCiN~3R#kGG$I?i@DG&hfgJNbRZVv*G+BtpA4RqzLVj} z%CtHKG*E4mK3A+IC$6RmX|fFa&nz^J%zK^aV1GkZ8wy=i&k&UXPwpiVM3>v3EIrY#87pO& zv%nt>I+%n(w@4O~qJ}*Z<7zuz%we8!+Cf({q3~jnc63uLak@>yhG$_tYpyzr2o@(5 zHj;$tB4=LCDJ$G+$vAlQO8skU!trQ{$z%7lM+RUx8nBCt*^_it1dreDhn=`x5m)Y> zQgc*t*$eiq5{3`l4bv>|wC|U`InDNpAlq4?wD_Tmp8!o5@MF0&`5?bY-RRV9-n%bz z8m}EX5u$lpyF*doRyPR1YXG89r|*nVc}ETR*wU9XLr8*RVtKkdMga35$T)4l^jXNLnb8FHPLND*``gKj6qhFMw=m9} zme|U6nK1S*l^$QEoYVC3L0w9lIBM{v_+8(vQT@nDW$DPrv2rX9T7<+awCibKrzYnD zWN)+=c*_a9iFEEv9#+bJB6rWD`{2TJF1}J&Y=4@(g2=bSmkWwdRWgxL2pS1PHYS^Zh>^^ORsT+qZhwr_MwtY$BOx)1MbaN8L*zaoHSSXJL)b| zvk7lqm@5%eI9&B;khid%7{hon)?~a+BL7-b^0k3$Tj}8>Modpq=~j4&g8xJ9cSb&Z zld7R;wowpsvL3~#vThJbZ9#*_;IHPF#5%|be!MAA|}t4h?O;V5s@wh z8@|Rpi0auf$8U_+>m`q>{%R=mKHU;14qf1?%Su zDY+`cdW;&L*&MNkYxEj8^COPsn>@2f5INwP_DIR~<>U#ruRCN^R6|Dj3)~8F!}Lej zvTlK)2wpHA1Noo+;bE?UuaT93DMJE;45OSi@%6VCf}VULSjze@`;GihXHdlf>81m!m)v9~#O*eN zUbeN{!#oXj%fzk@Yj!I64i_1eX>p$nok}?D0FN)70p5mpu|yt0+NTN5jk~5IItEcw 
z+;1XLhyv&)px@cPaqdha^r1Vvsa&iGI_)@JxsC=lR`^f^$^qz(;NoqznpJgB@YCk) z767jn3X~^EbzLN>?5Ez)^^P9k1O2qJlZ$HEtQuOth&)}U>RQ_vmmWsLJAQh(rM~#X zzfD~qv(>FE>hwSu`cunhAxae35Prw46B_mm?* z-^D%1#z}MGayAoklB9kqn1qupvNYO8uRD0Ra|z7G>FqpSF?@p?A1^QraXZ2jJu@V$ zGLdnQqKPJCcc^m@(kTB(yIfpGu~*Lhn0t~4wspuuM3a6hq3C@hQG$!f zr|Lb3&g?K~f%&52bC6qQiWB-X71d1Epde-@$ivkT)i+&-kP8uGPYScJN36!XEcPJ7 zBYTi<@i0t`!uIC-^Q=LGTtr3Tjt=eIrt~&kUkNz+=UfCSsL3Qp5Pq0bVAme@ZT7NV z8*MYGqK1)-RGgl48Rz&ggy>$D*0?ulqkP@+9?1G)NOv@_E6RJ_P_bj_yRw5-v!c1usrlaGPShy%hQ`qp@6IK zA2We1pU@x>B1XPx<3y`dNIEz0SEv`2kcV(idyr;wsGK&SxyYG0U9}@V`qyttm&dC) zkXgQE1I2jEv4uYF#`{cxg~MsUICUO;Q`9kN5AsDEP|>lcj~thXcD-gIAZ^uD3z$6} z*<{`snkM(MR3XX~rAd>PMklx??_y8D7bI>w0gK5{yoQpr$!A+K=y;lJXOi@e9L@Ui zwu!t=fB2m3Q|Q^-q!b-l)?h6jV{F=yIP4MD;681j+#iXjw4+ncX}U-~HeoJHnrJ1_ z*xV^q23W4acI^ck+0MAiPnrOelk#~VNuOJq!6diix#hw=AtWgR?DNDOvU$}Flv%#m>aW3~K595f^tk?NC`U&(3SJCFn_s^Unv5 z#0OPu8>ruc3Hs_mFfwMDy$l*Wg-&emoP|8JGTYv!5$NaJ2&w}Fec>HfAAI>itt$CW z`Ax)veK@{1$Ptk{GT0o`$Q`m{ES#b%5Az^gmy*l9+qlZ055ui+=Q`c*Ur$_c?0qzs z#>Ip4YPD0DXjus%lBj5P#+BRlAP1t9!M*7}9sXn*qh{UIXH5gPvM^V^q?kPt>!#q?IY~HVSFe&LOL$lcU^s1S zoeTX9gU~=_sPWw9ZuEzFJ9Vm{2{#Ju(_NO6LA!Mi`VXwG6;iYIl)-gg*X!kkhPhsp zz9t!^X}&5Ors;W4c`AbeX&ehSNbNQF-D3T}Jx5>TG)t5L*+ST&_k*LF6#&arKm%(% zsSFJJwI4RskSH>~)dwo&pg+8yza|a;Nzi#RKdgEgpf6k)5dTJW`xVl~KT}ZPaKno2 zR~DX+MVQtPAD2rqL=v1{2o16H!In_cWJUDRlS=6bR!xP(!!P2xB&wOCj?uP0(UA0M zgWfAkaYEfM4OuE;@7F<59KI=-7J7S6hGi=&+>v!qc<@)SE3MH{>y(V zEYJB%IA3fNG;C_RCBzdK6lU5WkG+Q@VZk?K?_aK*Kqf3Q75bzP%$3xDDKUAam&DB@ zCRE>@cmpCZFpOQ3w3SIg&5Mc6;K$W=Wh$<|**YLAv5TevUSupp)_JCC-*~npllJUg zEe9TL^lVkH@5vKtXPoPxNaghV)Cr+kQfCsCAytlz+^%YV!Ni?6RxM za1pD4ue?6S!TOa6v^5n~>U-1LA3g(uJy1Dqi#hYW|8*2u z!X+kh0+D)mx&jv|8TEx+!X;XgzUAjZ5=gX^WF&WQ;@T+K6!n{qM~vjR7~dJ#_~mxk z>>M!4_q~DcFT)%2SGKXvDoWczG4D;F>l?7u@b5i&|IPS$5l?0jw*UE;Oz3a^WLsa{ z)uCPg<8*%>A0))}$)WkVZ2cc+`}61^h{xIg)x??y{;~{zBZRt}SDJoWTyT?GnCjRh z8~G5c`K{R4e-)**<=7)3Da}(lL)>!y%at4(Xyirc?JpRFR(7Ae zK}qx{M1lMUDDU8Z2&JLC^1_NY+L{gQ0VS9BAQH;f=61sDTYSC$VKnd5@S#FuNjrr4*gTeMlnlm7L1 
zI$gz3`h$p2!xcgTzX>W-2^gS_z{V#n6zujOCv3=S<5`6%MYj63FTe!J3CGNaSoty2 zl>Z%vm4P@S7%G1s#oxn^Hi0|$I^C71}50-|zh;Y6GjH7bw9eN*xpGjFP?tx9{#R*`#(JP^#ZQJP4uXhpC4O+ufqf-v58q7qW2`8rl^l4Cm zNya^xig2ERdO-^AWoKO%{r$hYprs^dmaFUS2&rk7!tz#kYAOu%v<#oa^E65airD^^~Dp^XRzx_IW)b(U+6$oejO_Yd zc)wFjP(^~`nz6n#=v{rB^B_L?j3DA3sdxvv3KASY!bSq>NzJOC^DZv>C6^h~?|dRP z*8y#5ZY_%@**dPK;|}wwvY?j#1bVAxhFB}F!Q?pViMyn*Ql}8kh-%+tuoE3_5AuOp zpq6i5eg&AaQ&nfeMclBAd;|4+rNUnpFDk!lwTn-{qwm;89V%zS09$JyMwRHB>>`id z4_VSifW=$|m=$9V9#)4COh|(rz$^(3BD`_ndXWQ8upJ_la18hW)5sv8R*a(QKM;%J zL4SjYB9aub3A{bjl2>>U15IzI_+vB13!zb-43c#xVI9;f@m2#wwPlN&1g8+e~1^A5}G^MtCq2_TA?RRAxu8NH50hIOrJ2C--I} zPE8Bqa#Yd83h4UPM<-MUqRMALJLVD?rnW>k)wi>K?|R=g=|)a^T-phIp@7amlHzUD zAk=obhxidbnDhLVrm5N2aA-x?!qO_};M(*fHWNW3qYwnRR7w~vN}k&St|*ZTr(s69 z?mo?H_kn(=>`z5m5i{SKd?$hN^JwrxO0oCdMhxJch-Kd|R!-aboCsSQ$e6H(Z*2fAOR3qS#(_z@ZF=OpuSUTFt%O~U+D~B4E8anS6a6Y#p)#Yq0^*KB4XennQA(SHi4@GIAOvnf3*&FRB0%6JOZ6DXyX}U%|8r0QwJ{@aO@u7~#a!|VS8OGw-_}jpRm(Xd= zn`-WJWb=v(JHzn6f@=aRm5s!;VE7JaF~)j;#yv}*QlNVZR#2++qBcA&D@!KbwZ`>|nf?oDxQ`^MeCpj3n!ptu9h;a-VB4x_GE%h&vNhwx z;sLKJLzN`YvJrW!Bbrl>( zapDjfpE^s4BN~I2Pm9(?CdFOrbov^;i5X>^YW56{nZ3DVu5T5b9_ukz+euh0<=IC2rp~vsXAmr>ZhxrOx%cf2ZYu*c~=JpP}+b1R#b5s4yA*j*~ z+8+-74$Q?sY;s4a;XEL6FzOKtPou#Z^mIx(-Y0(;>F+uc z{QfzOET>})UF>kYxE{d){u`PBQfqR?9tpkOBRj2dxym`7bSp7FsY<|3%1GT@ zTxPeu&OM!*p)()7+xrQbPL0XfP^YPEkElIFx?7-bZqyRqp0B>!dl|ZGR57k3>QyQ= z>hhovw$%t!70zY9Fu)yiRXqplu^y6)AESdRnRh9Ov^ zLH#{e3)QIs+UmFSPBQfx+@mMbNEQpsHXlBTyltemoA`)t0EW zixZ1wGo&qJ{-7j|;ruslCjrm^$0hb>((C$!qzH#s5yBJ>78 zWvxq13t|VCxaaIaIwu?rM^`Ckcdnqp$Qkkd&dGmuz<#foCY!Auwr~}(OQ8jDWRHAC zfD}vZge`%YkTC@b*m!ONZ;=8}V%m+~P0nnn$_w;7=!!iAsk9R;&B(faGO{Y^KI8P+GnqhzPia* z@Too!(q=YOdNE@N$8Y5Wrz6mgn`e<%ZQ-xlYMKs!2-tkRapq+)cV3=UIzC2QC)buO z6@`VqgzMt`#-S{J9mkz?gfE)wvrfVW$JS(^&j<5Rw~=OjxL4yE*rL}3sH64Ic?Z?= zBHJIRw4C~&n!Q}CJt--~oz26_3dV(`cum}CJZj20+w%cf4Yi)t_%8GH86jrBHIbyQ zYhRBi(x?o%bWct~0|=7T{#W5==QZuz&tpcs1~${Wroxt1vT~a}DyTBEArk9RSY7W) 
zMK3re+=*EMQ!3`yXT~I$8&mx-fb4X3X5msuGshvC(rjnef@xL`oYsUnHM#2zEa1Co zg}Ym-kb$$Y5E954AjVwFN>I0^dzfa^^3CRf9LFog8&_RIW`G7$@O6 zSq5AqT{}xL_u6+F@_eLJ%78CjBgiqp2TD(H=kE;28HPsI}_}&$Ua~T#LmDnDV&%ft@2y?a_ zDYN>Dq@KbUd)f<7mMshy#<`^G9(&_*kYd)DqR7_RXvtV&_u&%xt6FTogg9pVYaxlX zYeSN1y{3Jr@sMM%aCV3xWnTIE+Piv=(+_3j7PXNz0>0qsR1M&ft7B@OT%T&Hd#P*5 zDrUquyi=X_ysx~YN^oF}B{};E|CusftNtw#x~iNyw?nw$3A*%b#3`+jHY1M07@lzb z3v!mx zlXhXo30Z`z_1GZG^@%b}I8q|K^^;^?Mc5~!u% z!XuIVPb_T=Utc-5hLoDgbTS{a=zW#qbfL+@)Dm7Lx-FV3x8;U3+r8lqp?<^Ws4M#^ zLH2q2p6V6rqwBMe1e`Ic5jC= z#v0E6}>Tat(m4eB00q)wp-@o ztVAxQbU;}ZPiqlTx4oL#c^94Giz&j(mCt%Bn!nQGkV?4r&aD^Q z(G!=Qc4W*M9}{CF^sV(WoaAp{-+-ms_6j!(*z_;H47UrKqLFZSe4m(bx$XXLhaH)# zId)YEQ~tS}3bQyTvNkWKmNh+8vfQQR(~{%$$fD|0#K3iqmo6$x1Y>h7*#u+NDC6TA zJq1~XZUaHuFZz09Cjw(xy*o<~bgm5O6$xcm_*j+=`hMA$Pp_+p0+|eWuEQnEa{B6q zePeDnSGCX}p73U|6@ew|v3XNTgx8^Zjn*!dA0n5dEiB#?6PM~PYdxJT6x*bqMMKYS>X3~`&j#4fVn|8P1>!#C zhRBo}=LXBaKKS9oTD#}eC-$i0TQF7}P&0C&QyE<~P~$A!ZR4$PZO$@XI^SN;Zsk!# z>^)K{)3s&gf}mK$I}~T$vbV4#jjG7|Q3w|n9C$_T37GIH zK2)TgFF|dwkW%7J))pDM<|3#iLmPbSo84L->)ab#Z@7Lsjn|!vUZ<)Y<=Rn4SaMAq&P>K! zz?GEb<5$8GO}Cs215%viHluH5i;77^oXV@Vym9`X#FOy0Yr-ld1}i^H*f>1~UadeY zucN5cuHS3K7{IR1;81)tC%$$)obMg^jUz8IM#yjn$X}75P6?GkX00!5C$1KIFL&12 zo)EgD=XLqZH+8zNU!PDvQ!PPMqG%p#J2~oNU~l6)H-~9ub!bdi_@p{4?9Gloxi5`2 z=P~g3j8}?1J#rm4Jb^f-IKBs=(AApL6!o!}KO?XTRWG&g$_f@;tIEhmJ34$jA_G}4 z=bzmQ`eNm|rF$heCr7KlAS7O=mHivuFfm=5*(><6PMo;R2-3+`$Joc65|^NXtR`K` z!}9V?WU+per|?@`JUUxc_FA2)sr_J_LlO7Qwlv{lSalm~iid@rIW@gjb{2i}kain? 
z1li-E%r~U@tU8f+kG1h4s;Q3H0x<#45uV;sc&uESx$Rtp4g2z0M<&WVhz)r=;HqVdbCgZxsB&v=(FoIiNj_oS8;8T%~zx^A`H z3RsrKbzn~FAef}!2%Z8}Y?*E5be4vEg)e4<_GAR9Rku(~-i?Z$Gmb^+=3rr71(G5P#tJ=|mP;4fKG+{_vAOYnu|)c50_%*| z%c1z3t8FwmTg}grT{1n-P_splW|o2)TZtzdL#Z{{ZVRPSmoZ^=>2}O+Ru3XnlyBzA ze~uhG)&1h8-z+APTvpn4o5YJL409oVOj{Ili|*GU-Pu->=5JelW<-f_RUL@%kq}R~ zN%<`&z5h|Q^{W#RqNl2YE$2Udf%Gr?w;$=D4@+GvVGVjJEUBebkem1H>BNMcFWZ-t zZ!L^5Lue%7I?e+YD2!m}4=XTNAXjBj4;d7+mXNtT-d`jX64LXWtB+bO{_~=nm9lq@ zSc_Qgw-Blz{S!hr8m}{2ubi>9$#l42yuvEPqg-9fdZs5SW>mf>)9PY@PFGS{i%tGL z8JqD(VW(HD?@|u%RFyLj)Vih4o0YSbL@KaSQp*>Ik90iwEZ};FUo9z$p)aRUN*RJj zXU&&i#H{352+stiKZf75_^iNJP-Bzm;=Z*dbKZJXlB}a zYK&VbZ|{oU@8#uR**fO>mOnmR#~WA3!!Wq&uUf=<5wo0}!s_Fg-hVr_m6Y8n|F&~{ zo!Hk*4tp63&VXUHrkv$dw_e9;ffkR;E@D$8cSt6rk+k%z@*{>mwLMS5<4ezN!^@5*tOL#OE^3PP zSEe#A>Y3X$ynA4z$gLjo=IG8qEn*lFwG~Ur%@csxDo!m&ro)|*7v+4#C2uWoPUl4B z3l7xb5X&cKk6ia7uP36g21S()<#(b2szANU+8Yiw?Oah?G=nq?=JGLnZw z9-S{~)3PAOpJ1A*$#t&HTuk94PdB3sPrbVt3r7u!8J!eENv)pKZBQs~BrpGd)&!C8 z#)#Yfb1Y@k_v7MC-~i|wi7Db&zI)`M8vitp{7+Tr{Ubbb`kmFUQ9UrHC`<%`SaD!1}+&!<^gGJ7Eh0-iOU`?Uf|gfDL;W#eB){yM{1Xnz$R84 zgHId@m|#|*;ggzN7CH5nwQe}rm9J}+`NPXOsa1ELuI96&SJYqYQQ{ZvyHu zdNP0XKx?AxxZkpRn^HT&LU*gaX{twqS5E>$yV0NNu+45l@ETBBf>;3aHrSPF}N{8n@Re2Lq3|aJ46>D{bX!i7yn* z0DKi=+Ew;uH>K_n^4)^Akb&+}PKsYi7o_M&Eofn)GV|Tl?BxydBW33DkNpk>TYb39 zY+98)!xo!OUIRNT>>_{|IfgNcHq4NV#d+L{Tr&y(deA8H(H)`UkY_oW$6GF)ea3D; ztIb&7cF-x~5%w9r-rJQtTQOq~Vo+sOTr=%@@k;L0$gSm*{d8}_V5LoXW3n_v z6`b)ZoSDFeEi6bz(u_WGa@QfN$O;a! 
z{ggW7m0M?C`iQOB)|$`nLGoo^rC{94CKb$@wJ^&mUM&|$l==^X2d3N-)2pXbM{-~I zf8|tXr@7*fZi{TI1bj547_pwc$fOK22P8wq__fZ=OEJPa%1r}z`yXC2dPL|M(r`b@ zEkv%li8^T}C5cPI)Yob{`j*oLS=jK67qze2o9LHx>mAlrUq7#YS62j5B|r7)&~iO@ zGOI^k*``Jhk3|;>J}+38B%M6q;BM?VjtpcygWednQt#d(1?oh z`Naol$gJgcRJa|rM+E~Y#F1}fr~=Vi7_?^riuz)BSTgRTn$82m9;N10pN?rktuPAa zgEb-R`e!v1FvjDEx9B=NRndgN!No{PFimrbe?mdyF_t$ntW-m#=DcBk;kw~4okvK8 z5kz+)J(-r`bnCiV;s0sxyTh8=*EItuh@wdEy{R-odW)bm5kl_}6_5@}4K+0BMG!$j zKtOtlAiauo5s)sOgx(XR1W4kneeayPd!OgIGv}O{Gjs2q{Ra>5uvly5Ti^QqzVg2B zH{o+MWwje{S#n#N&bvH5# zLPH&=n|a*z%$&$dVTTPLA}A$nuAs-y1fRb%GP^0GN@QyGu28bkZ7oOp^1S?(9T#82NnUIdR$&e^0HW z)Qy!~s(^P|b7S-2mD4OTxO=GWIJ}ZoUr&H^ZTCYsmxZugpc>0CjhCWNwb0p{ z=dhSP7{6&$A?;ebyTTuOu7SPMMG%bZl;caQ_jdOT5Xo|{=q=JXQ<&1Fr)0J)k?l}p zsf$>`%>nJG$oAT6amXO8?lX=q2F%4npElWv!{zdz6pO4o4`1xXns-R{%f#HhMQ3!i6QPM7-Y_rfuOlT-%p&%y9bSB8Q7F7DOhz`WF_qrtXgK39rY*ws`b9 z^W>LZyC(=zX1f|_kB&M{pUlUnN`h&zrJ9v3j!2kV`sy4F-{9^l3UO>3L2V9Ed>Wm$ zQpY$oopZe=NXr~+g|Z@f7%^N7-C#KOL5G!E`&C#=JgmAb-bIl@)*9H16rDCIoN8l* zEn76X%AeER8;NXxze~xsR*^p|VeF}~)vaa0#Q6NBkI1zN${8^=dg z72YN@0+9OksTIH8FTX(1a4PpUTHJ+k$<_e9bp1)3P4TqT!GOmu-?*4&a2`{pz&F){ zagYb;$R_l|(dmdYdH}T;(MB71(?aypoYUEGgu>%er;`~&X`d<$VK;#`haXzJ+0bvF zxIXj7AIL3l4@YGyU{2yBuarR<0+rg)f!Dj(1>>5}wQAq?cigSp`}X0Q2dwNHlBr(* zb@{tG=hLj3#mA3%QH}Ni(_TTz2gBVQ3G5PB=(%C9r?2;O>CQfhAdy+idT|=}M>Fq! zs$V_sJgA(uF-ragYWlGr@82G!B+!W*j)t;G<}f>Z6h#J#iX`3*0#_*Vp-0)~Va{g~J2#>Z9EEKHOt3a?bVRpVOd=BM$b=JCNCs~=y1#zIjGw#Ut$Ut>?u61`A#5sSy zAhM#ud|IMOC)4X(J>AoT(Zx=L1Zm18<6`o?c#~Tw`Q_Ko*VQ@0zfY~{w4zyh%;7OD z-pJYgi`&*BV+v-71*bX_V*O|14zG59NhoX z5cx~D>u>n}H(+2l5hvPm8NWbY6K~lCm)%dV6ycvi@#JMmm7P6{2Mmz2rS>x#6AtSH z(NOjvXadlrF@8#`XSBB>X0CnfNMk-~oY;Ycd$F6Hy-E9lgFEt|CMILy%)~WW`BrJO zp#0-tqenXABnm$@b76=tgqw+G3esgiwo1rnxjr={E^&buehkK+Pjw0tZ~p?35ttlK zzV*!ZT1(hZ(oz?CWv{Int0}cN8-X7jxjKJ2qwH)?$|h?$ zf+_UHafgV<%T1V?D1FRplc~oZVIC-ww{bc7lfG=Ds+(AR*hr-%eH@zLrjVpa`-%~? 
ze9KQpAn~F(=46+vc!OPVs>b!g1{F|t{L1ir>%{fq>dz*>__Wv$l&>{TGdb%S^IyLh zMmRFPru}ps#KemR(ft&u#f0}ZO|~NOSCGN43N7o}bF*x<$~12YU6#UCtg_ZG%=XCc z#}*)P&P}aYvNBlP+NHyJ=fRwN4tfJ!8A)%CJ*yJQwR;r~+0I8-ZIs?rPt;UA0MZff z-PVcHG*TF!Cig|9lc5iYfyL}w7FSg|4QrBg$3z~t)V_HtyOeB~@LA?-Q_2Thn{X&o z-~|mFx$81GyxNIsF^)~qJ3y0K@5W7U(%H}kYMD3B*8+;vSo1oI{pM3*4p^k@6sij>+? zYFFiLC#CM+bC-=zi2BN?y|UzXSnxP|MT3ODargS5EcOO5KnRhh?) znoMM2^310vog6d-Zh;uD1QL`|QlVJ~Do$3!tG}k-E;-Lun_%?$RT&-F@0nm%8)Kk4 zHf{-g$zp4YSt(OAJhRK!Jh1u{06RGBH8k2%mxivoQ24XHGgze?Zreb-#08vdILVR4 z*3NUN$XzgAV6@e0s12wKJ2=*Eo)^~BAaU5R;LH}YBCW+l_09}Ox5y(GU!3W@eKtdg z^bS?3;4NVVNnNf__3->|)2=k`Q8jnw5sTgL0oRzs1S(cI$+41jN=+%T)yEvk9I^Cs z1vGwjPZ8wm_{>WB%mdaPqzq6wsApgUUuK}OE=DO=^(W<1jkj`po&`s~QucH^I2eZ1 zrX3StyA??N1MNG}%c^Wzcdmm@_;Xx6UH#nICO(uXP#42i62kLKHjF|!!?y(2DQBPt zi9b7=z@4_u7P5hMC8~Bp&kM@a?%-U;SI`6NE zHea2t$l`YSGBmOpEyiB9kX6&bp{oZKff=CoyDD_tnFh)>n_)ZLxWM+-7Z6~T?3!9ANoRQjB^jbf)WNQjYSF1WUq`x4w>HB;^Y-PRR8G#jFjHp%gy`QOT z1=?8PYPYx~v@6Q2{bZy-h&u(=|HCMnD1^k#KO?YQBq#@HScm-g#`-_GAV>AK0!a9Q zypPcO6zD=;4fezeAR!5o;7eFD1vd71l3x0ojwpKyhCU1DB6kU5k3n9u_;Zs@Gf+IU z6SrC}k8Z5oMMe@d-2-E~q~UpK(=i+a`jz2FLf@EM&8dQ~pDl+`Ou^U=k8wQj8297Y zLeksnlEYY^t;zK-94rPpO=Ei%96F&98LG`Qrp<4D(8o$WJ)vL|AYH$s7)A7EOYZDg3vrRO8gRsO3RO=nc4x69$D6i~GV_u7~ zb?owE3lPTn1&Nvaqci=y3GOcrvdP25KR6EUzqvez**FI$9GcfePzV8ctP1jz}vmGMBcvckAxjU?|6B{8{=-r#5CxsrKY z0>845w&r?O$e1fXnZ&fv_r*}lMP4qx-7w@&^qUc=_;R8SStwtO;*{I1Y&xC=wXl?x zX+HU6mczgJEz;^+F{w)0G9$tSpZ)eHEi{)hD|0ZaMZW z&qB)#b*_N!<%Q1KU4$Jg7@)!~5t;I6bZk+ntRH+>p;r>dhM77;nb8pDi;>{{dDGDr zIwYJ7HjC8mjXc~C;+^ms$P8F}L7bWDKsB6+KhWIrLsZ9KuX-|4B653L?WoL&e5y8H0chcXa8-OKliY#36mH*N0o33Wq*>q#;{o$Ym`eI>% zHyjn@-Wkj}vX{qB-5j~qt30~?sb1i#C(E~8o}zc};)8Zf7UL)$6wuKM+P(yV)+^Op z?_ErIU0Q9SO#9$;j*2)u$MTxP+7%88=}tUY3;){n2tl?frd~0>NskEdy&&71O`2@| z>|3){2b0kIgFA*wOcoC5*_`7?IoDuOYK^A2o4!{kBEt*j%)Tu0+c-x#vV&!1L9Ak= zZFs7llGW0X5E}-Qr@yz{)D(Uxe@B6Tvwo=H9_26q} zj_EVQAy2}sM}@q1r99lqT@K??C*DRp6ya{o{Kn%bJJ9q(I81J8ldm=5b^aam*=};r 
zG_m*k!41UGJ1%)|E1!h%z9#%E1@&${&j{?WSw)0qgU~)j@MS2?R)%W9TCsv%`K7%* zZZp0ZeYLw)pNI_~)svj}PPVLn&fu^2#(*JMmg0LiN0eB$>NCy;`OO6CGp`o#FXQyy zeo)HcWUGSu@jma(5mwpbOC%MC7#zJcAgWLqXtoS*A0>-YP`$iCq0^pg+WHRt0m5in`7`xf>s>M*!xfX++h#N7U61O*WbzgD zXm90w*0(U7HcOLA!>CxUw$xmCwI4dj+VNz9R!{#n!UWBM3kQIomAe2~sKQUxza2pG zuCNKA60<({x{9V`71~d$yc1tb`f?NGA@!h z5Rge$qpolxRkQuVAro2bhG4;XP4m?!BRk%V#xQ7Uz0DS-u6i*-Cg)4r&F}q6Y|UER z%3o&|Xj;|t`sB_@VsW1&2A-cj6=}Qmn!)&mrssLa(A5uF%g6ZzV+kt=HKOm{&jr3Ej>kOG>0PkSp&M>B+5btXAp&=db{jJEWn!|H*N2a(Stk=G! z#!>IJ979~{+B*x)A_wL3DWBSvycA!{*=t6{O9)x*2R=S}lY?betu;FcI8Pp6k7JtE zzuO3ZfNwSX{{go;nbZ?qhOV9mP^v`+(7lXLJl%nRg-!hdn;R^Fan#7ECC&PXM6R)Sb{0raOD;B;`c8TG!o zx#nYC+VPpgZpD)R!5|RA-?!GfBL*Uxdj)x0W%@HfG|JN)w*L_GE+Z)ipOaUYG9qyc zA$W&}NsG(Ka(7k5CR=cj;n=Ibxn``!3T_amH{u$25002Ot*yG^HRWP1lOs-5C+^oL z-+D3Vrn9$mkSpD4>d2hMw_H>j|Fy(~K07*^|3ArE%!Lp9N4opL5|LJE3I8oPnA ztL9eIP>b2wtLe|?a`l>>IVqxEYDn6AQ6j~(hj{IiVDH=?w`mwT^lr4W_N30uVr=fA zGi|;(H=kLLwBA-_c;r{#H5?U#A=9c(oH^4LG#YgSQZ4Oq9z7n5IRVg)!fl=~`B>B= z>^)Q~TxZHvi67e*^lY0R`dGu2i(V>i;9U$o-BVJ_5MP>I>t7&M29ouCavV3t3K4zS z3h&Jpu!6yYC$+S1F}idY$IC^CKe$)_mj62`jCKmkCLeeQ616x>-YOYdiZ9z)zi=f9 zJTScm|Lobk$X2!?ccYnPUG#b&9fk|zgFrzpz#^wK;<|)VoUU7Kjx36Qi>nzRk-r*m zQgy+%+qW*~+FBiE?d`IeB!~PtSWV7A<*|{YY1^{MDwhfV`|Aw>qtTu`2~R&a(V089 zYsC@SHmnR(>ppWQJvVZUMt9vGjk&pV#_ny1h0U)TrRlcA@@N{X7D1@E(4_@_>>ZSB z%i|JkSqD33?WkH)Rf5WuL0Qp|6tVijh-Q&qo++N0=6UIdn4z5It8(KDJ;clABA3d6g-& zcF`o`BhM6^yP>2t5H{@DVt2Oqb*CBEgwUFr(V}p#=U;al*PfV0+I$auAG**O6HKg` zYl+Q+M9S4D(bi-5v*?PFFBTrwHN+!}?(!<9s~Cn54a*|(9rAXBU;>@{FO-DQ^-=a{ zlW58FtLk?%zBqHaI~L+oEQboL)lvt!d^l4)#38bkTAhnT3a&-Y!PhfQF&WT}1@eaE z!&$0FJjuDR89p;RYP&4Mcrmsn+qJgLbp5gStx?*)0lW9;vKiS`&1?;OP|-STSF$x& zjjU*_Mg!F!HD$+n)Ej@>XFt0543kp+;WowPm&zBO##NavD_lim6!`MCnfX?~x5t^Z zx3vVSJ()=_|7LSO_>Qy(HvAqTgNAlM8DMf}osfErYE3rF=pIGg@#6F|d~N<#sifQ%?P zFLiIjTk$Si_BX_(I0bByK&-HLGvYdnqN3NXQ_w9^%GL6)I}!cKiMH;_@yB#1W`WMW z7FSF-O6Xv_X(9R8xmoo}0aP)sze#BIOAt!_X-f~Aoc0x@NOj+dN!=Mk5~`^wTYqf$ 
z*nuCvQu)lRE$z8mNs7=XXx}^zLR4{uy`Kcm71qDEFwrS;4N1jufc|=_odCDY8HiO%r>{1tE z*(fPKkh}iCQM=MH?^wWe;m3UUx?tm%67AVgfe^7*(A#EM`Fm*lZpZ~hSUKlt!8lGV z003A&Rc6c@;khyTDDWF2et9r(Q(}R*4m})xs97`3iNPb{%ZEyM>#BvnMmEaNXnu9I zg{M2rdB}O?r|Y_$&FuH)Fdywlv-6|tLtFzTgaQ?XsF)=6yw_zj0FvWD&)pXL?Sx=t@b|r3D%r@I6-LRDL04q7KYYQL`vn8+t7+syQzf6j$86hJR%y=tCjb~|Tpyd%`Q4GnI@7Q^=^@Xv zR_EyMDzf$ri>F{_tf+S5NIp8LBgg>7923ZK@x|A9&1VUG?|h$1(yw`UQarkz843c# zaAFN$a#nXf%k_p_Oh^jlnegUs%-)A&Sltw{x*Kcuh?ILIZE>}J{=!A?c7jv zUW&UUBos^%s7C0S6qanZ@;KkAyS{mR$IOgx9vMqTm-k?>)m*LbhEI6TJ*cH2dw*aV{Xc`HjQk@R#&04SMCH-T&c%-cs4~_hDKEmH z27Hw@8@}ua#)Cz|nI-d1BtW4*?HuX<-^$DYTtLGvePtUsbIQq9=RU zy|NwjhAZG-ne}(TD_B@bZ9J zDnUqz2Z1b~^_&T)WHYB@^KwwY)2Hd5CD6=*v8SW1h`amFO2c?2T@I?nS)llLPi6ec zky!6K5w^k~PdA`+sp}qomP60)fy7$-;hO=VStDUKD2;nSbt)@5f%uHs$7%1*q0aBMr|pEBP=F0vu!$9HnJ zi8%Vk6^x&>3AI)K0(C$RioYJif?osg+$;D2|2*9bd<=%j-*R*KNS)*Bt8-Bl_aUJl zUiJ|DvV`!x8xeQW3_cq}(0{yh)2o8A9XHad`sf$PJ`f;7W&4BiC(?jYau$Y#;I{$r zL+2KeTIUbi@kEormE3W?M=<#C5BQSCLf%LCr#2q=up$THJQH_=FoaD2c;1Kb^F4Q9 zTE)NpN9PtAAO!)e+0V)Ua-FAvYKkTr(ENw+0?}jXF8XbyH#y2&1i+d#Vi079G5mP7 zT`K&C!L~UE97p04+X+|!-kWez%R+mm0{K}4|01^Rvt3~!Fgnh$=9DIS8-5LH_6xLB zdpfRiY$!JsMbHKuGt+PFz+Td6`32f;Q3ceQXw3@@q|-ao z4Nt$u;!3MzkjB_({1{Mu&~_3Of)N9|@Dw&&AAB@80X{a+r;K6_fcN$klxP$1y;vgi zTTF*-tXJE+^{*VP@eN<7uQ*X9)QP5m`eo>Jw6%1i@ZRG$35T)Zw*}D1VA`S0+zb!- z{^wkThk|MxL@+h>3|nCUDN6*$U&@%2HKB$OEJu1$eDtJ}N%fJ2wScYEbU zV_LwJXM-w>1Y%_SA?pN?kVGoavMpvT&L>LS09BKzyiR00XV2Hosx9z0!RX>Co=-NB|srxd5OyHCFKs?tafLHv$03MS$bOV9> z4wV)-mGCp)UKcQdZzX?>pSeN3gAu5D+KQQ*b>lM`m;n&K>En_+qI*z+&6AnwWxw&Dj&yW^ z#q_rY_3!%nXR)#Xg!I4O2qF{in4k+mD#{~wq@C@-@kfkab#LF?h#k)wYo_PC_~X)W zYY?BcG-@%tNi<0zzz&tehzD}3nptw;3a_%bppAN(+oE^Gp0KQfh-x1F%y3{@fnN!P zKVRApTM+&jnmg`JVC@fZyMi?r5Ka(M^j!z!k(juv9TpJfA;?NELBlWF9*d*w6RQah7*8oMhsMpsy? 
zKA~)%wBTm|LX>@vzghtbLS+dn9Cv2kOUZDdp56?bVd0NYsZ5U1*|E>%TyJHj7zBWD zi_r(LdaF^K-s3z-KJ4jNi-ZS&Sb=jS)!eN#EpPFxa$I01>$O9e?=nZ%y<(V+Ato}a za0ct%V?w;B6BOaEfN9^>K-rJU>VWczk~ z6aKU*+aknMaE@t3=v0T`oSVzr0v7Ihu))-@JPMRR_qDJ`f;cVTVZ}rt`)fdFoWbYa%tjq3P4DxfA zvz+brWSDzRH>DIWF8F1*!wk~7Ple?;xAJwKd3P4wcrM$;?MQb_`%}rqwa1s@%)}iz zK*>CftvA%xU*+h0Ic=s~*hNS3cI5e;6w)1PP$$i<=Is07RD$$Y#xTGVQKan6c+5Yg zJh~k8Ky0GClK)^u_V8htQ$rF%*39fa4@k+muZ!a*~==$hN1OwVHAq&5(YQYw$Pa0o>5G6!cOxjcAk zZjII~I_Qeq+V^oDXi{i1s8p?yEz@RGV;$kB1P0|X7Zw7wbC%d~S+^a%-7U9Dg^!ji z@l}lddxA75(RiFAYBAJ&GS3Ys8NlmXmy9n`ZAx=sr@6&$m^ISJ0xGfTah`M`(7^Il z(61N5kvTN#_P)={=ADAQ>Zpb<#CWW>#5_)pv12d&3fZ(g&OBF~qr43Jq&ewboN0hD zOn!JmPp{?d?1H8fd$Zel-4FUEZNQ+3R1<+y#+K!81yiA=qiW{qe_Bk|G>X`#t*&cn zn1e`>HR*z3GTW~vO9Ss?OE4Utee@tfeoL8xZ-g#(4;HNMd&5}og~A2*>{hKFWzPT|ZD zu(-}P&YH12jBtwCLcu~t{ipj>6n zo%#(8-@gAm8(UeAB*}y@1gd|}m#weE5H&Irle6jOq|&2WS94#~yCvuVGAA#u?f9RK*XaJB5bM!!&G(2W zNxXZExB!4M>5hk~rnlO-Fve8?=HL6|IH5ArXGFsJ8Tk$CyJ704KM#oKOxpES9*Nng zKK0gRYI-RPHji|c*NQR|;Yu`yG!5m(;>kCS9RSrjVPU(dor6x6?lGGif~Q`&6~fk( z)h%Lgo+k+vPbU?M8Q*wM;rt6kxzY5_=jEcgs*sQLO<%Vc&q@>?y?92(WYx=#n;iT% zHCdv+D9c(SmcXZEn!uw4atUSggV;ahYLhZaA&Zol>^Rc}Zh-5|wvT@fU$u}-K#86b z0lKDB3DuYmRcUuU86{51ivO+!4wBoP*;Jia!;-xww;E_++~ z0>vYu@DxQs|Eo8hY^`Ismh) zP{m&lS@ClldhroZD&qNlF%$d`>#+axwxPdwi~Ae4Emc~hx7S-P;hiX})Hh8W-d~13 zi5ol2-_i3Ae(e(rDmcE;3Vxu}%~14TGrO*5CJi0=mWxB^ahOCCExD-s?cc_f_|m;C z+oboOe`UH5{_&%oos6B`#^+=1%6BO#E^o`aKiKF2WpLyrotd5zKvS?6NPpCET;xEg zonQdS3MLV_Yg^>s%ml@y8iBDYG=y;GB0Rvvz^QVeBLwiIl8|!(7#k~k4e59+@~>xt zoKZOCoZ#RXTKJdiIfslB;C(>y5Ot$P{zcJ`Mpd8;7r_PI5G#c*VgR8TJs?I@fq%CF zZlZk|;5y}@_vmw;tkOXU*CT++<-d1Rah(n3f8NudcKXww{(SHL%dZ8dZQPs=zCebY zEs85W`Qz&R5=%W~DZG$hY}x(tq5ikaQi{*skLic`HoSW}QSNmnjSCa{%Mc$)DjU@V zkdZEk&;{FOxm}Qrz(&&V#cl6Nt zt2yh0&i3vl=(J|O9qE1z=&(LXn6c1Lw@)DohZKh)22HCnC;Rr7ftOjrD)8u&@5FBD zah6)ceWmiLZ|NQIJ+PypW3(_8G5p8Bny3sSuh`e9&zR7d82@T)h%!;CBLBRF5ip4v zhlkBSS1Jo9A^MM2Fi?#Ce>3UujT&n zpXDU~?{fZkIU!;6|44M|<&_;>ay`Dj4f&Ti{5xQbpb!^M@1Q^JhC~sVF%KFy2|T=i 
z%ne2Uzi~@)nje7h`D0?e#JHX*6Sm+lKb-ioH(82CBM7Lu_7J?I-2eHYhboM4*ZS$@ zU8_(~@eqnmJ3-hgCgvWB3m^LkIX&2nZ?}0JX|jVB`HO zs02`>Z7MYWnMF}BVS*Bs8UE^k?>~bnkX6KYBFhN>`BFjBM%76X zDmH*1X)Ug(f95LDxb6*t0hNyWv<9W1asR z?ElZ#{_7UAgGUztL)r^wEf-aiD!!-UFyHO=XHEnM%F|;t)zjS)18eMd1Z-m1B)N0oe z0I&k}+he?wdMK)# z+iq;&e9p9g&LDb4b@M9>%q9iHzv}CAZ#xuS&IjR91NKVdzYmeqY&b7c{j!|`+!cTF zHskcSo0_@SG5=*zZY{UshH9rG$d5QBA0fms`)EfnK7)&H3x1q>yvk%QeivU$kc z4~y<~$5alw&s0h3=vA$gdTDQ5s}}GFRb`j6D>t)q50iQxpN3>81z*+-_xd(Bxr=BT zHr?F9d~7-lGTY`K=9iDpcO4*Bty?{JTpBDCsC6biOpp1?>Fr!9#4tdHFTx&eeztZ= z-T5; zg;fGJb+wJoxexgl%{~B|W~(RUWa^77`PgeuT0CQ92gZX7Cf|LuCDUEn_4YcPRvkb+ znVoYM{tD$nECExNvC5qArV>fakyAi_6m}ZIp#R^xxO2d_y?}%Ve*&=DY=P$wXZ1^9 z;|0I@Sgc5vzOEI)=K=<$Gfdb8C|uhA0UoGxAp3Mo|A^c>IPJUg_M;A!BLTmKzCgi= zMcLt)U1l-QAVEKQKS@wHd*2frLZ@zUc*6c4C-X>$fLTckuk|gP62& zJodfln045c8%eg}>B&uM>SLw)#^Moe<;-6~(3kleJ|(YOga0$mlqo2f_mCW=-Zc9+ zc->aoyfZT~piDVPJ+Vn7)ku(J(H^OyUu;AI$KxoZHCeo>G#?x^yrx)Fx^TGtFRLRc z#2+%Z!#dg;TaiGK%nAm=rjxR+1SM?L#bGfoYMW?*>YBUi{%*-U+;g2zv(=s-vuiFR zDLAfJJH}Q17x=Z&0@(w3+Pn6>b+_uiZ6gRh-+9PHtA5*}c-UG%2@^~ReRU(Ppcg$! zyNV(UtiaH)*kqqiz#RvjjYk}IYQP(I^+a{q)KPH5V%MS1FFQ}*7gZK#{cYyq6MOuQIGS4KxYjInv9pxkh4lV&0N_Z_lOu5@Gr5s`lcxn ztig(~sZ%@>Kx+hCn3*Q6ak{Hi^4BhDs_DTavsadNL1{>uux~m&kv(rjwI@m!SgUbN zud|9AJ)9;WSvdPS4Icny8n=v`uOQv_{zAw~nEIzBhhIbNokH)UI>Y}8bhdsZluA4f zFJ?9PGToo8$vIc{s~eSNx3LpIu>dA`#1PrF{-U; zUc|qK(e{r7mN`le|K93Va$Cuiq1H)S7Y&n;nxCwAmu^XjvOE-HTJLa78lduz%tW%h zL2X?f6dN#JZ4RI;uk!qV;EuPDDAt;I=ph8Lx_hi{Lu(VKd-7h25<>? 
z8HIKq_dfDiFJW4RW|N3p6o=~cEsm_(m@cm#qrWu72OF?rA_Aa$A8U#+@a*Yj35j0@ zI8uw6tE(}1X#UQ+0iPof=4UYpe9ye8<0(sw362=^o@}{Y2;l#dPWz~B7Xk_k`1PoL z368d6zx@YJ^)h!*e{S^OE@|1EV0C5E*_7*zUdNnP2qa_*&Ujv@9jMm1E!aIDbrfhA(|e?g!`C7#&_OEwSYI zdEdb-INCh)M9=bjs}R?i(h5MVAUX{nzQ9Zd1x4IYJp0|k&4_@=^kR+epASP{8 z2&7c-&IYQ#tRG$y*Wo9-gcJiOwP90>#U)fmbxn=H5UikFLttif*Cval!Z(ReWn?$4 zXY~&}K6}D`3yC?KS{>t(@W)?rQ_=Xbxs3X;t?aK(7fw11&a(ODZ}$+4SC|k~pRKI# z{4Zr$(d76%$5RbmXp7J+Or>6a&m*B>!YDTR>hgG$ReIwyxp%xeX_-vNrI@N1*z4Ux z%7S61!J`jo8WMkFA{xTN2=5TfeN2M7Ikh0G$pzhodKjQ(x+fC5TsY*g?_^3XfCsmf>zV5@QUdbVr zi2+VY3^^X0R=Q#R%G~0afbR-6v4ifr#)46LzN`Dvo$Y;=@TU2#^QD~S*T~>B7h?TpS@&8Wi`50)5QaR3dj9D;5=q)L;spRqq!AQ53TzA| z+z0={-iA6v6e#E}W?mF;7=yA^{l*kJ!)-fp+z!t^;9+|zv4m&!cnAkwtthiIA`Nudwa`7b*DrJMMQ~@?d5|Z0=7^ zKKDMyix<{aqvnPUa|6CasiqwgMmHF|z@;F(2>%`gk&h4qafR<$qjL-yojRwVo0K%& zfacHc_i=e=rtOAHlu*WNwg|r}`ZixanI&NxS;!#Re`m7@a%*~-O%A2va7(H4D0MN3 zk5_lpy69Q?*)rvS7}!+)SJQob^c549{6tIDnJ^S7f+)0ai#=vd%A{hD&9)pJrH{x^ zv4LFqw>3w0Vi7+Q@)yzcsVFJSHyk+lc@`LX(>O3e7L{fL+x$!h1d_||wHO|7bl=B(?6LTuLy;|$KtWWIJ z&}HLa;BqoVkF=iJJ6cB!)pj%aXC%Q>MZ$l3Nz{$701bViILfRwOGS z1&%iKXeO2kQ$bZeLLk)m9smGW>bSGRcOgpu6XoMA{>1ZVMjujADX3@4Gh9U29h!2a z>eaQTnILbYJKmvs?URm(gDt@gJgsRW` zJkUx=FGm-Vx61!hWwg3m$WB&10W4uyt#0RhLDNq3xSfgg@^5sP1|Fg|V%|N%%JzZ%5+l zMX8+%Zi(VO+H@pR+n*@`2rLAxgMQVC7{#2AzYUzl;td?{ zd^EqrV;dHzOsnZru~EPggod*RUkwN_2!hw9PZ$|**$JqBMeE6nyAz=OX(@}_r(Qbswbfx!t?I?@e?ay4}+e9e=sItGufjHmzf?OC2+8v^vY5obs zYf@y%(qk`?a0YKvaKPg;x#bU#EbeDFE%2~R7!$WI;*Fe4!LF??qee{;x@60+iBnE2 zZBHJ$V0y^Gl=B;FCgdFcA=__r?FqWpS4(XBHOB4I$?^G!VH9!7Uh6kNTBvsYc~yxg z5?ubMl!7vqAT`vh9Zpe}@r6>fk~ZsvAXIz%WWgsn`r7+*)QjPk?Qh)KVYo|!xjRJ1kH^wXDX)l;$3pO3E!?6c-Nw9FjcdX;|= zGLu`cK0O8iYOXEDlku)ExF};-ZS4D%sO&$K;pJU!4Ct85Yw%!q4KV5>AuTWptyIQ8 zMx@uXP&1ppX3Y2M8KWxRHXVIO&*gQ!)^O{22)yYTv*MckqpPF)_OMHT-tk5@Qq@;@ zf)u+O^@y~FfN<}j4gdaV7=|vlLq{~PaxPLm^ty^xHM2W}YjS2%Sn|ulO!#i=ASzz` z2d6k(6k#SN9`0_3<6PZl-I@Tf7mvk_4kew*!Y{YtE+uyPvZsTr++&Z~J%0cMkNy-t zGwezyBJqQED&HhOcJu70!*%U`%+j7ca7_0gscRZNe&(K%mV$XuXT_t}kavF|wYj(- 
zK!hSZVZht>{bfXwalY$D$CW~JsNA|PLs$1}B3ZmpTFcfplX=bsF8;};1Nj?`hrX}h z`^WLFT$NtzBUJA06(eQtoO7RNT(0tUrR>dik8xmLQWAFOVO_+s$>SmhcN#6GeaFBI ziPCXd-HT674NQGn1LM4ugv2DQ&9P0UZYGApD2t(tyzH?J(e+Nw%Z>y2eEz;bse`rh zr%fcoTjqdM`t!#f2gnHQYvWnlt&VfRy#nB-BzX|k%@Gas-$r{!^2Kvmg~r4qzdb>E zcme95ig7WxKsW;JOQbtm#_Xx7w;6gL-7ap0ZUcjJB4lS2F>nPDq?{saPyG7z6x7pw zs@2Elaa#`%L)=K}|Gcq<8!U8{C13~nn76$p+vtPFFr7pBqc@d}ZWI;8JjTF+bEI~D z30l{ad|S~bqRVqrXV=ezg)wfcxn`DymY1wDq&4tG5ygLsmqJ@B&no{GB=x??KB%Ng z)ifR1>zB1IElf)stfv(pTyE3t#E$YrZHbEMOGru0{+)AmbyFbVF_zoJ37b*XSMx1< z)95=5y|}n2zX2m{Uk$+t_F$+~YU-Djlv#hbmy=T<%H+Q$RUOg@6ptee2|Me0+RuZB2X# zO^G=tg^xRt2!K{vURhpQv3K2m;`Cvge$WKNq^WOnXA#?}#wlB0zn%kp_&JA@mWQ#) zkUHo0?-BM3KB52mMb68#1|OLGf_41@N7p zyEMi{rCMpT6xXqwt<`g}2 zR}*$QxcneS5QuAC}W`L7~(o&7XQ2KSz zDj84IEO7aZs`+KB&v3^g)`dSy*WIC{tV+DUWyZZJug352<+nm#92=stntxkmTM@#e zg@OXu#>T*q%^SKE1-p@z$6tG1WqEaG*OB+S_Ik~K8<+-Uzh8@IX;OR2= zS9?f_oL&VjTEyeU$j1jJBWdV(u@m@Q!!e|zlbLROH5{n@alWfwn2x`&tn7Hg@O@FD zM>Xr5Rpbe`Za2PS04jY^Q@i9`xE{!TWtFu)uUC<&s-ofU zn0R=N?-!jxL+I^~Bf8~T94>7-+5x{o>wA`ARZV_wyGzOH@*3tbW6e(2@I)9P?x09%nLv_Pvf4uSd6NMTs0w7DIy(*mO$Sg2_j0 zea88tuJy$~5d$I+1$=+!=Xs|amYZvRP|P`M*FWd;1#|eO^iAJPOQU)Gne7F;^oNc4 zjo65tOQun*3qJqy{rTf3e5Gy!3p>@;Li%?zG-MSxUF&FYL2}4Thcp}-Q)#~LD=*&}>-(x;K zJCmL?^A!%6QCE+~TIWp_7?kNcx27)vH{ zLObAXRZlRTPP(+e)-V=S+UVG#CINGqO09Hxs1DZC>=4NBUp%VKTxya69%K$;>PCP+vg?gV9Iz+do zp?@5M=I=%u)!%se7;|xPjw)VW>R^==6+PTm)d9R%fX+f3*B1<974^~U%{p{h^LTXA zBT$g0L9tvwxD1iS^b_iO9zADCHWUMeyGvFX*5w!_q?NI0n(bp-Dsggh8(m-twgBh4 zIP9)m=^}p&)UL9LT|Umt4DOj(U;}_}Og`8qeR`FV8Ic_L<2bkq2J8+SRs(+NpD+>{a@X604)_ZS_#i z!m)EWV)*^u|Dj7MSA?s?D8ch<$#98?-~CY9rcsg66;jR#;u-|37HprWCZYzy0#2DT zkQ`(mtyZW^FERq>D8rheek`)>mDHusM zg7IrI{31Md{S8x$J{$Y>!|&RfL+;N=a+mmax|@97rZUMTrKH^5ftw+mES4l*7~_hNyxFu}>mrLez>4Ch1u0AN zqy28?h8CXdf%SRdGlBNl&~(_ux?hVTUi(8H`o=08UZDonRdEjA-|clDC9B%z@iMX; ziekN+e0DEA?Oak?3djEbKrukJ10lmGrTWeZ(kmU)$9gvTtY*d$#57ohPh`;`mt+2C zgB*Lg5~5W5C#)H|R0$3~Rn(&ZfRfISAz-o0V~-*}00 
zvXPo}0$-(eqcfAiPl!-GB85IVKe`gMAJPs0>4*1doR;Staux>7U+>16*4bb|u~Foi z8Nv>Gg5d8NI+;EOhR4}>Br#x7HvMe<@;(mFUw9lA!P<18f`;~QTNe?1@_VcT?oXe% z*!@kt<|M#Gxv4)1h=XWdR~Z9nfxTNBB9dmPF33aWS5B*lXh|ul0&9YXCM%Aex6!x4 z7Bm6)gCB||IZGVM$TXM2A4wSDvT*EIZ)U$eZ()HW-n(=4%tK<+6o;*27N$rU3R$&d zhYmdGE<=~yct0l_woN;>t)2Mo2=vJW15C)o*r+9>>Jq7?$@u-kF##;zB$eSeJ1*w? zhylTA1o-$9y0&wmPP?STF6Uc0U-4{hI_esSbcW zmUO+qTfLv^NscXN?iGh?e`Gl2t!z6LX$*Qlt77g-NJ|+TTHr05vzDTcrD+I~riWI+ zI3E>UmF)u0*P>&id4Pje$mnv7(s>wt+Vr%`5hn0OnLIw&bUI{mC5?mkWBXEQQRyz( z1vQTRK?%*CTsN91oo3iO`dJfI?X%m3xVC)tGCNKk_UN z`$MT!IEK+#ci>4o?fAHo?f@<8mzl3KyW>&gZu` z8Mz3`iRqNwtUXP(MovNPy=GX_lqP3q!qu6JL^i99-Fnm8mjvHqW@L(lthWmCBP5v) z@4YXKjmfw1Vf?W;(yS@t4KcK>^0aUJX!|9kx0ght^ka(HPlhmuRkY8D{nGXb!Aj>7 z?smN9pxzRS*dHz1q_+*7wJrv>W=!lld5%>>BEsvXSI_PPo3*Pe)4?3snD6V^I~AX&gssFtcqWtuqy7G527>#DO<65Mm~Y#WTJtLYaFV}oaJUV_t5Hy;43!;# z$_~LT@1?)dlL+BL%t@32gG1AGzI}D-*xKE#`kBD?^{ine+D?AT< zgX-cio=Nw|Ny(vBk&Q9CaiL}oKd_?@l+Kzt0!)I@NzAaWXgm zsMz5J;L~)onNhm+VCi1l84l2{*NEq&8h%`5^XVBpzQB~-2uME-7wr4O$I$-0dFt-& z$>sXw*HLH~HIy0y%&rLvjHiW0Gv7s1QQJ_K$t7hEdwMUx#nf3H*fxHAIVcfpOrGzY z)8!h2h}N%coDdqQ=9PCdoQi(`J2^^&DWeWUgd))O?Rotj)*jKJ%9~!8!qJE#C=BLW z3Yjt;Q;qlY@BB{lEBD1-b9y;t+m+6|+NUS)<8_XTZ)te760zG~-?6X~yjMC1T2x2_ zK5ID?!S!}lceQ9JZ|-=r4^TA9Vyb)AVKEZ-GmPKwRbLLvSrhkGA;45T2(Z}usXb${Jz{yJo>l!VYv6hm6Z+MbWjUYqbF}l-c9-kjvQ)Noz-a#p%6XT!IrrcD(OkO}P(43z zuzgH`+qY)-BaJ-FP+CyF1JWM-L7g>&DMPdhS22BX!I$rw74yf`h=6Hk(@7NRfs*tP z;z3q--Py6vJ_L4XrZQLFy~-F=fA9BiHdTGK0ZNZP!m z)ISd((*hkPxn;yK4IYL98lr^)}GnO^HF zCo>b%ii{{nq#+F|eYx){v!U9r-11!5v!QvF{h@c5ew~P!Ly9ZT5$ZkWk_d)~2-)BvItGC+j)(PypsVH?LEp2#mmEU}n}4!V zbZz?=W?bP5zyQ<1IB6zY23&gKI`D@iK2sK%yc=uqKJEd6HSaM zwL67%-7QAP$5aC}`%d8>YM;i;n2HA|Jd;wAHCEd#ta*dxh$dpzW$F~zQif?6%8?)q>a+w*GM zB=pOS`1OP4V;k^QLSy6)kq62Sc<>V)^|G)RmF4|0-`+x=>HFoVaWCW+a44yOt;=z2n(H~1^kJt=8EZQ+LpQS-rC$pv#Z)gDfqZF9DGJ4_;p%93TI~}x zcV;fwiZ)S>4j-?sOPfw^gx>ItQj1R2nVj$D>iTM!G}IyzA1fjY>@|jkMkXLrR})Wp zhpmbV!m{=2S4j;m8h`3p0dFZTZ$?;o1k{WHS0&kNLwVBJhSU^(E>sJy?f=u 
z&mN=VN6Al=Csec0FatJ~f4GIWL}hXD_|l}%rK?&rHW1Nn9#*D!Pvl;jX!8Q0>3Sjj zRu}pqiUA19MjR3hyGt!@zhZyP(R`DesCOsm)be(_k)hjov3!3yhu?kiXh%cuS0qVV zs;ErzCVRt7=ZkrXh?&~{)vw?%9V{zhL@A4WJOO9fLl;snaja4G@jHpY-Dx-Z=Im^h zl%&+Q6I-3x93!J4X1t~;z4j<9QqYa`AS-wQZ}<_Cp-O>W9Tmk;#W;bbJr6N%qaR7`&AIBJCv2ZXvJ;TYOoD| zX5PCNxmhc|NfgV}%A4+zQa_&_7LNG=KnFF<&o3Q)_YtJRQ*PkSRjpdSo6TK?FsW*m z{527O*?d;qruKhDLg9T^1wdHR;#+CS#g9=Hq@lkePpbvAuZA{!mAl3L$i&}2TK`As zN6YTwmh}Ho3jFN|nN;DX;B@o7q`&VZJtK6{|ClO3xS3btDl;1xfh^3cimCd#XY{`P zekB4yUS8MV9=nW=cHQqygiPVR5V_Z?!w-+l0`G&qFb0Ojfq$13F;WS#!(fLayi!z` z=hdebrxmd>Yn8WL24f*=POUBCkrJ++lkio~pIXQb3r;cxqbW-eTg0|MHB=kA{W>~* zTO}jeLo$0f8`^};QNNGvIuwFX)>bVxn_o34QpbzMo^9!)HVIT?#;O&S{b@q2v-L@L zXrp3c3Y+c)E*?itbpK$>iPCR{aGQN+?D<7t^+J@^Qwm0pe)aO6jO9EFOnS0ESvwQ1 zop_#pss{R-$^J=eY2cyY1X@c-34}7TgP-gl4Dsy&!*$R5tG77H~U+N!s!}DFRT}g4cJu`@eZeB zN`A-KKEZJO4|tV1c-aZ}kUJ5CJNL$Cq2gd_@Ed>E-VvxoC;*(J2;F_ml&X^ChX?PKR2nCvuq@H1+Fj}SSn3* z!7NV5E$;wySFHfyEMb%;t^h2*d)y z`6*$AS}$9dVW^7wrRIoO6h<=-K4ES1{=dPNt865J^p$j|q%c>iwL3a&=LC*X)N0XICz`c%HDa zoKzC9u3v(Oxn;2KH!gqD82cz`oCP~=2a$(b1i^h@zO&6t5-BMtrs3p@InkLkfyi0m zUc1z{ZQ4(j8;sH}DCF|%=PX2!uw?XjYdwW$7B77q_wA1tYo~Q@n*{BF&R^x+Ex2or z$kE>K6$XS~$2ougZZKb;@*M~nfPiNH7owchwL)p*Ei@7B2EH99&WkIAE*ygvFB>&W zn@Dis$GqG#UzntQk}@)1cJO?9$Dicj$|1_gLF7_y91Yvf2;+P%Px*(^Nz5F-Y!Mr5@Npv z=@rF{yXVK^?kM`4H{8?VDtMhG_YR{FHNuBi?8=B&$t{S+iSaX_q zIS1>7yQ(^C-pEB>SBD(l4-gs%Os2{C{iSm}SvOQfHC;AQL^^>NQ4pSi2(Hiz5*xgyHYdEpJRw%B#i@?sH^JfHjyV}X#YsXBf!JM2;3<1 z`4(YO++IYe(s7p45q6iNnbOs8W=AbvG{fiQ)01hl4wjkl`PTDvvWRFzlo)5`(0b}% zzcZw$1FoBzS!0Eq*#k#$_(&ktT)N{?@x$iGDYY;N1;zmD#vm`kO;O+w+%*7n#REL+;ZLA?g z$DD>?tJmXBOCrGN;?j$wY!$ES5zMjO!)bsP#y%jHuhFMd=m zzaBuIwj*tpc2_bB>!(iSynawsxo%~19W8gvQe#P0ZqAj1BqR}&Go8RNY)yNhB8=4Tk;3AFnRP4SI62jH93{2 z;GuqyGiiXUK_-2J1IMx8Db9jThOI2XT(|#8e>l((eJdKCHNehJDmepA82RPef3O|E zA!OsDv^G4Q*p%szsiUAKk3GoF<;LZs8>jgLL5;@Zo?92Fe7}huHvM(bL@_838jY&H=C_O42KVxa9PrDK#kqBye z4i)}i{{kSNTYAz>tuh!V$;6e@EZIlk;f$@v{tF%_sQ+Y$_^SLhFw;m!MJwHsTs=9jT(`EB| 
z!!#APodS;?=B0x}Vn>HZCq}zGc1;vl$8y%zUYxLzucH>yVOQ<%ho`a8!Z-v4Umzd` zoX@b_L{^evaOUP~qtCk`@{mjEi`QdCE76ECNX^!91n#K@lv ze$UUsvFn2+GIw#oh{l=8d0T!Bxo&T`ndK1C25SGQal7 zc|mvsOih|pD;^t>deT%+^M(Jwh125L55Y{w?X2JXDgqQ|)DiwZ+W#wW(_P$DjT(-s z7X^I+A{Ut8@pnHQk=%12tKwl0b{g19t}6EkiHh6=JG@`%)@RNMFw;aC)7=@yK04%6 z2SpIhI2VOHlEr(?SL!u+9UdVB zJZI`=yxq*^mfCB)LU=flhvYROSm-{J5ZRw@?f@w>`|{tQq`-;E^7u~148$vQcPV!& z_qu@8zBAF@4f69ziAk2Or7QEp-!9ptqy$sd`w$WfkWWcYg79`WvmT?!bu(6(b9UF* z7>I;jq{PDKd56>U15)#+^F{43`|ZyHKOqBdt`&2ydH6rKR&;dKwQ&H1jd$~t4}{T+ zQcRn&pMhk&UO$V9+U8fI^u(;zfCk>N_2sFIEL@!27nG`|7RtGasfoart*iY^;pfcU zcYG^A{l!TLhoQr3X-x~}?a;1sGLE9}d-VO3A+Oo{)T^{I>F^j)Sbxx0@tyPE*Qs0S zA-@tm&z_>l#T7uL`|^;#GN1&BK~4|TE|{7`+s5&k|X^F6J%U zuDvc)PPdl=`WIsF=0In&rH&?ecE1*G;DT$rM+O+Tqh;T(4WnG}XF29we!rWQR^wh=xHGM#L)BU~g-*VgUMkIYy2CGSt+Gu+*@%rJes z!XbVvhN54-xBvL@qh`-Y zX%e(BlzB8nj?j|T<;ns!2hGG(#z15qW~%7`bNE3fAGed68fJkeh``H4>JmhzeBR{U z$RP`%V-pgS6+y}nw0hFO`>+A21^fGvJ)q3BHA?}1C^qwlq=Ka-g3G|EcT-ox-RVue zYm!aMGA8(U02ha@7#j){QyLc$MY>= z;YVx&*T*a9WcrO?IQL?wB*qB8_^{aN?7_eCjO3kfZlPuT)g?JAN6SE(7f8A-~{huR4)8V7uHlwu_=$W99j+z!+>j zb^PscxT?{QtHXoneic7G`Lcb~*9qb!HL)X!?M}<^wy&1T+0l4vZgp3QtnU}s=zakV zhb{`S%v)@5QCwRC4BzOP+%vKYVw{PX#s)uu z9To!)$UK(@k{k57yH({*^>Dq_f z4g!?VyAGZ1H;{LbU0xqn$?Nh5g;E3VmPOw8z#%hMfRlj=JXT7k-h-FA!tp&y>`|Pn zsIfG5(i&KLIHyTEvvbHv2bA;dUr-iwArb~@RU3YKC=M!1{ z*cG4n`S<>|s1fvnvxS=+d;9BmIKK*yx z7S%jBT`YWI=S)Wp2Zj4=C-V?g{~dSu!?W(^(r{H;pZ1Kug2C8FjdINy^%WVOaXJ)H zFadH}O4`J83Oz9qm3dF8EH^lidYg;Ia%H)y6I)BN$+u;9G-D~VTxt8y$f8qF1clp0 zU6&R~cw}UTh)vFK9nidkUQl=T{bJ_AK4J>K76+%prmt{k({P`Y(TL^BM%{FsryR?l zQU92h%RbuJ4P~7S)V7+xHByehq5Xds5cvgG1bl{Xzxcv{v5lWopXR0doZI;}b2=Cb zobMV9j%@mbHZi{#$1pR)uEC^dUxMf2kJ5Qy)<-O}5%@#;3;MDSq$c*`VpX))Kx+~4 zv`@jf-sxOgF?%V99;>G56(kZk@>PyyUeakRqtD%vc}TGrY_R%__enU{f1+n~-d<0a z6L{ItKkY|M$=n=i764Gbh3HysDdsaAPPx5Rd(5usY>)xqA*E~)b>FhqhK3MTm%7SO zq1Cf<)HAFtEcO{xQiQTuh$PVppn$2!92%J>MJ5f!n4MX2-07tIOhs7G-lNYL^L@tsA9m^Guobw&kD%^qHk;5*E7;O&Unxj2l!Li2 
zbDER9y#Yx2eSBt5?Y#NXV^C1vckVnyy}+1&(8HJyB3z4_avod&`EfnJ__^eDuJ5y6 zr997J(|vpGJiTu;>`;EVgQ!v&3(l9N$&yn!V*Gu998HVq(b74i)T2l#7ds9?ZKzez zl}e^RFbvtLN3$qU9IsujNInId&6?*0kzB*-JsIJIxj?^zGk~NEvkR`?W3{t@sn>lm zq96*frmgipOvLm9~o(Yh&a~-md0H#y$w<1c%An8 z=?#6ca52*X+=*^Def;sMPQccCu}#T8-0NvnN*D5IRmJ5LM3VA0*PYbQ*04QRXXHii z!W__K$cStc)cKN?K6+C5Um7@{B2bx_={!8Mln~^Q{iMQ4d(2T9Yr-SYj7$Vp@chm# zr^u>*8i#wHqXL9u+a1QD=VWDU-=s{mdp=%{a(-wRu~;BHrPI3Y7|QORF}e`v2CY6$ zI*8u@hS;x=Vfi>t*MflS+2kv9IiRWPjflx`?=F;ke{2wZ81YDa4Q5;%DC4G8xtkLT4XpG@#oo zw)-G#zEtq$_0yP!YWodCD6}v_;9~r%2}+7#o0y0tiU}B`t)U^sEtFmp}n=u zoSoQ>4MKY&c-z16=gfxer#Ftre;&*?uoZJ(RjicK?a&8k((U479Zs(7FC6}2n!?tL zqnw;t951k+tJiKKcHqd!$jnqrNGQTn?u)K*2$!bD z`l;%wp4(bn4n5z3(;k^bpaL!VtKo&a6$~GlHIRDK=P~_49lkvwd?L8}81VV`Cm6bM zihXIl3AbLy)3-1O=glj3x!)S$Zr_RceWFn&w$xskNZi=i3~X2*cBHUp!f=x8dyVaJ z!dVOgv*L%bXlD5bHW~;Qh9jr#TQlKo$*Q5uygM^DU)R%A1%Khn*KPBe_NQz-{p=6Q zB}XOGe%fhEx&HOy_yF2mpx57*8(E3+7Xu(}vBi*u0^!%SeOn{FNi`b8fNl+lJ59 z++?3r{=|G8wJe?$=H~+K4A=16X47~-A*`d+O3*Z|GRF39fbwE=Tz_kj>1XIJ zI3nPBx*p`qXmV8?5^(@$=krUh8wGp7z51A?5Is&1%#uWq77 z56?(p0jBKs8g@D>*0;8Of1EmE!<^Tbz%0U!+8@t3+aauM8*-9-2xB}&GO1UaX{6*d zilf7IG!DlmSGXi3-NsSNX$wx1P(7!Y=qJ(W=H)1uxsYMDg+VfY;}d=9*We`LP)DU$ zWeJJPMeeN2wy-1PG0!wYY3hVFa12RH!etDz>2%5CqA#bhvDIiG90Fr@Vo}*FMm0ux z00O^B57oibLx6;Z4pZOvFapL27h06G?8#PQq+RX%1|$8~lKP`PZuin_trl>VxxQ^? 
z?(6CHUWd-~1C+K_8iCEX4n3*$w zU(D=RY2qEa`My7%zdsed|6~&_SFRmk#hwi_wsY6;h-FM;v_|o*2f-B zJSUm^O;T_4TB_ErLD(K&(4p2PE$F*%ufII|%67gkcfM%@w|aPyMwydNAI_G8oN(FX z4qx!VVmyR))cwl6<=!AA?tb?Duxt-YXJi+d!u;CIKe_F110vlff~+2y74tCb-> zLmyi~5jy%2*5Vd5mxNo)w%_*bOEu@(i8L+4pXRGQTrV9LV_~Ef!|?ZpL+RSjuFJE* ziJ@B`x&A<&Df4Q!Fn+qiA%qs&nU3K~UYs?nvj*%HZe%XG5kbs5NE{#9FBy@>q-;CD zfs3>7S;5>07TKgT05L2BLwkk0@)@Clx3s#U{fz2jbczFo6?PmIU+E&yr}?PrK6qketBTuX;AGNoqf+QX^XnJUXi z_pkwjZ-E?MdS?%F(rS5DoK21PSX2sU0lY{l52s6=Skph#W@}+a7{k|Q+l?Z zUc#b#F63UXrBr1I@*?z!c{EW5*nqCb;KF6lmNSo@-WxmH{^M_&(aRPrH73pe0RE;; z!lqJvtww4Jrn=I?-_<3I6KMtHPuUD?LX^zNXU%34A3qj0Z>O$*I_Bi!zDypaArdY& zQd2Ju=nF~5gNqC$5rLXAWPSNM`<)tmBd63CduO4qfaHf1(bF$it1)pQ2s^>;$kb5i z)UwE~wsnc$Uz4gyz{R2|)A@!)F4Cp_%DUicbNZ?$em&nmzc%ig!y>2W_t(9&-0Lh~ zQ*TR#>5O%9??1`BTy5*KnScX#1QghwarN;aaBUqq4?DekBgGh#E-D3fFzl0sLNljT z_r`8nh5n4XzHVKnMhiSQk@UqclWUYocua1xR#pTZemewV%Jx(cQ;B3rI5;Q5VPL|u zX)=W^imD?UH;`^Ib(dxO6d~|+HRs$MC$ANFx0j^dxet^*M13cR!K959pJsR@I}3Ewj1eJg&#jw_}O*ya1YS^TH=n&>p;uF2mH}T8|OS zPnNeShUVaMcYt#>Q8Re@P=XSmY(M>~cJ@4CKgV0y4!C8%z#G>LAExl;WZ`ihI% zqD>xPZ}>W3v=Ik4k@R(t>66>GQ6NkY1x-}E9>3X2aw?%Zqt>toA-7KMD4r~%FyAHu z_;@Go-AC#>+hU=7y1=k-z;yl>CU^$(!g#qr%`S=*x_eE-n~qvTn`}%%s5p!WguAk` zc?$g3x4QK>jVSIp`87>< zv6$|491MSL0s_QHUb~JfHF`zW(;x9Z!n$OU_TndHmuY5^Mv%uYC;XmE<-dMO$jI3{ z2zhNyZMB5ahIqs4zfY8%%Yaj3X;=p6^5ZkV7@c`mwGW~&vY7Q0T!r=F8I$0H4CfiW zEmn_Q95K`n6_x!}yhOszG&d(?&t_^@MWCy6@T6Nwm42_&h)$)69<$Y#y@ zdMzG{)+D&A{5>7DmQ1d;Bk4MUs{*GZ;zIb z)hf5!lKJcSF&W{cAf%SN(8rIJGqN(vAPWi;*5Z41OW&$6HMtA_d zo12~E!5kZ!-|@43F0a$(Slh>I+;L3_`2hh4Ck|NR_DJ%_CvcgverK)80{G0(?Rm}W ze0H|0dnznEd((K_j|`b?97O$`CQo<9*XRY@hM7?*x7zY5SwY z+k;p6vgNVe_bYIn?I-1r|fo9P-FtrAe`sCa^ zRSUhse4~^Hr7M|#Ldg9xM+FvEvNCAoXmXF&9#=l^sm(|Z+q-_+kc=@r?`50Hd^ z5z^rIfo41=m*H&SOm*VLZHM~0ckYgmBZVG3&}N018SG9r`ew>k)TTr683hj3=%W%b z`PsNfOHr-AiEI~MQ+F}ALaK02%t;kJ^ydcS!*bJFd++8-<(M}0WXI38?>n^uOcja+K@0U=YXHMN;@O03(-{$+= z(ki8q4(i3m@}9qIZW(PyL~}Mn@Y#Tqvtqx-?JE|IUO0{na?G9?VBcrtO#6(_1}758}B&ORs$kK{W8 
zWBM2qVi(yD$xOp>Z+sXE;G<59IfZ;Ke-WKq@&zdt$* zxAlKpW^?_l%`WqfIT1eix>ZtGZ8|KugGYc@1RNRiGI14ryPu975tqwp0axjvgXg&c3`x?p`Msn~w?CtQdbYnw)9kaO$4240O3vb@w$!bq8vLSf}BpZu;k!^Rz1yGX?`zU~#BzcMz{Ao*A&Rv>hkKwm}= zcW%%S^~mlgBvjo1SHF3(VG*XRvulOA@HDvbmC|O}mw(@^eNtuD-=-Kt2{RfOSTs;X z@1*aQ8L<6%C~C8*f1#bJsLre>>cQ2U)Hs=4=nkeR9IhN?1{UZkbPu7Qo6%xVqPhZp z8c~;K`uzeNfUk_IiGBoqoQM-|Qy=k)lYYU_#I1bSc0-ldo!tb`%$Zkj24w^=bw zL;^VL41ZMJC^Y>%{PKq}g$BD;^$X;)5)w5OBUv(*Tu6-d<68R^>oquv>|Q&uL(?2= zu^*aZzHWHF@c8NrWz*`)iZ;@nah?(in_a_g;^P}W9=r%Chl{g7kKS0Rru%v}r58Z@ z4vzPyDvxw{qE^}99#qDEOkDz{$d%p3q&P*exz&KQ*~cFZC>J;TPvEpXt7x!f@89r^ zzk3DS6td|-zM?W%)xr0aL-)ti;1LiJi!~uTk0q(Mjl~++pGCesc-JNr)6%nx8Di31-IVE=%6Cic`BIbg#MTD{MgpPi8Id~2s&jDJoG zUkCB(Rm-%lKis^b*FT=3g?EF?hn?^5bgF<$Ym5HoVP7Z_7be%9HPMBu(>ELZQGXml zB7C#v*w=bMj*dB$kgPL;4pA8zxDe(f(jFDKKTKOyuXTUQtKa{CxiOqqIlsQPC*DB$ z=jB5p5#w)C-GF?fym8h(v9uSs?yCClm>YvH4p1S>hy;Uvep@Az8jNk{TuilOxr`Dy+fQlSTK2$K+n~^^iop%tzl)zm@#zmF%>Ux-4Y$bFn-wG@-^MM;h$lx zc8(QEB89}ShDHMlKEmmWNS9W{{hS+pw6G#1BubitfW;P7&sm+SE1modql-2;$2@~rj;bz)$AJ4D&GX#d$|BZbf}tf5 zH8sPo{a@sFm&-z*IjjtU{UT{OAxkGeL95NL$2As4HNU7y5FNNL`O%ID&Ysz2JLgk} z!&##TJ}k$RT!{5i_Hl|GH(k%RQPn|xXMB(~h$rv32$xDY< zF(6zp?z26Qa!r%lT2xfnLH*$5Ux1&mR$kUUl+KQD6|ncrIeyS253W~7Y1Yt7Ov?xZ z{tlAUuNc{OsL})$nyq1O!Aw>AUZcIIK171K%^-EOk0?rBQ&FTRyfl}4DHLJJGeZ^B zx|bMtcgy|hO}zfyCM^c22eyMcfflki`h%8CKNYE^({T7X&OU@t)P?;Qhs8*e8}wm4 ztueoCZ%6-f0>jQbo{Ih9!K39VboI;SQ?*@#k6=qK>)BC}{ifDX|dJ>ZDw z!Bp73SKRN{6kdv1PHH1-*xBT(k*m0ZBE&1|Y^n4X`+13{>k&ek<9)1_K4ZgH zbc#T%zG=MCc(D7oOHcj56YZR;vRZNP(CCln?1q`mTzoF3%!gl`W3jM>&~T9?Wc6v9 z@J~oF$}KT?yYd5H0?`zGS>wI0wmRKU6yCy?r8G;D`hf{7A>SpeT$I`PbZ>u$Ih*dw zc}kY`ZMC^@$d^s-BJ7%(VBV4^(FXR(d`wCaK{I0=7#!?;K5A`WTLT~Ss(K&qflKgB zER~izUr*n!AGx#GU>$SMn+ChJRfBUr6VS+FQV!tG? 
zb(duP%rVLI(A9M4a#rB9{eHJSWLV}EQ!}Ep(o&uFLlAU_0al!ZvEUVMvq#_-z>kb900nPscXo9fH0!og#ZZeSKLnv3k#`$Mx&Wq|81q zwDWpQEAciGq_bI*kMawC=yX|C2`iYe=`)-Wk)oHz8QQ?>QmTthTDE$DmMwNZxo)-{ zUEtc0oK1#FsspM$jvSxA>?FM(taw)0DX9oAumV$+I)k^1qDtsOY(ecxU1x7Y+G(0YX5-$k@zh{|0U2+grG=P_js;ME#z3EPS7D<$b8A-wtvpP zjc+7TR(p<0#C3Gt?NC-$>>91}!E@Kk(NVaD5ND-5cHs{v0i`NlG*CsE?<`(2R8V=M zKiIRcukj4k*3?vJLyFv8cBbY>JKqO}`0L|)-vVkOOS){oFi+&wbafuFSyGY(Q5Vq! zG0?&XZoBrY#3I7PGMb@b7MX~cLPdJMrY8>Wz?l~zYQ%2qr7d-%^x45}mX6Bl`@PP? zxm`G+Qv&G%tw5RgSr*?HSD|M`E2A*_>^F;Gb{zS*3Uf=#2+sUrHBjzRMbi3Hx4iGg zve1i~@|tCH2wtW@Ioczd?^CbP^HP>T>wVWtRyg6jo!fOkx6s2&FX_w9vt>~sMr}j; z0IO6iRjI~RLS6KWK`YWQ%1RP}l9X~&F{)mtlP;E6G3t>$fs`cFlO^ut`TMM|G>OUk znOS_Q7}C6gve5hLWXF5d`hpi0fsl3_pM|BnYXD-9vP9gE;BBmSm=o298zNZJ>} z+*}|N8?Lb!-Q>b@y-yx2&b&T-P$*Yw^Q;z)qG&_PQ8qr;dk|!|Ru#hZjrc|ecPH>6 zD(dsyh=|L@K~W)4wD@_gkSQhgD_u!WVos`<0)-*+%@Rszj#r#rjURvC;U-!Hw0d!3 zGXz*nKI!#KkPEC@+Bb)FuGXt}Qbt7kn!kT-FOW=H zF~%98V`5rfYtbb&{s~FNT42f^xG}e|PRzudZY5CB=V&u$zBtWT=Je^Ht~%6c z`WpCH78pFRxzN;HTT#h%ePE&zftt;j$9?4ykm#ZrNS#^}X)ONs8j-mobNSIpR3dvR)IC4kG z(pjQOa-+6IYjv|A;(Zt=KK)anG*e2vWiBi1YKEPvvb1P;fpfPlQ=~iFvEb3EAnJbW+JMN>IG z&ANO=>Ev=B%1C?S!6(k(OTRzXX({N(-V1ujlM~9$2_BodT#x$u0a^Vr#bn9uN6Uq|HIj@~&G zahXcN%&mV>#5u~@{c)5{5>;%NI{Q^$@w&*=&HR&mosuGG_)yZs&cS6q3$^`uDzBI+ zd@IeC=*OiZ$XT?ZHv*l>=RVX*CCK;rdA**qsT9@naRGejqz=yFT7O-B*X-@1D)rOSVcH^L_eWUOE8IgHOtpSwI@S$q=%7D|)9WHO*Ont8}8Eye+SzfqeXg z9`(o>{95IMX=Qo&zU-_1tu@(;x27bX0++jpLQ&aCkeZ}~vL@U+(SE!wJP-S(63F!4 z!!S|Ic7(69U?Av38$Dh&Wj(_{i9xULh!(wL?2k5XNe~jW&`@YvpnIg4K3q!rI#sZp zx9)ZI1@WdBFE$uPcez6hFFmze>(t8GNiTLd11Dgb;&CPq>348CpRFF>IkMsF72#J| zmxS?vV$_b8LS;Cv5>4;CmUjaH@kDKke>t&q7SXM__R(gh0^A%>Ko$+gYlY1fujsy5 zA*@m5IJ1A5W}&!1!;v=#21XNvr*x?{PFqzyu_$de_gj|9V{cFZ=U8$;Oqu6fSk8OqFlw$WI)yeqS_ z(^_xI4;F3Fm)TX|_~`x8aXou|Duj5xf|ZPCHKKH<3OA=E9f((&@xvrR--Xp`Ay<>0 zJVa6wP_YA&Rg!fSqZt4R+Fb`vRJC130s+8wVr40IJXa-c&~;Hc+WPFd-1vRxxK~ZRVL_?-u;G~yyZ7#m zpwSLf`i_hW8OJ&&`!1qiyU({=?Up^WP<$Sb$z4-Z3*ZZEv~N{rK8%b^Hb~Du 
zBv%!TR}sd7y6Yc3K9hUHgZO0?@lh<&;k;vTvqEwpo>$lVJkJ*4P~p}e_eNGeSX35)imxF0r-R|5z?BoLtH;Jo*Bp{znK@WP=1$OV^OL?vTxZiD z&|&WP*aN3VJVCz$U2;2CS4=H4tH9y&PT)gO-8>NWec_!m|F|Z>=67zlugW>_tQ2+W zvUqjrOnyuy(*+Op7oZUuL?RoPWRXufe)3k)32$~P_!4lHkVtTZ>p+DOsW{fspu6t< z(+HRBl6?{;SMhl$hBXd+WB-yd&<5L~TwW>y4KdX##J=2{8W$F;c`$C(wC#U(hELHVs)kQBX* zVq*f2L5oxQp*y{03l%#BNj=&AemUKNt!IA{=ERJ!bfMK>poC#-Y0#sV3DUP_(Z*}p zx2@K}nw-|{UQVh?4Vo%Vhr@U((S1o#EF>qxvPRcv`i_7&%nic(7RTxb z&Qbbs?oP}lSF+^SOAk`fygiF;yu8ui)b|TTwetJQuJ$j8eSJ8;6j^Uh zt3vMh<4(UD;?6cy-O0-@6Er(#PgYjkFMu3H9N93ft`q><$~t^S-F4eqZrcnwy;T(& zCeF`Ca&k-R%vym^a$LsM;ruUMiuoZ3eTE%KW?(*6+zMG5oZ27$<2#xYeVvv2 z@m+Rms3pGQ_!0YO%aE}2f#s)w9e)4`d@;+}EkohiK$g(y7}n*Hc6T64@NW`shD$_j zV8LotS{#A6NT)c>fOW)r;dZEo`*{JE!T_lqC{X0Lu) zwA5ZvqtAzpZJD=bIhAr8Ll`l`T_JvyPGSJvI1u?(9on{0UPo^g^tu;KImt1@!<07$ zN>U*pJXmhF>tjEa+_M6+l=?-u{Xsa=AHh{v9?u!RrxIQPo3;n7iv(_w@4=zN&8=pr z0LB51B6{7AbLZ2IsuN7zDHdk2x~t?WBS!8Y>~*l;W6M=t7gsuGh$h1h5K)NkSQ1gD(w z-Sq^47X5lGCmp`8c~@aYdBJ}>PmZUsYC)~N%nnom-|?8}mb$n?0SSiN1IG1`_})Wv zMe4(@-B0>5wTIct>M&RaYwOD>)MbJ%VnSbUgESVhnSYY7QKi8%iUPRdxg16=C&!Jl zQaCsoO)*uBY+Tg!r3kFk_96QWuU?>q>fN50RjPDdk{8zDQFN!h7u1${Z*^yeELYNf zi}4wpbO2d99VE{y7S{(fg?lHUa>-Ny*U?D8#KdYPU$NZGGR7 z#54HdUfvi1`Aj?Zj5x?69G*XRzL%lEV=YUJLd#-Ywx@yN;j!2f94s>2I>Q?V9|M;# zb1TBq_vaz3&h%&8_wWeC}iXo;@l?SSS*g z^Xei;VM%E=OgNF#fF6j>^E=t+Jr@Ro%g5JXwWk)2&Ymm^)GQJR@BAwL&vT#l#vGJI zV%zTurpoMs94-AQgN#7zD*Cg6%dXGCv+-QGw}<<*6!?YvSO9tkrtA;JvyU6G$|x-9 zY&gE|x6(ODdT4Bb8L6xk9e%-@WF3A6&Cxi5o^ma$sfMhj{uQ7p9c>;-|8(&zKD0$`UV|rE9h#aZy#qFtL>SNiRbYb1f3h7L6g2_#n5@5;>1r=qfC0IDD%77;&*0+` zJe#djdZk1ODh||pq{Ouqq@g|uGs}=D4n8D4YMQsZ5qfvLXlN1vPK}sguR3ZhZ>qx$ ztl7TmGzP;m$#xRMKVipghOs2c6S;e~`SBgm*ZMv+b67V9hk$QW|5$x^QPv$hGkD$v zd0v`pQTxbs{MsEoy}TQ4P`peJq$o~$ddefH?4sL@w!oY+iSG$3lJArE{eC2#(P5>x zV9?G2%f*L@291kUZp}D=pOUPXUNlxQog5&gGr4JyT~2u)95bXdOqy2P(Z)5}0NJ!Z z$)kLFv}Law(%CYeVq0C=t)LUk(m(igY1#!kc5ygKNy;&&z}!qph5HZlTGs$Cu}% zl|DnqgV_R}O0C%pp=}-XAFB=69dqWEC=qh{9^+q%S^A|??f0h+>HEsVqn=>l(nDEY 
zV0iwVhd*+&*;?sUk1xMGWiHoKWusLTZ3hDh+8n<(qW)M{R#AZpd*W&|2XhfPoOlP@ zrDvo|kn)}m`cyk|gvS?chzv^}Z8)x%k;KzbzUH~O7zOY3j4uV6S>>a6GILe8gpf8? zFQHO{lz}Z5Efi=`e}Onyf{>^lB!4j>d&}J-)hhtS6QPK-yypRzxSb1SVh zhhWG&yUB&{rJ`)Vlw9Pa;F(C5CsCIRimLE4=?V( z;r+sId3VcWs0Hn8Cc{0Fi=kbEFh2M(#UYW4;9x4-E_>1fHP3Wxt|2&=mB}X9w&$3S zH7k`cl0b~i;=qb##8kY!{I0Fec zzZoAPHz*^8Afwn-mFWnSWoRYV4Q`@G`}HtSCs&Y^Pjn_%*bT>g*Cxk*l_Mmu@1S#L zFx=kjYFzan*M6-iYvWd|e>*9TXe`a?fS9WVWF%nDu{v-I&s9sgDCoL^2UqlQsThUw zKS4TBCJ5uh$~=H=#KoLbh*8M0%(ca}+>!P6KA7KV1e0n*P8t)sfyCR}+u-0Vlh;mG;_QPFm(wiG|F9fb_Tymq0|ODjXLV6o@E%44p zY$6l*zE4UD&q={z`&NqFn{&C86d{vpx%3&ZBpuFcN5>A-ndj#XH}``j!EsX(aI+w7vFWi9H#)2WxoYz^EKcu({ z)JqhVwq_GDujQ>q(yRDZ&8EO;3Bi$oRuB^L1cKKwtao_$+o{vh#w%j+2+!$C?xsJ0 z00tbc_)}Ck=k%7q$Mh9*Zc4q7?~ipo^-(r+$kkO&))KINu}njzX>PsjS9r&>^Yv`Y zhaX0LT2o#&{OCiqeukHF7wtn9_*;#?7|ZOJLOydo5~RHX;X&=c+YU$bNE0msiu5y| z5K1kJeU(dtEoqMHQ(TyXRjF_Y_WS{fFd&?+Q{3zUO*xtBA%tK{XCW97!rPUdg`Xzw z$|!Z#+SMTxxXRTds~+P2Wf$w9MK<7fEbz>PZhv!0#@~$6v$A2 zV6t)?5WAjcFhfIz@?%&idYI(4Sww!O8o(wRYr#iu8C@6RD4@D+#@tqMc8K31`Az}e z`;rcy4-XT3d;*e*iaro&(MZvR=(PyuCk&H>HUx@!qSG$Q@qJ^p$D85e{hTS-d|-v1u! zuc4!V16UI>{=h-VBm8L;;AH=?EvLRxuhziX$i?kO|7?s4F^N=+dWROd0$iw|<5g?o z;^GFmcSvA=0Qb;HoLd!gYbu*0y{NLWvnZ~(Q@I;vgnT4Jk4n4ucxruu{oRvuTc%Z3 zMt_x~5)T0Zok$LLbUwPB9AWq$O6+VkXp<82;&6{=Xn$5oOj3))E6Net4E|kmYblh4=@LTsaKfg0`wPT_)Mb(H( z=dVp+ffUIz5|`I+cDprITO>>$Qb`vBy6Ecf_Kv=!;KQpV{0O+(cpLVU3{j>gNAn#AtO*84{LRg5Zj*q zb5hIgD9?;BgaqMzf2dxpjs40Tp^S39Rj&m9pO=UI7m~m~8D;%6#%9|75d0U4z&|x2 zKhz}e9@Mb>Ur1DQg3%&fn>l7b{7)!20Q*2X8$#C*9;ibjN@Sy82=ZvyYT2? znvo>WpZ*)|A0Vaw;^mwyjbi_e^#76C|Em*QLuWdP_hUi|Vq$}m~9HGO>&<3x@!PPhIPEd*H_SQ={Up04HgZpG#+B_W9p zFbMriT<`;XB-rqDyya9t`<1SZA5R3v?JunX!U1wV>;u_-9<7 z(RAWlZcCcFV=zDv@qpV*k!L575jRXUx5kT+%0!sV73R})aV6C1OOQy8g4^fMsja-! zX7D>1em=g`c5p|obmY@*UwX0O{i%?^OU3^2i<=xk-wmvCwvD%*&R&KiPNHPJ@NQb! 
z`OU@U`G1&tKr+ULmP@n!#*t?0D z=0{gxEG(^Ed0j0b3>4`#ikszdS783_=SKrQKvQ(nzuzdHRuO)`{M+NxyD_FeKRP}M zF)0b5>oYCbjHs!`m(2Yji4Xfk=GW!;7cj-6yxYmMzlxO6j~rW;#_L9;&I|E4ww4VN zYJyd8C&=%}Kvq@M#Ux0|NK?)A-9TO5S1&$Xm}~>W-&5dKw;jroIInG9O%$gW4JlMp@BY{*@KZDd-MGpw^fj^Gduw{+-y~T#x#Gyf`H&3U`t3U0pwbhk%wNJl^ zx`W?14-JmRKAn2z8DX7afHx!)_xZ>Pf3Pt#gZb6Jb5cTXbckJCPHSanY2PYQoN?E% zteaH1snhq}yehaAg#T7AlaH)J)#a4OxI8RWGzX7PS?Rteel83@=*d{@vlQezP|~*4C2r! zcb%h!1TZ+7qiQ8X`upi~z#n-k8mWFWvZS@NutbpE%RQ{*s%3%th`U!5pk$^DXo65( zHLP)apZc|AU4|JcYhJi2A*yL%;!fwnnzkQcwA(S__C9++b_CLizP2A>WQR(>TIrq8_vcu88SuK!3Gzz zno+w`f(zL0^UL(TiM9q`@FxsqcJLz3u1V@V(9ZtbdJK?&h=f>LhRr-E@ayBk3-`m$X&*~R?&$<#1LdR$42Z zChz3NsW1Dm|A6g(Ep>Hm4Ze}_r?R@?W@Y*#ULxf91Z)%86e#iy#DlsbNf;reg!gen3n>f&{h~S@N)P3GlElEenC>mVKLAeZ35oB0%x&wKH3% z8N?xXwhOL0%sYwvVAWLuWKb1SD9+a2;-tLVNgA>ZV|5GXz#r_F1Q$PS-SpQbRYu6m zwDha3GN6VWThd!O6KmuvATRg_UAbqcFXB~OOJASR)ARB0A2qK0xJH-al%Flg`;>zi zO8`reYO=k9A1uG|lveaZ5`}Ay*lJlj^~jky(c{FN)2FKHeQCVa@+N;TMQ8CJZ#XL=|8q{CF}GxLVq9J9VK! zd9-rsL<$^kavB|2UH&}3pawo%D7(;yb`%9GU}ZYOl_C!$kVA-*&*$yx_FCO`<@nID^;kSLf#0&AK|3&3~U*ezB!)&^8c2K{)WtKtS*a<^^%n9&!kv2|&W{6!>i|9o=;}oR-QSgK6&^M~s(VrU_yK>0g#@n_mf09`RtW;z?@d z@a*)A_<)#*1fO79R@Ws};n<_8sGUYyv=GdA{7sOG#lv7HXKuX=A5bp?0o&nJQ-;q9maL(9BEjba;Z&bpxi z-dS@BM1Xjn;<#ObkROs3yyZdpL?l-g(Na-N!v6l=-D>8&;1{3M`PIj~IP*XN0O}eq zDG?X%u#^(p*#76E`A4n!E-9Z*p$#;6oeP4;REH{#Z?d)U{8E6LWCPaXJytLoXy8gn z?cfViY$8eEImQ5xIe}%wZYDkQZ#HuEk=j@i83Eey!pXcrvLRTQCu{FCzgnOl1#2Hjm6DKEVP9@%AFS^aXxj1}b~tzlbJa6A>9XD!s%0bS$XMTZCd*AD<@ja;c)l z_0=aLyxQ}4fH;lYJg}!BZ|C3Nxj_f!PnmqWVCUv9itFEoI|a~cI=l7qx~VS|llmO3 z_?`si*+{(9{@M$!s>*0DUT3cKAIM*6`+kb_>{==+qgSTD^KW)M#Oym45FSL$qL`CJqnu|VZcNtNp}8|-JJ2CUgW zk|9~EqGZ%e8OWl)wZ&# zgq{<9UnDp>JezJc!n8FV8u)+Nepsra+u<0i#=hlSo+$<|I%}>sUL;B}i zx?;fuiZLae`0viNwEn-ZX5ft2-b#O81z=`Z|I-fO0dAMS5C{6Z9eB-WVhua(Ds{tc z9;BpA&D2Jka+OH_@t4RLzJGxqiG))(afR`g`N-wwYw@2{V`>x$ySDgJhZ0I2X6 z+VpU8kedGi`+sH;fI)_<%vzKh@PC{pa4+x#{o??*|1|zbOn__yu!Y1PD=EzW=ga?% z2`~wR0)Ld_24ytZ#{bu7wD{mPhjA*Uqy0b5;Eo?y@I*Wa#$Ye@zq%ocf!Dk{-4gNl 
zf5!O7z2hYVFjD`2Y+9^h)P;4L8q19?Jb&j#K(?@;ho#NN6k9BcZN=;&&oWbwZV;jV zy#{FT$dTdU;j*;Q+i%*@+l#kg8o6P=K6Me`>K^)^wYYEtFKX2NrtaVChNx(8T<8L} z;qr{`qMCgC*9-r< z#DV8?@b3Q)82pPc$#Ab#WUPPwYh*wU0?|sV_j)B$@VY{G#(OK8=a0p6Zq(D|4qx&8 zSMG=Zi8pl3f;T&NExJaLf0?Q5Y-rB#Rl-TqKh=fajUY?3Jm32~_B8Ht@7t-j0LYyG z(M2E_d1Nz#RhhDv`A5DOqEBPgq8nCEw}&LuH?ob z{_c2D2~ZX}U1lMrXQGqC&1oyYVW<))S}c#V`k$=_T_YBZe=3I>w!bw4hNjx2H#G_E z41s#!5+#I7@0^3U?A%p{o(CB*{2$ThfAyR1w*XKg{4Jy_?Tx9o3@qhY?lWhb)#kUZ z%8Ex_4XKcSnj33%fe))DZrJvU?3O1~l08u2piHPluh$L#S&V;V4PO4C0KmUbZ>%~X z1#0d49z(LDX1q`m|9?bX#t^5(=>j`!DYB7C{PO>+ffDABkMghh+F`jzur;Qp*ys8f zB*|>N!%F#I>Rjy%0b>}8X9aDNcs58fiUSVU=ybz8(;U~evc9`_C$@e2*Yf@60S^BX zx=$yhwr;?hzz-TfGTQTigB35xV^MA9?r#(sA>>T|0nfki>>m#uOODS(ndEZ1$6ORo z`GvgN7z|x$YR?zc(%z-6T1KXHA*^OLO@bLxPsJf7?3ERQ~ z9`%E2I!Qu~fG0=csAsIi%Vn(dG?l&_{8+Z43#07EZ6waWDb78_Drxbv^>%iG8%bkt zND15TIovT?zECMYRwaSC1PC8w{YEO|Qzqzo(+%@2PB$LZHO%<$SAP@s{TLrfk;AU{ z=acs)Qg><$vf2k=X=H(--qRMm(pq)rC;FjFqhA{o<)=CC>u&H z$EKFT<*hC7uK~cG8E|AowtYuvK^cYoC))GRK%sr8eRs*~N<1Ihn|aGm63`3|2m+q} z`8EF9OCg6NNMCf#)a`x1)gN*6*Tt6kKUXNsf#H1Jzf%I~$=}PT42aO7fdj~K|GI)!1q}Cv_bwXm zPtyQYsEiuWHP8Ys;(xBdsRP5kNqHw8_7jYLjXDh&dX%Qs2fzPZ@udZN+Q0ssk&W!q z?7L@AeBJqO=wy3cY|fJ1UzB*r6D*x4ahN1?yqP(Ra_p|8Ramf|I-eFjGWahUSG zz}k928ol>SQgZ6Xc=G)Ein%s{yeM9kP7E%#2QNcgj<4ZAA+uvknRZ=}` zYI+2awG4sdJmWA2*d=v<))U7AB&yX?m!v!CMjT!bAxS-&)&tkAQysl>ZQ>FkLpM_z z+vDKrGe)(=x$P#`S+$0V;+l@O_k|P8+q>ll{5tb~{BHxJoF+AQR?b(Bgi`g&pES@- zjjrL=<#u7FI2xZz>~dkLfOushr%H<_J?_{T*zhtZIJwRsd#DrHhjp3?*@wG^TK!8+ zOj)rL5Kp-|T}KpHpEP0DS(6Qu0>>|?)p9yB7pX|t!bc%-BLvw0u!toTU{&!rWc5++ z)jd@Piq+z=u|jky3aD&GW=gnFd7|jNQ?KgF3v7<8%K73L#L({}ax3dBIj)0dR=!%U z)GA7E+R`0lOVebeNOTKI19jP8kYFkgmOM4(jf_7p#ywla_rP2%o4<>(^23h5*+1iy z5eqy@Na?J&&9)dw6ZPBM#wO4Iil>&aHRQ6jdl3q>obXxBESE4aWE(9Ozr!fV9}cfu z2G}iC7S|9`tZFl%O_xm>!Cd&15k7QK<8)hB2B)P#d#UJCvjoM$LW!#(Ykx+ z!93h$8P!g|jI@>xuIS2$WJ8>dKk+ZP;>^;`evo-+r#{eDUJU{Tj3k(CG1QnTyV7+S>=xShx=t?<^D~ zm6=O|)q1#7*em2*H^Kz>3T}ha=IWMrFTU=y#62SyI-Zz(3HQkT$mFK)aZbdCuPlSO 
z2+16l?nl#tFKGsGZ9Ib94lQ@rSn)&t(OW5*~Q*jfPK3hjM!;m!oiLB|S1N;cy59QITVN2&>R^1DO?6Vm&Iw7J&m!@Ts=Mh zw-}-98G-3B*=jAhi{T^q*s+L7b9L*uY8HwjqPqhY!ux6C?IyP!p1Oif*TQbbLrQYD z?p4_96e8dH2+1{UF1=aUmzX$qZEHH1$3{-?Pgo)dpS@4yu5ZG*9wz@3yxJ;WiRimE z2ZO4d$#km`(uhoBP)>Brb>rDy$xP&CT54cAmZ=pZ4+{<~hpi1OS-Q1^ePLQQwdu`q zhjL;Sxl42m$KZZ=^FZXFBenA;yi{&h#T-XPdB$==Vg)*A_%*5h{+PL&P@&byMEM}h zJS25~qR<$1&*h*GQsw?|8oCz8$yf6V;R9-*Hv1A*zGhj;*20&iE&HebI%ReRFzR3PLLvoU_;NoxDtypDlp?IzK}}pRKw=we>R> z>u!sgD5zrBobZiq=$D-LR<@T#-%LwhpMcNE(cFnK7owH{MJXTC9D{wl$UF*>DdIV1NuFXJVlpW-SV1Q)NR69@+TXHE987Z?i-8mSy1FQSk;+K6F%Oy zfClj5$zA(rnzL!YcVGxw*p6AR4@jEn55v;Mqll?DUGHq(RFAcZd}AAD!Ahta7|32- zZ{hjsy&-f*72iiA^H0I~9yb2+_sm}S-zRh9b7KPq zmMjy?Q)-&@Wv60IhwM)itnt2e8_D+99U5QXUEgu~JA`Givd8bJp%SvNUc+2f1Rt!* z=J*DCQ}W$j8=ECG<&B$QW-wp7(n_lG+@-21!DF`v7HK%?G{BuYE(YfZj~<4XU!LFD zDlDhz4`XD~^BTT8xC<0H2+J&A($xADGjmYugh)sjG1=}Ld;Q2Yr@8G!!m^&K2Kt-C zzdaoaYri|cV;Nh#4(A|dj7NMb`j0L)qYq8h#UC2GjZO4TNPm;*64Z=B3vj-fY7UNq zgLIEk9PnBnbX&DlQY~~~KzgEB?(}Zr!BdsI1aC=gFz7r14tL+=1rPMcj^?xwcSiGk z$Yo8ZKt$w84h&F-C>a3{kqLQfZAwOP9TbnB0HT~+0)<+?z8?&}v2hf45Sh97i}HU% z`*5^F@=t%|T@RqrRRKiA67W&LCf_fI`QsBkKt(eg@Fsl#(BDISg36m&84sRnTw!nHv2w#C?f;okr!SIsbM?Mq0pmkABJgZ|n`*2gGS{ z+b5##5Q^#BlJg2!m9#qb0)O6x*{lYN*?%@ zGgUUJg^;AyQ%syc*Oi)teip+oxCZ^0Zm#udu-I&^V|reoznsEOl}=Y?sQQex80;S_ z_o~2i`5gVzRCjPqMYg0aEVlrVL8E%gIn5UK?V0JOg(rR4-~CxV{~`Er6EB&r>MFtW zVI%xFUfeBWdTj#sJE-1X%o^>8&<$-?-Ql=i%rB!+Ukbg>9$6y5sg0{D{YT%7RnoTPXzQczbb7qXa|3VK~5HLRUqCm}ka z`TZynRWja=8DmBbNXN;SqoS*nV)CfX`t%oCx^*~DL{yCMXxR`Z`IG-ls(ygD7YvkL z=z7sBjiP?7GRoa_f{^4>YqnRGXG;t9FA0bNZ64{{R?t1m=m z0YgUkn`HnN=G6&TGiheo*X};iK6CSFW_EtPwEK_@d<&?GN8xqqQ{^}0kH9t7pwF5h z=*_B4K&!ptC9gL6Qwsb98Q|Rn!8mTirE^f{$F#MH!`$KQ;18uvmHo2q67Uz6?hd;2 zf&Nd&-$jWHYD*RUUFaqd1g{f$oxS^lJf(7pHxj4BlON;0{`gFFd`eOlzFQ?^0@+)P ziMjh%K=F?Py~ebro5tXy;~_ocC$I-HlDt_S!@*0GXO2ujRd)FymG#2KteKs^yX>}g z_i43|JRRI$n3uPdZ^ z3tst@&N3dZGddcrnw3R=ojvnc2@eQ`cLDaKtPsNy_EZCu^Cx|b2 z5YK-3Z{RR20t_ajmZv`;lLn)Nh6~RToME3BBWzK7wdsd(0Pvd!)LO)ZL;v5QC<4H% 
zFa~fG|I`uSpRWLdn}n(5uJ3=U6acXUfLC;uvtMO|e{oyxel6p1T>li=KXdy()B1NM z0V?YMM}*S<@1>P$4N22m=I&5d?>AVoSNo~^l;&R-&YcC^Am_a<(}Om^DG*=;55l(_ z4}a)bfie{+{pnP^?Cw?6pXf&kGQ0^q!niMf5?LfvH$6h-JI1HTU-JJ!u72_mR(R$> zk>zA3W;2}QAJuR$u-MN>gY7hdy84l#N^f%e%0<>WY6ioN1Z>yJg-bo6V-@U z<}QYY*800z(%Q-gp5W8~xp7#fZ$7&r>*$aP#L1D#&=pSKkNy{`zH4YF$1lt+6q zuB6NgynBmzyn35YF@}`1T{ta#@t_aX_G`!LeiP`rCRt? z^(1FocxESyOetT)BevC8nRIDMn%zzZ+L{WjNI4ds&S()|D@E_*QRIp?r`)EnNK?Hy z0Qj3Oklr5e*l}L~XH+OFQGgd31SSAFWRmIfh z1?2@T0A``d+tQ-xdRaUhAQF%!PgSC;a~qKuPt)PIuTOsovEwg~wIMt_yY))wjYjA$ z&LlqCsc8i*{@!--K1~TacrvO?4GTlpSY}7e&i2;ioM|_aaT^69%jsO>^X^WEC*5$^cn{_45?GrqTHVGm15SW!*aJM*9^td-i%)s zM0`bRYZrDBa4qDaB@_eF_NxuwzDQAkfLY`TeW^62$Yuo?JU~7yTBy%>vkp)YV~TW{ zKNT-3t<-#avaed5&*D3^Z^P929k6{*%zLw6ril=RWTde!&e0wnUD)3qE;N5GJ~~>Y zJzr&Xovlb48q~^dSP_7*5xn}ttO+oo?5xOAouj`j_Z*k4`>9E0RX8Z_9F>2vT-Z{& zYcr(6H?jFZ;p|hHn1R{L-4i@tz zj;==Tl9fOYrrkdK&op$k!t;v5be-kYps1jT+hKjA7B~VcvYLod*>ZuBj*gNFZgNs# z%?_*!7jQ5oPXQ4ty-d<03TFb}UUCOO7+%>bZ>5@1QAw@d_I0Q2wS$r}BgD&rL=wh_ z42FLI-dL!%>SIS_q;BX-`|U4^rwZ|bLW3Ih3o`2Rs!I9E1034mi~)e3*H`pBswnH} zZ$a*2@k(wK=-1(i!LpIojN+Uu9-f1N)V0}Tj`-0|Vx!HU6h98RzO=B1Gg)YpFk#AW z6-u0eQB#}ZZ1c}xs(>^3*usGe&Owq&cwXlM<8lQv&t1vF#tU*adyW`ryNMQo{s6_? zWJ5B_*=7X!rA@_xsxclxcXT1-e{8Dhn^$TecrA~7S`$559#ud?pm#wANqa}V0 z(Fw`Sq^BsXN`WQQNIblGxOrhm^0kyP{GP`_5CzPIG4J&Z>}m-=GsdJ_lqEEJ@UT@j z?f{MI1C8Pl7~SKG+wWzOUk`6*xv^QuFhZU499o>}t!H zpbz&Dc|dYgrgj%bHN8mG#zmOl=^lLra4HGp?H+i9Z_v zHGT1qcE4hfUla3c2bKvVp#=J`IRXFtS7<-S`L$FfDgbinqW&n*Jw%Q_F2(_bo+wJy zk$*XXzt-zv%l-@Yubcn6=l=u3Le{7c`g19oBOxVvIUTOPX6;m4%uamJLyaQVTd{;= zJ8-z$b;QR&_dPAE%Xd3C8#r60H)2Kd@tH^RPndJ$p`RDt@LPawJtlB%-HJI zPVELKgPPNGG9ciLcqp91r@*u*n1Mbq^qFTl^}nrMrgpOxmO5*Z`s6~ouV@zD6Ms&w zi3-+nK2r!FR(J#s1>~IB{8G&Azj0y+%7Oj^`@!9XnVGp%DRC*jEEs%K_Hz4`nR#_K zrRW&mizmgvqj`CwriRJ2B-#ASBf1A3NN=xZKNgFOj1~D&CLdxwxXv0{{W;$cEEd;} z=0L-ptPB~dD7hpckY+*Pq?&&>cORa5wSr%ky5#(qTmYX@_! 
z@fj>wYp}>sqwqdLPksl;3;EdRMjw7FgYgT%Dnx(UU#t~|IPvqA!f~tg%A=s3Zm<~5 zwlDaSCL@3>^9r~z&|uM~n@rMn{o6tjl%RQC&1?fC+CmdEB?2;Bxc!+rla2KcBavUT zGFz&Euk!Q4(`!larfBy3Mls+&gJO=6bG2=Aa|zW*t}N=rLh&Uk+2W5lzw`69U8%}<3=?yya134lPfq5 z=m6*37uh}avp!ZvHZ;!1&G4L(`thVvj6e8|Wj8iA%s<%+g$CICO>B^me?C?JvhTMHR^?#=I&vJVG|BAGbCEnow zE3A6b!)m_oYe2qv!(GkFc5^gaJfIz=v5$}ap|k&E$JJ{5$&0C~+lF^Hiej=Cvj_Fb z3(;af=~&!AOLPZnPN2jOO>ZuP1cGO(o?Zv@{a4u|3p3Fg$xW&J-O}%F{nM>4K#Nz$ zH>V~V6NGiHSJjqkDILsWzrrXIU?@TI;Lk)QG+?(UO)lT|$vX$zbl;Mq1$Ov}bj+c_ z_=$LYYuUhm&Xe`2U08UQYw}>B?yV|}o)Mtr1fL)ci6UxDEaxlsYmFR8zZsBx!k|TX z5@yIO!^rZy)ErbvDZgilfgXVVQ`yx46S3KG3&(+EB-g?6`${+8-~%*A!+%IW<#G4f z?>QccJp1VP!>1lr@GB2`9v4sfFUAHpQ@p%&?g0G`O=bnG9(SX98+4j$Qfkrr6}A4xSzX+WcD9^gP5X4j5aQJx<-&hg>wcMz4 zyf+BCL@#liKRm=bEx&yAu zmz*CI`-N*Gy2cXwE5TRip)<%M= zbA>^3zPwrG$T6?_LeN-QpcWMLWtPpc{(N{CIBfGRGsbL_Uhg`h8Cp(JUfx)T6|Tp` z;q~I(g)s@nbpf9TC_mk8)Q{zEO_ZKCiCqW$*Tj3g?wNR3o~HNri^9uV5RRq4~31G8(&(;$xUwJ^frW7uvix z8V9l2&k-QUD(Ep3eXwk=i)ter=UE2#b`)Y~E$p%2IQdF}Rza65{}-Ad7ItPvUY0_~ zf+g~Hil1iz6i}PbvavHNDXXvHet`MmA;6b5781&&0_$67Y2sLop01j}j?;N?Yo9TAP9_XqycPZFn z+HcW>F?YESXP*seECnc7L~g^8TW2(V1cIPFT1lSUflVf$osBeWj>V;SUZT~e(8NlK zVq@~)ox|xwQpZgEOuTrzHrird5rs0Yk*TQ~#A9;DU)yJJ_uDFINpE|CJ*8K$&G%I@ z;6so;Bqk;YZuVqQ;Leo2DFkN*N|TT7U9B@2Jp)S??@J&|LzS0mBIG%7EUu`~HzYew zIOmhq&(yO|m}xpedcwvswmsjSw`E^2J68Hi&=~60Yi2uNS=&iuM^vOU_;pZ^c(Ang zrZOyuqqKB_dOiI}`>EOX2_trJK_~FC6DHWJwVmo2b?mznGpp?|3#&UT^poM9(m=5s zas@>LB8(z*?jP(g=Aci*Q8^Mg!>Lv_S&ZI(#^1#TD z_GX7wv-chyOfv38nAMpLV< zP|q@;f(Dn<%ztPvYU|b5nCII-(TNW`cmV~)G(NWMkT$RQ%e{b3!gU$HE;zvY!g0V` z+Gt*N*DEMRlYaVCCzH5YQ{hAh{%-++e;GE@#y`i}j)L-k#B*8z_-vh${HQnm9zia> z1-u_#PV0}|+J6&yAiMdqVeD51eJ{q&$Mg;dD|SvuUd-@QC@JXQfMV^@Zh87Z*>fhE zjcNDSgMn+ggGN`oHG@?rgH@6#((r04^>z#5^>$=rwhQsq3+|Ki<_@Yj>BnZVnVF~Wq{UpwUEdd6AXto zke-Q58NIJq;>A%rB-Qpq6r44N;4w>PS)WSg;`x^y2k0mN!hJrIATK6RMyk_4wVpY6 z$+lLWZ6UuLV=^0mw3Wia1kw>Ux1HG#(%LT`I}7xD^;NY64NCMRF`(R-qmgd(&;$>NZT4ZY3eO1z8tZHHv{Fisr zu1K3TMJM9b;5Gw+1cCfdA#K5JpQMveRdK$WFB{u;;Hp2nm|nrV?*GiTqN?X=-n4dP 
z>Y6<}y*D6h>ie;cVmtJM~cC6c|#*6*2R=a-@Y$UZc_+a7hAzPsQ8ivs$fU=6Q~Pixyu9!R${Ej`j3qm z7Y{I=%%BhJvC_wpjXNC@o-zww3l0XjpQyZOg`ja4htaP=f$=JIj)q=Od5NH}5}nt= zpln(c>|Ynzt|s@Kf3)98-FC+9rGy-`mSIUM{u3peG4io})B#yKNWbv#J8w?{0$5n^ z-~VunL%t7oj@}esl!pe>OKpc9|GG~Pt+uQoF_SgFZpzk+He)W*)DMA@y7?$d1IwW} z-IbZEnP=BCuf2rJTY(vo5z*Tc**N!jsNs1Q^jkJ#wi|QtmgOAg=ROtQhH^FMFO2Kj zJ}b~bAoXX!@oTce`ykoJL2pkjCB4{$-u6_r;#Ju5v)5JHlt*{eo0kX;N!O_{2@Iv8 zsA~T_e15i!)PNJack=vP%P3F&L@)~UY}Kq!+&3&I zK3numfOxr|w~O(|`P`^*%2SHQkSFLfGE70Mj2zfIX^LW@U)#+am&qtm4V#o<%P&P{ zlG0f5sWQ-9Y)~N~!36k`k$?N6bBv7q!tl5xIyspJ9IN<*Z5ld0)Qe1To=x4LlEVm8 zl&Gp9m`o!>yDpeDMt_M3DwdPDct3^f#hRkS1V`2t^6qhod`1`MS8dqP#|+4Y6s3?KRI&Ne>H;%#`l~9kz5K^_83ROOPUojN%E3~c@55tvzpINGTg>Pv0Foiab1nR>ArjXl){UjPX5HSwhU$0~RVHQ7Az1af`GNy27L2XLaD0FG)+O7PbQgkkqQa zQb8J0P#DsG7iM}A=IIz4WX!k45r=>oh4tk8P?UIs-AQ39cFqA(GALW%sI5WQbZk+F z;u&I}iz9V}F|vaU6EqZhPBxy)9Bf{I_>R&QwgLHWL8i1gMAmn-w;3Ad-Z9o9w#**L z%)|M+n0n{InDK_Iz7d`OnG*ebU_yegy6!07KT3O9$VzHu!g#f;Ap??;x#>-v5@4Wt zpQZA?*Rxh#qyv2Lsp)x`>`Uk=3>cDL`jAjigmbF8G422OTlow|jdSh#koh+4{$cps6|3wUL zPg zS`ZvkK2XP{m`X8t4$@21&r?oe8F6*+(K(?R8XbhH-g!`%JY;&oJnJKt4DJwXy5prc zW9n%NtlK8L2eRF@imaAvI})E!-$;Ic{NxT2r0q>hFpLtOt`W4)v_!0)Z%6YN$iae# zL4ZxS@ji(*+QXu9m9?tP$r)QJu2zVk>lx3QyxC!zGX#^yV8?7yac$-XX(vAGM9JCJ zgl^zKu3zf+Dr?Tv)HmZaOOcRwUKTu(2`W9RqofQSu|8E3{YkX5OHj;Uu(*~_jz~3C2}=8;m>PsTk}k{~ZQM)a=rlh{RC-$3 z{=^}Y_e#!5YhXr<^d3qeM=`sn{;Bdpg!b+6FWOI09#x*jvawV@Dcb`E035QbuCa@ZY<}H<32qe0xP>}IjD%c+QhAy0z$gU#( zutb)+fQ~r95|%=vlcWd~+p}B4fT->PCl^HJ;KJeoAIHe8)YYvQrPJM;_Wc<)1u)YL zQpc~$V2s#bV$Awr%tA!AAx*(cQNsPl$J|WR}o> zsqZQmMRtOP?Al*Fsf;4-tAons9PV|xN~9R*Dwt)xC`wl18%LhK$&fSZ*Q-dgsxU=kU))k% zv$mg}OX^_-fgWjfY<5+iqB+Nh&2-B{Q=&F>q9pq|pRp(P5dLxFd&PC)hK0ra=472L zl@V?dLf_c{T4bvdDE^gmvJ&K)bnHCh0u(- z-}7W<^U$c(qas6T@8?>|fZ888G96KM3I zHSvepyZ9B1%b&k?97V|}hNBI6z7iNvh}A*?ua!a%Vw$pBaQ`eH*#blyOHl-#T4XZ9 z@H4s=9U4YU;KVisj-@zG65|r8db4&larB75V4K_ zu=-ZZ#p8p$?E~j{X)aW)w;kLCPtAKJ|CrZ5Xio=T*6C62B;7uYAU!Lr+%FwEpb=Kk zPp%t9tP4+w;E)2F&{NK(kIsp1^uqQjB&644g49=Wuv)_ocOUikUbEKzZ 
zB^TkTm@pytgFRaZKgfq@hm5jKJgb_Rx9b{pu}lRGksl!%eZ_2>IB%@!{u9W+1a!*o zk+uvaN$Iv02dUQ7L;pk(HDa|nX_8OtOmB1>yhk6(%b&m5@;sEpty@g3qKZ0s2EUf- z4uujo@FNNK5#dm@{9n&}|NSH6jV?+iNHFCD&^DLIy9TDC;19X;XPz7{+xo((se1vJ z8Q?i@(kV)2nnS!r?FZ^rJd-?r!*@tx@^@Gc3m)YS7*7=VO2uy5yvv8r;{ySRHNsQw zt?k%`YFff^6$FPS$kxzY#H`821SbG9S~(EoDMfEb7XorLs?_v+_fEpl+2 zZY@*DMnBpF-om5ukpx!a5tvabv71l-zBQW%1@OhgNV31cq?v>O7=gr-od-nEt?8Uu z%=(;&M0Mn(-jC5t0N^3e_hp{0)>S>*@7|QY`aORBCz|$xx&Jl9qxf!rLS1uc9QxK# zuBcCxW$>R&)g04wnX2r(p{d>F09l@Yd>&SoJ-C1Bzx8HD5B1M}b}f(+T$ zbsQDNV}1+K{~>oT`C~bjQ@LUio^~@n3dP7Cf^hRpiK_3jIvLfE1oV$#ON>T~qx0&W z%e8uy-aJ)$p$QAlgyv;(l%AY6#pk37L|RxGar0lEU3>GGhk};fGxj2fHFc0Q*Fv0o z_CM*Da~Yj|F|hY;rV&mpo*1_qG9F~O+A$xHF*=^aZ3*Ts#Ej^GMPYiK@|4MC+NEIK zebC0>jy0pWmS+DAGFD6dX8&wJkN?&=N}}z-n%hB)Tk#1jbo2`-FO$ILfXM>|4_WEg zZsaB-fHPVR*cmH98C26RQ&}J^9{WDA=KSzH^~C45{MVddap&L7d?$OuXiC` z!wtQgcLhA@9G`%ANQUUh82i*t6CU2 z1%62x3ZKvHUSotqiVWx-QG|H%nHiQC)@|+C!c*zdH=K`7oJ(57eb#qNM{#xcCw4OkJ6BYeM0TS@4juI_4~!R@Z{|1hxQI zOP|lWXbh5`$_+8mYryHVPStm2!UbbB|14uQd%;E!6#LAh#lSf#&=daqNC`(DCdw?U zmZXH+6OUXoP*E7-gqtv#pTaq7Cel_4cz;b7^;Bn-kC087a)b=K}F5=Ni=Fusdr<0P? 
zm9B)R{1z7o>tyb3pS&QRQ9U=R?u8*Drn96S-pnp&(ejL!@O6n#>E-*pPbEw$3g%2l z90X=A^x*5slJ0kI*77EV0Mcr~A~ZT=FNAGOwAGVQ3*Sdp^x*MrXc+4)uXq+P09ewh z&lxgCbqFA+Wf>OZg?!dAdkehkW+!pqGNWl?lad(r%O<5T&B}gXB^S8D62yrlf8;#A zDOvY&$|FhQgl0;+egmpeY_CQ&>35-p)f1!Yh9`T0?M%3|nI18c@E920UkMJ}d>5 zA3j+PCa0meCNYp8jIB8(MmYyat41>lI8?Tj3sg>df5J+}vniX>usCE)PJSvaofpa< zn5-N1>By1m#$tf~h7#8!u*y>?_Zb%yIO%R=rCEw=$EWlp1$1z&ZVd3Qj+B_7*T zaL(`ICPN3q!ni4c*|u;$P5$nJa|17g@RUi5F&RvdQrl^-hGDSkx}BKc_KW7#V9TX& z1I6xl2VvuvX2;-hZ3!N1^oVEVrOI34$gxzg5(^vi3aSx(g2+x4un5PoVGiN`2bvP8 zcz9eKwpBzrjk!7EY8BHiC><)*fFwML6jR{9Y%`4WEO&RQc%H34iNe2}HaaW0Jjuns z+X;UZD~n}pB#tokEGqsjmm9i32CgZAak?U$RxI|ud&Zc<7}=3z%J$c;?l2TEh}W2U zsohw{Rfq&mCdauiqx$FHG$Ipdq|(4BOhqi0M!ZUnCBAdYd53E`-2;L1VWcQi-?&Et zV_lv~8*in~fI>hgh>)NIQ-0UG%kH;`ln@BHKD*MbCs^9RZA zy*xd*6NlH?DVE<25gQ`Fpoi`;HbI6qI4`9(?YPNk&K_fnj3sk=29iS*qVAQG@Qo=h zv1azU=xHMN^elkzYqzeXUe7A-wx%3t=l`Z#W8UHE;)er4wwDek{w+N{)cT8*^;PGr z%=^ps1ao+A%velXjBoDsb@YLM=iSJL9;7Rp5otW9sAG2gQw z>KO%?o0gvz@D$n%VHq0+2YZXu8cror7)LvH5Qwg1Br@*uQkl`oVT%UL`y(`_IcN(H zO+7cfBA2_lYKDBQ%4C+Zono`>7pgH#p}}23`<%)(qBm1h;fV>}moGYb@Ec!JT(LI5 zX9J>wzakFT3UN`S?s@aKHL7vcJ2BDK6D99T>P9Eid}pltBPebDEC^&)I1 zbq6V$@xAL5NB0||tU(I>$jfKE^z0}vCqf*(3{MznLU(0ot*L#EKT36BetQGzmsFLf zJ*0`ZkcEPnXe7YnauV^=Ww_8TbnR6C2DC2qbGA$c+U>y-cFgvw*mqQ#DBD!+ zi+SXp(z$|1j>}}loq{73>Vrhd3ThR?Ud9<)=^fZ^Nr=KQBWpZ<|=Y|560`rgLAq*e{1hsIOl-W>ipTy)@cvaKxil~*)T{J@>CCFFvDR$oJ!Fm+dBGaJ=*Tx=}m4Kd~C1kAF$l z?xj{};eI_{0R2P~##-Syqd7gV3GKkrW(@SL-60VA@&d*hmwIi2J45J9; z!|(@wAJ-V`=lcj>5du+>B3#OBpLs-Z4>+@f^_w~@ak(iaKxA*oc)ey zbA&CHK5^1D{?EfklX6dDpR+@SZ~zv`7A**4u!&>JqN+E6z_Jv}%PF*jz-4;MRG;eN z>c-aYhj0rJL#YnXxreC8su?>1$x0b-D1r%@`+0mHTPK znw_YX7~_TbVx4n&6CcgxN=?kin7eHsq|p(%6nB@qi{qNtw@bPns=}h~#9Cx<% zE-x;+tE$fSW~;~s#juUQ6vtUQr4vKK!e(oDshOjrgTt;Trl(IQ=oTM8c{0;rW~89M zx`LtnSq2HYy5^?$lh!0od_CXSkc;@R9e>x9iB~ zDHoaevR=yc*somO`eEthdj&(~b|^vzQ&&5qFJoC$rs89=GYaLBa=k6ZkbI!r_m|dE z`w3_bx`R_?nPsy~B`nvp`Ww2+#I$!M_QP{1*xdP|ChtoIcqrg6J{6>=r)OocI1k`z 
zy}G@45f345|o=Wf)$t@!L&FkWkq{QBVaeDR%wPP315N^M+=B<;(&%;ZHhjDnFP-u#dPTGRi z&zM{^eFhaLQVd@5%HKR&s)0}~0tyQ1lqe2$Zm-ge$5i0b<`yu-+P%%{td4dv+%8|q zyLXR*RBiT+z-pwNmtur%*D$Z$%Gwj&pz0mVp4l2ye4XOCTQEbEkr%$Yth4YsZ z>3hpui{g%gy}7@%OtZ7k`6{aFXjqNLZ)ySp-FKMu6yy}v8r?YrfN5$P>d~pGe1w8% z7prBusSyA?JXLKOugjsem)G0harN1hD&6*s(S@ZY-A>oevX-KTN){?c`0*yL1HBW;op zBAXp9Jd8wng$)f=RaqjqOXh<^gIjGbJl#7r^Sc_2j)P<9w0(V0**Z8lIF^2t47HV& zJbd3xH`f}S4#|YfXAE_HeB|WlSpu|$K^HdWjA9BX?e7k4VwrbC;N~>H!H=Ywggt3~ zudaT1fs9t)okSHf`-!3)U-nB{+5~ya@7|C_x_<$#}Z`_Q>k7n^FxN$<5WgLVP}o4ve8gh%945$ zBnT=71|r+?%t~64*ZyNRPiLo3deHMQXZ8=mv3uL?E~hs+Un_0ev%r1h zDu4gs1CfK6wB-Lm!6`6-ruAJHD7mBQZ8k>Q;h_FAwbjqmd zsdaNx7B@EDr@Xq2a0yXy_8i*d@VL~JlwMy`^^EPdHy3>_JYCJzIewpWm zEj3~02nZs{PJTa{(!YMV2`L2t;Da|tPcK6#S{(Kd1;0RaqDS*LeBL&nknIGwchYY^ zp$Ucg&NsTUM|%)Zlp=fU5O)F;{M8f=DIZC^Tw8Z@V}<3kz}Z+K>e99IY@cfAxGb zNKKl_x1CLxtetDG&b$~|;tp*?=k+u^Af0G&8;%na#8uGvZTTUn({_W&gSWp0|aNnJtk3)#k=X_2k*>g%+BtfHc9q%3x-CUOjo&gLp- zGmWbIZtIPg_WFsOnpC>Fs_ONeHa7tA?BwKSb$UC{L0U$}Um5)L_w8-1@5|)D0hdm! 
z0|r&t|4cq5{QvsF4=Xa3Z~q?;4k(WmRCjyo6$wi>b#s$9bYP-`3Jg!7{iO{40m=hD zdV7egAE+JhNz^PSHx-kZn8;SCez{SPkCYmf3RIZ>LYS?y8g@mBc+F!31JC8Tw${>u zy<{Q8=XItO@O!988Ffk`wzVMSV7R=?^&=H;eVi*E@8ahN zerpUNXcl1IhB3jE)LNOV)oyQVD=`~Q9OP-TL4b$7b>L2=*Gx=H16b|bbN`SYN5tn3 z^o3@E8-T89rZZoiBcH~1r|1#zhsr0;&(6+f{N@$~%fiA!rLL|{>MJEDw}gXn#@#Dg zciKAY^mMt3jgIc~cWg1KbXpM7X1&ey?-=@`St4mTS&@j-5d|p>FiIb@C?6}(2^7IS zeZASP@IIhsYg5Wdm04Dm^Q&RPSs zEnrZ=%UnXnIvSa&u(o{Rzl1J~7yoL=nl5vC-T#(AU=87dXln;P0Uv3Wy+X)CfqCF+ zA(G^LiUsNzbduWfIaBl**<6XRD#X5@pNs&A9Koe{pOdl0vpSZnLF(rwQ89_Wg*vXV zf$arP2<}#X=*ttb}K?D39A?QP)4D<6LK8q%D?JmAX$tJTRR08jb zH_AIYNcibR@2S*Io*4F@}!ld8`td!I!W|2e`OY3uMiSfut+Xwsi znu+P@iHUt{so%A=bhJWQOF9d|f0witr(hLCi@}}Pr<+qxK&Dq?gUtZ+18)Iv2;ffk zanX_YH#Z@D$0wHre1w%l0zHH^k>S;W?IBfB{tSmm3w7Tul(^g5knYN;3)YO{FUE;rjlbT%e4*@?qI#jQlmBnoIX zScK#Mn1%IFc~VTGd6s$WDVf2LGz-UaYR905LzNriTa;T>nqrJP@5`zu45B;|3 zZ{R`;`9IysNfdAxn15Ouo%jzYYEVH0l6{lHP|5q_eCFc=oQS!s*90Qg|HtFB!_7^` zR34}^f|u9hZbAYL9NWX6r>7^YtK)?;p*$2+)QUo1&eV<(1zw*IpI0iCK=N4s2rLKE zF{lJYMS<;?%O@@qclb0oCA!}6Xp2!19AhKL(|b)4hif5X1eQHGQ7Vz9?xwt*ypb}h zsPNryWl0MQ$r~G);TC{`fCSCoWN2oF&PNk?kjSF&VS#}`iZ>8YkdCzkP`xY=6*mjH zaB3mPWX@EcL>fpSC+Zcbwjb9hR0^!ny`X={GNP~rOPCpF=H?vlSB8T*XTPrzIn&;k zkP8=<7Fq3Nh7$*ZaQhtMdEyx*>`b{{ZUnp_(vpb6ognOe!*O~66dAcpDCgp$<0BNZ z4)m9CA?F~jDpY%8hsXJVxW%7r3Nn-p$BOr(jmCUV7$47Lie|>uuc)rey zt9{j)5@gib-0al3DH{oo!CgiR4nbnYXP!2dW&u7o{mSEBU`td=|3m+!iT@igV(5R# zsQw4-DWL-M?ec{HJsDlSc=m0^(QN**aZkKx5l&_F(;1ThB>?8K!{Y~2k=?OQ{N10%Mg-O9ccj(^&%ZzZ ze1CAUu)tBw-ste5(o&jWqJU`-FKqT3BwiHjZ5X7?LMg%xqlFTv*Kw&x}Np~H`<>iUEwJ-tctI%5z z5kMI?siFrG%McC(UWs=}#z3_vI6ikESTxlNgaXpbcm+|ENiseSgabgaf-F~CQyl}3 z1g#o{^!s%d5(|waZe%Pt1B&_Vr1a^mN@{B|jhIhGO|1{vL9qu}Vk`U_Fd(F;sPV|n z*=5lS#umA*tEefZfvt|mYX3vr6nrh&aNhUkk-_24q4gIm<~~C0e|MB=vHuP(er?8t z=KndkOklyQbdx&XCvz!C0^Fi=Mv1VyoKcxfL?o1A|qFA#7}~-kOvuKSmrY3>q8~Cgh{j z>`A2|zV@4?xZVjxJB94`&Ja#4T|BY0=xh?}f-{aC7swx3|-}Er4CHPRBP_ z0%24d;*>*jN=kCFJq&nk6g0H#T)$LnVPx|A(TKEjqf2R$(#Kd=ciP`5LM8?v9hUu2 z*JR9(`w^k-vXabiO5O 
zlNPPDyJ7z7jCOw>sSgb;Nh>RxyLFUoD}B5m_hGB)nSi)7>+OR)uBbUW=)o8iO}g|n z`63NX3i}@vOqmeWgEsedS<;N^KM z#FxU;rlzE1vXauiVGzJNh~S>ZyMfjOFGz2(RqzbZh;Tg2@M7AUzBN@3%jb~ja?rtd z3@uG&{-W3IJCK5GG&EkSkCWGFb&Zh{Y6%VtseDOy-=mc=qCmAvV#CYJQ`a~Nggl{& zBOxWp*Q&6pkm^iMWS-89N*D0+H8TUZ{xI&#AL-fX@Wmd zufm5^easWtZ(kwgmMC+n|L0Od1ZW0=h*xyqY3t8t^&OLoNEDTnybo`G@!`pI#O(=1 zEr7x16}P51b6f9N8lZ41U^yL;bwBqxVGZN3r!;0L9!DzvqMyu4$}%w(n)4}{$bEX) zex3C10U?-!zYL{Y&}O$gJw6sZ>uq(eDbyNilyS6xmtdq9+7dp8Z?9T-xx0x98PCTT zXTKN3p^yxawLbdAAi}xN=hR^B{B%!rDJi2dqEQ%I0UIi~f{Kb)5V!aI7Bbxl7SLx> z7Eoj;622uWK*k;>BXv+4(T`e=_gfg&F2%jqy# zp?YFz`F-*`2s#exMC*A0DC~NwY$ki1#}de?<*`?Xd$a9UhxN(a<0Css93WU}w5ab} zpMnJ0i}=I*!is1MvM3B`-z-o<>i!&6>l={skk!dkl=!zwvlB-N%2SN6Pv&TnLl0WK zL=@qHr`b-w(MZfm8qSgIa5~QuGYkUiGdx;RhK-KwTTjH05&g0M%1QO~7|CDue6t+yeB9Y5XjWKlk^4I-K+;CIMV*IY51VbrpF_fR_B><;rxCN`v<^< zh04kOXS4&#xK_)7ow1*R^XxLOfyqObef zsx`L>Vxcg{I2|$cvC%Q#$;4UY5%w8fo8udCF)=-LSjzU1Ph>9m=O|Rh#QaT0>FxnH z5EcZ%Xf*~C(<1-;a~ifdJA{)AY-ht&r2^Pb1??3A1DAK`Y}LSBb3Q+xpJ_bc9@N5; zJ1<* zWNlH?X=Xl@-z}GrcX^lv1;fdqneAKm;!{9AdX|=kHo9U%) z$uG~wm>VKMocX=>Z$m4g3{Lb_3Pea_(3_$5CfEEQ{t)>Y0#Mi3^yp=r^Jv*5=zO`q zpWSh?1;D#4p!>$b^vQaeZP}3Ed~#4e571z@TMOThc*>X8B=E!8pT72$2&;B_e&4aa zbi+Ux=3?#0lz$9=(KszdG2gNqxH;k~K=XJ$LFrj-D=C3m405$Ny}T?hDwhYG#7ruIw z{KFb^rdvMVMmV;0rj(F!Xyfelb7HF_bK6SH&X@q}DA|iL4sPW5A(qo&!l85hdWk#3 zuoN^3|4!Bm4|ymz`YBH-$Kzk&zpE*)lKjsbQvxjfh(4Do)6p13G704QdLEbQr7(!X zWPxO+krO{khygMrU;|NxMAfZnhlht9f!=h+;nDLVb+tpKFTgP|m>C5&xGq|O6YTPj zbwyRj`{|<9Cn%^-XQ$X;F(8xOW}}6SMFfuO7@M|W-NVDi=7OqDNm*G%{IS*Pq_x}6 zACW<~Yhz{xD@>G*qNKSwPEcSEf-90BMn{*6!`6T}IMZdRtx<#R5Ri^-uQQd)Xka$xFUR5a9>Bk?<(S#oZ3x4^7Mng<6f+wY;V zik}+l9WixS-ATE*NnSxRdPVnI&8`Se`_52p&M&avz6-Nl3PI7HW%D%M@DBysZFFd= z=>`0-`|C(Im`XHS=%eS$AIo0PRPXZOVrq(h5VzjtBCYi^G8Etr{8d4lsvf4~s5y(3 zAO`6e;6~S?VQDsRFE0$*8LhS#S_giG9%%5hc0i!-%#|P@<_5;WO1)MuD{i5(6C4o|>TA+d>X3 zWfLiDvhYU3ej=|>YDX~>oTPA&U!x8V4sv1mS`att78({Vva*_%T)WnyR3`y|J8Bx5 z0M>}82r+grF30zSQQ#1bnZpB;*`ujmqdR{yj#H=wPOJ)2_{043_5m9(Hb_@&x7Cti=7p{(slND_;a+l;ab+Epw 
z>T+^QO85QVUHp%q*>q|lOsAPKjrv`l7X#(Wc*Jvvm!Vu8&+kqv!o8yL9q#>U^Eb&81lp`qJ~^1DEnbEc-INQYzl<#+$^^+iVM7z@0; zXwTpc=oXv>oKe@7y`N?#tSIh6i>f_Gj#X$l4)J7ZI2%{pg-rWkfR}fDer9smxw*KB zwDp#&{`%Eeqg1{cE(<>K+~OWcwQ0LtzfVcjq2d13BWQamTN*dr##Y%Iy!dfo-o0}l z{0xDit)vAN*W@-$FRHrz@rksiwH|JaZ{PY4nj1QeS?&(53ibWm&HZ#S$P*wplc@%0MczWn+U^K>XO-u-#>q-Hkt)xG7;ZDHZ;`L_A>Sc|(%=XsZs-NWS&j+43m zbv@nH>@|L?p1Xa1^#miA^^6SEFa2YNq(uEUM^qk(kTJ|qYa5-}7Lj)r!z@t>l7VAu z5rKtMc%($Mzwm93DWt}Y)*1`tKc%i!|e zBMXLvfDYi^9G5F3EmYH@I~y~R7Ap<;V~|9HIGE=3Jf zRVDm*78Mccw=gi&$TL%Po9-G?(4J;)Wvz6&2OpblLaWbe5y_wRJu3 z-PS*UQ~+=~Nk`Mt6mBaw`;--xR&^bk9To}e*BxJ<&{=8q7K+z>knrZ!n5ro8j+e?_ zPmH@YmjqR{`+G(n`y^7{4h|<^~(I-_E|F0c15QK^!cAw$p^@WnC`62t1tTA&R2LtEd$!^gp6)1t&olyYBy*XL#G>OIx3E6d5v zApC~9(t0Oun`ubkW|SbLgMp^9y2kEM4f^zAW227x>l9o~+Wf>PZXg%frk^y!Uy=)PcQ z!Fo@tAgka{dlDH4f{g=e5#l~Y6Vq#8d;ow6IahEjug)EQ3{oLFm@?f`Wv zKldiwx=$+Ao{}>)=hC(yAW%d&BO?P@XU4|LYCVff3Ofp;kHpl>Y_6Ma5gG&72;{LS z*qE4^^a!%Cv7y}rze3f+uOZx4)hmIa;^N|JX=&wA?ugDbFrp!XlhM|u-BpjLrVC?a zy6qw-rvPO*fFzcal#C||aPb!A)mN)mJ&b3AhV=&zQ_@pQNLBYYfk6!6wOYF^scXSn zg$t0Jq2lA)go%jo9uM*TO@&@_QUD2OFhkTD{{;$$qQtk!M^653e zd|aXK8SdQ?Y(dt95@`G?_Zc0P;+4NV_}h<0{09_5Jd4MUo-a8ym1Q~}8NFOZK|wMa zl_amO4w*i9%io94 zb}A<^@^$J`ud=!Om7(@-iI6`;jEPRIb|dR9NN7gd>usl4J!f?!tmZMO=r%Qs-sS23 z?G^D;BR?}ePcBbBv$~=3XxsPkX78Sim2D|G+2Rpx>pnd9y+fWenicau^wLQjNT&I1 zWN*a5_V-|A1EF}xH7qfPSQ`&1%ac)Mn!XTZ+@>c+<{9KyZE_n%SmHSd@gQbz1b8rS z(8*^K(T<7MJIw2!hEA6bUM(C>Vc5(Dh=f)EO8e+_zCn&_WfX?2nzn7cBKl4~7 z?9<#&Q#woyh^Ua&Bz!dYtgz}8Nl^<<UrobGO~ z61AeC!u@wFst9)t%oUJ?gdM{7$vU7h(WOMY4|1j({+S zuCDGxK=R`&@-drCKkLTiYzHw#x^>9V`>H>^eBcD;prk&hK+*?K*`3z&Gke zMMKNi67LxoGnrE0T_Xy0+I3h^d8S!k9c}CS>L*NUhz(;kZpt=mbI`yQW#Wz6@_6zQ zRhmHiyYN|>GnbuaKE6320UQj|{bEc*kVJ`?=o7o4*&A?rirgjkG#5lq z-5r$$XOr6QXK_yDJD4#~5r-Zo89+Snw+&?whiNQ;FRL!Ec#=2UUp1xpmZ)PZC9gxK zcsBzNd1ZeDCjAjcc31M#!Xx^=H*<`@9m@DUf#*N{g4F*33C+;KR%*@6=%=PSL7#+n zkYH2S+C1Sv|6oOimse=j8)3g<0uq3Pex6np)G5!rf+KlK-4;Jqxc~lW3avVmhi;1j z=on_7$;tr-Ygx`qTVD(^2^nr1;W0eCsZ08#Knx2rhFP_)RScL^K$xwiCN_A_hz6QM 
zF$`_^;yIK@SpNNm@y8(~$AL5FPSGz0{hv&tK(t+y1z|FN|7q{Lic>qj2ONK|!uO@D z*;ac=0oHD5+R#8WHdVu=N*oK!uHjR&*PMnl%6xIolT+`zHi9Mi9~PqHHrmZ}S}kP+ zR_Q|}na#E+7{Np#>3Qb6j}xChC}u;RJ{X3kKw3FlH=B1L51L@8y}2kj_~76G*m#ks z{JgT*?sfnd#=U}4ORuA-v_^k^g&n;e8)wk0JVa*3UJ5)vUNz6r>-?!|EMn|qy#H{X z6(^siH83e1VERBBk{=Yoj0p1vA;=@#H#^(=<0{5FoJDXuMv;WS0mHJD#qi zZ52DJIm~U>>M6adAg7+b&`V0lN%ksz&lXAb@_g%lS}MO&zxo{Bc1Kh^68OB9wA24m z@2(4zOEYy4+e}byJ*vgb6WbkN=xQNn=g#8K)aH@bYw@}Ksb^^_;w|%>^PSe!P7?#8 zzE-(1p&|7)PhhE5>v8sc%nC>iP_uIV-+Sb8(MZ?Txde58khhCu|LYAugbpg(3 zOn6YWTracz&+N*;%R86>;pYYKr;$aQvs4TE!^!13!EP;OF5~Hm?uK2T{pCi3<(|8+l`6V{uc- zFT$96MHO~d|9N6CK&t*JZf1rd|d(JIZXudp=e12+RB^v?CM`53c9CZ*BzP{IQtG6Z|21R2_#U6sIt@Mmfkb0-q<-6_u4CQP`kY z6aS2M6J1@u+hB-i*0%oIoWO1~9cJF%+;Je~PRZt`#(%?1QP+4#dYXQPe(rjF|CJ~B zr45CJ6!_@jVpbd0dujh(3H zYVoMqAkeA#&*Afnb}7SAz2G@8E1b&Xt|4dLE&Zikwi>^D)Zy=9OfxJEk;0a{X&BVy zL<%oI*CB-%utWQ_xsb@-^vUI^vy|D^DQF|<4|E6%sMkv1>aHw(DnwhIO&~Vx4M=gB zL~DaD#HrR!pPGhYl&HFiVz9w=c$ZUL)b~D1{ce*z3ZwW}72)eTsQ*FMM&HW@h;0+f z%$Hi`%ud{abD|)7M||BE@*WbbprKOtlp^sI5A*(k zrv&gSL9B=g`0Zfh$z3C33lTy*Lj&Rc{)+e?5rVk9LYEjA`Jf&<-YZG{_y#6{fWVG% z6N@3@GcvEF?3Oyx6dvUNB~dF9Gnp3TqguHkXx>XPt`tvc1!+0MqsV#F9M z7KK623)BGhib(vT)0@D>x2*7JcmK=6P+VLRZE^mAtc_jeG1B?*8AkYBMt*p+35ebf zKiZrvRd2TNpM4t$EX)1RBkUA1}CCIs9(#^sO~g2g$`zLqM-j1?o9lAebV|C1V+fwcRfZQ!(Q^IxD+qFH%`{hk;yP z&DA~2k1u>y%~j(Io=IK4m~B`WUbj;T(h#>DkMD=zUQ2zDf=@(|0q4hOxPnG#C>Zz9 z`d;+hB*|2|$7$$MM{k_?m^Iscu7^X1F4&9|y#b)9zu~^R{k)&5`ATnYo*zSrO1?Ro(AzZr7Bb^8(&3e-mGP zz-y3Vb=Ny6`e6a5=cgZsx!xXzKaE^;;i!DPFNdPumSRQ+{b67dyI$-F@wiiBh{Agc z4u8p+rTMh~sp$H8t`SpQG>1wTmgU^KJUFr4q=!Rr8Ai_;!5TNmL zwf(+2Ex_6BHtr}MHb+zW^L-8T>2kX(ar~zGiXb@m_u+MS_no4$^bv6B-g4fe=iA%Y zFo)mGW^V3Es{>4WgScOpmbsMBf8VJH5P@{PR?id!tR^t$L8PLTl$2rlYn+SP$zuMx zO(KI!Ws~gF2V?{2FfiwTJS53+eJ35Rtu>onA9hKH5efLmp7`E1jf{-q#qO-q^>AYF zMG`nGA^~`J;#=fK-q+xVs)Ao;p3lhhnZZu@X;*ucj(<WPKS@S#a!H^dlU3~mNYRLx{v{7r^BXJaZ6aOYdk2Deql3UdLq8wY!g7#{wq6%1d71sa-)@2 zozC3I`SUi4!M=6081lBI&w8VU-*k%?iqu0{+6t~}J|?qS1M!e;0%6F;LgPh%_Mc8m 
zO~*|0;2D6t1I}UC`4Sp}IMf==NlVQi*&SPx(-z9URi&8v2e^)-xWJxB);)~Rs~AK= z@7&;!d~gnC4+ksq8kx2D@w$$v&rsZ3%YLu={f9-Wxm&GNHl}<->XTibxsf+xhUpLV zMcH8LYOO>ZLfkG}F>Y^ILYn4FwH@dVDmEBQ4<)!sX(h+BbM4||T$_MTDHug5cD_-y zaNLrB`yn1Xl%k=)WC$lf&`36q8!d4(dzBs*niz8Y&7nDZcYlQE#X_xa4is=VD*sOh zPm){0=M0oO4Wwo>(+;QOIcP%tx4o$KEk17;Ve_w8RSz*$wV*JnJQ&Z9;tRj84-o<1 zs|jF_x_%0pcx>i7cv@T@i62zR6n*qm^_J=9OL&1)IX+6ej~Qz2R>KTT^hnJa0~7bL z^)lxO#7X7{_3gUA-}HX|t{3l2T3c0AK+HwF=HQo5!S?6de?i#5tdZw3BA$`PbG!}E z72)eQdsUakINw|4#5GjUp6T&4|68BV`tlpN+&8-JKLP{DxyQ+&Lr~sS2^;$43b@NlMpA}t`9W)6_;*p!`FYBnim(@4E8;` z+2gADyYu|xk=bjaFwOsY0u6G6 zdU-+XU*xG+K%s-P^eK`dIVJkSG3fe`2yy3noQQSV#pgs!3Y5pr@*|6ztVr%7AF@W| z)G0h9O0WeHOM7}|+r&L@58Fo9cms5-=Da z|CLvbsmEF8az;@Q1_mBvF+{zVoAyU&VZRk zvyb~3#lYKaK20AuUUeiQX)yw1{R9j=Gki+Ej(~!RvtN@YaY~z^Aq9E)+Odg8xcd2_ zui~6mYJJ38cbXLZy$qeteN*^6lC22Pz^GdseC+a@`%pz700Wd{|HEXo1`w%uKj zLh00$x5&QdtXCBI<4?Rv)O<@`%2P7D(_dZ@H{#m_5lQ;488xjl736vS^swu6w#?t~ z@zQB5E!pu_W%pBzu0K2@Y#EW6b;pY?F~0k=^vUmf>ZF;!E&SsmF~q6+(@K6)K6Y_q z%+1&JJDInNaH*1U9N+yV}^dvS2u$ho$ucbgZ-H9a&mBvH&urr*nAbGIKOM=c(;g0OYV zQyke=45h|lJAzNa@XPUlONX7at@YH!>E(r6>+yDO`rV3c7{$I*TW16GUU@@9ScpH3 z)q#PqHAHTq=Ri&lzi(8Rw$k0#=Yu@!A%q?%7-eh&t53}a+`rJG>lr|NKw|;eZ6}h4 zfSs(a?q+K%hkc&G`cSj?BV{lHijjhjjxZM3X<8V81qmgIWqD1F>9pW}MFq|8xzo;c z?$l;s7cW7MU?^mVnfdj0r{ShaT;N}knHY>vF0Ik)6o1{~k_vLoDfF7fZ-B%1Ic8d} z!>*~RVY;ikH;H%nnDVccXj^-ig{0( z^c;kv5{(tpq@sy{8N!u#M6YaD8nG7NXM~ZrqeLMgz9ej6Mpc#VxG9f#1gRQ#CeDo9 z-RCw$!moceEDtNZCE#Qy1(N*0N0MXaDeLcrCy!B19lSk)+8c^Qd1(NNn6R+8ykMB7 z=FutGd>)fW25$rnBt)S$LY_d2CQ^ELqYRDZC?|vy${|wI*0^F9SFaF|UR3dNvv4Vv zSMLuECTmmeK$`6E+A*Dy)klfa$j3GvLAmnn7?0=Q=&-U7AmNmr=Avc_5ZNOANrr-~ zc)~}5ro13yK{bF|k%w}3Par+Ug7xnpUk|1f%YF+e3rAK)XV!Z=Y?|tRn+gLc3VJ|3!kxjKfGfQNKu`i(b7=!IyQN1?slWH8Vr0HMVjF)tT;J6!M)SI zs*xR^JEAn$(AM4-CYo!d^|(tc$dB3aeBSWA82kA#7jzCvpk@*^(9@@-yRL{ z=ccsf@x9zd@uOcjL#bOuO(!Z>}^eAL`5;N}RaSDez7p;11^RG9R zvcwjsZm75XX&XwW1vK4+v+oNWsGH)6A5F^Yb=zh?=IgY(vaQ40ZnZPBbr_9H$O*df zPt7_YI$Wkn3|HZqqyI%mXZ&#v;^o<RL-L 
z_JF+nnf2$4SG8cqmPn#`|8~o>`n9A|U{qRwtcJNvRw9}r zOT{**rx(=y@&L-bWNchpioTwPQV=#Q;dfJ21oy6<^XtXdmI6K8r^J4X@wVvN&sl*D z>Z+n>kl?uZn#(S>@wq^m-$4s~}oa6z!1WtboerpA$^?a>OqMnJ8 zjC0l(l)zXwt~*F;tJ@gUvXmyl$Vp|u{MV}{g+fH1*V>xyX6JkUSI1g;M7S^RbcE?% z8)@#ob#(N-X>)5S)*A{HJXK~M)%{L;ljcS&?dvm;V~5m3CGXxTTs_>kmSgO zkB|S|ruvz4U>GS9fMt7k7&GyGM&+4$)3YZ%5+uLGz4Ju<=mlfvms^c!TCfmlbq|%#A=0U^u$RmQF5Jg9 zD|I;?TpA2PjQdK5R8scgi*uFzqYpqr?R{*r;Mycaryos%$G;A5X*>li7(w$a+$KR1 z0#2LS0>b^Xw6vt_^<{dOLWGy}7TaB$l{uUff&?glPb>c?SW&L6syag>x<(SybKyCCdzdcB*riH$=PA$8;`wQzX-E~5dE2s{aoRdJ~HE+)l9eFsMsS{Hl;ZrGh?M@ZJMrG)r{y>3=Zd#^)HanqaLp2HOk ze!i)04H2pNEc|{3q=pJ#geR@hoqjpqA@+tA z78a?$tE#N*e|32IZ3+(lk)pb2KAmmOFs$hk&~Mu1DQ>d&;e$>h!@HrDtF)7?I2itX7|P_Lzxh2UD6 z8%zZjpeL~JX@8)6{%Fw!zAt`nsd~MIv~q&s#1g~#DYIx{njOM@GVwRg8OcS*I0uGD z;zZMtf0bL)zhDdN_PyU*hJdngjLeIjh%-`aS-q`X$Z0TI(D|D?BnBdB%`W#(yNHYrPFbgtmV1<@I1NvnE3H}`}fWLix}!Rg+ur*7d-+18V#hjjK@otg=au&Gn1 zjiduWVDvXhEpyRH+G$&B+q2i}XEtZP3Gb7FgRi$`BmMobqNJLnAzzc zQ&RO|VGwC(s6VdzI5BJIZkNmDgO*ZM)GThrO7w{zN|7wM zzgL-nRro15siLA58Ya&wGn&=iyUyb!cJ-odde3>lZJ6_xyB1~p7pR{Hx+~iq?kJdK zs}jrhoIr1gdRn*Em`sw#~h=j0n!-CN{7UDhFr&L zW8<`1sg2}mI1tYCw7*1_aP9Sg7$i#gt|Jah+Q*n!;x}pgu@sU>+imJsg*2pG=@Pz_AKm> zM*V;k51o$)=~ZoRZA1$R$<39QF`+W+$#5lBepuf2er-%`+H)zu{rXGj7g6i5MxI*8 za6Z14k)o-$D!511784h0|6=aos6Zkgyxygr+8aM}p7!#L?Z7`Yy|c@Lh4YR|doo z(HpvG$nERUO(akSIcezV% z4{Cw;_3e3r{;g7QI8OIv|0ATY%O?$nx-(BTXz50eO`|3Bo!XLt35RJ9-zyx9@jvN| z!eo;R{s;fqzk8`hgld_zGzMPhKCXHFn7~p7DArouhWv^qNRm+B9(G29mDmw4W~Kj1 z7u=aA^{mL)|0`@mda{`j?^(RgeTF0J&L_hWhv2(LrlGlUB;=r6|nSefj za%wu7-vzlA?4L0wo71MzXef2Zc;wfZCIzP96NBkM-Dvc9kyN#CX3L6(OjJ_R5hI~d z@`$;pD5~k4G1`uZ1!`mt(8crD91Tx~Oi|klR|lRl@!Ctsllw%M)k}_5LfHxJx#ws3 zBN-bE>_7sh`{8zOqqsuMn?n-5tuW+3&Iru@vKzJ^FrMx!A6vW=9ly% zU-XG(kt*rUeVS>5j{+r3U`~Di@g$xfUNzLtR4?}g=Ozq#usPQ*gGhQt{7q`DmFlEtF)~d5$JOV5wE%%VCd51Y z_r2wq3|QO?glLRlKAk>WAEdss^#W=JU~jHPw6C%RWBzm%jQFC&DEUFc!DYojU9jhl zF&G*euk5jc6Vv-20-)g?)4v@y7zzRTa3)b^PL#}nK15o*Mk$C0GF9Ol@OvYpZQZr% 
zr_1ASapmtgoZxJRVlxX+2tp+J289p`;e(^)1I>z%2W5W`x-LsAO6&Cb3ddxG5@dxTj{ns%EAf% zKlc7IEXuBH0EQJ&2@#Z%p+V^on4wd;8M+baM!Hi#y1To(8$r5Z=#~(MmXdzY>(V>E z%jf_1zQ=L@83#_;`&?_Uwf5>oI_^Y)p}6TD_pkgZ!@t^r4|N7JNZ#oqIUP=278y}P zL@t;|pwBVGh$8tjJT?^HwiO z1eu(+^z`bcV2V1vf&5(z6O((r_phpfw9>atk;q(sEJwuiDv;G2o z+>}#O^Q;RC)RE6WNg_!qW#Xgx%t(N3j4ueC9c!3?X;M2{{hMGkF?$5}SJx$Q*`5A(snlqpSO7cf0b!L6|(ClRgIOKljtFwH^fL2aV ztd5ja5q-w1V%6G8Lmft7(>KtS%Er8e;;nuUx9%|||5pv{Tp*AlQGL=hSoEKAz|~_? z{v#7RMX*QNPuRC8)P*YRF9@tRKil7Xb#nIw$3#b?)_My(3x(hc-at*%E~DNg!s_d4 zUnU_cJVHkheb6odlOJfgwB|fQDxfEM+^3tv8T2!KMRYh^4y?%m zx>w}Y;S?cQ`U#9@c3?6o9mj;CoNki%Y`p##QH*Xq$Ib5w9a2 zM6$5kon?f3T5fG}#ct>K8dTfNap+X|oVXj>XfGp1#odo(5m#{*W1U=r{C zcP|GTo6j#`{_9~-=-CbI+m(7oap&Ql`URaGCZ z>ny&Hrr*b>-jn&DQVT(_J;AbTpx^j9uES6-6GPVQ2(B=k+Asl10}~a zbrz8HnNhw>F|8t0*SiWBs_qT3FNQIT-DciwzNSwmG*hOEmVc7ttpbj|INN1Yo+;&Y z6T2K?=Li!?8K3P+_i(tXZ@=yWx`aT4V`PMU?`VvmIa$tmFw~vU^47fBL9Ph@JtggG zCupt@Yqcs$W%2`R7)s7!6D-t(SC3C6Z#pkY42A&uW->8a#$U&6u{Bc>=zlx z5#5OUL2|zbcVi@2Aq;SV8^c9%c^r>@s$PZKEK@0?Iti!Wa2YRvtxUM?RSUP;+FMQ?|8t1D}(7wwzgh%8#}<`&-bBx zsdrx<3UeGN+I30O!wN;EC4o|}?5P6Ru=6Q9nWAtr5)xcrsU^3NQmVmPJU3X#VgzmpPMV?8nlG=kzSxtGFNVY6D$n<35XI9| zIddJ@gu8`Z^cJQ|P2azq)}J+0=TS0haIrK92%dRQd#y8%OB`fIktZ2nUO)z#a~$jm z1yP9fPCVa&i8#P`P5rAKM~Q~Zb4c|-Abc3IY5zCjB2@$7+$wZoNeOK*tg`)wx;LQ9GGARG349IocuCLFmJnj<}rKP1sCt}aQ`?ha+h2{Bk913}( z;LGKnK|#B#wxUpUea_XcAma9O$~S3!Pc0a^u;}pS-~wdMRfu>i?(}mk(+}0iJd4Sl z^=xe_$6QHC@+`pjel#LS@Z_a82hOY8WT!Cnp*4B>3|-cN)Vt=9jxK8lXR0BqcSVdy zy`m(-2!UY-(soB|vI9B*fa)#Ya~<_lHA7mq*?ktHRpHsA;6vjKH9g5q;> zpuZ7lRhjQDQ{1lCTivpo3Mm&Z-46K@$0C;90KQ&Zuk*>oa~zHshb7Ntpid(8>yBtwS@V#e+!G#Z*J zG>sToi^I=~c|>P=0obGBrY4EPAO~1>U7qqqT(|}Vn$bf#G9x9=X|UvA8j`}+euj3| zGvPenw4=5_>*jotydpO}xineYBib!oB$9dchPq(l33ig%SbghsiB7%9g`OdazRCox z#J-uwRlH~BLu0!JzT0v1_KQ@u*7}-=)Y#JKF`sRin9)gIsQ(}Vm8jB$1X^3U^}f~p z6`$$3fYQQ(3IujVe7}7iR0F1w*7feuVT~UM_6FN3oPVyX$3;a7q0Qi<1)PCrlDV2H zqwt2A(*h1IF46w%`qrq)t+=?XI}a|+`g)HixZuyR2-0XZrIFR?`Kn4vfjRS&CN7Xr 
zg#+x|@Z^yZR}OMiz=^IHFow?t!lk3iH5m$tknamYvVO`-9%7;qIVy=}5=Hj9IT%HyB>R<^(7ed~+!<#?6ZMYUlyD!d zCTiWt9mGLxi3f}nrls9r-KqVIMp<__sHG(bZX%j$Bizj{*CfB57HN~d)U2&&y!l2) z&#W7Sc!L1fcKp7~1JS4T{KmH5ztaj{n2Dync{9{k#f!jqd5nJgLJWm7U!#GhD2XOi zshQ~3WNW{SPAOgCa5IF?*p0S(#WXF?GEJX}<(&j63mHES%ip-p(bu_I8^yHfCA%!{ zLOu5<>(cSb-Rze$LBw#P@D^I=4RkPIIyRm&(`OnL>XSi|QsS>a28BDUEH8;?a~rqv zm5MRQXlYX{ccdyBgLlN1FD|uqp@ZtWBG7N^PsbfrrZ!qiX}W&YA)mnK)K;wC%`$y`O@ zob@}RurO7)mWs=wjX5%kr@hEJYun@#OK-hFeBOp2!i&~Q*PYRa zwQtPLTR7gn&nsB>=I2N3`+#6WDlZo*c3dZRIkS%L`J(+EeMC;QX75bS&R=jFD+N11 z=z(`33j1+h{mbD)cHjNVqV|xDa^z6AbnYpH+2?rXJ&K`ET> zA}6UPITmNJlTVT+fH3ulX~!W@)U)%bY`k#P?M~SJR;gCBSff=!u)|n_8BHXc)fm3% zc-JY6hLl^lI|Y_a-32<6XwOz=*VT!z#1nFPyhFRuo^2EyUg5pni!pqvkl*flL2P`4 zOK5z57hfDlU;Cmu&-MD-UOcCSqpX$1KFS-$TlZU#ld3{@xQ1@C{JJ2}SxB6#h#&G= zF}W6Kn`OCJA=7N;>qM4$TmgA7sZxI!!$7yxsJ9R&oxDe^JX5~%^T*BQ)s>m4Wxl|G zWBX5^-dsbMdC68(N(pOzr`?(1?5@PG~{xbEupdex8rYEk}jZxJ6Mdn;Y(+$E{?;RKRZLMKH1Q@@slIn)M8@i^IT~LsQ%DOMI5xrmLc7(* zpl|pCST%MFq9E1yQShCs$+nIk1fEsk&)O^NQSz}6z@qzYlf4A(NE+Au@-?wBGX$-| z=5io*WP|}i59QR<&|qN)=gci)FPhJ0T<#5m2hQ@-(JoxoVZmHE6V|FsuE@GZ=|h~k$q0*P@*%eoJWlI!G3$y``fpubxN5O_RGrUNo^38 z=44Z`_#j*Ru{~PF1ngAqq`@qXY2Px8S#vL<31KHzLSadJv2lGf{=D$xz~TK-SyBlY z0w~DR8XC>&WgG>f~GM;zcvBfH_eEYBxk*D;GwD-%7 z?WNF;6JV@p2)Sq&5ubC_4m4V%LjE?Kh^uVuyC~m{H>>iN4d8L4*Ece1yrT7}elqX&i7)YlhxF0U=p9DU*+00fM@2yw)t06kO~ z7qxs1$K9&Y@zeVhC%45NZsHLge6^zwImXG?N*s>H4;z&gA9;Vr5Id!|no?boxxh9$ zszeE5xMQn)!^ir?Y$MQWHf{aYUeNC$k=0}wpM}uRWF+7jIS=h1=Mj0QRia8EHrMC- zg|Lj=+S=@F_wHq$S_ou-0=x#ac%0b}OIi>MH;{}kBn_b;r`3}CafxemxGbCIHqR9l zrmdoq-`@w$oSm<=X9MA@YiPg*3rkz2ORd1+t`@^6XDsrAG12L6S!!#ogtZaa!6sqs z-3$)7O<0A@DAKS&@$c6SrP7fR$~{?3*adIJbR}Fx1ENxsk~R&UjwT+*Vq*stz+}EJ z-(GWmnqQ&{l{W!}7v90o@+hKLg=}pWnUXt?g%%OTxG|9YAE-&aEfX{~d(n^fz3j8{ zty0cuEI_YZzMuPmFzD;>Ig@pZepwiW{D z#z|~EbHf@sw`)rmdGd_*mh$!4K0a=5i08EloxxExOVHK`n3K?C^k$oUb$q3U_Q$&yl1dy4(;G!f^|AhvLMI@&xL)ub|w zQo9j159PuLjggEi?Lwwd5j^FQmw>V0ioZgo+ds;tLyh+Ad4X+QUeZ;*=iQ7LY$!i3 
zQ#WS4px_f1d*v1Q&C8F{uH~%EcPJGh{3wspe!k7AAR^$LLePaoAmdt%7G3KEme~eQ zqkk(U_P=Vkc|ydwPh3ncSnyu2##pey`cbA-Q8`wx=zA1|5ib!h2|F!ijfAoH!I+27 zyIrS-=;hDVOKw7~*W#=lgsHf12w_-3dYHfG?TY2{gF&=Zbc#<&(Hg&%2V89x%!xtA zkHF^xLaRM!&S^dmW7hHIAg^iL;w0d?yMrrw~8nCz=zU5E^ zuG9jVPI>K)7k5BQ#umrgjQROb-5S)yO7CoE+g*;ZL%r5ZXFyzm_Q?xe;$1HnhWZw~ z9wW+A9ep)|_?@ked1D~;c5(z~sXZm@=K7$-Ctc$XvT|k{|Jbu*f_mn@$$j@0ix%m7 zSrC>b60XHFBPHft36e5UEdP6<4m@L-W&f`VEyS&T8;$Q+qsYpy#7WLwTJEab_Lv_3_Lcuv;09ovWgD4qs$HkrI!=Y<`J(3^odVv<@4 zM59I|yDpR!m5mQL2zS@I0!*2!O{XLb#7!kmeje)~8>=ub-dItpQ!FmNoGnct!?gjHtPUI9U*_DX~iUT*wgEq>a zZPFQ>=Xz-g7e$jI1j^mF3R&r8efOW7TxIp%K}$r+1;yx*iQob z?%nO-wk!I^06~GX+seS6N%WnCteP6T+bes(Z*muYe=3`FA|`mr>jF!GiT2lDZfEs) zk5@1zO{&`~Z)I_$!PNTOw4l&RL+PbNmh}58Hmh>=#vgaS*E3e&7LT7ts9g2!HCBfj zR_!N)I`@XV)AaMVi;XArrQx1ehxaXjf@a}nx!hD+XIYzi<#H-UM;ajyAUfv9)2r1r zxtfl}76TO#>;}aaydX!@CyPSYM#!N&bCs$9GQM$R_1wQQEnX&B)45-HSlhg@|+XFZ~!ca75 zTOnn%#@NKIv5*r%0b^|Mb-Tx|#gePrA(vHLe7vk~_^M?VhcfTaBXW1=GSb(|fkAgtQ`2}D-ko4oNXH%$oqCNJ=9493W5owhMaBFw{sXJk z=yv}D&XbLE(8TMpD=#;@z1iM$H~EP2=Teux2e6SV%e)rzwcP+6E!Cz~5xs19vq_h~ zhK{>ITh%xrKe&iGYf4t!Q=rEziOfCM~5etxNe_hvMh9lq@n;XF2D#JFSvLBF@4itT!I?W0YtsY&zU;WJc6NK`8%J(S_ zeVJl-vDu6zmC1aiUNVvhde=*YLL8D{@iT0syW_4Gn6*N&Z7M>xaUv2I0pECJR#cFO zdnQW9sYW>5>jfW$r`$DXj!Xtx%TPrT4wO!8&_yohuBGGxdWHd~8$OWXCWYop8!<*H zBRz4z6BaK%M|^qj7H0vnv~!b*{|L~Kg)-3hf-q()65nWsjEYwhUx>K zy5bpi#v9t58=YC_6eB8azn=q5q2MPFF#FHqm6oNnr7{_Q&cel(#q^u)r&qIcdAd5D z>t^LdoW@tOjp0hg%D4ME_ewIBWftD@ii#Dk7iIek^@RjzGC%_BrIGVxR{*frIh*IL zg5%BV28AJ<@9g%_T_xJWY+Gwe=)N$DQ`536aa4n_u?6j##mojuN;VDJeg4k*&Be#LZS#g9);-59|z9GjSyXFKUk6mH`->^S`zxw^@VHwJoC z%|mXi*^Th{ub*4iv#MTtJJV$xJOX;*!9s)P?`L#c&%dE^rI((GzW+>6t2@4(*0kVu zbd>oLPopwVG#sdFnBn904MEm9HE^&_(wlQGZ@;|{Nwwz^o`4gCV zkBfx^27}?nuJ=4gEOEn@NK&Hw(7^*O#v<>}oQK20IhE zGGuG4`gpfdBI$*WE7$xl`hn(g5dXs+79DVX-JV8++?RkKNzuffS<005_QkkL%F7Mo z*4O8R!ihm*id6$@74N+rkv0jJt8l=rb}}-FTG=+d+%`4GTf;z~Tr;F)S`|1kw_C=! 
z<+*QmQCuvV)efV;Am=Ag2DKaXEUL(21+pXQ4Nr8Fr?4CtRfr}|ZE&dRF2cxtYbqb3 zIF;l3xuJ6AwRltx=B?LmP&TXZ0BMKa{*p1*>dSJW?-5~z6u{2sD}); zQuM;;RKEW6nwaup4$E9-2}<|NR{>UJO77ixa~M8Us;g0)7C#0+?P!{@{M~^ z=JYT=lFs`Q%(9&@YaoUSch2EwiBc&}w{E*m(a4@Q%`~=DT?_8l88VOI#^5Jmc?{?T zySwFuyWaklu(9Ouq-6U;aNIjba`nP%%{IHsATQSTs>?P$078H&E$*6!` z+ah0)Q|}bmVUAj0FyMc-Ylj6fPrRy(St?k8PMjMyz3GL@5<>(hpNf7Q3bR;XwbV?H z#h#QT6w}?_q&+ir%+~e**%{6RGZsZF%9BR7+r873j?Te4lemRI#Kxth01Lv^;ZUA| zqirdaw%){XF*q=yz& z-Posu$i0fRE<(5|W3t#Lhn}#BFQg=Tgy~G$h8x#-~-Uh9hi#Z1i45sVmGU z>EdRM;{x8A>1VUfJDuIZaK?e1Xml@YgNzaIp{?R8GaWN6`4CX-K= zTDLLsWzrix@7s*ZUJwA6aK_x8%i3#y^HGrh2^E4MZY;FzX+nd7Y z6m>k$Zv28Th`AN%YVCKo=#yCs*E;&6)2XJRoj#S6|FZkwB+)~iKQERdbzuLh&(nEznD3Iw^5Op|!=Z=r0nimzM3 zWlVrnJJST4jcmR6-Cb$iC-ooO{3R~G?KK{xrL}NRM&_R#>p@a3SG&mJJK6E3%lEw( zykjYJEBDcdMPJ$tn|N+7zoUGBVF^geMHIsJwl} zqq=lXf5rXqRx*Cq!U_THMUmG!zLlFaG%URLttmc{)bf zNu(7z0^}a#`M<3rj;MltwC&GVR?6;CR8gT#w;v3o@jHHg40E4*;O)!^___}nKNiW# zT}0?Q#w4x2mTcQKVQu3iZeP;9(y%!1+_$dNI1B?ALd^Xsn>GgD}#lFRIT$rA}m-ttu z!iWLIpq&2LWvAPU&X~J9?~yVYKlY>x73$deJ>&J6=Y&OCYQ-87OPHn|Kygpe%yVM- zqnx4Kf(m16>-XJ3n<&aQRn@Vaw?LOJP4XNEJBQHZ?hRZV{q8EPPm$eD@xFd`SaNjV zDBkuj#qTvzxg|yxe9RjV7uAJ_bZh_`RrK}IK?rJrMjh95Ncnpf>yX5l8A+onDKiAL z6y2T?>vTw1VUx}4q<5LFVfbV0YYE9Ps*>>)VknD}LJ2zoJxX^_XOLl4&{;{mAxBY# zNLTe_Mi1r4H!xy~T+3jMHFQ49sMZv34+*W7>mxm_&kp#|lpB|Fjn~Xc3%`NN2ag#u zVw3Ki?!*DR+N>XM?{zsl!tcQr)-rn9{o}wo;0pn52bc}j( z?A|4*2)9~+@bN8hvgxzO6Fx+q?au6^604}uVU%x2!nddVKZj0mg?&2@(r0xzhvzM z#W0v?F972V3bjHg+|+7|lv&n`Sp4Yc?n@4jVzUb9M=w$1V}=#u;%U()B$iC!YNc8+ z?FElRZE&p0lajEL^2LV{_CTp^Bhc@~PLjyMhTc#Z8Jd;ry-%V#TS?<>xU6EH_-y+ejnrS616Ly6f?_H)$Jeb&Q1N9kM zyNIY!z~p4qY>K|PJjsvQcnFLthB}<&ULazK;d_^rjQ~}mZxHqg_eI0g$1>hpQ0y4D z+ng1N^wtofV3`_|X1_npe$BM;b6lkk zt+7)IXV8@JZD8KJ5=#jehKDWo&Cw}(+j@44F!9~2{p40 z+~H~sAi%M_Ww_)M^}&AW$MO{WmvlE52{daL7Z z&(CQ1p);hAaXEr)Rzgd*Tnfmu@wr3}9}x-%NGQi%S1PY~Fl+g^;UiyPP~EvNxhO>M zK=QgRbDHe0P}trn-l*tKSK(xO`t6TF!iD>o+3D z=6JFS`}=%Uwl+|MIAGN~@LDrA5NOInRN4cqav;=PKmiI+1+-zJ@T6D~t=5lvnOlRY 
z?|U?3v$Yn>Euvo%ymV;Kv`@_9Z8-t*euJsebEPR>6mN<^Cr)1tKH`*nPx|t{g47^=0`xDk~7J+Bze~FH=$KcmNJ=#0_`*YPA zdS?eU0V<;zsJIN;tuC#Z0!uMcI4s=(pfqkv_Ywy(M!=TU>t!{d1vK6<3>pW=O)fY= zP97T&OnHjlQZmBp(0X-d{Bz;TZ03;1Nlrr}8%uj&hFdpUlP3+QRZU|Wv@2hiQ;K?Fc~5Yl`6T^Y4Pg6TPj^Tn+m5FxJ^*BMU#O0 zsmaC-(f(qhXualv(WN3(c(p4Jc_% zWKo+5Q1#k92fdG?=4i>#pnb{Oim|=!txan#RQgXW3t&d&U{^K1OT! zuoz2JxF$v|X-OG>IKkp+9wcWxo7Y-@Lfds5v^0UHW|suw5Ly0+eHq2uN()5-+t9m6#mEqbf*Zhcq-m?tRrh(Nj^ zXQx=&Hc3ps`v-igSh=*MbPB1Nn26(Qdz}2te8PM#7+0kzP=bzgjyO$1H;jnO;h?j- zE8w{$Fmou&rAYZHx-kwM4dkKf3zLfK)@*XxxkV4Bvd0yMejE?QXN!45ucIBCUS836 z;|6HrKvx?jlO%b^y)|Gc8*Cy!GgH7W$rQddtO2iziDp^!6}KJU*xE3yGar6<0~DuA zgzx3ZfbgBnp(O&puFW8X=v!%yZnD{D-e3$Tb=B-ASZk3)vZmlnWL%LX8DjzG&_jc) z-cZ9uubxw%9f*4dj8l+D=Twig`?QhPb-3DD1r<)X&N}^S#1iZEY)Fp~<%qSmo*tHG zOtx{ix#15&o}QJV7F`ej6$t*wRM!TnB|c_yRKs9bV1J*>$wUrCm#o8=lDE5}7iU(6 zY{bj*$bp?P-1XtAWhS4ohS7wVOd5wIPS)GZ(oY~);d9#k!B;Vfv}K=!or8eOp@Dv4 zDR#c&T*0=u6B7Z6BtjdwN>f$JSEv|i748Yq_CC;-`*jE7l$I1)SeQ)dPpKF3owqm+ zOP*Zx_vz3c@LG;$etzN!4aflOR+EgyUA8jZ4cvm^;Kbnhn-`#ojbWP7`g|1y3aw@r zEib?5^gYrZg;EG{)0^E{j|-^O;p^t!~-Fh0<&0Z-3_om+eDL`g~jkWF#RAb-RDe1-ELpTkcn4Qi9b(n=wP_yx|EmcZ)|$Y~Dw?G!C@rgl*!LCcIb9 zM3xr%3j~Rt3E?^PSNXMk&WW^$VZIL#s_Cq6_LK;9cat_1_`>d_l=0E7<$4p%9Eqge zSM28Gr6uRNIAbznuy7apI;J()-0*jJhf|o`a2HI4qCf|7x z+70_8w6NfN)J~jB7h&y>YeMEew+xumsmt&fgV`a0=A=j6X4rngr5A+5>T77=Er3uecvo z{GIqq!Rq0xF>>Ri-CClV7(=y3j+DQ1FL!Z{zUXq~v%NgKr1EErl-mGQ}cLw4t|B(l}?obMiDVh>1uicqK2 zjY-M8iC$*$^4lkiW^~NWkQ=ALD?O_{iBh~}{@$}Vchqd!^n%&Fwu%>l6BwAE8%7P) z%7-orWXlMafW{>vtVBy@c8?r^R1DOQlHdLgyswHsM61pcVUjIlDYru{EdwuKRaN;d zS7o7b`9{@?=zbzP&s|pX$ix!ytS#UOxlhj-FgQIr|G{X?(wd80Iyfk8V7GhFTI`&3 zdZZ1f$F|g=`8md5s*>gXz6EX1r?97wdvd)+iJSXz?XnFmM4i^&cw)43LjIP%Nbhnt zJvB8pl^(mgeENEHp6k;Y>_B~;54G7lSs9NCTN?=JnNAH11WDYJw%Dlh|4V%8Gbw!f z00c&aIw+o#3pal0?h78jm#uKIdlUz#RDfxcfaYDv^i?q|OYwvHmrqH>1&<;n&I2F{ z9AgUgi8*%)g=-3qDx^Rgpee(;?qP-9t|v%;^QqKpKv|A_w)^7OR=`q*+Wdk3GekyP zlIfc-DNV`1g8{ite=Ba66mREa?AlWCz5^Wc>(77j`dDC;WYCJMEW=T|JO&y 
z4L0DFZl8%`5&RpR|N8J={VOl~FiDR5m*(&bmR|t11NQRC#k`H-KX)R*;0CO)RV_@@ zPk+3|iHaAU;hna|$g{sQEdKk$hms!$v&HOP=zneU8^3peMmn@&HkkOwyYn9b88kkQ z(giPm13VPL&(1CQ82St$ksRYR94#F_bg8u=Wh6qfoxYREF?dSrM@Mm6h6nj&Fh20^ zf82S{wdB80*I(#_>#hIrI&s{H>M}H@jWyk*9^p%ovV=4{JAwkv*!Q06XVT7;tSJTn zbZSrX{>C`~fLNqgt7EYBZXB8C^W-pus}^?BUJnP6iSUk;fW~-m`ggBxA@Ac%GNcen z3?|}Ny=|Bc=x0vYBPsU!Oo_Io916cs%Phlxk2pLSX0T{t+ljKf-0E4Dp~^A@vH@Fbb_7*)LiFa!5|CWO|uy=}yN|7ry<7a^lW}oJV#BeKbZy zpnA_o?Gh$JR;dbWQ{<_|NSQBUzPf>{L7|oo(-S`)A0raOS2c-EU(!9irk=K?0|kma zy2^Zxqef#Yh!c%s9~StVRH`Xfm#TM=&PSy07ssWy#IOV(C8}lTubyHZSuX5-?{+oe z6A)mhRO)}iR3~x1%nU!ueBRle`CI_&h_~DOR_-a(Vq{kchvoYQ0mg6YGSmTvjZO)i zV!Mzf*!XjB|%=;mqsbrYZ8R>u3h)6*3qhZj0hJ)CrIB1<{MnE zNzuPg@x2Gi>)T)j##=V?yUk1IyA*67_I~6kwO5ta>PLtU%aDH>*`!?iRPj|TT@U>Av{o56p z*b{;G6o@|e57z{FFlQ2(nZ-zc62qT0y^>9%Q5%XTYWWvAI#YJE3w>XDv-ycx%M0B; z@BWGP9a2s^|La>>ZGYKFE(HY3{|4VTFJ_i$W zew=95_@JQQg!=o}fLQ9_#sDPSQ`|^}KQ{BOc>%=MlO=_P|07xd1uhX1AZ~nv`+%7^ z|5@wrZ~XJXqnu%2_V6DtrxbnqmlRzkvqAY| z!3b91k%?_#0o5O}9H6nP1ZV3P9FPB4Pz)b1#A@giH^57dUZ8$PEX{IOt8 zD3DM$uSrAmuiU}^yv+$eAhC%wg=8N7vEV_Cy=?#O=jAWCaZlsEn5}UJLx(bQwV}NRd79rF4}J1d7mbg{iN(QXhc}Kq`i+?_36ht{ zW-wMz?no4y+7R6rdAfi7L5SBK-m5@iqdYo^UnUno;0Pogv#k#skQD#KEc0KCpvM5A z{PR43vh`r(4&jXdgJ75dg2A&b z`bEK?^r9aEtk)sK$ zK5UPJ`5ZmnwDaRjWTPC=4e=jZ5Jm#|!(J#^WR={9vg%jpN&-V2s}UDbe_l%b@1G+| zkuhO~QxcT7_7M=kJU=Jgcc;+rnolw0spv{^r(p+&)x;)NuiKLRQ+LDYt zB;g9rp$I-MtMj3{oplh%bC-jIkero1{Ha82rFzXE^@OBm;hxvJr>#dz3*cm*D7Mqx zS7R(rD!+=Z^0wWYP3`)_rtU~Vc1jGce`^##ZQP#S5#52 zW%|reH8So=_JfJVz9#^vbR^t8(`nU9eeqr*l zlIhP!N1A}DFxuTa@(2809W{Ugl-J$e^M5?D1spBhxWruK|3vWvgvpnH5rcz^|9?Ch zeZ!A)qUpBxf4u$w9{_{U;;*#1wzp=}78;l0Il506wg~h{y(ge|9Evu#cc?2DF6_uUeoO!;dp$KKYmq}syZAt!{G=PjnW8CP0-Gj$W zq`e%DI|Fkk@rwAK7}@bC1~nA1wHvd4;P$QUUyknOMB31(Jkr(2Or|f^se&7wIMP0s z7Zf$koEn=`*M#Ajb%Op$I1|wK+O@&Z>qtDScUfQcqnP(!P|m&2T0sW3D-+jXD_y9l zV|W=Bg8#K3qS1u#!jjCsL<>378&1UhMoKxNtRj4$tgISe9*O3^*fZUDW0y)ms_iL4~Qq_s19Fm?k z{7@0A<;M7$-1}U99HkgsoQ-Q0Q?*=3V`@T6z2bh4h9iYqp<^vjWlb;Vx)X$Cy_o%) 
z&@L)|R3U6c*q*9yP(xLnxu4Vt_1}2?bxhu;*kYTrtxYEN)=iIAT69ALobGH|FXHN% z-=IpU-jELR-QCbDW&h}poh+Sg5C|D&OjD1s393wuC1PbWyAU2Og7a$luvVkq#VXqm zUeyj5gVihUOLNvuAKj}_wsM`tv&i?s(0k+lc}O{a`~at;iG>tL4#m6%GmW4`R^ZOMHl)erw26F%V1Bs+*AE^no=|Os`kI>%Sv>+%nXvbZaNw9 z$Y(S*{1|_djoa7iP_#e9Dlp;g^bZ(FF!;S;quKh=sL2Ymu1-q}$#q2J=(Y1>rEQk5 z+T40=4LI+LWtOJM;nm^BzQ(flIX$B_P@SzsKTEHa#Ft;80dF8zdfOPH(l$*=D05h3 zx%@=CyJbnKjYpt4x87qfZdpTCt3vQ(e%O4|qITaK(O}m~WnNR{VQ9OmlB%79&b`Zh z)so>)s?E*$<))pGp>X_**=dGznx7YEakatAQ{mGEF7r@S&azg1&QEa`-pdSh|AgVK z&SUu%29c^J${G45A7odnLv(@DGD}!27nOq3VL@Yntcw%W2WOAIwcdN6=F2B7J@N;K zT2^1mW(06!lfQngn;&Zy&vjl1*E(Vj3v?KFaSh{)lbP(1rRf!tjPH$O%Y|su<(4Wr z*XkapXQ*H{Pw-wljz75?Z_9fenq1^0cA8wW^?QO|NVdY5w z^Y(0_hlw0N0s6^e_m$_c=gWUO zg9j=H?*=GKVNaGpR=wXf<-CJb%y)@WlWuzVO*ahq=lL&#wc&Gp%~KBLP&va97a0tP zUoOx8ToS8=2!)5)IW+U{JExTrCFOHlKMB_#Xy+3x4Y?*$dyQRH2Ku+2)U(}~DzC3} zd(v6tS)8ty_cV}(l_m#GFzBIwpEAmN9BU={HgKqWZQR06^D!;vKJ&p9AT}-aWNn?C za~PvMH&tgpqNYzvkZy~9&m~^i>TXuOQ=jvYSJxwd#p8;WVaKqQduS?CjPQqAz(0?S zNERHK6p!U(LB%b>*j_>Zp(Va4pnk9^nk|%WV6b)j3o3h!wktJb!j0v9VQNUrfU$t^ z90DcdWP5P^UN_VCa!vO-cEd#ykR>MX60s;P7&+z-=! zTodYH$5Eq=bgs3ZMQpn-GJiIE#{Ewn;BENhP7AHpLQbCK7fu&z(JiMFh|QJSG@p+x zcc!RBc6=}5WO{Y->T2db)lYb~trLL@5$`4Gufur{4%UhKJmr*}*ke`e4F`MH9QbJl zdhqV(qvI%1KD4OM?epMiOa&^>54TvHPOx%K_-O4E&Yy`h?gLVa)n*(T(=uy&F;EFe zzib6Db(0g2tHAz8pkAlOS|D&K#OR+l;8r3a;uD-jI7%m>+>uzlDIBhA-(ri$OG*L< z5?_B#G{&9&CvaZ;V~7Y~2dgn-!&=@tXyCV3_R1691ig)lp7d+4i)d*NAUq&fW1oad zirgTe~DWM4f6VWm? 
diff --git a/docs/reference/inference/inference-apis.asciidoc b/docs/reference/inference/inference-apis.asciidoc index 5cb03d950f68c..88421e4f64cfd 100644 --- a/docs/reference/inference/inference-apis.asciidoc +++ b/docs/reference/inference/inference-apis.asciidoc @@ -25,7 +25,7 @@ the following APIs to manage {infer} models and perform {infer}: [[inference-landscape]] .A representation of the Elastic inference landscape -image::images/inference-landscape.png[A representation of the Elastic inference landscape,align="center"] +image::images/inference-landscape.jpg[A representation of the Elastic inference landscape,align="center"] An {infer} endpoint enables you to use the corresponding {ml} model without manual deployment and apply it to your data at ingestion time through From ebd363d4afb5a43839d8e0696a8ba30443ff3c9b Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Fri, 18 Oct 2024 13:48:32 +0200 Subject: [PATCH 211/449] Update synthetic source documentation (#112363) * docs: update synthetic source docs * fix: also doc values false works * Revert "fix: also doc values false works" This reverts commit 0895a7675809a9b1074a3271c6a33694e48b7b4f. 
* fix: update synthetic source documentation * fix: all field types support it * fix: no need to explicitly mention it * fix: synthetic source sorting * fix: may instead of might --- docs/plugins/mapper-annotated-text.asciidoc | 5 ----- docs/reference/how-to/knn-search.asciidoc | 3 +-- .../mapping/types/aggregate-metric-double.asciidoc | 3 --- docs/reference/mapping/types/binary.asciidoc | 2 +- docs/reference/mapping/types/boolean.asciidoc | 5 ++--- docs/reference/mapping/types/date.asciidoc | 6 +----- docs/reference/mapping/types/date_nanos.asciidoc | 7 +------ docs/reference/mapping/types/flattened.asciidoc | 5 ++--- docs/reference/mapping/types/geo-point.asciidoc | 6 +----- docs/reference/mapping/types/geo-shape.asciidoc | 3 --- docs/reference/mapping/types/histogram.asciidoc | 2 +- docs/reference/mapping/types/ip.asciidoc | 6 +----- docs/reference/mapping/types/keyword.asciidoc | 6 +----- docs/reference/mapping/types/numeric.asciidoc | 2 +- docs/reference/mapping/types/range.asciidoc | 4 ++-- docs/reference/mapping/types/search-as-you-type.asciidoc | 3 +-- docs/reference/mapping/types/text.asciidoc | 4 ---- docs/reference/mapping/types/token-count.asciidoc | 3 +-- docs/reference/mapping/types/version.asciidoc | 6 +++--- docs/reference/mapping/types/wildcard.asciidoc | 4 +--- 20 files changed, 21 insertions(+), 64 deletions(-) diff --git a/docs/plugins/mapper-annotated-text.asciidoc b/docs/plugins/mapper-annotated-text.asciidoc index 9b6eccd136696..956b6bedffff1 100644 --- a/docs/plugins/mapper-annotated-text.asciidoc +++ b/docs/plugins/mapper-annotated-text.asciidoc @@ -155,11 +155,6 @@ be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features. 
-`annotated_text` fields support {ref}/mapping-source-field.html#synthetic-source[synthetic `_source`] if they have -a {ref}/keyword.html#keyword-synthetic-source[`keyword`] sub-field that supports synthetic -`_source` or if the `annotated_text` field sets `store` to `true`. Either way, it may -not have {ref}/copy-to.html[`copy_to`]. - If using a sub-`keyword` field then the values are sorted in the same way as a `keyword` field's values are sorted. By default, that means sorted with duplicates removed. So: diff --git a/docs/reference/how-to/knn-search.asciidoc b/docs/reference/how-to/knn-search.asciidoc index 18882380ce160..1d9c988f7b6c9 100644 --- a/docs/reference/how-to/knn-search.asciidoc +++ b/docs/reference/how-to/knn-search.asciidoc @@ -59,8 +59,7 @@ since it relies on separate data structures to perform the search. Before using the <> parameter, make sure to review the downsides of omitting fields from `_source`. -Another option is to use <> if all -your index fields support it. +Another option is to use <>. [discrete] === Ensure data nodes have enough memory diff --git a/docs/reference/mapping/types/aggregate-metric-double.asciidoc b/docs/reference/mapping/types/aggregate-metric-double.asciidoc index 8a4ddffc30bbd..faae5118e42bb 100644 --- a/docs/reference/mapping/types/aggregate-metric-double.asciidoc +++ b/docs/reference/mapping/types/aggregate-metric-double.asciidoc @@ -259,9 +259,6 @@ be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features. -`aggregate_metric-double` fields support <> in their default -configuration. 
- For example: [source,console,id=synthetic-source-aggregate-metric-double-example] ---- diff --git a/docs/reference/mapping/types/binary.asciidoc b/docs/reference/mapping/types/binary.asciidoc index a06e5b4f572e0..5733a28eb711a 100644 --- a/docs/reference/mapping/types/binary.asciidoc +++ b/docs/reference/mapping/types/binary.asciidoc @@ -63,7 +63,7 @@ be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features. -`binary` fields support <> only when <> are enabled. Synthetic source always sorts `binary` values in order of their byte representation. For example: +Synthetic source may sort `binary` values in order of their byte representation. For example: [source,console,id=synthetic-source-binary-example] ---- PUT idx diff --git a/docs/reference/mapping/types/boolean.asciidoc b/docs/reference/mapping/types/boolean.asciidoc index 494c41021dd2a..268be9016987f 100644 --- a/docs/reference/mapping/types/boolean.asciidoc +++ b/docs/reference/mapping/types/boolean.asciidoc @@ -241,10 +241,9 @@ any issues, but features in technical preview are not subject to the support SLA of official GA features. `boolean` fields support <> in their -default configuration. Synthetic `_source` cannot be used together with -<> or with <> disabled. +default configuration. -Synthetic source always sorts `boolean` fields. For example: +Synthetic source may sort `boolean` field values. For example: [source,console,id=synthetic-source-boolean-example] ---- PUT idx diff --git a/docs/reference/mapping/types/date.asciidoc b/docs/reference/mapping/types/date.asciidoc index 53b17a669ae75..4261d502ca104 100644 --- a/docs/reference/mapping/types/date.asciidoc +++ b/docs/reference/mapping/types/date.asciidoc @@ -239,11 +239,7 @@ be changed or removed in a future release. 
Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features. -`date` fields support <> in their -default configuration. Synthetic `_source` cannot be used together with -<> or with <> disabled. - -Synthetic source always sorts `date` fields. For example: +Synthetic source may sort `date` field values. For example: [source,console,id=synthetic-source-date-example] ---- PUT idx diff --git a/docs/reference/mapping/types/date_nanos.asciidoc b/docs/reference/mapping/types/date_nanos.asciidoc index e9ec85c470ecf..31f5ae09e7a63 100644 --- a/docs/reference/mapping/types/date_nanos.asciidoc +++ b/docs/reference/mapping/types/date_nanos.asciidoc @@ -150,12 +150,7 @@ be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features. -`date_nanos` fields support <> in their -default configuration. Synthetic `_source` cannot be used together with -<>, <> set to true -or with <> disabled. - -Synthetic source always sorts `date_nanos` fields. For example: +Synthetic source may sort `date_nanos` field values. For example: [source,console,id=synthetic-source-date-nanos-example] ---- PUT idx diff --git a/docs/reference/mapping/types/flattened.asciidoc b/docs/reference/mapping/types/flattened.asciidoc index af6ef3e739d0f..96b230794003a 100644 --- a/docs/reference/mapping/types/flattened.asciidoc +++ b/docs/reference/mapping/types/flattened.asciidoc @@ -325,10 +325,9 @@ any issues, but features in technical preview are not subject to the support SLA of official GA features. Flattened fields support <> in their default -configuration. Synthetic `_source` cannot be used with <> -disabled. +configuration. -Synthetic source always sorts alphabetically and de-duplicates flattened fields. +Synthetic source may sort `flattened` field values and remove duplicates. 
For example: [source,console,id=synthetic-source-flattened-sorting-example] ---- diff --git a/docs/reference/mapping/types/geo-point.asciidoc b/docs/reference/mapping/types/geo-point.asciidoc index 9ba8ea6e46782..0958997d3fb00 100644 --- a/docs/reference/mapping/types/geo-point.asciidoc +++ b/docs/reference/mapping/types/geo-point.asciidoc @@ -219,11 +219,7 @@ be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features. -`geo_point` fields support <> in their -default configuration. Synthetic `_source` cannot be used together with <> or with -<> disabled. - -Synthetic source always sorts `geo_point` fields (first by latitude and then +Synthetic source may sort `geo_point` fields (first by latitude and then longitude) and reduces them to their stored precision. For example: [source,console,id=synthetic-source-geo-point-example] ---- diff --git a/docs/reference/mapping/types/geo-shape.asciidoc b/docs/reference/mapping/types/geo-shape.asciidoc index e50c7d73b1b76..affebc6f721e4 100644 --- a/docs/reference/mapping/types/geo-shape.asciidoc +++ b/docs/reference/mapping/types/geo-shape.asciidoc @@ -502,6 +502,3 @@ synthetic `_source` is in technical preview. Features in technical preview may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features. - -`geo_shape` fields support <> in their -default configuration. diff --git a/docs/reference/mapping/types/histogram.asciidoc b/docs/reference/mapping/types/histogram.asciidoc index 8cd30110250bf..cdebe97000d68 100644 --- a/docs/reference/mapping/types/histogram.asciidoc +++ b/docs/reference/mapping/types/histogram.asciidoc @@ -79,7 +79,7 @@ any issues, but features in technical preview are not subject to the support SLA of official GA features. 
`histogram` fields support <> in their -default configuration. Synthetic `_source` cannot be used together with <>. +default configuration. NOTE: To save space, zero-count buckets are not stored in the histogram doc values. As a result, when indexing a histogram field in an index with synthetic source enabled, diff --git a/docs/reference/mapping/types/ip.asciidoc b/docs/reference/mapping/types/ip.asciidoc index f85dd78ecbd4a..bafc25a977caa 100644 --- a/docs/reference/mapping/types/ip.asciidoc +++ b/docs/reference/mapping/types/ip.asciidoc @@ -161,11 +161,7 @@ be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features. -`ip` fields support <> in their default -configuration. Synthetic `_source` cannot be used together with -<> or with <> disabled. - -Synthetic source always sorts `ip` fields and removes duplicates. For example: +Synthetic source may sort `ip` field values and remove duplicates. For example: [source,console,id=synthetic-source-ip-example] ---- PUT idx diff --git a/docs/reference/mapping/types/keyword.asciidoc b/docs/reference/mapping/types/keyword.asciidoc index b94216042427f..165d9d7900441 100644 --- a/docs/reference/mapping/types/keyword.asciidoc +++ b/docs/reference/mapping/types/keyword.asciidoc @@ -178,11 +178,7 @@ be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features. -`keyword` fields support <> in their -default configuration. Synthetic `_source` cannot be used together with -a <> or <>. - -By default, synthetic source sorts `keyword` fields and removes duplicates. +Synthetic source may sort `keyword` fields and remove duplicates. 
For example: [source,console,id=synthetic-source-keyword-example-default] ---- diff --git a/docs/reference/mapping/types/numeric.asciidoc b/docs/reference/mapping/types/numeric.asciidoc index 5bfa1bc7c1240..2fba1931a2a29 100644 --- a/docs/reference/mapping/types/numeric.asciidoc +++ b/docs/reference/mapping/types/numeric.asciidoc @@ -254,7 +254,7 @@ All numeric fields support <>, or with <> disabled. -Synthetic source always sorts numeric fields. For example: +Synthetic source may sort numeric field values. For example: [source,console,id=synthetic-source-numeric-example] ---- PUT idx diff --git a/docs/reference/mapping/types/range.asciidoc b/docs/reference/mapping/types/range.asciidoc index 04341f68c630a..3b31a1885e5b9 100644 --- a/docs/reference/mapping/types/range.asciidoc +++ b/docs/reference/mapping/types/range.asciidoc @@ -247,9 +247,9 @@ any issues, but features in technical preview are not subject to the support SLA of official GA features. `range` fields support <> in their default -configuration. Synthetic `_source` cannot be used with <> disabled. +configuration. -Synthetic source always sorts values and removes duplicates for all `range` fields except `ip_range`. Ranges are sorted by their lower bound and then by upper bound. For example: +Synthetic source may sort `range` field values and remove duplicates for all `range` fields except `ip_range`. Ranges are sorted by their lower bound and then by upper bound. For example: [source,console,id=synthetic-source-range-sorting-example] ---- PUT idx diff --git a/docs/reference/mapping/types/search-as-you-type.asciidoc b/docs/reference/mapping/types/search-as-you-type.asciidoc index c0bdc75f13392..3c71389f4cebb 100644 --- a/docs/reference/mapping/types/search-as-you-type.asciidoc +++ b/docs/reference/mapping/types/search-as-you-type.asciidoc @@ -266,5 +266,4 @@ any issues, but features in technical preview are not subject to the support SLA of official GA features. 
`search_as_you_type` fields support <> in their -default configuration. Synthetic `_source` cannot be used together with -<>. +default configuration. diff --git a/docs/reference/mapping/types/text.asciidoc b/docs/reference/mapping/types/text.asciidoc index ca69c93e8f1a8..b10484fc5ded8 100644 --- a/docs/reference/mapping/types/text.asciidoc +++ b/docs/reference/mapping/types/text.asciidoc @@ -134,10 +134,6 @@ The following parameters are accepted by `text` fields: Whether the field value should be stored and retrievable separately from the <> field. Accepts `true` or `false` (default). - This parameter will be automatically set to `true` for TSDB indices - (indices that have `index.mode` set to `time_series`) - if there is no <> - sub-field that supports synthetic `_source`. <>:: diff --git a/docs/reference/mapping/types/token-count.asciidoc b/docs/reference/mapping/types/token-count.asciidoc index 7d9dffcc82082..2e5bd111122c8 100644 --- a/docs/reference/mapping/types/token-count.asciidoc +++ b/docs/reference/mapping/types/token-count.asciidoc @@ -103,5 +103,4 @@ any issues, but features in technical preview are not subject to the support SLA of official GA features. `token_count` fields support <> in their -default configuration. Synthetic `_source` cannot be used together with -<>. +default configuration. diff --git a/docs/reference/mapping/types/version.asciidoc b/docs/reference/mapping/types/version.asciidoc index 1600451432bd8..1d9f927a80ce4 100644 --- a/docs/reference/mapping/types/version.asciidoc +++ b/docs/reference/mapping/types/version.asciidoc @@ -77,10 +77,10 @@ be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features. -`version` fields support <> so long as they don't -declare <>. +`version` fields support <> in their +default configuration.. -Synthetic source always sorts `version` fields and removes duplicates. 
For example: +Synthetic source may sort `version` field values and remove duplicates. For example: [source,console,id=synthetic-source-version-example] ---- PUT idx diff --git a/docs/reference/mapping/types/wildcard.asciidoc b/docs/reference/mapping/types/wildcard.asciidoc index 89a3109a37164..255e34ecd959b 100644 --- a/docs/reference/mapping/types/wildcard.asciidoc +++ b/docs/reference/mapping/types/wildcard.asciidoc @@ -133,10 +133,8 @@ The following parameters are accepted by `wildcard` fields: [[wildcard-synthetic-source]] ==== Synthetic `_source` -`wildcard` fields support <> so long as they don't -declare <>. -Synthetic source always sorts `wildcard` fields. For example: +Synthetic source may sort `wildcard` field values. For example: [source,console,id=synthetic-source-wildcard-example] ---- PUT idx From becd08da24df2af93eee28053d32929298cdccbd Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Fri, 18 Oct 2024 09:25:58 -0400 Subject: [PATCH 212/449] Close exchanges in HttpClientTests (#115059) --- .../java/org/elasticsearch/ingest/geoip/HttpClientTests.java | 2 ++ muted-tests.yml | 2 -- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/HttpClientTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/HttpClientTests.java index 43ed96afb07e4..f4a3cfbde4f4c 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/HttpClientTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/HttpClientTests.java @@ -47,6 +47,7 @@ public static void startServer() throws Throwable { server.createContext("/404/", exchange -> { try { exchange.sendResponseHeaders(404, 0); + exchange.close(); } catch (Exception e) { fail(e); } @@ -102,6 +103,7 @@ public boolean checkCredentials(String username, String password) { exchange.getResponseHeaders().add("Location", "/" + destination + "/"); } exchange.sendResponseHeaders(302, 0); + 
exchange.close(); } catch (Exception e) { fail(e); } diff --git a/muted-tests.yml b/muted-tests.yml index dcd70ad3fb83c..821a96217d05c 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -318,8 +318,6 @@ tests: - class: org.elasticsearch.xpack.inference.rest.ServerSentEventsRestActionListenerTests method: testNoStream issue: https://github.com/elastic/elasticsearch/issues/114788 -- class: org.elasticsearch.ingest.geoip.HttpClientTests - issue: https://github.com/elastic/elasticsearch/issues/112618 - class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityWithApmTracingRestIT method: testTracingCrossCluster issue: https://github.com/elastic/elasticsearch/issues/112731 From 906bf46ee8804cda18ffc0be2fbe410f963a860a Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Fri, 18 Oct 2024 16:20:43 +0200 Subject: [PATCH 213/449] Cannot skip tests named "values" (#115096) When trying to use a gradle `skipTest` rule on tests named "values", we get a class-cast exception in Jackson. This PR needs to rename this function for all versions of Elasticsearch that the `yamlRestCompatTestTransform` task will run on, so that later PRs that add skipTests will be able to pass. Since this test was added in 8.14, we must backport all the way back to there. 
``` class com.fasterxml.jackson.databind.node.IntNode cannot be cast to class com.fasterxml.jackson.databind.node.ArrayNode (com.fasterxml.jackson.databind.node.IntNode and com.fasterxml.jackson.databind.node.ArrayNode are in unnamed module of loader org.gradle.internal.classloader.VisitableURLClassLoader$InstrumentingVisitableURLClassLoader @50337c96) ``` --- .../yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml index 939f153b8b0ea..88ef03a22d70c 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml @@ -563,7 +563,7 @@ setup: - match: { values.1.0: "Payroll Specialist" } --- -values: +"values function": - requires: cluster_features: esql.agg_values reason: "values is available in 8.14+" From e927aaaa0b28645e6eff2dc36b3fd8cecd578f13 Mon Sep 17 00:00:00 2001 From: Jan Kuipers <148754765+jan-elastic@users.noreply.github.com> Date: Fri, 18 Oct 2024 17:00:10 +0200 Subject: [PATCH 214/449] Fix ML autoscaling (classic cloud) for models with zero allocations (#115082) * Fix ML autoscaling (classic cloud) for models with zero allocations * refactor a bit --- .../ml/autoscaling/MlAutoscalingContext.java | 2 +- .../MlAutoscalingDeciderServiceTests.java | 69 +++++++++++++++++++ 2 files changed, 70 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingContext.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingContext.java index cca59f27d5c76..f266dda6e3e5d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingContext.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingContext.java @@ -177,7 +177,7 @@ public boolean isEmpty() { return anomalyDetectionTasks.isEmpty() && snapshotUpgradeTasks.isEmpty() && dataframeAnalyticsTasks.isEmpty() - && modelAssignments.isEmpty(); + && modelAssignments.values().stream().allMatch(assignment -> assignment.totalTargetAllocations() == 0); } public List findPartiallyAllocatedModels() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderServiceTests.java index 632730bc7f141..a1db31c474f31 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderServiceTests.java @@ -29,6 +29,12 @@ import org.elasticsearch.xpack.autoscaling.capacity.AutoscalingDeciderContext; import org.elasticsearch.xpack.autoscaling.capacity.AutoscalingDeciderResult; import org.elasticsearch.xpack.core.ml.MachineLearningField; +import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; +import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentState; +import org.elasticsearch.xpack.core.ml.inference.assignment.Priority; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.job.NodeLoad; @@ -262,6 +268,69 @@ public void testScale_GivenUndeterminedMemory_ShouldReturnNullCapacity() { assertThat(result.requiredCapacity(), is(nullValue())); } + public 
void testScale_GivenModelWithZeroAllocations() { + MlAutoscalingDeciderService service = buildService(); + service.onMaster(); + + ClusterState clusterState = new ClusterState.Builder(new ClusterName("cluster")).metadata( + Metadata.builder() + .putCustom( + TrainedModelAssignmentMetadata.NAME, + new TrainedModelAssignmentMetadata( + Map.of( + "model-with-zero-allocations", + TrainedModelAssignment.Builder.empty( + new StartTrainedModelDeploymentAction.TaskParams( + "model-with-zero-allocations", + "model-with-zero-allocations-deployment", + 400, + 0, + 2, + 100, + null, + Priority.NORMAL, + 0L, + 0L + ), + new AdaptiveAllocationsSettings(true, 0, 4) + ).setAssignmentState(AssignmentState.STARTED).build() + ) + ) + ) + .build() + ).nodes(DiscoveryNodes.builder().add(buildNode("ml-node", ByteSizeValue.ofGb(4), 8)).build()).build(); + + AutoscalingDeciderResult result = service.scale( + Settings.EMPTY, + new DeciderContext( + clusterState, + new AutoscalingCapacity( + new AutoscalingCapacity.AutoscalingResources(null, ByteSizeValue.ofGb(4), null), + new AutoscalingCapacity.AutoscalingResources(null, ByteSizeValue.ofGb(4), null) + ) + ) + ); + // First call doesn't downscale as delay has not been satisfied + assertThat(result.reason().summary(), containsString("down scale delay has not been satisfied")); + + // Let's move time forward 1 hour + timeSupplier.setOffset(TimeValue.timeValueHours(1)); + + result = service.scale( + Settings.EMPTY, + new DeciderContext( + clusterState, + new AutoscalingCapacity( + new AutoscalingCapacity.AutoscalingResources(null, ByteSizeValue.ofGb(4), null), + new AutoscalingCapacity.AutoscalingResources(null, ByteSizeValue.ofGb(4), null) + ) + ) + ); + assertThat(result.reason().summary(), equalTo("Requesting scale down as tier and/or node size could be smaller")); + assertThat(result.requiredCapacity().total().memory().getBytes(), equalTo(0L)); + assertThat(result.requiredCapacity().node().memory().getBytes(), equalTo(0L)); + } + private 
DiscoveryNode buildNode(String id, ByteSizeValue machineMemory, int allocatedProcessors) { return DiscoveryNodeUtils.create( id, From d0c8ff59328db1265c2e77c8791aed0382fc2425 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 18 Oct 2024 08:01:04 -0700 Subject: [PATCH 215/449] Refactor TSDB doc_values util allow introduce new codec (#115042) This PR refactors the doc_values utils used in the TSDB codec to allow sharing between the current codec and the new codec. --- .../AbstractDocValuesForUtilBenchmark.java | 2 +- .../codec/tsdb/internal/DecodeBenchmark.java | 3 +- .../index/codec/tsdb/DocValuesForUtil.java | 10 +++-- .../codec/tsdb/ES87TSDBDocValuesConsumer.java | 2 +- .../codec/tsdb/ES87TSDBDocValuesProducer.java | 6 +-- ...Encoder.java => TSDBDocValuesEncoder.java} | 44 ++++++++++--------- .../codec/tsdb/DocValuesForUtilTests.java | 26 +++++------ .../tsdb/ES87TSDBDocValuesEncoderTests.java | 4 +- 8 files changed, 49 insertions(+), 48 deletions(-) rename server/src/main/java/org/elasticsearch/index/codec/tsdb/{ES87TSDBDocValuesEncoder.java => TSDBDocValuesEncoder.java} (89%) diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/index/codec/tsdb/internal/AbstractDocValuesForUtilBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/index/codec/tsdb/internal/AbstractDocValuesForUtilBenchmark.java index 58b1d2455a7a6..53723f05728b5 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/index/codec/tsdb/internal/AbstractDocValuesForUtilBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/index/codec/tsdb/internal/AbstractDocValuesForUtilBenchmark.java @@ -21,7 +21,7 @@ public abstract class AbstractDocValuesForUtilBenchmark { protected final int blockSize; public AbstractDocValuesForUtilBenchmark() { - this.forUtil = new DocValuesForUtil(); + this.forUtil = new DocValuesForUtil(ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE); this.blockSize = ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; } diff --git 
a/benchmarks/src/main/java/org/elasticsearch/benchmark/index/codec/tsdb/internal/DecodeBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/index/codec/tsdb/internal/DecodeBenchmark.java index b8f0a11e21c8f..284324b3d9206 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/index/codec/tsdb/internal/DecodeBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/index/codec/tsdb/internal/DecodeBenchmark.java @@ -12,7 +12,6 @@ import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.store.ByteArrayDataOutput; import org.apache.lucene.store.DataOutput; -import org.elasticsearch.index.codec.tsdb.DocValuesForUtil; import org.openjdk.jmh.infra.Blackhole; import java.io.IOException; @@ -44,7 +43,7 @@ public void setupInvocation(int bitsPerValue) { @Override public void benchmark(int bitsPerValue, Blackhole bh) throws IOException { - DocValuesForUtil.decode(bitsPerValue, this.dataInput, this.output); + forUtil.decode(bitsPerValue, this.dataInput, this.output); bh.consume(this.output); } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtil.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtil.java index 648913098ff0d..db9c352ee30f8 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtil.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtil.java @@ -21,10 +21,12 @@ public class DocValuesForUtil { private static final int BITS_IN_FIVE_BYTES = 5 * Byte.SIZE; private static final int BITS_IN_SIX_BYTES = 6 * Byte.SIZE; private static final int BITS_IN_SEVEN_BYTES = 7 * Byte.SIZE; - private static final int blockSize = ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; + private final int blockSize; private final byte[] encoded = new byte[1024]; - public DocValuesForUtil() {} + public DocValuesForUtil(int numericBlockSize) { + this.blockSize = numericBlockSize; + } public static int roundBits(int 
bitsPerValue) { if (bitsPerValue > 24 && bitsPerValue <= 32) { @@ -67,7 +69,7 @@ private void encodeFiveSixOrSevenBytesPerValue(long[] in, int bitsPerValue, fina out.writeBytes(this.encoded, bytesPerValue * in.length); } - public static void decode(int bitsPerValue, final DataInput in, long[] out) throws IOException { + public void decode(int bitsPerValue, final DataInput in, long[] out) throws IOException { if (bitsPerValue <= 24) { ForUtil.decode(bitsPerValue, in, out); } else if (bitsPerValue <= 32) { @@ -81,7 +83,7 @@ public static void decode(int bitsPerValue, final DataInput in, long[] out) thro } } - private static void decodeFiveSixOrSevenBytesPerValue(int bitsPerValue, final DataInput in, long[] out) throws IOException { + private void decodeFiveSixOrSevenBytesPerValue(int bitsPerValue, final DataInput in, long[] out) throws IOException { // NOTE: we expect multibyte values to be written "least significant byte" first int bytesPerValue = bitsPerValue / Byte.SIZE; long mask = (1L << bitsPerValue) - 1; diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java index 71d9768ac5ff7..5d79807fe6674 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java @@ -144,7 +144,7 @@ private long[] writeField(FieldInfo field, DocValuesProducer valuesProducer, lon if (maxOrd != 1) { final long[] buffer = new long[ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE]; int bufferSize = 0; - final ES87TSDBDocValuesEncoder encoder = new ES87TSDBDocValuesEncoder(); + final TSDBDocValuesEncoder encoder = new TSDBDocValuesEncoder(ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE); values = valuesProducer.getSortedNumeric(field); final int bitsPerOrd = maxOrd >= 0 ? 
PackedInts.bitsRequired(maxOrd - 1) : -1; for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java index e3c2daddba80e..e3f7e829c1d2e 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java @@ -965,7 +965,7 @@ public long longValue() { private final int maxDoc = ES87TSDBDocValuesProducer.this.maxDoc; private int doc = -1; - private final ES87TSDBDocValuesEncoder decoder = new ES87TSDBDocValuesEncoder(); + private final TSDBDocValuesEncoder decoder = new TSDBDocValuesEncoder(ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE); private long currentBlockIndex = -1; private final long[] currentBlock = new long[ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE]; @@ -1030,7 +1030,7 @@ public long longValue() throws IOException { ); return new NumericDocValues() { - private final ES87TSDBDocValuesEncoder decoder = new ES87TSDBDocValuesEncoder(); + private final TSDBDocValuesEncoder decoder = new TSDBDocValuesEncoder(ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE); private long currentBlockIndex = -1; private final long[] currentBlock = new long[ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE]; @@ -1092,7 +1092,7 @@ private NumericValues getValues(NumericEntry entry, final long maxOrd) throws IO final int bitsPerOrd = maxOrd >= 0 ? 
PackedInts.bitsRequired(maxOrd - 1) : -1; return new NumericValues() { - private final ES87TSDBDocValuesEncoder decoder = new ES87TSDBDocValuesEncoder(); + private final TSDBDocValuesEncoder decoder = new TSDBDocValuesEncoder(ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE); private long currentBlockIndex = -1; private final long[] currentBlock = new long[ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE]; diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoder.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/TSDBDocValuesEncoder.java similarity index 89% rename from server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoder.java rename to server/src/main/java/org/elasticsearch/index/codec/tsdb/TSDBDocValuesEncoder.java index 4e95ce34dc410..3af9d726af4fc 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoder.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/TSDBDocValuesEncoder.java @@ -44,8 +44,8 @@ * * * - * Notice that encoding and decoding are written in a nested way, for instance {@link ES87TSDBDocValuesEncoder#deltaEncode} calling - * {@link ES87TSDBDocValuesEncoder#removeOffset} and so on. This allows us to easily introduce new encoding schemes or remove existing + * Notice that encoding and decoding are written in a nested way, for instance {@link TSDBDocValuesEncoder#deltaEncode} calling + * {@link TSDBDocValuesEncoder#removeOffset} and so on. This allows us to easily introduce new encoding schemes or remove existing * (non-effective) encoding schemes in a backward-compatible way. * * A token is used as a bitmask to represent which encoding is applied and allows us to detect the applied encoding scheme at decoding time. @@ -54,11 +54,13 @@ * * Of course, decoding follows the opposite order with respect to encoding. 
*/ -public class ES87TSDBDocValuesEncoder { +public class TSDBDocValuesEncoder { private final DocValuesForUtil forUtil; + private final int numericBlockSize; - public ES87TSDBDocValuesEncoder() { - this.forUtil = new DocValuesForUtil(); + public TSDBDocValuesEncoder(int numericBlockSize) { + this.forUtil = new DocValuesForUtil(numericBlockSize); + this.numericBlockSize = numericBlockSize; } /** @@ -68,7 +70,7 @@ public ES87TSDBDocValuesEncoder() { private void deltaEncode(int token, int tokenBits, long[] in, DataOutput out) throws IOException { int gts = 0; int lts = 0; - for (int i = 1; i < ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; ++i) { + for (int i = 1; i < numericBlockSize; ++i) { if (in[i] > in[i - 1]) { gts++; } else if (in[i] < in[i - 1]) { @@ -79,7 +81,7 @@ private void deltaEncode(int token, int tokenBits, long[] in, DataOutput out) th final boolean doDeltaCompression = (gts == 0 && lts >= 2) || (lts == 0 && gts >= 2); long first = 0; if (doDeltaCompression) { - for (int i = ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE - 1; i > 0; --i) { + for (int i = numericBlockSize - 1; i > 0; --i) { in[i] -= in[i - 1]; } // Avoid setting in[0] to 0 in case there is a minimum interval between @@ -115,7 +117,7 @@ private void removeOffset(int token, int tokenBits, long[] in, DataOutput out) t } if (min != 0) { - for (int i = 0; i < ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; ++i) { + for (int i = 0; i < numericBlockSize; ++i) { in[i] -= min; } token = (token << 1) | 0x01; @@ -143,7 +145,7 @@ private void gcdEncode(int token, int tokenBits, long[] in, DataOutput out) thro } final boolean doGcdCompression = Long.compareUnsigned(gcd, 1) > 0; if (doGcdCompression) { - for (int i = 0; i < ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; ++i) { + for (int i = 0; i < numericBlockSize; ++i) { in[i] /= gcd; } token = (token << 1) | 0x01; @@ -174,7 +176,7 @@ private void forEncode(int token, int tokenBits, long[] in, DataOutput out) thro * Encode the given longs using a combination 
of delta-coding, GCD factorization and bit packing. */ void encode(long[] in, DataOutput out) throws IOException { - assert in.length == ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; + assert in.length == numericBlockSize; deltaEncode(0, 0, in, out); } @@ -192,7 +194,7 @@ void encode(long[] in, DataOutput out) throws IOException { * */ void encodeOrdinals(long[] in, DataOutput out, int bitsPerOrd) throws IOException { - assert in.length == ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; + assert in.length == numericBlockSize; int numRuns = 1; long firstValue = in[0]; long previousValue = firstValue; @@ -259,7 +261,7 @@ void encodeOrdinals(long[] in, DataOutput out, int bitsPerOrd) throws IOExceptio } void decodeOrdinals(DataInput in, long[] out, int bitsPerOrd) throws IOException { - assert out.length == ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE : out.length; + assert out.length == numericBlockSize : out.length; long v1 = in.readVLong(); int encoding = Long.numberOfTrailingZeros(~v1); @@ -275,7 +277,7 @@ void decodeOrdinals(DataInput in, long[] out, int bitsPerOrd) throws IOException Arrays.fill(out, runLen, out.length, v2); } else if (encoding == 2) { // bit-packed - DocValuesForUtil.decode(bitsPerOrd, in, out); + forUtil.decode(bitsPerOrd, in, out); } else if (encoding == 3) { // cycle encoding int cycleLength = (int) v1; @@ -293,13 +295,13 @@ void decodeOrdinals(DataInput in, long[] out, int bitsPerOrd) throws IOException /** Decode longs that have been encoded with {@link #encode}. 
*/ void decode(DataInput in, long[] out) throws IOException { - assert out.length == ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE : out.length; + assert out.length == numericBlockSize : out.length; final int token = in.readVInt(); final int bitsPerValue = token >>> 3; if (bitsPerValue != 0) { - DocValuesForUtil.decode(bitsPerValue, in, out); + forUtil.decode(bitsPerValue, in, out); } else { Arrays.fill(out, 0L); } @@ -330,21 +332,21 @@ void decode(DataInput in, long[] out) throws IOException { } // this loop should auto-vectorize - private static void mul(long[] arr, long m) { - for (int i = 0; i < ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; ++i) { + private void mul(long[] arr, long m) { + for (int i = 0; i < numericBlockSize; ++i) { arr[i] *= m; } } // this loop should auto-vectorize - private static void add(long[] arr, long min) { - for (int i = 0; i < ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; ++i) { + private void add(long[] arr, long min) { + for (int i = 0; i < numericBlockSize; ++i) { arr[i] += min; } } - private static void deltaDecode(long[] arr) { - for (int i = 1; i < ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; ++i) { + private void deltaDecode(long[] arr) { + for (int i = 1; i < numericBlockSize; ++i) { arr[i] += arr[i - 1]; } } diff --git a/server/src/test/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtilTests.java b/server/src/test/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtilTests.java index 7da5463ea46ff..62474113d73d2 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtilTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtilTests.java @@ -31,17 +31,18 @@ import java.util.Random; public class DocValuesForUtilTests extends LuceneTestCase { + int NUMERIC_BLOCK_SIZE = 1 << 7; public void testEncodeDecode() throws IOException { final int iterations = RandomNumbers.randomIntBetween(random(), 50, 1000); - final long[] values = new long[iterations * 
ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE]; + final long[] values = new long[iterations * NUMERIC_BLOCK_SIZE]; final int[] bpvs = new int[iterations]; for (int i = 0; i < iterations; ++i) { final int bpv = TestUtil.nextInt(random(), 1, 64); bpvs[i] = DocValuesForUtil.roundBits(bpv); - for (int j = 0; j < ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; ++j) { - values[i * ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE + j] = bpv == 64 + for (int j = 0; j < NUMERIC_BLOCK_SIZE; ++j) { + values[i * NUMERIC_BLOCK_SIZE + j] = bpv == 64 ? random().nextLong() : TestUtil.nextLong(random(), 0, PackedInts.maxValue(bpv)); } @@ -53,12 +54,12 @@ public void testEncodeDecode() throws IOException { { // encode IndexOutput out = d.createOutput("test.bin", IOContext.DEFAULT); - final DocValuesForUtil forUtil = new DocValuesForUtil(); + final DocValuesForUtil forUtil = new DocValuesForUtil(NUMERIC_BLOCK_SIZE); for (int i = 0; i < iterations; ++i) { - long[] source = new long[ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE]; - for (int j = 0; j < ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; ++j) { - source[j] = values[i * ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE + j]; + long[] source = new long[NUMERIC_BLOCK_SIZE]; + for (int j = 0; j < NUMERIC_BLOCK_SIZE; ++j) { + source[j] = values[i * NUMERIC_BLOCK_SIZE + j]; } out.writeByte((byte) bpvs[i]); forUtil.encode(source, bpvs[i], out); @@ -70,17 +71,14 @@ public void testEncodeDecode() throws IOException { { // decode IndexInput in = d.openInput("test.bin", IOContext.READONCE); - final long[] restored = new long[ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE]; + final DocValuesForUtil forUtil = new DocValuesForUtil(NUMERIC_BLOCK_SIZE); + final long[] restored = new long[NUMERIC_BLOCK_SIZE]; for (int i = 0; i < iterations; ++i) { final int bitsPerValue = in.readByte(); - DocValuesForUtil.decode(bitsPerValue, in, restored); + forUtil.decode(bitsPerValue, in, restored); assertArrayEquals( Arrays.toString(restored), - ArrayUtil.copyOfSubArray( - values, - i 
* ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE, - (i + 1) * ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE - ), + ArrayUtil.copyOfSubArray(values, i * NUMERIC_BLOCK_SIZE, (i + 1) * NUMERIC_BLOCK_SIZE), restored ); } diff --git a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoderTests.java b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoderTests.java index 288830276915e..0010c25179b69 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoderTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoderTests.java @@ -23,11 +23,11 @@ public class ES87TSDBDocValuesEncoderTests extends LuceneTestCase { - private final ES87TSDBDocValuesEncoder encoder; + private final TSDBDocValuesEncoder encoder; private final int blockSize = ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; public ES87TSDBDocValuesEncoderTests() { - this.encoder = new ES87TSDBDocValuesEncoder(); + this.encoder = new TSDBDocValuesEncoder(blockSize); } public void testRandomValues() throws IOException { From 9050f8df024db01af0d9512e35c615fc18db14fa Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Fri, 18 Oct 2024 11:39:08 -0400 Subject: [PATCH 216/449] [CI] Use console=plain so that Buildkite logs aren't a mess (#115049) --- .buildkite/hooks/pre-command | 4 ++-- .ci/scripts/packaging-test.sh | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.buildkite/hooks/pre-command b/.buildkite/hooks/pre-command index a886220c84cda..0ece129a3c238 100644 --- a/.buildkite/hooks/pre-command +++ b/.buildkite/hooks/pre-command @@ -16,10 +16,10 @@ export COMPOSE_HTTP_TIMEOUT JOB_BRANCH="$BUILDKITE_BRANCH" export JOB_BRANCH -GRADLEW="./gradlew --parallel --scan --build-cache --no-watch-fs -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/" +GRADLEW="./gradlew --console=plain --parallel --scan --build-cache --no-watch-fs 
-Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/" export GRADLEW -GRADLEW_BAT="./gradlew.bat --parallel --scan --build-cache --no-watch-fs -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/" +GRADLEW_BAT="./gradlew.bat --console=plain --parallel --scan --build-cache --no-watch-fs -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/" export GRADLEW_BAT export $(cat .ci/java-versions.properties | grep '=' | xargs) diff --git a/.ci/scripts/packaging-test.sh b/.ci/scripts/packaging-test.sh index bb7547933b213..4d84eded8a3ff 100755 --- a/.ci/scripts/packaging-test.sh +++ b/.ci/scripts/packaging-test.sh @@ -78,5 +78,5 @@ sudo -E env \ --unset=JAVA_HOME \ SYSTEM_JAVA_HOME=`readlink -f -n $BUILD_JAVA_HOME` \ DOCKER_CONFIG="${HOME}/.docker" \ - ./gradlew -g $HOME/.gradle --scan --parallel --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ --continue $@ + ./gradlew -g $HOME/.gradle --console=plain --scan --parallel --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ --continue $@ From 156ba2c6d11bb5ee0041e355db5dd51f7d060274 Mon Sep 17 00:00:00 2001 From: Michael Peterson Date: Fri, 18 Oct 2024 12:25:17 -0400 Subject: [PATCH 217/449] ES|QL per-cluster took time is incorrectly calculated and causes fatal exceptions (#115017) The model for calculating per-cluster `took` times from remote clusters in https://github.com/elastic/elasticsearch/pull/112595 was flawed. It attempted to use Java's System.nanoTime between the local and remote clusters, which is not safe. This results in per-cluster took times that have arbitrary (invalid) values including negative values which cause exceptions to be thrown by the `TimeValue` constructor. (Note: the overall took time calculation was done correctly, so it was the remote per-cluster took times that were flawed.) In this PR, I've done a redesign to address this. 
A key decision of this re-design was whether to always calculate took times only on the querying cluster (bypassing this whole problem) or to continue to allow the remote clusters to calculate their own took times for the remote processing and report that back to the querying cluster via the `ComputeResponse`. I decided in favor of having remote clusters compute their own took times for the remote processing and to additionally track "planning" time (encompassing field-caps and policy enrich remote calls), so that total per-cluster took time is a combination of the two. In _search, remote cluster took times are calculated entirely on the remote cluster, so network time is not included in the per-cluster took times. This has been helpful in diagnosing issues on user environments because if you see an overall took time that is significantly larger than the per cluster took times, that may indicate a network issue, which has happened in diagnosing cross-cluster issues in _search. I moved relative time tracking into `EsqlExecutionInfo`. The "planning time" marker is currently only used in cross-cluster searches, so it will conflict with the INLINESTATS 2 phase model (where planning can be done twice). We will improve this design to handle a 2 phase model in a later ticket, as part of the INLINESTATS work. I tested the current overall took time calculation model with local-only INLINESTATS queries and they work correctly. I also fixed another secondary bug in this PR. If the remote cluster is an older version that does not return took time (and shard info) in the ComputeResponse, the per-cluster took time is then calculated on the querying cluster as a fallback. Finally, I fixed some minor inconsistencies about whether the `_shards` info is shown in the response. 
The rule now is that `_shards` is always shown with 0 shards for SKIPPED clusters, with actual counts for SUCCESSFUL clusters and for remotes running an older version that doesn't report shard stats, the `_shards` field is left out of the XContent response. Fixes https://github.com/elastic/elasticsearch/issues/115022 --- .../esql/action/CrossClustersQueryIT.java | 74 ++++++---- .../xpack/esql/action/EsqlExecutionInfo.java | 49 ++++++- .../xpack/esql/plugin/ComputeListener.java | 84 ++++++++---- .../xpack/esql/plugin/ComputeService.java | 58 ++++---- .../xpack/esql/session/EsqlSession.java | 127 +++++++++--------- .../esql/plugin/ComputeListenerTests.java | 62 ++++++++- .../xpack/esql/session/EsqlSessionTests.java | 45 +++++++ 7 files changed, 331 insertions(+), 168 deletions(-) diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java index adfa2fc7273cd..ddd5cff014ed2 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java @@ -97,7 +97,8 @@ public void testSuccessfulPathways() { EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); - assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + long overallTookMillis = executionInfo.overallTook().millis(); + assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); @@ -106,6 +107,7 @@ public void testSuccessfulPathways() { assertThat(remoteCluster.getIndexExpression(), 
equalTo("logs-*")); assertThat(remoteCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); assertThat(remoteCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(remoteCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); assertThat(remoteCluster.getTotalShards(), equalTo(remoteNumShards)); assertThat(remoteCluster.getSuccessfulShards(), equalTo(remoteNumShards)); assertThat(remoteCluster.getSkippedShards(), equalTo(0)); @@ -115,6 +117,7 @@ public void testSuccessfulPathways() { assertThat(localCluster.getIndexExpression(), equalTo("logs-*")); assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); assertThat(localCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(localCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); assertThat(localCluster.getTotalShards(), equalTo(localNumShards)); assertThat(localCluster.getSuccessfulShards(), equalTo(localNumShards)); assertThat(localCluster.getSkippedShards(), equalTo(0)); @@ -133,7 +136,8 @@ public void testSuccessfulPathways() { EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); - assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + long overallTookMillis = executionInfo.overallTook().millis(); + assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); @@ -142,6 +146,7 @@ public void testSuccessfulPathways() { assertThat(remoteCluster.getIndexExpression(), equalTo("logs-*")); assertThat(remoteCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); assertThat(remoteCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(remoteCluster.getTook().millis(), 
lessThanOrEqualTo(overallTookMillis)); assertThat(remoteCluster.getTotalShards(), equalTo(remoteNumShards)); assertThat(remoteCluster.getSuccessfulShards(), equalTo(remoteNumShards)); assertThat(remoteCluster.getSkippedShards(), equalTo(0)); @@ -151,6 +156,7 @@ public void testSuccessfulPathways() { assertThat(localCluster.getIndexExpression(), equalTo("logs-*")); assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); assertThat(localCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(localCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); assertThat(localCluster.getTotalShards(), equalTo(localNumShards)); assertThat(localCluster.getSuccessfulShards(), equalTo(localNumShards)); assertThat(localCluster.getSkippedShards(), equalTo(0)); @@ -180,7 +186,8 @@ public void testSearchesWhereMissingIndicesAreSpecified() { assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); - assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + long overallTookMillis = executionInfo.overallTook().millis(); + assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); @@ -189,6 +196,7 @@ public void testSearchesWhereMissingIndicesAreSpecified() { assertThat(remoteCluster.getIndexExpression(), equalTo("no_such_index")); assertThat(remoteCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); assertThat(remoteCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(remoteCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); assertThat(remoteCluster.getTotalShards(), equalTo(0)); // 0 since no matching index, thus no shards to search assertThat(remoteCluster.getSuccessfulShards(), equalTo(0)); assertThat(remoteCluster.getSkippedShards(), equalTo(0)); @@ -198,6 
+206,7 @@ public void testSearchesWhereMissingIndicesAreSpecified() { assertThat(localCluster.getIndexExpression(), equalTo("logs-*")); assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); assertThat(localCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(localCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); assertThat(localCluster.getTotalShards(), equalTo(localNumShards)); assertThat(localCluster.getSuccessfulShards(), equalTo(localNumShards)); assertThat(localCluster.getSkippedShards(), equalTo(0)); @@ -219,7 +228,8 @@ public void testSearchesWhereMissingIndicesAreSpecified() { EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); - assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + long overallTookMillis = executionInfo.overallTook().millis(); + assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); @@ -228,6 +238,7 @@ public void testSearchesWhereMissingIndicesAreSpecified() { assertThat(remoteCluster.getIndexExpression(), equalTo("logs-*")); assertThat(remoteCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); assertThat(remoteCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(remoteCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); assertThat(remoteCluster.getTotalShards(), equalTo(remoteNumShards)); assertThat(remoteCluster.getSuccessfulShards(), equalTo(remoteNumShards)); assertThat(remoteCluster.getSkippedShards(), equalTo(0)); @@ -235,8 +246,9 @@ public void testSearchesWhereMissingIndicesAreSpecified() { EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER); assertThat(localCluster.getIndexExpression(), 
equalTo("no_such_index")); - assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); + assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); assertThat(localCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(localCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); assertThat(localCluster.getTotalShards(), equalTo(0)); assertThat(localCluster.getSuccessfulShards(), equalTo(0)); assertThat(localCluster.getSkippedShards(), equalTo(0)); @@ -258,7 +270,8 @@ public void testSearchesWhereMissingIndicesAreSpecified() { EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); - assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + long overallTookMillis = executionInfo.overallTook().millis(); + assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); @@ -267,6 +280,7 @@ public void testSearchesWhereMissingIndicesAreSpecified() { assertThat(remoteCluster.getIndexExpression(), equalTo("no_such_index1,no_such_index2")); assertThat(remoteCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); assertThat(remoteCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(remoteCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); assertThat(remoteCluster.getTotalShards(), equalTo(0)); assertThat(remoteCluster.getSuccessfulShards(), equalTo(0)); assertThat(remoteCluster.getSkippedShards(), equalTo(0)); @@ -276,6 +290,7 @@ public void testSearchesWhereMissingIndicesAreSpecified() { assertThat(localCluster.getIndexExpression(), equalTo("no_such_index*,logs-1")); assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); 
assertThat(localCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(localCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); assertThat(localCluster.getTotalShards(), equalTo(localNumShards)); assertThat(localCluster.getSuccessfulShards(), equalTo(localNumShards)); assertThat(localCluster.getSkippedShards(), equalTo(0)); @@ -291,7 +306,8 @@ public void testSearchesWhereMissingIndicesAreSpecified() { assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); - assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + long overallTookMillis = executionInfo.overallTook().millis(); + assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); @@ -300,6 +316,7 @@ public void testSearchesWhereMissingIndicesAreSpecified() { assertThat(remoteCluster.getIndexExpression(), equalTo("no_such_index*")); assertThat(remoteCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); assertThat(remoteCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(remoteCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); assertThat(remoteCluster.getTotalShards(), equalTo(0)); assertThat(remoteCluster.getSuccessfulShards(), equalTo(0)); assertThat(remoteCluster.getSkippedShards(), equalTo(0)); @@ -309,6 +326,7 @@ public void testSearchesWhereMissingIndicesAreSpecified() { assertThat(localCluster.getIndexExpression(), equalTo("logs-*")); assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); assertThat(localCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(localCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); assertThat(localCluster.getTotalShards(), equalTo(localNumShards)); assertThat(localCluster.getSuccessfulShards(), 
equalTo(localNumShards)); assertThat(localCluster.getSkippedShards(), equalTo(0)); @@ -414,20 +432,20 @@ public void testCCSExecutionOnSearchesWithLimit0() { assertThat(remoteCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); assertThat(remoteCluster.getTook().millis(), greaterThanOrEqualTo(0L)); assertThat(remoteCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); - assertNull(remoteCluster.getTotalShards()); - assertNull(remoteCluster.getSuccessfulShards()); - assertNull(remoteCluster.getSkippedShards()); - assertNull(remoteCluster.getFailedShards()); + assertThat(remoteCluster.getTotalShards(), equalTo(0)); + assertThat(remoteCluster.getSuccessfulShards(), equalTo(0)); + assertThat(remoteCluster.getSkippedShards(), equalTo(0)); + assertThat(remoteCluster.getFailedShards(), equalTo(0)); EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER); assertThat(localCluster.getIndexExpression(), equalTo("logs*")); assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); assertThat(localCluster.getTook().millis(), greaterThanOrEqualTo(0L)); assertThat(localCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); - assertNull(localCluster.getTotalShards()); - assertNull(localCluster.getSuccessfulShards()); - assertNull(localCluster.getSkippedShards()); - assertNull(localCluster.getFailedShards()); + assertThat(remoteCluster.getTotalShards(), equalTo(0)); + assertThat(remoteCluster.getSuccessfulShards(), equalTo(0)); + assertThat(remoteCluster.getSkippedShards(), equalTo(0)); + assertThat(remoteCluster.getFailedShards(), equalTo(0)); } try (EsqlQueryResponse resp = runQuery("FROM logs*,cluster-a:nomatch* | LIMIT 0", requestIncludeMeta)) { @@ -442,7 +460,8 @@ public void testCCSExecutionOnSearchesWithLimit0() { EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER); assertThat(remoteCluster.getIndexExpression(), equalTo("nomatch*")); 
assertThat(remoteCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); - assertThat(remoteCluster.getTook().millis(), equalTo(0L)); + assertThat(remoteCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(remoteCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); assertThat(remoteCluster.getTotalShards(), equalTo(0)); assertThat(remoteCluster.getSuccessfulShards(), equalTo(0)); assertThat(remoteCluster.getSkippedShards(), equalTo(0)); @@ -453,10 +472,10 @@ public void testCCSExecutionOnSearchesWithLimit0() { assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); assertThat(localCluster.getTook().millis(), greaterThanOrEqualTo(0L)); assertThat(localCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); - assertNull(localCluster.getTotalShards()); - assertNull(localCluster.getSuccessfulShards()); - assertNull(localCluster.getSkippedShards()); - assertNull(localCluster.getFailedShards()); + assertThat(localCluster.getTotalShards(), equalTo(0)); + assertThat(localCluster.getSuccessfulShards(), equalTo(0)); + assertThat(localCluster.getSkippedShards(), equalTo(0)); + assertThat(localCluster.getFailedShards(), equalTo(0)); } try (EsqlQueryResponse resp = runQuery("FROM nomatch*,cluster-a:* | LIMIT 0", requestIncludeMeta)) { @@ -473,17 +492,20 @@ public void testCCSExecutionOnSearchesWithLimit0() { assertThat(remoteCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); assertThat(remoteCluster.getTook().millis(), greaterThanOrEqualTo(0L)); assertThat(remoteCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); - assertNull(remoteCluster.getTotalShards()); - assertNull(remoteCluster.getSuccessfulShards()); - assertNull(remoteCluster.getSkippedShards()); - assertNull(remoteCluster.getFailedShards()); + assertThat(remoteCluster.getTotalShards(), equalTo(0)); + assertThat(remoteCluster.getSuccessfulShards(), equalTo(0)); + 
assertThat(remoteCluster.getSkippedShards(), equalTo(0)); + assertThat(remoteCluster.getFailedShards(), equalTo(0)); EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER); assertThat(localCluster.getIndexExpression(), equalTo("nomatch*")); - // TODO: in https://github.com/elastic/elasticsearch/issues/112886, this will be changed to be SKIPPED - assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); + assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); assertThat(localCluster.getTook().millis(), greaterThanOrEqualTo(0L)); assertThat(localCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); + assertThat(remoteCluster.getTotalShards(), equalTo(0)); + assertThat(remoteCluster.getSuccessfulShards(), equalTo(0)); + assertThat(remoteCluster.getSkippedShards(), equalTo(0)); + assertThat(remoteCluster.getFailedShards(), equalTo(0)); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java index dabccd4ffeb17..aeac14091f378 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java @@ -33,6 +33,7 @@ import java.util.Objects; import java.util.Set; import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.TimeUnit; import java.util.function.BiFunction; import java.util.function.Predicate; @@ -55,29 +56,33 @@ public class EsqlExecutionInfo implements ChunkedToXContentObject, Writeable { public static final ParseField DETAILS_FIELD = new ParseField("details"); public static final ParseField TOOK = new ParseField("took"); - // map key is clusterAlias on the primary querying cluster of a CCS minimize_roundtrips=true query - // the Map itself is immutable after 
construction - all Clusters will be accounted for at the start of the search - // updates to the Cluster occur with the updateCluster method that given the key to map transforms an + // Map key is clusterAlias on the primary querying cluster of a CCS minimize_roundtrips=true query + // The Map itself is immutable after construction - all Clusters will be accounted for at the start of the search. + // Updates to the Cluster occur with the updateCluster method that given the key to map transforms an // old Cluster Object to a new Cluster Object with the remapping function. public final Map clusterInfo; - // not Writeable since it is only needed on the primary CCS coordinator - private final transient Predicate skipUnavailablePredicate; private TimeValue overallTook; - // whether the user has asked for CCS metadata to be in the JSON response (the overall took will always be present) private final boolean includeCCSMetadata; + // fields that are not Writeable since they are only needed on the primary CCS coordinator + private final transient Predicate skipUnavailablePredicate; + private final transient Long relativeStartNanos; // start time for an ESQL query for calculating took times + private transient TimeValue planningTookTime; // time elapsed since start of query to calling ComputeService.execute + public EsqlExecutionInfo(boolean includeCCSMetadata) { this(Predicates.always(), includeCCSMetadata); // default all clusters to skip_unavailable=true } /** * @param skipUnavailablePredicate provide lookup for whether a given cluster has skip_unavailable set to true or false + * @param includeCCSMetadata (user defined setting) whether to include the CCS metadata in the HTTP response */ public EsqlExecutionInfo(Predicate skipUnavailablePredicate, boolean includeCCSMetadata) { this.clusterInfo = ConcurrentCollections.newConcurrentMap(); this.skipUnavailablePredicate = skipUnavailablePredicate; this.includeCCSMetadata = includeCCSMetadata; + this.relativeStartNanos = 
System.nanoTime(); } /** @@ -88,6 +93,7 @@ public EsqlExecutionInfo(Predicate skipUnavailablePredicate, boolean inc this.clusterInfo = clusterInfo; this.includeCCSMetadata = includeCCSMetadata; this.skipUnavailablePredicate = Predicates.always(); + this.relativeStartNanos = null; } public EsqlExecutionInfo(StreamInput in) throws IOException { @@ -106,6 +112,7 @@ public EsqlExecutionInfo(StreamInput in) throws IOException { this.includeCCSMetadata = false; } this.skipUnavailablePredicate = Predicates.always(); + this.relativeStartNanos = null; } @Override @@ -125,7 +132,35 @@ public boolean includeCCSMetadata() { return includeCCSMetadata; } - public void overallTook(TimeValue took) { + public Long getRelativeStartNanos() { + return relativeStartNanos; + } + + /** + * Call when ES|QL "planning" phase is complete and query execution (in ComputeService) is about to start. + * Note this is currently only built for a single phase planning/execution model. When INLINESTATS + * moves towards GA we may need to revisit this model. Currently, it should never be called more than once. + */ + public void markEndPlanning() { + assert planningTookTime == null : "markEndPlanning should only be called once"; + assert relativeStartNanos != null : "Relative start time must be set when markEndPlanning is called"; + planningTookTime = new TimeValue(System.nanoTime() - relativeStartNanos, TimeUnit.NANOSECONDS); + } + + public TimeValue planningTookTime() { + return planningTookTime; + } + + /** + * Call when ES|QL execution is complete in order to set the overall took time for an ES|QL query. 
+ */ + public void markEndQuery() { + assert relativeStartNanos != null : "Relative start time must be set when markEndQuery is called"; + overallTook = new TimeValue(System.nanoTime() - relativeStartNanos, TimeUnit.NANOSECONDS); + } + + // for testing only - use markEndQuery in production code + void overallTook(TimeValue took) { this.overallTook = took; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeListener.java index d8fc4da070767..49af4a593e6e5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeListener.java @@ -47,7 +47,6 @@ final class ComputeListener implements Releasable { private final List collectedProfiles; private final ResponseHeadersCollector responseHeaders; private final EsqlExecutionInfo esqlExecutionInfo; - private final long queryStartTimeNanos; // clusterAlias indicating where this ComputeListener is running // used by the top level ComputeListener in ComputeService on both local and remote clusters private final String whereRunning; @@ -61,7 +60,7 @@ public static ComputeListener create( CancellableTask task, ActionListener delegate ) { - return new ComputeListener(transportService, task, null, null, -1, delegate); + return new ComputeListener(transportService, task, null, null, delegate); } /** @@ -75,7 +74,6 @@ public static ComputeListener create( * @param transportService * @param task * @param executionInfo {@link EsqlExecutionInfo} to capture execution metadata - * @param queryStartTimeNanos Start time of the ES|QL query (stored in {@link org.elasticsearch.xpack.esql.session.Configuration}) * @param delegate */ public static ComputeListener create( @@ -83,10 +81,9 @@ public static ComputeListener create( TransportService transportService, CancellableTask task, 
EsqlExecutionInfo executionInfo, - long queryStartTimeNanos, ActionListener delegate ) { - return new ComputeListener(transportService, task, clusterAlias, executionInfo, queryStartTimeNanos, delegate); + return new ComputeListener(transportService, task, clusterAlias, executionInfo, delegate); } private ComputeListener( @@ -94,7 +91,6 @@ private ComputeListener( CancellableTask task, String clusterAlias, EsqlExecutionInfo executionInfo, - long queryStartTimeNanos, ActionListener delegate ) { this.transportService = transportService; @@ -102,7 +98,6 @@ private ComputeListener( this.responseHeaders = new ResponseHeadersCollector(transportService.getThreadPool().getThreadContext()); this.collectedProfiles = Collections.synchronizedList(new ArrayList<>()); this.esqlExecutionInfo = executionInfo; - this.queryStartTimeNanos = queryStartTimeNanos; this.whereRunning = clusterAlias; // for the DataNodeHandler ComputeListener, clusterAlias and executionInfo will be null // for the top level ComputeListener in ComputeService both will be non-null @@ -129,11 +124,15 @@ private ComputeListener( } else { result = new ComputeResponse(collectedProfiles.isEmpty() ? 
List.of() : collectedProfiles.stream().toList()); if (coordinatingClusterIsSearchedInCCS()) { - // mark local cluster as finished once the coordinator and all data nodes have finished processing - executionInfo.swapCluster( - RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, - (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setStatus(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL).build() - ); + // if not already marked as SKIPPED, mark the local cluster as finished once the coordinator and all + // data nodes have finished processing + executionInfo.swapCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, (k, v) -> { + if (v.getStatus() != EsqlExecutionInfo.Cluster.Status.SKIPPED) { + return new EsqlExecutionInfo.Cluster.Builder(v).setStatus(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL).build(); + } else { + return v; + } + }); } } delegate.onResponse(result); @@ -196,8 +195,8 @@ ActionListener acquireAvoid() { * info to be gathered (namely, the DataNodeRequestHandler ComputeListener) should pass in null. 
*/ ActionListener acquireCompute(@Nullable String computeClusterAlias) { - assert computeClusterAlias == null || (esqlExecutionInfo != null && queryStartTimeNanos > 0) - : "When clusterAlias is provided to acquireCompute, executionInfo must be non-null and queryStartTimeNanos must be positive"; + assert computeClusterAlias == null || (esqlExecutionInfo != null && esqlExecutionInfo.getRelativeStartNanos() != null) + : "When clusterAlias is provided to acquireCompute, executionInfo and relativeStartTimeNanos must be non-null"; return acquireAvoid().map(resp -> { responseHeaders.collect(); @@ -209,24 +208,17 @@ ActionListener acquireCompute(@Nullable String computeClusterAl return null; } if (isCCSListener(computeClusterAlias)) { - // this is the callback for the listener to the CCS compute - esqlExecutionInfo.swapCluster( - computeClusterAlias, - (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v) - // for now ESQL doesn't return partial results, so set status to SUCCESSFUL - .setStatus(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL) - .setTook(resp.getTook()) - .setTotalShards(resp.getTotalShards()) - .setSuccessfulShards(resp.getSuccessfulShards()) - .setSkippedShards(resp.getSkippedShards()) - .setFailedShards(resp.getFailedShards()) - .build() - ); + // this is the callback for the listener on the primary coordinator that receives a remote ComputeResponse + updateExecutionInfoWithRemoteResponse(computeClusterAlias, resp); + } else if (shouldRecordTookTime()) { + Long relativeStartNanos = esqlExecutionInfo.getRelativeStartNanos(); // handler for this cluster's data node and coordinator completion (runs on "local" and remote clusters) - TimeValue tookTime = new TimeValue(System.nanoTime() - queryStartTimeNanos, TimeUnit.NANOSECONDS); + assert relativeStartNanos != null : "queryStartTimeNanos not set properly"; + TimeValue tookTime = new TimeValue(System.nanoTime() - relativeStartNanos, TimeUnit.NANOSECONDS); esqlExecutionInfo.swapCluster(computeClusterAlias, (k, v) -> 
{ - if (v.getTook() == null || v.getTook().nanos() < tookTime.nanos()) { + if (v.getStatus() != EsqlExecutionInfo.Cluster.Status.SKIPPED + && (v.getTook() == null || v.getTook().nanos() < tookTime.nanos())) { return new EsqlExecutionInfo.Cluster.Builder(v).setTook(tookTime).build(); } else { return v; @@ -237,6 +229,40 @@ ActionListener acquireCompute(@Nullable String computeClusterAl }); } + private void updateExecutionInfoWithRemoteResponse(String computeClusterAlias, ComputeResponse resp) { + TimeValue tookOnCluster; + if (resp.getTook() != null) { + TimeValue remoteExecutionTime = resp.getTook(); + TimeValue planningTookTime = esqlExecutionInfo.planningTookTime(); + tookOnCluster = new TimeValue(planningTookTime.nanos() + remoteExecutionTime.nanos(), TimeUnit.NANOSECONDS); + esqlExecutionInfo.swapCluster( + computeClusterAlias, + (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v) + // for now ESQL doesn't return partial results, so set status to SUCCESSFUL + .setStatus(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL) + .setTook(tookOnCluster) + .setTotalShards(resp.getTotalShards()) + .setSuccessfulShards(resp.getSuccessfulShards()) + .setSkippedShards(resp.getSkippedShards()) + .setFailedShards(resp.getFailedShards()) + .build() + ); + } else { + // if the cluster is an older version and does not send back took time, then calculate it here on the coordinator + // and leave shard info unset, so it is not shown in the CCS metadata section of the JSON response + long remoteTook = System.nanoTime() - esqlExecutionInfo.getRelativeStartNanos(); + tookOnCluster = new TimeValue(remoteTook, TimeUnit.NANOSECONDS); + esqlExecutionInfo.swapCluster( + computeClusterAlias, + (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v) + // for now ESQL doesn't return partial results, so set status to SUCCESSFUL + .setStatus(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL) + .setTook(tookOnCluster) + .build() + ); + } + } + /** * Use this method when no execution metadata needs to be added to 
{@link EsqlExecutionInfo} */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index f714695504a1d..108e70d7d3a50 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -32,7 +32,6 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; import org.elasticsearch.index.query.QueryBuilder; @@ -81,7 +80,6 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.Executor; -import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.xpack.esql.plugin.EsqlPlugin.ESQL_WORKER_THREAD_POOL_NAME; @@ -173,19 +171,10 @@ public void execute( null ); String local = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; - try ( - var computeListener = ComputeListener.create( - local, - transportService, - rootTask, - execInfo, - configuration.getQueryStartTimeNanos(), - listener.map(r -> { - updateExecutionInfoAfterCoordinatorOnlyQuery(configuration.getQueryStartTimeNanos(), execInfo); - return new Result(physicalPlan.output(), collectedPages, r.getProfiles(), execInfo); - }) - ) - ) { + try (var computeListener = ComputeListener.create(local, transportService, rootTask, execInfo, listener.map(r -> { + updateExecutionInfoAfterCoordinatorOnlyQuery(execInfo); + return new Result(physicalPlan.output(), collectedPages, r.getProfiles(), execInfo); + }))) { runCompute(rootTask, computeContext, coordinatorPlan, computeListener.acquireCompute(local)); return; } @@ -205,7 +194,6 @@ public void execute( queryPragmas.exchangeBufferSize(), 
transportService.getThreadPool().executor(ThreadPool.Names.SEARCH) ); - long start = configuration.getQueryStartTimeNanos(); String local = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; /* * Grab the output attributes here, so we can pass them to @@ -216,9 +204,8 @@ public void execute( try ( Releasable ignored = exchangeSource.addEmptySink(); // this is the top level ComputeListener called once at the end (e.g., once all clusters have finished for a CCS) - var computeListener = ComputeListener.create(local, transportService, rootTask, execInfo, start, listener.map(r -> { - long tookTimeNanos = System.nanoTime() - configuration.getQueryStartTimeNanos(); - execInfo.overallTook(new TimeValue(tookTimeNanos, TimeUnit.NANOSECONDS)); + var computeListener = ComputeListener.create(local, transportService, rootTask, execInfo, listener.map(r -> { + execInfo.markEndQuery(); // TODO: revisit this time recording model as part of INLINESTATS improvements return new Result(outputAttributes, collectedPages, r.getProfiles(), execInfo); })) ) { @@ -258,22 +245,24 @@ public void execute( } } - private static void updateExecutionInfoAfterCoordinatorOnlyQuery(long queryStartNanos, EsqlExecutionInfo execInfo) { - long tookTimeNanos = System.nanoTime() - queryStartNanos; - execInfo.overallTook(new TimeValue(tookTimeNanos, TimeUnit.NANOSECONDS)); + // For queries like: FROM logs* | LIMIT 0 (including cross-cluster LIMIT 0 queries) + private static void updateExecutionInfoAfterCoordinatorOnlyQuery(EsqlExecutionInfo execInfo) { + execInfo.markEndQuery(); // TODO: revisit this time recording model as part of INLINESTATS improvements if (execInfo.isCrossClusterSearch()) { + assert execInfo.planningTookTime() != null : "Planning took time should be set on EsqlExecutionInfo but is null"; for (String clusterAlias : execInfo.clusterAliases()) { - // The local cluster 'took' time gets updated as part of the acquireCompute(local) call in the coordinator, so - // here we only need to update status 
for remote clusters since there are no remote ComputeListeners in this case. - // This happens in cross cluster searches that use LIMIT 0, e.g, FROM logs*,remote*:logs* | LIMIT 0. - if (clusterAlias.equals(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY) == false) { - execInfo.swapCluster(clusterAlias, (k, v) -> { - if (v.getStatus() == EsqlExecutionInfo.Cluster.Status.RUNNING) { - return new EsqlExecutionInfo.Cluster.Builder(v).setStatus(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL).build(); - } else { - return v; - } - }); + // took time and shard counts for SKIPPED clusters were added at end of planning, so only update other cases here + if (execInfo.getCluster(clusterAlias).getStatus() != EsqlExecutionInfo.Cluster.Status.SKIPPED) { + execInfo.swapCluster( + clusterAlias, + (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setTook(execInfo.overallTook()) + .setStatus(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL) + .setTotalShards(0) + .setSuccessfulShards(0) + .setSkippedShards(0) + .setFailedShards(0) + .build() + ); } } } @@ -837,8 +826,7 @@ public void messageReceived(ClusterComputeRequest request, TransportChannel chan EsqlExecutionInfo execInfo = new EsqlExecutionInfo(true); execInfo.swapCluster(clusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(clusterAlias, Arrays.toString(request.indices()))); CancellableTask cancellable = (CancellableTask) task; - long start = request.configuration().getQueryStartTimeNanos(); - try (var computeListener = ComputeListener.create(clusterAlias, transportService, cancellable, execInfo, start, listener)) { + try (var computeListener = ComputeListener.create(clusterAlias, transportService, cancellable, execInfo, listener)) { runComputeOnRemoteCluster( clusterAlias, request.sessionId(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 96391c841856f..788b2827d7c8e 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -72,7 +72,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; import java.util.function.BiFunction; import java.util.function.Predicate; @@ -164,6 +163,7 @@ public void executeOptimizedPlan( LogicalPlan firstPhase = Phased.extractFirstPhase(optimizedPlan); if (firstPhase == null) { runPhase.accept(logicalPlanToPhysicalPlan(optimizedPlan, request), listener); + updateExecutionInfoAtEndOfPlanning(executionInfo); } else { executePhased(new ArrayList<>(), optimizedPlan, request, executionInfo, firstPhase, runPhase, listener); } @@ -246,7 +246,6 @@ private void preAnalyze( if (indexResolution.isValid()) { updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution); updateExecutionInfoWithUnavailableClusters(executionInfo, indexResolution.getUnavailableClusters()); - updateTookTimeForRemoteClusters(executionInfo); Set newClusters = enrichPolicyResolver.groupIndicesPerCluster( indexResolution.get().concreteIndices().toArray(String[]::new) ).keySet(); @@ -267,68 +266,6 @@ private void preAnalyze( })); } - // visible for testing - static void updateExecutionInfoWithUnavailableClusters(EsqlExecutionInfo executionInfo, Set unavailableClusters) { - for (String clusterAlias : unavailableClusters) { - executionInfo.swapCluster( - clusterAlias, - (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setStatus(EsqlExecutionInfo.Cluster.Status.SKIPPED).build() - ); - // TODO: follow-on PR will set SKIPPED status when skip_unavailable=true and throw an exception when skip_un=false - } - } - - // visible for testing - static void updateExecutionInfoWithClustersWithNoMatchingIndices(EsqlExecutionInfo executionInfo, IndexResolution indexResolution) { - Set clustersWithResolvedIndices = 
new HashSet<>(); - // determine missing clusters - for (String indexName : indexResolution.get().indexNameWithModes().keySet()) { - clustersWithResolvedIndices.add(RemoteClusterAware.parseClusterAlias(indexName)); - } - Set clustersRequested = executionInfo.clusterAliases(); - Set clustersWithNoMatchingIndices = Sets.difference(clustersRequested, clustersWithResolvedIndices); - clustersWithNoMatchingIndices.removeAll(indexResolution.getUnavailableClusters()); - /* - * These are clusters in the original request that are not present in the field-caps response. They were - * specified with an index or indices that do not exist, so the search on that cluster is done. - * Mark it as SKIPPED with 0 shards searched and took=0. - */ - for (String c : clustersWithNoMatchingIndices) { - executionInfo.swapCluster( - c, - (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setStatus(EsqlExecutionInfo.Cluster.Status.SKIPPED) - .setTook(new TimeValue(0)) - .setTotalShards(0) - .setSuccessfulShards(0) - .setSkippedShards(0) - .setFailedShards(0) - .build() - ); - } - } - - private void updateTookTimeForRemoteClusters(EsqlExecutionInfo executionInfo) { - if (executionInfo.isCrossClusterSearch()) { - for (String clusterAlias : executionInfo.clusterAliases()) { - if (clusterAlias.equals(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY) == false) { - executionInfo.swapCluster(clusterAlias, (k, v) -> { - if (v.getTook() == null && v.getStatus() != EsqlExecutionInfo.Cluster.Status.SKIPPED) { - // set took time in case we are finished with the remote cluster (e.g., FROM foo | LIMIT 0). 
- // this will be overwritten later if ES|QL operations happen on the remote cluster (the typical scenario) - TimeValue took = new TimeValue( - System.nanoTime() - configuration.getQueryStartTimeNanos(), - TimeUnit.NANOSECONDS - ); - return new EsqlExecutionInfo.Cluster.Builder(v).setTook(took).build(); - } else { - return v; - } - }); - } - } - } - } - private void preAnalyzeIndices( LogicalPlan parsed, EsqlExecutionInfo executionInfo, @@ -508,4 +445,66 @@ public PhysicalPlan optimizedPhysicalPlan(LogicalPlan optimizedPlan) { LOGGER.debug("Optimized physical plan:\n{}", plan); return plan; } + + // visible for testing + static void updateExecutionInfoWithUnavailableClusters(EsqlExecutionInfo executionInfo, Set unavailableClusters) { + for (String clusterAlias : unavailableClusters) { + executionInfo.swapCluster( + clusterAlias, + (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setStatus(EsqlExecutionInfo.Cluster.Status.SKIPPED).build() + ); + // TODO: follow-on PR will set SKIPPED status when skip_unavailable=true and throw an exception when skip_un=false + } + } + + // visible for testing + static void updateExecutionInfoWithClustersWithNoMatchingIndices(EsqlExecutionInfo executionInfo, IndexResolution indexResolution) { + Set clustersWithResolvedIndices = new HashSet<>(); + // determine missing clusters + for (String indexName : indexResolution.get().indexNameWithModes().keySet()) { + clustersWithResolvedIndices.add(RemoteClusterAware.parseClusterAlias(indexName)); + } + Set clustersRequested = executionInfo.clusterAliases(); + Set clustersWithNoMatchingIndices = Sets.difference(clustersRequested, clustersWithResolvedIndices); + clustersWithNoMatchingIndices.removeAll(indexResolution.getUnavailableClusters()); + /* + * These are clusters in the original request that are not present in the field-caps response. They were + * specified with an index or indices that do not exist, so the search on that cluster is done. 
+ * Mark it as SKIPPED with 0 shards searched and took=0. + */ + for (String c : clustersWithNoMatchingIndices) { + executionInfo.swapCluster( + c, + (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setStatus(EsqlExecutionInfo.Cluster.Status.SKIPPED) + .setTook(new TimeValue(0)) + .setTotalShards(0) + .setSuccessfulShards(0) + .setSkippedShards(0) + .setFailedShards(0) + .build() + ); + } + } + + // visible for testing + static void updateExecutionInfoAtEndOfPlanning(EsqlExecutionInfo execInfo) { + // TODO: this logic assumes a single phase execution model, so it may need to be altered once INLINESTATS is made CCS compatible + if (execInfo.isCrossClusterSearch()) { + execInfo.markEndPlanning(); + for (String clusterAlias : execInfo.clusterAliases()) { + EsqlExecutionInfo.Cluster cluster = execInfo.getCluster(clusterAlias); + if (cluster.getStatus() == EsqlExecutionInfo.Cluster.Status.SKIPPED) { + execInfo.swapCluster( + clusterAlias, + (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setTook(execInfo.planningTookTime()) + .setTotalShards(0) + .setSuccessfulShards(0) + .setSkippedShards(0) + .setFailedShards(0) + .build() + ); + } + } + } + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java index 8cfcb605a19d5..5fbd5dd28050f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java @@ -132,7 +132,6 @@ public void testEmpty() { transportService, newTask(), executionInfo, - System.nanoTime(), results ) ) { @@ -152,7 +151,6 @@ public void testCollectComputeResults() { transportService, newTask(), executionInfo, - System.nanoTime(), future ) ) { @@ -196,6 +194,7 @@ public void testAcquireComputeCCSListener() { String remoteAlias = "rc1"; EsqlExecutionInfo
executionInfo = new EsqlExecutionInfo(true); executionInfo.swapCluster(remoteAlias, (k, v) -> new EsqlExecutionInfo.Cluster(remoteAlias, "logs*", false)); + executionInfo.markEndPlanning(); // set planning took time, so it can be used to calculate per-cluster took time try ( ComputeListener computeListener = ComputeListener.create( // 'whereRunning' for this test is the local cluster, waiting for a response from the remote cluster @@ -203,7 +202,6 @@ public void testAcquireComputeCCSListener() { transportService, newTask(), executionInfo, - System.nanoTime(), future ) ) { @@ -239,6 +237,60 @@ public void testAcquireComputeCCSListener() { Mockito.verifyNoInteractions(transportService.getTaskManager()); } + /** + * Tests the acquireCompute functionality running on the querying ("local") cluster, that is waiting upon + * a ComputeResponse from a remote cluster where we simulate connecting to a remote cluster running a version + * of ESQL that does not record and return CCS metadata. Ensure that the local cluster {@link EsqlExecutionInfo} + * is properly updated with took time and shard info is left unset. 
+ */ + public void testAcquireComputeCCSListenerWithComputeResponseFromOlderCluster() { + PlainActionFuture future = new PlainActionFuture<>(); + List allProfiles = new ArrayList<>(); + String remoteAlias = "rc1"; + EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); + executionInfo.swapCluster(remoteAlias, (k, v) -> new EsqlExecutionInfo.Cluster(remoteAlias, "logs*", false)); + executionInfo.markEndPlanning(); // set planning took time, so it can be used to calculate per-cluster took time + try ( + ComputeListener computeListener = ComputeListener.create( + // 'whereRunning' for this test is the local cluster, waiting for a response from the remote cluster + RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, + transportService, + newTask(), + executionInfo, + future + ) + ) { + int tasks = randomIntBetween(1, 5); + for (int t = 0; t < tasks; t++) { + ComputeResponse resp = randomResponse(false); // older clusters will not return CCS metadata in response + allProfiles.addAll(resp.getProfiles()); + // Use remoteAlias here to indicate what remote cluster alias the listener is waiting to hear back from + ActionListener subListener = computeListener.acquireCompute(remoteAlias); + threadPool.schedule( + ActionRunnable.wrap(subListener, l -> l.onResponse(resp)), + TimeValue.timeValueNanos(between(0, 100)), + threadPool.generic() + ); + } + } + ComputeResponse response = future.actionGet(10, TimeUnit.SECONDS); + assertThat( + response.getProfiles().stream().collect(Collectors.toMap(p -> p, p -> 1, Integer::sum)), + equalTo(allProfiles.stream().collect(Collectors.toMap(p -> p, p -> 1, Integer::sum))) + ); + + assertTrue(executionInfo.isCrossClusterSearch()); + EsqlExecutionInfo.Cluster rc1Cluster = executionInfo.getCluster(remoteAlias); + assertThat(rc1Cluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertNull(rc1Cluster.getTotalShards()); + assertNull(rc1Cluster.getSuccessfulShards()); + assertNull(rc1Cluster.getSkippedShards()); + 
assertNull(rc1Cluster.getFailedShards()); + assertThat(rc1Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); + + Mockito.verifyNoInteractions(transportService.getTaskManager()); + } + /** * Run an acquireCompute cycle on the RemoteCluster. * AcquireCompute will fill in the took time on the EsqlExecutionInfo (the shard info is filled in before this, @@ -271,7 +323,6 @@ public void testAcquireComputeRunningOnRemoteClusterFillsInTookTime() { transportService, newTask(), executionInfo, - System.nanoTime(), future ) ) { @@ -331,7 +382,6 @@ public void testAcquireComputeRunningOnQueryingClusterFillsInTookTime() { transportService, newTask(), executionInfo, - System.nanoTime(), future ) ) { @@ -379,7 +429,6 @@ public void testCancelOnFailure() throws Exception { transportService, rootTask, execInfo, - System.nanoTime(), rootListener ) ) { @@ -443,7 +492,6 @@ public void onFailure(Exception e) { transportService, newTask(), executionInfo, - System.nanoTime(), ActionListener.runAfter(rootListener, latch::countDown) ) ) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionTests.java index 7e93213fcee21..32b31cf78650b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionTests.java @@ -21,6 +21,7 @@ import java.util.Set; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; public class EsqlSessionTests extends ESTestCase { @@ -243,6 +244,50 @@ public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { } } + public void testUpdateExecutionInfoAtEndOfPlanning() { + String localClusterAlias = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; + String remote1Alias = "remote1"; + String remote2Alias = "remote2"; + 
EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); + executionInfo.swapCluster(localClusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(localClusterAlias, "logs*", false)); + executionInfo.swapCluster( + remote1Alias, + (k, v) -> new EsqlExecutionInfo.Cluster(remote1Alias, "*", true, EsqlExecutionInfo.Cluster.Status.SKIPPED) + ); + executionInfo.swapCluster(remote2Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote2Alias, "mylogs1,mylogs2,logs*", false)); + + assertNull(executionInfo.planningTookTime()); + assertNull(executionInfo.overallTook()); + try { + Thread.sleep(1); + } catch (InterruptedException e) {} + + EsqlSession.updateExecutionInfoAtEndOfPlanning(executionInfo); + + assertThat(executionInfo.planningTookTime().millis(), greaterThanOrEqualTo(0L)); + assertNull(executionInfo.overallTook()); + + // only remote1 should be altered, since it is the only one marked as SKIPPED when passed into updateExecutionInfoAtEndOfPlanning + EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); + assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.RUNNING)); + assertNull(localCluster.getTotalShards()); + assertNull(localCluster.getTook()); + + EsqlExecutionInfo.Cluster remote1Cluster = executionInfo.getCluster(remote1Alias); + assertThat(remote1Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); + assertThat(remote1Cluster.getTotalShards(), equalTo(0)); + assertThat(remote1Cluster.getSuccessfulShards(), equalTo(0)); + assertThat(remote1Cluster.getSkippedShards(), equalTo(0)); + assertThat(remote1Cluster.getFailedShards(), equalTo(0)); + assertThat(remote1Cluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(remote1Cluster.getTook().millis(), equalTo(executionInfo.planningTookTime().millis())); + + EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(remote2Alias); + assertThat(remote2Cluster.getStatus(), 
equalTo(EsqlExecutionInfo.Cluster.Status.RUNNING)); + assertNull(remote2Cluster.getTotalShards()); + assertNull(remote2Cluster.getTook()); + } + private void assertClusterStatusAndHasNullCounts(EsqlExecutionInfo.Cluster cluster, EsqlExecutionInfo.Cluster.Status status) { assertThat(cluster.getStatus(), equalTo(status)); assertNull(cluster.getTook()); From be22d2a4ea7596cc418f7ad33eb0ee01e746a3a5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Slobodan=20Adamovi=C4=87?= Date: Fri, 18 Oct 2024 18:52:04 +0200 Subject: [PATCH 218/449] [Test] Fix SearchRequestCacheDisablingInterceptorTests (#114828) The https://github.com/elastic/elasticsearch/pull/113501 PR introduced a change where `:index-name` is no longer considered a valid remote index name. The valid remote index name has to have a non-empty remote cluster name, e.g. `my-remote-cluster:index-name`. This PR changes tests to avoid randomly generating empty remote cluster names. Resolves https://github.com/elastic/elasticsearch/issues/113659, https://github.com/elastic/elasticsearch/issues/113660 --- muted-tests.yml | 6 ------ .../SearchRequestCacheDisablingInterceptorTests.java | 4 ++-- 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 821a96217d05c..1c0b45bc1527c 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -206,12 +206,6 @@ tests: - class: org.elasticsearch.smoketest.MlWithSecurityIT method: test {yaml=ml/3rd_party_deployment/Test start and stop multiple deployments} issue: https://github.com/elastic/elasticsearch/issues/101458 -- class: org.elasticsearch.xpack.security.authz.interceptor.SearchRequestCacheDisablingInterceptorTests - method: testHasRemoteIndices - issue: https://github.com/elastic/elasticsearch/issues/113660 -- class: org.elasticsearch.xpack.security.authz.interceptor.SearchRequestCacheDisablingInterceptorTests - method: testRequestCacheWillBeDisabledWhenSearchRemoteIndices - issue: https://github.com/elastic/elasticsearch/issues/113659 - 
class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT method: test {categorize.Categorize ASYNC} issue: https://github.com/elastic/elasticsearch/issues/113721 diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestCacheDisablingInterceptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestCacheDisablingInterceptorTests.java index 2c4a03b7df501..b09527061f0d5 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestCacheDisablingInterceptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestCacheDisablingInterceptorTests.java @@ -91,7 +91,7 @@ public void testRequestCacheWillBeDisabledWhenSearchRemoteIndices() { 0, 3, String[]::new, - () -> randomAlphaOfLengthBetween(0, 5) + ":" + randomAlphaOfLengthBetween(3, 8) + () -> randomAlphaOfLengthBetween(1, 5) + ":" + randomAlphaOfLengthBetween(3, 8) ); final ArrayList allIndices = Arrays.stream(ArrayUtils.concat(localIndices, remoteIndices)) .collect(Collectors.toCollection(ArrayList::new)); @@ -121,7 +121,7 @@ public void testHasRemoteIndices() { 0, 3, String[]::new, - () -> randomAlphaOfLengthBetween(0, 5) + ":" + randomAlphaOfLengthBetween(3, 8) + () -> randomAlphaOfLengthBetween(1, 5) + ":" + randomAlphaOfLengthBetween(3, 8) ); final ArrayList allIndices = Arrays.stream(ArrayUtils.concat(localIndices, remoteIndices)) .collect(Collectors.toCollection(ArrayList::new)); From f80723e710049a6558d753c0d121e5495bf091ec Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 19 Oct 2024 04:18:58 +1100 Subject: [PATCH 219/449] Mute org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT test {string.ValuesGrouped} #115126 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml 
b/muted-tests.yml index 1c0b45bc1527c..6f2f08e22a805 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -356,6 +356,9 @@ tests: - class: org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapperTests method: testBlockLoaderFromRowStrideReaderWithSyntheticSource issue: https://github.com/elastic/elasticsearch/issues/115076 +- class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT + method: test {string.ValuesGrouped} + issue: https://github.com/elastic/elasticsearch/issues/115126 # Examples: # From 8e091579cd01f852fc45d85d41ff6d2b8e2b763b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 19 Oct 2024 04:37:03 +1100 Subject: [PATCH 220/449] Mute org.elasticsearch.xpack.esql.action.CrossClustersQueryIT testCCSExecutionOnSearchesWithLimit0 #115129 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 6f2f08e22a805..1e46b95bff1ef 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -359,6 +359,9 @@ tests: - class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT method: test {string.ValuesGrouped} issue: https://github.com/elastic/elasticsearch/issues/115126 +- class: org.elasticsearch.xpack.esql.action.CrossClustersQueryIT + method: testCCSExecutionOnSearchesWithLimit0 + issue: https://github.com/elastic/elasticsearch/issues/115129 # Examples: # From a5118c2d9fa5e2442c408089387e75337b2a5d4a Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Fri, 18 Oct 2024 13:46:32 -0400 Subject: [PATCH 221/449] Add timeout and cancellation check to rescore phase (#115048) This adds cancellation checks to rescore phase. This cancellation checks for the parent task being cancelled and for timeout checks. The assumption is that rescore is always significantly more expensive than a regular query, so we check for timeout as frequently as the most frequent check in ExitableDirectoryReader. For LTR, we check on hit inference. 
Maybe we should also check for per feature extraction? For QueryRescorer, we check in the combine method. closes: https://github.com/elastic/elasticsearch/issues/114955 --- docs/changelog/115048.yaml | 5 + .../search/functionscore/QueryRescorerIT.java | 124 +++++++++++++++++ .../search/internal/ContextIndexSearcher.java | 2 +- .../search/rescore/QueryRescorer.java | 6 + .../search/rescore/RescoreContext.java | 11 ++ .../search/rescore/RescorePhase.java | 37 +++++ .../search/rescore/RescorePhaseTests.java | 127 ++++++++++++++++++ .../inference/ltr/LearningToRankRescorer.java | 9 ++ 8 files changed, 320 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/115048.yaml create mode 100644 server/src/test/java/org/elasticsearch/search/rescore/RescorePhaseTests.java diff --git a/docs/changelog/115048.yaml b/docs/changelog/115048.yaml new file mode 100644 index 0000000000000..10844b83c6d01 --- /dev/null +++ b/docs/changelog/115048.yaml @@ -0,0 +1,5 @@ +pr: 115048 +summary: Add timeout and cancellation check to rescore phase +area: Ranking +type: enhancement +issues: [] diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index 025d224923dc0..6043688b7670a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -9,19 +9,30 @@ package org.elasticsearch.search.functionscore; +import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Explanation; import org.apache.lucene.tests.util.English; +import org.elasticsearch.TransportVersion; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import 
org.elasticsearch.action.search.SearchType; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.search.function.CombineFunction; +import org.elasticsearch.common.lucene.search.function.LeafScoreFunction; +import org.elasticsearch.common.lucene.search.function.ScoreFunction; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings.Builder; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.collapse.CollapseBuilder; @@ -29,11 +40,14 @@ import org.elasticsearch.search.rescore.QueryRescorerBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; +import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Comparator; import java.util.List; @@ -979,9 +993,119 @@ public void testRescoreAfterCollapseRandom() throws Exception { }); } + public void testRescoreWithTimeout() throws Exception { + // no dummy docs since merges can change scores while we run queries. 
+ int numDocs = indexRandomNumbers("whitespace", -1, false); + + String intToEnglish = English.intToEnglish(between(0, numDocs - 1)); + String query = intToEnglish.split(" ")[0]; + assertResponse( + prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) + .setQuery(QueryBuilders.matchQuery("field1", query).operator(Operator.OR)) + .setSize(10) + .addRescorer(new QueryRescorerBuilder(functionScoreQuery(new TestTimedScoreFunctionBuilder())).windowSize(100)) + .setTimeout(TimeValue.timeValueMillis(10)), + r -> assertTrue(r.isTimedOut()) + ); + } + + @Override + protected Collection> nodePlugins() { + return List.of(TestTimedQueryPlugin.class); + } + private QueryBuilder fieldValueScoreQuery(String scoreField) { return functionScoreQuery(termQuery("shouldFilter", false), ScoreFunctionBuilders.fieldValueFactorFunction(scoreField)).boostMode( CombineFunction.REPLACE ); } + + public static class TestTimedQueryPlugin extends Plugin implements SearchPlugin { + @Override + public List> getScoreFunctions() { + return List.of( + new ScoreFunctionSpec<>( + new ParseField("timed"), + TestTimedScoreFunctionBuilder::new, + p -> new TestTimedScoreFunctionBuilder() + ) + ); + } + } + + static class TestTimedScoreFunctionBuilder extends ScoreFunctionBuilder { + private final long time = 500; + + TestTimedScoreFunctionBuilder() {} + + TestTimedScoreFunctionBuilder(StreamInput in) throws IOException { + super(in); + } + + @Override + protected void doWriteTo(StreamOutput out) {} + + @Override + public String getName() { + return "timed"; + } + + @Override + protected void doXContent(XContentBuilder builder, Params params) {} + + @Override + protected boolean doEquals(TestTimedScoreFunctionBuilder functionBuilder) { + return false; + } + + @Override + protected int doHashCode() { + return 0; + } + + @Override + protected ScoreFunction doToFunction(SearchExecutionContext context) throws IOException { + return new ScoreFunction(REPLACE) { + @Override + public LeafScoreFunction 
getLeafScoreFunction(LeafReaderContext ctx) throws IOException { + return new LeafScoreFunction() { + @Override + public double score(int docId, float subQueryScore) { + try { + Thread.sleep(time); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + return time; + } + + @Override + public Explanation explainScore(int docId, Explanation subQueryScore) { + return null; + } + }; + } + + @Override + public boolean needsScores() { + return true; + } + + @Override + protected boolean doEquals(ScoreFunction other) { + return false; + } + + @Override + protected int doHashCode() { + return 0; + } + }; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersion.current(); + } + } } diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index 18de4b81cbf8c..da5d2d093fbd8 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -407,7 +407,7 @@ public void throwTimeExceededException() { } } - private static class TimeExceededException extends RuntimeException { + public static class TimeExceededException extends RuntimeException { // This exception should never be re-thrown, but we fill in the stacktrace to be able to trace where it does not get properly caught } diff --git a/server/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java b/server/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java index 5cd947a1cc73b..cb9169dbeb5e5 100644 --- a/server/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java +++ b/server/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java @@ -26,6 +26,7 @@ public final class QueryRescorer implements Rescorer { + private static final int MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK = 10; 
public static final Rescorer INSTANCE = new QueryRescorer(); @Override @@ -39,9 +40,14 @@ public TopDocs rescore(TopDocs topDocs, IndexSearcher searcher, RescoreContext r final QueryRescoreContext rescore = (QueryRescoreContext) rescoreContext; org.apache.lucene.search.Rescorer rescorer = new org.apache.lucene.search.QueryRescorer(rescore.parsedQuery().query()) { + int count = 0; @Override protected float combine(float firstPassScore, boolean secondPassMatches, float secondPassScore) { + if (count % MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK == 0) { + rescore.checkCancellation(); + } + count++; if (secondPassMatches) { return rescore.scoreMode.combine( firstPassScore * rescore.queryWeight(), diff --git a/server/src/main/java/org/elasticsearch/search/rescore/RescoreContext.java b/server/src/main/java/org/elasticsearch/search/rescore/RescoreContext.java index 297b197a6d0c1..0ae6c326ddcdc 100644 --- a/server/src/main/java/org/elasticsearch/search/rescore/RescoreContext.java +++ b/server/src/main/java/org/elasticsearch/search/rescore/RescoreContext.java @@ -24,6 +24,7 @@ public class RescoreContext { private final int windowSize; private final Rescorer rescorer; private Set rescoredDocs; // doc Ids for which rescoring was applied + private Runnable isCancelled; /** * Build the context. @@ -34,6 +35,16 @@ public RescoreContext(int windowSize, Rescorer rescorer) { this.rescorer = rescorer; } + public void setCancellationChecker(Runnable isCancelled) { + this.isCancelled = isCancelled; + } + + public void checkCancellation() { + if (isCancelled != null) { + isCancelled.run(); + } + } + /** * The rescorer to actually apply. 
*/ diff --git a/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java b/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java index be961b8ef942b..1227db5d8e1db 100644 --- a/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java +++ b/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java @@ -14,12 +14,18 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopDocs; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.search.SearchShardTask; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.common.util.Maps; import org.elasticsearch.lucene.grouping.TopFieldGroups; +import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.query.QueryPhase; +import org.elasticsearch.search.query.SearchTimeoutException; import java.io.IOException; +import java.util.ArrayList; +import java.util.List; import java.util.Map; /** @@ -44,11 +50,14 @@ public static void execute(SearchContext context) { topGroups = topFieldGroups; } try { + Runnable cancellationCheck = getCancellationChecks(context); for (RescoreContext ctx : context.rescore()) { + ctx.setCancellationChecker(cancellationCheck); topDocs = ctx.rescorer().rescore(topDocs, context.searcher(), ctx); // It is the responsibility of the rescorer to sort the resulted top docs, // here we only assert that this condition is met. 
assert context.sort() == null && topDocsSortedByScore(topDocs) : "topdocs should be sorted after rescore"; + ctx.setCancellationChecker(null); } if (topGroups != null) { assert context.collapse() != null; @@ -63,6 +72,11 @@ public static void execute(SearchContext context) { .topDocs(new TopDocsAndMaxScore(topDocs, topDocs.scoreDocs[0].score), context.queryResult().sortValueFormats()); } catch (IOException e) { throw new ElasticsearchException("Rescore Phase Failed", e); + } catch (ContextIndexSearcher.TimeExceededException e) { + if (context.request().allowPartialSearchResults() == false) { + throw new SearchTimeoutException(context.shardTarget(), "Time exceeded"); + } + context.queryResult().searchTimedOut(true); } } @@ -106,4 +120,27 @@ private static boolean topDocsSortedByScore(TopDocs topDocs) { } return true; } + + static Runnable getCancellationChecks(SearchContext context) { + List cancellationChecks = new ArrayList<>(); + if (context.lowLevelCancellation()) { + cancellationChecks.add(() -> { + final SearchShardTask task = context.getTask(); + if (task != null) { + task.ensureNotCancelled(); + } + }); + } + + final Runnable timeoutRunnable = QueryPhase.getTimeoutCheck(context); + if (timeoutRunnable != null) { + cancellationChecks.add(timeoutRunnable); + } + + return () -> { + for (var check : cancellationChecks) { + check.run(); + } + }; + } } diff --git a/server/src/test/java/org/elasticsearch/search/rescore/RescorePhaseTests.java b/server/src/test/java/org/elasticsearch/search/rescore/RescorePhaseTests.java new file mode 100644 index 0000000000000..5a1c4b789b460 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/rescore/RescorePhaseTests.java @@ -0,0 +1,127 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.search.rescore; + +import org.apache.lucene.document.Document; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryCachingPolicy; +import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TotalHits; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.RandomIndexWriter; +import org.elasticsearch.action.search.SearchShardTask; +import org.elasticsearch.index.query.ParsedQuery; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.IndexShardTestCase; +import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext; +import org.elasticsearch.search.fetch.subphase.FetchFieldsContext; +import org.elasticsearch.search.internal.ContextIndexSearcher; +import org.elasticsearch.search.internal.FilteredSearchContext; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.tasks.TaskCancelHelper; +import org.elasticsearch.tasks.TaskCancelledException; +import org.elasticsearch.test.TestSearchContext; + +import java.io.IOException; +import java.util.Collections; + +public class RescorePhaseTests extends IndexShardTestCase { + + public void testRescorePhaseCancellation() throws IOException { + IndexWriterConfig iwc = 
newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE); + try (Directory dir = newDirectory()) { + try (RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc)) { + final int numDocs = scaledRandomIntBetween(100, 200); + for (int i = 0; i < numDocs; ++i) { + Document doc = new Document(); + w.addDocument(doc); + } + } + try (IndexReader reader = DirectoryReader.open(dir)) { + ContextIndexSearcher s = new ContextIndexSearcher( + reader, + IndexSearcher.getDefaultSimilarity(), + IndexSearcher.getDefaultQueryCache(), + new QueryCachingPolicy() { + @Override + public void onUse(Query query) {} + + @Override + public boolean shouldCache(Query query) { + return false; + } + }, + true + ); + IndexShard shard = newShard(true); + try (TestSearchContext context = new TestSearchContext(null, shard, s)) { + context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery())); + SearchShardTask task = new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()); + context.setTask(task); + SearchContext wrapped = new FilteredSearchContext(context) { + @Override + public boolean lowLevelCancellation() { + return true; + } + + @Override + public FetchDocValuesContext docValuesContext() { + return context.docValuesContext(); + } + + @Override + public SearchContext docValuesContext(FetchDocValuesContext docValuesContext) { + return context.docValuesContext(docValuesContext); + } + + @Override + public FetchFieldsContext fetchFieldsContext() { + return context.fetchFieldsContext(); + } + + @Override + public SearchContext fetchFieldsContext(FetchFieldsContext fetchFieldsContext) { + return context.fetchFieldsContext(fetchFieldsContext); + } + }; + try (wrapped) { + Runnable cancellationChecks = RescorePhase.getCancellationChecks(wrapped); + assertNotNull(cancellationChecks); + TaskCancelHelper.cancel(task, "test cancellation"); + assertTrue(wrapped.isCancelled()); + expectThrows(TaskCancelledException.class, cancellationChecks::run); + 
QueryRescorer.QueryRescoreContext rescoreContext = new QueryRescorer.QueryRescoreContext(10); + rescoreContext.setQuery(new ParsedQuery(new MatchAllDocsQuery())); + rescoreContext.setCancellationChecker(cancellationChecks); + expectThrows( + TaskCancelledException.class, + () -> new QueryRescorer().rescore( + new TopDocs( + new TotalHits(10, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), + new ScoreDoc[] { new ScoreDoc(0, 1.0f) } + ), + context.searcher(), + rescoreContext + ) + ); + } + } + closeShards(shard); + } + } + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorer.java index 70d0b980bb3bf..54a9fe908fa87 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorer.java @@ -35,6 +35,7 @@ public class LearningToRankRescorer implements Rescorer { + private static final int MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK = 10; public static final LearningToRankRescorer INSTANCE = new LearningToRankRescorer(); private static final Logger logger = LogManager.getLogger(LearningToRankRescorer.class); @@ -78,7 +79,12 @@ public TopDocs rescore(TopDocs topDocs, IndexSearcher searcher, RescoreContext r List featureExtractors = ltrRescoreContext.buildFeatureExtractors(searcher); List> docFeatures = new ArrayList<>(topDocIDs.size()); int featureSize = featureExtractors.stream().mapToInt(fe -> fe.featureNames().size()).sum(); + int count = 0; while (hitUpto < hitsToRescore.length) { + if (count % MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK == 0) { + rescoreContext.checkCancellation(); + } + count++; final ScoreDoc hit = hitsToRescore[hitUpto]; final int docID = hit.doc; while (docID >= endDoc) { @@ -106,6 +112,9 @@ public TopDocs rescore(TopDocs topDocs, IndexSearcher searcher, 
RescoreContext r hitUpto++; } for (int i = 0; i < hitsToRescore.length; i++) { + if (i % MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK == 0) { + rescoreContext.checkCancellation(); + } Map features = docFeatures.get(i); try { InferenceResults results = definition.inferLtr(features, ltrRescoreContext.learningToRankConfig); From d9c2eba47158f3832a675c82693c3c053f209c44 Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Fri, 18 Oct 2024 12:48:09 -0500 Subject: [PATCH 222/449] Avoiding possibility of duplicate index names in IndexLifecycleServiceTests.testExceptionStillProcessesOtherIndices (#115118) --- .../elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java index 209839c9d24df..eceb81542377a 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java @@ -390,7 +390,10 @@ public void doTestExceptionStillProcessesOtherIndices(boolean useOnMaster) { MockAction mockAction = new MockAction(Collections.singletonList(i2mockStep)); Phase i2phase = new Phase("phase", TimeValue.ZERO, Collections.singletonMap("action", mockAction)); LifecyclePolicy i2policy = newTestLifecyclePolicy(policy1, Collections.singletonMap(i2phase.getName(), i1phase)); - Index index2 = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20)); + Index index2 = new Index( + randomValueOtherThan(index1.getName(), () -> randomAlphaOfLengthBetween(1, 20)), + randomAlphaOfLengthBetween(1, 20) + ); LifecycleExecutionState.Builder i2lifecycleState = LifecycleExecutionState.builder(); i2lifecycleState.setPhase(i2currentStepKey.phase()); i2lifecycleState.setAction(i2currentStepKey.action()); From 
8a613e7f171afee63af2d5efc8ddbd5ab9f5e903 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 19 Oct 2024 05:10:33 +1100 Subject: [PATCH 223/449] Mute org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT #115135 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 1e46b95bff1ef..4b69eacba7b1a 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -362,6 +362,8 @@ tests: - class: org.elasticsearch.xpack.esql.action.CrossClustersQueryIT method: testCCSExecutionOnSearchesWithLimit0 issue: https://github.com/elastic/elasticsearch/issues/115129 +- class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT + issue: https://github.com/elastic/elasticsearch/issues/115135 # Examples: # From 16b86a61b180553889e7d3a2747a0e76e7c244b9 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Fri, 18 Oct 2024 13:37:31 -0700 Subject: [PATCH 224/449] Remove temporary mutes of compatibility tests (#115140) --- rest-api-spec/build.gradle | 4 ---- x-pack/plugin/build.gradle | 1 - 2 files changed, 5 deletions(-) diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 27ae0c7f99db1..a742e83255bbb 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -57,8 +57,4 @@ tasks.named("precommit").configure { tasks.named("yamlRestCompatTestTransform").configure({task -> task.skipTest("indices.sort/10_basic/Index Sort", "warning does not exist for compatibility") task.skipTest("search/330_fetch_fields/Test search rewrite", "warning does not exist for compatibility") - task.skipTest("tsdb/20_mapping/disabled source", "temporary until backported") - task.skipTest("logsdb/20_source_mapping/disabled _source is not supported", "temporary until backported") - task.skipTest("tsdb/20_mapping/regular source", "temporary until backported") - task.skipTest("logsdb/20_source_mapping/stored 
_source mode is not supported", "temporary until backported") }) diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index 3e5aaea43a9b9..8297ef5161fb0 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -82,7 +82,6 @@ tasks.named("precommit").configure { tasks.named("yamlRestCompatTestTransform").configure({ task -> task.skipTest("security/10_forbidden/Test bulk response with invalid credentials", "warning does not exist for compatibility") - task.skipTest("wildcard/30_ignore_above_synthetic_source/wildcard field type ignore_above", "Temporary until backported") task.skipTest("inference/inference_crud/Test get all", "Assertions on number of inference models break due to default configs") task.skipTest("esql/60_usage/Basic ESQL usage output (telemetry)", "The telemetry output changed. We dropped a column. That's safe.") }) From 0c287384e7b1dbf368e222fc3dc10c9ca7c01a0e Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Fri, 18 Oct 2024 22:40:27 +0200 Subject: [PATCH 225/449] Update docker.elastic.co/wolfi/chainguard-base:latest Docker digest to bf163e1 (#114985) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- .../main/java/org/elasticsearch/gradle/internal/DockerBase.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java index d80256ee36a17..fb52daf7e164f 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java @@ -27,7 +27,7 @@ public enum DockerBase { // Chainguard based wolfi image with latest jdk // This is usually updated via renovatebot // spotless:off - 
WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:277ebb42c458ef39cb4028f9204f0b3d51d8cd628ea737a65696a1143c3e42fe", + WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:bf163e1977002301f7b9fd28fe6837a8cb2dd5c83e4cd45fb67fb28d15d5d40f", "-wolfi", "apk" ), From 16bde5189176b6d3fb218e2cd027f207d7c436f0 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Fri, 18 Oct 2024 13:48:12 -0700 Subject: [PATCH 226/449] Remove IndexMode#isSyntheticSourceEnabled (#114963) --- .../AnnotatedTextFieldMapper.java | 24 +-- .../test/logsdb/20_source_mapping.yml | 93 +++++++++ .../rest-api-spec/test/tsdb/20_mapping.yml | 30 +++ .../org/elasticsearch/index/IndexMode.java | 16 +- .../index/mapper/BinaryFieldMapper.java | 18 +- .../index/mapper/DynamicFieldsBuilder.java | 16 +- .../index/mapper/MappingParser.java | 5 +- .../index/mapper/SourceFieldMapper.java | 190 +++++++----------- .../index/mapper/TextFieldMapper.java | 25 +-- .../index/query/QueryRewriteContext.java | 3 +- .../fielddata/AbstractFieldDataTestCase.java | 10 +- .../index/fielddata/FilterFieldDataTests.java | 9 +- .../fielddata/IndexFieldDataServiceTests.java | 7 +- .../highlight/HighlightBuilderTests.java | 3 +- .../rescore/QueryRescorerBuilderTests.java | 5 +- 15 files changed, 259 insertions(+), 195 deletions(-) diff --git a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java index 709d6892788c4..c12849d545b33 100644 --- a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java +++ b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java @@ -31,6 +31,7 @@ import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import 
org.elasticsearch.index.mapper.MapperBuilderContext; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.StringStoredFieldFieldLoader; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.mapper.TextParams; @@ -91,15 +92,10 @@ public static class Builder extends FieldMapper.Builder { private final IndexVersion indexCreatedVersion; private final TextParams.Analyzers analyzers; - private final boolean isSyntheticSourceEnabledViaIndexMode; + private final boolean isSyntheticSourceEnabled; private final Parameter store; - public Builder( - String name, - IndexVersion indexCreatedVersion, - IndexAnalyzers indexAnalyzers, - boolean isSyntheticSourceEnabledViaIndexMode - ) { + public Builder(String name, IndexVersion indexCreatedVersion, IndexAnalyzers indexAnalyzers, boolean isSyntheticSourceEnabled) { super(name); this.indexCreatedVersion = indexCreatedVersion; this.analyzers = new TextParams.Analyzers( @@ -108,10 +104,10 @@ public Builder( m -> builder(m).analyzers.positionIncrementGap.getValue(), indexCreatedVersion ); - this.isSyntheticSourceEnabledViaIndexMode = isSyntheticSourceEnabledViaIndexMode; + this.isSyntheticSourceEnabled = isSyntheticSourceEnabled; this.store = Parameter.storeParam( m -> builder(m).store.getValue(), - () -> isSyntheticSourceEnabledViaIndexMode && multiFieldsBuilder.hasSyntheticSourceCompatibleKeywordField() == false + () -> isSyntheticSourceEnabled && multiFieldsBuilder.hasSyntheticSourceCompatibleKeywordField() == false ); } @@ -172,7 +168,7 @@ public AnnotatedTextFieldMapper build(MapperBuilderContext context) { } public static TypeParser PARSER = new TypeParser( - (n, c) -> new Builder(n, c.indexVersionCreated(), c.getIndexAnalyzers(), c.getIndexSettings().getMode().isSyntheticSourceEnabled()) + (n, c) -> new Builder(n, c.indexVersionCreated(), c.getIndexAnalyzers(), SourceFieldMapper.isSynthetic(c.getIndexSettings())) ); /** @@ -560,12 +556,8 @@ protected 
String contentType() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder( - leafName(), - builder.indexCreatedVersion, - builder.analyzers.indexAnalyzers, - builder.isSyntheticSourceEnabledViaIndexMode - ).init(this); + return new Builder(leafName(), builder.indexCreatedVersion, builder.analyzers.indexAnalyzers, builder.isSyntheticSourceEnabled) + .init(this); } @Override diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml index 03c8def9f558c..b4709a4e4d176 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml @@ -1,3 +1,22 @@ +--- +synthetic _source is default: + - requires: + cluster_features: ["mapper.source.remove_synthetic_source_only_validation"] + reason: requires new validation logic + + - do: + indices.create: + index: test-default-source + body: + settings: + index: + mode: logsdb + - do: + indices.get: + index: test-default-source + + - match: { test-default-source.mappings._source.mode: "synthetic" } + --- stored _source mode is supported: - requires: @@ -57,3 +76,77 @@ disabled _source is not supported: - match: { error.type: "mapper_parsing_exception" } - match: { error.root_cause.0.type: "mapper_parsing_exception" } - match: { error.reason: "Failed to parse mapping: _source can not be disabled in index using [logsdb] index mode" } + +--- +include/exclude is not supported with synthetic _source: + - requires: + cluster_features: ["mapper.source.remove_synthetic_source_only_validation"] + reason: requires new validation logic + + - do: + catch: '/filtering the stored _source is incompatible with synthetic source/' + indices.create: + index: test-includes + body: + settings: + index: + mode: logsdb + mappings: + _source: + includes: [a] + + - do: 
+ catch: '/filtering the stored _source is incompatible with synthetic source/' + indices.create: + index: test-excludes + body: + settings: + index: + mode: logsdb + mappings: + _source: + excludes: [b] + +--- +include/exclude is supported with stored _source: + - requires: + cluster_features: ["mapper.source.remove_synthetic_source_only_validation"] + reason: requires new validation logic + + - do: + indices.create: + index: test-includes + body: + settings: + index: + mode: logsdb + mappings: + _source: + mode: stored + includes: [a] + + - do: + indices.get: + index: test-includes + + - match: { test-includes.mappings._source.mode: "stored" } + - match: { test-includes.mappings._source.includes: ["a"] } + + - do: + indices.create: + index: test-excludes + body: + settings: + index: + mode: logsdb + mappings: + _source: + mode: stored + excludes: [b] + + - do: + indices.get: + index: test-excludes + + - match: { test-excludes.mappings._source.mode: "stored" } + - match: { test-excludes.mappings._source.excludes: ["b"] } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml index 6a59c7bf75cbf..c5669cd6414b1 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml @@ -528,6 +528,36 @@ disabled source is not supported: - match: { error.root_cause.0.type: "mapper_parsing_exception" } - match: { error.reason: "Failed to parse mapping: _source can not be disabled in index using [time_series] index mode" } + - do: + catch: bad_request + indices.create: + index: tsdb_index + body: + settings: + index: + mode: time_series + routing_path: [k8s.pod.uid] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + _source: + enabled: false + properties: + "@timestamp": + type: date + k8s: + 
properties: + pod: + properties: + uid: + type: keyword + time_series_dimension: true + + - match: { error.type: "mapper_parsing_exception" } + - match: { error.root_cause.0.type: "mapper_parsing_exception" } + - match: { error.reason: "Failed to parse mapping: _source can not be disabled in index using [time_series] index mode" } + --- source include/exclude: - requires: diff --git a/server/src/main/java/org/elasticsearch/index/IndexMode.java b/server/src/main/java/org/elasticsearch/index/IndexMode.java index 5908bc22e21e2..75ec67f26dd3a 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexMode.java +++ b/server/src/main/java/org/elasticsearch/index/IndexMode.java @@ -120,8 +120,8 @@ public boolean shouldValidateTimestamp() { public void validateSourceFieldMapper(SourceFieldMapper sourceFieldMapper) {} @Override - public boolean isSyntheticSourceEnabled() { - return false; + public SourceFieldMapper.Mode defaultSourceMode() { + return SourceFieldMapper.Mode.STORED; } }, TIME_SERIES("time_series") { @@ -223,8 +223,8 @@ public void validateSourceFieldMapper(SourceFieldMapper sourceFieldMapper) { } @Override - public boolean isSyntheticSourceEnabled() { - return true; + public SourceFieldMapper.Mode defaultSourceMode() { + return SourceFieldMapper.Mode.SYNTHETIC; } }, LOGSDB("logsdb") { @@ -300,8 +300,8 @@ public void validateSourceFieldMapper(SourceFieldMapper sourceFieldMapper) { } @Override - public boolean isSyntheticSourceEnabled() { - return true; + public SourceFieldMapper.Mode defaultSourceMode() { + return SourceFieldMapper.Mode.SYNTHETIC; } @Override @@ -460,9 +460,9 @@ public String getName() { public abstract void validateSourceFieldMapper(SourceFieldMapper sourceFieldMapper); /** - * @return whether synthetic source is the only allowed source mode. 
+ * @return default source mode for this mode */ - public abstract boolean isSyntheticSourceEnabled(); + public abstract SourceFieldMapper.Mode defaultSourceMode(); public String getDefaultCodec() { return CodecService.DEFAULT_CODEC; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java index 06bf66a4a09c6..87c123d71aae5 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java @@ -49,13 +49,13 @@ public static class Builder extends FieldMapper.Builder { private final Parameter stored = Parameter.storeParam(m -> toType(m).stored, false); private final Parameter> meta = Parameter.metaParam(); - private final boolean isSyntheticSourceEnabledViaIndexMode; + private final boolean isSyntheticSourceEnabled; private final Parameter hasDocValues; - public Builder(String name, boolean isSyntheticSourceEnabledViaIndexMode) { + public Builder(String name, boolean isSyntheticSourceEnabled) { super(name); - this.isSyntheticSourceEnabledViaIndexMode = isSyntheticSourceEnabledViaIndexMode; - this.hasDocValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, isSyntheticSourceEnabledViaIndexMode); + this.isSyntheticSourceEnabled = isSyntheticSourceEnabled; + this.hasDocValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, isSyntheticSourceEnabled); } @Override @@ -79,9 +79,7 @@ public BinaryFieldMapper build(MapperBuilderContext context) { } } - public static final TypeParser PARSER = new TypeParser( - (n, c) -> new Builder(n, c.getIndexSettings().getMode().isSyntheticSourceEnabled()) - ); + public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n, SourceFieldMapper.isSynthetic(c.getIndexSettings()))); public static final class BinaryFieldType extends MappedFieldType { private BinaryFieldType(String name, boolean isStored, 
boolean hasDocValues, Map meta) { @@ -140,13 +138,13 @@ public Query termQuery(Object value, SearchExecutionContext context) { private final boolean stored; private final boolean hasDocValues; - private final boolean isSyntheticSourceEnabledViaIndexMode; + private final boolean isSyntheticSourceEnabled; protected BinaryFieldMapper(String simpleName, MappedFieldType mappedFieldType, BuilderParams builderParams, Builder builder) { super(simpleName, mappedFieldType, builderParams); this.stored = builder.stored.getValue(); this.hasDocValues = builder.hasDocValues.getValue(); - this.isSyntheticSourceEnabledViaIndexMode = builder.isSyntheticSourceEnabledViaIndexMode; + this.isSyntheticSourceEnabled = builder.isSyntheticSourceEnabled; } @Override @@ -186,7 +184,7 @@ public void indexValue(DocumentParserContext context, byte[] value) { @Override public FieldMapper.Builder getMergeBuilder() { - return new BinaryFieldMapper.Builder(leafName(), isSyntheticSourceEnabledViaIndexMode).init(this); + return new BinaryFieldMapper.Builder(leafName(), isSyntheticSourceEnabled).init(this); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DynamicFieldsBuilder.java b/server/src/main/java/org/elasticsearch/index/mapper/DynamicFieldsBuilder.java index 4b6419b85e155..0793dd748c67e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DynamicFieldsBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DynamicFieldsBuilder.java @@ -334,13 +334,10 @@ public boolean newDynamicStringField(DocumentParserContext context, String name) ); } else { return createDynamicField( - new TextFieldMapper.Builder( - name, - context.indexAnalyzers(), - context.indexSettings().getMode().isSyntheticSourceEnabled() - ).addMultiField( - new KeywordFieldMapper.Builder("keyword", context.indexSettings().getIndexVersionCreated()).ignoreAbove(256) - ), + new TextFieldMapper.Builder(name, context.indexAnalyzers(), 
SourceFieldMapper.isSynthetic(context.indexSettings())) + .addMultiField( + new KeywordFieldMapper.Builder("keyword", context.indexSettings().getIndexVersionCreated()).ignoreAbove(256) + ), context ); } @@ -412,10 +409,7 @@ public boolean newDynamicDateField(DocumentParserContext context, String name, D } boolean newDynamicBinaryField(DocumentParserContext context, String name) throws IOException { - return createDynamicField( - new BinaryFieldMapper.Builder(name, context.indexSettings().getMode().isSyntheticSourceEnabled()), - context - ); + return createDynamicField(new BinaryFieldMapper.Builder(name, SourceFieldMapper.isSynthetic(context.indexSettings())), context); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java b/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java index 9afa77161bef1..f30a0089e4eff 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java @@ -124,7 +124,10 @@ Mapping parse(@Nullable String type, MergeReason reason, Map map Map, MetadataFieldMapper> metadataMappers = metadataMappersSupplier.get(); Map meta = null; - boolean isSourceSynthetic = mappingParserContext.getIndexSettings().getMode().isSyntheticSourceEnabled(); + // TODO this should be the final value once `_source.mode` mapping parameter is not used anymore + // and it should not be reassigned below. + // For now it is still possible to set `_source.mode` so this is correct. 
+ boolean isSourceSynthetic = SourceFieldMapper.isSynthetic(mappingParserContext.getIndexSettings()); boolean isDataStream = false; Iterator> iterator = mappingSource.entrySet().iterator(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index dd09dc6ea0c5c..372e0bbdfecf4 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -64,10 +64,7 @@ public class SourceFieldMapper extends MetadataFieldMapper { public static final Setting INDEX_MAPPER_SOURCE_MODE_SETTING = Setting.enumSetting(SourceFieldMapper.Mode.class, settings -> { final IndexMode indexMode = IndexSettings.MODE.get(settings); - return switch (indexMode) { - case IndexMode.LOGSDB, IndexMode.TIME_SERIES -> Mode.SYNTHETIC.name(); - default -> Mode.STORED.name(); - }; + return indexMode.defaultSourceMode().name(); }, "index.mapping.source.mode", value -> {}, Setting.Property.Final, Setting.Property.IndexScope); /** The source mode */ @@ -81,68 +78,28 @@ public enum Mode { null, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - null - ); - - private static final SourceFieldMapper DEFAULT_DISABLED = new SourceFieldMapper( - Mode.DISABLED, - Explicit.IMPLICIT_TRUE, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - null - ); - - private static final SourceFieldMapper DEFAULT_SYNTHETIC = new SourceFieldMapper( - Mode.SYNTHETIC, - Explicit.IMPLICIT_TRUE, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - null - ); - - private static final SourceFieldMapper TSDB_DEFAULT = new SourceFieldMapper( - Mode.SYNTHETIC, - Explicit.IMPLICIT_TRUE, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - IndexMode.TIME_SERIES + Strings.EMPTY_ARRAY ); - private static final SourceFieldMapper TSDB_DEFAULT_STORED = new SourceFieldMapper( + private static final SourceFieldMapper STORED = new 
SourceFieldMapper( Mode.STORED, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - IndexMode.TIME_SERIES + Strings.EMPTY_ARRAY ); - private static final SourceFieldMapper LOGSDB_DEFAULT = new SourceFieldMapper( + private static final SourceFieldMapper SYNTHETIC = new SourceFieldMapper( Mode.SYNTHETIC, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - IndexMode.LOGSDB + Strings.EMPTY_ARRAY ); - private static final SourceFieldMapper LOGSDB_DEFAULT_STORED = new SourceFieldMapper( - Mode.STORED, - Explicit.IMPLICIT_TRUE, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - IndexMode.LOGSDB - ); - - /* - * Synthetic source was added as the default for TSDB in v.8.7. The legacy field mapper below - * is used in bwc tests and mixed clusters containing time series indexes created in an earlier version. - */ - private static final SourceFieldMapper TSDB_LEGACY_DEFAULT = new SourceFieldMapper( - null, + private static final SourceFieldMapper DISABLED = new SourceFieldMapper( + Mode.DISABLED, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - IndexMode.TIME_SERIES + Strings.EMPTY_ARRAY ); public static class Defaults { @@ -221,12 +178,7 @@ protected Parameter[] getParameters() { return new Parameter[] { enabled, mode, includes, excludes }; } - private boolean isDefault(final Mode sourceMode) { - if (sourceMode != null - && (((indexMode != null && indexMode.isSyntheticSourceEnabled() && sourceMode == Mode.SYNTHETIC) == false) - || sourceMode == Mode.DISABLED)) { - return false; - } + private boolean isDefault() { return enabled.get().value() && includes.getValue().isEmpty() && excludes.getValue().isEmpty(); } @@ -237,15 +189,9 @@ public SourceFieldMapper build() { throw new MapperParsingException("Cannot set both [mode] and [enabled] parameters"); } } - // NOTE: if the `index.mapper.source.mode` exists it takes precedence to determine the source mode for `_source` - // otherwise the mode is determined according to 
`index.mode` and `_source.mode`. - final Mode sourceMode = INDEX_MAPPER_SOURCE_MODE_SETTING.exists(settings) - ? INDEX_MAPPER_SOURCE_MODE_SETTING.get(settings) - : mode.get(); - if (isDefault(sourceMode)) { - return resolveSourceMode(indexMode, sourceMode == null ? Mode.STORED : sourceMode); - } + final Mode sourceMode = resolveSourceMode(); + if (supportsNonDefaultParameterValues == false) { List disallowed = new ArrayList<>(); if (enabled.get().value() == false) { @@ -269,61 +215,75 @@ public SourceFieldMapper build() { } } - SourceFieldMapper sourceFieldMapper = new SourceFieldMapper( - sourceMode, - enabled.get(), - includes.getValue().toArray(Strings.EMPTY_ARRAY), - excludes.getValue().toArray(Strings.EMPTY_ARRAY), - indexMode - ); + if (sourceMode == Mode.SYNTHETIC && (includes.getValue().isEmpty() == false || excludes.getValue().isEmpty() == false)) { + throw new IllegalArgumentException("filtering the stored _source is incompatible with synthetic source"); + } + + SourceFieldMapper sourceFieldMapper; + if (isDefault()) { + // Needed for bwc so that "mode" is not serialized in case of a standard index with stored source. + if (sourceMode == null) { + sourceFieldMapper = DEFAULT; + } else { + sourceFieldMapper = resolveStaticInstance(sourceMode); + } + } else { + sourceFieldMapper = new SourceFieldMapper( + sourceMode, + enabled.get(), + includes.getValue().toArray(Strings.EMPTY_ARRAY), + excludes.getValue().toArray(Strings.EMPTY_ARRAY) + ); + } if (indexMode != null) { indexMode.validateSourceFieldMapper(sourceFieldMapper); } return sourceFieldMapper; } - } + private Mode resolveSourceMode() { + // If the `index.mapper.source.mode` exists it takes precedence to determine the source mode for `_source` + // otherwise the mode is determined according to `_source.mode`. 
+ if (INDEX_MAPPER_SOURCE_MODE_SETTING.exists(settings)) { + return INDEX_MAPPER_SOURCE_MODE_SETTING.get(settings); + } - private static SourceFieldMapper resolveSourceMode(final IndexMode indexMode, final Mode sourceMode) { - switch (indexMode) { - case STANDARD: - switch (sourceMode) { - case SYNTHETIC: - return DEFAULT_SYNTHETIC; - case STORED: - return DEFAULT; - case DISABLED: - return DEFAULT_DISABLED; - default: - throw new IllegalArgumentException("Unsupported source mode: " + sourceMode); + // If `_source.mode` is not set we need to apply a default according to index mode. + if (mode.get() == null) { + if (indexMode == null || indexMode == IndexMode.STANDARD) { + // Special case to avoid serializing mode. + return null; } - case TIME_SERIES: - case LOGSDB: - switch (sourceMode) { - case SYNTHETIC: - return indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT : LOGSDB_DEFAULT; - case STORED: - return indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT_STORED : LOGSDB_DEFAULT_STORED; - case DISABLED: - throw new IllegalArgumentException("_source can not be disabled in index using [" + indexMode + "] index mode"); - default: - throw new IllegalArgumentException("Unsupported source mode: " + sourceMode); - } - default: - throw new IllegalArgumentException("Unsupported index mode: " + indexMode); + + return indexMode.defaultSourceMode(); + } + + return mode.get(); } } + private static SourceFieldMapper resolveStaticInstance(final Mode sourceMode) { + return switch (sourceMode) { + case SYNTHETIC -> SYNTHETIC; + case STORED -> STORED; + case DISABLED -> DISABLED; + }; + } + public static final TypeParser PARSER = new ConfigurableTypeParser(c -> { final IndexMode indexMode = c.getIndexSettings().getMode(); - final Mode settingSourceMode = INDEX_MAPPER_SOURCE_MODE_SETTING.get(c.getSettings()); - if (indexMode.isSyntheticSourceEnabled()) { - if (indexMode == IndexMode.TIME_SERIES && c.getIndexSettings().getIndexVersionCreated().before(IndexVersions.V_8_7_0)) { - return 
TSDB_LEGACY_DEFAULT; - } + if (indexMode == IndexMode.TIME_SERIES && c.getIndexSettings().getIndexVersionCreated().before(IndexVersions.V_8_7_0)) { + return DEFAULT; + } + + final Mode settingSourceMode = INDEX_MAPPER_SOURCE_MODE_SETTING.get(c.getSettings()); + // Needed for bwc so that "mode" is not serialized in case of standard index with stored source. + if (indexMode == IndexMode.STANDARD && settingSourceMode == Mode.STORED) { + return DEFAULT; } - return resolveSourceMode(indexMode, settingSourceMode == null ? Mode.STORED : settingSourceMode); + + return resolveStaticInstance(settingSourceMode); }, c -> new Builder( c.getIndexSettings().getMode(), @@ -380,21 +340,14 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { private final String[] excludes; private final SourceFilter sourceFilter; - private final IndexMode indexMode; - - private SourceFieldMapper(Mode mode, Explicit enabled, String[] includes, String[] excludes, IndexMode indexMode) { + private SourceFieldMapper(Mode mode, Explicit enabled, String[] includes, String[] excludes) { super(new SourceFieldType((enabled.explicit() && enabled.value()) || (enabled.explicit() == false && mode != Mode.DISABLED))); - assert enabled.explicit() == false || mode == null; this.mode = mode; this.enabled = enabled; this.sourceFilter = buildSourceFilter(includes, excludes); this.includes = includes; this.excludes = excludes; - if (this.sourceFilter != null && (mode == Mode.SYNTHETIC || indexMode == IndexMode.TIME_SERIES)) { - throw new IllegalArgumentException("filtering the stored _source is incompatible with synthetic source"); - } this.complete = stored() && sourceFilter == null; - this.indexMode = indexMode; } private static SourceFilter buildSourceFilter(String[] includes, String[] excludes) { @@ -432,9 +385,6 @@ public void preParse(DocumentParserContext context) throws IOException { final BytesReference adaptedSource = applyFilters(originalSource, contentType); if (adaptedSource != null) { - 
assert context.indexSettings().getIndexVersionCreated().before(IndexVersions.V_8_7_0) - || indexMode == null - || indexMode.isSyntheticSourceEnabled() == false; final BytesRef ref = adaptedSource.toBytesRef(); context.doc().add(new StoredField(fieldType().name(), ref.bytes, ref.offset, ref.length)); } @@ -468,7 +418,7 @@ protected String contentType() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(indexMode, Settings.EMPTY, false).init(this); + return new Builder(null, Settings.EMPTY, false).init(this); } /** @@ -485,6 +435,10 @@ public boolean isSynthetic() { return mode == Mode.SYNTHETIC; } + public static boolean isSynthetic(IndexSettings indexSettings) { + return INDEX_MAPPER_SOURCE_MODE_SETTING.get(indexSettings.getSettings()) == SourceFieldMapper.Mode.SYNTHETIC; + } + public boolean isDisabled() { return mode == Mode.DISABLED; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 0a3911a73a2fc..642539fbbc2f8 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -239,7 +239,7 @@ public static class Builder extends FieldMapper.Builder { private final IndexVersion indexCreatedVersion; private final Parameter store; - private final boolean isSyntheticSourceEnabledViaIndexMode; + private final boolean isSyntheticSourceEnabled; private final Parameter index = Parameter.indexParam(m -> ((TextFieldMapper) m).index, true); @@ -286,16 +286,11 @@ public static class Builder extends FieldMapper.Builder { final TextParams.Analyzers analyzers; - public Builder(String name, IndexAnalyzers indexAnalyzers, boolean isSyntheticSourceEnabledViaIndexMode) { - this(name, IndexVersion.current(), indexAnalyzers, isSyntheticSourceEnabledViaIndexMode); + public Builder(String name, IndexAnalyzers indexAnalyzers, boolean 
isSyntheticSourceEnabled) { + this(name, IndexVersion.current(), indexAnalyzers, isSyntheticSourceEnabled); } - public Builder( - String name, - IndexVersion indexCreatedVersion, - IndexAnalyzers indexAnalyzers, - boolean isSyntheticSourceEnabledViaIndexMode - ) { + public Builder(String name, IndexVersion indexCreatedVersion, IndexAnalyzers indexAnalyzers, boolean isSyntheticSourceEnabled) { super(name); // If synthetic source is used we need to either store this field @@ -306,7 +301,7 @@ public Builder( // If 'store' parameter was explicitly provided we'll reject the request. this.store = Parameter.storeParam( m -> ((TextFieldMapper) m).store, - () -> isSyntheticSourceEnabledViaIndexMode && multiFieldsBuilder.hasSyntheticSourceCompatibleKeywordField() == false + () -> isSyntheticSourceEnabled && multiFieldsBuilder.hasSyntheticSourceCompatibleKeywordField() == false ); this.indexCreatedVersion = indexCreatedVersion; this.analyzers = new TextParams.Analyzers( @@ -315,7 +310,7 @@ public Builder( m -> (((TextFieldMapper) m).positionIncrementGap), indexCreatedVersion ); - this.isSyntheticSourceEnabledViaIndexMode = isSyntheticSourceEnabledViaIndexMode; + this.isSyntheticSourceEnabled = isSyntheticSourceEnabled; } public Builder index(boolean index) { @@ -488,7 +483,7 @@ public TextFieldMapper build(MapperBuilderContext context) { private static final IndexVersion MINIMUM_COMPATIBILITY_VERSION = IndexVersion.fromId(5000099); public static final TypeParser PARSER = new TypeParser( - (n, c) -> new Builder(n, c.indexVersionCreated(), c.getIndexAnalyzers(), c.getIndexSettings().getMode().isSyntheticSourceEnabled()), + (n, c) -> new Builder(n, c.indexVersionCreated(), c.getIndexAnalyzers(), SourceFieldMapper.isSynthetic(c.getIndexSettings())), MINIMUM_COMPATIBILITY_VERSION ); @@ -1242,7 +1237,7 @@ public Query existsQuery(SearchExecutionContext context) { private final SubFieldInfo prefixFieldInfo; private final SubFieldInfo phraseFieldInfo; - private final boolean 
isSyntheticSourceEnabledViaIndexMode; + private final boolean isSyntheticSourceEnabled; private TextFieldMapper( String simpleName, @@ -1275,7 +1270,7 @@ private TextFieldMapper( this.indexPrefixes = builder.indexPrefixes.getValue(); this.freqFilter = builder.freqFilter.getValue(); this.fieldData = builder.fieldData.get(); - this.isSyntheticSourceEnabledViaIndexMode = builder.isSyntheticSourceEnabledViaIndexMode; + this.isSyntheticSourceEnabled = builder.isSyntheticSourceEnabled; } @Override @@ -1299,7 +1294,7 @@ public Map indexAnalyzers() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(leafName(), indexCreatedVersion, indexAnalyzers, isSyntheticSourceEnabledViaIndexMode).init(this); + return new Builder(leafName(), indexCreatedVersion, indexAnalyzers, isSyntheticSourceEnabled).init(this); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java index 157ed617f3eb5..8808cd79072f6 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java @@ -23,6 +23,7 @@ import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.script.ScriptCompiler; import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; @@ -235,7 +236,7 @@ MappedFieldType failIfFieldMappingNotFound(String name, MappedFieldType fieldMap TextFieldMapper.Builder builder = new TextFieldMapper.Builder( name, getIndexAnalyzers(), - getIndexSettings() != null && getIndexSettings().getMode().isSyntheticSourceEnabled() + getIndexSettings() != null && SourceFieldMapper.isSynthetic(getIndexSettings()) 
); return builder.build(MapperBuilderContext.root(false, false)).fieldType(); } else { diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java index 83f668f20de7b..f809a53d753fb 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java @@ -34,6 +34,7 @@ import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.shard.ShardId; @@ -94,7 +95,7 @@ public > IFD getForField(String type, String field fieldType = new TextFieldMapper.Builder( fieldName, createDefaultIndexAnalyzers(), - indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + SourceFieldMapper.isSynthetic(indexService.getIndexSettings()) ).fielddata(true).build(context).fieldType(); } } else if (type.equals("float")) { @@ -162,10 +163,9 @@ public > IFD getForField(String type, String field docValues ).build(context).fieldType(); } else if (type.equals("binary")) { - fieldType = new BinaryFieldMapper.Builder(fieldName, indexService.getIndexSettings().getMode().isSyntheticSourceEnabled()) - .docValues(docValues) - .build(context) - .fieldType(); + fieldType = new BinaryFieldMapper.Builder(fieldName, SourceFieldMapper.isSynthetic(indexService.getIndexSettings())).docValues( + docValues + ).build(context).fieldType(); } else { throw new UnsupportedOperationException(type); } diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java 
b/server/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java index b0a30211c0f47..a7277b79e5c00 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java @@ -15,6 +15,7 @@ import org.apache.lucene.index.SortedSetDocValues; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; import java.util.List; @@ -56,7 +57,7 @@ public void testFilterByFrequency() throws Exception { MappedFieldType ft = new TextFieldMapper.Builder( "high_freq", createDefaultIndexAnalyzers(), - indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + SourceFieldMapper.isSynthetic(indexService.getIndexSettings()) ).fielddata(true).fielddataFrequencyFilter(0, random.nextBoolean() ? 100 : 0.5d, 0).build(builderContext).fieldType(); IndexOrdinalsFieldData fieldData = searchExecutionContext.getForField(ft, MappedFieldType.FielddataOperation.SEARCH); for (LeafReaderContext context : contexts) { @@ -72,7 +73,7 @@ public void testFilterByFrequency() throws Exception { MappedFieldType ft = new TextFieldMapper.Builder( "high_freq", createDefaultIndexAnalyzers(), - indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + SourceFieldMapper.isSynthetic(indexService.getIndexSettings()) ).fielddata(true) .fielddataFrequencyFilter(random.nextBoolean() ? 101 : 101d / 200.0d, 201, 100) .build(builderContext) @@ -91,7 +92,7 @@ public void testFilterByFrequency() throws Exception { MappedFieldType ft = new TextFieldMapper.Builder( "med_freq", createDefaultIndexAnalyzers(), - indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + SourceFieldMapper.isSynthetic(indexService.getIndexSettings()) ).fielddata(true) .fielddataFrequencyFilter(random.nextBoolean() ? 
101 : 101d / 200.0d, Integer.MAX_VALUE, 101) .build(builderContext) @@ -111,7 +112,7 @@ public void testFilterByFrequency() throws Exception { MappedFieldType ft = new TextFieldMapper.Builder( "med_freq", createDefaultIndexAnalyzers(), - indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + SourceFieldMapper.isSynthetic(indexService.getIndexSettings()) ).fielddata(true) .fielddataFrequencyFilter(random.nextBoolean() ? 101 : 101d / 200.0d, Integer.MAX_VALUE, 101) .build(builderContext) diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java index 7616ea5119b6c..36c25b352a792 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; @@ -163,12 +164,12 @@ public void testClearField() throws Exception { final MappedFieldType mapper1 = new TextFieldMapper.Builder( "field_1", createDefaultIndexAnalyzers(), - indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + SourceFieldMapper.isSynthetic(indexService.getIndexSettings()) ).fielddata(true).build(context).fieldType(); final MappedFieldType mapper2 = new TextFieldMapper.Builder( "field_2", createDefaultIndexAnalyzers(), - indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + SourceFieldMapper.isSynthetic(indexService.getIndexSettings()) ).fielddata(true).build(context).fieldType(); final IndexWriter writer 
= new IndexWriter(new ByteBuffersDirectory(), new IndexWriterConfig(new KeywordAnalyzer())); Document doc = new Document(); @@ -234,7 +235,7 @@ public void testFieldDataCacheListener() throws Exception { final MappedFieldType mapper1 = new TextFieldMapper.Builder( "s", createDefaultIndexAnalyzers(), - indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + SourceFieldMapper.isSynthetic(indexService.getIndexSettings()) ).fielddata(true).build(context).fieldType(); final IndexWriter writer = new IndexWriter(new ByteBuffersDirectory(), new IndexWriterConfig(new KeywordAnalyzer())); Document doc = new Document(); diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java index 138ee899dd906..3699cdee3912b 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.query.IdsQueryBuilder; import org.elasticsearch.index.query.MatchAllQueryBuilder; @@ -323,7 +324,7 @@ public MappedFieldType getFieldType(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder( name, createDefaultIndexAnalyzers(), - idxSettings.getMode().isSyntheticSourceEnabled() + SourceFieldMapper.isSynthetic(idxSettings) ); return builder.build(MapperBuilderContext.root(false, false)).fieldType(); } diff --git a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java 
b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java index 4a02e84bbe4f8..209dfdcc16969 100644 --- a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -166,7 +167,7 @@ public MappedFieldType getFieldType(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder( name, createDefaultIndexAnalyzers(), - idxSettings.getMode().isSyntheticSourceEnabled() + SourceFieldMapper.isSynthetic(idxSettings) ); return builder.build(MapperBuilderContext.root(false, false)).fieldType(); } @@ -233,7 +234,7 @@ public MappedFieldType getFieldType(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder( name, createDefaultIndexAnalyzers(), - idxSettings.getMode().isSyntheticSourceEnabled() + SourceFieldMapper.isSynthetic(idxSettings) ); return builder.build(MapperBuilderContext.root(false, false)).fieldType(); } From cc0da6d30991c4a837ca8332f29c3230a2585c69 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Fri, 18 Oct 2024 14:10:11 -0700 Subject: [PATCH 227/449] Upgrade develocity plugin (#115139) --- gradle/build.versions.toml | 2 +- gradle/verification-metadata.xml | 5 +++++ plugins/examples/settings.gradle | 2 +- settings.gradle | 2 +- 4 files changed, 8 insertions(+), 3 deletions(-) diff --git a/gradle/build.versions.toml b/gradle/build.versions.toml index 35c26ef10f9ec..d11c4b7fd9c91 100644 --- a/gradle/build.versions.toml +++ b/gradle/build.versions.toml @@ -17,7 +17,7 @@ 
commons-codec = "commons-codec:commons-codec:1.11" commmons-io = "commons-io:commons-io:2.2" docker-compose = "com.avast.gradle:gradle-docker-compose-plugin:0.17.5" forbiddenApis = "de.thetaphi:forbiddenapis:3.6" -gradle-enterprise = "com.gradle:develocity-gradle-plugin:3.17.4" +gradle-enterprise = "com.gradle:develocity-gradle-plugin:3.18.1" hamcrest = "org.hamcrest:hamcrest:2.1" httpcore = "org.apache.httpcomponents:httpcore:4.4.12" httpclient = "org.apache.httpcomponents:httpclient:4.5.14" diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 0b5c1ae6528f9..0156f13b4b05d 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -799,6 +799,11 @@ + + + + + diff --git a/plugins/examples/settings.gradle b/plugins/examples/settings.gradle index 78248ecab92d2..1f168525d4b1d 100644 --- a/plugins/examples/settings.gradle +++ b/plugins/examples/settings.gradle @@ -8,7 +8,7 @@ */ plugins { - id "com.gradle.develocity" version "3.17.4" + id "com.gradle.develocity" version "3.18.1" } // Include all subdirectories as example projects diff --git a/settings.gradle b/settings.gradle index be0844de1164a..a95a46a3569d7 100644 --- a/settings.gradle +++ b/settings.gradle @@ -17,7 +17,7 @@ pluginManagement { } plugins { - id "com.gradle.develocity" version "3.17.4" + id "com.gradle.develocity" version "3.18.1" id 'elasticsearch.java-toolchain' } From 68f0f00dd181317d2071403cb959fe9019ab8587 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Fri, 18 Oct 2024 16:02:28 -0700 Subject: [PATCH 228/449] Add initial entitlement policy parsing (#114448) This change adds entitlement policy parsing with the following design: * YAML file for readability and re-use of our x-content parsers * hierarchical structure to group entitlements under a single scope * no general entitlements without a scope or for the entire project --- .../tools/entitlement-runtime/build.gradle | 6 +- .../src/main/java/module-info.java | 1 + 
.../runtime/policy/Entitlement.java | 19 ++ .../runtime/policy/ExternalEntitlement.java | 36 ++++ .../runtime/policy/FileEntitlement.java | 67 +++++++ .../entitlement/runtime/policy/Policy.java | 46 +++++ .../runtime/policy/PolicyParser.java | 176 ++++++++++++++++++ .../runtime/policy/PolicyParserException.java | 92 +++++++++ .../entitlement/runtime/policy/Scope.java | 46 +++++ .../policy/PolicyParserFailureTests.java | 83 +++++++++ .../runtime/policy/PolicyParserTests.java | 28 +++ .../runtime/policy/test-policy.yaml | 7 + 12 files changed, 602 insertions(+), 5 deletions(-) create mode 100644 distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/Entitlement.java create mode 100644 distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java create mode 100644 distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java create mode 100644 distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/Policy.java create mode 100644 distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java create mode 100644 distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserException.java create mode 100644 distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/Scope.java create mode 100644 distribution/tools/entitlement-runtime/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java create mode 100644 distribution/tools/entitlement-runtime/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java create mode 100644 distribution/tools/entitlement-runtime/src/test/resources/org/elasticsearch/entitlement/runtime/policy/test-policy.yaml diff --git 
a/distribution/tools/entitlement-runtime/build.gradle b/distribution/tools/entitlement-runtime/build.gradle index 0fb7bdec883f8..55471272c1b5f 100644 --- a/distribution/tools/entitlement-runtime/build.gradle +++ b/distribution/tools/entitlement-runtime/build.gradle @@ -11,16 +11,12 @@ apply plugin: 'elasticsearch.publish' dependencies { compileOnly project(':libs:elasticsearch-core') // For @SuppressForbidden + compileOnly project(":libs:elasticsearch-x-content") // for parsing policy files compileOnly project(':server') // To access the main server module for special permission checks compileOnly project(':distribution:tools:entitlement-bridge') - testImplementation project(":test:framework") } tasks.named('forbiddenApisMain').configure { replaceSignatureFiles 'jdk-signatures' } - -tasks.named('forbiddenApisMain').configure { - replaceSignatureFiles 'jdk-signatures' -} diff --git a/distribution/tools/entitlement-runtime/src/main/java/module-info.java b/distribution/tools/entitlement-runtime/src/main/java/module-info.java index d0bfc804f8024..12e6905014512 100644 --- a/distribution/tools/entitlement-runtime/src/main/java/module-info.java +++ b/distribution/tools/entitlement-runtime/src/main/java/module-info.java @@ -9,6 +9,7 @@ module org.elasticsearch.entitlement.runtime { requires org.elasticsearch.entitlement.bridge; + requires org.elasticsearch.xcontent; requires org.elasticsearch.server; exports org.elasticsearch.entitlement.runtime.api; diff --git a/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/Entitlement.java b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/Entitlement.java new file mode 100644 index 0000000000000..5b53c399cc1b7 --- /dev/null +++ b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/Entitlement.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +/** + * Marker interface to ensure that only {@link Entitlement} are + * part of a {@link Policy}. All entitlement classes should implement + * this. + */ +public interface Entitlement { + +} diff --git a/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java new file mode 100644 index 0000000000000..bb1205696b49e --- /dev/null +++ b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * This annotation indicates an {@link Entitlement} is available + * to "external" classes such as those used in plugins. 
Any {@link Entitlement} + * using this annotation is considered parseable as part of a policy file + * for entitlements. + */ +@Target(ElementType.CONSTRUCTOR) +@Retention(RetentionPolicy.RUNTIME) +public @interface ExternalEntitlement { + + /** + * This is the list of parameter names that are + * parseable in {@link PolicyParser#parseEntitlement(String, String)}. + * The number and order of parameter names much match the number and order + * of constructor parameters as this is how the parser will pass in the + * parsed values from a policy file. However, the names themselves do NOT + * have to match the parameter names of the constructor. + */ + String[] parameterNames() default {}; +} diff --git a/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java new file mode 100644 index 0000000000000..8df199591d3e4 --- /dev/null +++ b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +import java.util.List; +import java.util.Objects; + +/** + * Describes a file entitlement with a path and actions. 
+ */ +public class FileEntitlement implements Entitlement { + + public static final int READ_ACTION = 0x1; + public static final int WRITE_ACTION = 0x2; + + private final String path; + private final int actions; + + @ExternalEntitlement(parameterNames = { "path", "actions" }) + public FileEntitlement(String path, List actionsList) { + this.path = path; + int actionsInt = 0; + + for (String actionString : actionsList) { + if ("read".equals(actionString)) { + if ((actionsInt & READ_ACTION) == READ_ACTION) { + throw new IllegalArgumentException("file action [read] specified multiple times"); + } + actionsInt |= READ_ACTION; + } else if ("write".equals(actionString)) { + if ((actionsInt & WRITE_ACTION) == WRITE_ACTION) { + throw new IllegalArgumentException("file action [write] specified multiple times"); + } + actionsInt |= WRITE_ACTION; + } else { + throw new IllegalArgumentException("unknown file action [" + actionString + "]"); + } + } + + this.actions = actionsInt; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FileEntitlement that = (FileEntitlement) o; + return actions == that.actions && Objects.equals(path, that.path); + } + + @Override + public int hashCode() { + return Objects.hash(path, actions); + } + + @Override + public String toString() { + return "FileEntitlement{" + "path='" + path + '\'' + ", actions=" + actions + '}'; + } +} diff --git a/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/Policy.java b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/Policy.java new file mode 100644 index 0000000000000..e8bd7a3fff357 --- /dev/null +++ b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/Policy.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +/** + * A holder for scoped entitlements. + */ +public class Policy { + + public final String name; + public final List scopes; + + public Policy(String name, List scopes) { + this.name = Objects.requireNonNull(name); + this.scopes = Collections.unmodifiableList(Objects.requireNonNull(scopes)); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Policy policy = (Policy) o; + return Objects.equals(name, policy.name) && Objects.equals(scopes, policy.scopes); + } + + @Override + public int hashCode() { + return Objects.hash(name, scopes); + } + + @Override + public String toString() { + return "Policy{" + "name='" + name + '\'' + ", scopes=" + scopes + '}'; + } +} diff --git a/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java new file mode 100644 index 0000000000000..229ccec3b8b2c --- /dev/null +++ b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java @@ -0,0 +1,176 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.yaml.YamlXContent; + +import java.io.IOException; +import java.io.InputStream; +import java.io.UncheckedIOException; +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.entitlement.runtime.policy.PolicyParserException.newPolicyParserException; + +/** + * A parser to parse policy files for entitlements. 
+ */ +public class PolicyParser { + + protected static final ParseField ENTITLEMENTS_PARSEFIELD = new ParseField("entitlements"); + + protected static final String entitlementPackageName = Entitlement.class.getPackage().getName(); + + protected final XContentParser policyParser; + protected final String policyName; + + public PolicyParser(InputStream inputStream, String policyName) throws IOException { + this.policyParser = YamlXContent.yamlXContent.createParser(XContentParserConfiguration.EMPTY, Objects.requireNonNull(inputStream)); + this.policyName = policyName; + } + + public Policy parsePolicy() { + try { + if (policyParser.nextToken() != XContentParser.Token.START_OBJECT) { + throw newPolicyParserException("expected object "); + } + List scopes = new ArrayList<>(); + while (policyParser.nextToken() != XContentParser.Token.END_OBJECT) { + if (policyParser.currentToken() != XContentParser.Token.FIELD_NAME) { + throw newPolicyParserException("expected object "); + } + String scopeName = policyParser.currentName(); + Scope scope = parseScope(scopeName); + scopes.add(scope); + } + return new Policy(policyName, scopes); + } catch (IOException ioe) { + throw new UncheckedIOException(ioe); + } + } + + protected Scope parseScope(String scopeName) throws IOException { + try { + if (policyParser.nextToken() != XContentParser.Token.START_OBJECT) { + throw newPolicyParserException(scopeName, "expected object [" + ENTITLEMENTS_PARSEFIELD.getPreferredName() + "]"); + } + if (policyParser.nextToken() != XContentParser.Token.FIELD_NAME + || policyParser.currentName().equals(ENTITLEMENTS_PARSEFIELD.getPreferredName()) == false) { + throw newPolicyParserException(scopeName, "expected object [" + ENTITLEMENTS_PARSEFIELD.getPreferredName() + "]"); + } + if (policyParser.nextToken() != XContentParser.Token.START_ARRAY) { + throw newPolicyParserException(scopeName, "expected array of "); + } + List entitlements = new ArrayList<>(); + while (policyParser.nextToken() != 
XContentParser.Token.END_ARRAY) { + if (policyParser.currentToken() != XContentParser.Token.START_OBJECT) { + throw newPolicyParserException(scopeName, "expected object "); + } + if (policyParser.nextToken() != XContentParser.Token.FIELD_NAME) { + throw newPolicyParserException(scopeName, "expected object "); + } + String entitlementType = policyParser.currentName(); + Entitlement entitlement = parseEntitlement(scopeName, entitlementType); + entitlements.add(entitlement); + if (policyParser.nextToken() != XContentParser.Token.END_OBJECT) { + throw newPolicyParserException(scopeName, "expected closing object"); + } + } + if (policyParser.nextToken() != XContentParser.Token.END_OBJECT) { + throw newPolicyParserException(scopeName, "expected closing object"); + } + return new Scope(scopeName, entitlements); + } catch (IOException ioe) { + throw new UncheckedIOException(ioe); + } + } + + protected Entitlement parseEntitlement(String scopeName, String entitlementType) throws IOException { + Class entitlementClass; + try { + entitlementClass = Class.forName( + entitlementPackageName + + "." 
+ + Character.toUpperCase(entitlementType.charAt(0)) + + entitlementType.substring(1) + + "Entitlement" + ); + } catch (ClassNotFoundException cnfe) { + throw newPolicyParserException(scopeName, "unknown entitlement type [" + entitlementType + "]"); + } + if (Entitlement.class.isAssignableFrom(entitlementClass) == false) { + throw newPolicyParserException(scopeName, "unknown entitlement type [" + entitlementType + "]"); + } + Constructor entitlementConstructor = entitlementClass.getConstructors()[0]; + ExternalEntitlement entitlementMetadata = entitlementConstructor.getAnnotation(ExternalEntitlement.class); + if (entitlementMetadata == null) { + throw newPolicyParserException(scopeName, "unknown entitlement type [" + entitlementType + "]"); + } + + if (policyParser.nextToken() != XContentParser.Token.START_OBJECT) { + throw newPolicyParserException(scopeName, entitlementType, "expected entitlement parameters"); + } + Map parsedValues = policyParser.map(); + + Class[] parameterTypes = entitlementConstructor.getParameterTypes(); + String[] parametersNames = entitlementMetadata.parameterNames(); + Object[] parameterValues = new Object[parameterTypes.length]; + for (int parameterIndex = 0; parameterIndex < parameterTypes.length; ++parameterIndex) { + String parameterName = parametersNames[parameterIndex]; + Object parameterValue = parsedValues.remove(parameterName); + if (parameterValue == null) { + throw newPolicyParserException(scopeName, entitlementType, "missing entitlement parameter [" + parameterName + "]"); + } + Class parameterType = parameterTypes[parameterIndex]; + if (parameterType.isAssignableFrom(parameterValue.getClass()) == false) { + throw newPolicyParserException( + scopeName, + entitlementType, + "unexpected parameter type [" + parameterType.getSimpleName() + "] for entitlement parameter [" + parameterName + "]" + ); + } + parameterValues[parameterIndex] = parameterValue; + } + if (parsedValues.isEmpty() == false) { + throw 
newPolicyParserException(scopeName, entitlementType, "extraneous entitlement parameter(s) " + parsedValues); + } + + try { + return (Entitlement) entitlementConstructor.newInstance(parameterValues); + } catch (InvocationTargetException | InstantiationException | IllegalAccessException e) { + throw new IllegalStateException("internal error"); + } + } + + protected PolicyParserException newPolicyParserException(String message) { + return PolicyParserException.newPolicyParserException(policyParser.getTokenLocation(), policyName, message); + } + + protected PolicyParserException newPolicyParserException(String scopeName, String message) { + return PolicyParserException.newPolicyParserException(policyParser.getTokenLocation(), policyName, scopeName, message); + } + + protected PolicyParserException newPolicyParserException(String scopeName, String entitlementType, String message) { + return PolicyParserException.newPolicyParserException( + policyParser.getTokenLocation(), + policyName, + scopeName, + entitlementType, + message + ); + } +} diff --git a/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserException.java b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserException.java new file mode 100644 index 0000000000000..5dfa12f11d0be --- /dev/null +++ b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserException.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +import org.elasticsearch.xcontent.XContentLocation; + +/** + * An exception specifically for policy parsing errors. + */ +public class PolicyParserException extends RuntimeException { + + public static PolicyParserException newPolicyParserException(XContentLocation location, String policyName, String message) { + return new PolicyParserException( + "[" + location.lineNumber() + ":" + location.columnNumber() + "] policy parsing error for [" + policyName + "]: " + message + ); + } + + public static PolicyParserException newPolicyParserException( + XContentLocation location, + String policyName, + String scopeName, + String message + ) { + if (scopeName == null) { + return new PolicyParserException( + "[" + location.lineNumber() + ":" + location.columnNumber() + "] policy parsing error for [" + policyName + "]: " + message + ); + } else { + return new PolicyParserException( + "[" + + location.lineNumber() + + ":" + + location.columnNumber() + + "] policy parsing error for [" + + policyName + + "] in scope [" + + scopeName + + "]: " + + message + ); + } + } + + public static PolicyParserException newPolicyParserException( + XContentLocation location, + String policyName, + String scopeName, + String entitlementType, + String message + ) { + if (scopeName == null) { + return new PolicyParserException( + "[" + + location.lineNumber() + + ":" + + location.columnNumber() + + "] policy parsing error for [" + + policyName + + "] for entitlement type [" + + entitlementType + + "]: " + + message + ); + } else { + return new PolicyParserException( + "[" + + location.lineNumber() 
+ + ":" + + location.columnNumber() + + "] policy parsing error for [" + + policyName + + "] in scope [" + + scopeName + + "] for entitlement type [" + + entitlementType + + "]: " + + message + ); + } + } + + private PolicyParserException(String message) { + super(message); + } +} diff --git a/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/Scope.java b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/Scope.java new file mode 100644 index 0000000000000..0fe63eb8da1b7 --- /dev/null +++ b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/Scope.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +/** + * A holder for entitlements within a single scope. 
+ */ +public class Scope { + + public final String name; + public final List entitlements; + + public Scope(String name, List entitlements) { + this.name = Objects.requireNonNull(name); + this.entitlements = Collections.unmodifiableList(Objects.requireNonNull(entitlements)); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Scope scope = (Scope) o; + return Objects.equals(name, scope.name) && Objects.equals(entitlements, scope.entitlements); + } + + @Override + public int hashCode() { + return Objects.hash(name, entitlements); + } + + @Override + public String toString() { + return "Scope{" + "name='" + name + '\'' + ", entitlements=" + entitlements + '}'; + } +} diff --git a/distribution/tools/entitlement-runtime/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java b/distribution/tools/entitlement-runtime/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java new file mode 100644 index 0000000000000..b21d206f3eb6a --- /dev/null +++ b/distribution/tools/entitlement-runtime/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java @@ -0,0 +1,83 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.entitlement.runtime.policy; + +import org.elasticsearch.test.ESTestCase; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +public class PolicyParserFailureTests extends ESTestCase { + + public void testParserSyntaxFailures() { + PolicyParserException ppe = expectThrows( + PolicyParserException.class, + () -> new PolicyParser(new ByteArrayInputStream("[]".getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml") + .parsePolicy() + ); + assertEquals("[1:1] policy parsing error for [test-failure-policy.yaml]: expected object ", ppe.getMessage()); + } + + public void testEntitlementDoesNotExist() throws IOException { + PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + entitlements: + - does_not_exist: {} + """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml").parsePolicy()); + assertEquals( + "[3:7] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name]: " + + "unknown entitlement type [does_not_exist]", + ppe.getMessage() + ); + } + + public void testEntitlementMissingParameter() throws IOException { + PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + entitlements: + - file: {} + """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml").parsePolicy()); + assertEquals( + "[3:14] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + + "for entitlement type [file]: missing entitlement parameter [path]", + ppe.getMessage() + ); + + ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + entitlements: + - file: + path: test-path + """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml").parsePolicy()); 
+ assertEquals( + "[5:1] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + + "for entitlement type [file]: missing entitlement parameter [actions]", + ppe.getMessage() + ); + } + + public void testEntitlementExtraneousParameter() throws IOException { + PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + entitlements: + - file: + path: test-path + actions: + - read + extra: test + """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml").parsePolicy()); + assertEquals( + "[8:1] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + + "for entitlement type [file]: extraneous entitlement parameter(s) {extra=test}", + ppe.getMessage() + ); + } +} diff --git a/distribution/tools/entitlement-runtime/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java b/distribution/tools/entitlement-runtime/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java new file mode 100644 index 0000000000000..40016b2e3027e --- /dev/null +++ b/distribution/tools/entitlement-runtime/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.entitlement.runtime.policy; + +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.List; + +public class PolicyParserTests extends ESTestCase { + + public void testPolicyBuilder() throws IOException { + Policy parsedPolicy = new PolicyParser(PolicyParserTests.class.getResourceAsStream("test-policy.yaml"), "test-policy.yaml") + .parsePolicy(); + Policy builtPolicy = new Policy( + "test-policy.yaml", + List.of(new Scope("entitlement-module-name", List.of(new FileEntitlement("test/path/to/file", List.of("read", "write"))))) + ); + assertEquals(parsedPolicy, builtPolicy); + } +} diff --git a/distribution/tools/entitlement-runtime/src/test/resources/org/elasticsearch/entitlement/runtime/policy/test-policy.yaml b/distribution/tools/entitlement-runtime/src/test/resources/org/elasticsearch/entitlement/runtime/policy/test-policy.yaml new file mode 100644 index 0000000000000..b58287cfc83b7 --- /dev/null +++ b/distribution/tools/entitlement-runtime/src/test/resources/org/elasticsearch/entitlement/runtime/policy/test-policy.yaml @@ -0,0 +1,7 @@ +entitlement-module-name: + entitlements: + - file: + path: "test/path/to/file" + actions: + - "read" + - "write" From ac25dbe70692df19bd424e7ef1e4bc2c16c41329 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Fri, 18 Oct 2024 20:19:30 -0400 Subject: [PATCH 229/449] Fix IPinfo geolocation schema (#115147) --- docs/changelog/115147.yaml | 5 ++ .../ingest/geoip/IpinfoIpDataLookups.java | 17 ++--- .../ingest/geoip/GeoIpProcessorTests.java | 6 +- .../geoip/IpinfoIpDataLookupsTests.java | 65 +++++++++--------- .../src/test/resources/ipinfo/asn_sample.mmdb | Bin 25210 -> 25728 bytes .../test/resources/ipinfo/ip_asn_sample.mmdb | Bin 23456 -> 24333 bytes .../resources/ipinfo/ip_country_sample.mmdb | Bin 32292 -> 30088 bytes .../ipinfo/ip_geolocation_sample.mmdb | Bin 33552 -> 0 bytes .../ip_geolocation_standard_sample.mmdb | Bin 0 -> 30105 bytes 
.../ipinfo/privacy_detection_sample.mmdb | Bin 26352 -> 26456 bytes 10 files changed, 50 insertions(+), 43 deletions(-) create mode 100644 docs/changelog/115147.yaml delete mode 100644 modules/ingest-geoip/src/test/resources/ipinfo/ip_geolocation_sample.mmdb create mode 100644 modules/ingest-geoip/src/test/resources/ipinfo/ip_geolocation_standard_sample.mmdb diff --git a/docs/changelog/115147.yaml b/docs/changelog/115147.yaml new file mode 100644 index 0000000000000..36f40bba1da17 --- /dev/null +++ b/docs/changelog/115147.yaml @@ -0,0 +1,5 @@ +pr: 115147 +summary: Fix IPinfo geolocation schema +area: Ingest Node +type: bug +issues: [] diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookups.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookups.java index 5a13ea93ff032..8ce2424844d9d 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookups.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookups.java @@ -218,8 +218,8 @@ public record CountryResult( public record GeolocationResult( String city, String country, - Double latitude, - Double longitude, + Double lat, + Double lng, String postalCode, String region, String timezone @@ -229,14 +229,15 @@ public record GeolocationResult( public GeolocationResult( @MaxMindDbParameter(name = "city") String city, @MaxMindDbParameter(name = "country") String country, - @MaxMindDbParameter(name = "latitude") String latitude, - @MaxMindDbParameter(name = "longitude") String longitude, - // @MaxMindDbParameter(name = "network") String network, // for now we're not exposing this + // @MaxMindDbParameter(name = "geoname_id") String geonameId, // for now we're not exposing this + @MaxMindDbParameter(name = "lat") String lat, + @MaxMindDbParameter(name = "lng") String lng, @MaxMindDbParameter(name = "postal_code") String postalCode, @MaxMindDbParameter(name = "region") String 
region, + // @MaxMindDbParameter(name = "region_code") String regionCode, // for now we're not exposing this @MaxMindDbParameter(name = "timezone") String timezone ) { - this(city, country, parseLocationDouble(latitude), parseLocationDouble(longitude), postalCode, region, timezone); + this(city, country, parseLocationDouble(lat), parseLocationDouble(lng), postalCode, region, timezone); } } @@ -395,8 +396,8 @@ protected Map transform(final Result result) } } case LOCATION -> { - Double latitude = response.latitude; - Double longitude = response.longitude; + Double latitude = response.lat; + Double longitude = response.lng; if (latitude != null && longitude != null) { Map locationObject = new HashMap<>(); locationObject.put("lat", latitude); diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index 640480ed277c5..4548e92239ce1 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -82,13 +82,13 @@ public void testMaxmindCity() throws Exception { } public void testIpinfoGeolocation() throws Exception { - String ip = "13.107.39.238"; + String ip = "72.20.12.220"; GeoIpProcessor processor = new GeoIpProcessor( IP_LOCATION_TYPE, // n.b. 
this is an "ip_location" processor randomAlphaOfLength(10), null, "source_field", - loader("ipinfo/ip_geolocation_sample.mmdb"), + loader("ipinfo/ip_geolocation_standard_sample.mmdb"), () -> true, "target_field", getIpinfoGeolocationLookup(), @@ -107,7 +107,7 @@ public void testIpinfoGeolocation() throws Exception { Map data = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); assertThat(data, notNullValue()); assertThat(data.get("ip"), equalTo(ip)); - assertThat(data.get("city_name"), equalTo("Des Moines")); + assertThat(data.get("city_name"), equalTo("Chicago")); // see IpinfoIpDataLookupsTests for more tests of the data lookup behavior } diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java index e998748efbcad..d0cdc5a3e1b5e 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java @@ -102,17 +102,17 @@ public void testParseLocationDouble() { public void testAsnFree() { assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); String databaseName = "ip_asn_sample.mmdb"; - String ip = "5.182.109.0"; + String ip = "23.32.184.0"; assertExpectedLookupResults( databaseName, ip, new IpinfoIpDataLookups.Asn(Database.AsnV2.properties()), Map.ofEntries( entry("ip", ip), - entry("organization_name", "M247 Europe SRL"), - entry("asn", 9009L), - entry("network", "5.182.109.0/24"), - entry("domain", "m247.com") + entry("organization_name", "Akamai Technologies, Inc."), + entry("asn", 16625L), + entry("network", "23.32.184.0/21"), + entry("domain", "akamai.com") ) ); } @@ -120,17 +120,17 @@ public void testAsnFree() { public void testAsnStandard() { assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", 
Constants.WINDOWS); String databaseName = "asn_sample.mmdb"; - String ip = "23.53.116.0"; + String ip = "69.19.224.0"; assertExpectedLookupResults( databaseName, ip, new IpinfoIpDataLookups.Asn(Database.AsnV2.properties()), Map.ofEntries( entry("ip", ip), - entry("organization_name", "Akamai Technologies, Inc."), - entry("asn", 32787L), - entry("network", "23.53.116.0/24"), - entry("domain", "akamai.com"), + entry("organization_name", "TPx Communications"), + entry("asn", 14265L), + entry("network", "69.19.224.0/22"), + entry("domain", "tpx.com"), entry("type", "hosting"), entry("country_iso_code", "US") ) @@ -177,25 +177,25 @@ public void testAsnInvariants() { public void testCountryFree() { assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); String databaseName = "ip_country_sample.mmdb"; - String ip = "4.221.143.168"; + String ip = "20.33.76.0"; assertExpectedLookupResults( databaseName, ip, new IpinfoIpDataLookups.Country(Database.CountryV2.properties()), Map.ofEntries( entry("ip", ip), - entry("country_name", "South Africa"), - entry("country_iso_code", "ZA"), - entry("continent_name", "Africa"), - entry("continent_code", "AF") + entry("country_name", "Ireland"), + entry("country_iso_code", "IE"), + entry("continent_name", "Europe"), + entry("continent_code", "EU") ) ); } public void testGeolocationStandard() { assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); - String databaseName = "ip_geolocation_sample.mmdb"; - String ip = "2.124.90.182"; + String databaseName = "ip_geolocation_standard_sample.mmdb"; + String ip = "62.69.48.19"; assertExpectedLookupResults( databaseName, ip, @@ -215,36 +215,37 @@ public void testGeolocationStandard() { public void testGeolocationInvariants() { assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); Path configDir = tmpDir; - copyDatabase("ipinfo/ip_geolocation_sample.mmdb", 
configDir.resolve("ip_geolocation_sample.mmdb")); + copyDatabase("ipinfo/ip_geolocation_standard_sample.mmdb", configDir.resolve("ip_geolocation_standard_sample.mmdb")); { final Set expectedColumns = Set.of( - "network", "city", + "geoname_id", "region", + "region_code", "country", "postal_code", "timezone", - "latitude", - "longitude" + "lat", + "lng" ); - Path databasePath = configDir.resolve("ip_geolocation_sample.mmdb"); + Path databasePath = configDir.resolve("ip_geolocation_standard_sample.mmdb"); assertDatabaseInvariants(databasePath, (ip, row) -> { assertThat(row.keySet(), equalTo(expectedColumns)); { - String latitude = (String) row.get("latitude"); + String latitude = (String) row.get("lat"); assertThat(latitude, equalTo(latitude.trim())); Double parsed = parseLocationDouble(latitude); assertThat(parsed, notNullValue()); - assertThat(latitude, equalTo(Double.toString(parsed))); // reverse it + assertThat(Double.parseDouble(latitude), equalTo(Double.parseDouble(Double.toString(parsed)))); // reverse it } { - String longitude = (String) row.get("longitude"); + String longitude = (String) row.get("lng"); assertThat(longitude, equalTo(longitude.trim())); Double parsed = parseLocationDouble(longitude); assertThat(parsed, notNullValue()); - assertThat(longitude, equalTo(Double.toString(parsed))); // reverse it + assertThat(Double.parseDouble(longitude), equalTo(Double.parseDouble(Double.toString(parsed)))); // reverse it } }); } @@ -253,7 +254,7 @@ public void testGeolocationInvariants() { public void testPrivacyDetectionStandard() { assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); String databaseName = "privacy_detection_sample.mmdb"; - String ip = "1.53.59.33"; + String ip = "2.57.109.154"; assertExpectedLookupResults( databaseName, ip, @@ -272,16 +273,16 @@ public void testPrivacyDetectionStandard() { public void testPrivacyDetectionStandardNonEmptyService() { 
assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); String databaseName = "privacy_detection_sample.mmdb"; - String ip = "216.131.74.65"; + String ip = "59.29.201.246"; assertExpectedLookupResults( databaseName, ip, new IpinfoIpDataLookups.PrivacyDetection(Database.PrivacyDetection.properties()), Map.ofEntries( entry("ip", ip), - entry("hosting", true), + entry("hosting", false), entry("proxy", false), - entry("service", "FastVPN"), + entry("service", "VPNGate"), entry("relay", false), entry("tor", false), entry("vpn", true) @@ -391,13 +392,13 @@ public void testDatabaseTypeParsing() throws IOException { // pedantic about where precisely it should be. copyDatabase("ipinfo/ip_asn_sample.mmdb", tmpDir.resolve("ip_asn_sample.mmdb")); - copyDatabase("ipinfo/ip_geolocation_sample.mmdb", tmpDir.resolve("ip_geolocation_sample.mmdb")); + copyDatabase("ipinfo/ip_geolocation_standard_sample.mmdb", tmpDir.resolve("ip_geolocation_standard_sample.mmdb")); copyDatabase("ipinfo/asn_sample.mmdb", tmpDir.resolve("asn_sample.mmdb")); copyDatabase("ipinfo/ip_country_sample.mmdb", tmpDir.resolve("ip_country_sample.mmdb")); copyDatabase("ipinfo/privacy_detection_sample.mmdb", tmpDir.resolve("privacy_detection_sample.mmdb")); assertThat(parseDatabaseFromType("ip_asn_sample.mmdb"), is(Database.AsnV2)); - assertThat(parseDatabaseFromType("ip_geolocation_sample.mmdb"), is(Database.CityV2)); + assertThat(parseDatabaseFromType("ip_geolocation_standard_sample.mmdb"), is(Database.CityV2)); assertThat(parseDatabaseFromType("asn_sample.mmdb"), is(Database.AsnV2)); assertThat(parseDatabaseFromType("ip_country_sample.mmdb"), is(Database.CountryV2)); assertThat(parseDatabaseFromType("privacy_detection_sample.mmdb"), is(Database.PrivacyDetection)); diff --git a/modules/ingest-geoip/src/test/resources/ipinfo/asn_sample.mmdb b/modules/ingest-geoip/src/test/resources/ipinfo/asn_sample.mmdb index 
916a8252a5df1d5d2ea15dfb14061e55360d6cd0..289318a124d75d770c4e26d429e5fa592589ed06 100644 GIT binary patch literal 25728 zcmbuF1$Y}r_w^OqahMx$LmPIQSQT1Olv(CjcG4tuQ;}^Yw&Y53sLagF%*>1_Gcz+Y z<9BA}NSYL0`}u!;I{Dpu=I+c6?(E7YlWCC2G^EsIGUbu^Fr)Ab=np0f$sy!Wau_+B z96^pGN0Fn+G2~cs966qxKyE=!Bqx!R$th$Jxg|N3oJLM3XOJ^VgPcXqCg+fI$$8{_ zasgRPE+n@i7m<9eXSH9Y~pmwCHs zdJ=j{^V5v!12g@eFE8ldDri-}is9&$?H$cBp^KW9z&Ezd6)3AxOZ$s?6NVg+Bg>;8@ z-JS67()_#O-^1%N<945xzX|#GGyefie~{WkeGU8sL{VMqy`MRPk{|#zyl5gRCl%{SQt5llosM16iVsK`;cRoo6x)@6h}L>VpwiNPCEu zKa|=qayU5x@gr%EB1bFAdd4VwIOfWram*i2P9V2Xl<}D3gC=SIWcX7US43_}PSxV3 zQJaqV8Ja#5zM=WEOr{YF89RrZOU~2c=2Kfh7L%E4x1zs@T#R~3ls#g9YFk5l1j&Nb zi)2OG8Oes^V~icPgSL~*jFC(EvR~ZH^B~@<>D$2HR`a)mzrE(~!0UEY9D#W}s0^uv zdO7S0+LdILqO7+X`8Cx2nqCXNj(WYOH$ZQs-lXZx)RriYM4f{I)LON;Hfl@BWr*8} z_HuHCqP)MA${u+Z^V-Q>aNR1}yDG}~Aav|g(hidmGOERKZ4Bx}e3z!j7`vKWLv|ys zhjyGyD2lp9Vx11!4e1!9B+`M5Ns(!?k6eqGb;_3ax}N$5awEAr;`X4ur=pDCi`w4g zKIFdSez=bFRK_1*GL7OGN&g_^AI#W86s3MB^usj&aK;=#9*O*;XdkUO>NfhvBJUie zleON#fUti$$qm#chGifdM4k+JU8Mz zw7pu~Hq^Eyw?o|aw0F?rc7*QJ{4!;ayB~T5<15K3vYM<>l=tgLd@c1lvYu?v;u;ax zr1{N^SpvO9+2b)rvdmVpja;fI^OhlgC+f?|734}SPV{fP=I_FoRnT{(9VA0!n2acj z`o<>_-$A_-pT=H?-(@o8i#Z*mzM5P^c9T71oJ^3tLFB>YA>^To@_r7Z zb~t$ic_euh$~&6&G32p|<6owKJb8kmEcZleCy^(Ur=Yx3kuFC%joRtt8RVJdS>)N| zIf}CUb8-E7ntwiHE+8*N{zY2;#qclD{7d0qru+$+d3gn|y^_3&yjoG#dkwC;mil#? zem(RXsNYE5q~+g?{982tR>s^$-j4h`ls(}gE$%MpcWeGVjK7z>kKBZ~`)NNwK1i~> zyoc#OLO!Y}+O@@Y)E-AUPtbmne2RP;d7sjL2KKW^Z_+~hv|m(|{q_1A87iA@IPYS$D00$vL{YM{%6=1 z27#Yz`Cq{QlGl9&`)8!Dwft|G_bvGy^1rA3gBJHAwVzC;i3!@jkiU|@DayA0PVEo! 
zPn7#tErqWMGN4`b|bas)Y&9Hl7An^c7OG1xzs zf@7IKjvP-;Ah#eVBDM!<64G*{$<(L7u0}$c@?N)u?V>#uHpW8g(_wFgG=u(3MVV(n zpGAGPrq6*sm-;+SpHFQ8SxhcO+v2nrk&DR^MOmMj+SVxFLfcB(NV}qpcOcHG`RGHT zPr}^JcQf8YddY3bZ53rXSdaPJQ{O?;cckVc%gA!F0@qd2u2P)L`&()z+a{LyDB?B z2tB0fVd@bwiu?}Ron#joBUh7a6y<$(D|-s&M?U69{+mcxxA|8i^&*{#v>VcXNJ(Cw zBGY6axt3f`}yHnc(QxV!G z^Vr|gzLwf`DF1q-`;cy+b|ZNcc{6#7qP+fA#NS5!c1^#7+MVQGh`XEiJ>B+!Jm~b<9~tw ztLFa(|98#*gJt{){V!z~n8-n7p5n9-^bsJ+7O7w`QlaJ#p*|G)Fijs0KXd&^#*EVP zM=N_8)_uWPO&8Ge!3TNTO)axXCbZ7Y`M4sJ9P)? zM1E#0T-4l(({ELj_k;2!w?X{2n!X+N?a3XGzoVA#gI}in85mPpMg>_(R*}_YjiQY8 z+JHNOddv+@g;kS%0@Y*m!yx0y^cmQl}eC;H3D6)5v=q?Pn{CflK% ziL?vSZb+-B@2V)v4^j&uKdk8y_)+F{key@~86#IK%JSAw>n3|pPF%}R!0%Q5j3+cb z2|dMY(wg1}eJ%BMu#Z7nk8}Xi1}$zQV|ItW2kkwz{Jp5{P3}YPi}?L$@2@x$b5geD zK=L5+U`3gC2(CL+^ABUp;n0uJ^dsRP#k`{xXX4&v8OLgI$06=`>L-vVYWXK2|77Z? zkf&<-ry>7z>SyHina@!>i#!`~=j8GW&ZT}Hc|Lgoc_HF2qJ6QVy!T6>UrPNl@^bPD z@=Ee5@@n!L@>=pb@_O$XChN6phS0d4u|!CevWdm4dhEzfHbFzDvGGzE6ID>uk90LvS?s5%rJBPZVW) zKSlm$)ITS`z;$`hztrNsg8wzrFO2yH_P1L8cksX0{2v(iBl#2Zf6nF48VUc`oL}%8 z{NEY-2l*%Ym*QX(IY?35|EvS4<&y;@f<^w|LUIT>lpKb<;j~96%6Qz@;8Dt-#qp8) z80ce>X7QSFu*cJ$KyE=!RFv0ELfmBPQ#8E@`j(nM75+5FO($oNGf6{HlskJXYO`_O z9NKep`Ge;{pHCfYM_eQCr!L5oHE+wWQC$EuhL|ivx0gxVnawZNNv>ouxqvW zI{5XP-vGZ+^PAu|YyJ}WEy^Dp(DYVi&v~7BOA)tB%ijt9a?Q`&<4VTwjQn=myO68M zUCE&0+%SD`@E%AJq+X<`(q%t%!0%*Sm!`+4ttQuy-H7j@9VZitqP)2;Qriu2$(%ko zMLkXSk!umRPK#R)e*^P2lDp^f=V5#X?@4_xa&K}Ua$iN+zWu1}kLwPgeIR)dd9b34 zKSbH{zF^*AW=tPs{zF>+!?^Ad&3}|Jk3oN&_7mijTHI5}e_HdO zf&VPyo+F>n$Kk>-z486-&U0U_739TrT!lI zKKX$b_aWjw%K60+_@5yCg!C!nKg;D0{+wE7eSAs(E5v_I`y29G@;mZ-@(1!qMRC8y z=+nVJQ~!lTAIY}-M*dFzp(ykI#C3luztBVu%H^dV@8sr$k8O`cHvms1u0;yucix>wm-oz8rWUEy}Lx7Xr!fWD*V`{0)`uAHnOE6FNF zSx+^!8k1>}K3=sbrw(an=GT)A&>Cqsk&I)QP zabHK!Zbw-?NV{lptC+Vd^dRjJ873o&vfL=*I;eM&U0QyO+G@nD(e!R*FZzx7aWa9p zUfR1U%6gK}Q`FOBAGwxXN3K_t*KMG-k=z~S?4j(%Y+}NZ7b5;5+82|TD9Zc4 z6#18F{^g9h0{WG-uOhD|uOY7`uTzxeT~F->Tz{joOV}Qne>3!3kRC<4m9e*xw=2rH zJCJ{;=HCVXZq2`k*W63qhx|>n?C>2bzCK|V=7 
zg}m31p3YrY_ze7KHUByI&oll7@&xix7aMV+E8HquTy zNT;IZCPfUH_TPu)!Bo7)7qEL=cDt=Qoal?ElN-v~0#%VjPcRk_gk$LqiH=}A7)~V9 z(Udun?5v0fd!kFqkzph{3|}UzIv!20OC;AUv)Roai`nfqTS}Dav6^j8XGMBLZ?v{E zu{N5F_eA6A4~;>n*j8$@I~~U8va%MVXjx~hGl*(a(VpI9G!;yl!-*bqC}}v_$5#8R zeMW=NA26!?Wi39VwX(jlys4qB(O>Q>Z>kI!0kh9+6xaK!+bS0tbt~;|RBAEXEEe2H z){%E&tSOJBdQpEh8t$^#t*E}T+9;~;iKWsdXojd3bq;Ot2U-k&yd#n938rI-xDkjZ z*T%xplo5<5SawYccUbB%TRrTBj4eh1rOgVf(rgxwRdjb5y1N!Vo$5<= zm_wb0V@YE)T@nx;RHHr}G2;f^F0<8!He1D9$~vebdxMxkR=d;XwxCRZqfyitj3ql_ zp=c8KX^zDW$8ujRStSGm6^QAXGMd-Q3XHmTlhtB3TV18-hEmj!^ZwokF3eKYP%df+ zcg5mC(QZu5FlyMjTnI*+RE$7ZG`=w!H_!@ETc#2lI>sueqNqf9oFB5IWjk!8rJ`^A z6-H4*D7Y?|>h3dlq8Smx(N-3W1mi|?G~OvPVD!Yg`?95DQrK)RHo-`rm4CnB7=IR+}uE4(>lYq*cCv$8Hhp zx26sYsH-zv5{sw1RoU$YHPz)tb$22Z?Ed>~sgL!<($R>h#3~9z4WbU(s6ZR!N?*Wb zL(hmgSc5z7Hly)apMxqA6`5h(Y?+p0d%=jKY`e|u#1N|n&_PG33B)bHX>(a!YPB?` zu{iqT332~&-)R%81%lC>#Ksj#B*oMW%97n?J0^}(P8{qn($S|Ty2j?Tims_`Hj3)v z>2$AH>E_kFhGS)8Ypc;*{pSLR~JvL4T=p#Jwr^5O-p@^ zm977LcGS04_{5;PTxLA1tPZguW*nzJsG$CyTJ<7Ag1G*81KBXL3u{XoHQq} zg3udo?5mg?>N%A2^hG2VzsKbg+iY1&8|HXAm<}e*$v({S>At>nBA)0;^rej6zEF28 zY{cSto}~Na1F0s~*=3|NYj=au8^oUDZ!M8TgGB9ETUf+sP{z^g#v{;a#UP>Tib}L6 zoQS7`A-MnyM}4{SjOw0HO|~^+BgXd50)^+obj@xF;-PAHixpMY=EI&KmQ-gl(bsG4 z3!*{81AU=XED{T1DpiVQ+MC4lUOts%R7u;KBn}=?F&rM;shiIy&5?Cwo=*->smEiC zp1sZ}YD)F>Mw5Kp%BM)JkcwoaS+JH|*vUar4Lz&=;1l!D>6FjCMzNl3>0tL7(f;^G!?8?ynJ0+9IlI}5 z7TC>BY~+9_I_qrK0&GK~C&X^%$MZTF75lp3XpLdZie?JSJ??CH+*z?r;y@tEq{HQ( zIl(w^f)Qscl-aUdIu*gVsNv=o+@8OrFW4;($Js~fhU~dGXKR)((r;i&}KQ z1OIvdTTnQN(Go{NM_XlgEEO~YLd9`d95{`p4X9H! 
zveXs!+8oYsu2TB{UF$49>};M=x7f%p6QQU${L4e4Y-{_FTCDPvQI(8t(9h&J z3xsmh2X``6w%!^FTf7d7CEMRpaQ#PLvj^mW7Z*(AZ4?*GF&xDORyNP>4cYZFmETX1 za%cP>MRtT7-f;Fd?Ox&kS7+(t^shyVrrW(%o89AeYh_9y^uL?_-}jj*)E)g>p%DIm z3-vfR?@+4{2GXP0sjxGMC;z+%R;4(vgo5!k@;uiVfL8`jo>;_#f2s{L!HaF*m2E;+ z7*G?2EK(H}5Ak3|3=xZrWxB(*?D)wtg)oo-6o*Ea*t|pos&ODy+hO}4aT>+8qQA{; zeyE!yTjQ|WqhgtfN~LWdzyPCCkH;#WBdyCsw?(57c_K@%!(pJXF}ltWr}aLZ2E>cD zxC^;);(pB7E#-TlY`9hQV@T^v3jK6uc0+Z$K~3e@TycBFP!lf-xv*sFd?3 zcbJ}xzIJ=f(L@4%;rz zPdK};XN7*Y3_0N9VDAvyBF?9e;V>47r_6YM$qy6lgK!+ew+->XQG&^jBQaj^)e*m< zF~Id6^hO;Hy%Q+((+TRaGGZ51aq$RD?&1*}b7gjPgf?qZR0soUk~rPFaX4YWhJvw3 zAGeVyWzpDb@j_T8M6rOxqQV!%XgnNMU(9kZD*Eyky9;{QZj~p8tliJ<;*Uo9)VC*o zTC#i4lXAEl!uZCBw?0Iu*~cMojIK``)mZd=TlI_g4+I+Jl`S|J+t5GiUcy$VTkUjV zoSJ9^8@pVrefzdf)qRR=j zq`b!8C_W{Wl=;f*%J2$=78GF`N4>$IMccBJ-_MxH9SgO(tayo*Xa3r?v0y65=X2fC zSTGPwFIFGrFhfi6{4I4`Wy`X5KP}7F;So;+@m^7!e?Iy?|61Air8valtE7CqXYGL$ zSv=;;|0>Sm42r&&#R>cWTHI7sM9^l-&Nf-55c-*InP-9BgGUM9ryAp_l1}+e(!4Q& z?+lLS=B`*W+T7K>!KjF)Vx4jH$Yj*%^18x!1Ul`i6*>36)-6ACSBP&!s#6-03A`$$ zu*-@Fh+DcG8l`m9wYRX7O54Ymn#Yod54{Ykcmi6)mQZ-6B3x`|!hp z{EnHKutEHOvb0srV8fT}#Bob}u$I4VIaL*Jnj)zT0HJ0OsH`m zqcNF6u@*Kj6vF?x&`Ih}T^$}VAo3=KJ&6SgFG%zKV%ESQ1HM73DTz z`I}%~MWESe32b&3n-1P|BECY254!3#%x!Ua@lh!E%9|DbYZ>e3S$P{yn_KLexQ%A~ zDvGru&K9CS#9g#hG&SH^V2BTE{7sU5k9s36_C4)>`aZK=;TJc|099pcZxU6>-`T=J z?T3lx793WUzUksk=O_5Xgj{+KyfWBxXI9Z-DfBb{a@CjGGc69_19uAFM&yql*+#dI z!EcAnz&E1osY?D-Qesp#H)kh=y|lxjZ3(z9Aq@P!9Hrvh8%Cl@ zeA>rn;@m*APpBzr@RzqV8NNWkA84&?Ecg42%0P1^Snprq!=kQiF#L^GO)U*ZeMPHS zBVwb%O}O!uCG$#`6$Uy#T-H+Yo+0k4p}RwyrC75g>N{|TQa`w=ACFZZHS~3-@e3Hv z{6-}nKHahGy||+Ih^8+YDGX%E;1OWA$uGA4R;(D_NDLoTV?njc$zQWDn+(4cv1Rp& zAI(KK1o3^)7Q$F$s?G`nsakx+b=l;vBGqNsUK72db?FVL*4(!SKO{6!5VJH}A0FVF z*C&O6)F+#2#qLyAC4NT73v;}aA9w4Tq?6l@MMJ_Nyjx_RL|I`V(+)M2x*g&N`!a0C z;&ZzAW?;tmKS!+?SU5)6QZ~uvwYqIqts)8osR&(Y^EkziZ)E}0)DTKWBlx})v8kb} zFU$CbuPj@g&8~fN3|eenDGa1K*)Fla%dejeqFo*G+p7BVnJ?O=1iUce&mZC&)Q>=T zS5BClYSiZ_QKQ{r%e|gvg@Ftr_E!9YVX@)c(1LC7pz99eaE-72P4&J8qqiGBcx8V^ 
z6Y~=vy+Tn8C7&i)VW7*@;=o^CpuJ*(Hm&RJ5g+i@iLcCJK{%S1H8&XIE1r-zODCaL zr`LvW7~;T@wfot`v7)xji=n%Jw*yydlU8Zm83j8x{&Nj7V!(~B*j zRBI@OfizUSo_k6i;+)f1kI!wbEzyoqg6Asx;_!$nTt-hoYz!64mI4&<9GkSFt z`l&Aal(IT44)LQ|c?~8{B81-UG8|P+Wv%Kb2{C6}UJri6(z=pDKV6w=k=0ge7hNg# zQ$N03u8W4uG5jRa5U8wNR#_(JXSe7*F;iUL4jcZFGllj5XNtq^aEjj{d|3DXSZ{Ys z{PhavWUaq>C(M$n73EFrGLILXoO@ISEffaW9*4~?esrJfS4TShE*MHC*5EHC91WNf z(e;~s>lB5Df)1zlM4-^m0Lk9N{({d$nSIkANu}fBX&_cwStWikY^pc3S7*_G__iCu zc~$lw?E&^5Ue(-U;)xkwkH1Pt$$^rUEw2~uvS{eP=6uv^b%ymVn?gT(Xl8~uU3RB< z=@LI7HLZ=|iz0qNk4M94xs5K-mCTRmMQCe>*XDL=(^C2UjG~fyn3ZC!p`x|Xxcn$9TGl=|Abg`D)+r86V#dfn;mZEepZaYT`-;`-4O+1d zvnPs-Fwn+g=Mr5kjudslltZ}c$IjMKUNCbb6Db9viAmRjr3gw7q z?0y@4`4e9?1lKpj;t|oApR5ew?IpQkmHLauRXxGgiR3EzK_U@9H}CJcv3M@`oVrk7 ztUIy_e+LxqD#+^`yKILB@!NL@%WIYR%g&{-Uh&ru2L7IORVvui+Z{Fc^h82+k!UKM zjN$l~h`&=7jsHhjZLBwYABBEAnL7J|ozc{w0$frXPeh`t9*Ki7#=<$f{Iq JWAt3({{cz751{}6 literal 25210 zcmbW71$Y}rw1s5~wGFi4w9Ph66UT{FF}P@s46+m1!Mdp|t!>4!5JLAF>d1s2Q5jVu;RAz1`-OZdg~OUM!ANOBZ8 znjAxJKyFB)Zdw0WavV9HoIq|&ZbD8ZCy|rMDdbdg8abVuL2gQJMs7~dBn@&4au$he zm*+d1+=`q-&LvC9d1M(mpIktelU8zT(ni`z2k9hTq?`1RUeZVU$qI5Ca$9mca(i+I za!0a~tRkz)8nTwGBkRe9WPofS8_6cJnGBLGWGlIdTug?@HnN>uLM|njk;};yYYw1Ke2PJ~CjeQ*T7#(KRd4np`F0|^{{tCIe`A|u=mi8-;?@YAzV{vpb?97-NW9!?%X9!VZW9*ugAQMPRFSn9`-$20ad@=m075_vLX z8zFWI%Bh-v8vN5W{|p{;CiJs3{cMY6RCYd{i+L~^@$*ogLpdMiZj=j9uH*3+k{3a{ znD!;)rHb-8E<^ls&A);{{-CBmMEzmtkI;Tpi$4baaq3TyPm)h* zc~2wn8S2mG_0jXFJx{)Xycbb^M!|VoK1O+&u~+iPSzd+yn&!U_{|(K56aHJ8|2F)0 zH2+=r?`i(~%C>v}{X>+Gls_8lOcdE?pJ@4?Qu~bj9LIb?`%CgGMR}~4*WYOVx6Jtt z`uDVd(BeNr|0(Z}Mju#y!Mq*8++X4UM*Da25Asj)FGbOQ0n#O56clQD5&UAtO7i-c za_A$ekIL%>qoI${{0*41At~A#OM9G_H=f!A#5bnB2|1CRq$tjFOh59bpe#h0n%4`a z!Jm$@fXB>$y{Q)8jQZx#XVNyb_!iJ-QQuP2XH(k>@j0~TYVlI&oT~+8u;;7T7_1jj z#5v3U75!$_{H@{JP_{#{GsmIDozPvH?`Dn%x>wVE@co)!!MtstZ>#JLvVF3>mbU}* zc0^t!?JBZb%d0`WR`ctaQ?Izeh4cf+YtZr<;WweQp)@lmNVaHst<)Bgi^-71vSGIG z+NmuemnzDBT}Ev=+F7CLD{*`eV;zXSg0eHpF(|80dKudVc9?djc6ok85{(8;d 
zjmPXx?t%E8%HC+QmbW+beVDVarte2>fARqGK=L5+U>tu4?L*1K6y^0CPVETtNb)H1 zXp3c|>^L2Zy6#0e4&^44<54a^If1z+k|&WTlcy-k_D)6qX_|jJbIyQ%rly}o{cQ3a z@?7MdNBew5QSU}~QOnNvi;#D5K3;GM{7W_eGUi`SUO{4B6kMg{T}}NO@>=pb@_OrmpOK#F#a|94f!qk9r-%&$SbR@3XK*F#@OJD|lIs5K(qr0fZ& zQwwT&E!0}cMdV^KgyY+2w=2qiSOR@1^<|pA9Qq2)UkQIF=5>%eYw=Zx@1pr(_}Tdr zVSbb}$u7kSTt|gH)MMmo)UlR!FByl{M?0Y?uOmsVAMssjr?fcco6O6Q1ISyGk8ivl z{<^$hILJJq??!ufExre}JrUna)Ay#n5A=O$@0X8n{0Z~}s2@lkL>`>aD?9{whoYRo z*kP~_r+ox@BzY8hG3#WD8^&(Qoc znRAxKvdKf5eh%`^W$ZlieDVUs?n1ec{zb5_L%CS_^4u-_+u7QG1(whkO_L?_Dft=lmm>djup0aVoCSW#_*dlDit^ZR5dW6?cgUYa`+Mj=z_)-uYWY7g{xkGn zH2qijzcKbZ`G*$&6Y;2Ihg>k)O0r9O@vPfj2=R+RahAb+CrCm+W6WO9m@H?vV`8&d|)ch)CPrZvV6UO>|u zs5g>Lh&OBTAp91_TFFJ^VlqUwk?rIX#VM!JUxvEwLs^b;4z(4qSE9t}?*zMp_Ri!g zau-F}FJa_$YJP+{QPM=bi*`5JqbQG$A-KSqX@iny9lIs*@{vhJ(sqd!gyIU+%FQmRFat@`v7wo;YynW#BtNHseZ-3|qX!?QF z4}yNMrXQm0X=2`By%wE_a<~>h0{)Sjf0VL|jwX*m{8%l19QEU&pODw5;T(%j%KJqp zGw&4Wr)qk3j-1Z;8Hk@r`z-Qo#c40nKUY!q=Xuo5$8i^+T#u6N$BVSQiy$rzE9Tsw#c!l`6Y_4ReG7Rjc^i4VqOA80YIl-% z;rP3?_&xCN&HK}Ftwr}U_kp}#^dPl|5Pz8VBjlsxW8~wCvi&EhJxM-AK8<>w(ej?9 z{v7!{;xA}%jMa>6zr4(xSIAe%*T~n&H^?{f9yf{hTd4PKly6bqq4qBM9{E1`0r?^M z5&1Fs3Hd4cnWDVzTZcD~f~ui$^p*f)872IgJScR0^t@cX=8^aK1KHUB3b`!n=k zH2qijzcKbZ`3L!@miL#k(Tt_IfOcU%UR-3cY&t{pOPDi)97&EMN28uGv^P)`*I&FL z3MlQdnm!Ktc+H;xe`C$xgvU(8d2kIEPex7?$`q6el&L85m^TgfbnSSad-0}>Z-)5h zv}bC02J|hc&my-ZXOo<7#dGM-Rg~9MYO!plw_Aq#=4a){=E(J-Lt!kPT#` zqNs0k%%kFFlrUpKvISaeKCgHY{Kbrg$TqT_T%st?b1CwdQD083AXkz*ksajD%BcUIa_h({kijT?r#mDlPI~n>ZTHdMD zPa{uPocSXCGm(FmvgP@l4gVa*&einusGX1a1+*_DFCs5il=WPKyi2KHMqW-{L0(B- zrD)8je+_vpc^!E@>U;*}2Ib3kZiIgm<2S>8fc7o0Z>4=3c{_QBqCEaiYIh<3Zrb;d z_iB0fA%4H|4fdft{z2wGL_UnXN3`P}h5s01kCRW3Pm)h5%KDzRShm=d`m^M7sN)@! z=e0bn1DT&4$Cv28OumBrSGD}tsJ{;V4cc##Z)tgND|-u!P4T;ozem0g?JJZIwEPd@ zf5iC5YtNekY6g!nnC|-@*72Yec$5v?==5==KMhZi1<&mf7bGTf&MG? 
z-xOzYofZE<{ZH~Q#S#lyfPJRdVks#^!LcPpD43Ha#VBnkNGzFyGJ^Rd$x+az&>l^W zAvYj5M9xOE$CBg7@rvTQOD0g;81^QbK9Tw)aEvU~TwM93Sm6*z=Vw+gkv?oG~l8wHCJ_Zl~_h zbSE_z=_Wm-S8+?swGuzI3fT21+tA;Z+>YFy+(A*E-;UHOaa^NVGx?e=Oj`8bZKY(%r{TmhKxZXtVX7U!~-Aem5@^(d;e+S}sQooD5 zTZ`XA?Ox>FNBe%oxgTnI4>I-;@*dXoN2osv{W02)Yw;(bKS}*5@@euJE$>-s&msSL zO@BezrC4hvFQI(Gyq96WLi<(nHS%>udHru7?@j7&k#CdlknfW3k?$+Y<3GUhdOv&w z|6>&|{f7CUlAmeEeU7{@H2+KHdan@hbE`aDf9gFm0K1!OsCCATJRq+L;-rvt}3HQ%M|GOX_r9_D*V zAL&<=$5&9>hTN9i4##h=e<3BX6Y^--+=Ka%aR>(cXm& zlbwq4JR-=8Qa8yiE#8fI5A~R)uU7W_PY{nY-Us_0lmz`G?0snW!`@ZdvfUK?wB~2v z4={HPxmJs>qc(`V^|W^*cPIByl=bgPZ7<}buY@Mb0`$pQ`&}H5N{D(FF5%`a4{$o7maqAAK?6dF5L% z7p3+h`4agu`3m_e`I@3U=5=askZ+=1%q~By$P`*6oL--$Q z{>RJ}>-|$r|BU+Q(7&Mlr567R`qz1X>v8bEW!`t>_xbpU9}xdh^M8W>GxL5Sf7Rl@ zA^yAO|Do)y_l5o!bqiUL&l_0?yNG@9z#ggjqhODwKSt9xfW4vew|+qp&1wIA zSRGC$>MKJ|hu7z=k0b{Ynbcrad#Em&>TmJwL$@>~5lEs-y+ z+9jaGqpYR=A4*`U$7^*stgD1dtg${?;>r@qD z^wx=vGs>1$C)XL($-cgUL@W}{#FB}$93vyJ%;Uq*s8gi*&l+7GpWDG2V|@`-V{4$f zy1cpvwRI;K7)?#pxLB{t>acsQeq3$Nl!LIab0Cd=O{djJxLgiDDy*un!bl_r!ik8f zD($cY+be_BwMKPIb8~wzP+i#;XbFalra*I`t+oau?RQu)!gj9{ZDehyQZf$7_1Z!K2P`}k!ZzQcrD@N5X<|c~GF6zuXs!}nk z=vBAV=k};R2qadQ$JVIWal0|KPE6<2 zfKj%xr9Xo{j2qSA&bVm=)sjhz35RoA>UUdR9;@FgYR#H|KQEuhAtrUG9@n}i8I73< zc4hGr>0y{NnAw<3p=5kOjeKjyR5QB6BCgtQvsoST>hm5Jl-{SfHjhs%%;x3B)K*NQ zHD)Ruj*F(OeS=s+o3v*&OtVD7jHs6MUBM`Ao@a!j39C`nBQ9>G-R87<{1{BpX`1to zmYpu24J}uq+gfAdV!BhYsBAfm4(zbF>kVuNOIsE<8ZE80i#cUl+m;(mZJ08$8N1DA z#dJ~4NQc{loHSzJ@!G|dX=pX3)+aI<_F+M=t?i%7raGDGPp0G=!noV97@cbM(8SsQ zW8B?7zaMok!M;~>k*Sn{%Ob9R0N(X~#eP0r9- zj30LMSQwM0Kb#m;ecak6B+u@m%SU+-1-DXa|Ui6%W*1H6lr=Nt@$z6xA6 zo7;h*6q|KJEYXz;$Hi3`WlaI)a3e%*=n}CliET=A^0lE~d^Rr!w7D-9NhQR6idbK#ob3#OB;1+XDRL|F27u=ZHsYBNe}d!sieHCsT~V9yS!us;$|k6 zj`bM}W9$3OOx#Sxj7n=ru1+szq0?=}rHICqgSPX7fNSu(-C_t@8;z-r{h5fmr}p<^ zVivZBd&REP6zhv+%&5_fbqintCymy&>L&EL1MA<87Tn^7p*eqR0sD?v{Q>Od%@H#m z7duWQX^mk!?-*IF1fwpt4s|z)TQ??B{w8NMSzC?33Wp6hO(!OkY)9Fi?AVIwge!5| zY~0^l#3mDu?K)s4I3A0{9!g~aE+K;cS?<7=;==`q?#bJK8)28viQW;<13~#LBW}KY 
zjOi$l&oPF$$+a{axREBpc8}kv3P*Z7F?&V#IB;jT^Q>v-2b-tu!i`Baf!-0@yx14y zgHp#>S*cN*UEfAH5j7T@X)~OP^knZeMm4U|Oo{6eI~i_);&POkYiz5p)J@Rg@`)*1 z6%>P!NUlky&D0v+x`RRZA>kWU)+Okmj)@MN&0gX6L@O%LK{TCRU0oF@|N0N_;bJsg z9&xJ-V9%&CV<~IQ#O6~QOQHL6Gb-CcXQwmUf-r|_A-jFJ>^_$m@wdgbn3*oz(vzuf z`9QcbDE(|7rHz*Uh|#(RlUY0lc1)936YX+Txa}@mw(Xq%e_S00&4JBabYly)5lk#I zkxW?=*ha)&C>$e*Gck?I5E`2y8$)+^E3p0KT9bnJKl)L2FglaX`E5?onRVC?7A4F9 zYzGN*9UcL(j*|V@yn>pRZ?V(q`Fo2(7^+3_2!`Rqtq@!O?2|C@aee7bN^ZqgaldZC zH3~r+#>w&$eQrz<&J<}6XCO<9D^)wC#X+O2w0KqS{CyseyTYgSm=rpP*JDnnLu@)V zn0|7Ada-Jwn1Z1ib0CwB^q9Dd#7(I3CMajRv%+rgQs=MCVV!uOBzU*uy#b|D!suOD+jnov{DE)$xk`QBQ78 z$vTBFoJl1fxBNc8OYB(0J$nWrcZ96IeR!g zDqjEGKCy?2^Qm7QOY#&fi^a36c-n5j2Ln6Ck2xh2hLJ@r32G z@dm#T?`b~I-|p?r$<7$w8vpjplQ!y8$$@@|Ry>Z%*GM_z{S`iU#FbnB;>4sdto1+o zaP^Q;I{FmM_|fNc6`_i7r#`1B{MQKoZKL|{IwPH(f9o%w5QfuVrTvf2c$3tA8KN8FE^GC;M^>o#0W>U$xInak!uj(EX5Axz&UVlZDTSK@a z;`Pc0A(xHzP$$5z8Fsw=+Hf-xQ?DVKz=xq&M18d?5O414f1_Ny%wQ@StplC$Sj4Eo zM-#Qtg)7X6J*sslg`v8WAI%(2zg@JB%P%ds8PD9j?x49YW7K0wX5WOxQzRCsQC+(j zZ{rTO9;t}fO;ek66o!~|@`1Bfe9Gb5Rv-i)YK!D9JSPls z;qs9U)6VJl+C6+bY>&peW0`PV?0xC}aKucj?I%>*AB(j28w->1D88S{4|=lOMPpqN zfBx|&Ck)kkcF?ie<2zWg=>g3It_P;6`2?}1oc zqJgL|MA=@>+AkaC*9Qd^H;_9lr=S zs_~ujYGfUq?#|r9Pfi%ljS9o(@VI>rF?{lRqU!6XSP@onJ@`D(h2KW-nJk41S!`_* zmonMm!!mS5(1&;+%-KUtOs)cl$LW$Ei>uIqt$pH4O7_vm+KDEYH#cU#dA8Ly)rwCD z_}E!#G~?G0tU@2wgxz8HA>#7aBl7?E^S5&Iu&W^R) z;m+S{hsAxP=1YAr)>(Y^-V2Rl}*(x!C-B5TlQ;27z@;H+BzG}nzb5Vb~>}S)q<)N?k4G;>=ep>!Al#h;-@7nC((|>=8-RlIeR#Li%WES z?D!Bj>k;ho_?VjLHs#MjJGDsLXidh`z1V~BQxQ~A>BIShP zOe8eybl{sF-kI7kq}&F@tvvf_x;?uS2Be7Hqd&V5ik9$AH2;d46Na@BPHJGYrIVIn zBb*e^U5~?O&p)xtN`)|->%pDGg`Zi)J{ZCcvmxCtf2OpiamVbi)DOhF#9gd^K<X%97d_%vj9tB>5``&nJOD`oA%2HY_&l*BJB_#v$hlPD67rA?z| zz`&buEGY)Mzejw;EjL0q3Z11UlcU1tvgdDY;zXn{tP!0u7X2>Hp>)b-7||(nv`W2r zsp0)nRSIF4y^7yi8|1F8Qe| zEbgwt9?mI<$1lIjC4K=DXRr+OHkw&$rh3ito^W5E8SfmxS7-5YaRu&U3>gb^q3kJ$ z&B%p!IiJ3GDGaCc(G_^AcX{wsHfJfuI+?KJRW)wKye(^MNf>s#(!}EiejLKDn;H2# 
zoa}jgF?8A7;)WK+W2m%;dD(7!!j#WUm|D&Ksc^TMDvxo;54B1ky92&ONAM=8`dM70 z7jI6H{HN!fFw`R9YW3juE56-|-zXOKBvY~VNxYh>2haq5G-_L@wCt@W`)h@0I$Yt6 zM6f1vONqizONmY6_kEZ6ja)QcA9F_4D^p|4StBj{EW#xl7ES2mNMWdP6z!BwuR{}+ z=+n~a?brvVlepZ+Jd9|#V^+Cc9>3y+m^U- zm{|XQ?b?(-)TN0V2iBik{c2lPiS^gol`^v*68T3qt#y#(^COwGgte5H`PjXPNsj1{8Z)B=@v69^Y*uMpYaB%qcE^{N+#L ztMM%nzyD_Mo1qvssZ5M#g9jlKFWI^2Wv4Kl-oO}m@ROJLlb=vEHUs=lD_>{$M>Gp# zviIe0YuM~jGk$24zv<=dVQxA2%Ht8O$Q6jcC)m|0mVBTUmbT8(fU655n_K5uVK@_4 zJOugFyP23zfmnYWzfaX zRSJKodB%rCxB5FCydX_&!nQAdxWiuytj7~-5YO7;7dt$S@vHb`b^Z>AU)&Ks&Bn|vU=BjkKuRm^D`}(4t zjZrflNyYG&W68ujRc7Kp(i&p@*_O^35s2fxX8r%kckLES6%j#9|pb6aEnT zL&;&}aB>7Wk{m^jCdZIt$#LXn_vWxcAD%{l=baOEkkC>9FFVP@&@23DC4J7I|K1EX`iLV&sO$`n-M=3<#v?w zn0G$xt5GhXe<691qOAX7YL}3gl9%DQ%eA~K;9tquRf;1a$vUnfuO+V|uP1LHZ&Z}W z+=PSCW?@VJ7WlW)zD;o?=Z}ovf%u(T{$23z*8F>T?7h(M)AakPKVY$pRC&{PCr^#o?XUXRjWxJoJ_5$j8k@ibk{AFsdApWYdM-8X; zI{60qrlPF(E#$wg`R_32UGhEheewhHL-Hf?W7PABvSq!W!vCz`k7{Jz7vz`ZSIGOi zkZ1V@{ze|9s^(8)-gI&XITLxaPz?IBVb8HxMsEeqB{x?T^-6u7%9S?OtfKh^U!Hp* zG!N@5g}sP&8M&C;lEk^>aZAXhq?O!SQRdjF*-@`U)1B~LjJXv@_iJ%4;y%XxWI4GF zxh=UJxjnf9xg!}+lj8_mBlg!_d}j@HQ7$CQIxT@IR8$X-@%-np|7L8ixv+3} z>_lFdvd65amVkB`ZD>V3D7&L1l_uNM%XkWUns%QS-<4Vh@ht6}7RS9W8lb*G(>FpN z)cj5GcT@hDTbREGxhJ`oqHNdR)b=6w#qs-T@%`Z+p!o+f?;z+0(>_FtAFAxJ!x29m zqKZQA`LO)H@PgnL>jJcvS zHT^93XKVgB$}T#WJP+~nXQDwGSg{EOgU%=jgmekt_JH2-quTmk(`WslX{eKq6P zkk^veA^&>XH;^}yH<33hihdjWGV*Ul`3dDVl=qo)J9!7RJ5gRlxeMhX>UYDwhxWbX zedPV*1LT8>;(X)0)E>t1j}-KxN8vxF`H#bYg1JwUPmxb+dCws4S?bSe`t#IYP#lLb zS@aS$T(fA495=5(f0g!Yn*KVqHxPf5_FG!~ZEEk3?;`KLLVO(Tq7Mpw(TB|Yi2NAw zPqg@_)IWp%xu$;s|4YWc()6#Pe?$FSP5+MC_lW`QtTx0=0>VPtx?s)TdZ1<4b8zLrxjWbow*2{F&5dk+YFEM~ly;zBy?iK2M8p zL47{-1)9DPzIpydN*~|H+{IenmdM+R`Vw-f7Plh4wdUKHV~6gb?Sx%U+eNxb4~cnK z>_eYmUKINk#l4gMHY!Kj+akW5=5JrfE#A>$DOrhl00ncdxB}&PluDG1C{;XHHSA85 zWhkqt)xfT$T}Lig6zwRkM+s7IAREahvRP5)uRwka^^m5wQbRj2L<-}xjrwXF*G_v4 zxt83C>>zhml;>MVZ5JFL*7OMcC}S})t~i11lgD=4;0y!LFk8}Y@)xLqKxfMZ4Yu!TKovakEDJSiE$#^cr??CnaY;+pGEy_=;zQrSBsl{d_LN7C2}qx 
zFC;HglyzOg<1ZyIBQMwLy8?Bf9+5APxeET(ntu)I`+>3R$m_`)$Q#L<$eR^qeYc># zTdChh-cH^@-bvo2D37}v$K6x#C+*0b`=H-X`+-8d_(5t9kq?uPApcR?kCBfn%JV!y z?Md<}@@euJi)9k;iOhcv^*xVust9}m>EX6_hrtQH?fZ8PMJr#*q3s5lwptz;6~1C|2dmdbVcFM%oc~XmKZem*%_SdzkB0oN_6Bzm``{Z5!lotLfWO z-=5q7@g22z0Dgt$SHiEdSf=8BiuRf;Sq7cswWLRMv@;Zam8^}hoiEKvx z3T4au7Wg5}Z&mixLFlV^OdEchO2J(ZZd&b56XJ<)00XQG;MiZz05_srJqKAAMITgrJkXd zC3DE@r#(P!P?Y%_EtYBK+}y@2lziQQx0D0PzEr zJq`UX&wa3>v=2f2P?Y0P4%76*nR^6zB;rS*98Di%OB_G#8ED5)JGP+7x{s%R0(l~N z5_vLt3VAB(I!)Qr4U0vTlG9PnC}^_Yv!LCMayH79JoX&&T=G1`&Zm8WqCECOY8R0g zBkvO0muh*JLBE{(6^hd_=4BmMX?a&uyN0}$ypFsc_1r-FMnzf2P1J5CZ$aLzv~Mfq zO-Emq+)?mL?qtqgIL#69_`nW`w_|;^xssJZFq~? z+vGdQdsmCUNBw>1AJG1=5TAj0UGg#YPYQa;r_??pKS$mdTKr4+Uupi=@W0XgZ{dGe z@MoZZN`An4Hy->E9Ij#|KaoE}|AqFih2u(ogZ{hb|G}I;6^B?*R}t-EvV=rX^wE%^ z5HjvNp5Aa z%;fzZvQ*2nGQKrwBkicqLEA~X6lHtdhfi>8O+M;ME0dJKA;dMDXMc58VF zYU{}!9GBGMy~>^~#%CJkQj|U|zAO9;%4sNBltWQ+TD)J`Lk6H@&J5WHWuq1!WPB63 z8{)fb@jc-0$=F`x-ioskllNp_MQQJc{QXf5K{-Iv4`l8^^GG?hhtG5e;DJ3 zlSiPQBb6=VN5SV@lm5}n!&n(|EbZgS0vgP@&qkcW~8#Mh!_%|_jv!>rd?N-EZqkX%@GS{zclSA%AT+EFJ znR7RJ4|y+Q_bFSp<9_N7D9**akorU9!{j44?onmS_+!)`hyH}7KMDUS&3~GC&nV6{ z`{Oz4&yz2ZFXH%@XunLpLcU7AM!v2n+w%rBjM*V?(SBQTbKEPL^RA+_-=p?E@;}h@ z52=4devJ4hv_B<3Q`|g`*yku;X#STLi_qmczE+g>H`Kl*zr%6gYw;hb{|Nmj+CO8Q z7192M{1x%va1Qgn{!aZ55@U3zr4Sc+LyJ{R+9fJBbO>}Lind987&)9Ap(x@*N2xq% zk5=~3G0?}-9!G9Qj#rfV6R1rjC*iosT6_xisnDls`gH0u6b&3RbQZPQ7RxY+diT!Ne)+Dl|Y8G<`?t0nM*~Ux~7exm9GfqS1j^ z4Yk^WCa-BZVyjW=84r>TWFy%`Hj^t9WgRWlLa3*e_DU_j3VNIJjb|8dC)bc`$(F_0+zt7=)82#JQ&HBt7vg(U--q0n+>hL!JV0@t z*+&OqUZJmt9?bY5A@5|`r)YVnLO+fA>Es#YndDjI+2lFox#W4|`Q!!Uh2%x##pEUArQ~Jg<>VFQ zm5TG8q<^)dy#8ybT}xg^UXSa!LCd=l{!N;HGyGeadnj?ckT>SLD~^H{`d9vYp?tj_=7IaQu&2{3rN7^SEEgUkh<@K6%dH$v=?) 
zr>aZn!z@Z4Rzwz)C5R2t@`h5!0mFtXeb|UXT(ouAC=`sPVWSxzqbOrzp^u}!89APu zfLIyriLfWpo=i?rl=VzSe46G@XU+`hGc|n{{Mn4nA?K2tlLk3YQP#f&wfQ)H0quok zDY-~dm=2c0@eDcm-KWR*}`@GDUe@4YgVvUq^en7O$rkBpb*^&YH6N%oQ{GEMfOj=fNJRlck@!&sKgA>OaW2jFjDY$G{HZqo90L*DMx z_aOIFT=0UHw>M+^Ab(#?-;etK%|vi*l5ei-$`6&IT0=Sb>DY5AB# z!;YqY40$Yh9C0(l~N5_vLt3W+%>+j1ItI(Y_prlQO_3+F$Z`Z?seTKqi3&)575 z;9tnRi!}XW=$B~zrOIB2Ya4bs^RFPUB(EZ`R+R0z2Km=gzmB|~yn(!tyotP7Q67H_ zj=z=qZJK^NwL1{MQ`7IFemC@cXx~fTN8V39Kt4!5L_SPDLOx19rnvAY`cIHg;#?o0 zJVpO$@)`13@;UN(QXjW3QhSMfnS2HHzN&25AFok=oqU6Q6M1iGd2hpihp~4x{XOXK zQ~!YcP_Z;d|6}qK@>B9N@^i2W^?U*D0DejRD{wCSuaz&``wjeW;TM75G4Ffw2l7Xf z{VI?98Tr3Z|CRiW{GI%R{8MqbMX{9keRwf-%*o*;w1<#G$zdc87Uvy40%auaQRHZH z3^|q@M{Y)rCnt~-$w}m7atb+>oJLM3XOJ_=S>$YTj^d(L`kRvmIgb?Avjxh0`U^CD zA?;Fdk*1f?UQBLDZUru(E$UrL+e&Ut+DJR;0G+hOaW2|!&_mlx`bd%IM=96*ZD4P! z`P;$X-eOsFhtfh<(T>$9?ey1>YssBRk=KE;GyQesE@YUDkWrHJdw86FC)q`IlL>M? z*+V8tasFPE6#X>WM~e8av@;ez44CeRq5VDDiJk8(e89|lab4W zV~JFk5zZQQt<6hA<^e`cZ=}{JH7e6qqrRpl70+!*XL{NkR=3CMaJj9vrFqlgcE4v7 z)dWIzkH_IQ#w`!`gwX|sdO^km567T7T!N7f)IfDWkNdqBhXdT4w~LWCU|DR^xV}RW?-(?R>Kw zHowbej5CfZw5BeVi)T_{(KX?uQDI%hYCLF-&7rE1HmWtR&f~CqT*kQi?)YGDZ#bnc zZ*8sej8Hr?5YJ@MUzxsiM%HXZ*5ab<4!_lbtCUx0I;v9KGN<3|b2*H0tzF4*G!f54 zGU;$E5>CZb^($(WY*d)K>NUH|Wi>CCcD~oVULPu~4JWfbs*;W|fn+$_6Gq#5d;4+k zW$Us=XfT_L_hyZnOuE0%yeDhWWQWsj^?Ou{g>ARn*$r}#+3h|j`XL;{KpN=FS~LAd zX)qwXRm~xzsi_GUjuB#a_^b|_xV4&-?@fo>?!#EA@5?5ZMl)&E(ol1#&Zul^Xbv>C z8^OAUy4LC{T(HYyb$L1J3U>a2eQp=-OH*Ga(Hj>71>?>d9YmX&sTj?{N>n3msM}$6 zc+n)SrdYul3sHY*B*tI_q+YpcN76helw8bM!jG#4Wl~uW{ejMr$SLac-TN}7e zx66-#o9yq6bDV^N@qu{Ka2n0wOgxoCO`=I_ajY2eb~Hy;B!_>#x6vqv*J(plAzV{m zKgO(JI5lX+d^Aeew06hEO_dR8AY|UiHjm$G!)TUGG|hbX+g*N#9i0|MWBN0L$wbN= zEX9>d3PSqZh+(T&dhMv$OQQgRNmj(B&swbZC;Pn=ND@eDpV#0j5%}N9*-BD z*f@|*VBRO==tMban^!@=$|M!u5v)aG(AwqYX->W}m)GIP{92wxHzmTVZ7m_`7*+99 zZ#dIq1hx)gy!kvbq&+iVpklHKp|iD6Qoi$n%9x&Ev*y2&VQZfG(p+FM#zh78Os zD^}k~yBKOVj~H>br3E|RBB#^o^J2KA($U05)#b|@h0Xib?#C)*cUrM#;P||)ju$i9 
z=JorrKsI)#v$;%qFr195%jzhuh0-FGzbxQJ^FU54z8Jx>KEKrtvU24VTIcaQFs+t% z4TP=X&J0!`qjY7>ssQd|E^fpY)#0vW;zlTdE?X-WU(5-ybl8^CK~06#$La8Syy*VM zY$BP6;=$H4$hMY*lwyQ>22pn^+n>yd2U}TlYx`m&*ouWjR`0MotWL}aSv{S6V_j~W z=!yDVJSAoZtDYR{j;A)EpcAAn#-N&X<{BylOg}L6(Tdjpj^xAQ6ja#Li$tJef2iIHMi+#)&g}#NF0Cbw=^bc8X_hLn0MRiv=K> zwkB}O6?KhOP5E;yF`8R3tnGHM)r~PJtCA*aVcSJbJ{z9?wRW2glP8LYf5*7W?nEkV zG^8Vmq+Iy)rgBco&2`xDEE7{u4yU|l-UQ=$>;N{!s@Y-A+bQ-o_No#K+ ziB2o3N>>?Je8u(+cc3$!=~cTEvCiZ6MpoPL6!h7xa!x4Ij((Fv2)DuQ^5HfF`!eZ` ziC(ctiCtYiMu&8b##ORuxJ=R0SkuI{YF?pfHjfkAhGuL)mUd>ssh-aMOb**pF&>wv zyThrJ(Uwkjc7-u#Ytr#tcYNu}R6^{j61i~BSY{q#gnNyKFxKrzo85)mqsEW4aIV7b zvirPp(T{f}#JYw~RZU-BT~jBPHL*6}u44c?FjM7nZ#sAyt7eN;+Uc|V#YUw!9gd1; zq_qdrCD_musI0eQw8@19_eR!(4)DmEZF>6nvAcavv9`pqy+~o9mL1tKu3EW9S#?8o zV6hR1!p9|JXKGZ78-#65HlF1kb){WgqE&3aO)KBQcDK`phK90(8C+W!vrpXeta>1< z3@Ozd3D|?n9f)W-S}mpqyMUfs8~JXZ&5fFy;v4a{fCs3k(%O%8JXlR7KSc5H6^|MY zQPUH9%EGeeaEmo5=t`&sqSqQhkG8rJjn%DUVbv!!Zjje&6^~)L+s!-XW3&xKgds7@d$f@hC>Ars4hs1d!w8)ka?4Rd)h*02h5;G0`0 z^*qPmz_kDC8&LkSrK%UZ4ZBA!%&1;@<}DPPQor4fm9PmfX7T(m!x$~?7$2x>sWxyU z+nQSHLwHyPjfUzfY~x#60*%#8Mkr{kY^-apZV5Ft27+d9Ih?o}j2aATaXRUl_e}H_ z7BxFMz9oUxE1T%;OUC6gB*)DPv7}KHqcDsO6E4D!hZOeFb~O<4o>^-ZcB*0+WRu+G zx8ije+x191BRVJF9qn?xby_`g#)-8Tws|APJA)fd!`3T;v5c3PELO9+3TYdG3e^zx zNa4bW8pH#}?l%Xp+y~$}fVW+=Cc&Il3^HfV(iIt;Q537lWZ>*Md4`E7@Hh7 zCP8yoG93vgv6)WAqd7T-n`@+mU9^zoP*zRG;h3S~^3i1FiiRA{%y?Bqu_!gaGdfG~XuphnC+KYF1ym0^hJ5Pg>_3u2g zjdq{Uil-cw{3DHV^G`HN=btM&(qY5%8V^1PdIw*c@*X-g|M>EHJmM*ieR<*R z;-lJ5O<0;+TQOs>PPYc?8XKw`TMM6G);jP~Cf z&FE$H|Nc-giod3f=2i%qEJsK$%?DqjW~qv8cXe8R#sm9GRN)GGmY zija(ocw)WS*U5Jmys4SH9o%pSrWziR>Zz7@%uSA13Y>^x9chR~b4#PibU#1A?Nl|0 z52yHGS}EW8MD_V^nS*A<*fhyoFDn*~d6&?1yT^?cCYalRHi{+An%jWMF{L$}*bq)( zzZZ_hMfHu`qL;M>U@pc${qL=MyR2V)%t0@S3&KFlFTohJSTw{ZtlI9LNJ_1y9YbpC zD;l+(q*3_7n}5n-_=p|9_)LedeX45ZnZpN{;rF=kkXqh@+lD8Y=slg(bb)kA6c=>Y;L26j#r;*#DK@q zV)<0x;PPI6EkJMJ&BcSBU!F-vd(d9A8DqT#4l4fJm^23>>&1sD@g+{RQGBK!1jj)yobL`!@OEP&ArOew_Op4os!mXgbM{ 
zXB{Ien}V(SkmCzFzuOk}Dr+2gpO#M)*>GjMv}x+c8!YC3wx=-ta}&JGHq_= z9d$Y6d*}0mFJcLukw4I9^SJbir2qd_Q`F&(=`}?i9^3zFj?~a5xePg-Vi{T< z!|H&3kUx8bs@ejLMrdVob5l!e$Y@rdv)JTVr>ETM^V!T3<^6wYvN(~;Z}W-|ECVUA zFb#+&_7Hr)Z@>pgY&UBgn}SU>?f?BXzGD*J#cbvH3MbE)xBvCRFl#(czX#V9j9|{l z#YugOnp}Yw{v_7GY`4)8&&I=q20OLgO7+^eZ*f}_0CjdtLM z=k0&GV6%JC3KzZwV@;@EAKtV{j52Fq58geS8!Cgw*1@VeqpsO##j8rF4j-sGW}#|V zxf466a(mP~S6=wvBTG9MHb~;NB{&%FPUAy!RE_GErf{y?s7)uu&u;k9P3}iJX5v(y zatuM7DrQ$#ML*u@asJz&5i63{Bc@N;n2+VQOMWHeBeA9*{ky3<-EZL0BfkNOq3*Kz z%6%?(xZGoB7v}x{IbY1HDx35NPg6E|EPDt?`rV%LutN<&Sv7@!txJwUo6~{Y(IlVi z__82B&C3V-gv#2w#(?+?Dt6$@`qwA0XyYT)06H=bUtW^CM!TwsxT-kDrC;@)GQI!W z1S~uryGQJ^@trBLDV>V+XYuxyT9KT7e83m)3DL9|vYpAnNZNc=td!ES_sh!j_CHl@4qdT@!5fd* z0B7)VO$@dk%zyl3TiP+VD$}3M4Wx}tVS@o|GJEig?E1LTB+^aUh^5omW~RIFv-#3B zaeS+iEehjG@h&Y}l(+w1EfTlTAs2h>7zXgOLA2Z2jW57!>gXADm3ZY>-{Z4JeP>Qw z{v?MuL9A2Fw!HnX&$@ZD4f*r;ye(0|EL7kqcX&Fzis!K_}t`028Jj zAD4Ru#UrXye7tKAUYpp?X0!O^i9dhiR)uYy<HBhZHr5Td{Aj`B!6>MFNmiBYF02>(1wFMa&WhVVx5Q-~PHAFstnqHtz# zo!Zu~>kY3@XV%H@+v(KV#ea_`QibSQ^^yKWGPW+>myUK1DefBIwnIZKoC`i;29bph$KWOKi|7v~hmb?b zVdQ4yaB>7$OpYW+k)z2mx=9b|C4Hoy43I%GM25*SavO46ayxQ+atCrp zGD2bw$azpsR*;n>=7h|zBCE+7vX-nPqhvkVKsJ(1wo zYsr5-iD=sIpT;TJ%`kebS097w2`qLusfA4`=X0_0{S{lUr)Uo zdXJ_jsi&a#(%x0ePgBbvKdb3|)cc`t$m>JVXO01+!;m&HW;b$oau0G(McMAX5WhF| zeKdVvYWtDM3Hr&@Pa#hwPt)Q~hy5+}Gd2Azls}vLIpn!o{&~nhpE~YE-V^T2aUr#f zU|+21m%zVN^Dl#cxx+C`l(|At)FJ(=pxw)Qt|qS`uO+WT%=NTyAa5jZQj~piGvaQc zek*yKmVZ0)@1TAsc^7#%d5_{SdwlMren0tuqP*UN)E*)qCLh6d9;N*l`8fH6qAdR; z;+~@ZH2UKM+Rs3LHt!4F@jTK;NH1vRUu67C&|jwg3i&Genxeek>xg?p^WS95ThQOu z^mnMgOTLHv_i3|@vc3-;j?MhkKSs>&NS`QQ=6?$RGtK{;v0sp1BL6GeUz6V`%JTMH z{SI;8GyezjM=kCrYCj|H7o^{iens23#szI#*82zY|J2I=B_I2ag3(GVD1bjGpI=Z& zy$JeXO&>yiC^?MW3~|G0k06T`#q|qDQX8eX8OE<*47IW3IC8wAEH{DLM8r>`Jz2}& z9QqXMQ^{#sehKoY=l$VZz@LeP{wOez&P1Ap)P^)0Nwjqil8<%GCFhaz$pz#>#G+2o zR@vSy;crEI5m`z)$*mP-ybJMe>K;w^Ivm53)H&Z|e1KYz4556OcA27#+lJb<6-h+C%l%URC~=qok775*ybttNNU^4Cz?8F6bheHZv<-XDQJDTpH-fYgrE zhh*jR3p(I;YJL~|gyye) 
zn5G{N{Rkv+Z%1nSQSgsu{4wOQ7n}_J6r?klcPe=rdAedT zVnmW{KMV1X(LNjYIY_tDKNt3SNLSE5ANB>xmSr!bei8JGXuMZ%lji;jJX5)oq4_BF6wua_mKA@ z?mpW0E6REwfc~K7KLr0_#yz6xk2)M9F($IiQQxEQqp#HbC5)>=Ts7?) zEx#6eo#sa!jxm@^vL73ew*jdUDNd~k_EOr-TDchX7V68$X!AlL&w_5`5o{(HNT5-3FzxIeLehc=Jk+CGNr}! zB5qgeq77Nv8O1R#LhGZ}pVwsDHc}rz%ptTlk-L$*lY1a$Pi4!pdr{vT`aZPx)$;eF zwm*3Q;tr&JkQR5a!!dRk^+PrNFlvXBN03L7N0CRP-eYJVt0>!d9JS+7zkN?9Ivj;> zBK{<#zmQI5%qg(HL^>7eW~9@opH7|u?J}e@>7PZOtthW|4z+X1^ALYN?F+~Y6=nQI z)GkKcB}kVle=M#ql8BLUmm}{A+E;4&RnV`devPJI3;jCHzn(ESK);dpO^RdxpnnVU z-axt)=>=-H!M>gL9k3s!eJAX@lr7tLH~f2;cQ1J#dA}C-0JR4Z|B$lB)ob~WF#l2V zG4gT5KcU4vN&PAEY4RDwJxlvJ#c^VspLaL}bNwmS!%Ld~GRwR|zKZ^AYrqX@5d~N`9s&`}cEdUpO4&xqhVn74@&l zZ&2U2w7(<2Cx0M+B!41*CUI}F&R-R!{Tu50UGwd^@~6_q^ZpARSW^W^Gm!?Vyh4bg zj>00Ou}FjS^%M?a-caaV_l28j`NNq%0{O+7J`(;Y%^wYajPfVAHGLfP@yaipKu#nl zk(0^I$tjAm%~Mf+8ub!#x|Tmf*%SH^XCNJgG)v2$4Sx>OzDRSCwn3VQk zZIMUCmWF8 zn9ra1IQ*q~zpxp8O!HeAx*Lk z@o{C#_P0~F$PUFxh%fA-mVmtxX&q88we_&Ol`ZS(fuCevDz8tvj@qtd8gZF?eqok+ zA9T!FQJ1W5gTpbIYfbtC$lt`g-L$yfsqI1Ti8zjxEVnoOeN^6L_Iu%ejNP9+fIN^q zh&)(P)^iBzIh6Wg?@vA%{?SMeARU8r6U!V6`#7XC=pPUJgnYTe6XBnv z`KU);{}l36@-*^v#mPUR+?hz1Qa=m!*~*sX&w+og=AQ@ue9gZA{)L)<5&Vla{}N?y zZm*fklwWu`c?EeTc@=pzc@65jmbN`7uBU&4qHO1l4#(#9J>RV9w=nir@;35z@(%J& z@-FgjT=yQ@_mcOK_bX1BK>tBS*^Y;(J&f{?X!@hnA0x&3euDOsuwSJ86!|o?XOu1L zeU|!j(4VLMLOy>A)>Pq3)L$lF$>$foO6@hoy{_qRI2=>#__rAIHh$v%5OME-TZ8X{ zxYxq>kiKWk`>;Qtjabn}+0Kuke@y!m@>B9N@^eKQ{{^)#$*;(-QO`G8+_%)fb2z3B zq5T8eHJJ8~h{s%(b^fdpTT zc5~=cXiwGhr$H~#{OOFDLCz!%#Ld#;W>cR-&L!ue4>)#33$*x!D8B_`w$${kpf93c zs_9OLV_Fq;7h;Y#3ktNiIP=+b{F0;ny&)maHSA$XiOgo@^i+k=KO2;2IY!YGz(c zQEDwJrf69{R$g}n;#aCT>9@jP#qz5)eJ5&b$ej_lmi8{N+i9C*8yQzD!F(yQs9_Dr zYjx)Hq@O^{{z&VXzaI7g?QXJ%Od>C(Y+1IK`mSV}%pfkS#r47O*Zd8P+o)KAdn($b zGwpPu2X>;GeGfXDEBRSX*Zy z-KPAavtgfubQRLMTKswN&)5757=Iyo5%Moax{Qo2cE4xLau7n$MqZxAS)9-GR6}^Z7-0!M|Jc?}2}>=HCbZ zewKYe(;uYv5cx3qh~o5@Q0_6L7pXrE`w3;sc0Wn|De`IZ8N@wH`#D7!|2*^;^8Soj z@No^1C-Yy1{|fC_$=9^}*QvchzKQs^Xuqw+y#xJS>hF>7I~+5xR*ODR6xWmfNBIA7 
zKIk8VCEzEF{}lG`v_FIWxw2(Dzo7ml`4#fN*7Coh{w?_(`91jq;(tW?mHtn#e@1)6 zJpV;ewomxKsTh$bul)!4C)zs#`F|;YutQPA3@)HHh%AIwq~#-c@DSz=C5LJGn<0O= z@@Me+vfg6GjwDARZnRcz4E3?(IC4BW0r3-&rqG`Rd$PkZa~t}bD+*tv!Bdf@5QF0&Lj;*8H0HxIUD8YAZ*K=MN513zKD}=!

    Z>K2RvpuyP$Q=T2SY!E`k}B7Q?_vn?gL5Ibp&~&qSTI}b~Jemc`SJx>N=kG3FL|7Ns6+b zlM#1{=AX)#)5z1wGZ1&C7Izl)v!S1(>E}{E4?5?Ur+r8O}~-aO~}8QHv3uDdn@$YsNYWBLEfo2 z3v*J$%J{nxe-G_@$@|Fr$p^>>$%n{?6=gk-pq@uH|1tQFGwuoUN%AT3Y4REJS@JpZ zdGZDFMa5Yk(SMnIg?yEK4f_)2_~6&+zd^nU-3R?G&;hkSnME*?vLjI~ayB1}C zL;9WiALO6pU->w3tsw=xYX9`yN|z5xD0=50Z4Np3|hQk3V?!C%U_W=)SlZ_)f^@Ru`gg{H5B-b#HHxtiRGT%$Obl=rxndApD% z*+#}uZ#!*EQI_j~-l_Rr@DrN9j%C(E@21^DCbhT}wO(>pGL3Q>+F7!X>?b#n8_5B3 zlcH?rZm4H>>U(JVp30v4DDwAa{yyZss@s?OMcLNBep${|0I| zk~bmlX4u%`xP`{VFkG!9JfP7GKo;`OTrv3fe#ylRuC@l0T6@lfRI^lE0C^lYfwZl7A@YeF*%YP1&*dYh8(LnpX+Goc=$f%O#mm-o08j=lH7`1M3$0Ha%<8>x4#6 zqo67h^LxWSuTfmzYYud$;)(2lsL?1{)@1uec`DtTN}Jh4DrvY^gq-fM)8liaww$Rd ztB%CHVZYDgF^cO_=K5}PLsHe%+8C*eMpiT!wTdTqA`rO_?*cCL28@u|<4OSxFo$6~>b=Frei>#=uL0yf@Dq@CvwcF=(dxK7QK-{P8 z1pUt3je5{?Zlk!l$4qAv$xMHuyW4X1o6f$BQL?feH*TaljCw6|k+D2txLZA5RP1pE ze5f~XYt_2le%x`q3q`H8YTfd3<)QC-dis)yxa_?Qn(6VO^IT51FC?m?6LzzivI|2l zHwL6J*}0KJm{FBR8>&_s4QfCS+~pOQf;j-?@A<1qs8jB z#Eg`cxV=879AH#JJ6DM}9CEt@MsZbgAnxo;^&2G>ODY;#MM2C0AEt-ssJxvYLAM_( zr?{abkhWUCT(U+wZhNkYzIVBC zJ5^?u{T_?7%x^JbR=PhCw=#A;tpSXn7uATVMcZ%Jf~nvR2f_iYrsiZO(Ut0MPb51t z-B~VxW9xQ^|uD;GLi}PxAwRG&M zWyA(DS*r)Fl(S)V5DPRYW`t;^?Rat1_C)gs{4S$7+L2Brvk7|ztQoO{{y!#}SY?qO zD~&Z__ruDd&xsW#)&;Hr+v^m)Ywu7nf?^Mlt3RH?Y{TEa^+ri+x!4cNQ{^dKBbl_~ z*?cc74+fng`dHd3IyZ_cYZ^(!BzkVRuo*w8{b2nC}Jfp%)V!v5$M0&ADwTAr85Ehc` zN!q#bMzefApSaOgnJkKwCX(%anQS_NValYs`^5H(ooY=%tR>Q1hxS!0i?uY@RK|>0 zV{}6kc|CsIN>wM8nq0Ljs#;q9Ipw%) z!)Oe)aXbh_UA7}OI`J%&doMZ$TU&K+cf3!H z{sp{F{6%d!J71Y6ga;COd4tuK*|pD<`{X9mC|SM2YBOR&$xYjZP1yk~;VhQ#YPSnZ z0e6juHXdYoCs(oC>kW%}(iD$p+R`brz0FK=Q)sElbeT!9k}+kBrhYVB)VaptcA-Xh z04rHO9yKpF8Xi|zj7F`w-o)ub-F<6BI@tH{XiLh6nAvSCb}q;Gib}Dmx-b@4yrQYL zXU`Kk)jWO=mRe(6Z00?wwgeu!VmmKcGoji_L{rJm+Ju?xH0ov6-v>^3y=HPCzp7is z(i7W+xSlYvd*)V^Rl`R6FTl(pT>1Y)uqywX*Afa$ME%A=?owEEon2E!SN=` z^UZ2E&JJR`!+s&IK}VY;ZXcdw*xhG;V3f@MRty*(EbfrYiD$4Vp&T?aKVUwWCyZ99 zPQZc_4>V^yiN=j9udZo`v{Xhb%Ny$r*%g^Sqc@%EPbA|w$MtRMO7$83Dg$#{oaSV= 
zV_o7Ij0G+)nDewpv=>tk^SCD7(}v!b{ZL+BzgU&V^zvg*56eLlCkS{R@f^x+xbBcY zj2(TGmFzYL>}45UMOQopu|v3Vd2|m7*nsMOb2 zwpKP9_Kl+xQG*9_DXb2HbnN=XsUzSEqq@4zRJ+;U&QoerRjR`F?B|s@tcl~5JYm_6 z-`+9ABngN80rbwwOv381l9|#j%k0i#ep;9oT=g-PWmIcfMx;u1pr{{Pl6qXp3rJ6N zq}WHWCwb(dwJw7yF%R3F>v}QNfo3aXndx|!(U|TulZj1g#^G^Tlk7;Pdz6c%oPPl4 z=4?$elTBp%vX*#y=ECFRK#76I>3-4wqAD@v)=Vgug4k8$k^H`k?z_Rt7`1)LUHhy=+B7Vq zsxN`Z^?m0-#zD(eR}@b_G1b)-ZBJV-0UTO!!N#J+9FHr`0DoJFEIS|x@=wuw%DO3Sl#Q)hRo&BRd4)0Q)t zGD@1NqK%6q(elQIhRX65Lk#k2*%)*b9yqK`IND+kxa2lnyCETZ%yQzXTGFsWIY!Lc z?2L*7gs298J$Nrv)kw#lFJh0uZYvHlP4TYIQgNni$FoFrePelbRcU=?b4^7}q`_Xf zDDQS*s(9r?Q1kM0+U0ZmaGYygpX$M)%zE7Y)_OS;S7O=-#aOu#Z=F~teR#pa)75UM z+wE1SB{`zPvF}$j)a7#pP;E`z+9c;BPMh`Rm8%*mTa1h|Eze7Mo{ARXxg#DX>V}o4 z?Ok{*`9oN9HQ4nsR@|AfjFOs&w2g?`-(@A3uer{bk0f}1yGbu*#`2`KF{?&2!&ej4 zd94kVD_aaTutu!1d3jAa7I4(rl)G8%47q9N7LGRW+#auZnX9+@GV8Ii;(UoEi1Dqo zeT;BK9UwwzXArli)|Tz*Pjrtf=nY~uEbS0)V5RuDfiDrMv@?l)EGFN|j7Dl)7MM18 zdSC!TYWC5wcL;3HVYfVzq|!LYi3bzL+L;z->lqbk9J%{b299aWz)rE=j3?Gv*tNy6 zkFwF8N*Rl-RA<_3FI{C}Ly>!hxH4MrmM;sk9rW^jA9MxX*eqi5ZAR24_9v0V9@Q-0 zXmanoIM3q?gqRrEi19`)_8~Db=;<#>7;Z7aqVaAsEjMDPDfYZEEmph>S23dX_)3B) zEO)**?gU$$Jk^=;Y@*-H=uC)%&#;P<4ce{)2|lqMt*U$6m@Ej zYK(Z@^82uJ)O7dZ$ z(|x_EE~{U@a>zz5EXF|#O~E&wK+q|l^f^cCaDT|}K?Ms7?l(%7G?>XOZb=mLIK|tq zJilvB?z%X%h_i8hW}v4nk#eg24==DW>D$L5@lX_{v74&hDd*TNKxYK~V#Jqbdpq@o zEZ(GJElpMVWo&<%L5&z`S3sRjZ6`Nkm_%-$$BTQaOC<5s&f>F#*g30elxMhi65oo% zK};T#ZAWYk>h>zpCh>h|VNnF@vq;QakIV1GXHa!uqT}(S7C8D$jZZI@*<&p36Q}r0##k2P%oc4y zJ85G_}p{A{NBL_7#EqrBo>RaPTB>~y)o4Z5*ftF7)t#&mWh(27c7 z8jS<%@L@|FtS}SgI4e_sqTm!Q4&%}M%bPfmudI!g8?!5$OIy^V3+G7rVG^hF)v`Wu z(i2Y(%~7opy&m+V#=1l=UTQP!>#9U=weaK(iBk%?QS2L{270+c77GM-geenkH?4N7 zyN~Z{%`IIPzO^aIs7j~$dQsm*Y&-5Ud|NIHn`LEUJIi?;_*3>T|ES+Yyk1WT7nvQx z8~zehG+7q)`@>~;jMz0P*Zj9yq^7FEr$!IXYYm-g%Sv{1rxN^9(^%D9S=nGLi8j_$ zs1L1crpT)Vg7Gp>M<8xrCg=Z4YwhcxYr`QI&Ro@a6G7j}PYa{e_w#@Iu##aHmE-$7 z&#-GIVViT8;dRy}uAH;~>#Fvpak#~Y_n-@Bj~KpN#@A=s6L@>W7Z$#v*W!C+d0kBW 
z+=HGRkD3GMMP5Xi-hUqp`x3aj03HOm0BTKaN+sL4Ce^ZNTCUXmQgyGH<_Wl5W$28K zG7~>2h&$2*+~R-TmKro)z~{vd*V2YV`cDdH6AxgTIz*qRLOTQ`vHZkOyO19 z&9e5mb{Xma>-4s}R4gQX?Zd=u%%<>DjoR$g?ZsO1icw>KqnBOkE9>x~H$B*$RF~QU zZs~t?sV5v354MF9R$_;qfZa#0(D%;@egC^cd|MSSLNR<(X-lPXtdify?EN4HHH}lP z8~b*|-nb{Bzx-uBQ_a4d{a@o@cNXTKH{iwuuf~qUmzmlMn1Ab=U|EN!&0S{U$2-+G zy6|6@z;s(M$tYeh((TxHryB8+i364RDY6;O#OKW1tA+fn zq`smuii39UgB!X(~5Gx*xme#ZBSMzu4K9TAO;~cntR9hY))qHZ=FOSxNq8 zYi+`H#-naO)_usMu0#8Ou6K7x`~tY57heE-#rJ;v#rJOc@ksVwQ>>*j5^bqA@WQ^L zu^HQ$x~I0XaL~RdX=Bjw=Rg0!c#C_&9xWaUQSsJYn#CKM{K60wzYmJ{HT63VM#yxv zWB>4Z%dj5g6YTH8e_cv#V*U_D7*AU>BQ5p$XLY@a^BTSa;zLQwh~OtHd>W5s`{MXM z&wE1a+tqw9v31*Y{po?V>XYW$9&=qPy;gp3PbJS8 z^v~QxGM{^PU0YwGyM3+In~HZ84eA{9@aXz>Gi$b)8Eb8Jpx0_m^x_>dWh8pn;y**I z&6qvC_%)%Yr@gJN-O9w%iC*y|**lA^$GwOecB ZDf}pQ@6g3{X}p@J+t+3io2+w;{{xjsLht|p diff --git a/modules/ingest-geoip/src/test/resources/ipinfo/ip_country_sample.mmdb b/modules/ingest-geoip/src/test/resources/ipinfo/ip_country_sample.mmdb index 88428315ee8d6d164a89a40a9e8fff42338e3e2f..caa218f02770bebc48b634c15ba74b0f9939fa5a 100644 GIT binary patch literal 30088 zcmbW62VhjywuUF6gx-4{ItqlD$)pV+5SnzPBOpwY351eh2wg?7D~bvzc2H3f6?+#I zd%<3?0xAkBV!__tfA;<+v%zrndEeXjTkBtIuU*dGXHK|2pD)ShtA4D{=Szkae7=g( zd=>F4!OAcNR)JMvHCP?ifHh$)SR2-XbzwbNA2xsuVI$ZWHi1oHGuRxqfGuGw*c$p_ z8`u`MgY97l*b#Puo#9dNXxIgIh23B(><$Ak2tzOphG9C)fSE80X2S^V0eiw;@EF(| z_JMt24(tc}!vSz090YY{%()p1hrpq57#t2qz>#niJPwYAW8hdg4vvQt;6ykH9uFtO zDR3%00ZxO{VN^%|LY?cKlQ1FDLI*M?Vd1h2p29 zKkCHKBz_h=TY2YTpX=nEhju<(sk{rYwZ~@etKdcOVoMXd1Z_23qyB51_@&rCNMEP+ zGDV-6;Pi$G=(rCUS3qo8hhS zHcPX<+tIebJK&u@UyZKVcUzkLdsP2k^!wob@B#RsrOAH??P2%`)U)($#eU4v2bTv7eLvy!=Yy3yFN+i}GL6Pm6a>wu?H`zL&*-_zHbr zm0wf-Yii%9y{`5#u{YqG@GbbZa^At-VQJR!uJrfN--jQ-4;9;q{gGu2ojI@hcB$=7 zXkIN-?-S{tV(+mu`e)KVN8jt{`_R5n{7Xmw3VlEPTJZykc#Zg8d@KDs?T28-`5qpW zptEkYAK_1yM*A7<7x*ju4gL=QP@O;3{<40}mOh`?yc$Uw)T6PJVFg%GaXs6zcxChy z=~W!Ps{CqdHPouB_DCD^7`KyZDb|MmwPn}At_$nI`j%!s4bU1YzmcOiMsFg$sakw3 
z&Cr`mYoW1MSl?q(D`Kr3&7a6gYD>JGr5V4y`gK6>2s=6P&S*zL-HW6y)~>0$o1{CP z)Qwmw?4F35F@oqJ<>slSsf|Gk%T8D8ho2!k6FUoLTbf!Cv>vc0>;;ce|K8R%`F-T~ zC6<%WYkn-fKl%VTFcD80gyx-}!HzxzeW>(djy_!e2x2-DNylN2vaF@&oHRONdvi1M z8!LUBwT(X>eS-9f*pr<2@zN(d{uFYiNrhz!e;WBKq@V8SXUIR3*jWj^)@JGFsEKCX=gK}0`+T@k+Er>7B>E*?NNkn#iyZx8 z`Ik8UYWZu(T?;RD;+HAD&halN=L+dpTD#UxCw{f`Ysk44u6N=a(5_S7_1HJSjZWT; zir?h;H_PAT__tWQc3sP)TNS?z`*zu{t8GzxOzjS*-<|UBBJXZTzeoDL==VAL{b&!s z2bK2__QOuzBhnv5-)dPq{w{gk$$NsFZSYC>l=?sI}~1qpznb1!uR0&@B{dv`s~8~ zNVd*JZO*MnuO;nP``G3f{}cJ2CVX#fGyZ4NKUX`bw%2M#-$(ol>0e@h<;3@+eGLyN z?;9unt^DtZeV@>4Yb{AXp#KPefxGY0|^k=`aIk!Yr7r-2Q42>zlegi1n1- z%h8XK-`nx~kf(h%{c;rVmx$Nl-XsqoJ`fIqdgi9zU^oO0g~Q-*)g6I7(z33er|Ekf z91X`!Q; z;PsZK-wo=w5&cHlH>=&Gz5dZ^9^`Kz@*YHcNOA44nad-Ics=d2(YC_Jl=ryW4*VzJHuxlb3O=peXRK}NJd6Gu zd>+04UsV1});9Uu<-hFsugHIu+}Ggi@C_&LP366X{x*Ck5wFLcN`9Bvd&++w`-4PY z@`q?2DZUeX7u*d$wlwqo1npC}NBus-{@k*j&Zx=Vr`Vs^U&#K_+9vju{QZvqwfqB) z|Bd`_9sfIPCw~tQ!XK2!nM?l3$^TjUFXa3x`*-Z$EbHm{CjXJJy*ZhA{iVC-Q>$bB z3Q4k)t?k7tC|IE)u}ZKqOmXt6D6cAdHAk3;M!uq|u{+rtj9BkTk_Th`COKUy{W zV|RgFrFFwjwKVhUjuucnh#i7yFbvZnX&78<7bl-k>116d&=+S_{Wgf8}@;H zVUF_q>Dd%m-(!UV);8k|ByW&(&QgWJPJD>;q2vsM!{G=gZ=~`@p&tiFJMl4!k9GWU zPhWwet z3gIj`8_t1qVG%60^y<`q9_>WcE5V)*OW^{z(9(>*2yHQ30++&Ns&|sLP5#N~r@-Zk zpX$UuCg@!E<(E)UIJIE{~GMI zmJK@NUk2AH=VrCb@#D|`O6!~YS1Eoq@oOCYTD0|WgYvG!z8>BHH(Hu{H!ANY>o zpH1Z5qS!vQ&1z4m-RktaP5$kUzlA<`z&qhx@NRgI`rV6tpQV|{{n8&mf6&n%lKwFI zBaZ$k+E&FMbM(ip-9XQz!Zx+H$a_-uQ`k?#XW+AzW**Nek27ri7v#T4-b-*hd>OuC zY4Tr1drkdbcl0;V-?VI?xm0-DiN8a92YeU4r@Z%_ybt7mNbDoH6Yhe$EzNvBM*Bql zK6Ugx@;`I@&*kq;_zmLs;|uzKsrXme`xAK;zLtIf{Tui#{LabyUU>%{{|EVuVdnW0 zeSU_&z+aX3o73-i`F{}m6aHmc(U-_;n5zG|KN-D(&sX^}`AVp$XIQZkdS#dbtH7$T z8mta$z?zocyermHtBqa<)`j(8edXMM-9UCj>_)J$v{TfYSl`rbN~{@du6PUVmavtj z$#0G3hizb6*bcUb9biY;33i4@!J}ap*cEnz-adE74!|G`!890#=`aIk!Yr5#Bd`bT z346g~U~kw5_JujHAM6hYz=3cO)bliZIT#LsL*Xzu9FBk^;V5_<91X|7v2Yw54=2Eh za1uNoPKHz9RCofMW@+X=UC$$mo(uC}K8(QvIK$HPpNUooXTjNUj_S{~w#hG&UrcNs 
zJkg1lpv{M+%3t8b7osnMi{TQuRQb!WPl6{~dgC{YuW`BZP9=UCTmesqXTUSzS@3Ll z4m=m02hWEq;RWzQxC&kbFNT-E)o_ia*|)Wt@1>4^nf!H*f4TfC=yxT&3SJGb(fV&z z-nDQ&+yJkG*ISzXQob2~BfL@lZ?d-0!# zIPrTGzt8dSC+7j_4`M&$#2=Rah~qyhe=B*9!N=hfaGP?TReMtFKPWy0pN7v^ntk!& z&(ZIB_=2VB`(h%u;&%O>q$~er@?L?j!q?#I@C~1@lI}>wH^o|LZ^?ff`yIH$((K>6 zXz#)ImH&Yg|4{x%j=xj>E^>FnkKrfqQ@96y20w>;;XX?-1rN2ftTbdu(d{0DrVJ+E0rA?D)Ts^DF#K@!!?{#Q(#xQ9o&aC2X&j*D58c zC0pD070@d}O|VjBC!Qj|3bCrNnzZWHZnRc*4fL7`&D5@~*a^hyz`D}vS=;2)M{gj# zp`$mF-`Me+$Ztw+GuRxqfGuGw*c$p_8`u`Mvo!N)ulgO-v~R}mD7zDOXD9C{w4-4c z<#omG22){o7=S?-vNYqQp@m^O%uwA-Coc;Mi49QRKI<R4!e6{Ntr&RU=?1gX+e;Mi-nLe+= z*DQ_py7J#Ze-pmt#NS4H2kub*yH5N)^!KHIfc+u-NZLikOHZ;t-E^gq!5bo9UUY<$+QoCK2-@yZnxuju%dR^=2}#fevyUJboEtdWQ} z(b=n93%xe11M9+ius&>HX~t}*{*BNZ!zNC=sp8Gho5L1Pyrtr;9A9U(vfuIB$ZzZT z?W|q7J?sEG!cMR=JWBnKwzipX7x`U@b%UusUsLTt6dUIMB%(q&&^b)E+Fm1bc|=q1eOVa5w^v zgrnebaI~eVIR!W-dD z@MfsFn7+4I8hbO^t*Un$_U&+slXr*WccR|~?}qold*OZXe)s@<5IzJShL6BUHOAxE zTj67tO@C7C3AAnS$wbVod$b{oYaj4)k~7d+>eu0sIht1b14RI=fV7H~Pnp{)zNY(f7d5 z;OB5J+y}paU&628e#>UsGjB{Y?g97>{1$$valgktXle3)kp3gOo|(~phQBy@zoPx7 z{NJ(vaN>VT|106A`1H(@te=wX=oQc^!b-}kjIB=bb);00UKPC>tPX3yny?nEZQ1O6 zORrW+U1IfMeQ6DxctiP(h&6^yU{fcrnev+Ze9bsJ9#dM9*UHIjE!~ga#?jkKZ|C^! 
z$>|`yqoa3{-SmL_k6^pWVJe7@#$tnD#n zH2N4g7LJ4CMZIrQCir~SW{DF;%{gU~&sX&ta*mfj8G8zx3QvI3l=HUQbhVq*qH1Rm z%axsnoeyJ{X8r|=&p@9E3!&!Z<(RzL%42OQbJ2=mF`Nfage7o3EQJd!P2Gj6y9j-; zqc4%Z6nz;y37+iaoua(u=%>Qd;0ky;JOiF-+5A3BGmo=X=N!jBSN?g9&)rU0Dg6Sg zr(6hE!Hb;yiV-mOK%)g=Vr#(WNF5~1#L6D72XDK*L=2M-(hL`-6{Pp^t&DX9<+NEXa7^~ zM|%K1=;S}7yob>rfsewi@GdCJFPRdPOopGx235f0F*Q=9@(97x*juP4VBgXF7)|e^{Dv{v`gF`uY;Omscg(#;R0+ z6=5Z)JLuJ@l45E4sE9|SS5s`6T6ML4YBkVn%I=I^3)Yra2fHq;2kXNIupw*&8^b2B zsim1iGu3VG_$}nObo^HGTRXmAejCScYwarSV0*F60S6J|Mi*~*Kc_kcZNFL;clsoNW^kNWj>^c-uq98J8x z;seyC;}3*`r0Fb}T7%&bI1~uybymj28@7k8c?uGYBzdw;z2A_ma!KYR8 z6SZgXpOyVK_H*!gX)j>E2w#HR;mhz9<-V@=s-E3fXs=m%ew8=S-h^)@V&>VrgT4d4 zYiVNdssH=vAHWacM{p3RSJ9R^4i?wfA20VkTZwv09E_8@&#!t9U)^`mh0P zs8|cNMn0eaXtc)in>c<`^k&kVCv-2rtWPV&PE~8IHb%`)pEj_qrI~L##oMEIfE{5c z*cl!L<8$kR-&Hkws&%uzshcXlJMjPvI`NR=Y3N}`PnVwI_?hyv96y^rI=fYSSlhqU ziT5IY4D7AEK2E%^{2XHaV1H>tu?N6`(gtZyU&4<+-@*7pEWLQuVQ9l0ZG`lZ=%XC{ zI9s#o=tOO^Ut`G~2gkz+sxuLL5n+1wW#&Yc)7&#q~|+&481`5 z3`d_SztHh#kvCiV9PGKU$jK}A`P%4Adt;h$PgH&h_Iy|h7g(CSh00rmz8Efn?(<%T zc9QZ>#y$luxAf|@(VVKDhQ0!xo`_dH1MN)ZorQfiJO`c&&x7Z~mGA;fGye6zl z#qbiiTH~#;w&}MP{Zi?dVXuRiJ9$?qekJ-&$%hBJK{to(%gl^XGp7o9WzVbdG??XrbNcv9nU2r%27=8jjg?r#< z@N-Ku&R*5qhyDfp(useCwjX}2`~yz>8~NX={iybx_Ay`l-qO@RDF26q@5N2dPw;2O zf3fw9{;T}o627VNhcn)vivMN(YCf2hh*wKStDt{dt%zL-R(A4I6t99_)zMY7T6M>- zK~7ESwXkc$I!<0)G@XxX^|2ekhE851w8pRrYzmvf=CB2930tXtYiz%zw@205sI^V_ zZFSG8wO1Rg)`9$viFmb6(mSIc<>*JFb%9-#*9|+>$?GmXfF6V)m5H8M`@#N}CVv3hKsX3$@4R@s_%j=#dPA*d`VAv* zI2-{-DsL3_ad0#o1INN~a6Fs;$nb>E+vn|d1&p|sEo(Io|E7ku3>ocl;~lUrF9omhGOxzXo1wY5K26+W@aq|Ld`D zaPl@vzY+Z=cr)AtZ-JXFP5oQd?>5K3UH%rwzeE0=^t%h*4ex>XTAF(I`F!mgp+5j0 zbm9-G-ot9U)gE#5N9AvI{Kx3~IDA6!ZP-sj_RWm@wBpa8KMS9e_7?W@@CEpyVlQc~ zx--?bTbepA%YOy^Rrnfw9ll{{^53-eOr5u>{f?vUP`!82--GWv@edUL(D6TFjGfYV zC3G{^$MpXMerjoAdzAkf`sZ*j+~?$df%c{Hzrx-RzlI0kH}G5d9sJ(XjC)XZen9`x z(SMTuv*Z6l&aYY@=fd>=U3q^vd4H1km*_)Jg30h$v4UDf{7SGgOo3HkRp|Arrl!K) z*wt&mny?nE4eP+V(CbqVyFP3H8$!;0^~U&3U{kRfc5}-P@%gnxZw0+^TC4f-+cdN^?f`8&T#= 
zD~isI4H}sjixw3|2jvwnC@L*klGAtGFiaC4+;_qJ(vs*LWfa9q7ZsPx9v4gv2U7Ka z-DHGQL+R;W+v88l88FUYYv6zdCB^e%{th{V2lxYn2A9OLmscC7cnHlO= zX40?k7=Nw)ediZO)ob`9f8f~R(K*p2oftOsjnoV_7tb7}!=Hv7B2y(pIt}ral=}lzD(|sR@wUrL&j_b-OzE6#L>J?S zy_Lj|Z_eNmdMOMp%J(j+o@82Tnl>Qly?x3YT^jS=D(jP@i!~%CS~O!$G(R?fCLJ@C zSJu%uf9u%K8?4{V!Xgbe$*ZroNOp*^JUjl1EE{UfAb()+n8KnN(RsxsdIhOh*n691 z=#F{2UFMW`XS5y@*ToHmy+h-8+Jl_F1N9#6Tfpn4{}8=V`_C%OEnZMsXhzb_(TU44 z^NZWTklFJ1JGH;}@*Wf`nHw!yV&6{M^^A~q-CMl3^N0A}&d0kB9IUPb3ybV~-+Qqs zHA~Ofyx6jB*Mxt_L(>BotcO-y6fyu_dvA0d%#b<-GrJw? zYRZS-vdO`4T53oSLO)hIVhQKaBi3<~KhW`Z?_h z4ZS6l9(OViOV1fHO6PD$bY4`ygcE}L(bUginwe0-E@$$3MwxoFID zAM^*>|KhD6E96bddq0G91{0onKJnAgroLYDO}ta*Ro9Er{3evyhc356)F0?jV75P7 z&)qwkTB7&E>bPb$@$Jv)r=R74{h~$DeDj0sJ%Lb2w@q(H?>i&z#dqqEew}^0G%@dW zo2k+4hl#bzE_nP*j~=ICMlYCePUU3pWvvGn(oc!5g6o-A5S{!x{Q>`{-fFyzV1|B0 zycO9FarcnT${96G!;C8C=9qpVZ_jkm=~w0+a$CKAos)E~ys^A<7%+z`BdE!_Zh6nX zfwF^vwE!hy{ENHD)ZeUA`us+j#Z`#80UP0tHNf|2w5*VTeB;W*Og*F{?pyq*l)Wfyy{znTek3mti)88K z*a>Dw9P!`RYpMCgBH_%8NH7-8h-3!h^~yxe@1M)9)_ZK7w#}QK?U^15Yq=cd!^9)j z``a<`rdOacx?!0GWjEsxt(*rMf2w~g*DRC~NYnkw4nzt9nURogf44)Wa9ZMdCM@sC za^B+acbzv|D;m(QrA0!yxrt(qR?hTPEk3iH;S==&93ER-m{+Xrs~^hJt0_+l&GU-k zgo25g9AcAvMFT+^U9VdJ2x?Wd6sh+{0sHESf)CyLQy?99e3?{Fyy_#&9yvSEcXTC)r)_Z zjrBh0#x5wC9rHG zgzT!ldX5;2{==fn-nh12UMLjF$Wx7IdS1TwFuZyRF+;EU^6x)esRmrPh8WL`j^$>=Z2oTjLx}9i2v$19(r!*oRJxG*6WJ@a)!=&V_I*%u7v#Bi&38pO95p{f_W7pSj}0UK%k?Ini0TC+ zv8;UO1e$shvqS&1V)KGHe1^H+Ze`?Wn9uI`3=`u2p5c)y2K8=8Ghcj8Ma2J5G0(dY z1-be`a2|6){NEMJZi!#FM4$8C+dnO&ua?ZrT(n!NGpiMyl;H7{!qkK%m4B6e((7Q)0`ar zv+qlLsZT9pJqKM#~&EMzef+%KS|{m&(*}6XrHya zS(+}_+<5=|ffE1q?hyY9_O>89k{iuS*I!|2F}=(~<~yt$^$7dR_kiA>g7f<^luuG%mVh$@yn@W=IVa=#Q!!_DIG`sZ5@c@ zhSD<4aXdsQ|IBROeqeZX@$kZ;{M5YSx%(#P>fbv{mP{)s*1r;#PMaH@Ra`P{VXS0+ zVR6xhKVn^#geuT&%L&WRQML;4U^iO;L@U|mdofX|4{ iPpYV{LyC&?V|I*f)sG!o63Z(t$)7gAaA|CV|Nj8O1k}?2 literal 32292 zcmbW62Yi%8_qLY+fe?D{EWIb$&896NB-GH0NEcHU2qnP~s@TN>_J(2u3#eePSWvNm 
z3id7*R1{Feh6USqKQq^62MqfC-}m~Yy;yZ*F0d=?2D`%^uqRA{$uI>5U=XIl5KM#VFau`7EEtBpU~kw59tZov zelQ#6!2WOm90&)&!Egx7g+t*mI2?|EBjG4G8jgWu;W(%>Y0l+%cmkXNC&Ec^GMoaZ zLY-w(=R`Oio&@uB^xxEZ&sW>4R-ks7S|Ra>>{GF4$exKk%hJ>?LYuAl97msvJ`WZv zUgE^(qn|9j6ng|$cCOS?2?o4H&r?FsU(fLBVpiaebM z-_>fX)z%ZgM)tK%d=2__(yzz9!HM4}{U*m>OU^p!H)G!dZ*}r+vv#fK#5Ta&;T`Z! zxDjqr-Mg%9)_6DiJ@8(5pYrZ^@*Y5cQ2IlT{;>Q87JMqVSzFN8`zQ--im`}>r zej9s>?5D7whR-uBHcJe+!{}k?qpDFKi>@VOR_$Bozc1|zbke(C*B>c2kfc5B((s3GEA|oJ=5p&TBV>`YD}vX zlCQO!xu(Z-<7Zmm*jdU8JANp@z;{GPWsJ`evABDiQVSt>!ojS{M*U7L;9WA8{sA=?=Hpf zM!yH%3-5#XTYB@XqqA7)LH#a$SbRu~&eQWBk>65$RPC_ZX0?54k5T7w+3%}8f&V1j zB5eouQ?j4Ne#X-5&sOQrqCW?pcjCHJm9{zlc5+^n{*t4=Z0$PQ>qyG~hId8(Z6n`809r&)LneThrE4Tj#%Ky-b@1oB~(m!_ePvn2<_`BtQM(*eE3%Cb< z3BOYAUTfE_>+^Zdn~Ryxe#O4V{sta^-@=3NJ4@5=d$b?a|3~bf;2|gPXT^U({}uig zi`VUk_Pg@_!2T2drE^mo`)}DkYkT9zRkC)R#JI|iUPXRYV)2e%4Xrv%fHjn#=;YOu zU(5P+OC7zAboMQ-o}<^7-oW~C4PhhL*vV_6yr$^Q9KE^p7S^x3o_H&16VzI(C9AbT z_rtc9W*^!q-rn&$$nWU*o#@jUc2T^m6YqxJU3w2k?X(HbwlwSLCB3&=Ut)dWahCP;{NnndWjk7q&*#NVjR9~V9As%?gVBbl zPA>LPCq4{qI2@t8k!oYqMrjOhexohDdd441?l?=MMPuWYe?m+*bC@WtSZxyhCc`O~ zCTFVR(;WXq`O_W$B>8!cpKtBB0_la09+5vot%&$cILor0=Ha!t*=loQTHIXu^J2c4 zQ;D=Q=s#cf$=L4wSRlR3>ZZ>^au&hGiZ8)l3YS@${8JQP?)ayYa~eF|vff(dor!i9 zJX_jjYUkjufahA8`sbmY4=+&uO6*neLU@s->31>OCGb-1+gsR|J9$^A-j$AjmHey8 zTkYuApk1r@8b`kl{d(y)VBhG(Z?bm%%8IWeezOz51?^VFZ^K^i#5YL4-SO`r=T5j0 zZi06y|88rWeYgkxUU;A4_hUccJ{HT1dmQZv_@wf;IPs^@ zpO*fNqi>b}tm8jN-t+JU#kXN^hcCjH;LFN+6?+HVDeV=XufFbsw|J&yk+IbXrOitlsc`_aFa{tfm4_^q@bun)rT;P;mGb>6%d_oLcR#16$`-Wtt1 ze}TWk-;{sY+9v+H{6C2O>F9q+|J(XH`leRpN-!>#S6RWzRnV(Cdc5>%=+$8Ytf5#< z>_p4@dInyL&bxAL<!w3*;A)7jg6%(r2R2a`YnUvmJj9Ide7lo3V?Pvktoi&X;yFcB$-@*b878)cVal z7s17F30w-7!BgOJcq%*%p02UZu(qjtCi+?MY^rwX|9>yS!c<^6J8R%Byc}GhPGqhER9c z=#60$*c3KX|K`>QyaQsa9S!%t=4_h``DXn+R_Qo*d90&Wt zelQ#6!2aq#z}lwYK>34+4R-V)Xt|0HwRWS;Xv3Yn5sHsQ9|cFlF;3oCv~kL3&Sw1a z=+WmfLGg)bli*}alRriAsWHEi?pBo($(!!;Ro8j1a+1$i^>Q%}Js%dpLKuNF;7m9R z7Fn8cXRF>E^tr09Ggf6DezEMI)JmNE`SMRz^PX2Jc?+C)8QMa)NO_B~m%ycP89c?( 
z%xAgsPj&p$NXUIR3erGxQ+0xHJU*YKIqMfJs`Pdi4;*Hy)ty26#?2BUYDi=$? z#PKhce;Ijt7G};@z$@WZ@M^dkUIVX%Yv6V8dUyl85#9vX!gcUwcniE0-ezg$wqEny zfPOo?1KtTY!cFilcsINU-V5)8_rnL^gYY5vFnk0)3ZwV!G5p8j6Yxp61wI9zhR?vQ z@LBjAd>+04x54f3MfehY8Sa2P;VbY}_!@j2z5(BaZ^5_WJMdlj9(*5u06&Dg;79Od z_zCzmgaoy*ZKGw{ToL=ApKj%KPdk@$Nygb4{ASJt?^u+ z&ud;yQ}>W!KRf;}@_%*w-{c=A_jmXQ{1g5K|F-n%R`sb>a{M^?l^wr|d?lIOcvuZq zhY7F-OoTOIEm+&q)T)D4SM}>**LUI#q&IZ@M&vYxO<+^l3^rH47T7H<&3LV(w|4wC z)^4o3SG6tq?VNaf=^fN&sdZEvMNTK!Sy~tDu1>#hXx$a>f!z}(!DN^M1270vVF;$d zbeLgj)|IJovK&85PA}=bvHQT|U|-nJ($vXD%YpsXe*pGCI0z1gLtri(sy@T4-9%@h z>Tt&%;qw`7q@_2e8F#eu#$b=NH2OH{U*!0U z>9Yhbh0Bz83ifhKv#wL6pN4)qJOiEy&w^)Lnttb~-wMY+mz?w9`S1d`Qu(W}FSImu zE|Pw+<6k2GQpdl{+D$e|zryFMrZZmkN~=}93SJFY!)xHRK3}}Gle!Zm` z{|4zdqTdA9N_!4_o$Q;jZ-KYM+u(Y*0p1SpfOo=;P-`(`-v#fsG}=9A_rm+&{qOeW@kzJ^J_VnK&%mwlSxc{u*Wc889=@P@+pxDg z3rA6{#o^Zv9{T-U*-Qs?69N%F8vSmKjB~SZ_9X|ZG0vCI9M50fmNY8cw@#_gVkYz zrHR!*ON2FHE%mF7T?f|H`O;mAuV-nVhvzpS-VipjH1!)R-UMBT(`e0LbJ#-lv`6tR zVJp}gwt;@w7Pf=!VF%a|c7mN@7uXecvov$+&RlwE9zBUC#o}guDT?V##|Lb#=@*or zs&*3nL$cGb(_se8gjp~Qds&)#z0vxp&T-g%op?XAY{hf1`@;cnARJ_A>JCO5qJFuK zK2-iNV#DDGI1-M6qv06!nSec3_Bfxfnd-;utQn5CG~=FN^E}mYV_5z*I-{G z`&!L;pY=UP?fB?AZXowYOH=PA^;?U+4&Lm-x<@J>kh_m ziuv(($-kT2d*HqBJ}2*fv?V{0Sa{Kf_-vP5och|2Oo*@OLNv z2il*?`^(Y)wswnj?MWqTSBry{V|mr8pjCzO%GV}(V|%Syb(kQ#26iH>DXkrLEm#}Y zfpuX$<<`e;02{(aurX|6X~u7g)(kdRofg2zR zyHKq+ejj+8rI|-xw0_FV#?FELoxA~x4@4gX2g4ywUasbIh3~=l;RoiW{|3&_gI6aEGN)_()_#qwI}8C8$7e)Y<-b+)Tl!LJJAVKt}% z-khr^SQ@*A{6w|W)oK#21#3&2q*g~Qq*fQbp3}cRS_9Y+HiC`SuL*WjOEYdW>CMqw zz?M$D6Fv=wIC@9vozOeOF0iYU*G+ld(R;w2FbO8Z6c~U(m}=RI zc~wudzFB9w>SvIb>F8N#Va0nndT;rCh#d#}!hSFt=D_}N02~Mh!NHbhK0{PL7k#Ls z50gF|eFPi{N5Ro>3><4|>W))?oh@UJhbK7k31|~7Tg_JfWV9)8s->whP4N>Qe>yoQ z!919+yaMb(OVd9heFpkWN1r9V2wih9xpUxLI1d)X5;z~8Y-#$IqAgI}GDlx1f05%a zmcNADrEnQM1uloD!qY5UJ%)bO|0;5>mcAPM8h9;S1FwVE`+TjdD*py}BfJT&h3nwW@D_M0 zybZ308!WwbnfiAy-koqG+yw6uLz>6kK3|Rd#e2~2_4yLMlz*Sv`)Z1ry?OvX2p@tE 
z!$;twa5IcP*T=1IYCWO4PZHk(pMp=rXW&-&tflGyocceH{sP;w2A+yy_fG&vumeWGzc#oi4+ zgP&WP{4bQZ2VKv~XkR(;y^8PCbJSg~zF(|K&e!l8cmRH@yo1=^!SCS@@JIL)JOqD+ zzrbHD&76KiI}Cr&tF{-4Hf{jbRhm6gGp+VGGz2wz4#H zY_0lj9N#a$EqU!6y}k4f=pA7vY2DR2Yi)W430*8r-LCSx#e6Sra(YPbY5N;LNq%z7 z_xhUt0qH^PR2YJ3Fdb&VOqd13mZn}WwBE1}JWk{E#qI~Qr47Z-f&HZo@cG&d#vcd= zNtfp72}59REYIk}$Q=$xSen>K^&5pg+R?{QYiz7mLUeD&+dQ+b6X-huPE@@~*puND zI2BHVC&KCQB$#Ju>gTIo0eYdMN6=;{KGV@>p%+P??dWsl&m}g`(TmYaEZeMB-pOj) z&`aS0Sfi9Js=XpR4$J zj(@)V3mku?{8f&Bq5O-ipK!6GUt;YxUlF@Zd6#2f0k4Et!K9hQFG#e|J9+Z)r2dl$T0@q4iE zh4;bx;REnN_z-*;J^~+wn=Q>akD)ydpU`+uVsC*@!KW=vzh{)U)$yMt=Q;Si;xEMF zem&!a?ZjS$FTs~%c?mnvb}IiB>{sDy@OAhGd=tI}-?lXKd`ETOb^Q0_zfax=j{YIq zF2z4`^pDX$fuF+N@H6#ApL9nZ{Pv=Ej$Rn zgWp@4_5Gm!KRW(T@(+>sGyDbq3V(x#;qUMd_$T}e{%u*q2P?riSQ%DM+DKG$oFcpSi8cc^7FcW6MFzf|;!#?mh*cbML*)Rw8 zhXde1OY>|8>A4R^9|CjXP&f<@ha=!fI0}x2W8hdg4(gnlF~`FbEIqAlbPp$LK9h(~ zhErmBHKr;)4gEwo9i9a9U_LB>g)jnVsMa~yGvO?0Mb|_`xHylf4THi9se}>r#t=`^f?or1<$r@8{L}~=;x~c zdDb@d&qu#N`bz9o@IrVIyck{rFNK#`ntGS3&J~V-B{^3~zZ!cryarxtY5K2G-gW5L zJNgZ1H!6M;_F5;tPG{;N^jqMq@HV&}Zm=}t-Hvt#yi@fzVsC-pLF~!zl8m=>^*8boc=rIze4;~M}JNF>*#O5H{o0G zZTJp+7rqDIw>0zo0PRE7-R0;X$^Y2#Kau~b5XVO2%{=%}|DEu!i%{X77?N$Ch zN8d01YhvHP1MpjT5Pk>0hd;m{;ZN`o{2BfNe}%un!eoa^l)`oRpU04s+hYc*f`FVBBJR8Br zkb9BXRO2^8YYtn$mar9U4ckCJYzy1L_OJugT{Ls(1Uthnuq*5ayTcx^r{)+`OY-^J zA15Zm6d17d#_-}MCpD%errA1X4(aNjp{8e_n5lN9T9(>kwXoU=YP}e@H|zsd+tlc5 zY3zQ=&vyJA`TZS#0DT6+L5dH?9s+aWP&mxe)E}<=5$GcwU3V&RwBwHBKSw8RGK&%WdbYhEQxn|x=$XTj>%dBnmQ_z>gQ{idKJ01HB zcqTjxo(<1|E8w~CJWDgq`DhoY?n-Q)Q{shA-bKVNR^BDpmpbvwq+gDHg=Gib6EntD z@M^ePx%XpV1Fw~~2KzdAy``yfgW@+j{!Q}NlDE#$ZG+l}@a{9N_Fz}^GD zgkQnEa39*>R3v8M}&D)zRaztBKXI6JQOP2y4Px@OZJdS{=u)i(OBw zkKMqDH*S?k(Um={0!_&F-vWrS{SVt z^zwRR_kqWWeX;w&Z0P0ZsP)&lW5of`n}_Fn@j+^Xu|0nXcCI)Sdl(!Jy}S|FBgIkJ zqv06nPWGUY<<9itym^s9*)vLu3i1XQlq@VR zD_xp1IeR#!2@fAvvaoEHpQPeQ+2WGYIa5MO!C)XM5K2u7q-XYwdBL=_J^o5LBeVUr zM~o~f^*YXrc;)>aa>kGICyy9kTvQe*^p7jcD~l}f=S~TzLP}avAT1>+rDwU5nna<2 zH;n{k8oI{T=)({K@{6{@h7v 
zLCrLj6bNXjs2!cBf3uReXqwBqyb(C51AR0vX=kMD6k+kDA{!UwTGTKpPPVrFgC95pJlW6=i6C zDM@L1j+vpJfZ>QU~bnhlHg7@Xrz9?5ea zR#ZH*uw1v9;TDbbz-2Kti+4~~@1%PZDsQ!{lgQZ>8OP?DZdxhFN4 z;!M%MO;2ct&O(;*0-9P9B<)!sGuZP8H`VKLNXOx@zeC&0{mH|}fV02Ho~N67lMIcQl{;I7c|5D<0zO8I;26J*XR(qp$dE0 zS?P&-mr*Cc4pQzPy(n*1Q_LV4SxKp>N#z}^XBJWMq_R?z^#4G*F1xphQP+D&?yh+Q z*sThtWNI1Os(_tTxu0eZowL)XPbIIcw@-l-Zx}O5xtmteI_%>N2K6G*t3WrwRIqL^ z_1L==)LBuwntdeYepW>jGrJYkZPu|#V_JB}-Ysw4ncfSMrAJ+N8975o>w*r=o1dpw zj<=RTlHR33?^cJriN##AVeKpVYqyWrn<{&}-iX-?7nGIe&GlY3nxO8Z_Bq2lGwN9G z1-+Mtj&kj`tJN=OtUq~BPHEn&GJ=|6 zdb(aECO_(#eTcpavh}JToIN++Tv+weOU>K8fZje@mE*Y=CTHAWEoz+JIeGI-N+Y_} z0q;WVr4z`~7Gm4aL-YmLw#c8{R$DhIpszt7l%zj1lpAx77~uE`TK(~PMT@=No)SpQ z(5zCD^g{CvLCmv9Ai9>}+UlXh7cPm+%P(12I@6qN?{%BzeRK4}3Iuuvj`Y3TK=0_= zZE_$(Pb8#+9n>q-ae_ze+aT}p3@XhlF3|FIgS-RkeYNzF3zV3-bm_ZJhQRR29}Sc*?C_aZMb(iva(Dc z$9MX4-tSNDe9(KsS!qe>8pB(bPKDzgvFl^?;T}A;MBgp@O4mnbCS$H(DV5zt-A+h(7n@ z78jK*vp*a))h4!Zz7`ysQC<_bg2|a7j$V3~xM}694iy%rg!5(u^+;v}%U6ArT0vp2 zCif_3$S_^OAtl8#{lmOJa;LS?+a)EOnW;ZgBPqeKK26?ST`{$y#h1^P%ANM=oE=cX z7N&1jK}6FG=rK_&CR8xP@?u9noYpFq7S7kB4(nT^KOz)7Qp~7mhTgLOrCv~vCmb** zyL=4&jibU-T|U3)Hs+4>ezF!9nuG04GAkTPNlyW&1wy)>&X7gNKj!DHW-qu+7f z4rYZjv(iH0^n&!%Xswu_S`|*ydC<|1p)O2$vAj?~H_zKkW@jo|HUEFASP;s{*TRpg zXw;1VWsZ5;Jk2pH?}&=1n&N-0I;X$)T>8(lUzhTwt3X~LFI;fsM5Ai@vCCF;d?Ml0 z)Pk`7#-t~mAD!nB;<4);{iMB<6VXdH-Mf1k;neh4JxBb{dS=;O;#cYR#VV%HsGy=1 z|79biKPYm&zbECEM(kf*rnL;F>t!2A$Wk{3p9&Gwiv%`y7p zvyp%y1->8S++pg#V9qMqd&CDAd|?4C+jW^|ZY&s=0q&HAnpa zuh-1f3#COu;mnlOwCK!Y!ZAzz<89bVrPB2+E(`~CH=?ipnD!qp!{}oel&iBcsHm8; z(md#0Uu||UB{dk1*ySD}9eak9EdN2(TD1bP%q!i zC`(@#rygQz=zpl^Ep$d8-+Z5<^UF74F!*0TA1auizMpy#=7%Eg{G#H~mn?etqQByD zykAZBr$clHQ^OhF2QQSCWT>KyAn3=9~6DpixKJ?K!#>9WG82y{kG@p)KFsxs4dc69Ldd$(aw8oniZ#o(=x)LeBN_MN)@m7A5X(OC3FnT@P5+=Q@Kz_s-d)F-O;0e)H@YZ z^9yw8GxcGNJ>ax(Os#m1(Z7H=ze~&K7)lRj=?7Ja*9dcrsmG{z^i%fUbkV+9`5EE# zqZS>?%FmRTf2=hhU9Xi>&->d z>C)*lO7yS&Wz*;7%`PdOz9>?Ag2nm;}IU$PeD&6__rk~D8#Vg9hf$by2>qWNVS zX?ISf_&?Hzs!DWT@wxgHHFII!%*cXDRn&E8aY&ki EKQa5Jod5s; diff 
--git a/modules/ingest-geoip/src/test/resources/ipinfo/ip_geolocation_sample.mmdb b/modules/ingest-geoip/src/test/resources/ipinfo/ip_geolocation_sample.mmdb deleted file mode 100644 index ed738bdde145082d329a40b878618bc5f4595932..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 33552 zcmb`N2Y6IP_r^CNfrQ?BS$Zg0wl_r$q!(HOh=_5MY?6g!7dHt_MX~qZE1-yq1;wt| z0L5Ohi(&;E_Fn$)%)FaD@Hd~|_dMb={?2*Nxik09otZoLZY&l{ipA3LH;cuRO186D z(){q#=%#30dFNeQE_($`&W1t@^^p(_) zvsgNDp6Yg2A@2l{cOv|ggnu%RI|cfwLO%`u>B2vQd1pdDi}u+fehzf>wQln~p`TCf z0`fxgBJyJL5*&Xi$`$l4BQH1Xv>LH1sa++st8u-E>2|M0&I>5;E%#Ad1ADEpbCgg{(<~IMf@*gr$AQqroiZOP%@sEQa%*B); zC>X;jLs2jnQ-+~jVB#rO*uzoEQAVHyQARR0O4K)++8D&g(#{g`an!~mK7sZ`GF#-? z5T8WdE_4SqC+R|-Tf{x^y^Q%tKN+xCy0UK)OgRAg2a4k-!{_}?Iauh2K+h5W6xKNv zdals(;O8?|AoN1$MZzzJUn2Zc9#ck6Lwq{QOq3Zo??pyeOqrFm^_Z-Hwh(1DkDWu# zHPktWQk#do!-PH`{^7!30RIT)Rto({Y9Yj{gkDX(h72QKE8=zV>xCb|wd`Q7x|U;6 z8t6BYQ8H$z&wnwsqsTapYogsuwvbE6rH1F%OpJAxa>rCipQ9oPg=TJMBJdZrzV(G@~)W=^){UY*W z)OQK(OAU42Wz;T5{0gC8N&PD5SJS>m#IJ?En)(`{ucdY!;@1m(9rYW?_2iA@P2|lu zZiBJ)`Q2jdZm&bXjXAfA_(p1XAbuz9O(K35wYw3&N9gxbzmL2h@ds!>DDobnwi)q< zX+I+3k3xS;_>VJZ3-qnV?#}t1@&xwDW1&CEob9lGM0rXa|1|t(P)$(+AXhb5_}roe}w>PkhMirP*i)5-Q^2jpeY&NS5d z9iexk-dX5fES4TI;det`cj5QoF+Is%i1()5hwN*p+v$gRf9eB-J`nmK>VwH4B0iMb zFyvWj4<|>EBgs+ZXhYrZ7+eSDfwr?y$9$A=^vA=VN_zr15n8sf^)WW;lSn(_4iR_4 zcQNJ`x(B*f_&)f4<^_Zvq;>$}2MT>M{DT-fm^?(pbEr+RSbE|*)Vb>O&Bbwf!q10a zApAo3MZz!UaV2Ca;$^g_k<$%z{WGY|BxfPNT*NEj&t`0n(C0!wl=?jKuw=aFmDCPL z-U5^*^pAjDN&84LWT?-l3h`>{HDp-CYZ0%bUN7_r^o7(H3B7?@BN;^==RxXXk#`jO z9dkc5j?!dw?Ki`3N&5O6mx|+-A->%BsVjtjG__;MW0AK~#E+wXJoHsUKLP%U!as?5 zCzGcjek$$LMBeGp&!B!Ld6r=>bx+T>STxTu)YoyI#nNsMV&|jWz+)~TFNAgx?TZcd zF_%!g6!FW1emV6kpeOGARU&>h^lOBFE%R5CYsj_8yN>qtu-6&8*IhVo6nzfsq5p(( zBgz((n~bhwH^bk+yjz5RE4ACm+sTc{zk~Lj!?&n?P z2Z(=2`y=vWL!I{t;-6CAO=1kU*7F(Sp9}vB_+K*bE1~bD_BHtp`7QFlqy0VXAB^2w zwfkez*5~pw=8w9jU%)8%D|iryYiidW{GIjuLH=o|kNwNo?JUSmN$TxVsUx8B+NF`{ zWP7p$Vj0HPd71D#GS-RYIBM5L|V5cC-d6%q1Kn|hrIqG 
zJ^=ne#s&#}Fts7%P~;7xZ54ULsf|E&OH0=8s{TR8JJc^7P>V9mZ){J^vXfGj`io9jimXj-xe>Cl5$YTw4y(?CR@8}?KEc`C|k>Zg&XLpy`^nTGoKv#6bo_&Gv9m->0o&!>HXh+jzU zBE&Bi`X%r$W$ZHYa`FoDO7bd0eI8fixNC%et+D&v0eub1eLQ9@c^!E@xsJR6x$BLs z>%EcsP2|mpZ=ijP$h#H#ZPafU`bKK%Jnj_wChB)VzuRKz-48JcKA;T|7qqu1N~W{KL`JL z#$FKmi?|j!K3_)OE6jV9+(EuZzD~YjsL$g~9QT&+-)7D`au<&Oz}ULJ z52=3y{bQkjg7f_X+NUVHjjnusOk#e2j-Sp4QB%2J!tVfnCFJQU2iVx z;W?(|p}u_h=)1H66wJM}Lgp2b#bgO$QQD=j%VPL{3Lpgdb*3E%Z8}*He#>3lU$0(n!C- za3K0l6)N|Ipn$IdF1)z1>}X~MdZcgB_!s&KG(|(wS76x z^9t%$l2?&elh=^flB>xz}qgypg<#yqVlU-a_6=-bUU|ZY1v@?<6;o zcae9K_mKCJ_mTIL50DR%50RV6hsj6CN6E*?$H^_^R&pEp1o-P3FHvzKy&;X}?3h3vDOu_YC#1??c~3 z{R8qt5&sDBkA?pU{7;3y8~z?1_Zj&)`Gv^)5_w-y-z)U5seOa^x3s?_zbAh%)aUyn z@_w>d26YC1CVwG+C4VD-C;u>1$E)LXJ%3>>w0Lx#SH`*_ue;EDQ11!77s>#X-Z2SNQ#?_cv5|s-zD@8I;u0 z2g4sC{Gp-_tI<_|qz^|KVf^%wi;&kqyOE5NF+*Md zV&okq{5bq3;Wt|>Lx%CVC5WAYvXuTZayhxeP}h4j@{XZ?EV+_Ajy#@RWvGukf!c}W zNvP*!+NX%TQ>mRso^CkgM&zF<^3G!XZ0P3*{apB%ue#0ig?<6_3x$6X{EL})37Ht5 zmx;W~k#_~-R|@?qYFAqo=jE zn`v(_)cP&dZbkeyq2Eq@BlJ6rJv4FrCdTd}?Z9;zn`jf)n4*x0UJxxACJ}dH`L*Dbke}OqKk}o0tGVNDH z-mB1eP=AelUBus@_NK)$%t`xg@*VPBLw&tFsl7+OkNS6s_y_Pm6#hrd`* zCijq^8R~XEN8T64ABKCZwXZCe4wzr*d%+RZzJ~ve#nS#y`rpF;j`sKD59E*JPvpNbAG@xQ_EPycuLf6!(dI`1!|wYMNQg?6f;innivk|z9g=HP(#9cX8enPf+@ z6WN*ULUuLO^>;%(-KqBwdQWP-$lhchXzl!1&7GSux1hCYP)P;!`v zTM-{FeBPJ#Bbhr2@zEka2L4#avdD2FJ|6K2!k-8~TlhBklX#q+bdVgs`n+A#+=kY3 zah#W$kMu)}paf86q6Ceuk3RtZfy|ps9z-52@(w{>4)rPIR5DlOY{>Ck5wdwAkr&tj~ctRQC_>f`57n@b*wdgh7vVescOb~w2}#E+m> ziM%6eheW&zdbRLt;D?!4OV*L~$-Lp}{w$=vD5nSCYqxyyK~@vRFo>(>@W$Y(+VV{>kJiR*op&+vE}?!Yc^P>*Vrx*Y5cyX! 
zb`|uig?N95g$y!)u%FZ2hfJxD%8ZbtsYv>!3l?L7+pF_bMRk6SDw zIW`p&cKdBS?g`|4fbt~$?V_Hipg&Fh8S+{3Ir4e(1@cAmCGutR74lU>eV#k0y@qyP z7y29U-xU5^@ZVS;nxhu&WJI9TUrkeP^gq}_?^Om-o=8tUV^QR`0jAbXO%$lhch)ZdqO zKSNzlf9L~*KM?*P;SYvCgvSjfhmlrtxS^_V6vj`7ktm0ujADE=IffidW|8B_@#F+@ zqM<&|Y-%>tJBhZPbdXL%o$o@Po4SYel0MQ;2FReHKK=k4e;`T;%4Eh5A`d1H5&1dP zrXYW+&~xGEF_tg%0&0bb7YV)C*rUVHONCwrf12>88@s~{=rd{0BFo7Na<-wqt~tn` zOZ`xCUNSx!_fO}|Cl5zH$9;z*s8y0jk|DB+tR`znT#Kqx*ISGF>S))K5pp59h-@Gm z$tW2!)b%Y!eMeD`3%vBhES52e zemW7kmry^6JQ><4v`-~ZBTpyKFx1y|CbhH3v&nPFbIJ2i@A<~o$6WybLdGr<`o+c` z^C;q%qCAUo8Onnwmoxtg@=Ee5@@hj}|25RE#c`{JzJ~f*=-1J{o?J)XK&~fmByS>b zHq`BGpmqy+D|s7vJGqg(1MS|4axeW&aIxU+UOnLVq0k7U6GY&Nk>z(0)?Hw^Mrx@uz7&V>p)gtiyBEpC?}+UnE~5UnXB6 zUqziiqU#XS5^J$aJ#3p{hTl0}3iow)Qj0j$|jYGuegg zisQOjELn@e?qmF1LLWFd|(GPbU- zn0g8HQlXc@pT^j9at4{Gdlvn2vVxp#sOz6YZ7z8z>YYdXFmgV5xS>970ktELUrGB& zG9>b?I;;D)cI;293}iXbDGFz zvV~kiF2!-njIHZgZtU?Ks~JZlevF783xB2XkAr`_@K?bIl6 z6Yzry$#SCUtW z_|?>|F&w{({%RbzhW1+6PoZ3gau2oZVXvcogQ4#4_0Vq={!PrencP6$Lf%T=Mq*55 zY&5p6?+)sBlAFl8$h!?EaPDT@OZ`67b3g3|40U}ELVt+*W}!b!?GeNu75ZcFA7^Zf z(6>V0Cj2Lu^CY?5Vwu3c)$Kk_{TUqhtT^sD>d%uekS~%iA^&A#>v~?H{wlcx@z-d- zPQF3DNxntCZK&&c$Ji5o!|^*Af6rpcM7Ve=l>sCch!SCBGxTCx5{4Khpk*{F(g4P_;909`e9U%#DoS z8UF*mny-J-{|o*hC>E4aM$1egQ=zppwyG~P4JBRp?csM2eg=G0q;orxokYAd;$5hB z6?!*n-4X9WyQhfvqShPnKD7Ii{mA}?y6pkT8z}ri%oz-Q2<@TdFw#m6Cr6MY4JYy* zWR5nzZf6X0#*$g6f1HSqr#=DtM4@L>w?Uso+fF)2C+Q;HhPqu3j`LFYk$y5j1`T!o z0n`p8C*$~oMEqc5XRm;sBlIcor=o;;TrQbM<|9@>yO1n0)a@1{UP8T;EEDl*)TWa& z$eH9UvYf0yeY1_N>z_k?F7!i%j&ZN!he4k&^uysVVC)F8l01?OkyT_hS!0;}hM}&% z7WLH$zn-}f=nI9u2!4a`8{tQp8zUEs_)&<*sW*|$WQ)jKg1n{FmkE72HQY~qo<}3^ z7~02*yp`0BL;QG)#fCOASCJ=>Cz2;ooWO%uVE7w$iJ8N zedPV*1LT9`L*!=iVe%0}bsr}sj(v==$5H>5WIS^#{B6R20{)Z2-wyvN;XiHc%xB1F z$>)&wJna|A7Y+4zeF^cGh5w4hVjsY~9pr1|>m=u8=9}=tIQ}hgIQTZ$1$@Wk>H6P= zzti}c?~(75yF}gx)IKCX!f_wd{)GIL+)eH=)a`tR{Lh8|1^h3W_Z7L9{F?lR{FeOA zP}lc8wI9eIQQuFreK#{;ofTgj{HA_{wMsul72@E?G!MTb~_PIgPl&lJ=uZG zATvo+W?v4w6WN)>xoBOTQ&*I3%vXMQlpgeZlD){@q{{01`HoT%&VI1ptJ 
zUx0Wt_4AmRtYo-F)>Xdet7LOX|?LaKJBqT~uc4|cxr z3t$)0FA{n&>=OE=WEnY)oKDUlReLj0X3;MvD@YZejWUP+T=GzI9(fo!pH#;kPJ01( z1Z~xaDt4ry;}I}qv1nGot~Pqd8WC6dwJ0y6)S;Y>Qjena2+Gm47m|y>2HGm8k#-b} z34Jl`qsTbfL^hKx*&jZ)cJ|Db*wmOdsQ7)o?F?k88;+LXa zM*ni5UqSmy@+$Ib@*475axQCi}9ZltfYn`qxmZXj$5qr2i`E$_+ir6>JwA6qG~B#27GE^a z3EyoCK=wPcrBkvv*3=wosH~3Fgs0{?=H|!4brCd_7p-duMQaM1BaPt|v1qs~uO%K^ z9G+y(W4AS>I5*s6EsI5>K$j`A%dPzj-42`AZuhvYdm)&QLYALMfZH0gerjE5{R%bbmxfy6;Re=Z%D@=Z2dS9gH*a`(0ks!5HuXyIqeRROEDcY)(u9htrJAS&-tYFR znQh0(yRnu`o`;U{xNR7=PLD(NQmcbToNl`pQ@gM<)>sva*DSZLEaA+a6>AKmy{Q#p zbzk$aKIP*~eF047{A`Q|jF`#u{4N_hSxvTpGrQGsVnM3Rb9nrIjL3q5P(!3z4gJIt zSQe_m1*xvjX%TbGjU~?Rb778|aqo2c-I&)4ECEb1ta?s2)-AU;dmmRd6!6<|!(zpA zLrwKq=hV$yIpQbOUofLS7LG=iB_^@UhH>t~jX(nqtaCoMU5`suqaSx6fbr=w4bU}L z;>U7>1`2ZG5iI4nlGat04WO;UDPhdvB`sX~3UDEgz`{)QGIx!y2I0Ohf!6)?+o~GhkZ_Tyd&VnZE)DJHlT}K z7*41&>0tGC`JGM&ZgD|D%fe7R)NEZ@n$r{sO)7~sEDAM;aOy=f>UTZ3-deh4Gg_IW zTe0K*dR%H2IP4x<09T+NMC#e&$8c8j8(kgR>s?7neJExv$5f8ip~;FuTo~?$%Y*;)&~@6}F3c}n7$%4x0~d>k8x6KP zs;itXhYQ!0TcIy%?1`wjcxo(yaoik><2H=huBvs|@I*yj=r6A=;PpCi8|LXXTR%}< z=r6T^YX`TeGT-aQO;N+CLM=78lA0Fl%CS9BdtOCs(Q-8zif35+7oe}bel;749C#pk z16Y*x4b}?}29(cj^WvO^>$6)cF)tn2ey<1jILlIOU0JN}g{~Maps6;HgFGGJ=}STx)`#ba6HS!HS|XSrYM$#Risr`R)mYBMaWv2eW6FcB@dwndrVv{}r&IN| zI)AUti5um1+uXP@!o&GbojVI>Z(V{aPerd3O^Jt^w%n@vt;>`tv(yd2*!KAXW`}{b zBY-(>77VQ0xZMs9YIk^0uW+$dD)QlW=sS*?mmjVP<8D}UBe*M(YE1Q+iCsW>C{{Tm z)Y70ggy_U@Bpy@yPByL>nBnkJ07)IZdiD_V>VcC=1hVgPs6kG{r@$D_pKN(^cB#s-&(avY`raBo7d;}>dlCH_<3A@>>N-xE({x8Y<8WvC&JTr z#NB%KS?wZ;GcG9$FN;*itW)tWg!;=4Rcj;e2cEHJBH~<~KCB07qWIN=7AqoF8FkZz zt2aG(;c#LJ$VzW7wwb2>N>#s__A@Yl&^v5y26lx7Si@cFfy=ELZiX7*sSfp0g*O28 z@_`3c_I^GlCfc)OV&+ecHEh|0XZy6opf-)5xvbQS*lAg*>#XCmQtvP|V*`uT02ih@ z3pGpMZ>`Kq-HFRc{VdT*#Zy}v>RUq9Th?HIY5E8&k{^RZZ{7la+zza`s%~uTu|8ta zb>Xb-p6va6oVDA97f3vI3$b@=sKa(PEBy$ZYJP60p#ekKydYt?V0(`zzFG_MV72Qh zG1qCwMjY>Acon0GTcP{j=Rr%>SYAbV87eUgRNnNa(4xdcqYx{*H{kN&^#Lop+MD6s 
z5oer&zQs!y3i>$^$d)diNf?wKw+l}WFM2ww{RfFbsUA}1ftO{Oi=MTFP}fkk2-NO{j;Tc{t^jMew9eEZ%AvbUXE!#H4Wq zut=cAuDWuM2QAuz_Mn=e!JywXxt|(F-@9Dul47GC<)*oybUC_6E#On8O~+8g8!&d^ zSilR=DPE`EW)9O$xV>&q(Czd(g4hKlE-ESh%?0V}!*kK$(i`aEe1ldfde3 zOv7tky>%9L!QpK62C}%`Jew0cZ@e)yM4}jl({h?Zb)n^<>_)6=s`K@prV!%=FUop2 z>P?Nyr@N-FKDCguCY(4cZM**e-IZrmh%JlSim0U`dAEyl8r7q;1{*}&j8gMBMLTwM zMS&Y{N&&BWJ5o)$f=;*JVVb1<4^8IcwqyL3nSM}j)%XOH{8-W2G&2;!-3zPw6Wp^$q{s8v;?t~R0o|QGc5L3|KB+0u*mn}8;;t3&&-Lp zL_^iMA=ns)YQjzR+*Ov8gyLH_G|E%KCk(eM;7}V3^=xwbv0dEfRPfU22v*nbcPdod zJQa`KqdyfDRbVrS2eW#f>5Xl{l*q!+;+8~DIIs|58-o|ILLUYMR=oXs!i8?a^xv;} zs%Y!XPCs!b&_daEzRZXF^`4RygB%=zxUvQi(YVH#Z;43 zEq!jRcut&1!0ikN-L0LlkJx4(xUn#)XIV*Es2)u<)noU}UdZaSmOYTwX(LwJP7mp6 zfcNvDD-_n#pw<3|tIq252j*PoG_0_l)%O{EHE9Y(lQ-C@-aYX;tu74@7QY)`j?DN9 z1aWzoYt?pV(1%slboIV!n-}M_yVR$`SlR4m^y7ZReEJMrZ^PDAc$sZ%!B@CMFXL2v zUWYmrzuRUHc=Q{&nsusCuQTWj`$IuLcDcz@Nvdt03aWIfv6er*Cel=&=;D&X<=7x^ zT~)PpRZXIOwavjbo33zsy!&(o9yV^L!xaoy%l4CMo9(NYBsK7|I`vGJiWji;zzjqEIoKQXQ6;)4?Up35sf zGrBN|OtYT-n#FR4?~L}Kcb|?6<$a{Kx}wl)cVY{LI~dFAxgfD&o>mqL z<6~`0c6I%>4cpFcZC!nf4LHmv9rae?P@iZ;>!Io(wj#Xo=7hA=X6yK62p`>0W2_** zWl>8^J+aN6y^Yrbx4kM@jTerfJLCz5!il!_5!-5Q|JQ=QeI5=%SJnjmu39(RNmNgv zt-U#SV*l@J~!yRFSA%+yCQ3z6e|T`pBXXeg))kR6oPucNi=~Me3I} z7Z!H4wN`yti=9i*5vs0AyyhgO>i=*_g?4=R#RdjXw!*^LqDAhWoj3velp`{g~U&cK3tuCVZ;iL$$~G7>LM|9g3c-@#v%tw ztKe&6&U2vTsd=xiu(dh!%S%?Dr_D=hR-c{NpY_?pX52w3|r6Aag2^oJdO{YHj0UI=Zqi}ytQ z(5Ii#?RW6gL0i{^_iTqREUt+{TXp~aHMLcHAQ%c&sYSueVEoQ)gny^@P%wZGn4)$H z|4QvHeD|mg*CuOE2>(v)u3*6BuHyHNY_I69%hl{jHNQ;s4_tR8J~v zY_@1CH@r9$Z*B}n@m+gX;uBypj@7lOARbzRcepLL;>66CZS`XSwut7$)Q2ypPdya- z;l$j*8VnPc3(qXK{HlUgu)L)KE1~}Kot1VI+s{h7 zA1g)LHg<(Oh@S#@;QP3JVK4PdkS zaYtQrVYd3y5xn53n}Kh*0sYP0+zh<7S7CQw8?Ff@E;=c+dDUL0SFiiHA^B6oq4<_H z;+KVdd~?D(^$PV%;xyGK=#S!%{(&KR7x1ek{vbe~5uQr;u~qL8hv{B-hHKn-&GNYt zj|X)tl49FEQ=}iT>Jt+>Xi&2I2h2pnYFuUws7Ch>i0M^R)f%Yr>209i1GNhOG`R-c zjrs>{#_b)jLp{UWs^1&*`?2pyENj?47@@5zNGzKFZT*3uM}6{1JkR`CHns2%b1Cb; 
zt-m(t@r0eNi;o|>1TFlJ{Zm*G!|zNjO=>*mtMQ0Cj)$TX8`A=8tMIWwJ)j5UOB&vC zu`zb}y+J&u^%su)w7*^R)c0NWh{l(s>DBn#u23|heq-jJE#NbQ{sW$@%yZGc`oNf# zxf-J&bAuiQA$3!{xJ!5$(4(ML_@}GP`~ssO^E-@!j_uSaXsiAJzMeSoO4(XJg@3wB z`_-SHi`{_!$x;0w6@FpH+q3#2h+hivyH7yQ`Nlx72Dpr*zlT{kvhMLOo z;#hTkT54VD_HJc0*qu~`n!=UM%NK_iq(v4-qO~z=WN~F(7;~^1KbXg&l}(|>#SLLw zV`EKK34SQ3jz<2oWBot0JaBRV diff --git a/modules/ingest-geoip/src/test/resources/ipinfo/ip_geolocation_standard_sample.mmdb b/modules/ingest-geoip/src/test/resources/ipinfo/ip_geolocation_standard_sample.mmdb new file mode 100644 index 0000000000000000000000000000000000000000..205bd77fd53e24eece4bd04460465da2a5dc4b78 GIT binary patch literal 30105 zcma)@bzoH27Vd`tL5jOGcq$3wQlVsG6Awwyl%|tpNQNXcVI~1WHFcvxl`0e}K&dNr zr0z~zN})>KUEa6%{-(2f-}~d;d(Zu?^{ut{?6c3dbJAKYmTneH-!Ci{OB&hTV(C=@ zzX$!EWIEZ4>`nF|`;rHd{mB000CFHXh#X7~A%~K~$l>G&awIv598HcP$CBg7@#F+@ zB55Tjk(0?OSPx=MTN-e^gDZ^d5?o76zHW0P{3e!< zkV~PROuLzEf!1nlT`megCj2($ErTAXogmxE4ntjjIkgq!O0pB>SJ6J+Q0Jcj{Y2^~ zrS!gkP&odk~7y2F4?j-Lb??(B1MBcsB?}L87&>x`wAi0fv z2zd|Fe#B6>?@?-xA^y0~pD=d60Q4O~-wA)0@Silcml-bQcov)k-ZK3AB+6H zxaQ$H2-j4``@!ySY+e5V>I0z<68d2HLxevR{xIPWXSosNNOBZ8njAxpMfq{G#~bS3 zp8$QL@U8GC34b!nOfl?#AN^^_n=bNZz|X*SG~)-uK7{t6BJVKhnbfn$nIdkZW=Ecb zwo}Ai)ZB=BXnRQ?={MB(cQ~~pkbk7mkAgqT_yc6W&t`r$;+XrojvO-AP-}VA^2q|^ z2Wc0Iydvnu!Y_eeD*Q5*DJLrspCjUx)T_v9#OJ2s0~+Da$F&UCf|TBEA^b%wQ$rpj z;)@Z-7*+S8@28eL4tj`o9a(Rv^BSmy$t9#}N0Z2lP+v+mlPzQ`8AZ7m?KZ;!&(Mz} zb}sD%*-myKww(3~awXYmsN1`W+VSKGD0iZWp9KG8;h)01F7j0JH1c%v4Dw9!Eb?sB zcaE{u?+?U0?RFlnjkwNF>D^Ytzkp>f6#7NbFBbk9_?HO(QkJ<4`sG5ug8G%v*V4X< zyjtX4gZQ<=zm7TALtiKK_3&>H{svAbuzHyM%r>^n0k^OD5N%`$gUZ$a|3SZ9;#D+QWDzXoLDK{d8wxjV^SBK{=ePYM5N<|N1Pvxq+@;?Fbw0`wP!{u1?<(Y`-uze>JFzHX@7 z{RYavDg3wKzsdk^_OxtIKa+(+&=)W7#3%6~-tW1)Xy>_L@?e`c|C z$1~}60I$yYt!|%#g&_Jm%?^GE_Fx@f!T(y+^9}WH$?p*VUc`TZ|D*7Kg8wsfe<6P* ze7R`KMgC3xgSJ^x`DxvZod!{rPwQ?tXbt_I(9&^@q2G(_P4+>oud#Lc zgW&fQet-A_m^+XhL=Gm0kV6f1J;SICNBI##9|?aHW2000AdJVfvDC+rjt*SK 
z^h;pZ;VPwHMwT1u{;NQIj_@ntS23@eoJ-Cl=aUNzbv+BIEkgMk+Q*3aVrs`CUMuwD zsD}&(KTp3Nc@4C~xGu!iDe_k_emwLOgnlCYlNdXhJcaBMdC6ya8e^xEXOL%-XQ95cjjj9R z9O~zi=OKQ+h_8l!f$@j1|I;plb_4S-hP_7QUqby-@-oCPr+o!^rJ?@)wbZU6uSWhg zw67(vGt~LlQ(H%_M?c>w;u}zYBV#v`H<34!w~(8VdmpaN#@F?4VeD4uTWQ}$-Y)X) zKwSRrUCg-~`aQJoP2~^m2mSt(pY{NA9)!M)_Cq56Ftta>N0IlKh(AvK335B)J5uqX z+0=JIe=?<~Jw^R#=+Dr8R>Ys9_B`S*(0-A8N#wmu?G@y`D)iUjzb^bYnD-|67Wp>v zcGG@`eAiGthoKmAX?w6wqu=y%d7s=1{R3RT(BB7pKkX06j|_FWkEwk^ev16hMEn5! z&xMcQPWzI%Uy)yv-;m!L>iWN<_C3n|AoL%p{{;Q#ls*)C+OLfLM*dFzLH=o|>;DV+ ze^dV_rFXZWZ@LLT4L(Fwrh5;vCz&qtdLiDMdLN$uWky?PIBpBgc~y$cdyC^-iKanVe#%>zPVzn#D5g zY@yGfo`LcQ(>_GR52bb(nTfnC5uXX)#+aRSh`5uQi$p)__IOAy%KMD1^kJBL-4Dk# z2iFnIIWiUReiXG?h#yURwuob%cMk|Z2Y#;b^Wf(TzrfhtgU}0wUIf2b_$ACMg^*Y1h zjCF57UKrN}j4y%RNV|!QKwCw7DcKCIg?6i<{{1NQ81*)xFQXPmJR$UU_#MJu&b$@m zO2j*jJ$#eMJ0AK8%sEl$Cs8|@JcaB+{;9N2Gt})p9r_v6&m_+xIUl;8L;qYuUG6-T zJD>V$!x5OnI(8vqH_*Na_QkZ<80wr$s9lQqWwb9RuOP1^*Ba__S0Vpu>erChiuiTZ zu1DTFp|3ag2#lHT8Q4OcS$5c3`;A3^Lj+KidMgpW26pBNrm?V-f#^@lT$UJZS&@aMswFZ=~8gL$fdYY|z4yklrDCXY4Ld9{cim-0vBo_o~cx&l`{ zt`k|N0d_c**JBC%M&UOxFG4OQo5>ckm5h=xvW;AZdgI2{?MqN^hu$Id<L&^PWNN37UF4~#=QP@5f^owb)5%Ei)Un=~|m~*+|=x^v>iM+MA?!tAI$iEu?HO#qI=+{BNUij`g z9`asM?mootr~Ux>ponin{2}TO3;hvlk0SmU?Z?R{$nA#ues&;lr|@^de^U5Q!GBu# z&!Fw-GhOyMizOX%w8!%nOHVb=UcmJkt``}9iF{dKHB?5{6lIVksp(vke}jylVk7z?sEj{`5f%QIG$yX zFKK^esQcz?=*e>5GX9;2e^2cP@<-(VMEhs*7eig&uZaI9{NI`L2l*%Bf6@M1bV4Zr23r6GjGIq7_?Z|To-wEHva&FQi;$CV#-$>+{}SO}3jZ?UUvBJi4?(}u*ge<6zK!-(u&);R*TBD4_}4M-dU74&>qYzq z>KmYM6#9+SZ-Ra^?OVuA z7kLj*e-QdMp{xFRn6XEM{wTG_5Pw|gPZ)bV=4j6y*uO`BJ8|t{{w~;0(te73ntX_W6zDd3%^4_Mln|uf5-WBopESB-fXZ=26 zU*Os+;vc}@$Flp$4@LYVY9EuIke{O5XS5F(>ihWIVwup5`j^Q02iI5hzb3ySzctk5 zzC+&k!vBFeKSKXW=s#2c1^TZ-{|)}{jQt_>KdJpi{%ts+3D4MK>~yv_J&k^ML)ETy zjMwy@!bhR>Uc&FqGJVLth#y3|pUCSEeE{`=(~=Y?R9uc>(x2 zjO7YFk6J!iV6jYGE8>OJi^yWsQzGJ})XSik)2 z+#R?U;p)IuBg!2Ef3fh7Wtm#?IK)G=>qK5XwFbn)w3mo@BXrCmJq9A=QnHzBAzKY~ zZWQHW)Z2u<40@b;f^0WbYqBo8961-^T48*xt%Tpn_$u;v5kCR(6NP^g{F9k?3fU#% 
zry_ou@K1+-2J_Ao`dQS@CeI!w&Rn)I0uMzQUsa=P>>uIke*NeOx5Z^$3qtI`pb`yCsc?Z%^q~^mY19T)!~B3-*(=pMt%Y_S3MRq5Z6(ZufK0pQru;`6Bre`LdzT ze}&qsBk`>8zZN7Ozb_mTTi?!#1GdNS{0 z#y=rHB|jq%ke{Rc7sl52h4HQV74)x#{tfkS$?wSTk@thh`;q!j(0?}eqzr1m;@{Jo z4*fT90JY!AKgd53|BLqD`NXb^7>Kh zZL6M1&%4&giDyM*s%84u|t zeWaf}++vw@FYO}@bvurtHj6wO_0OiAO$H2geh%Wf)boU%54}M6LHLEjFJhTu=p{lg zrCtWTJf%;qdaACgms-`}doJY>2#IK=#t63HeZ)R)@d21@(Yb)ZnQNNwMgS=DZ-9_zg z!#2JwiSz;*TNzxbUA~&UWZKXzwI-N8l_$#zuC0`?7C*L67B;O+6Hq`a(ruGi`F6wy7+Hq`C<2W2fOzjrrd z_fCV}UFbdF_hc+x=)It$0##Siva2fZ9OB2MK*J{2{_0%DiFFhYNiK z^^wp=(H@{bEaKy6C$$NwyxtQTvm$R2?aAa6aw<8EoKDUlGsuI^IFJO#G@h}quvI6 znX#ulMlFGL0?)8_JN*vW*V0}Ndj;*4WGA`GP`CGZ?6RFpeS z=%-UZ1Nxb?&l2&op`RoCbD47Aul!5?Yj*1TrT`8m~$oc zwY0AyuO_cCoSK}s*BM{ecRk9jW8Ql5266+rk-U+-31#-vz8Ur{xOUOs1bZ{>E#$3+ z`uDaXew*-bhkpn2?iBi6)b1wlLEgQz?<4OgA28JQKS*sG`4IUq%0D9V9)+JAXOA=I z339v0+d*xo;nd_9dlKcJV(e*={|xkJsXr(5=c&Db_=~h(67iRzze4?0@-^~x@(uD$ zLw&z*QF|NZcMJU;`0on;J@|Ws|31s@h5mui_ZfSty1x%G|GI%6G52Hg6Y^8?Geg~; z1IYiJ`WHg~68cxd|C%}9K>t?g-@*T$u^-4E$)7~t&(wZF{;xv+4gT+p{Xzao{w4DM zM&3Wh?_){n(}v-m)2MfcJrP%rRJ>15`00%IB72jlNcBOVzJ}U92!226{mB93KynZ{ zm>fb5HPrQ>U7EvD-w2_Pq&^DzXxd}QvE(>IU4A_BCZzmn^Wj@lexFG!Ga336+EdAC z*9zq^U9!6#w>ULyNn@QS8JLw>uq>FT;T^`zA(ntEq!wsixGSt6!Bx6UR z{4Cl>le5WeGC<~#xnv%hZ>Z}npcX{Eg+ecaU(8quSt{aX)XI@pA@n)$D}`SLznZyo z$$2E6oo>SdLv1fa{vzSm7<(G#Sf9nrKbEW|k0V25ouRI;o>~LShiNYn@kZ!P!jHgT zD*R^nEiBh6^eD9$;%&5-k#RC%sN2y_tpoYXX|EtxlAYu#@_6zD@q`1-$*ah#$!o}K4Rt%OL%HjzuOru!H;^01jmUk9_KoCC&~7%iuJ0E3a=vYb zzlFKCl3PXoZPacj??C>YB7PV3yP@AB^n2moC;a>2Kfv4vg}x2?L&AR;{v*PF6#io@ z_c-|kxt-iW?j(1SPa3NGo}L^lPc!xm>U}m9@ADk>=gAikf06b}BJXABuTX!Ld`-k( zr}hThlTpPsz{7 z1LWr*#!DaM_r;v+^Ck7Kz-stkgNK3NQ2*9YxBENj-&6mA{87YzqV_ZS3-W)3JqY}b z+VA8aq^j3}%PD*pZ8zwl?G>2Z@%_WomeE18fsd^UTT13BwJce9M9!u7e$B`kjj;tpe$S}EtRKM4Vt4a6~ z*h}d*lPx0NN;?Y1Xt#;@GTL!60k+fbAeWQs_gCOrNxzd^MXLDmv`;Y9|8C$(u&>2+ zGGi+56xvo5@?q zO{B`(jBAVVZ-u>8__x8no&Fs{zmxV|;N7(E5%GIz-v{0=^ap4^2yPSlL$DvF|A^2Z 
zrTrNAIPE9M?c@$}C%KD!(r`wyf1jrQ45{w_SzOQ2e_rS>z^JGZMZQh$Cf_07CEp`eyY}FEU-)}re?Wg9xnIOTr2P^3G5Cpye@go^@PN=i zw^(MVy78O;{m+5i`bc|cMP($>6pLC*!qtgLXJ;&0P#^1vw#PdIg(XFe;aD`(8m^5r zdOS{RSx#=A-C9$S7j0|_MH{S%oV>CE zYgcJrM?BURo>>x$HpHT+Gru$zjmN^MbZ!+Ybvd0*kH?QnosKNG&F{3UO8t&3yWNjU z^SpM4&1YQ?GS6Q;^+;DVx zIIe0A?%THBid$$zv*(qeYCEcP*->@SVaxKnZ7x-}-R8}*d2v6e8&$jA)|!$z9s9Oz zK)JF=vH;-Rs5}^7>GHzSHKg;dg2ZD?>{|iT0*Y6m?e?CEH#cYiSO( zhfr@`MJOIgpxXROHA4J8pBs-0&Gu#a+)fuBX1*iK>GJv1qjcMBXmi<)3tGaN4dKlE zcqm#Q#y}~|%}31~D7r$`;_^gOXhjHB=VYU5j}2Yo^{J|zcz!lKIMpNg8JFMf#po-s z7hv?2N88aL^(4#7#2~4T#beQSJxD6T?eSPk1OsVgAfOuUbYd*K9H=?dpXK!XJZN-b zrpt!WX!BtR7P>u7n=Mnfy{IA_$5gEAs8>HxktYUAWw<`FB#ed!OG9xqy`_bt(B*JB z(b=dUqtNNMyPT?ipEt|ncY0jt7mve*j;aY3wxC%t^k%|3(b_f1tLut%hE|1|BWP?< zg&KE0x5sN&J*=j`-Q&R=EVRRRX}c`w^XaCdpQnyT6EQBkiu8D^jx~3x4#?Anb>!3x zQ=QGOrgVYRo#pZQ^(aO&J$}0v;~;3a*)x3}w+FSgSre1KvUW|aQ_rR})DVwgum}X@SC7eK`ispwBO_6A0J2ngR6E3^U z=GVRD#B9Jw)8p5RrO4@VVrJxFDy#mlPA>n}*b*z2ypGm7-FLY;)~?BwsJajH+yj~1m^&H%)%X}_9=bTte^)itYbOx<8`ExqLb>VtEg*m8c$}y;^fQxdn zg+8Ab6HAQ`dzQ}&3%#f&03I~j=dpQgPK=K}V&IqN#OhSiHT-8jQ7=9WqcB#A?l2vs)AG;dnzx?Yo#N zBd0!Moji4Wa!%P)6ZM?JPZHhFCLa-j0npW`wM&hLE|@Z0xGZ`>bk@)i*cATGV|O zWG9-q>zOI)#cH}te5{qRR=s>z#_F5UBWeblMqz)z z3WP>s2HSDgQBMNr6F1hNvfT0#bi{ufRan&&Ua}+-Zc)2AtMFkenJSz<2gWF>zQA>4-#Eg)`@=#REI`8*GsJr4N`Z9 z0gRPWRgW!ORX_PVtVwB@+`Uyr3&Ys_arlbXb?D(yIT!7;yL=vp4SiqW_F(I`W3XZU zK!-WdOZf=t{c}zg4pa>-vE_+oYk6G@juzPdtL9-Jt<;;;f2vkvIuMI@CfXuck!D2I z#I<9aQ>QLWTy-i^LmCqogT|bSeAub2HMzx+@NzY2u_n*hjB1K=S~`|ymWA44^=hzG zmZK_<&+hQ6s$7^I9=(y~I`D+eP}Zj(T?|Vl4&rLC7ygkDL6t$MBDzJUV{4mPTJIN9R>g6D!m z12$;~9vGgC%kRN%R1++X)Hg*M_4%x{P@H&j)iEccnpd8WRmWNq>S$2QiyqhNaO}bW z#@xj(;0TDPqz>Nr1uUjGlB;D92Xov<&5SB*V#ea+7Eu{#z)qyj_uPD3E~mqDvMiL42zqYmry0Wy2yJcy1gEcp_VrN%Fq;=;OwbrQh z*>1NvF*VUN7xo=&LU_#R2TVk@q0PohT8VQ}LzA^qzu=%kJpQg?J%mgI_gz>TZmElP z#MNFL$W~(%#|9rxDj1`gPHez-wVl~fFLqB1m9kt69<@Hq!z&W@53Gkd^9r%0Rb_Xm z1r76Q=D$^<{`^wxpdp;hCdbtHcX-`6Nm^Zbb|1#0nxtx}Rja>R8ubobTpGfl!Tzn5 
zM|1*K|H-$ZreGi*X>1ZZIu4h$Msl#tN)rS+6&!(nRp38@MV6DmMF-uj0$DYxn z4E2;%;^i;4JTn@~%*E!ZMmlagU(I3cM;J=pEIbq()G%rMSzd?Jf#;l$tx(^0zA25n zt}MsnNxrz1#uD|h6{_~^M5r;;8Om(M;kgy}%@d>tZ*6KG;RJ~jpB|`ob%ewkg55oc z39HV|=-B@pA&bx#Ve|xch-43{*93jqaHuyyEGKBE&zI$JJM|iZ-AbLhYKqnRhWDjt zBMvHw!2E#d#Y7zrlBvgycO;xFT!onqFBV%bx)cu_%eW0kQY>$|=EjR~fC)zvW~<7X zQ-ULEW$KaR+2~r)0|ix~wl=&;@YeBiiM3pJEUGk1NWL9M9Q1Hm(B^UDzSWKx491$9 zLuz9-<5VA^OVvxCdO7>gtVfL)qUup$vA4T$48d&jy0Wm9Vbdtfv!f5t1;Kv{CjwTLM>BYXP=C>a&Pkx+zaqMz>ajH=(NFsr0qNgSHY5m?& zRJCJ6%vurZXu-`+IUeI%9hlUffVU*9l5UqddtvRw_My(Pg}Bw^TT({v%W$hz71pjf z$wO>aC|0YhGu;?z54GaC>k))QvCV1DDQ+|lJEeL`Y6Q7G>dg)(XZ1>hExab9_xGq6 zKWAP;3aY}P_V#~QG`y1geJ*w2!9d5M67SFItc}rx>5W=(*7o6r##%FdrW!YRQ=UFb zHHR;U$wM#ZEM9q3E$S5pZw>kxm{&vfqEPD2w$@B3#g2%r)f{&VI`AG+hZ|y7dF(zM zRb2U*4h#mpa^rp2;qW=t>m+tc7hfk+9hK3051Nu+6mDn;HDLgoWzdHAAa(pkH8@T5Ru#e* z25Sz!e&7J4R`DwA5a#JEz^AkDDr-a7S`doY@7Nl$R$ZpL5nNu0T z*PodBP2Di{RU>&**xB%kgAZOf)Y$MouWt%J<@BLf%L;L<^zx?i0-+}TTmqOArO7F) zMf^s2Wuz_~iR%R+9##+N-(OH0h*}VETK956Q2Q#DS3S=E9}9x%sX}!m!BIpV-A!dq zj7|MLO1}V|n2GmUw@V#;$_hQGi>=Mi35Vi4)}!llaBlo}h55HiG`Rq;LKUF| znrdFcaU?^}qN(^$;>F7=YRAH0^JHQCny-v_02oSn)!~(@&jZy9un?|Mqa|B?OptHe zc+c~yb8b+*ZR_0*%p)V{}bM|1Z&^0PaA=0=P=I$?RRiu( zZ$|h~;&!3;YNj1-O-#e2*5!i>W6iOq)bfj$M4uBAR&U_yYb@5ge6##w?R7Y8ICbLl zT}`kUA72~Q7dfoJF0R0ZK@7jhuGLG$aX_6XOl=;w8{ccpVvggW8g^LBeQGA5RsT5- z6qGk(ivGJN;4>HAbktqp!?yaKgbe|o{dDWff;b6e;?o=+SjNC~GzU%QhEN^af6Cg( zlGFm~#W5L;!3f2NQ#>D>Bh@D=)h@MwX7oK0<*`d9mreEcu`PsMd1Rm%9|zHyeth=P zuYcG^u^g*+w?aD>3LiQd`(B<6KY?vDuPPktQ0Eb~JDD$TTEQ5~55&W*VevwTThkvO zaB9G4#9%DIP{JFv+M*mj{Gz(C{irL5vyFaexy7+MtXIizcKYiazDc2P{P?1bRTqaZ zySk^K*JIDb7a{a-ez0@b>PB_wzYEq(8vTcpV_%vOHE*{OO=VpXY;E?3yCAs$Mm- z1L4S0J^BMV`a?F3*d8y&oBG7>*PmSqaW=-Q0*<$8lThmhZX#oF7CIt;H@H$=YZX3e zH#UVLYKm9v?27K((iUn@9z-1;8~&!Dh7ERopBHc6_&AR5Ha0GaMhR3WA|{ zr}>LSurhfFt%@v1ha^(31$fKSA6Wf(1;h@ls>1gZKmLoVJoxyB(_&3gHBQtm*0NAT z2bx)puQ2+>Ah)7J?cjXDaN|3*S}pK`f%j`1jZl>n@7iv4-pt4I)4O;^&o5BLzwZ_W 
zfu>m7&Mv$_|NENa!`Eiqn-eE%mwtmrgRn2D>50Pu8i{)=!^hdCPPL(i8gNWkHR3pD z-d3t&|I=M=d_mQ_8s2_JCm7cc&bi(wZVA;vf2Rjd(VH5f%)a3mDn$nyDYv7;Ez7)YYV=z;uMZg`&gm< zc*}P<^@#J}%a9rpLj%}d%d@f6XY`wnfe^%Yy&QiA!e2>}JDkm}Rz~ch*f+iSJAkSQ z>po5+c<03)ss@)i7-aN24R@zD(2RZ;smk*3af5%0;=8A7CI0z;`UEBTYFBAoCoRD2=D_7swZFp2zfpYV(ykUK=s*knv9i}P= zryusXWh)=_6dB_Wf(JN~F=zVa@3*VH6R$@(P$f}RE9;m6q zF)Q9#i*Jdoq4wI=(9&4ER((^!VzoZ)f1~&#%fHce#dSCqHq?gkX{f13TH}zCS)~m) z+|`8=;oA1jw(#QgNLwVjBxa4Y)wbaCQ9G)vt&cRsYZIZ?ww7>KYimPYG3H1;HVzfs zhd0yzuR=wrG1%Zv*z}9_CEXk{#R9IW{t^Yax<9{6*ZYm?xcswlyDuiC$$%ufb=GP$b@7f zGBKHiOiCsrlandPlw>M0HJOG?OQs{!lNrd2WF|5*nT5;gtU@2(oXu3{$v0dNCuI?WC)3SsP1DivN&0S{EaM0hLWYo z(qtL3ELn~$PgWo+l9kBHWEHY1S&ght)*x$=waD6J9kMQ2kE~BNARCflWFs=1Y)m#G zo084Q=46D)l;AHs?=A2Od#stkszLJQ){eEoN0r)E!F_6D0=NQ)jeoW zuLIc;xoFlgBG(CiXQ5-6=>k7a_+9CDgWp~FJ?Qs@--~r`(eFdAFZ%t2-ye1S{Thhg z6s$qa4i>rJ;SZrdlpIF>K@KNJXsYucNpBQ6n*5U-gYDv)8(EbfOO7MQqd!6PC(@q; zf3m|*G@brbavB-$&p`ffN8a=g^h}{=F+ZD}L(V1VAwM5$CG`T-3sE1TUPLY?m!Q8? z_f5+*)%h)lzrvx7dDAM+tkzWC8u)94UZ-bF>&Xq|MoqQt&8Tr5O0)J!~-Ix(39b4jrRu##y{ zZ9h5tDae%QrxN|t&}rC9i#iYMbg0v_&LHv`;b#&$Gc#G>XBB=n`q@ceG6$KH%!Rq! 
zx=w7oYq(D8xbu?v(8n|K!!H0`ki9}=VbM3EZxOl(GgkOE;oG78g!b2UVm?c5ft(8> zgOLvr`S{~32EVxQOECW%{F1^Cg)Sv@Y3MS{mPK8Sbve}KO{OHcPHq)6)%B>zekHOp z`c;^#>d2|N>de&8R9;QwYtgUm@YS~Kq8GznJ+eO8faE*j7DnBO3@00tO|Y$|tea`7 zeKdz3LB9prQuJG)A1QQeX4=4S%Q{N*+tF)Jc0jJ9=ttw4)M2j^dLytpQ^$&27kY8% zcV*p;>`wL|dy>7#-kR$C`e44V(EXU{4}XC02SN{GZ!q~gIYi{}e!C5$|A+90)JJ?!lj{yzBog+9Q{LHLJ+ ze;E1*dq*99(v0wr;deEjRk!1yKQkwwPeNw~Phow>I?di0)Q?!7MSYI-dGdm$I-iU5 zE+KcB^%e3ed5yfTspf9byNS75!oN-b4*a{qzXyF^XybZ6bm*kjp&xVhiNkk$3jZ1X z=fZzM?KB-wFMmnGf(k3jY)IXZF63Uq$~v^uKAHw2!8{iF9-1++iBm z$K8YUMC~Q|381}&_F*m|{6xY}44p*iq|nKjO)mTt@Ka)?XD=1%)Fx9>zBle^=%K?#;AQ^;QFjft$5bde+GVWb5W{L~H1pMEG zF3C(N{8Fq-lVwD%EWL7MdE_gwu1HqWRQsu#do9oI45?}^L=tX`tun=|q6 zdSCSW(d#dA1LzGT2a$u3|DE*^O?5wp!XHNe58)4|Hv;{U!XE`aTIfHa$1wYs@W;X* zC-it-CtC=ABGy69OhP>wYbDkcdQ-`13h!_-H}qvXHjF-^6dAap@y3Xz4;H;cZ7ei8Uq);7_%>pD5#0}p@X z0bPsut3%c$>mgrX(6?C$PJ`72>rpVe`U!$7kax{?;-b!{yy~g3w?l@gYXZrJ}ml2=p9A>U*R8vJ}&eL=#xU9;@oNS40)C` zuIqVSr;I=Li(0GmxP<;?X0HhUD!psyUuS)TyeV?G(7#Rpj_~iozeoQ*`9Sm^qW?(f z$IwrhdrCeN{paYvaOjjBpkLvA_zipw^4@s7;p|)Z?;LrL_w+x&|H%52=zoU)h5lFJ z{|En@(BF0KX(HW7chZAIz_`AiURViqt+Y3NA2K1HEAd`=CPqF9Rvz|~qE05}lhaQD zKPBr_WNI=EnU+kasrH{9a~Xur$V?{qnT4N)epWIY`q{B^Qu~rQG*jX^^vp#sx5HEW z%u7F?rt_*^bBM#NK<*i@I&b1 zKB;3WCi=zEFX7OtUlLvetWf4lX{ugn_+^AH3tdj=@|>vvzoPIf(XUKaA*+(r$m*D@ z!MY|{OH=K?Hu`nw*A;#}`1Q3;g=?d-4aqRFk*4Z}(`$^mCc=(fy7k?qiL&l>NxI+l)Pw5H*u!uCBoVco*&?C?Efp}Sy>;7lCZ6<#-y z>rTH1*%SRL3B72ne zDAc2|W>f!(dW^2s_WzmS1@KBh-&^=T@DmE1h?&G>67-Xbelq&W;iq7ol1xRWCevuD`-5lD zD;-t?tn^rTzSMDLATyGg$jr!O(Y4BDh0exacH#TN&mnY9W^%#L%{mX6SLE{1%a2?E z)&bYMaR0>G`4W&pLn%B!kFcGDK7D1NTa?7+IVwf$jY! 
zawVZd*()Xd(k4?HoV&3NwVkqLIpoW;t|0mq;a3v6GISN@s*=^n>LOPIxtjEA3BNYI zI_TFGem&^=TBl*&t0BB-tT2&l1Rc(8G$xyfepB@EuKzf{=Fkz$wIExHek=4Ng>DVq zM(DPjiGtrw`0b%P2;C9ab&$|8oa=;KXV$S~7c!3QN_Hc=lRe0un(BV_qSqVS?ZdjS z==X!)U+4kM41_O_Z!nH`oyZMgZYXlYg#QQhaG^&qHJirg4_f1y8?^*GTV z4}XHt6PcL=f3onW(4R_9Lw~yH&(L+6+wlLv|IQQdu-8nivskmRwqVWX>>P5gn4gFK ze4!UWFBEzaXBLx7&|k`WnaC}Nze4Dh(5skRE&Mg`*9y&h;I$t92Ie=Co5;-a#vYj(^SWQo!$-fZ?e86`o?o| zN9eoE+#~O!|3LH~LO){fG5G}E3)W9jKQo!q#=n=(wQp#zm(08(Upsp0Sl?=`>UYe% zCqH2RBkND(XYz}tn)^!cKjgo${_f}}FzGsh8|hAZ;F^1~_R=)AmjDBb-lPwi5WPeq zmsrS(U7Ye0A0}G}ZQM(yK+*My?L)x*}H(etr54$cAK?$Th-w#P7Q? zX1ZZDVXmpjHKW&@j6kl1=(nWb3VtN()?^!zYm0sq{dU4{Pp<>n5xHpAF=QuAbsajR zA1ibh=s2Of;=BfMt~=R->?v}+FxOk?KFsum-%t4cp}9S^|AEkhm>n!~ztbB+4n=O5 z=>I`~IQ$W;M~eO^_@jmXlbJD^>DHh>)@1Vj4;+W}4Qo8sVdf^Fo`|)QdJ^i%tfy$I z=oe^}4dRCBZ7&lY+P^jx9mab`aJ1;Sqly@21UIwzJ+L`a9w85_&iE9_IE6 ze;>X5*P(B)cT@Pc=-o#Dj_~j5I(;qp_l1x9r_Spk`G|as+!NMMHC65z zz31c$@+J8SbFW#y(NuG9>AfT0Blm&zN0IwP?=$*eSbrt)ZW-G$R(iaz3BKcftE08I z3AGz*chtyudpPpmp3q)cxtU3T8bijmynRG2A#@_46GJCqE-9IeOfGUM=%qwH6;@{I z)TqB$V5>R2)&m&u{k9G^+=tlVZcGCS!@<{)!ws@YtQZF%QmKQEb& z%&)2TUw~dgvJhFAG~@Uztcz%>IV*gd&~|40;QO-J7%kHuUU;m1LD6}lUAccFVg_vCCZ zvNzdB3U$ z4C}whvE(>SwZHMmO`t!KoJ39*xheFfB0r7wbaDpyx2D?eKlEmjv&h*vpWPxim;O9# zcRuR{qQ8*dB62ag1o@?`myydg)&5tYzf$N`(5r=B!0cwSi~bGtZ_>Xd{M+#F2z{5C zd+_f&{EQ9hKZO5?^<(l0`ILM{KG#&|`-0v}%)es&n#8>|GAj3$d`G?~KVa@7>rdoo z@{6XL|BCC5_fpm0$nPeT58eYG6INy{H%H#b9oj=^PiQZ#eG&-Yn;tgelaO^H(N9b- z3HnJ{CnJ-KTnhA4(oZG))b!GjX~}eCdNKoECPL@FKH<2qzKa?zmereG!L%%Hia>6f9zXDki{YtDWlT|d; z@mHl+4Y}&N&J=(BYl>Vg=4z95$hw%TCvx?n8?e`q3={oE=!es9Ec_<$n+n~Gnda~# zSho=UmhfBAj}(4ulPMF9!8kW{er?GpvK{8yv+jU;AXZ1}XiYU2L$4G1orNC@-G#k4 zva9HKqt~76fqYNV?*-kPy*^}LvLD%BQys?uoX<$12Vs7&(7!_u;oMN+52N=7Ih-7U zyxiU>`lI3hi8YCO4C=qI?>*FGp~q?MGhX-;*q^9rnf3f{ztdEl?$dj7tdYz(oS~GJI%$=oo4)@C&`aJao)UU8EYOUrk zL0@M7itw+(zb5o`W^Ta0$@-S)-==qmyi49A?~@NO{}AgL^&|2z`2@YECR66tS{w9v z&fW|1rK4w@gF27b$i30E(r=;PvG<<*K+6053AxYozi6uYukim9`WrLfaZOFSPUuFu 
zlOCGJd_qqwFZu~IGxPmQ=p%C2Si(eDzRV^@okaAL(oY6IIqMXnpORiG^ivBz4Rl)e z(g{C3y$tAQWSvR$GtqTji{z4ShRnKauk{nQ#g^8;Hy_tRSp`^ny``VAZ6KKc8aM z#nCIFYqh`M=$9lz(Jv+XrRkR;%c5V7b$OAi0KX#rO2V&9uL}BAgj_;Sx`EIQIU7bcLO)#e8$&k{x+!zb$mZxrV6~xcfx0D*EgEd4 zsg5y{eZ0qpR{gf3j-uBNbL}0zn(e5ysYZCU?lN-=G!+ImR3EpPb zTga{CHgY?;gWO5((p2|tH@!XB-d@)GM1Md01N09H{}BAc^pB87$$!aXnrc7Ckv}2y zN$69|oz~1+hWf0?okQ+CecWTUt&8L(@-mo`^%dl=vc87;CF|>`Z|GWW<0k!E@Ncue zL*6Ctk@q##+yi!SzM=OP{dcV2lOM>B zNMkM`>qKN?lFx9Wq}0hw zrmVOJiIS5kNMjqRm`P2h0rO#{rA|ktCo_=7Y)00Zz|6wWf;y|v*;r=>eOc!qbCSmP zaTWim+z|ZLIC2AL&m9kb$JJ-yqh(V2G}> z_0+^|75{orp>0f;xTxqhp%tPdXzKI(6S!{upK)c267i1Q-Z{bj1 zzfj+zp}u88eJlD_Wjx%^Vh^zDWe>rCR(6XmIK+7*6ceGoiR$@V{QQkw_y?FBrE^qm z9oaAh0&zAtE5BeP9WQ>`bbP*nzr|mlx6O+9kJ`o7jZ+N^z)@N)Hh+uV-)eSu5=*;)S2p&*OhxG~lsJ@iAm7%~nm+!kQm09>aK+*EVC3bI;)Y|bOR ztl@U!(&MuGVIbJ-q?`xhFIJcxClg|~gy0HbKnU0Qa6cO^Rfr|X*on>Tr2fZ5puZ&m zhi1pag!v!J^+Xes3I*dr;dWR9^vnwMpv7+UGmF}FIt+;*JlD8Rfx$-n zhj5Wsw*t2zAQ&6Rc?V$!4i)m#bHZDJlfa$EnIew6?r(OME_S0H^&pFJzl~?sZuS?# zbv_&sTL7+Tu<^hf0}k~+CW37if8#d#s|l)rpB||nFL#)+1DtzMkR>R4C$A!BdWHUP{7bDn3xN-ABY?csQRhz%rNx7bAVp8`S9}ULM4hk_lOV>l; zL3pfi3G6rq9CX8oZ?+4E# z9$$3{j7`K#yVZH>$764Nmg0PDHXK`Ukl9%}&;0lhQlG!L40cO!fST~vBG7d{9FMH} zh{L_K`8ah59cjp zuu(7OHjCPM821hl{X*btHqaPwDAyCl0pU{$U$Y@NTSx6W-NY1PyaV{~z~{U1 zVc;xX4>br0wBXsm7Zg6(MD03_Ut@TE?8eVA{8+$~;VfMY=_k;Jk2E_TKY#UMjVXup z_jfU6JW%eqNXC1lz71`5KTJ7k*JI)8w>Vz#kPu67FlO-R#;adV;A1Bk-<-G~_ziE9 z%Jl@Uf(@I+vg42f12H3|>!EO)9Y5KP-#*wCe#T1WdIGz_FGS;~qVZ!qz=jzqe>r5l zP{vP7d>!KVK7Pdu`O6_ZWi~sKcyt1Bsh#96hTLt&k4!7RJB-^JfL%H3UyPZI)3M_> zr*ZM{S~^Kre`Kh4%@&A*!;QrEFm@C#@EAGoCH{NST}AK$*o`koKU{BP#G#$1;y(cJ z%;D42_?2UPOBmnvKWP_Z`snaMi8I3S*|94jTnwlWOPle-1N$-l$gmnCQo0@rSARDd zFA#oKS3S&4X8c0M&q+Ml#$JAiUwp*ku<)|sGYZ!? 
z*cfoApFYFmuW|#M5y#do#(jye{eSiI%iR`&hZ&ES`Y~iYX3qK-r(w5Z`}iEhhj^e_ z3fK82?&_ASZ!J7A_-Xi){>7NdZpD$|r^;fxZV#cjew+GkJI7BDyVrS~(*ZB5UpY+C08ovPZPuj(p z{;XGjwBd2aDcbEejNugHrRzW5!c+#|2|)oC{8elWP`KEMdVP#n2fwn^U#!M|?jq$+ zzvbeeI$Q_ieKWpKj6X{7C&o|u7yH5(&R>1N8{e$Be9qd%n7R&n4j;aD{GE)aP|s0g z8_sk5+zMA4$5%N15W*i?PRe=U$7z}D#jUKkDxj<67Xi@K4&s zm^!I+_CTwxXo%e!QM9PpQTw%S-MT22%YQg;%Q#WBsm%5WYe>;RYfF1kf2-AMZ10B= z^ovcYef*E@MPPgW)}k$Jt%LRU97g2->+z+tN7$@IZ4oVl)TZL=|DRJeHsx0|u!Yr6 zZ;JK*%chnddr>W-N42QvR+g4A9Uq0ah>DJg?b)n#Ol-%9xMm$A+Qq~+>lPW?B`PL* zw)@ZBsAxxbR+ScAqdK%|7TGDLWm`}8HYpmEtkxg72P_< zw^M9Xw}_TKo3)CJi)7Ai-_w2ExjGNx;E+@3^5tHegOjEQa4tV>j{$l1RC2R1F; AjsO4v literal 26352 zcmb811$0zbw1y`N!QCaeX2?t&DZ#C{6DWibAV>lsXesXQ?(Xg_6fG2YZK1fkw52V) z|D5wDv!`$9darApe*6FT-shfs@40tovbt)qSlld@6af~C#hvu9SbTnm_N4YAy~$)` za?*!PL8c^Ak*UcvWLh#EnV!r*W+XF_naM09`q8(o~Y|3^1bP+b@yf5PvpOa-=F?>oDG0crcJe~14#N!c9qMkrbgs(gd%Vd$C z!u(WnnxU?nA>x_nncN7;}wpnJ4n|;V%$+p{ZB*MBkUi$S+~Mlw4-0^ULY2 zK;25=uYz7J^cv{3La&2f&)yBf-$-v0xtZL8`mKz&k=w}~hI&0aQMZfwAoF|3?~%KJ z@dNcsOQC8mP7Op3;ziGpXeVI{xNz#lgCkag7HaFcMAS#q0c~{W!*X9 zpGW+b{zdWh)j8yf=jYg*Cszzlre_;}*T&MBQ!pcj(_G?}_|<=x6MCj=C4Ze+m7Hx!2?ylIO3_=N-NGsQD6*lzfjs#U|y9m7LtPiQZpy`hr{ot%9>@KXpsC3Gt0QVTx~ zy|lPaJ%yhh^%Y!8oU=%S8|O#4Qiwydv*QFCX&x85a=w zg7gX@Ul^k}brG^CSr0XTq>Z$biT5KA^931C_Xk6VuqKoY zBTJKI40U~3)RzS?{bLfVB==K<+CPshO) zoebgY$)=L=rtyrpuVZdH>2MiehcA8&~FL9mGHlTZp~a9GE(H* z(u+b}I}@kE-d5+V&$~U@0r`%?kAdzabZ6);tnEt1l5wK08|u0X9nYE`WCHT?Iq5~e zH`#~mi@JV{zcoz7ec<*Taxvc_~KMnqL#xorGRJf*YvoJO@H=CRTZ!Y6`j(WHG@E6cu zDEvk67Yn_FHA~55$m5w#y8kPnR|>s~b*tg8u~<@JZnw4MIzxRv>shyf+=#kOj=b*M z0`COIR@Q7Iw;Sqtb|Ak~=v}PYP3|GTCx1ZwUdH^*YYdKL;QENqrZfFA9AL`m)ehps%v` zn((i~zajK5(7$5bW$jJGw=9;_p5SlfZ9~1DJEl(O?}_+6YagKRq3|Dx-Y59|5RLp( z9_tx&AmiuoUqE|Nzl46p__d)v{u}sj>Aw^Hd-xxO{vG-cq5ovhNBDmU|8MAjg#HBm z*`eJnSd*L3Fx)+icE>eRYf2p~^4`oPBa@Rp=u3f-3nL}HRAg#0jiH*;JuOB$`sv9G zWJWR*nVHN&W+k&3>iM!`z8pg5G;!+otj$g4A@ic%*TlL%A9Q}^3XlcKLZYrP>WW}k znJbF87_QL_>f&SxvLxwesOsEHiP#_7#u__`>!YuG;+#RKsf`hAe7)Wf=un}cI 
zxq70$KD`FWHxzy&`i6l&`a63j9gBxAXk#B(6^fL8bh`IG+4X)I{NF$4UW9~MtYl2w^{gG=x-&rk=s$X zgYiy7eVul}-!1eW)_f2D2gZ9vejnDx=h1yXYRA7h+2 z-*M^_AV{JXBLY`9q>8*7vxL$zVKgx z$?3g@e#7{!p+2v7@ZSslfi=Iw|3mnHLVpzcFV_7{{zKyWx_>sYhlO-ARLAph$3Q~I zo}?G)O(ru;+s;te`Y@LQ{V5rz68Y5h(vWFUmyU6IGJ~O>FC+4qgw70|Md+;1+1Qs| z_&Mn1L_U}BbJNd5<~2+^lR6*j^D{1hIE-;YvJhFAEJ7AVZ865h4fXm$y7vI1F=tc2rL zW?aQkucs<}>}?fmU7f5!)i9H&UjgW6F z@=fSBC7Y4W$rfY;`dTt>MSf$b*WDWVHuNLOwqz73_jfev+Z$i^cYy9Fbd1H44)mlJjV0M1)^>ty+x>7?C{fJJswLjb}_#U z@p8s19CaQm;ja>UHEY(uU(0wMxt`o$sLy93>Ne5eOl}eRt;lbqzg_q{;O{g#-G1Tk zX6+vGd-4ZzFS(EWk>ng62dEE{hseV?-Vw$>8S3>PrFRVZpM`(i#OYo!f08^!o+i(b zXUTJz=RD&JhWa=c;a?K^GV~SJT^0T{_}A&*5dJUle--*B^exu?M&2gxIO@_DLft)l zj%Nk$V|-%G1M(sH2)W0OK948RPlbL4{aol5&@b7Wc#U3*{2S)pqV65z_ags+-tXie zsQXjoKSKY-+~30g$HeJdF!$Ne(}Ht!W9&|P7^>^LCH0YsB0tgki~9bIzas}2>h%qzHwblug+GM;Q24_b z4;T3n@J9+g3VJl_#t45bJv^(P;~7tI!u2S8olYr&tN>0oF(dJ z)0>03xs2zL^F`eP^gZbNLO)>NL->z`{}>wAQaxMx`Fct| zga2IkFX+D{Uy-j3GuEMgOTI(@dlTzhjM#f# zSyK!C*TSz2&Ashamo@ds`l7A@ zy@sf7B>cuE&V+mE)f5AJ+N&7`o?ox#7$Y%Sur~s6JmZ#REAkuUS~G4#MjGn#X^VW6 z(Ct_g4Zl6(4rE6%hU{dh=jn|4E<$%@O)MElc0*lvM_ndd8?PSBC6GPIUXD7i-l*$C zzc1NO@jk(d}7;-Ff<1iLc zk4HR#@kB#?ohHGbEc6uUsX|X<&vbGI@-sz#7X8`q=Lml;^gQO~3x5H+Mco$Uw>osDr_kG3$Mt&c zq~3+xNyfX$J>>V~59D5QA9{W?vA+KM=^uc9knth%FnPpK*Z+jNqe35p{#oeb>^WhW z*%$d!B7d6sGvrzF9O}-Cx(m=3nY$$X%kZxVeU&xW;9qBaL*##f|Eth9p>G+Txq1NaXaKN9)J@Sg~c`=+ngGx9n40(CDLzcSQyuj#!({w>A_>UZRO z!^|U*`<>n&!uu2cNBVyW|8I*$_2^@K!hh#njqzv17L05dZpQO=Cp`>RkGCg=7kxA+ zPy5N>Cl}fWI)%_Fp;NIoweZu>ON)Fu;ispc0e(itnaIqdE(^V^I4|sFy_W3g@n@Wa z%t_`#E;r*mhPo#&J?u~Ke8SI9zW`Yf`9dOJn0^uXMTK9CesQt{SrT=AjICrT!_2tP zdM!56P9|RCK%;d{5atPHK1BGT@Wber7JeD{W$Bj_{#WqJ(+?*rkQK>FhWfab=~W@C z8fL+HdRG^9HK1!^L}Jtu{@3iQP1ZrauE^J;Umrf7NAHFr-w1wV)-)lTlFdY2bJVq< zA3?Sh`BwD4AzP!qjft}$R_ChEw=Efkd^^U`BHtc<2cbJc$FQyw*_rG@b~V)V$D%%t zemCKFrx#E5AQQ-*WH0phHnEy7%S7nD7E5wmXYYO%OS134Z!zv;^v5`kp}d*wA3zR- 
zH^?!M_h9-%$f3v&V?3N3VW`hzB=V!^k0!^6{8;422|b=Q6X0XbY7RZmBStqYVeK4pF1&f7Za(w^<`$BRM1C>7C8%2}{AJL~gxy{qIk)L&awCQo6y;zbFeNanM>qzBcDg;ysYsh z^O5<<0;n&@xR9Yf-@@>V&@U?dV)TlWd=|3dnkMrTbynz7tnnvpB5$Xcczpv=A4D%$ z)P>LsCBw+ls4v5~tf5|aIeK3qU!HL|;;M`*Ag*Y!WK+khWT^WpGhf9}<#kOp)KzD$ z23eD=<*3zj)TUp@Pc=`Wt4OkNg0PyYvSl9)vN6dN4Tz-cZKF4E6ek!yiF^ zr0_@48;$%J#$(BG|PZxRy^h}{=v2V6vw)4o(#W+rX z9;wcM0me4!g@_lKSg&U>{Uz|1GG0b5Cs!Ei`jzxnk*moy=wB=9)P6|+-a!Su?zX#Lhph8o^?M6e=og#$p0w({m=)5KFGR5@DDRSLjFV^ zC65{Eb^L6xWDgPg1o}_1?i6{NJR|DPqV61h+&ewj1(ClkHgr6QdgU}hFGqE-^nT5WVWiVW`(p z622dXKZey}$)T=aDMOW4T6;E!mmI$C4J3oeU@H0YWE(OPeQg;>k?joiywUX9qpkzvj)*%k zjxo#;hx>@3d+-iX)NxnT#tNNS(~bG=$j6I(5BdpYPvm=vd~f=F;P++RPvpO)*Pr~3 z96$~v2cds3<00fwL%r@{^oHZS7Be1+|38S@f1@zI#~6*V1Y->IV@3Zs_~V71z?zBh zCkcNt{VDLL3V#~(bmnG|Gev$Dz1id()Xx?9dC>EPUI4vN=tb&XqM+sJs6p}x+W>1{!NE8}eO3HCRX$1 zOb-16b9)iL!Ptj!gWiwie)0fv2N@qC50gg>^*Vn--BF>BvF2y;IPxbLpA>bc;Gf30 z#M~M3EWC4AZ#eaN=nFz$G``BKk^C~o6^EA`_bmA}_Fi}Rdab|E|J6`?H|gCXe?$Lm z#&^iOa;>ey=d zU?Rnw*zZ0m>7^o5lWEAbhPp2uz4T-T^k-z8Nz`SApGD}b(Ak8}&Ym1(PULfmd~W)A z$h=r9@3&7rQJ0_j0_ZO&{6h2#!!N?PC|QgwZm7?z1nNo(?FVfYx)gi-;oF37hfchv zfvgF#SaRWf^}NA`O8bOj;Cb{3!&r$?8lw+J8H@(3FH4pqze29OiS>MVrhFdJNnUNd$I#^F&G_jUxpe_F)`Nb>de|M zsOxHC?Z?uOgWrvDcQT&rVVG+#Y7#K7Vo$Oc;@*zDp0h8!N$l;1_*)a}Tz}~AgdV`U zf$#?je=z+a@Oe%?!{`ksN01{8_57pgjYi)X;g6+1jvP-;K;1+~U9K1OCzDgispK@& zPiH)XoN1_!KMVQULeF8%Tyh@r^F@9E{e|Qr&Xq|Mnk>+O{m*Ue+#*l+(vFEcaS^LvybsE#Jeq)+}Qg*dkpow-$VaE zANxn;m9Oi56!Cu49pGFCg?|YCVWE#W=FsyWMgAE3en$Ko760Z zlIO_t45c57_{8;3l(0hvfGse$J?sYxSOXOeCe{HD!H}K!me@DI-`41+}t?ut1%>M~y zVf>N&3;Dkp|3iKvKO3g7kZzhLy#>hjRSNOh&^9h}wF`n-f z1sN9-`NE8gkVU~_jEj>c$daTVX(iQqN-_2)ZJ?bo@_EYP<~aTHuv*8s9tknA9m1={ zMkn-+i|^dnW(^9k28CGz0{k3dm2UBIefox1iI46Q**CmekLdVbF;UUsl@j9O!z=ab z7F#(XzGw79UpL?Cz9}MXR+}xvY6}js+JZtciB=*0R=dC5w`jO;sc_%maNqLbzLmq- z(LBIv4@Lu;0_=WLIM@4DM?<)8iiUw!yEP=#8fy1TidR%0s=NMtf zbqK`iDcJp-l=C5yIs$^N0l4O&I7XnqpOgA>hdn?a!)DhV4)ukOKs#0xVhzK^w1xON zDHk2;O0^8cnsDxRtj!+gC)JlOmafKj3BdIY^%E-eUmHlDy54hvA$ncv+=B!B911td 
zd0o6A%>%=%xOZXxsK+6Ma4t_ef@-h_hFSyE9aIfT!nq-FPq$PhK{)kb8?Kgapn~(i zTht!G)(Z`^+Uz!*ioc(ea&Abv<3V`1)T}rQe{@Lor4F@K1FfOBA2^L5sa$uovfu>l zcC0H{U8_JpC;5fV%O*9u+P-0?DM|j!Q3_+LI~IO5hw}4z0nV$Zb(vJxGfQ(>Q>^Z!n*K3UjXc>b{cZGkpFDV)m_H*s^6s2dm-XblKf^-01-y;@Q$3yN@m z@p$1<`Qz;?q|NoB(4*>c7TszKfWa3eHR?!=cK;S zf!Cr{-;Pjg7`}=~<+`Jl#paKP#6L)VG~ii5kCZN&%$BsN4=X(6>TwSVLYG5^erZeE z{PCp#FGaP#@TfRT7e`X>2vrn>OAzR94bdHm!gYOw%^zFDA7_uVQ4K=5?!X1Wi_wN{ zk2`7$4fAuBuA3rs5k4C6e!%NdsxQ8V@sd-U(Hf?%R4{s^bbT1LHdNuUSD%w%I3FkJ zx(RDj4;CI_>|)g-l;i=RUqI zaV$G7HwuGsxz!$ZlFmJeugq5NHnlo+Cg`-|%ih2A7us+vw5e|}>bod*sgrcw)ZB*c zsecYw?P_%n;krITmDufJR&}HB_(wZYYa~E@55zTa)-Dc&_Z99f%IxYZy!u?gOU_BUZfdUXsCsJ$=?7K_*Y!9EzRKG| z@D&jINWJEqdE5n-fu2VNu2-o!yHhjlYw*#9j1UFME7ae?! z;3cYWh7vPTDD~S2uOIcJSWS{7U2lh0?&?$0riOYs z;5X&J^cRj5q06w>@Z%*|U3W}Cm2~y0RgLVC(4<}+{gs`rDx)f|lP3jB0- zK9;!bA?oL=O?_{`*Ax61LP+PH#I2-G3*YzgT?>~SPolaOPTEDAx`fTtpLp;}z}pBr z(8RdOfl24&_%oLJxnRfV7j~=K={9U~sazZ&u|r)byo>{I+0^G>qH@up?`lqauzx`5 zApfX<(oz2Y{-sO%IgIxBr}UTpEMj_(l@7rWT~-`SX0M|M%IwmoCI zv}+yREiS5~r+WwYV`*!*i%f`Y8`&eebwb~6(XEojbc=~?ALrXGKBiY>RNvO^q7$N{ z5-@S=9+6$Ub&0li?b@zwt#;8pqT*w^DgSoG=-B^TSIwAiNvrg((IqmrL(j+#(LLNe kF+mD)vq8I!A4^hu&qyPW_ From b2a2a5308d9eff1f8f2bacecdecde16ba811c224 Mon Sep 17 00:00:00 2001 From: Michael Peterson Date: Sat, 19 Oct 2024 11:54:32 -0400 Subject: [PATCH 230/449] Update execution info at end of planning before kicking off execution phase (#115127) The revised took time model bug fix #115017 introduced a new bug that allows a race condition between updating the execution info with "end of planning" timestamp and using that timestamp during execution. This one line fix reverses the order to ensure the planning phase execution update occurs before starting the ESQL query execution phase. 
--- .../java/org/elasticsearch/xpack/esql/session/EsqlSession.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 788b2827d7c8e..ccd167942340c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -162,8 +162,8 @@ public void executeOptimizedPlan( ) { LogicalPlan firstPhase = Phased.extractFirstPhase(optimizedPlan); if (firstPhase == null) { - runPhase.accept(logicalPlanToPhysicalPlan(optimizedPlan, request), listener); updateExecutionInfoAtEndOfPlanning(executionInfo); + runPhase.accept(logicalPlanToPhysicalPlan(optimizedPlan, request), listener); } else { executePhased(new ArrayList<>(), optimizedPlan, request, executionInfo, firstPhase, runPhase, listener); } From b4a58175b7f54a858ede46b81ab6f5d80c1be97c Mon Sep 17 00:00:00 2001 From: Ed Savage Date: Mon, 21 Oct 2024 09:10:14 +1300 Subject: [PATCH 231/449] [ML] Unmute MLModelDeploymentFullClusterRestartIT.testDeploymentSurvivesRestart (#115060) After several hundreds of iterations of ``` ./gradlew ":x-pack:qa:full-cluster-restart:v8.0.1#bwcTest" -Dtests.class="org.elasticsearch.xpack.restart.MLModelDeploymentFullClusterRestartIT" -Dtests.method="testDeploymentSurvivesRestart" -Dtests.seed=A7BE2CA36E251E1E -Dtests.bwc=true -Dtests.locale=af-ZA -Dtests.timezone=Antarctica/South_Pole -Druntime.java=22 ``` No failures were observed. Given the location of the failure mentioned in #112980 it was likely due to a timeout on a busy CI machine. Just in case I've bumped the timeout in the busy wait loop. Also removed the now unneeded `@UpdateForV9` annotation in passing. 
Closes #112980 --- muted-tests.yml | 3 --- .../restart/MLModelDeploymentFullClusterRestartIT.java | 7 +------ 2 files changed, 1 insertion(+), 9 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 4b69eacba7b1a..b7323bfc1de18 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -214,9 +214,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/113722 - class: org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDateNanosTests issue: https://github.com/elastic/elasticsearch/issues/113661 -- class: org.elasticsearch.xpack.restart.MLModelDeploymentFullClusterRestartIT - method: testDeploymentSurvivesRestart {cluster=UPGRADED} - issue: https://github.com/elastic/elasticsearch/issues/112980 - class: org.elasticsearch.ingest.geoip.DatabaseNodeServiceIT method: testNonGzippedDatabase issue: https://github.com/elastic/elasticsearch/issues/113821 diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java index 3e57faea848bf..dc9afb1bec237 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java @@ -18,8 +18,6 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Strings; -import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus; @@ -93,9 +91,6 @@ protected Settings restClientSettings() { 
} public void testDeploymentSurvivesRestart() throws Exception { - @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) // condition will always be true from v8, can be removed - var originalClusterSupportsNlpModels = oldClusterHasFeature(RestTestLegacyFeatures.ML_NLP_SUPPORTED); - assumeTrue("NLP model deployments added in 8.0", originalClusterSupportsNlpModels); String modelId = "trained-model-full-cluster-restart"; @@ -139,7 +134,7 @@ private void waitForDeploymentStarted(String modelId) throws Exception { equalTo("fully_allocated") ); assertThat(stat.toString(), XContentMapValues.extractValue("deployment_stats.state", stat), equalTo("started")); - }, 90, TimeUnit.SECONDS); + }, 120, TimeUnit.SECONDS); } private void assertInfer(String modelId) throws IOException { From 22b4d814d19f460b82391975775e8ca0e487d86a Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Mon, 21 Oct 2024 11:31:24 +1100 Subject: [PATCH 232/449] [Test] Use stream.next instead of setAutoRead in test (#115063) For a more realistic simulation. 
--- .../netty4/Netty4IncrementalRequestHandlingIT.java | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java index 26d31b941f356..b5c272f41a1d5 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java @@ -175,12 +175,16 @@ public void testClientConnectionCloseMidStream() throws Exception { var handler = ctx.awaitRestChannelAccepted(opaqueId); assertBusy(() -> assertNotNull(handler.stream.buf())); - // enable auto-read to receive channel close event - handler.stream.channel().config().setAutoRead(true); assertFalse(handler.streamClosed); - // terminate connection and wait resources are released + // terminate client connection ctx.clientChannel.close(); + // read the first half of the request + handler.stream.next(); + // attempt to read more data and it should notice channel being closed eventually + handler.stream.next(); + + // wait for resources to be released assertBusy(() -> { assertNull(handler.stream.buf()); assertTrue(handler.streamClosed); From 7d4f75ab802a9e270970763329b05e57d1044518 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Mon, 21 Oct 2024 09:36:14 +0200 Subject: [PATCH 233/449] ES|QL: add metrics for functions (#114620) --- docs/changelog/114620.yaml | 5 + docs/reference/rest-api/usage.asciidoc | 3 +- .../xpack/esql/EsqlTestUtils.java | 3 +- .../xpack/esql/action/EsqlCapabilities.java | 7 +- .../xpack/esql/analysis/Verifier.java | 4 + .../xpack/esql/execution/PlanExecutor.java | 2 +- .../xpack/esql/stats/Metrics.java | 44 ++++++++- 
.../LocalPhysicalPlanOptimizerTests.java | 2 +- .../esql/planner/QueryTranslatorTests.java | 2 +- .../esql/stats/VerifierMetricsTests.java | 95 ++++++++++++++++++- .../rest-api-spec/test/esql/60_usage.yml | 15 ++- 11 files changed, 171 insertions(+), 11 deletions(-) create mode 100644 docs/changelog/114620.yaml diff --git a/docs/changelog/114620.yaml b/docs/changelog/114620.yaml new file mode 100644 index 0000000000000..92498db92061f --- /dev/null +++ b/docs/changelog/114620.yaml @@ -0,0 +1,5 @@ +pr: 114620 +summary: "ES|QL: add metrics for functions" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/reference/rest-api/usage.asciidoc b/docs/reference/rest-api/usage.asciidoc index 5fd2304ff9378..27cc1723265c9 100644 --- a/docs/reference/rest-api/usage.asciidoc +++ b/docs/reference/rest-api/usage.asciidoc @@ -38,9 +38,10 @@ include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] ------------------------------------------------------------ GET /_xpack/usage ------------------------------------------------------------ -// TEST[s/usage/usage?filter_path=-watcher.execution.actions.index*\,-watcher.execution.actions.logging*,-watcher.execution.actions.email*/] +// TEST[s/usage/usage?filter_path=-watcher.execution.actions.index*\,-watcher.execution.actions.logging*,-watcher.execution.actions.email*,-esql.functions*/] // This response filter removes watcher logging results if they are included // to avoid errors in the CI builds. +// Same for ES|QL functions, that is a long list and quickly evolving. 
[source,console-result] ------------------------------------------------------------ diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index 693b6fa8bd670..f5bcb37c63e84 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -46,6 +46,7 @@ import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.util.DateUtils; import org.elasticsearch.xpack.esql.core.util.StringUtils; +import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; @@ -260,7 +261,7 @@ public boolean isIndexed(String field) { public static final Configuration TEST_CFG = configuration(new QueryPragmas(Settings.EMPTY)); - public static final Verifier TEST_VERIFIER = new Verifier(new Metrics()); + public static final Verifier TEST_VERIFIER = new Verifier(new Metrics(new EsqlFunctionRegistry())); private EsqlTestUtils() {} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index b31fc005a0a5d..adfba4c487618 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -390,7 +390,12 @@ public enum Cap { /** * Fix for https://github.com/elastic/elasticsearch/issues/114714 */ - FIX_STATS_BY_FOLDABLE_EXPRESSION; + 
FIX_STATS_BY_FOLDABLE_EXPRESSION, + + /** + * Adding stats for functions (stack telemetry) + */ + FUNCTION_STATS; private final boolean snapshotOnly; private final FeatureFlag featureFlag; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index ef39220d7ffcc..e2717cd9af0d1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -58,6 +58,7 @@ import java.util.ArrayList; import java.util.BitSet; import java.util.Collection; +import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; @@ -480,6 +481,9 @@ private void gatherMetrics(LogicalPlan plan, BitSet b) { for (int i = b.nextSetBit(0); i >= 0; i = b.nextSetBit(i + 1)) { metrics.inc(FeatureMetric.values()[i]); } + Set> functions = new HashSet<>(); + plan.forEachExpressionDown(Function.class, p -> functions.add(p.getClass())); + functions.forEach(f -> metrics.incFunctionMetric(f)); } /** diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java index 7d8e0cd736445..ee8822889bedb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java @@ -48,7 +48,7 @@ public PlanExecutor(IndexResolver indexResolver, MeterRegistry meterRegistry) { this.preAnalyzer = new PreAnalyzer(); this.functionRegistry = new EsqlFunctionRegistry(); this.mapper = new Mapper(functionRegistry); - this.metrics = new Metrics(); + this.metrics = new Metrics(functionRegistry); this.verifier = new Verifier(metrics); this.planningMetricsManager = new 
PlanningMetricsManager(meterRegistry); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/Metrics.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/Metrics.java index 6c5d9faf18ac4..092fecb3142db 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/Metrics.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/Metrics.java @@ -10,8 +10,11 @@ import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.util.Maps; import org.elasticsearch.xpack.core.watcher.common.stats.Counters; +import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; +import org.elasticsearch.xpack.esql.expression.function.FunctionDefinition; import java.util.Collections; +import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Locale; import java.util.Map; @@ -36,10 +39,17 @@ public String toString() { private final Map> opsByTypeMetrics; // map that holds one counter per esql query "feature" (eval, sort, limit, where....) 
private final Map featuresMetrics; + private final Map functionMetrics; protected static String QPREFIX = "queries."; protected static String FPREFIX = "features."; + protected static String FUNC_PREFIX = "functions."; - public Metrics() { + private final EsqlFunctionRegistry functionRegistry; + private final Map, String> classToFunctionName; + + public Metrics(EsqlFunctionRegistry functionRegistry) { + this.functionRegistry = functionRegistry.snapshotRegistry(); + this.classToFunctionName = initClassToFunctionType(); Map> qMap = new LinkedHashMap<>(); for (QueryMetric metric : QueryMetric.values()) { Map metricsMap = Maps.newLinkedHashMapWithExpectedSize(OperationType.values().length); @@ -56,6 +66,26 @@ public Metrics() { fMap.put(featureMetric, new CounterMetric()); } featuresMetrics = Collections.unmodifiableMap(fMap); + + functionMetrics = initFunctionMetrics(); + } + + private Map initFunctionMetrics() { + Map result = new LinkedHashMap<>(); + for (var entry : classToFunctionName.entrySet()) { + result.put(entry.getValue(), new CounterMetric()); + } + return Collections.unmodifiableMap(result); + } + + private Map, String> initClassToFunctionType() { + Map, String> tmp = new HashMap<>(); + for (FunctionDefinition func : functionRegistry.listFunctions()) { + if (tmp.containsKey(func.clazz()) == false) { + tmp.put(func.clazz(), func.name()); + } + } + return Collections.unmodifiableMap(tmp); } /** @@ -81,6 +111,13 @@ public void inc(FeatureMetric metric) { this.featuresMetrics.get(metric).inc(); } + public void incFunctionMetric(Class functionType) { + String functionName = classToFunctionName.get(functionType); + if (functionName != null) { + functionMetrics.get(functionName).inc(); + } + } + public Counters stats() { Counters counters = new Counters(); @@ -102,6 +139,11 @@ public Counters stats() { counters.inc(FPREFIX + entry.getKey().toString(), entry.getValue().count()); } + // function metrics + for (Entry entry : functionMetrics.entrySet()) { + 
counters.inc(FUNC_PREFIX + entry.getKey(), entry.getValue().count()); + } + return counters; } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 8501dd6e478df..72060bccb520a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -143,7 +143,7 @@ private Analyzer makeAnalyzer(String mappingFileName, EnrichResolution enrichRes return new Analyzer( new AnalyzerContext(config, new EsqlFunctionRegistry(), getIndexResult, enrichResolution), - new Verifier(new Metrics()) + new Verifier(new Metrics(new EsqlFunctionRegistry())) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java index 760d8a327ad20..cf90cf96fe683 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java @@ -46,7 +46,7 @@ private static Analyzer makeAnalyzer(String mappingFileName) { return new Analyzer( new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), getIndexResult, new EnrichResolution()), - new Verifier(new Metrics()) + new Verifier(new Metrics(new EsqlFunctionRegistry())) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java index 203e5c3bd37ee..5e6588d2295f9 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java @@ -10,9 +10,14 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.watcher.common.stats.Counters; import org.elasticsearch.xpack.esql.analysis.Verifier; +import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; +import org.elasticsearch.xpack.esql.expression.function.FunctionDefinition; import org.elasticsearch.xpack.esql.parser.EsqlParser; +import java.util.HashMap; import java.util.List; +import java.util.Map; +import java.util.Set; import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.analyzer; @@ -32,6 +37,7 @@ import static org.elasticsearch.xpack.esql.stats.FeatureMetric.STATS; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.WHERE; import static org.elasticsearch.xpack.esql.stats.Metrics.FPREFIX; +import static org.elasticsearch.xpack.esql.stats.Metrics.FUNC_PREFIX; public class VerifierMetricsTests extends ESTestCase { @@ -54,6 +60,8 @@ public void testDissectQuery() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + + assertEquals(1, function("concat", c)); } public void testEvalQuery() { @@ -73,6 +81,8 @@ public void testEvalQuery() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + + assertEquals(1, function("length", c)); } public void testGrokQuery() { @@ -92,6 +102,8 @@ public void testGrokQuery() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + + assertEquals(1, function("concat", c)); } public void testLimitQuery() { @@ -149,6 +161,8 @@ public void testStatsQuery() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + + assertEquals(1, function("max", c)); } public 
void testWhereQuery() { @@ -190,7 +204,7 @@ public void testTwoWhereQuery() { } public void testTwoQueriesExecuted() { - Metrics metrics = new Metrics(); + Metrics metrics = new Metrics(new EsqlFunctionRegistry()); Verifier verifier = new Verifier(metrics); esqlWithVerifier(""" from employees @@ -226,6 +240,64 @@ public void testTwoQueriesExecuted() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + + assertEquals(1, function("length", c)); + assertEquals(1, function("concat", c)); + assertEquals(1, function("max", c)); + assertEquals(1, function("min", c)); + + assertEquals(0, function("sin", c)); + assertEquals(0, function("cos", c)); + } + + public void testMultipleFunctions() { + Metrics metrics = new Metrics(new EsqlFunctionRegistry()); + Verifier verifier = new Verifier(metrics); + esqlWithVerifier(""" + from employees + | where languages > 2 + | limit 5 + | eval name_len = length(first_name), surname_len = length(last_name) + | sort length(first_name) + | limit 3 + """, verifier); + + Counters c = metrics.stats(); + assertEquals(1, function("length", c)); + assertEquals(0, function("concat", c)); + + esqlWithVerifier(""" + from employees + | where languages > 2 + | sort first_name desc nulls first + | dissect concat(first_name, " ", last_name) "%{a} %{b}" + | grok concat(first_name, " ", last_name) "%{WORD:a} %{WORD:b}" + | eval name_len = length(first_name), surname_len = length(last_name) + | stats x = max(languages) + | sort x + | stats y = min(x) by x + """, verifier); + c = metrics.stats(); + + assertEquals(2, function("length", c)); + assertEquals(1, function("concat", c)); + assertEquals(1, function("max", c)); + assertEquals(1, function("min", c)); + + EsqlFunctionRegistry fr = new EsqlFunctionRegistry().snapshotRegistry(); + Map, String> functions = new HashMap<>(); + for (FunctionDefinition func : fr.listFunctions()) { + if (functions.containsKey(func.clazz()) == false) { + functions.put(func.clazz(), func.name()); 
+ } + } + for (String value : functions.values()) { + if (Set.of("length", "concat", "max", "min").contains(value) == false) { + assertEquals(0, function(value, c)); + } + } + Map map = (Map) c.toNestedMap().get("functions"); + assertEquals(functions.size(), map.size()); } public void testEnrich() { @@ -251,6 +323,8 @@ public void testEnrich() { assertEquals(0, drop(c)); assertEquals(1L, keep(c)); assertEquals(0, rename(c)); + + assertEquals(1, function("to_string", c)); } public void testMvExpand() { @@ -298,6 +372,8 @@ public void testShowInfo() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + + assertEquals(1, function("count", c)); } public void testRow() { @@ -336,6 +412,8 @@ public void testDropAndRename() { assertEquals(1L, drop(c)); assertEquals(0, keep(c)); assertEquals(1L, rename(c)); + + assertEquals(1, function("count", c)); } public void testKeep() { @@ -422,6 +500,19 @@ private long rename(Counters c) { return c.get(FPREFIX + RENAME); } + private long function(String function, Counters c) { + return c.get(FUNC_PREFIX + function); + } + + private void assertNullFunction(String function, Counters c) { + try { + c.get(FUNC_PREFIX + function); + fail(); + } catch (NullPointerException npe) { + + } + } + private Counters esql(String esql) { return esql(esql, null); } @@ -434,7 +525,7 @@ private Counters esql(String esql, Verifier v) { Verifier verifier = v; Metrics metrics = null; if (v == null) { - metrics = new Metrics(); + metrics = new Metrics(new EsqlFunctionRegistry()); verifier = new Verifier(metrics); } analyzer(verifier).analyze(parser.createStatement(esql)); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml index 8bbdb27a87d1a..e1fd9b0201a35 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml +++ 
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml @@ -5,7 +5,7 @@ setup: - method: POST path: /_query parameters: [ method, path, parameters, capabilities ] - capabilities: [ no_meta ] + capabilities: [ function_stats ] reason: "META command removed which changes the count of the data returned" test_runner_features: [capabilities] @@ -51,11 +51,16 @@ setup: - set: {esql.queries.kibana.failed: kibana_failed_counter} - set: {esql.queries._all.total: all_total_counter} - set: {esql.queries._all.failed: all_failed_counter} + - set: {esql.functions.max: functions_max} + - set: {esql.functions.min: functions_min} + - set: {esql.functions.cos: functions_cos} + - set: {esql.functions.to_long: functions_to_long} + - set: {esql.functions.coalesce: functions_coalesce} - do: esql.query: body: - query: 'from test | where data > 2 | sort count desc | limit 5 | stats m = max(data)' + query: 'from test | where data > 2 and to_long(data) > 2 | sort count desc | limit 5 | stats m = max(data)' - do: {xpack.usage: {}} - match: { esql.available: true } @@ -73,3 +78,9 @@ setup: - match: {esql.queries.kibana.failed: $kibana_failed_counter} - gt: {esql.queries._all.total: $all_total_counter} - match: {esql.queries._all.failed: $all_failed_counter} + - gt: {esql.functions.max: $functions_max} + - match: {esql.functions.min: $functions_min} + - match: {esql.functions.cos: $functions_cos} + - gt: {esql.functions.to_long: $functions_to_long} + - match: {esql.functions.coalesce: $functions_coalesce} + - length: {esql.functions: 117} From ecf4af1e8895617f735195fb44b7f5c647c02afe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Mon, 21 Oct 2024 09:41:55 +0200 Subject: [PATCH 234/449] [DOCS] Documents watsonx service of the Inference API (#115088) Co-authored-by: Saikat Sarkar <132922331+saikatsarkar056@users.noreply.github.com> --- .../inference/delete-inference.asciidoc | 9 +- .../inference/get-inference.asciidoc | 9 +- 
.../inference/inference-apis.asciidoc | 1 + .../inference/post-inference.asciidoc | 9 +- .../inference/put-inference.asciidoc | 10 +- .../inference/service-watsonx-ai.asciidoc | 115 ++++++++++++++++++ .../inference/update-inference.asciidoc | 2 +- 7 files changed, 129 insertions(+), 26 deletions(-) create mode 100644 docs/reference/inference/service-watsonx-ai.asciidoc diff --git a/docs/reference/inference/delete-inference.asciidoc b/docs/reference/inference/delete-inference.asciidoc index bee39bf9b9851..4fc4beaca6d8e 100644 --- a/docs/reference/inference/delete-inference.asciidoc +++ b/docs/reference/inference/delete-inference.asciidoc @@ -6,12 +6,9 @@ experimental[] Deletes an {infer} endpoint. -IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in -{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI or -Hugging Face. For built-in models and models uploaded through Eland, the {infer} -APIs offer an alternative way to use and manage trained models. However, if you -do not plan to use the {infer} APIs to use these models or if you want to use -non-NLP models, use the <>. +IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI, Anthropic, Watsonx.ai, or Hugging Face. +For built-in models and models uploaded through Eland, the {infer} APIs offer an alternative way to use and manage trained models. +However, if you do not plan to use the {infer} APIs to use these models or if you want to use non-NLP models, use the <>. 
[discrete] diff --git a/docs/reference/inference/get-inference.asciidoc b/docs/reference/inference/get-inference.asciidoc index c3fe841603bcc..d991729fe77c9 100644 --- a/docs/reference/inference/get-inference.asciidoc +++ b/docs/reference/inference/get-inference.asciidoc @@ -6,12 +6,9 @@ experimental[] Retrieves {infer} endpoint information. -IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in -{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI or -Hugging Face. For built-in models and models uploaded through Eland, the {infer} -APIs offer an alternative way to use and manage trained models. However, if you -do not plan to use the {infer} APIs to use these models or if you want to use -non-NLP models, use the <>. +IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI, Anthropic, Watsonx.ai, or Hugging Face. +For built-in models and models uploaded through Eland, the {infer} APIs offer an alternative way to use and manage trained models. +However, if you do not plan to use the {infer} APIs to use these models or if you want to use non-NLP models, use the <>. 
[discrete] diff --git a/docs/reference/inference/inference-apis.asciidoc b/docs/reference/inference/inference-apis.asciidoc index 88421e4f64cfd..e756831075027 100644 --- a/docs/reference/inference/inference-apis.asciidoc +++ b/docs/reference/inference/inference-apis.asciidoc @@ -54,3 +54,4 @@ include::service-google-vertex-ai.asciidoc[] include::service-hugging-face.asciidoc[] include::service-mistral.asciidoc[] include::service-openai.asciidoc[] +include::service-watsonx-ai.asciidoc[] diff --git a/docs/reference/inference/post-inference.asciidoc b/docs/reference/inference/post-inference.asciidoc index 52131c0b10776..ce51abaff07f8 100644 --- a/docs/reference/inference/post-inference.asciidoc +++ b/docs/reference/inference/post-inference.asciidoc @@ -6,12 +6,9 @@ experimental[] Performs an inference task on an input text by using an {infer} endpoint. -IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in -{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI or -Hugging Face. For built-in models and models uploaded through Eland, the {infer} -APIs offer an alternative way to use and manage trained models. However, if you -do not plan to use the {infer} APIs to use these models or if you want to use -non-NLP models, use the <>. +IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI, Anthropic, Watsonx.ai, or Hugging Face. +For built-in models and models uploaded through Eland, the {infer} APIs offer an alternative way to use and manage trained models. +However, if you do not plan to use the {infer} APIs to use these models or if you want to use non-NLP models, use the <>. 
[discrete] diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 96e127e741d56..6d6b61ffea771 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -8,13 +8,8 @@ Creates an {infer} endpoint to perform an {infer} task. [IMPORTANT] ==== -* The {infer} APIs enable you to use certain services, such as built-in -{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Mistral, -Azure OpenAI, Google AI Studio, Google Vertex AI, Anthropic or Hugging Face. -* For built-in models and models uploaded through Eland, the {infer} APIs offer an -alternative way to use and manage trained models. However, if you do not plan to -use the {infer} APIs to use these models or if you want to use non-NLP models, -use the <>. +* The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Mistral, Azure OpenAI, Google AI Studio, Google Vertex AI, Anthropic, Watsonx.ai, or Hugging Face. +* For built-in models and models uploaded through Eland, the {infer} APIs offer an alternative way to use and manage trained models. However, if you do not plan to use the {infer} APIs to use these models or if you want to use non-NLP models, use the <>. ==== @@ -71,6 +66,7 @@ Click the links to review the configuration details of the services: * <> (`text_embedding`) * <> (`text_embedding`) * <> (`completion`, `text_embedding`) +* <> (`text_embedding`) The {es} and ELSER services run on a {ml} node in your {es} cluster. The rest of the services connect to external providers. 
\ No newline at end of file diff --git a/docs/reference/inference/service-watsonx-ai.asciidoc b/docs/reference/inference/service-watsonx-ai.asciidoc new file mode 100644 index 0000000000000..597afc27fd0cf --- /dev/null +++ b/docs/reference/inference/service-watsonx-ai.asciidoc @@ -0,0 +1,115 @@ +[[infer-service-watsonx-ai]] +=== Watsonx {infer} service + +Creates an {infer} endpoint to perform an {infer} task with the `watsonxai` service. + +You need an https://cloud.ibm.com/docs/databases-for-elasticsearch?topic=databases-for-elasticsearch-provisioning&interface=api[IBM Cloud® Databases for Elasticsearch deployment] to use the `watsonxai` {infer} service. +You can provision one through the https://cloud.ibm.com/databases/databases-for-elasticsearch/create[IBM catalog], the https://cloud.ibm.com/docs/databases-cli-plugin?topic=databases-cli-plugin-cdb-reference[Cloud Databases CLI plug-in], the https://cloud.ibm.com/apidocs/cloud-databases-api[Cloud Databases API], or https://registry.terraform.io/providers/IBM-Cloud/ibm/latest/docs/resources/database[Terraform]. + + +[discrete] +[[infer-service-watsonx-ai-api-request]] +==== {api-request-title} + +`PUT /_inference//` + +[discrete] +[[infer-service-watsonx-ai-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) +include::inference-shared.asciidoc[tag=inference-id] + +``:: +(Required, string) +include::inference-shared.asciidoc[tag=task-type] ++ +-- +Available task types: + +* `text_embedding`. +-- + +[discrete] +[[infer-service-watsonx-ai-api-request-body]] +==== {api-request-body-title} + +`service`:: +(Required, string) +The type of service supported for the specified task type. In this case, +`watsonxai`. + +`service_settings`:: +(Required, object) +include::inference-shared.asciidoc[tag=service-settings] ++ +-- +These settings are specific to the `watsonxai` service. +-- + +`api_key`::: +(Required, string) +A valid API key of your Watsonx account. 
+You can find your Watsonx API keys or you can create a new one https://cloud.ibm.com/iam/apikeys[on the API keys page]. ++ +-- +include::inference-shared.asciidoc[tag=api-key-admonition] +-- + +`api_version`::: +(Required, string) +Version parameter that takes a version date in the format of `YYYY-MM-DD`. +For the active version data parameters, refer to the https://cloud.ibm.com/apidocs/watsonx-ai#active-version-dates[documentation]. + +`model_id`::: +(Required, string) +The name of the model to use for the {infer} task. +Refer to the IBM Embedding Models section in the https://www.ibm.com/products/watsonx-ai/foundation-models[Watsonx documentation] for the list of available text embedding models. + +`url`::: +(Required, string) +The URL endpoint to use for the requests. + +`project_id`::: +(Required, string) +The name of the project to use for the {infer} task. + +`rate_limit`::: +(Optional, object) +By default, the `watsonxai` service sets the number of requests allowed per minute to `120`. +This helps to minimize the number of rate limit errors returned from Watsonx. +To modify this, set the `requests_per_minute` setting of this object in your service settings: ++ +-- +include::inference-shared.asciidoc[tag=request-per-minute-example] +-- + + +[discrete] +[[inference-example-watsonx-ai]] +==== Watsonx AI service example + +The following example shows how to create an {infer} endpoint called `watsonx-embeddings` to perform a `text_embedding` task type. + +[source,console] +------------------------------------------------------------ +PUT _inference/text_embedding/watsonx-embeddings +{ + "service": "watsonxai", + "service_settings": { + "api_key": "", <1> + "url": "", <2> + "model_id": "ibm/slate-30m-english-rtrvr", + "project_id": "", <3> + "api_version": "2024-03-14" <4> + } +} + +------------------------------------------------------------ +// TEST[skip:TBD] +<1> A valid Watsonx API key. 
+You can find it on the https://cloud.ibm.com/iam/apikeys[API keys page of your account]. +<2> The {infer} endpoint URL you created on Watsonx. +<3> The ID of your IBM Cloud project. +<4> A valid API version parameter. You can find the active version data parameters https://cloud.ibm.com/apidocs/watsonx-ai#active-version-dates[here]. \ No newline at end of file diff --git a/docs/reference/inference/update-inference.asciidoc b/docs/reference/inference/update-inference.asciidoc index 166b002ea45f5..01a99d7f53062 100644 --- a/docs/reference/inference/update-inference.asciidoc +++ b/docs/reference/inference/update-inference.asciidoc @@ -6,7 +6,7 @@ experimental[] Updates an {infer} endpoint. -IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI or Hugging Face. +IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI, Anthropic, Watsonx.ai, or Hugging Face. For built-in models and models uploaded through Eland, the {infer} APIs offer an alternative way to use and manage trained models. However, if you do not plan to use the {infer} APIs to use these models or if you want to use non-NLP models, use the <>.
From 6be3036c01caffb8f82711e32f15ffbc6b8fda2d Mon Sep 17 00:00:00 2001 From: mccheah Date: Mon, 21 Oct 2024 01:28:44 -0700 Subject: [PATCH 235/449] Do not exclude empty arrays or empty objects in source filtering with Jackson streaming (#112250) --- docs/changelog/112250.yaml | 5 +++ .../filtering/FilterPathBasedFilter.java | 35 +++++++++++++++ .../search/lookup/SourceFilterTests.java | 44 +++++++++++++++++++ 3 files changed, 84 insertions(+) create mode 100644 docs/changelog/112250.yaml diff --git a/docs/changelog/112250.yaml b/docs/changelog/112250.yaml new file mode 100644 index 0000000000000..edbb5667d4b9d --- /dev/null +++ b/docs/changelog/112250.yaml @@ -0,0 +1,5 @@ +pr: 112250 +summary: Do not exclude empty arrays or empty objects in source filtering +area: Search +type: bug +issues: [109668] diff --git a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/filtering/FilterPathBasedFilter.java b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/filtering/FilterPathBasedFilter.java index e0b5875c6c108..4562afa8af693 100644 --- a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/filtering/FilterPathBasedFilter.java +++ b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/filtering/FilterPathBasedFilter.java @@ -96,6 +96,41 @@ public TokenFilter includeProperty(String name) { return filter; } + /** + * This is overridden in order to keep empty arrays in nested exclusions - see #109668. + *

    + * If we are excluding contents, we only want to exclude based on property name - but empty arrays in themselves do not have a property + * name. If the empty array were to be excluded, it should be done by excluding the parent. + *

    + * Note though that the expected behavior seems to be ambiguous if contentsFiltered is true - that is, that the filter has pruned all + * the contents of a given array, such that we are left with the empty array. The behavior below drops that array, for at the time of + * writing, not doing so would cause assertions in JsonXContentFilteringTests to fail, which expect this behavior. Yet it is not obvious + * if dropping the empty array in this case is correct. For example, one could expect this sort of behavior: + *

    + * From the user's perspective, this could reasonably yield either of: + *
      + *
    1. { "myArray": []}
    2. + *
    3. Removing {@code myArray} entirely.
    4. + *
    + */ + @Override + public boolean includeEmptyArray(boolean contentsFiltered) { + return inclusive == false && contentsFiltered == false; + } + + /** + * This is overridden in order to keep empty objects in nested exclusions - see #109668. + *

    + * The same logic applies to this as to {@link #includeEmptyArray(boolean)}, only for nested objects instead of nested arrays. + */ + @Override + public boolean includeEmptyObject(boolean contentsFiltered) { + return inclusive == false && contentsFiltered == false; + } + @Override protected boolean _includeScalar() { return inclusive == false; diff --git a/server/src/test/java/org/elasticsearch/search/lookup/SourceFilterTests.java b/server/src/test/java/org/elasticsearch/search/lookup/SourceFilterTests.java index 370584e3f29f5..bddfd53b2b120 100644 --- a/server/src/test/java/org/elasticsearch/search/lookup/SourceFilterTests.java +++ b/server/src/test/java/org/elasticsearch/search/lookup/SourceFilterTests.java @@ -112,4 +112,48 @@ public Source filter(SourceFilter sourceFilter) { } + // Verification for issue #109668 + public void testIncludeParentAndExcludeChildEmptyArray() { + Source fromMap = Source.fromMap(Map.of("myArray", List.of()), XContentType.JSON); + Source filteredMap = fromMap.filter(new SourceFilter(new String[] { "myArray" }, new String[] { "myArray.myField" })); + assertEquals(filteredMap.source(), Map.of("myArray", List.of())); + Source fromBytes = Source.fromBytes(new BytesArray("{\"myArray\": []}"), XContentType.JSON); + Source filteredBytes = fromBytes.filter(new SourceFilter(new String[] { "myArray" }, new String[] { "myArray.myField" })); + assertEquals(filteredBytes.source(), Map.of("myArray", List.of())); + } + + public void testIncludeParentAndExcludeChildEmptyObject() { + Source fromMap = Source.fromMap(Map.of("myObject", Map.of()), XContentType.JSON); + Source filteredMap = fromMap.filter(new SourceFilter(new String[] { "myObject" }, new String[] { "myObject.myField" })); + assertEquals(filteredMap.source(), Map.of("myObject", Map.of())); + Source fromBytes = Source.fromBytes(new BytesArray("{\"myObject\": {}}"), XContentType.JSON); + Source filteredBytes = fromBytes.filter(new SourceFilter(new String[] { "myObject" }, new String[] { 
"myObject.myField" })); + assertEquals(filteredBytes.source(), Map.of("myObject", Map.of())); + } + + public void testIncludeParentAndExcludeChildSubFieldsArrays() { + Source fromMap = Source.fromMap( + Map.of("myArray", List.of(Map.of("myField", "myValue", "other", "otherValue"))), + XContentType.JSON + ); + Source filteredMap = fromMap.filter(new SourceFilter(new String[] { "myArray" }, new String[] { "myArray.myField" })); + assertEquals(filteredMap.source(), Map.of("myArray", List.of(Map.of("other", "otherValue")))); + Source fromBytes = Source.fromBytes(new BytesArray(""" + { "myArray": [ { "myField": "myValue", "other": "otherValue" } ] }"""), XContentType.JSON); + Source filteredBytes = fromBytes.filter(new SourceFilter(new String[] { "myArray" }, new String[] { "myArray.myField" })); + assertEquals(filteredBytes.source(), Map.of("myArray", List.of(Map.of("other", "otherValue")))); + } + + public void testIncludeParentAndExcludeChildSubFieldsObjects() { + Source fromMap = Source.fromMap( + Map.of("myObject", Map.of("myField", "myValue", "other", "otherValue")), + XContentType.JSON + ); + Source filteredMap = fromMap.filter(new SourceFilter(new String[] { "myObject" }, new String[] { "myObject.myField" })); + assertEquals(filteredMap.source(), Map.of("myObject", Map.of("other", "otherValue"))); + Source fromBytes = Source.fromBytes(new BytesArray(""" + { "myObject": { "myField": "myValue", "other": "otherValue" } }"""), XContentType.JSON); + Source filteredBytes = fromBytes.filter(new SourceFilter(new String[] { "myObject" }, new String[] { "myObject.myField" })); + assertEquals(filteredBytes.source(), Map.of("myObject", Map.of("other", "otherValue"))); + } } From 5645240976cd88ff9f1a30240c0923e9788f3f4c Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Mon, 21 Oct 2024 11:32:07 +0300 Subject: [PATCH 236/449] SyntheticSourceIndexSettingsProvider restores stored source (#114978) * 
SyntheticSourceIndexSettingsProvider restores stored source * remove asserts * add and fix tests * fix test * more tests * fix assert * remove assert --- .../DisabledSecurityDataStreamTestCase.java | 1 + .../xpack/downsample/DownsampleRestIT.java | 2 +- .../xpack/logsdb/LogsdbRestIT.java | 34 ++++++++ .../xpack/logsdb/LogsdbRestIT.java | 3 + .../SyntheticSourceIndexSettingsProvider.java | 5 +- ...heticSourceIndexSettingsProviderTests.java | 86 ++++++++++++++++++- 6 files changed, 128 insertions(+), 3 deletions(-) diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DisabledSecurityDataStreamTestCase.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DisabledSecurityDataStreamTestCase.java index 9839f9abb080e..619bfd74d853c 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DisabledSecurityDataStreamTestCase.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DisabledSecurityDataStreamTestCase.java @@ -28,6 +28,7 @@ public abstract class DisabledSecurityDataStreamTestCase extends ESRestTestCase public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) .feature(FeatureFlag.FAILURE_STORE_ENABLED) + .setting("xpack.license.self_generated.type", "trial") .setting("xpack.security.enabled", "false") .setting("xpack.watcher.enabled", "false") .build(); diff --git a/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleRestIT.java b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleRestIT.java index 504326f1bd4b1..6794bc47fa3cd 100644 --- a/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleRestIT.java +++ b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleRestIT.java @@ -20,7 +20,7 @@ public class 
DownsampleRestIT extends ESClientYamlSuiteTestCase { @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) - .setting("xpack.license.self_generated.type", "basic") + .setting("xpack.license.self_generated.type", "trial") .setting("xpack.security.enabled", "false") .build(); diff --git a/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java b/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java index 813a181045f2e..edecf4eb9669e 100644 --- a/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java +++ b/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.logsdb; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.ESRestTestCase; @@ -72,4 +73,37 @@ public void testFeatureUsageWithLogsdbIndex() throws IOException { } } + public void testLogsdbIndexGetsStoredSource() throws IOException { + final String index = "test-index"; + createIndex(index, Settings.builder().put("index.mode", "logsdb").build()); + var settings = (Map) ((Map) getIndexSettings(index).get(index)).get("settings"); + assertEquals("logsdb", settings.get("index.mode")); + assertEquals(SourceFieldMapper.Mode.STORED.toString(), settings.get("index.mapping.source.mode")); + } + + public void testLogsdbOverrideSyntheticSourceModeInMapping() throws IOException { + final String index = "test-index"; + String mapping = """ + { + "_source": { + "mode": "synthetic" + } + } + """; + createIndex(index, Settings.builder().put("index.mode", "logsdb").build(), mapping); + 
var settings = (Map) ((Map) getIndexSettings(index).get(index)).get("settings"); + assertEquals("logsdb", settings.get("index.mode")); + assertEquals(SourceFieldMapper.Mode.STORED.toString(), settings.get("index.mapping.source.mode")); + } + + public void testLogsdbNoOverrideSyntheticSourceSetting() throws IOException { + final String index = "test-index"; + createIndex( + index, + Settings.builder().put("index.mode", "logsdb").put("index.mapping.source.mode", SourceFieldMapper.Mode.SYNTHETIC).build() + ); + var settings = (Map) ((Map) getIndexSettings(index).get(index)).get("settings"); + assertEquals("logsdb", settings.get("index.mode")); + assertEquals(SourceFieldMapper.Mode.SYNTHETIC.toString(), settings.get("index.mapping.source.mode")); + } } diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java index b2d2978a254df..16759c3292f7a 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java @@ -75,6 +75,9 @@ public void testFeatureUsageWithLogsdbIndex() throws IOException { Map feature = features.stream().filter(map -> "mappings".equals(map.get("family"))).findFirst().get(); assertThat(feature.get("name"), equalTo("synthetic-source")); assertThat(feature.get("license_level"), equalTo("enterprise")); + + var settings = (Map) ((Map) getIndexSettings("test-index").get("test-index")).get("settings"); + assertNull(settings.get("index.mapping.source.mode")); // Default, no downgrading. 
} } diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java index a190ff72de8df..f60c941c75a7c 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java @@ -21,6 +21,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.SourceFieldMapper; import java.io.IOException; import java.time.Instant; @@ -62,7 +63,9 @@ public Settings getAdditionalIndexSettings( if (newIndexHasSyntheticSourceUsage(indexName, templateIndexMode, indexTemplateAndCreateRequestSettings, combinedTemplateMappings) && syntheticSourceLicenseService.fallbackToStoredSource(isTemplateValidation)) { LOGGER.debug("creation of index [{}] with synthetic source without it being allowed", indexName); - // TODO: handle falling back to stored source + return Settings.builder() + .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.STORED.toString()) + .build(); } return Settings.EMPTY; } diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java index 738487b9365a7..362b387726105 100644 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java @@ -8,22 +8,42 @@ package org.elasticsearch.xpack.logsdb; import 
org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.DataStreamTestHelper; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.MapperTestUtils; +import org.elasticsearch.index.mapper.SourceFieldMapper; +import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.test.ESTestCase; import org.junit.Before; import java.io.IOException; +import java.time.Instant; import java.util.List; +import static org.elasticsearch.common.settings.Settings.builder; +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + public class SyntheticSourceIndexSettingsProviderTests extends ESTestCase { + private SyntheticSourceLicenseService syntheticSourceLicenseService; private SyntheticSourceIndexSettingsProvider provider; @Before public void setup() { - SyntheticSourceLicenseService syntheticSourceLicenseService = new SyntheticSourceLicenseService(Settings.EMPTY); + MockLicenseState licenseState = mock(MockLicenseState.class); + when(licenseState.isAllowed(any())).thenReturn(true); + var licenseService = new SyntheticSourceLicenseService(Settings.EMPTY); + licenseService.setLicenseState(licenseState); + syntheticSourceLicenseService = new SyntheticSourceLicenseService(Settings.EMPTY); + syntheticSourceLicenseService.setLicenseState(licenseState); + provider = new SyntheticSourceIndexSettingsProvider( syntheticSourceLicenseService, im -> MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), im.getSettings(), im.getIndex().getName()) @@ -226,4 +246,68 @@ public void testNewIndexHasSyntheticSourceUsage_invalidSettings() throws IOExcep } } + public void 
testGetAdditionalIndexSettingsDowngradeFromSyntheticSource() throws IOException { + String dataStreamName = "logs-app1"; + Metadata.Builder mb = Metadata.builder( + DataStreamTestHelper.getClusterStateWithDataStreams( + List.of(Tuple.tuple(dataStreamName, 1)), + List.of(), + Instant.now().toEpochMilli(), + builder().build(), + 1 + ).getMetadata() + ); + Metadata metadata = mb.build(); + + Settings settings = builder().put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC) + .build(); + + Settings result = provider.getAdditionalIndexSettings( + DataStream.getDefaultBackingIndexName(dataStreamName, 2), + dataStreamName, + null, + metadata, + Instant.ofEpochMilli(1L), + settings, + List.of() + ); + assertThat(result.size(), equalTo(0)); + + syntheticSourceLicenseService.setSyntheticSourceFallback(true); + result = provider.getAdditionalIndexSettings( + DataStream.getDefaultBackingIndexName(dataStreamName, 2), + dataStreamName, + null, + metadata, + Instant.ofEpochMilli(1L), + settings, + List.of() + ); + assertThat(result.size(), equalTo(1)); + assertEquals(SourceFieldMapper.Mode.STORED, SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.get(result)); + + result = provider.getAdditionalIndexSettings( + DataStream.getDefaultBackingIndexName(dataStreamName, 2), + dataStreamName, + IndexMode.TIME_SERIES, + metadata, + Instant.ofEpochMilli(1L), + settings, + List.of() + ); + assertThat(result.size(), equalTo(1)); + assertEquals(SourceFieldMapper.Mode.STORED, SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.get(result)); + + result = provider.getAdditionalIndexSettings( + DataStream.getDefaultBackingIndexName(dataStreamName, 2), + dataStreamName, + IndexMode.LOGSDB, + metadata, + Instant.ofEpochMilli(1L), + settings, + List.of() + ); + assertThat(result.size(), equalTo(1)); + assertEquals(SourceFieldMapper.Mode.STORED, SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.get(result)); + } } From 
78a43981b64d384ca5fb22ea0a94d327c05e5358 Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Mon, 21 Oct 2024 11:18:26 +0200 Subject: [PATCH 237/449] Reprocess operator file settings on service start (#114295) Changes `FileSettingsService` to reprocess file settings on every restart or master node change, even if versions match between file and cluster-state metadata. If the file version is lower than the metadata version, processing is still skipped to avoid applying stale settings. This makes it easier for consumers of file settings to change their behavior w.r.t. file settings contents. For instance, an update of how role mappings are stored will automatically apply on the next restart, without the need to manually increment the file settings version to force reprocessing. Relates: ES-9628 --- docs/changelog/114295.yaml | 5 + .../FileSettingsRoleMappingUpgradeIT.java | 111 ++++++++++ .../service/FileSettingsServiceIT.java | 127 ++++++++++- .../file/AbstractFileWatchingService.java | 22 +- .../reservedstate/service/ErrorState.java | 19 +- .../service/FileSettingsService.java | 22 +- .../service/ReservedClusterStateService.java | 46 +++- .../service/ReservedStateErrorTask.java | 6 +- .../service/ReservedStateUpdateTask.java | 55 +++-- .../service/ReservedStateVersionCheck.java | 40 ++++ .../service/FileSettingsServiceTests.java | 68 +++++- .../ReservedClusterStateServiceTests.java | 120 +++++++++-- .../service/ReservedStateUpdateTaskTests.java | 10 +- .../ReservedLifecycleStateServiceTests.java | 9 +- .../RoleMappingFileSettingsIT.java | 12 +- .../FileSettingsRoleMappingsRestartIT.java | 200 ++++++++++++++---- ...vedSnapshotLifecycleStateServiceTests.java | 5 +- 17 files changed, 762 insertions(+), 115 deletions(-) create mode 100644 docs/changelog/114295.yaml create mode 100644 qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FileSettingsRoleMappingUpgradeIT.java create mode 100644 
server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateVersionCheck.java diff --git a/docs/changelog/114295.yaml b/docs/changelog/114295.yaml new file mode 100644 index 0000000000000..2acdc293a206c --- /dev/null +++ b/docs/changelog/114295.yaml @@ -0,0 +1,5 @@ +pr: 114295 +summary: "Reprocess operator file settings when settings service starts, due to node restart or master node change" +area: Infra/Settings +type: enhancement +issues: [ ] diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FileSettingsRoleMappingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FileSettingsRoleMappingUpgradeIT.java new file mode 100644 index 0000000000000..3275f3e0e136f --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FileSettingsRoleMappingUpgradeIT.java @@ -0,0 +1,111 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.client.Request; +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.test.XContentTestUtils; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.resource.Resource; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.TestRule; + +import java.io.IOException; +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; + +public class FileSettingsRoleMappingUpgradeIT extends ParameterizedRollingUpgradeTestCase { + + private static final String settingsJSON = """ + { + "metadata": { + "version": "1", + "compatibility": "8.4.0" + }, + "state": { + "role_mappings": { + "everyone_kibana": { + "enabled": true, + "roles": [ "kibana_user" ], + "rules": { "field": { "username": "*" } } + } + } + } + }"""; + + private static final TemporaryFolder repoDirectory = new TemporaryFolder(); + + private static final ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .version(getOldClusterTestVersion()) + .nodes(NODE_NUM) + .setting("path.repo", new Supplier<>() { + @Override + @SuppressForbidden(reason = "TemporaryFolder only has io.File methods, not nio.File") + public String get() { + return repoDirectory.getRoot().getPath(); + } + }) + .setting("xpack.security.enabled", "true") + // workaround to avoid having to set up clients and authorization headers + .setting("xpack.security.authc.anonymous.roles", "superuser") + .configFile("operator/settings.json", 
Resource.fromString(settingsJSON)) + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(repoDirectory).around(cluster); + + public FileSettingsRoleMappingUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + @Override + protected ElasticsearchCluster getUpgradeCluster() { + return cluster; + } + + @Before + public void checkVersions() { + assumeTrue( + "Only relevant when upgrading from a version before role mappings were stored in cluster state", + oldClusterHasFeature("gte_v8.4.0") && oldClusterHasFeature("gte_v8.15.0") == false + ); + } + + public void testRoleMappingsAppliedOnUpgrade() throws IOException { + if (isOldCluster()) { + Request clusterStateRequest = new Request("GET", "/_cluster/state/metadata"); + List roleMappings = new XContentTestUtils.JsonMapView(entityAsMap(client().performRequest(clusterStateRequest))).get( + "metadata.role_mappings.role_mappings" + ); + assertThat(roleMappings, is(nullValue())); + } else if (isUpgradedCluster()) { + // the nodes have all been upgraded. 
Check they re-processed the role mappings in the settings file on + // upgrade + Request clusterStateRequest = new Request("GET", "/_cluster/state/metadata"); + List roleMappings = new XContentTestUtils.JsonMapView(entityAsMap(client().performRequest(clusterStateRequest))).get( + "metadata.role_mappings.role_mappings" + ); + assertThat(roleMappings, is(not(nullValue()))); + assertThat(roleMappings.size(), equalTo(1)); + } + } +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java index c618e354802a7..f9122ccfb4a3e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java @@ -25,6 +25,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.reservedstate.action.ReservedClusterSettingsAction; import org.elasticsearch.test.ESIntegTestCase; +import org.junit.Before; import java.nio.charset.StandardCharsets; import java.nio.file.Files; @@ -40,6 +41,7 @@ import static org.elasticsearch.test.NodeRoles.dataOnlyNode; import static org.elasticsearch.test.NodeRoles.masterNode; import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; @@ -50,7 +52,12 @@ @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) public class FileSettingsServiceIT extends ESIntegTestCase { - private static final AtomicLong versionCounter = new AtomicLong(1); + private final AtomicLong versionCounter = new AtomicLong(1); + + @Before + public void resetVersionCounter() { + versionCounter.set(1); + } private static final 
String testJSON = """ { @@ -102,6 +109,19 @@ public class FileSettingsServiceIT extends ESIntegTestCase { } }"""; + private static final String testOtherErrorJSON = """ + { + "metadata": { + "version": "%s", + "compatibility": "8.4.0" + }, + "state": { + "bad_cluster_settings": { + "search.allow_expensive_queries": "false" + } + } + }"""; + private void assertMasterNode(Client client, String node) { assertThat( client.admin().cluster().prepareState(TEST_REQUEST_TIMEOUT).get().getState().nodes().getMasterNode().getName(), @@ -109,8 +129,9 @@ private void assertMasterNode(Client client, String node) { ); } - public static void writeJSONFile(String node, String json, AtomicLong versionCounter, Logger logger) throws Exception { - long version = versionCounter.incrementAndGet(); + public static void writeJSONFile(String node, String json, AtomicLong versionCounter, Logger logger, boolean incrementVersion) + throws Exception { + long version = incrementVersion ? versionCounter.incrementAndGet() : versionCounter.get(); FileSettingsService fileSettingsService = internalCluster().getInstance(FileSettingsService.class, node); @@ -124,6 +145,15 @@ public static void writeJSONFile(String node, String json, AtomicLong versionCou logger.info("--> After writing new settings file: [{}]", settingsFileContent); } + public static void writeJSONFile(String node, String json, AtomicLong versionCounter, Logger logger) throws Exception { + writeJSONFile(node, json, versionCounter, logger, true); + } + + public static void writeJSONFileWithoutVersionIncrement(String node, String json, AtomicLong versionCounter, Logger logger) + throws Exception { + writeJSONFile(node, json, versionCounter, logger, false); + } + private Tuple setupCleanupClusterStateListener(String node) { ClusterService clusterService = internalCluster().clusterService(node); CountDownLatch savedClusterState = new CountDownLatch(1); @@ -171,7 +201,10 @@ public void clusterChanged(ClusterChangedEvent event) { private void 
assertClusterStateSaveOK(CountDownLatch savedClusterState, AtomicLong metadataVersion, String expectedBytesPerSec) throws Exception { assertTrue(savedClusterState.await(20, TimeUnit.SECONDS)); + assertExpectedRecoveryBytesSettingAndVersion(metadataVersion, expectedBytesPerSec); + } + private static void assertExpectedRecoveryBytesSettingAndVersion(AtomicLong metadataVersion, String expectedBytesPerSec) { final ClusterStateResponse clusterStateResponse = clusterAdmin().state( new ClusterStateRequest(TEST_REQUEST_TIMEOUT).waitForMetadataVersion(metadataVersion.get()) ).actionGet(); @@ -337,6 +370,77 @@ public void testErrorSaved() throws Exception { assertClusterStateNotSaved(savedClusterState.v1(), savedClusterState.v2()); } + public void testErrorCanRecoverOnRestart() throws Exception { + internalCluster().setBootstrapMasterNodeIndex(0); + logger.info("--> start data node / non master node"); + String dataNode = internalCluster().startNode(Settings.builder().put(dataOnlyNode()).put("discovery.initial_state_timeout", "1s")); + FileSettingsService dataFileSettingsService = internalCluster().getInstance(FileSettingsService.class, dataNode); + + assertFalse(dataFileSettingsService.watching()); + + logger.info("--> start master node"); + final String masterNode = internalCluster().startMasterOnlyNode( + Settings.builder().put(INITIAL_STATE_TIMEOUT_SETTING.getKey(), "0s").build() + ); + assertMasterNode(internalCluster().nonMasterClient(), masterNode); + var savedClusterState = setupClusterStateListenerForError(masterNode); + + FileSettingsService masterFileSettingsService = internalCluster().getInstance(FileSettingsService.class, masterNode); + + assertTrue(masterFileSettingsService.watching()); + assertFalse(dataFileSettingsService.watching()); + + writeJSONFile(masterNode, testErrorJSON, versionCounter, logger); + AtomicLong metadataVersion = savedClusterState.v2(); + assertClusterStateNotSaved(savedClusterState.v1(), metadataVersion); + 
assertHasErrors(metadataVersion, "not_cluster_settings"); + + // write valid json without version increment to simulate ES being able to process settings after a restart (usually, this would be + // due to a code change) + writeJSONFileWithoutVersionIncrement(masterNode, testJSON, versionCounter, logger); + internalCluster().restartNode(masterNode); + ensureGreen(); + + // we don't know the exact metadata version to wait for so rely on an assertBusy instead + assertBusy(() -> assertExpectedRecoveryBytesSettingAndVersion(metadataVersion, "50mb")); + assertBusy(() -> assertNoErrors(metadataVersion)); + } + + public void testNewErrorOnRestartReprocessing() throws Exception { + internalCluster().setBootstrapMasterNodeIndex(0); + logger.info("--> start data node / non master node"); + String dataNode = internalCluster().startNode(Settings.builder().put(dataOnlyNode()).put("discovery.initial_state_timeout", "1s")); + FileSettingsService dataFileSettingsService = internalCluster().getInstance(FileSettingsService.class, dataNode); + + assertFalse(dataFileSettingsService.watching()); + + logger.info("--> start master node"); + final String masterNode = internalCluster().startMasterOnlyNode( + Settings.builder().put(INITIAL_STATE_TIMEOUT_SETTING.getKey(), "0s").build() + ); + assertMasterNode(internalCluster().nonMasterClient(), masterNode); + var savedClusterState = setupClusterStateListenerForError(masterNode); + + FileSettingsService masterFileSettingsService = internalCluster().getInstance(FileSettingsService.class, masterNode); + + assertTrue(masterFileSettingsService.watching()); + assertFalse(dataFileSettingsService.watching()); + + writeJSONFile(masterNode, testErrorJSON, versionCounter, logger); + AtomicLong metadataVersion = savedClusterState.v2(); + assertClusterStateNotSaved(savedClusterState.v1(), metadataVersion); + assertHasErrors(metadataVersion, "not_cluster_settings"); + + // write json with new error without version increment to simulate ES failing to 
process settings after a restart for a new reason + // (usually, this would be due to a code change) + writeJSONFileWithoutVersionIncrement(masterNode, testOtherErrorJSON, versionCounter, logger); + assertHasErrors(metadataVersion, "not_cluster_settings"); + internalCluster().restartNode(masterNode); + ensureGreen(); + + assertBusy(() -> assertHasErrors(metadataVersion, "bad_cluster_settings")); + } + public void testSettingsAppliedOnMasterReElection() throws Exception { internalCluster().setBootstrapMasterNodeIndex(0); logger.info("--> start master node"); @@ -383,4 +487,21 @@ public void testSettingsAppliedOnMasterReElection() throws Exception { assertClusterStateSaveOK(savedClusterState.v1(), savedClusterState.v2(), "43mb"); } + private void assertHasErrors(AtomicLong waitForMetadataVersion, String expectedError) { + var errorMetadata = getErrorMetadata(waitForMetadataVersion); + assertThat(errorMetadata, is(notNullValue())); + assertThat(errorMetadata.errors(), containsInAnyOrder(containsString(expectedError))); + } + + private void assertNoErrors(AtomicLong waitForMetadataVersion) { + var errorMetadata = getErrorMetadata(waitForMetadataVersion); + assertThat(errorMetadata, is(nullValue())); + } + + private ReservedStateErrorMetadata getErrorMetadata(AtomicLong waitForMetadataVersion) { + final ClusterStateResponse clusterStateResponse = clusterAdmin().state( + new ClusterStateRequest(TEST_REQUEST_TIMEOUT).waitForMetadataVersion(waitForMetadataVersion.get()) + ).actionGet(); + return clusterStateResponse.getState().getMetadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE).errorMetadata(); + } } diff --git a/server/src/main/java/org/elasticsearch/common/file/AbstractFileWatchingService.java b/server/src/main/java/org/elasticsearch/common/file/AbstractFileWatchingService.java index dcb28a17a9b49..a900722397edd 100644 --- a/server/src/main/java/org/elasticsearch/common/file/AbstractFileWatchingService.java +++ 
b/server/src/main/java/org/elasticsearch/common/file/AbstractFileWatchingService.java @@ -77,6 +77,15 @@ public AbstractFileWatchingService(Path watchedFile) { protected abstract void processInitialFileMissing() throws InterruptedException, ExecutionException, IOException; + /** + * Defaults to generic {@link #processFileChanges()} behavior. + * An implementation can override this to define different file handling when the file is processed during + * initial service start. + */ + protected void processFileOnServiceStart() throws IOException, ExecutionException, InterruptedException { + processFileChanges(); + } + public final void addFileChangedListener(FileChangedListener listener) { eventListeners.add(listener); } @@ -174,7 +183,7 @@ protected final void watcherThread() { if (Files.exists(path)) { logger.debug("found initial operator settings file [{}], applying...", path); - processSettingsAndNotifyListeners(); + processSettingsOnServiceStartAndNotifyListeners(); } else { processInitialFileMissing(); // Notify everyone we don't have any initial file settings @@ -290,6 +299,17 @@ final WatchKey enableDirectoryWatcher(WatchKey previousKey, Path settingsDir) th } while (true); } + void processSettingsOnServiceStartAndNotifyListeners() throws InterruptedException { + try { + processFileOnServiceStart(); + for (var listener : eventListeners) { + listener.watchedFileChanged(); + } + } catch (IOException | ExecutionException e) { + logger.error(() -> "Error processing watched file: " + watchedFile(), e); + } + } + void processSettingsAndNotifyListeners() throws InterruptedException { try { processFileChanges(); diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ErrorState.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ErrorState.java index 1a58974985ba8..af0512b78cb7e 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ErrorState.java +++ 
b/server/src/main/java/org/elasticsearch/reservedstate/service/ErrorState.java @@ -15,9 +15,22 @@ import static org.elasticsearch.ExceptionsHelper.stackTrace; -record ErrorState(String namespace, Long version, List errors, ReservedStateErrorMetadata.ErrorKind errorKind) { - ErrorState(String namespace, Long version, Exception e, ReservedStateErrorMetadata.ErrorKind errorKind) { - this(namespace, version, List.of(stackTrace(e)), errorKind); +record ErrorState( + String namespace, + Long version, + ReservedStateVersionCheck versionCheck, + List errors, + ReservedStateErrorMetadata.ErrorKind errorKind +) { + + ErrorState( + String namespace, + Long version, + ReservedStateVersionCheck versionCheck, + Exception e, + ReservedStateErrorMetadata.ErrorKind errorKind + ) { + this(namespace, version, versionCheck, List.of(stackTrace(e)), errorKind); } public String toString() { diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java index c29f83c780d39..811b59465ce76 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java @@ -27,6 +27,8 @@ import java.nio.file.Files; import java.util.concurrent.ExecutionException; +import static org.elasticsearch.reservedstate.service.ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION; +import static org.elasticsearch.reservedstate.service.ReservedStateVersionCheck.HIGHER_VERSION_ONLY; import static org.elasticsearch.xcontent.XContentType.JSON; /** @@ -115,20 +117,34 @@ protected boolean shouldRefreshFileState(ClusterState clusterState) { */ @Override protected void processFileChanges() throws ExecutionException, InterruptedException, IOException { - PlainActionFuture completion = new PlainActionFuture<>(); logger.info("processing path [{}] for [{}]", watchedFile(), NAMESPACE); + 
processFileChanges(HIGHER_VERSION_ONLY); + } + + /** + * Read settings and pass them to {@link ReservedClusterStateService} for application. + * Settings will be reprocessed even if the cluster-state version equals that found in the settings file. + */ + @Override + protected void processFileOnServiceStart() throws IOException, ExecutionException, InterruptedException { + logger.info("processing path [{}] for [{}] on service start", watchedFile(), NAMESPACE); + processFileChanges(HIGHER_OR_SAME_VERSION); + } + + private void processFileChanges(ReservedStateVersionCheck versionCheck) throws IOException, InterruptedException, ExecutionException { + PlainActionFuture completion = new PlainActionFuture<>(); try ( var fis = Files.newInputStream(watchedFile()); var bis = new BufferedInputStream(fis); var parser = JSON.xContent().createParser(XContentParserConfiguration.EMPTY, bis) ) { - stateService.process(NAMESPACE, parser, (e) -> completeProcessing(e, completion)); + stateService.process(NAMESPACE, parser, versionCheck, (e) -> completeProcessing(e, completion)); } completion.get(); } @Override - protected void processInitialFileMissing() throws ExecutionException, InterruptedException, IOException { + protected void processInitialFileMissing() throws ExecutionException, InterruptedException { PlainActionFuture completion = new PlainActionFuture<>(); logger.info("setting file [{}] not found, initializing [{}] as empty", watchedFile(), NAMESPACE); stateService.initEmpty(NAMESPACE, completion); diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java index 5571fcfb08544..0c5fa61b29cfe 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java @@ -110,7 +110,13 @@ ReservedStateChunk 
parse(String namespace, XContentParser parser) { try { return stateChunkParser.apply(parser, null); } catch (Exception e) { - ErrorState errorState = new ErrorState(namespace, EMPTY_VERSION, e, ReservedStateErrorMetadata.ErrorKind.PARSING); + ErrorState errorState = new ErrorState( + namespace, + EMPTY_VERSION, + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, + e, + ReservedStateErrorMetadata.ErrorKind.PARSING + ); updateErrorState(errorState); logger.debug("error processing state change request for [{}] with the following errors [{}]", namespace, errorState); @@ -123,16 +129,22 @@ ReservedStateChunk parse(String namespace, XContentParser parser) { * * @param namespace the namespace under which we'll store the reserved keys in the cluster state metadata * @param parser the XContentParser to process + * @param versionCheck determines if current and new versions of reserved state require processing or should be skipped * @param errorListener a consumer called with {@link IllegalStateException} if the content has errors and the * cluster state cannot be correctly applied, null if successful or state couldn't be applied because of incompatible version. 
*/ - public void process(String namespace, XContentParser parser, Consumer errorListener) { + public void process( + String namespace, + XContentParser parser, + ReservedStateVersionCheck versionCheck, + Consumer errorListener + ) { ReservedStateChunk stateChunk; try { stateChunk = parse(namespace, parser); } catch (Exception e) { - ErrorState errorState = new ErrorState(namespace, EMPTY_VERSION, e, ReservedStateErrorMetadata.ErrorKind.PARSING); + ErrorState errorState = new ErrorState(namespace, EMPTY_VERSION, versionCheck, e, ReservedStateErrorMetadata.ErrorKind.PARSING); updateErrorState(errorState); logger.debug("error processing state change request for [{}] with the following errors [{}]", namespace, errorState); @@ -142,7 +154,7 @@ public void process(String namespace, XContentParser parser, Consumer return; } - process(namespace, stateChunk, errorListener); + process(namespace, stateChunk, versionCheck, errorListener); } public void initEmpty(String namespace, ActionListener listener) { @@ -153,6 +165,7 @@ public void initEmpty(String namespace, ActionListener lis new ReservedStateUpdateTask( namespace, emptyState, + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, Map.of(), List.of(), // error state should not be possible since there is no metadata being parsed or processed @@ -172,9 +185,14 @@ public void initEmpty(String namespace, ActionListener lis * @param errorListener a consumer called with {@link IllegalStateException} if the content has errors and the * cluster state cannot be correctly applied, null if successful or the state failed to apply because of incompatible version. 
*/ - public void process(String namespace, ReservedStateChunk reservedStateChunk, Consumer errorListener) { + public void process( + String namespace, + ReservedStateChunk reservedStateChunk, + ReservedStateVersionCheck versionCheck, + Consumer errorListener + ) { Map reservedState = reservedStateChunk.state(); - final ReservedStateVersion reservedStateVersion = reservedStateChunk.metadata(); + ReservedStateVersion reservedStateVersion = reservedStateChunk.metadata(); LinkedHashSet orderedHandlers; try { @@ -183,6 +201,7 @@ public void process(String namespace, ReservedStateChunk reservedStateChunk, Con ErrorState errorState = new ErrorState( namespace, reservedStateVersion.version(), + versionCheck, e, ReservedStateErrorMetadata.ErrorKind.PARSING ); @@ -201,7 +220,7 @@ public void process(String namespace, ReservedStateChunk reservedStateChunk, Con // We check if we should exit early on the state version from clusterService. The ReservedStateUpdateTask // will check again with the most current state version if this continues. - if (checkMetadataVersion(namespace, existingMetadata, reservedStateVersion) == false) { + if (checkMetadataVersion(namespace, existingMetadata, reservedStateVersion, versionCheck) == false) { errorListener.accept(null); return; } @@ -209,7 +228,7 @@ public void process(String namespace, ReservedStateChunk reservedStateChunk, Con // We trial run all handler validations to ensure that we can process all of the cluster state error free. 
var trialRunErrors = trialRun(namespace, state, reservedStateChunk, orderedHandlers); // this is not using the modified trial state above, but that doesn't matter, we're just setting errors here - var error = checkAndReportError(namespace, trialRunErrors, reservedStateVersion); + var error = checkAndReportError(namespace, trialRunErrors, reservedStateVersion, versionCheck); if (error != null) { errorListener.accept(error); @@ -220,6 +239,7 @@ public void process(String namespace, ReservedStateChunk reservedStateChunk, Con new ReservedStateUpdateTask( namespace, reservedStateChunk, + versionCheck, handlers, orderedHandlers, ReservedClusterStateService.this::updateErrorState, @@ -233,7 +253,7 @@ public void onResponse(ActionResponse.Empty empty) { @Override public void onFailure(Exception e) { // Don't spam the logs on repeated errors - if (isNewError(existingMetadata, reservedStateVersion.version())) { + if (isNewError(existingMetadata, reservedStateVersion.version(), versionCheck)) { logger.debug("Failed to apply reserved cluster state", e); errorListener.accept(e); } else { @@ -247,7 +267,12 @@ public void onFailure(Exception e) { } // package private for testing - Exception checkAndReportError(String namespace, List errors, ReservedStateVersion reservedStateVersion) { + Exception checkAndReportError( + String namespace, + List errors, + ReservedStateVersion reservedStateVersion, + ReservedStateVersionCheck versionCheck + ) { // Any errors should be discovered through validation performed in the transform calls if (errors.isEmpty() == false) { logger.debug("Error processing state change request for [{}] with the following errors [{}]", namespace, errors); @@ -255,6 +280,7 @@ Exception checkAndReportError(String namespace, List errors, ReservedSta var errorState = new ErrorState( namespace, reservedStateVersion.version(), + versionCheck, errors, ReservedStateErrorMetadata.ErrorKind.VALIDATION ); diff --git 
a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateErrorTask.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateErrorTask.java index 9296981e64d2d..e9fb736608d53 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateErrorTask.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateErrorTask.java @@ -51,10 +51,10 @@ ActionListener listener() { } // package private for testing - static boolean isNewError(ReservedStateMetadata existingMetadata, Long newStateVersion) { + static boolean isNewError(ReservedStateMetadata existingMetadata, Long newStateVersion, ReservedStateVersionCheck versionCheck) { return (existingMetadata == null || existingMetadata.errorMetadata() == null - || existingMetadata.errorMetadata().version() < newStateVersion + || versionCheck.test(existingMetadata.errorMetadata().version(), newStateVersion) || newStateVersion.equals(RESTORED_VERSION) || newStateVersion.equals(EMPTY_VERSION) || newStateVersion.equals(NO_VERSION)); @@ -63,7 +63,7 @@ static boolean isNewError(ReservedStateMetadata existingMetadata, Long newStateV static boolean checkErrorVersion(ClusterState currentState, ErrorState errorState) { ReservedStateMetadata existingMetadata = currentState.metadata().reservedStateMetadata().get(errorState.namespace()); // check for noop here - if (isNewError(existingMetadata, errorState.version()) == false) { + if (isNewError(existingMetadata, errorState.version(), errorState.versionCheck()) == false) { logger.info( () -> format( "Not updating error state because version [%s] is less or equal to the last state error version [%s]", diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java index 17d4de65506ff..92e248f160f0f 100644 --- 
a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java @@ -47,6 +47,7 @@ public class ReservedStateUpdateTask implements ClusterStateTaskListener { private final String namespace; private final ReservedStateChunk stateChunk; + private final ReservedStateVersionCheck versionCheck; private final Map> handlers; private final Collection orderedHandlers; private final Consumer errorReporter; @@ -55,6 +56,7 @@ public class ReservedStateUpdateTask implements ClusterStateTaskListener { public ReservedStateUpdateTask( String namespace, ReservedStateChunk stateChunk, + ReservedStateVersionCheck versionCheck, Map> handlers, Collection orderedHandlers, Consumer errorReporter, @@ -62,6 +64,7 @@ public ReservedStateUpdateTask( ) { this.namespace = namespace; this.stateChunk = stateChunk; + this.versionCheck = versionCheck; this.handlers = handlers; this.orderedHandlers = orderedHandlers; this.errorReporter = errorReporter; @@ -89,7 +92,7 @@ protected ClusterState execute(final ClusterState currentState) { Map reservedState = stateChunk.state(); ReservedStateVersion reservedStateVersion = stateChunk.metadata(); - if (checkMetadataVersion(namespace, existingMetadata, reservedStateVersion) == false) { + if (checkMetadataVersion(namespace, existingMetadata, reservedStateVersion, versionCheck) == false) { return currentState; } @@ -110,7 +113,7 @@ protected ClusterState execute(final ClusterState currentState) { } } - checkAndThrowOnError(errors, reservedStateVersion); + checkAndThrowOnError(errors, reservedStateVersion, versionCheck); // Remove the last error if we had previously encountered any in prior processing of reserved state reservedMetadataBuilder.errorMetadata(null); @@ -121,14 +124,15 @@ protected ClusterState execute(final ClusterState currentState) { return stateBuilder.metadata(metadataBuilder).build(); } - private void 
checkAndThrowOnError(List errors, ReservedStateVersion reservedStateVersion) { + private void checkAndThrowOnError(List errors, ReservedStateVersion version, ReservedStateVersionCheck versionCheck) { // Any errors should be discovered through validation performed in the transform calls if (errors.isEmpty() == false) { logger.debug("Error processing state change request for [{}] with the following errors [{}]", namespace, errors); var errorState = new ErrorState( namespace, - reservedStateVersion.version(), + version.version(), + versionCheck, errors, ReservedStateErrorMetadata.ErrorKind.VALIDATION ); @@ -155,7 +159,8 @@ static Set keysForHandler(ReservedStateMetadata reservedStateMetadata, S static boolean checkMetadataVersion( String namespace, ReservedStateMetadata existingMetadata, - ReservedStateVersion reservedStateVersion + ReservedStateVersion reservedStateVersion, + ReservedStateVersionCheck versionCheck ) { if (Version.CURRENT.before(reservedStateVersion.minCompatibleVersion())) { logger.warn( @@ -168,35 +173,45 @@ static boolean checkMetadataVersion( return false; } - if (reservedStateVersion.version().equals(ReservedStateMetadata.EMPTY_VERSION)) { + Long newVersion = reservedStateVersion.version(); + if (newVersion.equals(ReservedStateMetadata.EMPTY_VERSION)) { return true; } // require a regular positive version, reject any special version - if (reservedStateVersion.version() <= 0L) { + if (newVersion <= 0L) { logger.warn( () -> format( "Not updating reserved cluster state for namespace [%s], because version [%s] is less or equal to 0", namespace, - reservedStateVersion.version() + newVersion ) ); return false; } - if (existingMetadata != null && existingMetadata.version() >= reservedStateVersion.version()) { - logger.warn( - () -> format( - "Not updating reserved cluster state for namespace [%s], because version [%s] is less or equal" - + " to the current metadata version [%s]", - namespace, - reservedStateVersion.version(), - 
existingMetadata.version() - ) - ); - return false; + if (existingMetadata == null) { + return true; + } + + Long currentVersion = existingMetadata.version(); + if (versionCheck.test(currentVersion, newVersion)) { + return true; } - return true; + logger.warn( + () -> format( + "Not updating reserved cluster state for namespace [%s], because version [%s] is %s the current metadata version [%s]", + namespace, + newVersion, + switch (versionCheck) { + case ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION -> "less than"; + case ReservedStateVersionCheck.HIGHER_VERSION_ONLY -> "less than or equal to"; + }, + currentVersion + ) + ); + return false; } + } diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateVersionCheck.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateVersionCheck.java new file mode 100644 index 0000000000000..6907331edf1d6 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateVersionCheck.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.reservedstate.service; + +import java.util.function.BiPredicate; + +/** + * Enum representing the logic for determining whether a reserved state should be processed + * based on the current and new versions. + */ +public enum ReservedStateVersionCheck implements BiPredicate { + /** + * Returns {@code true} if the new version is higher than the current version. + * This is the default behavior when processing changes to file settings. 
+ */ + HIGHER_VERSION_ONLY { + @Override + public boolean test(Long currentVersion, Long newVersion) { + return currentVersion < newVersion; + } + }, + /** + * Returns {@code true} if the new version is higher or equal to the current version. + * This allows re-processing of the same version. + * Used when processing file settings during service startup. + */ + HIGHER_OR_SAME_VERSION { + @Override + public boolean test(Long currentVersion, Long newVersion) { + return currentVersion <= newVersion; + } + }; +} diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java index aa6a9667ce39e..8ee2754427dda 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java @@ -54,6 +54,7 @@ import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.hasEntry; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; @@ -148,9 +149,9 @@ public void testOperatorDirName() { @SuppressWarnings("unchecked") public void testInitialFileError() throws Exception { doAnswer((Answer) invocation -> { - ((Consumer) invocation.getArgument(2)).accept(new IllegalStateException("Some exception")); + ((Consumer) invocation.getArgument(3)).accept(new IllegalStateException("Some exception")); return null; - }).when(controller).process(any(), any(XContentParser.class), any()); + }).when(controller).process(any(), any(XContentParser.class), eq(randomFrom(ReservedStateVersionCheck.values())), any()); AtomicBoolean settingsChanged = new AtomicBoolean(false); CountDownLatch latch = new CountDownLatch(1); @@ -163,7 +164,7 @@ public void 
testInitialFileError() throws Exception { } finally { latch.countDown(); } - }).when(fileSettingsService).processFileChanges(); + }).when(fileSettingsService).processFileOnServiceStart(); Files.createDirectories(fileSettingsService.watchedFileDir()); // contents of the JSON don't matter, we just need a file to exist @@ -175,7 +176,8 @@ public void testInitialFileError() throws Exception { // wait until the watcher thread has started, and it has discovered the file assertTrue(latch.await(20, TimeUnit.SECONDS)); - verify(fileSettingsService, times(1)).processFileChanges(); + verify(fileSettingsService, times(1)).processFileOnServiceStart(); + verify(controller, times(1)).process(any(), any(XContentParser.class), eq(ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION), any()); // assert we never notified any listeners of successful application of file based settings assertFalse(settingsChanged.get()); } @@ -184,9 +186,9 @@ public void testInitialFileError() throws Exception { public void testInitialFileWorks() throws Exception { // Let's check that if we didn't throw an error that everything works doAnswer((Answer) invocation -> { - ((Consumer) invocation.getArgument(2)).accept(null); + ((Consumer) invocation.getArgument(3)).accept(null); return null; - }).when(controller).process(any(), any(XContentParser.class), any()); + }).when(controller).process(any(), any(XContentParser.class), any(), any()); CountDownLatch latch = new CountDownLatch(1); @@ -196,13 +198,67 @@ public void testInitialFileWorks() throws Exception { // contents of the JSON don't matter, we just need a file to exist writeTestFile(fileSettingsService.watchedFile(), "{}"); + doAnswer((Answer) invocation -> { + try { + return invocation.callRealMethod(); + } finally { + latch.countDown(); + } + }).when(fileSettingsService).processFileOnServiceStart(); + fileSettingsService.start(); fileSettingsService.clusterChanged(new ClusterChangedEvent("test", clusterService.state(), ClusterState.EMPTY_STATE)); // 
wait for listener to be called assertTrue(latch.await(20, TimeUnit.SECONDS)); + verify(fileSettingsService, times(1)).processFileOnServiceStart(); + verify(controller, times(1)).process(any(), any(XContentParser.class), eq(ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION), any()); + } + + @SuppressWarnings("unchecked") + public void testProcessFileChanges() throws Exception { + doAnswer((Answer) invocation -> { + ((Consumer) invocation.getArgument(3)).accept(null); + return null; + }).when(controller).process(any(), any(XContentParser.class), any(), any()); + + // we get three events: initial clusterChanged event, first write, second write + CountDownLatch latch = new CountDownLatch(3); + + fileSettingsService.addFileChangedListener(latch::countDown); + + Files.createDirectories(fileSettingsService.watchedFileDir()); + // contents of the JSON don't matter, we just need a file to exist + writeTestFile(fileSettingsService.watchedFile(), "{}"); + + doAnswer((Answer) invocation -> { + try { + return invocation.callRealMethod(); + } finally { + latch.countDown(); + } + }).when(fileSettingsService).processFileOnServiceStart(); + doAnswer((Answer) invocation -> { + try { + return invocation.callRealMethod(); + } finally { + latch.countDown(); + } + }).when(fileSettingsService).processFileChanges(); + + fileSettingsService.start(); + fileSettingsService.clusterChanged(new ClusterChangedEvent("test", clusterService.state(), ClusterState.EMPTY_STATE)); + // second file change; contents still don't matter + writeTestFile(fileSettingsService.watchedFile(), "{}"); + + // wait for listener to be called (once for initial processing, once for subsequent update) + assertTrue(latch.await(20, TimeUnit.SECONDS)); + + verify(fileSettingsService, times(1)).processFileOnServiceStart(); + verify(controller, times(1)).process(any(), any(XContentParser.class), eq(ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION), any()); verify(fileSettingsService, times(1)).processFileChanges(); + 
verify(controller, times(1)).process(any(), any(XContentParser.class), eq(ReservedStateVersionCheck.HIGHER_VERSION_ONLY), any()); } @SuppressWarnings("unchecked") diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedClusterStateServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedClusterStateServiceTests.java index 217b82d7729ae..d96387618e6bd 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedClusterStateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedClusterStateServiceTests.java @@ -167,7 +167,12 @@ public void testOperatorController() throws IOException { AtomicReference x = new AtomicReference<>(); try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, testJSON)) { - controller.process("operator", parser, x::set); + controller.process( + "operator", + parser, + randomFrom(ReservedStateVersionCheck.HIGHER_VERSION_ONLY, ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION), + x::set + ); assertThat(x.get(), instanceOf(IllegalStateException.class)); assertThat(x.get().getMessage(), containsString("Error processing state change request for operator")); @@ -197,7 +202,12 @@ public void testOperatorController() throws IOException { """; try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, testJSON)) { - controller.process("operator", parser, Assert::assertNull); + controller.process( + "operator", + parser, + randomFrom(ReservedStateVersionCheck.HIGHER_VERSION_ONLY, ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION), + Assert::assertNull + ); } } @@ -236,7 +246,15 @@ public void testUpdateStateTasks() throws Exception { AtomicBoolean successCalled = new AtomicBoolean(false); ReservedStateUpdateTask task = spy( - new ReservedStateUpdateTask("test", null, Map.of(), Set.of(), errorState -> {}, ActionListener.noop()) + new 
ReservedStateUpdateTask( + "test", + null, + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, + Map.of(), + Set.of(), + errorState -> {}, + ActionListener.noop() + ) ); doReturn(state).when(task).execute(any()); @@ -275,7 +293,13 @@ public void testUpdateErrorState() { ReservedClusterStateService service = new ReservedClusterStateService(clusterService, mock(RerouteService.class), List.of()); - ErrorState error = new ErrorState("namespace", 2L, List.of("error"), ReservedStateErrorMetadata.ErrorKind.TRANSIENT); + ErrorState error = new ErrorState( + "namespace", + 2L, + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, + List.of("error"), + ReservedStateErrorMetadata.ErrorKind.TRANSIENT + ); service.updateErrorState(error); assertThat(updateTask.getValue(), notNullValue()); @@ -296,7 +320,13 @@ public void testUpdateErrorState() { // it should not update if the error version is less than the current version when(clusterService.state()).thenReturn(updatedState); - ErrorState oldError = new ErrorState("namespace", 1L, List.of("old error"), ReservedStateErrorMetadata.ErrorKind.TRANSIENT); + ErrorState oldError = new ErrorState( + "namespace", + 1L, + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, + List.of("old error"), + ReservedStateErrorMetadata.ErrorKind.TRANSIENT + ); service.updateErrorState(oldError); verifyNoMoreInteractions(errorQueue); } @@ -308,7 +338,13 @@ public void testErrorStateTask() throws Exception { ReservedStateErrorTask task = spy( new ReservedStateErrorTask( - new ErrorState("test", 1L, List.of("some parse error", "some io error"), ReservedStateErrorMetadata.ErrorKind.PARSING), + new ErrorState( + "test", + 1L, + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, + List.of("some parse error", "some io error"), + ReservedStateErrorMetadata.ErrorKind.PARSING + ), ActionListener.running(() -> listenerCompleted.set(true)) ) ); @@ -353,10 +389,12 @@ public TransformState transform(Object source, TransformState prevState) throws Metadata metadata = 
Metadata.builder().put(operatorMetadata).build(); ClusterState state = ClusterState.builder(new ClusterName("test")).metadata(metadata).build(); - assertFalse(ReservedStateErrorTask.isNewError(operatorMetadata, 2L)); - assertFalse(ReservedStateErrorTask.isNewError(operatorMetadata, 1L)); - assertTrue(ReservedStateErrorTask.isNewError(operatorMetadata, 3L)); - assertTrue(ReservedStateErrorTask.isNewError(null, 1L)); + assertFalse(ReservedStateErrorTask.isNewError(operatorMetadata, 2L, ReservedStateVersionCheck.HIGHER_VERSION_ONLY)); + assertFalse(ReservedStateErrorTask.isNewError(operatorMetadata, 1L, ReservedStateVersionCheck.HIGHER_VERSION_ONLY)); + assertTrue(ReservedStateErrorTask.isNewError(operatorMetadata, 2L, ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION)); + assertTrue(ReservedStateErrorTask.isNewError(operatorMetadata, 3L, ReservedStateVersionCheck.HIGHER_VERSION_ONLY)); + assertTrue(ReservedStateErrorTask.isNewError(null, 1L, ReservedStateVersionCheck.HIGHER_VERSION_ONLY)); + assertTrue(ReservedStateErrorTask.isNewError(null, 1L, ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION)); var chunk = new ReservedStateChunk(Map.of("one", "two", "maker", "three"), new ReservedStateVersion(2L, Version.CURRENT)); var orderedHandlers = List.of(exceptionThrower.name(), newStateMaker.name()); @@ -367,9 +405,10 @@ public TransformState transform(Object source, TransformState prevState) throws ReservedStateUpdateTask task = new ReservedStateUpdateTask( "namespace_one", chunk, + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, Map.of(exceptionThrower.name(), exceptionThrower, newStateMaker.name(), newStateMaker), orderedHandlers, - errorState -> assertFalse(ReservedStateErrorTask.isNewError(operatorMetadata, errorState.version())), + errorState -> assertFalse(ReservedStateErrorTask.isNewError(operatorMetadata, errorState.version(), errorState.versionCheck())), ActionListener.noop() ); @@ -414,9 +453,21 @@ public void testCheckMetadataVersion() { ReservedStateMetadata 
operatorMetadata = ReservedStateMetadata.builder("test").version(123L).build(); ClusterState state = ClusterState.builder(new ClusterName("test")).metadata(Metadata.builder().put(operatorMetadata)).build(); + ReservedStateUpdateTask task = new ReservedStateUpdateTask( "test", new ReservedStateChunk(Map.of(), new ReservedStateVersion(124L, Version.CURRENT)), + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, + Map.of(), + List.of(), + e -> {}, + ActionListener.noop() + ); + assertThat("Cluster state should be modified", task.execute(state), not(sameInstance(state))); + task = new ReservedStateUpdateTask( + "test", + new ReservedStateChunk(Map.of(), new ReservedStateVersion(124L, Version.CURRENT)), + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, Map.of(), List.of(), e -> {}, @@ -427,16 +478,59 @@ public void testCheckMetadataVersion() { task = new ReservedStateUpdateTask( "test", new ReservedStateChunk(Map.of(), new ReservedStateVersion(123L, Version.CURRENT)), + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, Map.of(), List.of(), e -> {}, ActionListener.noop() ); assertThat("Cluster state should not be modified", task.execute(state), sameInstance(state)); + task = new ReservedStateUpdateTask( + "test", + new ReservedStateChunk(Map.of(), new ReservedStateVersion(123L, Version.CURRENT)), + ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION, + Map.of(), + List.of(), + e -> {}, + ActionListener.noop() + ); + assertThat("Cluster state should be modified", task.execute(state), not(sameInstance(state))); + task = new ReservedStateUpdateTask( + "test", + new ReservedStateChunk(Map.of(), new ReservedStateVersion(122L, Version.CURRENT)), + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, + Map.of(), + List.of(), + e -> {}, + ActionListener.noop() + ); + assertThat("Cluster state should not be modified", task.execute(state), sameInstance(state)); + task = new ReservedStateUpdateTask( + "test", + new ReservedStateChunk(Map.of(), new ReservedStateVersion(122L, Version.CURRENT)), + 
ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION, + Map.of(), + List.of(), + e -> {}, + ActionListener.noop() + ); + assertThat("Cluster state should not be modified", task.execute(state), sameInstance(state)); + + task = new ReservedStateUpdateTask( + "test", + new ReservedStateChunk(Map.of(), new ReservedStateVersion(124L, Version.fromId(Version.CURRENT.id + 1))), + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, + Map.of(), + List.of(), + e -> {}, + ActionListener.noop() + ); + assertThat("Cluster state should not be modified", task.execute(state), sameInstance(state)); task = new ReservedStateUpdateTask( "test", new ReservedStateChunk(Map.of(), new ReservedStateVersion(124L, Version.fromId(Version.CURRENT.id + 1))), + ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION, Map.of(), List.of(), e -> {}, @@ -530,11 +624,11 @@ public void testCheckAndReportError() { final var controller = spy(new ReservedClusterStateService(clusterService, mock(RerouteService.class), List.of())); - assertNull(controller.checkAndReportError("test", List.of(), null)); + assertNull(controller.checkAndReportError("test", List.of(), null, ReservedStateVersionCheck.HIGHER_VERSION_ONLY)); verify(controller, times(0)).updateErrorState(any()); var version = new ReservedStateVersion(2L, Version.CURRENT); - var error = controller.checkAndReportError("test", List.of("test error"), version); + var error = controller.checkAndReportError("test", List.of("test error"), version, ReservedStateVersionCheck.HIGHER_VERSION_ONLY); assertThat(error, instanceOf(IllegalStateException.class)); assertThat(error.getMessage(), is("Error processing state change request for test, errors: test error")); verify(controller, times(1)).updateErrorState(any()); diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTaskTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTaskTests.java index 9a2ab779669bc..1f453abf32303 100644 --- 
a/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTaskTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTaskTests.java @@ -23,7 +23,15 @@ public class ReservedStateUpdateTaskTests extends ESTestCase { public void testBlockedClusterState() { - var task = new ReservedStateUpdateTask("dummy", null, Map.of(), List.of(), e -> {}, ActionListener.noop()); + var task = new ReservedStateUpdateTask( + "dummy", + null, + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, + Map.of(), + List.of(), + e -> {}, + ActionListener.noop() + ); ClusterState notRecoveredClusterState = ClusterState.builder(ClusterName.DEFAULT) .blocks(ClusterBlocks.builder().addGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) .build(); diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/ReservedLifecycleStateServiceTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/ReservedLifecycleStateServiceTests.java index 3f3285c5c2bd7..aab89c6620b52 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/ReservedLifecycleStateServiceTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/ReservedLifecycleStateServiceTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.reservedstate.service.ReservedStateUpdateTask; import org.elasticsearch.reservedstate.service.ReservedStateUpdateTaskExecutor; import org.elasticsearch.reservedstate.service.ReservedStateVersion; +import org.elasticsearch.reservedstate.service.ReservedStateVersionCheck; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; @@ -362,7 +363,7 @@ public void testOperatorControllerFromJSONContent() throws IOException { AtomicReference x = new AtomicReference<>(); try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, testJSON)) { 
- controller.process("operator", parser, x::set); + controller.process("operator", parser, randomFrom(ReservedStateVersionCheck.values()), x::set); assertThat(x.get(), instanceOf(IllegalStateException.class)); assertThat(x.get().getMessage(), containsString("Error processing state change request for operator")); @@ -383,7 +384,7 @@ public void testOperatorControllerFromJSONContent() throws IOException { ); try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, testJSON)) { - controller.process("operator", parser, Assert::assertNull); + controller.process("operator", parser, randomFrom(ReservedStateVersionCheck.values()), Assert::assertNull); } } @@ -420,7 +421,7 @@ public void testOperatorControllerWithPluginPackage() { new ReservedStateVersion(123L, Version.CURRENT) ); - controller.process("operator", pack, x::set); + controller.process("operator", pack, randomFrom(ReservedStateVersionCheck.values()), x::set); assertThat(x.get(), instanceOf(IllegalStateException.class)); assertThat(x.get().getMessage(), containsString("Error processing state change request for operator")); @@ -439,6 +440,6 @@ public void testOperatorControllerWithPluginPackage() { ) ); - controller.process("operator", pack, Assert::assertNull); + controller.process("operator", pack, randomFrom(ReservedStateVersionCheck.values()), Assert::assertNull); } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java index 778d88d832887..3b6ffd0698623 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java @@ -154,7 +154,17 @@ public void cleanUp() { } public static void 
writeJSONFile(String node, String json, Logger logger, AtomicLong versionCounter) throws Exception { - long version = versionCounter.incrementAndGet(); + writeJSONFile(node, json, logger, versionCounter, true); + } + + public static void writeJSONFileWithoutVersionIncrement(String node, String json, Logger logger, AtomicLong versionCounter) + throws Exception { + writeJSONFile(node, json, logger, versionCounter, false); + } + + private static void writeJSONFile(String node, String json, Logger logger, AtomicLong versionCounter, boolean incrementVersion) + throws Exception { + long version = incrementVersion ? versionCounter.incrementAndGet() : versionCounter.get(); FileSettingsService fileSettingsService = internalCluster().getInstance(FileSettingsService.class, node); assertTrue(fileSettingsService.watching()); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsRestartIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsRestartIT.java index c0f82adc88784..6c6582138ce89 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsRestartIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsRestartIT.java @@ -16,25 +16,33 @@ import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.FieldExpression; import org.elasticsearch.xpack.core.security.authz.RoleMappingMetadata; import org.elasticsearch.xpack.security.action.rolemapping.ReservedRoleMappingAction; +import org.junit.Before; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Map; +import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import static 
org.elasticsearch.integration.RoleMappingFileSettingsIT.setupClusterStateListener; import static org.elasticsearch.integration.RoleMappingFileSettingsIT.setupClusterStateListenerForCleanup; import static org.elasticsearch.integration.RoleMappingFileSettingsIT.writeJSONFile; +import static org.elasticsearch.integration.RoleMappingFileSettingsIT.writeJSONFileWithoutVersionIncrement; import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.emptyIterable; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) public class FileSettingsRoleMappingsRestartIT extends SecurityIntegTestCase { - private static AtomicLong versionCounter = new AtomicLong(1); + private final AtomicLong versionCounter = new AtomicLong(1); - private static String testJSONOnlyRoleMappings = """ + @Before + public void resetVersion() { + versionCounter.set(1); + } + + private static final String testJSONOnlyRoleMappings = """ { "metadata": { "version": "%s", @@ -64,7 +72,28 @@ public class FileSettingsRoleMappingsRestartIT extends SecurityIntegTestCase { } }"""; - private static String emptyJSON = """ + private static final String testJSONOnlyUpdatedRoleMappings = """ + { + "metadata": { + "version": "%s", + "compatibility": "8.4.0" + }, + "state": { + "role_mappings": { + "everyone_kibana_together": { + "enabled": true, + "roles": [ "kibana_user", "kibana_admin" ], + "rules": { "field": { "username": "*" } }, + "metadata": { + "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", + "_foo": "something" + } + } + } + } + }"""; + + private static final String emptyJSON = """ { "metadata": { "version": "%s", @@ -88,12 +117,34 @@ public void testReservedStatePersistsOnRestart() throws Exception { boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); assertTrue(awaitSuccessful); - var clusterState = clusterAdmin().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT)).actionGet().getState(); - 
assertRoleMappingReservedMetadata(clusterState, "everyone_kibana_alone", "everyone_fleet_alone"); - List roleMappings = new ArrayList<>(RoleMappingMetadata.getFromClusterState(clusterState).getRoleMappings()); - assertThat( - roleMappings, - containsInAnyOrder( + assertRoleMappingsInClusterState( + new ExpressionRoleMapping( + "everyone_kibana_alone", + new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), + List.of("kibana_user"), + List.of(), + Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something"), + true + ), + new ExpressionRoleMapping( + "everyone_fleet_alone", + new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), + List.of("fleet_user"), + List.of(), + Map.of("uuid", "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", "_foo", "something_else"), + false + ) + ); + + logger.info("--> restart master"); + internalCluster().restartNode(masterNode); + ensureGreen(); + awaitFileSettingsWatcher(); + + // assert busy to give mappings time to update after restart; otherwise, the role mapping names might be dummy values + // `name_not_available_after_deserialization` + assertBusy( + () -> assertRoleMappingsInClusterState( new ExpressionRoleMapping( "everyone_kibana_alone", new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), @@ -113,59 +164,118 @@ public void testReservedStatePersistsOnRestart() throws Exception { ) ); + // now remove the role mappings via the same settings file + cleanupClusterState(masterNode); + + // no role mappings + assertRoleMappingsInClusterState(); + + // and restart the master to confirm the role mappings are all gone + logger.info("--> restart master again"); + internalCluster().restartNode(masterNode); + ensureGreen(); + + // no role mappings + assertRoleMappingsInClusterState(); + } + + public void testFileSettingsReprocessedOnRestartWithoutVersionChange() throws Exception { + internalCluster().setBootstrapMasterNodeIndex(0); + + final String masterNode 
= internalCluster().getMasterName(); + + var savedClusterState = setupClusterStateListener(masterNode, "everyone_kibana_alone"); + awaitFileSettingsWatcher(); + logger.info("--> write some role mappings, no other file settings"); + writeJSONFile(masterNode, testJSONOnlyRoleMappings, logger, versionCounter); + boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + + assertRoleMappingsInClusterState( + new ExpressionRoleMapping( + "everyone_kibana_alone", + new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), + List.of("kibana_user"), + List.of(), + Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something"), + true + ), + new ExpressionRoleMapping( + "everyone_fleet_alone", + new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), + List.of("fleet_user"), + List.of(), + Map.of("uuid", "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", "_foo", "something_else"), + false + ) + ); + + final CountDownLatch latch = new CountDownLatch(1); + final FileSettingsService fileSettingsService = internalCluster().getInstance(FileSettingsService.class, masterNode); + fileSettingsService.addFileChangedListener(latch::countDown); + // Don't increment version but write new file contents to test re-processing on restart + writeJSONFileWithoutVersionIncrement(masterNode, testJSONOnlyUpdatedRoleMappings, logger, versionCounter); + // Make sure we saw a file settings update so that we know it got processed, but it did not affect cluster state + assertTrue(latch.await(20, TimeUnit.SECONDS)); + + // Nothing changed yet because version is the same and there was no restart + assertRoleMappingsInClusterState( + new ExpressionRoleMapping( + "everyone_kibana_alone", + new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), + List.of("kibana_user"), + List.of(), + Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something"), + true + ), + new 
ExpressionRoleMapping( + "everyone_fleet_alone", + new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), + List.of("fleet_user"), + List.of(), + Map.of("uuid", "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", "_foo", "something_else"), + false + ) + ); + logger.info("--> restart master"); internalCluster().restartNode(masterNode); ensureGreen(); + awaitFileSettingsWatcher(); - // assert role mappings are recovered from "disk" - clusterState = clusterAdmin().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT)).actionGet().getState(); - assertRoleMappingReservedMetadata(clusterState, "everyone_kibana_alone", "everyone_fleet_alone"); - roleMappings = new ArrayList<>(RoleMappingMetadata.getFromClusterState(clusterState).getRoleMappings()); - assertThat( - roleMappings, - containsInAnyOrder( + // Assert busy to give mappings time to update + assertBusy( + () -> assertRoleMappingsInClusterState( new ExpressionRoleMapping( - "name_not_available_after_deserialization", + "everyone_kibana_together", new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), - List.of("kibana_user"), + List.of("kibana_user", "kibana_admin"), List.of(), Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something"), true - ), - new ExpressionRoleMapping( - "name_not_available_after_deserialization", - new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), - List.of("fleet_user"), - List.of(), - Map.of("uuid", "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", "_foo", "something_else"), - false ) ) ); + cleanupClusterState(masterNode); + } + + private void assertRoleMappingsInClusterState(ExpressionRoleMapping... 
expectedRoleMappings) { + var clusterState = clusterAdmin().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT)).actionGet().getState(); + String[] expectedRoleMappingNames = Arrays.stream(expectedRoleMappings).map(ExpressionRoleMapping::getName).toArray(String[]::new); + assertRoleMappingReservedMetadata(clusterState, expectedRoleMappingNames); + var actualRoleMappings = new ArrayList<>(RoleMappingMetadata.getFromClusterState(clusterState).getRoleMappings()); + assertThat(actualRoleMappings, containsInAnyOrder(expectedRoleMappings)); + } + + private void cleanupClusterState(String masterNode) throws Exception { // now remove the role mappings via the same settings file - savedClusterState = setupClusterStateListenerForCleanup(masterNode); + var savedClusterState = setupClusterStateListenerForCleanup(masterNode); awaitFileSettingsWatcher(); logger.info("--> remove the role mappings with an empty settings file"); writeJSONFile(masterNode, emptyJSON, logger, versionCounter); - awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); assertTrue(awaitSuccessful); - - clusterState = clusterAdmin().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT)).actionGet().getState(); - assertRoleMappingReservedMetadata(clusterState); - roleMappings = new ArrayList<>(RoleMappingMetadata.getFromClusterState(clusterState).getRoleMappings()); - assertThat(roleMappings, emptyIterable()); - - // and restart the master to confirm the role mappings are all gone - logger.info("--> restart master again"); - internalCluster().restartNode(masterNode); - ensureGreen(); - - // assert empty role mappings are recovered from "disk" - clusterState = clusterAdmin().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT)).actionGet().getState(); - assertRoleMappingReservedMetadata(clusterState); - roleMappings = new ArrayList<>(RoleMappingMetadata.getFromClusterState(clusterState).getRoleMappings()); - 
assertThat(roleMappings, emptyIterable()); } private void assertRoleMappingReservedMetadata(ClusterState clusterState, String... names) { diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/action/ReservedSnapshotLifecycleStateServiceTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/action/ReservedSnapshotLifecycleStateServiceTests.java index 0fcc4b8007c6d..b993633e3d17d 100644 --- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/action/ReservedSnapshotLifecycleStateServiceTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/action/ReservedSnapshotLifecycleStateServiceTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.reservedstate.service.ReservedClusterStateService; import org.elasticsearch.reservedstate.service.ReservedStateUpdateTask; import org.elasticsearch.reservedstate.service.ReservedStateUpdateTaskExecutor; +import org.elasticsearch.reservedstate.service.ReservedStateVersionCheck; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockUtils; import org.elasticsearch.threadpool.ThreadPool; @@ -399,7 +400,7 @@ public void testOperatorControllerFromJSONContent() throws IOException { AtomicReference x = new AtomicReference<>(); try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, testJSON)) { - controller.process("operator", parser, x::set); + controller.process("operator", parser, randomFrom(ReservedStateVersionCheck.values()), x::set); assertThat(x.get(), instanceOf(IllegalStateException.class)); assertThat(x.get().getMessage(), containsString("Error processing state change request for operator")); @@ -419,7 +420,7 @@ public void testOperatorControllerFromJSONContent() throws IOException { ); try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, testJSON)) { - controller.process("operator", parser, Assert::assertNull); + controller.process("operator", 
parser, randomFrom(ReservedStateVersionCheck.values()), Assert::assertNull); } } From d5a19578772c7f5d9eb12f774d9040fbdfb48e30 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Mon, 21 Oct 2024 11:30:08 +0200 Subject: [PATCH 238/449] ES|QL: remove dead code for LIKE operator (#115037) --- .../core/expression/predicate/regex/Like.java | 46 --------- .../predicate/regex/LikePattern.java | 95 ------------------ .../core/planner/ExpressionTranslators.java | 4 - .../predicate/regex/StringPatternTests.java | 98 +++++++++---------- .../rules/logical/ConstantFoldingTests.java | 3 - .../rules/logical/ReplaceRegexMatchTests.java | 36 +------ .../esql/tree/EsqlNodeSubclassTests.java | 8 -- 7 files changed, 54 insertions(+), 236 deletions(-) delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/Like.java delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/LikePattern.java diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/Like.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/Like.java deleted file mode 100644 index 6d8ce8cbdf47f..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/Like.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.predicate.regex; - -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -import java.io.IOException; - -public class Like extends RegexMatch { - - public Like(Source source, Expression left, LikePattern pattern) { - this(source, left, pattern, false); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - throw new UnsupportedOperationException(); - } - - @Override - public String getWriteableName() { - throw new UnsupportedOperationException(); - } - - public Like(Source source, Expression left, LikePattern pattern, boolean caseInsensitive) { - super(source, left, pattern, caseInsensitive); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, Like::new, field(), pattern(), caseInsensitive()); - } - - @Override - protected Like replaceChild(Expression newLeft) { - return new Like(source(), newLeft, pattern(), caseInsensitive()); - } - -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/LikePattern.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/LikePattern.java deleted file mode 100644 index 52ce2636e914b..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/LikePattern.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.predicate.regex; - -import org.apache.lucene.index.Term; -import org.apache.lucene.search.WildcardQuery; -import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; -import org.apache.lucene.util.automaton.Operations; -import org.elasticsearch.xpack.esql.core.util.StringUtils; - -import java.util.Objects; - -/** - * A SQL 'like' pattern. - * Similar to basic regex, supporting '_' instead of '?' and '%' instead of '*'. - *

    - * Allows escaping based on a regular char. - * - * To prevent conflicts with ES, the string and char must be validated to not contain '*'. - */ -public class LikePattern extends AbstractStringPattern { - - private final String pattern; - private final char escape; - - private final String regex; - private final String wildcard; - private final String indexNameWildcard; - - public LikePattern(String pattern, char escape) { - this.pattern = pattern; - this.escape = escape; - // early initialization to force string validation - this.regex = StringUtils.likeToJavaPattern(pattern, escape); - this.wildcard = StringUtils.likeToLuceneWildcard(pattern, escape); - this.indexNameWildcard = StringUtils.likeToIndexWildcard(pattern, escape); - } - - public String pattern() { - return pattern; - } - - public char escape() { - return escape; - } - - @Override - public Automaton createAutomaton() { - Automaton automaton = WildcardQuery.toAutomaton(new Term(null, wildcard)); - return MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - } - - @Override - public String asJavaRegex() { - return regex; - } - - /** - * Returns the pattern in (Lucene) wildcard format. - */ - public String asLuceneWildcard() { - return wildcard; - } - - /** - * Returns the pattern in (IndexNameExpressionResolver) wildcard format. 
- */ - public String asIndexNameWildcard() { - return indexNameWildcard; - } - - @Override - public int hashCode() { - return Objects.hash(pattern, escape); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - LikePattern other = (LikePattern) obj; - return Objects.equals(pattern, other.pattern) && escape == other.escape; - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java index 176250222512b..366630eadb5fe 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java @@ -19,7 +19,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.Like; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLike; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RegexMatch; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardLike; @@ -66,9 +65,6 @@ public static Query doTranslate(RegexMatch e, TranslatorHandler handler) { } private static Query translateField(RegexMatch e, String targetFieldName) { - if (e instanceof Like l) { - return new WildcardQuery(e.source(), targetFieldName, l.pattern().asLuceneWildcard(), l.caseInsensitive()); - } if (e instanceof WildcardLike l) { return new WildcardQuery(e.source(), targetFieldName, l.pattern().asLuceneWildcard(), l.caseInsensitive()); } diff --git 
a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/StringPatternTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/StringPatternTests.java index 43cae475cff7e..c361b7e3726ed 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/StringPatternTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/StringPatternTests.java @@ -12,78 +12,78 @@ public class StringPatternTests extends ESTestCase { - private LikePattern like(String pattern, char escape) { - return new LikePattern(pattern, escape); + private WildcardPattern like(String pattern) { + return new WildcardPattern(pattern); } private RLikePattern rlike(String pattern) { return new RLikePattern(pattern); } - private boolean matchesAll(String pattern, char escape) { - return like(pattern, escape).matchesAll(); + private boolean likeMatchesAll(String pattern) { + return like(pattern).matchesAll(); } - private boolean exactMatch(String pattern, char escape) { - String escaped = pattern.replace(Character.toString(escape), StringUtils.EMPTY); - return escaped.equals(like(pattern, escape).exactMatch()); + private boolean likeExactMatch(String pattern) { + String escaped = pattern.replace("\\", StringUtils.EMPTY); + return escaped.equals(like(pattern).exactMatch()); } - private boolean matchesAll(String pattern) { + private boolean rlikeMatchesAll(String pattern) { return rlike(pattern).matchesAll(); } - private boolean exactMatch(String pattern) { + private boolean rlikeExactMatch(String pattern) { return pattern.equals(rlike(pattern).exactMatch()); } - public void testWildcardMatchAll() throws Exception { - assertTrue(matchesAll("%", '0')); - assertTrue(matchesAll("%%", '0')); + public void testWildcardMatchAll() { + assertTrue(likeMatchesAll("*")); + assertTrue(likeMatchesAll("**")); - 
assertFalse(matchesAll("a%", '0')); - assertFalse(matchesAll("%_", '0')); - assertFalse(matchesAll("%_%_%", '0')); - assertFalse(matchesAll("_%", '0')); - assertFalse(matchesAll("0%", '0')); + assertFalse(likeMatchesAll("a*")); + assertFalse(likeMatchesAll("*?")); + assertFalse(likeMatchesAll("*?*?*")); + assertFalse(likeMatchesAll("?*")); + assertFalse(likeMatchesAll("\\*")); } - public void testRegexMatchAll() throws Exception { - assertTrue(matchesAll(".*")); - assertTrue(matchesAll(".*.*")); - assertTrue(matchesAll(".*.?")); - assertTrue(matchesAll(".?.*")); - assertTrue(matchesAll(".*.?.*")); + public void testRegexMatchAll() { + assertTrue(rlikeMatchesAll(".*")); + assertTrue(rlikeMatchesAll(".*.*")); + assertTrue(rlikeMatchesAll(".*.?")); + assertTrue(rlikeMatchesAll(".?.*")); + assertTrue(rlikeMatchesAll(".*.?.*")); - assertFalse(matchesAll("..*")); - assertFalse(matchesAll("ab.")); - assertFalse(matchesAll("..?")); + assertFalse(rlikeMatchesAll("..*")); + assertFalse(rlikeMatchesAll("ab.")); + assertFalse(rlikeMatchesAll("..?")); } - public void testWildcardExactMatch() throws Exception { - assertTrue(exactMatch("0%", '0')); - assertTrue(exactMatch("0_", '0')); - assertTrue(exactMatch("123", '0')); - assertTrue(exactMatch("1230_", '0')); - assertTrue(exactMatch("1230_321", '0')); - - assertFalse(exactMatch("%", '0')); - assertFalse(exactMatch("%%", '0')); - assertFalse(exactMatch("a%", '0')); - assertFalse(exactMatch("a_", '0')); + public void testWildcardExactMatch() { + assertTrue(likeExactMatch("\\*")); + assertTrue(likeExactMatch("\\?")); + assertTrue(likeExactMatch("123")); + assertTrue(likeExactMatch("123\\?")); + assertTrue(likeExactMatch("123\\?321")); + + assertFalse(likeExactMatch("*")); + assertFalse(likeExactMatch("**")); + assertFalse(likeExactMatch("a*")); + assertFalse(likeExactMatch("a?")); } - public void testRegexExactMatch() throws Exception { - assertFalse(exactMatch(".*")); - assertFalse(exactMatch(".*.*")); - 
assertFalse(exactMatch(".*.?")); - assertFalse(exactMatch(".?.*")); - assertFalse(exactMatch(".*.?.*")); - assertFalse(exactMatch("..*")); - assertFalse(exactMatch("ab.")); - assertFalse(exactMatch("..?")); - - assertTrue(exactMatch("abc")); - assertTrue(exactMatch("12345")); + public void testRegexExactMatch() { + assertFalse(rlikeExactMatch(".*")); + assertFalse(rlikeExactMatch(".*.*")); + assertFalse(rlikeExactMatch(".*.?")); + assertFalse(rlikeExactMatch(".?.*")); + assertFalse(rlikeExactMatch(".*.?.*")); + assertFalse(rlikeExactMatch("..*")); + assertFalse(rlikeExactMatch("ab.")); + assertFalse(rlikeExactMatch("..?")); + + assertTrue(rlikeExactMatch("abc")); + assertTrue(rlikeExactMatch("12345")); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ConstantFoldingTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ConstantFoldingTests.java index a74ceb4e1426c..c2e85cc43284a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ConstantFoldingTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ConstantFoldingTests.java @@ -17,8 +17,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.Like; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.LikePattern; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLike; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardLike; @@ -101,7 +99,6 @@ public void testConstantNot() { } public void testConstantFoldingLikes() { - assertEquals(TRUE, new 
ConstantFolding().rule(new Like(EMPTY, of("test_emp"), new LikePattern("test%", (char) 0))).canonical()); assertEquals(TRUE, new ConstantFolding().rule(new WildcardLike(EMPTY, of("test_emp"), new WildcardPattern("test*"))).canonical()); assertEquals(TRUE, new ConstantFolding().rule(new RLike(EMPTY, of("test_emp"), new RLikePattern("test.emp"))).canonical()); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatchTests.java index c5e64d41be4dc..20d638a113bf2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatchTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatchTests.java @@ -11,8 +11,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.Like; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.LikePattern; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLike; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardLike; @@ -26,18 +24,6 @@ public class ReplaceRegexMatchTests extends ESTestCase { - public void testMatchAllLikeToExist() { - for (String s : asList("%", "%%", "%%%")) { - LikePattern pattern = new LikePattern(s, (char) 0); - FieldAttribute fa = getFieldAttribute(); - Like l = new Like(EMPTY, fa, pattern); - Expression e = new ReplaceRegexMatch().rule(l); - assertEquals(IsNotNull.class, e.getClass()); - IsNotNull inn = (IsNotNull) e; - assertEquals(fa, inn.field()); - } - } - public void 
testMatchAllWildcardLikeToExist() { for (String s : asList("*", "**", "***")) { WildcardPattern pattern = new WildcardPattern(s); @@ -60,31 +46,19 @@ public void testMatchAllRLikeToExist() { assertEquals(fa, inn.field()); } - public void testExactMatchLike() { - for (String s : asList("ab", "ab0%", "ab0_c")) { - LikePattern pattern = new LikePattern(s, '0'); + public void testExactMatchWildcardLike() { + for (String s : asList("ab", "ab\\*", "ab\\?c")) { + WildcardPattern pattern = new WildcardPattern(s); FieldAttribute fa = getFieldAttribute(); - Like l = new Like(EMPTY, fa, pattern); + WildcardLike l = new WildcardLike(EMPTY, fa, pattern); Expression e = new ReplaceRegexMatch().rule(l); assertEquals(Equals.class, e.getClass()); Equals eq = (Equals) e; assertEquals(fa, eq.left()); - assertEquals(s.replace("0", StringUtils.EMPTY), eq.right().fold()); + assertEquals(s.replace("\\", StringUtils.EMPTY), eq.right().fold()); } } - public void testExactMatchWildcardLike() { - String s = "ab"; - WildcardPattern pattern = new WildcardPattern(s); - FieldAttribute fa = getFieldAttribute(); - WildcardLike l = new WildcardLike(EMPTY, fa, pattern); - Expression e = new ReplaceRegexMatch().rule(l); - assertEquals(Equals.class, e.getClass()); - Equals eq = (Equals) e; - assertEquals(fa, eq.left()); - assertEquals(s, eq.right().fold()); - } - public void testExactMatchRLike() { RLikePattern pattern = new RLikePattern("abc"); FieldAttribute fa = getFieldAttribute(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java index 7075c9fe58d63..2bee0188b9fab 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java @@ -28,8 +28,6 @@ import 
org.elasticsearch.xpack.esql.core.expression.UnresolvedNamedExpression; import org.elasticsearch.xpack.esql.core.expression.function.Function; import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.FullTextPredicate; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.Like; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.LikePattern; import org.elasticsearch.xpack.esql.core.tree.AbstractNodeTestCase; import org.elasticsearch.xpack.esql.core.tree.Node; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -422,12 +420,6 @@ public void accept(Page page) { } return b.toString(); } - } else if (toBuildClass == Like.class) { - - if (argClass == LikePattern.class) { - return new LikePattern(randomAlphaOfLength(16), randomFrom('\\', '|', '/', '`')); - } - } else if (argClass == Dissect.Parser.class) { // Dissect.Parser is a record / final, cannot be mocked String pattern = randomDissectPattern(); From af18f1027b0b0b4616668feccb30eeaf86e56cda Mon Sep 17 00:00:00 2001 From: Jan Kuipers <148754765+jan-elastic@users.noreply.github.com> Date: Mon, 21 Oct 2024 12:34:29 +0200 Subject: [PATCH 239/449] Fix scale up for model allocations (#115189) --- .../ml/autoscaling/MlAutoscalingContext.java | 2 +- .../MlAutoscalingDeciderServiceTests.java | 48 +++++++++++++++++++ 2 files changed, 49 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingContext.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingContext.java index f266dda6e3e5d..dfe52897caf2c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingContext.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingContext.java @@ -177,7 +177,7 @@ public boolean isEmpty() { return anomalyDetectionTasks.isEmpty() && snapshotUpgradeTasks.isEmpty() && dataframeAnalyticsTasks.isEmpty() - && 
modelAssignments.values().stream().allMatch(assignment -> assignment.totalTargetAllocations() == 0); + && modelAssignments.values().stream().allMatch(assignment -> assignment.getTaskParams().getNumberOfAllocations() == 0); } public List findPartiallyAllocatedModels() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderServiceTests.java index a1db31c474f31..cf78e5f900e15 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderServiceTests.java @@ -54,6 +54,7 @@ import static org.elasticsearch.xpack.ml.utils.NativeMemoryCalculator.STATIC_JVM_UPPER_THRESHOLD; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; import static org.mockito.ArgumentMatchers.any; @@ -331,6 +332,53 @@ public void testScale_GivenModelWithZeroAllocations() { assertThat(result.requiredCapacity().node().memory().getBytes(), equalTo(0L)); } + public void testScale_GivenTrainedModelAllocationAndNoMlNode() { + MlAutoscalingDeciderService service = buildService(); + service.onMaster(); + + ClusterState clusterState = new ClusterState.Builder(new ClusterName("cluster")).metadata( + Metadata.builder() + .putCustom( + TrainedModelAssignmentMetadata.NAME, + new TrainedModelAssignmentMetadata( + Map.of( + "model", + TrainedModelAssignment.Builder.empty( + new StartTrainedModelDeploymentAction.TaskParams( + "model", + "model-deployment", + 400, + 1, + 2, + 100, + null, + Priority.NORMAL, + 0L, + 0L + ), + new AdaptiveAllocationsSettings(true, 0, 4) + ).setAssignmentState(AssignmentState.STARTING).build() + ) 
+ ) + ) + .build() + ).build(); + + AutoscalingDeciderResult result = service.scale( + Settings.EMPTY, + new DeciderContext( + clusterState, + new AutoscalingCapacity(AutoscalingCapacity.AutoscalingResources.ZERO, AutoscalingCapacity.AutoscalingResources.ZERO) + ) + ); + + assertThat(result.reason().summary(), containsString("requesting scale up")); + assertThat(result.requiredCapacity().total().memory().getBytes(), greaterThan(TEST_JOB_SIZE)); + assertThat(result.requiredCapacity().total().processors().count(), equalTo(2.0)); + assertThat(result.requiredCapacity().node().memory().getBytes(), greaterThan(TEST_JOB_SIZE)); + assertThat(result.requiredCapacity().node().processors().count(), equalTo(2.0)); + } + private DiscoveryNode buildNode(String id, ByteSizeValue machineMemory, int allocatedProcessors) { return DiscoveryNodeUtils.create( id, From 8c23fd77122cea2e235718034ddfcb4a2e945d92 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Mon, 21 Oct 2024 21:38:46 +1100 Subject: [PATCH 240/449] [Test] Flush response body for progress (#115177) In JDK23, response headers are no longer always immediately sent. See also https://bugs.openjdk.org/browse/JDK-8331847 This PR adds flush call for the response body to make progress. 
Resolves: #115145 Resolves: #115164 --- .../repositories/s3/S3BlobContainerRetriesTests.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java index 2eb2ed26153f9..b292dc5872994 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java @@ -586,16 +586,16 @@ public void handle(HttpExchange exchange) throws IOException { ), -1 ); + exchange.getResponseBody().flush(); } else if (randomBoolean()) { final var bytesSent = sendIncompleteContent(exchange, bytes); if (bytesSent < meaningfulProgressBytes) { failuresWithoutProgress += 1; - } else { - exchange.getResponseBody().flush(); } } else { failuresWithoutProgress += 1; } + exchange.getResponseBody().flush(); exchange.close(); } } @@ -640,6 +640,7 @@ public void handle(HttpExchange exchange) throws IOException { failureCount += 1; Streams.readFully(exchange.getRequestBody()); sendIncompleteContent(exchange, bytes); + exchange.getResponseBody().flush(); exchange.close(); } } From 3fad5f485880e1b3f88dc135f0dbeccbd517c1e4 Mon Sep 17 00:00:00 2001 From: Ioana Tagirta Date: Mon, 21 Oct 2024 12:47:18 +0200 Subject: [PATCH 241/449] Enable tests for out of range comparisons for float/half_float fields (#113122) * Enable tests for out of range comparisons for float/half_float fields * Address feedback comments * Implement suggestions --------- Co-authored-by: Elastic Machine --- .../xpack/esql/qa/rest/RestEsqlTestCase.java | 5 +- .../LocalPhysicalPlanOptimizerTests.java | 71 ++++++++++++++++--- 2 files changed, 62 insertions(+), 14 deletions(-) diff --git 
a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index e3199649a91be..2a50988e9e35e 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -398,9 +398,8 @@ public void testOutOfRangeComparisons() throws IOException { "long", // TODO: https://github.com/elastic/elasticsearch/issues/102935 // "unsigned_long", - // TODO: https://github.com/elastic/elasticsearch/issues/100130 - // "half_float", - // "float", + "half_float", + "float", "double", "scaled_float" ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 72060bccb520a..3436502610d62 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.EsqlTestUtils; @@ -147,6 +148,10 @@ private Analyzer makeAnalyzer(String mappingFileName, EnrichResolution enrichRes ); } + private Analyzer makeAnalyzer(String mappingFileName) { + return makeAnalyzer(mappingFileName, new EnrichResolution()); + } + /** * Expects * LimitExec[1000[INTEGER]] @@ -449,7 +454,7 @@ 
public void testQueryStringFunctionWithFunctionsPushedToLucene() { from test | where qstr("last_name: Smith") and cidr_match(ip, "127.0.0.1/32") """; - var analyzer = makeAnalyzer("mapping-all-types.json", new EnrichResolution()); + var analyzer = makeAnalyzer("mapping-all-types.json"); var plan = plannerOptimizer.plan(queryText, IS_SV_STATS, analyzer); var limit = as(plan, LimitExec.class); @@ -610,7 +615,7 @@ public void testMatchFunctionWithFunctionsPushedToLucene() { from test | where match(text, "beta") and cidr_match(ip, "127.0.0.1/32") """; - var analyzer = makeAnalyzer("mapping-all-types.json", new EnrichResolution()); + var analyzer = makeAnalyzer("mapping-all-types.json"); var plan = plannerOptimizer.plan(queryText, IS_SV_STATS, analyzer); var limit = as(plan, LimitExec.class); @@ -892,8 +897,15 @@ public void testIsNotNull_TextField_Pushdown_WithCount() { private record OutOfRangeTestCase(String fieldName, String tooLow, String tooHigh) {}; + private static final String LT = "<"; + private static final String LTE = "<="; + private static final String GT = ">"; + private static final String GTE = ">="; + private static final String EQ = "=="; + private static final String NEQ = "!="; + public void testOutOfRangeFilterPushdown() { - var allTypeMappingAnalyzer = makeAnalyzer("mapping-all-types.json", new EnrichResolution()); + var allTypeMappingAnalyzer = makeAnalyzer("mapping-all-types.json"); String largerThanInteger = String.valueOf(randomLongBetween(Integer.MAX_VALUE + 1L, Long.MAX_VALUE)); String smallerThanInteger = String.valueOf(randomLongBetween(Long.MIN_VALUE, Integer.MIN_VALUE - 1L)); @@ -910,16 +922,8 @@ public void testOutOfRangeFilterPushdown() { new OutOfRangeTestCase("integer", smallerThanInteger, largerThanInteger), new OutOfRangeTestCase("long", smallerThanLong, largerThanLong) // TODO: add unsigned_long https://github.com/elastic/elasticsearch/issues/102935 - // TODO: add half_float, float 
https://github.com/elastic/elasticsearch/issues/100130 ); - final String LT = "<"; - final String LTE = "<="; - final String GT = ">"; - final String GTE = ">="; - final String EQ = "=="; - final String NEQ = "!="; - for (OutOfRangeTestCase testCase : cases) { List trueForSingleValuesPredicates = List.of( LT + testCase.tooHigh, @@ -972,6 +976,51 @@ public void testOutOfRangeFilterPushdown() { } } + public void testOutOfRangeFilterPushdownWithFloatAndHalfFloat() { + var allTypeMappingAnalyzer = makeAnalyzer("mapping-all-types.json"); + + String smallerThanFloat = String.valueOf(randomDoubleBetween(-Double.MAX_VALUE, -Float.MAX_VALUE - 1d, true)); + String largerThanFloat = String.valueOf(randomDoubleBetween(Float.MAX_VALUE + 1d, Double.MAX_VALUE, true)); + + List cases = List.of( + new OutOfRangeTestCase("float", smallerThanFloat, largerThanFloat), + new OutOfRangeTestCase("half_float", smallerThanFloat, largerThanFloat) + ); + + for (OutOfRangeTestCase testCase : cases) { + for (var value : List.of(testCase.tooHigh, testCase.tooLow)) { + for (String predicate : List.of(LT, LTE, GT, GTE, EQ, NEQ)) { + String comparison = testCase.fieldName + predicate + value; + var query = "from test | where " + comparison; + + Source expectedSource = new Source(1, 18, comparison); + + logger.info("Query: " + query); + EsQueryExec actualQueryExec = doTestOutOfRangeFilterPushdown(query, allTypeMappingAnalyzer); + + assertThat(actualQueryExec.query(), is(instanceOf(SingleValueQuery.Builder.class))); + var actualLuceneQuery = (SingleValueQuery.Builder) actualQueryExec.query(); + assertThat(actualLuceneQuery.field(), equalTo(testCase.fieldName)); + assertThat(actualLuceneQuery.source(), equalTo(expectedSource)); + + QueryBuilder actualInnerLuceneQuery = actualLuceneQuery.next(); + + if (predicate.equals(EQ)) { + QueryBuilder expectedInnerQuery = QueryBuilders.termQuery(testCase.fieldName, Double.parseDouble(value)); + assertThat(actualInnerLuceneQuery, equalTo(expectedInnerQuery)); + } 
else if (predicate.equals(NEQ)) { + QueryBuilder expectedInnerQuery = QueryBuilders.boolQuery() + .mustNot(QueryBuilders.termQuery(testCase.fieldName, Double.parseDouble(value))); + assertThat(actualInnerLuceneQuery, equalTo(expectedInnerQuery)); + } else { // one of LT, LTE, GT, GTE + assertTrue(actualInnerLuceneQuery instanceof RangeQueryBuilder); + assertThat(((RangeQueryBuilder) actualInnerLuceneQuery).fieldName(), equalTo(testCase.fieldName)); + } + } + } + } + } + /** * Expects e.g. * LimitExec[1000[INTEGER]] From 1cae3c83615fcd7f716b4f00dc4ac8aad2215906 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Mon, 21 Oct 2024 12:51:10 +0200 Subject: [PATCH 242/449] [DOCS] Documents that dynamic templates are not supported by semantic_text. (#115195) --- docs/reference/mapping/types/semantic-text.asciidoc | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/reference/mapping/types/semantic-text.asciidoc b/docs/reference/mapping/types/semantic-text.asciidoc index 07abbff986643..ac23c153e01a3 100644 --- a/docs/reference/mapping/types/semantic-text.asciidoc +++ b/docs/reference/mapping/types/semantic-text.asciidoc @@ -221,4 +221,5 @@ Notice that both the `semantic_text` field and the source field are updated in t `semantic_text` field types have the following limitations: * `semantic_text` fields are not currently supported as elements of <>. +* `semantic_text` fields can't currently be set as part of <>. * `semantic_text` fields can't be defined as <> of another field, nor can they contain other fields as multi-fields. 
From f2567525011ae14f3b15b8a4d4b0161e60530432 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Mon, 21 Oct 2024 12:56:56 +0200 Subject: [PATCH 243/449] [DOCS] Removes experimental tag from Inference API pages (#113857) --- docs/reference/inference/delete-inference.asciidoc | 2 -- docs/reference/inference/get-inference.asciidoc | 2 -- docs/reference/inference/inference-apis.asciidoc | 2 -- docs/reference/inference/post-inference.asciidoc | 2 -- docs/reference/inference/put-inference.asciidoc | 2 -- docs/reference/inference/update-inference.asciidoc | 2 -- 6 files changed, 12 deletions(-) diff --git a/docs/reference/inference/delete-inference.asciidoc b/docs/reference/inference/delete-inference.asciidoc index 4fc4beaca6d8e..a83fb1a516b80 100644 --- a/docs/reference/inference/delete-inference.asciidoc +++ b/docs/reference/inference/delete-inference.asciidoc @@ -2,8 +2,6 @@ [[delete-inference-api]] === Delete {infer} API -experimental[] - Deletes an {infer} endpoint. IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI, Anthropic, Watsonx.ai, or Hugging Face. diff --git a/docs/reference/inference/get-inference.asciidoc b/docs/reference/inference/get-inference.asciidoc index d991729fe77c9..16e38d2aa148b 100644 --- a/docs/reference/inference/get-inference.asciidoc +++ b/docs/reference/inference/get-inference.asciidoc @@ -2,8 +2,6 @@ [[get-inference-api]] === Get {infer} API -experimental[] - Retrieves {infer} endpoint information. IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI, Anthropic, Watsonx.ai, or Hugging Face. 
diff --git a/docs/reference/inference/inference-apis.asciidoc b/docs/reference/inference/inference-apis.asciidoc index e756831075027..b291b464be498 100644 --- a/docs/reference/inference/inference-apis.asciidoc +++ b/docs/reference/inference/inference-apis.asciidoc @@ -2,8 +2,6 @@ [[inference-apis]] == {infer-cap} APIs -experimental[] - IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio or Hugging Face. For built-in models and models uploaded diff --git a/docs/reference/inference/post-inference.asciidoc b/docs/reference/inference/post-inference.asciidoc index ce51abaff07f8..4edefcc911e2e 100644 --- a/docs/reference/inference/post-inference.asciidoc +++ b/docs/reference/inference/post-inference.asciidoc @@ -2,8 +2,6 @@ [[post-inference-api]] === Perform inference API -experimental[] - Performs an inference task on an input text by using an {infer} endpoint. IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI, Anthropic, Watsonx.ai, or Hugging Face. diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 6d6b61ffea771..e7e25ec98b49d 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -2,8 +2,6 @@ [[put-inference-api]] === Create {infer} API -experimental[] - Creates an {infer} endpoint to perform an {infer} task. 
[IMPORTANT] diff --git a/docs/reference/inference/update-inference.asciidoc b/docs/reference/inference/update-inference.asciidoc index 01a99d7f53062..efd29231ac12e 100644 --- a/docs/reference/inference/update-inference.asciidoc +++ b/docs/reference/inference/update-inference.asciidoc @@ -2,8 +2,6 @@ [[update-inference-api]] === Update inference API -experimental[] - Updates an {infer} endpoint. IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI, Anthropic, Watsonx.ai, or Hugging Face. From 671458a999c53c7c8b9df05ed2a2269a7a4a3d68 Mon Sep 17 00:00:00 2001 From: Pooya Salehi Date: Mon, 21 Oct 2024 13:01:58 +0200 Subject: [PATCH 244/449] Always flush response body in AbstractBlobContainerRetriesTestCase#sendIncompleteContent with JDK23 (#115197) Resolves https://github.com/elastic/elasticsearch/issues/115172 --- .../blobstore/AbstractBlobContainerRetriesTestCase.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java index 90c621c62c305..12094b31a049d 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java @@ -418,7 +418,9 @@ protected int sendIncompleteContent(HttpExchange exchange, byte[] bytes) throws if (bytesToSend > 0) { exchange.getResponseBody().write(bytes, rangeStart, bytesToSend); } - if (randomBoolean()) { + if (randomBoolean() || Runtime.version().feature() >= 23) { + // For now in JDK23 we need to always flush. See https://bugs.openjdk.org/browse/JDK-8331847. 
+ // TODO: remove the JDK version check once that issue is fixed exchange.getResponseBody().flush(); } return bytesToSend; From 8efd08b019b9160f5e703520c6ce3b6a9f92cfbd Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 21 Oct 2024 13:38:23 +0200 Subject: [PATCH 245/449] Upgrade to Lucene 10 (#114741) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The most relevant ES changes that upgrading to Lucene 10 requires are: - use the appropriate IOContext - Scorer / ScorerSupplier breaking changes - Regex automaton are no longer determinized by default - minimize moved to test classes - introduce Elasticsearch900Codec - adjust slicing code according to the added support for intra-segment concurrency - disable intra-segment concurrency in tests - adjust accessor methods for many Lucene classes that became a record - adapt to breaking changes in the analysis area Co-authored-by: Christoph Büscher Co-authored-by: Mayya Sharipova Co-authored-by: ChrisHegarty Co-authored-by: Brian Seeders Co-authored-by: Armin Braun Co-authored-by: Panagiotis Bailis Co-authored-by: Benjamin Trent <4357155+benwtrent@users.noreply.github.com> --- .../pipelines/lucene-snapshot/run-tests.yml | 1 - .../vector/VectorScorerBenchmark.java | 10 +- .../forbidden/es-server-signatures.txt | 4 - build-tools-internal/version.properties | 2 +- distribution/src/config/jvm.options | 3 + docs/Versions.asciidoc | 4 +- docs/changelog/113482.yaml | 27 + docs/changelog/113614.yaml | 18 + docs/changelog/114124.yaml | 18 + docs/changelog/114146.yaml | 20 + docs/changelog/114741.yaml | 5 + docs/plugins/analysis-nori.asciidoc | 12 +- .../analysis/analyzers/lang-analyzer.asciidoc | 3 +- .../tokenfilters/stemmer-tokenfilter.asciidoc | 1 - .../pathhierarchy-tokenizer.asciidoc | 24 +- docs/reference/search/profile.asciidoc | 2 +- gradle/verification-metadata.xml | 150 +- .../simdvec/VectorScorerFactory.java | 10 +- .../simdvec/VectorScorerFactoryImpl.java | 6 +- 
.../simdvec/VectorScorerFactoryImpl.java | 6 +- .../simdvec/internal/Int7SQVectorScorer.java | 8 +- .../internal/Int7SQVectorScorerSupplier.java | 24 +- .../simdvec/internal/Int7SQVectorScorer.java | 21 +- .../simdvec/VectorScorerFactoryTests.java | 7 +- modules/analysis-common/build.gradle | 4 + .../common/LegacyRomanianStemmer.java | 741 +++++++++ .../common/PersianAnalyzerProvider.java | 56 +- .../common/RomanianAnalyzerProvider.java | 46 +- .../common/StemmerTokenFilterFactory.java | 68 +- .../common/HighlighterWithAnalyzersTests.java | 2 +- .../common/PersianAnalyzerProviderTests.java | 78 + .../common/RomanianAnalyzerTests.java | 80 + .../StemmerTokenFilterFactoryTests.java | 38 + .../test/analysis-common/20_analyzers.yml | 29 +- .../apm/internal/tracing/APMTracer.java | 5 +- .../datastreams/DataStreamIT.java | 4 +- .../datastreams/TSDBIndexingIT.java | 2 +- .../ingest/geoip/GeoIpDownloaderIT.java | 4 +- .../script/expression/MoreExpressionIT.java | 64 +- .../ExpressionDoubleValuesScript.java | 14 +- .../expression/ExpressionScriptEngine.java | 48 +- .../mustache/SearchTemplateResponseTests.java | 2 +- .../ScriptedMetricAggContextsTests.java | 5 - .../painless/SimilarityScriptTests.java | 4 +- .../RankFeaturesMapperIntegrationIT.java | 8 +- .../TokenCountFieldMapperIntegrationIT.java | 2 +- .../extras/SearchAsYouTypeFieldMapper.java | 4 +- .../extras/SourceConfirmedTextQuery.java | 32 +- .../extras/MatchOnlyTextFieldMapperTests.java | 4 +- .../extras/SourceConfirmedTextQueryTests.java | 2 +- .../extras/SourceIntervalsSourceTests.java | 2 +- .../join/aggregations/ChildrenIT.java | 8 +- .../join/query/ChildQuerySearchIT.java | 126 +- .../elasticsearch/join/query/InnerHitsIT.java | 40 +- .../aggregations/ParentJoinAggregator.java | 9 +- .../ParentChildInnerHitContextBuilder.java | 8 +- .../ChildrenToParentAggregatorTests.java | 2 +- .../ParentToChildrenAggregatorTests.java | 2 +- .../join/query/HasChildQueryBuilderTests.java | 13 +- 
.../percolator/PercolateQuery.java | 140 +- .../percolator/PercolatorFieldMapper.java | 2 +- .../PercolatorMatchedSlotSubFetchPhase.java | 2 +- .../percolator/QueryAnalyzer.java | 16 +- .../percolator/CandidateQueryTests.java | 64 +- .../percolator/PercolateQueryTests.java | 8 +- .../PercolatorFieldMapperTests.java | 19 +- .../PercolatorQuerySearchTests.java | 2 +- .../percolator/QueryAnalyzerTests.java | 2 +- .../index/reindex/CrossClusterReindexIT.java | 10 +- .../reindex/ReindexValidator.java | 3 +- .../reindex/remote/RemoteResponseParsers.java | 4 +- .../AnnotatedTextFieldMapperTests.java | 2 +- .../AnnotatedTextHighlighterTests.java | 2 +- .../store/smb/SmbMmapFsDirectoryFactory.java | 1 - .../upgrades/FullClusterRestartIT.java | 206 +++ rest-api-spec/build.gradle | 8 +- .../rest-api-spec/test/search/370_profile.yml | 44 +- .../action/IndicesRequestIT.java | 4 +- .../admin/indices/create/CreateIndexIT.java | 4 +- .../admin/indices/create/SplitIndexIT.java | 2 +- .../action/bulk/BulkProcessor2RetryIT.java | 6 +- .../action/bulk/BulkProcessorRetryIT.java | 6 +- .../action/bulk/IncrementalBulkIT.java | 6 +- .../bulk/TransportSimulateBulkActionIT.java | 4 +- .../action/bulk/WriteAckDelayIT.java | 4 +- .../action/search/PointInTimeIT.java | 8 +- .../action/search/TransportSearchIT.java | 14 +- .../elasticsearch/aliases/IndexAliasesIT.java | 30 +- .../broadcast/BroadcastActionsIT.java | 2 +- .../document/DocumentActionsIT.java | 4 +- .../elasticsearch/index/FinalPipelineIT.java | 8 +- .../index/engine/MaxDocsLimitIT.java | 4 +- .../mapper/CopyToMapperIntegrationIT.java | 2 +- .../index/store/ExceptionRetryIT.java | 2 +- .../elasticsearch/indexing/IndexActionIT.java | 8 +- .../indices/IndicesRequestCacheIT.java | 46 +- .../state/CloseWhileRelocatingShardsIT.java | 2 +- .../recovery/RecoveryWhileUnderLoadIT.java | 6 +- .../elasticsearch/recovery/RelocationIT.java | 8 +- .../elasticsearch/routing/AliasRoutingIT.java | 2 +- .../routing/PartitionedRoutingIT.java | 6 +- 
.../elasticsearch/search/SearchTimeoutIT.java | 4 +- .../search/aggregations/CombiIT.java | 2 +- .../search/aggregations/EquivalenceIT.java | 4 +- .../aggregations/FiltersAggsRewriteIT.java | 2 +- .../aggregations/bucket/DateHistogramIT.java | 6 +- .../bucket/DateHistogramOffsetIT.java | 6 +- .../aggregations/bucket/DateRangeIT.java | 18 +- .../search/aggregations/bucket/FilterIT.java | 2 +- .../search/aggregations/bucket/FiltersIT.java | 4 +- .../aggregations/bucket/GeoDistanceIT.java | 2 +- .../aggregations/bucket/HistogramIT.java | 2 +- .../search/aggregations/bucket/NestedIT.java | 2 +- .../search/aggregations/bucket/RangeIT.java | 2 +- .../aggregations/metrics/ExtendedStatsIT.java | 4 +- .../metrics/HDRPercentileRanksIT.java | 4 +- .../metrics/HDRPercentilesIT.java | 4 +- .../metrics/ScriptedMetricIT.java | 28 +- .../search/aggregations/metrics/StatsIT.java | 2 +- .../search/aggregations/metrics/SumIT.java | 2 +- .../metrics/TDigestPercentileRanksIT.java | 4 +- .../metrics/TDigestPercentilesIT.java | 4 +- .../aggregations/metrics/TopHitsIT.java | 40 +- .../aggregations/metrics/ValueCountIT.java | 2 +- .../basic/SearchWhileCreatingIndexIT.java | 6 +- .../search/basic/SearchWhileRelocatingIT.java | 6 +- .../basic/TransportTwoNodesSearchIT.java | 14 +- .../search/ccs/CrossClusterSearchIT.java | 2 +- .../search/fetch/FetchSubPhasePluginIT.java | 2 +- .../search/fetch/subphase/InnerHitsIT.java | 50 +- .../highlight/HighlighterSearchIT.java | 6 +- .../search/fields/SearchFieldsIT.java | 36 +- .../functionscore/DecayFunctionScoreIT.java | 30 +- .../functionscore/ExplainableScriptIT.java | 2 +- .../search/functionscore/FunctionScoreIT.java | 18 +- .../search/functionscore/QueryRescorerIT.java | 10 +- .../functionscore/RandomScoreFunctionIT.java | 6 +- .../search/nested/SimpleNestedIT.java | 2 +- .../search/profile/query/QueryProfilerIT.java | 4 +- .../elasticsearch/search/query/ExistsIT.java | 2 +- .../search/query/MultiMatchQueryIT.java | 10 +- 
.../search/query/QueryStringIT.java | 2 +- .../search/query/SearchQueryIT.java | 14 +- .../search/query/SimpleQueryStringIT.java | 2 +- .../retriever/MinimalCompoundRetrieverIT.java | 2 +- .../search/retriever/RetrieverRewriteIT.java | 8 +- .../search/routing/SearchPreferenceIT.java | 6 +- .../routing/SearchReplicaSelectionIT.java | 6 +- .../scriptfilter/ScriptQuerySearchIT.java | 8 +- .../search/scroll/DuelScrollIT.java | 10 +- .../search/scroll/SearchScrollIT.java | 34 +- .../search/searchafter/SearchAfterIT.java | 2 +- .../search/simple/SimpleSearchIT.java | 2 +- .../search/slice/SearchSliceIT.java | 10 +- .../search/sort/FieldSortIT.java | 60 +- .../search/sort/SimpleSortIT.java | 8 +- .../search/source/MetadataFetchingIT.java | 4 +- .../similarity/SimilarityIT.java | 4 +- server/src/main/java/module-info.java | 3 +- .../diskusage/IndexDiskUsageAnalyzer.java | 21 +- .../search/BottomSortValuesCollector.java | 2 +- .../CountOnlyQueryPhaseResultConsumer.java | 4 +- .../action/search/SearchPhaseController.java | 4 +- .../bootstrap/BootstrapChecks.java | 2 +- .../elasticsearch/common/lucene/Lucene.java | 6 +- .../lucene/index/FilterableTermsEnum.java | 6 + .../lucene/search/AutomatonQueries.java | 7 +- .../search/CaseInsensitivePrefixQuery.java | 8 +- .../search/CaseInsensitiveWildcardQuery.java | 4 +- .../common/lucene/search/Queries.java | 2 +- .../SpanBooleanQueryRewriteWithMaxClause.java | 13 +- .../common/lucene/search/XMoreLikeThis.java | 5 +- .../search/function/FunctionScoreQuery.java | 89 +- .../search/function/MinScoreScorer.java | 8 +- .../search/function/ScriptScoreFunction.java | 10 +- .../search/function/ScriptScoreQuery.java | 82 +- .../org/elasticsearch/common/regex/Regex.java | 4 +- .../common/settings/KeyStoreWrapper.java | 2 +- .../xcontent/support/XContentMapValues.java | 8 +- .../gateway/PersistedClusterStateService.java | 6 +- .../org/elasticsearch/index/IndexModule.java | 3 +- .../elasticsearch/index/IndexVersions.java | 55 +- 
.../index/codec/CodecService.java | 6 +- .../codec/DeduplicatingFieldInfosFormat.java | 3 +- .../index/codec/Elasticsearch816Codec.java | 2 +- .../index/codec/Elasticsearch900Codec.java | 131 ++ .../codec/LegacyPerFieldMapperCodec.java | 6 +- .../index/codec/PerFieldMapperCodec.java | 2 +- .../ES85BloomFilterPostingsFormat.java | 8 +- .../ES87BloomFilterPostingsFormat.java | 8 +- .../codec/tsdb/ES87TSDBDocValuesConsumer.java | 210 ++- .../codec/tsdb/ES87TSDBDocValuesFormat.java | 46 +- .../codec/tsdb/ES87TSDBDocValuesProducer.java | 139 +- .../vectors/BinarizedByteVectorValues.java | 61 +- .../codec/vectors/ES813FlatVectorFormat.java | 5 - .../vectors/ES813Int8FlatVectorFormat.java | 6 - .../ES814ScalarQuantizedVectorsFormat.java | 15 +- .../vectors/ES815BitFlatVectorsFormat.java | 48 +- .../vectors/ES816BinaryFlatVectorsScorer.java | 22 +- .../ES816BinaryQuantizedVectorsReader.java | 38 +- .../ES816BinaryQuantizedVectorsWriter.java | 143 +- .../vectors/OffHeapBinarizedVectorValues.java | 100 +- ...RandomAccessBinarizedByteVectorValues.java | 84 - .../index/engine/LuceneChangesSnapshot.java | 4 +- .../RecoverySourcePruneMergePolicy.java | 6 + .../index/engine/TranslogDirectoryReader.java | 21 +- .../ordinals/GlobalOrdinalMapping.java | 7 +- .../fielddata/ordinals/MultiOrdinals.java | 15 +- .../index/mapper/DateFieldMapper.java | 5 +- .../index/mapper/DocumentLeafReader.java | 23 +- .../index/mapper/IdFieldMapper.java | 5 +- .../index/mapper/IpPrefixAutomatonUtil.java | 5 +- .../index/mapper/KeywordFieldMapper.java | 4 +- .../index/mapper/LegacyTypeFieldMapper.java | 3 +- .../index/mapper/NumberFieldMapper.java | 9 +- .../index/mapper/StringFieldType.java | 8 +- .../index/mapper/TermBasedFieldType.java | 3 +- .../index/mapper/TextFieldMapper.java | 4 +- .../flattened/FlattenedFieldMapper.java | 8 +- .../KeyedFlattenedLeafFieldData.java | 14 +- .../DenormalizedCosineFloatVectorValues.java | 29 +- .../vectors/DenseVectorFieldMapper.java | 14 +- 
.../query/CombinedFieldsQueryBuilder.java | 4 +- .../index/query/IntervalBuilder.java | 6 +- .../index/query/NestedQueryBuilder.java | 8 +- .../elasticsearch/index/query/RegexpFlag.java | 10 +- .../index/query/RegexpQueryBuilder.java | 4 +- .../index/query/ScriptQueryBuilder.java | 6 +- .../index/query/TermsSetQueryBuilder.java | 5 +- .../reindex/ClientScrollableHitSource.java | 2 +- .../index/search/MatchQueryParser.java | 9 +- .../index/search/MultiMatchQueryParser.java | 2 +- .../index/search/NestedHelper.java | 41 +- .../index/search/QueryStringQueryParser.java | 9 +- .../RemoveCorruptedLuceneSegmentsAction.java | 4 +- .../index/shard/ShardSplittingQuery.java | 168 +- .../index/store/FsDirectoryFactory.java | 71 +- .../org/elasticsearch/index/store/Store.java | 18 +- .../index/store/StoreFileMetadata.java | 2 +- .../index/termvectors/TermVectorsService.java | 4 +- .../indices/AssociatedIndexDescriptor.java | 2 +- .../indices/IndicesQueryCache.java | 14 - .../indices/SystemIndexDescriptor.java | 10 +- .../elasticsearch/indices/SystemIndices.java | 17 +- .../recovery/RecoverySourceHandler.java | 2 +- .../grouping/GroupingDocValuesSelector.java | 4 +- .../lucene/grouping/TopFieldGroups.java | 4 +- .../queries/BinaryDocValuesRangeQuery.java | 7 +- .../lucene/queries/BlendedTermQuery.java | 13 +- .../lucene/queries/MinDocQuery.java | 7 +- .../queries/SearchAfterSortedDocQuery.java | 8 +- .../lucene/spatial/ShapeDocValuesQuery.java | 20 +- .../elasticsearch/node/NodeConstruction.java | 4 +- .../blobstore/BlobStoreRepository.java | 2 +- .../rest/action/cat/RestCountAction.java | 4 +- .../rest/action/search/RestCountAction.java | 2 +- .../org/elasticsearch/script/ScoreScript.java | 5 + .../SortedSetDocValuesStringFieldScript.java | 5 +- .../script/field/IpDocValuesField.java | 3 +- .../ByteKnnDenseVectorDocValuesField.java | 13 +- .../vectors/KnnDenseVectorDocValuesField.java | 13 +- .../elasticsearch/search/MultiValueMode.java | 8 +- 
.../elasticsearch/search/SearchFeatures.java | 5 +- .../org/elasticsearch/search/SearchHits.java | 6 +- .../aggregations/MultiBucketCollector.java | 30 +- .../bucket/composite/CompositeAggregator.java | 2 +- .../composite/GlobalOrdinalValuesSource.java | 11 +- .../countedterms/CountedTermsAggregator.java | 4 +- .../bucket/filter/MergedPointRangeQuery.java | 24 - .../bucket/filter/QueryToFilterAdapter.java | 5 +- .../bucket/global/GlobalAggregator.java | 4 +- .../bucket/nested/NestedAggregator.java | 8 - .../bucket/range/BinaryRangeAggregator.java | 3 +- .../sampler/BestDocsDeferringCollector.java | 12 +- .../DiversifiedOrdinalsSamplerAggregator.java | 4 +- .../sampler/random/RandomSamplingQuery.java | 7 +- .../GlobalOrdinalsStringTermsAggregator.java | 16 +- .../bucket/terms/IncludeExclude.java | 6 +- .../metrics/CardinalityAggregator.java | 3 +- .../GlobalOrdCardinalityAggregator.java | 7 +- .../aggregations/metrics/InternalTopHits.java | 10 +- .../metrics/MetricInspectionHelper.java | 2 +- .../metrics/TopHitsAggregator.java | 8 +- .../aggregations/support/MissingValues.java | 26 +- .../support/TimeSeriesIndexSearcher.java | 2 +- .../fetch/subphase/UnmappedFieldFetcher.java | 8 +- .../highlight/FragmentBuilderHelper.java | 6 +- .../search/internal/ContextIndexSearcher.java | 42 +- .../internal/ExitableDirectoryReader.java | 146 +- .../FieldUsageTrackingDirectoryReader.java | 14 - .../search/profile/query/ProfileScorer.java | 11 +- .../search/profile/query/ProfileWeight.java | 35 +- .../search/query/QueryPhase.java | 2 +- .../search/query/QueryPhaseCollector.java | 9 +- .../retriever/rankdoc/RankDocsQuery.java | 16 +- .../runtime/AbstractScriptFieldQuery.java | 6 +- ...oPointScriptFieldDistanceFeatureQuery.java | 9 +- .../LongScriptFieldDistanceFeatureQuery.java | 10 +- .../runtime/StringScriptFieldRegexpQuery.java | 8 +- .../StringScriptFieldWildcardQuery.java | 3 +- .../search/slice/DocIdSliceQuery.java | 6 +- .../search/slice/DocValuesSliceQuery.java | 6 +- 
.../search/slice/TermsSliceQuery.java | 6 +- .../search/sort/ScoreSortBuilder.java | 1 - .../completion/CompletionSuggester.java | 3 +- .../phrase/DirectCandidateGenerator.java | 4 +- .../search/suggest/phrase/LaplaceScorer.java | 2 +- .../phrase/LinearInterpolatingScorer.java | 2 +- .../phrase/NoisyChannelSpellChecker.java | 2 +- .../suggest/phrase/StupidBackoffScorer.java | 4 +- .../search/vectors/DenseVectorQuery.java | 12 +- ...iversifyingChildrenByteKnnVectorQuery.java | 2 +- ...versifyingChildrenFloatKnnVectorQuery.java | 2 +- .../search/vectors/ESKnnByteVectorQuery.java | 2 +- .../search/vectors/ESKnnFloatVectorQuery.java | 2 +- .../search/vectors/KnnScoreDocQuery.java | 6 +- .../search/vectors/VectorSimilarityQuery.java | 19 +- .../SynonymsManagementAPIService.java | 6 +- .../services/org.apache.lucene.codecs.Codec | 1 + .../IndexDiskUsageAnalyzerTests.java | 26 +- ...ountOnlyQueryPhaseResultConsumerTests.java | 8 +- .../action/search/DfsQueryPhaseTests.java | 6 +- .../action/search/FetchSearchPhaseTests.java | 10 +- .../search/SearchPhaseControllerTests.java | 28 +- .../SearchQueryThenFetchAsyncActionTests.java | 8 +- .../search/SearchResponseMergerTests.java | 28 +- .../action/search/SearchResponseTests.java | 4 +- .../AbstractTermVectorsTestCase.java | 2 +- .../MetadataCreateIndexServiceTests.java | 4 + .../common/lucene/LuceneTests.java | 33 +- .../lucene/index/FreqTermsEnumTests.java | 4 +- .../search/function/MinScoreScorerTests.java | 31 +- .../morelikethis/XMoreLikeThisTests.java | 6 +- .../deps/lucene/SimpleLuceneTests.java | 6 +- .../deps/lucene/VectorHighlighterTests.java | 8 +- .../gateway/MetadataStateFormatTests.java | 2 +- .../PersistedClusterStateServiceTests.java | 4 +- .../index/IndexServiceTests.java | 6 +- .../elasticsearch/index/codec/CodecTests.java | 2 +- .../tsdb/ES87TSDBDocValuesFormatTests.java | 2 - ...ValuesFormatVariableSkipIntervalTests.java | 196 +++ .../BaseKnnBitVectorsFormatTestCase.java | 6 +- 
.../vectors/ES813FlatVectorFormatTests.java | 4 +- .../ES813Int8FlatVectorFormatTests.java | 4 +- ...HnswScalarQuantizedVectorsFormatTests.java | 21 +- .../ES815BitFlatVectorFormatTests.java | 4 +- .../ES815HnswBitVectorsFormatTests.java | 4 +- .../ES816BinaryFlatVectorsScorerTests.java | 43 +- ...S816BinaryQuantizedVectorsFormatTests.java | 28 +- ...HnswBinaryQuantizedVectorsFormatTests.java | 12 +- .../codec/zstd/StoredFieldCodecDuelTests.java | 6 +- ...estCompressionStoredFieldsFormatTests.java | 4 +- ...td814BestSpeedStoredFieldsFormatTests.java | 4 +- .../engine/CompletionStatsCacheTests.java | 4 +- .../index/engine/InternalEngineTests.java | 69 +- .../index/engine/LiveVersionMapTests.java | 27 +- .../RecoverySourcePruneMergePolicyTests.java | 10 +- .../index/engine/SegmentTests.java | 2 +- .../AbstractFieldDataImplTestCase.java | 12 +- .../AbstractStringFieldDataTestCase.java | 22 +- .../ordinals/MultiOrdinalsTests.java | 2 - .../FieldStatsProviderRefreshTests.java | 6 +- .../mapper/BooleanScriptFieldTypeTests.java | 12 +- .../index/mapper/DoubleIndexingDocTests.java | 14 +- .../mapper/DoubleScriptFieldTypeTests.java | 17 +- .../mapper/FieldNamesFieldMapperTests.java | 1 - .../index/mapper/IdFieldTypeTests.java | 4 +- .../mapper/IpPrefixAutomatonUtilTests.java | 9 +- .../index/mapper/IpScriptFieldTypeTests.java | 8 +- .../index/mapper/KeywordFieldTypeTests.java | 2 +- .../mapper/KeywordScriptFieldTypeTests.java | 17 +- .../mapper/LongScriptFieldTypeTests.java | 17 +- .../mapper/StoredNumericValuesTests.java | 2 +- .../index/mapper/TextFieldMapperTests.java | 2 +- .../KeyedFlattenedFieldTypeTests.java | 2 +- .../KeyedFlattenedLeafFieldDataTests.java | 9 +- ...ormalizedCosineFloatVectorValuesTests.java | 8 +- .../KnnDenseVectorScriptDocValuesTests.java | 104 +- .../index/query/BoolQueryBuilderTests.java | 10 +- .../CombinedFieldsQueryParsingTests.java | 8 +- .../DistanceFeatureQueryBuilderTests.java | 4 +- .../index/query/ExistsQueryBuilderTests.java | 4 +- 
.../MatchBoolPrefixQueryBuilderTests.java | 13 +- .../index/query/MatchQueryBuilderTests.java | 11 +- .../query/MoreLikeThisQueryBuilderTests.java | 4 +- .../query/QueryStringQueryBuilderTests.java | 12 +- .../query/SimpleQueryStringBuilderTests.java | 2 +- .../query/SpanMultiTermQueryBuilderTests.java | 9 +- .../query/TermsSetQueryBuilderTests.java | 8 +- .../IndexLevelReplicationTests.java | 2 +- .../AbstractNumberNestedSortingTestCase.java | 14 +- .../nested/DoubleNestedSortingTests.java | 2 +- .../nested/FloatNestedSortingTests.java | 2 +- .../search/nested/NestedSortingTests.java | 104 +- .../index/shard/IndexReaderWrapperTests.java | 8 +- .../index/shard/IndexShardTests.java | 8 +- .../index/shard/RefreshListenersTests.java | 5 +- .../index/shard/ShardSplittingQueryTests.java | 4 +- .../similarity/ScriptedSimilarityTests.java | 4 +- .../index/store/FsDirectoryFactoryTests.java | 60 +- .../indices/IndicesQueryCacheTests.java | 25 +- .../indices/IndicesRequestCacheTests.java | 4 +- ...PassGroupingCollectorSearchAfterTests.java | 6 +- .../SinglePassGroupingCollectorTests.java | 12 +- .../lucene/queries/BlendedTermQueryTests.java | 28 +- .../CustomUnifiedHighlighterTests.java | 2 +- .../script/ScriptTermStatsTests.java | 4 +- .../search/MultiValueModeTests.java | 13 +- .../search/SearchCancellationTests.java | 16 +- .../search/SearchServiceTests.java | 34 +- .../MultiBucketCollectorTests.java | 84 +- .../bucket/ShardSizeTestCase.java | 4 +- .../composite/CompositeAggregatorTests.java | 10 +- .../CompositeValuesCollectorQueueTests.java | 148 +- .../SingleDimensionValuesSourceTests.java | 3 +- .../range/BinaryRangeAggregatorTests.java | 4 +- .../terms/RareTermsAggregatorTests.java | 2 +- .../bucket/terms/TermsAggregatorTests.java | 4 +- .../metrics/InternalTopHitsTests.java | 8 +- .../metrics/TopHitsAggregatorTests.java | 14 +- .../support/IncludeExcludeTests.java | 9 +- .../support/MissingValuesTests.java | 9 +- .../support/TimeSeriesIndexSearcherTests.java | 5 +- 
.../internal/ContextIndexSearcherTests.java | 33 +- .../query/ProfileCollectorManagerTests.java | 4 +- .../profile/query/ProfileScorerTests.java | 30 +- .../profile/query/QueryProfilerTests.java | 5 - .../query/QueryPhaseCollectorTests.java | 76 +- .../search/query/QueryPhaseTests.java | 154 +- .../search/query/QueryPhaseTimeoutTests.java | 86 +- .../rankdoc/RankDocsQueryBuilderTests.java | 8 +- .../sort/BucketedSortForFloatsTests.java | 6 - .../CategoryContextMappingTests.java | 5 +- .../phrase/DirectCandidateGeneratorTests.java | 8 +- .../AbstractDenseVectorQueryTestCase.java | 2 +- .../vectors/KnnScoreDocQueryBuilderTests.java | 4 +- .../vectors/VectorSimilarityQueryTests.java | 8 +- .../snapshots/SnapshotResiliencyTests.java | 9 +- ...ncySimulatingBlobStoreRepositoryTests.java | 2 +- .../index/engine/EngineTestCase.java | 9 +- .../AbstractScriptFieldTypeTestCase.java | 2 +- .../index/mapper/FieldTypeTestCase.java | 2 + .../analysis/AnalysisFactoryTestCase.java | 4 +- .../search/SearchResponseUtils.java | 2 +- .../aggregations/AggregatorTestCase.java | 31 +- .../metrics/AbstractGeoTestCase.java | 2 +- .../metrics/CentroidAggregationTestBase.java | 2 +- .../SpatialBoundsAggregationTestBase.java | 2 +- .../geo/BasePointShapeQueryTestCase.java | 18 +- .../search/geo/BaseShapeQueryTestCase.java | 8 +- .../geo/DatelinePointShapeQueryTestCase.java | 6 +- .../geo/GeoBoundingBoxQueryIntegTestCase.java | 12 +- .../search/geo/GeoShapeQueryTestCase.java | 2 +- .../elasticsearch/test/CorruptionUtils.java | 2 +- .../elasticsearch/test/ESIntegTestCase.java | 6 +- .../org/elasticsearch/test/ESTestCase.java | 42 + .../engine/ThrowingLeafReaderWrapper.java | 7 - .../hamcrest/ElasticsearchAssertions.java | 12 +- .../rate/TimeSeriesRateAggregatorTests.java | 16 +- .../StringStatsAggregatorTests.java | 2 +- .../search/AsyncSearchIntegTestCase.java | 4 +- .../search/AsyncSearchSingleNodeTests.java | 4 +- .../common/BlobCacheBufferedIndexInput.java | 3 +- 
.../repository/CcrRestoreSourceService.java | 2 +- .../elasticsearch/xpack/CcrIntegTestCase.java | 2 +- .../sourceonly/SourceOnlySnapshotIT.java | 2 +- .../frozen/RewriteCachingDirectoryReader.java | 18 +- .../sourceonly/SourceOnlySnapshot.java | 1 + .../AbstractTransportGetResourcesAction.java | 2 +- .../ml/dataframe/evaluation/Evaluation.java | 2 +- .../xpack/core/security/ScrollHelper.java | 6 +- .../profile/SuggestProfilesResponse.java | 4 +- .../accesscontrol/FieldSubsetReader.java | 16 - .../permission/ApplicationPermission.java | 4 +- .../authz/permission/ClusterPermission.java | 3 +- .../authz/permission/FieldPermissions.java | 13 +- .../authz/permission/IndicesPermission.java | 4 +- .../security/authz/privilege/Privilege.java | 3 +- .../core/security/support/Automatons.java | 84 +- .../termsenum/action/SimpleTermCountEnum.java | 6 + .../SourceOnlySnapshotShardTests.java | 12 +- .../sourceonly/SourceOnlySnapshotTests.java | 13 +- .../WeightedTokensQueryBuilderTests.java | 6 +- .../DocumentSubsetReaderTests.java | 8 +- .../accesscontrol/FieldSubsetReaderTests.java | 60 +- ...ityIndexReaderWrapperIntegrationTests.java | 2 +- .../authz/privilege/IndexPrivilegeTests.java | 16 +- .../authz/privilege/PrivilegeTests.java | 14 +- .../security/support/AutomatonsTests.java | 5 +- .../xpack/enrich/EnrichPolicyRunnerTests.java | 70 +- .../EnrichShardMultiSearchActionTests.java | 2 +- .../connector/ConnectorIndexService.java | 4 +- .../syncjob/ConnectorSyncJobIndexService.java | 2 +- .../rules/QueryRulesIndexService.java | 2 +- .../search/SearchApplicationIndexService.java | 2 +- .../xpack/eql/action/EqlSearchResponse.java | 4 +- .../test/resources/querytranslator_tests.txt | 6 +- .../predicate/regex/RLikePattern.java | 6 +- .../predicate/regex/WildcardPattern.java | 4 +- .../compute/lucene/LuceneSliceQueue.java | 4 +- .../lucene/LuceneTopNSourceOperator.java | 4 +- .../lucene/PartialLeafReaderContext.java | 5 + .../enrich/EnrichQuerySourceOperator.java | 3 +- 
.../function/scalar/string/AutomataMatch.java | 2 +- .../xpack/esql/parser/ExpressionBuilder.java | 2 +- .../querydsl/query/SingleValueMatchQuery.java | 58 +- .../EnrichQuerySourceOperatorTests.java | 2 +- .../querydsl/query/SingleValueQueryTests.java | 7 +- .../xpack/graph/test/GraphTests.java | 4 +- .../action/TransportGraphExploreAction.java | 6 +- .../ShardBulkInferenceActionFilterIT.java | 2 +- .../mapper/SemanticTextFieldMapperTests.java | 8 +- .../queries/SemanticQueryBuilderTests.java | 10 +- .../action/TransportGetPipelineAction.java | 8 +- .../mapper/ConstantKeywordFieldMapper.java | 6 +- .../CountedKeywordFieldMapper.java | 7 +- .../CountedKeywordFieldTypeTests.java | 6 +- .../unsignedlong/UnsignedLongFieldMapper.java | 2 +- .../xpack/versionfield/VersionEncoder.java | 5 +- .../VersionFieldWildcardQuery.java | 6 +- .../VersionStringDocValuesField.java | 3 +- .../VersionStringFieldMapper.java | 3 +- .../versionfield/VersionStringFieldTests.java | 38 +- .../ml/integration/DeleteExpiredDataIT.java | 2 +- .../MlNativeAutodetectIntegTestCase.java | 2 +- .../xpack/ml/integration/PersistJobIT.java | 6 +- .../xpack/ml/integration/RegressionIT.java | 2 +- .../ml/integration/RevertModelSnapshotIT.java | 2 +- .../integration/RunDataFrameAnalyticsIT.java | 4 +- .../BucketCorrelationAggregationIT.java | 2 +- .../xpack/ml/integration/DatafeedCcsIT.java | 2 +- .../integration/MlDistributedFailureIT.java | 2 +- .../TransportGetOverallBucketsAction.java | 2 +- .../TransportPutTrainedModelAction.java | 4 +- ...ransportStartDataFrameAnalyticsAction.java | 2 +- .../extractor/DataExtractorUtils.java | 2 +- .../persistence/DatafeedConfigProvider.java | 4 +- .../extractor/DataFrameDataExtractor.java | 4 +- .../dataframe/inference/InferenceRunner.java | 2 +- .../ml/dataframe/steps/InferenceStep.java | 2 +- .../TrainTestSplitterFactory.java | 2 +- .../ChunkedTrainedModelRestorer.java | 2 +- .../persistence/TrainedModelProvider.java | 2 +- 
.../ml/job/persistence/JobConfigProvider.java | 2 +- .../ml/job/persistence/JobDataDeleter.java | 2 +- .../job/persistence/JobResultsProvider.java | 18 +- .../retention/ExpiredForecastsRemover.java | 2 +- .../persistence/BatchedDocumentsIterator.java | 2 +- .../SearchAfterDocumentsIterator.java | 2 +- .../SparseVectorQueryBuilderTests.java | 4 +- .../TextExpansionQueryBuilderTests.java | 4 +- .../monitoring/integration/MonitoringIT.java | 4 +- .../local/LocalExporterIntegTests.java | 4 +- .../LocalExporterResourceIntegTests.java | 6 +- .../lucene/bwc/AbstractArchiveTestCase.java | 2 +- .../xpack/lucene/bwc/OldSegmentInfos.java | 4 +- .../xpack/lucene/bwc/codecs/BWCCodec.java | 7 +- .../index/LegacyDocValuesIterables.java | 17 +- .../LegacySortedSetDocValuesWrapper.java | 6 +- .../lucene/bwc/codecs/lucene50/ForUtil.java | 12 +- .../lucene50/Lucene50FieldInfosFormat.java | 2 + .../lucene54/Lucene54DocValuesProducer.java | 6 + .../lucene60/MetadataOnlyBKDReader.java | 22 +- .../bwc/codecs/lucene70/BWCLucene70Codec.java | 8 +- .../bwc/codecs/lucene70/IndexedDISI.java | 327 ++++ .../bwc/codecs/lucene70/Lucene70Codec.java | 15 + .../lucene70/Lucene70DocValuesConsumer.java | 681 ++++++++ .../lucene70/Lucene70DocValuesFormat.java | 171 ++ .../lucene70/Lucene70DocValuesProducer.java | 1461 +++++++++++++++++ .../services/org.apache.lucene.codecs.Codec | 1 + .../org.apache.lucene.codecs.DocValuesFormat | 1 + .../bwc/codecs/OldCodecsAvailableTests.java | 2 +- .../lucene50/BlockPostingsFormat3Tests.java | 8 +- .../Lucene54DocValuesFormatTests.java | 4 +- .../Lucene70DocValuesFormatTests.java | 26 + .../action/TransportGetStackTracesAction.java | 4 +- .../action/TransportGetStatusAction.java | 2 +- .../predicate/regex/LikePattern.java | 4 +- .../predicate/regex/RLikePattern.java | 6 +- .../predicate/regex/WildcardPattern.java | 4 +- .../xpack/rank/rrf/RRFRankMultiShardIT.java | 6 +- .../xpack/rank/rrf/RRFRankSingleShardIT.java | 6 +- .../xpack/rank/rrf/RRFRetrieverBuilderIT.java 
| 32 +- .../rrf/RRFRetrieverBuilderNestedDocsIT.java | 4 +- .../PinnedQueryBuilderIT.java | 4 +- .../searchbusinessrules/CappedScoreQuery.java | 30 +- .../CappedScoreWeight.java | 19 +- .../searchbusinessrules/CappedScorer.java | 5 +- ...pshotsCanMatchOnCoordinatorIntegTests.java | 8 +- ...napshotsRecoverFromSnapshotIntegTests.java | 2 +- ...archableSnapshotsRepositoryIntegTests.java | 8 +- ...tsBlobStoreCacheMaintenanceIntegTests.java | 2 +- .../BlobStoreCacheMaintenanceService.java | 2 +- .../cache/full/PersistentCache.java | 7 +- .../input/CachedBlobContainerIndexInput.java | 3 +- .../AbstractSearchableSnapshotsTestCase.java | 4 +- .../InMemoryNoOpCommitDirectoryTests.java | 4 +- .../SearchableSnapshotDirectoryTests.java | 10 +- ...tRemoteClusterSecurityDlsAndFlsRestIT.java | 2 +- .../RemoteClusterSecurityCcrIT.java | 2 +- .../RemoteClusterSecurityCcrMigrationIT.java | 2 +- .../RemoteClusterSecurityMutualTlsIT.java | 2 +- .../RemoteClusterSecurityRestIT.java | 2 +- .../RemoteClusterSecuritySpecialUserIT.java | 2 +- .../RemoteClusterSecurityTopologyRestIT.java | 4 +- .../DateMathExpressionIntegTests.java | 2 +- .../integration/DlsFlsRequestCacheTests.java | 2 +- .../DocumentLevelSecurityRandomTests.java | 2 +- .../DocumentLevelSecurityTests.java | 22 +- .../FieldLevelSecurityRandomTests.java | 6 +- .../integration/FieldLevelSecurityTests.java | 48 +- .../integration/KibanaUserRoleIntegTests.java | 8 +- .../MultipleIndicesPermissionsTests.java | 2 +- .../authc/esnative/NativeRealmIntegTests.java | 8 +- .../security/authz/ReadActionsTests.java | 12 +- .../security/authz/SecurityScrollTests.java | 4 +- .../security/profile/ProfileIntegTests.java | 2 +- .../xpack/security/authc/ApiKeyService.java | 2 +- .../authc/esnative/NativeUsersStore.java | 6 +- .../xpack/security/authz/RBACEngine.java | 3 +- .../DeprecationRoleDescriptorConsumer.java | 4 +- .../authz/store/NativeRolesStore.java | 12 +- .../security/profile/ProfileService.java | 6 +- 
.../security/support/SecurityMigrations.java | 4 +- .../authz/store/FileRolesStoreTests.java | 16 +- .../slm/SLMSnapshotBlockingIntegTests.java | 2 +- .../SnapshotBasedIndexRecoveryIT.java | 4 +- .../GeoGridAggAndQueryConsistencyIT.java | 4 +- .../search/GeoShapeWithDocValuesIT.java | 2 +- .../search/ShapeQueryOverShapeTests.java | 2 +- .../spatial/search/ShapeQueryTestCase.java | 14 +- .../spatial/ingest/CircleProcessorTests.java | 8 +- .../xpack/sql/execution/search/Querier.java | 4 +- .../search/extractor/TopHitsAggExtractor.java | 2 +- .../TransformUsageTransportAction.java | 2 +- .../TimeBasedCheckpointProvider.java | 2 +- .../IndexBasedTransformConfigManager.java | 4 +- .../common/AbstractCompositeAggFunction.java | 2 +- .../CompositeBucketsChangeCollector.java | 4 +- .../watcher/WatcherConcreteIndexTests.java | 2 +- .../actions/TimeThrottleIntegrationTests.java | 2 +- .../history/HistoryActionConditionTests.java | 6 +- .../HistoryTemplateEmailMappingsTests.java | 2 +- .../HistoryTemplateHttpMappingsTests.java | 2 +- ...storyTemplateIndexActionMappingsTests.java | 2 +- ...storyTemplateSearchInputMappingsTests.java | 2 +- .../AbstractWatcherIntegrationTestCase.java | 10 +- .../test/integration/BootStrapTests.java | 6 +- .../integration/RejectedExecutionTests.java | 2 +- .../test/integration/SingleNodeTests.java | 2 +- .../test/integration/WatchMetadataTests.java | 2 +- .../transform/TransformIntegrationTests.java | 12 +- .../xpack/watcher/WatcherService.java | 2 +- .../xpack/watcher/common/http/HttpClient.java | 3 +- .../execution/TriggeredWatchStore.java | 2 +- .../input/search/ExecutableSearchInput.java | 2 +- .../actions/TransportQueryWatchesAction.java | 4 +- .../BinaryDvConfirmedAutomatonQuery.java | 65 +- .../wildcard/mapper/WildcardFieldMapper.java | 12 +- .../mapper/WildcardFieldMapperTests.java | 35 +- .../oldrepos/OldRepositoryAccessIT.java | 8 +- .../TokenBackwardsCompatibilityIT.java | 2 +- 662 files changed, 8792 insertions(+), 3627 deletions(-) 
create mode 100644 docs/changelog/113482.yaml create mode 100644 docs/changelog/113614.yaml create mode 100644 docs/changelog/114124.yaml create mode 100644 docs/changelog/114146.yaml create mode 100644 docs/changelog/114741.yaml create mode 100644 modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyRomanianStemmer.java create mode 100644 modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PersianAnalyzerProviderTests.java create mode 100644 modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RomanianAnalyzerTests.java create mode 100644 server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java delete mode 100644 server/src/main/java/org/elasticsearch/index/codec/vectors/RandomAccessBinarizedByteVectorValues.java create mode 100644 server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatVariableSkipIntervalTests.java create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/IndexedDISI.java create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70Codec.java create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesConsumer.java create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormat.java create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesProducer.java create mode 100644 x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormatTests.java diff --git a/.buildkite/pipelines/lucene-snapshot/run-tests.yml b/.buildkite/pipelines/lucene-snapshot/run-tests.yml index c76c54a56494e..f7293e051467c 100644 --- 
a/.buildkite/pipelines/lucene-snapshot/run-tests.yml +++ b/.buildkite/pipelines/lucene-snapshot/run-tests.yml @@ -56,7 +56,6 @@ steps: matrix: setup: BWC_VERSION: - - 7.17.13 - 8.9.1 - 8.10.0 agents: diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java index 569e8909e1e12..b294fe97c7e7c 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java @@ -19,7 +19,7 @@ import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizer; import org.elasticsearch.common.logging.LogConfigurator; import org.elasticsearch.core.IOUtils; @@ -217,19 +217,17 @@ public float squareDistanceScalar() { return 1 / (1f + adjustedDistance); } - RandomAccessQuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException { + QuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException { var sq = new ScalarQuantizer(0.1f, 0.9f, (byte) 7); var slice = in.slice("values", 0, in.length()); return new OffHeapQuantizedByteVectorValues.DenseOffHeapVectorValues(dims, size, sq, false, sim, null, slice); } - RandomVectorScorerSupplier luceneScoreSupplier(RandomAccessQuantizedByteVectorValues values, VectorSimilarityFunction sim) - throws IOException { + RandomVectorScorerSupplier luceneScoreSupplier(QuantizedByteVectorValues values, VectorSimilarityFunction sim) throws IOException { return new 
Lucene99ScalarQuantizedVectorScorer(null).getRandomVectorScorerSupplier(sim, values); } - RandomVectorScorer luceneScorer(RandomAccessQuantizedByteVectorValues values, VectorSimilarityFunction sim, float[] queryVec) - throws IOException { + RandomVectorScorer luceneScorer(QuantizedByteVectorValues values, VectorSimilarityFunction sim, float[] queryVec) throws IOException { return new Lucene99ScalarQuantizedVectorScorer(null).getRandomVectorScorer(sim, values, queryVec); } diff --git a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt index 58ccf69406ff2..5388f942be8d7 100644 --- a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt +++ b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt @@ -59,10 +59,6 @@ org.apache.lucene.util.Version#parseLeniently(java.lang.String) org.apache.lucene.index.NoMergePolicy#INSTANCE @ explicit use of NoMergePolicy risks forgetting to configure NoMergeScheduler; use org.elasticsearch.common.lucene.Lucene#indexWriterConfigWithNoMerging() instead. 
-@defaultMessage Spawns a new thread which is solely under lucenes control use ThreadPool#relativeTimeInMillis instead -org.apache.lucene.search.TimeLimitingCollector#getGlobalTimerThread() -org.apache.lucene.search.TimeLimitingCollector#getGlobalCounter() - @defaultMessage Don't interrupt threads use FutureUtils#cancel(Future) instead java.util.concurrent.Future#cancel(boolean) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 169c187ef115a..6bc3c2ad4d253 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 9.12.0 +lucene = 10.0.0 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/distribution/src/config/jvm.options b/distribution/src/config/jvm.options index a523c3ec85ba1..f55d90933ed61 100644 --- a/distribution/src/config/jvm.options +++ b/distribution/src/config/jvm.options @@ -62,6 +62,9 @@ 23:-XX:CompileCommand=dontinline,java/lang/invoke/MethodHandle.setAsTypeCache 23:-XX:CompileCommand=dontinline,java/lang/invoke/MethodHandle.asTypeUncached +# Lucene 10: apply MADV_NORMAL advice to enable more aggressive readahead +-Dorg.apache.lucene.store.defaultReadAdvice=normal + ## heap dumps # generate a heap dump when an allocation from the Java heap fails; heap dumps diff --git a/docs/Versions.asciidoc b/docs/Versions.asciidoc index b65b974cd6b69..bdb0704fcd880 100644 --- a/docs/Versions.asciidoc +++ b/docs/Versions.asciidoc @@ -1,8 +1,8 @@ include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -:lucene_version: 9.12.0 -:lucene_version_path: 9_12_0 +:lucene_version: 10.0.0 +:lucene_version_path: 10_0_0 :jdk: 11.0.2 :jdk_major: 11 :build_type: tar diff --git a/docs/changelog/113482.yaml b/docs/changelog/113482.yaml new file mode 100644 index 0000000000000..cb5823f0ccfcc --- /dev/null +++ b/docs/changelog/113482.yaml @@ -0,0 +1,27 @@ +pr: 113482 +summary: The 
'persian' analyzer has stemmer by default +area: Analysis +type: breaking +issues: +- 113050 +breaking: + title: The 'persian' analyzer has stemmer by default + area: Analysis + details: >- + Lucene 10 has added a final stemming step to its PersianAnalyzer that Elasticsearch + exposes as 'persian' analyzer. Existing indices will keep the old + non-stemming behaviour while new indices will see the updated behaviour with + added stemming. + Users that wish to maintain the non-stemming behaviour need to define their + own analyzer as outlined in + https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer. + Users that wish to use the new stemming behaviour for existing indices will + have to reindex their data. + impact: >- + Indexing with the 'persian' analyzer will produce slightly different tokens. + Users should check if this impacts their search results. If they wish to + maintain the legacy non-stemming behaviour they can define their own + analyzer equivalent as explained in + https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer. + notable: false + diff --git a/docs/changelog/113614.yaml b/docs/changelog/113614.yaml new file mode 100644 index 0000000000000..bd9dcb3e38772 --- /dev/null +++ b/docs/changelog/113614.yaml @@ -0,0 +1,18 @@ +pr: 113614 +summary: The 'german2' stemmer is now an alias for the 'german' snowball stemmer +area: Analysis +type: breaking +issues: [] +breaking: + title: The "german2" snowball stemmer is now an alias for the "german" stemmer + area: Analysis + details: >- + Lucene 10 has merged the improved "german2" snowball language stemmer with the + "german" stemmer. For Elasticsearch, "german2" is now a deprecated alias for + "german". This may results in slightly different tokens being generated for + terms with umlaut substitution (like "ue" for "ü" etc...) 
+ impact: >- + Replace usages of "german2" with "german" in analysis configuration. Old + indices that use the "german" stemmer should be reindexed if possible. + notable: false + diff --git a/docs/changelog/114124.yaml b/docs/changelog/114124.yaml new file mode 100644 index 0000000000000..c812c6a468902 --- /dev/null +++ b/docs/changelog/114124.yaml @@ -0,0 +1,18 @@ +pr: 114124 +summary: The Korean dictionary for Nori has been updated +area: Analysis +type: breaking +issues: [] +breaking: + title: The Korean dictionary for Nori has been updated + area: Analysis + details: >- + Lucene 10 ships with an updated Korean dictionary (mecab-ko-dic-2.1.1). + For details see https://github.com/apache/lucene/issues/11452. Users + experiencing changes in search behaviour on existing data are advised to + reindex. + impact: >- + The change is small and should generally provide better analysis results. + Existing indices for full-text use cases should be reindexed though. + notable: false + diff --git a/docs/changelog/114146.yaml b/docs/changelog/114146.yaml new file mode 100644 index 0000000000000..be2096a64105c --- /dev/null +++ b/docs/changelog/114146.yaml @@ -0,0 +1,20 @@ +pr: 114146 +summary: Snowball stemmers have been upgraded +area: Analysis +type: breaking +issues: [] +breaking: + title: Snowball stemmers have been upgraded + area: Analysis + details: >- + Lucene 10 ships with an upgrade of its Snowball stemmers. + For details see https://github.com/apache/lucene/issues/13209. Users using + Snowball stemmers that are experiencing changes in search behaviour on + existing data are advised to reindex. + impact: >- + The upgrade should generally provide improved stemming results. Small changes + in token analysis can lead to mismatches with previously index data, so + existing indices using Snowball stemmers as part of their analysis chain + should be reindexed. 
+ notable: false + diff --git a/docs/changelog/114741.yaml b/docs/changelog/114741.yaml new file mode 100644 index 0000000000000..ae45c183cddf9 --- /dev/null +++ b/docs/changelog/114741.yaml @@ -0,0 +1,5 @@ +pr: 114741 +summary: Upgrade to Lucene 10 +area: Search +type: upgrade +issues: [] diff --git a/docs/plugins/analysis-nori.asciidoc b/docs/plugins/analysis-nori.asciidoc index 02980a4ed8a8c..0d3e76f71d238 100644 --- a/docs/plugins/analysis-nori.asciidoc +++ b/docs/plugins/analysis-nori.asciidoc @@ -244,11 +244,11 @@ Which responds with: "end_offset": 3, "type": "word", "position": 1, - "leftPOS": "J(Ending Particle)", + "leftPOS": "JKS(Subject case marker)", "morphemes": null, "posType": "MORPHEME", "reading": null, - "rightPOS": "J(Ending Particle)" + "rightPOS": "JKS(Subject case marker)" }, { "token": "깊", @@ -268,11 +268,11 @@ Which responds with: "end_offset": 6, "type": "word", "position": 3, - "leftPOS": "E(Verbal endings)", + "leftPOS": "ETM(Adnominal form transformative ending)", "morphemes": null, "posType": "MORPHEME", "reading": null, - "rightPOS": "E(Verbal endings)" + "rightPOS": "ETM(Adnominal form transformative ending)" }, { "token": "나무", @@ -292,11 +292,11 @@ Which responds with: "end_offset": 10, "type": "word", "position": 5, - "leftPOS": "J(Ending Particle)", + "leftPOS": "JX(Auxiliary postpositional particle)", "morphemes": null, "posType": "MORPHEME", "reading": null, - "rightPOS": "J(Ending Particle)" + "rightPOS": "JX(Auxiliary postpositional particle)" } ] }, diff --git a/docs/reference/analysis/analyzers/lang-analyzer.asciidoc b/docs/reference/analysis/analyzers/lang-analyzer.asciidoc index 5273537389e3d..881970787f5a6 100644 --- a/docs/reference/analysis/analyzers/lang-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/lang-analyzer.asciidoc @@ -1430,7 +1430,8 @@ PUT /persian_example "decimal_digit", "arabic_normalization", "persian_normalization", - "persian_stop" + "persian_stop", + "persian_stem" ] } } diff --git 
a/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc index 4cd088935af19..d9e2120afe6d1 100644 --- a/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc @@ -173,7 +173,6 @@ http://bvg.udc.es/recursos_lingua/stemming.jsp[`minimal_galician`] (Plural step German:: https://dl.acm.org/citation.cfm?id=1141523[*`light_german`*], https://snowballstem.org/algorithms/german/stemmer.html[`german`], -https://snowballstem.org/algorithms/german2/stemmer.html[`german2`], http://members.unine.ch/jacques.savoy/clef/morpho.pdf[`minimal_german`] Greek:: diff --git a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc index 2cf01b77d57ab..5f98807387280 100644 --- a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc @@ -40,14 +40,14 @@ POST _analyze "start_offset": 0, "end_offset": 8, "type": "word", - "position": 0 + "position": 1 }, { "token": "/one/two/three", "start_offset": 0, "end_offset": 14, "type": "word", - "position": 0 + "position": 2 } ] } @@ -144,14 +144,14 @@ POST my-index-000001/_analyze "start_offset": 7, "end_offset": 18, "type": "word", - "position": 0 + "position": 1 }, { "token": "/three/four/five", "start_offset": 7, "end_offset": 23, "type": "word", - "position": 0 + "position": 2 } ] } @@ -178,14 +178,14 @@ If we were to set `reverse` to `true`, it would produce the following: [[analysis-pathhierarchy-tokenizer-detailed-examples]] === Detailed examples -A common use-case for the `path_hierarchy` tokenizer is filtering results by -file paths. 
If indexing a file path along with the data, the use of the -`path_hierarchy` tokenizer to analyze the path allows filtering the results +A common use-case for the `path_hierarchy` tokenizer is filtering results by +file paths. If indexing a file path along with the data, the use of the +`path_hierarchy` tokenizer to analyze the path allows filtering the results by different parts of the file path string. This example configures an index to have two custom analyzers and applies -those analyzers to multifields of the `file_path` text field that will +those analyzers to multifields of the `file_path` text field that will store filenames. One of the two analyzers uses reverse tokenization. Some sample documents are then indexed to represent some file paths for photos inside photo folders of two different users. @@ -264,8 +264,8 @@ POST file-path-test/_doc/5 -------------------------------------------------- -A search for a particular file path string against the text field matches all -the example documents, with Bob's documents ranking highest due to `bob` also +A search for a particular file path string against the text field matches all +the example documents, with Bob's documents ranking highest due to `bob` also being one of the terms created by the standard analyzer boosting relevance for Bob's documents. @@ -301,7 +301,7 @@ GET file-path-test/_search With the reverse parameter for this tokenizer, it's also possible to match from the other end of the file path, such as individual file names or a deep level subdirectory. The following example shows a search for all files named -`my_photo1.jpg` within any directory via the `file_path.tree_reversed` field +`my_photo1.jpg` within any directory via the `file_path.tree_reversed` field configured to use the reverse parameter in the mapping. 
@@ -342,7 +342,7 @@ POST file-path-test/_analyze It's also useful to be able to filter with file paths when combined with other -types of searches, such as this example looking for any files paths with `16` +types of searches, such as this example looking for any files paths with `16` that also must be in Alice's photo directory. [source,console] diff --git a/docs/reference/search/profile.asciidoc b/docs/reference/search/profile.asciidoc index 3fed14231808c..5f1a0ccfdd6b4 100644 --- a/docs/reference/search/profile.asciidoc +++ b/docs/reference/search/profile.asciidoc @@ -1298,7 +1298,7 @@ One of the `dfs.knn` sections for a shard looks like the following: "query" : [ { "type" : "DocAndScoreQuery", - "description" : "DocAndScore[100]", + "description" : "DocAndScoreQuery[0,...][0.008961825,...],0.008961825", "time_in_nanos" : 444414, "breakdown" : { "set_min_competitive_score_count" : 0, diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 0156f13b4b05d..4d9b96184d07a 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2824,129 +2824,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactory.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactory.java index e2aea6b3ebd9f..4ed60b2f5e8b2 100644 --- a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactory.java +++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactory.java @@ -13,7 +13,7 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import 
org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import java.util.Optional; @@ -39,7 +39,7 @@ static Optional instance() { Optional getInt7SQVectorScorerSupplier( VectorSimilarityType similarityType, IndexInput input, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float scoreCorrectionConstant ); @@ -52,9 +52,5 @@ Optional getInt7SQVectorScorerSupplier( * @param queryVector the query vector * @return an optional containing the vector scorer, or empty */ - Optional getInt7SQVectorScorer( - VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, - float[] queryVector - ); + Optional getInt7SQVectorScorer(VectorSimilarityFunction sim, QuantizedByteVectorValues values, float[] queryVector); } diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java index a22d787980252..6248902c32e7a 100644 --- a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java +++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java @@ -13,7 +13,7 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import java.util.Optional; @@ -25,7 +25,7 @@ final class VectorScorerFactoryImpl implements VectorScorerFactory { public Optional getInt7SQVectorScorerSupplier( VectorSimilarityType similarityType, IndexInput input, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float scoreCorrectionConstant ) { throw new UnsupportedOperationException("should not reach here"); @@ -34,7 +34,7 
@@ public Optional getInt7SQVectorScorerSupplier( @Override public Optional getInt7SQVectorScorer( VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float[] queryVector ) { throw new UnsupportedOperationException("should not reach here"); diff --git a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java index a65fe582087d9..a863d9e3448ca 100644 --- a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java +++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java @@ -15,7 +15,7 @@ import org.apache.lucene.store.MemorySegmentAccessInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.elasticsearch.nativeaccess.NativeAccess; import org.elasticsearch.simdvec.internal.Int7SQVectorScorer; import org.elasticsearch.simdvec.internal.Int7SQVectorScorerSupplier.DotProductSupplier; @@ -38,7 +38,7 @@ private VectorScorerFactoryImpl() {} public Optional getInt7SQVectorScorerSupplier( VectorSimilarityType similarityType, IndexInput input, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float scoreCorrectionConstant ) { input = FilterIndexInput.unwrapOnlyTest(input); @@ -57,7 +57,7 @@ public Optional getInt7SQVectorScorerSupplier( @Override public Optional getInt7SQVectorScorer( VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float[] queryVector ) { return Int7SQVectorScorer.create(sim, values, queryVector); diff --git a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java 
b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java index 0b41436ce2242..e02df124ad0f0 100644 --- a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java +++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java @@ -11,18 +11,14 @@ import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.util.hnsw.RandomVectorScorer; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import java.util.Optional; public final class Int7SQVectorScorer { // Unconditionally returns an empty optional on <= JDK 21, since the scorer is only supported on JDK 22+ - public static Optional create( - VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, - float[] queryVector - ) { + public static Optional create(VectorSimilarityFunction sim, QuantizedByteVectorValues values, float[] queryVector) { return Optional.empty(); } diff --git a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorerSupplier.java b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorerSupplier.java index f6d874cd3e728..198e10406056e 100644 --- a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorerSupplier.java +++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorerSupplier.java @@ -12,7 +12,7 @@ import org.apache.lucene.store.MemorySegmentAccessInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizedVectorSimilarity; import java.io.IOException; @@ -31,12 +31,12 @@ public 
abstract sealed class Int7SQVectorScorerSupplier implements RandomVectorS final int maxOrd; final float scoreCorrectionConstant; final MemorySegmentAccessInput input; - final RandomAccessQuantizedByteVectorValues values; // to support ordToDoc/getAcceptOrds + final QuantizedByteVectorValues values; // to support ordToDoc/getAcceptOrds final ScalarQuantizedVectorSimilarity fallbackScorer; protected Int7SQVectorScorerSupplier( MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float scoreCorrectionConstant, ScalarQuantizedVectorSimilarity fallbackScorer ) { @@ -104,11 +104,7 @@ public float score(int node) throws IOException { public static final class EuclideanSupplier extends Int7SQVectorScorerSupplier { - public EuclideanSupplier( - MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, - float scoreCorrectionConstant - ) { + public EuclideanSupplier(MemorySegmentAccessInput input, QuantizedByteVectorValues values, float scoreCorrectionConstant) { super(input, values, scoreCorrectionConstant, fromVectorSimilarity(EUCLIDEAN, scoreCorrectionConstant, BITS)); } @@ -127,11 +123,7 @@ public EuclideanSupplier copy() { public static final class DotProductSupplier extends Int7SQVectorScorerSupplier { - public DotProductSupplier( - MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, - float scoreCorrectionConstant - ) { + public DotProductSupplier(MemorySegmentAccessInput input, QuantizedByteVectorValues values, float scoreCorrectionConstant) { super(input, values, scoreCorrectionConstant, fromVectorSimilarity(DOT_PRODUCT, scoreCorrectionConstant, BITS)); } @@ -151,11 +143,7 @@ public DotProductSupplier copy() { public static final class MaxInnerProductSupplier extends Int7SQVectorScorerSupplier { - public MaxInnerProductSupplier( - MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, - float scoreCorrectionConstant - ) { + public 
MaxInnerProductSupplier(MemorySegmentAccessInput input, QuantizedByteVectorValues values, float scoreCorrectionConstant) { super(input, values, scoreCorrectionConstant, fromVectorSimilarity(MAXIMUM_INNER_PRODUCT, scoreCorrectionConstant, BITS)); } diff --git a/libs/simdvec/src/main22/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java b/libs/simdvec/src/main22/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java index c9659ea1af9a8..3d0e1e71a3744 100644 --- a/libs/simdvec/src/main22/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java +++ b/libs/simdvec/src/main22/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java @@ -15,7 +15,7 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.MemorySegmentAccessInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizer; import java.io.IOException; @@ -35,11 +35,7 @@ public abstract sealed class Int7SQVectorScorer extends RandomVectorScorer.Abstr byte[] scratch; /** Return an optional whose value, if present, is the scorer. Otherwise, an empty optional is returned. 
*/ - public static Optional create( - VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, - float[] queryVector - ) { + public static Optional create(VectorSimilarityFunction sim, QuantizedByteVectorValues values, float[] queryVector) { checkDimensions(queryVector.length, values.dimension()); var input = values.getSlice(); if (input == null) { @@ -63,12 +59,7 @@ public static Optional create( }; } - Int7SQVectorScorer( - MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, - byte[] queryVector, - float queryCorrection - ) { + Int7SQVectorScorer(MemorySegmentAccessInput input, QuantizedByteVectorValues values, byte[] queryVector, float queryCorrection) { super(values); this.input = input; assert queryVector.length == values.getVectorByteLength(); @@ -105,7 +96,7 @@ final void checkOrdinal(int ord) { } public static final class DotProductScorer extends Int7SQVectorScorer { - public DotProductScorer(MemorySegmentAccessInput in, RandomAccessQuantizedByteVectorValues values, byte[] query, float correction) { + public DotProductScorer(MemorySegmentAccessInput in, QuantizedByteVectorValues values, byte[] query, float correction) { super(in, values, query, correction); } @@ -122,7 +113,7 @@ public float score(int node) throws IOException { } public static final class EuclideanScorer extends Int7SQVectorScorer { - public EuclideanScorer(MemorySegmentAccessInput in, RandomAccessQuantizedByteVectorValues values, byte[] query, float correction) { + public EuclideanScorer(MemorySegmentAccessInput in, QuantizedByteVectorValues values, byte[] query, float correction) { super(in, values, query, correction); } @@ -136,7 +127,7 @@ public float score(int node) throws IOException { } public static final class MaxInnerProductScorer extends Int7SQVectorScorer { - public MaxInnerProductScorer(MemorySegmentAccessInput in, RandomAccessQuantizedByteVectorValues values, byte[] query, float corr) { + public 
MaxInnerProductScorer(MemorySegmentAccessInput in, QuantizedByteVectorValues values, byte[] query, float corr) { super(in, values, query, corr); } diff --git a/libs/simdvec/src/test/java/org/elasticsearch/simdvec/VectorScorerFactoryTests.java b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/VectorScorerFactoryTests.java index db57dc936e794..0f967127f6f2c 100644 --- a/libs/simdvec/src/test/java/org/elasticsearch/simdvec/VectorScorerFactoryTests.java +++ b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/VectorScorerFactoryTests.java @@ -21,7 +21,7 @@ import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizer; import java.io.IOException; @@ -431,14 +431,13 @@ public Optional call() { } } - RandomAccessQuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException { + QuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException { var sq = new ScalarQuantizer(0.1f, 0.9f, (byte) 7); var slice = in.slice("values", 0, in.length()); return new OffHeapQuantizedByteVectorValues.DenseOffHeapVectorValues(dims, size, sq, false, sim, null, slice); } - RandomVectorScorerSupplier luceneScoreSupplier(RandomAccessQuantizedByteVectorValues values, VectorSimilarityFunction sim) - throws IOException { + RandomVectorScorerSupplier luceneScoreSupplier(QuantizedByteVectorValues values, VectorSimilarityFunction sim) throws IOException { return new Lucene99ScalarQuantizedVectorScorer(null).getRandomVectorScorerSupplier(sim, values); } diff --git a/modules/analysis-common/build.gradle b/modules/analysis-common/build.gradle index 
b16c6eaaaa1d1..f4f7e787d2b7b 100644 --- a/modules/analysis-common/build.gradle +++ b/modules/analysis-common/build.gradle @@ -33,3 +33,7 @@ dependencies { artifacts { restTests(new File(projectDir, "src/yamlRestTest/resources/rest-api-spec/test")) } + +tasks.named("yamlRestCompatTestTransform").configure { task -> + task.replaceValueInMatch("tokens.0.token", "absenț", "romanian") +} diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyRomanianStemmer.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyRomanianStemmer.java new file mode 100644 index 0000000000000..0eb8d916307ae --- /dev/null +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyRomanianStemmer.java @@ -0,0 +1,741 @@ +/* + * @notice + * Generated by Snowball 2.0.0 - https://snowballstem.org/ + * + * Modifications copyright (C) 2024 Elasticsearch B.V. + */ + +package org.elasticsearch.analysis.common; + +import org.tartarus.snowball.Among; + +/** +* This class implements the stemming algorithm defined by a snowball script. 
+* NOTE: This is the RomanianStemmer used in Lucene 9 and should only be used for backwards compatibility +*/ +@SuppressWarnings("checkstyle:DescendantToken") +class LegacyRomanianStemmer extends org.tartarus.snowball.SnowballStemmer { + + private static final java.lang.invoke.MethodHandles.Lookup methodObject = java.lang.invoke.MethodHandles.lookup(); + + private static final Among a_0[] = { new Among("", -1, 3), new Among("I", 0, 1), new Among("U", 0, 2) }; + + private static final Among a_1[] = { + new Among("ea", -1, 3), + new Among("a\u0163ia", -1, 7), + new Among("aua", -1, 2), + new Among("iua", -1, 4), + new Among("a\u0163ie", -1, 7), + new Among("ele", -1, 3), + new Among("ile", -1, 5), + new Among("iile", 6, 4), + new Among("iei", -1, 4), + new Among("atei", -1, 6), + new Among("ii", -1, 4), + new Among("ului", -1, 1), + new Among("ul", -1, 1), + new Among("elor", -1, 3), + new Among("ilor", -1, 4), + new Among("iilor", 14, 4) }; + + private static final Among a_2[] = { + new Among("icala", -1, 4), + new Among("iciva", -1, 4), + new Among("ativa", -1, 5), + new Among("itiva", -1, 6), + new Among("icale", -1, 4), + new Among("a\u0163iune", -1, 5), + new Among("i\u0163iune", -1, 6), + new Among("atoare", -1, 5), + new Among("itoare", -1, 6), + new Among("\u0103toare", -1, 5), + new Among("icitate", -1, 4), + new Among("abilitate", -1, 1), + new Among("ibilitate", -1, 2), + new Among("ivitate", -1, 3), + new Among("icive", -1, 4), + new Among("ative", -1, 5), + new Among("itive", -1, 6), + new Among("icali", -1, 4), + new Among("atori", -1, 5), + new Among("icatori", 18, 4), + new Among("itori", -1, 6), + new Among("\u0103tori", -1, 5), + new Among("icitati", -1, 4), + new Among("abilitati", -1, 1), + new Among("ivitati", -1, 3), + new Among("icivi", -1, 4), + new Among("ativi", -1, 5), + new Among("itivi", -1, 6), + new Among("icit\u0103i", -1, 4), + new Among("abilit\u0103i", -1, 1), + new Among("ivit\u0103i", -1, 3), + new Among("icit\u0103\u0163i", -1, 
4), + new Among("abilit\u0103\u0163i", -1, 1), + new Among("ivit\u0103\u0163i", -1, 3), + new Among("ical", -1, 4), + new Among("ator", -1, 5), + new Among("icator", 35, 4), + new Among("itor", -1, 6), + new Among("\u0103tor", -1, 5), + new Among("iciv", -1, 4), + new Among("ativ", -1, 5), + new Among("itiv", -1, 6), + new Among("ical\u0103", -1, 4), + new Among("iciv\u0103", -1, 4), + new Among("ativ\u0103", -1, 5), + new Among("itiv\u0103", -1, 6) }; + + private static final Among a_3[] = { + new Among("ica", -1, 1), + new Among("abila", -1, 1), + new Among("ibila", -1, 1), + new Among("oasa", -1, 1), + new Among("ata", -1, 1), + new Among("ita", -1, 1), + new Among("anta", -1, 1), + new Among("ista", -1, 3), + new Among("uta", -1, 1), + new Among("iva", -1, 1), + new Among("ic", -1, 1), + new Among("ice", -1, 1), + new Among("abile", -1, 1), + new Among("ibile", -1, 1), + new Among("isme", -1, 3), + new Among("iune", -1, 2), + new Among("oase", -1, 1), + new Among("ate", -1, 1), + new Among("itate", 17, 1), + new Among("ite", -1, 1), + new Among("ante", -1, 1), + new Among("iste", -1, 3), + new Among("ute", -1, 1), + new Among("ive", -1, 1), + new Among("ici", -1, 1), + new Among("abili", -1, 1), + new Among("ibili", -1, 1), + new Among("iuni", -1, 2), + new Among("atori", -1, 1), + new Among("osi", -1, 1), + new Among("ati", -1, 1), + new Among("itati", 30, 1), + new Among("iti", -1, 1), + new Among("anti", -1, 1), + new Among("isti", -1, 3), + new Among("uti", -1, 1), + new Among("i\u015Fti", -1, 3), + new Among("ivi", -1, 1), + new Among("it\u0103i", -1, 1), + new Among("o\u015Fi", -1, 1), + new Among("it\u0103\u0163i", -1, 1), + new Among("abil", -1, 1), + new Among("ibil", -1, 1), + new Among("ism", -1, 3), + new Among("ator", -1, 1), + new Among("os", -1, 1), + new Among("at", -1, 1), + new Among("it", -1, 1), + new Among("ant", -1, 1), + new Among("ist", -1, 3), + new Among("ut", -1, 1), + new Among("iv", -1, 1), + new Among("ic\u0103", -1, 1), + new 
Among("abil\u0103", -1, 1), + new Among("ibil\u0103", -1, 1), + new Among("oas\u0103", -1, 1), + new Among("at\u0103", -1, 1), + new Among("it\u0103", -1, 1), + new Among("ant\u0103", -1, 1), + new Among("ist\u0103", -1, 3), + new Among("ut\u0103", -1, 1), + new Among("iv\u0103", -1, 1) }; + + private static final Among a_4[] = { + new Among("ea", -1, 1), + new Among("ia", -1, 1), + new Among("esc", -1, 1), + new Among("\u0103sc", -1, 1), + new Among("ind", -1, 1), + new Among("\u00E2nd", -1, 1), + new Among("are", -1, 1), + new Among("ere", -1, 1), + new Among("ire", -1, 1), + new Among("\u00E2re", -1, 1), + new Among("se", -1, 2), + new Among("ase", 10, 1), + new Among("sese", 10, 2), + new Among("ise", 10, 1), + new Among("use", 10, 1), + new Among("\u00E2se", 10, 1), + new Among("e\u015Fte", -1, 1), + new Among("\u0103\u015Fte", -1, 1), + new Among("eze", -1, 1), + new Among("ai", -1, 1), + new Among("eai", 19, 1), + new Among("iai", 19, 1), + new Among("sei", -1, 2), + new Among("e\u015Fti", -1, 1), + new Among("\u0103\u015Fti", -1, 1), + new Among("ui", -1, 1), + new Among("ezi", -1, 1), + new Among("\u00E2i", -1, 1), + new Among("a\u015Fi", -1, 1), + new Among("se\u015Fi", -1, 2), + new Among("ase\u015Fi", 29, 1), + new Among("sese\u015Fi", 29, 2), + new Among("ise\u015Fi", 29, 1), + new Among("use\u015Fi", 29, 1), + new Among("\u00E2se\u015Fi", 29, 1), + new Among("i\u015Fi", -1, 1), + new Among("u\u015Fi", -1, 1), + new Among("\u00E2\u015Fi", -1, 1), + new Among("a\u0163i", -1, 2), + new Among("ea\u0163i", 38, 1), + new Among("ia\u0163i", 38, 1), + new Among("e\u0163i", -1, 2), + new Among("i\u0163i", -1, 2), + new Among("\u00E2\u0163i", -1, 2), + new Among("ar\u0103\u0163i", -1, 1), + new Among("ser\u0103\u0163i", -1, 2), + new Among("aser\u0103\u0163i", 45, 1), + new Among("seser\u0103\u0163i", 45, 2), + new Among("iser\u0103\u0163i", 45, 1), + new Among("user\u0103\u0163i", 45, 1), + new Among("\u00E2ser\u0103\u0163i", 45, 1), + new 
Among("ir\u0103\u0163i", -1, 1), + new Among("ur\u0103\u0163i", -1, 1), + new Among("\u00E2r\u0103\u0163i", -1, 1), + new Among("am", -1, 1), + new Among("eam", 54, 1), + new Among("iam", 54, 1), + new Among("em", -1, 2), + new Among("asem", 57, 1), + new Among("sesem", 57, 2), + new Among("isem", 57, 1), + new Among("usem", 57, 1), + new Among("\u00E2sem", 57, 1), + new Among("im", -1, 2), + new Among("\u00E2m", -1, 2), + new Among("\u0103m", -1, 2), + new Among("ar\u0103m", 65, 1), + new Among("ser\u0103m", 65, 2), + new Among("aser\u0103m", 67, 1), + new Among("seser\u0103m", 67, 2), + new Among("iser\u0103m", 67, 1), + new Among("user\u0103m", 67, 1), + new Among("\u00E2ser\u0103m", 67, 1), + new Among("ir\u0103m", 65, 1), + new Among("ur\u0103m", 65, 1), + new Among("\u00E2r\u0103m", 65, 1), + new Among("au", -1, 1), + new Among("eau", 76, 1), + new Among("iau", 76, 1), + new Among("indu", -1, 1), + new Among("\u00E2ndu", -1, 1), + new Among("ez", -1, 1), + new Among("easc\u0103", -1, 1), + new Among("ar\u0103", -1, 1), + new Among("ser\u0103", -1, 2), + new Among("aser\u0103", 84, 1), + new Among("seser\u0103", 84, 2), + new Among("iser\u0103", 84, 1), + new Among("user\u0103", 84, 1), + new Among("\u00E2ser\u0103", 84, 1), + new Among("ir\u0103", -1, 1), + new Among("ur\u0103", -1, 1), + new Among("\u00E2r\u0103", -1, 1), + new Among("eaz\u0103", -1, 1) }; + + private static final Among a_5[] = { + new Among("a", -1, 1), + new Among("e", -1, 1), + new Among("ie", 1, 1), + new Among("i", -1, 1), + new Among("\u0103", -1, 1) }; + + private static final char g_v[] = { 17, 65, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 32, 0, 0, 4 }; + + private boolean B_standard_suffix_removed; + private int I_p2; + private int I_p1; + private int I_pV; + + private boolean r_prelude() { + while (true) { + int v_1 = cursor; + lab0: { + golab1: while (true) { + int v_2 = cursor; + lab2: { + if (!(in_grouping(g_v, 97, 259))) { + break lab2; + } + bra = cursor; + lab3: { + int 
v_3 = cursor; + lab4: { + if (!(eq_s("u"))) { + break lab4; + } + ket = cursor; + if (!(in_grouping(g_v, 97, 259))) { + break lab4; + } + slice_from("U"); + break lab3; + } + cursor = v_3; + if (!(eq_s("i"))) { + break lab2; + } + ket = cursor; + if (!(in_grouping(g_v, 97, 259))) { + break lab2; + } + slice_from("I"); + } + cursor = v_2; + break golab1; + } + cursor = v_2; + if (cursor >= limit) { + break lab0; + } + cursor++; + } + continue; + } + cursor = v_1; + break; + } + return true; + } + + private boolean r_mark_regions() { + I_pV = limit; + I_p1 = limit; + I_p2 = limit; + int v_1 = cursor; + lab0: { + lab1: { + int v_2 = cursor; + lab2: { + if (!(in_grouping(g_v, 97, 259))) { + break lab2; + } + lab3: { + int v_3 = cursor; + lab4: { + if (!(out_grouping(g_v, 97, 259))) { + break lab4; + } + golab5: while (true) { + lab6: { + if (!(in_grouping(g_v, 97, 259))) { + break lab6; + } + break golab5; + } + if (cursor >= limit) { + break lab4; + } + cursor++; + } + break lab3; + } + cursor = v_3; + if (!(in_grouping(g_v, 97, 259))) { + break lab2; + } + golab7: while (true) { + lab8: { + if (!(out_grouping(g_v, 97, 259))) { + break lab8; + } + break golab7; + } + if (cursor >= limit) { + break lab2; + } + cursor++; + } + } + break lab1; + } + cursor = v_2; + if (!(out_grouping(g_v, 97, 259))) { + break lab0; + } + lab9: { + int v_6 = cursor; + lab10: { + if (!(out_grouping(g_v, 97, 259))) { + break lab10; + } + golab11: while (true) { + lab12: { + if (!(in_grouping(g_v, 97, 259))) { + break lab12; + } + break golab11; + } + if (cursor >= limit) { + break lab10; + } + cursor++; + } + break lab9; + } + cursor = v_6; + if (!(in_grouping(g_v, 97, 259))) { + break lab0; + } + if (cursor >= limit) { + break lab0; + } + cursor++; + } + } + I_pV = cursor; + } + cursor = v_1; + int v_8 = cursor; + lab13: { + golab14: while (true) { + lab15: { + if (!(in_grouping(g_v, 97, 259))) { + break lab15; + } + break golab14; + } + if (cursor >= limit) { + break lab13; + } + 
cursor++; + } + golab16: while (true) { + lab17: { + if (!(out_grouping(g_v, 97, 259))) { + break lab17; + } + break golab16; + } + if (cursor >= limit) { + break lab13; + } + cursor++; + } + I_p1 = cursor; + golab18: while (true) { + lab19: { + if (!(in_grouping(g_v, 97, 259))) { + break lab19; + } + break golab18; + } + if (cursor >= limit) { + break lab13; + } + cursor++; + } + golab20: while (true) { + lab21: { + if (!(out_grouping(g_v, 97, 259))) { + break lab21; + } + break golab20; + } + if (cursor >= limit) { + break lab13; + } + cursor++; + } + I_p2 = cursor; + } + cursor = v_8; + return true; + } + + private boolean r_postlude() { + int among_var; + while (true) { + int v_1 = cursor; + lab0: { + bra = cursor; + among_var = find_among(a_0); + if (among_var == 0) { + break lab0; + } + ket = cursor; + switch (among_var) { + case 1: + slice_from("i"); + break; + case 2: + slice_from("u"); + break; + case 3: + if (cursor >= limit) { + break lab0; + } + cursor++; + break; + } + continue; + } + cursor = v_1; + break; + } + return true; + } + + private boolean r_RV() { + if (!(I_pV <= cursor)) { + return false; + } + return true; + } + + private boolean r_R1() { + if (!(I_p1 <= cursor)) { + return false; + } + return true; + } + + private boolean r_R2() { + if (!(I_p2 <= cursor)) { + return false; + } + return true; + } + + private boolean r_step_0() { + int among_var; + ket = cursor; + among_var = find_among_b(a_1); + if (among_var == 0) { + return false; + } + bra = cursor; + if (!r_R1()) { + return false; + } + switch (among_var) { + case 1: + slice_del(); + break; + case 2: + slice_from("a"); + break; + case 3: + slice_from("e"); + break; + case 4: + slice_from("i"); + break; + case 5: { + int v_1 = limit - cursor; + lab0: { + if (!(eq_s_b("ab"))) { + break lab0; + } + return false; + } + cursor = limit - v_1; + } + slice_from("i"); + break; + case 6: + slice_from("at"); + break; + case 7: + slice_from("a\u0163i"); + break; + } + return true; + } + + private 
boolean r_combo_suffix() { + int among_var; + int v_1 = limit - cursor; + ket = cursor; + among_var = find_among_b(a_2); + if (among_var == 0) { + return false; + } + bra = cursor; + if (!r_R1()) { + return false; + } + switch (among_var) { + case 1: + slice_from("abil"); + break; + case 2: + slice_from("ibil"); + break; + case 3: + slice_from("iv"); + break; + case 4: + slice_from("ic"); + break; + case 5: + slice_from("at"); + break; + case 6: + slice_from("it"); + break; + } + B_standard_suffix_removed = true; + cursor = limit - v_1; + return true; + } + + private boolean r_standard_suffix() { + int among_var; + B_standard_suffix_removed = false; + while (true) { + int v_1 = limit - cursor; + lab0: { + if (!r_combo_suffix()) { + break lab0; + } + continue; + } + cursor = limit - v_1; + break; + } + ket = cursor; + among_var = find_among_b(a_3); + if (among_var == 0) { + return false; + } + bra = cursor; + if (!r_R2()) { + return false; + } + switch (among_var) { + case 1: + slice_del(); + break; + case 2: + if (!(eq_s_b("\u0163"))) { + return false; + } + bra = cursor; + slice_from("t"); + break; + case 3: + slice_from("ist"); + break; + } + B_standard_suffix_removed = true; + return true; + } + + private boolean r_verb_suffix() { + int among_var; + if (cursor < I_pV) { + return false; + } + int v_2 = limit_backward; + limit_backward = I_pV; + ket = cursor; + among_var = find_among_b(a_4); + if (among_var == 0) { + limit_backward = v_2; + return false; + } + bra = cursor; + switch (among_var) { + case 1: + lab0: { + int v_3 = limit - cursor; + lab1: { + if (!(out_grouping_b(g_v, 97, 259))) { + break lab1; + } + break lab0; + } + cursor = limit - v_3; + if (!(eq_s_b("u"))) { + limit_backward = v_2; + return false; + } + } + slice_del(); + break; + case 2: + slice_del(); + break; + } + limit_backward = v_2; + return true; + } + + private boolean r_vowel_suffix() { + ket = cursor; + if (find_among_b(a_5) == 0) { + return false; + } + bra = cursor; + if (!r_RV()) { 
+ return false; + } + slice_del(); + return true; + } + + @Override + public boolean stem() { + int v_1 = cursor; + r_prelude(); + cursor = v_1; + r_mark_regions(); + limit_backward = cursor; + cursor = limit; + int v_3 = limit - cursor; + r_step_0(); + cursor = limit - v_3; + int v_4 = limit - cursor; + r_standard_suffix(); + cursor = limit - v_4; + int v_5 = limit - cursor; + lab0: { + lab1: { + int v_6 = limit - cursor; + lab2: { + if (!(B_standard_suffix_removed)) { + break lab2; + } + break lab1; + } + cursor = limit - v_6; + if (!r_verb_suffix()) { + break lab0; + } + } + } + cursor = limit - v_5; + int v_7 = limit - cursor; + r_vowel_suffix(); + cursor = limit - v_7; + cursor = limit_backward; + int v_8 = cursor; + r_postlude(); + cursor = v_8; + return true; + } + + @Override + public boolean equals(Object o) { + return o instanceof LegacyRomanianStemmer; + } + + @Override + public int hashCode() { + return LegacyRomanianStemmer.class.getName().hashCode(); + } +} diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java index 9ea3a9fa4eee9..917a45188123c 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java @@ -9,24 +9,72 @@ package org.elasticsearch.analysis.common; +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.LowerCaseFilter; +import org.apache.lucene.analysis.StopFilter; +import org.apache.lucene.analysis.StopwordAnalyzerBase; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.ar.ArabicNormalizationFilter; +import org.apache.lucene.analysis.core.DecimalDigitFilter; import org.apache.lucene.analysis.fa.PersianAnalyzer; 
+import org.apache.lucene.analysis.fa.PersianCharFilter; +import org.apache.lucene.analysis.fa.PersianNormalizationFilter; +import org.apache.lucene.analysis.standard.StandardTokenizer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; import org.elasticsearch.index.analysis.Analysis; -public class PersianAnalyzerProvider extends AbstractIndexAnalyzerProvider { +import java.io.Reader; - private final PersianAnalyzer analyzer; +public class PersianAnalyzerProvider extends AbstractIndexAnalyzerProvider { + + private final StopwordAnalyzerBase analyzer; PersianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(name, settings); - analyzer = new PersianAnalyzer(Analysis.parseStopWords(env, settings, PersianAnalyzer.getDefaultStopSet())); + if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.UPGRADE_TO_LUCENE_10_0_0)) { + // since Lucene 10 this analyzer contains stemming by default + analyzer = new PersianAnalyzer(Analysis.parseStopWords(env, settings, PersianAnalyzer.getDefaultStopSet())); + } else { + // for older index versions we need the old analyzer behaviour without stemming + analyzer = new StopwordAnalyzerBase(Analysis.parseStopWords(env, settings, PersianAnalyzer.getDefaultStopSet())) { + + protected Analyzer.TokenStreamComponents createComponents(String fieldName) { + final Tokenizer source = new StandardTokenizer(); + TokenStream result = new LowerCaseFilter(source); + result = new DecimalDigitFilter(result); + result = new ArabicNormalizationFilter(result); + /* additional persian-specific normalization */ + result = new PersianNormalizationFilter(result); + /* + * the order here is important: the stopword list is normalized with the + * above! 
+ */ + return new TokenStreamComponents(source, new StopFilter(result, stopwords)); + } + + protected TokenStream normalize(String fieldName, TokenStream in) { + TokenStream result = new LowerCaseFilter(in); + result = new DecimalDigitFilter(result); + result = new ArabicNormalizationFilter(result); + /* additional persian-specific normalization */ + result = new PersianNormalizationFilter(result); + return result; + } + + protected Reader initReader(String fieldName, Reader reader) { + return new PersianCharFilter(reader); + } + }; + } } @Override - public PersianAnalyzer get() { + public StopwordAnalyzerBase get() { return this.analyzer; } } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RomanianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RomanianAnalyzerProvider.java index cf33a38abd634..6c28df83a6d36 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RomanianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RomanianAnalyzerProvider.java @@ -9,28 +9,60 @@ package org.elasticsearch.analysis.common; +import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.CharArraySet; +import org.apache.lucene.analysis.StopwordAnalyzerBase; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.core.LowerCaseFilter; +import org.apache.lucene.analysis.core.StopFilter; +import org.apache.lucene.analysis.miscellaneous.SetKeywordMarkerFilter; import org.apache.lucene.analysis.ro.RomanianAnalyzer; +import org.apache.lucene.analysis.snowball.SnowballFilter; +import org.apache.lucene.analysis.standard.StandardTokenizer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersions; import 
org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; import org.elasticsearch.index.analysis.Analysis; -public class RomanianAnalyzerProvider extends AbstractIndexAnalyzerProvider { +public class RomanianAnalyzerProvider extends AbstractIndexAnalyzerProvider { - private final RomanianAnalyzer analyzer; + private final StopwordAnalyzerBase analyzer; RomanianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(name, settings); - analyzer = new RomanianAnalyzer( - Analysis.parseStopWords(env, settings, RomanianAnalyzer.getDefaultStopSet()), - Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET) - ); + CharArraySet stopwords = Analysis.parseStopWords(env, settings, RomanianAnalyzer.getDefaultStopSet()); + CharArraySet stemExclusionSet = Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET); + if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.UPGRADE_TO_LUCENE_10_0_0)) { + // since Lucene 10, this analyzer a modern unicode form and normalizes cedilla forms to forms with commas + analyzer = new RomanianAnalyzer(stopwords, stemExclusionSet); + } else { + // for older index versions we need the old behaviour without normalization + analyzer = new StopwordAnalyzerBase(Analysis.parseStopWords(env, settings, RomanianAnalyzer.getDefaultStopSet())) { + + protected Analyzer.TokenStreamComponents createComponents(String fieldName) { + final Tokenizer source = new StandardTokenizer(); + TokenStream result = new LowerCaseFilter(source); + result = new StopFilter(result, stopwords); + if (stemExclusionSet.isEmpty() == false) { + result = new SetKeywordMarkerFilter(result, stemExclusionSet); + } + result = new SnowballFilter(result, new LegacyRomanianStemmer()); + return new TokenStreamComponents(source, result); + } + + protected TokenStream normalize(String fieldName, TokenStream in) { + return new LowerCaseFilter(in); + } + }; + + } } @Override - public RomanianAnalyzer get() { + public 
StopwordAnalyzerBase get() { return this.analyzer; } } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java index 1c71c64311517..7548c8ad2b88b 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java @@ -9,6 +9,7 @@ package org.elasticsearch.analysis.common; +import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ar.ArabicStemFilter; import org.apache.lucene.analysis.bg.BulgarianStemFilter; @@ -38,8 +39,9 @@ import org.apache.lucene.analysis.lv.LatvianStemFilter; import org.apache.lucene.analysis.miscellaneous.EmptyTokenStream; import org.apache.lucene.analysis.no.NorwegianLightStemFilter; -import org.apache.lucene.analysis.no.NorwegianLightStemmer; +import org.apache.lucene.analysis.no.NorwegianLightStemFilterFactory; import org.apache.lucene.analysis.no.NorwegianMinimalStemFilter; +import org.apache.lucene.analysis.no.NorwegianMinimalStemFilterFactory; import org.apache.lucene.analysis.pt.PortugueseLightStemFilter; import org.apache.lucene.analysis.pt.PortugueseMinimalStemFilter; import org.apache.lucene.analysis.pt.PortugueseStemFilter; @@ -62,14 +64,11 @@ import org.tartarus.snowball.ext.EstonianStemmer; import org.tartarus.snowball.ext.FinnishStemmer; import org.tartarus.snowball.ext.FrenchStemmer; -import org.tartarus.snowball.ext.German2Stemmer; import org.tartarus.snowball.ext.GermanStemmer; import org.tartarus.snowball.ext.HungarianStemmer; import org.tartarus.snowball.ext.IrishStemmer; import org.tartarus.snowball.ext.ItalianStemmer; -import org.tartarus.snowball.ext.KpStemmer; import org.tartarus.snowball.ext.LithuanianStemmer; -import 
org.tartarus.snowball.ext.LovinsStemmer; import org.tartarus.snowball.ext.NorwegianStemmer; import org.tartarus.snowball.ext.PortugueseStemmer; import org.tartarus.snowball.ext.RomanianStemmer; @@ -80,6 +79,7 @@ import org.tartarus.snowball.ext.TurkishStemmer; import java.io.IOException; +import java.util.Collections; public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { @@ -87,27 +87,15 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { private static final TokenStream EMPTY_TOKEN_STREAM = new EmptyTokenStream(); - private String language; + private final String language; + + private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(StemmerTokenFilterFactory.class); StemmerTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException { super(name, settings); this.language = Strings.capitalize(settings.get("language", settings.get("name", "porter"))); // check that we have a valid language by trying to create a TokenStream create(EMPTY_TOKEN_STREAM).close(); - if ("lovins".equalsIgnoreCase(language)) { - deprecationLogger.critical( - DeprecationCategory.ANALYSIS, - "lovins_deprecation", - "The [lovins] stemmer is deprecated and will be removed in a future version." - ); - } - if ("dutch_kp".equalsIgnoreCase(language) || "dutchKp".equalsIgnoreCase(language) || "kp".equalsIgnoreCase(language)) { - deprecationLogger.critical( - DeprecationCategory.ANALYSIS, - "dutch_kp_deprecation", - "The [dutch_kp] stemmer is deprecated and will be removed in a future version." 
- ); - } } @Override @@ -135,8 +123,17 @@ public TokenStream create(TokenStream tokenStream) { } else if ("dutch".equalsIgnoreCase(language)) { return new SnowballFilter(tokenStream, new DutchStemmer()); } else if ("dutch_kp".equalsIgnoreCase(language) || "dutchKp".equalsIgnoreCase(language) || "kp".equalsIgnoreCase(language)) { - return new SnowballFilter(tokenStream, new KpStemmer()); - + deprecationLogger.critical( + DeprecationCategory.ANALYSIS, + "dutch_kp_deprecation", + "The [dutch_kp] stemmer is deprecated and will be removed in a future version." + ); + return new TokenFilter(tokenStream) { + @Override + public boolean incrementToken() { + return false; + } + }; // English stemmers } else if ("english".equalsIgnoreCase(language)) { return new PorterStemFilter(tokenStream); @@ -145,7 +142,17 @@ public TokenStream create(TokenStream tokenStream) { || "kstem".equalsIgnoreCase(language)) { return new KStemFilter(tokenStream); } else if ("lovins".equalsIgnoreCase(language)) { - return new SnowballFilter(tokenStream, new LovinsStemmer()); + deprecationLogger.critical( + DeprecationCategory.ANALYSIS, + "lovins_deprecation", + "The [lovins] stemmer is deprecated and will be removed in a future version." + ); + return new TokenFilter(tokenStream) { + @Override + public boolean incrementToken() { + return false; + } + }; } else if ("porter".equalsIgnoreCase(language)) { return new PorterStemFilter(tokenStream); } else if ("porter2".equalsIgnoreCase(language)) { @@ -185,7 +192,13 @@ public TokenStream create(TokenStream tokenStream) { } else if ("german".equalsIgnoreCase(language)) { return new SnowballFilter(tokenStream, new GermanStemmer()); } else if ("german2".equalsIgnoreCase(language)) { - return new SnowballFilter(tokenStream, new German2Stemmer()); + DEPRECATION_LOGGER.critical( + DeprecationCategory.ANALYSIS, + "german2_stemmer_deprecation", + "The 'german2' stemmer has been deprecated and folded into the 'german' Stemmer. 
" + + "Replace all usages of 'german2' with 'german'." + ); + return new SnowballFilter(tokenStream, new GermanStemmer()); } else if ("light_german".equalsIgnoreCase(language) || "lightGerman".equalsIgnoreCase(language)) { return new GermanLightStemFilter(tokenStream); } else if ("minimal_german".equalsIgnoreCase(language) || "minimalGerman".equalsIgnoreCase(language)) { @@ -231,10 +244,13 @@ public TokenStream create(TokenStream tokenStream) { // Norwegian (Nynorsk) stemmers } else if ("light_nynorsk".equalsIgnoreCase(language) || "lightNynorsk".equalsIgnoreCase(language)) { - return new NorwegianLightStemFilter(tokenStream, NorwegianLightStemmer.NYNORSK); + NorwegianLightStemFilterFactory factory = new NorwegianLightStemFilterFactory(Collections.singletonMap("variant", "nn")); + return factory.create(tokenStream); } else if ("minimal_nynorsk".equalsIgnoreCase(language) || "minimalNynorsk".equalsIgnoreCase(language)) { - return new NorwegianMinimalStemFilter(tokenStream, NorwegianLightStemmer.NYNORSK); - + NorwegianMinimalStemFilterFactory factory = new NorwegianMinimalStemFilterFactory( + Collections.singletonMap("variant", "nn") + ); + return factory.create(tokenStream); // Persian stemmers } else if ("persian".equalsIgnoreCase(language)) { return new PersianStemFilter(tokenStream); diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java index b406fa8335779..0d936666e92cd 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java @@ -278,7 +278,7 @@ public void testPhrasePrefix() throws IOException { boolQuery().should(matchPhrasePrefixQuery("field1", "test")).should(matchPhrasePrefixQuery("field1", "bro")) 
).highlighter(highlight().field("field1").order("score").preTags("").postTags("")), resp -> { - assertThat(resp.getHits().getTotalHits().value, equalTo(2L)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(2L)); for (int i = 0; i < 2; i++) { assertHighlight( resp, diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PersianAnalyzerProviderTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PersianAnalyzerProviderTests.java new file mode 100644 index 0000000000000..7b962538c2a10 --- /dev/null +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PersianAnalyzerProviderTests.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.analysis.common; + +import org.apache.lucene.analysis.Analyzer; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ESTokenStreamTestCase; +import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.index.IndexVersionUtils; + +import java.io.IOException; + +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertAnalyzesTo; + +/** + * Tests Persian Analyzer factory and behavioural changes with Lucene 10 + */ +public class PersianAnalyzerProviderTests extends ESTokenStreamTestCase { + + public void testPersianAnalyzerPostLucene10() throws IOException { + IndexVersion postLucene10Version = IndexVersionUtils.randomVersionBetween( + random(), + IndexVersions.UPGRADE_TO_LUCENE_10_0_0, + IndexVersion.current() + ); + Settings settings = ESTestCase.indexSettings(1, 1) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(IndexMetadata.SETTING_VERSION_CREATED, postLucene10Version) + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); + Environment environment = new Environment(settings, null); + + PersianAnalyzerProvider persianAnalyzerProvider = new PersianAnalyzerProvider( + idxSettings, + environment, + "my-analyzer", + Settings.EMPTY + ); + Analyzer analyzer = persianAnalyzerProvider.get(); + assertAnalyzesTo(analyzer, "من کتاب های زیادی خوانده ام", new String[] { "كتاب", "زياد", "خوانده" }); + } + + public void testPersianAnalyzerPreLucene10() throws IOException { + IndexVersion preLucene10Version = IndexVersionUtils.randomVersionBetween( + random(), + IndexVersionUtils.getFirstVersion(), + 
IndexVersionUtils.getPreviousVersion(IndexVersions.UPGRADE_TO_LUCENE_10_0_0) + ); + Settings settings = ESTestCase.indexSettings(1, 1) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(IndexMetadata.SETTING_VERSION_CREATED, preLucene10Version) + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); + Environment environment = new Environment(settings, null); + + PersianAnalyzerProvider persianAnalyzerProvider = new PersianAnalyzerProvider( + idxSettings, + environment, + "my-analyzer", + Settings.EMPTY + ); + Analyzer analyzer = persianAnalyzerProvider.get(); + assertAnalyzesTo(analyzer, "من کتاب های زیادی خوانده ام", new String[] { "كتاب", "زيادي", "خوانده" }); + } +} diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RomanianAnalyzerTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RomanianAnalyzerTests.java new file mode 100644 index 0000000000000..1af44bc71f35d --- /dev/null +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RomanianAnalyzerTests.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.analysis.common; + +import org.apache.lucene.analysis.Analyzer; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ESTokenStreamTestCase; +import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.index.IndexVersionUtils; + +import java.io.IOException; + +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertAnalyzesTo; + +/** + * Verifies the behavior of Romanian analyzer. + */ +public class RomanianAnalyzerTests extends ESTokenStreamTestCase { + + public void testRomanianAnalyzerPostLucene10() throws IOException { + IndexVersion postLucene10Version = IndexVersionUtils.randomVersionBetween( + random(), + IndexVersions.UPGRADE_TO_LUCENE_10_0_0, + IndexVersion.current() + ); + Settings settings = ESTestCase.indexSettings(1, 1) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(IndexMetadata.SETTING_VERSION_CREATED, postLucene10Version) + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); + Environment environment = new Environment(settings, null); + + RomanianAnalyzerProvider romanianAnalyzerProvider = new RomanianAnalyzerProvider( + idxSettings, + environment, + "my-analyzer", + Settings.EMPTY + ); + Analyzer analyzer = romanianAnalyzerProvider.get(); + assertAnalyzesTo(analyzer, "absenţa", new String[] { "absenț" }); + assertAnalyzesTo(analyzer, "cunoştinţă", new String[] { "cunoștinț" }); + } + + public void testRomanianAnalyzerPreLucene10() throws IOException { + IndexVersion preLucene10Version = IndexVersionUtils.randomVersionBetween( + random(), + IndexVersionUtils.getFirstVersion(), + 
IndexVersionUtils.getPreviousVersion(IndexVersions.UPGRADE_TO_LUCENE_10_0_0) + ); + Settings settings = ESTestCase.indexSettings(1, 1) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(IndexMetadata.SETTING_VERSION_CREATED, preLucene10Version) + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); + Environment environment = new Environment(settings, null); + + RomanianAnalyzerProvider romanianAnalyzerProvider = new RomanianAnalyzerProvider( + idxSettings, + environment, + "my-analyzer", + Settings.EMPTY + ); + Analyzer analyzer = romanianAnalyzerProvider.get(); + assertAnalyzesTo(analyzer, "absenţa", new String[] { "absenţ" }); + assertAnalyzesTo(analyzer, "cunoştinţă", new String[] { "cunoştinţ" }); + } +} diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactoryTests.java index 8f3d52f0174c6..bb06c221873b5 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactoryTests.java @@ -8,6 +8,7 @@ */ package org.elasticsearch.analysis.common; +import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.WhitespaceTokenizer; @@ -16,6 +17,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AnalysisTestsHelper; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -103,6 +105,42 @@ public void 
testMultipleLanguagesThrowsException() throws IOException { assertEquals("Invalid stemmer class specified: [english, light_english]", e.getMessage()); } + public void testGermanAndGerman2Stemmer() throws IOException { + IndexVersion v = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.UPGRADE_TO_LUCENE_10_0_0, IndexVersion.current()); + Analyzer analyzer = createGermanStemmer("german", v); + assertAnalyzesTo(analyzer, "Buecher Bücher", new String[] { "Buch", "Buch" }); + + analyzer = createGermanStemmer("german2", v); + assertAnalyzesTo(analyzer, "Buecher Bücher", new String[] { "Buch", "Buch" }); + assertWarnings( + "The 'german2' stemmer has been deprecated and folded into the 'german' Stemmer. " + + "Replace all usages of 'german2' with 'german'." + ); + } + + private static Analyzer createGermanStemmer(String variant, IndexVersion v) throws IOException { + + Settings settings = Settings.builder() + .put("index.analysis.filter.my_german.type", "stemmer") + .put("index.analysis.filter.my_german.language", variant) + .put("index.analysis.analyzer.my_german.tokenizer", "whitespace") + .put("index.analysis.analyzer.my_german.filter", "my_german") + .put(SETTING_VERSION_CREATED, v) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); + + ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, PLUGIN); + TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_german"); + assertThat(tokenFilter, instanceOf(StemmerTokenFilterFactory.class)); + Tokenizer tokenizer = new WhitespaceTokenizer(); + tokenizer.setReader(new StringReader("Buecher oder Bücher")); + TokenStream create = tokenFilter.create(tokenizer); + assertThat(create, instanceOf(SnowballFilter.class)); + IndexAnalyzers indexAnalyzers = analysis.indexAnalyzers; + NamedAnalyzer analyzer = indexAnalyzers.get("my_german"); + return analyzer; + } + public void testKpDeprecation() throws IOException { IndexVersion v = 
IndexVersionUtils.randomVersion(random()); Settings settings = Settings.builder() diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/20_analyzers.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/20_analyzers.yml index c03bdb3111050..8930e485aa249 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/20_analyzers.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/20_analyzers.yml @@ -901,6 +901,31 @@ - length: { tokens: 1 } - match: { tokens.0.token: خورد } +--- +"persian stemming": + - requires: + cluster_features: ["lucene_10_upgrade"] + reason: "test requires persian analyzer stemming capabilities that come with Lucene 10" + + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: persian + + - do: + indices.analyze: + index: test + body: + text: كتابها + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: كتاب } + --- "portuguese": - do: @@ -948,7 +973,7 @@ text: absenţa analyzer: romanian - length: { tokens: 1 } - - match: { tokens.0.token: absenţ } + - match: { tokens.0.token: absenț } - do: indices.analyze: @@ -957,7 +982,7 @@ text: absenţa analyzer: my_analyzer - length: { tokens: 1 } - - match: { tokens.0.token: absenţ } + - match: { tokens.0.token: absenț } --- "russian": diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java index 8f1c0cf515e14..cb74d62137815 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java @@ -24,7 +24,6 @@ import org.apache.lucene.util.automaton.Automata; import 
org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.Build; @@ -440,13 +439,13 @@ private static CharacterRunAutomaton buildAutomaton(List includePatterns ? includeAutomaton : Operations.minus(includeAutomaton, excludeAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - return new CharacterRunAutomaton(MinimizationOperations.minimize(finalAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)); + return new CharacterRunAutomaton(Operations.determinize(finalAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)); } private static Automaton patternsToAutomaton(List patterns) { final List automata = patterns.stream().map(s -> { final String regex = s.replace(".", "\\.").replace("*", ".*"); - return new RegExp(regex).toAutomaton(); + return new RegExp(regex, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(); }).toList(); if (automata.isEmpty()) { return null; diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java index 8e7ecfa49f144..777ddc28fefdc 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java @@ -1706,7 +1706,7 @@ public void testSegmentsSortedOnTimestampDesc() throws Exception { assertResponse( prepareSearch("metrics-foo").addFetchField(new FieldAndFormat(DEFAULT_TIMESTAMP_FIELD, "epoch_millis")).setSize(totalDocs), resp -> { - assertEquals(totalDocs, resp.getHits().getTotalHits().value); + assertEquals(totalDocs, resp.getHits().getTotalHits().value()); SearchHit[] hits = resp.getHits().getHits(); 
assertEquals(totalDocs, hits.length); @@ -2027,7 +2027,7 @@ static void indexDocs(String dataStream, int numDocs) { static void verifyDocs(String dataStream, long expectedNumHits, List expectedIndices) { assertResponse(prepareSearch(dataStream).setSize((int) expectedNumHits), resp -> { - assertThat(resp.getHits().getTotalHits().value, equalTo(expectedNumHits)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(expectedNumHits)); Arrays.stream(resp.getHits().getHits()).forEach(hit -> assertTrue(expectedIndices.contains(hit.getIndex()))); }); } diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java index 686e253d1d173..a2557a4de6e6d 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java @@ -545,7 +545,7 @@ public void testTrimId() throws Exception { var searchRequest = new SearchRequest(dataStreamName); searchRequest.source().trackTotalHits(true); assertResponse(client().search(searchRequest), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) numBulkRequests * numDocsPerBulk)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo((long) numBulkRequests * numDocsPerBulk)); String id = searchResponse.getHits().getHits()[0].getId(); assertThat(id, notNullValue()); diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java index 6942cc3733d1e..f8c8d2bd359f3 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java +++ 
b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java @@ -256,8 +256,8 @@ public void testGeoIpDatabasesDownload() throws Exception { res -> { try { TotalHits totalHits = res.getHits().getTotalHits(); - assertEquals(TotalHits.Relation.EQUAL_TO, totalHits.relation); - assertEquals(size, totalHits.value); + assertEquals(TotalHits.Relation.EQUAL_TO, totalHits.relation()); + assertEquals(size, totalHits.value()); assertEquals(size, res.getHits().getHits().length); List data = new ArrayList<>(); diff --git a/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java index 570c2a5f3783a..df6780aba7222 100644 --- a/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java +++ b/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java @@ -81,7 +81,7 @@ public void testBasic() throws Exception { ensureGreen("test"); prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); assertResponse(buildRequest("doc['foo'] + 1"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); }); } @@ -91,7 +91,7 @@ public void testFunction() throws Exception { ensureGreen("test"); prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); assertNoFailuresAndResponse(buildRequest("doc['foo'] + abs(1)"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); }); } @@ -102,7 +102,7 @@ public void testBasicUsingDotValue() throws Exception { 
prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); assertResponse(buildRequest("doc['foo'].value + 1"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); }); } @@ -125,7 +125,7 @@ public void testScore() throws Exception { assertResponse(req, rsp -> { assertNoFailures(rsp); SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals("1", hits.getAt(0).getId()); assertEquals("3", hits.getAt(1).getId()); assertEquals("2", hits.getAt(2).getId()); @@ -148,25 +148,25 @@ public void testDateMethods() throws Exception { prepareIndex("test").setId("2").setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z") ); assertResponse(buildRequest("doc['date0'].getSeconds() - doc['date0'].getMinutes()"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(-11.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date0'].getHourOfDay() + doc['date1'].getDayOfMonth()"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(24.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date1'].getMonth() + 1"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(9.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(10.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); 
assertResponse(buildRequest("doc['date1'].getYear()"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(1985.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(1983.0, hits.getAt(1).field("foo").getValue(), 0.0D); @@ -182,25 +182,25 @@ public void testDateObjectMethods() throws Exception { prepareIndex("test").setId("2").setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z") ); assertResponse(buildRequest("doc['date0'].date.secondOfMinute - doc['date0'].date.minuteOfHour"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(-11.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date0'].date.getHourOfDay() + doc['date1'].date.dayOfMonth"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(24.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date1'].date.monthOfYear + 1"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(10.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(11.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date1'].date.year"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(1985.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(1983.0, hits.getAt(1).field("foo").getValue(), 
0.0D); @@ -238,7 +238,7 @@ public void testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].count() + doc['double1'].count()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(2.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -246,7 +246,7 @@ public void testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].sum()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(7.5, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(6.0, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -254,7 +254,7 @@ public void testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].avg() + doc['double1'].avg()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(4.3, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(8.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(5.5, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -262,7 +262,7 @@ public void testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].median()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(1.5, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(1.25, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -270,7 +270,7 @@ public void testMultiValueMethods() throws 
Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].min()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(1.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(-1.5, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -278,7 +278,7 @@ public void testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].max()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -286,7 +286,7 @@ public void testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].sum()/doc['double0'].count()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(2.5, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(1.5, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -295,7 +295,7 @@ public void testMultiValueMethods() throws Exception { // make sure count() works for missing assertNoFailuresAndResponse(buildRequest("doc['double2'].count()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(1.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(0.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(0.0, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -304,7 +304,7 @@ public void testMultiValueMethods() throws Exception { // make sure .empty works in the same way 
assertNoFailuresAndResponse(buildRequest("doc['double2'].empty ? 5.0 : 2.0"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(2.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -342,7 +342,7 @@ public void testSparseField() throws Exception { ); assertNoFailuresAndResponse(buildRequest("doc['x'] + 1"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(1.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); @@ -378,7 +378,7 @@ public void testParams() throws Exception { String script = "doc['x'] * a + b + ((c + doc['x']) > 5000000009 ? 1 : 0)"; assertResponse(buildRequest(script, "a", 2, "b", 3.5, "c", 5000000000L), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(24.5, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(9.5, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(13.5, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -501,7 +501,7 @@ public void testSpecialValueVariable() throws Exception { ); assertResponse(req, rsp -> { - assertEquals(3, rsp.getHits().getTotalHits().value); + assertEquals(3, rsp.getHits().getTotalHits().value()); Stats stats = rsp.getAggregations().get("int_agg"); assertEquals(39.0, stats.getMax(), 0.0001); @@ -655,22 +655,22 @@ public void testGeo() throws Exception { refresh(); // access .lat assertNoFailuresAndResponse(buildRequest("doc['location'].lat"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(61.5240, 
rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); }); // access .lon assertNoFailuresAndResponse(buildRequest("doc['location'].lon"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(105.3188, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); }); // access .empty assertNoFailuresAndResponse(buildRequest("doc['location'].empty ? 1 : 0"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(0, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); }); // call haversin assertNoFailuresAndResponse(buildRequest("haversin(38.9072, 77.0369, doc['location'].lat, doc['location'].lon)"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(3170D, rsp.getHits().getAt(0).field("foo").getValue(), 50D); }); } @@ -693,14 +693,14 @@ public void testBoolean() throws Exception { ); // access .value assertNoFailuresAndResponse(buildRequest("doc['vip'].value"), rsp -> { - assertEquals(3, rsp.getHits().getTotalHits().value); + assertEquals(3, rsp.getHits().getTotalHits().value()); assertEquals(1.0D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); assertEquals(0.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D); assertEquals(0.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D); }); // access .empty assertNoFailuresAndResponse(buildRequest("doc['vip'].empty ? 
1 : 0"), rsp -> { - assertEquals(3, rsp.getHits().getTotalHits().value); + assertEquals(3, rsp.getHits().getTotalHits().value()); assertEquals(0.0D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); assertEquals(0.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D); assertEquals(1.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D); @@ -708,7 +708,7 @@ public void testBoolean() throws Exception { // ternary operator // vip's have a 50% discount assertNoFailuresAndResponse(buildRequest("doc['vip'] ? doc['price']/2 : doc['price']"), rsp -> { - assertEquals(3, rsp.getHits().getTotalHits().value); + assertEquals(3, rsp.getHits().getTotalHits().value()); assertEquals(0.5D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); assertEquals(2.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D); assertEquals(2.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D); @@ -727,7 +727,7 @@ public void testFilterScript() throws Exception { Script script = new Script(ScriptType.INLINE, "expression", "doc['foo'].value", Collections.emptyMap()); builder.setQuery(QueryBuilders.boolQuery().filter(QueryBuilders.scriptQuery(script))); assertNoFailuresAndResponse(builder, rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(1.0D, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); }); } diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionDoubleValuesScript.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionDoubleValuesScript.java index 0952ff8fe856f..bb714d4674ed6 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionDoubleValuesScript.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionDoubleValuesScript.java @@ -17,6 +17,8 @@ import org.apache.lucene.search.SortField; import 
org.elasticsearch.script.DoubleValuesScript; +import java.io.IOException; +import java.io.UncheckedIOException; import java.util.function.Function; /** @@ -37,12 +39,20 @@ public DoubleValuesScript newInstance() { return new DoubleValuesScript() { @Override public double execute() { - return exprScript.evaluate(new DoubleValues[0]); + try { + return exprScript.evaluate(new DoubleValues[0]); + } catch (IOException e) { + throw new UncheckedIOException(e); + } } @Override public double evaluate(DoubleValues[] functionValues) { - return exprScript.evaluate(functionValues); + try { + return exprScript.evaluate(functionValues); + } catch (IOException e) { + throw new UncheckedIOException(e); + } } @Override diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java index b306f104d7ba5..58cd9ea293aef 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java @@ -24,7 +24,6 @@ import org.elasticsearch.script.AggregationScript; import org.elasticsearch.script.BucketAggregationScript; import org.elasticsearch.script.BucketAggregationSelectorScript; -import org.elasticsearch.script.ClassPermission; import org.elasticsearch.script.DoubleValuesScript; import org.elasticsearch.script.FieldScript; import org.elasticsearch.script.FilterScript; @@ -36,9 +35,8 @@ import org.elasticsearch.script.TermsSetQueryScript; import org.elasticsearch.search.lookup.SearchLookup; -import java.security.AccessControlContext; -import java.security.AccessController; -import java.security.PrivilegedAction; +import java.io.IOException; +import java.io.UncheckedIOException; import java.text.ParseException; import java.util.ArrayList; import java.util.HashMap; @@ -156,36 +154,14 @@ 
public String getType() { @Override public T compile(String scriptName, String scriptSource, ScriptContext context, Map params) { - // classloader created here - final SecurityManager sm = System.getSecurityManager(); SpecialPermission.check(); - Expression expr = AccessController.doPrivileged(new PrivilegedAction() { - @Override - public Expression run() { - try { - // snapshot our context here, we check on behalf of the expression - AccessControlContext engineContext = AccessController.getContext(); - ClassLoader loader = getClass().getClassLoader(); - if (sm != null) { - loader = new ClassLoader(loader) { - @Override - protected Class loadClass(String name, boolean resolve) throws ClassNotFoundException { - try { - engineContext.checkPermission(new ClassPermission(name)); - } catch (SecurityException e) { - throw new ClassNotFoundException(name, e); - } - return super.loadClass(name, resolve); - } - }; - } - // NOTE: validation is delayed to allow runtime vars, and we don't have access to per index stuff here - return JavascriptCompiler.compile(scriptSource, JavascriptCompiler.DEFAULT_FUNCTIONS, loader); - } catch (ParseException e) { - throw convertToScriptException("compile error", scriptSource, scriptSource, e); - } - } - }); + Expression expr; + try { + // NOTE: validation is delayed to allow runtime vars, and we don't have access to per index stuff here + expr = JavascriptCompiler.compile(scriptSource, JavascriptCompiler.DEFAULT_FUNCTIONS); + } catch (ParseException e) { + throw convertToScriptException("compile error", scriptSource, scriptSource, e); + } if (contexts.containsKey(context) == false) { throw new IllegalArgumentException("expression engine does not know how to handle script context [" + context.name + "]"); } @@ -233,7 +209,11 @@ public Double execute() { placeholder.setValue(((Number) value).doubleValue()); } }); - return expr.evaluate(functionValuesArray); + try { + return expr.evaluate(functionValuesArray); + } catch (IOException e) { + 
throw new UncheckedIOException(e); + } } }; }; diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java index 3efcfde684ebc..a3c0c60d75436 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java @@ -138,7 +138,7 @@ protected void assertEqualInstances(SearchTemplateResponse expectedInstance, Sea SearchResponse expectedResponse = expectedInstance.getResponse(); SearchResponse newResponse = newInstance.getResponse(); - assertEquals(expectedResponse.getHits().getTotalHits().value, newResponse.getHits().getTotalHits().value); + assertEquals(expectedResponse.getHits().getTotalHits().value(), newResponse.getHits().getTotalHits().value()); assertEquals(expectedResponse.getHits().getMaxScore(), newResponse.getHits().getMaxScore(), 0.0001); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java index fed598e46fbd9..cbb0e19d64a6e 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java @@ -74,11 +74,6 @@ public void testMapBasic() throws IOException { Map state = new HashMap<>(); Scorable scorer = new Scorable() { - @Override - public int docID() { - return 0; - } - @Override public float score() { return 0.5f; diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java index 
01a9e995450aa..7edd6d5303252 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java @@ -85,7 +85,7 @@ public void testBasics() throws IOException { 3.2f ); TopDocs topDocs = searcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals((float) (3.2 * 2 / 3), topDocs.scoreDocs[0].score, 0); } } @@ -134,7 +134,7 @@ public void testWeightScript() throws IOException { 3.2f ); TopDocs topDocs = searcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals((float) (3.2 * 2 / 3), topDocs.scoreDocs[0].score, 0); } } diff --git a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java index 19173c650c24a..1c6ffe75e3fd2 100644 --- a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java +++ b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java @@ -43,7 +43,7 @@ public void testRankFeaturesTermQuery() throws IOException { assertNoFailuresAndResponse( prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE)), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)); for (SearchHit hit : searchResponse.getHits().getHits()) { assertThat(hit.getScore(), equalTo(20f)); } @@ -52,7 +52,7 @@ public void testRankFeaturesTermQuery() throws IOException { assertNoFailuresAndResponse( prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, 
HIGHER_RANKED_FEATURE).boost(100f)), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)); for (SearchHit hit : searchResponse.getHits().getHits()) { assertThat(hit.getScore(), equalTo(2000f)); } @@ -67,7 +67,7 @@ public void testRankFeaturesTermQuery() throws IOException { .minimumShouldMatch(1) ), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(3L)); for (SearchHit hit : searchResponse.getHits().getHits()) { if (hit.getId().equals("all")) { assertThat(hit.getScore(), equalTo(50f)); @@ -83,7 +83,7 @@ public void testRankFeaturesTermQuery() throws IOException { ); assertNoFailuresAndResponse( prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, "missing_feature")), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(0L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(0L)) ); } diff --git a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/TokenCountFieldMapperIntegrationIT.java b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/TokenCountFieldMapperIntegrationIT.java index 4fc4fc69e0ee8..97c97a643e9c8 100644 --- a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/TokenCountFieldMapperIntegrationIT.java +++ b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/TokenCountFieldMapperIntegrationIT.java @@ -203,7 +203,7 @@ private SearchRequestBuilder prepareTokenCountFieldMapperSearch() { } private void assertSearchReturns(SearchResponse result, String... 
ids) { - assertThat(result.getHits().getTotalHits().value, equalTo((long) ids.length)); + assertThat(result.getHits().getTotalHits().value(), equalTo((long) ids.length)); assertThat(result.getHits().getHits().length, equalTo(ids.length)); List foundIds = new ArrayList<>(); for (SearchHit hit : result.getHits()) { diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java index bce6ffb5e0ea3..f277d28eed922 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java @@ -468,8 +468,8 @@ public Query prefixQuery( } Automaton automaton = Operations.concatenate(automata); AutomatonQuery query = method == null - ? new AutomatonQuery(new Term(name(), value + "*"), automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false) - : new AutomatonQuery(new Term(name(), value + "*"), automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false, method); + ? 
new AutomatonQuery(new Term(name(), value + "*"), automaton, false) + : new AutomatonQuery(new Term(name(), value + "*"), automaton, false, method); return new BooleanQuery.Builder().add(query, BooleanClause.Occur.SHOULD) .add(new TermQuery(new Term(parentField, value)), BooleanClause.Occur.SHOULD) .build(); diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java index d16034c5de2fd..a992f68d93d9e 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java @@ -34,6 +34,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermStatistics; import org.apache.lucene.search.TwoPhaseIterator; @@ -266,7 +267,7 @@ public boolean isCacheable(LeafReaderContext ctx) { @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { - RuntimePhraseScorer scorer = scorer(context); + RuntimePhraseScorer scorer = (RuntimePhraseScorer) scorerSupplier(context).get(0); if (scorer == null) { return Explanation.noMatch("No matching phrase"); } @@ -286,15 +287,26 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } @Override - public RuntimePhraseScorer scorer(LeafReaderContext context) throws IOException { - final Scorer approximationScorer = approximationWeight != null ? 
approximationWeight.scorer(context) : null; - if (approximationScorer == null) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier approximationSupplier = approximationWeight != null ? approximationWeight.scorerSupplier(context) : null; + if (approximationSupplier == null) { return null; } - final DocIdSetIterator approximation = approximationScorer.iterator(); - final LeafSimScorer leafSimScorer = new LeafSimScorer(simScorer, context.reader(), field, scoreMode.needsScores()); - final CheckedIntFunction, IOException> valueFetcher = valueFetcherProvider.apply(context); - return new RuntimePhraseScorer(this, approximation, leafSimScorer, valueFetcher, field, in); + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + final Scorer approximationScorer = approximationSupplier.get(leadCost); + final DocIdSetIterator approximation = approximationScorer.iterator(); + final LeafSimScorer leafSimScorer = new LeafSimScorer(simScorer, context.reader(), field, scoreMode.needsScores()); + final CheckedIntFunction, IOException> valueFetcher = valueFetcherProvider.apply(context); + return new RuntimePhraseScorer(approximation, leafSimScorer, valueFetcher, field, in); + } + + @Override + public long cost() { + return approximationSupplier.cost(); + } + }; } @Override @@ -310,7 +322,7 @@ public Matches matches(LeafReaderContext context, int doc) throws IOException { Weight innerWeight = in.createWeight(searcher, ScoreMode.COMPLETE_NO_SCORES, 1); return innerWeight.matches(context, doc); } - RuntimePhraseScorer scorer = scorer(context); + RuntimePhraseScorer scorer = (RuntimePhraseScorer) scorerSupplier(context).get(0L); if (scorer == null) { return null; } @@ -336,14 +348,12 @@ private class RuntimePhraseScorer extends Scorer { private float freq; private RuntimePhraseScorer( - Weight weight, DocIdSetIterator approximation, LeafSimScorer scorer, CheckedIntFunction, IOException> 
valueFetcher, String field, Query query ) { - super(weight); this.scorer = scorer; this.valueFetcher = valueFetcher; this.field = field; diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java index 922b92263d712..1eb6083cfe453 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java @@ -89,8 +89,8 @@ private void assertPhraseQuery(MapperService mapperService) throws IOException { SearchExecutionContext context = createSearchExecutionContext(mapperService, newSearcher(reader)); MatchPhraseQueryBuilder queryBuilder = new MatchPhraseQueryBuilder("field", "brown fox"); TopDocs docs = context.searcher().search(queryBuilder.toQuery(context), 1); - assertThat(docs.totalHits.value, equalTo(1L)); - assertThat(docs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(docs.totalHits.value(), equalTo(1L)); + assertThat(docs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(docs.scoreDocs[0].doc, equalTo(0)); } } diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQueryTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQueryTests.java index 84139409e8bc6..a49e0c2a3e38d 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQueryTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQueryTests.java @@ -61,7 +61,7 @@ public class SourceConfirmedTextQueryTests extends ESTestCase { private static final IOFunction, IOException>> SOURCE_FETCHER_PROVIDER = context -> docID -> { 
sourceFetchCount.incrementAndGet(); - return Collections.singletonList(context.reader().document(docID).get("body")); + return Collections.singletonList(context.reader().storedFields().document(docID).get("body")); }; public void testTerm() throws Exception { diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceIntervalsSourceTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceIntervalsSourceTests.java index 0fef801b22009..2befcfb576017 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceIntervalsSourceTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceIntervalsSourceTests.java @@ -41,7 +41,7 @@ public class SourceIntervalsSourceTests extends ESTestCase { private static final IOFunction, IOException>> SOURCE_FETCHER_PROVIDER = - context -> docID -> Collections.singletonList(context.reader().document(docID).get("body")); + context -> docID -> Collections.singletonList(context.reader().storedFields().document(docID).get("body")); public void testIntervals() throws IOException { final FieldType ft = new FieldType(TextField.TYPE_STORED); diff --git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java index ad8e252e3fd63..9c0e5ce071dc6 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java @@ -115,7 +115,7 @@ public void testParentWithMultipleBuckets() { logger.info("bucket={}", bucket.getKey()); Children childrenBucket = bucket.getAggregations().get("to_comment"); TopHits topHits = childrenBucket.getAggregations().get("top_comments"); - logger.info("total_hits={}", 
topHits.getHits().getTotalHits().value); + logger.info("total_hits={}", topHits.getHits().getTotalHits().value()); for (SearchHit searchHit : topHits.getHits()) { logger.info("hit= {} {}", searchHit.getSortValues()[0], searchHit.getId()); } @@ -129,7 +129,7 @@ public void testParentWithMultipleBuckets() { assertThat(childrenBucket.getName(), equalTo("to_comment")); assertThat(childrenBucket.getDocCount(), equalTo(2L)); TopHits topHits = childrenBucket.getAggregations().get("top_comments"); - assertThat(topHits.getHits().getTotalHits().value, equalTo(2L)); + assertThat(topHits.getHits().getTotalHits().value(), equalTo(2L)); assertThat(topHits.getHits().getAt(0).getId(), equalTo("e")); assertThat(topHits.getHits().getAt(1).getId(), equalTo("f")); @@ -141,7 +141,7 @@ public void testParentWithMultipleBuckets() { assertThat(childrenBucket.getName(), equalTo("to_comment")); assertThat(childrenBucket.getDocCount(), equalTo(1L)); topHits = childrenBucket.getAggregations().get("top_comments"); - assertThat(topHits.getHits().getTotalHits().value, equalTo(1L)); + assertThat(topHits.getHits().getTotalHits().value(), equalTo(1L)); assertThat(topHits.getHits().getAt(0).getId(), equalTo("f")); categoryBucket = categoryTerms.getBucketByKey("c"); @@ -152,7 +152,7 @@ public void testParentWithMultipleBuckets() { assertThat(childrenBucket.getName(), equalTo("to_comment")); assertThat(childrenBucket.getDocCount(), equalTo(1L)); topHits = childrenBucket.getAggregations().get("top_comments"); - assertThat(topHits.getHits().getTotalHits().value, equalTo(1L)); + assertThat(topHits.getHits().getTotalHits().value(), equalTo(1L)); assertThat(topHits.getHits().getAt(0).getId(), equalTo("f")); } ); diff --git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java index 872165014f5a4..cce0ef06cbf62 100644 --- 
a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java @@ -107,7 +107,7 @@ public void testMultiLevelChild() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); } ); @@ -117,7 +117,7 @@ public void testMultiLevelChild() throws Exception { boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", "p_value1"), false)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("c1")); } ); @@ -127,7 +127,7 @@ public void testMultiLevelChild() throws Exception { boolQuery().must(matchAllQuery()).filter(hasParentQuery("child", termQuery("c_field", "c_value1"), false)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("gc1")); } ); @@ -135,7 +135,7 @@ public void testMultiLevelChild() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "p_value1"), false)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("c1")); } ); @@ -143,7 +143,7 @@ public void testMultiLevelChild() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasParentQuery("child", termQuery("c_field", "c_value1"), false)), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("gc1")); } ); @@ -161,7 +161,7 @@ public void test2744() throws IOException { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("test", matchQuery("foo", 1), ScoreMode.None)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } ); @@ -182,7 +182,7 @@ public void testSimpleChildQuery() throws Exception { // TEST FETCHING _parent from child assertNoFailuresAndResponse(prepareSearch("test").setQuery(idsQuery().addIds("c1")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("c1")); assertThat(extractValue("join_field.name", response.getHits().getAt(0).getSourceAsMap()), equalTo("child")); assertThat(extractValue("join_field.parent", response.getHits().getAt(0).getSourceAsMap()), equalTo("p1")); @@ -195,7 +195,7 @@ public void testSimpleChildQuery() throws Exception { boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("c1"), equalTo("c2"))); assertThat(extractValue("join_field.name", response.getHits().getAt(0).getSourceAsMap()), equalTo("child")); assertThat(extractValue("join_field.parent", response.getHits().getAt(0).getSourceAsMap()), equalTo("p1")); @@ -208,7 +208,7 @@ public void testSimpleChildQuery() throws Exception { // HAS CHILD 
assertNoFailuresAndResponse(prepareSearch("test").setQuery(randomHasChild("child", "c_field", "yellow")), response -> { assertHitCount(response, 1L); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); }); @@ -307,8 +307,8 @@ public void testHasParentFilter() throws Exception { ).setSize(numChildDocsPerParent), response -> { Set childIds = parentToChildrenEntry.getValue(); - assertThat(response.getHits().getTotalHits().value, equalTo((long) childIds.size())); - for (int i = 0; i < response.getHits().getTotalHits().value; i++) { + assertThat(response.getHits().getTotalHits().value(), equalTo((long) childIds.size())); + for (int i = 0; i < response.getHits().getTotalHits().value(); i++) { assertThat(childIds.remove(response.getHits().getAt(i).getId()), is(true)); assertThat(response.getHits().getAt(i).getScore(), is(1.0f)); } @@ -341,7 +341,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); } ); @@ -349,7 +349,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p2")); } ); @@ -357,7 +357,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { assertNoFailuresAndResponse( 
prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "red"), ScoreMode.None)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1"))); } @@ -367,7 +367,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); } ); @@ -375,7 +375,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p2")); } ); @@ -383,7 +383,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "red"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1"))); } @@ -426,7 +426,7 @@ public void testScopedFacet() throws Exception { ) ), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1"))); @@ -458,7 +458,7 @@ public void testDeletedParent() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\"")); } @@ -472,7 +472,7 @@ public void testDeletedParent() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1_updated\"")); } @@ -647,7 +647,7 @@ public void testScoreForParentChildQueriesWithFunctionScore() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -667,7 +667,7 @@ public void testScoreForParentChildQueriesWithFunctionScore() throws Exception { ) ), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(4f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -687,7 +687,7 @@ public void testScoreForParentChildQueriesWithFunctionScore() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(4f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -707,7 +707,7 @@ public void testScoreForParentChildQueriesWithFunctionScore() throws Exception { ) ).addSort(SortBuilders.fieldSort("c_field3")).addSort(SortBuilders.scoreSort()), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("16")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(5f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("17")); @@ -768,7 +768,7 @@ public void testHasChildAndHasParentFilter_withFilter() throws Exception { boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", termQuery("c_field", 1), ScoreMode.None)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); } ); @@ -778,7 +778,7 @@ public void testHasChildAndHasParentFilter_withFilter() throws Exception { boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", 1), false)) ), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); } ); @@ -801,7 +801,7 @@ public void testHasChildInnerHitsHighlighting() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); SearchHit[] searchHits = response.getHits().getHits()[0].getInnerHits().get("child").getHits(); assertThat(searchHits.length, equalTo(1)); @@ -888,7 +888,7 @@ public void testSimpleQueryRewrite() throws Exception { .addSort("p_field", SortOrder.ASC) .setSize(5), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(10L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(10L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("p000")); assertThat(response.getHits().getHits()[1].getId(), equalTo("p001")); assertThat(response.getHits().getHits()[2].getId(), equalTo("p002")); @@ -903,7 +903,7 @@ public void testSimpleQueryRewrite() throws Exception { .addSort("c_field", SortOrder.ASC) .setSize(5), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(500L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(500L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("c000")); assertThat(response.getHits().getHits()[1].getId(), equalTo("c001")); assertThat(response.getHits().getHits()[2].getId(), equalTo("c002")); @@ -932,7 +932,7 @@ public void testReIndexingParentAndChildDocuments() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.Total)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), 
equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\"")); } @@ -943,7 +943,7 @@ public void testReIndexingParentAndChildDocuments() throws Exception { boolQuery().must(matchQuery("c_field", "x")).must(hasParentQuery("parent", termQuery("p_field", "p_value2"), true)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), equalTo("c3")); assertThat(response.getHits().getAt(1).getId(), equalTo("c4")); } @@ -961,7 +961,7 @@ public void testReIndexingParentAndChildDocuments() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.Total)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\"")); } @@ -972,7 +972,7 @@ public void testReIndexingParentAndChildDocuments() throws Exception { boolQuery().must(matchQuery("c_field", "x")).must(hasParentQuery("parent", termQuery("p_field", "p_value2"), true)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), Matchers.anyOf(equalTo("c3"), equalTo("c4"))); assertThat(response.getHits().getAt(1).getId(), Matchers.anyOf(equalTo("c3"), equalTo("c4"))); } @@ -996,7 +996,7 @@ public void testHasChildQueryWithMinimumScore() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.Total)).setMinScore(3), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p2")); assertThat(response.getHits().getAt(0).getScore(), equalTo(3.0f)); } @@ -1411,7 +1411,7 @@ public void testParentChildQueriesViaScrollApi() throws Exception { 10, (respNum, response) -> { assertNoFailures(response); - assertThat(response.getHits().getTotalHits().value, equalTo(10L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(10L)); } ); } @@ -1469,7 +1469,7 @@ public void testMinMaxChildren() throws Exception { // Score mode = NONE assertResponse(minMaxQuery(ScoreMode.None, 1, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1479,7 +1479,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.None, 2, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("4")); @@ -1487,7 +1487,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.None, 3, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); }); @@ -1495,7 +1495,7 @@ 
public void testMinMaxChildren() throws Exception { assertHitCount(minMaxQuery(ScoreMode.None, 4, null), 0L); assertResponse(minMaxQuery(ScoreMode.None, 1, 4), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1505,7 +1505,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.None, 1, 3), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1515,7 +1515,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.None, 1, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1523,7 +1523,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.None, 2, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); }); @@ -1533,7 +1533,7 @@ public void testMinMaxChildren() throws Exception { // Score mode = SUM 
assertResponse(minMaxQuery(ScoreMode.Total, 1, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1543,7 +1543,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Total, 2, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1551,7 +1551,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Total, 3, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); }); @@ -1559,7 +1559,7 @@ public void testMinMaxChildren() throws Exception { assertHitCount(minMaxQuery(ScoreMode.Total, 4, null), 0L); assertResponse(minMaxQuery(ScoreMode.Total, 1, 4), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1569,7 +1569,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Total, 1, 3), response -> 
{ - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1579,7 +1579,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Total, 1, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -1587,7 +1587,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Total, 2, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); }); @@ -1597,7 +1597,7 @@ public void testMinMaxChildren() throws Exception { // Score mode = MAX assertResponse(minMaxQuery(ScoreMode.Max, 1, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1607,7 +1607,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Max, 2, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + 
assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1615,7 +1615,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Max, 3, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); }); @@ -1623,7 +1623,7 @@ public void testMinMaxChildren() throws Exception { assertHitCount(minMaxQuery(ScoreMode.Max, 4, null), 0L); assertResponse(minMaxQuery(ScoreMode.Max, 1, 4), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1633,7 +1633,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Max, 1, 3), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1643,7 +1643,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Max, 1, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); 
assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -1651,7 +1651,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Max, 2, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); }); @@ -1661,7 +1661,7 @@ public void testMinMaxChildren() throws Exception { // Score mode = AVG assertResponse(minMaxQuery(ScoreMode.Avg, 1, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1671,7 +1671,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Avg, 2, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1679,7 +1679,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Avg, 3, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); 
assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); }); @@ -1687,7 +1687,7 @@ public void testMinMaxChildren() throws Exception { assertHitCount(minMaxQuery(ScoreMode.Avg, 4, null), 0L); assertResponse(minMaxQuery(ScoreMode.Avg, 1, 4), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1697,7 +1697,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Avg, 1, 3), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1707,7 +1707,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Avg, 1, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1.5f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -1715,7 +1715,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Avg, 2, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1.5f)); }); diff 
--git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java index 0ae10b297f709..6d6072b2992ca 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java @@ -128,7 +128,7 @@ public void testSimpleParentChild() throws Exception { assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(2L)); + assertThat(innerHits.getTotalHits().value(), equalTo(2L)); assertThat(innerHits.getAt(0).getId(), equalTo("c1")); assertThat(innerHits.getAt(1).getId(), equalTo("c2")); @@ -148,7 +148,7 @@ public void testSimpleParentChild() throws Exception { assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(3L)); + assertThat(innerHits.getTotalHits().value(), equalTo(3L)); assertThat(innerHits.getAt(0).getId(), equalTo("c4")); assertThat(innerHits.getAt(1).getId(), equalTo("c5")); @@ -280,7 +280,7 @@ public void testRandomParentChild() throws Exception { assertThat(searchHit.getShard(), notNullValue()); SearchHits inner = searchHit.getInnerHits().get("a"); - assertThat(inner.getTotalHits().value, equalTo((long) child1InnerObjects[parent])); + assertThat(inner.getTotalHits().value(), equalTo((long) child1InnerObjects[parent])); for (int child = 0; child < child1InnerObjects[parent] && child < size; child++) { SearchHit innerHit = inner.getAt(child); String childId = String.format(Locale.ENGLISH, "c1_%04d", offset1 + child); @@ -290,7 +290,7 @@ public void testRandomParentChild() throws Exception { 
offset1 += child1InnerObjects[parent]; inner = searchHit.getInnerHits().get("b"); - assertThat(inner.getTotalHits().value, equalTo((long) child2InnerObjects[parent])); + assertThat(inner.getTotalHits().value(), equalTo((long) child2InnerObjects[parent])); for (int child = 0; child < child2InnerObjects[parent] && child < size; child++) { SearchHit innerHit = inner.getAt(child); String childId = String.format(Locale.ENGLISH, "c2_%04d", offset2 + child); @@ -347,12 +347,12 @@ public void testInnerHitsOnHasParent() throws Exception { SearchHit searchHit = response.getHits().getAt(0); assertThat(searchHit.getId(), equalTo("3")); - assertThat(searchHit.getInnerHits().get("question").getTotalHits().value, equalTo(1L)); + assertThat(searchHit.getInnerHits().get("question").getTotalHits().value(), equalTo(1L)); assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("1")); searchHit = response.getHits().getAt(1); assertThat(searchHit.getId(), equalTo("4")); - assertThat(searchHit.getInnerHits().get("question").getTotalHits().value, equalTo(1L)); + assertThat(searchHit.getInnerHits().get("question").getTotalHits().value(), equalTo(1L)); assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("2")); } ); @@ -394,11 +394,11 @@ public void testParentChildMultipleLayers() throws Exception { assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("3")); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("5")); } ); @@ -417,11 +417,11 @@ public void testParentChildMultipleLayers() throws Exception { 
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("4")); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("6")); } ); @@ -482,34 +482,34 @@ public void testRoyals() throws Exception { assertThat(response.getHits().getAt(0).getId(), equalTo("duke")); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("earls"); - assertThat(innerHits.getTotalHits().value, equalTo(4L)); + assertThat(innerHits.getTotalHits().value(), equalTo(4L)); assertThat(innerHits.getAt(0).getId(), equalTo("earl1")); assertThat(innerHits.getAt(1).getId(), equalTo("earl2")); assertThat(innerHits.getAt(2).getId(), equalTo("earl3")); assertThat(innerHits.getAt(3).getId(), equalTo("earl4")); SearchHits innerInnerHits = innerHits.getAt(0).getInnerHits().get("barons"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron1")); innerInnerHits = innerHits.getAt(1).getInnerHits().get("barons"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron2")); innerInnerHits = innerHits.getAt(2).getInnerHits().get("barons"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron3")); innerInnerHits = innerHits.getAt(3).getInnerHits().get("barons"); - 
assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron4")); innerHits = response.getHits().getAt(0).getInnerHits().get("princes"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("prince")); innerInnerHits = innerHits.getAt(0).getInnerHits().get("kings"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("king")); } ); @@ -532,12 +532,12 @@ public void testMatchesQueriesParentChildInnerHits() throws Exception { response -> { assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1")); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); - assertThat(response.getHits().getAt(1).getInnerHits().get("child").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(1).getInnerHits().get("child").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1")); } @@ -549,7 +549,7 @@ public void testMatchesQueriesParentChildInnerHits() throws Exception { 
assertResponse(prepareSearch("index").setQuery(query).addSort("id", SortOrder.ASC), response -> { assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name2")); }); diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java index 258cbe743d7d3..60412179807a5 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java @@ -102,7 +102,7 @@ public final LeafBucketCollector getLeafCollector(AggregationExecutionContext ag public void collect(int docId, long owningBucketOrd) throws IOException { if (parentDocs.get(docId) && globalOrdinals.advanceExact(docId)) { int globalOrdinal = (int) globalOrdinals.nextOrd(); - assert globalOrdinal != -1 && globalOrdinals.nextOrd() == SortedSetDocValues.NO_MORE_ORDS; + assert globalOrdinal != -1 && globalOrdinals.docValueCount() == 1; collectionStrategy.add(owningBucketOrd, globalOrdinal); } } @@ -134,11 +134,6 @@ protected void prepareSubAggs(long[] ordsToCollect) throws IOException { public float score() { return 1f; } - - @Override - public int docID() { - return childDocsIter.docID(); - } }); final Bits liveDocs = ctx.reader().getLiveDocs(); @@ -150,7 +145,7 @@ public int docID() { continue; } int globalOrdinal = (int) globalOrdinals.nextOrd(); - assert globalOrdinal != 
-1 && globalOrdinals.nextOrd() == SortedSetDocValues.NO_MORE_ORDS; + assert globalOrdinal != -1 && globalOrdinals.docValueCount() == 1; /* * Check if we contain every ordinal. It's almost certainly be * faster to replay all the matching ordinals and filter them down diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java index 9ecf4ed821e2a..6b00e94431bef 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java @@ -20,8 +20,8 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; -import org.apache.lucene.search.TopFieldCollector; -import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TopFieldCollectorManager; +import org.apache.lucene.search.TopScoreDocCollectorManager; import org.apache.lucene.search.TotalHitCountCollector; import org.apache.lucene.search.TotalHits; import org.apache.lucene.search.Weight; @@ -137,12 +137,12 @@ public TopDocsAndMaxScore topDocs(SearchHit hit) throws IOException { TopDocsCollector topDocsCollector; MaxScoreCollector maxScoreCollector = null; if (sort() != null) { - topDocsCollector = TopFieldCollector.create(sort().sort, topN, Integer.MAX_VALUE); + topDocsCollector = new TopFieldCollectorManager(sort().sort, topN, null, Integer.MAX_VALUE, false).newCollector(); if (trackScores()) { maxScoreCollector = new MaxScoreCollector(); } } else { - topDocsCollector = TopScoreDocCollector.create(topN, Integer.MAX_VALUE); + topDocsCollector = new TopScoreDocCollectorManager(topN, null, Integer.MAX_VALUE, false).newCollector(); maxScoreCollector = new MaxScoreCollector(); } for (LeafReaderContext ctx : 
this.context.searcher().getIndexReader().leaves()) { diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java index 03a1677e60f47..707fcc822665f 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java @@ -107,7 +107,7 @@ public void testParentChild() throws IOException { // verify for each children for (String parent : expectedParentChildRelations.keySet()) { - testCase(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId("child0_" + parent)), indexReader, aggregation -> { + testCase(new TermInSetQuery(IdFieldMapper.NAME, List.of(Uid.encodeId("child0_" + parent))), indexReader, aggregation -> { assertEquals( "Expected one result for min-aggregation for parent: " + parent + ", but had aggregation-results: " + aggregation, 1, diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java index 91ec0e3c67691..ca90b0e588b18 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java @@ -104,7 +104,7 @@ public void testParentChild() throws IOException { }); for (String parent : expectedParentChildRelations.keySet()) { - testCase(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId(parent)), indexReader, child -> { + testCase(new TermInSetQuery(IdFieldMapper.NAME, List.of(Uid.encodeId(parent))), indexReader, child -> { assertEquals((long) expectedParentChildRelations.get(parent).v1(), 
child.getDocCount()); assertEquals( expectedParentChildRelations.get(parent).v2(), diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java index d4fe49ec8c773..9244f815cd957 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java @@ -54,6 +54,7 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import static org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery; @@ -341,13 +342,13 @@ static void assertLateParsingQuery(Query query, String type, String id) throws I BooleanQuery booleanQuery = (BooleanQuery) lateParsingQuery.getInnerQuery(); assertThat(booleanQuery.clauses().size(), equalTo(2)); // check the inner ids query, we have to call rewrite to get to check the type it's executed against - assertThat(booleanQuery.clauses().get(0).getOccur(), equalTo(BooleanClause.Occur.MUST)); - assertThat(booleanQuery.clauses().get(0).getQuery(), instanceOf(TermInSetQuery.class)); - TermInSetQuery termsQuery = (TermInSetQuery) booleanQuery.clauses().get(0).getQuery(); - assertEquals(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId(id)), termsQuery); + assertThat(booleanQuery.clauses().get(0).occur(), equalTo(BooleanClause.Occur.MUST)); + assertThat(booleanQuery.clauses().get(0).query(), instanceOf(TermInSetQuery.class)); + TermInSetQuery termsQuery = (TermInSetQuery) booleanQuery.clauses().get(0).query(); + assertEquals(new TermInSetQuery(IdFieldMapper.NAME, List.of(Uid.encodeId(id))), termsQuery); // check the type filter - assertThat(booleanQuery.clauses().get(1).getOccur(), equalTo(BooleanClause.Occur.FILTER)); - assertEquals(new TermQuery(new Term("join_field", type)), 
booleanQuery.clauses().get(1).getQuery()); + assertThat(booleanQuery.clauses().get(1).occur(), equalTo(BooleanClause.Occur.FILTER)); + assertEquals(new TermQuery(new Term("join_field", type)), booleanQuery.clauses().get(1).query()); } @Override diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java index 255131b51a57a..393c7b6157077 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java @@ -110,74 +110,93 @@ public Explanation explain(LeafReaderContext leafReaderContext, int docId) throw } @Override - public Scorer scorer(LeafReaderContext leafReaderContext) throws IOException { - final Scorer approximation = candidateMatchesWeight.scorer(leafReaderContext); - if (approximation == null) { + public ScorerSupplier scorerSupplier(LeafReaderContext leafReaderContext) throws IOException { + final ScorerSupplier approximationSupplier = candidateMatchesWeight.scorerSupplier(leafReaderContext); + if (approximationSupplier == null) { return null; } - final CheckedFunction percolatorQueries = queryStore.getQueries(leafReaderContext); + ScorerSupplier verifiedDocsScorer; if (scoreMode.needsScores()) { - return new BaseScorer(this, approximation) { - - float score; - - @Override - boolean matchDocId(int docId) throws IOException { - Query query = percolatorQueries.apply(docId); - if (query != null) { - if (nonNestedDocsFilter != null) { - query = new BooleanQuery.Builder().add(query, Occur.MUST) - .add(nonNestedDocsFilter, Occur.FILTER) - .build(); - } - TopDocs topDocs = percolatorIndexSearcher.search(query, 1); - if (topDocs.scoreDocs.length > 0) { - score = topDocs.scoreDocs[0].score; - return true; - } else { - return false; + verifiedDocsScorer = null; + } else { + verifiedDocsScorer = 
verifiedMatchesWeight.scorerSupplier(leafReaderContext); + } + + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + final Scorer approximation = approximationSupplier.get(leadCost); + final CheckedFunction percolatorQueries = queryStore.getQueries(leafReaderContext); + if (scoreMode.needsScores()) { + return new BaseScorer(approximation) { + + float score; + + @Override + boolean matchDocId(int docId) throws IOException { + Query query = percolatorQueries.apply(docId); + if (query != null) { + if (nonNestedDocsFilter != null) { + query = new BooleanQuery.Builder().add(query, Occur.MUST) + .add(nonNestedDocsFilter, Occur.FILTER) + .build(); + } + TopDocs topDocs = percolatorIndexSearcher.search(query, 1); + if (topDocs.scoreDocs.length > 0) { + score = topDocs.scoreDocs[0].score; + return true; + } else { + return false; + } + } else { + return false; + } } - } else { - return false; - } - } - @Override - public float score() { - return score; - } - }; - } else { - ScorerSupplier verifiedDocsScorer = verifiedMatchesWeight.scorerSupplier(leafReaderContext); - Bits verifiedDocsBits = Lucene.asSequentialAccessBits(leafReaderContext.reader().maxDoc(), verifiedDocsScorer); - return new BaseScorer(this, approximation) { + @Override + public float score() { + return score; + } + }; + } else { + Bits verifiedDocsBits = Lucene.asSequentialAccessBits(leafReaderContext.reader().maxDoc(), verifiedDocsScorer); + return new BaseScorer(approximation) { + + @Override + public float score() throws IOException { + return 0f; + } - @Override - public float score() throws IOException { - return 0f; + boolean matchDocId(int docId) throws IOException { + // We use the verifiedDocsBits to skip the expensive MemoryIndex verification. 
+ // If docId also appears in the verifiedDocsBits then that means during indexing + // we were able to extract all query terms and for this candidate match + // and we determined based on the nature of the query that it is safe to skip + // the MemoryIndex verification. + if (verifiedDocsBits.get(docId)) { + return true; + } + Query query = percolatorQueries.apply(docId); + if (query == null) { + return false; + } + if (nonNestedDocsFilter != null) { + query = new BooleanQuery.Builder().add(query, Occur.MUST) + .add(nonNestedDocsFilter, Occur.FILTER) + .build(); + } + return Lucene.exists(percolatorIndexSearcher, query); + } + }; } + } - boolean matchDocId(int docId) throws IOException { - // We use the verifiedDocsBits to skip the expensive MemoryIndex verification. - // If docId also appears in the verifiedDocsBits then that means during indexing - // we were able to extract all query terms and for this candidate match - // and we determined based on the nature of the query that it is safe to skip - // the MemoryIndex verification. 
- if (verifiedDocsBits.get(docId)) { - return true; - } - Query query = percolatorQueries.apply(docId); - if (query == null) { - return false; - } - if (nonNestedDocsFilter != null) { - query = new BooleanQuery.Builder().add(query, Occur.MUST).add(nonNestedDocsFilter, Occur.FILTER).build(); - } - return Lucene.exists(percolatorIndexSearcher, query); - } - }; - } + @Override + public long cost() { + return approximationSupplier.cost(); + } + }; } @Override @@ -265,8 +284,7 @@ abstract static class BaseScorer extends Scorer { final Scorer approximation; - BaseScorer(Weight weight, Scorer approximation) { - super(weight); + BaseScorer(Scorer approximation) { this.approximation = approximation; } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java index 409b6fd70c3c7..d6422efdfed26 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java @@ -294,7 +294,7 @@ Tuple createCandidateQuery(IndexReader indexReader) throw List extractedTerms = t.v1(); Map> encodedPointValuesByField = t.v2(); // `1 + ` is needed to take into account the EXTRACTION_FAILED should clause - boolean canUseMinimumShouldMatchField = 1 + extractedTerms.size() + encodedPointValuesByField.size() <= BooleanQuery + boolean canUseMinimumShouldMatchField = 1 + extractedTerms.size() + encodedPointValuesByField.size() <= IndexSearcher .getMaxClauseCount(); List subQueries = new ArrayList<>(); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java index c363746856681..8413b564c2041 100644 --- 
a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java @@ -91,7 +91,7 @@ public void process(HitContext hitContext) throws IOException { query = percolatorIndexSearcher.rewrite(query); int memoryIndexMaxDoc = percolatorIndexSearcher.getIndexReader().maxDoc(); TopDocs topDocs = percolatorIndexSearcher.search(query, memoryIndexMaxDoc, new Sort(SortField.FIELD_DOC)); - if (topDocs.totalHits.value == 0) { + if (topDocs.totalHits.value() == 0) { // This hit didn't match with a percolate query, // likely to happen when percolating multiple documents continue; diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java index da4b10956dcf8..0e9aa6de3a0c0 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java @@ -8,7 +8,6 @@ */ package org.elasticsearch.percolator; -import org.apache.lucene.index.PrefixCodedTerms; import org.apache.lucene.index.Term; import org.apache.lucene.queries.spans.SpanOrQuery; import org.apache.lucene.queries.spans.SpanTermQuery; @@ -26,12 +25,15 @@ import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefIterator; import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.automaton.ByteRunAutomaton; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; import org.elasticsearch.index.query.DateRangeIncludingNowQuery; import org.elasticsearch.lucene.queries.BlendedTermQuery; +import java.io.IOException; +import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.Arrays; import 
java.util.Collections; @@ -162,7 +164,7 @@ public QueryVisitor getSubVisitor(Occur occur, Query parent) { int minimumShouldMatchValue = 0; if (parent instanceof BooleanQuery bq) { if (bq.getMinimumNumberShouldMatch() == 0 - && bq.clauses().stream().anyMatch(c -> c.getOccur() == Occur.MUST || c.getOccur() == Occur.FILTER)) { + && bq.clauses().stream().anyMatch(c -> c.occur() == Occur.MUST || c.occur() == Occur.FILTER)) { return QueryVisitor.EMPTY_VISITOR; } minimumShouldMatchValue = bq.getMinimumNumberShouldMatch(); @@ -198,11 +200,15 @@ public void consumeTerms(Query query, Term... termsToConsume) { @Override public void consumeTermsMatching(Query query, String field, Supplier automaton) { if (query instanceof TermInSetQuery q) { - PrefixCodedTerms.TermIterator ti = q.getTermData().iterator(); + BytesRefIterator bytesRefIterator = q.getBytesRefIterator(); BytesRef term; Set qe = new HashSet<>(); - while ((term = ti.next()) != null) { - qe.add(new QueryExtraction(new Term(field, term))); + try { + while ((term = bytesRefIterator.next()) != null) { + qe.add(new QueryExtraction(new Term(field, term))); + } + } catch (IOException e) { + throw new UncheckedIOException(e); } this.terms.add(new Result(true, qe, 1)); } else { diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java index 31e893ace72fd..ff321303b56c0 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java @@ -31,6 +31,7 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.index.memory.MemoryIndex; @@ -56,6 
+57,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TermInSetQuery; @@ -246,15 +248,13 @@ public void testDuel() throws Exception { queryFunctions.add( () -> new TermInSetQuery( field1, - new BytesRef(randomFrom(stringContent.get(field1))), - new BytesRef(randomFrom(stringContent.get(field1))) + List.of(new BytesRef(randomFrom(stringContent.get(field1))), new BytesRef(randomFrom(stringContent.get(field1)))) ) ); queryFunctions.add( () -> new TermInSetQuery( field2, - new BytesRef(randomFrom(stringContent.get(field1))), - new BytesRef(randomFrom(stringContent.get(field1))) + List.of(new BytesRef(randomFrom(stringContent.get(field1))), new BytesRef(randomFrom(stringContent.get(field1)))) ) ); // many iterations with boolean queries, which are the most complex queries to deal with when nested @@ -647,7 +647,7 @@ public void testRangeQueries() throws Exception { v ); TopDocs topDocs = shardSearcher.search(query, 1); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); @@ -655,7 +655,7 @@ public void testRangeQueries() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(1, topDocs.scoreDocs[0].doc); @@ -663,7 +663,7 @@ public void testRangeQueries() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, 
Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(2, topDocs.scoreDocs[0].doc); @@ -671,7 +671,7 @@ public void testRangeQueries() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(3, topDocs.scoreDocs[0].doc); @@ -679,7 +679,7 @@ public void testRangeQueries() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(4, topDocs.scoreDocs[0].doc); @@ -690,7 +690,7 @@ public void testRangeQueries() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(5, topDocs.scoreDocs[0].doc); } @@ -836,14 +836,14 @@ public void testPercolateMatchAll() throws Exception { IndexVersion.current() ); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(3L, topDocs.totalHits.value); + assertEquals(3L, topDocs.totalHits.value()); 
assertEquals(3, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(1, topDocs.scoreDocs[1].doc); assertEquals(4, topDocs.scoreDocs[2].doc); topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10); - assertEquals(3L, topDocs.totalHits.value); + assertEquals(3L, topDocs.totalHits.value()); assertEquals(3, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(1, topDocs.scoreDocs[1].doc); @@ -875,7 +875,7 @@ public void testFunctionScoreQuery() throws Exception { IndexVersion.current() ); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); @@ -931,15 +931,15 @@ public void testPercolateSmallAndLargeDocument() throws Exception { v ); BooleanQuery candidateQuery = (BooleanQuery) query.getCandidateMatchesQuery(); - assertThat(candidateQuery.clauses().get(0).getQuery(), instanceOf(CoveringQuery.class)); + assertThat(candidateQuery.clauses().get(0).query(), instanceOf(CoveringQuery.class)); TopDocs topDocs = shardSearcher.search(query, 10); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); @@ -947,10 +947,10 @@ public void testPercolateSmallAndLargeDocument() throws Exception { } // This will trigger using the TermsQuery instead of individual term query clauses in the CoveringQuery: - int origMaxClauseCount 
= BooleanQuery.getMaxClauseCount(); + int origMaxClauseCount = IndexSearcher.getMaxClauseCount(); try (Directory directory = new ByteBuffersDirectory()) { final int maxClauseCount = 100; - BooleanQuery.setMaxClauseCount(maxClauseCount); + IndexSearcher.setMaxClauseCount(maxClauseCount); try (IndexWriter iw = new IndexWriter(directory, newIndexWriterConfig())) { Document document = new Document(); for (int i = 0; i < maxClauseCount; i++) { @@ -970,22 +970,22 @@ public void testPercolateSmallAndLargeDocument() throws Exception { v ); BooleanQuery candidateQuery = (BooleanQuery) query.getCandidateMatchesQuery(); - assertThat(candidateQuery.clauses().get(0).getQuery(), instanceOf(TermInSetQuery.class)); + assertThat(candidateQuery.clauses().get(0).query(), instanceOf(TermInSetQuery.class)); TopDocs topDocs = shardSearcher.search(query, 10); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(1, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(1, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); } } finally { - BooleanQuery.setMaxClauseCount(origMaxClauseCount); + IndexSearcher.setMaxClauseCount(origMaxClauseCount); } } @@ -1032,7 +1032,7 @@ public void testDuplicatedClauses() throws Exception { IndexSearcher percolateSearcher = memoryIndex.createSearcher(); PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(1, 
topDocs.scoreDocs[1].doc); } @@ -1066,7 +1066,7 @@ public void testDuplicatedClauses2() throws Exception { IndexSearcher percolateSearcher = memoryIndex.createSearcher(); PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); memoryIndex = new MemoryIndex(); @@ -1074,7 +1074,7 @@ public void testDuplicatedClauses2() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); memoryIndex = new MemoryIndex(); @@ -1082,7 +1082,7 @@ public void testDuplicatedClauses2() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); } @@ -1117,7 +1117,7 @@ public void testMsmAndRanges_disjunction() throws Exception { IndexSearcher percolateSearcher = memoryIndex.createSearcher(); PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); } @@ -1141,7 +1141,7 @@ private 
void duelRun(PercolateQuery.QueryStore percolateQueryStore, MemoryIndex TopDocs controlTopDocs = shardSearcher.search(controlQuery, 100); try { - assertThat(topDocs.totalHits.value, equalTo(controlTopDocs.totalHits.value)); + assertThat(topDocs.totalHits.value(), equalTo(controlTopDocs.totalHits.value())); assertThat(topDocs.scoreDocs.length, equalTo(controlTopDocs.scoreDocs.length)); for (int j = 0; j < topDocs.scoreDocs.length; j++) { assertThat(topDocs.scoreDocs[j].doc, equalTo(controlTopDocs.scoreDocs[j].doc)); @@ -1164,12 +1164,13 @@ private void duelRun(PercolateQuery.QueryStore percolateQueryStore, MemoryIndex logger.error("topDocs.scoreDocs[{}].doc={}", i, topDocs.scoreDocs[i].doc); logger.error("topDocs.scoreDocs[{}].score={}", i, topDocs.scoreDocs[i].score); } + StoredFields storedFields = shardSearcher.storedFields(); for (int i = 0; i < controlTopDocs.scoreDocs.length; i++) { logger.error("controlTopDocs.scoreDocs[{}].doc={}", i, controlTopDocs.scoreDocs[i].doc); logger.error("controlTopDocs.scoreDocs[{}].score={}", i, controlTopDocs.scoreDocs[i].score); // Additional stored information that is useful when debugging: - String queryToString = shardSearcher.doc(controlTopDocs.scoreDocs[i].doc).get("query_to_string"); + String queryToString = storedFields.document(controlTopDocs.scoreDocs[i].doc).get("query_to_string"); logger.error("controlTopDocs.scoreDocs[{}].query_to_string={}", i, queryToString); TermsEnum tenum = MultiTerms.getTerms(shardSearcher.getIndexReader(), fieldType.queryTermsField.name()).iterator(); @@ -1289,7 +1290,7 @@ public String toString() { } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { float _score[] = new float[] { boost }; DocIdSetIterator allDocs = DocIdSetIterator.all(context.reader().maxDoc()); CheckedFunction leaf = queryStore.getQueries(context); @@ -1313,7 +1314,7 @@ protected boolean match(int doc) { } 
} }; - return new Scorer(this) { + Scorer scorer = new Scorer() { @Override public int docID() { @@ -1335,6 +1336,7 @@ public float getMaxScore(int upTo) throws IOException { return _score[0]; } }; + return new DefaultScorerSupplier(scorer); } @Override diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java index 075d4d429fb39..04a8105b5fb82 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java @@ -118,7 +118,7 @@ public void testPercolateQuery() throws Exception { ) ); TopDocs topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); assertThat(topDocs.scoreDocs.length, equalTo(1)); assertThat(topDocs.scoreDocs[0].doc, equalTo(0)); Explanation explanation = shardSearcher.explain(query, 0); @@ -137,7 +137,7 @@ public void testPercolateQuery() throws Exception { ) ); topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, equalTo(1)); explanation = shardSearcher.explain(query, 1); @@ -166,7 +166,7 @@ public void testPercolateQuery() throws Exception { ) ); topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits.value, equalTo(4L)); + assertThat(topDocs.totalHits.value(), equalTo(4L)); query = new PercolateQuery( "_name", @@ -178,7 +178,7 @@ public void testPercolateQuery() throws Exception { new MatchNoDocsQuery("") ); topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); 
assertThat(topDocs.scoreDocs[0].doc, equalTo(3)); explanation = shardSearcher.explain(query, 3); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java index 100cda66acdcc..f72c68c6fd2e3 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.sandbox.search.CoveringQuery; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; @@ -417,10 +418,10 @@ public void testExtractTermsAndRanges() throws Exception { } public void testCreateCandidateQuery() throws Exception { - int origMaxClauseCount = BooleanQuery.getMaxClauseCount(); + int origMaxClauseCount = IndexSearcher.getMaxClauseCount(); try { final int maxClauseCount = 100; - BooleanQuery.setMaxClauseCount(maxClauseCount); + IndexSearcher.setMaxClauseCount(maxClauseCount); addQueryFieldMappings(); MemoryIndex memoryIndex = new MemoryIndex(false); @@ -435,8 +436,8 @@ public void testCreateCandidateQuery() throws Exception { Tuple t = fieldType.createCandidateQuery(indexReader); assertTrue(t.v2()); assertEquals(2, t.v1().clauses().size()); - assertThat(t.v1().clauses().get(0).getQuery(), instanceOf(CoveringQuery.class)); - assertThat(t.v1().clauses().get(1).getQuery(), instanceOf(TermQuery.class)); + assertThat(t.v1().clauses().get(0).query(), instanceOf(CoveringQuery.class)); + assertThat(t.v1().clauses().get(1).query(), instanceOf(TermQuery.class)); // Now push it over the edge, so that it falls back using TermInSetQuery memoryIndex.addField("field2", "value", 
new WhitespaceAnalyzer()); @@ -444,12 +445,12 @@ public void testCreateCandidateQuery() throws Exception { t = fieldType.createCandidateQuery(indexReader); assertFalse(t.v2()); assertEquals(3, t.v1().clauses().size()); - TermInSetQuery terms = (TermInSetQuery) t.v1().clauses().get(0).getQuery(); - assertEquals(maxClauseCount - 1, terms.getTermData().size()); - assertThat(t.v1().clauses().get(1).getQuery().toString(), containsString(fieldName + ".range_field: { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); SearchHit[] hits = response.getHits().getHits(); assertThat(hits[0].getFields().get("_percolator_document_slot").getValues(), equalTo(Arrays.asList(0, 1))); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java index a9c3e09e7f4ed..81427060615ea 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java @@ -82,7 +82,7 @@ public void testExtractQueryMetadata_termQuery() { } public void testExtractQueryMetadata_termsQuery() { - TermInSetQuery termsQuery = new TermInSetQuery("_field", new BytesRef("_term1"), new BytesRef("_term2")); + TermInSetQuery termsQuery = new TermInSetQuery("_field", List.of(new BytesRef("_term1"), new BytesRef("_term2"))); Result result = analyze(termsQuery); assertThat(result.verified, is(true)); assertThat(result.minimumShouldMatch, equalTo(1)); diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java index a76ddf13e4595..8b94337141243 100644 --- 
a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java @@ -70,7 +70,7 @@ public void testReindexFromRemoteGivenIndexExists() throws Exception { final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("desc-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; })); } @@ -85,7 +85,7 @@ public void testReindexFromRemoteGivenSameIndexNames() throws Exception { final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("test-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; })); } @@ -114,7 +114,7 @@ public void testReindexManyTimesFromRemoteGivenSameIndexNames() throws Exception final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("test-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; })); } } @@ -146,7 +146,7 @@ public void testReindexFromRemoteGivenSimpleDateMathIndexName() throws Interrupt final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("desc-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == 
TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; })); } @@ -162,7 +162,7 @@ public void testReindexFromRemoteGivenComplexDateMathIndexName() throws Interrup final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("desc-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; })); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexValidator.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexValidator.java index 4b960e97ce0e0..d046ba881b5d4 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexValidator.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexValidator.java @@ -12,7 +12,6 @@ import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.DocWriteRequest; @@ -96,7 +95,7 @@ static CharacterRunAutomaton buildRemoteWhitelist(List whitelist) { return new CharacterRunAutomaton(Automata.makeEmpty()); } Automaton automaton = Regex.simpleMatchToAutomaton(whitelist.toArray(Strings.EMPTY_ARRAY)); - automaton = MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + automaton = Operations.determinize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); if (Operations.isTotal(automaton)) { throw new IllegalArgumentException( "Refusing to start because whitelist " diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/remote/RemoteResponseParsers.java 
b/modules/reindex/src/main/java/org/elasticsearch/reindex/remote/RemoteResponseParsers.java index b924f8c311115..01459e2ff61bb 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/remote/RemoteResponseParsers.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/remote/RemoteResponseParsers.java @@ -97,8 +97,8 @@ class Fields { HITS_PARSER.declareField(constructorArg(), (p, c) -> { if (p.currentToken() == XContentParser.Token.START_OBJECT) { final TotalHits totalHits = SearchHits.parseTotalHitsFragment(p); - assert totalHits.relation == TotalHits.Relation.EQUAL_TO; - return totalHits.value; + assert totalHits.relation() == TotalHits.Relation.EQUAL_TO; + return totalHits.value(); } else { // For BWC with nodes pre 7.0 return p.longValue(); diff --git a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java index 593d4b41df712..6c77186089644 100644 --- a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java +++ b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java @@ -242,7 +242,7 @@ public void testIndexedTermVectors() throws IOException { withLuceneIndex(mapperService, iw -> iw.addDocument(doc.rootDoc()), reader -> { LeafReader leaf = reader.leaves().get(0).reader(); - Terms terms = leaf.getTermVector(0, "field"); + Terms terms = leaf.termVectors().get(0, "field"); TermsEnum iterator = terms.iterator(); BytesRef term; Set foundTerms = new HashSet<>(); diff --git a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighterTests.java 
b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighterTests.java index 61abd64e98a96..d4c4ccfaa442d 100644 --- a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighterTests.java +++ b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighterTests.java @@ -130,7 +130,7 @@ private void assertHighlightOneDoc( } TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 1, Sort.INDEXORDER); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); String rawValue = Strings.collectionToDelimitedString(plainTextForHighlighter, String.valueOf(MULTIVAL_SEP_CHAR)); UnifiedHighlighter.Builder builder = UnifiedHighlighter.builder(searcher, hiliteAnalyzer); builder.withBreakIterator(() -> breakIterator); diff --git a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smb/SmbMmapFsDirectoryFactory.java b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smb/SmbMmapFsDirectoryFactory.java index 4594e8d71c6fb..b9f4943b1dab6 100644 --- a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smb/SmbMmapFsDirectoryFactory.java +++ b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smb/SmbMmapFsDirectoryFactory.java @@ -27,7 +27,6 @@ protected Directory newFSDirectory(Path location, LockFactory lockFactory, Index return new SmbDirectoryWrapper( setPreload( new MMapDirectory(location, lockFactory), - lockFactory, new HashSet<>(indexSettings.getValue(IndexModule.INDEX_STORE_PRE_LOAD_SETTING)) ) ); diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 8570662f7b523..73f291da15ead 100644 --- 
a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -33,6 +33,7 @@ import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.rest.action.admin.indices.RestPutIndexTemplateAction; +import org.elasticsearch.search.SearchFeatures; import org.elasticsearch.test.NotEqualMessageBuilder; import org.elasticsearch.test.XContentTestUtils; import org.elasticsearch.test.cluster.ElasticsearchCluster; @@ -1694,6 +1695,211 @@ public void testSystemIndexMetadataIsUpgraded() throws Exception { } } + /** + * This test ensures that search results on old indices using "persian" analyzer don't change + * after we introduce Lucene 10 + */ + public void testPersianAnalyzerBWC() throws Exception { + var originalClusterLegacyPersianAnalyzer = oldClusterHasFeature(SearchFeatures.LUCENE_10_0_0_UPGRADE) == false; + assumeTrue("Don't run this test if both versions already support stemming", originalClusterLegacyPersianAnalyzer); + final String indexName = "test_persian_stemmer"; + Settings idxSettings = indexSettings(1, 1).build(); + String mapping = """ + { + "properties": { + "textfield" : { + "type": "text", + "analyzer": "persian" + } + } + } + """; + + String query = """ + { + "query": { + "match": { + "textfield": "كتابها" + } + } + } + """; + + if (isRunningAgainstOldCluster()) { + createIndex(client(), indexName, idxSettings, mapping); + ensureGreen(indexName); + + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + indexName + "/" + "_doc/1", + (builder, params) -> builder.field("textfield", "كتابها") + ) + ) + ); + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + indexName + "/" + "_doc/2", + (builder, params) -> builder.field("textfield", "كتاب") + ) + ) + ); + refresh(indexName); + + 
assertNumHits(indexName, 2, 1); + + Request searchRequest = new Request("POST", "/" + indexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(1, entityAsMap(client().performRequest(searchRequest))); + } else { + // old index should still only return one doc + Request searchRequest = new Request("POST", "/" + indexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(1, entityAsMap(client().performRequest(searchRequest))); + + String newIndexName = indexName + "_new"; + createIndex(client(), newIndexName, idxSettings, mapping); + ensureGreen(newIndexName); + + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + newIndexName + "/" + "_doc/1", + (builder, params) -> builder.field("textfield", "كتابها") + ) + ) + ); + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + newIndexName + "/" + "_doc/2", + (builder, params) -> builder.field("textfield", "كتاب") + ) + ) + ); + refresh(newIndexName); + + searchRequest = new Request("POST", "/" + newIndexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(2, entityAsMap(client().performRequest(searchRequest))); + + // searching both indices (old and new analysis version) we should get 1 hit from the old and 2 from the new index + searchRequest = new Request("POST", "/" + indexName + "," + newIndexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(3, entityAsMap(client().performRequest(searchRequest))); + } + } + + /** + * This test ensures that search results on old indices using "romanain" analyzer don't change + * after we introduce Lucene 10 + */ + public void testRomanianAnalyzerBWC() throws Exception { + var originalClusterLegacyRomanianAnalyzer = oldClusterHasFeature(SearchFeatures.LUCENE_10_0_0_UPGRADE) == false; + assumeTrue("Don't run this test if both versions already support stemming", originalClusterLegacyRomanianAnalyzer); + final String indexName = 
"test_romanian_stemmer"; + Settings idxSettings = indexSettings(1, 1).build(); + String cedillaForm = "absenţa"; + String commaForm = "absența"; + + String mapping = """ + { + "properties": { + "textfield" : { + "type": "text", + "analyzer": "romanian" + } + } + } + """; + + // query that uses the cedilla form of "t" + String query = """ + { + "query": { + "match": { + "textfield": "absenţa" + } + } + } + """; + + if (isRunningAgainstOldCluster()) { + createIndex(client(), indexName, idxSettings, mapping); + ensureGreen(indexName); + + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + indexName + "/" + "_doc/1", + (builder, params) -> builder.field("textfield", cedillaForm) + ) + ) + ); + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + indexName + "/" + "_doc/2", + // this doc uses the comma form + (builder, params) -> builder.field("textfield", commaForm) + ) + ) + ); + refresh(indexName); + + assertNumHits(indexName, 2, 1); + + Request searchRequest = new Request("POST", "/" + indexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(1, entityAsMap(client().performRequest(searchRequest))); + } else { + // old index should still only return one doc + Request searchRequest = new Request("POST", "/" + indexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(1, entityAsMap(client().performRequest(searchRequest))); + + String newIndexName = indexName + "_new"; + createIndex(client(), newIndexName, idxSettings, mapping); + ensureGreen(newIndexName); + + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + newIndexName + "/" + "_doc/1", + (builder, params) -> builder.field("textfield", cedillaForm) + ) + ) + ); + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + newIndexName + "/" + "_doc/2", + (builder, params) -> builder.field("textfield", commaForm) + ) + ) + ); + 
refresh(newIndexName); + + searchRequest = new Request("POST", "/" + newIndexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(2, entityAsMap(client().performRequest(searchRequest))); + + // searching both indices (old and new analysis version) we should get 1 hit from the old and 2 from the new index + searchRequest = new Request("POST", "/" + indexName + "," + newIndexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(3, entityAsMap(client().performRequest(searchRequest))); + } + } + public void testForbidDisableSoftDeletesOnRestore() throws Exception { final String snapshot = "snapshot-" + index; if (isRunningAgainstOldCluster()) { diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index a742e83255bbb..7525ff2dc12d2 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -54,7 +54,9 @@ tasks.named("precommit").configure { dependsOn 'enforceYamlTestConvention' } -tasks.named("yamlRestCompatTestTransform").configure({task -> - task.skipTest("indices.sort/10_basic/Index Sort", "warning does not exist for compatibility") - task.skipTest("search/330_fetch_fields/Test search rewrite", "warning does not exist for compatibility") +tasks.named("yamlRestCompatTestTransform").configure ({ task -> + task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling") + task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling with vector_operations_count") + task.skipTest("indices.sort/10_basic/Index Sort", "warning does not exist for compatibility") + task.skipTest("search/330_fetch_fields/Test search rewrite", "warning does not exist for compatibility") }) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml index dc79961ae78cd..81ca84a06f815 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml @@ -212,7 +212,6 @@ dfs knn vector profiling: - match: { hits.total.value: 1 } - match: { profile.shards.0.dfs.knn.0.query.0.type: "DocAndScoreQuery" } - - match: { profile.shards.0.dfs.knn.0.query.0.description: "DocAndScore[100]" } - gt: { profile.shards.0.dfs.knn.0.query.0.time_in_nanos: 0 } - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score_count: 0 } - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score: 0 } @@ -235,6 +234,47 @@ dfs knn vector profiling: - match: { profile.shards.0.dfs.knn.0.collector.0.reason: "search_top_hits" } - gt: { profile.shards.0.dfs.knn.0.collector.0.time_in_nanos: 0 } +--- +dfs knn vector profiling description: + - requires: + cluster_features: ["lucene_10_upgrade"] + reason: "the profile description changed with Lucene 10" + - do: + indices.create: + index: images + body: + settings: + index.number_of_shards: 1 + mappings: + properties: + image: + type: "dense_vector" + dims: 3 + index: true + similarity: "l2_norm" + + - do: + index: + index: images + id: "1" + refresh: true + body: + image: [1, 5, -20] + + - do: + search: + index: images + body: + profile: true + knn: + field: "image" + query_vector: [-5, 9, -12] + k: 1 + num_candidates: 100 + + - match: { hits.total.value: 1 } + - match: { profile.shards.0.dfs.knn.0.query.0.description: "DocAndScoreQuery[0,...][0.009673266,...],0.009673266" } + --- dfs knn vector profiling with vector_operations_count: - requires: @@ -276,7 +316,6 @@ dfs knn vector profiling with vector_operations_count: - match: { hits.total.value: 1 } - match: { profile.shards.0.dfs.knn.0.query.0.type: "DocAndScoreQuery" } - - match: { 
profile.shards.0.dfs.knn.0.query.0.description: "DocAndScore[100]" } - match: { profile.shards.0.dfs.knn.0.vector_operations_count: 1 } - gt: { profile.shards.0.dfs.knn.0.query.0.time_in_nanos: 0 } - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score_count: 0 } @@ -300,7 +339,6 @@ dfs knn vector profiling with vector_operations_count: - match: { profile.shards.0.dfs.knn.0.collector.0.reason: "search_top_hits" } - gt: { profile.shards.0.dfs.knn.0.collector.0.time_in_nanos: 0 } - --- dfs profile for search with dfs_query_then_fetch: - requires: diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java index c56bc201e7f86..8bedf436e3698 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java @@ -571,7 +571,7 @@ public void testSearchQueryThenFetch() throws Exception { SearchRequest searchRequest = new SearchRequest(randomIndicesOrAliases).searchType(SearchType.QUERY_THEN_FETCH); assertNoFailuresAndResponse( internalCluster().coordOnlyNodeClient().search(searchRequest), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)) ); clearInterceptedActions(); @@ -601,7 +601,7 @@ public void testSearchDfsQueryThenFetch() throws Exception { SearchRequest searchRequest = new SearchRequest(randomIndicesOrAliases).searchType(SearchType.DFS_QUERY_THEN_FETCH); assertNoFailuresAndResponse( internalCluster().coordOnlyNodeClient().search(searchRequest), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)) ); clearInterceptedActions(); 
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java index cc6329a973b37..e8160a311bedb 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java @@ -306,8 +306,8 @@ public void onFailure(Exception e) { prepareSearch("test").setIndicesOptions(IndicesOptions.lenientExpandOpen()) .setQuery(new RangeQueryBuilder("index_version").from(indexVersion.get(), true)), expected -> assertNoFailuresAndResponse(prepareSearch("test").setIndicesOptions(IndicesOptions.lenientExpandOpen()), all -> { - assertEquals(expected + " vs. " + all, expected.getHits().getTotalHits().value, all.getHits().getTotalHits().value); - logger.info("total: {}", expected.getHits().getTotalHits().value); + assertEquals(expected + " vs. 
" + all, expected.getHits().getTotalHits().value(), all.getHits().getTotalHits().value()); + logger.info("total: {}", expected.getHits().getTotalHits().value()); }) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java index e1bf5bce6f3ae..8391ab270b1d1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java @@ -253,7 +253,7 @@ public void assertNested(String index, int numDocs) { // now, do a nested query assertNoFailuresAndResponse( prepareSearch(index).setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg)), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) numDocs)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo((long) numDocs)) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java index 8b8b62da98f97..2fd6ee9a16808 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java @@ -141,11 +141,11 @@ public void afterBulk(long executionId, BulkRequest request, Exception failure) assertResponse(prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchAllQuery()).setSize(0), results -> { assertThat(bulkProcessor.getTotalBytesInFlight(), equalTo(0L)); if (rejectedExecutionExpected) { - assertThat((int) results.getHits().getTotalHits().value, lessThanOrEqualTo(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), 
lessThanOrEqualTo(numberOfAsyncOps)); } else if (finalRejectedAfterAllRetries) { - assertThat((int) results.getHits().getTotalHits().value, lessThan(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), lessThan(numberOfAsyncOps)); } else { - assertThat((int) results.getHits().getTotalHits().value, equalTo(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), equalTo(numberOfAsyncOps)); } }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java index 37904e9f639ac..4ed19065f32f2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java @@ -136,11 +136,11 @@ public void afterBulk(long executionId, BulkRequest request, Throwable failure) final boolean finalRejectedAfterAllRetries = rejectedAfterAllRetries; assertResponse(prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchAllQuery()).setSize(0), results -> { if (rejectedExecutionExpected) { - assertThat((int) results.getHits().getTotalHits().value, lessThanOrEqualTo(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), lessThanOrEqualTo(numberOfAsyncOps)); } else if (finalRejectedAfterAllRetries) { - assertThat((int) results.getHits().getTotalHits().value, lessThan(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), lessThan(numberOfAsyncOps)); } else { - assertThat((int) results.getHits().getTotalHits().value, equalTo(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), equalTo(numberOfAsyncOps)); } }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java index cde8d41b292b7..4977d87d5a348 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java @@ -90,7 +90,7 @@ public void testSingleBulkRequest() { assertResponse(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), searchResponse -> { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) 1)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo((long) 1)); }); assertFalse(refCounted.hasReferences()); @@ -268,7 +268,7 @@ public void testMultipleBulkPartsWithBackoff() { assertResponse(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), searchResponse -> { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(docs)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(docs)); }); } } @@ -358,7 +358,7 @@ public void testBulkLevelBulkFailureAfterFirstIncrementalRequest() throws Except assertResponse(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), searchResponse -> { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(hits.get())); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(hits.get())); }); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java index af99a0344e030..cd17c5b345c59 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java @@ -81,7 +81,7 @@ public void 
testMappingValidationIndexExists() { ); indicesAdmin().refresh(new RefreshRequest(indexName)).actionGet(); SearchResponse searchResponse = client().search(new SearchRequest(indexName)).actionGet(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)); searchResponse.decRef(); ClusterStateResponse clusterStateResponse = admin().cluster().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT)).actionGet(); Map indexMapping = clusterStateResponse.getState().metadata().index(indexName).mapping().sourceAsMap(); @@ -138,7 +138,7 @@ public void testMappingValidationIndexExistsTemplateSubstitutions() throws IOExc // Now make sure nothing was actually changed: indicesAdmin().refresh(new RefreshRequest(indexName)).actionGet(); SearchResponse searchResponse = client().search(new SearchRequest(indexName)).actionGet(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)); searchResponse.decRef(); ClusterStateResponse clusterStateResponse = admin().cluster().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT)).actionGet(); Map indexMapping = clusterStateResponse.getState().metadata().index(indexName).mapping().sourceAsMap(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java index 274cf90ec9529..f17196c3d97f1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java @@ -45,9 +45,9 @@ public void testIndexWithWriteDelayEnabled() throws Exception { try { logger.debug("running search"); assertResponse(prepareSearch("test"), response -> { - if (response.getHits().getTotalHits().value != numOfDocs) { + if 
(response.getHits().getTotalHits().value() != numOfDocs) { final String message = "Count is " - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + " but " + numOfDocs + " was expected. " diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java index 66323e687eefb..e47925cef913b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java @@ -612,7 +612,7 @@ public void testMissingShardsWithPointInTime() throws Exception { assertThat(resp.getSuccessfulShards(), equalTo(numShards - shardsRemoved)); assertThat(resp.getFailedShards(), equalTo(shardsRemoved)); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, lessThan((long) numDocs)); + assertThat(resp.getHits().getTotalHits().value(), lessThan((long) numDocs)); }); // create a PIT when some shards are missing @@ -637,7 +637,7 @@ public void testMissingShardsWithPointInTime() throws Exception { assertThat(resp.getFailedShards(), equalTo(shardsRemoved)); assertThat(resp.pointInTimeId(), equalTo(pointInTimeResponseOneNodeDown.getPointInTimeId())); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, lessThan((long) numDocs)); + assertThat(resp.getHits().getTotalHits().value(), lessThan((long) numDocs)); } ); @@ -661,7 +661,7 @@ public void testMissingShardsWithPointInTime() throws Exception { assertThat(resp.getSuccessfulShards(), equalTo(numShards)); assertThat(resp.getFailedShards(), equalTo(0)); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, greaterThan((long) numDocs)); + assertThat(resp.getHits().getTotalHits().value(), greaterThan((long) numDocs)); }); // ensure that when using the 
previously created PIT, we'd see the same number of documents as before regardless of the @@ -681,7 +681,7 @@ public void testMissingShardsWithPointInTime() throws Exception { } assertNotNull(resp.getHits().getTotalHits()); // we expect less documents as the newly indexed ones should not be part of the PIT - assertThat(resp.getHits().getTotalHits().value, lessThan((long) numDocs)); + assertThat(resp.getHits().getTotalHits().value(), lessThan((long) numDocs)); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java index d1a68c68e7de5..a1395f81eb091 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java @@ -143,7 +143,7 @@ public void testLocalClusterAlias() throws ExecutionException, InterruptedExcept randomBoolean() ); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals(1, searchResponse.getHits().getTotalHits().value()); SearchHit[] hits = searchResponse.getHits().getHits(); assertEquals(1, hits.length); SearchHit hit = hits[0]; @@ -162,7 +162,7 @@ public void testLocalClusterAlias() throws ExecutionException, InterruptedExcept randomBoolean() ); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals(1, searchResponse.getHits().getTotalHits().value()); SearchHit[] hits = searchResponse.getHits().getHits(); assertEquals(1, hits.length); SearchHit hit = hits[0]; @@ -221,7 +221,7 @@ public void testAbsoluteStartMillis() throws ExecutionException, InterruptedExce ); searchRequest.indices(""); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(1, 
searchResponse.getHits().getTotalHits().value); + assertEquals(1, searchResponse.getHits().getTotalHits().value()); assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex()); }); } @@ -241,7 +241,7 @@ public void testAbsoluteStartMillis() throws ExecutionException, InterruptedExce sourceBuilder.query(rangeQuery); searchRequest.source(sourceBuilder); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals(1, searchResponse.getHits().getTotalHits().value()); assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex()); }); } @@ -280,7 +280,7 @@ public void testFinalReduce() throws ExecutionException, InterruptedException { ? originalRequest : SearchRequest.subSearchRequest(taskId, originalRequest, Strings.EMPTY_ARRAY, "remote", nowInMillis, true); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); InternalAggregations aggregations = searchResponse.getAggregations(); LongTerms longTerms = aggregations.get("terms"); assertEquals(1, longTerms.getBuckets().size()); @@ -296,7 +296,7 @@ public void testFinalReduce() throws ExecutionException, InterruptedException { false ); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); InternalAggregations aggregations = searchResponse.getAggregations(); LongTerms longTerms = aggregations.get("terms"); assertEquals(2, longTerms.getBuckets().size()); @@ -432,7 +432,7 @@ public void testSearchIdle() throws Exception { () -> assertResponse( prepareSearch("test").setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) .setPreFilterShardSize(randomIntBetween(1, 3)), - resp -> 
assertThat(resp.getHits().getTotalHits().value, equalTo(2L)) + resp -> assertThat(resp.getHits().getTotalHits().value(), equalTo(2L)) ) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java index 848c5cacda1b9..b70da34c8fe3f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java @@ -396,7 +396,7 @@ public void testSearchingFilteringAliasesTwoIndices() throws Exception { ); assertResponse( prepareSearch("foos").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)) ); logger.info("--> checking filtering alias for one index"); @@ -406,7 +406,7 @@ public void testSearchingFilteringAliasesTwoIndices() throws Exception { ); assertResponse( prepareSearch("bars").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)) ); logger.info("--> checking filtering alias for two indices and one complete index"); @@ -416,7 +416,7 @@ public void testSearchingFilteringAliasesTwoIndices() throws Exception { ); assertResponse( prepareSearch("foos", "test1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(5L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(5L)) ); logger.info("--> checking filtering alias for two indices and non-filtering alias for one index"); @@ -426,17 +426,17 @@ public void testSearchingFilteringAliasesTwoIndices() throws 
Exception { ); assertResponse( prepareSearch("foos", "aliasToTest1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(5L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(5L)) ); logger.info("--> checking filtering alias for two indices and non-filtering alias for both indices"); assertResponse( prepareSearch("foos", "aliasToTests").setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(8L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(8L)) ); assertResponse( prepareSearch("foos", "aliasToTests").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(8L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(8L)) ); logger.info("--> checking filtering alias for two indices and non-filtering alias for both indices"); @@ -446,7 +446,7 @@ public void testSearchingFilteringAliasesTwoIndices() throws Exception { ); assertResponse( prepareSearch("foos", "aliasToTests").setSize(0).setQuery(QueryBuilders.termQuery("name", "something")), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)) ); } @@ -508,7 +508,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { ); assertResponse( prepareSearch("filter23", "filter13").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(4L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(4L)) ); assertResponse( @@ -517,7 +517,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception 
{ ); assertResponse( prepareSearch("filter23", "filter1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(5L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(5L)) ); assertResponse( @@ -526,7 +526,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { ); assertResponse( prepareSearch("filter13", "filter1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(4L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(4L)) ); assertResponse( @@ -535,7 +535,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { ); assertResponse( prepareSearch("filter13", "filter1", "filter23").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(6L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(6L)) ); assertResponse( @@ -544,7 +544,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { ); assertResponse( prepareSearch("filter23", "filter13", "test2").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(6L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(6L)) ); assertResponse( @@ -553,7 +553,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { ); assertResponse( prepareSearch("filter23", "filter13", "test1", "test2").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(8L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(8L)) ); } @@ -608,7 +608,7 @@ 
public void testDeletingByQueryFilteringAliases() throws Exception { logger.info("--> checking counts before delete"); assertResponse( prepareSearch("bars").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)) ); } @@ -1399,7 +1399,7 @@ private void checkAliases() { } private void assertHits(SearchHits hits, String... ids) { - assertThat(hits.getTotalHits().value, equalTo((long) ids.length)); + assertThat(hits.getTotalHits().value(), equalTo((long) ids.length)); Set hitIds = new HashSet<>(); for (SearchHit hit : hits.getHits()) { hitIds.add(hit.getId()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/broadcast/BroadcastActionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/broadcast/BroadcastActionsIT.java index 4e7c22f0d8847..f7dae8a92c2d6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/broadcast/BroadcastActionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/broadcast/BroadcastActionsIT.java @@ -44,7 +44,7 @@ public void testBroadcastOperations() throws IOException { for (int i = 0; i < 5; i++) { // test successful assertResponse(prepareSearch("test").setSize(0).setQuery(matchAllQuery()), countResponse -> { - assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(countResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(countResponse.getTotalShards(), equalTo(numShards.numPrimaries)); assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries)); assertThat(countResponse.getFailedShards(), equalTo(0)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java index eb10877f5892d..97994a38c277c 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java @@ -152,7 +152,7 @@ public void testIndexActions() throws Exception { for (int i = 0; i < 5; i++) { // test successful assertNoFailuresAndResponse(prepareSearch("test").setSize(0).setQuery(matchAllQuery()), countResponse -> { - assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(countResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries)); assertThat(countResponse.getFailedShards(), equalTo(0)); }); @@ -164,7 +164,7 @@ public void testIndexActions() throws Exception { countResponse.getShardFailures() == null ? 0 : countResponse.getShardFailures().length, equalTo(0) ); - assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(countResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries)); assertThat(countResponse.getFailedShards(), equalTo(0)); }); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java index 5da9788e3079f..4d1ed9bce6440 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java @@ -115,7 +115,7 @@ public void testFinalPipelineOfOldDestinationIsNotInvoked() { .get(); assertEquals(RestStatus.CREATED, indexResponse.status()); assertResponse(prepareSearch("target"), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertFalse(response.getHits().getAt(0).getSourceAsMap().containsKey("final")); }); } @@ -139,7 +139,7 @@ public void 
testFinalPipelineOfNewDestinationIsInvoked() { .get(); assertEquals(RestStatus.CREATED, indexResponse.status()); assertResponse(prepareSearch("target"), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertEquals(true, response.getHits().getAt(0).getSourceAsMap().get("final")); }); } @@ -163,7 +163,7 @@ public void testDefaultPipelineOfNewDestinationIsNotInvoked() { .get(); assertEquals(RestStatus.CREATED, indexResponse.status()); assertResponse(prepareSearch("target"), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertFalse(response.getHits().getAt(0).getSourceAsMap().containsKey("final")); }); } @@ -187,7 +187,7 @@ public void testDefaultPipelineOfRerouteDestinationIsInvoked() { .get(); assertEquals(RestStatus.CREATED, indexResponse.status()); assertResponse(prepareSearch("target"), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertTrue(response.getHits().getAt(0).getSourceAsMap().containsKey("final")); }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java index 7b7433e3aa4c3..cb280d5577fae 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java @@ -107,7 +107,7 @@ public void testMaxDocsLimit() throws Exception { indicesAdmin().prepareRefresh("test").get(); assertNoFailuresAndResponse( prepareSearch("test").setQuery(new MatchAllQueryBuilder()).setTrackTotalHitsUpTo(Integer.MAX_VALUE).setSize(0), - response -> assertThat(response.getHits().getTotalHits().value, equalTo((long) maxDocs.get())) + response -> 
assertThat(response.getHits().getTotalHits().value(), equalTo((long) maxDocs.get())) ); if (randomBoolean()) { indicesAdmin().prepareFlush("test").get(); @@ -117,7 +117,7 @@ public void testMaxDocsLimit() throws Exception { ensureGreen("test"); assertNoFailuresAndResponse( prepareSearch("test").setQuery(new MatchAllQueryBuilder()).setTrackTotalHitsUpTo(Integer.MAX_VALUE).setSize(0), - response -> assertThat(response.getHits().getTotalHits().value, equalTo((long) maxDocs.get())) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo((long) maxDocs.get())) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java index 81a0e0ede7cd3..1194218c68ff1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java @@ -46,7 +46,7 @@ public void testDynamicTemplateCopyTo() throws Exception { AggregationBuilders.terms("test_raw").field("test_field_raw").size(recordCount * 2).collectMode(aggCollectionMode) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo((long) recordCount)); + assertThat(response.getHits().getTotalHits().value(), equalTo((long) recordCount)); assertThat(((Terms) response.getAggregations().get("test")).getBuckets().size(), equalTo(recordCount + 1)); assertThat(((Terms) response.getAggregations().get("test_raw")).getBuckets().size(), equalTo(recordCount)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/store/ExceptionRetryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/store/ExceptionRetryIT.java index 03afabaae1d0d..902dd911ddcd3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/store/ExceptionRetryIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/index/store/ExceptionRetryIT.java @@ -115,7 +115,7 @@ public void testRetryDueToExceptionOnNetworkLayer() throws ExecutionException, I assertResponse( prepareSearch("index").setQuery(termQuery("_id", response.getHits().getHits()[i].getId())).setExplain(true), dupIdResponse -> { - assertThat(dupIdResponse.getHits().getTotalHits().value, greaterThan(1L)); + assertThat(dupIdResponse.getHits().getTotalHits().value(), greaterThan(1L)); logger.info("found a duplicate id:"); for (SearchHit hit : dupIdResponse.getHits()) { logger.info("Doc {} was found on shard {}", hit.getId(), hit.getShard().getShardId()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java index 62c5f934ec8b6..37fbc95d56506 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java @@ -57,9 +57,9 @@ public void testAutoGenerateIdNoDuplicates() throws Exception { try { logger.debug("running search with all types"); assertResponse(prepareSearch("test"), response -> { - if (response.getHits().getTotalHits().value != numOfDocs) { + if (response.getHits().getTotalHits().value() != numOfDocs) { final String message = "Count is " - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + " but " + numOfDocs + " was expected. 
" @@ -77,9 +77,9 @@ public void testAutoGenerateIdNoDuplicates() throws Exception { try { logger.debug("running search with a specific type"); assertResponse(prepareSearch("test"), response -> { - if (response.getHits().getTotalHits().value != numOfDocs) { + if (response.getHits().getTotalHits().value() != numOfDocs) { final String message = "Count is " - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + " but " + numOfDocs + " was expected. " diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java index 7db810fc70ac1..52492ba7ce657 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java @@ -149,7 +149,7 @@ public void testQueryRewrite() throws Exception { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 5); @@ -161,7 +161,7 @@ public void testQueryRewrite() throws Exception { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); @@ -174,7 +174,7 @@ public void testQueryRewrite() throws Exception { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, 
"index", 6, 9); @@ -217,7 +217,7 @@ public void testQueryRewriteMissingValues() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-28")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index", 0, 1); @@ -229,7 +229,7 @@ public void testQueryRewriteMissingValues() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-28")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index", 1, 1); @@ -241,7 +241,7 @@ public void testQueryRewriteMissingValues() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-28")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index", 2, 1); @@ -286,7 +286,7 @@ public void testQueryRewriteDates() throws Exception { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(9L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(9L)); } ); assertCacheState(client, "index", 0, 1); @@ -299,7 +299,7 @@ public void testQueryRewriteDates() throws Exception { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(9L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(9L)); } ); 
assertCacheState(client, "index", 1, 1); @@ -312,7 +312,7 @@ public void testQueryRewriteDates() throws Exception { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(9L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(9L)); } ); assertCacheState(client, "index", 2, 1); @@ -364,7 +364,7 @@ public void testQueryRewriteDatesWithNow() throws Exception { .setQuery(QueryBuilders.rangeQuery("d").gte("now-7d/d").lte("now")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index-1", 0, 1); @@ -381,7 +381,7 @@ public void testQueryRewriteDatesWithNow() throws Exception { .setQuery(QueryBuilders.rangeQuery("d").gte("now-7d/d").lte("now")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index-1", 1, 1); @@ -395,7 +395,7 @@ public void testQueryRewriteDatesWithNow() throws Exception { .setQuery(QueryBuilders.rangeQuery("d").gte("now-7d/d").lte("now")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index-1", 2, 1); @@ -440,7 +440,7 @@ public void testCanCache() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-25")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); 
assertCacheState(client, "index", 0, 0); @@ -453,7 +453,7 @@ public void testCanCache() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-20").lte("2016-03-26")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 0); @@ -468,7 +468,7 @@ public void testCanCache() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-20").lte("2016-03-26")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 0); @@ -483,7 +483,7 @@ public void testCanCache() throws Exception { .addAggregation(dateRange("foo").field("s").addRange("now-10y", "now")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 0); @@ -497,7 +497,7 @@ public void testCanCache() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-21").lte("2016-03-27")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 2); @@ -512,7 +512,7 @@ public void testCanCache() throws Exception { .addAggregation(filter("foo", QueryBuilders.rangeQuery("s").from("now-10y").to("now"))), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); 
assertCacheState(client, "index", 0, 4); @@ -543,7 +543,7 @@ public void testCacheWithFilteredAlias() { .setQuery(QueryBuilders.rangeQuery("created_at").gte("now-7d/d")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); } ); assertCacheState(client, "index", 0, 1); @@ -555,20 +555,20 @@ public void testCacheWithFilteredAlias() { .setQuery(QueryBuilders.rangeQuery("created_at").gte("now-7d/d")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); } ); assertCacheState(client, "index", 1, 1); assertResponse(client.prepareSearch("last_week").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); }); assertCacheState(client, "index", 1, 2); assertResponse(client.prepareSearch("last_week").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); }); assertCacheState(client, "index", 2, 2); } @@ -591,7 +591,7 @@ public void testProfileDisableCache() throws Exception { client.prepareSearch("index").setRequestCache(true).setProfile(profile).setQuery(QueryBuilders.termQuery("k", "hello")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); } ); if (profile == false) { diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java index a6b168af5268d..cbb0a67edcb83 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java @@ -229,7 +229,7 @@ public void testCloseWhileRelocatingShards() throws Exception { for (String index : acknowledgedCloses) { assertResponse(prepareSearch(index).setSize(0).setTrackTotalHits(true), response -> { - long docsCount = response.getHits().getTotalHits().value; + long docsCount = response.getHits().getTotalHits().value(); assertEquals( "Expected " + docsPerIndex.get(index) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java index 942f86017c617..77c4f8a26f478 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java @@ -344,7 +344,7 @@ private void iterateAssertCount(final int numberOfShards, final int iterations, prepareSearch().setSize((int) numberOfDocs).setQuery(matchAllQuery()).setTrackTotalHits(true).addSort("id", SortOrder.ASC), response -> { logSearchResponse(numberOfShards, numberOfDocs, finalI, response); - iterationHitCount[finalI] = response.getHits().getTotalHits().value; + iterationHitCount[finalI] = response.getHits().getTotalHits().value(); if (iterationHitCount[finalI] != numberOfDocs) { error[0] = true; } @@ -391,7 +391,7 @@ private void iterateAssertCount(final int numberOfShards, final int iterations, boolean[] errorOccurred = new boolean[1]; for (int i = 0; i < iterations; i++) { 
assertResponse(prepareSearch().setTrackTotalHits(true).setSize(0).setQuery(matchAllQuery()), response -> { - if (response.getHits().getTotalHits().value != numberOfDocs) { + if (response.getHits().getTotalHits().value() != numberOfDocs) { errorOccurred[0] = true; } }); @@ -421,7 +421,7 @@ private void logSearchResponse(int numberOfShards, long numberOfDocs, int iterat logger.info( "iteration [{}] - returned documents: {} (expected {})", iteration, - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), numberOfDocs ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java index fb1fabfd198e6..2c56f75b051eb 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java @@ -240,7 +240,7 @@ public void testRelocationWhileIndexingRandom() throws Exception { prepareSearch("test").setQuery(matchAllQuery()).setSize((int) indexer.totalIndexedDocs()).storedFields(), response -> { var hits = response.getHits(); - if (hits.getTotalHits().value != indexer.totalIndexedDocs()) { + if (hits.getTotalHits().value() != indexer.totalIndexedDocs()) { int[] hitIds = new int[(int) indexer.totalIndexedDocs()]; for (int hit = 0; hit < indexer.totalIndexedDocs(); hit++) { hitIds[hit] = hit + 1; @@ -254,7 +254,7 @@ public void testRelocationWhileIndexingRandom() throws Exception { } set.forEach(value -> logger.error("Missing id [{}]", value)); } - assertThat(hits.getTotalHits().value, equalTo(indexer.totalIndexedDocs())); + assertThat(hits.getTotalHits().value(), equalTo(indexer.totalIndexedDocs())); logger.info("--> DONE search test round {}", idx + 1); } ); @@ -364,9 +364,9 @@ public void indexShardStateChanged( for (Client client : clients()) { 
assertNoFailuresAndResponse(client.prepareSearch("test").setPreference("_local").setSize(0), response -> { if (expectedCount[0] < 0) { - expectedCount[0] = response.getHits().getTotalHits().value; + expectedCount[0] = response.getHits().getTotalHits().value(); } else { - assertEquals(expectedCount[0], response.getHits().getTotalHits().value); + assertEquals(expectedCount[0], response.getHits().getTotalHits().value()); } }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java index 45dce5789b9bc..199c9a9fb4c8c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java @@ -296,7 +296,7 @@ public void testAliasSearchRoutingWithConcreteAndAliasedIndices_issue3268() thro prepareSearch("index_*").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(1).setQuery(QueryBuilders.matchAllQuery()), response -> { logger.info("--> search all on index_* should find two"); - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); // Let's make sure that, even though 2 docs are available, only one is returned according to the size we set in the request // Therefore the reduce phase has taken place, which proves that the QUERY_AND_FETCH search type wasn't erroneously forced. 
assertThat(response.getHits().getHits().length, equalTo(1)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/routing/PartitionedRoutingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/routing/PartitionedRoutingIT.java index 7bccf3db1284e..68bc6656cec7f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/routing/PartitionedRoutingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/routing/PartitionedRoutingIT.java @@ -160,7 +160,7 @@ private void verifyRoutedSearches(String index, Map> routing + "] shards for routing [" + routing + "] and got hits [" - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + "]" ); @@ -168,7 +168,7 @@ private void verifyRoutedSearches(String index, Map> routing response.getTotalShards() + " was not in " + expectedShards + " for " + index, expectedShards.contains(response.getTotalShards()) ); - assertEquals(expectedDocuments, response.getHits().getTotalHits().value); + assertEquals(expectedDocuments, response.getHits().getTotalHits().value()); Set found = new HashSet<>(); response.getHits().forEach(h -> found.add(h.getId())); @@ -188,7 +188,7 @@ private void verifyBroadSearches(String index, Map> routingT prepareSearch().setQuery(QueryBuilders.termQuery("_routing", routing)).setIndices(index).setSize(100), response -> { assertEquals(expectedShards, response.getTotalShards()); - assertEquals(expectedDocuments, response.getHits().getTotalHits().value); + assertEquals(expectedDocuments, response.getHits().getTotalHits().value()); Set found = new HashSet<>(); response.getHits().forEach(h -> found.add(h.getId())); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java index ee1aac60da9c1..f63f09764621b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java @@ -64,7 +64,7 @@ public void testTopHitsTimeout() { assertEquals(0, searchResponse.getFailedShards()); assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertThat(searchResponse.getHits().getHits().length, greaterThan(0)); } @@ -81,7 +81,7 @@ public void testAggsTimeout() { assertEquals(0, searchResponse.getFailedShards()); assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertEquals(searchResponse.getHits().getHits().length, 0); StringTerms terms = searchResponse.getAggregations().get("terms"); assertEquals(1, terms.getBuckets().size()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java index d023c9de87ca5..4a407ae66f7ad 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java @@ -115,7 +115,7 @@ public void testSubAggregationForTopAggregationOnUnmappedField() throws Exceptio histogram("values").field("value1").interval(1).subAggregation(terms("names").field("name").collectMode(aggCollectionMode)) ), response -> { - assertThat(response.getHits().getTotalHits().value, Matchers.equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), Matchers.equalTo(0L)); Histogram values = 
response.getAggregations().get("values"); assertThat(values, notNullValue()); assertThat(values.getBuckets().isEmpty(), is(true)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java index 5a21b600cacd4..1a6e1519d4402 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java @@ -293,7 +293,7 @@ public void testDuelTerms() throws Exception { ), response -> { assertAllSuccessful(response); - assertEquals(numDocs, response.getHits().getTotalHits().value); + assertEquals(numDocs, response.getHits().getTotalHits().value()); final Terms longTerms = response.getAggregations().get("long"); final Terms doubleTerms = response.getAggregations().get("double"); @@ -413,7 +413,7 @@ public void testLargeNumbersOfPercentileBuckets() throws Exception { ), response -> { assertAllSuccessful(response); - assertEquals(numDocs, response.getHits().getTotalHits().value); + assertEquals(numDocs, response.getHits().getTotalHits().value()); } ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java index a820e6e8d1747..2bd19c9d32d44 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java @@ -57,7 +57,7 @@ public void testWrapperQueryIsRewritten() throws IOException { metadata.put(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20)); builder.setMetadata(metadata); assertResponse(client().prepareSearch("test").setSize(0).addAggregation(builder), response -> { - 
assertEquals(3, response.getHits().getTotalHits().value); + assertEquals(3, response.getHits().getTotalHits().value()); InternalFilters filters = response.getAggregations().get("titles"); assertEquals(1, filters.getBuckets().size()); assertEquals(2, filters.getBuckets().get(0).getDocCount()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index a8e2ca818d3f4..c4560c1b00079 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -974,7 +974,7 @@ public void testEmptyAggregation() throws Exception { .subAggregation(dateHistogram("date_histo").field("value").fixedInterval(DateHistogramInterval.HOUR)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); List buckets = histo.getBuckets(); @@ -1011,7 +1011,7 @@ public void testSingleValueWithTimeZone() throws Exception { .format("yyyy-MM-dd:HH-mm-ssZZZZZ") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(5L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(5L)); Histogram histo = response.getAggregations().get("date_histo"); List buckets = histo.getBuckets(); @@ -1175,7 +1175,7 @@ public void testSingleValueFieldWithExtendedBoundsTimezone() throws Exception { assertThat( "Expected 24 buckets for one day aggregation with hourly interval", - response.getHits().getTotalHits().value, + response.getHits().getTotalHits().value(), equalTo(2L) ); diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java index 778be4ee0705f..21b36391781b8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java @@ -78,7 +78,7 @@ public void testSingleValueWithPositiveOffset() throws Exception { dateHistogram("date_histo").field("date").offset("2h").format(DATE_FORMAT).fixedInterval(DateHistogramInterval.DAY) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(5L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(5L)); Histogram histo = response.getAggregations().get("date_histo"); List buckets = histo.getBuckets(); @@ -99,7 +99,7 @@ public void testSingleValueWithNegativeOffset() throws Exception { dateHistogram("date_histo").field("date").offset("-2h").format(DATE_FORMAT).fixedInterval(DateHistogramInterval.DAY) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(5L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(5L)); Histogram histo = response.getAggregations().get("date_histo"); List buckets = histo.getBuckets(); @@ -128,7 +128,7 @@ public void testSingleValueWithOffsetMinDocCount() throws Exception { .fixedInterval(DateHistogramInterval.DAY) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(24L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(24L)); Histogram histo = response.getAggregations().get("date_histo"); List buckets = histo.getBuckets(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java 
index afa3ad9d7e737..9ec459ee565e5 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java @@ -578,7 +578,7 @@ public void testEmptyAggregation() throws Exception { .subAggregation(dateRange("date_range").field("value").addRange("0-1", 0, 1)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -722,7 +722,7 @@ public void testRangeWithFormatStringValue() throws Exception { prepareSearch(indexName).setSize(0) .addAggregation(dateRange("date_range").field("date").addRange("00:16:40", "00:50:00").addRange("00:50:00", "01:06:40")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "00:16:40-00:50:00", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "00:50:00-01:06:40", 3000000L, 4000000L); @@ -739,7 +739,7 @@ public void testRangeWithFormatStringValue() throws Exception { .format("HH.mm.ss") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "00.16.40-00.50.00", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "00.50.00-01.06.40", 3000000L, 4000000L); @@ -753,7 +753,7 @@ public void testRangeWithFormatStringValue() throws Exception { dateRange("date_range").field("date").addRange(1000000, 
3000000).addRange(3000000, 4000000).format("epoch_millis") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000000-3000000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000000-4000000", 3000000L, 4000000L); @@ -788,7 +788,7 @@ public void testRangeWithFormatNumericValue() throws Exception { prepareSearch(indexName).setSize(0) .addAggregation(dateRange("date_range").field("date").addRange(1000, 3000).addRange(3000, 4000)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); @@ -799,7 +799,7 @@ public void testRangeWithFormatNumericValue() throws Exception { prepareSearch(indexName).setSize(0) .addAggregation(dateRange("date_range").field("date").addRange("1000", "3000").addRange("3000", "4000")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); @@ -810,7 +810,7 @@ public void testRangeWithFormatNumericValue() throws Exception { prepareSearch(indexName).setSize(0) .addAggregation(dateRange("date_range").field("date").addRange(1.0e3, 3000.8123).addRange(3000.8123, 4.0e3)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + 
assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); @@ -827,7 +827,7 @@ public void testRangeWithFormatNumericValue() throws Exception { .format("HH.mm.ss") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "00.16.40-00.50.00", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "00.50.00-01.06.40", 3000000L, 4000000L); @@ -841,7 +841,7 @@ public void testRangeWithFormatNumericValue() throws Exception { dateRange("date_range").field("date").addRange(1000000, 3000000).addRange(3000000, 4000000).format("epoch_millis") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000000-3000000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000000-4000000", 3000000L, 4000000L); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java index 1b70b859426d5..96807ed119866 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java @@ -159,7 +159,7 @@ public void testEmptyAggregation() throws Exception { 
histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(filter("filter", matchAllQuery())) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java index b030370215cd3..439583de910c1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java @@ -247,7 +247,7 @@ public void testEmptyAggregation() throws Exception { .subAggregation(filters("filters", new KeyedFilter("all", matchAllQuery()))) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -455,7 +455,7 @@ public void testEmptyAggregationWithOtherBucket() throws Exception { .subAggregation(filters("filters", new KeyedFilter("foo", matchAllQuery())).otherBucket(true).otherBucketKey("bar")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java index 843e50a5a7e21..907f943e68422 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java @@ -413,7 +413,7 @@ public void testEmptyAggregation() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java index 2edd567221bef..ad65e6468b812 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java @@ -915,7 +915,7 @@ public void testEmptyAggregation() throws Exception { .subAggregation(histogram("sub_histo").field(SINGLE_VALUED_FIELD_NAME).interval(1L)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); List buckets = histo.getBuckets(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java index 72f1b0cc56b25..5e7cffcc8ef0d 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java @@ -351,7 +351,7 @@ public void testEmptyAggregation() throws Exception { prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(nested("nested", "nested"))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java index 8b63efd92a648..1cfd6e00af7ab 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java @@ -866,7 +866,7 @@ public void testEmptyAggregation() throws Exception { .subAggregation(range("range").field(SINGLE_VALUED_FIELD_NAME).addRange("0-2", 0.0, 2.0)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java index 5e2a44285e8fa..29bf8a8a0b45a 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java @@ -97,7 +97,7 @@ public void testEmptyAggregation() throws Exception { histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(extendedStats("stats").field("value")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -130,7 +130,7 @@ public void testUnmapped() throws Exception { assertResponse( prepareSearch("idx_unmapped").setQuery(matchAllQuery()).addAggregation(extendedStats("stats").field("value")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); ExtendedStats stats = response.getAggregations().get("stats"); assertThat(stats, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java index 762bc5bdfaf39..ff4150556c011 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java @@ -112,7 +112,7 @@ public void testEmptyAggregation() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = 
histo.getBuckets().get(1); @@ -138,7 +138,7 @@ public void testUnmapped() throws Exception { .field("value") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); PercentileRanks reversePercentiles = response.getAggregations().get("percentile_ranks"); assertThat(reversePercentiles, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java index 12ed0a5c1a8e0..fe6dc7abf66a8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java @@ -116,7 +116,7 @@ public void testEmptyAggregation() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -143,7 +143,7 @@ public void testUnmapped() throws Exception { .percentiles(0, 10, 15, 100) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); Percentiles percentiles = response.getAggregations().get("percentiles"); assertThat(percentiles, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java index 52425ae1d9f17..4c8fed2c16ddc 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java @@ -358,7 +358,7 @@ public void testMap() { prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(scriptedMetric("scripted").mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -407,7 +407,7 @@ public void testMapWithParams() { .reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -467,7 +467,7 @@ public void testInitMutatesParams() { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -522,7 +522,7 @@ public void testMapCombineWithParams() { scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -586,7 +586,7 @@ public void testInitMapCombineWithParams() { .reduceScript(reduceScript) ), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -655,7 +655,7 @@ public void testInitMapCombineReduceWithParams() { .reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -714,7 +714,7 @@ public void testInitMapCombineReduceGetProperty() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Global global = response.getAggregations().get("global"); assertThat(global, notNullValue()); @@ -773,7 +773,7 @@ public void testMapCombineReduceWithParams() { scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -824,7 +824,7 @@ public void testInitMapReduceWithParams() { .reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -869,7 +869,7 @@ public void testMapReduceWithParams() { scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) ), 
response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -928,7 +928,7 @@ public void testInitMapCombineReduceWithParamsAndReduceParams() { .reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -964,7 +964,7 @@ public void testInitMapCombineReduceWithParamsStored() { .reduceScript(new Script(ScriptType.STORED, null, "reduceScript_stored", Collections.emptyMap())) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -1025,7 +1025,7 @@ public void testInitMapCombineReduceWithParamsAsSubAgg() { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("histo"); assertThat(aggregation, notNullValue()); assertThat(aggregation, instanceOf(Histogram.class)); @@ -1099,7 +1099,7 @@ public void testEmptyAggregation() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Bucket bucket = histo.getBuckets().get(1); diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java index fbe70ec2a40d6..1169f8bbdbf18 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java @@ -56,7 +56,7 @@ public void testEmptyAggregation() throws Exception { ), response -> { assertShardExecutionState(response, 0); - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java index 2a8be6b4244dd..b3ad5c578e618 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java @@ -82,7 +82,7 @@ public void testEmptyAggregation() throws Exception { prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(sum("sum").field("value"))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java index 2877f8882d6d6..d6cceb2013701 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java @@ -105,7 +105,7 @@ public void testEmptyAggregation() throws Exception { .subAggregation(randomCompression(percentileRanks("percentile_ranks", new double[] { 10, 15 }).field("value"))) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -146,7 +146,7 @@ public void testUnmapped() throws Exception { prepareSearch("idx_unmapped").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks", new double[] { 0, 10, 15, 100 })).field("value")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); PercentileRanks reversePercentiles = response.getAggregations().get("percentile_ranks"); assertThat(reversePercentiles, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java index bbcf7b191fe1b..b4072bcf226ed 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java @@ -111,7 +111,7 @@ public void testEmptyAggregation() throws Exception { 
.subAggregation(randomCompression(percentiles("percentiles").field("value")).percentiles(10, 15)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -132,7 +132,7 @@ public void testUnmapped() throws Exception { prepareSearch("idx_unmapped").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentiles("percentiles")).field("value").percentiles(0, 10, 15, 100)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); Percentiles percentiles = response.getAggregations().get("percentiles"); assertThat(percentiles, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java index 7ac8e3c7a35b4..80c47d6180db0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java @@ -328,7 +328,7 @@ public void testBasics() throws Exception { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); higestSortValue += 10; assertThat((Long) hits.getAt(0).getSortValues()[0], equalTo(higestSortValue)); @@ -348,7 +348,7 @@ public void testIssue11119() throws Exception { .setQuery(matchQuery("text", "x y z")) 
.addAggregation(terms("terms").executionHint(randomExecutionHint()).field("group").subAggregation(topHits("hits"))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); assertThat(response.getHits().getHits().length, equalTo(0)); assertThat(response.getHits().getMaxScore(), equalTo(Float.NaN)); Terms terms = response.getAggregations().get("terms"); @@ -381,7 +381,7 @@ public void testIssue11119() throws Exception { .setQuery(matchQuery("text", "x y z")) .addAggregation(terms("terms").executionHint(randomExecutionHint()).field("group")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); assertThat(response.getHits().getHits().length, equalTo(0)); assertThat(response.getHits().getMaxScore(), equalTo(Float.NaN)); Terms terms = response.getAggregations().get("terms"); @@ -413,7 +413,7 @@ public void testBreadthFirstWithScoreNeeded() throws Exception { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); assertThat(hits.getAt(0).getSourceAsMap().size(), equalTo(5)); @@ -444,7 +444,7 @@ public void testBreadthFirstWithAggOrderAndScoreNeeded() throws Exception { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); assertThat(hits.getAt(0).getSourceAsMap().size(), equalTo(5)); @@ -501,7 +501,7 @@ public void testPagination() throws Exception { assertThat(bucket.getDocCount(), equalTo(10L)); 
TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(controlHits.getTotalHits().value)); + assertThat(hits.getTotalHits().value(), equalTo(controlHits.getTotalHits().value())); assertThat(hits.getHits().length, equalTo(controlHits.getHits().length)); for (int i = 0; i < hits.getHits().length; i++) { logger.info( @@ -543,7 +543,7 @@ public void testSortByBucket() throws Exception { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); assertThat(hits.getAt(0).getSortValues()[0], equalTo(higestSortValue)); assertThat(hits.getAt(1).getSortValues()[0], equalTo(higestSortValue - 1)); @@ -578,7 +578,7 @@ public void testFieldCollapsing() throws Exception { assertThat(key(bucket), equalTo("b")); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(4L)); + assertThat(hits.getTotalHits().value(), equalTo(4L)); assertThat(hits.getHits().length, equalTo(1)); assertThat(hits.getAt(0).getId(), equalTo("6")); @@ -586,7 +586,7 @@ public void testFieldCollapsing() throws Exception { assertThat(key(bucket), equalTo("c")); topHits = bucket.getAggregations().get("hits"); hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(3L)); + assertThat(hits.getTotalHits().value(), equalTo(3L)); assertThat(hits.getHits().length, equalTo(1)); assertThat(hits.getAt(0).getId(), equalTo("9")); @@ -594,7 +594,7 @@ public void testFieldCollapsing() throws Exception { assertThat(key(bucket), equalTo("a")); topHits = bucket.getAggregations().get("hits"); hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(2L)); + 
assertThat(hits.getTotalHits().value(), equalTo(2L)); assertThat(hits.getHits().length, equalTo(1)); assertThat(hits.getAt(0).getId(), equalTo("2")); } @@ -630,7 +630,7 @@ public void testFetchFeatures() throws IOException { for (Terms.Bucket bucket : terms.getBuckets()) { TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(1)); SearchHit hit = hits.getAt(0); @@ -682,7 +682,7 @@ public void testEmptyIndex() throws Exception { TopHits hits = response.getAggregations().get("hits"); assertThat(hits, notNullValue()); assertThat(hits.getName(), equalTo("hits")); - assertThat(hits.getHits().getTotalHits().value, equalTo(0L)); + assertThat(hits.getHits().getTotalHits().value(), equalTo(0L)); }); } @@ -744,7 +744,7 @@ public void testTopHitsInNestedSimple() throws Exception { assertThat(bucket.getDocCount(), equalTo(1L)); TopHits topHits = bucket.getAggregations().get("top-comments"); SearchHits searchHits = topHits.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(1L)); + assertThat(searchHits.getTotalHits().value(), equalTo(1L)); assertThat(searchHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); assertThat(extractValue("date", searchHits.getAt(0).getSourceAsMap()), equalTo(1)); @@ -753,7 +753,7 @@ public void testTopHitsInNestedSimple() throws Exception { assertThat(bucket.getDocCount(), equalTo(2L)); topHits = bucket.getAggregations().get("top-comments"); searchHits = topHits.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(2L)); + assertThat(searchHits.getTotalHits().value(), equalTo(2L)); assertThat(searchHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(0).getNestedIdentity().getOffset(), 
equalTo(1)); assertThat(extractValue("date", searchHits.getAt(0).getSourceAsMap()), equalTo(2)); @@ -765,7 +765,7 @@ public void testTopHitsInNestedSimple() throws Exception { assertThat(bucket.getDocCount(), equalTo(1L)); topHits = bucket.getAggregations().get("top-comments"); searchHits = topHits.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(1L)); + assertThat(searchHits.getTotalHits().value(), equalTo(1L)); assertThat(searchHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(0).getNestedIdentity().getOffset(), equalTo(1)); assertThat(extractValue("date", searchHits.getAt(0).getSourceAsMap()), equalTo(4)); @@ -789,7 +789,7 @@ public void testTopHitsInSecondLayerNested() throws Exception { assertThat(toComments.getDocCount(), equalTo(4L)); TopHits topComments = toComments.getAggregations().get("top-comments"); - assertThat(topComments.getHits().getTotalHits().value, equalTo(4L)); + assertThat(topComments.getHits().getTotalHits().value(), equalTo(4L)); assertThat(topComments.getHits().getHits().length, equalTo(4)); assertThat(topComments.getHits().getAt(0).getId(), equalTo("2")); @@ -816,7 +816,7 @@ public void testTopHitsInSecondLayerNested() throws Exception { assertThat(toReviewers.getDocCount(), equalTo(7L)); TopHits topReviewers = toReviewers.getAggregations().get("top-reviewers"); - assertThat(topReviewers.getHits().getTotalHits().value, equalTo(7L)); + assertThat(topReviewers.getHits().getTotalHits().value(), equalTo(7L)); assertThat(topReviewers.getHits().getHits().length, equalTo(7)); assertThat(topReviewers.getHits().getAt(0).getId(), equalTo("1")); @@ -899,7 +899,7 @@ public void testNestedFetchFeatures() { assertThat(nested.getDocCount(), equalTo(4L)); SearchHits hits = ((TopHits) nested.getAggregations().get("top-comments")).getHits(); - assertThat(hits.getTotalHits().value, equalTo(4L)); + assertThat(hits.getTotalHits().value(), equalTo(4L)); SearchHit searchHit = hits.getAt(0); 
assertThat(searchHit.getId(), equalTo("1")); assertThat(searchHit.getNestedIdentity().getField().string(), equalTo("comments")); @@ -960,7 +960,7 @@ public void testTopHitsInNested() throws Exception { TopHits hits = nested.getAggregations().get("comments"); SearchHits searchHits = hits.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(numNestedDocs)); + assertThat(searchHits.getTotalHits().value(), equalTo(numNestedDocs)); for (int j = 0; j < 3; j++) { assertThat(searchHits.getAt(j).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(j).getNestedIdentity().getOffset(), equalTo(0)); @@ -1064,7 +1064,7 @@ public void testNoStoredFields() throws Exception { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); for (SearchHit hit : hits) { assertThat(hit.getSourceAsMap(), nullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java index 3dee7a8d6e92f..6e00c1e5a8d90 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java @@ -67,7 +67,7 @@ protected Collection> nodePlugins() { public void testUnmapped() throws Exception { assertResponse(prepareSearch("idx_unmapped").setQuery(matchAllQuery()).addAggregation(count("count").field("value")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); ValueCount valueCount = response.getAggregations().get("count"); 
assertThat(valueCount, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java index 3263be081a6f7..2cd22c6a65222 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java @@ -72,14 +72,14 @@ private void searchWhileCreatingIndex(boolean createIndex, int numberOfReplicas) .setPreference(preference + Integer.toString(counter++)) .setQuery(QueryBuilders.termQuery("field", "test")), searchResponse -> { - if (searchResponse.getHits().getTotalHits().value != 1) { + if (searchResponse.getHits().getTotalHits().value() != 1) { refresh(); assertResponse( client.prepareSearch("test").setPreference(preference).setQuery(QueryBuilders.termQuery("field", "test")), searchResponseAfterRefresh -> { logger.info( "hits count mismatch on any shard search failed, post explicit refresh hits are {}", - searchResponseAfterRefresh.getHits().getTotalHits().value + searchResponseAfterRefresh.getHits().getTotalHits().value() ); ensureGreen(); assertResponse( @@ -88,7 +88,7 @@ private void searchWhileCreatingIndex(boolean createIndex, int numberOfReplicas) .setQuery(QueryBuilders.termQuery("field", "test")), searchResponseAfterGreen -> logger.info( "hits count mismatch on any shard search failed, post explicit wait for green hits are {}", - searchResponseAfterGreen.getHits().getTotalHits().value + searchResponseAfterGreen.getHits().getTotalHits().value() ) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java index cab70ba7d7339..0d06856ca1088 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java @@ -77,7 +77,7 @@ public void run() { try { while (stop.get() == false) { assertResponse(prepareSearch().setSize(numDocs), response -> { - if (response.getHits().getTotalHits().value != numDocs) { + if (response.getHits().getTotalHits().value() != numDocs) { // if we did not search all shards but had no serious failures that is potentially fine // if only the hit-count is wrong. this can happen if the cluster-state is behind when the // request comes in. It's a small window but a known limitation. @@ -86,7 +86,7 @@ public void run() { .allMatch(ssf -> ssf.getCause() instanceof NoShardAvailableActionException)) { nonCriticalExceptions.add( "Count is " - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + " but " + numDocs + " was expected. " @@ -100,7 +100,7 @@ public void run() { final SearchHits sh = response.getHits(); assertThat( "Expected hits to be the same size the actual hits array", - sh.getTotalHits().value, + sh.getTotalHits().value(), equalTo((long) (sh.getHits().length)) ); }); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java index 1745ad82931ba..4b59d5b9a78d5 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java @@ -126,7 +126,7 @@ public void testDfsQueryThenFetch() throws Exception { .get(); while (true) { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); 
SearchHit[] hits = searchResponse.getHits().getHits(); if (hits.length == 0) { break; // finished @@ -169,7 +169,7 @@ public void testDfsQueryThenFetchWithSort() throws Exception { .get(); while (true) { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); SearchHit[] hits = searchResponse.getHits().getHits(); if (hits.length == 0) { break; // finished @@ -208,7 +208,7 @@ public void testQueryThenFetch() throws Exception { .get(); while (true) { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); SearchHit[] hits = searchResponse.getHits().getHits(); if (hits.length == 0) { break; // finished @@ -237,7 +237,7 @@ public void testQueryThenFetchWithFrom() throws Exception { assertNoFailuresAndResponse( client().search(new SearchRequest("test").source(source.from(0).size(60)).searchType(QUERY_THEN_FETCH)), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(60)); for (int i = 0; i < 60; i++) { SearchHit hit = searchResponse.getHits().getHits()[i]; @@ -248,7 +248,7 @@ public void testQueryThenFetchWithFrom() throws Exception { assertNoFailuresAndResponse( client().search(new SearchRequest("test").source(source.from(60).size(60)).searchType(QUERY_THEN_FETCH)), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(40)); for (int i = 0; i < 40; i++) { SearchHit hit = searchResponse.getHits().getHits()[i]; @@ -271,7 +271,7 @@ public void 
testQueryThenFetchWithSort() throws Exception { .get(); while (true) { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); SearchHit[] hits = searchResponse.getHits().getHits(); if (hits.length == 0) { break; // finished @@ -301,7 +301,7 @@ public void testSimpleFacets() throws Exception { .aggregation(AggregationBuilders.filter("test1", termQuery("name", "test1"))); assertNoFailuresAndResponse(client().search(new SearchRequest("test").source(sourceBuilder)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(100L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(100L)); Global global = response.getAggregations().get("global"); Filter all = global.getAggregations().get("all"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java index 223ee81e84a92..5233a0cd564ef 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java @@ -685,7 +685,7 @@ public void testDateMathIndexes() throws ExecutionException, InterruptedExceptio assertNotNull(localClusterSearchInfo); Cluster remoteClusterSearchInfo = clusters.getCluster(REMOTE_CLUSTER); assertNotNull(remoteClusterSearchInfo); - assertThat(Objects.requireNonNull(response.getHits().getTotalHits()).value, greaterThan(2L)); + assertThat(Objects.requireNonNull(response.getHits().getTotalHits()).value(), greaterThan(2L)); for (var hit : response.getHits()) { assertThat(hit.getIndex(), anyOf(equalTo("datemath-2001-01-01-14"), equalTo("remotemath-2001-01-01-14"))); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java index 2cb2e186b257e..91cc344614c23 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java @@ -139,7 +139,7 @@ private void hitExecute(FetchContext context, HitContext hitContext) throws IOEx hitField = new DocumentField(NAME, new ArrayList<>(1)); hitContext.hit().setDocumentField(NAME, hitField); } - Terms terms = hitContext.reader().getTermVector(hitContext.docId(), field); + Terms terms = hitContext.reader().termVectors().get(hitContext.docId(), field); if (terms != null) { TermsEnum te = terms.iterator(); Map tv = new HashMap<>(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java index 66d44a818b797..e39f8df9bad36 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java @@ -151,7 +151,7 @@ public void testSimpleNested() throws Exception { assertSearchHit(response, 1, hasId("1")); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(2L)); + assertThat(innerHits.getTotalHits().value(), equalTo(2L)); assertThat(innerHits.getHits().length, equalTo(2)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -171,7 +171,7 @@ public void testSimpleNested() throws Exception { assertThat(response.getHits().getAt(0).getShard(), notNullValue()); 
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(3L)); + assertThat(innerHits.getTotalHits().value(), equalTo(3L)); assertThat(innerHits.getHits().length, equalTo(3)); assertThat(innerHits.getAt(0).getId(), equalTo("2")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -196,7 +196,7 @@ public void testSimpleNested() throws Exception { ), response -> { SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getTotalHits().value, equalTo(2L)); + assertThat(innerHits.getTotalHits().value(), equalTo(2L)); assertThat(innerHits.getHits().length, equalTo(1)); HighlightField highlightField = innerHits.getAt(0).getHighlightFields().get("comments.message"); assertThat(highlightField.fragments()[0].string(), equalTo("fox eat quick")); @@ -264,7 +264,7 @@ public void testRandomNested() throws Exception { SearchHit searchHit = response.getHits().getAt(i); assertThat(searchHit.getShard(), notNullValue()); SearchHits inner = searchHit.getInnerHits().get("a"); - assertThat(inner.getTotalHits().value, equalTo((long) field1InnerObjects[i])); + assertThat(inner.getTotalHits().value(), equalTo((long) field1InnerObjects[i])); for (int j = 0; j < field1InnerObjects[i] && j < size; j++) { SearchHit innerHit = inner.getAt(j); assertThat(innerHit.getNestedIdentity().getField().string(), equalTo("field1")); @@ -273,7 +273,7 @@ public void testRandomNested() throws Exception { } inner = searchHit.getInnerHits().get("b"); - assertThat(inner.getTotalHits().value, equalTo((long) field2InnerObjects[i])); + assertThat(inner.getTotalHits().value(), equalTo((long) field2InnerObjects[i])); for (int j = 0; j < field2InnerObjects[i] && j < size; j++) { SearchHit innerHit = inner.getAt(j); 
assertThat(innerHit.getNestedIdentity().getField().string(), equalTo("field2")); @@ -378,13 +378,13 @@ public void testNestedMultipleLayers() throws Exception { assertSearchHit(response, 1, hasId("1")); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -409,13 +409,13 @@ public void testNestedMultipleLayers() throws Exception { assertSearchHit(response, 1, hasId("1")); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(1)); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); 
assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -436,7 +436,7 @@ public void testNestedMultipleLayers() throws Exception { assertSearchHit(response, 1, hasId("2")); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments.remarks"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("2")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -460,13 +460,13 @@ public void testNestedMultipleLayers() throws Exception { assertSearchHit(response, 1, hasId("2")); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("2")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("2")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -538,7 +538,7 @@ public void testNestedDefinedAsObject() throws Exception { response -> { assertHitCount(response, 1); 
assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getId(), equalTo("1")); assertThat( response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), @@ -613,7 +613,7 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { SearchHit parent = response.getHits().getAt(0); assertThat(parent.getId(), equalTo("1")); SearchHits inner = parent.getInnerHits().get("comments.messages"); - assertThat(inner.getTotalHits().value, equalTo(2L)); + assertThat(inner.getTotalHits().value(), equalTo(2L)); assertThat(inner.getAt(0).getSourceAsString(), equalTo("{\"message\":\"no fox\"}")); assertThat(inner.getAt(1).getSourceAsString(), equalTo("{\"message\":\"fox eat quick\"}")); } @@ -629,7 +629,7 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { SearchHit hit = response.getHits().getAt(0); assertThat(hit.getId(), equalTo("1")); SearchHits messages = hit.getInnerHits().get("comments.messages"); - assertThat(messages.getTotalHits().value, equalTo(2L)); + assertThat(messages.getTotalHits().value(), equalTo(2L)); assertThat(messages.getAt(0).getId(), equalTo("1")); assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages")); assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(2)); @@ -651,7 +651,7 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { SearchHit hit = response.getHits().getAt(0); assertThat(hit.getId(), equalTo("1")); SearchHits messages = hit.getInnerHits().get("comments.messages"); - assertThat(messages.getTotalHits().value, equalTo(1L)); + 
assertThat(messages.getTotalHits().value(), equalTo(1L)); assertThat(messages.getAt(0).getId(), equalTo("1")); assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages")); assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(1)); @@ -685,7 +685,7 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { SearchHit hit = response.getHits().getAt(0); assertThat(hit.getId(), equalTo("1")); SearchHits messages = hit.getInnerHits().get("comments.messages"); - assertThat(messages.getTotalHits().value, equalTo(1L)); + assertThat(messages.getTotalHits().value(), equalTo(1L)); assertThat(messages.getAt(0).getId(), equalTo("1")); assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages")); assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); @@ -786,22 +786,22 @@ public void testMatchesQueriesNestedInnerHits() throws Exception { ); assertNoFailuresAndResponse(prepareSearch("test").setQuery(query).setSize(numDocs).addSort("field1", SortOrder.ASC), response -> { assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo((long) numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo((long) numDocs)); assertThat(response.getHits().getAt(0).getId(), equalTo("0")); - assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test1")); assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getAt(1).getMatchedQueries().length, equalTo(1)); 
assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getAt(1).getMatchedQueries()[0], equalTo("test3")); assertThat(response.getHits().getAt(1).getId(), equalTo("1")); - assertThat(response.getHits().getAt(1).getInnerHits().get("nested1").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(1).getInnerHits().get("nested1").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(1).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(1).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test2")); for (int i = 2; i < numDocs; i++) { assertThat(response.getHits().getAt(i).getId(), equalTo(String.valueOf(i))); - assertThat(response.getHits().getAt(i).getInnerHits().get("nested1").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(i).getInnerHits().get("nested1").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(i).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(i).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test3")); } @@ -844,7 +844,7 @@ public void testNestedSource() throws Exception { response -> { assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat( response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().get("message"), @@ -865,7 +865,7 @@ public void testNestedSource() throws Exception { response -> { assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L)); + 
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(2)); assertThat( response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().get("message"), @@ -891,7 +891,7 @@ public void testNestedSource() throws Exception { ), response -> { assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(0)); } ); @@ -901,7 +901,7 @@ public void testNestedSource() throws Exception { .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.None).innerHit(new InnerHitBuilder())), response -> { assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(2L)); assertFalse(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().isEmpty()); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 0ce4f34463b03..0805d0f366b0f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -3340,7 +3340,7 @@ public void testGeoFieldHighlightingWithDifferentHighlighters() throws IOExcepti new 
SearchSourceBuilder().query(query).highlighter(new HighlightBuilder().field("*").highlighterType(highlighterType)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getHighlightFields().get("text").fragments().length, equalTo(1)); } ); @@ -3412,7 +3412,7 @@ public void testKeywordFieldHighlighting() throws IOException { .highlighter(new HighlightBuilder().field("*")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); HighlightField highlightField = response.getHits().getAt(0).getHighlightFields().get("keyword_field"); assertThat(highlightField.fragments()[0].string(), equalTo("some text")); } @@ -3569,7 +3569,7 @@ public void testHighlightQueryRewriteDatesWithNow() throws Exception { .should(QueryBuilders.termQuery("field", "hello")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertHighlight(response, 0, "field", 0, 1, equalTo("hello world")); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java index d1eb1ab533ab7..16e5e42e00c9f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java @@ -191,26 +191,26 @@ public void testStoredFields() throws Exception { indicesAdmin().prepareRefresh().get(); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("field1"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); 
assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1")); }); // field2 is not stored, check that it is not extracted from source. assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("field2"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(0)); assertThat(response.getHits().getAt(0).getFields().get("field2"), nullValue()); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("field3"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("*3"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); @@ -218,7 +218,7 @@ public void testStoredFields() throws Exception { assertResponse( prepareSearch().setQuery(matchAllQuery()).addStoredField("*3").addStoredField("field1").addStoredField("field2"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); 
+ assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); @@ -226,20 +226,20 @@ public void testStoredFields() throws Exception { } ); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("field*"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); assertThat(response.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("f*3"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("*"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap(), nullValue()); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); @@ -247,7 +247,7 @@ public void testStoredFields() throws Exception { 
assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("*").addStoredField("_source"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap(), notNullValue()); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); @@ -311,7 +311,7 @@ public void testScriptDocAndFields() throws Exception { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['date'].date.millis", Collections.emptyMap()) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertFalse(response.getHits().getAt(0).hasSource()); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); @@ -342,7 +342,7 @@ public void testScriptDocAndFields() throws Exception { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value * factor", Map.of("factor", 2.0)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat(fields, equalTo(singleton("sNum1"))); @@ -429,7 +429,7 @@ public void testIdBasedScriptFields() throws Exception { .setSize(numDocs) .addScriptField("id", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._id.value", Collections.emptyMap())), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo((long) numDocs)); + 
assertThat(response.getHits().getTotalHits().value(), equalTo((long) numDocs)); for (int i = 0; i < numDocs; i++) { assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); Set fields = new HashSet<>(response.getHits().getAt(i).getFields().keySet()); @@ -638,7 +638,7 @@ public void testStoredFieldsWithoutSource() throws Exception { .addStoredField("boolean_field") .addStoredField("binary_field"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat( @@ -681,7 +681,7 @@ public void testSearchFieldsMetadata() throws Exception { .get(); assertResponse(prepareSearch("my-index").addStoredField("field1").addStoredField("_routing"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).field("field1"), nullValue()); assertThat(response.getHits().getAt(0).field("_routing").getValue().toString(), equalTo("1")); }); @@ -749,7 +749,7 @@ public void testGetFieldsComplexField() throws Exception { String field = "field1.field2.field3.field4"; assertResponse(prepareSearch("my-index").addStoredField(field), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).field(field).getValues().size(), equalTo(2)); assertThat(response.getHits().getAt(0).field(field).getValues().get(0).toString(), equalTo("value1")); assertThat(response.getHits().getAt(0).field(field).getValues().get(1).toString(), equalTo("value2")); @@ -866,7 +866,7 @@ public void testDocValueFields() throws Exception { builder.addDocValueField("*_field"); } assertResponse(builder, 
response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat( @@ -906,7 +906,7 @@ public void testDocValueFields() throws Exception { assertThat(response.getHits().getAt(0).getFields().get("ip_field").getValues(), equalTo(List.of("::1"))); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addDocValueField("*field"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat( @@ -955,7 +955,7 @@ public void testDocValueFields() throws Exception { .addDocValueField("double_field", "#.0") .addDocValueField("date_field", "epoch_millis"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat( diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java index 36e75435bb5de..76384253282de 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java @@ -250,7 +250,7 @@ public void testDistanceScoreGeoLinGaussExpWithOffset() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, 
equalTo((long) (numDummyDocs + 2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (numDummyDocs + 2))); assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); @@ -276,7 +276,7 @@ public void testDistanceScoreGeoLinGaussExpWithOffset() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (numDummyDocs + 2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (numDummyDocs + 2))); assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); @@ -300,7 +300,7 @@ public void testDistanceScoreGeoLinGaussExpWithOffset() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (numDummyDocs + 2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (numDummyDocs + 2))); assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); @@ -373,7 +373,7 @@ public void testBoostModeSettingWorks() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (2))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat(sh.getAt(1).getId(), equalTo("2")); } @@ -386,7 +386,7 @@ public void testBoostModeSettingWorks() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (2))); assertThat(sh.getAt(0).getId(), equalTo("1")); 
assertThat(sh.getAt(1).getId(), equalTo("2")); } @@ -405,7 +405,7 @@ public void testBoostModeSettingWorks() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (2))); assertThat(sh.getAt(0).getId(), equalTo("2")); assertThat(sh.getAt(1).getId(), equalTo("1")); } @@ -461,7 +461,7 @@ public void testParseGeoPoint() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(1.0, 1.e-5)); } @@ -481,7 +481,7 @@ public void testParseGeoPoint() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(1.0f, 1.e-5)); } @@ -528,7 +528,7 @@ public void testCombineModes() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(1.0, 1.e-5)); } @@ -546,7 +546,7 @@ public void testCombineModes() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(0.5, 1.e-5)); } @@ -564,7 +564,7 @@ public void testCombineModes() throws Exception { ), response -> { SearchHits sh = response.getHits(); - 
assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(2.0 + 0.5, 1.e-5)); logger.info( @@ -588,7 +588,7 @@ public void testCombineModes() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo((2.0 + 0.5) / 2, 1.e-5)); } @@ -606,7 +606,7 @@ public void testCombineModes() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(0.5, 1.e-5)); } @@ -624,7 +624,7 @@ public void testCombineModes() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(2.0, 1.e-5)); } @@ -1131,7 +1131,7 @@ public void testMultiFieldOptions() throws Exception { assertResponse(client().search(new SearchRequest(new String[] {}).source(searchSource().query(baseQuery))), response -> { assertSearchHits(response, "1", "2"); SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (2))); }); List lonlat = new ArrayList<>(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java index 
7fb06c0b83015..a85d133450bec 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java @@ -144,7 +144,7 @@ public void testExplainScript() throws InterruptedException, IOException, Execut ), response -> { SearchHits hits = response.getHits(); - assertThat(hits.getTotalHits().value, equalTo(20L)); + assertThat(hits.getTotalHits().value(), equalTo(20L)); int idCounter = 19; for (SearchHit hit : hits.getHits()) { assertThat(hit.getId(), equalTo(Integer.toString(idCounter))); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java index a0fe7e661020d..a38c9dc916056 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java @@ -145,9 +145,9 @@ public void testMinScoreFunctionScoreBasic() throws Exception { ), response -> { if (score < minScore) { - assertThat(response.getHits().getTotalHits().value, is(0L)); + assertThat(response.getHits().getTotalHits().value(), is(0L)); } else { - assertThat(response.getHits().getTotalHits().value, is(1L)); + assertThat(response.getHits().getTotalHits().value(), is(1L)); } } ); @@ -167,9 +167,9 @@ public void testMinScoreFunctionScoreBasic() throws Exception { ), response -> { if (score < minScore) { - assertThat(response.getHits().getTotalHits().value, is(0L)); + assertThat(response.getHits().getTotalHits().value(), is(0L)); } else { - assertThat(response.getHits().getTotalHits().value, is(1L)); + assertThat(response.getHits().getTotalHits().value(), is(1L)); } } ); @@ -224,9 +224,9 @@ public void testMinScoreFunctionScoreManyDocsAndRandomMinScore() throws IOExcept protected 
void assertMinScoreSearchResponses(int numDocs, SearchResponse searchResponse, int numMatchingDocs) { assertNoFailures(searchResponse); - assertThat((int) searchResponse.getHits().getTotalHits().value, is(numMatchingDocs)); + assertThat((int) searchResponse.getHits().getTotalHits().value(), is(numMatchingDocs)); int pos = 0; - for (int hitId = numDocs - 1; (numDocs - hitId) < searchResponse.getHits().getTotalHits().value; hitId--) { + for (int hitId = numDocs - 1; (numDocs - hitId) < searchResponse.getHits().getTotalHits().value(); hitId--) { assertThat(searchResponse.getHits().getAt(pos).getId(), equalTo(Integer.toString(hitId))); pos++; } @@ -242,7 +242,7 @@ public void testWithEmptyFunctions() throws IOException, ExecutionException, Int assertNoFailuresAndResponse( client().search(new SearchRequest(new String[] {}).source(searchSource().explain(true).query(termQuery("text", "text")))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); termQueryScore[0] = response.getHits().getAt(0).getScore(); } ); @@ -259,7 +259,7 @@ protected void testMinScoreApplied(CombineFunction boostMode, float expectedScor ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getScore(), equalTo(expectedScore)); } ); @@ -269,7 +269,7 @@ protected void testMinScoreApplied(CombineFunction boostMode, float expectedScor searchSource().explain(true).query(functionScoreQuery(termQuery("text", "text")).boostMode(boostMode).setMinScore(2f)) ) ), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(0L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(0L)) ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index 6043688b7670a..9fed4ead8c248 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -149,7 +149,7 @@ public void testRescorePhrase() throws Exception { 5 ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getMaxScore(), equalTo(response.getHits().getHits()[0].getScore())); assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -429,7 +429,7 @@ private static void assertEquivalent(String query, SearchResponse plain, SearchR assertNoFailures(rescored); SearchHits leftHits = plain.getHits(); SearchHits rightHits = rescored.getHits(); - assertThat(leftHits.getTotalHits().value, equalTo(rightHits.getTotalHits().value)); + assertThat(leftHits.getTotalHits().value(), equalTo(rightHits.getTotalHits().value())); assertThat(leftHits.getHits().length, equalTo(rightHits.getHits().length)); SearchHit[] hits = leftHits.getHits(); SearchHit[] rHits = rightHits.getHits(); @@ -855,7 +855,7 @@ public void testRescorePhaseWithInvalidSort() throws Exception { .setTrackScores(true) .addRescorer(new QueryRescorerBuilder(matchAllQuery()).setRescoreQueryWeight(100.0f), 50), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(5L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(5L)); assertThat(response.getHits().getHits().length, equalTo(5)); for (SearchHit hit : response.getHits().getHits()) { assertThat(hit.getScore(), equalTo(101f)); @@ -902,7 +902,7 @@ public void testRescoreAfterCollapse() throws Exception { .addRescorer(new 
QueryRescorerBuilder(fieldValueScoreQuery("secondPassScore"))) .setCollapse(new CollapseBuilder("group")); assertResponse(request, resp -> { - assertThat(resp.getHits().getTotalHits().value, equalTo(5L)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(5L)); assertThat(resp.getHits().getHits().length, equalTo(3)); SearchHit hit1 = resp.getHits().getAt(0); @@ -982,7 +982,7 @@ public void testRescoreAfterCollapseRandom() throws Exception { .setSize(Math.min(numGroups, 10)); long expectedNumHits = numHits; assertResponse(request, resp -> { - assertThat(resp.getHits().getTotalHits().value, equalTo(expectedNumHits)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(expectedNumHits)); for (int pos = 0; pos < resp.getHits().getHits().length; pos++) { SearchHit hit = resp.getHits().getAt(pos); assertThat(hit.getId(), equalTo(sortedGroups[pos].id())); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java index 7fdb31a468998..22e27d78531a6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java @@ -268,7 +268,7 @@ public void testSeedReportedInExplain() throws Exception { .setExplain(true), response -> { assertNoFailures(response); - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); SearchHit firstHit = response.getHits().getAt(0); assertThat(firstHit.getExplanation().toString(), containsString("" + seed)); } @@ -283,12 +283,12 @@ public void testNoDocs() throws Exception { prepareSearch("test").setQuery( functionScoreQuery(matchAllQuery(), randomFunction().seed(1234).setField(SeqNoFieldMapper.NAME)) ), - response -> assertEquals(0, 
response.getHits().getTotalHits().value) + response -> assertEquals(0, response.getHits().getTotalHits().value()) ); assertNoFailuresAndResponse( prepareSearch("test").setQuery(functionScoreQuery(matchAllQuery(), randomFunction())), - response -> assertEquals(0, response.getHits().getTotalHits().value) + response -> assertEquals(0, response.getHits().getTotalHits().value()) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java index 9b574cb54a116..2fde645f0036b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java @@ -426,7 +426,7 @@ public void testExplain() throws Exception { .setExplain(true), response -> { assertNoFailures(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); Explanation explanation = response.getHits().getHits()[0].getExplanation(); assertThat(explanation.getValue(), equalTo(response.getHits().getHits()[0].getScore())); assertThat(explanation.toString(), startsWith("0.36464313 = Score based on 2 child docs in range from 0 to 1")); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java index 6993f24b895e0..e6cd89c09b979 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java @@ -147,10 +147,10 @@ public void testProfileMatchesRegular() throws Exception { ); } - if (vanillaResponse.getHits().getTotalHits().value != profileResponse.getHits().getTotalHits().value) { + if 
(vanillaResponse.getHits().getTotalHits().value() != profileResponse.getHits().getTotalHits().value()) { Set vanillaSet = new HashSet<>(Arrays.asList(vanillaResponse.getHits().getHits())); Set profileSet = new HashSet<>(Arrays.asList(profileResponse.getHits().getHits())); - if (vanillaResponse.getHits().getTotalHits().value > profileResponse.getHits().getTotalHits().value) { + if (vanillaResponse.getHits().getTotalHits().value() > profileResponse.getHits().getTotalHits().value()) { vanillaSet.removeAll(profileSet); fail("Vanilla hits were larger than profile hits. Non-overlapping elements were: " + vanillaSet.toString()); } else { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java index f263ececfdc7d..26b040e2309c2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java @@ -133,7 +133,7 @@ public void testExists() throws Exception { response ), count, - response.getHits().getTotalHits().value + response.getHits().getTotalHits().value() ); } catch (AssertionError e) { for (SearchHit searchHit : allDocs.getHits()) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index 96042e198ef43..0fd2bd6f94770 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -347,7 +347,7 @@ public void testPhraseType() { ).type(MatchQueryParser.Type.PHRASE) ) ), - response -> assertThat(response.getHits().getTotalHits().value, greaterThan(1L)) + response -> assertThat(response.getHits().getTotalHits().value(), greaterThan(1L)) ); 
assertSearchHitsWithoutFailures( @@ -428,8 +428,8 @@ public void testSingleField() throws NoSuchFieldException, IllegalAccessExceptio matchResp -> { assertThat( "field: " + field + " query: " + builder.toString(), - multiMatchResp.getHits().getTotalHits().value, - equalTo(matchResp.getHits().getTotalHits().value) + multiMatchResp.getHits().getTotalHits().value(), + equalTo(matchResp.getHits().getTotalHits().value()) ); SearchHits hits = multiMatchResp.getHits(); if (field.startsWith("missing")) { @@ -451,7 +451,7 @@ public void testEquivalence() { var response = prepareSearch("test").setSize(0).setQuery(matchAllQuery()).get(); final int numDocs; try { - numDocs = (int) response.getHits().getTotalHits().value; + numDocs = (int) response.getHits().getTotalHits().value(); } finally { response.decRef(); } @@ -944,7 +944,7 @@ private static void assertEquivalent(String query, SearchResponse left, SearchRe assertNoFailures(right); SearchHits leftHits = left.getHits(); SearchHits rightHits = right.getHits(); - assertThat(leftHits.getTotalHits().value, equalTo(rightHits.getTotalHits().value)); + assertThat(leftHits.getTotalHits().value(), equalTo(rightHits.getTotalHits().value())); assertThat(leftHits.getHits().length, equalTo(rightHits.getHits().length)); SearchHit[] hits = leftHits.getHits(); SearchHit[] rHits = rightHits.getHits(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java index e25e330e072a6..c8fe9498b156f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java @@ -263,7 +263,7 @@ public void testFieldAliasOnDisallowedFieldType() throws Exception { } private void assertHits(SearchHits hits, String... 
ids) { - assertThat(hits.getTotalHits().value, equalTo((long) ids.length)); + assertThat(hits.getTotalHits().value(), equalTo((long) ids.length)); Set hitIds = new HashSet<>(); for (SearchHit hit : hits.getHits()) { hitIds.add(hit.getId()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java index 45b98686e0484..cffba49d5941c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -10,7 +10,7 @@ package org.elasticsearch.search.query; import org.apache.lucene.analysis.pattern.PatternReplaceCharFilter; -import org.apache.lucene.index.IndexReader; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.tests.analysis.MockTokenizer; @@ -264,7 +264,7 @@ public void testConstantScoreQuery() throws Exception { MatchQueryBuilder matchQuery = matchQuery("f", English.intToEnglish(between(0, num))); final long[] constantScoreTotalHits = new long[1]; assertResponse(prepareSearch("test_1").setQuery(constantScoreQuery(matchQuery)).setSize(num), response -> { - constantScoreTotalHits[0] = response.getHits().getTotalHits().value; + constantScoreTotalHits[0] = response.getHits().getTotalHits().value(); SearchHits hits = response.getHits(); for (SearchHit searchHit : hits) { assertThat(searchHit, hasScore(1.0f)); @@ -277,7 +277,7 @@ public void testConstantScoreQuery() throws Exception { ).setSize(num), response -> { SearchHits hits = response.getHits(); - assertThat(hits.getTotalHits().value, equalTo(constantScoreTotalHits[0])); + assertThat(hits.getTotalHits().value(), equalTo(constantScoreTotalHits[0])); if (constantScoreTotalHits[0] > 1) { float expected = hits.getAt(0).getScore(); for (SearchHit 
searchHit : hits) { @@ -1693,7 +1693,7 @@ public void testQueryStringParserCache() throws Exception { assertResponse( prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH).setQuery(QueryBuilders.queryStringQuery("xyz").boost(100)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); first[0] = response.getHits().getAt(0).getScore(); } @@ -1704,7 +1704,7 @@ public void testQueryStringParserCache() throws Exception { prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setQuery(QueryBuilders.queryStringQuery("xyz").boost(100)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); float actual = response.getHits().getAt(0).getScore(); assertThat(finalI + " expected: " + first[0] + " actual: " + actual, Float.compare(first[0], actual), equalTo(0)); @@ -1917,7 +1917,9 @@ public Map> getTokenizers() { } /** - * Test correct handling {@link SpanBooleanQueryRewriteWithMaxClause#rewrite(IndexReader, MultiTermQuery)}. That rewrite method is e.g. + * Test correct handling + * {@link SpanBooleanQueryRewriteWithMaxClause#rewrite(IndexSearcher, MultiTermQuery)}. + * That rewrite method is e.g. * set for fuzzy queries with "constant_score" rewrite nested inside a `span_multi` query and would cause NPEs due to an unset * {@link AttributeSource}. 
*/ diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java index 35f11eb1429b4..522c20b687caa 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java @@ -609,7 +609,7 @@ public void testSimpleQueryStringWithAnalysisStopWords() throws Exception { } private void assertHits(SearchHits hits, String... ids) { - assertThat(hits.getTotalHits().value, equalTo((long) ids.length)); + assertThat(hits.getTotalHits().value(), equalTo((long) ids.length)); Set hitIds = new HashSet<>(); for (SearchHit hit : hits.getHits()) { hitIds.add(hit.getId()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/MinimalCompoundRetrieverIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/MinimalCompoundRetrieverIT.java index 13a7d1fa59496..97aa428822fae 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/MinimalCompoundRetrieverIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/MinimalCompoundRetrieverIT.java @@ -75,7 +75,7 @@ public void testSimpleSearch() throws ExecutionException, InterruptedException { assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.RUNNING), equalTo(0)); assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL), equalTo(0)); assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.FAILED), equalTo(0)); - assertThat(response.getHits().getTotalHits().value, equalTo(testClusterInfo.get("total_docs"))); + assertThat(response.getHits().getTotalHits().value(), equalTo(testClusterInfo.get("total_docs"))); }); } diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RetrieverRewriteIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RetrieverRewriteIT.java index 43197b77b2c1e..25b43a2dc946e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RetrieverRewriteIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RetrieverRewriteIT.java @@ -78,8 +78,8 @@ public void testRewrite() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(1L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(1L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_0")); }); } @@ -91,8 +91,8 @@ public void testRewriteCompound() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(1L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(1L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java index 35990fa3755b1..9a7ce2c5c28ab 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java @@ -123,17 +123,17 @@ public void testSimplePreference() { assertResponse( prepareSearch().setQuery(matchAllQuery()), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(1L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(1L)) ); assertResponse( prepareSearch().setQuery(matchAllQuery()).setPreference("_local"), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(1L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(1L)) ); assertResponse( prepareSearch().setQuery(matchAllQuery()).setPreference("1234"), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(1L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(1L)) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java index 33b554a508e2b..06ce330213af8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java @@ -51,15 +51,15 @@ public void testNodeSelection() { // Before we've gathered stats for all nodes, we should try each node once. 
Set nodeIds = new HashSet<>(); assertResponse(client.prepareSearch().setQuery(matchAllQuery()), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); nodeIds.add(response.getHits().getAt(0).getShard().getNodeId()); }); assertResponse(client.prepareSearch().setQuery(matchAllQuery()), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); nodeIds.add(response.getHits().getAt(0).getShard().getNodeId()); }); assertResponse(client.prepareSearch().setQuery(matchAllQuery()), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); nodeIds.add(response.getHits().getAt(0).getShard().getNodeId()); }); assertEquals(3, nodeIds.size()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java index 2c96c27a0d12d..f59be6bb75928 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java @@ -122,7 +122,7 @@ public void testCustomScriptBinaryField() throws Exception { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['binaryData'].get(0).length", emptyMap()) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); assertThat(response.getHits().getAt(0).getFields().get("sbinaryData").getValues().get(0), equalTo(16)); } @@ -175,7 +175,7 @@ public void testCustomScriptBoost() throws Exception { new Script(ScriptType.INLINE, 
CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap()) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(2.0)); assertThat(response.getHits().getAt(1).getId(), equalTo("3")); @@ -196,7 +196,7 @@ public void testCustomScriptBoost() throws Exception { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap()) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("3")); assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(3.0)); } @@ -214,7 +214,7 @@ public void testCustomScriptBoost() throws Exception { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap()) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(1.0)); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java index d3da4639a3927..ac5738a9b67b2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java @@ -44,7 +44,7 @@ public void testDuelQueryThenFetch() throws Exception { 
prepareSearch("index").setSearchType(context.searchType).addSort(context.sort).setSize(context.numDocs), control -> { SearchHits sh = control.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) context.numDocs)); + assertThat(sh.getTotalHits().value(), equalTo((long) context.numDocs)); assertThat(sh.getHits().length, equalTo(context.numDocs)); SearchResponse searchScrollResponse = prepareSearch("index").setSearchType(context.searchType) @@ -55,7 +55,7 @@ public void testDuelQueryThenFetch() throws Exception { try { assertNoFailures(searchScrollResponse); - assertThat(searchScrollResponse.getHits().getTotalHits().value, equalTo((long) context.numDocs)); + assertThat(searchScrollResponse.getHits().getTotalHits().value(), equalTo((long) context.numDocs)); assertThat(searchScrollResponse.getHits().getHits().length, equalTo(context.scrollRequestSize)); int counter = 0; @@ -69,7 +69,7 @@ public void testDuelQueryThenFetch() throws Exception { searchScrollResponse.decRef(); searchScrollResponse = client().prepareSearchScroll(scrollId).setScroll(TimeValue.timeValueMinutes(10)).get(); assertNoFailures(searchScrollResponse); - assertThat(searchScrollResponse.getHits().getTotalHits().value, equalTo((long) context.numDocs)); + assertThat(searchScrollResponse.getHits().getTotalHits().value(), equalTo((long) context.numDocs)); if (searchScrollResponse.getHits().getHits().length == 0) { break; } @@ -241,7 +241,7 @@ private void testDuelIndexOrder(SearchType searchType, boolean trackScores, int try { while (true) { assertNoFailures(scroll); - assertEquals(control.getHits().getTotalHits().value, scroll.getHits().getTotalHits().value); + assertEquals(control.getHits().getTotalHits().value(), scroll.getHits().getTotalHits().value()); assertEquals(control.getHits().getMaxScore(), scroll.getHits().getMaxScore(), 0.01f); if (scroll.getHits().getHits().length == 0) { break; @@ -255,7 +255,7 @@ private void testDuelIndexOrder(SearchType searchType, boolean trackScores, int 
scroll.decRef(); scroll = client().prepareSearchScroll(scroll.getScrollId()).setScroll(TimeValue.timeValueMinutes(10)).get(); } - assertEquals(control.getHits().getTotalHits().value, scrollDocs); + assertEquals(control.getHits().getTotalHits().value(), scrollDocs); } catch (AssertionError e) { logger.info("Control:\n{}", control); logger.info("Scroll size={}, from={}:\n{}", size, scrollDocs, scroll); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java index 7c3dde22ce9d0..7ac24b77a4b6d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java @@ -89,7 +89,7 @@ public void testSimpleScrollQueryThenFetch() throws Exception { try { long counter = 0; - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -98,7 +98,7 @@ public void testSimpleScrollQueryThenFetch() throws Exception { searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -107,7 +107,7 @@ public void testSimpleScrollQueryThenFetch() throws Exception { searchResponse.decRef(); searchResponse = 
client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(30)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -145,7 +145,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E try { long counter = 0; - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -155,7 +155,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -166,7 +166,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, 
equalTo(1)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -176,7 +176,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(0)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -262,7 +262,7 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { .addSort("field", SortOrder.ASC) .get(); try { - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse1.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); @@ -278,7 +278,7 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { .addSort("field", SortOrder.ASC) .get(); try { - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse2.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); @@ -289,7 +289,7 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { searchResponse1 = client().prepareSearchScroll(searchResponse1.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); try { - 
assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse1.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); @@ -300,7 +300,7 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { searchResponse2 = client().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); try { - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse2.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); @@ -381,7 +381,7 @@ public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { .addSort("field", SortOrder.ASC) .get(); try { - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse1.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); @@ -397,7 +397,7 @@ public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { .addSort("field", SortOrder.ASC) .get(); try { - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse2.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); @@ -408,7 +408,7 @@ public void 
testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { searchResponse1 = client().prepareSearchScroll(searchResponse1.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); try { - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse1.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); @@ -419,7 +419,7 @@ public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { searchResponse2 = client().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); try { - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse2.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); @@ -535,7 +535,7 @@ public void testCloseAndReopenOrDeleteWithActiveScroll() { prepareSearch().setQuery(matchAllQuery()).setSize(35).setScroll(TimeValue.timeValueMinutes(2)).addSort("field", SortOrder.ASC), searchResponse -> { long counter = 0; - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -601,7 +601,7 @@ public void testInvalidScrollKeepAlive() throws IOException { assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(1).setScroll(TimeValue.timeValueMinutes(5)), searchResponse -> { assertNotNull(searchResponse.getScrollId()); - 
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); Exception ex = expectThrows( Exception.class, diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java index 7c459f91a1ac0..353858e9d6974 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java @@ -150,7 +150,7 @@ public void testWithNullStrings() throws InterruptedException { .setQuery(matchAllQuery()) .searchAfter(new Object[] { 0, null }), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, Matchers.equalTo(2L)); + assertThat(searchResponse.getHits().getTotalHits().value(), Matchers.equalTo(2L)); assertThat(searchResponse.getHits().getHits().length, Matchers.equalTo(1)); assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field1"), Matchers.equalTo(100)); assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field2"), Matchers.equalTo("toto")); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java index a62a042a3cab5..e87c4790aa665 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java @@ -555,7 +555,7 @@ public void testStrictlyCountRequest() throws Exception { assertNoFailuresAndResponse( prepareSearch("test_count_1", "test_count_2").setTrackTotalHits(true).setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(11L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(11L)); assertThat(response.getHits().getHits().length, equalTo(0)); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java index 979cb9e8a8c4c..e079994003751 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java @@ -117,7 +117,7 @@ public void testWithPreferenceAndRoutings() throws Exception { setupIndex(totalDocs, numShards); assertResponse(prepareSearch("test").setQuery(matchAllQuery()).setPreference("_shards:1,4").setSize(0), sr -> { - int numDocs = (int) sr.getHits().getTotalHits().value; + int numDocs = (int) sr.getHits().getTotalHits().value(); int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); SearchRequestBuilder request = prepareSearch("test").setQuery(matchAllQuery()) @@ -129,7 +129,7 @@ public void testWithPreferenceAndRoutings() throws Exception { }); assertResponse(prepareSearch("test").setQuery(matchAllQuery()).setRouting("foo", "bar").setSize(0), sr -> { - int numDocs = (int) sr.getHits().getTotalHits().value; + int numDocs = (int) sr.getHits().getTotalHits().value(); int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); SearchRequestBuilder request = prepareSearch("test").setQuery(matchAllQuery()) @@ -147,7 +147,7 @@ public void testWithPreferenceAndRoutings() throws Exception { .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias3").routing("baz")) ); assertResponse(prepareSearch("alias1", "alias3").setQuery(matchAllQuery()).setSize(0), sr -> { - int numDocs = (int) sr.getHits().getTotalHits().value; + int numDocs = (int) 
sr.getHits().getTotalHits().value(); int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); SearchRequestBuilder request = prepareSearch("alias1", "alias3").setQuery(matchAllQuery()) @@ -166,7 +166,7 @@ private void assertSearchSlicesWithScroll(SearchRequestBuilder request, String f SearchResponse searchResponse = request.slice(sliceBuilder).get(); try { totalResults += searchResponse.getHits().getHits().length; - int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value; + int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value(); int numSliceResults = searchResponse.getHits().getHits().length; String scrollId = searchResponse.getScrollId(); for (SearchHit hit : searchResponse.getHits().getHits()) { @@ -238,7 +238,7 @@ private void assertSearchSlicesWithPointInTime( SearchResponse searchResponse = request.get(); try { - int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value; + int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value(); while (true) { int numHits = searchResponse.getHits().getHits().length; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java index 3be427e37d60c..d1841ebaf8071 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -286,7 +286,7 @@ public void testRandomSorting() throws IOException, InterruptedException, Execut assertNoFailuresAndResponse( prepareSearch("test").setQuery(matchAllQuery()).setSize(size).addSort("dense_bytes", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo((long) numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo((long) numDocs)); 
assertThat(response.getHits().getHits().length, equalTo(size)); Set> entrySet = denseBytes.entrySet(); Iterator> iterator = entrySet.iterator(); @@ -307,7 +307,7 @@ public void testRandomSorting() throws IOException, InterruptedException, Execut .setSize(size) .addSort("sparse_bytes", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo((long) sparseBytes.size())); + assertThat(response.getHits().getTotalHits().value(), equalTo((long) sparseBytes.size())); assertThat(response.getHits().getHits().length, equalTo(size)); Set> entrySet = sparseBytes.entrySet(); Iterator> iterator = entrySet.iterator(); @@ -818,7 +818,7 @@ public void testSortMissingNumbers() throws Exception { assertNoFailuresAndResponse( prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("i_value").order(SortOrder.ASC)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("3")); assertThat(response.getHits().getAt(2).getId(), equalTo("2")); @@ -828,7 +828,7 @@ public void testSortMissingNumbers() throws Exception { assertNoFailuresAndResponse( prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("i_value").order(SortOrder.ASC).missing("_last")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("3")); assertThat(response.getHits().getAt(2).getId(), equalTo("2")); @@ -838,7 +838,7 @@ public void testSortMissingNumbers() throws Exception { assertNoFailuresAndResponse( prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("i_value").order(SortOrder.ASC).missing("_first")), 
response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); assertThat(response.getHits().getAt(1).getId(), equalTo("1")); assertThat(response.getHits().getAt(2).getId(), equalTo("3")); @@ -884,7 +884,7 @@ public void testSortMissingStrings() throws IOException { response -> { assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("3")); assertThat(response.getHits().getAt(2).getId(), equalTo("2")); @@ -896,7 +896,7 @@ public void testSortMissingStrings() throws IOException { response -> { assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("3")); assertThat(response.getHits().getAt(2).getId(), equalTo("2")); @@ -908,7 +908,7 @@ public void testSortMissingStrings() throws IOException { response -> { assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); assertThat(response.getHits().getAt(1).getId(), equalTo("1")); assertThat(response.getHits().getAt(2).getId(), equalTo("3")); @@ -920,7 +920,7 @@ public void testSortMissingStrings() throws IOException { response -> { 
assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); assertThat(response.getHits().getAt(2).getId(), equalTo("3")); @@ -1183,7 +1183,7 @@ public void testSortMVField() throws Exception { refresh(); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("long_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1197,7 +1197,7 @@ public void testSortMVField() throws Exception { }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("long_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1214,7 +1214,7 @@ public void testSortMVField() throws Exception { .setSize(10) .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.SUM)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1232,7 +1232,7 @@ public void testSortMVField() throws Exception { .setSize(10) 
.addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.AVG)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1250,7 +1250,7 @@ public void testSortMVField() throws Exception { .setSize(10) .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.MEDIAN)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1264,7 +1264,7 @@ public void testSortMVField() throws Exception { } ); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1277,7 +1277,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1290,7 +1290,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) 
response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1303,7 +1303,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1316,7 +1316,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1329,7 +1329,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + 
assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1342,7 +1342,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("float_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1355,7 +1355,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).floatValue(), equalTo(7f)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("float_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1368,7 +1368,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).floatValue(), equalTo(3f)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("double_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1381,7 +1381,7 @@ public void testSortMVField() throws Exception { 
assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), equalTo(7d)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("double_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1394,7 +1394,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), equalTo(3d)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("string_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1407,7 +1407,7 @@ public void testSortMVField() throws Exception { assertThat(response.getHits().getAt(2).getSortValues()[0], equalTo("07")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("string_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1719,8 +1719,8 @@ public void testSortDuelBetweenSingleShardAndMultiShardIndex() throws Exception prepareSearch("test2").setFrom(from).setSize(size).addSort(sortField, order), singleShardResponse -> { assertThat( - multiShardResponse.getHits().getTotalHits().value, - equalTo(singleShardResponse.getHits().getTotalHits().value) + 
multiShardResponse.getHits().getTotalHits().value(), + equalTo(singleShardResponse.getHits().getTotalHits().value()) ); assertThat(multiShardResponse.getHits().getHits().length, equalTo(singleShardResponse.getHits().getHits().length)); for (int i = 0; i < multiShardResponse.getHits().getHits().length; i++) { @@ -1747,14 +1747,14 @@ public void testCustomFormat() throws Exception { ); assertNoFailuresAndResponse(prepareSearch("test").addSort(SortBuilders.fieldSort("ip")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertArrayEquals(new String[] { "192.168.1.7" }, response.getHits().getAt(0).getSortValues()); assertArrayEquals(new String[] { "2001:db8::ff00:42:8329" }, response.getHits().getAt(1).getSortValues()); }); assertNoFailuresAndResponse( prepareSearch("test").addSort(SortBuilders.fieldSort("ip")).searchAfter(new Object[] { "192.168.1.7" }), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals(1, response.getHits().getHits().length); assertArrayEquals(new String[] { "2001:db8::ff00:42:8329" }, response.getHits().getAt(0).getSortValues()); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/SimpleSortIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/SimpleSortIT.java index ae0d2cbeb841f..fc5d40ae18c14 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/SimpleSortIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/SimpleSortIT.java @@ -362,7 +362,7 @@ public void testDocumentsWithNullValue() throws Exception { assertNoFailuresAndResponse( prepareSearch().setQuery(matchAllQuery()).addScriptField("id", scripField).addSort("svalue", SortOrder.ASC), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); + 
assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(3L)); assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("1")); assertThat(searchResponse.getHits().getAt(1).field("id").getValue(), equalTo("3")); assertThat(searchResponse.getHits().getAt(2).field("id").getValue(), equalTo("2")); @@ -373,7 +373,7 @@ public void testDocumentsWithNullValue() throws Exception { .addScriptField("id", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['id'][0]", Collections.emptyMap())) .addSort("svalue", SortOrder.ASC), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(3L)); assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("1")); assertThat(searchResponse.getHits().getAt(1).field("id").getValue(), equalTo("3")); assertThat(searchResponse.getHits().getAt(2).field("id").getValue(), equalTo("2")); @@ -391,7 +391,7 @@ public void testDocumentsWithNullValue() throws Exception { } assertThat(searchResponse.getFailedShards(), equalTo(0)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(3L)); assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("3")); assertThat(searchResponse.getHits().getAt(1).field("id").getValue(), equalTo("1")); assertThat(searchResponse.getHits().getAt(2).field("id").getValue(), equalTo("2")); @@ -409,7 +409,7 @@ public void testDocumentsWithNullValue() throws Exception { } assertThat(searchResponse.getFailedShards(), equalTo(0)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("2")); } ); diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java index 6351d8d906389..ec9c680e17fc3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java @@ -64,12 +64,12 @@ public void testInnerHits() { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), nullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits hits = response.getHits().getAt(0).getInnerHits().get("nested"); - assertThat(hits.getTotalHits().value, equalTo(1L)); + assertThat(hits.getTotalHits().value(), equalTo(1L)); assertThat(hits.getAt(0).getId(), nullValue()); assertThat(hits.getAt(0).getSourceAsString(), nullValue()); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/similarity/SimilarityIT.java b/server/src/internalClusterTest/java/org/elasticsearch/similarity/SimilarityIT.java index 2952150c2cb22..f90056c6ae859 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/similarity/SimilarityIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/similarity/SimilarityIT.java @@ -54,10 +54,10 @@ public void testCustomBM25Similarity() throws Exception { .get(); assertResponse(prepareSearch().setQuery(matchQuery("field1", "quick brown fox")), bm25SearchResponse -> { - assertThat(bm25SearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(bm25SearchResponse.getHits().getTotalHits().value(), equalTo(1L)); float bm25Score = bm25SearchResponse.getHits().getHits()[0].getScore(); 
assertResponse(prepareSearch().setQuery(matchQuery("field2", "quick brown fox")), booleanSearchResponse -> { - assertThat(booleanSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(booleanSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); float defaultScore = booleanSearchResponse.getHits().getHits()[0].getScore(); assertThat(bm25Score, not(equalTo(defaultScore))); }); diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 414a6c6ba66a6..89fc5f676cb1e 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -458,7 +458,8 @@ provides org.apache.lucene.codecs.Codec with org.elasticsearch.index.codec.Elasticsearch814Codec, - org.elasticsearch.index.codec.Elasticsearch816Codec; + org.elasticsearch.index.codec.Elasticsearch816Codec, + org.elasticsearch.index.codec.Elasticsearch900Codec; provides org.apache.logging.log4j.core.util.ContextDataProvider with org.elasticsearch.common.logging.DynamicContextDataProvider; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java index 666708ea6ffde..e668624440351 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java @@ -32,6 +32,7 @@ import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.PointValues; @@ -273,7 +274,7 @@ void analyzeDocValues(SegmentReader reader, IndexDiskUsageStats stats) throws IO } case SORTED_SET -> { SortedSetDocValues 
sortedSet = iterateDocValues(maxDocs, () -> docValuesReader.getSortedSet(field), dv -> { - while (dv.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) { + for (int i = 0; i < dv.docValueCount(); i++) { cancellationChecker.logEvent(); } }); @@ -544,13 +545,14 @@ void analyzeKnnVectors(SegmentReader reader, IndexDiskUsageStats stats) throws I if (field.getVectorDimension() > 0) { switch (field.getVectorEncoding()) { case BYTE -> { - iterateDocValues(reader.maxDoc(), () -> vectorReader.getByteVectorValues(field.name), vectors -> { + iterateDocValues(reader.maxDoc(), () -> vectorReader.getByteVectorValues(field.name).iterator(), vectors -> { cancellationChecker.logEvent(); - vectors.vectorValue(); + vectors.index(); }); // do a couple of randomized searches to figure out min and max offsets of index file ByteVectorValues vectorValues = vectorReader.getByteVectorValues(field.name); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); final KnnCollector collector = new TopKnnCollector( Math.max(1, Math.min(100, vectorValues.size() - 1)), Integer.MAX_VALUE @@ -558,22 +560,23 @@ void analyzeKnnVectors(SegmentReader reader, IndexDiskUsageStats stats) throws I int numDocsToVisit = reader.maxDoc() < 10 ? 
reader.maxDoc() : 10 * (int) Math.log10(reader.maxDoc()); int skipFactor = Math.max(reader.maxDoc() / numDocsToVisit, 1); for (int i = 0; i < reader.maxDoc(); i += skipFactor) { - if ((i = vectorValues.advance(i)) == DocIdSetIterator.NO_MORE_DOCS) { + if ((i = iterator.advance(i)) == DocIdSetIterator.NO_MORE_DOCS) { break; } cancellationChecker.checkForCancellation(); - vectorReader.search(field.name, vectorValues.vectorValue(), collector, null); + vectorReader.search(field.name, vectorValues.vectorValue(iterator.index()), collector, null); } stats.addKnnVectors(field.name, directory.getBytesRead()); } case FLOAT32 -> { - iterateDocValues(reader.maxDoc(), () -> vectorReader.getFloatVectorValues(field.name), vectors -> { + iterateDocValues(reader.maxDoc(), () -> vectorReader.getFloatVectorValues(field.name).iterator(), vectors -> { cancellationChecker.logEvent(); - vectors.vectorValue(); + vectors.index(); }); // do a couple of randomized searches to figure out min and max offsets of index file FloatVectorValues vectorValues = vectorReader.getFloatVectorValues(field.name); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); final KnnCollector collector = new TopKnnCollector( Math.max(1, Math.min(100, vectorValues.size() - 1)), Integer.MAX_VALUE @@ -581,11 +584,11 @@ void analyzeKnnVectors(SegmentReader reader, IndexDiskUsageStats stats) throws I int numDocsToVisit = reader.maxDoc() < 10 ? 
reader.maxDoc() : 10 * (int) Math.log10(reader.maxDoc()); int skipFactor = Math.max(reader.maxDoc() / numDocsToVisit, 1); for (int i = 0; i < reader.maxDoc(); i += skipFactor) { - if ((i = vectorValues.advance(i)) == DocIdSetIterator.NO_MORE_DOCS) { + if ((i = iterator.advance(i)) == DocIdSetIterator.NO_MORE_DOCS) { break; } cancellationChecker.checkForCancellation(); - vectorReader.search(field.name, vectorValues.vectorValue(), collector, null); + vectorReader.search(field.name, vectorValues.vectorValue(iterator.index()), collector, null); } stats.addKnnVectors(field.name, directory.getBytesRead()); } diff --git a/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java b/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java index 8ac2033e2ff19..dda589a458f88 100644 --- a/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java +++ b/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java @@ -54,7 +54,7 @@ SearchSortValuesAndFormats getBottomSortValues() { } synchronized void consumeTopDocs(TopFieldDocs topDocs, DocValueFormat[] sortValuesFormat) { - totalHits += topDocs.totalHits.value; + totalHits += topDocs.totalHits.value(); if (validateShardSortFields(topDocs.fields) == false) { return; } diff --git a/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java b/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java index d41a2561646b8..b52d76aac4132 100644 --- a/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java +++ b/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java @@ -57,8 +57,8 @@ public void consumeResult(SearchPhaseResult result, Runnable next) { return; } // set the relation to the first non-equal relation - relationAtomicReference.compareAndSet(TotalHits.Relation.EQUAL_TO, 
result.queryResult().getTotalHits().relation); - totalHits.add(result.queryResult().getTotalHits().value); + relationAtomicReference.compareAndSet(TotalHits.Relation.EQUAL_TO, result.queryResult().getTotalHits().relation()); + totalHits.add(result.queryResult().getTotalHits().value()); terminatedEarly.compareAndSet(false, (result.queryResult().terminatedEarly() != null && result.queryResult().terminatedEarly())); timedOut.compareAndSet(false, result.queryResult().searchTimedOut()); next.run(); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index 74786dff1648d..ca9c4ab44c423 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -892,8 +892,8 @@ TotalHits getTotalHits() { void add(TopDocsAndMaxScore topDocs, boolean timedOut, Boolean terminatedEarly) { if (trackTotalHitsUpTo != SearchContext.TRACK_TOTAL_HITS_DISABLED) { - totalHits += topDocs.topDocs.totalHits.value; - if (topDocs.topDocs.totalHits.relation == Relation.GREATER_THAN_OR_EQUAL_TO) { + totalHits += topDocs.topDocs.totalHits.value(); + if (topDocs.topDocs.totalHits.relation() == Relation.GREATER_THAN_OR_EQUAL_TO) { totalHitsRelation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO; } } diff --git a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java index 021ad8127a2d0..6a881163914e4 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java @@ -701,7 +701,7 @@ String jvmVendor() { } String javaVersion() { - return Constants.JAVA_VERSION; + return Runtime.version().toString(); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java 
b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java index 5043508c781f0..a57b8b4d23cdb 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -88,7 +88,7 @@ import java.util.Objects; public class Lucene { - public static final String LATEST_CODEC = "Lucene912"; + public static final String LATEST_CODEC = "Lucene100"; public static final String SOFT_DELETES_FIELD = "__soft_deletes"; @@ -392,8 +392,8 @@ public static ScoreDoc readScoreDoc(StreamInput in) throws IOException { private static final Class GEO_DISTANCE_SORT_TYPE_CLASS = LatLonDocValuesField.newDistanceSort("some_geo_field", 0, 0).getClass(); public static void writeTotalHits(StreamOutput out, TotalHits totalHits) throws IOException { - out.writeVLong(totalHits.value); - out.writeEnum(totalHits.relation); + out.writeVLong(totalHits.value()); + out.writeEnum(totalHits.relation()); } public static void writeTopDocs(StreamOutput out, TopDocsAndMaxScore topDocs) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java b/server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java index 625438ebdff97..cbceef120b877 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java @@ -27,6 +27,7 @@ import org.apache.lucene.util.BitSet; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.IOBooleanSupplier; import org.elasticsearch.core.Nullable; import java.io.IOException; @@ -177,6 +178,11 @@ public boolean seekExact(BytesRef text) throws IOException { } } + @Override + public IOBooleanSupplier prepareSeekExact(BytesRef bytesRef) { + return () -> this.seekExact(bytesRef); + } + @Override public int docFreq() throws IOException { return 
currentDocFreq; diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/AutomatonQueries.java b/server/src/main/java/org/elasticsearch/common/lucene/search/AutomatonQueries.java index 5bc52253939af..9460aba0a99cb 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/AutomatonQueries.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/AutomatonQueries.java @@ -14,7 +14,6 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import java.util.ArrayList; @@ -38,8 +37,6 @@ public static Automaton caseInsensitivePrefix(String s) { Automaton a = Operations.concatenate(list); // since all elements in the list should be deterministic already, the concatenation also is, so no need to determinized assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); - assert a.isDeterministic(); return a; } @@ -100,7 +97,7 @@ public static Automaton toCaseInsensitiveWildcardAutomaton(Term wildcardquery) { i += length; } - return Operations.concatenate(automata); + return Operations.determinize(Operations.concatenate(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } protected static Automaton toCaseInsensitiveString(BytesRef br) { @@ -117,7 +114,6 @@ public static Automaton toCaseInsensitiveString(String s) { Automaton a = Operations.concatenate(list); // concatenating deterministic automata should result in a deterministic automaton. No need to determinize here. 
assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); return a; } @@ -132,7 +128,6 @@ public static Automaton toCaseInsensitiveChar(int codepoint) { if (altCase != codepoint) { result = Operations.union(case1, Automata.makeChar(altCase)); // this automaton should always be deterministic, no need to determinize - result = MinimizationOperations.minimize(result, 0); assert result.isDeterministic(); } else { result = case1; diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitivePrefixQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitivePrefixQuery.java index b6f102a98203f..65688b69f5aa0 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitivePrefixQuery.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitivePrefixQuery.java @@ -20,12 +20,12 @@ public CaseInsensitivePrefixQuery(Term term) { super(term, caseInsensitivePrefix(term.text())); } - public CaseInsensitivePrefixQuery(Term term, int determinizeWorkLimit, boolean isBinary) { - super(term, caseInsensitivePrefix(term.text()), determinizeWorkLimit, isBinary); + public CaseInsensitivePrefixQuery(Term term, boolean isBinary) { + super(term, caseInsensitivePrefix(term.text()), isBinary); } - public CaseInsensitivePrefixQuery(Term term, int determinizeWorkLimit, boolean isBinary, MultiTermQuery.RewriteMethod rewriteMethod) { - super(term, caseInsensitivePrefix(term.text()), determinizeWorkLimit, isBinary, rewriteMethod); + public CaseInsensitivePrefixQuery(Term term, boolean isBinary, MultiTermQuery.RewriteMethod rewriteMethod) { + super(term, caseInsensitivePrefix(term.text()), isBinary, rewriteMethod); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitiveWildcardQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitiveWildcardQuery.java index 91700e5ffe6c1..6368acf383120 100644 --- 
a/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitiveWildcardQuery.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitiveWildcardQuery.java @@ -26,8 +26,8 @@ public CaseInsensitiveWildcardQuery(Term term) { super(term, toCaseInsensitiveWildcardAutomaton(term)); } - public CaseInsensitiveWildcardQuery(Term term, int determinizeWorkLimit, boolean isBinary, RewriteMethod rewriteMethod) { - super(term, toCaseInsensitiveWildcardAutomaton(term), determinizeWorkLimit, isBinary, rewriteMethod); + public CaseInsensitiveWildcardQuery(Term term, boolean isBinary, RewriteMethod rewriteMethod) { + super(term, toCaseInsensitiveWildcardAutomaton(term), isBinary, rewriteMethod); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/Queries.java b/server/src/main/java/org/elasticsearch/common/lucene/search/Queries.java index 25fa926ada2c8..e2ac58caccd57 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/Queries.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/Queries.java @@ -123,7 +123,7 @@ public static Query applyMinimumShouldMatch(BooleanQuery query, @Nullable String } int optionalClauses = 0; for (BooleanClause c : query.clauses()) { - if (c.getOccur() == BooleanClause.Occur.SHOULD) { + if (c.occur() == BooleanClause.Occur.SHOULD) { optionalClauses++; } } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java b/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java index 13fae303909f5..299739fc3ba8a 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java @@ -19,7 +19,7 @@ import org.apache.lucene.queries.spans.SpanOrQuery; import org.apache.lucene.queries.spans.SpanQuery; import 
org.apache.lucene.queries.spans.SpanTermQuery; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.AttributeSource; @@ -42,7 +42,7 @@ public class SpanBooleanQueryRewriteWithMaxClause extends SpanMultiTermQueryWrap private final boolean hardLimit; public SpanBooleanQueryRewriteWithMaxClause() { - this(BooleanQuery.getMaxClauseCount(), true); + this(IndexSearcher.getMaxClauseCount(), true); } public SpanBooleanQueryRewriteWithMaxClause(int maxExpansions, boolean hardLimit) { @@ -59,10 +59,11 @@ public boolean isHardLimit() { } @Override - public SpanQuery rewrite(IndexReader reader, MultiTermQuery query) throws IOException { + public SpanQuery rewrite(IndexSearcher indexSearcher, MultiTermQuery query) throws IOException { final MultiTermQuery.RewriteMethod delegate = new MultiTermQuery.RewriteMethod() { @Override - public Query rewrite(IndexReader reader, MultiTermQuery query) throws IOException { + public Query rewrite(IndexSearcher indexSearcher, MultiTermQuery query) throws IOException { + IndexReader reader = indexSearcher.getIndexReader(); Collection queries = collectTerms(reader, query); if (queries.size() == 0) { return new SpanMatchNoDocsQuery(query.getField(), "no expansion found for " + query.toString()); @@ -99,7 +100,7 @@ private Collection collectTerms(IndexReader reader, MultiTermQuery qu + query.toString() + " ] " + "exceeds maxClauseCount [ Boolean maxClauseCount is set to " - + BooleanQuery.getMaxClauseCount() + + IndexSearcher.getMaxClauseCount() + "]" ); } else { @@ -112,6 +113,6 @@ private Collection collectTerms(IndexReader reader, MultiTermQuery qu return queries; } }; - return (SpanQuery) delegate.rewrite(reader, query); + return (SpanQuery) delegate.rewrite(indexSearcher, query); } } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java 
b/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java index f8d0c81466dcc..54cd4c9946f62 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java @@ -34,6 +34,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.similarities.TFIDFSimilarity; @@ -207,7 +208,7 @@ public final class XMoreLikeThis { /** * Return a Query with no more than this many terms. * - * @see BooleanQuery#getMaxClauseCount + * @see IndexSearcher#getMaxClauseCount * @see #setMaxQueryTerms */ public static final int DEFAULT_MAX_QUERY_TERMS = 25; @@ -468,7 +469,7 @@ private void addToQuery(PriorityQueue q, BooleanQuery.Builder query) try { query.add(tq, BooleanClause.Occur.SHOULD); - } catch (BooleanQuery.TooManyClauses ignore) { + } catch (IndexSearcher.TooManyClauses ignore) { break; } } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java index ff82160be0325..5a0c216c4e717 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java @@ -272,44 +272,65 @@ class CustomBoostFactorWeight extends Weight { this.needsScores = needsScores; } - private FunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException { - Scorer subQueryScorer = subQueryWeight.scorer(context); - if (subQueryScorer == null) { + private ScorerSupplier functionScorerSupplier(LeafReaderContext context) throws IOException { + 
ScorerSupplier subQueryScorerSupplier = subQueryWeight.scorerSupplier(context); + if (subQueryScorerSupplier == null) { return null; } - final long leadCost = subQueryScorer.iterator().cost(); - final LeafScoreFunction[] leafFunctions = new LeafScoreFunction[functions.length]; - final Bits[] docSets = new Bits[functions.length]; - for (int i = 0; i < functions.length; i++) { - ScoreFunction function = functions[i]; - leafFunctions[i] = function.getLeafScoreFunction(context); - if (filterWeights[i] != null) { - ScorerSupplier filterScorerSupplier = filterWeights[i].scorerSupplier(context); - docSets[i] = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorerSupplier, leadCost); - } else { - docSets[i] = new Bits.MatchAllBits(context.reader().maxDoc()); + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + Scorer subQueryScorer = subQueryScorerSupplier.get(leadCost); + final LeafScoreFunction[] leafFunctions = new LeafScoreFunction[functions.length]; + final Bits[] docSets = new Bits[functions.length]; + for (int i = 0; i < functions.length; i++) { + ScoreFunction function = functions[i]; + leafFunctions[i] = function.getLeafScoreFunction(context); + if (filterWeights[i] != null) { + ScorerSupplier filterScorerSupplier = filterWeights[i].scorerSupplier(context); + docSets[i] = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorerSupplier, leadCost); + } else { + docSets[i] = new Bits.MatchAllBits(context.reader().maxDoc()); + } + } + return new FunctionFactorScorer( + subQueryScorer, + scoreMode, + functions, + maxBoost, + leafFunctions, + docSets, + combineFunction, + needsScores + ); } - } - return new FunctionFactorScorer( - this, - subQueryScorer, - scoreMode, - functions, - maxBoost, - leafFunctions, - docSets, - combineFunction, - needsScores - ); + + @Override + public long cost() { + return subQueryScorerSupplier.cost(); + } + }; } @Override - public Scorer scorer(LeafReaderContext 
context) throws IOException { - Scorer scorer = functionScorer(context); - if (scorer != null && minScore != null) { - scorer = new MinScoreScorer(this, scorer, minScore); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier scorerSupplier = functionScorerSupplier(context); + + if (scorerSupplier == null || minScore == null) { + return scorerSupplier; } - return scorer; + + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + return new MinScoreScorer(scorerSupplier.get(leadCost), minScore); + } + + @Override + public long cost() { + return scorerSupplier.cost(); + } + }; } @Override @@ -356,7 +377,8 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } else if (singleFunction && functionsExplanations.size() == 1) { factorExplanation = functionsExplanations.get(0); } else { - FunctionFactorScorer scorer = functionScorer(context); + + FunctionFactorScorer scorer = (FunctionFactorScorer) functionScorerSupplier(context).get(1L); int actualDoc = scorer.iterator().advance(doc); assert (actualDoc == doc); double score = scorer.computeScore(doc, expl.getValue().floatValue()); @@ -391,7 +413,6 @@ static class FunctionFactorScorer extends FilterScorer { private final boolean needsScores; private FunctionFactorScorer( - CustomBoostFactorWeight w, Scorer scorer, ScoreMode scoreMode, ScoreFunction[] functions, @@ -401,7 +422,7 @@ private FunctionFactorScorer( CombineFunction scoreCombiner, boolean needsScores ) throws IOException { - super(scorer, w); + super(scorer); this.scoreMode = scoreMode; this.functions = functions; this.leafFunctions = leafFunctions; diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java index 3d23f66b09d82..0fd46447b3ea9 100644 --- 
a/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java @@ -12,7 +12,6 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; -import org.apache.lucene.search.Weight; import java.io.IOException; @@ -25,12 +24,11 @@ public final class MinScoreScorer extends Scorer { private float curScore; private final float boost; - public MinScoreScorer(Weight weight, Scorer scorer, float minScore) { - this(weight, scorer, minScore, 1f); + public MinScoreScorer(Scorer scorer, float minScore) { + this(scorer, minScore, 1f); } - public MinScoreScorer(Weight weight, Scorer scorer, float minScore, float boost) { - super(weight); + public MinScoreScorer(Scorer scorer, float minScore, float boost) { this.in = scorer; this.minScore = minScore; this.boost = boost; diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java index 4222b5dff98ab..d38243f5348c4 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java @@ -27,14 +27,8 @@ public class ScriptScoreFunction extends ScoreFunction { static final class CannedScorer extends Scorable { - protected int docid; protected float score; - @Override - public int docID() { - return docid; - } - @Override public float score() { return score; @@ -70,14 +64,13 @@ public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) throws IOEx if (script.needs_termStats()) { assert termStatsFactory != null; - leafScript._setTermStats(termStatsFactory.apply(ctx, scorer::docID)); + leafScript._setTermStats(termStatsFactory.apply(ctx, 
leafScript::docId)); } return new LeafScoreFunction() { private double score(int docId, float subQueryScore, ScoreScript.ExplanationHolder holder) throws IOException { leafScript.setDocument(docId); - scorer.docid = docId; scorer.score = subQueryScore; double result = leafScript.execute(holder); @@ -97,7 +90,6 @@ public Explanation explainScore(int docId, Explanation subQueryScore) throws IOE Explanation exp; if (leafScript instanceof ExplainableScoreScript) { leafScript.setDocument(docId); - scorer.docid = docId; scorer.score = subQueryScore.getValue().floatValue(); exp = ((ExplainableScoreScript) leafScript).explain(subQueryScore); } else { diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreQuery.java index 5e3f8e8e62714..e58b2fffed001 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreQuery.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreQuery.java @@ -23,6 +23,7 @@ import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; @@ -38,6 +39,7 @@ import java.util.HashSet; import java.util.Objects; import java.util.Set; +import java.util.function.IntSupplier; /** * A query that uses a script to compute documents' scores. 
@@ -104,30 +106,40 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo } return new Weight(this) { - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - if (minScore == null) { - final BulkScorer subQueryBulkScorer = subQueryWeight.bulkScorer(context); - if (subQueryBulkScorer == null) { - return null; - } - return new ScriptScoreBulkScorer(subQueryBulkScorer, subQueryScoreMode, makeScoreScript(context), boost); - } else { - return super.bulkScorer(context); - } - } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - Scorer subQueryScorer = subQueryWeight.scorer(context); - if (subQueryScorer == null) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier subQueryScorerSupplier = subQueryWeight.scorerSupplier(context); + if (subQueryScorerSupplier == null) { return null; } - Scorer scriptScorer = new ScriptScorer(this, makeScoreScript(context), subQueryScorer, subQueryScoreMode, boost, null); - if (minScore != null) { - scriptScorer = new MinScoreScorer(this, scriptScorer, minScore); - } - return scriptScorer; + + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + Scorer subQueryScorer = subQueryScorerSupplier.get(leadCost); + Scorer scriptScorer = new ScriptScorer(makeScoreScript(context), subQueryScorer, subQueryScoreMode, boost, null); + if (minScore != null) { + scriptScorer = new MinScoreScorer(scriptScorer, minScore); + } + return scriptScorer; + } + + @Override + public BulkScorer bulkScorer() throws IOException { + if (minScore == null) { + final BulkScorer subQueryBulkScorer = subQueryScorerSupplier.bulkScorer(); + return new ScriptScoreBulkScorer(subQueryBulkScorer, subQueryScoreMode, makeScoreScript(context), boost); + } else { + return super.bulkScorer(); + } + } + + @Override + public long cost() { + return subQueryScorerSupplier.cost(); + } + }; } 
@Override @@ -138,7 +150,6 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } ExplanationHolder explanationHolder = new ExplanationHolder(); Scorer scorer = new ScriptScorer( - this, makeScoreScript(context), subQueryWeight.scorer(context), subQueryScoreMode, @@ -231,14 +242,12 @@ private static class ScriptScorer extends Scorer { private final ExplanationHolder explanation; ScriptScorer( - Weight weight, ScoreScript scoreScript, Scorer subQueryScorer, ScoreMode subQueryScoreMode, float boost, ExplanationHolder explanation ) { - super(weight); this.scoreScript = scoreScript; if (subQueryScoreMode == ScoreMode.COMPLETE) { scoreScript.setScorer(subQueryScorer); @@ -292,19 +301,27 @@ private static class ScriptScorable extends Scorable { private final ScoreScript scoreScript; private final Scorable subQueryScorer; private final float boost; + private final IntSupplier docIDSupplier; - ScriptScorable(ScoreScript scoreScript, Scorable subQueryScorer, ScoreMode subQueryScoreMode, float boost) { + ScriptScorable( + ScoreScript scoreScript, + Scorable subQueryScorer, + ScoreMode subQueryScoreMode, + float boost, + IntSupplier docIDSupplier + ) { this.scoreScript = scoreScript; if (subQueryScoreMode == ScoreMode.COMPLETE) { scoreScript.setScorer(subQueryScorer); } this.subQueryScorer = subQueryScorer; this.boost = boost; + this.docIDSupplier = docIDSupplier; } @Override public float score() throws IOException { - int docId = docID(); + int docId = docIDSupplier.getAsInt(); scoreScript.setDocument(docId); float score = (float) scoreScript.execute(null); if (score < 0f || Float.isNaN(score)) { @@ -320,10 +337,6 @@ public float score() throws IOException { return score * boost; } - @Override - public int docID() { - return subQueryScorer.docID(); - } } /** @@ -350,9 +363,18 @@ public int score(LeafCollector collector, Bits acceptDocs, int min, int max) thr private LeafCollector wrapCollector(LeafCollector collector) { return new 
FilterLeafCollector(collector) { + + private int docID; + @Override public void setScorer(Scorable scorer) throws IOException { - in.setScorer(new ScriptScorable(scoreScript, scorer, subQueryScoreMode, boost)); + in.setScorer(new ScriptScorable(scoreScript, scorer, subQueryScoreMode, boost, () -> docID)); + } + + @Override + public void collect(int doc) throws IOException { + this.docID = doc; + super.collect(doc); } }; } diff --git a/server/src/main/java/org/elasticsearch/common/regex/Regex.java b/server/src/main/java/org/elasticsearch/common/regex/Regex.java index d5b2e8497fc0b..aaaab78b71736 100644 --- a/server/src/main/java/org/elasticsearch/common/regex/Regex.java +++ b/server/src/main/java/org/elasticsearch/common/regex/Regex.java @@ -69,7 +69,7 @@ public static Automaton simpleMatchToAutomaton(String pattern) { previous = i + 1; } automata.add(Automata.makeString(pattern.substring(previous))); - return Operations.concatenate(automata); + return Operations.determinize(Operations.concatenate(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } /** @@ -113,7 +113,7 @@ public static Automaton simpleMatchToAutomaton(String... 
patterns) { prefixAutomaton.add(Automata.makeAnyString()); automata.add(Operations.concatenate(prefixAutomaton)); } - return Operations.union(automata); + return Operations.determinize(Operations.union(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } /** diff --git a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java index 232ce34b153ab..defaddb25eb47 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java +++ b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java @@ -254,7 +254,7 @@ public static KeyStoreWrapper load(Path configDir) throws IOException { } Directory directory = new NIOFSDirectory(configDir); - try (ChecksumIndexInput input = directory.openChecksumInput(KEYSTORE_FILENAME, IOContext.READONCE)) { + try (ChecksumIndexInput input = directory.openChecksumInput(KEYSTORE_FILENAME)) { final int formatVersion; try { formatVersion = CodecUtil.checkHeader(input, KEYSTORE_FILENAME, MIN_FORMAT_VERSION, CURRENT_VERSION); diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java b/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java index 2ef96123e63d8..c4b03c712c272 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java @@ -280,8 +280,8 @@ public static Function, Map> filter(String[] include = matchAllAutomaton; } else { Automaton includeA = Regex.simpleMatchToAutomaton(includes); - includeA = makeMatchDotsInFieldNames(includeA); - include = new CharacterRunAutomaton(includeA, MAX_DETERMINIZED_STATES); + includeA = Operations.determinize(makeMatchDotsInFieldNames(includeA), MAX_DETERMINIZED_STATES); + include = new CharacterRunAutomaton(includeA); } Automaton excludeA; @@ -289,9 +289,9 @@ 
public static Function, Map> filter(String[] excludeA = Automata.makeEmpty(); } else { excludeA = Regex.simpleMatchToAutomaton(excludes); - excludeA = makeMatchDotsInFieldNames(excludeA); + excludeA = Operations.determinize(makeMatchDotsInFieldNames(excludeA), MAX_DETERMINIZED_STATES); } - CharacterRunAutomaton exclude = new CharacterRunAutomaton(excludeA, MAX_DETERMINIZED_STATES); + CharacterRunAutomaton exclude = new CharacterRunAutomaton(excludeA); // NOTE: We cannot use Operations.minus because of the special case that // we want all sub properties to match as soon as an object matches diff --git a/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java b/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java index 749946e05b745..0c6cf2c8a0761 100644 --- a/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SerialMergeScheduler; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.index.TieredMergePolicy; import org.apache.lucene.search.DocIdSetIterator; @@ -449,7 +450,7 @@ OnDiskState loadBestOnDiskState(boolean checkClean) throws IOException { // resources during test execution checkIndex.setThreadCount(1); checkIndex.setInfoStream(printStream); - checkIndex.setChecksumsOnly(true); + checkIndex.setLevel(CheckIndex.Level.MIN_LEVEL_FOR_CHECKSUM_CHECKS); isClean = checkIndex.checkIndex().clean; } @@ -705,10 +706,11 @@ private static void consumeFromType( final Bits liveDocs = leafReaderContext.reader().getLiveDocs(); final IntPredicate isLiveDoc = liveDocs == null ? 
i -> true : liveDocs::get; final DocIdSetIterator docIdSetIterator = scorer.iterator(); + final StoredFields storedFields = leafReaderContext.reader().storedFields(); while (docIdSetIterator.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { if (isLiveDoc.test(docIdSetIterator.docID())) { logger.trace("processing doc {}", docIdSetIterator.docID()); - final Document document = leafReaderContext.reader().document(docIdSetIterator.docID()); + final Document document = storedFields.document(docIdSetIterator.docID()); final BytesArray documentData = new BytesArray(document.getBinaryValue(DATA_FIELD_NAME)); if (document.getField(PAGE_FIELD_NAME) == null) { diff --git a/server/src/main/java/org/elasticsearch/index/IndexModule.java b/server/src/main/java/org/elasticsearch/index/IndexModule.java index 7eed5f2b7759d..4ff7ef60cc0a2 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/server/src/main/java/org/elasticsearch/index/IndexModule.java @@ -18,7 +18,6 @@ import org.apache.lucene.search.similarities.BM25Similarity; import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.util.Constants; import org.apache.lucene.util.SetOnce; import org.elasticsearch.client.internal.Client; @@ -451,7 +450,7 @@ public boolean match(String setting) { } public static Type defaultStoreType(final boolean allowMmap) { - if (allowMmap && Constants.JRE_IS_64BIT && MMapDirectory.UNMAP_SUPPORTED) { + if (allowMmap && Constants.JRE_IS_64BIT) { return Type.HYBRIDFS; } else { return Type.NIOFS; diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 7e04a64e74cb5..efb1facc79b3a 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -15,6 +15,7 @@ import 
org.elasticsearch.core.UpdateForV9; import java.lang.reflect.Field; +import java.text.ParseException; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -48,29 +49,38 @@ private static IndexVersion def(int id, Version luceneVersion) { return new IndexVersion(id, luceneVersion); } + // TODO: this is just a hack to allow to keep the V7 IndexVersion constants, during compilation. Remove + private static Version parseUnchecked(String version) { + try { + return Version.parse(version); + } catch (ParseException e) { + throw new RuntimeException(e); + } + } + @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // remove the index versions with which v9 will not need to interact public static final IndexVersion ZERO = def(0, Version.LATEST); - public static final IndexVersion V_7_0_0 = def(7_00_00_99, Version.LUCENE_8_0_0); - - public static final IndexVersion V_7_1_0 = def(7_01_00_99, Version.LUCENE_8_0_0); - public static final IndexVersion V_7_2_0 = def(7_02_00_99, Version.LUCENE_8_0_0); - public static final IndexVersion V_7_2_1 = def(7_02_01_99, Version.LUCENE_8_0_0); - public static final IndexVersion V_7_3_0 = def(7_03_00_99, Version.LUCENE_8_1_0); - public static final IndexVersion V_7_4_0 = def(7_04_00_99, Version.LUCENE_8_2_0); - public static final IndexVersion V_7_5_0 = def(7_05_00_99, Version.LUCENE_8_3_0); - public static final IndexVersion V_7_5_2 = def(7_05_02_99, Version.LUCENE_8_3_0); - public static final IndexVersion V_7_6_0 = def(7_06_00_99, Version.LUCENE_8_4_0); - public static final IndexVersion V_7_7_0 = def(7_07_00_99, Version.LUCENE_8_5_1); - public static final IndexVersion V_7_8_0 = def(7_08_00_99, Version.LUCENE_8_5_1); - public static final IndexVersion V_7_9_0 = def(7_09_00_99, Version.LUCENE_8_6_0); - public static final IndexVersion V_7_10_0 = def(7_10_00_99, Version.LUCENE_8_7_0); - public static final IndexVersion V_7_11_0 = def(7_11_00_99, Version.LUCENE_8_7_0); - public static final IndexVersion 
V_7_12_0 = def(7_12_00_99, Version.LUCENE_8_8_0); - public static final IndexVersion V_7_13_0 = def(7_13_00_99, Version.LUCENE_8_8_2); - public static final IndexVersion V_7_14_0 = def(7_14_00_99, Version.LUCENE_8_9_0); - public static final IndexVersion V_7_15_0 = def(7_15_00_99, Version.LUCENE_8_9_0); - public static final IndexVersion V_7_16_0 = def(7_16_00_99, Version.LUCENE_8_10_1); - public static final IndexVersion V_7_17_0 = def(7_17_00_99, Version.LUCENE_8_11_1); + + public static final IndexVersion V_7_0_0 = def(7_00_00_99, parseUnchecked("8.0.0")); + public static final IndexVersion V_7_1_0 = def(7_01_00_99, parseUnchecked("8.0.0")); + public static final IndexVersion V_7_2_0 = def(7_02_00_99, parseUnchecked("8.0.0")); + public static final IndexVersion V_7_2_1 = def(7_02_01_99, parseUnchecked("8.0.0")); + public static final IndexVersion V_7_3_0 = def(7_03_00_99, parseUnchecked("8.1.0")); + public static final IndexVersion V_7_4_0 = def(7_04_00_99, parseUnchecked("8.2.0")); + public static final IndexVersion V_7_5_0 = def(7_05_00_99, parseUnchecked("8.3.0")); + public static final IndexVersion V_7_5_2 = def(7_05_02_99, parseUnchecked("8.3.0")); + public static final IndexVersion V_7_6_0 = def(7_06_00_99, parseUnchecked("8.4.0")); + public static final IndexVersion V_7_7_0 = def(7_07_00_99, parseUnchecked("8.5.1")); + public static final IndexVersion V_7_8_0 = def(7_08_00_99, parseUnchecked("8.5.1")); + public static final IndexVersion V_7_9_0 = def(7_09_00_99, parseUnchecked("8.6.0")); + public static final IndexVersion V_7_10_0 = def(7_10_00_99, parseUnchecked("8.7.0")); + public static final IndexVersion V_7_11_0 = def(7_11_00_99, parseUnchecked("8.7.0")); + public static final IndexVersion V_7_12_0 = def(7_12_00_99, parseUnchecked("8.8.0")); + public static final IndexVersion V_7_13_0 = def(7_13_00_99, parseUnchecked("8.8.2")); + public static final IndexVersion V_7_14_0 = def(7_14_00_99, parseUnchecked("8.9.0")); + public static final IndexVersion 
V_7_15_0 = def(7_15_00_99, parseUnchecked("8.9.0")); + public static final IndexVersion V_7_16_0 = def(7_16_00_99, parseUnchecked("8.10.1")); + public static final IndexVersion V_7_17_0 = def(7_17_00_99, parseUnchecked("8.11.1")); public static final IndexVersion V_8_0_0 = def(8_00_00_99, Version.LUCENE_9_0_0); public static final IndexVersion V_8_1_0 = def(8_01_00_99, Version.LUCENE_9_0_0); public static final IndexVersion V_8_2_0 = def(8_02_00_99, Version.LUCENE_9_1_0); @@ -118,6 +128,9 @@ private static IndexVersion def(int id, Version luceneVersion) { public static final IndexVersion MERGE_ON_RECOVERY_VERSION = def(8_515_00_0, Version.LUCENE_9_11_1); public static final IndexVersion UPGRADE_TO_LUCENE_9_12 = def(8_516_00_0, Version.LUCENE_9_12_0); public static final IndexVersion ENABLE_IGNORE_ABOVE_LOGSDB = def(8_517_00_0, Version.LUCENE_9_12_0); + + public static final IndexVersion UPGRADE_TO_LUCENE_10_0_0 = def(9_000_00_0, Version.LUCENE_10_0_0); + /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java index 144b99abe5644..c1c392ac07f18 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java +++ b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java @@ -12,7 +12,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FieldInfosFormat; import org.apache.lucene.codecs.FilterCodec; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.FeatureFlag; import org.elasticsearch.core.Nullable; @@ -46,7 +46,7 @@ public class CodecService implements CodecProvider { public CodecService(@Nullable MapperService mapperService, BigArrays 
bigArrays) { final var codecs = new HashMap(); - Codec legacyBestSpeedCodec = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_SPEED, mapperService, bigArrays); + Codec legacyBestSpeedCodec = new LegacyPerFieldMapperCodec(Lucene100Codec.Mode.BEST_SPEED, mapperService, bigArrays); if (ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) { codecs.put(DEFAULT_CODEC, new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, mapperService, bigArrays)); } else { @@ -58,7 +58,7 @@ public CodecService(@Nullable MapperService mapperService, BigArrays bigArrays) BEST_COMPRESSION_CODEC, new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION, mapperService, bigArrays) ); - Codec legacyBestCompressionCodec = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays); + Codec legacyBestCompressionCodec = new LegacyPerFieldMapperCodec(Lucene100Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays); codecs.put(LEGACY_BEST_COMPRESSION_CODEC, legacyBestCompressionCodec); codecs.put(LUCENE_DEFAULT_CODEC, Codec.getDefault()); diff --git a/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java b/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java index 2ba169583b712..00614140e237a 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java @@ -49,11 +49,12 @@ public FieldInfos read(Directory directory, SegmentInfo segmentInfo, String segm deduplicated[i++] = new FieldInfo( FieldMapper.internFieldName(fi.getName()), fi.number, - fi.hasVectors(), + fi.hasTermVectors(), fi.omitsNorms(), fi.hasPayloads(), fi.getIndexOptions(), fi.getDocValuesType(), + fi.docValuesSkipIndexType(), fi.getDocValuesGen(), internStringStringMap(fi.attributes()), fi.getPointDimensionCount(), diff --git 
a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java index 27ff19a9d8e40..9f46050f68f99 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java @@ -9,12 +9,12 @@ package org.elasticsearch.index.codec; +import org.apache.lucene.backward_codecs.lucene912.Lucene912Codec; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.StoredFieldsFormat; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.codecs.lucene912.Lucene912PostingsFormat; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java new file mode 100644 index 0000000000000..4154a242c15ed --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java @@ -0,0 +1,131 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.index.codec; + +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.StoredFieldsFormat; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; +import org.apache.lucene.codecs.lucene912.Lucene912PostingsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; +import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; +import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; +import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; +import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; + +/** + * Elasticsearch codec as of 9.0. This extends the Lucene 10.0 codec to compressed stored fields with ZSTD instead of LZ4/DEFLATE. See + * {@link Zstd814StoredFieldsFormat}. + */ +public class Elasticsearch900Codec extends CodecService.DeduplicateFieldInfosCodec { + + private final StoredFieldsFormat storedFieldsFormat; + + private final PostingsFormat defaultPostingsFormat; + private final PostingsFormat postingsFormat = new PerFieldPostingsFormat() { + @Override + public PostingsFormat getPostingsFormatForField(String field) { + return Elasticsearch900Codec.this.getPostingsFormatForField(field); + } + }; + + private final DocValuesFormat defaultDVFormat; + private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() { + @Override + public DocValuesFormat getDocValuesFormatForField(String field) { + return Elasticsearch900Codec.this.getDocValuesFormatForField(field); + } + }; + + private final KnnVectorsFormat defaultKnnVectorsFormat; + private final KnnVectorsFormat knnVectorsFormat = new PerFieldKnnVectorsFormat() { + @Override + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return Elasticsearch900Codec.this.getKnnVectorsFormatForField(field); 
+ } + }; + + /** Public no-arg constructor, needed for SPI loading at read-time. */ + public Elasticsearch900Codec() { + this(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); + } + + /** + * Constructor. Takes a {@link Zstd814StoredFieldsFormat.Mode} that describes whether to optimize for retrieval speed at the expense of + * worse space-efficiency or vice-versa. + */ + public Elasticsearch900Codec(Zstd814StoredFieldsFormat.Mode mode) { + super("Elasticsearch900", new Lucene100Codec()); + this.storedFieldsFormat = mode.getFormat(); + this.defaultPostingsFormat = new Lucene912PostingsFormat(); + this.defaultDVFormat = new Lucene90DocValuesFormat(); + this.defaultKnnVectorsFormat = new Lucene99HnswVectorsFormat(); + } + + @Override + public StoredFieldsFormat storedFieldsFormat() { + return storedFieldsFormat; + } + + @Override + public final PostingsFormat postingsFormat() { + return postingsFormat; + } + + @Override + public final DocValuesFormat docValuesFormat() { + return docValuesFormat; + } + + @Override + public final KnnVectorsFormat knnVectorsFormat() { + return knnVectorsFormat; + } + + /** + * Returns the postings format that should be used for writing new segments of field. + * + *

    The default implementation always returns "Lucene912". + * + *

    WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation, + */ + public PostingsFormat getPostingsFormatForField(String field) { + return defaultPostingsFormat; + } + + /** + * Returns the docvalues format that should be used for writing new segments of field + * . + * + *

    The default implementation always returns "Lucene90". + *

    WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation. + */ + public DocValuesFormat getDocValuesFormatForField(String field) { + return defaultDVFormat; + } + + /** + * Returns the vectors format that should be used for writing new segments of field + * + *

    The default implementation always returns "Lucene99HnswVectorsFormat". + * + *

    WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation. + */ + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return defaultKnnVectorsFormat; + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java index 64c2ca788f63c..bf2c5a9f01e29 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java @@ -13,7 +13,7 @@ import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.mapper.MapperService; @@ -22,11 +22,11 @@ * Legacy version of {@link PerFieldMapperCodec}. This codec is preserved to give an escape hatch in case we encounter issues with new * changes in {@link PerFieldMapperCodec}. */ -public final class LegacyPerFieldMapperCodec extends Lucene912Codec { +public final class LegacyPerFieldMapperCodec extends Lucene100Codec { private final PerFieldFormatSupplier formatSupplier; - public LegacyPerFieldMapperCodec(Lucene912Codec.Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { + public LegacyPerFieldMapperCodec(Lucene100Codec.Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { super(compressionMode); this.formatSupplier = new PerFieldFormatSupplier(mapperService, bigArrays); // If the below assertion fails, it is a sign that Lucene released a new codec. 
You must create a copy of the current Elasticsearch diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java index 83c5cb396d88b..b60b88da5949d 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java @@ -26,7 +26,7 @@ * per index in real time via the mapping API. If no specific postings format or vector format is * configured for a specific field the default postings or vector format is used. */ -public final class PerFieldMapperCodec extends Elasticsearch816Codec { +public final class PerFieldMapperCodec extends Elasticsearch900Codec { private final PerFieldFormatSupplier formatSupplier; diff --git a/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES85BloomFilterPostingsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES85BloomFilterPostingsFormat.java index d26fb52a82bcd..81129835518da 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES85BloomFilterPostingsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES85BloomFilterPostingsFormat.java @@ -36,7 +36,6 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.store.ChecksumIndexInput; -import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.RandomAccessInput; @@ -142,12 +141,7 @@ static final class FieldsReader extends FieldsProducer { FieldsReader(SegmentReadState state) throws IOException { boolean success = false; - try ( - ChecksumIndexInput metaIn = state.directory.openChecksumInput( - metaFile(state.segmentInfo, state.segmentSuffix), - IOContext.READONCE - ) - ) { + try (ChecksumIndexInput metaIn = 
state.directory.openChecksumInput(metaFile(state.segmentInfo, state.segmentSuffix))) { CodecUtil.checkIndexHeader( metaIn, BLOOM_CODEC_NAME, diff --git a/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES87BloomFilterPostingsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES87BloomFilterPostingsFormat.java index 01d874adec14d..abf68abe51887 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES87BloomFilterPostingsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES87BloomFilterPostingsFormat.java @@ -38,7 +38,6 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.store.ChecksumIndexInput; -import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.RandomAccessInput; @@ -291,12 +290,7 @@ static final class FieldsReader extends FieldsProducer { FieldsReader(SegmentReadState state) throws IOException { boolean success = false; - try ( - ChecksumIndexInput metaIn = state.directory.openChecksumInput( - metaFile(state.segmentInfo, state.segmentSuffix), - IOContext.READONCE - ) - ) { + try (ChecksumIndexInput metaIn = state.directory.openChecksumInput(metaFile(state.segmentInfo, state.segmentSuffix))) { Map bloomFilters = null; Throwable priorE = null; long indexFileLength = 0; diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java index 5d79807fe6674..dc73428a07c7c 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java @@ -15,6 +15,7 @@ import org.apache.lucene.codecs.lucene90.IndexedDISI; import org.apache.lucene.index.BinaryDocValues; import 
org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.EmptyDocValuesProducer; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.IndexFileNames; @@ -41,9 +42,13 @@ import org.elasticsearch.core.IOUtils; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; +import java.util.List; import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.DIRECT_MONOTONIC_BLOCK_SHIFT; +import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SKIP_INDEX_LEVEL_SHIFT; +import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SKIP_INDEX_MAX_LEVEL; import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SORTED_SET; final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { @@ -51,9 +56,16 @@ final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { IndexOutput data, meta; final int maxDoc; private byte[] termsDictBuffer; - - ES87TSDBDocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) - throws IOException { + private final int skipIndexIntervalSize; + + ES87TSDBDocValuesConsumer( + SegmentWriteState state, + int skipIndexIntervalSize, + String dataCodec, + String dataExtension, + String metaCodec, + String metaExtension + ) throws IOException { this.termsDictBuffer = new byte[1 << 14]; boolean success = false; try { @@ -76,6 +88,7 @@ final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { state.segmentSuffix ); maxDoc = state.segmentInfo.maxDoc(); + this.skipIndexIntervalSize = skipIndexIntervalSize; success = true; } finally { if (success == false) { @@ -88,12 +101,17 @@ final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { public void addNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { meta.writeInt(field.number); meta.writeByte(ES87TSDBDocValuesFormat.NUMERIC); - 
writeField(field, new EmptyDocValuesProducer() { + DocValuesProducer producer = new EmptyDocValuesProducer() { @Override public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { return DocValues.singleton(valuesProducer.getNumeric(field)); } - }, -1); + }; + if (field.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE) { + writeSkipIndex(field, producer); + } + + writeField(field, producer, -1); } private long[] writeField(FieldInfo field, DocValuesProducer valuesProducer, long maxOrd) throws IOException { @@ -263,13 +281,11 @@ public void addBinaryField(FieldInfo field, DocValuesProducer valuesProducer) th public void addSortedField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { meta.writeInt(field.number); meta.writeByte(ES87TSDBDocValuesFormat.SORTED); - doAddSortedField(field, valuesProducer); + doAddSortedField(field, valuesProducer, false); } - private void doAddSortedField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { - SortedDocValues sorted = valuesProducer.getSorted(field); - int maxOrd = sorted.getValueCount(); - writeField(field, new EmptyDocValuesProducer() { + private void doAddSortedField(FieldInfo field, DocValuesProducer valuesProducer, boolean addTypeByte) throws IOException { + DocValuesProducer producer = new EmptyDocValuesProducer() { @Override public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { SortedDocValues sorted = valuesProducer.getSorted(field); @@ -306,7 +322,16 @@ public long cost() { }; return DocValues.singleton(sortedOrds); } - }, maxOrd); + }; + if (field.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE) { + writeSkipIndex(field, producer); + } + if (addTypeByte) { + meta.writeByte((byte) 0); // multiValued (0 = singleValued) + } + SortedDocValues sorted = valuesProducer.getSorted(field); + int maxOrd = sorted.getValueCount(); + writeField(field, producer, maxOrd); 
addTermsDict(DocValues.singleton(valuesProducer.getSorted(field))); } @@ -459,6 +484,12 @@ public void addSortedNumericField(FieldInfo field, DocValuesProducer valuesProdu } private void writeSortedNumericField(FieldInfo field, DocValuesProducer valuesProducer, long maxOrd) throws IOException { + if (field.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE) { + writeSkipIndex(field, valuesProducer); + } + if (maxOrd > -1) { + meta.writeByte((byte) 1); // multiValued (1 = multiValued) + } long[] stats = writeField(field, valuesProducer, maxOrd); int numDocsWithField = Math.toIntExact(stats[0]); long numValues = stats[1]; @@ -510,16 +541,14 @@ public void addSortedSetField(FieldInfo field, DocValuesProducer valuesProducer) meta.writeByte(SORTED_SET); if (isSingleValued(valuesProducer.getSortedSet(field))) { - meta.writeByte((byte) 0); // multiValued (0 = singleValued) doAddSortedField(field, new EmptyDocValuesProducer() { @Override public SortedDocValues getSorted(FieldInfo field) throws IOException { return SortedSetSelector.wrap(valuesProducer.getSortedSet(field), SortedSetSelector.Type.MIN); } - }); + }, true); return; } - meta.writeByte((byte) 1); // multiValued (1 = multiValued) SortedSetDocValues values = valuesProducer.getSortedSet(field); long maxOrd = values.getValueCount(); @@ -603,4 +632,157 @@ public void close() throws IOException { meta = data = null; } } + + private static class SkipAccumulator { + int minDocID; + int maxDocID; + int docCount; + long minValue; + long maxValue; + + SkipAccumulator(int docID) { + minDocID = docID; + minValue = Long.MAX_VALUE; + maxValue = Long.MIN_VALUE; + docCount = 0; + } + + boolean isDone(int skipIndexIntervalSize, int valueCount, long nextValue, int nextDoc) { + if (docCount < skipIndexIntervalSize) { + return false; + } + // Once we reach the interval size, we will keep accepting documents if + // - next doc value is not a multi-value + // - current accumulator only contains a single value and next value is 
the same value + // - the accumulator is dense and the next doc keeps the density (no gaps) + return valueCount > 1 || minValue != maxValue || minValue != nextValue || docCount != nextDoc - minDocID; + } + + void accumulate(long value) { + minValue = Math.min(minValue, value); + maxValue = Math.max(maxValue, value); + } + + void accumulate(SkipAccumulator other) { + assert minDocID <= other.minDocID && maxDocID < other.maxDocID; + maxDocID = other.maxDocID; + minValue = Math.min(minValue, other.minValue); + maxValue = Math.max(maxValue, other.maxValue); + docCount += other.docCount; + } + + void nextDoc(int docID) { + maxDocID = docID; + ++docCount; + } + + public static SkipAccumulator merge(List list, int index, int length) { + SkipAccumulator acc = new SkipAccumulator(list.get(index).minDocID); + for (int i = 0; i < length; i++) { + acc.accumulate(list.get(index + i)); + } + return acc; + } + } + + private void writeSkipIndex(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + assert field.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE; + final long start = data.getFilePointer(); + final SortedNumericDocValues values = valuesProducer.getSortedNumeric(field); + long globalMaxValue = Long.MIN_VALUE; + long globalMinValue = Long.MAX_VALUE; + int globalDocCount = 0; + int maxDocId = -1; + final List accumulators = new ArrayList<>(); + SkipAccumulator accumulator = null; + final int maxAccumulators = 1 << (SKIP_INDEX_LEVEL_SHIFT * (SKIP_INDEX_MAX_LEVEL - 1)); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + final long firstValue = values.nextValue(); + if (accumulator != null && accumulator.isDone(skipIndexIntervalSize, values.docValueCount(), firstValue, doc)) { + globalMaxValue = Math.max(globalMaxValue, accumulator.maxValue); + globalMinValue = Math.min(globalMinValue, accumulator.minValue); + globalDocCount += accumulator.docCount; + maxDocId = accumulator.maxDocID; + accumulator = 
null; + if (accumulators.size() == maxAccumulators) { + writeLevels(accumulators); + accumulators.clear(); + } + } + if (accumulator == null) { + accumulator = new SkipAccumulator(doc); + accumulators.add(accumulator); + } + accumulator.nextDoc(doc); + accumulator.accumulate(firstValue); + for (int i = 1, end = values.docValueCount(); i < end; ++i) { + accumulator.accumulate(values.nextValue()); + } + } + + if (accumulators.isEmpty() == false) { + globalMaxValue = Math.max(globalMaxValue, accumulator.maxValue); + globalMinValue = Math.min(globalMinValue, accumulator.minValue); + globalDocCount += accumulator.docCount; + maxDocId = accumulator.maxDocID; + writeLevels(accumulators); + } + meta.writeLong(start); // record the start in meta + meta.writeLong(data.getFilePointer() - start); // record the length + assert globalDocCount == 0 || globalMaxValue >= globalMinValue; + meta.writeLong(globalMaxValue); + meta.writeLong(globalMinValue); + assert globalDocCount <= maxDocId + 1; + meta.writeInt(globalDocCount); + meta.writeInt(maxDocId); + } + + private void writeLevels(List accumulators) throws IOException { + final List> accumulatorsLevels = new ArrayList<>(SKIP_INDEX_MAX_LEVEL); + accumulatorsLevels.add(accumulators); + for (int i = 0; i < SKIP_INDEX_MAX_LEVEL - 1; i++) { + accumulatorsLevels.add(buildLevel(accumulatorsLevels.get(i))); + } + int totalAccumulators = accumulators.size(); + for (int index = 0; index < totalAccumulators; index++) { + // compute how many levels we need to write for the current accumulator + final int levels = getLevels(index, totalAccumulators); + // write the number of levels + data.writeByte((byte) levels); + // write intervals in reverse order. 
This is done so we don't + // need to read all of them in case of slipping + for (int level = levels - 1; level >= 0; level--) { + final SkipAccumulator accumulator = accumulatorsLevels.get(level).get(index >> (SKIP_INDEX_LEVEL_SHIFT * level)); + data.writeInt(accumulator.maxDocID); + data.writeInt(accumulator.minDocID); + data.writeLong(accumulator.maxValue); + data.writeLong(accumulator.minValue); + data.writeInt(accumulator.docCount); + } + } + } + + private static List buildLevel(List accumulators) { + final int levelSize = 1 << SKIP_INDEX_LEVEL_SHIFT; + final List collector = new ArrayList<>(); + for (int i = 0; i < accumulators.size() - levelSize + 1; i += levelSize) { + collector.add(SkipAccumulator.merge(accumulators, i, levelSize)); + } + return collector; + } + + private static int getLevels(int index, int size) { + if (Integer.numberOfTrailingZeros(index) >= SKIP_INDEX_LEVEL_SHIFT) { + // TODO: can we do it in constant time rather than linearly with SKIP_INDEX_MAX_LEVEL? + final int left = size - index; + for (int level = SKIP_INDEX_MAX_LEVEL - 1; level > 0; level--) { + final int numberIntervals = 1 << (SKIP_INDEX_LEVEL_SHIFT * level); + if (left >= numberIntervals && index % numberIntervals == 0) { + return level + 1; + } + } + } + return 1; + } + } diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormat.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormat.java index 742249892f61f..496c41b42869a 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormat.java @@ -43,13 +43,57 @@ public class ES87TSDBDocValuesFormat extends org.apache.lucene.codecs.DocValuesF static final int TERMS_DICT_REVERSE_INDEX_SIZE = 1 << TERMS_DICT_REVERSE_INDEX_SHIFT; static final int TERMS_DICT_REVERSE_INDEX_MASK = TERMS_DICT_REVERSE_INDEX_SIZE - 1; + // number of documents in an 
interval + private static final int DEFAULT_SKIP_INDEX_INTERVAL_SIZE = 4096; + // bytes on an interval: + // * 1 byte : number of levels + // * 16 bytes: min / max value, + // * 8 bytes: min / max docID + // * 4 bytes: number of documents + private static final long SKIP_INDEX_INTERVAL_BYTES = 29L; + // number of intervals represented as a shift to create a new level, this is 1 << 3 == 8 + // intervals. + static final int SKIP_INDEX_LEVEL_SHIFT = 3; + // max number of levels + // Increasing this number, it increases how much heap we need at index time. + // we currently need (1 * 8 * 8 * 8) = 512 accumulators on heap + static final int SKIP_INDEX_MAX_LEVEL = 4; + // number of bytes to skip when skipping a level. It does not take into account the + // current interval that is being read. + static final long[] SKIP_INDEX_JUMP_LENGTH_PER_LEVEL = new long[SKIP_INDEX_MAX_LEVEL]; + + static { + // Size of the interval minus read bytes (1 byte for level and 4 bytes for maxDocID) + SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[0] = SKIP_INDEX_INTERVAL_BYTES - 5L; + for (int level = 1; level < SKIP_INDEX_MAX_LEVEL; level++) { + // jump from previous level + SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level] = SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level - 1]; + // nodes added by new level + SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level] += (1 << (level * SKIP_INDEX_LEVEL_SHIFT)) * SKIP_INDEX_INTERVAL_BYTES; + // remove the byte levels added in the previous level + SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level] -= (1 << ((level - 1) * SKIP_INDEX_LEVEL_SHIFT)); + } + } + + private final int skipIndexIntervalSize; + + /** Default constructor. */ public ES87TSDBDocValuesFormat() { + this(DEFAULT_SKIP_INDEX_INTERVAL_SIZE); + } + + /** Doc values fields format with specified skipIndexIntervalSize. 
*/ + public ES87TSDBDocValuesFormat(int skipIndexIntervalSize) { super(CODEC_NAME); + if (skipIndexIntervalSize < 2) { + throw new IllegalArgumentException("skipIndexIntervalSize must be > 1, got [" + skipIndexIntervalSize + "]"); + } + this.skipIndexIntervalSize = skipIndexIntervalSize; } @Override public DocValuesConsumer fieldsConsumer(SegmentWriteState state) throws IOException { - return new ES87TSDBDocValuesConsumer(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION); + return new ES87TSDBDocValuesConsumer(state, skipIndexIntervalSize, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java index e3f7e829c1d2e..d5c94de1c6942 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java @@ -16,6 +16,8 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipIndexType; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.ImpactsEnum; @@ -27,6 +29,7 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.DataInput; @@ -43,6 +46,8 @@ import java.util.HashMap; import java.util.Map; +import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SKIP_INDEX_JUMP_LENGTH_PER_LEVEL; +import static 
org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SKIP_INDEX_MAX_LEVEL; import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.TERMS_DICT_BLOCK_LZ4_SHIFT; public class ES87TSDBDocValuesProducer extends DocValuesProducer { @@ -51,6 +56,7 @@ public class ES87TSDBDocValuesProducer extends DocValuesProducer { private final Map sorted = new HashMap<>(); private final Map sortedSets = new HashMap<>(); private final Map sortedNumerics = new HashMap<>(); + private final Map skippers = new HashMap<>(); private final IndexInput data; private final int maxDoc; @@ -61,7 +67,7 @@ public class ES87TSDBDocValuesProducer extends DocValuesProducer { // read in the entries from the metadata file. int version = -1; - try (ChecksumIndexInput in = state.directory.openChecksumInput(metaName, state.context)) { + try (ChecksumIndexInput in = state.directory.openChecksumInput(metaName)) { Throwable priorE = null; try { @@ -659,9 +665,8 @@ public long nextOrd() throws IOException { i = 0; count = ords.docValueCount(); } - if (i++ == count) { - return NO_MORE_ORDS; - } + assert i < count; + i++; return ords.nextValue(); } @@ -700,6 +705,116 @@ public long cost() { }; } + @Override + public DocValuesSkipper getSkipper(FieldInfo field) throws IOException { + final DocValuesSkipperEntry entry = skippers.get(field.name); + + final IndexInput input = data.slice("doc value skipper", entry.offset, entry.length); + // Prefetch the first page of data. Following pages are expected to get prefetched through + // read-ahead. + if (input.length() > 0) { + input.prefetch(0, 1); + } + // TODO: should we write to disk the actual max level for this segment? 
+ return new DocValuesSkipper() { + final int[] minDocID = new int[SKIP_INDEX_MAX_LEVEL]; + final int[] maxDocID = new int[SKIP_INDEX_MAX_LEVEL]; + + { + for (int i = 0; i < SKIP_INDEX_MAX_LEVEL; i++) { + minDocID[i] = maxDocID[i] = -1; + } + } + + final long[] minValue = new long[SKIP_INDEX_MAX_LEVEL]; + final long[] maxValue = new long[SKIP_INDEX_MAX_LEVEL]; + final int[] docCount = new int[SKIP_INDEX_MAX_LEVEL]; + int levels = 1; + + @Override + public void advance(int target) throws IOException { + if (target > entry.maxDocId) { + // skipper is exhausted + for (int i = 0; i < SKIP_INDEX_MAX_LEVEL; i++) { + minDocID[i] = maxDocID[i] = DocIdSetIterator.NO_MORE_DOCS; + } + } else { + // find next interval + assert target > maxDocID[0] : "target must be bigger that current interval"; + while (true) { + levels = input.readByte(); + assert levels <= SKIP_INDEX_MAX_LEVEL && levels > 0 : "level out of range [" + levels + "]"; + boolean valid = true; + // check if current interval is competitive or we can jump to the next position + for (int level = levels - 1; level >= 0; level--) { + if ((maxDocID[level] = input.readInt()) < target) { + input.skipBytes(SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level]); // the jump for the level + valid = false; + break; + } + minDocID[level] = input.readInt(); + maxValue[level] = input.readLong(); + minValue[level] = input.readLong(); + docCount[level] = input.readInt(); + } + if (valid) { + // adjust levels + while (levels < SKIP_INDEX_MAX_LEVEL && maxDocID[levels] >= target) { + levels++; + } + break; + } + } + } + } + + @Override + public int numLevels() { + return levels; + } + + @Override + public int minDocID(int level) { + return minDocID[level]; + } + + @Override + public int maxDocID(int level) { + return maxDocID[level]; + } + + @Override + public long minValue(int level) { + return minValue[level]; + } + + @Override + public long maxValue(int level) { + return maxValue[level]; + } + + @Override + public int docCount(int level) { + 
return docCount[level]; + } + + @Override + public long minValue() { + return entry.minValue; + } + + @Override + public long maxValue() { + return entry.maxValue; + } + + @Override + public int docCount() { + return entry.docCount; + } + }; + } + @Override public void checkIntegrity() throws IOException { CodecUtil.checksumEntireFile(data); @@ -717,6 +832,9 @@ private void readFields(IndexInput meta, FieldInfos infos) throws IOException { throw new CorruptIndexException("Invalid field number: " + fieldNumber, meta); } byte type = meta.readByte(); + if (info.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE) { + skippers.put(info.name, readDocValueSkipperMeta(meta)); + } if (type == ES87TSDBDocValuesFormat.NUMERIC) { numerics.put(info.name, readNumeric(meta)); } else if (type == ES87TSDBDocValuesFormat.BINARY) { @@ -739,6 +857,17 @@ private static NumericEntry readNumeric(IndexInput meta) throws IOException { return entry; } + private static DocValuesSkipperEntry readDocValueSkipperMeta(IndexInput meta) throws IOException { + long offset = meta.readLong(); + long length = meta.readLong(); + long maxValue = meta.readLong(); + long minValue = meta.readLong(); + int docCount = meta.readInt(); + int maxDocID = meta.readInt(); + + return new DocValuesSkipperEntry(offset, length, minValue, maxValue, docCount, maxDocID); + } + private static void readNumeric(IndexInput meta, NumericEntry entry) throws IOException { entry.docsWithFieldOffset = meta.readLong(); entry.docsWithFieldLength = meta.readLong(); @@ -1249,6 +1378,8 @@ private void set() { } } + private record DocValuesSkipperEntry(long offset, long length, long minValue, long maxValue, int docCount, int maxDocId) {} + private static class NumericEntry { long docsWithFieldOffset; long docsWithFieldLength; diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/BinarizedByteVectorValues.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/BinarizedByteVectorValues.java index 
73dd4273a794e..cf69ab0862949 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/BinarizedByteVectorValues.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/BinarizedByteVectorValues.java @@ -19,23 +19,52 @@ */ package org.elasticsearch.index.codec.vectors; -import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.search.VectorScorer; +import org.apache.lucene.util.VectorUtil; import java.io.IOException; +import static org.elasticsearch.index.codec.vectors.BQVectorUtils.constSqrt; + /** * Copied from Lucene, replace with Lucene's implementation sometime after Lucene 10 */ -public abstract class BinarizedByteVectorValues extends DocIdSetIterator { - - public abstract float[] getCorrectiveTerms(); +public abstract class BinarizedByteVectorValues extends ByteVectorValues { - public abstract byte[] vectorValue() throws IOException; + public abstract float[] getCorrectiveTerms(int vectorOrd) throws IOException; /** Return the dimension of the vectors */ public abstract int dimension(); + /** Returns the centroid distance for the vector */ + public abstract float getCentroidDistance(int vectorOrd) throws IOException; + + /** Returns the vector magnitude for the vector */ + public abstract float getVectorMagnitude(int vectorOrd) throws IOException; + + /** Returns OOQ corrective factor for the given vector ordinal */ + public abstract float getOOQ(int targetOrd) throws IOException; + + /** + * Returns the norm of the target vector w the centroid corrective factor for the given vector + * ordinal + */ + public abstract float getNormOC(int targetOrd) throws IOException; + + /** + * Returns the target vector dot product the centroid corrective factor for the given vector + * ordinal + */ + public abstract float getODotC(int targetOrd) throws IOException; + + /** + * @return the quantizer used to quantize the vectors + */ + public abstract BinaryQuantizer 
getQuantizer(); + + public abstract float[] getCentroid() throws IOException; + /** * Return the number of vectors for this field. * @@ -43,9 +72,16 @@ public abstract class BinarizedByteVectorValues extends DocIdSetIterator { */ public abstract int size(); - @Override - public final long cost() { - return size(); + int discretizedDimensions() { + return BQVectorUtils.discretize(dimension(), 64); + } + + float sqrtDimensions() { + return (float) constSqrt(dimension()); + } + + float maxX1() { + return (float) (1.9 / constSqrt(discretizedDimensions() - 1.0)); } /** @@ -55,4 +91,13 @@ public final long cost() { * @return a {@link VectorScorer} instance or null */ public abstract VectorScorer scorer(float[] query) throws IOException; + + @Override + public abstract BinarizedByteVectorValues copy() throws IOException; + + float getCentroidDP() throws IOException { + // this only gets executed on-merge + float[] centroid = getCentroid(); + return VectorUtil.dotProduct(centroid, centroid); + } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java index cc5454ee074e6..ab882c8b04648 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java @@ -152,10 +152,5 @@ public void search(String field, byte[] target, KnnCollector knnCollector, Bits public void close() throws IOException { reader.close(); } - - @Override - public long ramBytesUsed() { - return reader.ramBytesUsed(); - } } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java index 9491598653c44..662e4040511e2 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java +++ 
b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java @@ -160,11 +160,5 @@ public void search(String field, byte[] target, KnnCollector knnCollector, Bits public void close() throws IOException { reader.close(); } - - @Override - public long ramBytesUsed() { - return reader.ramBytesUsed(); - } - } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java index 10a20839ab3c5..4c4fd00806954 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java @@ -22,18 +22,17 @@ import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FloatVectorValues; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.MergeState; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.Sorter; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.util.hnsw.CloseableRandomVectorScorerSupplier; -import org.apache.lucene.util.hnsw.RandomAccessVectorValues; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.QuantizedVectorsReader; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizer; import org.elasticsearch.simdvec.VectorScorerFactory; import org.elasticsearch.simdvec.VectorSimilarityType; @@ -246,9 +245,9 @@ public String toString() { } @Override - public RandomVectorScorerSupplier 
getRandomVectorScorerSupplier(VectorSimilarityFunction sim, RandomAccessVectorValues values) + public RandomVectorScorerSupplier getRandomVectorScorerSupplier(VectorSimilarityFunction sim, KnnVectorValues values) throws IOException { - if (values instanceof RandomAccessQuantizedByteVectorValues qValues && values.getSlice() != null) { + if (values instanceof QuantizedByteVectorValues qValues && qValues.getSlice() != null) { // TODO: optimize int4 quantization if (qValues.getScalarQuantizer().getBits() != 7) { return delegate.getRandomVectorScorerSupplier(sim, values); @@ -256,7 +255,7 @@ public RandomVectorScorerSupplier getRandomVectorScorerSupplier(VectorSimilarity if (factory != null) { var scorer = factory.getInt7SQVectorScorerSupplier( VectorSimilarityType.of(sim), - values.getSlice(), + qValues.getSlice(), qValues, qValues.getScalarQuantizer().getConstantMultiplier() ); @@ -269,9 +268,9 @@ public RandomVectorScorerSupplier getRandomVectorScorerSupplier(VectorSimilarity } @Override - public RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, RandomAccessVectorValues values, float[] query) + public RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, KnnVectorValues values, float[] query) throws IOException { - if (values instanceof RandomAccessQuantizedByteVectorValues qValues && values.getSlice() != null) { + if (values instanceof QuantizedByteVectorValues qValues && qValues.getSlice() != null) { // TODO: optimize int4 quantization if (qValues.getScalarQuantizer().getBits() != 7) { return delegate.getRandomVectorScorer(sim, values, query); @@ -287,7 +286,7 @@ public RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, Ra } @Override - public RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, RandomAccessVectorValues values, byte[] query) + public RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, KnnVectorValues values, byte[] query) throws IOException { return 
delegate.getRandomVectorScorer(sim, values, query); } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java index 7e586e210afd3..18668f4f304b0 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java @@ -14,14 +14,15 @@ import org.apache.lucene.codecs.hnsw.FlatVectorsScorer; import org.apache.lucene.codecs.hnsw.FlatVectorsWriter; import org.apache.lucene.codecs.lucene99.Lucene99FlatVectorsFormat; +import org.apache.lucene.index.ByteVectorValues; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.util.VectorUtil; -import org.apache.lucene.util.hnsw.RandomAccessVectorValues; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import java.io.IOException; @@ -68,14 +69,14 @@ public String toString() { @Override public RandomVectorScorerSupplier getRandomVectorScorerSupplier( VectorSimilarityFunction vectorSimilarityFunction, - RandomAccessVectorValues randomAccessVectorValues + KnnVectorValues vectorValues ) throws IOException { - assert randomAccessVectorValues instanceof RandomAccessVectorValues.Bytes; + assert vectorValues instanceof ByteVectorValues; assert vectorSimilarityFunction == VectorSimilarityFunction.EUCLIDEAN; - if (randomAccessVectorValues instanceof RandomAccessVectorValues.Bytes randomAccessVectorValuesBytes) { - assert randomAccessVectorValues instanceof 
RandomAccessQuantizedByteVectorValues == false; + if (vectorValues instanceof ByteVectorValues byteVectorValues) { + assert byteVectorValues instanceof QuantizedByteVectorValues == false; return switch (vectorSimilarityFunction) { - case DOT_PRODUCT, MAXIMUM_INNER_PRODUCT, COSINE, EUCLIDEAN -> new HammingScorerSupplier(randomAccessVectorValuesBytes); + case DOT_PRODUCT, MAXIMUM_INNER_PRODUCT, COSINE, EUCLIDEAN -> new HammingScorerSupplier(byteVectorValues); }; } throw new IllegalArgumentException("Unsupported vector type or similarity function"); @@ -84,18 +85,15 @@ public RandomVectorScorerSupplier getRandomVectorScorerSupplier( @Override public RandomVectorScorer getRandomVectorScorer( VectorSimilarityFunction vectorSimilarityFunction, - RandomAccessVectorValues randomAccessVectorValues, - byte[] bytes - ) { - assert randomAccessVectorValues instanceof RandomAccessVectorValues.Bytes; + KnnVectorValues vectorValues, + byte[] target + ) throws IOException { + assert vectorValues instanceof ByteVectorValues; assert vectorSimilarityFunction == VectorSimilarityFunction.EUCLIDEAN; - if (randomAccessVectorValues instanceof RandomAccessVectorValues.Bytes randomAccessVectorValuesBytes) { - checkDimensions(bytes.length, randomAccessVectorValuesBytes.dimension()); + if (vectorValues instanceof ByteVectorValues byteVectorValues) { + checkDimensions(target.length, byteVectorValues.dimension()); return switch (vectorSimilarityFunction) { - case DOT_PRODUCT, MAXIMUM_INNER_PRODUCT, COSINE, EUCLIDEAN -> new HammingVectorScorer( - randomAccessVectorValuesBytes, - bytes - ); + case DOT_PRODUCT, MAXIMUM_INNER_PRODUCT, COSINE, EUCLIDEAN -> new HammingVectorScorer(byteVectorValues, target); }; } throw new IllegalArgumentException("Unsupported vector type or similarity function"); @@ -103,10 +101,10 @@ public RandomVectorScorer getRandomVectorScorer( @Override public RandomVectorScorer getRandomVectorScorer( - VectorSimilarityFunction vectorSimilarityFunction, - 
RandomAccessVectorValues randomAccessVectorValues, - float[] floats - ) { + VectorSimilarityFunction similarityFunction, + KnnVectorValues vectorValues, + float[] target + ) throws IOException { throw new IllegalArgumentException("Unsupported vector type"); } } @@ -117,9 +115,9 @@ static float hammingScore(byte[] a, byte[] b) { static class HammingVectorScorer extends RandomVectorScorer.AbstractRandomVectorScorer { private final byte[] query; - private final RandomAccessVectorValues.Bytes byteValues; + private final ByteVectorValues byteValues; - HammingVectorScorer(RandomAccessVectorValues.Bytes byteValues, byte[] query) { + HammingVectorScorer(ByteVectorValues byteValues, byte[] query) { super(byteValues); this.query = query; this.byteValues = byteValues; @@ -132,9 +130,9 @@ public float score(int i) throws IOException { } static class HammingScorerSupplier implements RandomVectorScorerSupplier { - private final RandomAccessVectorValues.Bytes byteValues, byteValues1, byteValues2; + private final ByteVectorValues byteValues, byteValues1, byteValues2; - HammingScorerSupplier(RandomAccessVectorValues.Bytes byteValues) throws IOException { + HammingScorerSupplier(ByteVectorValues byteValues) throws IOException { this.byteValues = byteValues; this.byteValues1 = byteValues.copy(); this.byteValues2 = byteValues.copy(); diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java index f4d22edc6dfdb..72c5da4880e75 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java @@ -20,10 +20,10 @@ package org.elasticsearch.index.codec.vectors; import org.apache.lucene.codecs.hnsw.FlatVectorsScorer; +import org.apache.lucene.index.KnnVectorValues; import 
org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.VectorUtil; -import org.apache.lucene.util.hnsw.RandomAccessVectorValues; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; import org.elasticsearch.simdvec.ESVectorUtil; @@ -45,9 +45,9 @@ public ES816BinaryFlatVectorsScorer(FlatVectorsScorer nonQuantizedDelegate) { @Override public RandomVectorScorerSupplier getRandomVectorScorerSupplier( VectorSimilarityFunction similarityFunction, - RandomAccessVectorValues vectorValues + KnnVectorValues vectorValues ) throws IOException { - if (vectorValues instanceof RandomAccessBinarizedByteVectorValues) { + if (vectorValues instanceof BinarizedByteVectorValues) { throw new UnsupportedOperationException( "getRandomVectorScorerSupplier(VectorSimilarityFunction,RandomAccessVectorValues) not implemented for binarized format" ); @@ -58,10 +58,10 @@ public RandomVectorScorerSupplier getRandomVectorScorerSupplier( @Override public RandomVectorScorer getRandomVectorScorer( VectorSimilarityFunction similarityFunction, - RandomAccessVectorValues vectorValues, + KnnVectorValues vectorValues, float[] target ) throws IOException { - if (vectorValues instanceof RandomAccessBinarizedByteVectorValues binarizedVectors) { + if (vectorValues instanceof BinarizedByteVectorValues binarizedVectors) { BinaryQuantizer quantizer = binarizedVectors.getQuantizer(); float[] centroid = binarizedVectors.getCentroid(); // FIXME: precompute this once? 
@@ -82,7 +82,7 @@ public RandomVectorScorer getRandomVectorScorer( @Override public RandomVectorScorer getRandomVectorScorer( VectorSimilarityFunction similarityFunction, - RandomAccessVectorValues vectorValues, + KnnVectorValues vectorValues, byte[] target ) throws IOException { return nonQuantizedDelegate.getRandomVectorScorer(similarityFunction, vectorValues, target); @@ -91,7 +91,7 @@ public RandomVectorScorer getRandomVectorScorer( RandomVectorScorerSupplier getRandomVectorScorerSupplier( VectorSimilarityFunction similarityFunction, ES816BinaryQuantizedVectorsWriter.OffHeapBinarizedQueryVectorValues scoringVectors, - RandomAccessBinarizedByteVectorValues targetVectors + BinarizedByteVectorValues targetVectors ) { return new BinarizedRandomVectorScorerSupplier(scoringVectors, targetVectors, similarityFunction); } @@ -104,12 +104,12 @@ public String toString() { /** Vector scorer supplier over binarized vector values */ static class BinarizedRandomVectorScorerSupplier implements RandomVectorScorerSupplier { private final ES816BinaryQuantizedVectorsWriter.OffHeapBinarizedQueryVectorValues queryVectors; - private final RandomAccessBinarizedByteVectorValues targetVectors; + private final BinarizedByteVectorValues targetVectors; private final VectorSimilarityFunction similarityFunction; BinarizedRandomVectorScorerSupplier( ES816BinaryQuantizedVectorsWriter.OffHeapBinarizedQueryVectorValues queryVectors, - RandomAccessBinarizedByteVectorValues targetVectors, + BinarizedByteVectorValues targetVectors, VectorSimilarityFunction similarityFunction ) { this.queryVectors = queryVectors; @@ -149,7 +149,7 @@ public record BinaryQueryVector(byte[] vector, BinaryQuantizer.QueryFactors fact /** Vector scorer over binarized vector values */ public static class BinarizedRandomVectorScorer extends RandomVectorScorer.AbstractRandomVectorScorer { private final BinaryQueryVector queryVector; - private final RandomAccessBinarizedByteVectorValues targetVectors; + private final 
BinarizedByteVectorValues targetVectors; private final VectorSimilarityFunction similarityFunction; private final float sqrtDimensions; @@ -157,7 +157,7 @@ public static class BinarizedRandomVectorScorer extends RandomVectorScorer.Abstr public BinarizedRandomVectorScorer( BinaryQueryVector queryVectors, - RandomAccessBinarizedByteVectorValues targetVectors, + BinarizedByteVectorValues targetVectors, VectorSimilarityFunction similarityFunction ) { super(targetVectors); diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsReader.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsReader.java index b0378fee6793d..21c4a5c449387 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsReader.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsReader.java @@ -36,6 +36,7 @@ import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.ReadAdvice; import org.apache.lucene.util.Bits; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.RamUsageEstimator; @@ -78,7 +79,7 @@ public ES816BinaryQuantizedVectorsReader( ES816BinaryQuantizedVectorsFormat.META_EXTENSION ); boolean success = false; - try (ChecksumIndexInput meta = state.directory.openChecksumInput(metaFileName, state.context)) { + try (ChecksumIndexInput meta = state.directory.openChecksumInput(metaFileName)) { Throwable priorE = null; try { versionMeta = CodecUtil.checkIndexHeader( @@ -102,7 +103,7 @@ public ES816BinaryQuantizedVectorsReader( ES816BinaryQuantizedVectorsFormat.VECTOR_DATA_CODEC_NAME, // Quantized vectors are accessed randomly from their node ID stored in the HNSW // graph. 
- state.context.withRandomAccess() + state.context.withReadAdvice(ReadAdvice.RANDOM) ); success = true; } finally { @@ -357,9 +358,9 @@ static FieldEntry create(IndexInput input, VectorEncoding vectorEncoding, Vector /** Binarized vector values holding row and quantized vector values */ protected static final class BinarizedVectorValues extends FloatVectorValues { private final FloatVectorValues rawVectorValues; - private final OffHeapBinarizedVectorValues quantizedVectorValues; + private final BinarizedByteVectorValues quantizedVectorValues; - BinarizedVectorValues(FloatVectorValues rawVectorValues, OffHeapBinarizedVectorValues quantizedVectorValues) { + BinarizedVectorValues(FloatVectorValues rawVectorValues, BinarizedByteVectorValues quantizedVectorValues) { this.rawVectorValues = rawVectorValues; this.quantizedVectorValues = quantizedVectorValues; } @@ -375,29 +376,28 @@ public int size() { } @Override - public float[] vectorValue() throws IOException { - return rawVectorValues.vectorValue(); + public float[] vectorValue(int ord) throws IOException { + return rawVectorValues.vectorValue(ord); } @Override - public int docID() { - return rawVectorValues.docID(); + public BinarizedVectorValues copy() throws IOException { + return new BinarizedVectorValues(rawVectorValues.copy(), quantizedVectorValues.copy()); } @Override - public int nextDoc() throws IOException { - int rawDocId = rawVectorValues.nextDoc(); - int quantizedDocId = quantizedVectorValues.nextDoc(); - assert rawDocId == quantizedDocId; - return quantizedDocId; + public Bits getAcceptOrds(Bits acceptDocs) { + return rawVectorValues.getAcceptOrds(acceptDocs); } @Override - public int advance(int target) throws IOException { - int rawDocId = rawVectorValues.advance(target); - int quantizedDocId = quantizedVectorValues.advance(target); - assert rawDocId == quantizedDocId; - return quantizedDocId; + public int ordToDoc(int ord) { + return rawVectorValues.ordToDoc(ord); + } + + @Override + public 
DocIndexIterator iterator() { + return rawVectorValues.iterator(); } @Override @@ -405,7 +405,7 @@ public VectorScorer scorer(float[] query) throws IOException { return quantizedVectorValues.scorer(query); } - protected OffHeapBinarizedVectorValues getQuantizedVectorValues() throws IOException { + protected BinarizedByteVectorValues getQuantizedVectorValues() throws IOException { return quantizedVectorValues; } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsWriter.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsWriter.java index 92837a8ffce45..a7774b850b64c 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsWriter.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsWriter.java @@ -30,6 +30,7 @@ import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.MergeState; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.Sorter; @@ -44,7 +45,6 @@ import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.VectorUtil; import org.apache.lucene.util.hnsw.CloseableRandomVectorScorerSupplier; -import org.apache.lucene.util.hnsw.RandomAccessVectorValues; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; import org.elasticsearch.core.SuppressForbidden; @@ -354,10 +354,11 @@ static DocsWithFieldSet writeBinarizedVectorAndQueryData( int queryCorrectionCount = binaryQuantizer.getSimilarity() != EUCLIDEAN ? 
5 : 3; final ByteBuffer queryCorrectionsBuffer = ByteBuffer.allocate(Float.BYTES * queryCorrectionCount + Short.BYTES) .order(ByteOrder.LITTLE_ENDIAN); - for (int docV = floatVectorValues.nextDoc(); docV != NO_MORE_DOCS; docV = floatVectorValues.nextDoc()) { + KnnVectorValues.DocIndexIterator iterator = floatVectorValues.iterator(); + for (int docV = iterator.nextDoc(); docV != NO_MORE_DOCS; docV = iterator.nextDoc()) { // write index vector BinaryQuantizer.QueryAndIndexResults r = binaryQuantizer.quantizeQueryAndIndex( - floatVectorValues.vectorValue(), + floatVectorValues.vectorValue(iterator.index()), toIndex, toQuery, centroid @@ -393,11 +394,12 @@ static DocsWithFieldSet writeBinarizedVectorAndQueryData( static DocsWithFieldSet writeBinarizedVectorData(IndexOutput output, BinarizedByteVectorValues binarizedByteVectorValues) throws IOException { DocsWithFieldSet docsWithField = new DocsWithFieldSet(); - for (int docV = binarizedByteVectorValues.nextDoc(); docV != NO_MORE_DOCS; docV = binarizedByteVectorValues.nextDoc()) { + KnnVectorValues.DocIndexIterator iterator = binarizedByteVectorValues.iterator(); + for (int docV = iterator.nextDoc(); docV != NO_MORE_DOCS; docV = iterator.nextDoc()) { // write vector - byte[] binaryValue = binarizedByteVectorValues.vectorValue(); + byte[] binaryValue = binarizedByteVectorValues.vectorValue(iterator.index()); output.writeBytes(binaryValue, binaryValue.length); - float[] corrections = binarizedByteVectorValues.getCorrectiveTerms(); + float[] corrections = binarizedByteVectorValues.getCorrectiveTerms(iterator.index()); for (int i = 0; i < corrections.length; i++) { output.writeInt(Float.floatToIntBits(corrections[i])); } @@ -598,8 +600,9 @@ static int calculateCentroid(MergeState mergeState, FieldInfo fieldInfo, float[] if (vectorValues == null) { continue; } - for (int doc = vectorValues.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = vectorValues.nextDoc()) { - float[] vector = vectorValues.vectorValue(); + 
KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + for (int doc = iterator.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = iterator.nextDoc()) { + float[] vector = vectorValues.vectorValue(iterator.index()); // TODO Panama sum for (int j = 0; j < vector.length; j++) { centroid[j] += vector[j]; @@ -827,23 +830,31 @@ static class BinarizedFloatVectorValues extends BinarizedByteVectorValues { private final float[] centroid; private final FloatVectorValues values; private final BinaryQuantizer quantizer; - private int lastDoc; + private int lastOrd = -1; BinarizedFloatVectorValues(FloatVectorValues delegate, BinaryQuantizer quantizer, float[] centroid) { this.values = delegate; this.quantizer = quantizer; this.binarized = new byte[BQVectorUtils.discretize(delegate.dimension(), 64) / 8]; this.centroid = centroid; - lastDoc = -1; } @Override - public float[] getCorrectiveTerms() { + public float[] getCorrectiveTerms(int ord) { + if (ord != lastOrd) { + throw new IllegalStateException( + "attempt to retrieve corrective terms for different ord " + ord + " than the quantization was done for: " + lastOrd + ); + } return corrections; } @Override - public byte[] vectorValue() throws IOException { + public byte[] vectorValue(int ord) throws IOException { + if (ord != lastOrd) { + binarize(ord); + lastOrd = ord; + } return binarized; } @@ -853,33 +864,43 @@ public int dimension() { } @Override - public int size() { - return values.size(); + public float getCentroidDistance(int vectorOrd) throws IOException { + throw new UnsupportedOperationException(); } @Override - public int docID() { - return values.docID(); + public float getVectorMagnitude(int vectorOrd) throws IOException { + throw new UnsupportedOperationException(); } @Override - public int nextDoc() throws IOException { - int doc = values.nextDoc(); - if (doc != NO_MORE_DOCS) { - binarize(); - } - lastDoc = doc; - return doc; + public float getOOQ(int targetOrd) throws IOException { + throw 
new UnsupportedOperationException(); } @Override - public int advance(int target) throws IOException { - int doc = values.advance(target); - if (doc != NO_MORE_DOCS) { - binarize(); - } - lastDoc = doc; - return doc; + public float getNormOC(int targetOrd) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public float getODotC(int targetOrd) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public BinaryQuantizer getQuantizer() { + throw new UnsupportedOperationException(); + } + + @Override + public float[] getCentroid() throws IOException { + return centroid; + } + + @Override + public int size() { + return values.size(); } @Override @@ -887,22 +908,32 @@ public VectorScorer scorer(float[] target) throws IOException { throw new UnsupportedOperationException(); } - private void binarize() throws IOException { - if (lastDoc == docID()) return; - corrections = quantizer.quantizeForIndex(values.vectorValue(), binarized, centroid); + @Override + public BinarizedByteVectorValues copy() throws IOException { + return new BinarizedFloatVectorValues(values.copy(), quantizer, centroid); + } + + private void binarize(int ord) throws IOException { + corrections = quantizer.quantizeForIndex(values.vectorValue(ord), binarized, centroid); + } + + @Override + public DocIndexIterator iterator() { + return values.iterator(); + } + + @Override + public int ordToDoc(int ord) { + return values.ordToDoc(ord); } } static class BinarizedCloseableRandomVectorScorerSupplier implements CloseableRandomVectorScorerSupplier { private final RandomVectorScorerSupplier supplier; - private final RandomAccessVectorValues vectorValues; + private final KnnVectorValues vectorValues; private final Closeable onClose; - BinarizedCloseableRandomVectorScorerSupplier( - RandomVectorScorerSupplier supplier, - RandomAccessVectorValues vectorValues, - Closeable onClose - ) { + 
BinarizedCloseableRandomVectorScorerSupplier(RandomVectorScorerSupplier supplier, KnnVectorValues vectorValues, Closeable onClose) { this.supplier = supplier; this.onClose = onClose; this.vectorValues = vectorValues; @@ -932,7 +963,6 @@ public int totalVectorCount() { static final class NormalizedFloatVectorValues extends FloatVectorValues { private final FloatVectorValues values; private final float[] normalizedVector; - int curDoc = -1; NormalizedFloatVectorValues(FloatVectorValues values) { this.values = values; @@ -950,38 +980,25 @@ public int size() { } @Override - public float[] vectorValue() { - return normalizedVector; + public int ordToDoc(int ord) { + return values.ordToDoc(ord); } @Override - public VectorScorer scorer(float[] query) { - throw new UnsupportedOperationException(); - } - - @Override - public int docID() { - return values.docID(); + public float[] vectorValue(int ord) throws IOException { + System.arraycopy(values.vectorValue(ord), 0, normalizedVector, 0, normalizedVector.length); + VectorUtil.l2normalize(normalizedVector); + return normalizedVector; } @Override - public int nextDoc() throws IOException { - curDoc = values.nextDoc(); - if (curDoc != NO_MORE_DOCS) { - System.arraycopy(values.vectorValue(), 0, normalizedVector, 0, normalizedVector.length); - VectorUtil.l2normalize(normalizedVector); - } - return curDoc; + public DocIndexIterator iterator() { + return values.iterator(); } @Override - public int advance(int target) throws IOException { - curDoc = values.advance(target); - if (curDoc != NO_MORE_DOCS) { - System.arraycopy(values.vectorValue(), 0, normalizedVector, 0, normalizedVector.length); - VectorUtil.l2normalize(normalizedVector); - } - return curDoc; + public NormalizedFloatVectorValues copy() throws IOException { + return new NormalizedFloatVectorValues(values.copy()); } } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java 
b/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java index 628480e273b34..e7d818bb752d6 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java @@ -37,7 +37,7 @@ import static org.elasticsearch.index.codec.vectors.BQVectorUtils.constSqrt; /** Binarized vector values loaded from off-heap */ -public abstract class OffHeapBinarizedVectorValues extends BinarizedByteVectorValues implements RandomAccessBinarizedByteVectorValues { +public abstract class OffHeapBinarizedVectorValues extends BinarizedByteVectorValues { protected final int dimension; protected final int size; @@ -131,7 +131,12 @@ public float getCentroidDP() { } @Override - public float[] getCorrectiveTerms() { + public float[] getCorrectiveTerms(int targetOrd) throws IOException { + if (lastOrd == targetOrd) { + return correctiveValues; + } + slice.seek(((long) targetOrd * byteSize) + numBytes); + slice.readFloats(correctiveValues, 0, correctionsCount); return correctiveValues; } @@ -195,11 +200,6 @@ public float[] getCentroid() { return centroid; } - @Override - public IndexInput getSlice() { - return slice; - } - @Override public int getVectorByteLength() { return numBytes; @@ -252,8 +252,6 @@ public static OffHeapBinarizedVectorValues load( /** Dense off-heap binarized vector values */ public static class DenseOffHeapVectorValues extends OffHeapBinarizedVectorValues { - private int doc = -1; - public DenseOffHeapVectorValues( int dimension, int size, @@ -267,30 +265,6 @@ public DenseOffHeapVectorValues( super(dimension, size, centroid, centroidDp, binaryQuantizer, similarityFunction, vectorsScorer, slice); } - @Override - public byte[] vectorValue() throws IOException { - return vectorValue(doc); - } - - @Override - public int docID() { - return doc; - } - - @Override - public int nextDoc() { - return advance(doc + 
1); - } - - @Override - public int advance(int target) { - assert docID() < target; - if (target >= size) { - return doc = NO_MORE_DOCS; - } - return doc = target; - } - @Override public DenseOffHeapVectorValues copy() throws IOException { return new DenseOffHeapVectorValues( @@ -313,19 +287,25 @@ public Bits getAcceptOrds(Bits acceptDocs) { @Override public VectorScorer scorer(float[] target) throws IOException { DenseOffHeapVectorValues copy = copy(); + DocIndexIterator iterator = copy.iterator(); RandomVectorScorer scorer = vectorsScorer.getRandomVectorScorer(similarityFunction, copy, target); return new VectorScorer() { @Override public float score() throws IOException { - return scorer.score(copy.doc); + return scorer.score(iterator.index()); } @Override public DocIdSetIterator iterator() { - return copy; + return iterator; } }; } + + @Override + public DocIndexIterator iterator() { + return createDenseIterator(); + } } /** Sparse off-heap binarized vector values */ @@ -355,27 +335,6 @@ private static class SparseOffHeapVectorValues extends OffHeapBinarizedVectorVal this.disi = configuration.getIndexedDISI(dataIn); } - @Override - public byte[] vectorValue() throws IOException { - return vectorValue(disi.index()); - } - - @Override - public int docID() { - return disi.docID(); - } - - @Override - public int nextDoc() throws IOException { - return disi.nextDoc(); - } - - @Override - public int advance(int target) throws IOException { - assert docID() < target; - return disi.advance(target); - } - @Override public SparseOffHeapVectorValues copy() throws IOException { return new SparseOffHeapVectorValues( @@ -415,19 +374,25 @@ public int length() { }; } + @Override + public DocIndexIterator iterator() { + return IndexedDISI.asDocIndexIterator(disi); + } + @Override public VectorScorer scorer(float[] target) throws IOException { SparseOffHeapVectorValues copy = copy(); + DocIndexIterator iterator = copy.iterator(); RandomVectorScorer scorer = 
vectorsScorer.getRandomVectorScorer(similarityFunction, copy, target); return new VectorScorer() { @Override public float score() throws IOException { - return scorer.score(copy.disi.index()); + return scorer.score(iterator.index()); } @Override public DocIdSetIterator iterator() { - return copy; + return iterator; } }; } @@ -441,23 +406,8 @@ private static class EmptyOffHeapVectorValues extends OffHeapBinarizedVectorValu } @Override - public int docID() { - return doc; - } - - @Override - public int nextDoc() { - return advance(doc + 1); - } - - @Override - public int advance(int target) { - return doc = NO_MORE_DOCS; - } - - @Override - public byte[] vectorValue() { - throw new UnsupportedOperationException(); + public DocIndexIterator iterator() { + return createDenseIterator(); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/RandomAccessBinarizedByteVectorValues.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/RandomAccessBinarizedByteVectorValues.java deleted file mode 100644 index 5163baf617c29..0000000000000 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/RandomAccessBinarizedByteVectorValues.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * @notice - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - * Modifications copyright (C) 2024 Elasticsearch B.V. - */ -package org.elasticsearch.index.codec.vectors; - -import org.apache.lucene.util.VectorUtil; -import org.apache.lucene.util.hnsw.RandomAccessVectorValues; - -import java.io.IOException; - -import static org.elasticsearch.index.codec.vectors.BQVectorUtils.constSqrt; - -/** - * Copied from Lucene, replace with Lucene's implementation sometime after Lucene 10 - */ -public interface RandomAccessBinarizedByteVectorValues extends RandomAccessVectorValues.Bytes { - /** Returns the centroid distance for the vector */ - float getCentroidDistance(int vectorOrd) throws IOException; - - /** Returns the vector magnitude for the vector */ - float getVectorMagnitude(int vectorOrd) throws IOException; - - /** Returns OOQ corrective factor for the given vector ordinal */ - float getOOQ(int targetOrd) throws IOException; - - /** - * Returns the norm of the target vector w the centroid corrective factor for the given vector - * ordinal - */ - float getNormOC(int targetOrd) throws IOException; - - /** - * Returns the target vector dot product the centroid corrective factor for the given vector - * ordinal - */ - float getODotC(int targetOrd) throws IOException; - - /** - * @return the quantizer used to quantize the vectors - */ - BinaryQuantizer getQuantizer(); - - default int discretizedDimensions() { - return BQVectorUtils.discretize(dimension(), 64); - } - - default float sqrtDimensions() { - return (float) constSqrt(dimension()); - } - - default float maxX1() { - return (float) (1.9 / constSqrt(discretizedDimensions() - 1.0)); - } - - /** - * @return coarse grained centroids for the vectors - */ - float[] getCentroid() throws IOException; - - @Override - RandomAccessBinarizedByteVectorValues copy() throws IOException; - - default float getCentroidDP() throws IOException { - // this only gets executed on-merge - 
float[] centroid = getCentroid(); - return VectorUtil.dotProduct(centroid, centroid); - } -} diff --git a/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java b/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java index 05cc6d148be5e..e44b344d3b283 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java @@ -119,7 +119,7 @@ final class LuceneChangesSnapshot implements Translog.Snapshot { this.parallelArray = new ParallelArray(this.searchBatchSize); this.indexVersionCreated = indexVersionCreated; final TopDocs topDocs = searchOperations(null, accessStats); - this.totalHits = Math.toIntExact(topDocs.totalHits.value); + this.totalHits = Math.toIntExact(topDocs.totalHits.value()); this.scoreDocs = topDocs.scoreDocs; fillParallelArray(scoreDocs, parallelArray); } @@ -341,7 +341,7 @@ private Translog.Operation readDocAsOp(int docIndex) throws IOException { assert storedFieldsReaderOrd == leaf.ord : storedFieldsReaderOrd + " != " + leaf.ord; storedFieldsReader.document(segmentDocID, fields); } else { - leaf.reader().document(segmentDocID, fields); + leaf.reader().storedFields().document(segmentDocID, fields); } final Translog.Operation op; diff --git a/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java b/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java index 18b5ba69ca320..3e99818d1827b 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java +++ b/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java @@ -13,6 +13,7 @@ import org.apache.lucene.codecs.StoredFieldsReader; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.CodecReader; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.FieldInfo; 
import org.apache.lucene.index.FilterCodecReader; import org.apache.lucene.index.FilterNumericDocValues; @@ -188,6 +189,11 @@ public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException { return in.getSortedSet(field); } + @Override + public DocValuesSkipper getSkipper(FieldInfo field) throws IOException { + return in.getSkipper(field); + } + @Override public void checkIntegrity() throws IOException { in.checkIntegrity(); diff --git a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java index c7acd730fadb5..0f772b49bf92b 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java @@ -13,10 +13,11 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.DocValuesSkipIndexType; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; -import org.apache.lucene.index.Fields; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.ImpactsEnum; import org.apache.lucene.index.IndexCommit; @@ -152,6 +153,7 @@ private static class TranslogLeafReader extends LeafReader { false, IndexOptions.NONE, DocValuesType.NONE, + DocValuesSkipIndexType.NONE, -1, Collections.emptyMap(), 0, @@ -171,6 +173,7 @@ private static class TranslogLeafReader extends LeafReader { false, IndexOptions.NONE, DocValuesType.NONE, + DocValuesSkipIndexType.NONE, -1, Collections.emptyMap(), 0, @@ -190,6 +193,7 @@ private static class TranslogLeafReader extends LeafReader { false, IndexOptions.DOCS, DocValuesType.NONE, + DocValuesSkipIndexType.NONE, -1, Collections.emptyMap(), 0, @@ -346,6 +350,11 
@@ public NumericDocValues getNormValues(String field) throws IOException { return getDelegate().getNormValues(field); } + @Override + public DocValuesSkipper getDocValuesSkipper(String field) throws IOException { + return getDelegate().getDocValuesSkipper(field); + } + @Override public FloatVectorValues getFloatVectorValues(String field) throws IOException { return getDelegate().getFloatVectorValues(field); @@ -389,11 +398,6 @@ public LeafMetaData getMetaData() { return getDelegate().getMetaData(); } - @Override - public Fields getTermVectors(int docID) throws IOException { - return getDelegate().getTermVectors(docID); - } - @Override public TermVectors termVectors() throws IOException { return getDelegate().termVectors(); @@ -429,11 +433,6 @@ public int maxDoc() { return 1; } - @Override - public void document(int docID, StoredFieldVisitor visitor) throws IOException { - storedFields().document(docID, visitor); - } - private void readStoredFieldsDirectly(StoredFieldVisitor visitor) throws IOException { if (visitor.needsField(FAKE_SOURCE_FIELD) == StoredFieldVisitor.Status.YES) { BytesReference sourceBytes = operation.source(); diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalMapping.java b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalMapping.java index 84e85f3ddf2b4..d4e34181b876f 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalMapping.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalMapping.java @@ -52,12 +52,7 @@ public boolean advanceExact(int target) throws IOException { @Override public long nextOrd() throws IOException { - long segmentOrd = values.nextOrd(); - if (segmentOrd == SortedSetDocValues.NO_MORE_ORDS) { - return SortedSetDocValues.NO_MORE_ORDS; - } else { - return getGlobalOrd(segmentOrd); - } + return getGlobalOrd(values.nextOrd()); } @Override diff --git 
a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java index 0439383ccbd05..0f72e491d8110 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java @@ -40,13 +40,13 @@ public static boolean significantlySmallerThanSinglePackedOrdinals( float acceptableOverheadRatio ) { int bitsPerOrd = PackedInts.bitsRequired(numOrds); - bitsPerOrd = PackedInts.fastestFormatAndBits(numDocsWithValue, bitsPerOrd, acceptableOverheadRatio).bitsPerValue; + bitsPerOrd = PackedInts.fastestFormatAndBits(numDocsWithValue, bitsPerOrd, acceptableOverheadRatio).bitsPerValue(); // Compute the worst-case number of bits per value for offsets in the worst case, eg. if no docs have a value at the // beginning of the block and all docs have one at the end of the block final float avgValuesPerDoc = (float) numDocsWithValue / maxDoc; final int maxDelta = (int) Math.ceil(OFFSETS_PAGE_SIZE * (1 - avgValuesPerDoc) * avgValuesPerDoc); int bitsPerOffset = PackedInts.bitsRequired(maxDelta) + 1; // +1 because of the sign - bitsPerOffset = PackedInts.fastestFormatAndBits(maxDoc, bitsPerOffset, acceptableOverheadRatio).bitsPerValue; + bitsPerOffset = PackedInts.fastestFormatAndBits(maxDoc, bitsPerOffset, acceptableOverheadRatio).bitsPerValue(); final long expectedMultiSizeInBytes = (long) numDocsWithValue * bitsPerOrd + (long) maxDoc * bitsPerOffset; final long expectedSingleSizeInBytes = (long) maxDoc * bitsPerOrd; @@ -153,6 +153,7 @@ private static class MultiDocs extends AbstractSortedSetDocValues { private long currentOffset; private long currentEndOffset; + private int count; MultiDocs(MultiOrdinals ordinals, ValuesHolder values) { this.valueCount = ordinals.valueCount; @@ -170,21 +171,19 @@ public long getValueCount() { public boolean advanceExact(int docId) { 
currentOffset = docId != 0 ? endOffsets.get(docId - 1) : 0; currentEndOffset = endOffsets.get(docId); + count = Math.toIntExact(currentEndOffset - currentOffset); return currentOffset != currentEndOffset; } @Override public long nextOrd() { - if (currentOffset == currentEndOffset) { - return SortedSetDocValues.NO_MORE_ORDS; - } else { - return ords.get(currentOffset++); - } + assert currentOffset != currentEndOffset; + return ords.get(currentOffset++); } @Override public int docValueCount() { - return Math.toIntExact(currentEndOffset - currentOffset); + return count; } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 57572dea8ac0f..d05f0e477db09 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.lucene.document.Field; import org.apache.lucene.document.LongField; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.SortedNumericDocValuesField; @@ -687,7 +688,7 @@ public Query distanceFeatureQuery(Object origin, String pivot, SearchExecutionCo long pivotLong = resolution.convert(pivotTime); // As we already apply boost in AbstractQueryBuilder::toQuery, we always passing a boost of 1.0 to distanceFeatureQuery if (isIndexed()) { - return LongPoint.newDistanceFeatureQuery(name(), 1.0f, originLong, pivotLong); + return LongField.newDistanceFeatureQuery(name(), 1.0f, originLong, pivotLong); } else { return new LongScriptFieldDistanceFeatureQuery( new Script(""), @@ -959,7 +960,7 @@ private void indexValue(DocumentParserContext context, long timestamp) { } if (indexed && hasDocValues) { - context.doc().add(new LongField(fieldType().name(), timestamp)); + context.doc().add(new 
LongField(fieldType().name(), timestamp, Field.Store.NO)); } else if (hasDocValues) { context.doc().add(new SortedNumericDocValuesField(fieldType().name(), timestamp)); } else if (indexed) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java index 494005ce12cb1..d37f6c51d288d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java @@ -11,10 +11,11 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; +import org.apache.lucene.index.DocValuesSkipIndexType; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; -import org.apache.lucene.index.Fields; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; @@ -147,11 +148,6 @@ public FieldInfos getFieldInfos() { return new FieldInfos(new FieldInfo[0]); } - @Override - public void document(int docID, StoredFieldVisitor visitor) throws IOException { - storedFields().document(docID, visitor); - } - @Override public StoredFields storedFields() throws IOException { return new StoredFields() { @@ -203,6 +199,11 @@ public NumericDocValues getNormValues(String field) throws IOException { throw new UnsupportedOperationException(); } + @Override + public DocValuesSkipper getDocValuesSkipper(String s) throws IOException { + throw new UnsupportedOperationException(); + } + @Override public FloatVectorValues getFloatVectorValues(String field) throws IOException { throw new UnsupportedOperationException(); @@ -233,11 +234,6 @@ public LeafMetaData getMetaData() { throw new UnsupportedOperationException(); } - @Override - public Fields getTermVectors(int 
docID) throws IOException { - throw new UnsupportedOperationException(); - } - @Override public int numDocs() { throw new UnsupportedOperationException(); @@ -284,6 +280,7 @@ private static FieldInfo fieldInfo(String name) { false, IndexOptions.NONE, DocValuesType.NONE, + DocValuesSkipIndexType.NONE, -1, Collections.emptyMap(), 0, @@ -484,9 +481,7 @@ private static SortedSetDocValues sortedSetDocValues(List values) { @Override public long nextOrd() { i++; - if (i >= values.size()) { - return NO_MORE_ORDS; - } + assert i < values.size(); return i; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java index b9d89462c3467..8e418f45ddb3a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java @@ -22,6 +22,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.List; import java.util.Map; /** @@ -97,13 +98,13 @@ public boolean isSearchable() { @Override public Query termsQuery(Collection values, SearchExecutionContext context) { failIfNotIndexed(); - BytesRef[] bytesRefs = values.stream().map(v -> { + List bytesRefs = values.stream().map(v -> { Object idObject = v; if (idObject instanceof BytesRef) { idObject = ((BytesRef) idObject).utf8ToString(); } return Uid.encodeId(idObject.toString()); - }).toArray(BytesRef[]::new); + }).toList(); return new TermInSetQuery(name(), bytesRefs); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java b/server/src/main/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java index 6900dcd773917..8114167c02486 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java @@ -13,7 +13,6 @@ import 
org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import java.util.ArrayList; @@ -76,8 +75,8 @@ static CompiledAutomaton buildIpPrefixAutomaton(String ipPrefix) { } else { result = Automata.makeAnyBinary(); } - result = MinimizationOperations.minimize(result, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - return new CompiledAutomaton(result, null, false, 0, true); + result = Operations.determinize(result, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return new CompiledAutomaton(result, false, false, true); } private static Automaton getIpv6Automaton(String ipPrefix) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 1ff9fd2f699c9..802680e7f373e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -32,7 +32,6 @@ import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; import org.apache.lucene.util.automaton.CompiledAutomaton.AUTOMATON_TYPE; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.lucene.Lucene; @@ -491,7 +490,7 @@ public Query termsQuery(Collection values, SearchExecutionContext context) { if (isIndexed()) { return super.termsQuery(values, context); } else { - BytesRef[] bytesRefs = values.stream().map(this::indexedValueForSearch).toArray(BytesRef[]::new); + Collection bytesRefs = values.stream().map(this::indexedValueForSearch).toList(); return SortedSetDocValuesField.newSlowSetQuery(name(), 
bytesRefs); } } @@ -597,7 +596,6 @@ public TermsEnum getTerms(IndexReader reader, String prefix, boolean caseInsensi ? AutomatonQueries.caseInsensitivePrefix(prefix) : Operations.concatenate(Automata.makeString(prefix), Automata.makeAnyString()); assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); CompiledAutomaton automaton = new CompiledAutomaton(a, true, true); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java index f1924fd04f3fe..c6f1b490a2be2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java @@ -11,7 +11,6 @@ import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.query.SearchExecutionContext; @@ -70,7 +69,7 @@ public Query termQuery(Object value, SearchExecutionContext context) { @Override public Query termsQuery(Collection values, SearchExecutionContext context) { - BytesRef[] bytesRefs = values.stream().map(this::indexedValueForSearch).toArray(BytesRef[]::new); + var bytesRefs = values.stream().map(this::indexedValueForSearch).toList(); return SortedSetDocValuesField.newSlowSetQuery(name(), bytesRefs); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 2e815554dc829..3608e8ab261c1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -11,6 +11,7 @@ import org.apache.lucene.document.DoubleField; import org.apache.lucene.document.DoublePoint; 
+import org.apache.lucene.document.Field; import org.apache.lucene.document.FloatField; import org.apache.lucene.document.FloatPoint; import org.apache.lucene.document.IntField; @@ -589,7 +590,7 @@ public Query rangeQuery( public void addFields(LuceneDocument document, String name, Number value, boolean indexed, boolean docValued, boolean stored) { final float f = value.floatValue(); if (indexed && docValued) { - document.add(new FloatField(name, f)); + document.add(new FloatField(name, f, Field.Store.NO)); } else if (docValued) { document.add(new SortedNumericDocValuesField(name, NumericUtils.floatToSortableInt(f))); } else if (indexed) { @@ -743,7 +744,7 @@ public Query rangeQuery( public void addFields(LuceneDocument document, String name, Number value, boolean indexed, boolean docValued, boolean stored) { final double d = value.doubleValue(); if (indexed && docValued) { - document.add(new DoubleField(name, d)); + document.add(new DoubleField(name, d, Field.Store.NO)); } else if (docValued) { document.add(new SortedNumericDocValuesField(name, NumericUtils.doubleToSortableLong(d))); } else if (indexed) { @@ -1179,7 +1180,7 @@ public Query rangeQuery( public void addFields(LuceneDocument document, String name, Number value, boolean indexed, boolean docValued, boolean stored) { final int i = value.intValue(); if (indexed && docValued) { - document.add(new IntField(name, i)); + document.add(new IntField(name, i, Field.Store.NO)); } else if (docValued) { document.add(new SortedNumericDocValuesField(name, i)); } else if (indexed) { @@ -1330,7 +1331,7 @@ public Query rangeQuery( public void addFields(LuceneDocument document, String name, Number value, boolean indexed, boolean docValued, boolean stored) { final long l = value.longValue(); if (indexed && docValued) { - document.add(new LongField(name, l)); + document.add(new LongField(name, l, Field.Store.NO)); } else if (docValued) { document.add(new SortedNumericDocValuesField(name, l)); } else if (indexed) { diff 
--git a/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java index 9ea16933f7ab5..ceb96b87a0983 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java @@ -101,9 +101,7 @@ public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, bool failIfNotIndexed(); Term prefix = new Term(name(), indexedValueForSearch(value)); if (caseInsensitive) { - return method == null - ? new CaseInsensitivePrefixQuery(prefix, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false) - : new CaseInsensitivePrefixQuery(prefix, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false, method); + return method == null ? new CaseInsensitivePrefixQuery(prefix, false) : new CaseInsensitivePrefixQuery(prefix, false, method); } return method == null ? new PrefixQuery(prefix) : new PrefixQuery(prefix, method); } @@ -170,9 +168,7 @@ protected Query wildcardQuery( term = new Term(name(), indexedValueForSearch(value)); } if (caseInsensitive) { - return method == null - ? new CaseInsensitiveWildcardQuery(term) - : new CaseInsensitiveWildcardQuery(term, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false, method); + return method == null ? new CaseInsensitiveWildcardQuery(term) : new CaseInsensitiveWildcardQuery(term, false, method); } return method == null ? 
new WildcardQuery(term) : new WildcardQuery(term, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, method); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TermBasedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/TermBasedFieldType.java index 674a016264c3a..e2ff9cc7ea632 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TermBasedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TermBasedFieldType.java @@ -19,6 +19,7 @@ import org.elasticsearch.index.query.SearchExecutionContext; import java.util.Collection; +import java.util.List; import java.util.Map; /** Base {@link MappedFieldType} implementation for a field that is indexed @@ -69,7 +70,7 @@ public Query termQuery(Object value, SearchExecutionContext context) { @Override public Query termsQuery(Collection values, SearchExecutionContext context) { failIfNotIndexed(); - BytesRef[] bytesRefs = values.stream().map(this::indexedValueForSearch).toArray(BytesRef[]::new); + List bytesRefs = values.stream().map(this::indexedValueForSearch).toList(); return new TermInSetQuery(name(), bytesRefs); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 642539fbbc2f8..3f77edc819602 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -598,8 +598,8 @@ public Query prefixQuery( } Automaton automaton = Operations.concatenate(automata); AutomatonQuery query = method == null - ? new AutomatonQuery(new Term(name(), value + "*"), automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false) - : new AutomatonQuery(new Term(name(), value + "*"), automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false, method); + ? 
new AutomatonQuery(new Term(name(), value + "*"), automaton, false) + : new AutomatonQuery(new Term(name(), value + "*"), automaton, false, method); return new BooleanQuery.Builder().add(query, BooleanClause.Occur.SHOULD) .add(new TermQuery(new Term(parentField.name(), value)), BooleanClause.Occur.SHOULD) .build(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java index ac1de94ea7a73..93a2157b2338a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java @@ -28,10 +28,10 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.util.AttributeSource; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.IOBooleanSupplier; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.AutomatonQueries; @@ -394,7 +394,6 @@ public TermsEnum getTerms(IndexReader reader, String prefix, boolean caseInsensi a = Operations.concatenate(a, Automata.makeAnyString()); } assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); CompiledAutomaton automaton = new CompiledAutomaton(a); if (searchAfter != null) { @@ -483,6 +482,11 @@ public AttributeSource attributes() { throw new UnsupportedOperationException(); } + @Override + public IOBooleanSupplier prepareSeekExact(BytesRef bytesRef) throws IOException { + throw new UnsupportedOperationException(); + } + @Override public boolean seekExact(BytesRef text) throws IOException { throw new 
UnsupportedOperationException(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java index b94ea67c8de8d..b29f093e3a217 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java @@ -205,12 +205,8 @@ public long nextOrd() throws IOException { } long ord = delegate.nextOrd(); - if (ord != NO_MORE_ORDS && ord <= maxOrd) { - assert ord >= minOrd; - return mapOrd(ord); - } else { - return NO_MORE_ORDS; - } + assert ord <= maxOrd; + return mapOrd(ord); } @Override @@ -223,9 +219,9 @@ public boolean advanceExact(int target) throws IOException { if (delegate.advanceExact(target)) { int count = 0; - while (true) { + for (int i = 0; i < delegate.docValueCount(); i++) { long ord = delegate.nextOrd(); - if (ord == NO_MORE_ORDS || ord > maxOrd) { + if (ord > maxOrd) { break; } if (ord >= minOrd) { @@ -246,7 +242,7 @@ public boolean advanceExact(int target) throws IOException { while (true) { long ord = delegate.nextOrd(); - if (ord == NO_MORE_ORDS || ord > maxOrd) { + if (ord > maxOrd) { break; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java index e8da3b72ae7c7..04069333deb13 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java @@ -45,24 +45,13 @@ public int size() { } @Override - public float[] vectorValue() throws IOException { - // Lazy load vectors as we may iterate but not actually require the vector - return vectorValue(in.docID()); + public 
DocIndexIterator iterator() { + return in.iterator(); } @Override - public int docID() { - return in.docID(); - } - - @Override - public int nextDoc() throws IOException { - return in.nextDoc(); - } - - @Override - public int advance(int target) throws IOException { - return in.advance(target); + public FloatVectorValues copy() throws IOException { + return in.copy(); } @Override @@ -74,22 +63,24 @@ public float magnitude() { return magnitude; } - private float[] vectorValue(int docId) throws IOException { + @Override + public float[] vectorValue(int ord) throws IOException { + int docId = ordToDoc(ord); if (docId != this.docId) { this.docId = docId; hasMagnitude = decodedMagnitude(docId); // We should only copy and transform if we have a stored a non-unit length magnitude if (hasMagnitude) { - System.arraycopy(in.vectorValue(), 0, vector, 0, dimension()); + System.arraycopy(in.vectorValue(ord), 0, vector, 0, dimension()); for (int i = 0; i < vector.length; i++) { vector[i] *= magnitude; } return vector; } else { - return in.vectorValue(); + return in.vectorValue(ord); } } else { - return hasMagnitude ? vector : in.vectorValue(); + return hasMagnitude ? 
vector : in.vectorValue(ord); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index a023837a0efb7..809532c0e8f5a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.FilterLeafReader; import org.apache.lucene.index.FloatVectorValues; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SegmentReadState; @@ -2309,6 +2310,7 @@ private class IndexedSyntheticFieldLoader extends SourceLoader.DocValuesBasedSyn private ByteVectorValues byteVectorValues; private boolean hasValue; private boolean hasMagnitude; + private int ord; private final IndexVersion indexCreatedVersion; private final VectorSimilarity vectorSimilarity; @@ -2326,16 +2328,20 @@ public DocValuesLoader docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf if (indexCreatedVersion.onOrAfter(NORMALIZE_COSINE) && VectorSimilarity.COSINE.equals(vectorSimilarity)) { magnitudeReader = leafReader.getNumericDocValues(fullPath() + COSINE_MAGNITUDE_FIELD_SUFFIX); } + KnnVectorValues.DocIndexIterator iterator = values.iterator(); return docId -> { - hasValue = docId == values.advance(docId); + hasValue = docId == iterator.advance(docId); hasMagnitude = hasValue && magnitudeReader != null && magnitudeReader.advanceExact(docId); + ord = iterator.index(); return hasValue; }; } byteVectorValues = leafReader.getByteVectorValues(fullPath()); if (byteVectorValues != null) { + KnnVectorValues.DocIndexIterator iterator = byteVectorValues.iterator(); return docId -> { - hasValue = docId == byteVectorValues.advance(docId); + 
hasValue = docId == iterator.advance(docId); + ord = iterator.index(); return hasValue; }; } @@ -2358,7 +2364,7 @@ public void write(XContentBuilder b) throws IOException { } b.startArray(leafName()); if (values != null) { - for (float v : values.vectorValue()) { + for (float v : values.vectorValue(ord)) { if (hasMagnitude) { b.value(v * magnitude); } else { @@ -2366,7 +2372,7 @@ public void write(XContentBuilder b) throws IOException { } } } else if (byteVectorValues != null) { - byte[] vectorValue = byteVectorValues.vectorValue(); + byte[] vectorValue = byteVectorValues.vectorValue(ord); for (byte value : vectorValue) { b.value(value); } diff --git a/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java index 16aada4066f71..1560004b13785 100644 --- a/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java @@ -412,8 +412,8 @@ public Query createPhraseQuery(String field, String queryText, int phraseSlop) { protected Query newSynonymQuery(String field, TermAndBoost[] terms) { CombinedFieldQuery.Builder query = new CombinedFieldQuery.Builder(); for (TermAndBoost termAndBoost : terms) { - assert termAndBoost.boost == BoostAttribute.DEFAULT_BOOST; - BytesRef bytes = termAndBoost.term; + assert termAndBoost.boost() == BoostAttribute.DEFAULT_BOOST; + BytesRef bytes = termAndBoost.term(); query.addTerm(bytes); } for (FieldAndBoost fieldAndBoost : fields) { diff --git a/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java b/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java index f21edaeb94f22..b2b37ad834178 100644 --- a/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java @@ -20,7 +20,7 @@ import 
org.apache.lucene.queries.intervals.IntervalMatchesIterator; import org.apache.lucene.queries.intervals.Intervals; import org.apache.lucene.queries.intervals.IntervalsSource; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.graph.GraphTokenStreamFiniteStrings; @@ -189,7 +189,7 @@ protected List analyzeGraph(TokenStream source) throws IOExcept List clauses = new ArrayList<>(); int[] articulationPoints = graph.articulationPoints(); int lastState = 0; - int maxClauseCount = BooleanQuery.getMaxClauseCount(); + int maxClauseCount = IndexSearcher.getMaxClauseCount(); for (int i = 0; i <= articulationPoints.length; i++) { int start = lastState; int end = -1; @@ -204,7 +204,7 @@ protected List analyzeGraph(TokenStream source) throws IOExcept TokenStream ts = it.next(); IntervalsSource phrase = combineSources(analyzeTerms(ts), 0, true); if (paths.size() >= maxClauseCount) { - throw new BooleanQuery.TooManyClauses(); + throw new IndexSearcher.TooManyClauses(); } paths.add(phrase); } diff --git a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java index 55642ccf0275a..626875c75a5fe 100644 --- a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java @@ -16,8 +16,8 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; -import org.apache.lucene.search.TopFieldCollector; -import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TopFieldCollectorManager; +import org.apache.lucene.search.TopScoreDocCollectorManager; import org.apache.lucene.search.TotalHitCountCollector; import org.apache.lucene.search.TotalHits; 
import org.apache.lucene.search.Weight; @@ -443,12 +443,12 @@ public TopDocsAndMaxScore topDocs(SearchHit hit) throws IOException { TopDocsCollector topDocsCollector; MaxScoreCollector maxScoreCollector = null; if (sort() != null) { - topDocsCollector = TopFieldCollector.create(sort().sort, topN, Integer.MAX_VALUE); + topDocsCollector = new TopFieldCollectorManager(sort().sort, topN, null, Integer.MAX_VALUE, false).newCollector(); if (trackScores()) { maxScoreCollector = new MaxScoreCollector(); } } else { - topDocsCollector = TopScoreDocCollector.create(topN, Integer.MAX_VALUE); + topDocsCollector = new TopScoreDocCollectorManager(topN, null, Integer.MAX_VALUE, false).newCollector(); maxScoreCollector = new MaxScoreCollector(); } intersect(weight, innerHitQueryWeight, MultiCollector.wrap(topDocsCollector, maxScoreCollector), ctx); diff --git a/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java b/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java index 6072a81691ffa..30921d22a8d82 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java +++ b/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java @@ -10,9 +10,12 @@ import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.common.Strings; +import org.elasticsearch.core.UpdateForV10; import java.util.Locale; +import static org.apache.lucene.util.automaton.RegExp.DEPRECATED_COMPLEMENT; + /** * Regular expression syntax flags. Each flag represents optional syntax support in the regular expression: *

      @@ -37,8 +40,11 @@ public enum RegexpFlag { /** * Enables complement expression of the form: {@code ~<expression>} + * We use the deprecated support in Lucene 10. Will be removed in Lucene 11 + * https://github.com/elastic/elasticsearch/issues/113465 */ - COMPLEMENT(RegExp.COMPLEMENT), + @UpdateForV10(owner = UpdateForV10.Owner.SEARCH_FOUNDATIONS) + COMPLEMENT(DEPRECATED_COMPLEMENT), /** * Enables empty language expression: {@code #} @@ -63,7 +69,7 @@ public enum RegexpFlag { /** * Enables all available option flags */ - ALL(RegExp.ALL); + ALL(RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT); final int value; diff --git a/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java index dc439fab58ffc..461dc66322434 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java @@ -280,7 +280,9 @@ protected Query doToQuery(SearchExecutionContext context) throws QueryShardExcep int matchFlagsValue = caseInsensitive ? 
RegExp.ASCII_CASE_INSENSITIVE : 0; Query query = null; // For BWC we mask irrelevant bits (RegExp changed ALL from 0xffff to 0xff) - int sanitisedSyntaxFlag = syntaxFlagsValue & RegExp.ALL; + // We need to preserve the DEPRECATED_COMPLEMENT for now though + int deprecatedComplementFlag = syntaxFlagsValue & RegExp.DEPRECATED_COMPLEMENT; + int sanitisedSyntaxFlag = syntaxFlagsValue & (RegExp.ALL | deprecatedComplementFlag); MappedFieldType fieldType = context.getFieldType(fieldName); if (fieldType != null) { diff --git a/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java index c96771978bd42..8d3fd1d92e1e7 100644 --- a/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.elasticsearch.ElasticsearchException; @@ -184,7 +185,7 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { DocIdSetIterator approximation = DocIdSetIterator.all(context.reader().maxDoc()); final FilterScript leafScript = filterScript.newInstance(new DocValuesDocReader(lookup, context)); TwoPhaseIterator twoPhase = new TwoPhaseIterator(approximation) { @@ -201,7 +202,8 @@ public float matchCost() { return 1000f; } }; - return new ConstantScoreScorer(this, score(), scoreMode, twoPhase); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, twoPhase); + return new 
DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java index 81afdf0ebe5e0..a6116ccf2c495 100644 --- a/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java @@ -12,7 +12,6 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.Term; import org.apache.lucene.sandbox.search.CoveringQuery; -import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.DoubleValues; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LongValues; @@ -273,8 +272,8 @@ protected Query doToQuery(SearchExecutionContext context) { return Queries.newMatchNoDocsQuery("No terms supplied for \"" + getName() + "\" query."); } // Fail before we attempt to create the term queries: - if (values.size() > BooleanQuery.getMaxClauseCount()) { - throw new BooleanQuery.TooManyClauses(); + if (values.size() > IndexSearcher.getMaxClauseCount()) { + throw new IndexSearcher.TooManyClauses(); } List queries = createTermQueries(context); diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java b/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java index 528f0bd6dae08..1327721a88427 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java @@ -149,7 +149,7 @@ private static Response wrapSearchResponse(SearchResponse response) { } hits = unmodifiableList(hits); } - long total = response.getHits().getTotalHits().value; + long total = response.getHits().getTotalHits().value(); return new Response(response.isTimedOut(), failures, total, hits, response.getScrollId()); } diff --git 
a/server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java index 505c20f642093..5f135c674ba1a 100644 --- a/server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java @@ -26,6 +26,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostAttribute; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.QueryBuilder; @@ -690,7 +691,7 @@ private Query analyzeGraphPhrase(TokenStream source, String field, Type type, in List clauses = new ArrayList<>(); int[] articulationPoints = graph.articulationPoints(); int lastState = 0; - int maxClauseCount = BooleanQuery.getMaxClauseCount(); + int maxClauseCount = IndexSearcher.getMaxClauseCount(); for (int i = 0; i <= articulationPoints.length; i++) { int start = lastState; int end = -1; @@ -708,7 +709,7 @@ private Query analyzeGraphPhrase(TokenStream source, String field, Type type, in SpanQuery q = createSpanQuery(ts, field, usePrefix); if (q != null) { if (queries.size() >= maxClauseCount) { - throw new BooleanQuery.TooManyClauses(); + throw new IndexSearcher.TooManyClauses(); } queries.add(q); } @@ -722,14 +723,14 @@ private Query analyzeGraphPhrase(TokenStream source, String field, Type type, in Term[] terms = graph.getTerms(field, start); assert terms.length > 0; if (terms.length >= maxClauseCount) { - throw new BooleanQuery.TooManyClauses(); + throw new IndexSearcher.TooManyClauses(); } queryPos = newSpanQuery(terms, usePrefix); } if (queryPos != null) { if (clauses.size() >= maxClauseCount) { - throw new BooleanQuery.TooManyClauses(); + throw new IndexSearcher.TooManyClauses(); } clauses.add(queryPos); } diff --git 
a/server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java index 52122ed86ef69..446d78078e642 100644 --- a/server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java @@ -200,7 +200,7 @@ protected Query createBooleanPrefixQuery(String field, String queryText, Boolean protected Query newSynonymQuery(String field, TermAndBoost[] terms) { BytesRef[] values = new BytesRef[terms.length]; for (int i = 0; i < terms.length; i++) { - values[i] = terms[i].term; + values[i] = terms[i].term(); } return blendTerms(context, values, tieBreaker, lenient, blendedFields); } diff --git a/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java b/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java index db0077284bbd3..96e8ac35c8e32 100644 --- a/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java +++ b/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java @@ -24,8 +24,6 @@ import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.mapper.NestedObjectMapper; -import java.io.IOException; -import java.io.UncheckedIOException; import java.util.function.Predicate; /** Utility class to filter parent and children clauses when building nested @@ -55,15 +53,10 @@ public boolean mightMatchNestedDocs(Query query) { // cover a high majority of use-cases return mightMatchNestedDocs(((TermQuery) query).getTerm().field()); } else if (query instanceof TermInSetQuery tis) { - try { - if (tis.getTermsCount() > 0) { - return mightMatchNestedDocs(tis.getField()); - } else { - return false; - } - } catch (IOException e) { - // this handling isn't needed any more once we move to Lucene 10 - throw new UncheckedIOException("We are not doing IO here, this should never happen.", e); + if (tis.getTermsCount() > 0) { + 
return mightMatchNestedDocs(tis.getField()); + } else { + return false; } } else if (query instanceof PointRangeQuery) { return mightMatchNestedDocs(((PointRangeQuery) query).getField()); @@ -75,13 +68,13 @@ public boolean mightMatchNestedDocs(Query query) { return bq.clauses() .stream() .filter(BooleanClause::isRequired) - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .allMatch(this::mightMatchNestedDocs); } else { return bq.clauses() .stream() - .filter(c -> c.getOccur() == Occur.SHOULD) - .map(BooleanClause::getQuery) + .filter(c -> c.occur() == Occur.SHOULD) + .map(BooleanClause::query) .anyMatch(this::mightMatchNestedDocs); } } else if (query instanceof ESToParentBlockJoinQuery) { @@ -122,15 +115,10 @@ public boolean mightMatchNonNestedDocs(Query query, String nestedPath) { } else if (query instanceof TermQuery) { return mightMatchNonNestedDocs(((TermQuery) query).getTerm().field(), nestedPath); } else if (query instanceof TermInSetQuery tis) { - try { - if (tis.getTermsCount() > 0) { - return mightMatchNonNestedDocs(tis.getField(), nestedPath); - } else { - return false; - } - } catch (IOException e) { - // this handling isn't needed any more once we move to Lucene 10 - throw new UncheckedIOException("We are not doing IO here, this should never happen.", e); + if (tis.getTermsCount() > 0) { + return mightMatchNonNestedDocs(tis.getField(), nestedPath); + } else { + return false; } } else if (query instanceof PointRangeQuery) { return mightMatchNonNestedDocs(((PointRangeQuery) query).getField(), nestedPath); @@ -142,13 +130,13 @@ public boolean mightMatchNonNestedDocs(Query query, String nestedPath) { return bq.clauses() .stream() .filter(BooleanClause::isRequired) - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .allMatch(q -> mightMatchNonNestedDocs(q, nestedPath)); } else { return bq.clauses() .stream() - .filter(c -> c.getOccur() == Occur.SHOULD) - .map(BooleanClause::getQuery) + .filter(c -> c.occur() == Occur.SHOULD) + 
.map(BooleanClause::query) .anyMatch(q -> mightMatchNonNestedDocs(q, nestedPath)); } } else { @@ -183,5 +171,4 @@ boolean mightMatchNonNestedDocs(String field, String nestedPath) { } return true; } - } diff --git a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java index 76dba60689422..d237a03335337 100644 --- a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java @@ -760,7 +760,14 @@ private Query getRegexpQuerySingle(String field, String termStr) throws ParseExc setAnalyzer(forceAnalyzer); return super.getRegexpQuery(field, termStr); } - return currentFieldType.regexpQuery(termStr, RegExp.ALL, 0, getDeterminizeWorkLimit(), getMultiTermRewriteMethod(), context); + return currentFieldType.regexpQuery( + termStr, + RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT, + 0, + getDeterminizeWorkLimit(), + getMultiTermRewriteMethod(), + context + ); } catch (RuntimeException e) { if (lenient) { return newLenientFieldQuery(field, e); diff --git a/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedLuceneSegmentsAction.java b/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedLuceneSegmentsAction.java index 562bf1e75dc1f..97d1b3342ca2b 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedLuceneSegmentsAction.java +++ b/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedLuceneSegmentsAction.java @@ -33,7 +33,7 @@ public static Tuple getClea final CheckIndex.Status status; try (CheckIndex checker = new CheckIndex(indexDirectory, writeLock)) { - checker.setChecksumsOnly(true); + checker.setLevel(CheckIndex.Level.MIN_LEVEL_FOR_CHECKSUM_CHECKS); checker.setInfoStream(printStream, verbose); status = checker.checkIndex(null); @@ -64,7 +64,7 @@ public static void execute(Terminal terminal, Directory 
indexDirectory, Lock wri final CheckIndex.Status status; try (CheckIndex checker = new CheckIndex(indexDirectory, writeLock)) { - checker.setChecksumsOnly(true); + checker.setLevel(CheckIndex.Level.MIN_LEVEL_FOR_CHECKSUM_CHECKS); checker.setInfoStream(printStream, verbose); status = checker.checkIndex(null); diff --git a/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java b/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java index f1291ac6faa51..94a29258f3202 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java +++ b/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java @@ -24,6 +24,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.search.join.BitSetProducer; @@ -73,7 +74,7 @@ public String toString() { } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { LeafReader leafReader = context.reader(); FixedBitSet bitSet = new FixedBitSet(leafReader.maxDoc()); Terms terms = leafReader.terms(RoutingFieldMapper.NAME); @@ -82,87 +83,102 @@ public Scorer scorer(LeafReaderContext context) throws IOException { int targetShardId = indexRouting.getShard(Uid.decodeId(ref.bytes, ref.offset, ref.length), null); return shardId == targetShardId; }; - if (terms == null) { - // this is the common case - no partitioning and no _routing values - // in this case we also don't do anything special with regards to nested docs since we basically delete - // by ID and parent and nested all have the same id. 
- assert indexMetadata.isRoutingPartitionedIndex() == false; - findSplitDocs(IdFieldMapper.NAME, includeInShard, leafReader, bitSet::set); - } else { - final BitSet parentBitSet; - if (nestedParentBitSetProducer == null) { - parentBitSet = null; - } else { - parentBitSet = nestedParentBitSetProducer.getBitSet(context); - if (parentBitSet == null) { - return null; // no matches - } - } - if (indexMetadata.isRoutingPartitionedIndex()) { - // this is the heaviest invariant. Here we have to visit all docs stored fields do extract _id and _routing - // this index is routing partitioned. - Visitor visitor = new Visitor(leafReader); - TwoPhaseIterator twoPhaseIterator = parentBitSet == null - ? new RoutingPartitionedDocIdSetIterator(visitor) - : new NestedRoutingPartitionedDocIdSetIterator(visitor, parentBitSet); - return new ConstantScoreScorer(this, score(), scoreMode, twoPhaseIterator); - } else { - // here we potentially guard the docID consumers with our parent bitset if we have one. - // this ensures that we are only marking root documents in the nested case and if necessary - // we do a second pass to mark the corresponding children in markChildDocs - Function maybeWrapConsumer = consumer -> { - if (parentBitSet != null) { - return docId -> { - if (parentBitSet.get(docId)) { - consumer.accept(docId); + + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + if (terms == null) { + // this is the common case - no partitioning and no _routing values + // in this case we also don't do anything special with regards to nested docs since we basically delete + // by ID and parent and nested all have the same id. 
+ assert indexMetadata.isRoutingPartitionedIndex() == false; + findSplitDocs(IdFieldMapper.NAME, includeInShard, leafReader, bitSet::set); + } else { + final BitSet parentBitSet; + if (nestedParentBitSetProducer == null) { + parentBitSet = null; + } else { + parentBitSet = nestedParentBitSetProducer.getBitSet(context); + if (parentBitSet == null) { + return null; // no matches + } + } + if (indexMetadata.isRoutingPartitionedIndex()) { + // this is the heaviest invariant. Here we have to visit all docs stored fields do extract _id and _routing + // this index is routing partitioned. + Visitor visitor = new Visitor(leafReader); + TwoPhaseIterator twoPhaseIterator = parentBitSet == null + ? new RoutingPartitionedDocIdSetIterator(visitor) + : new NestedRoutingPartitionedDocIdSetIterator(visitor, parentBitSet); + return new ConstantScoreScorer(score(), scoreMode, twoPhaseIterator); + } else { + // here we potentially guard the docID consumers with our parent bitset if we have one. + // this ensures that we are only marking root documents in the nested case and if necessary + // we do a second pass to mark the corresponding children in markChildDocs + Function maybeWrapConsumer = consumer -> { + if (parentBitSet != null) { + return docId -> { + if (parentBitSet.get(docId)) { + consumer.accept(docId); + } + }; } + return consumer; }; - } - return consumer; - }; - // in the _routing case we first go and find all docs that have a routing value and mark the ones we have to delete - findSplitDocs(RoutingFieldMapper.NAME, ref -> { - int targetShardId = indexRouting.getShard(null, ref.utf8ToString()); - return shardId == targetShardId; - }, leafReader, maybeWrapConsumer.apply(bitSet::set)); - - // TODO have the IndexRouting build the query and pass routingRequired in - boolean routingRequired = indexMetadata.mapping() == null ? 
false : indexMetadata.mapping().routingRequired(); - // now if we have a mixed index where some docs have a _routing value and some don't we have to exclude the ones - // with a routing value from the next iteration and delete / select based on the ID. - if (routingRequired == false && terms.getDocCount() != leafReader.maxDoc()) { - /* - * This is a special case where some docs don't have routing values. - * It's annoying, but it's allowed to build an index where some documents - * hve routing and others don't. - * - * Luckily, if the routing field is required in the mapping then we can - * safely assume that all documents which are don't have a routing are - * nested documents. And we pick those up later based on the assignment - * of the document that contains them. - */ - FixedBitSet hasRoutingValue = new FixedBitSet(leafReader.maxDoc()); - findSplitDocs( - RoutingFieldMapper.NAME, - Predicates.never(), - leafReader, - maybeWrapConsumer.apply(hasRoutingValue::set) - ); - IntConsumer bitSetConsumer = maybeWrapConsumer.apply(bitSet::set); - findSplitDocs(IdFieldMapper.NAME, includeInShard, leafReader, docId -> { - if (hasRoutingValue.get(docId) == false) { - bitSetConsumer.accept(docId); + // in the _routing case we first go and find all docs that have a routing value and mark the ones we have to + // delete + findSplitDocs(RoutingFieldMapper.NAME, ref -> { + int targetShardId = indexRouting.getShard(null, ref.utf8ToString()); + return shardId == targetShardId; + }, leafReader, maybeWrapConsumer.apply(bitSet::set)); + + // TODO have the IndexRouting build the query and pass routingRequired in + boolean routingRequired = indexMetadata.mapping() == null + ? false + : indexMetadata.mapping().routingRequired(); + // now if we have a mixed index where some docs have a _routing value and some don't we have to exclude the + // ones + // with a routing value from the next iteration and delete / select based on the ID. 
+ if (routingRequired == false && terms.getDocCount() != leafReader.maxDoc()) { + /* + * This is a special case where some docs don't have routing values. + * It's annoying, but it's allowed to build an index where some documents + * hve routing and others don't. + * + * Luckily, if the routing field is required in the mapping then we can + * safely assume that all documents which are don't have a routing are + * nested documents. And we pick those up later based on the assignment + * of the document that contains them. + */ + FixedBitSet hasRoutingValue = new FixedBitSet(leafReader.maxDoc()); + findSplitDocs( + RoutingFieldMapper.NAME, + Predicates.never(), + leafReader, + maybeWrapConsumer.apply(hasRoutingValue::set) + ); + IntConsumer bitSetConsumer = maybeWrapConsumer.apply(bitSet::set); + findSplitDocs(IdFieldMapper.NAME, includeInShard, leafReader, docId -> { + if (hasRoutingValue.get(docId) == false) { + bitSetConsumer.accept(docId); + } + }); } - }); + } + if (parentBitSet != null) { + // if nested docs are involved we also need to mark all child docs that belong to a matching parent doc. + markChildDocs(parentBitSet, bitSet); + } } + + return new ConstantScoreScorer(score(), scoreMode, new BitSetIterator(bitSet, bitSet.length())); } - if (parentBitSet != null) { - // if nested docs are involved we also need to mark all child docs that belong to a matching parent doc. 
- markChildDocs(parentBitSet, bitSet); - } - } - return new ConstantScoreScorer(this, score(), scoreMode, new BitSetIterator(bitSet, bitSet.length())); + @Override + public long cost() { + return leafReader.maxDoc(); + } + }; } @Override diff --git a/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java b/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java index 3dc5953e3d3d8..bc94db13074db 100644 --- a/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java +++ b/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java @@ -33,6 +33,7 @@ import java.nio.file.Path; import java.util.HashSet; import java.util.Set; +import java.util.function.BiPredicate; public class FsDirectoryFactory implements IndexStorePlugin.DirectoryFactory { @@ -67,12 +68,12 @@ protected Directory newFSDirectory(Path location, LockFactory lockFactory, Index // Use Lucene defaults final FSDirectory primaryDirectory = FSDirectory.open(location, lockFactory); if (primaryDirectory instanceof MMapDirectory mMapDirectory) { - return new HybridDirectory(lockFactory, setPreload(mMapDirectory, lockFactory, preLoadExtensions)); + return new HybridDirectory(lockFactory, setPreload(mMapDirectory, preLoadExtensions)); } else { return primaryDirectory; } case MMAPFS: - return setPreload(new MMapDirectory(location, lockFactory), lockFactory, preLoadExtensions); + return setPreload(new MMapDirectory(location, lockFactory), preLoadExtensions); case SIMPLEFS: case NIOFS: return new NIOFSDirectory(location, lockFactory); @@ -81,17 +82,23 @@ protected Directory newFSDirectory(Path location, LockFactory lockFactory, Index } } - public static MMapDirectory setPreload(MMapDirectory mMapDirectory, LockFactory lockFactory, Set preLoadExtensions) - throws IOException { - assert mMapDirectory.getPreload() == false; + /** Sets the preload, if any, on the given directory based on the extensions. Returns the same directory instance. 
*/ + // visibility and extensibility for testing + public MMapDirectory setPreload(MMapDirectory mMapDirectory, Set preLoadExtensions) { + mMapDirectory.setPreload(getPreloadFunc(preLoadExtensions)); + return mMapDirectory; + } + + /** Gets a preload function based on the given preLoadExtensions. */ + static BiPredicate getPreloadFunc(Set preLoadExtensions) { if (preLoadExtensions.isEmpty() == false) { if (preLoadExtensions.contains("*")) { - mMapDirectory.setPreload(true); + return MMapDirectory.ALL_FILES; } else { - return new PreLoadMMapDirectory(mMapDirectory, lockFactory, preLoadExtensions); + return (name, context) -> preLoadExtensions.contains(FileSwitchDirectory.getExtension(name)); } } - return mMapDirectory; + return MMapDirectory.NO_FILES; } /** @@ -116,6 +123,8 @@ public IndexInput openInput(String name, IOContext context) throws IOException { // we need to do these checks on the outer directory since the inner doesn't know about pending deletes ensureOpen(); ensureCanRead(name); + // we switch the context here since mmap checks for the READONCE context by identity + context = context == Store.READONCE_CHECKSUM ? IOContext.READONCE : context; // we only use the mmap to open inputs. Everything else is managed by the NIOFSDirectory otherwise // we might run into trouble with files that are pendingDelete in one directory but still // listed in listAll() from the other. We on the other hand don't want to list files from both dirs @@ -162,50 +171,4 @@ MMapDirectory getDelegate() { return delegate; } } - - // TODO it would be nice to share code between PreLoadMMapDirectory and HybridDirectory but due to the nesting aspect of - // directories here makes it tricky. It would be nice to allow MMAPDirectory to pre-load on a per IndexInput basis. 
- static final class PreLoadMMapDirectory extends MMapDirectory { - private final MMapDirectory delegate; - private final Set preloadExtensions; - - PreLoadMMapDirectory(MMapDirectory delegate, LockFactory lockFactory, Set preload) throws IOException { - super(delegate.getDirectory(), lockFactory); - super.setPreload(false); - this.delegate = delegate; - this.delegate.setPreload(true); - this.preloadExtensions = preload; - assert getPreload() == false; - } - - @Override - public void setPreload(boolean preload) { - throw new IllegalArgumentException("can't set preload on a preload-wrapper"); - } - - @Override - public IndexInput openInput(String name, IOContext context) throws IOException { - if (useDelegate(name)) { - // we need to do these checks on the outer directory since the inner doesn't know about pending deletes - ensureOpen(); - ensureCanRead(name); - return delegate.openInput(name, context); - } - return super.openInput(name, context); - } - - @Override - public synchronized void close() throws IOException { - IOUtils.close(super::close, delegate); - } - - boolean useDelegate(String name) { - final String extension = FileSwitchDirectory.getExtension(name); - return preloadExtensions.contains(extension); - } - - MMapDirectory getDelegate() { - return delegate; - } - } } diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index a1038356735f0..c3d21b23d6a49 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -33,6 +33,7 @@ import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.Lock; import org.apache.lucene.store.NIOFSDirectory; +import org.apache.lucene.store.ReadAdvice; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Version; @@ -147,7 +148,15 @@ public class Store extends 
AbstractIndexShardComponent implements Closeable, Ref * Specific {@link IOContext} indicating that we will read only the Lucene file footer (containing the file checksum) * See {@link MetadataSnapshot#checksumFromLuceneFile}. */ - public static final IOContext READONCE_CHECKSUM = new IOContext(IOContext.READONCE, true); + public static final IOContext READONCE_CHECKSUM = createReadOnceContext(); + + // while equivalent, these different read once contexts are checked by identity in directory implementations + private static IOContext createReadOnceContext() { + var context = IOContext.READONCE.withReadAdvice(ReadAdvice.SEQUENTIAL); + assert context != IOContext.READONCE; + assert context.equals(IOContext.READONCE); + return context; + } private final AtomicBoolean isClosed = new AtomicBoolean(false); private final StoreDirectory directory; @@ -632,7 +641,7 @@ private static void failIfCorrupted(Directory directory) throws IOException { List ex = new ArrayList<>(); for (String file : files) { if (file.startsWith(CORRUPTED_MARKER_NAME_PREFIX)) { - try (ChecksumIndexInput input = directory.openChecksumInput(file, IOContext.READONCE)) { + try (ChecksumIndexInput input = directory.openChecksumInput(file)) { CodecUtil.checkHeader(input, CODEC, CORRUPTED_MARKER_CODEC_VERSION, CORRUPTED_MARKER_CODEC_VERSION); final int size = input.readVInt(); final byte[] buffer = new byte[size]; @@ -919,7 +928,10 @@ private static void checksumFromLuceneFile( boolean readFileAsHash, BytesRef writerUuid ) throws IOException { - try (IndexInput in = directory.openInput(file, READONCE_CHECKSUM)) { + // We select the read once context carefully here since these constants, while equivalent are + // checked by identity in the different directory implementations. + var context = file.startsWith(IndexFileNames.SEGMENTS) ? 
IOContext.READONCE : READONCE_CHECKSUM; + try (IndexInput in = directory.openInput(file, context)) { final long length = in.length(); if (length < CodecUtil.footerLength()) { // If the file isn't long enough to contain the footer then verifying it triggers an IAE, but really it's corrupted diff --git a/server/src/main/java/org/elasticsearch/index/store/StoreFileMetadata.java b/server/src/main/java/org/elasticsearch/index/store/StoreFileMetadata.java index 2be9d0f224e24..501c2496aacb6 100644 --- a/server/src/main/java/org/elasticsearch/index/store/StoreFileMetadata.java +++ b/server/src/main/java/org/elasticsearch/index/store/StoreFileMetadata.java @@ -195,7 +195,7 @@ public BytesRef hash() { * * This ID may be {@link StoreFileMetadata#UNAVAILABLE_WRITER_UUID} (i.e. zero-length) if unavailable, e.g.: * - * - The file was written by a version of Lucene prior to {@link org.apache.lucene.util.Version#LUCENE_8_6_0}. + * - The file was written by a version of Lucene prior to 8.6.0. * - The metadata came from a version of Elasticsearch prior to {@link StoreFileMetadata#WRITER_UUID_MIN_VERSION}). * - The file is not one of the files listed above. 
* diff --git a/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java b/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java index 763abb41797b5..db84be817bbd7 100644 --- a/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java +++ b/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java @@ -96,7 +96,7 @@ static TermVectorsResponse getTermVectors(IndexShard indexShard, TermVectorsRequ /* or from an existing document */ else if (docIdAndVersion != null) { // fields with stored term vectors - termVectorsByField = docIdAndVersion.reader.getTermVectors(docIdAndVersion.docId); + termVectorsByField = docIdAndVersion.reader.termVectors().get(docIdAndVersion.docId); Set selectedFields = request.selectedFields(); // generate tvs for fields where analyzer is overridden if (selectedFields == null && request.perFieldAnalyzer() != null) { @@ -301,7 +301,7 @@ private static Fields generateTermVectors( } } /* and read vectors from it */ - return index.createSearcher().getIndexReader().getTermVectors(0); + return index.createSearcher().getIndexReader().termVectors().get(0); } private static Fields generateTermVectorsFromDoc(IndexShard indexShard, TermVectorsRequest request) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/indices/AssociatedIndexDescriptor.java b/server/src/main/java/org/elasticsearch/indices/AssociatedIndexDescriptor.java index 29161814e7724..525da1670f900 100644 --- a/server/src/main/java/org/elasticsearch/indices/AssociatedIndexDescriptor.java +++ b/server/src/main/java/org/elasticsearch/indices/AssociatedIndexDescriptor.java @@ -94,7 +94,7 @@ static Automaton buildAutomaton(String pattern) { String output = pattern; output = output.replace(".", "\\."); output = output.replace("*", ".*"); - return new RegExp(output).toAutomaton(); + return new RegExp(output, RegExp.ALL | RegExp.ALL).toAutomaton(); } /** diff --git 
a/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java b/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java index abba6ec6ae684..9bca59e9e4d62 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java @@ -12,13 +12,11 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.LRUQueryCache; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryCache; import org.apache.lucene.search.QueryCachingPolicy; -import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.elasticsearch.common.lucene.ShardCoreKeyMap; @@ -173,24 +171,12 @@ public int count(LeafReaderContext context) throws IOException { return in.count(context); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - shardKeyMap.add(context.reader()); - return in.scorer(context); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { shardKeyMap.add(context.reader()); return in.scorerSupplier(context); } - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - shardKeyMap.add(context.reader()); - return in.bulkScorer(context); - } - @Override public boolean isCacheable(LeafReaderContext ctx) { return in.isCacheable(ctx); diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java b/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java index 08148de0591cb..f3456870114f5 100644 --- a/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java +++ 
b/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java @@ -41,6 +41,8 @@ import java.util.Objects; import java.util.Set; +import static org.apache.lucene.util.automaton.Operations.DEFAULT_DETERMINIZE_WORK_LIMIT; + /** * Uses a pattern string to define a protected space for indices belonging to a system feature, and, if needed, provides metadata for * managing indices that match the pattern. @@ -360,7 +362,7 @@ protected SystemIndexDescriptor( this.primaryIndex = primaryIndex; this.aliasName = aliasName; - final Automaton automaton = buildAutomaton(indexPattern, aliasName); + final Automaton automaton = Operations.determinize(buildAutomaton(indexPattern, aliasName), DEFAULT_DETERMINIZE_WORK_LIMIT); this.indexPatternAutomaton = new CharacterRunAutomaton(automaton); if (primaryIndex != null && indexPatternAutomaton.run(primaryIndex) == false) { throw new IllegalArgumentException("primary index does not match the index pattern!"); @@ -883,15 +885,15 @@ static Automaton buildAutomaton(String pattern, String alias) { final String patternAsRegex = patternToRegex(pattern); final String aliasAsRegex = alias == null ? 
null : patternToRegex(alias); - final Automaton patternAutomaton = new RegExp(patternAsRegex).toAutomaton(); + final Automaton patternAutomaton = new RegExp(patternAsRegex, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(); if (aliasAsRegex == null) { return patternAutomaton; } - final Automaton aliasAutomaton = new RegExp(aliasAsRegex).toAutomaton(); + final Automaton aliasAutomaton = new RegExp(aliasAsRegex, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(); - return Operations.union(patternAutomaton, aliasAutomaton); + return Operations.determinize(Operations.union(patternAutomaton, aliasAutomaton), DEFAULT_DETERMINIZE_WORK_LIMIT); } /** diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java index a0a4388a4d54a..856b30d1c19e8 100644 --- a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java +++ b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java @@ -14,7 +14,6 @@ import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.snapshots.features.ResetFeatureStateResponse.ResetFeatureStateStatus; @@ -178,7 +177,7 @@ public SystemIndices(List pluginAndModuleFeatures) { this.netNewSystemIndexAutomaton = buildNetNewIndexCharacterRunAutomaton(featureDescriptors); this.productToSystemIndicesMatcher = getProductToSystemIndicesMap(featureDescriptors); this.executorSelector = new ExecutorSelector(this); - this.systemNameAutomaton = MinimizationOperations.minimize( + this.systemNameAutomaton = Operations.determinize( Operations.union(List.of(systemIndexAutomata, systemDataStreamIndicesAutomata, 
buildDataStreamAutomaton(featureDescriptors))), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT ); @@ -264,9 +263,7 @@ private static Map getProductToSystemIndicesMap(M .collect( Collectors.toUnmodifiableMap( Entry::getKey, - entry -> new CharacterRunAutomaton( - MinimizationOperations.minimize(entry.getValue(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) - ) + entry -> new CharacterRunAutomaton(Operations.determinize(entry.getValue(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)) ) ); } @@ -426,7 +423,7 @@ private static Automaton buildIndexAutomaton(Map featureDescrip .stream() .map(SystemIndices::featureToIndexAutomaton) .reduce(Operations::union); - return MinimizationOperations.minimize(automaton.orElse(EMPTY), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return Operations.determinize(automaton.orElse(EMPTY), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } private static CharacterRunAutomaton buildNetNewIndexCharacterRunAutomaton(Map featureDescriptors) { @@ -437,9 +434,7 @@ private static CharacterRunAutomaton buildNetNewIndexCharacterRunAutomaton(Map SystemIndexDescriptor.buildAutomaton(descriptor.getIndexPattern(), descriptor.getAliasName())) .reduce(Operations::union); - return new CharacterRunAutomaton( - MinimizationOperations.minimize(automaton.orElse(EMPTY), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) - ); + return new CharacterRunAutomaton(Operations.determinize(automaton.orElse(EMPTY), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)); } private static Automaton featureToIndexAutomaton(Feature feature) { @@ -459,7 +454,7 @@ private static Automaton buildDataStreamAutomaton(Map featureDe .map(dsName -> SystemIndexDescriptor.buildAutomaton(dsName, null)) .reduce(Operations::union); - return automaton.isPresent() ? MinimizationOperations.minimize(automaton.get(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) : EMPTY; + return automaton.isPresent() ? 
Operations.determinize(automaton.get(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) : EMPTY; } private static Predicate buildDataStreamNamePredicate(Map featureDescriptors) { @@ -472,7 +467,7 @@ private static Automaton buildDataStreamBackingIndicesAutomaton(Map 1) { throw new IllegalStateException( "failed to extract doc:" + target + ", the grouping field must be single valued" ); } + ord = (int) sorted.nextOrd(); return true; } else { return false; diff --git a/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java b/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java index ed07525c1dd7b..443963dd59dcd 100644 --- a/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java +++ b/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java @@ -170,10 +170,10 @@ public static TopFieldGroups merge(Sort sort, int start, int size, TopFieldGroup final TopFieldGroups shard = shardHits[shardIDX]; // totalHits can be non-zero even if no hits were // collected, when searchAfter was used: - totalHitCount += shard.totalHits.value; + totalHitCount += shard.totalHits.value(); // If any hit count is a lower bound then the merged // total hit count is a lower bound as well - if (shard.totalHits.relation == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) { + if (shard.totalHits.relation() == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) { totalHitsRelation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO; } if (CollectionUtils.isEmpty(shard.scoreDocs) == false) { diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/BinaryDocValuesRangeQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/BinaryDocValuesRangeQuery.java index dca4ff503c788..67ece200c06ee 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/BinaryDocValuesRangeQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/BinaryDocValuesRangeQuery.java @@ -18,7 +18,7 @@ import 
org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BytesRef; @@ -61,7 +61,7 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final BinaryDocValues values = context.reader().getBinaryDocValues(fieldName); if (values == null) { return null; @@ -106,7 +106,8 @@ public float matchCost() { return 4; // at most 4 comparisons } }; - return new ConstantScoreScorer(this, score(), scoreMode, iterator); + + return new DefaultScorerSupplier(new ConstantScoreScorer(score(), scoreMode, iterator)); } @Override diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java index c75c6e2373f25..788bf76087d1f 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java @@ -22,6 +22,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.IOSupplier; import org.apache.lucene.util.InPlaceMergeSorter; import java.io.IOException; @@ -188,7 +189,11 @@ private static TermStates adjustTTF(IndexReaderContext readerContext, TermStates int df = termContext.docFreq(); long ttf = sumTTF; for (int i = 0; i < len; i++) { - TermState termState = termContext.get(leaves.get(i)); + IOSupplier termStateSupplier = termContext.get(leaves.get(i)); + if 
(termStateSupplier == null) { + continue; + } + TermState termState = termStateSupplier.get(); if (termState == null) { continue; } @@ -212,7 +217,11 @@ private static TermStates adjustDF(IndexReaderContext readerContext, TermStates } TermStates newCtx = new TermStates(readerContext); for (int i = 0; i < len; ++i) { - TermState termState = ctx.get(leaves.get(i)); + IOSupplier termStateSupplier = ctx.get(leaves.get(i)); + if (termStateSupplier == null) { + continue; + } + TermState termState = termStateSupplier.get(); if (termState == null) { continue; } diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/MinDocQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/MinDocQuery.java index 8e85c1d974382..13b0bf650a39e 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/MinDocQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/MinDocQuery.java @@ -19,6 +19,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import java.io.IOException; @@ -76,15 +77,17 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo throw new IllegalStateException("Executing against a different reader than the query has been rewritten against"); } return new ConstantScoreWeight(this, boost) { + @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final int maxDoc = context.reader().maxDoc(); if (context.docBase + maxDoc <= minDoc) { return null; } final int segmentMinDoc = Math.max(0, minDoc - context.docBase); final DocIdSetIterator disi = new MinDocIterator(segmentMinDoc, maxDoc); - return new ConstantScoreScorer(this, score(), scoreMode, disi); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, disi); + return new 
DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java index 1a8ac203f0cb5..6575f7f416bd9 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java @@ -22,6 +22,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.Weight; @@ -67,8 +68,8 @@ public SearchAfterSortedDocQuery(Sort sort, FieldDoc after) { public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, 1.0f) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - Sort segmentSort = context.reader().getMetaData().getSort(); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + Sort segmentSort = context.reader().getMetaData().sort(); if (segmentSort == null || Lucene.canEarlyTerminate(sort, segmentSort) == false) { throw new IOException("search sort :[" + sort + "] does not match the index sort:[" + segmentSort + "]"); } @@ -80,7 +81,8 @@ public Scorer scorer(LeafReaderContext context) throws IOException { return null; } final DocIdSetIterator disi = new MinDocQuery.MinDocIterator(firstDoc, maxDoc); - return new ConstantScoreScorer(this, score(), scoreMode, disi); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, disi); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/lucene/spatial/ShapeDocValuesQuery.java 
b/server/src/main/java/org/elasticsearch/lucene/spatial/ShapeDocValuesQuery.java index bd64ee88fc300..064f8ef3eacd8 100644 --- a/server/src/main/java/org/elasticsearch/lucene/spatial/ShapeDocValuesQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/spatial/ShapeDocValuesQuery.java @@ -109,14 +109,8 @@ private ConstantScoreWeight getStandardWeight(ScoreMode scoreMode, float boost) final Component2D component2D = create(geometries); return new ConstantScoreWeight(this, boost) { - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return scorerSupplier(context).get(Long.MAX_VALUE); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) { - final Weight weight = this; // implement ScorerSupplier, since we do some expensive stuff to make a scorer return new ScorerSupplier() { @@ -125,7 +119,7 @@ public Scorer get(long leadCost) throws IOException { // binary doc values allocate an array upfront, lets only allocate it if we are going to use it final BinaryDocValues values = context.reader().getBinaryDocValues(field); if (values == null) { - return new ConstantScoreScorer(weight, 0f, scoreMode, DocIdSetIterator.empty()); + return new ConstantScoreScorer(0f, scoreMode, DocIdSetIterator.empty()); } final GeometryDocValueReader reader = new GeometryDocValueReader(); final Component2DVisitor visitor = Component2DVisitor.getVisitor(component2D, relation, encoder); @@ -143,7 +137,7 @@ public float matchCost() { return 1000f; // TODO: what should it be? 
} }; - return new ConstantScoreScorer(weight, score(), scoreMode, iterator); + return new ConstantScoreScorer(score(), scoreMode, iterator); } @Override @@ -167,14 +161,8 @@ private ConstantScoreWeight getContainsWeight(ScoreMode scoreMode, float boost) } return new ConstantScoreWeight(this, boost) { - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return scorerSupplier(context).get(Long.MAX_VALUE); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) { - final Weight weight = this; // implement ScorerSupplier, since we do some expensive stuff to make a scorer return new ScorerSupplier() { @@ -183,7 +171,7 @@ public Scorer get(long leadCost) throws IOException { // binary doc values allocate an array upfront, lets only allocate it if we are going to use it final BinaryDocValues values = context.reader().getBinaryDocValues(field); if (values == null) { - return new ConstantScoreScorer(weight, 0f, scoreMode, DocIdSetIterator.empty()); + return new ConstantScoreScorer(0f, scoreMode, DocIdSetIterator.empty()); } final Component2DVisitor[] visitors = new Component2DVisitor[components2D.size()]; for (int i = 0; i < components2D.size(); i++) { @@ -210,7 +198,7 @@ public float matchCost() { return 1000f; // TODO: what should it be? 
} }; - return new ConstantScoreScorer(weight, score(), scoreMode, iterator); + return new ConstantScoreScorer(score(), scoreMode, iterator); } @Override diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 9b33ac2ea12fc..8e66486329577 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -415,8 +415,8 @@ private Settings createEnvironment(Environment initialEnvironment, NodeServicePr Constants.OS_ARCH, Constants.JVM_VENDOR, Constants.JVM_NAME, - Constants.JAVA_VERSION, - Constants.JVM_VERSION + System.getProperty("java.version"), + Runtime.version().toString() ); logger.info("JVM home [{}], using bundled JDK [{}]", System.getProperty("java.home"), jvmInfo.getUsingBundledJdk()); logger.info("JVM arguments {}", Arrays.toString(jvmInfo.getInputArguments())); diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 5df7a8ea20f54..435bf71e3b2c9 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -4029,7 +4029,7 @@ protected void snapshotFile(SnapshotShardContext context, FileInfo fileInfo) thr final String file = fileInfo.physicalName(); try ( Releasable ignored = context.withCommitRef(); - IndexInput indexInput = store.openVerifyingInput(file, IOContext.READ, fileInfo.metadata()) + IndexInput indexInput = store.openVerifyingInput(file, IOContext.DEFAULT, fileInfo.metadata()) ) { for (int i = 0; i < fileInfo.numberOfParts(); i++) { final long partBytes = fileInfo.partBytes(i); diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java 
b/server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java index a457ebb67fd47..09f31abb58eb3 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java @@ -72,7 +72,7 @@ public RestChannelConsumer doCatRequest(final RestRequest request, final NodeCli return channel -> client.search(countRequest, new RestResponseListener(channel) { @Override public RestResponse buildResponse(SearchResponse countResponse) throws Exception { - assert countResponse.getHits().getTotalHits().relation == TotalHits.Relation.EQUAL_TO; + assert countResponse.getHits().getTotalHits().relation() == TotalHits.Relation.EQUAL_TO; return RestTable.buildResponse(buildTable(request, countResponse), channel); } }); @@ -90,7 +90,7 @@ protected Table getTableWithHeader(final RestRequest request) { private Table buildTable(RestRequest request, SearchResponse response) { Table table = getTableWithHeader(request); table.startRow(); - table.addCell(response.getHits().getTotalHits().value); + table.addCell(response.getHits().getTotalHits().value()); table.endRow(); return table; diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java index 23da666a39a7e..c1a55874bfc58 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java @@ -87,7 +87,7 @@ public RestResponse buildResponse(SearchResponse response, XContentBuilder build if (terminateAfter != DEFAULT_TERMINATE_AFTER) { builder.field("terminated_early", response.isTerminatedEarly()); } - builder.field("count", response.getHits().getTotalHits().value); + builder.field("count", response.getHits().getTotalHits().value()); buildBroadcastShardsHeader( builder, request, diff --git 
a/server/src/main/java/org/elasticsearch/script/ScoreScript.java b/server/src/main/java/org/elasticsearch/script/ScoreScript.java index c8129717b5ccd..6c7d36ee9a436 100644 --- a/server/src/main/java/org/elasticsearch/script/ScoreScript.java +++ b/server/src/main/java/org/elasticsearch/script/ScoreScript.java @@ -116,6 +116,11 @@ public void setDocument(int docid) { this.docId = docid; } + /** Get the current document. */ + public int docId() { + return docId; + } + public void setScorer(Scorable scorer) { this.scoreSupplier = () -> { try { diff --git a/server/src/main/java/org/elasticsearch/script/SortedSetDocValuesStringFieldScript.java b/server/src/main/java/org/elasticsearch/script/SortedSetDocValuesStringFieldScript.java index c80a2e7200ecc..d83530e82b16d 100644 --- a/server/src/main/java/org/elasticsearch/script/SortedSetDocValuesStringFieldScript.java +++ b/server/src/main/java/org/elasticsearch/script/SortedSetDocValuesStringFieldScript.java @@ -46,9 +46,8 @@ public void setDocument(int docID) { public void execute() { try { if (hasValue) { - long ord; - while ((ord = sortedSetDocValues.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) { - BytesRef bytesRef = sortedSetDocValues.lookupOrd(ord); + for (int i = 0; i < sortedSetDocValues.docValueCount(); i++) { + BytesRef bytesRef = sortedSetDocValues.lookupOrd(sortedSetDocValues.nextOrd()); emit(bytesRef.utf8ToString()); } } diff --git a/server/src/main/java/org/elasticsearch/script/field/IpDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/IpDocValuesField.java index 6297fbaa23187..d9550dd17a058 100644 --- a/server/src/main/java/org/elasticsearch/script/field/IpDocValuesField.java +++ b/server/src/main/java/org/elasticsearch/script/field/IpDocValuesField.java @@ -157,7 +157,8 @@ public SortedSetIpSupplier(SortedSetDocValues in) { public void setNextDocId(int docId) throws IOException { count = 0; if (in.advanceExact(docId)) { - for (long ord = in.nextOrd(); ord != 
SortedSetDocValues.NO_MORE_ORDS; ord = in.nextOrd()) { + for (int i = 0; i < in.docValueCount(); i++) { + long ord = in.nextOrd(); ords = ArrayUtil.grow(ords, count + 1); ords[count++] = ord; } diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteKnnDenseVectorDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteKnnDenseVectorDocValuesField.java index fd7c5227e22ac..be1b972dcd41a 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteKnnDenseVectorDocValuesField.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteKnnDenseVectorDocValuesField.java @@ -10,6 +10,7 @@ package org.elasticsearch.script.field.vectors; import org.apache.lucene.index.ByteVectorValues; +import org.apache.lucene.index.KnnVectorValues; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; import org.elasticsearch.index.mapper.vectors.DenseVectorScriptDocValues; @@ -19,7 +20,8 @@ import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; public class ByteKnnDenseVectorDocValuesField extends DenseVectorDocValuesField { - protected ByteVectorValues input; // null if no vectors + protected final ByteVectorValues input; // null if no vectors + protected final KnnVectorValues.DocIndexIterator iterator; // null if no vectors protected byte[] vector; protected final int dims; @@ -31,6 +33,7 @@ protected ByteKnnDenseVectorDocValuesField(@Nullable ByteVectorValues input, Str super(name, elementType); this.dims = dims; this.input = input; + this.iterator = input == null ? 
null : input.iterator(); } @Override @@ -38,15 +41,15 @@ public void setNextDocId(int docId) throws IOException { if (input == null) { return; } - int currentDoc = input.docID(); + int currentDoc = iterator.docID(); if (currentDoc == NO_MORE_DOCS || docId < currentDoc) { vector = null; } else if (docId == currentDoc) { - vector = input.vectorValue(); + vector = input.vectorValue(iterator.index()); } else { - currentDoc = input.advance(docId); + currentDoc = iterator.advance(docId); if (currentDoc == docId) { - vector = input.vectorValue(); + vector = input.vectorValue(iterator.index()); } else { vector = null; } diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java index c7678b03dd8c5..3e38092200511 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java @@ -10,6 +10,7 @@ package org.elasticsearch.script.field.vectors; import org.apache.lucene.index.FloatVectorValues; +import org.apache.lucene.index.KnnVectorValues; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.vectors.DenormalizedCosineFloatVectorValues; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; @@ -20,7 +21,8 @@ import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; public class KnnDenseVectorDocValuesField extends DenseVectorDocValuesField { - protected FloatVectorValues input; // null if no vectors + protected final FloatVectorValues input; // null if no vectors + protected final KnnVectorValues.DocIndexIterator iterator; protected float[] vector; protected final int dims; @@ -28,6 +30,7 @@ public KnnDenseVectorDocValuesField(@Nullable FloatVectorValues input, String na super(name, ElementType.FLOAT); this.dims = dims; this.input = 
input; + this.iterator = input == null ? null : input.iterator(); } @Override @@ -35,15 +38,15 @@ public void setNextDocId(int docId) throws IOException { if (input == null) { return; } - int currentDoc = input.docID(); + int currentDoc = iterator.docID(); if (currentDoc == NO_MORE_DOCS || docId < currentDoc) { vector = null; } else if (docId == currentDoc) { - vector = input.vectorValue(); + vector = input.vectorValue(iterator.index()); } else { - currentDoc = input.advance(docId); + currentDoc = iterator.advance(docId); if (currentDoc == docId) { - vector = input.vectorValue(); + vector = input.vectorValue(iterator.index()); } else { vector = null; } diff --git a/server/src/main/java/org/elasticsearch/search/MultiValueMode.java b/server/src/main/java/org/elasticsearch/search/MultiValueMode.java index 49480816bbbb1..5ac25fe0ff695 100644 --- a/server/src/main/java/org/elasticsearch/search/MultiValueMode.java +++ b/server/src/main/java/org/elasticsearch/search/MultiValueMode.java @@ -477,11 +477,11 @@ protected BytesRef pick( @Override protected int pick(SortedSetDocValues values) throws IOException { - long maxOrd = -1; - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { - maxOrd = ord; + int count = values.docValueCount(); + for (int i = 0; i < count - 1; ++i) { + values.nextOrd(); } - return Math.toIntExact(maxOrd); + return Math.toIntExact(values.nextOrd()); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/SearchFeatures.java b/server/src/main/java/org/elasticsearch/search/SearchFeatures.java index 6a89d66bb3411..beac39c2de304 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchFeatures.java +++ b/server/src/main/java/org/elasticsearch/search/SearchFeatures.java @@ -16,8 +16,11 @@ import java.util.Set; public final class SearchFeatures implements FeatureSpecification { + + public static final NodeFeature LUCENE_10_0_0_UPGRADE = new NodeFeature("lucene_10_upgrade"); + @Override 
public Set getFeatures() { - return Set.of(KnnVectorQueryBuilder.K_PARAM_SUPPORTED); + return Set.of(KnnVectorQueryBuilder.K_PARAM_SUPPORTED, LUCENE_10_0_0_UPGRADE); } } diff --git a/server/src/main/java/org/elasticsearch/search/SearchHits.java b/server/src/main/java/org/elasticsearch/search/SearchHits.java index 8ff5de3c9b8ac..896dd7f999949 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHits.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHits.java @@ -288,12 +288,12 @@ public Iterator toXContentChunked(ToXContent.Params params return Iterators.concat(Iterators.single((b, p) -> b.startObject(Fields.HITS)), Iterators.single((b, p) -> { boolean totalHitAsInt = params.paramAsBoolean(RestSearchAction.TOTAL_HITS_AS_INT_PARAM, false); if (totalHitAsInt) { - long total = totalHits == null ? -1 : totalHits.value; + long total = totalHits == null ? -1 : totalHits.value(); b.field(Fields.TOTAL, total); } else if (totalHits != null) { b.startObject(Fields.TOTAL); - b.field("value", totalHits.value); - b.field("relation", totalHits.relation == Relation.EQUAL_TO ? "eq" : "gte"); + b.field("value", totalHits.value()); + b.field("relation", totalHits.relation() == Relation.EQUAL_TO ? 
"eq" : "gte"); b.endObject(); } return b; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java index f66f6b4a3805d..624db3f1cfe8c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java @@ -13,7 +13,6 @@ import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.MultiCollector; import org.apache.lucene.search.Scorable; -import org.apache.lucene.search.ScoreCachingWrappingScorer; import org.apache.lucene.search.ScoreMode; import java.io.IOException; @@ -201,6 +200,7 @@ private static class MultiLeafBucketCollector extends LeafBucketCollector { private final boolean cacheScores; private final LeafBucketCollector[] collectors; private int numCollectors; + private ScoreCachingScorable scorable; private MultiLeafBucketCollector(List collectors, boolean cacheScores) { this.collectors = collectors.toArray(new LeafBucketCollector[collectors.size()]); @@ -211,11 +211,11 @@ private MultiLeafBucketCollector(List collectors, boolean c @Override public void setScorer(Scorable scorer) throws IOException { if (cacheScores) { - scorer = ScoreCachingWrappingScorer.wrap(scorer); + scorable = new ScoreCachingScorable(scorer); } for (int i = 0; i < numCollectors; ++i) { final LeafCollector c = collectors[i]; - c.setScorer(scorer); + c.setScorer(cacheScores ? 
scorable : scorer); } } @@ -227,6 +227,9 @@ private void removeCollector(int i) { @Override public void collect(int doc, long bucket) throws IOException { + if (scorable != null) { + scorable.curDoc = doc; + } final LeafBucketCollector[] collectors = this.collectors; int numCollectors = this.numCollectors; for (int i = 0; i < numCollectors;) { @@ -244,4 +247,25 @@ public void collect(int doc, long bucket) throws IOException { } } } + + private static class ScoreCachingScorable extends Scorable { + + private final Scorable in; + private int curDoc = -1; // current document + private int scoreDoc = -1; // document that score was computed on + private float score; + + ScoreCachingScorable(Scorable in) { + this.in = in; + } + + @Override + public float score() throws IOException { + if (curDoc != scoreDoc) { + score = in.score(); + scoreDoc = curDoc; + } + return score; + } + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java index 2e9e04eca4afc..9ee15306ce636 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -278,7 +278,7 @@ private static boolean isMaybeMultivalued(LeafReaderContext context, SortField s * optimization and null if index sort is not applicable. 
*/ private Sort buildIndexSortPrefix(LeafReaderContext context) throws IOException { - Sort indexSort = context.reader().getMetaData().getSort(); + Sort indexSort = context.reader().getMetaData().sort(); if (indexSort == null) { return null; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java index 0d0d2c6f922e8..dcc2ad52cbc50 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java @@ -41,8 +41,6 @@ import java.util.List; import java.util.function.BiConsumer; -import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; - /** * A {@link SingleDimensionValuesSource} for global ordinals. */ @@ -247,9 +245,8 @@ public DocIdSetIterator competitiveIterator() { @Override public void collect(int doc, long bucket) throws IOException { if (dvs.advanceExact(doc)) { - long ord; - while ((ord = dvs.nextOrd()) != NO_MORE_ORDS) { - currentValue = ord; + for (int i = 0; i < dvs.docValueCount(); i++) { + currentValue = dvs.nextOrd(); next.collect(doc, bucket); } } else if (missingBucket) { @@ -306,8 +303,8 @@ public void collect(int doc, long bucket) throws IOException { public void collect(int doc, long bucket) throws IOException { if (currentValueIsSet == false) { if (dvs.advanceExact(doc)) { - long ord; - while ((ord = dvs.nextOrd()) != NO_MORE_ORDS) { + for (int i = 0; i < dvs.docValueCount(); i++) { + long ord = dvs.nextOrd(); if (term.equals(lookup.lookupOrd(ord))) { currentValueIsSet = true; currentValue = ord; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java index f774f67b3df8f..af4d60bf424a7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java @@ -39,7 +39,6 @@ import java.util.function.Supplier; import static java.util.Collections.emptyList; -import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; import static org.elasticsearch.search.aggregations.InternalOrder.isKeyOrder; class CountedTermsAggregator extends TermsAggregator { @@ -77,7 +76,8 @@ private LeafBucketCollector getLeafCollector(SortedSetDocValues ords, LeafBucket @Override public void collect(int doc, long owningBucketOrd) throws IOException { if (ords.advanceExact(doc)) { - for (long ord = ords.nextOrd(); ord != NO_MORE_ORDS; ord = ords.nextOrd()) { + for (int i = 0; i < ords.docValueCount(); i++) { + long ord = ords.nextOrd(); collectOrdinal(bucketOrds.add(owningBucketOrd, ords.lookupOrd(ord)), doc, sub); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MergedPointRangeQuery.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MergedPointRangeQuery.java index 2969b7bf82c80..7dd192b317a57 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MergedPointRangeQuery.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MergedPointRangeQuery.java @@ -13,7 +13,6 @@ import org.apache.lucene.index.PointValues; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.ConstantScoreWeight; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchNoDocsQuery; @@ -21,7 +20,6 @@ import 
org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; @@ -117,15 +115,6 @@ public int count(LeafReaderContext context) throws IOException { return multiValuedSegmentWeight().count(context); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - ScorerSupplier scorerSupplier = scorerSupplier(context); - if (scorerSupplier == null) { - return null; - } - return scorerSupplier.get(Long.MAX_VALUE); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { /* @@ -144,19 +133,6 @@ public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOExcepti return multiValuedSegmentWeight().scorerSupplier(context); } - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - PointValues points = context.reader().getPointValues(field); - if (points == null) { - return null; - } - if (points.size() == points.getDocCount()) { - // Each doc that has points has exactly one point. 
- return singleValuedSegmentWeight().bulkScorer(context); - } - return multiValuedSegmentWeight().bulkScorer(context); - } - private Weight singleValuedSegmentWeight() throws IOException { if (singleValuedSegmentWeight == null) { singleValuedSegmentWeight = delegateForSingleValuedSegments.createWeight(searcher, scoreMode, boost); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java index 282c09c84414c..e8e33655d47c1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java @@ -14,6 +14,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSortSortedNumericDocValuesRangeQuery; @@ -215,7 +216,7 @@ long count(LeafReaderContext ctx, FiltersAggregator.Counter counter, Bits live) // No hits in this segment. return 0; } - scorer.score(counter, live); + scorer.score(counter, live, 0, DocIdSetIterator.NO_MORE_DOCS); return counter.readAndReset(ctx); } @@ -228,7 +229,7 @@ void collect(LeafReaderContext ctx, LeafCollector collector, Bits live) throws I // No hits in this segment. 
return; } - scorer.score(collector, live); + scorer.score(collector, live, 0, DocIdSetIterator.NO_MORE_DOCS); } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java index 4c87b5961ac1a..b5d3485e72f82 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.aggregations.bucket.global; import org.apache.lucene.search.BulkScorer; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Scorable; @@ -45,6 +46,7 @@ public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx, return LeafBucketCollector.NO_OP_COLLECTOR; } grow(1); + scorer.score(new LeafCollector() { @Override public void collect(int doc) throws IOException { @@ -55,7 +57,7 @@ public void collect(int doc) throws IOException { public void setScorer(Scorable scorer) throws IOException { sub.setScorer(scorer); } - }, aggCtx.getLeafReaderContext().reader().getLiveDocs()); + }, aggCtx.getLeafReaderContext().reader().getLiveDocs(), 0, DocIdSetIterator.NO_MORE_DOCS); return LeafBucketCollector.NO_OP_COLLECTOR; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java index 12182a5931a4f..0fbb9745aa400 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java @@ -197,7 +197,6 @@ void processBufferedChildBuckets() throws 
IOException { } for (; childDocId < currentParentDoc; childDocId = childDocs.nextDoc()) { - cachedScorer.doc = childDocId; for (var bucket : bucketBuffer) { collectBucket(sub, childDocId, bucket); } @@ -207,19 +206,12 @@ void processBufferedChildBuckets() throws IOException { } private static class CachedScorable extends Scorable { - int doc; float score; @Override public final float score() { return score; } - - @Override - public int docID() { - return doc; - } - } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java index 2f18d2dc1e42e..6119af3cb6a57 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java @@ -157,7 +157,8 @@ abstract static class SortedSetRangeLeafCollector extends LeafBucketCollectorBas this.collector = (doc, bucket) -> { if (values.advanceExact(doc)) { int lo = 0; - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + for (int i = 0; i < values.docValueCount(); i++) { + long ord = values.nextOrd(); lo = collect(doc, ord, bucket, lo); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java index 0cfad5ba9e0c7..37cee75c11b48 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java @@ -15,7 +15,7 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; -import 
org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TopScoreDocCollectorManager; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.util.BigArrays; @@ -99,7 +99,7 @@ public void collect(int doc, long bucket) throws IOException { // Designed to be overridden by subclasses that may score docs by criteria // other than Lucene score protected TopDocsCollector createTopDocsCollector(int size) throws IOException { - return TopScoreDocCollector.create(size, Integer.MAX_VALUE); + return new TopScoreDocCollectorManager(size, null, Integer.MAX_VALUE, false).newCollector(); } // Can be overridden by subclasses that have a different priority queue implementation @@ -214,7 +214,6 @@ class PerSegmentCollects extends Scorable { private final AggregationExecutionContext aggCtx; int maxDocId = Integer.MIN_VALUE; private float currentScore; - private int currentDocId = -1; private Scorable currentScorer; PerSegmentCollects(AggregationExecutionContext aggCtx) throws IOException { @@ -249,7 +248,6 @@ public void replayRelatedMatches(List sd) throws IOException { leafCollector.setScorer(this); currentScore = 0; - currentDocId = -1; if (maxDocId < 0) { return; } @@ -259,7 +257,6 @@ public void replayRelatedMatches(List sd) throws IOException { int rebased = scoreDoc.doc - aggCtx.getLeafReaderContext().docBase; if ((rebased >= 0) && (rebased <= maxDocId)) { currentScore = scoreDoc.score; - currentDocId = rebased; // We stored the bucket ID in Lucene's shardIndex property // for convenience. 
leafCollector.collect(rebased, scoreDoc.shardIndex); @@ -276,11 +273,6 @@ public float score() throws IOException { return currentScore; } - @Override - public int docID() { - return currentDocId; - } - public void collect(int docId, long parentBucket) throws IOException { perBucketSamples = bigArrays.grow(perBucketSamples, parentBucket + 1); PerParentBucketSamples sampler = perBucketSamples.get((int) parentBucket); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java index ef4101892a461..539b9440cea25 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java @@ -118,12 +118,12 @@ public long longValue() throws IOException { @Override public boolean advanceExact(int target) throws IOException { if (globalOrds.advanceExact(target)) { - value = globalOrds.nextOrd(); // Check there isn't a second value for this // document - if (globalOrds.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) { + if (globalOrds.docValueCount() > 1) { throw new IllegalArgumentException("Sample diversifying key must be a single valued-field"); } + value = globalOrds.nextOrd(); return true; } else { return false; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplingQuery.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplingQuery.java index ed39f41d9daed..89fe1a53a01cc 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplingQuery.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplingQuery.java @@ -20,6 +20,7 @@ 
import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import java.io.IOException; @@ -76,15 +77,15 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final SplittableRandom random = new SplittableRandom(BitMixer.mix(hash ^ seed)); int maxDoc = context.reader().maxDoc(); - return new ConstantScoreScorer( - this, + Scorer scorer = new ConstantScoreScorer( boost, ScoreMode.COMPLETE_NO_SCORES, new RandomSamplingIterator(maxDoc, p, random::nextInt) ); + return new DefaultScorerSupplier(scorer); } }; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java index 1b0ec8e356082..0f7c61dc9f25b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java @@ -53,7 +53,6 @@ import java.util.function.LongPredicate; import java.util.function.LongUnaryOperator; -import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; import static org.elasticsearch.search.aggregations.InternalOrder.isKeyOrder; /** @@ -167,7 +166,8 @@ public void collect(int doc, long owningBucketOrd) throws IOException { if (false == globalOrds.advanceExact(doc)) { return; } - for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_ORDS; globalOrd = globalOrds.nextOrd()) { + for (int i = 0; i < globalOrds.docValueCount(); i++) { + long globalOrd = globalOrds.nextOrd(); 
collectionStrategy.collectGlobalOrd(owningBucketOrd, doc, globalOrd, sub); } } @@ -179,7 +179,8 @@ public void collect(int doc, long owningBucketOrd) throws IOException { if (false == globalOrds.advanceExact(doc)) { return; } - for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_ORDS; globalOrd = globalOrds.nextOrd()) { + for (int i = 0; i < globalOrds.docValueCount(); i++) { + long globalOrd = globalOrds.nextOrd(); if (false == acceptedGlobalOrdinals.test(globalOrd)) { continue; } @@ -350,7 +351,8 @@ public void collect(int doc, long owningBucketOrd) throws IOException { if (false == segmentOrds.advanceExact(doc)) { return; } - for (long segmentOrd = segmentOrds.nextOrd(); segmentOrd != NO_MORE_ORDS; segmentOrd = segmentOrds.nextOrd()) { + for (int i = 0; i < segmentOrds.docValueCount(); i++) { + long segmentOrd = segmentOrds.nextOrd(); int docCount = docCountProvider.getDocCount(doc); segmentDocCounts.increment(segmentOrd + 1, docCount); } @@ -524,7 +526,8 @@ private void forEachExcludeDeletedDocs(BucketInfoConsumer consumer) throws IOExc if (liveDocs == null || liveDocs.get(docId)) { // document is not deleted globalOrds = globalOrds == null ? valuesSource.globalOrdinalsValues(ctx) : globalOrds; if (globalOrds.advanceExact(docId)) { - for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_ORDS; globalOrd = globalOrds.nextOrd()) { + for (int i = 0; i < globalOrds.docValueCount(); i++) { + long globalOrd = globalOrds.nextOrd(); if (accepted.find(globalOrd) >= 0) { continue; } @@ -634,7 +637,8 @@ void forEachExcludeDeletedDocs(long owningBucketOrd, BucketInfoConsumer consumer if (liveDocs == null || liveDocs.get(docId)) { // document is not deleted globalOrds = globalOrds == null ? 
valuesSource.globalOrdinalsValues(ctx) : globalOrds; if (globalOrds.advanceExact(docId)) { - for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_ORDS; globalOrd = globalOrds.nextOrd()) { + for (int i = 0; i < globalOrds.docValueCount(); i++) { + long globalOrd = globalOrds.nextOrd(); if (accepted.find(globalOrd) >= 0) { continue; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java index 4d78df2704740..4bcbe08ed227c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java @@ -358,8 +358,8 @@ public IncludeExclude( if (exclude != null && excludeValues != null) { throw new IllegalArgumentException(); } - this.include = include == null ? null : new RegExp(include); - this.exclude = exclude == null ? null : new RegExp(exclude); + this.include = include == null ? null : new RegExp(include, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT); + this.exclude = exclude == null ? 
null : new RegExp(exclude, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT); this.includeValues = includeValues; this.excludeValues = excludeValues; this.incZeroBasedPartition = 0; @@ -529,7 +529,7 @@ private Automaton toAutomaton() { if (exclude != null) { a = Operations.minus(a, exclude.toAutomaton(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } - return a; + return Operations.determinize(a, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } public StringFilter convertToStringFilter(DocValueFormat format) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java index 84cd869517702..05aa80f06448d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java @@ -308,7 +308,8 @@ public void collect(int doc, long bucketOrd) throws IOException { bits = new BitArray(maxOrd, bigArrays); visitedOrds.set(bucketOrd, bits); } - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + for (int i = 0; i < values.docValueCount(); i++) { + long ord = values.nextOrd(); bits.set((int) ord); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GlobalOrdCardinalityAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GlobalOrdCardinalityAggregator.java index 32be4513f5c3e..d0685b3a09262 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GlobalOrdCardinalityAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GlobalOrdCardinalityAggregator.java @@ -259,8 +259,8 @@ public CompetitiveIterator competitiveIterator() { @Override public void collect(int doc, long bucketOrd) throws IOException { if (docValues.advanceExact(doc)) { - for (long 
ord = docValues.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = docValues - .nextOrd()) { + for (int i = 0; i < docValues.docValueCount(); i++) { + long ord = docValues.nextOrd(); if (bits.getAndSet(ord) == false) { competitiveIterator.onVisitedOrdinal(ord); } @@ -309,7 +309,8 @@ public void collect(int doc, long bucketOrd) throws IOException { public void collect(int doc, long bucketOrd) throws IOException { if (docValues.advanceExact(doc)) { final BitArray bits = getNewOrExistingBitArray(bucketOrd); - for (long ord = docValues.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = docValues.nextOrd()) { + for (int i = 0; i < docValues.docValueCount(); i++) { + long ord = docValues.nextOrd(); bits.set((int) ord); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java index f7f319618fa36..8ff381cbbc84d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java @@ -135,7 +135,7 @@ public InternalAggregation get() { maxScore = reduceAndFindMaxScore(aggregations, shardDocs); reducedTopDocs = TopDocs.merge(from, size, shardDocs); } - assert reducedTopDocs.totalHits.relation == Relation.EQUAL_TO; + assert reducedTopDocs.totalHits.relation() == Relation.EQUAL_TO; return new InternalTopHits( getName(), @@ -262,8 +262,8 @@ public boolean equals(Object obj) { InternalTopHits other = (InternalTopHits) obj; if (from != other.from) return false; if (size != other.size) return false; - if (topDocs.topDocs.totalHits.value != other.topDocs.topDocs.totalHits.value) return false; - if (topDocs.topDocs.totalHits.relation != other.topDocs.topDocs.totalHits.relation) return false; + if (topDocs.topDocs.totalHits.value() != other.topDocs.topDocs.totalHits.value()) return false; + if 
(topDocs.topDocs.totalHits.relation() != other.topDocs.topDocs.totalHits.relation()) return false; if (topDocs.topDocs.scoreDocs.length != other.topDocs.topDocs.scoreDocs.length) return false; for (int d = 0; d < topDocs.topDocs.scoreDocs.length; d++) { ScoreDoc thisDoc = topDocs.topDocs.scoreDocs[d]; @@ -287,8 +287,8 @@ public int hashCode() { int hashCode = super.hashCode(); hashCode = 31 * hashCode + Integer.hashCode(from); hashCode = 31 * hashCode + Integer.hashCode(size); - hashCode = 31 * hashCode + Long.hashCode(topDocs.topDocs.totalHits.value); - hashCode = 31 * hashCode + topDocs.topDocs.totalHits.relation.hashCode(); + hashCode = 31 * hashCode + Long.hashCode(topDocs.topDocs.totalHits.value()); + hashCode = 31 * hashCode + topDocs.topDocs.totalHits.relation().hashCode(); for (int d = 0; d < topDocs.topDocs.scoreDocs.length; d++) { ScoreDoc doc = topDocs.topDocs.scoreDocs[d]; hashCode = 31 * hashCode + doc.doc; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java index d59d824bde435..90d6c298fbd23 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java @@ -51,7 +51,7 @@ public static boolean hasValue(InternalTDigestPercentiles agg) { } public static boolean hasValue(InternalTopHits agg) { - return (agg.getHits().getTotalHits().value == 0 + return (agg.getHits().getTotalHits().value() == 0 && Double.isNaN(agg.getHits().getMaxScore()) && Double.isNaN(agg.getTopDocs().maxScore)) == false; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java index 9d8d98bc7c7cc..87d8f839dfca1 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java @@ -19,8 +19,10 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; import org.apache.lucene.search.TopFieldCollector; +import org.apache.lucene.search.TopFieldCollectorManager; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TopScoreDocCollectorManager; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.search.MaxScoreCollector; import org.elasticsearch.common.lucene.Lucene; @@ -136,12 +138,14 @@ public void collect(int docId, long bucket) throws IOException { // but here we create collectors ourselves and we need prevent OOM because of crazy an offset and size. topN = Math.min(topN, subSearchContext.searcher().getIndexReader().maxDoc()); if (sort == null) { - collectors = new Collectors(TopScoreDocCollector.create(topN, Integer.MAX_VALUE), null); + TopScoreDocCollector topScoreDocCollector = new TopScoreDocCollectorManager(topN, null, Integer.MAX_VALUE, false) + .newCollector(); + collectors = new Collectors(topScoreDocCollector, null); } else { // TODO: can we pass trackTotalHits=subSearchContext.trackTotalHits(){ // Note that this would require to catch CollectionTerminatedException collectors = new Collectors( - TopFieldCollector.create(sort.sort, topN, Integer.MAX_VALUE), + new TopFieldCollectorManager(sort.sort, topN, null, Integer.MAX_VALUE, false).newCollector(), subSearchContext.trackScores() ? 
new MaxScoreCollector() : null ); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java index 4724bd0db05df..9b47507628dd1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java @@ -271,18 +271,17 @@ public long nextOrd() throws IOException { if (hasOrds) { return values.nextOrd(); } else { - // we want to return the next missing ord but set this to - // NO_MORE_ORDS so on the next call we indicate there are no - // more values - long ordToReturn = nextMissingOrd; - nextMissingOrd = SortedSetDocValues.NO_MORE_ORDS; - return ordToReturn; + return nextMissingOrd; } } @Override public int docValueCount() { - return values.docValueCount(); + if (hasOrds) { + return values.docValueCount(); + } else { + return 1; + } } @Override @@ -321,7 +320,11 @@ public BytesRef lookupOrd(long ord) throws IOException { @Override public int docValueCount() { - return values.docValueCount(); + if (hasOrds) { + return values.docValueCount(); + } else { + return 1; + } } @Override @@ -339,12 +342,7 @@ public long nextOrd() throws IOException { return ord + 1; } } else { - // we want to return the next missing ord but set this to - // NO_MORE_ORDS so on the next call we indicate there are no - // more values - long ordToReturn = nextMissingOrd; - nextMissingOrd = SortedSetDocValues.NO_MORE_ORDS; - return ordToReturn; + return nextMissingOrd; } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java index 742d366efa7a3..472619da78622 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java @@ -104,7 +104,7 @@ private void search(BucketCollector bucketCollector, Weight weight) throws IOExc Scorer scorer = weight.scorer(leaf); if (scorer != null) { if (minimumScore != null) { - scorer = new MinScoreScorer(weight, scorer, minimumScore); + scorer = new MinScoreScorer(scorer, minimumScore); } LeafWalker leafWalker = new LeafWalker(leaf, scorer, bucketCollector, () -> tsidOrd[0]); if (leafWalker.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java index 42b29fda3c472..769effdd60240 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java @@ -76,7 +76,7 @@ private static CharacterRunAutomaton nestedChildrenAutomaton(List nested for (String child : nestedChildren) { automata.add(Operations.concatenate(Automata.makeString(child + "."), Automata.makeAnyString())); } - return new CharacterRunAutomaton(Operations.union(automata)); + return new CharacterRunAutomaton(Operations.determinize(Operations.union(automata), AUTOMATON_MAX_DETERMINIZED_STATES)); } // Builds an automaton that will match any field that conforms to one of the input patterns @@ -84,7 +84,11 @@ private static CharacterRunAutomaton buildUnmappedFieldPatternAutomaton(List subInfos = fragInfo.getSubInfos(); CollectionUtil.introSort(subInfos, (o1, o2) -> { - int startOffset = o1.getTermsOffsets().get(0).getStartOffset(); - int startOffset2 = o2.getTermsOffsets().get(0).getStartOffset(); + int startOffset = o1.termsOffsets().get(0).getStartOffset(); + int startOffset2 = o2.termsOffsets().get(0).getStartOffset(); return Integer.compare(startOffset, startOffset2); }); return new 
WeightedFragInfo( - Math.min(fragInfo.getSubInfos().get(0).getTermsOffsets().get(0).getStartOffset(), fragInfo.getStartOffset()), + Math.min(fragInfo.getSubInfos().get(0).termsOffsets().get(0).getStartOffset(), fragInfo.getStartOffset()), fragInfo.getEndOffset(), subInfos, fragInfo.getTotalBoost() diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index da5d2d093fbd8..78d90377cdc3f 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -56,6 +56,7 @@ import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.Executor; +import java.util.stream.Collectors; /** * Context-aware extension of {@link IndexSearcher}. @@ -76,6 +77,7 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { private QueryProfiler profiler; private final MutableQueryTimeout cancellable; + private final boolean hasExecutor; private final int maximumNumberOfSlices; // don't create slices with less than this number of docs private final int minimumDocsPerSlice; @@ -133,6 +135,7 @@ public ContextIndexSearcher( int minimumDocsPerSlice ) throws IOException { super(wrapWithExitableDirectoryReader ? new ExitableDirectoryReader((DirectoryReader) reader, cancellable) : reader, executor); + this.hasExecutor = executor != null; setSimilarity(similarity); setQueryCache(queryCache); setQueryCachingPolicy(queryCachingPolicy); @@ -141,6 +144,15 @@ public ContextIndexSearcher( this.maximumNumberOfSlices = maximumNumberOfSlices; } + /** + * Whether an executor was provided at construction time or not. This indicates whether operations that support concurrency + * may be executed concurrently. 
It is not straightforward to deduct this from {@link #getTaskExecutor()} because {@link IndexSearcher} + * creates a {@link org.apache.lucene.search.TaskExecutor} anyways. + */ + public boolean hasExecutor() { + return hasExecutor; + } + @Override protected LeafSlice[] slices(List leaves) { // we offload to the executor unconditionally, including requests that don't support concurrency @@ -149,11 +161,6 @@ protected LeafSlice[] slices(List leaves) { return leafSlices; } - // package private for testing - int getMinimumDocsPerSlice() { - return minimumDocsPerSlice; - } - public void setProfiler(QueryProfiler profiler) { this.profiler = profiler; } @@ -243,7 +250,14 @@ public static LeafSlice[] computeSlices(List leaves, int maxS throw new IllegalArgumentException("maxSliceNum must be >= 1 (got " + maxSliceNum + ")"); } if (maxSliceNum == 1) { - return new LeafSlice[] { new LeafSlice(new ArrayList<>(leaves)) }; + return new LeafSlice[] { + new LeafSlice( + new ArrayList<>( + leaves.stream() + .map(LeafReaderContextPartition::createForEntireSegment) + .collect(Collectors.toCollection(ArrayList::new)) + ) + ) }; } // total number of documents to be searched final int numDocs = leaves.stream().mapToInt(l -> l.reader().maxDoc()).sum(); @@ -291,7 +305,11 @@ private static LeafSlice[] computeSlices(List leaves, int min for (List currentLeaf : queue) { // LeafSlice ctor reorders leaves so that leaves within a slice preserve the order they had within the IndexReader. // This is important given how Elasticsearch sorts leaves by descending @timestamp to get better query performance. 
- slices[upto++] = new LeafSlice(currentLeaf); + slices[upto++] = new LeafSlice( + currentLeaf.stream() + .map(LeafReaderContextPartition::createForEntireSegment) + .collect(Collectors.toCollection(ArrayList::new)) + ); } return slices; @@ -344,10 +362,10 @@ private T search(Weight weight, CollectorManager } final List> listTasks = new ArrayList<>(leafSlices.length); for (int i = 0; i < leafSlices.length; ++i) { - final LeafReaderContext[] leaves = leafSlices[i].leaves; + final LeafReaderContextPartition[] leaves = leafSlices[i].partitions; final C collector = collectors.get(i); listTasks.add(() -> { - search(Arrays.asList(leaves), weight, collector); + search(leaves, weight, collector); return collector; }); } @@ -364,7 +382,7 @@ private T search(Weight weight, CollectorManager * 2) handles the ES TimeExceededException */ @Override - public void search(List leaves, Weight weight, Collector collector) throws IOException { + public void search(LeafReaderContextPartition[] leaves, Weight weight, Collector collector) throws IOException { boolean success = false; try { super.search(leaves, weight, collector); @@ -412,7 +430,7 @@ public static class TimeExceededException extends RuntimeException { } @Override - protected void searchLeaf(LeafReaderContext ctx, Weight weight, Collector collector) throws IOException { + protected void searchLeaf(LeafReaderContext ctx, int minDocId, int maxDocId, Weight weight, Collector collector) throws IOException { cancellable.checkCancelled(); final LeafCollector leafCollector; try { @@ -432,7 +450,7 @@ protected void searchLeaf(LeafReaderContext ctx, Weight weight, Collector collec bulkScorer = new CancellableBulkScorer(bulkScorer, cancellable::checkCancelled); } try { - bulkScorer.score(leafCollector, liveDocs); + bulkScorer.score(leafCollector, liveDocs, minDocId, maxDocId); } catch (CollectionTerminatedException e) { // collection was terminated prematurely // continue with the following leaf diff --git 
a/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java b/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java index 3bdd7ff3630cf..64b54d3623f04 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java @@ -14,9 +14,9 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FilterDirectoryReader; import org.apache.lucene.index.FilterLeafReader; -import org.apache.lucene.index.FilterVectorValues; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.PointValues; import org.apache.lucene.index.QueryTimeout; @@ -32,6 +32,7 @@ import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader; import java.io.IOException; +import java.util.Objects; /** * Wraps an {@link IndexReader} with a {@link QueryCancellation} @@ -459,7 +460,6 @@ public void grow(int count) { } private static class ExitableByteVectorValues extends ByteVectorValues { - private int calls; private final QueryCancellation queryCancellation; private final ByteVectorValues in; @@ -479,8 +479,13 @@ public int size() { } @Override - public byte[] vectorValue() throws IOException { - return in.vectorValue(); + public byte[] vectorValue(int ord) throws IOException { + return in.vectorValue(ord); + } + + @Override + public int ordToDoc(int ord) { + return in.ordToDoc(ord); } @Override @@ -505,33 +510,17 @@ public DocIdSetIterator iterator() { } @Override - public int docID() { - return in.docID(); - } - - @Override - public int nextDoc() throws IOException { - final int nextDoc = in.nextDoc(); - checkAndThrowWithSampling(); - return nextDoc; + public DocIndexIterator iterator() { + return createExitableIterator(in.iterator(), 
queryCancellation); } @Override - public int advance(int target) throws IOException { - final int advance = in.advance(target); - checkAndThrowWithSampling(); - return advance; - } - - private void checkAndThrowWithSampling() { - if ((calls++ & ExitableIntersectVisitor.MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK) == 0) { - this.queryCancellation.checkCancelled(); - } + public ByteVectorValues copy() throws IOException { + return in.copy(); } } - private static class ExitableFloatVectorValues extends FilterVectorValues { - private int calls; + private static class ExitableFloatVectorValues extends FilterFloatVectorValues { private final QueryCancellation queryCancellation; ExitableFloatVectorValues(FloatVectorValues vectorValues, QueryCancellation queryCancellation) { @@ -541,17 +530,13 @@ private static class ExitableFloatVectorValues extends FilterVectorValues { } @Override - public int advance(int target) throws IOException { - final int advance = super.advance(target); - checkAndThrowWithSampling(); - return advance; + public float[] vectorValue(int ord) throws IOException { + return in.vectorValue(ord); } @Override - public int nextDoc() throws IOException { - final int nextDoc = super.nextDoc(); - checkAndThrowWithSampling(); - return nextDoc; + public int ordToDoc(int ord) { + return in.ordToDoc(ord); } @Override @@ -575,13 +560,61 @@ public DocIdSetIterator iterator() { }; } - private void checkAndThrowWithSampling() { - if ((calls++ & ExitableIntersectVisitor.MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK) == 0) { - this.queryCancellation.checkCancelled(); - } + @Override + public DocIndexIterator iterator() { + return createExitableIterator(in.iterator(), queryCancellation); + } + + @Override + public FloatVectorValues copy() throws IOException { + return in.copy(); } } + private static KnnVectorValues.DocIndexIterator createExitableIterator( + KnnVectorValues.DocIndexIterator delegate, + QueryCancellation queryCancellation + ) { + return new 
KnnVectorValues.DocIndexIterator() { + private int calls; + + @Override + public int index() { + return delegate.index(); + } + + @Override + public int docID() { + return delegate.docID(); + } + + @Override + public long cost() { + return delegate.cost(); + } + + @Override + public int nextDoc() throws IOException { + int nextDoc = delegate.nextDoc(); + checkAndThrowWithSampling(); + return nextDoc; + } + + @Override + public int advance(int target) throws IOException { + final int advance = delegate.advance(target); + checkAndThrowWithSampling(); + return advance; + } + + private void checkAndThrowWithSampling() { + if ((calls++ & ExitableIntersectVisitor.MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK) == 0) { + queryCancellation.checkCancelled(); + } + } + }; + } + private static class ExitableDocSetIterator extends DocIdSetIterator { private int calls; private final DocIdSetIterator in; @@ -622,4 +655,43 @@ private void checkAndThrowWithSampling() { } } } + + /** Delegates all methods to a wrapped {@link FloatVectorValues}. 
*/ + private abstract static class FilterFloatVectorValues extends FloatVectorValues { + + /** Wrapped values */ + protected final FloatVectorValues in; + + /** Sole constructor */ + protected FilterFloatVectorValues(FloatVectorValues in) { + Objects.requireNonNull(in); + this.in = in; + } + + @Override + public DocIndexIterator iterator() { + return in.iterator(); + } + + @Override + public float[] vectorValue(int ord) throws IOException { + return in.vectorValue(ord); + } + + @Override + public FloatVectorValues copy() throws IOException { + return in.copy(); + } + + @Override + public int dimension() { + return in.dimension(); + } + + @Override + public int size() { + return in.size(); + } + + } } diff --git a/server/src/main/java/org/elasticsearch/search/internal/FieldUsageTrackingDirectoryReader.java b/server/src/main/java/org/elasticsearch/search/internal/FieldUsageTrackingDirectoryReader.java index 9b594e2935504..f03be3f09b7d2 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/FieldUsageTrackingDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/search/internal/FieldUsageTrackingDirectoryReader.java @@ -103,15 +103,6 @@ static final class FieldUsageTrackingLeafReader extends SequentialStoredFieldsLe this.notifier = notifier; } - @Override - public Fields getTermVectors(int docID) throws IOException { - Fields f = super.getTermVectors(docID); - if (f != null) { - f = new FieldUsageTrackingTermVectorFields(f); - } - return f; - } - @Override public TermVectors termVectors() throws IOException { TermVectors termVectors = super.termVectors(); @@ -136,11 +127,6 @@ public PointValues getPointValues(String field) throws IOException { return pointValues; } - @Override - public void document(final int docID, final StoredFieldVisitor visitor) throws IOException { - storedFields().document(docID, visitor); - } - @Override public StoredFields storedFields() throws IOException { StoredFields storedFields = super.storedFields(); diff --git 
a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java index cd8f381e85f83..f559325063bef 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java @@ -12,7 +12,6 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; -import org.apache.lucene.search.Weight; import org.elasticsearch.search.profile.Timer; import java.io.IOException; @@ -25,15 +24,12 @@ final class ProfileScorer extends Scorer { private final Scorer scorer; - private final ProfileWeight profileWeight; private final Timer scoreTimer, nextDocTimer, advanceTimer, matchTimer, shallowAdvanceTimer, computeMaxScoreTimer, setMinCompetitiveScoreTimer; - ProfileScorer(ProfileWeight w, Scorer scorer, QueryProfileBreakdown profile) { - super(w); + ProfileScorer(Scorer scorer, QueryProfileBreakdown profile) { this.scorer = scorer; - this.profileWeight = w; scoreTimer = profile.getNewTimer(QueryTimingType.SCORE); nextDocTimer = profile.getNewTimer(QueryTimingType.NEXT_DOC); advanceTimer = profile.getNewTimer(QueryTimingType.ADVANCE); @@ -58,11 +54,6 @@ public float score() throws IOException { } } - @Override - public Weight getWeight() { - return profileWeight; - } - @Override public Collection getChildren() throws IOException { return scorer.getChildren(); diff --git a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java index 27bf8ea8aae47..5d35699adec95 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java @@ -37,15 +37,6 @@ public ProfileWeight(Query query, Weight 
subQueryWeight, QueryProfileBreakdown p this.profile = profile; } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - ScorerSupplier supplier = scorerSupplier(context); - if (supplier == null) { - return null; - } - return supplier.get(Long.MAX_VALUE); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final Timer timer = profile.getNewTimer(QueryTimingType.BUILD_SCORER); @@ -67,12 +58,24 @@ public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOExcepti public Scorer get(long loadCost) throws IOException { timer.start(); try { - return new ProfileScorer(weight, subQueryScorerSupplier.get(loadCost), profile); + return new ProfileScorer(subQueryScorerSupplier.get(loadCost), profile); } finally { timer.stop(); } } + @Override + public BulkScorer bulkScorer() throws IOException { + // We use the default bulk scorer instead of the specialized one. The reason + // is that Lucene's BulkScorers do everything at once: finding matches, + // scoring them and calling the collector, so they make it impossible to + // see where time is spent, which is the purpose of query profiling. + // The default bulk scorer will pull a scorer and iterate over matches, + // this might be a significantly different execution path for some queries + // like disjunctions, but in general this is what is done anyway + return super.bulkScorer(); + } + @Override public long cost() { timer.start(); @@ -90,18 +93,6 @@ public void setTopLevelScoringClause() throws IOException { }; } - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - // We use the default bulk scorer instead of the specialized one. The reason - // is that Lucene's BulkScorers do everything at once: finding matches, - // scoring them and calling the collector, so they make it impossible to - // see where time is spent, which is the purpose of query profiling. 
- // The default bulk scorer will pull a scorer and iterate over matches, - // this might be a significantly different execution path for some queries - // like disjunctions, but in general this is what is done anyway - return super.bulkScorer(context); - } - @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { return subQueryWeight.explain(context, doc); diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index d17cd4f69dec7..af65c30b49dcf 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -248,7 +248,7 @@ private static boolean canEarlyTerminate(IndexReader reader, SortAndFormats sort } final Sort sort = sortAndFormats.sort; for (LeafReaderContext ctx : reader.leaves()) { - Sort indexSort = ctx.reader().getMetaData().getSort(); + Sort indexSort = ctx.reader().getMetaData().sort(); if (indexSort == null || Lucene.canEarlyTerminate(sort, indexSort) == false) { return false; } diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollector.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollector.java index d1cbdd6adb761..00cf90fe12301 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollector.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollector.java @@ -202,7 +202,11 @@ public DocIdSetIterator competitiveIterator() throws IOException { } }; } - return new CompositeLeafCollector(postFilterBits, topDocsLeafCollector, aggsLeafCollector); + LeafCollector leafCollector = new CompositeLeafCollector(postFilterBits, topDocsLeafCollector, aggsLeafCollector); + if (cacheScores && topDocsLeafCollector != null && aggsLeafCollector != null) { + leafCollector = ScoreCachingWrappingScorer.wrap(leafCollector); + } + return leafCollector; } 
private static FilterScorable wrapToIgnoreMinCompetitiveScore(Scorable scorer) { @@ -263,9 +267,6 @@ private class CompositeLeafCollector implements LeafCollector { @Override public void setScorer(Scorable scorer) throws IOException { - if (cacheScores && topDocsLeafCollector != null && aggsLeafCollector != null) { - scorer = ScoreCachingWrappingScorer.wrap(scorer); - } // Ignore calls to setMinCompetitiveScore so that if the top docs collector // wants to skip low-scoring hits, the aggs collector still sees all hits. // this is important also for terminate_after in case used when total hits tracking is early terminated. diff --git a/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java b/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java index b78d9e40ba120..2cb960e7e73cb 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java @@ -13,7 +13,6 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; @@ -106,12 +105,12 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { /** * We return a scorer even if there are no ranked documents within the segment. * This ensures the correct propagation of the maximum score. 
*/ - return new Scorer(this) { + Scorer scorer = new Scorer() { final int lower = segmentStarts[context.ord]; final int upper = segmentStarts[context.ord + 1]; int upTo = -1; @@ -180,6 +179,7 @@ private int currentDocId() { } }; + return new DefaultScorerSupplier(scorer); } @Override @@ -325,11 +325,6 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio return topWeight.explain(context, doc); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return combinedWeight.scorer(context); - } - @Override public boolean isCacheable(LeafReaderContext ctx) { return combinedWeight.isCacheable(ctx); @@ -340,11 +335,6 @@ public Matches matches(LeafReaderContext context, int doc) throws IOException { return combinedWeight.matches(context, doc); } - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - return combinedWeight.bulkScorer(context); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { return combinedWeight.scorerSupplier(context); diff --git a/server/src/main/java/org/elasticsearch/search/runtime/AbstractScriptFieldQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/AbstractScriptFieldQuery.java index 5077d68c12baa..c65c2bb6650c1 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/AbstractScriptFieldQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/AbstractScriptFieldQuery.java @@ -19,6 +19,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.elasticsearch.script.AbstractFieldScript; @@ -69,10 +70,11 @@ public boolean isCacheable(LeafReaderContext ctx) { } @Override - public Scorer scorer(LeafReaderContext ctx) { + public ScorerSupplier 
scorerSupplier(LeafReaderContext ctx) throws IOException { S scriptContext = scriptContextFunction.apply(ctx); DocIdSetIterator approximation = DocIdSetIterator.all(ctx.reader().maxDoc()); - return new ConstantScoreScorer(this, score(), scoreMode, createTwoPhaseIterator(scriptContext, approximation)); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, createTwoPhaseIterator(scriptContext, approximation)); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQuery.java index 751ecb18cc68f..430d22ebc9084 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQuery.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.util.SloppyMath; @@ -79,8 +80,9 @@ public boolean isCacheable(LeafReaderContext ctx) { } @Override - public Scorer scorer(LeafReaderContext context) { - return new DistanceScorer(this, scriptContextFunction().apply(context), context.reader().maxDoc(), boost); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + Scorer scorer = new DistanceScorer(scriptContextFunction().apply(context), context.reader().maxDoc(), boost); + return new DefaultScorerSupplier(scorer); } @Override @@ -116,8 +118,7 @@ private class DistanceScorer extends Scorer { private final DocIdSetIterator disi; private final float weight; - protected DistanceScorer(Weight weight, AbstractLongFieldScript script, int maxDoc, float boost) { 
- super(weight); + protected DistanceScorer(AbstractLongFieldScript script, int maxDoc, float boost) { this.script = script; twoPhase = new TwoPhaseIterator(DocIdSetIterator.all(maxDoc)) { @Override diff --git a/server/src/main/java/org/elasticsearch/search/runtime/LongScriptFieldDistanceFeatureQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/LongScriptFieldDistanceFeatureQuery.java index 7c8ac4a8cae63..d18098ee7de33 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/LongScriptFieldDistanceFeatureQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/LongScriptFieldDistanceFeatureQuery.java @@ -16,6 +16,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.elasticsearch.script.AbstractLongFieldScript; @@ -56,8 +57,10 @@ public boolean isCacheable(LeafReaderContext ctx) { } @Override - public Scorer scorer(LeafReaderContext context) { - return new DistanceScorer(this, scriptContextFunction().apply(context), context.reader().maxDoc(), boost); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + return new DefaultScorerSupplier( + new DistanceScorer(scriptContextFunction().apply(context), context.reader().maxDoc(), boost) + ); } @Override @@ -84,8 +87,7 @@ private class DistanceScorer extends Scorer { private final DocIdSetIterator disi; private final float weight; - protected DistanceScorer(Weight weight, AbstractLongFieldScript script, int maxDoc, float boost) { - super(weight); + protected DistanceScorer(AbstractLongFieldScript script, int maxDoc, float boost) { this.script = script; twoPhase = new TwoPhaseIterator(DocIdSetIterator.all(maxDoc)) { @Override diff --git a/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQuery.java 
b/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQuery.java index ab32427ed4ac1..3c5931367370e 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQuery.java @@ -10,6 +10,7 @@ package org.elasticsearch.search.runtime; import org.apache.lucene.util.automaton.ByteRunAutomaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.script.Script; import org.elasticsearch.script.StringFieldScript; @@ -34,7 +35,12 @@ public StringScriptFieldRegexpQuery( script, leafFactory, fieldName, - new ByteRunAutomaton(new RegExp(Objects.requireNonNull(pattern), syntaxFlags, matchFlags).toAutomaton(maxDeterminizedStates)) + new ByteRunAutomaton( + Operations.determinize( + new RegExp(Objects.requireNonNull(pattern), syntaxFlags, matchFlags).toAutomaton(), + maxDeterminizedStates + ) + ) ); this.pattern = pattern; this.syntaxFlags = syntaxFlags; diff --git a/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQuery.java index 5bacaf0d36b55..6c1aa6f72c4a1 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQuery.java @@ -13,6 +13,7 @@ import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.ByteRunAutomaton; +import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.lucene.search.AutomatonQueries; import org.elasticsearch.script.Script; import org.elasticsearch.script.StringFieldScript; @@ -44,7 +45,7 @@ private static Automaton buildAutomaton(Term term, boolean caseInsensitive) { if 
(caseInsensitive) { return AutomatonQueries.toCaseInsensitiveWildcardAutomaton(term); } - return WildcardQuery.toAutomaton(term); + return WildcardQuery.toAutomaton(term, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/slice/DocIdSliceQuery.java b/server/src/main/java/org/elasticsearch/search/slice/DocIdSliceQuery.java index f4ab7e29e1684..6de888ac8aff4 100644 --- a/server/src/main/java/org/elasticsearch/search/slice/DocIdSliceQuery.java +++ b/server/src/main/java/org/elasticsearch/search/slice/DocIdSliceQuery.java @@ -16,6 +16,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.elasticsearch.search.sort.FieldSortBuilder; @@ -58,9 +59,10 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { DocIdSetIterator iterator = createIterator(context, sliceStart, sliceStart + sliceSize); - return new ConstantScoreScorer(this, boost, scoreMode, iterator); + Scorer scorer = new ConstantScoreScorer(boost, scoreMode, iterator); + return new DefaultScorerSupplier(scorer); } private static DocIdSetIterator createIterator(LeafReaderContext context, int sliceStart, int sliceEnd) { diff --git a/server/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java b/server/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java index b66ae219ace97..05cf173468fdc 100644 --- a/server/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java +++ b/server/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java @@ -20,6 +20,7 @@ import org.apache.lucene.search.IndexSearcher; 
import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; @@ -41,7 +42,7 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final SortedNumericDocValues values = DocValues.getSortedNumeric(context.reader(), getField()); final DocIdSetIterator approximation = DocIdSetIterator.all(context.reader().maxDoc()); final TwoPhaseIterator twoPhase = new TwoPhaseIterator(approximation) { @@ -66,7 +67,8 @@ public float matchCost() { return 10; } }; - return new ConstantScoreScorer(this, score(), scoreMode, twoPhase); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, twoPhase); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java b/server/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java index 3927f54461bb8..9aecbfdd84ee6 100644 --- a/server/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java +++ b/server/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java @@ -21,6 +21,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.DocIdSetBuilder; @@ -49,10 +50,11 @@ public TermsSliceQuery(String field, int id, int max) { public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, boost) { @Override - public Scorer 
scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final DocIdSet disi = build(context.reader()); final DocIdSetIterator leafIt = disi.iterator(); - return new ConstantScoreScorer(this, score(), scoreMode, leafIt); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, leafIt); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java index a6fd4ef90693d..e60e534d6acaa 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java @@ -126,7 +126,6 @@ public void setScorer(Scorable scorer) { @Override protected boolean advanceExact(int doc) throws IOException { - assert doc == scorer.docID() : "expected scorer to be on [" + doc + "] but was on [" + scorer.docID() + "]"; /* We will never be called by documents that don't match the * query and they'll all have a score, thus `true`. 
*/ score = scorer.score(); diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java index 8fdc33f38934f..fd6cfeaea639b 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.CollectionTerminatedException; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Weight; @@ -84,7 +85,7 @@ private static void suggest(IndexSearcher searcher, CompletionQuery query, TopSu LeafCollector leafCollector = null; try { leafCollector = collector.getLeafCollector(context); - scorer.score(leafCollector, context.reader().getLiveDocs()); + scorer.score(leafCollector, context.reader().getLiveDocs(), 0, DocIdSetIterator.NO_MORE_DOCS); } catch (CollectionTerminatedException e) { // collection was terminated prematurely // continue with the following leaf diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java index 1366da366b068..ed8197786ba7e 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java @@ -133,7 +133,7 @@ public CandidateSet drawCandidates(CandidateSet set) throws IOException { * because that's what {@link DirectSpellChecker#suggestSimilar} expects * when filtering terms. 
*/ - int threshold = thresholdTermFrequency(original.termStats.docFreq); + int threshold = thresholdTermFrequency(original.termStats.docFreq()); if (threshold == Integer.MAX_VALUE) { // the threshold is the max possible frequency so we can skip the search return set; @@ -226,7 +226,7 @@ public void nextToken() throws IOException { } private static double score(TermStats termStats, double errorScore, long dictionarySize) { - return errorScore * (((double) termStats.totalTermFreq + 1) / ((double) dictionarySize + 1)); + return errorScore * (((double) termStats.totalTermFreq() + 1) / ((double) dictionarySize + 1)); } // package protected for test diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java index a47cd5fe5a84b..0fd3ebcd00865 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java @@ -36,7 +36,7 @@ protected double scoreUnigram(Candidate word) throws IOException { @Override protected double scoreBigram(Candidate word, Candidate w_1) throws IOException { join(separator, spare, w_1.term, word.term); - return (alpha + frequency(spare.get())) / (w_1.termStats.totalTermFreq + alpha * numTerms); + return (alpha + frequency(spare.get())) / (w_1.termStats.totalTermFreq() + alpha * numTerms); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolatingScorer.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolatingScorer.java index fe64a65498776..0d66311303080 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolatingScorer.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolatingScorer.java @@ -58,7 +58,7 @@ protected double scoreBigram(Candidate word, Candidate w_1) throws IOException { 
if (count < 1) { return unigramLambda * scoreUnigram(word); } - return bigramLambda * (count / (0.5d + w_1.termStats.totalTermFreq)) + unigramLambda * scoreUnigram(word); + return bigramLambda * (count / (0.5d + w_1.termStats.totalTermFreq())) + unigramLambda * scoreUnigram(word); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java index 7257a0d972459..21d1f34b68eee 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java @@ -70,7 +70,7 @@ public void nextToken() throws IOException { if (posIncAttr.getPositionIncrement() == 0 && typeAttribute.type() == SynonymFilter.TYPE_SYNONYM) { assert currentSet != null; TermStats termStats = generator.termStats(term); - if (termStats.docFreq > 0) { + if (termStats.docFreq() > 0) { currentSet.addOneCandidate(generator.createCandidate(BytesRef.deepCopyOf(term), termStats, realWordLikelihood)); } } else { diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java index d893e0986e0d3..270866c14b20a 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java @@ -35,7 +35,7 @@ protected double scoreBigram(Candidate word, Candidate w_1) throws IOException { if (count < 1) { return discount * scoreUnigram(word); } - return count / (w_1.termStats.totalTermFreq + 0.00000000001d); + return count / (w_1.termStats.totalTermFreq() + 0.00000000001d); } @Override @@ -50,7 +50,7 @@ protected double scoreTrigram(Candidate w, Candidate w_1, Candidate w_2) throws join(separator, spare, w_2.term, 
w_1.term, w.term); long trigramCount = frequency(spare.get()); if (trigramCount < 1) { - return discount * (bigramCount / (w_1.termStats.totalTermFreq + 0.00000000001d)); + return discount * (bigramCount / (w_1.termStats.totalTermFreq() + 0.00000000001d)); } return trigramCount / (bigramCount + 0.00000000001d); } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/DenseVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/DenseVectorQuery.java index 44bbd0f50951c..31e19b6784757 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/DenseVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/DenseVectorQuery.java @@ -19,6 +19,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.VectorScorer; import org.apache.lucene.search.Weight; @@ -70,12 +71,12 @@ public Explanation explain(LeafReaderContext leafReaderContext, int i) throws IO } @Override - public Scorer scorer(LeafReaderContext leafReaderContext) throws IOException { - VectorScorer vectorScorer = vectorScorer(leafReaderContext); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + VectorScorer vectorScorer = vectorScorer(context); if (vectorScorer == null) { return null; } - return new DenseVectorScorer(this, vectorScorer); + return new DefaultScorerSupplier(new DenseVectorScorer(vectorScorer, boost)); } @Override @@ -178,11 +179,10 @@ static class DenseVectorScorer extends Scorer { private final DocIdSetIterator iterator; private final float boost; - DenseVectorScorer(DenseVectorWeight weight, VectorScorer vectorScorer) { - super(weight); + DenseVectorScorer(VectorScorer vectorScorer, float boost) { this.vectorScorer = vectorScorer; this.iterator = vectorScorer.iterator(); - this.boost = weight.boost; + this.boost = boost; } @Override diff --git 
a/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenByteKnnVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenByteKnnVectorQuery.java index 413840f2b451b..9f3d83b4da082 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenByteKnnVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenByteKnnVectorQuery.java @@ -34,7 +34,7 @@ public ESDiversifyingChildrenByteKnnVectorQuery( @Override protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { TopDocs topK = kParam == null ? super.mergeLeafResults(perLeafResults) : TopDocs.merge(kParam, perLeafResults); - vectorOpsCount = topK.totalHits.value; + vectorOpsCount = topK.totalHits.value(); return topK; } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenFloatKnnVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenFloatKnnVectorQuery.java index 80704a3b552fe..3907bdf89bc6f 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenFloatKnnVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenFloatKnnVectorQuery.java @@ -34,7 +34,7 @@ public ESDiversifyingChildrenFloatKnnVectorQuery( @Override protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { TopDocs topK = kParam == null ? 
super.mergeLeafResults(perLeafResults) : TopDocs.merge(kParam, perLeafResults); - vectorOpsCount = topK.totalHits.value; + vectorOpsCount = topK.totalHits.value(); return topK; } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java index 14bb94a366e50..9363f67a7350b 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java @@ -27,7 +27,7 @@ public ESKnnByteVectorQuery(String field, byte[] target, Integer k, int numCands protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { // if k param is set, we get only top k results from each shard TopDocs topK = kParam == null ? super.mergeLeafResults(perLeafResults) : TopDocs.merge(kParam, perLeafResults); - vectorOpsCount = topK.totalHits.value; + vectorOpsCount = topK.totalHits.value(); return topK; } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java index 590d8cfbbaba1..be0437af9131d 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java @@ -27,7 +27,7 @@ public ESKnnFloatVectorQuery(String field, float[] target, Integer k, int numCan protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { // if k param is set, we get only top k results from each shard TopDocs topK = kParam == null ? 
super.mergeLeafResults(perLeafResults) : TopDocs.merge(kParam, perLeafResults); - vectorOpsCount = topK.totalHits.value; + vectorOpsCount = topK.totalHits.value(); return topK; } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQuery.java index 06fb109d6580e..bb83b8528c6c8 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQuery.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import java.io.IOException; @@ -88,13 +89,13 @@ public Explanation explain(LeafReaderContext context, int doc) { } @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { // Segment starts indicate how many docs are in the segment, // upper equalling lower indicates no documents for this segment if (segmentStarts[context.ord] == segmentStarts[context.ord + 1]) { return null; } - return new Scorer(this) { + Scorer scorer = new Scorer() { final int lower = segmentStarts[context.ord]; final int upper = segmentStarts[context.ord + 1]; int upTo = -1; @@ -177,6 +178,7 @@ private int currentDocId() { } }; + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/vectors/VectorSimilarityQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/VectorSimilarityQuery.java index 77f60adc4fcd8..5219778047bcd 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/VectorSimilarityQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/VectorSimilarityQuery.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.QueryVisitor; 
import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.elasticsearch.common.lucene.search.function.MinScoreScorer; @@ -142,12 +143,22 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - Scorer innerScorer = in.scorer(context); - if (innerScorer == null) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier inScorerSupplier = in.scorerSupplier(context); + if (inScorerSupplier == null) { return null; } - return new MinScoreScorer(this, innerScorer, docScore, boost); + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + return new MinScoreScorer(inScorerSupplier.get(leadCost), docScore, boost); + } + + @Override + public long cost() { + return inScorerSupplier.cost(); + } + }; } } diff --git a/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java b/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java index b14116b3d55ba..c760e8043e262 100644 --- a/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java +++ b/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java @@ -230,7 +230,7 @@ public void getSynonymSetRules(String synonymSetId, ActionListener { - long totalSynonymRules = countResponse.getHits().getTotalHits().value; + long totalSynonymRules = countResponse.getHits().getTotalHits().value(); if (totalSynonymRules > maxSynonymsSets) { logger.warn( "The number of synonym rules in the synonym set [{}] exceeds the maximum allowed." 
@@ -265,7 +265,7 @@ public void getSynonymSetRules(String synonymSetId, int from, int size, ActionLi .setPreference(Preference.LOCAL.type()) .setTrackTotalHits(true) .execute(new DelegatingIndexNotFoundActionListener<>(synonymSetId, listener, (searchListener, searchResponse) -> { - final long totalSynonymRules = searchResponse.getHits().getTotalHits().value; + final long totalSynonymRules = searchResponse.getHits().getTotalHits().value(); // If there are no rules, check that the synonym set actually exists to return the proper error if (totalSynonymRules == 0) { checkSynonymSetExists(synonymSetId, searchListener.delegateFailure((existsListener, response) -> { @@ -383,7 +383,7 @@ public void putSynonymRule(String synonymsSetId, SynonymRule synonymRule, Action .setPreference(Preference.LOCAL.type()) .setTrackTotalHits(true) .execute(l1.delegateFailureAndWrap((searchListener, searchResponse) -> { - long synonymsSetSize = searchResponse.getHits().getTotalHits().value; + long synonymsSetSize = searchResponse.getHits().getTotalHits().value(); if (synonymsSetSize >= maxSynonymsSets) { listener.onFailure( new IllegalArgumentException("The number of synonym rules in a synonyms set cannot exceed " + maxSynonymsSets) diff --git a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec index 4e85ba2cf479f..33c8081971202 100644 --- a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec +++ b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -1,2 +1,3 @@ org.elasticsearch.index.codec.Elasticsearch814Codec org.elasticsearch.index.codec.Elasticsearch816Codec +org.elasticsearch.index.codec.Elasticsearch900Codec diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java index 
65464c7f14a5c..bf4a28b9c60b2 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java @@ -12,8 +12,8 @@ import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; @@ -53,6 +53,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.apache.lucene.search.suggest.document.Completion912PostingsFormat; import org.apache.lucene.search.suggest.document.CompletionPostingsFormat; @@ -327,7 +328,7 @@ public void testTriangle() throws Exception { public void testCompletionField() throws Exception { IndexWriterConfig config = new IndexWriterConfig().setCommitOnClose(true) .setUseCompoundFile(false) - .setCodec(new Lucene912Codec(Lucene912Codec.Mode.BEST_SPEED) { + .setCodec(new Lucene100Codec(Lucene100Codec.Mode.BEST_SPEED) { @Override public PostingsFormat getPostingsFormatForField(String field) { if (field.startsWith("suggest_")) { @@ -414,25 +415,25 @@ private static void addFieldsToDoc(Document doc, IndexableField[] fields) { enum CodecMode { BEST_SPEED { @Override - Lucene912Codec.Mode mode() { - return Lucene912Codec.Mode.BEST_SPEED; + Lucene100Codec.Mode mode() { + return Lucene100Codec.Mode.BEST_SPEED; } }, BEST_COMPRESSION { @Override - Lucene912Codec.Mode mode() { 
- return Lucene912Codec.Mode.BEST_COMPRESSION; + Lucene100Codec.Mode mode() { + return Lucene100Codec.Mode.BEST_COMPRESSION; } }; - abstract Lucene912Codec.Mode mode(); + abstract Lucene100Codec.Mode mode(); } static void indexRandomly(Directory directory, CodecMode codecMode, int numDocs, Consumer addFields) throws IOException { IndexWriterConfig config = new IndexWriterConfig().setCommitOnClose(true) .setUseCompoundFile(randomBoolean()) - .setCodec(new Lucene912Codec(codecMode.mode())); + .setCodec(new Lucene100Codec(codecMode.mode())); try (IndexWriter writer = new IndexWriter(directory, config)) { for (int i = 0; i < numDocs; i++) { final Document doc = new Document(); @@ -640,7 +641,7 @@ static void rewriteIndexWithPerFieldCodec(Directory source, CodecMode mode, Dire try (DirectoryReader reader = DirectoryReader.open(source)) { IndexWriterConfig config = new IndexWriterConfig().setSoftDeletesField(Lucene.SOFT_DELETES_FIELD) .setUseCompoundFile(randomBoolean()) - .setCodec(new Lucene912Codec(mode.mode()) { + .setCodec(new Lucene100Codec(mode.mode()) { @Override public PostingsFormat getPostingsFormatForField(String field) { return new ES812PostingsFormat(); @@ -687,7 +688,7 @@ static void collectPerFieldStats(SegmentReader reader, IndexDiskUsageStats stats final String[] files; final Directory directory; if (sis.getUseCompoundFile()) { - directory = sis.getCodec().compoundFormat().getCompoundReader(reader.directory(), sis, IOContext.READ); + directory = sis.getCodec().compoundFormat().getCompoundReader(reader.directory(), sis, IOContext.DEFAULT); files = directory.listAll(); } else { directory = reader.directory(); @@ -785,14 +786,15 @@ private static class RandomMatchQuery extends Query { public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, 1.0f) { @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext 
context) throws IOException { final FixedBitSet bits = new FixedBitSet(context.reader().maxDoc()); for (int i = 0; i < bits.length(); i++) { if (randomBoolean()) { bits.set(i); } } - return new ConstantScoreScorer(this, 1.0f, ScoreMode.COMPLETE_NO_SCORES, new BitSetIterator(bits, bits.length())); + Scorer scorer = new ConstantScoreScorer(1.0f, ScoreMode.COMPLETE_NO_SCORES, new BitSetIterator(bits, bits.length())); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java b/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java index b7919878f9081..681d9d000beef 100644 --- a/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java @@ -79,8 +79,8 @@ public void testNullShardResultHandling() throws Exception { queryPhaseResultConsumer.consumeResult(querySearchResult, nextCounter::incrementAndGet); } var reducePhase = queryPhaseResultConsumer.reduce(); - assertEquals(0, reducePhase.totalHits().value); - assertEquals(TotalHits.Relation.EQUAL_TO, reducePhase.totalHits().relation); + assertEquals(0, reducePhase.totalHits().value()); + assertEquals(TotalHits.Relation.EQUAL_TO, reducePhase.totalHits().relation()); assertFalse(reducePhase.isEmptyResult()); assertEquals(10, nextCounter.get()); } @@ -94,8 +94,8 @@ public void testEmptyResults() throws Exception { ) ) { var reducePhase = queryPhaseResultConsumer.reduce(); - assertEquals(0, reducePhase.totalHits().value); - assertEquals(TotalHits.Relation.EQUAL_TO, reducePhase.totalHits().relation); + assertEquals(0, reducePhase.totalHits().value()); + assertEquals(TotalHits.Relation.EQUAL_TO, reducePhase.totalHits().relation()); assertTrue(reducePhase.isEmptyResult()); } } diff --git 
a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java index 90174a89209b8..99401e8a8d40a 100644 --- a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java @@ -151,11 +151,11 @@ public void run() throws IOException { assertNotNull(responseRef.get()); assertNotNull(responseRef.get().get(0)); assertNull(responseRef.get().get(0).fetchResult()); - assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value); + assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value()); assertEquals(42, responseRef.get().get(0).queryResult().topDocs().topDocs.scoreDocs[0].doc); assertNotNull(responseRef.get().get(1)); assertNull(responseRef.get().get(1).fetchResult()); - assertEquals(1, responseRef.get().get(1).queryResult().topDocs().topDocs.totalHits.value); + assertEquals(1, responseRef.get().get(1).queryResult().topDocs().topDocs.totalHits.value()); assertEquals(84, responseRef.get().get(1).queryResult().topDocs().topDocs.scoreDocs[0].doc); assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty()); assertEquals(2, mockSearchPhaseContext.numSuccess.get()); @@ -236,7 +236,7 @@ public void run() throws IOException { assertNotNull(responseRef.get()); assertNotNull(responseRef.get().get(0)); assertNull(responseRef.get().get(0).fetchResult()); - assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value); + assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value()); assertEquals(42, responseRef.get().get(0).queryResult().topDocs().topDocs.scoreDocs[0].doc); assertNull(responseRef.get().get(1)); diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java 
b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index 31ef57482cab1..09dd7821cd123 100644 --- a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -116,7 +116,7 @@ public void testShortcutQueryAndFetchOptimization() throws Exception { mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(numHits, searchResponse.getHits().getTotalHits().value); + assertEquals(numHits, searchResponse.getHits().getTotalHits().value()); if (numHits != 0) { assertEquals(42, searchResponse.getHits().getAt(0).docId()); } @@ -244,7 +244,7 @@ public void sendExecuteFetch( mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); assertEquals(84, searchResponse.getHits().getAt(0).docId()); assertEquals(42, searchResponse.getHits().getAt(1).docId()); assertEquals(0, searchResponse.getFailedShards()); @@ -353,7 +353,7 @@ public void sendExecuteFetch( mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); assertEquals(84, searchResponse.getHits().getAt(0).docId()); assertEquals(1, searchResponse.getFailedShards()); assertEquals(1, searchResponse.getSuccessfulShards()); @@ -468,7 +468,7 @@ public void run() { mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(numHits, 
searchResponse.getHits().getTotalHits().value); + assertEquals(numHits, searchResponse.getHits().getTotalHits().value()); assertEquals(Math.min(numHits, resultSetSize), searchResponse.getHits().getHits().length); SearchHit[] hits = searchResponse.getHits().getHits(); for (int i = 0; i < hits.length; i++) { @@ -703,7 +703,7 @@ public void sendExecuteFetch( mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); assertEquals(1, searchResponse.getHits().getHits().length); assertEquals(84, searchResponse.getHits().getAt(0).docId()); assertEquals(0, searchResponse.getFailedShards()); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java index 857402d1baaac..9a507977c0123 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -297,8 +297,8 @@ public void testMerge() { if (trackTotalHits == SearchContext.TRACK_TOTAL_HITS_DISABLED) { assertNull(mergedResponse.hits.getTotalHits()); } else { - assertThat(mergedResponse.hits.getTotalHits().value, equalTo(0L)); - assertEquals(mergedResponse.hits.getTotalHits().relation, Relation.EQUAL_TO); + assertThat(mergedResponse.hits.getTotalHits().value(), equalTo(0L)); + assertEquals(mergedResponse.hits.getTotalHits().relation(), Relation.EQUAL_TO); } for (SearchHit hit : mergedResponse.hits().getHits()) { SearchPhaseResult searchPhaseResult = fetchResults.get(hit.getShard().getShardId().id()); @@ -415,8 +415,8 @@ protected boolean lessThan(RankDoc a, RankDoc b) { if (trackTotalHits == SearchContext.TRACK_TOTAL_HITS_DISABLED) { 
assertNull(mergedResponse.hits.getTotalHits()); } else { - assertThat(mergedResponse.hits.getTotalHits().value, equalTo(0L)); - assertEquals(mergedResponse.hits.getTotalHits().relation, Relation.EQUAL_TO); + assertThat(mergedResponse.hits.getTotalHits().value(), equalTo(0L)); + assertEquals(mergedResponse.hits.getTotalHits().relation(), Relation.EQUAL_TO); } int rank = 1; for (SearchHit hit : mergedResponse.hits().getHits()) { @@ -522,8 +522,8 @@ private static int getTotalQueryHits(AtomicArray results) { int resultCount = 0; for (SearchPhaseResult shardResult : results.asList()) { TopDocs topDocs = shardResult.queryResult().topDocs().topDocs; - assert topDocs.totalHits.relation == Relation.EQUAL_TO; - resultCount += (int) topDocs.totalHits.value; + assert topDocs.totalHits.relation() == Relation.EQUAL_TO; + resultCount += (int) topDocs.totalHits.value(); } return resultCount; } @@ -784,7 +784,7 @@ public void testConsumerConcurrently() throws Exception { assertEquals(max.get(), internalMax.value(), 0.0D); assertEquals(1, reduce.sortedTopDocs().scoreDocs().length); assertEquals(max.get(), reduce.maxScore(), 0.0f); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(max.get(), reduce.sortedTopDocs().scoreDocs()[0].score, 0.0f); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); @@ -844,7 +844,7 @@ public void testConsumerOnlyAggs() throws Exception { assertEquals(max.get(), internalMax.value(), 0.0D); assertEquals(0, reduce.sortedTopDocs().scoreDocs().length); assertEquals(max.get(), reduce.maxScore(), 0.0f); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); assertNull(reduce.sortedTopDocs().collapseField()); @@ -902,7 +902,7 @@ public void 
testConsumerOnlyHits() throws Exception { assertAggReduction(request); assertEquals(1, reduce.sortedTopDocs().scoreDocs().length); assertEquals(max.get(), reduce.maxScore(), 0.0f); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(max.get(), reduce.sortedTopDocs().scoreDocs()[0].score, 0.0f); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); @@ -969,7 +969,7 @@ public void testReduceTopNWithFromOffset() throws Exception { ScoreDoc[] scoreDocs = reduce.sortedTopDocs().scoreDocs(); assertEquals(5, scoreDocs.length); assertEquals(100.f, reduce.maxScore(), 0.0f); - assertEquals(12, reduce.totalHits().value); + assertEquals(12, reduce.totalHits().value()); assertEquals(95.0f, scoreDocs[0].score, 0.0f); assertEquals(94.0f, scoreDocs[1].score, 0.0f); assertEquals(93.0f, scoreDocs[2].score, 0.0f); @@ -1022,7 +1022,7 @@ public void testConsumerSortByField() throws Exception { SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); assertAggReduction(request); assertEquals(Math.min(expectedNumResults, size), reduce.sortedTopDocs().scoreDocs().length); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(max.get(), ((FieldDoc) reduce.sortedTopDocs().scoreDocs()[0]).fields[0]); assertTrue(reduce.sortedTopDocs().isSortedByField()); assertEquals(1, reduce.sortedTopDocs().sortFields().length); @@ -1079,7 +1079,7 @@ public void testConsumerFieldCollapsing() throws Exception { SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); assertAggReduction(request); assertEquals(3, reduce.sortedTopDocs().scoreDocs().length); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(a, ((FieldDoc) reduce.sortedTopDocs().scoreDocs()[0]).fields[0]); 
assertEquals(b, ((FieldDoc) reduce.sortedTopDocs().scoreDocs()[1]).fields[0]); assertEquals(c, ((FieldDoc) reduce.sortedTopDocs().scoreDocs()[2]).fields[0]); @@ -1199,7 +1199,7 @@ public void testConsumerSuggestions() throws Exception { assertEquals(maxScoreCompletion, reduce.sortedTopDocs().scoreDocs()[0].score, 0f); assertEquals(0, reduce.sortedTopDocs().scoreDocs()[0].doc); assertNotEquals(-1, reduce.sortedTopDocs().scoreDocs()[0].shardIndex); - assertEquals(0, reduce.totalHits().value); + assertEquals(0, reduce.totalHits().value()); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); assertNull(reduce.sortedTopDocs().collapseField()); @@ -1290,7 +1290,7 @@ public void onFinalReduce(List shards, TotalHits totalHits, Interna assertEquals(max.get(), internalMax.value(), 0.0D); assertEquals(1, reduce.sortedTopDocs().scoreDocs().length); assertEquals(max.get(), reduce.maxScore(), 0.0f); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(max.get(), reduce.sortedTopDocs().scoreDocs()[0].score, 0.0f); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java index d279fa5030a8c..e4284937474c7 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java @@ -230,11 +230,11 @@ public void run() { SearchPhaseController.ReducedQueryPhase phase = action.results.reduce(); assertThat(phase.numReducePhases(), greaterThanOrEqualTo(1)); if (withScroll) { - assertThat(phase.totalHits().value, equalTo((long) numShards)); - 
assertThat(phase.totalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(phase.totalHits().value(), equalTo((long) numShards)); + assertThat(phase.totalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); } else { - assertThat(phase.totalHits().value, equalTo(2L)); - assertThat(phase.totalHits().relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(phase.totalHits().value(), equalTo(2L)); + assertThat(phase.totalHits().relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); } assertThat(phase.sortedTopDocs().scoreDocs().length, equalTo(1)); assertThat(phase.sortedTopDocs().scoreDocs()[0], instanceOf(FieldDoc.class)); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java index 2b0ed0552e594..51796f404c283 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java @@ -762,11 +762,11 @@ public void testMergeSearchHits() throws InterruptedException { TotalHits totalHits = null; if (trackTotalHitsUpTo != SearchContext.TRACK_TOTAL_HITS_DISABLED) { totalHits = new TotalHits(randomLongBetween(0, 1000), totalHitsRelation); - long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value; - expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value, trackTotalHitsUpTo), totalHitsRelation); + long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value(); + expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value(), trackTotalHitsUpTo), totalHitsRelation); } - final int numDocs = totalHits == null || totalHits.value >= requestedSize ? requestedSize : (int) totalHits.value; + final int numDocs = totalHits == null || totalHits.value() >= requestedSize ? 
requestedSize : (int) totalHits.value(); int scoreFactor = randomIntBetween(1, numResponses); float maxScore = scoreSort ? numDocs * scoreFactor : Float.NaN; SearchHit[] hits = randomSearchHitArray( @@ -862,8 +862,8 @@ public void testMergeSearchHits() throws InterruptedException { assertNull(searchHits.getTotalHits()); } else { assertNotNull(searchHits.getTotalHits()); - assertEquals(expectedTotalHits.value, searchHits.getTotalHits().value); - assertSame(expectedTotalHits.relation, searchHits.getTotalHits().relation); + assertEquals(expectedTotalHits.value(), searchHits.getTotalHits().value()); + assertSame(expectedTotalHits.relation(), searchHits.getTotalHits().relation()); } if (expectedMaxScore == Float.NEGATIVE_INFINITY) { assertTrue(Float.isNaN(searchHits.getMaxScore())); @@ -910,9 +910,9 @@ public void testMergeNoResponsesAdded() { assertEquals(0, response.getNumReducePhases()); assertFalse(response.isTimedOut()); assertNotNull(response.getHits().getTotalHits()); - assertEquals(0, response.getHits().getTotalHits().value); + assertEquals(0, response.getHits().getTotalHits().value()); assertEquals(0, response.getHits().getHits().length); - assertEquals(TotalHits.Relation.EQUAL_TO, response.getHits().getTotalHits().relation); + assertEquals(TotalHits.Relation.EQUAL_TO, response.getHits().getTotalHits().relation()); assertNull(response.getScrollId()); assertSame(InternalAggregations.EMPTY, response.getAggregations()); assertNull(response.getSuggest()); @@ -1004,7 +1004,7 @@ public void testMergeEmptySearchHitsWithNonEmpty() { assertEquals(2, merger.numResponses()); SearchResponse mergedResponse = merger.getMergedResponse(clusters); try { - assertEquals(10, mergedResponse.getHits().getTotalHits().value); + assertEquals(10, mergedResponse.getHits().getTotalHits().value()); assertEquals(10, mergedResponse.getHits().getHits().length); assertEquals(2, mergedResponse.getTotalShards()); assertEquals(2, mergedResponse.getSuccessfulShards()); @@ -1032,8 +1032,8 @@ public 
void testMergeOnlyEmptyHits() { TotalHits totalHits = null; if (trackTotalHitsUpTo != SearchContext.TRACK_TOTAL_HITS_DISABLED) { totalHits = new TotalHits(randomLongBetween(0, 1000), totalHitsRelation); - long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value; - expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value, trackTotalHitsUpTo), totalHitsRelation); + long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value(); + expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value(), trackTotalHitsUpTo), totalHitsRelation); } SearchResponse searchResponse = new SearchResponse( SearchHits.empty(totalHits, Float.NaN), @@ -1232,7 +1232,7 @@ public void testPartialAggsMixedWithFullResponses() { SearchResponse mergedResponse = searchResponseMerger.getMergedResponse(clusters); try { SearchHits hits = mergedResponse.getHits(); - assertThat(hits.getTotalHits().value, equalTo(2L)); // should be 2 hits from remote1 + assertThat(hits.getTotalHits().value(), equalTo(2L)); // should be 2 hits from remote1 SearchHit hit1 = hits.getHits()[0]; String expectedHit1 = """ { @@ -1273,7 +1273,7 @@ public void testPartialAggsMixedWithFullResponses() { mergedResponse = searchResponseMerger.getMergedResponse(clusters); try { SearchHits hits = mergedResponse.getHits(); - assertThat(hits.getTotalHits().value, equalTo(4L)); // should be 2 hits from remote1, 2 from remote2 + assertThat(hits.getTotalHits().value(), equalTo(4L)); // should be 2 hits from remote1, 2 from remote2 SearchHit hit1 = hits.getHits()[0]; String expectedHit1 = """ @@ -1414,7 +1414,7 @@ public void testPartialAggsMixedWithFullResponses() { mergedResponse = searchResponseMerger.getMergedResponse(clusters); try { SearchHits hits = mergedResponse.getHits(); - assertThat(hits.getTotalHits().value, equalTo(4L)); // should be 2 hits from remote1, 2 from remote2 + assertThat(hits.getTotalHits().value(), equalTo(4L)); // should be 2 hits from remote1, 2 
from remote2 SearchHit hit1 = hits.getHits()[0]; String expectedHit1 = """ @@ -1483,7 +1483,7 @@ public void testPartialAggsMixedWithFullResponses() { private SearchHits createSimpleDeterministicSearchHits(String clusterAlias, Index[] indices) { TotalHits totalHits = new TotalHits(2, TotalHits.Relation.EQUAL_TO); - final int numDocs = (int) totalHits.value; + final int numDocs = (int) totalHits.value(); int scoreFactor = 1; float maxScore = numDocs; int numFields = 1; diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java index afc4c6e9eccbf..bbeae6b19b8ac 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java @@ -621,8 +621,8 @@ public void testSerialization() throws IOException { if (searchResponse.getHits().getTotalHits() == null) { assertNull(deserialized.getHits().getTotalHits()); } else { - assertEquals(searchResponse.getHits().getTotalHits().value, deserialized.getHits().getTotalHits().value); - assertEquals(searchResponse.getHits().getTotalHits().relation, deserialized.getHits().getTotalHits().relation); + assertEquals(searchResponse.getHits().getTotalHits().value(), deserialized.getHits().getTotalHits().value()); + assertEquals(searchResponse.getHits().getTotalHits().relation(), deserialized.getHits().getTotalHits().relation()); } assertEquals(searchResponse.getHits().getHits().length, deserialized.getHits().getHits().length); assertEquals(searchResponse.getNumReducePhases(), deserialized.getNumReducePhases()); diff --git a/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java b/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java index c00fece686524..1b86e5b00000c 100644 --- 
a/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java +++ b/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java @@ -400,6 +400,6 @@ protected Fields getTermVectorsFromLucene(DirectoryReader directoryReader, TestD ScoreDoc[] scoreDocs = search.scoreDocs; assertEquals(1, scoreDocs.length); - return directoryReader.getTermVectors(scoreDocs[0].doc); + return directoryReader.termVectors().get(scoreDocs[0].doc); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java index b8804e9160a75..05382de49087d 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -9,6 +9,8 @@ package org.elasticsearch.cluster.metadata; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.TransportVersion; @@ -602,6 +604,8 @@ public void testCalculateNumRoutingShards() { public void testValidateDotIndex() { List systemIndexDescriptors = new ArrayList<>(); systemIndexDescriptors.add(SystemIndexDescriptorUtils.createUnmanaged(".test-one*", "test")); + Automaton patternAutomaton = new RegExp("\\.test-~(one.*)", RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(); + systemIndexDescriptors.add(SystemIndexDescriptorUtils.createUnmanaged(".test-~(one*)", "test")); systemIndexDescriptors.add(SystemIndexDescriptorUtils.createUnmanaged(".pattern-test*", "test-1")); diff --git a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java index 8158917f08187..9300aa992b687 100644 
--- a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SoftDeletesRetentionMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexOrDocValuesQuery; @@ -44,7 +45,6 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.tests.analysis.MockAnalyzer; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.tests.store.MockDirectoryWrapper; @@ -172,10 +172,10 @@ public void testPruneUnreferencedFiles() throws IOException { assertEquals(3, open.maxDoc()); IndexSearcher s = newSearcher(open); - assertEquals(s.search(new TermQuery(new Term("id", "1")), 1).totalHits.value, 1); - assertEquals(s.search(new TermQuery(new Term("id", "2")), 1).totalHits.value, 1); - assertEquals(s.search(new TermQuery(new Term("id", "3")), 1).totalHits.value, 1); - assertEquals(s.search(new TermQuery(new Term("id", "4")), 1).totalHits.value, 0); + assertEquals(s.search(new TermQuery(new Term("id", "1")), 1).totalHits.value(), 1); + assertEquals(s.search(new TermQuery(new Term("id", "2")), 1).totalHits.value(), 1); + assertEquals(s.search(new TermQuery(new Term("id", "3")), 1).totalHits.value(), 1); + assertEquals(s.search(new TermQuery(new Term("id", "4")), 1).totalHits.value(), 0); for (String file : dir.listAll()) { assertFalse("unexpected file: " + file, file.equals("segments_3") || file.startsWith("_2")); @@ -403,11 +403,6 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio throw new UnsupportedOperationException(); } - @Override - public 
Scorer scorer(LeafReaderContext context) throws IOException { - throw new UnsupportedOperationException(); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { return new ScorerSupplier() { @@ -464,18 +459,6 @@ public void testAsSequentialBitsUsesRandomAccess() throws IOException { } } - /** - * Test that the "unmap hack" is detected as supported by lucene. - * This works around the following bug: https://bugs.openjdk.java.net/browse/JDK-4724038 - *

      - * While not guaranteed, current status is "Critical Internal API": http://openjdk.java.net/jeps/260 - * Additionally this checks we did not screw up the security logic around the hack. - */ - public void testMMapHackSupported() throws Exception { - // add assume's here if needed for certain platforms, but we should know if it does not work. - assertTrue("MMapDirectory does not support unmapping: " + MMapDirectory.UNMAP_NOT_SUPPORTED_REASON, MMapDirectory.UNMAP_SUPPORTED); - } - public void testWrapAllDocsLive() throws Exception { Directory dir = newDirectory(); IndexWriterConfig config = newIndexWriterConfig().setSoftDeletesField(Lucene.SOFT_DELETES_FIELD) @@ -508,8 +491,9 @@ public void testWrapAllDocsLive() throws Exception { IndexSearcher searcher = newSearcher(reader); Set actualDocs = new HashSet<>(); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), Integer.MAX_VALUE); + StoredFields storedFields = reader.storedFields(); for (ScoreDoc scoreDoc : topDocs.scoreDocs) { - actualDocs.add(reader.document(scoreDoc.doc).get("id")); + actualDocs.add(storedFields.document(scoreDoc.doc).get("id")); } assertThat(actualDocs, equalTo(liveDocs)); } @@ -554,8 +538,9 @@ public void testWrapLiveDocsNotExposeAbortedDocuments() throws Exception { IndexSearcher searcher = newSearcher(reader); List actualDocs = new ArrayList<>(); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), Integer.MAX_VALUE); + StoredFields storedFields = reader.storedFields(); for (ScoreDoc scoreDoc : topDocs.scoreDocs) { - actualDocs.add(reader.document(scoreDoc.doc).get("id")); + actualDocs.add(storedFields.document(scoreDoc.doc).get("id")); } assertThat(actualDocs, equalTo(liveDocs)); } diff --git a/server/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java b/server/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java index 8332ff87a9d57..918dcc1bcbd42 100644 --- 
a/server/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java @@ -20,6 +20,7 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; @@ -127,8 +128,9 @@ public void setUp() throws Exception { // now go over each doc, build the relevant references and filter reader = DirectoryReader.open(iw); List filterTerms = new ArrayList<>(); + StoredFields storedFields = reader.storedFields(); for (int docId = 0; docId < reader.maxDoc(); docId++) { - Document doc = reader.document(docId); + Document doc = storedFields.document(docId); addFreqs(doc, referenceAll); if (deletedIds.contains(doc.getField("id").stringValue()) == false) { addFreqs(doc, referenceNotDeleted); diff --git a/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java b/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java index 7a8d43ebbfd18..55ca666d8588b 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java @@ -11,15 +11,11 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ConjunctionUtils; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.Explanation; -import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; -import org.apache.lucene.search.Weight; import 
org.apache.lucene.tests.search.AssertingScorer; import org.apache.lucene.tests.util.TestUtil; import org.elasticsearch.test.ESTestCase; @@ -66,27 +62,8 @@ public int advance(int target) throws IOException { }; } - private static Weight fakeWeight() { - return new Weight(new MatchAllDocsQuery()) { - @Override - public Explanation explain(LeafReaderContext context, int doc) throws IOException { - return null; - } - - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return null; - } - - @Override - public boolean isCacheable(LeafReaderContext ctx) { - return false; - } - }; - } - private static Scorer hideTwoPhaseIterator(Scorer in) { - return new Scorer(in.getWeight()) { + return new Scorer() { @Override public DocIdSetIterator iterator() { return TwoPhaseIterator.asDocIdSetIterator(in.twoPhaseIterator()); @@ -111,7 +88,7 @@ public int docID() { private static Scorer scorer(int maxDoc, final int[] docs, final float[] scores, final boolean twoPhase) { final DocIdSetIterator iterator = twoPhase ? DocIdSetIterator.all(maxDoc) : iterator(docs); - final Scorer scorer = new Scorer(fakeWeight()) { + final Scorer scorer = new Scorer() { int lastScoredDoc = -1; final float matchCost = (random().nextBoolean() ? 
1000 : 0) + random().nextInt(2000); @@ -192,7 +169,7 @@ public void doTestRandom(boolean twoPhase) throws IOException { } Scorer scorer = scorer(maxDoc, docs, scores, twoPhase); final float minScore = random().nextFloat(); - Scorer minScoreScorer = new MinScoreScorer(fakeWeight(), scorer, minScore); + Scorer minScoreScorer = new MinScoreScorer(scorer, minScore); int doc = -1; while (doc != DocIdSetIterator.NO_MORE_DOCS) { final int target; @@ -250,7 +227,7 @@ public void testConjunction() throws Exception { final float minScore; if (randomBoolean()) { minScore = randomFloat(); - MinScoreScorer minScoreScorer = new MinScoreScorer(scorer.getWeight(), scorer, minScore); + MinScoreScorer minScoreScorer = new MinScoreScorer(scorer, minScore); scorers.add(minScoreScorer); } else { scorers.add(scorer); diff --git a/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java b/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java index b1df24f4db2ad..3894efd0b7d4c 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java @@ -117,7 +117,7 @@ public boolean incrementToken() throws IOException { final double boost10 = ((BooleanQuery) mlt.like("text", new StringReader("lucene|10 release|1"))).clauses() .stream() - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .map(BoostQuery.class::cast) .filter(x -> ((TermQuery) x.getQuery()).getTerm().text().equals("lucene")) .mapToDouble(BoostQuery::getBoost) @@ -125,7 +125,7 @@ public boolean incrementToken() throws IOException { final double boost1 = ((BooleanQuery) mlt.like("text", new StringReader("lucene|1 release|1"))).clauses() .stream() - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .map(BoostQuery.class::cast) .filter(x -> ((TermQuery) 
x.getQuery()).getTerm().text().equals("lucene")) .mapToDouble(BoostQuery::getBoost) @@ -178,7 +178,7 @@ public void testTopN() throws Exception { expectedTerms[idx++] = new Term("text", text); } for (BooleanClause clause : clauses) { - Term term = ((TermQuery) clause.getQuery()).getTerm(); + Term term = ((TermQuery) clause.query()).getTerm(); assertTrue(Arrays.asList(expectedTerms).contains(term)); } diff --git a/server/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java b/server/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java index 6e8eb47035d43..e0e05c84b5649 100644 --- a/server/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java +++ b/server/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java @@ -84,12 +84,12 @@ public void testSimpleNumericOps() throws Exception { try (IndexReader reader = DirectoryReader.open(indexWriter)) { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - Document doc = searcher.doc(topDocs.scoreDocs[0].doc); + Document doc = searcher.storedFields().document(topDocs.scoreDocs[0].doc); IndexableField f = doc.getField("test"); assertThat(f.numericValue(), equalTo(2)); topDocs = searcher.search(IntPoint.newExactQuery("test", 2), 1); - doc = searcher.doc(topDocs.scoreDocs[0].doc); + doc = searcher.storedFields().document(topDocs.scoreDocs[0].doc); f = doc.getField("test"); assertThat(f.stringValue(), equalTo("2")); } @@ -115,7 +115,7 @@ public void testOrdering() throws Exception { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); final ArrayList fieldsOrder = new ArrayList<>(); - searcher.doc(topDocs.scoreDocs[0].doc, new StoredFieldVisitor() { + searcher.storedFields().document(topDocs.scoreDocs[0].doc, new StoredFieldVisitor() { @Override public Status needsField(FieldInfo fieldInfo) throws IOException { fieldsOrder.add(fieldInfo.name); 
diff --git a/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java b/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java index 97a6faaa5c6f6..01c4ac3c6fd6a 100644 --- a/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java +++ b/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java @@ -54,7 +54,7 @@ public void testVectorHighlighter() throws Exception { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); FastVectorHighlighter highlighter = new FastVectorHighlighter(); String fragment = highlighter.getBestFragment( @@ -87,7 +87,7 @@ public void testVectorHighlighterPrefixQuery() throws Exception { IndexReader reader = searcher.getIndexReader(); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); FastVectorHighlighter highlighter = new FastVectorHighlighter(); @@ -143,7 +143,7 @@ public void testVectorHighlighterNoStore() throws Exception { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); FastVectorHighlighter highlighter = new FastVectorHighlighter(); String fragment = highlighter.getBestFragment( @@ -170,7 +170,7 @@ public void testVectorHighlighterNoTermVector() throws Exception { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); FastVectorHighlighter highlighter = new FastVectorHighlighter(); String fragment = 
highlighter.getBestFragment( diff --git a/server/src/test/java/org/elasticsearch/gateway/MetadataStateFormatTests.java b/server/src/test/java/org/elasticsearch/gateway/MetadataStateFormatTests.java index 022a6994496ae..9419c63f9c48a 100644 --- a/server/src/test/java/org/elasticsearch/gateway/MetadataStateFormatTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/MetadataStateFormatTests.java @@ -227,7 +227,7 @@ public static void corruptFile(Path fileToCorrupt, Logger logger) throws IOExcep } long checksumAfterCorruption; long actualChecksumAfterCorruption; - try (ChecksumIndexInput input = dir.openChecksumInput(fileToCorrupt.getFileName().toString(), IOContext.DEFAULT)) { + try (ChecksumIndexInput input = dir.openChecksumInput(fileToCorrupt.getFileName().toString())) { assertThat(input.getFilePointer(), is(0L)); input.seek(input.length() - 8); // one long is the checksum... 8 bytes checksumAfterCorruption = input.getChecksum(); diff --git a/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java b/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java index db656b1fc5a94..450d123f551c8 100644 --- a/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java @@ -20,6 +20,7 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; @@ -1776,9 +1777,10 @@ private static void forEachDocument(DirectoryReader reader, Set types, C final Bits liveDocs = leafReaderContext.reader().getLiveDocs(); final IntPredicate isLiveDoc = liveDocs == null ? 
i -> true : liveDocs::get; final DocIdSetIterator docIdSetIterator = scorer.iterator(); + StoredFields storedFields = leafReaderContext.reader().storedFields(); while (docIdSetIterator.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { if (isLiveDoc.test(docIdSetIterator.docID())) { - final Document document = leafReaderContext.reader().document(docIdSetIterator.docID()); + final Document document = storedFields.document(docIdSetIterator.docID()); document.add(new StringField(TYPE_FIELD_NAME, typeName, Field.Store.NO)); consumer.accept(document); } diff --git a/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java b/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java index 7b264ac93511b..532a2ff024e8f 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java @@ -290,7 +290,7 @@ public void testRefreshActuallyWorks() throws Exception { // we are running on updateMetadata if the interval changes try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, search.totalHits.value); + assertEquals(1, search.totalHits.value()); } }); assertFalse(refreshTask.isClosed()); @@ -304,7 +304,7 @@ public void testRefreshActuallyWorks() throws Exception { // this one becomes visible due to the force refresh we are running on updateMetadata if the interval changes try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(2, search.totalHits.value); + assertEquals(2, search.totalHits.value()); } }); prepareIndex("test").setId("2").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); @@ -312,7 +312,7 @@ public void testRefreshActuallyWorks() throws Exception { // this one becomes visible due to the scheduled refresh try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs 
search = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(3, search.totalHits.value); + assertEquals(3, search.totalHits.value()); } }); } diff --git a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java index 10b0b54d2d7e2..9e4a19eb039fd 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -52,7 +52,7 @@ public void testResolveDefaultCodecs() throws Exception { assumeTrue("Only when zstd_stored_fields feature flag is enabled", CodecService.ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()); CodecService codecService = createCodecService(); assertThat(codecService.codec("default"), instanceOf(PerFieldMapperCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Elasticsearch816Codec.class)); + assertThat(codecService.codec("default"), instanceOf(Elasticsearch900Codec.class)); } public void testDefault() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java index d43a1e09d71a3..12a17f5c263a8 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java @@ -115,7 +115,6 @@ public void testSortedSetDocValuesSingleUniqueValue() throws IOException { assertEquals(0, field.nextOrd()); BytesRef scratch = field.lookupOrd(0); assertEquals("value", scratch.utf8ToString()); - assertEquals(SortedSetDocValues.NO_MORE_ORDS, field.nextOrd()); } assertEquals(DocIdSetIterator.NO_MORE_DOCS, field.nextDoc()); for (int i = 0; i < NUM_DOCS; i++) { @@ -126,7 +125,6 @@ public void testSortedSetDocValuesSingleUniqueValue() throws IOException { BytesRef scratch = fieldN.lookupOrd(0); 
assertEquals("value" + i, scratch.utf8ToString()); assertEquals(DocIdSetIterator.NO_MORE_DOCS, fieldN.nextDoc()); - assertEquals(SortedSetDocValues.NO_MORE_ORDS, fieldN.nextOrd()); } } } diff --git a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatVariableSkipIntervalTests.java b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatVariableSkipIntervalTests.java new file mode 100644 index 0000000000000..099b59808ef4a --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatVariableSkipIntervalTests.java @@ -0,0 +1,196 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ +package org.elasticsearch.index.codec.tsdb; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.NumericDocValuesField; +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.index.DocValuesSkipper; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Sort; +import org.apache.lucene.search.SortField; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.BaseDocValuesFormatTestCase; +import org.apache.lucene.tests.index.RandomIndexWriter; +import org.apache.lucene.tests.util.TestUtil; + +import java.io.IOException; +import java.util.Arrays; + +/** Tests ES87TSDBDocValuesFormat with custom skipper interval size. 
*/ +public class ES87TSDBDocValuesFormatVariableSkipIntervalTests extends BaseDocValuesFormatTestCase { + + @Override + protected Codec getCodec() { + // small interval size to test with many intervals + return TestUtil.alwaysDocValuesFormat(new ES87TSDBDocValuesFormat(random().nextInt(4, 16))); + } + + public void testSkipIndexIntervalSize() { + IllegalArgumentException ex = expectThrows( + IllegalArgumentException.class, + () -> new ES87TSDBDocValuesFormat(random().nextInt(Integer.MIN_VALUE, 2)) + ); + assertTrue(ex.getMessage().contains("skipIndexIntervalSize must be > 1")); + } + + public void testSkipperAllEqualValue() throws IOException { + final IndexWriterConfig config = new IndexWriterConfig().setCodec(getCodec()); + try (Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config)) { + final int numDocs = atLeast(100); + for (int i = 0; i < numDocs; i++) { + final Document doc = new Document(); + doc.add(NumericDocValuesField.indexedField("dv", 0L)); + writer.addDocument(doc); + } + writer.forceMerge(1); + try (IndexReader reader = writer.getReader()) { + assertEquals(1, reader.leaves().size()); + final DocValuesSkipper skipper = reader.leaves().get(0).reader().getDocValuesSkipper("dv"); + assertNotNull(skipper); + skipper.advance(0); + assertEquals(0L, skipper.minValue(0)); + assertEquals(0L, skipper.maxValue(0)); + assertEquals(numDocs, skipper.docCount(0)); + skipper.advance(skipper.maxDocID(0) + 1); + assertEquals(DocIdSetIterator.NO_MORE_DOCS, skipper.minDocID(0)); + } + } + } + + // break on different value + public void testSkipperFewValuesSorted() throws IOException { + final IndexWriterConfig config = new IndexWriterConfig().setCodec(getCodec()); + boolean reverse = random().nextBoolean(); + config.setIndexSort(new Sort(new SortField("dv", SortField.Type.LONG, reverse))); + try (Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, 
config)) { + final int intervals = random().nextInt(2, 10); + final int[] numDocs = new int[intervals]; + for (int i = 0; i < intervals; i++) { + numDocs[i] = random().nextInt(10) + 16; + for (int j = 0; j < numDocs[i]; j++) { + final Document doc = new Document(); + doc.add(NumericDocValuesField.indexedField("dv", i)); + writer.addDocument(doc); + } + } + writer.forceMerge(1); + try (IndexReader reader = writer.getReader()) { + assertEquals(1, reader.leaves().size()); + final DocValuesSkipper skipper = reader.leaves().get(0).reader().getDocValuesSkipper("dv"); + assertNotNull(skipper); + assertEquals(Arrays.stream(numDocs).sum(), skipper.docCount()); + skipper.advance(0); + if (reverse) { + for (int i = intervals - 1; i >= 0; i--) { + assertEquals(i, skipper.minValue(0)); + assertEquals(i, skipper.maxValue(0)); + assertEquals(numDocs[i], skipper.docCount(0)); + skipper.advance(skipper.maxDocID(0) + 1); + } + } else { + for (int i = 0; i < intervals; i++) { + assertEquals(i, skipper.minValue(0)); + assertEquals(i, skipper.maxValue(0)); + assertEquals(numDocs[i], skipper.docCount(0)); + skipper.advance(skipper.maxDocID(0) + 1); + } + } + assertEquals(DocIdSetIterator.NO_MORE_DOCS, skipper.minDocID(0)); + } + } + } + + // break on empty doc values + public void testSkipperAllEqualValueWithGaps() throws IOException { + final IndexWriterConfig config = new IndexWriterConfig().setCodec(getCodec()); + config.setIndexSort(new Sort(new SortField("sort", SortField.Type.LONG, false))); + try (Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config)) { + final int gaps = random().nextInt(2, 10); + final int[] numDocs = new int[gaps]; + long totaldocs = 0; + for (int i = 0; i < gaps; i++) { + numDocs[i] = random().nextInt(10) + 16; + for (int j = 0; j < numDocs[i]; j++) { + final Document doc = new Document(); + doc.add(new NumericDocValuesField("sort", totaldocs++)); + 
doc.add(SortedNumericDocValuesField.indexedField("dv", 0L)); + writer.addDocument(doc); + } + // add doc with empty "dv" + final Document doc = new Document(); + doc.add(new NumericDocValuesField("sort", totaldocs++)); + writer.addDocument(doc); + } + writer.forceMerge(1); + try (IndexReader reader = writer.getReader()) { + assertEquals(1, reader.leaves().size()); + final DocValuesSkipper skipper = reader.leaves().get(0).reader().getDocValuesSkipper("dv"); + assertNotNull(skipper); + assertEquals(Arrays.stream(numDocs).sum(), skipper.docCount()); + skipper.advance(0); + for (int i = 0; i < gaps; i++) { + assertEquals(0L, skipper.minValue(0)); + assertEquals(0L, skipper.maxValue(0)); + assertEquals(numDocs[i], skipper.docCount(0)); + skipper.advance(skipper.maxDocID(0) + 1); + } + assertEquals(DocIdSetIterator.NO_MORE_DOCS, skipper.minDocID(0)); + } + } + } + + // break on multi-values + public void testSkipperAllEqualValueWithMultiValues() throws IOException { + final IndexWriterConfig config = new IndexWriterConfig().setCodec(getCodec()); + config.setIndexSort(new Sort(new SortField("sort", SortField.Type.LONG, false))); + try (Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config)) { + final int gaps = random().nextInt(2, 10); + final int[] numDocs = new int[gaps]; + long totaldocs = 0; + for (int i = 0; i < gaps; i++) { + int docs = random().nextInt(10) + 16; + numDocs[i] += docs; + for (int j = 0; j < docs; j++) { + final Document doc = new Document(); + doc.add(new NumericDocValuesField("sort", totaldocs++)); + doc.add(SortedNumericDocValuesField.indexedField("dv", 0L)); + writer.addDocument(doc); + } + if (i != gaps - 1) { + // add doc with multivalues + final Document doc = new Document(); + doc.add(new NumericDocValuesField("sort", totaldocs++)); + doc.add(SortedNumericDocValuesField.indexedField("dv", 0L)); + doc.add(SortedNumericDocValuesField.indexedField("dv", 0L)); + writer.addDocument(doc); + 
numDocs[i + 1] = 1; + } + } + writer.forceMerge(1); + try (IndexReader reader = writer.getReader()) { + assertEquals(1, reader.leaves().size()); + final DocValuesSkipper skipper = reader.leaves().get(0).reader().getDocValuesSkipper("dv"); + assertNotNull(skipper); + assertEquals(Arrays.stream(numDocs).sum(), skipper.docCount()); + skipper.advance(0); + for (int i = 0; i < gaps; i++) { + assertEquals(0L, skipper.minValue(0)); + assertEquals(0L, skipper.maxValue(0)); + assertEquals(numDocs[i], skipper.docCount(0)); + skipper.advance(skipper.maxDocID(0) + 1); + } + assertEquals(DocIdSetIterator.NO_MORE_DOCS, skipper.minDocID(0)); + } + } + } +} diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/BaseKnnBitVectorsFormatTestCase.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/BaseKnnBitVectorsFormatTestCase.java index 8f0a306e1eb3b..86b60d9984de5 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/BaseKnnBitVectorsFormatTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/BaseKnnBitVectorsFormatTestCase.java @@ -19,6 +19,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; @@ -110,8 +111,9 @@ public void testRandom() throws Exception { totalSize += vectorValues.size(); StoredFields storedFields = ctx.reader().storedFields(); int docId; - while ((docId = vectorValues.nextDoc()) != NO_MORE_DOCS) { - byte[] v = vectorValues.vectorValue(); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + while ((docId = iterator.nextDoc()) != NO_MORE_DOCS) { + byte[] v = vectorValues.vectorValue(iterator.index()); assertEquals(dimension, v.length); String idString = 
storedFields.document(docId).getField("id").stringValue(); int id = Integer.parseInt(idString); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java index aa50bc26c4443..57cca6eea86ec 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.tests.index.BaseKnnVectorsFormatTestCase; import org.elasticsearch.common.logging.LogConfigurator; @@ -24,7 +24,7 @@ public class ES813FlatVectorFormatTests extends BaseKnnVectorsFormatTestCase { @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES813FlatVectorFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java index 8cb927036588a..9069b094ee483 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.tests.index.BaseKnnVectorsFormatTestCase; import org.elasticsearch.common.logging.LogConfigurator; @@ -24,7 
+24,7 @@ public class ES813Int8FlatVectorFormatTests extends BaseKnnVectorsFormatTestCase @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES813Int8FlatVectorFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java index cee60efb57327..549a14ca6c31b 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.KnnFloatVectorField; @@ -19,6 +19,7 @@ import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.VectorSimilarityFunction; @@ -41,7 +42,7 @@ public class ES814HnswScalarQuantizedVectorsFormatTests extends BaseKnnVectorsFo @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES814HnswScalarQuantizedVectorsFormat(); @@ -68,9 +69,10 @@ public void testAddIndexesDirectory0FS() throws Exception { try (IndexReader reader = DirectoryReader.open(w2)) { LeafReader 
r = getOnlyLeafReader(reader); FloatVectorValues vectorValues = r.getFloatVectorValues(fieldName); - assertEquals(0, vectorValues.nextDoc()); - assertEquals(0, vectorValues.vectorValue()[0], 0); - assertEquals(NO_MORE_DOCS, vectorValues.nextDoc()); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + assertEquals(0, iterator.nextDoc()); + assertEquals(0, vectorValues.vectorValue(iterator.index())[0], 0); + assertEquals(NO_MORE_DOCS, iterator.nextDoc()); } } } @@ -110,12 +112,13 @@ private void testAddIndexesDirectory01FS(VectorSimilarityFunction similarityFunc try (IndexReader reader = DirectoryReader.open(w2)) { LeafReader r = getOnlyLeafReader(reader); FloatVectorValues vectorValues = r.getFloatVectorValues(fieldName); - assertEquals(0, vectorValues.nextDoc()); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + assertEquals(0, iterator.nextDoc()); // The merge order is randomized, we might get 1 first, or 2 - float value = vectorValues.vectorValue()[0]; + float value = vectorValues.vectorValue(iterator.index())[0]; assertTrue(value == 1 || value == 2); - assertEquals(1, vectorValues.nextDoc()); - value += vectorValues.vectorValue()[0]; + assertEquals(1, iterator.nextDoc()); + value += vectorValues.vectorValue(iterator.index())[0]; assertEquals(3f, value, 0); } } diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java index 90d2584feb3f2..034d428b25209 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; 
import org.apache.lucene.index.VectorSimilarityFunction; import org.junit.Before; @@ -19,7 +19,7 @@ public class ES815BitFlatVectorFormatTests extends BaseKnnBitVectorsFormatTestCa @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES815BitFlatVectorFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java index add90ea271fa1..4af6a405c7705 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.index.VectorSimilarityFunction; import org.junit.Before; @@ -19,7 +19,7 @@ public class ES815HnswBitVectorsFormatTests extends BaseKnnBitVectorsFormatTestC @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES815HnswBitVectorsFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java index 04d4ef2079b99..cef5e5358f3d5 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java @@ -20,6 +20,7 @@ package 
org.elasticsearch.index.codec.vectors; import org.apache.lucene.index.VectorSimilarityFunction; +import org.apache.lucene.search.VectorScorer; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.VectorUtil; import org.elasticsearch.common.logging.LogConfigurator; @@ -61,7 +62,7 @@ public void testScore() throws IOException { new BinaryQuantizer.QueryFactors(quantizedSum, distanceToCentroid, vl, width, normVmC, vDotC) ); - RandomAccessBinarizedByteVectorValues targetVectors = new RandomAccessBinarizedByteVectorValues() { + BinarizedByteVectorValues targetVectors = new BinarizedByteVectorValues() { @Override public float getCentroidDistance(int vectorOrd) throws IOException { return random().nextFloat(0f, 1000f); @@ -99,7 +100,7 @@ public float[] getCentroid() throws IOException { } @Override - public RandomAccessBinarizedByteVectorValues copy() throws IOException { + public BinarizedByteVectorValues copy() throws IOException { return null; } @@ -115,6 +116,16 @@ public int size() { return 1; } + @Override + public VectorScorer scorer(float[] query) throws IOException { + return null; + } + + @Override + public float[] getCorrectiveTerms(int vectorOrd) throws IOException { + return new float[0]; + } + @Override public int dimension() { return dimensions; @@ -209,7 +220,7 @@ public void testScoreEuclidean() throws IOException { new BinaryQuantizer.QueryFactors(quantizedSum, distanceToCentroid, vl, width, 0f, 0f) ); - RandomAccessBinarizedByteVectorValues targetVectors = new RandomAccessBinarizedByteVectorValues() { + BinarizedByteVectorValues targetVectors = new BinarizedByteVectorValues() { @Override public float getCentroidDistance(int vectorOrd) { return 355.78073f; @@ -375,7 +386,7 @@ public float[] getCentroid() { } @Override - public RandomAccessBinarizedByteVectorValues copy() { + public BinarizedByteVectorValues copy() { return null; } @@ -389,6 +400,16 @@ public int size() { return 1; } + @Override + public VectorScorer 
scorer(float[] query) throws IOException { + return null; + } + + @Override + public float[] getCorrectiveTerms(int vectorOrd) throws IOException { + return new float[0]; + } + @Override public int dimension() { return dimensions; @@ -806,7 +827,7 @@ public void testScoreMIP() throws IOException { new BinaryQuantizer.QueryFactors(quantizedSum, distanceToCentroid, vl, width, normVmC, vDotC) ); - RandomAccessBinarizedByteVectorValues targetVectors = new RandomAccessBinarizedByteVectorValues() { + BinarizedByteVectorValues targetVectors = new BinarizedByteVectorValues() { @Override public float getCentroidDistance(int vectorOrd) { return 0f; @@ -1617,7 +1638,7 @@ public float[] getCentroid() { } @Override - public RandomAccessBinarizedByteVectorValues copy() { + public BinarizedByteVectorValues copy() { return null; } @@ -1727,6 +1748,16 @@ public int size() { return 1; } + @Override + public VectorScorer scorer(float[] query) throws IOException { + return null; + } + + @Override + public float[] getCorrectiveTerms(int vectorOrd) throws IOException { + return new float[0]; + } + @Override public int dimension() { return dimensions; diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormatTests.java index 0892436891ff1..42f2fbb383ac9 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormatTests.java @@ -22,7 +22,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FilterCodec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.KnnFloatVectorField; 
import org.apache.lucene.index.DirectoryReader; @@ -30,6 +30,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.search.IndexSearcher; @@ -58,7 +59,7 @@ public class ES816BinaryQuantizedVectorsFormatTests extends BaseKnnVectorsFormat @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES816BinaryQuantizedVectorsFormat(); @@ -90,8 +91,8 @@ public void testSearch() throws Exception { float[] queryVector = randomVector(dims); Query q = new KnnFloatVectorQuery(fieldName, queryVector, k); TopDocs collectedDocs = searcher.search(q, k); - assertEquals(k, collectedDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, collectedDocs.totalHits.relation); + assertEquals(k, collectedDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, collectedDocs.totalHits.relation()); } } } @@ -148,7 +149,7 @@ public void testQuantizedVectorsWriteAndRead() throws IOException { LeafReader r = getOnlyLeafReader(reader); FloatVectorValues vectorValues = r.getFloatVectorValues(fieldName); assertEquals(vectorValues.size(), numVectors); - OffHeapBinarizedVectorValues qvectorValues = ((ES816BinaryQuantizedVectorsReader.BinarizedVectorValues) vectorValues) + BinarizedByteVectorValues qvectorValues = ((ES816BinaryQuantizedVectorsReader.BinarizedVectorValues) vectorValues) .getQuantizedVectorValues(); float[] centroid = qvectorValues.getCentroid(); assertEquals(centroid.length, dims); @@ -159,13 +160,18 @@ public void testQuantizedVectorsWriteAndRead() throws IOException { if (similarityFunction == VectorSimilarityFunction.COSINE) { vectorValues = new 
ES816BinaryQuantizedVectorsWriter.NormalizedFloatVectorValues(vectorValues); } - - while (vectorValues.nextDoc() != NO_MORE_DOCS) { - float[] corrections = quantizer.quantizeForIndex(vectorValues.vectorValue(), expectedVector, centroid); - assertArrayEquals(expectedVector, qvectorValues.vectorValue()); - assertEquals(corrections.length, qvectorValues.getCorrectiveTerms().length); + KnnVectorValues.DocIndexIterator docIndexIterator = vectorValues.iterator(); + + while (docIndexIterator.nextDoc() != NO_MORE_DOCS) { + float[] corrections = quantizer.quantizeForIndex( + vectorValues.vectorValue(docIndexIterator.index()), + expectedVector, + centroid + ); + assertArrayEquals(expectedVector, qvectorValues.vectorValue(docIndexIterator.index())); + assertEquals(corrections.length, qvectorValues.getCorrectiveTerms(docIndexIterator.index()).length); for (int i = 0; i < corrections.length; i++) { - assertEquals(corrections[i], qvectorValues.getCorrectiveTerms()[i], 0.00001f); + assertEquals(corrections[i], qvectorValues.getCorrectiveTerms(docIndexIterator.index())[i], 0.00001f); } } } diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormatTests.java index f607de57e1fd5..ca96e093b7b28 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormatTests.java @@ -22,7 +22,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FilterCodec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsReader; import org.apache.lucene.document.Document; import 
org.apache.lucene.document.KnnFloatVectorField; @@ -30,6 +30,7 @@ import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.search.TopDocs; @@ -55,7 +56,7 @@ public class ES816HnswBinaryQuantizedVectorsFormatTests extends BaseKnnVectorsFo @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES816HnswBinaryQuantizedVectorsFormat(); @@ -91,12 +92,13 @@ public void testSingleVectorCase() throws Exception { try (IndexReader reader = DirectoryReader.open(w)) { LeafReader r = getOnlyLeafReader(reader); FloatVectorValues vectorValues = r.getFloatVectorValues("f"); + KnnVectorValues.DocIndexIterator docIndexIterator = vectorValues.iterator(); assert (vectorValues.size() == 1); - while (vectorValues.nextDoc() != NO_MORE_DOCS) { - assertArrayEquals(vector, vectorValues.vectorValue(), 0.00001f); + while (docIndexIterator.nextDoc() != NO_MORE_DOCS) { + assertArrayEquals(vector, vectorValues.vectorValue(docIndexIterator.index()), 0.00001f); } TopDocs td = r.searchNearestVectors("f", randomVector(vector.length), 1, null, Integer.MAX_VALUE); - assertEquals(1, td.totalHits.value); + assertEquals(1, td.totalHits.value()); assertTrue(td.scoreDocs[0].score >= 0); } } diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java index c3fea6c7a189b..437ba1cecc11d 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java @@ -10,7 +10,7 @@ 
package org.elasticsearch.index.codec.zstd; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.DirectoryReader; @@ -35,13 +35,13 @@ public class StoredFieldCodecDuelTests extends ESTestCase { private static final String DOUBLE_FIELD = "double_field_5"; public void testDuelBestSpeed() throws IOException { - var baseline = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); + var baseline = new LegacyPerFieldMapperCodec(Lucene100Codec.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); var contender = new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); doTestDuel(baseline, contender); } public void testDuelBestCompression() throws IOException { - var baseline = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); + var baseline = new LegacyPerFieldMapperCodec(Lucene100Codec.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); var contender = new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); doTestDuel(baseline, contender); } diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java index 71c7464657e72..77a7585e3b518 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java @@ -11,11 +11,11 @@ import org.apache.lucene.codecs.Codec; import 
org.apache.lucene.tests.index.BaseStoredFieldsFormatTestCase; -import org.elasticsearch.index.codec.Elasticsearch816Codec; +import org.elasticsearch.index.codec.Elasticsearch900Codec; public class Zstd814BestCompressionStoredFieldsFormatTests extends BaseStoredFieldsFormatTestCase { - private final Codec codec = new Elasticsearch816Codec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION); + private final Codec codec = new Elasticsearch900Codec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION); @Override protected Codec getCodec() { diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java index 02a1b10697907..3d6cfea70d121 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java @@ -11,11 +11,11 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.tests.index.BaseStoredFieldsFormatTestCase; -import org.elasticsearch.index.codec.Elasticsearch816Codec; +import org.elasticsearch.index.codec.Elasticsearch900Codec; public class Zstd814BestSpeedStoredFieldsFormatTests extends BaseStoredFieldsFormatTestCase { - private final Codec codec = new Elasticsearch816Codec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); + private final Codec codec = new Elasticsearch900Codec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); @Override protected Codec getCodec() { diff --git a/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java b/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java index 6565a11a860ec..6d205a22433b4 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java @@ -9,7 
+9,7 @@ package org.elasticsearch.index.engine; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; @@ -45,7 +45,7 @@ public void testExceptionsAreNotCached() { public void testCompletionStatsCache() throws IOException, InterruptedException { final IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); final PostingsFormat postingsFormat = new Completion912PostingsFormat(); - indexWriterConfig.setCodec(new Lucene912Codec() { + indexWriterConfig.setCodec(new Lucene100Codec() { @Override public PostingsFormat getPostingsFormatForField(String field) { return postingsFormat; // all fields are suggest fields diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index c8ca3d17de797..21aefd893de70 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -266,7 +266,7 @@ public void testVersionMapAfterAutoIDDocument() throws IOException { try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { assertEquals(1, searcher.getIndexReader().numDocs()); TopDocs search = searcher.search(new MatchAllDocsQuery(), 1); - org.apache.lucene.document.Document luceneDoc = searcher.doc(search.scoreDocs[0].doc); + org.apache.lucene.document.Document luceneDoc = searcher.storedFields().document(search.scoreDocs[0].doc); assertEquals("test", luceneDoc.get("value")); } @@ -279,7 +279,7 @@ public void testVersionMapAfterAutoIDDocument() throws IOException { try (Engine.Searcher searcher = engine.acquireSearcher("test")) { assertEquals(1, 
searcher.getIndexReader().numDocs()); TopDocs search = searcher.search(new MatchAllDocsQuery(), 1); - org.apache.lucene.document.Document luceneDoc = searcher.doc(search.scoreDocs[0].doc); + org.apache.lucene.document.Document luceneDoc = searcher.storedFields().document(search.scoreDocs[0].doc); assertEquals("updated", luceneDoc.get("value")); } @@ -640,7 +640,7 @@ public void testTranslogMultipleOperationsSameDocument() throws IOException { recoverFromTranslog(recoveringEngine, translogHandler, Long.MAX_VALUE); recoveringEngine.refresh("test"); try (Engine.Searcher searcher = recoveringEngine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); assertThat(totalHits, equalTo(operations.get(operations.size() - 1) instanceof Engine.Delete ? 0 : 1)); } } @@ -747,7 +747,7 @@ public void testTranslogRecoveryWithMultipleGenerations() throws IOException { recoveringEngine.refresh("test"); try (Engine.Searcher searcher = recoveringEngine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), docs); - assertEquals(docs, topDocs.totalHits.value); + assertEquals(docs, topDocs.totalHits.value()); } } finally { IOUtils.close(initialEngine, recoveringEngine, store); @@ -2010,7 +2010,7 @@ public void testConcurrentOutOfOrderDocsOnReplica() throws IOException, Interrup try (Engine.Searcher searcher = engine.acquireSearcher("test")) { Integer totalHits = searcher.search( new TermQuery(new Term("value", lastFieldValueDoc1)), - new TotalHitCountCollectorManager() + new TotalHitCountCollectorManager(searcher.getSlices()) ); assertThat(totalHits, equalTo(1)); } @@ -2019,7 +2019,7 @@ public void testConcurrentOutOfOrderDocsOnReplica() throws IOException, Interrup try (Engine.Searcher searcher = engine.acquireSearcher("test")) { Integer totalHits = 
searcher.search( new TermQuery(new Term("value", lastFieldValueDoc2)), - new TotalHitCountCollectorManager() + new TotalHitCountCollectorManager(searcher.getSlices()) ); assertThat(totalHits, equalTo(1)); } @@ -2249,7 +2249,7 @@ private int assertOpsOnPrimary(List ops, long currentOpVersion try (Engine.Searcher searcher = engine.acquireSearcher("test")) { Integer totalHits = searcher.search( new TermQuery(new Term("value", lastFieldValue)), - new TotalHitCountCollectorManager() + new TotalHitCountCollectorManager(searcher.getSlices()) ); assertThat(totalHits, equalTo(1)); } @@ -2275,7 +2275,10 @@ private int assertOpsOnPrimary(List ops, long currentOpVersion assertVisibleCount(engine, docDeleted ? 0 : 1); if (docDeleted == false) { try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new TermQuery(new Term("value", lastFieldValue)), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search( + new TermQuery(new Term("value", lastFieldValue)), + new TotalHitCountCollectorManager(searcher.getSlices()) + ); assertThat(totalHits, equalTo(1)); } } @@ -2361,7 +2364,10 @@ public void testNonInternalVersioningOnPrimary() throws IOException { if (docDeleted == false) { logger.info("searching for [{}]", lastFieldValue); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new TermQuery(new Term("value", lastFieldValue)), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search( + new TermQuery(new Term("value", lastFieldValue)), + new TotalHitCountCollectorManager(searcher.getSlices()) + ); assertThat(totalHits, equalTo(1)); } } @@ -2378,7 +2384,7 @@ public void testVersioningPromotedReplica() throws IOException { final int opsOnPrimary = assertOpsOnPrimary(primaryOps, finalReplicaVersion, deletedOnReplica, replicaEngine); final long currentSeqNo = getSequenceID(replicaEngine, new Engine.Get(false, false, 
Term.toString(lastReplicaOp.uid()))).v1(); try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); if (totalHits > 0) { // last op wasn't delete assertThat(currentSeqNo, equalTo(finalReplicaSeqNo + opsOnPrimary)); @@ -2402,7 +2408,10 @@ public void testConcurrentExternalVersioningOnPrimary() throws IOException, Inte assertVisibleCount(engine, lastFieldValue == null ? 0 : 1); if (lastFieldValue != null) { try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new TermQuery(new Term("value", lastFieldValue)), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search( + new TermQuery(new Term("value", lastFieldValue)), + new TotalHitCountCollectorManager(searcher.getSlices()) + ); assertThat(totalHits, equalTo(1)); } } @@ -2434,7 +2443,7 @@ class OpAndVersion { Engine.Get engineGet = new Engine.Get(true, false, doc.id()); try (Engine.GetResult get = engine.get(engineGet, mappingLookup, documentParser, randomSearcherWrapper())) { FieldsVisitor visitor = new FieldsVisitor(true); - get.docIdAndVersion().reader.document(get.docIdAndVersion().docId, visitor); + get.docIdAndVersion().reader.storedFields().document(get.docIdAndVersion().docId, visitor); List values = new ArrayList<>(Strings.commaDelimitedListToSet(visitor.source().utf8ToString())); String removed = op % 3 == 0 && values.size() > 0 ? 
values.remove(0) : null; String added = "v_" + idGenerator.incrementAndGet(); @@ -2480,7 +2489,7 @@ class OpAndVersion { Engine.GetResult get = engine.get(new Engine.Get(true, false, doc.id()), mappingLookup, documentParser, randomSearcherWrapper()) ) { FieldsVisitor visitor = new FieldsVisitor(true); - get.docIdAndVersion().reader.document(get.docIdAndVersion().docId, visitor); + get.docIdAndVersion().reader.storedFields().document(get.docIdAndVersion().docId, visitor); List values = Arrays.asList(Strings.commaDelimitedListToStringArray(visitor.source().utf8ToString())); assertThat(currentValues, equalTo(new HashSet<>(values))); } @@ -3424,7 +3433,7 @@ public void testSkipTranslogReplay() throws IOException { engine.skipTranslogRecovery(); try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + 10)); - assertThat(topDocs.totalHits.value, equalTo(0L)); + assertThat(topDocs.totalHits.value(), equalTo(0L)); } } } @@ -3514,7 +3523,7 @@ public void testTranslogReplay() throws IOException { assertThat(result.getVersion(), equalTo(2L)); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), numDocs + 1); - assertThat(topDocs.totalHits.value, equalTo(numDocs + 1L)); + assertThat(topDocs.totalHits.value(), equalTo(numDocs + 1L)); } engine.close(); @@ -3523,7 +3532,7 @@ public void testTranslogReplay() throws IOException { engine.refresh("warm_up"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), numDocs + 1); - assertThat(topDocs.totalHits.value, equalTo(numDocs + 1L)); + assertThat(topDocs.totalHits.value(), equalTo(numDocs + 1L)); } assertEquals(flush ? 
1 : 2, translogHandler.appliedOperations()); engine.delete(new Engine.Delete(Integer.toString(randomId), newUid(doc), primaryTerm.get())); @@ -3534,7 +3543,7 @@ public void testTranslogReplay() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), numDocs); - assertThat(topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(topDocs.totalHits.value(), equalTo((long) numDocs)); } } @@ -3890,7 +3899,7 @@ public void testDoubleDeliveryPrimary() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } operation = appendOnlyPrimary(doc, false, 1, create); retry = appendOnlyPrimary(doc, true, 1, create); @@ -3925,7 +3934,7 @@ public void testDoubleDeliveryPrimary() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } } @@ -3983,7 +3992,7 @@ public void testDoubleDeliveryReplicaAppendingAndDeleteOnly() throws IOException engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(0, topDocs.totalHits.value); + assertEquals(0, topDocs.totalHits.value()); } } @@ -4007,7 +4016,7 @@ public void testDoubleDeliveryReplicaAppendingOnly() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } final boolean create = 
randomBoolean(); @@ -4047,7 +4056,7 @@ public void testDoubleDeliveryReplicaAppendingOnly() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } } @@ -4092,12 +4101,12 @@ public void testDoubleDeliveryReplica() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) { List ops = readAllOperationsInLucene(engine); @@ -4172,7 +4181,7 @@ public void testRetryWithAutogeneratedIdWorksAndNoDuplicateDocs() throws IOExcep engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } index = new Engine.Index( @@ -4194,7 +4203,7 @@ public void testRetryWithAutogeneratedIdWorksAndNoDuplicateDocs() throws IOExcep replicaEngine.refresh("test"); try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } } @@ -4264,7 +4273,7 @@ public void testRetryWithAutogeneratedIdsAndWrongOrderWorksAndNoDuplicateDocs() engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs 
= searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } Engine.Index secondIndexRequestReplica = new Engine.Index( @@ -4285,7 +4294,7 @@ public void testRetryWithAutogeneratedIdsAndWrongOrderWorksAndNoDuplicateDocs() replicaEngine.refresh("test"); try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } } @@ -5678,7 +5687,7 @@ public void testConcurrentAppendUpdateAndRefresh() throws InterruptedException, try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { TopDocs search = searcher.search(new MatchAllDocsQuery(), searcher.getIndexReader().numDocs()); for (int i = 0; i < search.scoreDocs.length; i++) { - org.apache.lucene.document.Document luceneDoc = searcher.doc(search.scoreDocs[i].doc); + org.apache.lucene.document.Document luceneDoc = searcher.storedFields().document(search.scoreDocs[i].doc); assertEquals("updated", luceneDoc.get("value")); } int totalNumDocs = numDocs - numDeletes.get(); @@ -6666,7 +6675,7 @@ public void testStoreHonorsLuceneVersion() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { LeafReader leafReader = getOnlyLeafReader(searcher.getIndexReader()); - assertEquals(createdVersion.luceneVersion().major, leafReader.getMetaData().getCreatedVersionMajor()); + assertEquals(createdVersion.luceneVersion().major, leafReader.getMetaData().createdVersionMajor()); } } } diff --git a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java index a3a21fc32e546..b6be13b9f2513 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.tests.util.TestUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.Constants; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Tuple; @@ -71,22 +70,16 @@ public void testRamBytesUsed() throws Exception { } actualRamBytesUsed = RamUsageTester.ramUsed(map); estimatedRamBytesUsed = map.ramBytesUsed(); - long tolerance; - if (Constants.JRE_IS_MINIMUM_JAVA9) { - // With Java 9, RamUsageTester computes the memory usage of maps as - // the memory usage of an array that would contain exactly all keys - // and values. This is an under-estimation of the actual memory - // usage since it ignores the impact of the load factor and of the - // linked list/tree that is used to resolve collisions. So we use a - // bigger tolerance. - // less than 50% off - tolerance = actualRamBytesUsed / 2; - } else { - // Java 8 is more accurate by doing reflection into the actual JDK classes - // so we give it a lower error bound. - // less than 25% off - tolerance = actualRamBytesUsed / 4; - } + + // Since Java 9, RamUsageTester computes the memory usage of maps as + // the memory usage of an array that would contain exactly all keys + // and values. This is an under-estimation of the actual memory + // usage since it ignores the impact of the load factor and of the + // linked list/tree that is used to resolve collisions. So we use a + // bigger tolerance. 
+ // less than 50% off + long tolerance = actualRamBytesUsed / 2; + assertEquals(actualRamBytesUsed, estimatedRamBytesUsed, tolerance); } diff --git a/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java b/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java index e7e668415cdd4..c0e365909429a 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.SegmentCommitInfo; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.StandardDirectoryReader; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.MatchAllDocsQuery; @@ -69,8 +70,9 @@ public void testPruneAll() throws IOException { writer.forceMerge(1); writer.commit(); try (DirectoryReader reader = DirectoryReader.open(writer)) { + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < reader.maxDoc(); i++) { - Document document = reader.document(i); + Document document = storedFields.document(i); if (pruneIdField) { assertEquals(1, document.getFields().size()); assertEquals("source", document.getFields().get(0).name()); @@ -151,8 +153,9 @@ public void testPruneSome() throws IOException { assertEquals(1, reader.leaves().size()); NumericDocValues extra_source = reader.leaves().get(0).reader().getNumericDocValues("extra_source"); assertNotNull(extra_source); + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < reader.maxDoc(); i++) { - Document document = reader.document(i); + Document document = storedFields.document(i); Set collect = document.getFields().stream().map(IndexableField::name).collect(Collectors.toSet()); assertTrue(collect.contains("source")); 
assertTrue(collect.contains("even")); @@ -192,8 +195,9 @@ public void testPruneNone() throws IOException { assertEquals(1, reader.leaves().size()); NumericDocValues extra_source = reader.leaves().get(0).reader().getNumericDocValues("extra_source"); assertNotNull(extra_source); + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < reader.maxDoc(); i++) { - Document document = reader.document(i); + Document document = storedFields.document(i); Set collect = document.getFields().stream().map(IndexableField::name).collect(Collectors.toSet()); assertTrue(collect.contains("source")); assertTrue(collect.contains("extra_source")); diff --git a/server/src/test/java/org/elasticsearch/index/engine/SegmentTests.java b/server/src/test/java/org/elasticsearch/index/engine/SegmentTests.java index a6e56c4137028..49036324e722e 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/SegmentTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/SegmentTests.java @@ -80,7 +80,7 @@ static Segment randomSegment() { segment.sizeInBytes = randomNonNegativeLong(); segment.docCount = randomIntBetween(1, Integer.MAX_VALUE); segment.delDocCount = randomIntBetween(0, segment.docCount); - segment.version = Version.LUCENE_8_0_0; + segment.version = Version.LUCENE_9_0_0; segment.compound = randomBoolean(); segment.mergeId = randomAlphaOfLengthBetween(1, 10); segment.segmentSort = randomIndexSort(); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java index b7793a644f8b8..9d0a9cdeb1968 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java @@ -113,7 +113,7 @@ public void testSingleValueAllSet() throws Exception { TopFieldDocs topDocs; SortField sortField = 
indexFieldData.sortField(null, MultiValueMode.MIN, null, false); topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs[0].doc, equalTo(1)); assertThat(toString(((FieldDoc) topDocs.scoreDocs[0]).fields[0]), equalTo(one())); assertThat(topDocs.scoreDocs[1].doc, equalTo(0)); @@ -123,7 +123,7 @@ public void testSingleValueAllSet() throws Exception { sortField = indexFieldData.sortField(null, MultiValueMode.MAX, null, true); topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs[0].doc, equalTo(2)); assertThat(topDocs.scoreDocs[1].doc, equalTo(0)); assertThat(topDocs.scoreDocs[2].doc, equalTo(1)); @@ -193,14 +193,14 @@ public void testMultiValueAllSet() throws Exception { IndexSearcher searcher = newIndexSearcher(DirectoryReader.open(writer)); SortField sortField = indexFieldData.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, equalTo(1)); assertThat(topDocs.scoreDocs[1].doc, equalTo(0)); assertThat(topDocs.scoreDocs[2].doc, equalTo(2)); sortField = indexFieldData.sortField(null, MultiValueMode.MAX, null, true); topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, equalTo(0)); assertThat(topDocs.scoreDocs[1].doc, equalTo(2)); @@ -258,7 +258,7 @@ public void 
testSortMultiValuesFields() throws Exception { IndexSearcher searcher = newIndexSearcher(DirectoryReader.open(writer)); SortField sortField = indexFieldData.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(8L)); + assertThat(topDocs.totalHits.value(), equalTo(8L)); assertThat(topDocs.scoreDocs.length, equalTo(8)); assertThat(topDocs.scoreDocs[0].doc, equalTo(7)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("!08")); @@ -279,7 +279,7 @@ public void testSortMultiValuesFields() throws Exception { sortField = indexFieldData.sortField(null, MultiValueMode.MAX, null, true); topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(8L)); + assertThat(topDocs.totalHits.value(), equalTo(8L)); assertThat(topDocs.scoreDocs.length, equalTo(8)); assertThat(topDocs.scoreDocs[0].doc, equalTo(6)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("10")); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java index ac77f147a7ce6..48d6cabefe345 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java @@ -262,10 +262,10 @@ public void testActualMissingValue(boolean reverse) throws IOException { randomBoolean() ? numDocs : randomIntBetween(10, numDocs), new Sort(sortField) ); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); BytesRef previousValue = reverse ? 
UnicodeUtil.BIG_TERM : new BytesRef(); for (int i = 0; i < topDocs.scoreDocs.length; ++i) { - final String docValue = searcher.doc(topDocs.scoreDocs[i].doc).get("value"); + final String docValue = searcher.storedFields().document(topDocs.scoreDocs[i].doc).get("value"); final BytesRef value = new BytesRef(docValue == null ? missingValue : docValue); if (reverse) { assertTrue(previousValue.compareTo(value) >= 0); @@ -321,10 +321,10 @@ public void testSortMissing(boolean first, boolean reverse) throws IOException { new Sort(sortField) ); - assertThat(topDocs.totalHits.value, lessThanOrEqualTo((long) numDocs)); + assertThat(topDocs.totalHits.value(), lessThanOrEqualTo((long) numDocs)); BytesRef previousValue = first ? null : reverse ? UnicodeUtil.BIG_TERM : new BytesRef(); for (int i = 0; i < topDocs.scoreDocs.length; ++i) { - final String docValue = searcher.doc(topDocs.scoreDocs[i].doc).get("value"); + final String docValue = searcher.storedFields().document(topDocs.scoreDocs[i].doc).get("value"); if (first && docValue == null) { assertNull(previousValue); } else if (first == false && docValue != null) { @@ -414,7 +414,7 @@ public void testNestedSorting(MultiValueMode sortMode) throws IOException { assertTrue("expected " + docID + " to be a parent", parents.get(docID)); BytesRef cmpValue = null; for (int child = parents.prevSetBit(docID - 1) + 1; child < docID; ++child) { - String[] sVals = searcher.doc(child).getValues("text"); + String[] sVals = searcher.storedFields().document(child).getValues("text"); final BytesRef[] vals; if (sVals.length == 0) { vals = new BytesRef[0]; @@ -498,15 +498,11 @@ public void testGlobalOrdinals() throws Exception { ord = values.nextOrd(); assertThat(ord, equalTo(5L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("04")); - ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); assertFalse(values.advanceExact(1)); assertTrue(values.advanceExact(2)); ord = values.nextOrd(); assertThat(ord, 
equalTo(4L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("03")); - ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); // Second segment leaf = topLevelReader.leaves().get(1); @@ -522,8 +518,6 @@ public void testGlobalOrdinals() throws Exception { ord = values.nextOrd(); assertThat(ord, equalTo(7L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("06")); - ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); assertTrue(values.advanceExact(1)); ord = values.nextOrd(); assertThat(ord, equalTo(7L)); @@ -534,8 +528,6 @@ public void testGlobalOrdinals() throws Exception { ord = values.nextOrd(); assertThat(ord, equalTo(9L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("08")); - ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); assertFalse(values.advanceExact(2)); assertTrue(values.advanceExact(3)); ord = values.nextOrd(); @@ -547,8 +539,6 @@ public void testGlobalOrdinals() throws Exception { ord = values.nextOrd(); assertThat(ord, equalTo(11L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("10")); - ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); // Third segment leaf = topLevelReader.leaves().get(2); @@ -564,8 +554,6 @@ public void testGlobalOrdinals() throws Exception { ord = values.nextOrd(); assertThat(ord, equalTo(2L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("!10")); - ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); } public void testTermsEnum() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java index cb6732ce8bb7d..aa23dc6da19df 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java @@ -108,7 +108,6 @@ public int compare(OrdAndId o1, OrdAndId o2) { for (Long ord : docOrds) { assertThat(docs.nextOrd(), equalTo(ord)); } - assertEquals(SortedSetDocValues.NO_MORE_ORDS, docs.nextOrd()); } for (int i = docId + 1; i < ordAndId.id; i++) { assertFalse(singleOrds.advanceExact(i)); @@ -257,7 +256,6 @@ private void assertEquals(SortedSetDocValues docs, long[][] ordinalPlan) throws for (long ord : ords) { assertThat(docs.nextOrd(), equalTo(ord)); } - assertThat(docs.nextOrd(), equalTo(SortedSetDocValues.NO_MORE_ORDS)); } } } diff --git a/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java b/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java index 86ddbbbc97599..81be71aec23c8 100644 --- a/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java +++ b/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java @@ -47,7 +47,7 @@ public void testQueryRewriteOnRefresh() throws Exception { .setSearchType(SearchType.QUERY_THEN_FETCH) .setSize(0) .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")), - r1 -> assertThat(r1.getHits().getTotalHits().value, equalTo(3L)) + r1 -> assertThat(r1.getHits().getTotalHits().value(), equalTo(3L)) ); assertRequestCacheStats(0, 1); @@ -57,7 +57,7 @@ public void testQueryRewriteOnRefresh() throws Exception { .setSearchType(SearchType.QUERY_THEN_FETCH) .setSize(0) .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")), - r2 -> assertThat(r2.getHits().getTotalHits().value, equalTo(3L)) + r2 -> assertThat(r2.getHits().getTotalHits().value(), equalTo(3L)) ); assertRequestCacheStats(1, 1); @@ -72,7 +72,7 @@ public void testQueryRewriteOnRefresh() throws Exception { .setSearchType(SearchType.QUERY_THEN_FETCH) .setSize(0) .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")), - r3 -> 
assertThat(r3.getHits().getTotalHits().value, equalTo(5L)) + r3 -> assertThat(r3.getHits().getTotalHits().value(), equalTo(5L)) ); assertRequestCacheStats(1, 2); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java index ddba993fd41cc..4aa983a78b07b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java @@ -14,6 +14,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; @@ -121,8 +122,15 @@ public void testSort() throws IOException { BooleanScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); - assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [false]}")); - assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [true]}")); + StoredFields storedFields = reader.storedFields(); + assertThat( + storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [false]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [true]}") + ); } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java index 2c78f5f7fee20..f55d213bea581 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java @@ -38,25 +38,25 @@ public void testDoubleIndexingSameDoc() throws Exception { }, reader -> { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(mapperService.fieldType("field1").termQuery("value1", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field2").termQuery("1", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field3").termQuery("1.1", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field4").termQuery("2010-01-01", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field5").termQuery("1", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field5").termQuery("2", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field5").termQuery("3", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); }); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java index 48879cdd0d77e..140137015d98a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; @@ -120,9 +121,19 @@ public void testSort() throws IOException { DoubleScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); - assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [1.1]}")); - assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [2.1]}")); - assertThat(reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [4.2]}")); + StoredFields storedFields = reader.storedFields(); + assertThat( + storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [1.1]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [2.1]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [4.2]}") + ); } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java index 4284bc00cfc15..0182da8ade48a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java @@ -78,5 +78,4 @@ public void testUsingEnabledSettingThrows() { ex.getMessage() ); } - } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java index 72055940b8970..4cc447d97291c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java @@ -19,6 +19,8 @@ import org.elasticsearch.test.ESTestCase; import org.mockito.Mockito; +import java.util.List; + public class IdFieldTypeTests extends ESTestCase { public void testRangeQuery() { @@ -49,7 +51,7 @@ public void testTermsQuery() { Mockito.when(context.indexVersionCreated()).thenReturn(IndexVersion.current()); MappedFieldType ft = new ProvidedIdFieldMapper.IdFieldType(() -> false); Query query = ft.termQuery("id", context); - assertEquals(new TermInSetQuery("_id", Uid.encodeId("id")), query); + assertEquals(new TermInSetQuery("_id", List.of(Uid.encodeId("id"))), query); } public void testIsAggregatable() { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtilTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtilTests.java index 4170adf0a8508..8f209fb78fc64 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtilTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtilTests.java @@ -230,13 +230,8 @@ public void testAutomatonFromIPv6Group() throws UnknownHostException { } private static CompiledAutomaton compileAutomaton(Automaton automaton) { - CompiledAutomaton compiledAutomaton 
= new CompiledAutomaton( - automaton, - null, - false, - Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, - true - ); + automaton = Operations.determinize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + CompiledAutomaton compiledAutomaton = new CompiledAutomaton(automaton, false, false, true); return compiledAutomaton; } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java index acbfe8b8f9b38..281d2993fa29c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java @@ -14,6 +14,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; @@ -125,16 +126,17 @@ public void testSort() throws IOException { BinaryScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); + StoredFields storedFields = reader.storedFields(); assertThat( - reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"192.168.0.1\"]}") ); assertThat( - reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"192.168.0.2\"]}") ); assertThat( - reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), + 
storedFields.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"192.168.0.4\"]}") ); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java index b4c7ea0ed9508..e3bdb3d45818f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java @@ -110,7 +110,7 @@ protected TokenStream normalize(String fieldName, TokenStream in) { public void testTermsQuery() { MappedFieldType ft = new KeywordFieldType("field"); - BytesRef[] terms = new BytesRef[] { new BytesRef("foo"), new BytesRef("bar") }; + List terms = List.of(new BytesRef("foo"), new BytesRef("bar")); assertEquals(new TermInSetQuery("field", terms), ft.termsQuery(Arrays.asList("foo", "bar"), MOCK_CONTEXT)); MappedFieldType ft2 = new KeywordFieldType("field", false, true, Map.of()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java index f2e788918010c..57d52991a6442 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; @@ -115,9 +116,19 @@ public void testSort() throws IOException { BinaryScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, 
MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); - assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"a\"]}")); - assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"b\"]}")); - assertThat(reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"d\"]}")); + StoredFields storedFields = reader.storedFields(); + assertThat( + storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [\"a\"]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [\"b\"]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [\"d\"]}") + ); } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java index 40357399cab5b..a8cb4d51c5efa 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java @@ -14,6 +14,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; @@ -132,9 +133,19 @@ public void testSort() throws IOException { LongScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new 
MatchAllDocsQuery(), 3, new Sort(sf)); - assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [1]}")); - assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [2]}")); - assertThat(reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [4]}")); + StoredFields storedFields = reader.storedFields(); + assertThat( + storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [1]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [2]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [4]}") + ); } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java index 5360215b5b05b..836b791af23c1 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java @@ -80,7 +80,7 @@ public void testBytesAndNumericRepresentation() throws Exception { "field10" ); CustomFieldsVisitor fieldsVisitor = new CustomFieldsVisitor(fieldNames, false); - searcher.doc(0, fieldsVisitor); + searcher.storedFields().document(0, fieldsVisitor); fieldsVisitor.postProcess(mapperService::fieldType); assertThat(fieldsVisitor.fields().size(), equalTo(10)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index c8fcf486068c4..86914cfe9ced7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -879,7 +879,7 @@ protected TokenStreamComponents createComponents(String fieldName) { IndexSearcher searcher = newSearcher(ir); MatchPhraseQueryBuilder queryBuilder = new MatchPhraseQueryBuilder("field", "Prio 1"); TopDocs td = searcher.search(queryBuilder.toQuery(searchExecutionContext), 1); - assertEquals(1, td.totalHits.value); + assertEquals(1, td.totalHits.value()); }); Exception e = expectThrows( diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedFieldTypeTests.java index 5a34886d73db7..c8d7ad8127b55 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedFieldTypeTests.java @@ -81,7 +81,7 @@ public void testTermQuery() { public void testTermsQuery() { KeyedFlattenedFieldType ft = createFieldType(); - Query expected = new TermInSetQuery(ft.name(), new BytesRef("key\0value1"), new BytesRef("key\0value2")); + Query expected = new TermInSetQuery(ft.name(), List.of(new BytesRef("key\0value1"), new BytesRef("key\0value2"))); List terms = new ArrayList<>(); terms.add("value1"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldDataTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldDataTests.java index f494af259c504..b52192d6e47b4 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldDataTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldDataTests.java @@ -23,8 +23,6 @@ import java.io.IOException; -import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; - public class KeyedFlattenedLeafFieldDataTests extends 
ESTestCase { private LeafOrdinalsFieldData delegate; @@ -121,7 +119,8 @@ public void testNextOrd() throws IOException { docValues.advanceExact(0); int retrievedOrds = 0; - for (long ord = docValues.nextOrd(); ord != NO_MORE_ORDS; ord = docValues.nextOrd()) { + for (int i = 0; i < docValues.docValueCount(); i++) { + long ord = docValues.nextOrd(); assertTrue(0 <= ord && ord < 10); retrievedOrds++; @@ -190,9 +189,7 @@ public boolean advanceExact(int docID) { @Override public long nextOrd() { - if (index == documentOrds.length) { - return NO_MORE_ORDS; - } + assertTrue(index < documentOrds.length); return documentOrds[index++]; } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java index b2ffb779be00b..de4ab0bc5df30 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.index.mapper.vectors; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.NumericDocValues; import org.elasticsearch.test.ESTestCase; @@ -25,7 +26,7 @@ public void testEmptyVectors() throws IOException { wrap(new float[0][0]), wrapMagnitudes(new float[0]) ); - assertEquals(NO_MORE_DOCS, normalizedCosineFloatVectorValues.nextDoc()); + assertEquals(NO_MORE_DOCS, normalizedCosineFloatVectorValues.iterator().nextDoc()); } public void testRandomVectors() throws IOException { @@ -47,9 +48,10 @@ public void testRandomVectors() throws IOException { wrapMagnitudes(magnitudes) ); + KnnVectorValues.DocIndexIterator iterator = normalizedCosineFloatVectorValues.iterator(); for (int i = 0; i < numVectors; i++) { - assertEquals(i, normalizedCosineFloatVectorValues.advance(i)); - 
assertArrayEquals(vectors[i], normalizedCosineFloatVectorValues.vectorValue(), (float) 1e-6); + assertEquals(i, iterator.advance(i)); + assertArrayEquals(vectors[i], normalizedCosineFloatVectorValues.vectorValue(iterator.index()), (float) 1e-6); assertEquals(magnitudes[i], normalizedCosineFloatVectorValues.magnitude(), (float) 1e-6); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java index c007156c806eb..baade683a90fd 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java @@ -208,7 +208,41 @@ public int size() { } @Override - public byte[] vectorValue() { + public DocIndexIterator iterator() { + return new DocIndexIterator() { + @Override + public int index() { + return index; + } + + @Override + public int docID() { + return index; + } + + @Override + public int nextDoc() { + throw new UnsupportedOperationException(); + } + + @Override + public int advance(int target) { + if (target >= size()) { + return NO_MORE_DOCS; + } + return index = target; + } + + @Override + public long cost() { + return 0; + } + }; + } + + @Override + public byte[] vectorValue(int ord) { + assert ord == index; for (int i = 0; i < byteVector.length; i++) { byteVector[i] = (byte) vectors[index][i]; } @@ -216,25 +250,12 @@ public byte[] vectorValue() { } @Override - public int docID() { - return index; - } - - @Override - public int nextDoc() { + public ByteVectorValues copy() { throw new UnsupportedOperationException(); } @Override - public int advance(int target) { - if (target >= size()) { - return NO_MORE_DOCS; - } - return index = target; - } - - @Override - public VectorScorer scorer(byte[] floats) throws IOException { + public VectorScorer scorer(byte[] floats) 
{ throw new UnsupportedOperationException(); } }; @@ -256,30 +277,51 @@ public int size() { } @Override - public float[] vectorValue() { - return vectors[index]; - } - - @Override - public int docID() { - return index; + public DocIndexIterator iterator() { + return new DocIndexIterator() { + @Override + public int index() { + return index; + } + + @Override + public int docID() { + return index; + } + + @Override + public int nextDoc() throws IOException { + return advance(index + 1); + } + + @Override + public int advance(int target) throws IOException { + if (target >= size()) { + return NO_MORE_DOCS; + } + return index = target; + } + + @Override + public long cost() { + return 0; + } + }; } @Override - public int nextDoc() { - return advance(index + 1); + public float[] vectorValue(int ord) { + assert ord == index; + return vectors[index]; } @Override - public int advance(int target) { - if (target >= size()) { - return NO_MORE_DOCS; - } - return index = target; + public FloatVectorValues copy() { + throw new UnsupportedOperationException(); } @Override - public VectorScorer scorer(float[] floats) throws IOException { + public VectorScorer scorer(float[] floats) { throw new UnsupportedOperationException(); } }; diff --git a/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java index 0fa8f70525e8a..e9ef3ac8ad748 100644 --- a/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java @@ -207,15 +207,15 @@ public void testMinShouldMatchFilterWithoutShouldClauses() throws Exception { assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(0)); assertThat(booleanQuery.clauses().size(), equalTo(1)); BooleanClause booleanClause = booleanQuery.clauses().get(0); - assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.FILTER)); - 
assertThat(booleanClause.getQuery(), instanceOf(BooleanQuery.class)); - BooleanQuery innerBooleanQuery = (BooleanQuery) booleanClause.getQuery(); + assertThat(booleanClause.occur(), equalTo(BooleanClause.Occur.FILTER)); + assertThat(booleanClause.query(), instanceOf(BooleanQuery.class)); + BooleanQuery innerBooleanQuery = (BooleanQuery) booleanClause.query(); // we didn't set minimum should match initially, there are no should clauses so it should be 0 assertThat(innerBooleanQuery.getMinimumNumberShouldMatch(), equalTo(0)); assertThat(innerBooleanQuery.clauses().size(), equalTo(1)); BooleanClause innerBooleanClause = innerBooleanQuery.clauses().get(0); - assertThat(innerBooleanClause.getOccur(), equalTo(BooleanClause.Occur.MUST)); - assertThat(innerBooleanClause.getQuery(), instanceOf(MatchAllDocsQuery.class)); + assertThat(innerBooleanClause.occur(), equalTo(BooleanClause.Occur.MUST)); + assertThat(innerBooleanClause.query(), instanceOf(MatchAllDocsQuery.class)); } public void testMinShouldMatchBiggerThanNumberOfShouldClauses() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/query/CombinedFieldsQueryParsingTests.java b/server/src/test/java/org/elasticsearch/index/query/CombinedFieldsQueryParsingTests.java index daaa0c4653d7a..f09e524faf8ff 100644 --- a/server/src/test/java/org/elasticsearch/index/query/CombinedFieldsQueryParsingTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/CombinedFieldsQueryParsingTests.java @@ -143,8 +143,8 @@ public void testWildcardFieldPattern() throws Exception { BooleanQuery booleanQuery = (BooleanQuery) query; assertThat(booleanQuery.clauses().size(), equalTo(2)); - assertThat(booleanQuery.clauses().get(0).getQuery(), instanceOf(CombinedFieldQuery.class)); - assertThat(booleanQuery.clauses().get(1).getQuery(), instanceOf(CombinedFieldQuery.class)); + assertThat(booleanQuery.clauses().get(0).query(), instanceOf(CombinedFieldQuery.class)); + assertThat(booleanQuery.clauses().get(1).query(), 
instanceOf(CombinedFieldQuery.class)); }); } @@ -164,8 +164,8 @@ public void testOperator() throws Exception { assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(minimumShouldMatch)); assertThat(booleanQuery.clauses().size(), equalTo(2)); - assertThat(booleanQuery.clauses().get(0).getOccur(), equalTo(occur)); - assertThat(booleanQuery.clauses().get(1).getOccur(), equalTo(occur)); + assertThat(booleanQuery.clauses().get(0).occur(), equalTo(occur)); + assertThat(booleanQuery.clauses().get(1).occur(), equalTo(occur)); } public void testQueryBoost() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java index 49c646d243a55..ef5088eef84a7 100644 --- a/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java @@ -10,7 +10,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.document.LatLonPoint; -import org.apache.lucene.document.LongPoint; +import org.apache.lucene.document.LongField; import org.apache.lucene.search.Query; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; @@ -81,7 +81,7 @@ protected void doAssertLuceneQuery(DistanceFeatureQueryBuilder queryBuilder, Que } else { // NANOSECONDS pivotLong = pivotVal.getNanos(); } - expectedQuery = LongPoint.newDistanceFeatureQuery(fieldName, 1.0f, originLong, pivotLong); + expectedQuery = LongField.newDistanceFeatureQuery(fieldName, 1.0f, originLong, pivotLong); } assertEquals(expectedQuery, query); } diff --git a/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java index ba3350bca8e2c..afa8fc1529604 100644 --- 
a/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java @@ -68,7 +68,7 @@ protected void doAssertLuceneQuery(ExistsQueryBuilder queryBuilder, Query query, Collection childFields = context.getMatchingFieldNames(field + ".*"); assertThat(booleanQuery.clauses().size(), equalTo(childFields.size())); for (BooleanClause booleanClause : booleanQuery) { - assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.SHOULD)); + assertThat(booleanClause.occur(), equalTo(BooleanClause.Occur.SHOULD)); } } else if (context.getFieldType(field).hasDocValues() || context.getFieldType(field).getTextSearchInfo().hasNorms()) { assertThat(constantScoreQuery.getQuery(), instanceOf(FieldExistsQuery.class)); @@ -87,7 +87,7 @@ protected void doAssertLuceneQuery(ExistsQueryBuilder queryBuilder, Query query, assertThat(booleanQuery.clauses().size(), equalTo(fields.size())); for (int i = 0; i < fields.size(); i++) { BooleanClause booleanClause = booleanQuery.clauses().get(i); - assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.SHOULD)); + assertThat(booleanClause.occur(), equalTo(BooleanClause.Occur.SHOULD)); } } } diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java index e471858ce9c5a..79f2dcb61e508 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java @@ -37,7 +37,6 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalToIgnoringCase; -import static org.hamcrest.Matchers.hasProperty; import static org.hamcrest.Matchers.hasSize; public class MatchBoolPrefixQueryBuilderTests extends 
AbstractQueryTestCase { @@ -101,7 +100,7 @@ protected void doAssertLuceneQuery(MatchBoolPrefixQueryBuilder queryBuilder, Que // all queries except the last should be TermQuery or SynonymQuery final Set allQueriesExceptLast = IntStream.range(0, booleanQuery.clauses().size() - 1) .mapToObj(booleanQuery.clauses()::get) - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .collect(Collectors.toSet()); assertThat( allQueriesExceptLast, @@ -122,13 +121,13 @@ protected void doAssertLuceneQuery(MatchBoolPrefixQueryBuilder queryBuilder, Que }); // the last query should be PrefixQuery - final Query shouldBePrefixQuery = booleanQuery.clauses().get(booleanQuery.clauses().size() - 1).getQuery(); + final Query shouldBePrefixQuery = booleanQuery.clauses().get(booleanQuery.clauses().size() - 1).query(); assertThat(shouldBePrefixQuery, instanceOf(PrefixQuery.class)); if (queryBuilder.minimumShouldMatch() != null) { final int optionalClauses = (int) booleanQuery.clauses() .stream() - .filter(clause -> clause.getOccur() == BooleanClause.Occur.SHOULD) + .filter(clause -> clause.occur() == BooleanClause.Occur.SHOULD) .count(); final int expected = Queries.calculateMinShouldMatch(optionalClauses, queryBuilder.minimumShouldMatch()); assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(expected)); @@ -266,10 +265,12 @@ private static void assertBooleanQuery(Query actual, List expectedClauseQ assertThat(actual, instanceOf(BooleanQuery.class)); final BooleanQuery actualBooleanQuery = (BooleanQuery) actual; assertThat(actualBooleanQuery.clauses(), hasSize(expectedClauseQueries.size())); - assertThat(actualBooleanQuery.clauses(), everyItem(hasProperty("occur", equalTo(BooleanClause.Occur.SHOULD)))); for (int i = 0; i < actualBooleanQuery.clauses().size(); i++) { - final Query clauseQuery = actualBooleanQuery.clauses().get(i).getQuery(); + BooleanClause clause = actualBooleanQuery.clauses().get(i); + assertEquals(BooleanClause.Occur.SHOULD, clause.occur()); + final Query 
clauseQuery = clause.query(); + assertThat(clauseQuery, equalTo(expectedClauseQueries.get(i))); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index e71485647913c..ba46bf76efbfe 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.PhraseQuery; @@ -163,7 +164,7 @@ protected void doAssertLuceneQuery(MatchQueryBuilder queryBuilder, Query query, // calculate expected minimumShouldMatch value int optionalClauses = 0; for (BooleanClause c : bq.clauses()) { - if (c.getOccur() == BooleanClause.Occur.SHOULD) { + if (c.occur() == BooleanClause.Occur.SHOULD) { optionalClauses++; } } @@ -527,9 +528,9 @@ public void testAliasWithSynonyms() throws Exception { public void testMaxBooleanClause() { MatchQueryParser query = new MatchQueryParser(createSearchExecutionContext()); query.setAnalyzer(new MockGraphAnalyzer(createGiantGraph(40))); - expectThrows(BooleanQuery.TooManyClauses.class, () -> query.parse(Type.PHRASE, TEXT_FIELD_NAME, "")); + expectThrows(IndexSearcher.TooManyClauses.class, () -> query.parse(Type.PHRASE, TEXT_FIELD_NAME, "")); query.setAnalyzer(new MockGraphAnalyzer(createGiantGraphMultiTerms())); - expectThrows(BooleanQuery.TooManyClauses.class, () -> query.parse(Type.PHRASE, TEXT_FIELD_NAME, "")); + expectThrows(IndexSearcher.TooManyClauses.class, () -> query.parse(Type.PHRASE, TEXT_FIELD_NAME, "")); } private static class MockGraphAnalyzer extends Analyzer { @@ -567,7 +568,7 @@ 
private static CannedBinaryTokenStream.BinaryToken[] createGiantGraph(int numPos } /** - * Creates a graph token stream with {@link BooleanQuery#getMaxClauseCount()} + * Creates a graph token stream with {@link IndexSearcher#getMaxClauseCount()} * expansions at the last position. **/ private static CannedBinaryTokenStream.BinaryToken[] createGiantGraphMultiTerms() { @@ -578,7 +579,7 @@ private static CannedBinaryTokenStream.BinaryToken[] createGiantGraphMultiTerms( tokens.add(new CannedBinaryTokenStream.BinaryToken(term1, 0, 2)); tokens.add(new CannedBinaryTokenStream.BinaryToken(term2, 1, 1)); tokens.add(new CannedBinaryTokenStream.BinaryToken(term2, 1, 1)); - for (int i = 0; i < BooleanQuery.getMaxClauseCount(); i++) { + for (int i = 0; i < IndexSearcher.getMaxClauseCount(); i++) { tokens.add(new CannedBinaryTokenStream.BinaryToken(term1, 0, 1)); } return tokens.toArray(new CannedBinaryTokenStream.BinaryToken[0]); diff --git a/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java index 7209ee77cb70d..7c21751b4b332 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java @@ -246,7 +246,7 @@ private static Fields generateFields(String[] fieldNames, String text) throws IO for (String fieldName : fieldNames) { index.addField(fieldName, text, new WhitespaceAnalyzer()); } - return index.createSearcher().getIndexReader().getTermVectors(0); + return index.createSearcher().getIndexReader().termVectors().get(0); } @Override @@ -255,7 +255,7 @@ protected void doAssertLuceneQuery(MoreLikeThisQueryBuilder queryBuilder, Query assertThat(query, instanceOf(BooleanQuery.class)); BooleanQuery booleanQuery = (BooleanQuery) query; for (BooleanClause booleanClause : booleanQuery) { - if (booleanClause.getQuery() instanceof 
MoreLikeThisQuery moreLikeThisQuery) { + if (booleanClause.query() instanceof MoreLikeThisQuery moreLikeThisQuery) { assertThat(moreLikeThisQuery.getLikeFields().length, greaterThan(0)); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index f4405229e857e..3dcf00e4f22f5 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -749,7 +749,7 @@ public void testToQueryRegExpQueryTooComplex() throws Exception { TooComplexToDeterminizeException.class, () -> queryBuilder.toQuery(createSearchExecutionContext()) ); - assertThat(e.getMessage(), containsString("Determinizing [ac]*")); + assertThat(e.getMessage(), containsString("Determinizing automaton")); assertThat(e.getMessage(), containsString("would require more than 10000 effort.")); } @@ -775,7 +775,7 @@ public void testToQueryRegExpQueryMaxDeterminizedStatesParsing() throws Exceptio TooComplexToDeterminizeException.class, () -> queryBuilder.toQuery(createSearchExecutionContext()) ); - assertThat(e.getMessage(), containsString("Determinizing [ac]*")); + assertThat(e.getMessage(), containsString("Determinizing automaton")); assertThat(e.getMessage(), containsString("would require more than 10 effort.")); } @@ -925,10 +925,10 @@ public void testToQueryBooleanQueryMultipleBoosts() throws Exception { assertThat(query, instanceOf(BooleanQuery.class)); BooleanQuery booleanQuery = (BooleanQuery) query; assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(2)); - assertThat(booleanQuery.clauses().get(0).getOccur(), equalTo(BooleanClause.Occur.SHOULD)); - assertThat(booleanQuery.clauses().get(0).getQuery(), equalTo(new TermQuery(new Term(TEXT_FIELD_NAME, "foo")))); - assertThat(booleanQuery.clauses().get(1).getOccur(), 
equalTo(BooleanClause.Occur.SHOULD)); - assertThat(booleanQuery.clauses().get(1).getQuery(), equalTo(new TermQuery(new Term(TEXT_FIELD_NAME, "bar")))); + assertThat(booleanQuery.clauses().get(0).occur(), equalTo(BooleanClause.Occur.SHOULD)); + assertThat(booleanQuery.clauses().get(0).query(), equalTo(new TermQuery(new Term(TEXT_FIELD_NAME, "foo")))); + assertThat(booleanQuery.clauses().get(1).occur(), equalTo(BooleanClause.Occur.SHOULD)); + assertThat(booleanQuery.clauses().get(1).query(), equalTo(new TermQuery(new Term(TEXT_FIELD_NAME, "bar")))); } public void testToQueryPhraseQueryBoostAndSlop() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java index 79c4c9ec5bb20..a84cd60a99e45 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java @@ -315,7 +315,7 @@ protected void doAssertLuceneQuery(SimpleQueryStringBuilder queryBuilder, Query private static int shouldClauses(BooleanQuery query) { int result = 0; for (BooleanClause c : query.clauses()) { - if (c.getOccur() == BooleanClause.Occur.SHOULD) { + if (c.occur() == BooleanClause.Occur.SHOULD) { result++; } } diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java index cb314472e35b3..283bbbc9b100d 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java @@ -18,7 +18,6 @@ import org.apache.lucene.queries.spans.SpanMultiTermQueryWrapper; import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.queries.spans.SpanTermQuery; -import 
org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiTermQuery; @@ -220,7 +219,7 @@ public void testToQueryInnerTermQuery() throws IOException { assertThat(prefixQuery.getPrefix().text(), equalTo("f")); assertThat(wrapper.getRewriteMethod(), instanceOf(SpanBooleanQueryRewriteWithMaxClause.class)); SpanBooleanQueryRewriteWithMaxClause rewrite = (SpanBooleanQueryRewriteWithMaxClause) wrapper.getRewriteMethod(); - assertThat(rewrite.getMaxExpansions(), equalTo(BooleanQuery.getMaxClauseCount())); + assertThat(rewrite.getMaxExpansions(), equalTo(IndexSearcher.getMaxClauseCount())); assertTrue(rewrite.isHardLimit()); } } @@ -265,8 +264,8 @@ public void testTermExpansionExceptionOnSpanFailure() throws Exception { iw.addDocument(singleton(new TextField("body", "foo bar" + Integer.toString(i), Field.Store.NO))); } try (IndexReader reader = iw.getReader()) { - int origBoolMaxClauseCount = BooleanQuery.getMaxClauseCount(); - BooleanQuery.setMaxClauseCount(1); + int origBoolMaxClauseCount = IndexSearcher.getMaxClauseCount(); + IndexSearcher.setMaxClauseCount(1); try { QueryBuilder queryBuilder = new SpanMultiTermQueryBuilder(QueryBuilders.prefixQuery("body", "bar")); IndexSearcher searcher = newSearcher(reader); @@ -274,7 +273,7 @@ public void testTermExpansionExceptionOnSpanFailure() throws Exception { RuntimeException exc = expectThrows(RuntimeException.class, () -> query.rewrite(searcher)); assertThat(exc.getMessage(), containsString("maxClauseCount")); } finally { - BooleanQuery.setMaxClauseCount(origBoolMaxClauseCount); + IndexSearcher.setMaxClauseCount(origBoolMaxClauseCount); } } } diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java index 3edf150688384..589019093075d 100644 --- 
a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java @@ -238,7 +238,7 @@ public void testDoToQuery() throws Exception { 10, new Sort(SortField.FIELD_DOC) ); - assertThat(topDocsWithMinimumShouldMatchField.totalHits.value, equalTo(3L)); + assertThat(topDocsWithMinimumShouldMatchField.totalHits.value(), equalTo(3L)); assertThat(topDocsWithMinimumShouldMatchField.scoreDocs[0].doc, equalTo(1)); assertThat(topDocsWithMinimumShouldMatchField.scoreDocs[1].doc, equalTo(3)); assertThat(topDocsWithMinimumShouldMatchField.scoreDocs[2].doc, equalTo(4)); @@ -249,7 +249,7 @@ public void testDoToQuery() throws Exception { ).doToQuery(context); searcher = newSearcher(ir); TopDocs topDocsWithMinimumShouldMatch = searcher.search(queryWithMinimumShouldMatch, 10, new Sort(SortField.FIELD_DOC)); - assertThat(topDocsWithMinimumShouldMatch.totalHits.value, equalTo(5L)); + assertThat(topDocsWithMinimumShouldMatch.totalHits.value(), equalTo(5L)); assertThat(topDocsWithMinimumShouldMatch.scoreDocs[0].doc, equalTo(1)); assertThat(topDocsWithMinimumShouldMatch.scoreDocs[1].doc, equalTo(2)); assertThat(topDocsWithMinimumShouldMatch.scoreDocs[2].doc, equalTo(3)); @@ -266,7 +266,7 @@ public void testDoToQuery() throws Exception { 10, new Sort(SortField.FIELD_DOC) ); - assertThat(topDocsWithMinimumShouldMatchNegative.totalHits.value, equalTo(1L)); + assertThat(topDocsWithMinimumShouldMatchNegative.totalHits.value(), equalTo(1L)); assertThat(topDocsWithMinimumShouldMatchNegative.scoreDocs[0].doc, equalTo(5)); } } @@ -310,7 +310,7 @@ public void testDoToQuery_msmScriptField() throws Exception { .doToQuery(context); IndexSearcher searcher = newSearcher(ir); TopDocs topDocs = searcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs[0].doc, 
equalTo(0)); assertThat(topDocs.scoreDocs[1].doc, equalTo(2)); assertThat(topDocs.scoreDocs[2].doc, equalTo(4)); diff --git a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java index 47c75ee38ee1b..49b1362436ec7 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java @@ -305,7 +305,7 @@ public void testConflictingOpsOnReplica() throws Exception { for (IndexShard shard : shards) { try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.search(new TermQuery(new Term("f", "2")), 10); - assertEquals("shard " + shard.routingEntry() + " misses new version", 1, search.totalHits.value); + assertEquals("shard " + shard.routingEntry() + " misses new version", 1, search.totalHits.value()); } } } diff --git a/server/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java b/server/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java index 5308f5f5d1f04..e8652e3a0f6d6 100644 --- a/server/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java @@ -225,7 +225,7 @@ public void testNestedSorting() throws Exception { Sort sort = new Sort(new SortField("field2", nestedComparatorSource)); TopFieldDocs topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(11)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(7)); @@ -240,7 +240,7 @@ public void 
testNestedSorting() throws Exception { sort = new Sort(new SortField("field2", nestedComparatorSource, true)); topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(28)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(13)); @@ -258,7 +258,7 @@ public void testNestedSorting() throws Exception { query = new ToParentBlockJoinQuery(new ConstantScoreQuery(childFilter), new QueryBitSetProducer(parentFilter), ScoreMode.None); sort = new Sort(new SortField("field2", nestedComparatorSource, true)); topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(6L)); + assertThat(topDocs.totalHits.value(), equalTo(6L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(23)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(12)); @@ -273,7 +273,7 @@ public void testNestedSorting() throws Exception { sort = new Sort(new SortField("field2", nestedComparatorSource)); topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(6L)); + assertThat(topDocs.totalHits.value(), equalTo(6L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(15)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(3)); @@ -289,7 +289,7 @@ public void testNestedSorting() throws Exception { nestedComparatorSource = createFieldComparator("field2", sortMode, 127, createNested(searcher, parentFilter, childFilter)); sort = new Sort(new SortField("field2", nestedComparatorSource, true)); topDocs = searcher.search(new TermQuery(new Term("__type", "parent")), 5, sort); - assertThat(topDocs.totalHits.value, equalTo(8L)); + assertThat(topDocs.totalHits.value(), equalTo(8L)); 
assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(19)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(127)); @@ -305,7 +305,7 @@ public void testNestedSorting() throws Exception { nestedComparatorSource = createFieldComparator("field2", sortMode, -127, createNested(searcher, parentFilter, childFilter)); sort = new Sort(new SortField("field2", nestedComparatorSource)); topDocs = searcher.search(new TermQuery(new Term("__type", "parent")), 5, sort); - assertThat(topDocs.totalHits.value, equalTo(8L)); + assertThat(topDocs.totalHits.value(), equalTo(8L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(19)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(-127)); @@ -340,7 +340,7 @@ protected void assertAvgScoreMode(Query parentFilter, IndexSearcher searcher) th ); Sort sort = new Sort(new SortField("field2", nestedComparatorSource)); TopDocs topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(11)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(2)); diff --git a/server/src/test/java/org/elasticsearch/index/search/nested/DoubleNestedSortingTests.java b/server/src/test/java/org/elasticsearch/index/search/nested/DoubleNestedSortingTests.java index cecf20360178c..ca176a5402c06 100644 --- a/server/src/test/java/org/elasticsearch/index/search/nested/DoubleNestedSortingTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/nested/DoubleNestedSortingTests.java @@ -73,7 +73,7 @@ protected void assertAvgScoreMode(Query parentFilter, IndexSearcher searcher) th ); Sort sort = new Sort(new SortField("field2", nestedComparatorSource)); TopDocs topDocs = 
searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(11)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(2)); diff --git a/server/src/test/java/org/elasticsearch/index/search/nested/FloatNestedSortingTests.java b/server/src/test/java/org/elasticsearch/index/search/nested/FloatNestedSortingTests.java index 49d9c0eedd121..60e7473a2101a 100644 --- a/server/src/test/java/org/elasticsearch/index/search/nested/FloatNestedSortingTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/nested/FloatNestedSortingTests.java @@ -76,7 +76,7 @@ protected void assertAvgScoreMode( ); Sort sort = new Sort(new SortField("field2", nestedComparatorSource)); TopDocs topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(11)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(2)); diff --git a/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java b/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java index e088e8569bf8a..cd6f596cfda05 100644 --- a/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java @@ -17,6 +17,7 @@ import org.apache.lucene.document.StringField; import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; @@ -310,7 +311,7 @@ public 
void testNestedSorting() throws Exception { Sort sort = new Sort(new SortField("field2", nestedComparatorSource)); TopFieldDocs topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(3)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("a")); @@ -332,7 +333,7 @@ public void testNestedSorting() throws Exception { ); sort = new Sort(new SortField("field2", nestedComparatorSource, true)); topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(28)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("o")); @@ -358,7 +359,7 @@ public void testNestedSorting() throws Exception { query = new ToParentBlockJoinQuery(new ConstantScoreQuery(childFilter), new QueryBitSetProducer(parentFilter), ScoreMode.None); sort = new Sort(new SortField("field2", nestedComparatorSource, true)); topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(6L)); + assertThat(topDocs.totalHits.value(), equalTo(6L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(23)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("m")); @@ -620,42 +621,43 @@ public void testMultiLevelNestedSorting() throws IOException { sortBuilder.setNestedSort(new NestedSortBuilder("chapters").setNestedSort(new NestedSortBuilder("chapters.paragraphs"))); QueryBuilder queryBuilder = new MatchAllQueryBuilder(); TopFieldDocs topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, 
equalTo(5L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(topFields.totalHits.value(), equalTo(5L)); + StoredFields storedFields = searcher.storedFields(); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); - assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("4")); + assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(87L)); - assertThat(searcher.doc(topFields.scoreDocs[2].doc).get("_id"), equalTo("1")); + assertThat(storedFields.document(topFields.scoreDocs[2].doc).get("_id"), equalTo("1")); assertThat(((FieldDoc) topFields.scoreDocs[2]).fields[0], equalTo(234L)); - assertThat(searcher.doc(topFields.scoreDocs[3].doc).get("_id"), equalTo("3")); + assertThat(storedFields.document(topFields.scoreDocs[3].doc).get("_id"), equalTo("3")); assertThat(((FieldDoc) topFields.scoreDocs[3]).fields[0], equalTo(976L)); - assertThat(searcher.doc(topFields.scoreDocs[4].doc).get("_id"), equalTo("5")); + assertThat(storedFields.document(topFields.scoreDocs[4].doc).get("_id"), equalTo("5")); assertThat(((FieldDoc) topFields.scoreDocs[4]).fields[0], equalTo(Long.MAX_VALUE)); // Specific genre { queryBuilder = new TermQueryBuilder("genre", "romance"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); queryBuilder = new TermQueryBuilder("genre", "science fiction"); topFields = search(queryBuilder, sortBuilder, 
searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(234L)); queryBuilder = new TermQueryBuilder("genre", "horror"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L)); queryBuilder = new TermQueryBuilder("genre", "cooking"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); } @@ -664,16 +666,16 @@ public void testMultiLevelNestedSorting() throws IOException { sortBuilder.order(SortOrder.DESC); queryBuilder = new MatchAllQueryBuilder(); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(5L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); + assertThat(topFields.totalHits.value(), equalTo(5L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L)); - 
assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("1")); + assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("1")); assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(849L)); - assertThat(searcher.doc(topFields.scoreDocs[2].doc).get("_id"), equalTo("4")); + assertThat(storedFields.document(topFields.scoreDocs[2].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[2]).fields[0], equalTo(180L)); - assertThat(searcher.doc(topFields.scoreDocs[3].doc).get("_id"), equalTo("2")); + assertThat(storedFields.document(topFields.scoreDocs[3].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[3]).fields[0], equalTo(76L)); - assertThat(searcher.doc(topFields.scoreDocs[4].doc).get("_id"), equalTo("5")); + assertThat(storedFields.document(topFields.scoreDocs[4].doc).get("_id"), equalTo("5")); assertThat(((FieldDoc) topFields.scoreDocs[4]).fields[0], equalTo(Long.MIN_VALUE)); } @@ -681,26 +683,26 @@ public void testMultiLevelNestedSorting() throws IOException { { queryBuilder = new TermQueryBuilder("genre", "romance"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); queryBuilder = new TermQueryBuilder("genre", "science fiction"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); 
assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(849L)); queryBuilder = new TermQueryBuilder("genre", "horror"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L)); queryBuilder = new TermQueryBuilder("genre", "cooking"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(180L)); } @@ -717,10 +719,10 @@ public void testMultiLevelNestedSorting() throws IOException { searchExecutionContext, searcher ); - assertThat(topFields.totalHits.value, equalTo(2L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(topFields.totalHits.value(), equalTo(2L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); - assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("4")); + assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(87L)); sortBuilder.order(SortOrder.DESC); @@ -730,10 +732,10 @@ public void testMultiLevelNestedSorting() throws IOException { searchExecutionContext, searcher ); - assertThat(topFields.totalHits.value, equalTo(2L)); - 
assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(topFields.totalHits.value(), equalTo(2L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); - assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); + assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(76L)); } @@ -755,10 +757,10 @@ public void testMultiLevelNestedSorting() throws IOException { searchExecutionContext, searcher ); - assertThat(topFields.totalHits.value, equalTo(2L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(topFields.totalHits.value(), equalTo(2L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); - assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); + assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(Long.MAX_VALUE)); sortBuilder.order(SortOrder.DESC); @@ -768,10 +770,10 @@ public void testMultiLevelNestedSorting() throws IOException { searchExecutionContext, searcher ); - assertThat(topFields.totalHits.value, equalTo(2L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(topFields.totalHits.value(), equalTo(2L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); - assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); + assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) 
topFields.scoreDocs[1]).fields[0], equalTo(Long.MIN_VALUE)); } @@ -785,26 +787,26 @@ public void testMultiLevelNestedSorting() throws IOException { queryBuilder = new TermQueryBuilder("genre", "romance"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); queryBuilder = new TermQueryBuilder("genre", "science fiction"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(Long.MAX_VALUE)); queryBuilder = new TermQueryBuilder("genre", "horror"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(Long.MAX_VALUE)); queryBuilder = new TermQueryBuilder("genre", "cooking"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + 
assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); } diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexReaderWrapperTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexReaderWrapperTests.java index 8355f0156d0c3..7c7313d6b6516 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexReaderWrapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexReaderWrapperTests.java @@ -52,7 +52,7 @@ public void testReaderCloseListenerIsCalled() throws IOException { writer.addDocument(doc); DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); IndexSearcher searcher = newSearcher(open); - assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); + assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value()); final AtomicInteger closeCalls = new AtomicInteger(0); CheckedFunction wrapper = reader -> new FieldMaskingReader( "field", @@ -82,7 +82,7 @@ public void testReaderCloseListenerIsCalled() throws IOException { } outerCount.incrementAndGet(); }); - assertEquals(0, wrap.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); + assertEquals(0, wrap.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value()); wrap.close(); assertFalse("wrapped reader is closed", wrap.getIndexReader().tryIncRef()); assertEquals(sourceRefCount, open.getRefCount()); @@ -106,7 +106,7 @@ public void testIsCacheable() throws IOException { writer.addDocument(doc); DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); IndexSearcher searcher = newSearcher(open); - assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); + assertEquals(1, searcher.search(new TermQuery(new 
Term("field", "doc")), 1).totalHits.value()); searcher.setSimilarity(iwc.getSimilarity()); final AtomicInteger closeCalls = new AtomicInteger(0); CheckedFunction wrapper = reader -> new FieldMaskingReader( @@ -153,7 +153,7 @@ public void testAlwaysWrapWithFieldUsageTrackingDirectoryReader() throws IOExcep writer.addDocument(doc); DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); IndexSearcher searcher = newSearcher(open); - assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); + assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value()); searcher.setSimilarity(iwc.getSimilarity()); CheckedFunction wrapper = directoryReader -> directoryReader; try ( diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index f15506676dc39..d480f7bfc8d7f 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -2793,9 +2793,9 @@ public void testReaderWrapperIsUsed() throws IOException { } try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.search(new TermQuery(new Term("foo", "bar")), 10); - assertEquals(search.totalHits.value, 1); + assertEquals(search.totalHits.value(), 1); search = searcher.search(new TermQuery(new Term("foobar", "bar")), 10); - assertEquals(search.totalHits.value, 1); + assertEquals(search.totalHits.value(), 1); } CheckedFunction wrapper = reader -> new FieldMaskingReader("foo", reader); closeShards(shard); @@ -2815,9 +2815,9 @@ public void testReaderWrapperIsUsed() throws IOException { try (Engine.Searcher searcher = newShard.acquireSearcher("test")) { TopDocs search = searcher.search(new TermQuery(new Term("foo", "bar")), 10); - assertEquals(search.totalHits.value, 0); + 
assertEquals(search.totalHits.value(), 0); search = searcher.search(new TermQuery(new Term("foobar", "bar")), 10); - assertEquals(search.totalHits.value, 1); + assertEquals(search.totalHits.value(), 1); } try (Engine.GetResult getResult = newShard.get(new Engine.Get(false, false, "1"))) { assertTrue(getResult.exists()); diff --git a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java index 6fa405c091da1..ccf0bbebcc354 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java @@ -437,9 +437,8 @@ public void testLotsOfThreads() throws Exception { ) { assertTrue("document not found", getResult.exists()); assertEquals(iteration, getResult.version()); - org.apache.lucene.document.Document document = getResult.docIdAndVersion().reader.document( - getResult.docIdAndVersion().docId - ); + org.apache.lucene.document.Document document = getResult.docIdAndVersion().reader.storedFields() + .document(getResult.docIdAndVersion().docId); assertThat(document.getValues("test"), arrayContaining(testFieldValue)); } } catch (Exception t) { diff --git a/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java b/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java index 70e5143552235..aa89f31757ef4 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java @@ -16,6 +16,7 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import 
org.apache.lucene.search.ScoreMode; @@ -172,6 +173,7 @@ void assertSplit(Directory dir, IndexMetadata metadata, int targetShardId, boole int doc; int numActual = 0; int lastDoc = 0; + StoredFields storedFields = reader.storedFields(); while ((doc = iterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { lastDoc = doc; while (shard_id.nextDoc() < doc) { @@ -181,7 +183,7 @@ void assertSplit(Directory dir, IndexMetadata metadata, int targetShardId, boole } assertEquals(shard_id.docID(), doc); long shardID = shard_id.nextValue(); - BytesRef id = reader.document(doc).getBinaryValue("_id"); + BytesRef id = storedFields.document(doc).getBinaryValue("_id"); String actualId = Uid.decodeId(id.bytes, id.offset, id.length); assertNotEquals(ctx.reader() + " docID: " + doc + " actualID: " + actualId, shardID, targetShardId); } diff --git a/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java b/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java index b05f287e628a3..fa5f713dfd672 100644 --- a/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java +++ b/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java @@ -144,7 +144,7 @@ public double execute( 3.2f ); TopDocs topDocs = searcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertTrue(called.get()); assertEquals(42, topDocs.scoreDocs[0].score, 0); r.close(); @@ -236,7 +236,7 @@ public double execute( searcher.setSimilarity(sim); Query query = new BoostQuery(new TermQuery(new Term("f", "foo")), 3.2f); TopDocs topDocs = searcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertTrue(initCalled.get()); assertTrue(called.get()); assertEquals(42, topDocs.scoreDocs[0].score, 0); diff --git a/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java 
b/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java index 394ce35c6b493..38e6ca0be0647 100644 --- a/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java +++ b/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java @@ -33,10 +33,16 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; +import java.util.HashMap; import java.util.Locale; +import java.util.Map; +import java.util.Set; +import java.util.function.BiPredicate; public class FsDirectoryFactoryTests extends ESTestCase { + final PreLoadExposingFsDirectoryFactory fsDirectoryFactory = new PreLoadExposingFsDirectoryFactory(); + public void testPreload() throws IOException { doTestPreload(); doTestPreload("nvd", "dvd", "tim"); @@ -60,10 +66,11 @@ public void testPreload() throws IOException { assertTrue(FsDirectoryFactory.HybridDirectory.useDelegate("foo.tmp", newIOContext(random()))); assertTrue(FsDirectoryFactory.HybridDirectory.useDelegate("foo.fdt__0.tmp", newIOContext(random()))); MMapDirectory delegate = hybridDirectory.getDelegate(); - assertThat(delegate, Matchers.instanceOf(FsDirectoryFactory.PreLoadMMapDirectory.class)); - FsDirectoryFactory.PreLoadMMapDirectory preLoadMMapDirectory = (FsDirectoryFactory.PreLoadMMapDirectory) delegate; - assertTrue(preLoadMMapDirectory.useDelegate("foo.dvd")); - assertTrue(preLoadMMapDirectory.useDelegate("foo.tmp")); + assertThat(delegate, Matchers.instanceOf(MMapDirectory.class)); + var func = fsDirectoryFactory.preLoadFuncMap.get(delegate); + assertTrue(func.test("foo.dvd", newIOContext(random()))); + assertTrue(func.test("foo.tmp", newIOContext(random()))); + fsDirectoryFactory.preLoadFuncMap.clear(); } } @@ -72,7 +79,21 @@ private Directory newDirectory(Settings settings) throws IOException { Path tempDir = createTempDir().resolve(idxSettings.getUUID()).resolve("0"); Files.createDirectories(tempDir); ShardPath path = new ShardPath(false, tempDir, tempDir, 
new ShardId(idxSettings.getIndex(), 0)); - return new FsDirectoryFactory().newDirectory(idxSettings, path); + return fsDirectoryFactory.newDirectory(idxSettings, path); + } + + static class PreLoadExposingFsDirectoryFactory extends FsDirectoryFactory { + + // expose for testing + final Map> preLoadFuncMap = new HashMap<>(); + + @Override + public MMapDirectory setPreload(MMapDirectory mMapDirectory, Set preLoadExtensions) { + var preLoadFunc = FsDirectoryFactory.getPreloadFunc(preLoadExtensions); + mMapDirectory.setPreload(preLoadFunc); + preLoadFuncMap.put(mMapDirectory, preLoadFunc); + return mMapDirectory; + } } private void doTestPreload(String... preload) throws IOException { @@ -85,26 +106,23 @@ private void doTestPreload(String... preload) throws IOException { assertSame(dir, directory); // prevent warnings assertFalse(directory instanceof SleepingLockWrapper); var mmapDirectory = FilterDirectory.unwrap(directory); + assertTrue(directory.toString(), mmapDirectory instanceof MMapDirectory); if (preload.length == 0) { - assertTrue(directory.toString(), mmapDirectory instanceof MMapDirectory); - assertFalse(((MMapDirectory) mmapDirectory).getPreload()); + assertEquals(fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory), MMapDirectory.NO_FILES); } else if (Arrays.asList(preload).contains("*")) { - assertTrue(directory.toString(), mmapDirectory instanceof MMapDirectory); - assertTrue(((MMapDirectory) mmapDirectory).getPreload()); + assertEquals(fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory), MMapDirectory.ALL_FILES); } else { - assertTrue(directory.toString(), mmapDirectory instanceof FsDirectoryFactory.PreLoadMMapDirectory); - FsDirectoryFactory.PreLoadMMapDirectory preLoadMMapDirectory = (FsDirectoryFactory.PreLoadMMapDirectory) mmapDirectory; + var func = fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory); + assertNotEquals(fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory), MMapDirectory.ALL_FILES); + 
assertNotEquals(fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory), MMapDirectory.NO_FILES); + assertTrue(func.test("foo.dvd", newIOContext(random()))); + assertTrue(func.test("foo.tmp", newIOContext(random()))); for (String ext : preload) { - assertTrue("ext: " + ext, preLoadMMapDirectory.useDelegate("foo." + ext)); - assertTrue("ext: " + ext, preLoadMMapDirectory.getDelegate().getPreload()); + assertTrue("ext: " + ext, func.test("foo." + ext, newIOContext(random()))); } - assertFalse(preLoadMMapDirectory.useDelegate("XXX")); - assertFalse(preLoadMMapDirectory.getPreload()); - preLoadMMapDirectory.close(); - expectThrows( - AlreadyClosedException.class, - () -> preLoadMMapDirectory.getDelegate().openInput("foo.tmp", IOContext.DEFAULT) - ); + assertFalse(func.test("XXX", newIOContext(random()))); + mmapDirectory.close(); + expectThrows(AlreadyClosedException.class, () -> mmapDirectory.openInput("foo.tmp", IOContext.DEFAULT)); } } expectThrows( @@ -148,7 +166,7 @@ private void doTestStoreDirectory(Path tempDir, String typeSettingValue, IndexMo ); break; case FS: - if (Constants.JRE_IS_64BIT && MMapDirectory.UNMAP_SUPPORTED) { + if (Constants.JRE_IS_64BIT) { assertTrue(FsDirectoryFactory.isHybridFs(directory)); } else { assertTrue(directory.toString(), directory instanceof NIOFSDirectory); diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java index ebffd54a742ce..4f73672471942 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java @@ -70,8 +70,9 @@ public String toString(String field) { public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return new 
ConstantScoreScorer(this, score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc())); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc())); + return new DefaultScorerSupplier(scorer); } @Override @@ -348,16 +349,22 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio return weight.explain(context, doc); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - scorerCalled = true; - return weight.scorer(context); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { scorerSupplierCalled = true; - return weight.scorerSupplier(context); + ScorerSupplier inScorerSupplier = weight.scorerSupplier(context); + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + scorerCalled = true; + return inScorerSupplier.get(leadCost); + } + + @Override + public long cost() { + return inScorerSupplier.cost(); + } + }; } @Override diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java index dfd71bba0208c..773c660caa1c6 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java @@ -421,8 +421,8 @@ public BytesReference get() { try (BytesStreamOutput out = new BytesStreamOutput()) { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("id", Integer.toString(id))), 1); - assertEquals(1, topDocs.totalHits.value); - Document document = reader.document(topDocs.scoreDocs[0].doc); + assertEquals(1, topDocs.totalHits.value()); + Document document = reader.storedFields().document(topDocs.scoreDocs[0].doc); 
out.writeString(document.get("value")); loadedFromCache = false; return out.bytes(); diff --git a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java index baa35101c1c87..1c1a9a645b99b 100644 --- a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java @@ -114,11 +114,11 @@ private > void assertSearchCollapse( TopFieldDocs topDocs = searcher.search(query, topFieldCollectorManager); TopFieldGroups collapseTopFieldDocs = collapsingCollector.getTopGroups(0); assertEquals(sortField.getField(), collapseTopFieldDocs.field); - assertEquals(totalHits, collapseTopFieldDocs.totalHits.value); + assertEquals(totalHits, collapseTopFieldDocs.totalHits.value()); assertEquals(expectedNumGroups, collapseTopFieldDocs.scoreDocs.length); - assertEquals(TotalHits.Relation.EQUAL_TO, collapseTopFieldDocs.totalHits.relation); - assertEquals(totalHits, topDocs.totalHits.value); + assertEquals(TotalHits.Relation.EQUAL_TO, collapseTopFieldDocs.totalHits.relation()); + assertEquals(totalHits, topDocs.totalHits.value()); Object currentValue = null; int topDocsIndex = 0; diff --git a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java index 90adb2d0ffcce..30c68fe708c83 100644 --- a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java @@ -49,11 +49,11 @@ public class SinglePassGroupingCollectorTests extends ESTestCase { private static class SegmentSearcher extends IndexSearcher { - private final List ctx; + private 
final LeafReaderContextPartition[] ctx; SegmentSearcher(LeafReaderContext ctx, IndexReaderContext parent) { super(parent); - this.ctx = Collections.singletonList(ctx); + this.ctx = new LeafReaderContextPartition[] { IndexSearcher.LeafReaderContextPartition.createForEntireSegment(ctx) }; } public void search(Weight weight, Collector collector) throws IOException { @@ -62,7 +62,7 @@ public void search(Weight weight, Collector collector) throws IOException { @Override public String toString() { - return "ShardSearcher(" + ctx.get(0) + ")"; + return "ShardSearcher(" + ctx[0] + ")"; } } @@ -140,10 +140,10 @@ private > void assertSearchCollapse( TopFieldGroups collapseTopFieldDocs = collapsingCollector.getTopGroups(0); assertEquals(collapseField.getField(), collapseTopFieldDocs.field); assertEquals(expectedNumGroups, collapseTopFieldDocs.scoreDocs.length); - assertEquals(totalHits, collapseTopFieldDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, collapseTopFieldDocs.totalHits.relation); + assertEquals(totalHits, collapseTopFieldDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, collapseTopFieldDocs.totalHits.relation()); assertEquals(totalHits, topDocs.scoreDocs.length); - assertEquals(totalHits, topDocs.totalHits.value); + assertEquals(totalHits, topDocs.totalHits.value()); Set seen = new HashSet<>(); // collapse field is the last sort diff --git a/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java b/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java index 71a2703555318..076cd0af1bf26 100644 --- a/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java @@ -88,7 +88,7 @@ public void testDismaxQuery() throws IOException { query.add(BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "generator"), 0.1f), BooleanClause.Occur.SHOULD); TopDocs search = 
searcher.search(query.build(), 10); ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue()); + assertEquals(Integer.toString(0), reader.storedFields().document(scoreDocs[0].doc).getField("id").stringValue()); } { BooleanQuery.Builder query = new BooleanQuery.Builder(); @@ -110,7 +110,7 @@ public void testDismaxQuery() throws IOException { query.add(gen, BooleanClause.Occur.SHOULD); TopDocs search = searcher.search(query.build(), 4); ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(Integer.toString(1), reader.document(scoreDocs[0].doc).getField("id").stringValue()); + assertEquals(Integer.toString(1), reader.storedFields().document(scoreDocs[0].doc).getField("id").stringValue()); } { @@ -120,8 +120,8 @@ public void testDismaxQuery() throws IOException { Query rewrite = searcher.rewrite(query); assertThat(rewrite, instanceOf(BooleanQuery.class)); for (BooleanClause clause : (BooleanQuery) rewrite) { - assertThat(clause.getQuery(), instanceOf(TermQuery.class)); - TermQuery termQuery = (TermQuery) clause.getQuery(); + assertThat(clause.query(), instanceOf(TermQuery.class)); + TermQuery termQuery = (TermQuery) clause.query(); TermStates termStates = termQuery.getTermStates(); if (termQuery.getTerm().field().equals("unknown_field")) { assertThat(termStates.docFreq(), equalTo(0)); @@ -131,7 +131,7 @@ public void testDismaxQuery() throws IOException { assertThat(termStates.totalTermFreq(), greaterThan(0L)); } } - assertThat(searcher.search(query, 10).totalHits.value, equalTo((long) iters + username.length)); + assertThat(searcher.search(query, 10).totalHits.value(), equalTo((long) iters + username.length)); } { // test with an unknown field and an unknown term @@ -140,13 +140,13 @@ public void testDismaxQuery() throws IOException { Query rewrite = searcher.rewrite(query); assertThat(rewrite, instanceOf(BooleanQuery.class)); for (BooleanClause clause : (BooleanQuery) rewrite) { - 
assertThat(clause.getQuery(), instanceOf(TermQuery.class)); - TermQuery termQuery = (TermQuery) clause.getQuery(); + assertThat(clause.query(), instanceOf(TermQuery.class)); + TermQuery termQuery = (TermQuery) clause.query(); TermStates termStates = termQuery.getTermStates(); assertThat(termStates.docFreq(), equalTo(0)); assertThat(termStates.totalTermFreq(), equalTo(0L)); } - assertThat(searcher.search(query, 10).totalHits.value, equalTo(0L)); + assertThat(searcher.search(query, 10).totalHits.value(), equalTo(0L)); } { // test with an unknown field and a term that is present in only one field @@ -155,8 +155,8 @@ public void testDismaxQuery() throws IOException { Query rewrite = searcher.rewrite(query); assertThat(rewrite, instanceOf(BooleanQuery.class)); for (BooleanClause clause : (BooleanQuery) rewrite) { - assertThat(clause.getQuery(), instanceOf(TermQuery.class)); - TermQuery termQuery = (TermQuery) clause.getQuery(); + assertThat(clause.query(), instanceOf(TermQuery.class)); + TermQuery termQuery = (TermQuery) clause.query(); TermStates termStates = termQuery.getTermStates(); if (termQuery.getTerm().field().equals("username")) { assertThat(termStates.docFreq(), equalTo(1)); @@ -166,7 +166,7 @@ public void testDismaxQuery() throws IOException { assertThat(termStates.totalTermFreq(), equalTo(0L)); } } - assertThat(searcher.search(query, 10).totalHits.value, equalTo(1L)); + assertThat(searcher.search(query, 10).totalHits.value(), equalTo(1L)); } reader.close(); w.close(); @@ -250,7 +250,7 @@ public void testMinTTF() throws IOException { Query query = BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "foo"), 0.1f); TopDocs search = searcher.search(query, 10); ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue()); + assertEquals(Integer.toString(0), reader.storedFields().document(scoreDocs[0].doc).getField("id").stringValue()); } reader.close(); w.close(); @@ -298,7 +298,7 @@ 
public void testRandomFields() throws IOException { String[] fieldNames = fields.keySet().toArray(new String[0]); Query query = BlendedTermQuery.dismaxBlendedQuery(toTerms(fieldNames, "foo"), 0.1f); TopDocs search = searcher.search(query, 10); - assertTrue(search.totalHits.value > 0); + assertTrue(search.totalHits.value() > 0); assertTrue(search.scoreDocs.length > 0); } reader.close(); @@ -332,7 +332,7 @@ public void testMissingFields() throws IOException { Query query = BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "foo"), 0.1f); TopDocs search = searcher.search(query, 10); ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue()); + assertEquals(Integer.toString(0), reader.storedFields().document(scoreDocs[0].doc).getField("id").stringValue()); reader.close(); w.close(); diff --git a/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java b/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java index f5266568e6fdf..126641037fde7 100644 --- a/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java @@ -141,7 +141,7 @@ private void assertHighlightOneDoc( IndexSearcher searcher = newSearcher(reader); iw.close(); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 1, Sort.INDEXORDER); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); String rawValue = Strings.arrayToDelimitedString(inputs, String.valueOf(MULTIVAL_SEP_CHAR)); UnifiedHighlighter.Builder builder = UnifiedHighlighter.builder(searcher, analyzer); builder.withBreakIterator(() -> breakIterator); diff --git a/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java 
b/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java index 239c90bdee2fd..78cd90e8f5269 100644 --- a/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java +++ b/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java @@ -16,6 +16,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; @@ -336,10 +337,11 @@ private void assertAllDocs(Set terms, Function fun withIndexSearcher(searcher -> { for (LeafReaderContext leafReaderContext : searcher.getLeafContexts()) { IndexReader reader = leafReaderContext.reader(); + StoredFields storedFields = reader.storedFields(); DocIdSetIterator docIdSetIterator = DocIdSetIterator.all(reader.maxDoc()); ScriptTermStats termStats = new ScriptTermStats(searcher, leafReaderContext, docIdSetIterator::docID, terms); while (docIdSetIterator.nextDoc() <= reader.maxDoc()) { - String docId = reader.document(docIdSetIterator.docID()).get("id"); + String docId = storedFields.document(docIdSetIterator.docID()).get("id"); if (expectedValues.containsKey(docId)) { assertThat(function.apply(termStats), expectedValues.get(docId)); } diff --git a/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java b/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java index 6a6f5dc44ef6f..663b39d116913 100644 --- a/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java +++ b/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java @@ -714,11 +714,8 @@ public void testMultiValuedOrds() throws Exception { @Override public long nextOrd() { - if (i < array[doc].length) { - return array[doc][i++]; - } else { - return NO_MORE_ORDS; - } + assert i < array[doc].length; + return array[doc][i++]; } 
@Override @@ -762,7 +759,8 @@ private void verifySortedSet(Supplier supplier, int maxDoc) } int expected = -1; if (values.advanceExact(i)) { - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + for (int j = 0; j < values.docValueCount(); j++) { + long ord = values.nextOrd(); if (expected == -1) { expected = (int) ord; } else { @@ -810,7 +808,8 @@ private void verifySortedSet( if (++count > maxChildren) { break; } - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + for (int i = 0; i < values.docValueCount(); i++) { + long ord = values.nextOrd(); if (expected == -1) { expected = (int) ord; } else { diff --git a/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java b/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java index 5e1296c354015..aa2e76f512cc8 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java @@ -15,6 +15,7 @@ import org.apache.lucene.document.StringField; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.PointValues; @@ -105,14 +106,17 @@ public void testCancellableCollector() throws IOException { true ); - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); assertThat(totalHits, equalTo(reader.numDocs())); searcher.addQueryCancellation(cancellation); - expectThrows(TaskCancelledException.class, () -> searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager())); + expectThrows( + 
TaskCancelledException.class, + () -> searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())) + ); searcher.removeQueryCancellation(cancellation); - Integer totalHits2 = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits2 = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); assertThat(totalHits2, equalTo(reader.numDocs())); } @@ -203,15 +207,17 @@ public void testExitableDirectoryReaderVectors() throws IOException { cancelled.set(false); // Avoid exception during construction of the wrapper objects FloatVectorValues vectorValues = searcher.getIndexReader().leaves().get(0).reader().getFloatVectorValues(KNN_FIELD_NAME); cancelled.set(true); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); // On the first doc when already canceled, it throws - expectThrows(TaskCancelledException.class, vectorValues::nextDoc); + expectThrows(TaskCancelledException.class, iterator::nextDoc); cancelled.set(false); // Avoid exception during construction of the wrapper objects FloatVectorValues uncancelledVectorValues = searcher.getIndexReader().leaves().get(0).reader().getFloatVectorValues(KNN_FIELD_NAME); + uncancelledVectorValues.iterator(); cancelled.set(true); searcher.removeQueryCancellation(cancellation); // On the first doc when already canceled, it throws, but with the cancellation removed, it should not - uncancelledVectorValues.nextDoc(); + iterator.nextDoc(); } private static class PointValuesIntersectVisitor implements PointValues.IntersectVisitor { diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index 642804730a144..5dc07a41b3f8c 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -772,7 +772,7 @@ public 
RankShardResult buildRankFeatureShardResult(SearchHits hits, int shardId) ), (response) -> { SearchHits hits = response.getHits(); - assertEquals(hits.getTotalHits().value, numDocs); + assertEquals(hits.getTotalHits().value(), numDocs); assertEquals(hits.getHits().length, 2); int index = 0; for (SearchHit hit : hits.getHits()) { @@ -2505,7 +2505,7 @@ public void testWaitOnRefresh() throws ExecutionException, InterruptedException ); PlainActionFuture future = new PlainActionFuture<>(); service.executeQueryPhase(request, task, future.delegateFailure((l, r) -> { - assertEquals(1, r.queryResult().getTotalHits().value); + assertEquals(1, r.queryResult().getTotalHits().value()); l.onResponse(null); })); future.get(); @@ -2714,7 +2714,7 @@ public void testEnableSearchWorkerThreads() throws IOException { SearchShardTask task = new SearchShardTask(0, "type", "action", "description", null, emptyMap()); try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.DFS, randomBoolean())) { - assertNotNull(searchContext.searcher().getExecutor()); + assertTrue(searchContext.searcher().hasExecutor()); } try { @@ -2725,7 +2725,7 @@ public void testEnableSearchWorkerThreads() throws IOException { .get(); assertTrue(response.isAcknowledged()); try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.DFS, randomBoolean())) { - assertNull(searchContext.searcher().getExecutor()); + assertFalse(searchContext.searcher().hasExecutor()); } } finally { // reset original default setting @@ -2735,7 +2735,7 @@ public void testEnableSearchWorkerThreads() throws IOException { .setPersistentSettings(Settings.builder().putNull(SEARCH_WORKER_THREADS_ENABLED.getKey()).build()) .get(); try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.DFS, randomBoolean())) { - assertNotNull(searchContext.searcher().getExecutor()); + assertTrue(searchContext.searcher().hasExecutor()); } } } 
@@ -2778,7 +2778,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.DFS, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); + assertTrue(searcher.hasExecutor()); final int maxPoolSize = executor.getMaximumPoolSize(); assertEquals( @@ -2795,7 +2795,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { assertNotEquals("Sanity check to ensure this isn't the default of 1 when pool size is unset", 1, expectedSlices); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "DFS supports parallel collection, so the number of slices should be > 1.", @@ -2808,7 +2808,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.QUERY, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); + assertTrue(searcher.hasExecutor()); final int maxPoolSize = executor.getMaximumPoolSize(); assertEquals( @@ -2825,7 +2825,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { assertNotEquals("Sanity check to ensure this isn't the default of 1 when pool size is unset", 1, expectedSlices); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "QUERY supports parallel collection when enabled, so the number of slices should be > 1.", @@ -2838,9 +2838,9 @@ public void 
testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.FETCH, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNull(searcher.getExecutor()); + assertFalse(searcher.hasExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "The number of slices should be 1 as FETCH does not support parallel collection and thus runs on the calling" @@ -2854,9 +2854,9 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.NONE, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNull(searcher.getExecutor()); + assertFalse(searcher.hasExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "The number of slices should be 1 as NONE does not support parallel collection.", @@ -2877,9 +2877,9 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.QUERY, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNull(searcher.getExecutor()); + assertFalse(searcher.hasExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "The number of slices 
should be 1 when QUERY parallel collection is disabled.", @@ -2899,7 +2899,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.QUERY, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); + assertTrue(searcher.hasExecutor()); final int maxPoolSize = executor.getMaximumPoolSize(); assertEquals( @@ -2916,7 +2916,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { assertNotEquals("Sanity check to ensure this isn't the default of 1 when pool size is unset", 1, expectedSlices); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "QUERY supports parallel collection when enabled, so the number of slices should be > 1.", diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java index 6b4618bf3257a..ac5d886c9ba10 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java @@ -33,14 +33,8 @@ import static org.hamcrest.Matchers.equalTo; public class MultiBucketCollectorTests extends ESTestCase { - private static class ScoreAndDoc extends Scorable { + private static class Score extends Scorable { float score; - int doc = -1; - - @Override - public int docID() { - return doc; - } @Override public float score() { @@ -247,7 +241,7 @@ public void testSetScorerAfterCollectionTerminated() throws IOException { collector1 = new TerminateAfterBucketCollector(collector1, 1); collector2 = new 
TerminateAfterBucketCollector(collector2, 2); - Scorable scorer = new ScoreAndDoc(); + Scorable scorer = new Score(); List collectors = Arrays.asList(collector1, collector2); Collections.shuffle(collectors, random()); @@ -275,4 +269,78 @@ public void testSetScorerAfterCollectionTerminated() throws IOException { assertFalse(setScorerCalled1.get()); assertFalse(setScorerCalled2.get()); } + + public void testCacheScores() throws IOException { + ScoringBucketCollector scoringBucketCollector1 = new ScoringBucketCollector(); + ScoringBucketCollector scoringBucketCollector2 = new ScoringBucketCollector(); + + DummyScorable scorable = new DummyScorable(); + + // First test the tester + LeafBucketCollector leafBucketCollector1 = scoringBucketCollector1.getLeafCollector(null); + LeafBucketCollector leafBucketCollector2 = scoringBucketCollector2.getLeafCollector(null); + leafBucketCollector1.setScorer(scorable); + leafBucketCollector2.setScorer(scorable); + leafBucketCollector1.collect(0, 0); + leafBucketCollector2.collect(0, 0); + assertEquals(2, scorable.numScoreCalls); + + // reset + scorable.numScoreCalls = 0; + LeafBucketCollector leafBucketCollector = MultiBucketCollector.wrap( + randomBoolean(), + Arrays.asList(scoringBucketCollector1, scoringBucketCollector2) + ).getLeafCollector(null); + leafBucketCollector.setScorer(scorable); + leafBucketCollector.collect(0, 0); + // Even though both leaf collectors called scorable.score(), it only got called once thanks to caching + assertEquals(1, scorable.numScoreCalls); + } + + private static class ScoringBucketCollector extends BucketCollector { + @Override + public ScoreMode scoreMode() { + return ScoreMode.COMPLETE; // needs scores + } + + @Override + public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx) throws IOException { + return new ScoringLeafBucketCollector(); + } + + @Override + public void preCollection() throws IOException { + + } + + @Override + public void postCollection() throws 
IOException { + + } + } + + private static class ScoringLeafBucketCollector extends LeafBucketCollector { + + private Scorable scorable; + + @Override + public void setScorer(Scorable scorer) throws IOException { + this.scorable = scorer; + } + + @Override + public void collect(int doc, long owningBucketOrd) throws IOException { + scorable.score(); + } + } + + private static class DummyScorable extends Scorable { + int numScoreCalls = 0; + + @Override + public float score() throws IOException { + numScoreCalls++; + return 42f; + } + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java index 879c7e6aeff7f..eb5fa734a8c91 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java @@ -80,12 +80,12 @@ protected void indexData() throws Exception { indexRandom(true, docs); assertNoFailuresAndResponse(prepareSearch("idx").setRouting(routing1).setQuery(matchAllQuery()), resp -> { - long totalOnOne = resp.getHits().getTotalHits().value; + long totalOnOne = resp.getHits().getTotalHits().value(); assertThat(totalOnOne, is(15L)); }); assertNoFailuresAndResponse(prepareSearch("idx").setRouting(routing2).setQuery(matchAllQuery()), resp -> { assertNoFailures(resp); - long totalOnTwo = resp.getHits().getTotalHits().value; + long totalOnTwo = resp.getHits().getTotalHits().value(); assertThat(totalOnTwo, is(12L)); }); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java index bbd12726ac4e3..28a032e7281e6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java @@ -2571,19 +2571,19 @@ public void testWithKeywordAndTopHits() throws Exception { TopHits topHits = result.getBuckets().get(0).getAggregations().get("top_hits"); assertNotNull(topHits); assertEquals(topHits.getHits().getHits().length, 2); - assertEquals(topHits.getHits().getTotalHits().value, 2L); + assertEquals(topHits.getHits().getTotalHits().value(), 2L); assertEquals("{keyword=c}", result.getBuckets().get(1).getKeyAsString()); assertEquals(2L, result.getBuckets().get(1).getDocCount()); topHits = result.getBuckets().get(1).getAggregations().get("top_hits"); assertNotNull(topHits); assertEquals(topHits.getHits().getHits().length, 2); - assertEquals(topHits.getHits().getTotalHits().value, 2L); + assertEquals(topHits.getHits().getTotalHits().value(), 2L); assertEquals("{keyword=d}", result.getBuckets().get(2).getKeyAsString()); assertEquals(1L, result.getBuckets().get(2).getDocCount()); topHits = result.getBuckets().get(2).getAggregations().get("top_hits"); assertNotNull(topHits); assertEquals(topHits.getHits().getHits().length, 1); - assertEquals(topHits.getHits().getTotalHits().value, 1L); + assertEquals(topHits.getHits().getTotalHits().value(), 1L); }); testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new FieldExistsQuery("keyword")), dataset, () -> { @@ -2598,13 +2598,13 @@ public void testWithKeywordAndTopHits() throws Exception { TopHits topHits = result.getBuckets().get(0).getAggregations().get("top_hits"); assertNotNull(topHits); assertEquals(topHits.getHits().getHits().length, 2); - assertEquals(topHits.getHits().getTotalHits().value, 2L); + assertEquals(topHits.getHits().getTotalHits().value(), 2L); assertEquals("{keyword=d}", result.getBuckets().get(1).getKeyAsString()); assertEquals(1L, result.getBuckets().get(1).getDocCount()); topHits = result.getBuckets().get(1).getAggregations().get("top_hits"); assertNotNull(topHits); 
assertEquals(topHits.getHits().getHits().length, 1); - assertEquals(topHits.getHits().getTotalHits().value, 1L); + assertEquals(topHits.getHits().getTotalHits().value(), 1L); }); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java index 71b93888ba243..8a72f8af7035c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java @@ -15,14 +15,29 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.document.TextField; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipper; +import org.apache.lucene.index.FieldInfos; +import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafMetaData; +import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.PointValues; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.index.StoredFields; +import org.apache.lucene.index.TermVectors; +import org.apache.lucene.index.Terms; import org.apache.lucene.search.CollectionTerminatedException; import 
org.apache.lucene.search.DocIdSet; +import org.apache.lucene.search.KnnCollector; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; @@ -54,8 +69,6 @@ import static org.elasticsearch.index.mapper.NumberFieldMapper.NumberType.LONG; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class CompositeValuesCollectorQueueTests extends AggregatorTestCase { static class ClassAndName { @@ -71,11 +84,8 @@ static class ClassAndName { private IndexReader indexReader; @Before - public void setUpMocks() { - indexReader = mock(IndexReader.class); - IndexReaderContext indexReaderContext = mock(IndexReaderContext.class); - when(indexReaderContext.leaves()).thenReturn(List.of()); - when(indexReader.getContext()).thenReturn(indexReaderContext); + public void set() { + indexReader = new DummyReader(); } public void testRandomLong() throws IOException { @@ -425,4 +435,126 @@ private static void createListCombinations( } } } + + static class DummyReader extends LeafReader { + @Override + public CacheHelper getCoreCacheHelper() { + return null; + } + + @Override + public Terms terms(String field) throws IOException { + return null; + } + + @Override + public NumericDocValues getNumericDocValues(String field) throws IOException { + return null; + } + + @Override + public BinaryDocValues getBinaryDocValues(String field) throws IOException { + return null; + } + + @Override + public SortedDocValues getSortedDocValues(String field) throws IOException { + return null; + } + + @Override + public SortedNumericDocValues getSortedNumericDocValues(String field) throws IOException { + return null; + } + + @Override + public SortedSetDocValues getSortedSetDocValues(String field) throws IOException { + return null; + } + + @Override + public NumericDocValues getNormValues(String field) 
throws IOException { + return null; + } + + @Override + public DocValuesSkipper getDocValuesSkipper(String field) throws IOException { + return null; + } + + @Override + public FloatVectorValues getFloatVectorValues(String field) throws IOException { + return null; + } + + @Override + public ByteVectorValues getByteVectorValues(String field) throws IOException { + return null; + } + + @Override + public void searchNearestVectors(String field, float[] target, KnnCollector knnCollector, Bits acceptDocs) throws IOException { + + } + + @Override + public void searchNearestVectors(String field, byte[] target, KnnCollector knnCollector, Bits acceptDocs) throws IOException { + + } + + @Override + public FieldInfos getFieldInfos() { + return null; + } + + @Override + public Bits getLiveDocs() { + return null; + } + + @Override + public PointValues getPointValues(String field) throws IOException { + return null; + } + + @Override + public void checkIntegrity() throws IOException { + + } + + @Override + public LeafMetaData getMetaData() { + return null; + } + + @Override + public TermVectors termVectors() throws IOException { + return null; + } + + @Override + public int numDocs() { + return 0; + } + + @Override + public int maxDoc() { + return 0; + } + + @Override + public StoredFields storedFields() throws IOException { + return null; + } + + @Override + protected void doClose() throws IOException { + + } + + @Override + public CacheHelper getReaderCacheHelper() { + return null; + } + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java index 4b9a72bacc97d..3d1ed0704acf9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.document.LongPoint; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.Term; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.ConstantScoreQuery; @@ -258,7 +259,7 @@ public void testNumericSorted() { } private static IndexReader mockIndexReader(int maxDoc, int numDocs) { - IndexReader reader = mock(IndexReader.class); + IndexReader reader = mock(LeafReader.class); when(reader.hasDeletions()).thenReturn(maxDoc - numDocs > 0); when(reader.maxDoc()).thenReturn(maxDoc); when(reader.numDocs()).thenReturn(numDocs); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java index 26e643510859c..06f1db352e8f0 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java @@ -42,9 +42,7 @@ public boolean advanceExact(int docID) { @Override public long nextOrd() { - if (i == ords.length) { - return NO_MORE_ORDS; - } + assert i < ords.length; return ords[i++]; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java index 4a08295bd7bcd..48aabb61371e9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java @@ -414,7 +414,7 @@ public void testWithNestedScoringAggregations() throws 
IOException { InternalTopHits topHits = bucket.getAggregations().get("top_hits"); TotalHits hits = topHits.getHits().getTotalHits(); assertNotNull(hits); - assertThat(hits.value, equalTo(counter)); + assertThat(hits.value(), equalTo(counter)); assertThat(topHits.getHits().getMaxScore(), equalTo(Float.NaN)); counter += 1; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java index 2c76ed96da488..b267cb2e656b6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java @@ -1730,8 +1730,8 @@ private void assertNestedTopHitsScore(InternalMultiBucketAggregation terms int ptr = 9; for (MultiBucketsAggregation.Bucket bucket : terms.getBuckets()) { InternalTopHits topHits = bucket.getAggregations().get("top_hits"); - assertThat(topHits.getHits().getTotalHits().value, equalTo((long) ptr)); - assertEquals(TotalHits.Relation.EQUAL_TO, topHits.getHits().getTotalHits().relation); + assertThat(topHits.getHits().getTotalHits().value(), equalTo((long) ptr)); + assertEquals(TotalHits.Relation.EQUAL_TO, topHits.getHits().getTotalHits().relation()); if (withScore) { assertThat(topHits.getHits().getMaxScore(), equalTo(1f)); } else { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java index 87eb2bdc29fbd..07d535167b318 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java @@ -220,8 +220,8 @@ protected void assertReduced(InternalTopHits reduced, List inpu TotalHits.Relation relation = 
TotalHits.Relation.EQUAL_TO; for (int input = 0; input < inputs.size(); input++) { SearchHits internalHits = inputs.get(input).getHits(); - totalHits += internalHits.getTotalHits().value; - if (internalHits.getTotalHits().relation == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) { + totalHits += internalHits.getTotalHits().value(); + if (internalHits.getTotalHits().relation() == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) { relation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO; } maxScore = max(maxScore, internalHits.getMaxScore()); @@ -379,14 +379,14 @@ protected InternalTopHits mutateInstance(InternalTopHits instance) { case 2 -> size += between(1, 100); case 3 -> topDocs = new TopDocsAndMaxScore( new TopDocs( - new TotalHits(topDocs.topDocs.totalHits.value + between(1, 100), topDocs.topDocs.totalHits.relation), + new TotalHits(topDocs.topDocs.totalHits.value() + between(1, 100), topDocs.topDocs.totalHits.relation()), topDocs.topDocs.scoreDocs ), topDocs.maxScore + randomFloat() ); case 4 -> { TotalHits totalHits = new TotalHits( - searchHits.getTotalHits().value + between(1, 100), + searchHits.getTotalHits().value() + between(1, 100), randomFrom(TotalHits.Relation.values()) ); searchHits = SearchHits.unpooled(searchHits.getHits(), totalHits, searchHits.getMaxScore() + randomFloat()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java index 0c5217ded982b..6fb147e3ffc89 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java @@ -55,7 +55,7 @@ public void testTopLevel() throws Exception { result = testCase(query, topHits("_name")); } SearchHits searchHits = ((TopHits) result).getHits(); - assertEquals(3L, searchHits.getTotalHits().value); + assertEquals(3L, 
searchHits.getTotalHits().value()); assertEquals("3", searchHits.getAt(0).getId()); assertEquals("2", searchHits.getAt(1).getId()); assertEquals("1", searchHits.getAt(2).getId()); @@ -65,7 +65,7 @@ public void testTopLevel() throws Exception { public void testNoResults() throws Exception { TopHits result = (TopHits) testCase(new MatchNoDocsQuery(), topHits("_name").sort("string", SortOrder.DESC)); SearchHits searchHits = result.getHits(); - assertEquals(0L, searchHits.getTotalHits().value); + assertEquals(0L, searchHits.getTotalHits().value()); assertFalse(AggregationInspectionHelper.hasValue(((InternalTopHits) result))); } @@ -89,27 +89,27 @@ public void testInsideTerms() throws Exception { // The "a" bucket TopHits hits = (TopHits) terms.getBucketByKey("a").getAggregations().get("top"); SearchHits searchHits = (hits).getHits(); - assertEquals(2L, searchHits.getTotalHits().value); + assertEquals(2L, searchHits.getTotalHits().value()); assertEquals("2", searchHits.getAt(0).getId()); assertEquals("1", searchHits.getAt(1).getId()); assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("a").getAggregations().get("top")))); // The "b" bucket searchHits = ((TopHits) terms.getBucketByKey("b").getAggregations().get("top")).getHits(); - assertEquals(2L, searchHits.getTotalHits().value); + assertEquals(2L, searchHits.getTotalHits().value()); assertEquals("3", searchHits.getAt(0).getId()); assertEquals("1", searchHits.getAt(1).getId()); assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("b").getAggregations().get("top")))); // The "c" bucket searchHits = ((TopHits) terms.getBucketByKey("c").getAggregations().get("top")).getHits(); - assertEquals(1L, searchHits.getTotalHits().value); + assertEquals(1L, searchHits.getTotalHits().value()); assertEquals("2", searchHits.getAt(0).getId()); assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) 
terms.getBucketByKey("c").getAggregations().get("top")))); // The "d" bucket searchHits = ((TopHits) terms.getBucketByKey("d").getAggregations().get("top")).getHits(); - assertEquals(1L, searchHits.getTotalHits().value); + assertEquals(1L, searchHits.getTotalHits().value()); assertEquals("3", searchHits.getAt(0).getId()); assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("d").getAggregations().get("top")))); } @@ -179,7 +179,7 @@ public void testSetScorer() throws Exception { .build(); AggregationBuilder agg = AggregationBuilders.topHits("top_hits"); TopHits result = searchAndReduce(reader, new AggTestConfig(agg, STRING_FIELD_TYPE).withQuery(query)); - assertEquals(3, result.getHits().getTotalHits().value); + assertEquals(3, result.getHits().getTotalHits().value()); reader.close(); directory.close(); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java index e47614145e924..7fa2732191cd1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java @@ -88,12 +88,9 @@ public boolean advanceExact(int docID) { @Override public long nextOrd() { - if (consumed) { - return SortedSetDocValues.NO_MORE_ORDS; - } else { - consumed = true; - return 0; - } + assert consumed == false; + consumed = true; + return 0; } @Override diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java index bc6b72d9ddd3e..2a36887cc459a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java @@ 
-124,11 +124,8 @@ public long getValueCount() { @Override public long nextOrd() { - if (i < ords[doc].length) { - return ords[doc][i++]; - } else { - return NO_MORE_ORDS; - } + assert i < ords[doc].length; + return ords[doc][i++]; } @Override @@ -153,10 +150,8 @@ public int docValueCount() { for (int ord : ords[i]) { assertEquals(values[ord], withMissingReplaced.lookupOrd(withMissingReplaced.nextOrd())); } - assertEquals(SortedSetDocValues.NO_MORE_ORDS, withMissingReplaced.nextOrd()); } else { assertEquals(missing, withMissingReplaced.lookupOrd(withMissingReplaced.nextOrd())); - assertEquals(SortedSetDocValues.NO_MORE_ORDS, withMissingReplaced.nextOrd()); } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcherTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcherTests.java index caf8df55ce528..71fd3a4761cbe 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcherTests.java @@ -201,7 +201,10 @@ public void testCollectFromMiddle() throws IOException { BucketCollector collector = getBucketCollector(2 * DOC_COUNTS); // skip the first doc of segment 1 and 2 - indexSearcher.search(SortedSetDocValuesField.newSlowSetQuery("_tsid", new BytesRef("tsid0"), new BytesRef("tsid1")), collector); + indexSearcher.search( + SortedSetDocValuesField.newSlowSetQuery("_tsid", List.of(new BytesRef("tsid0"), new BytesRef("tsid1"))), + collector + ); collector.postCollection(); reader.close(); diff --git a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java index 34ee0eec101b6..9957d8c92b955 100644 --- a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java @@ -28,7 +28,6 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.BoostQuery; -import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.Collector; import org.apache.lucene.search.CollectorManager; import org.apache.lucene.search.ConstantScoreQuery; @@ -47,6 +46,7 @@ import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHitCountCollectorManager; @@ -76,7 +76,6 @@ import java.io.IOException; import java.io.UncheckedIOException; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.IdentityHashMap; @@ -251,7 +250,7 @@ public void testConcurrentCollection() throws Exception { Integer.MAX_VALUE, 1 ); - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); assertEquals(numDocs, totalHits.intValue()); int numExpectedTasks = ContextIndexSearcher.computeSlices(searcher.getIndexReader().leaves(), Integer.MAX_VALUE, 1).length; // check that each slice except for one that executes on the calling thread goes to the executor, no matter the queue size @@ -367,7 +366,7 @@ public void onRemoval(ShardId shardId, Accountable accountable) { assertEquals(1, searcher.count(new CreateScorerOnceQuery(new MatchAllDocsQuery()))); TopDocs topDocs = searcher.search(new BoostQuery(new ConstantScoreQuery(new TermQuery(new Term("foo", "bar"))), 3f), 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(1, 
topDocs.scoreDocs.length); assertEquals(3f, topDocs.scoreDocs[0].score, 0); @@ -406,7 +405,7 @@ private static void assertSlices(LeafSlice[] slices, int numDocs, int numThreads int sumDocs = 0; assertThat(slices.length, lessThanOrEqualTo(numThreads)); for (LeafSlice slice : slices) { - int sliceDocs = Arrays.stream(slice.leaves).mapToInt(l -> l.reader().maxDoc()).sum(); + int sliceDocs = slice.getMaxDocs(); assertThat(sliceDocs, greaterThanOrEqualTo((int) (0.1 * numDocs))); sumDocs += sliceDocs; } @@ -497,9 +496,14 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo } return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { contextIndexSearcher.throwTimeExceededException(); - return new ConstantScoreScorer(this, score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc())); + Scorer scorer = new ConstantScoreScorer( + score(), + scoreMode, + DocIdSetIterator.all(context.reader().maxDoc()) + ); + return new DefaultScorerSupplier(scorer); } @Override @@ -583,7 +587,10 @@ public Query rewrite(IndexSearcher indexSearcher) { return null; } }; - Integer hitCount = contextIndexSearcher.search(testQuery, new TotalHitCountCollectorManager()); + Integer hitCount = contextIndexSearcher.search( + testQuery, + new TotalHitCountCollectorManager(contextIndexSearcher.getSlices()) + ); assertEquals(0, hitCount.intValue()); assertTrue(contextIndexSearcher.timeExceeded()); } finally { @@ -747,15 +754,9 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - assertTrue(seenLeaves.add(context.reader().getCoreCacheHelper().getKey())); - return weight.scorer(context); - } - - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { + public ScorerSupplier 
scorerSupplier(LeafReaderContext context) throws IOException { assertTrue(seenLeaves.add(context.reader().getCoreCacheHelper().getKey())); - return weight.bulkScorer(context); + return weight.scorerSupplier(context); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java index c42f3156c6d29..b728d40900570 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java @@ -125,14 +125,14 @@ public void testManagerWithSearcher() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(10, null, 1000); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(10, null, 1000); String profileReason = "profiler_reason"; ProfileCollectorManager profileCollectorManager = new ProfileCollectorManager<>(topDocsManager, profileReason); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), profileCollectorManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); CollectorResult result = profileCollectorManager.getCollectorTree(); assertEquals("profiler_reason", result.getReason()); assertEquals("SimpleTopScoreDocCollector", result.getName()); diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileScorerTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileScorerTests.java index e868293ef4a1c..98d79df63db8e 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileScorerTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileScorerTests.java @@ -10,14 +10,12 @@ package org.elasticsearch.search.profile.query; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.MultiReader; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Matches; import org.apache.lucene.search.MatchesIterator; import org.apache.lucene.search.Query; -import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; @@ -34,10 +32,6 @@ private static class FakeScorer extends Scorer { public float maxScore, minCompetitiveScore; - protected FakeScorer(Weight weight) { - super(weight); - } - @Override public DocIdSetIterator iterator() { throw new UnsupportedOperationException(); @@ -75,22 +69,14 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio return Explanation.match(1, "fake_description"); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - FakeScorer fakeScorer = new FakeScorer(this); - fakeScorer.maxScore = 42f; - return fakeScorer; - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) { - Weight weight = this; return new ScorerSupplier() { private long cost = 0; @Override public Scorer get(long leadCost) { - return new Scorer(weight) { + return new Scorer() { @Override public DocIdSetIterator iterator() { return null; @@ -187,23 +173,17 @@ public Iterator iterator() { } public void testPropagateMinCompetitiveScore() throws IOException { - Query query = new MatchAllDocsQuery(); - Weight weight = query.createWeight(newSearcher(new MultiReader()), ScoreMode.TOP_SCORES, 1f); - FakeScorer fakeScorer = new FakeScorer(weight); + FakeScorer fakeScorer = new FakeScorer(); QueryProfileBreakdown profile = new 
QueryProfileBreakdown(); - ProfileWeight profileWeight = new ProfileWeight(query, weight, profile); - ProfileScorer profileScorer = new ProfileScorer(profileWeight, fakeScorer, profile); + ProfileScorer profileScorer = new ProfileScorer(fakeScorer, profile); profileScorer.setMinCompetitiveScore(0.42f); assertEquals(0.42f, fakeScorer.minCompetitiveScore, 0f); } public void testPropagateMaxScore() throws IOException { - Query query = new MatchAllDocsQuery(); - Weight weight = query.createWeight(newSearcher(new MultiReader()), ScoreMode.TOP_SCORES, 1f); - FakeScorer fakeScorer = new FakeScorer(weight); + FakeScorer fakeScorer = new FakeScorer(); QueryProfileBreakdown profile = new QueryProfileBreakdown(); - ProfileWeight profileWeight = new ProfileWeight(query, weight, profile); - ProfileScorer profileScorer = new ProfileScorer(profileWeight, fakeScorer, profile); + ProfileScorer profileScorer = new ProfileScorer(fakeScorer, profile); profileScorer.setMinCompetitiveScore(0.42f); fakeScorer.maxScore = 42f; assertEquals(42f, profileScorer.getMaxScore(DocIdSetIterator.NO_MORE_DOCS), 0f); diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java index 9df03905f7be2..44c46e3f692ba 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java @@ -241,11 +241,6 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio throw new UnsupportedOperationException(); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - throw new UnsupportedOperationException(); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { return new ScorerSupplier() { diff --git 
a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java index d33c033ecef2d..de6218e912953 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java @@ -120,7 +120,7 @@ public void testTopDocsOnly() throws IOException { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); } { CollectorManager topScoreDocManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -133,7 +133,7 @@ public void testTopDocsOnly() throws IOException { ); Result result = searcher.search(new TermQuery(new Term("field2", "value")), manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); } } @@ -150,7 +150,7 @@ public void testWithAggs() throws IOException { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); assertEquals(numDocs, result.aggs.intValue()); } { @@ -165,7 +165,7 @@ public void testWithAggs() throws IOException { ); Result result = searcher.search(new TermQuery(new Term("field2", "value")), manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); assertEquals(numField2Docs, result.aggs.intValue()); } } @@ -184,7 +184,7 @@ public void testPostFilterTopDocsOnly() throws IOException { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); 
assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -199,7 +199,7 @@ public void testPostFilterTopDocsOnly() throws IOException { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); } } @@ -218,7 +218,7 @@ public void testPostFilterWithAggs() throws IOException { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); assertEquals(numDocs, result.aggs.intValue()); } { @@ -235,7 +235,7 @@ public void testPostFilterWithAggs() throws IOException { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); // post_filter is not applied to aggs assertEquals(reader.maxDoc(), result.aggs.intValue()); } @@ -251,7 +251,7 @@ public void testMinScoreTopDocsOnly() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField2Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField2Docs].score; } @@ -266,7 +266,7 @@ public void testMinScoreTopDocsOnly() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, 
result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -279,7 +279,7 @@ public void testMinScoreTopDocsOnly() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -292,7 +292,7 @@ public void testMinScoreTopDocsOnly() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(0, result.topDocs.totalHits.value); + assertEquals(0, result.topDocs.totalHits.value()); } } @@ -306,7 +306,7 @@ public void testMinScoreWithAggs() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField2Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField2Docs].score; } @@ -322,7 +322,7 @@ public void testMinScoreWithAggs() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); // min_score is applied to aggs as well as top docs assertEquals(numField2Docs, result.aggs.intValue()); } @@ -338,7 +338,7 @@ public void testMinScoreWithAggs() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); 
assertEquals(numDocs, result.aggs.intValue()); } { @@ -353,7 +353,7 @@ public void testMinScoreWithAggs() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(0, result.topDocs.totalHits.value); + assertEquals(0, result.topDocs.totalHits.value()); assertEquals(0, result.aggs.intValue()); } } @@ -370,7 +370,7 @@ public void testPostFilterAndMinScoreTopDocsOnly() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField3Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField3Docs].score; } @@ -385,7 +385,7 @@ public void testPostFilterAndMinScoreTopDocsOnly() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2AndField3Docs, result.topDocs.totalHits.value); + assertEquals(numField2AndField3Docs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -398,7 +398,7 @@ public void testPostFilterAndMinScoreTopDocsOnly() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -411,7 +411,7 @@ public void testPostFilterAndMinScoreTopDocsOnly() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(0, result.topDocs.totalHits.value); + assertEquals(0, result.topDocs.totalHits.value()); } } @@ -427,7 +427,7 @@ public void 
testPostFilterAndMinScoreWithAggs() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField3Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField3Docs].score; } @@ -443,7 +443,7 @@ public void testPostFilterAndMinScoreWithAggs() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2AndField3Docs, result.topDocs.totalHits.value); + assertEquals(numField2AndField3Docs, result.topDocs.totalHits.value()); assertEquals(numField3Docs, result.aggs.intValue()); } { @@ -458,7 +458,7 @@ public void testPostFilterAndMinScoreWithAggs() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); assertEquals(numDocs, result.aggs.intValue()); } { @@ -473,7 +473,7 @@ public void testPostFilterAndMinScoreWithAggs() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(0, result.topDocs.totalHits.value); + assertEquals(0, result.topDocs.totalHits.value()); assertEquals(0, result.aggs.intValue()); } } @@ -623,7 +623,7 @@ public void testTerminateAfterTopDocsOnlyWithMinScore() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField2Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; } { @@ -638,7 +638,7 @@ public void testTerminateAfterTopDocsOnlyWithMinScore() throws 
IOException { ); Result result = searcher.search(booleanQuery, manager); assertTrue(result.terminatedAfter); - assertEquals(terminateAfter, result.topDocs.totalHits.value); + assertEquals(terminateAfter, result.topDocs.totalHits.value()); } } @@ -651,7 +651,7 @@ public void testTerminateAfterWithAggsAndMinScore() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField2Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; } { @@ -667,7 +667,7 @@ public void testTerminateAfterWithAggsAndMinScore() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertTrue(result.terminatedAfter); - assertEquals(terminateAfter, result.topDocs.totalHits.value); + assertEquals(terminateAfter, result.topDocs.totalHits.value()); assertEquals(terminateAfter, result.aggs.intValue()); } } @@ -683,7 +683,7 @@ public void testTerminateAfterAndPostFilterAndMinScoreTopDocsOnly() throws IOExc { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField3Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; } { @@ -698,7 +698,7 @@ public void testTerminateAfterAndPostFilterAndMinScoreTopDocsOnly() throws IOExc ); Result result = searcher.search(booleanQuery, manager); assertTrue(result.terminatedAfter); - assertEquals(terminateAfter, result.topDocs.totalHits.value); + assertEquals(terminateAfter, result.topDocs.totalHits.value()); } } @@ -713,7 +713,7 @@ public void testTerminateAfterAndPostFilterAndMinScoreWithAggs() throws IOExcept { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField3Docs + 1, null, 1000); TopDocs topDocs = 
searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; } { @@ -729,7 +729,7 @@ public void testTerminateAfterAndPostFilterAndMinScoreWithAggs() throws IOExcept ); Result result = searcher.search(booleanQuery, manager); assertTrue(result.terminatedAfter); - assertEquals(terminateAfter, result.topDocs.totalHits.value); + assertEquals(terminateAfter, result.topDocs.totalHits.value()); // aggs see more documents because the filter is not applied to them assertThat(result.aggs, Matchers.greaterThanOrEqualTo(terminateAfter)); } @@ -1139,11 +1139,6 @@ public void testSetScorerAfterCollectionTerminated() throws IOException { public float score() { return 0; } - - @Override - public int docID() { - return 0; - } }; QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector( @@ -1473,11 +1468,6 @@ public float score() throws IOException { return 0; } - @Override - public int docID() { - return 0; - } - @Override public void setMinCompetitiveScore(float minScore) { setMinCompetitiveScoreCalled = true; @@ -1521,7 +1511,7 @@ public void setScorer(Scorable scorer) throws IOException { setScorerCalled = true; if (expectedScorable != null) { while (expectedScorable.equals(scorer.getClass()) == false && scorer instanceof FilterScorable) { - scorer = scorer.getChildren().iterator().next().child; + scorer = scorer.getChildren().iterator().next().child(); } assertEquals(expectedScorable, scorer.getClass()); } diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java index 56a8b0f3a8c30..1f74668158e0e 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java @@ -144,7 +144,7 @@ private void countTestCase(Query query, IndexReader reader, 
boolean shouldCollec QueryPhase.addCollectorsAndSearch(context); ContextIndexSearcher countSearcher = shouldCollectCount ? newContextSearcher(reader) : noCollectionContextSearcher(reader); - assertEquals(countSearcher.count(query), context.queryResult().topDocs().topDocs.totalHits.value); + assertEquals(countSearcher.count(query), context.queryResult().topDocs().topDocs.totalHits.value()); } } @@ -233,15 +233,15 @@ public void testPostFilterDisablesHitCountShortcut() throws Exception { try (TestSearchContext context = createContext(noCollectionContextSearcher(reader), new MatchAllDocsQuery())) { context.setSize(0); QueryPhase.addCollectorsAndSearch(context); - assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 10), new MatchAllDocsQuery())) { // shortcutTotalHitCount makes us not track total hits as part of the top docs collection, hence size is the threshold context.setSize(10); QueryPhase.addCollectorsAndSearch(context); - assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(newContextSearcher(reader), new MatchAllDocsQuery())) { // QueryPhaseCollector does not propagate Weight#count when a post_filter is provided, hence it forces collection despite @@ -249,16 +249,16 @@ public void 
testPostFilterDisablesHitCountShortcut() throws Exception { context.setSize(0); context.parsedPostFilter(new ParsedQuery(new MatchNoDocsQuery())); QueryPhase.executeQuery(context); - assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(newContextSearcher(reader), new MatchAllDocsQuery())) { // shortcutTotalHitCount is disabled for filter collectors, hence we collect until track_total_hits context.setSize(10); context.parsedPostFilter(new ParsedQuery(new MatchNoDocsQuery())); QueryPhase.addCollectorsAndSearch(context); - assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } } @@ -269,8 +269,8 @@ public void testTerminateAfterWithFilter() throws Exception { context.setSize(10); context.parsedPostFilter(new ParsedQuery(new TermQuery(new Term("foo", "bar")))); QueryPhase.addCollectorsAndSearch(context); - assertEquals(1, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(1, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); } } @@ -280,15 +280,15 @@ public void 
testMinScoreDisablesHitCountShortcut() throws Exception { try (TestSearchContext context = createContext(noCollectionContextSearcher(reader), new MatchAllDocsQuery())) { context.setSize(0); QueryPhase.addCollectorsAndSearch(context); - assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 10), new MatchAllDocsQuery())) { // shortcutTotalHitCount makes us not track total hits as part of the top docs collection, hence size is the threshold context.setSize(10); QueryPhase.addCollectorsAndSearch(context); - assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(newContextSearcher(reader), new MatchAllDocsQuery())) { // QueryPhaseCollector does not propagate Weight#count when min_score is provided, hence it forces collection despite @@ -296,16 +296,16 @@ public void testMinScoreDisablesHitCountShortcut() throws Exception { context.setSize(0); context.minimumScore(100); QueryPhase.addCollectorsAndSearch(context); - assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value()); + 
assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(newContextSearcher(reader), new MatchAllDocsQuery())) { // shortcutTotalHitCount is disabled for filter collectors, hence we collect until track_total_hits context.setSize(10); context.minimumScore(100); QueryPhase.executeQuery(context); - assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } } @@ -336,17 +336,17 @@ public void testInOrderScrollOptimization() throws Exception { context.setSize(size); QueryPhase.addCollectorsAndSearch(context); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); assertNull(context.queryResult().terminatedEarly()); assertThat(context.terminateAfter(), equalTo(0)); - assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); + assertThat(context.queryResult().getTotalHits().value(), equalTo((long) numDocs)); context.setSearcher(earlyTerminationContextSearcher(reader, size)); QueryPhase.addCollectorsAndSearch(context); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + 
assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().getTotalHits().value(), equalTo((long) numDocs)); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0].doc, greaterThanOrEqualTo(size)); } } @@ -364,8 +364,8 @@ public void testTerminateAfterSize0HitCountShortcut() throws Exception { context.setSize(0); QueryPhase.addCollectorsAndSearch(context); assertFalse(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } // test interaction between trackTotalHits and terminateAfter @@ -375,8 +375,8 @@ public void testTerminateAfterSize0HitCountShortcut() throws Exception { context.trackTotalHitsUpTo(-1); QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(0L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(0L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } try (TestSearchContext context = createContext(noCollectionContextSearcher(reader), new 
MatchAllDocsQuery())) { @@ -387,8 +387,8 @@ public void testTerminateAfterSize0HitCountShortcut() throws Exception { QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); // Given that total hit count does not require collection, PartialHitCountCollector does not early terminate. - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } } @@ -407,8 +407,8 @@ public void testTerminateAfterSize0NoHitCountShortcut() throws Exception { context.setSize(0); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(1L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } // test interaction between trackTotalHits and terminateAfter @@ -419,8 +419,8 @@ public void testTerminateAfterSize0NoHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(-1); QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(0L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + 
assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(0L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } { @@ -434,9 +434,9 @@ public void testTerminateAfterSize0NoHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(trackTotalHits); QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) trackTotalHits)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) trackTotalHits)); assertThat( - context.queryResult().topDocs().topDocs.totalHits.relation, + context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) ); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); @@ -449,8 +449,8 @@ public void testTerminateAfterSize0NoHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(randomIntBetween(11, Integer.MAX_VALUE)); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(10L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(10L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } } @@ -468,7 +468,7 @@ public void testTerminateAfterWithHitsHitCountShortcut() throws Exception { context.setSize(10); QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); - 
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(10)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 1), new MatchAllDocsQuery())) { @@ -477,8 +477,8 @@ public void testTerminateAfterWithHitsHitCountShortcut() throws Exception { context.setSize(1); QueryPhase.addCollectorsAndSearch(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); } // test interaction between trackTotalHits and terminateAfter @@ -489,8 +489,8 @@ public void testTerminateAfterWithHitsHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(-1); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(0L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(0L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 7), new 
MatchAllDocsQuery())) { @@ -500,8 +500,8 @@ public void testTerminateAfterWithHitsHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(randomIntBetween(1, Integer.MAX_VALUE)); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } { @@ -515,8 +515,8 @@ public void testTerminateAfterWithHitsHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(randomIntBetween(1, Integer.MAX_VALUE)); QueryPhase.executeQuery(context); assertThat(context.queryResult().terminatedEarly(), either(is(true)).or(is(false))); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(size)); } } @@ -535,8 +535,8 @@ public void testTerminateAfterWithHitsNoHitCountShortcut() throws Exception { context.setSize(1); QueryPhase.addCollectorsAndSearch(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + 
assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(1L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); } // test interaction between trackTotalHits and terminateAfter @@ -546,8 +546,8 @@ public void testTerminateAfterWithHitsNoHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(-1); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(0L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(0L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 7), query)) { @@ -558,8 +558,8 @@ public void testTerminateAfterWithHitsNoHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(randomIntBetween(1, 6)); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(7L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(7L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 7), query)) { @@ 
-572,8 +572,8 @@ public void testTerminateAfterWithHitsNoHitCountShortcut() throws Exception { // depending on docs distribution we may or may not be able to honor terminate_after: low scoring hits are skipped via // setMinCompetitiveScore, which bypasses terminate_after until the next leaf collector is pulled, when that happens. assertThat(context.queryResult().terminatedEarly(), either(is(true)).or(is(false))); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(7L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(7L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(5)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 7), query)) { @@ -584,9 +584,9 @@ public void testTerminateAfterWithHitsNoHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(randomIntBetween(8, Integer.MAX_VALUE)); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(7L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(7L)); // TODO this looks off, it should probably be GREATER_THAN_OR_EQUAL_TO - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } } @@ -599,8 +599,8 @@ public void testIndexSortingEarlyTermination() throws Exception { context.setSize(1); context.sort(new SortAndFormats(sort, new 
DocValueFormat[] { DocValueFormat.RAW })); QueryPhase.addCollectorsAndSearch(context); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); FieldDoc fieldDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; @@ -612,7 +612,7 @@ public void testIndexSortingEarlyTermination() throws Exception { context.parsedPostFilter(new ParsedQuery(new MinDocQuery(1))); QueryPhase.addCollectorsAndSearch(context); assertNull(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(numDocs - 1L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(numDocs - 1L)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); FieldDoc fieldDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; @@ -623,7 +623,7 @@ public void testIndexSortingEarlyTermination() throws Exception { context.sort(new SortAndFormats(sort, new DocValueFormat[] { DocValueFormat.RAW })); QueryPhase.executeQuery(context); assertNull(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); 
assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); FieldDoc fieldDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; @@ -686,18 +686,18 @@ public void testIndexSortScrollOptimization() throws Exception { context.sort(searchSortAndFormat); QueryPhase.addCollectorsAndSearch(context); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); assertNull(context.queryResult().terminatedEarly()); assertThat(context.terminateAfter(), equalTo(0)); - assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); + assertThat(context.queryResult().getTotalHits().value(), equalTo((long) numDocs)); int sizeMinus1 = context.queryResult().topDocs().topDocs.scoreDocs.length - 1; FieldDoc lastDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[sizeMinus1]; context.setSearcher(earlyTerminationContextSearcher(reader, 10)); QueryPhase.addCollectorsAndSearch(context); assertNull(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); assertThat(context.terminateAfter(), equalTo(0)); - assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); + assertThat(context.queryResult().getTotalHits().value(), equalTo((long) numDocs)); FieldDoc firstDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; for (int i = 0; i < searchSortAndFormat.sort.getSort().length; i++) { @SuppressWarnings("unchecked") @@ -746,8 +746,8 @@ public void testDisableTopScoreCollection() throws Exception { ); assertEquals(collectorManager.newCollector().scoreMode(), org.apache.lucene.search.ScoreMode.COMPLETE); QueryPhase.executeQuery(context); - assertEquals(5, 
context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation, TotalHits.Relation.EQUAL_TO); + assertEquals(5, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation(), TotalHits.Relation.EQUAL_TO); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(3)); } try (TestSearchContext context = createContext(newContextSearcher(reader), q)) { @@ -764,9 +764,9 @@ public void testDisableTopScoreCollection() throws Exception { ); assertEquals(collectorManager.newCollector().scoreMode(), org.apache.lucene.search.ScoreMode.TOP_DOCS); QueryPhase.executeQuery(context); - assertEquals(5, context.queryResult().topDocs().topDocs.totalHits.value); + assertEquals(5, context.queryResult().topDocs().topDocs.totalHits.value()); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(3)); - assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO); + assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation(), TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO); } } @@ -881,8 +881,8 @@ public void testNumericSortOptimization() throws Exception { QueryPhase.addCollectorsAndSearch(searchContext); assertTrue(searchContext.sort().sort.getSort()[0].getOptimizeSortWithPoints()); assertThat(searchContext.queryResult().topDocs().topDocs.scoreDocs, arrayWithSize(0)); - assertThat(searchContext.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(searchContext.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(searchContext.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(searchContext.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); } // 7. 
Test that sort optimization doesn't break a case where from = 0 and size= 0 @@ -950,8 +950,8 @@ public void testMaxScoreQueryVisitor() { // assert score docs are in order and their number is as expected private static void assertSortResults(TopDocs topDocs, long totalNumDocs, boolean isDoubleSort) { - assertEquals(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, topDocs.totalHits.relation); - assertThat(topDocs.totalHits.value, lessThan(totalNumDocs)); // we collected less docs than total number + assertEquals(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, topDocs.totalHits.relation()); + assertThat(topDocs.totalHits.value(), lessThan(totalNumDocs)); // we collected less docs than total number long cur1, cur2; long prev1 = Long.MIN_VALUE; long prev2 = Long.MIN_VALUE; @@ -990,7 +990,7 @@ public void testMinScore() throws Exception { context.trackTotalHitsUpTo(5); QueryPhase.addCollectorsAndSearch(context); - assertEquals(10, context.queryResult().topDocs().topDocs.totalHits.value); + assertEquals(10, context.queryResult().topDocs().topDocs.totalHits.value()); } } @@ -1136,7 +1136,7 @@ private static ContextIndexSearcher earlyTerminationContextSearcher(IndexReader ) { @Override - public void search(List leaves, Weight weight, Collector collector) throws IOException { + public void search(LeafReaderContextPartition[] partitions, Weight weight, Collector collector) throws IOException { final Collector in = new FilterCollector(collector) { @Override public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException { @@ -1153,7 +1153,7 @@ public void collect(int doc) throws IOException { }; } }; - super.search(leaves, weight, in); + super.search(partitions, weight, in); } }; } diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java index 3bf9514cad547..b417f7adbc8b7 100644 --- 
a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java @@ -32,6 +32,7 @@ import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; @@ -132,7 +133,7 @@ private void scorerTimeoutTest(int size, CheckedConsumer context = mapping.parseContext(document); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java b/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java index 320e3fce2e832..7791073ef36fa 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java @@ -211,8 +211,8 @@ public void testFrequencyThreshold() throws Exception { ) ); assertThat(candidateSet.candidates.length, equalTo(1)); - assertThat(candidateSet.candidates[0].termStats.docFreq, equalTo(numDocs - 1)); - assertThat(candidateSet.candidates[0].termStats.totalTermFreq, equalTo((long) numDocs - 1)); + assertThat(candidateSet.candidates[0].termStats.docFreq(), equalTo(numDocs - 1)); + assertThat(candidateSet.candidates[0].termStats.totalTermFreq(), equalTo((long) numDocs - 1)); // test that it doesn't overflow assertThat(generator.thresholdTermFrequency(Integer.MAX_VALUE), equalTo(Integer.MAX_VALUE)); @@ -227,8 +227,8 @@ public void testFrequencyThreshold() throws Exception { ) ); assertThat(candidateSet.candidates.length, equalTo(1)); - assertThat(candidateSet.candidates[0].termStats.docFreq, equalTo(numDocs - 1)); - assertThat(candidateSet.candidates[0].termStats.totalTermFreq, equalTo((long) numDocs - 
1)); + assertThat(candidateSet.candidates[0].termStats.docFreq(), equalTo(numDocs - 1)); + assertThat(candidateSet.candidates[0].termStats.totalTermFreq(), equalTo((long) numDocs - 1)); // test that it doesn't overflow assertThat(generator.thresholdTermFrequency(Integer.MAX_VALUE), equalTo(Integer.MAX_VALUE)); diff --git a/server/src/test/java/org/elasticsearch/search/vectors/AbstractDenseVectorQueryTestCase.java b/server/src/test/java/org/elasticsearch/search/vectors/AbstractDenseVectorQueryTestCase.java index dc9d026af0135..72ae45fd26143 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/AbstractDenseVectorQueryTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/AbstractDenseVectorQueryTestCase.java @@ -226,7 +226,7 @@ public void testRandom() throws IOException { int n = random().nextInt(100) + 1; TopDocs results = searcher.search(query, n); assert reader.hasDeletions() == false; - assertTrue(results.totalHits.value >= results.scoreDocs.length); + assertTrue(results.totalHits.value() >= results.scoreDocs.length); // verify the results are in descending score order float last = Float.MAX_VALUE; for (ScoreDoc scoreDoc : results.scoreDocs) { diff --git a/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java index 18d5c8c85fbec..bef0bbfd27ff6 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java @@ -282,8 +282,8 @@ public void testScoreDocQuery() throws IOException { final Weight w = query.createWeight(searcher, ScoreMode.TOP_SCORES, 1.0f); TopDocs topDocs = searcher.search(query, 100); - assertEquals(scoreDocs.length, topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, topDocs.totalHits.relation); + assertEquals(scoreDocs.length, 
topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, topDocs.totalHits.relation()); Arrays.sort(topDocs.scoreDocs, Comparator.comparingInt(scoreDoc -> scoreDoc.doc)); assertEquals(scoreDocs.length, topDocs.scoreDocs.length); diff --git a/server/src/test/java/org/elasticsearch/search/vectors/VectorSimilarityQueryTests.java b/server/src/test/java/org/elasticsearch/search/vectors/VectorSimilarityQueryTests.java index 8d9fa847a988c..f2ead93ebb6e1 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/VectorSimilarityQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/VectorSimilarityQueryTests.java @@ -62,14 +62,14 @@ public void testSimpleEuclidean() throws Exception { new VectorSimilarityQuery(new KnnFloatVectorQuery("float_vector", new float[] { 1, 1, 1 }, 5), 3f, 0.25f), 5 ); - assertThat(docs.totalHits.value, equalTo(5L)); + assertThat(docs.totalHits.value(), equalTo(5L)); // Should match only 4 docs = searcher.search( new VectorSimilarityQuery(new KnnFloatVectorQuery("float_vector", new float[] { 1, 1, 1 }, 5), 1f, 0.5f), 5 ); - assertThat(docs.totalHits.value, equalTo(4L)); + assertThat(docs.totalHits.value(), equalTo(4L)); } } } @@ -138,14 +138,14 @@ public void testSimpleCosine() throws IOException { new VectorSimilarityQuery(new KnnFloatVectorQuery("float_vector", new float[] { 1, 1, 1 }, 5), .8f, .9f), 5 ); - assertThat(docs.totalHits.value, equalTo(5L)); + assertThat(docs.totalHits.value(), equalTo(5L)); // Should match only 4 docs = searcher.search( new VectorSimilarityQuery(new KnnFloatVectorQuery("float_vector", new float[] { 1, 1, 1 }, 5), .9f, 0.95f), 5 ); - assertThat(docs.totalHits.value, equalTo(4L)); + assertThat(docs.totalHits.value(), equalTo(4L)); } } } diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index f8c3edcbb9d42..c46d98fe1cd8b 100644 --- 
a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -372,7 +372,7 @@ public void testSuccessfulSnapshotAndRestore() { final AtomicBoolean documentCountVerified = new AtomicBoolean(); continueOrDie(searchResponseListener, r -> { - assertEquals(documents, Objects.requireNonNull(r.getHits().getTotalHits()).value); + assertEquals(documents, Objects.requireNonNull(r.getHits().getTotalHits()).value()); documentCountVerified.set(true); }); @@ -816,7 +816,10 @@ public void testConcurrentSnapshotRestoreAndDeleteOther() { var response = safeResult(searchResponseListener); try { - assertEquals(documentsFirstSnapshot + documentsSecondSnapshot, Objects.requireNonNull(response.getHits().getTotalHits()).value); + assertEquals( + documentsFirstSnapshot + documentsSecondSnapshot, + Objects.requireNonNull(response.getHits().getTotalHits()).value() + ); } finally { response.decRef(); } @@ -1177,7 +1180,7 @@ public void testSuccessfulSnapshotWithConcurrentDynamicMappingUpdates() { final AtomicBoolean documentCountVerified = new AtomicBoolean(); continueOrDie(searchResponseStepListener, r -> { - final long hitCount = r.getHits().getTotalHits().value; + final long hitCount = r.getHits().getTotalHits().value(); assertThat( "Documents were restored but the restored index mapping was older than some documents and misses some of their fields", (int) hitCount, diff --git a/test/external-modules/latency-simulating-directory/src/internalClusterTest/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepositoryTests.java b/test/external-modules/latency-simulating-directory/src/internalClusterTest/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepositoryTests.java index 9e5b9dd0be547..7f2cb85919d10 100644 --- 
a/test/external-modules/latency-simulating-directory/src/internalClusterTest/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepositoryTests.java +++ b/test/external-modules/latency-simulating-directory/src/internalClusterTest/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepositoryTests.java @@ -140,7 +140,7 @@ public void testRetrieveSnapshots() throws Exception { logger.info("--> run a search"); assertResponse(client.prepareSearch("test-idx").setQuery(QueryBuilders.termQuery("text", "sometext")), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertThat(COUNTS.intValue(), greaterThan(0)); }); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index 0b5803e9887d6..4713adf6cf01d 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -178,7 +178,7 @@ protected static void assertVisibleCount(Engine engine, int numDocs, boolean ref engine.refresh("test"); } try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); assertThat(totalHits, equalTo(numDocs)); } } @@ -970,7 +970,7 @@ protected static void assertVisibleCount(InternalEngine engine, int numDocs, boo engine.refresh("test"); } try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), 
new TotalHitCountCollectorManager(searcher.getSlices())); assertThat(totalHits, equalTo(numDocs)); } } @@ -1168,7 +1168,10 @@ public static void assertOpsOnReplica( assertVisibleCount(replicaEngine, lastFieldValue == null ? 0 : 1); if (lastFieldValue != null) { try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new TermQuery(new Term("value", lastFieldValue)), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search( + new TermQuery(new Term("value", lastFieldValue)), + new TotalHitCountCollectorManager(searcher.getSlices()) + ); assertThat(totalHits, equalTo(1)); } } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java index 98f18829966c7..47a227cebc956 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java @@ -502,7 +502,7 @@ private void assertQueryOnlyOnText(String queryName, ThrowingRunnable buildQuery } protected final String readSource(IndexReader reader, int docId) throws IOException { - return reader.document(docId).getBinaryValue("_source").utf8ToString(); + return reader.storedFields().document(docId).getBinaryValue("_source").utf8ToString(); } protected final void checkExpensiveQuery(BiConsumer queryBuilder) { diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java index b6aa3f97241e1..1c4cfa4ec7ff9 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java @@ -8,6 +8,7 @@ */ package org.elasticsearch.index.mapper; 
+import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; @@ -139,6 +140,7 @@ public FieldInfo getFieldInfoWithName(String name) { randomBoolean(), IndexOptions.NONE, DocValuesType.NONE, + DocValuesSkipIndexType.NONE, -1, new HashMap<>(), 1, diff --git a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java index 2ddd153b8a936..c76967e5d00ac 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java @@ -198,7 +198,9 @@ public abstract class AnalysisFactoryTestCase extends ESTestCase { entry("daitchmokotoffsoundex", Void.class), entry("persianstem", Void.class), // not exposed - entry("word2vecsynonym", Void.class) + entry("word2vecsynonym", Void.class), + // not exposed + entry("romaniannormalization", Void.class) ); static final Map> KNOWN_CHARFILTERS = Map.of( diff --git a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java index cc4aac686a02d..df1ea6b756405 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java @@ -100,7 +100,7 @@ public static TotalHits getTotalHits(SearchRequestBuilder request) { } public static long getTotalHitsValue(SearchRequestBuilder request) { - return getTotalHits(request).value; + return getTotalHits(request).value(); } public static SearchResponse responseAsSearchResponse(Response searchResponse) throws IOException { diff --git 
a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index d35d5282238ee..5f64d123c1bed 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -33,6 +33,7 @@ import org.apache.lucene.index.TermsEnum; import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.apache.lucene.search.Collector; +import org.apache.lucene.search.CollectorManager; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; @@ -158,6 +159,7 @@ import java.net.InetAddress; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -831,11 +833,8 @@ protected void debugTestCase( QueryCachingPolicy queryCachingPolicy, MappedFieldType... fieldTypes ) throws IOException { - // Don't use searchAndReduce because we only want a single aggregator. - IndexSearcher searcher = newIndexSearcher( - reader, - aggregationBuilder.supportsParallelCollection(field -> getCardinality(reader, field)) - ); + // Don't use searchAndReduce because we only want a single aggregator, disable parallel collection too. 
+ IndexSearcher searcher = newIndexSearcher(reader, false); if (queryCachingPolicy != null) { searcher.setQueryCachingPolicy(queryCachingPolicy); } @@ -854,7 +853,21 @@ protected void debugTestCase( try { Aggregator aggregator = createAggregator(builder, context); aggregator.preCollection(); - searcher.search(context.query(), aggregator.asCollector()); + searcher.search(context.query(), new CollectorManager() { + boolean called = false; + + @Override + public Collector newCollector() { + assert called == false : "newCollector called multiple times"; + called = true; + return aggregator.asCollector(); + } + + @Override + public Void reduce(Collection collectors) { + return null; + } + }); InternalAggregation r = aggregator.buildTopLevel(); r = doReduce( List.of(r), @@ -959,11 +972,11 @@ protected DirectoryReader wrapDirectoryReader(DirectoryReader reader) throws IOE } private static class ShardSearcher extends IndexSearcher { - private final List ctx; + private final LeafReaderContextPartition[] ctx; ShardSearcher(LeafReaderContext ctx, IndexReaderContext parent) { super(parent); - this.ctx = Collections.singletonList(ctx); + this.ctx = new LeafReaderContextPartition[] { IndexSearcher.LeafReaderContextPartition.createForEntireSegment(ctx) }; } public void search(Weight weight, Collector collector) throws IOException { @@ -972,7 +985,7 @@ public void search(Weight weight, Collector collector) throws IOException { @Override public String toString() { - return "ShardSearcher(" + ctx.get(0) + ")"; + return "ShardSearcher(" + ctx[0] + ")"; } } diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java index b1b75c1790287..29112b4bd8f5f 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java +++ 
b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java @@ -232,7 +232,7 @@ public void setupSuiteScopeCluster() throws Exception { .setSize(5000), response -> { assertNoFailures(response); - long totalHits = response.getHits().getTotalHits().value; + long totalHits = response.getHits().getTotalHits().value(); XContentBuilder builder = XContentFactory.jsonBuilder(); ChunkedToXContent.wrapAsToXContent(response).toXContent(builder, ToXContent.EMPTY_PARAMS); logger.info("Full high_card_idx Response Content:\n{ {} }", Strings.toString(builder)); diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java index f191012fb4ef8..f87a87c5ddbc8 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java @@ -40,7 +40,7 @@ public void testEmptyAggregation() { .addAggregation(centroidAgg(aggName()).field(SINGLE_VALUED_FIELD_NAME)), response -> { CentroidAggregation geoCentroid = response.getAggregations().get(aggName()); - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); assertThat(geoCentroid, notNullValue()); assertThat(geoCentroid.getName(), equalTo(aggName())); assertThat(geoCentroid.centroid(), equalTo(null)); diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java index cb6a58ed65a02..a0fdb0bfabf98 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java @@ -155,7 +155,7 @@ public void testEmptyAggregation() throws Exception { assertNoFailuresAndResponse( client().prepareSearch(EMPTY_IDX_NAME).setQuery(matchAllQuery()).addAggregation(boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); SpatialBounds geoBounds = response.getAggregations().get(aggName()); assertThat(geoBounds, notNullValue()); assertThat(geoBounds.getName(), equalTo(aggName())); diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java index 71956b431d9b7..fd41213dcd81d 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java @@ -104,7 +104,7 @@ public void testIndexPointsFilterRectangle() throws Exception { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, geometry).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } @@ -113,7 +113,7 @@ public void testIndexPointsFilterRectangle() throws Exception { assertNoFailuresAndResponse( client().prepareSearch(defaultIndexName).setQuery(queryBuilder().shapeQuery(defaultFieldName, geometry)), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } @@ -170,7 +170,7 @@ public void testIndexPointsPolygon() throws Exception { .setQuery(queryBuilder().shapeQuery(defaultFieldName, polygon).relation(ShapeRelation.INTERSECTS)), response -> { SearchHits searchHits = response.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(1L)); + assertThat(searchHits.getTotalHits().value(), equalTo(1L)); assertThat(searchHits.getAt(0).getId(), equalTo("1")); } ); @@ -209,7 +209,7 @@ public void testIndexPointsMultiPolygon() throws Exception { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), not(equalTo("2"))); assertThat(response.getHits().getAt(1).getId(), not(equalTo("2"))); @@ -219,7 +219,7 @@ public void testIndexPointsMultiPolygon() throws Exception { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.WITHIN)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), not(equalTo("2"))); assertThat(response.getHits().getAt(1).getId(), not(equalTo("2"))); @@ -229,7 +229,7 @@ public void testIndexPointsMultiPolygon() throws Exception { client().prepareSearch(defaultIndexName) 
.setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.DISJOINT)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } @@ -238,7 +238,7 @@ public void testIndexPointsMultiPolygon() throws Exception { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.CONTAINS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); assertThat(response.getHits().getHits().length, equalTo(0)); } ); @@ -264,7 +264,7 @@ public void testIndexPointsRectangle() throws Exception { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, rectangle).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } @@ -319,7 +319,7 @@ public void testIndexPointsIndexedRectangle() throws Exception { .indexedShapePath(indexedShapePath) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("point2")); } diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeQueryTestCase.java index 91c7a25682ae0..6dca91170c7a5 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeQueryTestCase.java @@ -206,7 +206,7 @@ public void testRandomGeoCollectionQuery() throws Exception { QueryBuilder intersects = queryBuilder().intersectionQuery(defaultFieldName, queryCollection); assertNoFailuresAndResponse(client().prepareSearch(defaultIndexName).setQuery(intersects), response -> { - assertTrue("query: " + intersects + " doc: " + Strings.toString(docSource), response.getHits().getTotalHits().value > 0); + assertTrue("query: " + intersects + " doc: " + Strings.toString(docSource), response.getHits().getTotalHits().value() > 0); }); } @@ -352,7 +352,7 @@ public void testEdgeCases() throws Exception { assertNoFailuresAndResponse( client().prepareSearch(defaultIndexName).setQuery(queryBuilder().intersectionQuery(defaultFieldName, query)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("blakely")); } @@ -457,7 +457,7 @@ public void testIndexedShapeReference() throws Exception { assertNoFailuresAndResponse( client().prepareSearch(defaultIndexName).setQuery(queryBuilder().intersectionQuery(defaultFieldName, "Big_Rectangle")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } @@ -466,7 +466,7 @@ public void testIndexedShapeReference() throws Exception { assertNoFailuresAndResponse( client().prepareSearch(defaultIndexName).setQuery(queryBuilder().shapeQuery(defaultFieldName, "Big_Rectangle")), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java index 4e7378d3a9606..3cd52124d8556 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java @@ -68,7 +68,7 @@ public void testRectangleSpanningDateline(BasePointShapeQueryTestCase { SearchHits searchHits = response.getHits(); - assertEquals(2, searchHits.getTotalHits().value); + assertEquals(2, searchHits.getTotalHits().value()); assertNotEquals("1", searchHits.getAt(0).getId()); assertNotEquals("1", searchHits.getAt(1).getId()); }); @@ -112,7 +112,7 @@ public void testPolygonSpanningDateline(BasePointShapeQueryTestCase { SearchHits searchHits = response.getHits(); - assertEquals(2, searchHits.getTotalHits().value); + assertEquals(2, searchHits.getTotalHits().value()); assertNotEquals("1", searchHits.getAt(0).getId()); assertNotEquals("4", searchHits.getAt(0).getId()); assertNotEquals("1", searchHits.getAt(1).getId()); @@ -155,7 +155,7 @@ public void testMultiPolygonSpanningDateline(BasePointShapeQueryTestCase { SearchHits searchHits = response.getHits(); - assertEquals(2, searchHits.getTotalHits().value); + assertEquals(2, searchHits.getTotalHits().value()); assertNotEquals("3", searchHits.getAt(0).getId()); assertNotEquals("3", searchHits.getAt(1).getId()); }); diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryIntegTestCase.java 
b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryIntegTestCase.java index c84d8612b1d4b..97e21f64e2648 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryIntegTestCase.java @@ -102,7 +102,7 @@ public void testSimpleBoundingBoxTest() throws Exception { client().prepareSearch() // from NY .setQuery(geoBoundingBoxQuery("location").setCorners(40.73, -74.1, 40.717, -73.99)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), anyOf(equalTo("1"), equalTo("3"), equalTo("5"))); @@ -114,7 +114,7 @@ public void testSimpleBoundingBoxTest() throws Exception { client().prepareSearch() // from NY .setQuery(geoBoundingBoxQuery("location").setCorners(40.73, -74.1, 40.717, -73.99)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), anyOf(equalTo("1"), equalTo("3"), equalTo("5"))); @@ -126,7 +126,7 @@ public void testSimpleBoundingBoxTest() throws Exception { client().prepareSearch() // top == bottom && left == right .setQuery(geoBoundingBoxQuery("location").setCorners(40.7143528, -74.0059731, 40.7143528, -74.0059731)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), equalTo("1")); @@ -138,7 +138,7 @@ public void 
testSimpleBoundingBoxTest() throws Exception { client().prepareSearch() // top == bottom .setQuery(geoBoundingBoxQuery("location").setCorners(40.759011, -74.00009, 40.759011, -73.0059731)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), equalTo("2")); @@ -150,7 +150,7 @@ public void testSimpleBoundingBoxTest() throws Exception { client().prepareSearch() // left == right .setQuery(geoBoundingBoxQuery("location").setCorners(41.8, -73.9844722, 40.7, -73.9844722)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), equalTo("2")); @@ -163,7 +163,7 @@ public void testSimpleBoundingBoxTest() throws Exception { client().prepareSearch() // from NY .setQuery(geoDistanceQuery("location").point(40.5, -73.9).distance(25, DistanceUnit.KILOMETERS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), anyOf(equalTo("7"), equalTo("4"))); diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java index 4e47b0c51177c..bb57cb132daa2 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java @@ -143,7 +143,7 @@ public void testEnvelopeSpanningDateline() 
throws Exception { } ); assertResponse(client().prepareSearch(defaultIndexName).setQuery(querySupplier.get()), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertNotEquals("1", response.getHits().getAt(0).getId()); assertNotEquals("1", response.getHits().getAt(1).getId()); }); diff --git a/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java index f1ff3492426aa..a93f3b7eaf109 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java @@ -73,7 +73,7 @@ public static void corruptFile(Random random, Path... files) throws IOException long checksumAfterCorruption; long actualChecksumAfterCorruption; - try (ChecksumIndexInput input = dir.openChecksumInput(fileToCorrupt.getFileName().toString(), IOContext.DEFAULT)) { + try (ChecksumIndexInput input = dir.openChecksumInput(fileToCorrupt.getFileName().toString())) { assertThat(input.getFilePointer(), is(0L)); input.seek(input.length() - CodecUtil.footerLength()); checksumAfterCorruption = input.getChecksum(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 87a834d6424b7..d7c5c598ce978 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -908,9 +908,11 @@ public void waitNoPendingTasksOnAll() throws Exception { /** Ensures the result counts are as expected, and logs the results if different */ public void assertResultsAndLogOnFailure(long expectedResults, SearchResponse searchResponse) { final TotalHits totalHits = searchResponse.getHits().getTotalHits(); - if (totalHits.value != expectedResults || 
totalHits.relation != TotalHits.Relation.EQUAL_TO) { + if (totalHits.value() != expectedResults || totalHits.relation() != TotalHits.Relation.EQUAL_TO) { StringBuilder sb = new StringBuilder("search result contains ["); - String value = Long.toString(totalHits.value) + (totalHits.relation == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO ? "+" : ""); + String value = Long.toString(totalHits.value()) + (totalHits.relation() == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO + ? "+" + : ""); sb.append(value).append("] results. expected [").append(expectedResults).append("]"); String failMsg = sb.toString(); for (SearchHit hit : searchResponse.getHits().getHits()) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 31c8e5bc3d457..e1ba661eb24d4 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -33,6 +33,8 @@ import org.apache.logging.log4j.status.StatusConsoleListener; import org.apache.logging.log4j.status.StatusData; import org.apache.logging.log4j.status.StatusLogger; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.tests.util.LuceneTestCase.SuppressCodecs; import org.apache.lucene.tests.util.TestRuleMarkFailure; @@ -208,6 +210,7 @@ import java.util.stream.LongStream; import java.util.stream.Stream; +import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean; import static java.util.Collections.emptyMap; import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList; import static org.hamcrest.Matchers.anyOf; @@ -2637,4 +2640,43 @@ public static void ensureAllContextsReleased(SearchService searchService) { throw new AssertionError("Failed to verify search contexts", e); } } + + /** + * Create a new searcher 
over the reader. This searcher might randomly use threads. + * Provides the same functionality as {@link LuceneTestCase#newSearcher(IndexReader)}, + * with the only difference that concurrency will only ever be inter-segment and never intra-segment. + */ + public static IndexSearcher newSearcher(IndexReader r) { + return newSearcher(r, true); + } + + /** + * Create a new searcher over the reader. This searcher might randomly use threads. + * Provides the same functionality as {@link LuceneTestCase#newSearcher(IndexReader, boolean)}, + * with the only difference that concurrency will only ever be inter-segment and never intra-segment. + */ + public static IndexSearcher newSearcher(IndexReader r, boolean maybeWrap) { + return newSearcher(r, maybeWrap, true); + } + + /** + * Create a new searcher over the reader. This searcher might randomly use threads. + * Provides the same functionality as {@link LuceneTestCase#newSearcher(IndexReader, boolean, boolean)}, + * with the only difference that concurrency will only ever be inter-segment and never intra-segment. + */ + public static IndexSearcher newSearcher(IndexReader r, boolean maybeWrap, boolean wrapWithAssertions) { + return newSearcher(r, maybeWrap, wrapWithAssertions, randomBoolean()); + } + + /** + * Create a new searcher over the reader. + * Provides the same functionality as {@link LuceneTestCase#newSearcher(IndexReader, boolean, boolean, boolean)}, + * with the only difference that concurrency will only ever be inter-segment and never intra-segment. 
+ */ + public static IndexSearcher newSearcher(IndexReader r, boolean maybeWrap, boolean wrapWithAssertions, boolean useThreads) { + if (useThreads) { + return newSearcher(r, maybeWrap, wrapWithAssertions, Concurrency.INTER_SEGMENT); + } + return newSearcher(r, maybeWrap, wrapWithAssertions, Concurrency.NONE); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java b/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java index db30f6e91f039..42439b5d5785d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java @@ -86,13 +86,6 @@ public Terms terms(String field) throws IOException { return terms; } - @Override - public Fields getTermVectors(int docID) throws IOException { - Fields fields = super.getTermVectors(docID); - thrower.maybeThrow(Flags.TermVectors); - return fields == null ? 
null : new ThrowingFields(fields, thrower); - } - /** * Wraps a Fields but with additional asserts */ diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index 552e301650d9d..5851fc709d14a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -314,7 +314,7 @@ public static void assertHitCount(ActionFuture responseFuture, l public static void assertHitCount(SearchResponse countResponse, long expectedHitCount) { final TotalHits totalHits = countResponse.getHits().getTotalHits(); - if (totalHits.relation != TotalHits.Relation.EQUAL_TO || totalHits.value != expectedHitCount) { + if (totalHits.relation() != TotalHits.Relation.EQUAL_TO || totalHits.value() != expectedHitCount) { fail("Count is " + totalHits + " but " + expectedHitCount + " was expected. 
" + formatShardStatus(countResponse)); } } @@ -346,7 +346,7 @@ public static void assertFourthHit(SearchResponse searchResponse, Matcher matcher) { assertThat("SearchHit number must be greater than 0", number, greaterThan(0)); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThanOrEqualTo((long) number)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo((long) number)); assertThat(searchResponse.getHits().getAt(number - 1), matcher); } @@ -409,13 +409,13 @@ public static void assertScrollResponsesAndHitCount( responses.add(scrollResponse); int retrievedDocsCount = 0; try { - assertThat(scrollResponse.getHits().getTotalHits().value, equalTo((long) expectedTotalHitCount)); + assertThat(scrollResponse.getHits().getTotalHits().value(), equalTo((long) expectedTotalHitCount)); retrievedDocsCount += scrollResponse.getHits().getHits().length; responseConsumer.accept(responses.size(), scrollResponse); while (scrollResponse.getHits().getHits().length > 0) { scrollResponse = client.prepareSearchScroll(scrollResponse.getScrollId()).setScroll(keepAlive).get(); responses.add(scrollResponse); - assertThat(scrollResponse.getHits().getTotalHits().value, equalTo((long) expectedTotalHitCount)); + assertThat(scrollResponse.getHits().getTotalHits().value(), equalTo((long) expectedTotalHitCount)); retrievedDocsCount += scrollResponse.getHits().getHits().length; responseConsumer.accept(responses.size(), scrollResponse); } @@ -704,8 +704,8 @@ public static T assertBooleanSubQuery(Query query, Class su assertThat(query, instanceOf(BooleanQuery.class)); BooleanQuery q = (BooleanQuery) query; assertThat(q.clauses(), hasSize(greaterThan(i))); - assertThat(q.clauses().get(i).getQuery(), instanceOf(subqueryType)); - return subqueryType.cast(q.clauses().get(i).getQuery()); + assertThat(q.clauses().get(i).query(), instanceOf(subqueryType)); + return subqueryType.cast(q.clauses().get(i).query()); } /** diff --git 
a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java index 885e02a8b5e6a..f517c03468bc2 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java @@ -69,8 +69,12 @@ public void testSimple() throws IOException { }; AggTestConfig aggTestConfig = new AggTestConfig(tsBuilder, timeStampField(), counterField("counter_field"), dimensionField("dim")); testCase(iw -> { - iw.addDocuments(docs(1000, "1", 15, 37, 60, /*reset*/ 14)); - iw.addDocuments(docs(1000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)); + for (Document document : docs(1000, "1", 15, 37, 60, /*reset*/ 14)) { + iw.addDocument(document); + } + for (Document document : docs(1000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)) { + iw.addDocument(document); + } }, verifier, aggTestConfig); } @@ -109,8 +113,12 @@ public void testNestedWithinDateHistogram() throws IOException { AggTestConfig aggTestConfig = new AggTestConfig(tsBuilder, timeStampField(), counterField("counter_field"), dimensionField("dim")) .withSplitLeavesIntoSeperateAggregators(false); testCase(iw -> { - iw.addDocuments(docs(2000, "1", 15, 37, 60, /*reset*/ 14)); - iw.addDocuments(docs(2000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)); + for (Document document : docs(2000, "1", 15, 37, 60, /*reset*/ 14)) { + iw.addDocument(document); + } + for (Document document : docs(2000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)) { + iw.addDocument(document); + } }, verifier, aggTestConfig); } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java 
b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java index 20da254657c1a..04f0563e433a2 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java @@ -170,7 +170,7 @@ public void testNoMatchingField() throws IOException { } public void testQueryFiltering() throws IOException { - testAggregation(new TermInSetQuery("text", new BytesRef("test0"), new BytesRef("test1")), iw -> { + testAggregation(new TermInSetQuery("text", List.of(new BytesRef("test0"), new BytesRef("test1"))), iw -> { for (int i = 0; i < 10; i++) { iw.addDocument(singleton(new TextField("text", "test" + i, Field.Store.NO))); } diff --git a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java index 52ecc40c957b7..aee344777779b 100644 --- a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java +++ b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java @@ -314,9 +314,9 @@ private AsyncSearchResponse doNext() throws Exception { assertThat(newResponse.getSearchResponse().getShardFailures().length, equalTo(numFailures)); assertNull(newResponse.getSearchResponse().getAggregations()); assertNotNull(newResponse.getSearchResponse().getHits().getTotalHits()); - assertThat(newResponse.getSearchResponse().getHits().getTotalHits().value, equalTo(0L)); + assertThat(newResponse.getSearchResponse().getHits().getTotalHits().value(), equalTo(0L)); assertThat( - newResponse.getSearchResponse().getHits().getTotalHits().relation, + 
newResponse.getSearchResponse().getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) ); } else { diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java index 302bb68af6c61..fd4463df07a73 100644 --- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java +++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java @@ -62,7 +62,7 @@ public void testFetchFailuresAllShards() throws Exception { assertEquals(10, searchResponse.getSuccessfulShards()); assertEquals(0, searchResponse.getFailedShards()); assertEquals(0, searchResponse.getShardFailures().length); - assertEquals(10, searchResponse.getHits().getTotalHits().value); + assertEquals(10, searchResponse.getHits().getTotalHits().value()); assertEquals(0, searchResponse.getHits().getHits().length); StringTerms terms = searchResponse.getAggregations().get("text"); assertEquals(1, terms.getBuckets().size()); @@ -106,7 +106,7 @@ public void testFetchFailuresOnlySomeShards() throws Exception { assertEquals(10, searchResponse.getTotalShards()); assertEquals(5, searchResponse.getSuccessfulShards()); assertEquals(5, searchResponse.getFailedShards()); - assertEquals(10, searchResponse.getHits().getTotalHits().value); + assertEquals(10, searchResponse.getHits().getTotalHits().value()); assertEquals(5, searchResponse.getHits().getHits().length); StringTerms terms = searchResponse.getAggregations().get("text"); assertEquals(1, terms.getBuckets().size()); diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java index 95b2324d03b52..16645e7523c36 100644 --- 
a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java @@ -390,12 +390,11 @@ public IndexInput clone() { /** Returns default buffer sizes for the given {@link IOContext} */ public static int bufferSize(IOContext context) { - switch (context.context) { + switch (context.context()) { case MERGE: return MERGE_BUFFER_SIZE; case DEFAULT: case FLUSH: - case READ: default: return BUFFER_SIZE; } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java index 6b390ab5747a8..164e6ed5406ae 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java @@ -245,7 +245,7 @@ private Store.MetadataSnapshot getMetadata() throws IOException { private long readFileBytes(String fileName, ByteArray reference) throws IOException { try (Releasable ignored = keyedLock.acquire(fileName)) { - var context = fileName.startsWith(IndexFileNames.SEGMENTS) ? IOContext.READONCE : IOContext.READ; + var context = fileName.startsWith(IndexFileNames.SEGMENTS) ? 
IOContext.READONCE : IOContext.DEFAULT; final IndexInput indexInput = cachedInputs.computeIfAbsent(fileName, f -> { try { return commitRef.getIndexCommit().getDirectory().openInput(fileName, context); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java index 1b7875e4a36b4..618489abd687e 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java @@ -703,7 +703,7 @@ protected void atLeastDocsIndexed(Client client, String index, long numDocsRepli request.source(new SearchSourceBuilder().size(0)); assertResponse(client.search(request), response -> { assertNotNull(response.getHits().getTotalHits()); - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(numDocsReplicated)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(numDocsReplicated)); }); }, 60, TimeUnit.SECONDS); } diff --git a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java index 0fea3c0d3b74f..1bf52b663b30f 100644 --- a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java +++ b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java @@ -274,7 +274,7 @@ private void assertHits(String index, int numDocsExpected, boolean sourceHadDele }; assertResponse(prepareSearch(index).addSort(SeqNoFieldMapper.NAME, SortOrder.ASC).setSize(numDocsExpected), searchResponse -> { assertConsumer.accept(searchResponse, sourceHadDeletions); - assertEquals(numDocsExpected, searchResponse.getHits().getTotalHits().value); + assertEquals(numDocsExpected, 
searchResponse.getHits().getTotalHits().value()); }); SearchResponse searchResponse = prepareSearch(index).addSort(SeqNoFieldMapper.NAME, SortOrder.ASC) .setScroll(TimeValue.timeValueMinutes(1)) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/RewriteCachingDirectoryReader.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/RewriteCachingDirectoryReader.java index e66d41d089437..12864dd66a857 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/RewriteCachingDirectoryReader.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/RewriteCachingDirectoryReader.java @@ -9,9 +9,9 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; -import org.apache.lucene.index.Fields; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexWriter; @@ -23,7 +23,6 @@ import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; -import org.apache.lucene.index.StoredFieldVisitor; import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.TermVectors; import org.apache.lucene.index.Terms; @@ -214,6 +213,11 @@ public NumericDocValues getNormValues(String field) { throw new UnsupportedOperationException(); } + @Override + public DocValuesSkipper getDocValuesSkipper(String field) throws IOException { + throw new UnsupportedOperationException(); + } + @Override public FloatVectorValues getFloatVectorValues(String field) throws IOException { throw new UnsupportedOperationException(); @@ -257,11 +261,6 @@ public LeafMetaData getMetaData() { throw new 
UnsupportedOperationException(); } - @Override - public Fields getTermVectors(int docId) { - throw new UnsupportedOperationException(); - } - @Override public TermVectors termVectors() throws IOException { throw new UnsupportedOperationException(); @@ -282,11 +281,6 @@ public int maxDoc() { return maxDoc; } - @Override - public void document(int docID, StoredFieldVisitor visitor) { - throw new UnsupportedOperationException(); - } - @Override protected void doClose() {} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java index 093ec031d0b30..421a306babf29 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java @@ -254,6 +254,7 @@ private SegmentCommitInfo syncSegment( false, IndexOptions.NONE, DocValuesType.NONE, + fieldInfo.docValuesSkipIndexType(), -1, fieldInfo.attributes(), 0, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportGetResourcesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportGetResourcesAction.java index dea158b425071..d315f09ebda88 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportGetResourcesAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportGetResourcesAction.java @@ -105,7 +105,7 @@ protected void searchResources(AbstractGetResourcesRequest request, TaskId paren listener.delegateFailure((l, response) -> { List docs = new ArrayList<>(); Set foundResourceIds = new HashSet<>(); - long totalHitCount = response.getHits().getTotalHits().value; + long totalHitCount = response.getHits().getTotalHits().value(); for (SearchHit hit : 
response.getHits().getHits()) { try ( XContentParser parser = XContentHelper.createParserNotCompressed( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/Evaluation.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/Evaluation.java index de43f744c307b..4e5f97acacf64 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/Evaluation.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/Evaluation.java @@ -145,7 +145,7 @@ default SearchSourceBuilder buildSearch(EvaluationParameters parameters, QueryBu */ default void process(SearchResponse searchResponse) { Objects.requireNonNull(searchResponse); - if (searchResponse.getHits().getTotalHits().value == 0) { + if (searchResponse.getHits().getTotalHits().value() == 0) { String requiredFieldsString = String.join(", ", getRequiredFields()); throw ExceptionsHelper.badRequestException("No documents found containing all the required fields [{}]", requiredFieldsString); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java index 3154fe5999b8e..129619f6976e2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java @@ -79,19 +79,19 @@ public void onResponse(SearchResponse resp) { } } - if (results.size() > resp.getHits().getTotalHits().value) { + if (results.size() > resp.getHits().getTotalHits().value()) { clearScroll.accept(lastResponse); listener.onFailure( new IllegalStateException( "scrolling returned more hits [" + results.size() + "] than expected [" - + resp.getHits().getTotalHits().value + + resp.getHits().getTotalHits().value() + "] so bailing out to prevent unbounded " 
+ "memory consumption." ) ); - } else if (results.size() == resp.getHits().getTotalHits().value) { + } else if (results.size() == resp.getHits().getTotalHits().value()) { clearScroll.accept(resp); // Finally, return the list of the entity listener.onResponse(Collections.unmodifiableList(results)); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java index 0061870c73cc9..32b12c834dd9c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java @@ -55,8 +55,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("took", tookInMillis); builder.startObject("total"); { - builder.field("value", totalHits.value); - builder.field("relation", totalHits.relation == TotalHits.Relation.EQUAL_TO ? "eq" : "gte"); + builder.field("value", totalHits.value()); + builder.field("relation", totalHits.relation() == TotalHits.Relation.EQUAL_TO ? 
"eq" : "gte"); } builder.endObject(); builder.startArray("profiles"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java index 09a49c53ee1a5..908f58c5f9147 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java @@ -155,17 +155,6 @@ public FieldInfos getFieldInfos() { return fieldInfos; } - @Override - public Fields getTermVectors(int docID) throws IOException { - Fields f = super.getTermVectors(docID); - if (f == null) { - return null; - } - f = new FieldFilterFields(f); - // we need to check for emptyness, so we can return null: - return f.iterator().hasNext() ? f : null; - } - @Override public TermVectors termVectors() throws IOException { TermVectors termVectors = super.termVectors(); @@ -264,11 +253,6 @@ private static int step(CharacterRunAutomaton automaton, String key, int state) return state; } - @Override - public void document(final int docID, final StoredFieldVisitor visitor) throws IOException { - super.document(docID, new FieldSubsetStoredFieldVisitor(visitor)); - } - @Override protected StoredFieldsReader doGetSequentialStoredFieldsReader(StoredFieldsReader reader) { return new FieldSubsetStoredFieldsReader(reader); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java index c85a648761ca7..5ba5c1fd1218a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java @@ -187,7 +187,7 @@ private PermissionEntry(ApplicationPrivilege privilege, Set resourceName } private boolean grants(ApplicationPrivilege other, Automaton resource) { - return matchesPrivilege(other) && Operations.subsetOf(resource, this.resourceAutomaton); + return matchesPrivilege(other) && Automatons.subsetOf(resource, this.resourceAutomaton); } private boolean matchesPrivilege(ApplicationPrivilege other) { @@ -202,7 +202,7 @@ private boolean matchesPrivilege(ApplicationPrivilege other) { } return Operations.isEmpty(privilege.getAutomaton()) == false && Operations.isEmpty(other.getAutomaton()) == false - && Operations.subsetOf(other.getAutomaton(), privilege.getAutomaton()); + && Automatons.subsetOf(other.getAutomaton(), privilege.getAutomaton()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java index 9c41786f39eeb..4e608281a7858 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.authz.permission; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authz.RestrictedIndices; @@ -215,7 +214,7 @@ public final boolean check(final String action, final TransportRequest request, @Override public final boolean implies(final PermissionCheck permissionCheck) { if (permissionCheck instanceof 
ActionBasedPermissionCheck) { - return Operations.subsetOf(((ActionBasedPermissionCheck) permissionCheck).automaton, this.automaton) + return Automatons.subsetOf(((ActionBasedPermissionCheck) permissionCheck).automaton, this.automaton) && doImplies((ActionBasedPermissionCheck) permissionCheck); } return false; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java index f3c2d9f62e40f..235d7419d2bf0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java @@ -12,7 +12,6 @@ import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.common.Strings; @@ -34,8 +33,6 @@ import java.util.Set; import java.util.stream.Collectors; -import static org.apache.lucene.util.automaton.Operations.subsetOf; - /** * Stores patterns to fields which access is granted or denied to and maintains an automaton that can be used to check if permission is * allowed for a specific field. 
@@ -175,10 +172,14 @@ public static Automaton buildPermittedFieldsAutomaton(final String[] grantedFiel deniedFieldsAutomaton = Automatons.patterns(deniedFields); } - grantedFieldsAutomaton = MinimizationOperations.minimize(grantedFieldsAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - deniedFieldsAutomaton = MinimizationOperations.minimize(deniedFieldsAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + grantedFieldsAutomaton = Operations.removeDeadStates( + Operations.determinize(grantedFieldsAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) + ); + deniedFieldsAutomaton = Operations.removeDeadStates( + Operations.determinize(deniedFieldsAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) + ); - if (subsetOf(deniedFieldsAutomaton, grantedFieldsAutomaton) == false) { + if (Automatons.subsetOf(deniedFieldsAutomaton, grantedFieldsAutomaton) == false) { throw new ElasticsearchSecurityException( "Exceptions for field permissions must be a subset of the " + "granted fields but " diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java index e1b72cc43b38e..558f8e6f22ac1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java @@ -288,7 +288,7 @@ public boolean checkResourcePrivileges( if (false == Operations.isEmpty(checkIndexAutomaton)) { Automaton allowedIndexPrivilegesAutomaton = null; for (var indexAndPrivilegeAutomaton : indexGroupAutomatons.entrySet()) { - if (Operations.subsetOf(checkIndexAutomaton, indexAndPrivilegeAutomaton.getValue())) { + if (Automatons.subsetOf(checkIndexAutomaton, indexAndPrivilegeAutomaton.getValue())) { if (allowedIndexPrivilegesAutomaton != null) { 
allowedIndexPrivilegesAutomaton = Automatons.unionAndMinimize( Arrays.asList(allowedIndexPrivilegesAutomaton, indexAndPrivilegeAutomaton.getKey()) @@ -301,7 +301,7 @@ public boolean checkResourcePrivileges( for (String privilege : checkForPrivileges) { IndexPrivilege indexPrivilege = IndexPrivilege.get(Collections.singleton(privilege)); if (allowedIndexPrivilegesAutomaton != null - && Operations.subsetOf(indexPrivilege.getAutomaton(), allowedIndexPrivilegesAutomaton)) { + && Automatons.subsetOf(indexPrivilege.getAutomaton(), allowedIndexPrivilegesAutomaton)) { if (resourcePrivilegesMapBuilder != null) { resourcePrivilegesMapBuilder.addResourcePrivilege(forIndexPattern, privilege, Boolean.TRUE); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/Privilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/Privilege.java index 68e3f11751aac..7434128f03129 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/Privilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/Privilege.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.authz.privilege; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.util.Maps; import org.elasticsearch.xpack.core.security.support.Automatons; @@ -90,7 +89,7 @@ public static SortedMap sortByAccessLevel(Map subsetCount.put( name, - privileges.values().stream().filter(p2 -> p2 != priv && Operations.subsetOf(priv.automaton, p2.automaton)).count() + privileges.values().stream().filter(p2 -> p2 != priv && Automatons.subsetOf(priv.automaton, p2.automaton)).count() ) ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java index a6347d8b7ec77..201cb4b69e472 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java @@ -9,9 +9,10 @@ import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; +import org.apache.lucene.util.automaton.StatePair; +import org.apache.lucene.util.automaton.Transition; import org.elasticsearch.common.cache.Cache; import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.common.settings.Setting; @@ -20,6 +21,7 @@ import org.elasticsearch.core.Predicates; import org.elasticsearch.core.TimeValue; +import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -223,7 +225,10 @@ private static Automaton buildAutomaton(String pattern) { ); } String regex = pattern.substring(1, pattern.length() - 1); - return new RegExp(regex).toAutomaton(); + return Operations.determinize( + new RegExp(regex, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(), + DEFAULT_DETERMINIZE_WORK_LIMIT + ); } else if (pattern.equals("*")) { return MATCH_ALL; } else { @@ -269,7 +274,7 @@ static Automaton wildcard(String text) { } i += length; } - return concatenate(automata); + return Operations.determinize(concatenate(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } public static Automaton unionAndMinimize(Collection automata) { @@ -288,7 +293,7 @@ public static Automaton intersectAndMinimize(Automaton a1, Automaton a2) { } private static Automaton minimize(Automaton automaton) { - return 
MinimizationOperations.minimize(automaton, maxDeterminizedStates); + return Operations.determinize(automaton, maxDeterminizedStates); } public static Predicate predicate(String... patterns) { @@ -329,7 +334,8 @@ private static Predicate predicate(Automaton automaton, final String toS } else if (automaton == EMPTY) { return Predicates.never(); } - CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton, maxDeterminizedStates); + automaton = Operations.determinize(automaton, maxDeterminizedStates); + CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton); return new Predicate() { @Override public boolean test(String s) { @@ -368,4 +374,72 @@ static List getPatterns(Automaton automaton) { throw new IllegalArgumentException("recordPatterns is set to false"); } } + + /** + * Returns true if the language of a1 is a subset of the language of a2. + * Both automata must be determinized and must have no dead states. + * + *

      Complexity: quadratic in number of states. + * Copied of Lucene's AutomatonTestUtil + */ + public static boolean subsetOf(Automaton a1, Automaton a2) { + if (a1.isDeterministic() == false) { + throw new IllegalArgumentException("a1 must be deterministic"); + } + if (a2.isDeterministic() == false) { + throw new IllegalArgumentException("a2 must be deterministic"); + } + assert Operations.hasDeadStatesFromInitial(a1) == false; + assert Operations.hasDeadStatesFromInitial(a2) == false; + if (a1.getNumStates() == 0) { + // Empty language is alwyas a subset of any other language + return true; + } else if (a2.getNumStates() == 0) { + return Operations.isEmpty(a1); + } + + // TODO: cutover to iterators instead + Transition[][] transitions1 = a1.getSortedTransitions(); + Transition[][] transitions2 = a2.getSortedTransitions(); + ArrayDeque worklist = new ArrayDeque<>(); + HashSet visited = new HashSet<>(); + StatePair p = new StatePair(0, 0); + worklist.add(p); + visited.add(p); + while (worklist.size() > 0) { + p = worklist.removeFirst(); + if (a1.isAccept(p.s1) && a2.isAccept(p.s2) == false) { + return false; + } + Transition[] t1 = transitions1[p.s1]; + Transition[] t2 = transitions2[p.s2]; + for (int n1 = 0, b2 = 0; n1 < t1.length; n1++) { + while (b2 < t2.length && t2[b2].max < t1[n1].min) { + b2++; + } + int min1 = t1[n1].min, max1 = t1[n1].max; + + for (int n2 = b2; n2 < t2.length && t1[n1].max >= t2[n2].min; n2++) { + if (t2[n2].min > min1) { + return false; + } + if (t2[n2].max < Character.MAX_CODE_POINT) { + min1 = t2[n2].max + 1; + } else { + min1 = Character.MAX_CODE_POINT; + max1 = Character.MIN_CODE_POINT; + } + StatePair q = new StatePair(t1[n1].dest, t2[n2].dest); + if (visited.contains(q) == false) { + worklist.add(q); + visited.add(q); + } + } + if (min1 <= max1) { + return false; + } + } + } + return true; + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/SimpleTermCountEnum.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/SimpleTermCountEnum.java index b4952373dfdd3..92568c4f31c18 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/SimpleTermCountEnum.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/SimpleTermCountEnum.java @@ -13,6 +13,7 @@ import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.AttributeSource; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.IOBooleanSupplier; import org.elasticsearch.index.mapper.MappedFieldType; import java.io.IOException; @@ -69,6 +70,11 @@ public AttributeSource attributes() { throw new UnsupportedOperationException(); } + @Override + public IOBooleanSupplier prepareSeekExact(BytesRef bytesRef) throws IOException { + throw new UnsupportedOperationException(); + } + @Override public boolean seekExact(BytesRef text) throws IOException { throw new UnsupportedOperationException(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java index e39ddc170c0a9..54390365c62af 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.MatchAllDocsQuery; @@ -400,14 +401,14 @@ public void onFailure(Exception e) { try (Engine.Searcher searcher = restoredShard.acquireSearcher("test")) { 
assertEquals(searcher.getIndexReader().maxDoc(), seqNoStats.getLocalCheckpoint()); TopDocs search = searcher.search(new MatchAllDocsQuery(), Integer.MAX_VALUE); - assertEquals(searcher.getIndexReader().numDocs(), search.totalHits.value); + assertEquals(searcher.getIndexReader().numDocs(), search.totalHits.value()); search = searcher.search( new MatchAllDocsQuery(), Integer.MAX_VALUE, new Sort(new SortField(SeqNoFieldMapper.NAME, SortField.Type.LONG)), false ); - assertEquals(searcher.getIndexReader().numDocs(), search.totalHits.value); + assertEquals(searcher.getIndexReader().numDocs(), search.totalHits.value()); long previous = -1; for (ScoreDoc doc : search.scoreDocs) { FieldDoc fieldDoc = (FieldDoc) doc; @@ -430,8 +431,9 @@ public void onFailure(Exception e) { assertEquals(original.exists(), restored.exists()); if (original.exists()) { - Document document = original.docIdAndVersion().reader.document(original.docIdAndVersion().docId); - Document restoredDocument = restored.docIdAndVersion().reader.document(restored.docIdAndVersion().docId); + StoredFields storedFields = original.docIdAndVersion().reader.storedFields(); + Document document = storedFields.document(original.docIdAndVersion().docId); + Document restoredDocument = storedFields.document(restored.docIdAndVersion().docId); for (IndexableField field : document) { assertEquals(document.get(field.name()), restoredDocument.get(field.name())); } @@ -470,7 +472,7 @@ public IndexShard reindex(DirectoryReader reader, MappingMetadata mapping) throw for (int i = 0; i < leafReader.maxDoc(); i++) { if (liveDocs == null || liveDocs.get(i)) { rootFieldsVisitor.reset(); - leafReader.document(i, rootFieldsVisitor); + leafReader.storedFields().document(i, rootFieldsVisitor); rootFieldsVisitor.postProcess(targetShard.mapperService()::fieldType); String id = rootFieldsVisitor.id(); BytesReference source = rootFieldsVisitor.source(); diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotTests.java index 65d057408f8bd..8433f38e40a0c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotTests.java @@ -32,6 +32,7 @@ import org.apache.lucene.index.SnapshotDeletionPolicy; import org.apache.lucene.index.SoftDeletesDirectoryReaderWrapper; import org.apache.lucene.index.StandardDirectoryReader; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.FieldExistsQuery; import org.apache.lucene.search.IndexSearcher; @@ -108,8 +109,10 @@ DirectoryReader wrapReader(DirectoryReader reader) throws IOException { logger.warn(snapReader + " " + reader); assertEquals(snapReader.maxDoc(), reader.maxDoc()); assertEquals(snapReader.numDocs(), reader.numDocs()); + StoredFields snapStoredFields = snapReader.storedFields(); + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < snapReader.maxDoc(); i++) { - assertEquals(snapReader.document(i).get("_source"), reader.document(i).get("_source")); + assertEquals(snapStoredFields.document(i).get("_source"), storedFields.document(i).get("_source")); } for (LeafReaderContext ctx : snapReader.leaves()) { if (ctx.reader() instanceof SegmentReader) { @@ -188,12 +191,14 @@ public boolean useCompoundFile(SegmentInfos infos, SegmentCommitInfo mergedInfo, try (DirectoryReader snapReader = DirectoryReader.open(wrappedDir)) { assertEquals(snapReader.maxDoc(), 3); assertEquals(snapReader.numDocs(), 2); + StoredFields snapStoredFields = snapReader.storedFields(); + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < 3; i++) { - assertEquals(snapReader.document(i).get("src"), 
reader.document(i).get("src")); + assertEquals(snapStoredFields.document(i).get("src"), storedFields.document(i).get("src")); } IndexSearcher searcher = newSearcher(snapReader); TopDocs id = searcher.search(new TermQuery(new Term("id", "1")), 10); - assertEquals(0, id.totalHits.value); + assertEquals(0, id.totalHits.value()); } targetDir = newDirectory(targetDir); @@ -321,7 +326,7 @@ public boolean keepFullyDeletedSegment(IOSupplier readerIOSupplier) try (DirectoryReader snapReader = DirectoryReader.open(wrappedDir)) { assertEquals(snapReader.maxDoc(), 1); assertEquals(snapReader.numDocs(), 1); - assertEquals("3", snapReader.document(0).getField("rank").stringValue()); + assertEquals("3", snapReader.storedFields().document(0).getField("rank").stringValue()); } try (IndexReader writerReader = DirectoryReader.open(writer)) { assertEquals(writerReader.maxDoc(), 2); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java index bb727204e2651..114ad90354c61 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java @@ -279,7 +279,7 @@ private void assertCorrectLuceneQuery(String name, Query query, List exp booleanClauses.size() ); for (int i = 0; i < booleanClauses.size(); i++) { - Query clauseQuery = booleanClauses.get(i).getQuery(); + Query clauseQuery = booleanClauses.get(i).query(); assertTrue(name + " query " + query + " expected to be a BoostQuery", clauseQuery instanceof BoostQuery); // FeatureQuery is not visible so we check the String representation assertTrue(name + " query " + query + " expected to be a FeatureQuery", clauseQuery.toString().contains("FeatureQuery")); @@ -353,8 +353,8 @@ protected void 
doAssertLuceneQuery(WeightedTokensQueryBuilder queryBuilder, Quer Class boostQueryClass = FeatureField.newLinearQuery("", "", 1.0f).getClass(); for (var clause : booleanQuery.clauses()) { - assertEquals(BooleanClause.Occur.SHOULD, clause.getOccur()); - assertThat(clause.getQuery(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); + assertEquals(BooleanClause.Occur.SHOULD, clause.occur()); + assertThat(clause.query(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java index c40dd00e0e350..6fe271d1b05e3 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java @@ -96,25 +96,25 @@ public void testSearch() throws Exception { ); assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(1)); TopDocs result = indexSearcher.search(new MatchAllDocsQuery(), 1); - assertThat(result.totalHits.value, equalTo(1L)); + assertThat(result.totalHits.value(), equalTo(1L)); assertThat(result.scoreDocs[0].doc, equalTo(0)); indexSearcher = newSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetCache, new TermQuery(new Term("field", "value2")))); assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(1)); result = indexSearcher.search(new MatchAllDocsQuery(), 1); - assertThat(result.totalHits.value, equalTo(1L)); + assertThat(result.totalHits.value(), equalTo(1L)); assertThat(result.scoreDocs[0].doc, equalTo(1)); // this doc has been marked as deleted: indexSearcher = newSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetCache, new TermQuery(new Term("field", 
"value3")))); assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(0)); result = indexSearcher.search(new MatchAllDocsQuery(), 1); - assertThat(result.totalHits.value, equalTo(0L)); + assertThat(result.totalHits.value(), equalTo(0L)); indexSearcher = newSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetCache, new TermQuery(new Term("field", "value4")))); assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(1)); result = indexSearcher.search(new MatchAllDocsQuery(), 1); - assertThat(result.totalHits.value, equalTo(1L)); + assertThat(result.totalHits.value(), equalTo(1L)); assertThat(result.scoreDocs[0].doc, equalTo(3)); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java index 560dee9b5843c..db250b16eab16 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java @@ -30,6 +30,7 @@ import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; @@ -205,8 +206,9 @@ public void testKnnVectors() throws Exception { FloatVectorValues vectorValues = leafReader.getFloatVectorValues("fieldA"); assertEquals(3, vectorValues.dimension()); assertEquals(1, vectorValues.size()); - assertEquals(0, vectorValues.nextDoc()); - assertNotNull(vectorValues.vectorValue()); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + assertEquals(0, iterator.nextDoc()); + 
assertNotNull(vectorValues.vectorValue(iterator.index())); TopDocs topDocs = leafReader.searchNearestVectors("fieldA", new float[] { 1.0f, 1.0f, 1.0f }, 5, null, Integer.MAX_VALUE); assertNotNull(topDocs); @@ -215,7 +217,7 @@ public void testKnnVectors() throws Exception { // Check that we can't see fieldB assertNull(leafReader.getFloatVectorValues("fieldB")); topDocs = leafReader.searchNearestVectors("fieldB", new float[] { 1.0f, 1.0f, 1.0f }, 5, null, Integer.MAX_VALUE); - assertEquals(0, topDocs.totalHits.value); + assertEquals(0, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs.length); TestUtil.checkReader(ir); @@ -239,8 +241,9 @@ public void testKnnByteVectors() throws Exception { ByteVectorValues vectorValues = leafReader.getByteVectorValues("fieldA"); assertEquals(3, vectorValues.dimension()); assertEquals(1, vectorValues.size()); - assertEquals(0, vectorValues.nextDoc()); - assertNotNull(vectorValues.vectorValue()); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + assertEquals(0, iterator.nextDoc()); + assertNotNull(vectorValues.vectorValue(iterator.index())); TopDocs topDocs = leafReader.searchNearestVectors("fieldA", new byte[] { 1, 1, 1 }, 5, null, Integer.MAX_VALUE); assertNotNull(topDocs); @@ -249,7 +252,7 @@ public void testKnnByteVectors() throws Exception { // Check that we can't see fieldB assertNull(leafReader.getByteVectorValues("fieldB")); topDocs = leafReader.searchNearestVectors("fieldB", new byte[] { 1, 1, 1 }, 5, null, Integer.MAX_VALUE); - assertEquals(0, topDocs.totalHits.value); + assertEquals(0, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs.length); TestUtil.checkReader(ir); @@ -274,11 +277,6 @@ public void testStoredFieldsString() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - 
assertEquals("testA", d2.get("fieldA")); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -306,11 +304,6 @@ public void testStoredFieldsBinary() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals(new BytesRef("testA"), d2.getBinaryValue("fieldA")); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -338,11 +331,6 @@ public void testStoredFieldsInt() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals(1, d2.getField("fieldA").numericValue()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -370,11 +358,6 @@ public void testStoredFieldsLong() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals(1L, d2.getField("fieldA").numericValue()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -402,11 +385,6 @@ public void testStoredFieldsFloat() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals(1F, d2.getField("fieldA").numericValue()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -434,11 +412,6 @@ public void 
testStoredFieldsDouble() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals(1D, d2.getField("fieldA").numericValue()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -468,7 +441,7 @@ public void testVectors() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - Fields vectors = ir.getTermVectors(0); + Fields vectors = ir.termVectors().get(0); Set seenFields = new HashSet<>(); for (String field : vectors) { seenFields.add(field); @@ -615,7 +588,6 @@ public void testSortedSetDocValues() throws Exception { assertNotNull(dv); assertTrue(dv.advanceExact(0)); assertEquals(0, dv.nextOrd()); - assertEquals(SortedSetDocValues.NO_MORE_ORDS, dv.nextOrd()); assertEquals(new BytesRef("testA"), dv.lookupOrd(0)); assertNull(segmentReader.getSortedSetDocValues("fieldB")); @@ -702,11 +674,6 @@ public void testSourceFilteringIntegration() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(automaton)); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals("{\"fieldA\":\"testA\"}", d2.getBinaryValue(SourceFieldMapper.NAME).utf8ToString()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -1201,7 +1168,7 @@ public void testFilterAwayAllVectors() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldB"))); // sees no fields - assertNull(ir.getTermVectors(0)); + assertNull(ir.termVectors().get(0)); TestUtil.checkReader(ir); IOUtils.close(ir, iw, dir); @@ -1229,14 
+1196,9 @@ public void testEmpty() throws Exception { assertNull(segmentReader.terms("foo")); // see no vectors - assertNull(segmentReader.getTermVectors(0)); assertNull(segmentReader.termVectors().get(0)); // see no stored fields - { - Document document = segmentReader.document(0); - assertEquals(0, document.getFields().size()); - } { Document document = segmentReader.storedFields().document(0); assertEquals(0, document.getFields().size()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java index df64c4f87410a..4751f66cf548e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java @@ -193,7 +193,7 @@ protected IndicesAccessControl getIndicesAccessControl() { int expectedHitCount = valuesHitCount[i]; logger.info("Going to verify hit count with query [{}] with expected total hits [{}]", parsedQuery.query(), expectedHitCount); - Integer totalHits = indexSearcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = indexSearcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(indexSearcher.getSlices())); assertThat(totalHits, equalTo(expectedHitCount)); assertThat(wrappedDirectoryReader.numDocs(), equalTo(expectedHitCount)); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java index 265714ee6ea16..073b3b92a43a5 100644 --- 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.authz.privilege; -import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.delete.TransportDeleteAction; @@ -17,6 +16,7 @@ import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction; +import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; import java.util.Collection; @@ -83,7 +83,7 @@ public void testPrivilegesForGetCheckPointAction() { public void testRelationshipBetweenPrivileges() { assertThat( - Operations.subsetOf( + Automatons.subsetOf( IndexPrivilege.get(Set.of("view_index_metadata")).automaton, IndexPrivilege.get(Set.of("manage")).automaton ), @@ -91,12 +91,12 @@ public void testRelationshipBetweenPrivileges() { ); assertThat( - Operations.subsetOf(IndexPrivilege.get(Set.of("monitor")).automaton, IndexPrivilege.get(Set.of("manage")).automaton), + Automatons.subsetOf(IndexPrivilege.get(Set.of("monitor")).automaton, IndexPrivilege.get(Set.of("manage")).automaton), is(true) ); assertThat( - Operations.subsetOf( + Automatons.subsetOf( IndexPrivilege.get(Set.of("create", "create_doc", "index", "delete")).automaton, IndexPrivilege.get(Set.of("write")).automaton ), @@ -104,7 +104,7 @@ public void testRelationshipBetweenPrivileges() { ); assertThat( - Operations.subsetOf( + Automatons.subsetOf( IndexPrivilege.get(Set.of("create_index", "delete_index")).automaton, IndexPrivilege.get(Set.of("manage")).automaton ), @@ 
-122,7 +122,7 @@ public void testCrossClusterReplicationPrivileges() { "indices:admin/seq_no/renew_retention_lease" ).forEach(action -> assertThat(crossClusterReplication.predicate.test(action + randomAlphaOfLengthBetween(0, 8)), is(true))); assertThat( - Operations.subsetOf(crossClusterReplication.automaton, IndexPrivilege.get(Set.of("manage", "read", "monitor")).automaton), + Automatons.subsetOf(crossClusterReplication.automaton, IndexPrivilege.get(Set.of("manage", "read", "monitor")).automaton), is(true) ); @@ -139,10 +139,10 @@ public void testCrossClusterReplicationPrivileges() { ); assertThat( - Operations.subsetOf(crossClusterReplicationInternal.automaton, IndexPrivilege.get(Set.of("manage")).automaton), + Automatons.subsetOf(crossClusterReplicationInternal.automaton, IndexPrivilege.get(Set.of("manage")).automaton), is(false) ); - assertThat(Operations.subsetOf(crossClusterReplicationInternal.automaton, IndexPrivilege.get(Set.of("all")).automaton), is(true)); + assertThat(Automatons.subsetOf(crossClusterReplicationInternal.automaton, IndexPrivilege.get(Set.of("all")).automaton), is(true)); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java index 6f3c435eb12f6..a58acf82ea44e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java @@ -6,7 +6,7 @@ */ package org.elasticsearch.xpack.core.security.authz.privilege; -import org.apache.lucene.util.automaton.Operations; +import org.apache.lucene.tests.util.automaton.AutomatonTestUtil; import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import 
org.elasticsearch.action.admin.cluster.reroute.TransportClusterRerouteAction; @@ -218,13 +218,13 @@ public void testIndexCollapse() throws Exception { Set name = Sets.newHashSet(first.name().iterator().next(), second.name().iterator().next()); IndexPrivilege index = IndexPrivilege.get(name); - if (Operations.subsetOf(second.getAutomaton(), first.getAutomaton())) { - assertTrue(Operations.sameLanguage(index.getAutomaton(), first.getAutomaton())); - } else if (Operations.subsetOf(first.getAutomaton(), second.getAutomaton())) { - assertTrue(Operations.sameLanguage(index.getAutomaton(), second.getAutomaton())); + if (Automatons.subsetOf(second.getAutomaton(), first.getAutomaton())) { + assertTrue(AutomatonTestUtil.sameLanguage(index.getAutomaton(), first.getAutomaton())); + } else if (Automatons.subsetOf(first.getAutomaton(), second.getAutomaton())) { + assertTrue(AutomatonTestUtil.sameLanguage(index.getAutomaton(), second.getAutomaton())); } else { - assertFalse(Operations.sameLanguage(index.getAutomaton(), first.getAutomaton())); - assertFalse(Operations.sameLanguage(index.getAutomaton(), second.getAutomaton())); + assertFalse(AutomatonTestUtil.sameLanguage(index.getAutomaton(), first.getAutomaton())); + assertFalse(AutomatonTestUtil.sameLanguage(index.getAutomaton(), second.getAutomaton())); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java index 0b2e48bd20dfe..94f91f427e19a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java @@ -20,7 +20,6 @@ import java.util.Arrays; import java.util.List; -import static org.apache.lucene.util.automaton.Operations.DEFAULT_DETERMINIZE_WORK_LIMIT; import static 
org.elasticsearch.xpack.core.security.support.Automatons.pattern; import static org.elasticsearch.xpack.core.security.support.Automatons.patterns; import static org.elasticsearch.xpack.core.security.support.Automatons.predicate; @@ -115,12 +114,12 @@ public void testPatternComplexity() { } private void assertMatch(Automaton automaton, String text) { - CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton, DEFAULT_DETERMINIZE_WORK_LIMIT); + CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton); assertTrue(runAutomaton.run(text)); } private void assertMismatch(Automaton automaton, String text) { - CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton, DEFAULT_DETERMINIZE_WORK_LIMIT); + CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton); assertFalse(runAutomaton.run(text)); } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java index d3dcd7ae36f59..65d53d3adabe7 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java @@ -125,7 +125,7 @@ public void testRunner() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = searchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); @@ -176,7 +176,7 @@ public void testRunner() throws Exception { ), enrichSearchResponse -> { - 
assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(3))); @@ -202,7 +202,7 @@ public void testRunnerGeoMatchType() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("location"), is(equalTo("POINT(10.0 10.0)"))); @@ -244,7 +244,7 @@ public void testRunnerGeoMatchType() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(2))); @@ -286,7 +286,7 @@ private void testNumberRangeMatchType(String rangeType) throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("range"), is(equalTo(Map.of("lt", 10, "gt", 1)))); @@ -330,7 +330,7 @@ private 
void testNumberRangeMatchType(String rangeType) throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(2))); @@ -376,7 +376,7 @@ public void testRunnerRangeTypeWithIpRange() throws Exception { new SearchRequest(sourceIndexName).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("subnet"), is(equalTo("10.0.0.0/8"))); @@ -421,7 +421,7 @@ public void testRunnerRangeTypeWithIpRange() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(2))); @@ -460,7 +460,7 @@ public void testRunnerMultiSource() throws Exception { new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("idx"), is(equalTo(targetIdx))); @@ -522,7 +522,7 @@ 
public void testRunnerMultiSource() throws Exception { new SearchRequest(".enrich-test1").source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(3L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(5))); @@ -564,7 +564,7 @@ public void testRunnerMultiSourceDocIdCollisions() throws Exception { new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("idx"), is(equalTo(targetIdx))); @@ -633,7 +633,7 @@ public void testRunnerMultiSourceDocIdCollisions() throws Exception { new SearchRequest(".enrich-test1").source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(3L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(5))); @@ -688,7 +688,7 @@ public void testRunnerMultiSourceEnrichKeyCollisions() throws Exception { new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + 
assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("idx"), is(equalTo(targetIdx))); @@ -749,7 +749,7 @@ public void testRunnerMultiSourceEnrichKeyCollisions() throws Exception { new SearchRequest(".enrich-test1").source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(3L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(5))); @@ -943,7 +943,7 @@ public void testRunnerObjectSourceMapping() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -993,7 +993,7 @@ public void testRunnerObjectSourceMapping() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1051,7 +1051,7 @@ public void testRunnerExplicitObjectSourceMapping() throws Exception { assertResponse( client().search(new 
SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1100,7 +1100,7 @@ public void testRunnerExplicitObjectSourceMapping() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1158,7 +1158,7 @@ public void testRunnerExplicitObjectSourceMappingRangePolicy() throws Exception assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1209,7 +1209,7 @@ public void testRunnerExplicitObjectSourceMappingRangePolicy() throws Exception ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1273,7 +1273,7 @@ public void 
testRunnerTwoObjectLevelsSourceMapping() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1329,7 +1329,7 @@ public void testRunnerTwoObjectLevelsSourceMapping() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1395,7 +1395,7 @@ public void testRunnerTwoObjectLevelsSourceMappingRangePolicy() throws Exception assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1450,7 +1450,7 @@ public void testRunnerTwoObjectLevelsSourceMappingRangePolicy() throws Exception new SearchRequest(".enrich-test1").source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), 
equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1519,7 +1519,7 @@ public void testRunnerTwoObjectLevelsSourceMappingDateRangeWithFormat() throws E assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1580,7 +1580,7 @@ public void testRunnerTwoObjectLevelsSourceMappingDateRangeWithFormat() throws E SearchSourceBuilder.searchSource().query(QueryBuilders.matchQuery("data.fields.period", "2021-08-19T14:00:00Z")) ) ), - enrichSearchResponse -> assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(0L)) + enrichSearchResponse -> assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(0L)) ); assertResponse( @@ -1590,7 +1590,7 @@ public void testRunnerTwoObjectLevelsSourceMappingDateRangeWithFormat() throws E ) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1614,7 +1614,7 @@ public void testRunnerTwoObjectLevelsSourceMappingDateRangeWithFormat() throws E SearchSourceBuilder.searchSource().query(QueryBuilders.matchQuery("data.fields.period", "2021/08/20 at 14:00")) ) ), - enrichSearchResponse -> assertThat(enrichSearchResponse.getHits().getTotalHits().value, 
equalTo(1L)) + enrichSearchResponse -> assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)) ); // Validate segments @@ -1657,7 +1657,7 @@ public void testRunnerDottedKeyNameSourceMapping() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("data.field1"), is(equalTo("value1"))); @@ -1704,7 +1704,7 @@ public void testRunnerDottedKeyNameSourceMapping() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(2))); @@ -1736,7 +1736,7 @@ public void testRunnerWithForceMergeRetry() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); @@ -1868,7 +1868,7 @@ protected void afterRefreshEnrichIndex(ActionListener listener) { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + 
assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(3))); @@ -1901,7 +1901,7 @@ public void testRunnerWithEmptySegmentsResponse() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); @@ -2007,7 +2007,7 @@ public void testRunnerWithShardFailuresInSegmentResponse() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); @@ -2405,7 +2405,7 @@ public void testRunnerValidatesIndexIntegrity() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); 
assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java index 00f22aca2cb92..8dbc9b0f4f43a 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java @@ -64,7 +64,7 @@ public void testExecute() throws Exception { assertThat(response.getResponses().length, equalTo(numSearches)); for (int i = 0; i < numSearches; i++) { assertThat(response.getResponses()[i].isFailure(), is(false)); - assertThat(response.getResponses()[i].getResponse().getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getResponses()[i].getResponse().getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getResponses()[i].getResponse().getHits().getHits()[0].getSourceAsMap().size(), equalTo(1)); assertThat( response.getResponses()[i].getResponse().getHits().getHits()[0].getSourceAsMap().get("key1"), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index cd98b43adc159..5e1fde0dfb942 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -1073,7 +1073,7 @@ private static ConnectorIndexService.ConnectorResult mapSearchResponseToConnecto final List connectorResults = Arrays.stream(response.getHits().getHits()) .map(ConnectorIndexService::hitToConnector) .toList(); - return new 
ConnectorIndexService.ConnectorResult(connectorResults, (int) response.getHits().getTotalHits().value); + return new ConnectorIndexService.ConnectorResult(connectorResults, (int) response.getHits().getTotalHits().value()); } private static ConnectorSearchResult hitToConnector(SearchHit searchHit) { @@ -1115,7 +1115,7 @@ private void isDataIndexNameAlreadyInUse(String indexName, String connectorId, A client.search(searchRequest, new ActionListener<>() { @Override public void onResponse(SearchResponse searchResponse) { - boolean indexNameIsInUse = searchResponse.getHits().getTotalHits().value > 0L; + boolean indexNameIsInUse = searchResponse.getHits().getTotalHits().value() > 0L; listener.onResponse(indexNameIsInUse); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index 9ef895a3a5786..ce6f7f0dbf2b2 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -417,7 +417,7 @@ private ConnectorSyncJobsResult mapSearchResponseToConnectorSyncJobsList(SearchR .map(ConnectorSyncJobIndexService::hitToConnectorSyncJob) .toList(); - return new ConnectorSyncJobsResult(connectorSyncJobs, (int) searchResponse.getHits().getTotalHits().value); + return new ConnectorSyncJobsResult(connectorSyncJobs, (int) searchResponse.getHits().getTotalHits().value()); } private static ConnectorSyncJobSearchResult hitToConnectorSyncJob(SearchHit searchHit) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java index 2eec155ae8ea2..8bf4bbd5716b7 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java @@ -436,7 +436,7 @@ private static QueryRulesetResult mapSearchResponseToQueryRulesetList(SearchResp final List rulesetResults = Arrays.stream(response.getHits().getHits()) .map(QueryRulesIndexService::hitToQueryRulesetListItem) .toList(); - return new QueryRulesetResult(rulesetResults, (int) response.getHits().getTotalHits().value); + return new QueryRulesetResult(rulesetResults, (int) response.getHits().getTotalHits().value()); } private static QueryRulesetListItem hitToQueryRulesetListItem(SearchHit searchHit) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java index 9e8a8f750b764..30d533aeb9ae5 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java @@ -416,7 +416,7 @@ private static SearchApplicationResult mapSearchResponse(SearchResponse response final List apps = Arrays.stream(response.getHits().getHits()) .map(SearchApplicationIndexService::hitToSearchApplicationListItem) .toList(); - return new SearchApplicationResult(apps, (int) response.getHits().getTotalHits().value); + return new SearchApplicationResult(apps, (int) response.getHits().getTotalHits().value()); } private static SearchApplicationListItem hitToSearchApplicationListItem(SearchHit searchHit) { diff --git 
a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java index be1d4c0871ca7..2b7b8b074fa71 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java @@ -582,8 +582,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(Fields.HITS); if (totalHits != null) { builder.startObject(Fields.TOTAL); - builder.field("value", totalHits.value); - builder.field("relation", totalHits.relation == TotalHits.Relation.EQUAL_TO ? "eq" : "gte"); + builder.field("value", totalHits.value()); + builder.field("relation", totalHits.relation() == TotalHits.Relation.EQUAL_TO ? "eq" : "gte"); builder.endObject(); } if (events != null) { diff --git a/x-pack/plugin/eql/src/test/resources/querytranslator_tests.txt b/x-pack/plugin/eql/src/test/resources/querytranslator_tests.txt index b04d28654f1d5..00c08096fd084 100644 --- a/x-pack/plugin/eql/src/test/resources/querytranslator_tests.txt +++ b/x-pack/plugin/eql/src/test/resources/querytranslator_tests.txt @@ -769,7 +769,7 @@ process where command_line regex "^.*?net.exe" regexSingleArgInsensitive process where command_line regex~ "^.*?net.exe" ; -"regexp":{"command_line":{"value":"^.*?net.exe","flags_value":255,"case_insensitive":true +"regexp":{"command_line":{"value":"^.*?net.exe","flags_value":65791,"case_insensitive":true ; regexMultiArg @@ -781,7 +781,7 @@ process where command_line regex ("^.*?net.exe", "net\\.exe") regexMultiArgInsensitive process where command_line regex~ ("^.*?net.exe", "net\\.exe") ; -"regexp":{"command_line":{"value":"^.*?net.exe|net\\.exe","flags_value":255,"case_insensitive":true +"regexp":{"command_line":{"value":"^.*?net.exe|net\\.exe","flags_value":65791,"case_insensitive":true ; 
regexMultiMultiArgVariant @@ -793,7 +793,7 @@ process where command_line regex ("^.*?net.exe", "net\\.exe", "C:\\\\Windows\\\\ regexMultiMultiArgVariantInsensitive process where command_line regex~ ("^.*?net.exe", "net\\.exe", "C:\\\\Windows\\\\system32\\\\net1\\s+") ; -"regexp":{"command_line":{"value":"^.*?net.exe|net\\.exe|C:\\\\Windows\\\\system32\\\\net1\\s+","flags_value":255,"case_insensitive":true +"regexp":{"command_line":{"value":"^.*?net.exe|net\\.exe|C:\\\\Windows\\\\system32\\\\net1\\s+","flags_value":65791,"case_insensitive":true ; regexMultiArgWithScript diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLikePattern.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLikePattern.java index f437dc5819dcb..4e559f564acb1 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLikePattern.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLikePattern.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.core.expression.predicate.regex; import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import java.util.Objects; @@ -21,7 +22,10 @@ public RLikePattern(String regexpPattern) { @Override public Automaton createAutomaton() { - return new RegExp(regexpPattern).toAutomaton(); + return Operations.determinize( + new RegExp(regexpPattern, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(), + Operations.DEFAULT_DETERMINIZE_WORK_LIMIT + ); } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardPattern.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardPattern.java index 
7cedbc4742138..3e9cbf92727c2 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardPattern.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardPattern.java @@ -9,7 +9,6 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.xpack.esql.core.util.StringUtils; @@ -39,8 +38,7 @@ public String pattern() { @Override public Automaton createAutomaton() { - Automaton automaton = WildcardQuery.toAutomaton(new Term(null, wildcard)); - return MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return WildcardQuery.toAutomaton(new Term(null, wildcard), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java index 1c9c97a364fc7..9633051781f4a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java @@ -120,7 +120,7 @@ static List> docSlices(IndexReader indexReader, i } static List> segmentSlices(List leafContexts) { - IndexSearcher.LeafSlice[] gs = IndexSearcher.slices(leafContexts, MAX_DOCS_PER_SLICE, MAX_SEGMENTS_PER_SLICE); - return Arrays.stream(gs).map(g -> Arrays.stream(g.leaves).map(PartialLeafReaderContext::new).toList()).toList(); + IndexSearcher.LeafSlice[] gs = IndexSearcher.slices(leafContexts, MAX_DOCS_PER_SLICE, MAX_SEGMENTS_PER_SLICE, false); + return Arrays.stream(gs).map(g -> 
Arrays.stream(g.partitions).map(PartialLeafReaderContext::new).toList()).toList(); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java index 2e32d20a2365e..0f600958b93b3 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java @@ -15,6 +15,7 @@ import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TopFieldCollector; +import org.apache.lucene.search.TopFieldCollectorManager; import org.elasticsearch.common.Strings; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DocVector; @@ -230,8 +231,9 @@ static final class PerShardCollector { if (sortAndFormats.isEmpty()) { throw new IllegalStateException("sorts must not be disabled in TopN"); } + // We don't use CollectorManager here as we don't retrieve the total hits and sort by score. 
- this.topFieldCollector = TopFieldCollector.create(sortAndFormats.get().sort, limit, 0); + this.topFieldCollector = new TopFieldCollectorManager(sortAndFormats.get().sort, limit, null, 0, false).newCollector(); } LeafCollector getLeafCollector(LeafReaderContext leafReaderContext) throws IOException { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/PartialLeafReaderContext.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/PartialLeafReaderContext.java index e9063c9597c5f..c92dc75397729 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/PartialLeafReaderContext.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/PartialLeafReaderContext.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.lucene; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.IndexSearcher; /** * A subset of a {@link LeafReaderContext}. @@ -16,6 +17,10 @@ * @param maxDoc one more than the last document */ public record PartialLeafReaderContext(LeafReaderContext leafReaderContext, int minDoc, int maxDoc) { + public PartialLeafReaderContext(IndexSearcher.LeafReaderContextPartition partition) { + this(partition.ctx, partition.minDocId, partition.maxDocId); + } + public PartialLeafReaderContext(LeafReaderContext leafReaderContext) { this(leafReaderContext, 0, leafReaderContext.reader().maxDoc()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java index 6937f1a8c7772..f70cfe1dc8a41 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java @@ -10,6 +10,7 @@ import org.apache.lucene.index.IndexReader; import 
org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Query; @@ -89,7 +90,7 @@ public Page getOutput() { continue; } final DocCollector collector = new DocCollector(docsBuilder); - scorer.score(collector, leaf.reader().getLiveDocs()); + scorer.score(collector, leaf.reader().getLiveDocs(), 0, DocIdSetIterator.NO_MORE_DOCS); int matches = collector.matches; if (segmentsBuilder != null) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java index 09166f0cff7a8..0af22a357aeca 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java @@ -37,7 +37,7 @@ public static EvalOperator.ExpressionEvaluator.Factory toEvaluator( * we couldn't get a nice toDot - so we call UTF32ToUTF8 ourselves. 
*/ Automaton automaton = Operations.determinize(new UTF32ToUTF8().convert(utf32Automaton), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - ByteRunAutomaton run = new ByteRunAutomaton(automaton, true, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + ByteRunAutomaton run = new ByteRunAutomaton(automaton, true); return new AutomataMatchEvaluator.Factory(source, field, run, toDot(automaton)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index 7ff09c23a1403..5903d725bf9c2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -441,7 +441,7 @@ public NamedExpression visitQualifiedNamePattern(EsqlBaseParser.QualifiedNamePat // use the fast run variant result = new UnresolvedNamePattern( src, - new CharacterRunAutomaton(Operations.concatenate(list)), + new CharacterRunAutomaton(Operations.determinize(Operations.concatenate(list), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)), patternString.toString(), nameString.toString() ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMatchQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMatchQuery.java index 214c7b1053359..f6668db52b93b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMatchQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMatchQuery.java @@ -69,14 +69,6 @@ public String toString(String field) { @Override public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) { return new ConstantScoreWeight(this, boost) { - @Override - public Scorer scorer(LeafReaderContext context) throws IOException 
{ - final ScorerSupplier scorerSupplier = scorerSupplier(context); - if (scorerSupplier == null) { - return null; - } - return scorerSupplier.get(Long.MAX_VALUE); - } @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { @@ -96,12 +88,12 @@ public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOExcepti * can't do that because we need the check the number of fields. */ if (lfd instanceof LeafNumericFieldData n) { - return scorerSupplier(context, n.getLongValues(), this, boost, scoreMode); + return scorerSupplier(context, n.getLongValues(), boost, scoreMode); } if (lfd instanceof LeafOrdinalsFieldData o) { - return scorerSupplier(context, o.getOrdinalsValues(), this, boost, scoreMode); + return scorerSupplier(context, o.getOrdinalsValues(), boost, scoreMode); } - return scorerSupplier(context, lfd.getBytesValues(), this, boost, scoreMode); + return scorerSupplier(context, lfd.getBytesValues(), boost, scoreMode); } @Override @@ -113,7 +105,6 @@ public boolean isCacheable(LeafReaderContext ctx) { private ScorerSupplier scorerSupplier( LeafReaderContext context, SortedNumericDocValues sortedNumerics, - Weight weight, float boost, ScoreMode scoreMode ) throws IOException { @@ -122,16 +113,9 @@ private ScorerSupplier scorerSupplier( // check for dense field final PointValues points = context.reader().getPointValues(fieldData.getFieldName()); if (points != null && points.getDocCount() == maxDoc) { - return new DocIdSetIteratorScorerSupplier(weight, boost, scoreMode, DocIdSetIterator.all(maxDoc)); + return new DocIdSetIteratorScorerSupplier(boost, scoreMode, DocIdSetIterator.all(maxDoc)); } else { - return new PredicateScorerSupplier( - weight, - boost, - scoreMode, - maxDoc, - MULTI_VALUE_MATCH_COST, - sortedNumerics::advanceExact - ); + return new PredicateScorerSupplier(boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, sortedNumerics::advanceExact); } } final CheckedIntPredicate predicate = doc -> { @@ -144,13 
+128,12 @@ private ScorerSupplier scorerSupplier( } return true; }; - return new PredicateScorerSupplier(weight, boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); + return new PredicateScorerSupplier(boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); } private ScorerSupplier scorerSupplier( LeafReaderContext context, SortedSetDocValues sortedSetDocValues, - Weight weight, float boost, ScoreMode scoreMode ) throws IOException { @@ -159,10 +142,9 @@ private ScorerSupplier scorerSupplier( // check for dense field final Terms terms = context.reader().terms(fieldData.getFieldName()); if (terms != null && terms.getDocCount() == maxDoc) { - return new DocIdSetIteratorScorerSupplier(weight, boost, scoreMode, DocIdSetIterator.all(maxDoc)); + return new DocIdSetIteratorScorerSupplier(boost, scoreMode, DocIdSetIterator.all(maxDoc)); } else { return new PredicateScorerSupplier( - weight, boost, scoreMode, maxDoc, @@ -181,20 +163,18 @@ private ScorerSupplier scorerSupplier( } return true; }; - return new PredicateScorerSupplier(weight, boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); + return new PredicateScorerSupplier(boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); } private ScorerSupplier scorerSupplier( LeafReaderContext context, SortedBinaryDocValues sortedBinaryDocValues, - Weight weight, float boost, ScoreMode scoreMode ) { final int maxDoc = context.reader().maxDoc(); if (FieldData.unwrapSingleton(sortedBinaryDocValues) != null) { return new PredicateScorerSupplier( - weight, boost, scoreMode, maxDoc, @@ -212,7 +192,7 @@ private ScorerSupplier scorerSupplier( } return true; }; - return new PredicateScorerSupplier(weight, boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); + return new PredicateScorerSupplier(boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); } }; } @@ -266,13 +246,11 @@ public int hashCode() { private static class DocIdSetIteratorScorerSupplier extends ScorerSupplier { - private 
final Weight weight; private final float score; private final ScoreMode scoreMode; private final DocIdSetIterator docIdSetIterator; - private DocIdSetIteratorScorerSupplier(Weight weight, float score, ScoreMode scoreMode, DocIdSetIterator docIdSetIterator) { - this.weight = weight; + private DocIdSetIteratorScorerSupplier(float score, ScoreMode scoreMode, DocIdSetIterator docIdSetIterator) { this.score = score; this.scoreMode = scoreMode; this.docIdSetIterator = docIdSetIterator; @@ -280,7 +258,7 @@ private DocIdSetIteratorScorerSupplier(Weight weight, float score, ScoreMode sco @Override public Scorer get(long leadCost) { - return new ConstantScoreScorer(weight, score, scoreMode, docIdSetIterator); + return new ConstantScoreScorer(score, scoreMode, docIdSetIterator); } @Override @@ -290,23 +268,13 @@ public long cost() { } private static class PredicateScorerSupplier extends ScorerSupplier { - - private final Weight weight; private final float score; private final ScoreMode scoreMode; private final int maxDoc; private final int matchCost; private final CheckedIntPredicate predicate; - private PredicateScorerSupplier( - Weight weight, - float score, - ScoreMode scoreMode, - int maxDoc, - int matchCost, - CheckedIntPredicate predicate - ) { - this.weight = weight; + private PredicateScorerSupplier(float score, ScoreMode scoreMode, int maxDoc, int matchCost, CheckedIntPredicate predicate) { this.score = score; this.scoreMode = scoreMode; this.maxDoc = maxDoc; @@ -327,7 +295,7 @@ public float matchCost() { return matchCost; } }; - return new ConstantScoreScorer(weight, score, scoreMode, iterator); + return new ConstantScoreScorer(score, scoreMode, iterator); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java index 107c2af11c4f1..04da5d406fbb9 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java @@ -108,7 +108,7 @@ public void testQueries() throws Exception { QueryList queryList = QueryList.termQueryList(uidField, mock(SearchExecutionContext.class), inputTerms, KEYWORD); assertThat(queryList.getPositionCount(), equalTo(6)); assertThat(queryList.getQuery(0), equalTo(new TermQuery(new Term("uid", new BytesRef("b2"))))); - assertThat(queryList.getQuery(1), equalTo(new TermInSetQuery("uid", new BytesRef("c1"), new BytesRef("a2")))); + assertThat(queryList.getQuery(1), equalTo(new TermInSetQuery("uid", List.of(new BytesRef("c1"), new BytesRef("a2"))))); assertThat(queryList.getQuery(2), equalTo(new TermQuery(new Term("uid", new BytesRef("z2"))))); assertNull(queryList.getQuery(3)); assertThat(queryList.getQuery(4), equalTo(new TermQuery(new Term("uid", new BytesRef("a3"))))); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java index 2ba397a3cb3de..95444c9b2423f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java @@ -232,7 +232,7 @@ private Object randomValue() { private List docFor(int i, Iterable values) { List fields = new ArrayList<>(); - fields.add(new LongField("i", i)); + fields.add(new LongField("i", i, Field.Store.NO)); fields.add(new TextField("str", "the quick brown fox jumped over the lazy dog", Field.Store.NO)); switch (fieldType) { case "long", "integer", "short", "byte" -> { @@ -270,7 +270,10 @@ public List> build(RandomIndexWriter iw) throws IOException { List> fieldValues = new 
ArrayList<>(100); for (int i = 0; i < 100; i++) { iw.addDocument( - List.of(new LongField("i", i), new TextField("str", "the quick brown fox jumped over the lazy dog", Field.Store.NO)) + List.of( + new LongField("i", i, Field.Store.NO), + new TextField("str", "the quick brown fox jumped over the lazy dog", Field.Store.NO) + ) ); fieldValues.add(List.of()); } diff --git a/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java b/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java index 3623d3671e83f..6d90b0e67ee83 100644 --- a/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java +++ b/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java @@ -6,7 +6,7 @@ */ package org.elasticsearch.xpack.graph.test; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; import org.elasticsearch.action.admin.indices.segments.ShardSegments; @@ -165,7 +165,7 @@ public void testLargeNumberTermsStartCrawl() { VertexRequest peopleNames = hop1.addVertexRequest("people").minDocCount(1); peopleNames.addInclude("john", 1); - for (int i = 0; i < BooleanQuery.getMaxClauseCount() + 1; i++) { + for (int i = 0; i < IndexSearcher.getMaxClauseCount() + 1; i++) { peopleNames.addInclude("unknown" + i, 1); } diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java index 36e8eaf94c8be..b60ce13e0228c 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java +++ 
b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.ExceptionsHelper; @@ -564,7 +564,7 @@ private static void addBigOrClause(Map> lastHopFindings, Boo for (Entry> entry : lastHopFindings.entrySet()) { numClauses += entry.getValue().size(); } - if (numClauses < BooleanQuery.getMaxClauseCount()) { + if (numClauses < IndexSearcher.getMaxClauseCount()) { // We can afford to build a Boolean OR query with individual // boosts for interesting terms for (Entry> entry : lastHopFindings.entrySet()) { @@ -755,7 +755,7 @@ private double getInitialTotalSignalStrength(Hop rootHop, Sampler sample) { private static void addNormalizedBoosts(BoolQueryBuilder includesContainer, VertexRequest vr) { TermBoost[] termBoosts = vr.includeValues(); - if ((includesContainer.should().size() + termBoosts.length) > BooleanQuery.getMaxClauseCount()) { + if ((includesContainer.should().size() + termBoosts.length) > IndexSearcher.getMaxClauseCount()) { // Too many terms - we need a cheaper form of query to execute this List termValues = new ArrayList<>(); for (TermBoost tb : termBoosts) { diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java index 73c0f6d4c7685..54d83af8f5d95 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java +++ 
b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java @@ -135,7 +135,7 @@ public void testBulkOperations() throws Exception { SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().size(0).trackTotalHits(true); SearchResponse searchResponse = client().search(new SearchRequest(INDEX_NAME).source(sourceBuilder)).get(); try { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(totalDocs)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(totalDocs)); } finally { searchResponse.decRef(); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java index 8416d58cb1328..f444719c730f5 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java @@ -598,7 +598,7 @@ public void testSuccessfulParse() throws IOException { generateNestedTermSparseVectorQuery(mapperService.mappingLookup().nestedLookup(), fieldName1, List.of("a")), 10 ); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(3, topDocs.scoreDocs[0].doc); } { @@ -606,7 +606,7 @@ public void testSuccessfulParse() throws IOException { generateNestedTermSparseVectorQuery(mapperService.mappingLookup().nestedLookup(), fieldName1, List.of("a", "b")), 10 ); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(3, topDocs.scoreDocs[0].doc); } { @@ -614,7 +614,7 @@ public void testSuccessfulParse() throws IOException { generateNestedTermSparseVectorQuery(mapperService.mappingLookup().nestedLookup(), fieldName2, List.of("d")), 10 ); - 
assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(3, topDocs.scoreDocs[0].doc); } { @@ -622,7 +622,7 @@ public void testSuccessfulParse() throws IOException { generateNestedTermSparseVectorQuery(mapperService.mappingLookup().nestedLookup(), fieldName2, List.of("z")), 10 ); - assertEquals(0, topDocs.totalHits.value); + assertEquals(0, topDocs.totalHits.value()); } }); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java index f54ce89183079..b8bcb766b53e1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java @@ -199,9 +199,9 @@ private void assertSparseEmbeddingLuceneQuery(Query query) { BooleanQuery innerBooleanQuery = (BooleanQuery) innerQuery; assertThat(innerBooleanQuery.clauses().size(), equalTo(queryTokenCount)); innerBooleanQuery.forEach(c -> { - assertThat(c.getOccur(), equalTo(SHOULD)); - assertThat(c.getQuery(), instanceOf(BoostQuery.class)); - assertThat(((BoostQuery) c.getQuery()).getBoost(), equalTo(TOKEN_WEIGHT)); + assertThat(c.occur(), equalTo(SHOULD)); + assertThat(c.query(), instanceOf(BoostQuery.class)); + assertThat(((BoostQuery) c.query()).getBoost(), equalTo(TOKEN_WEIGHT)); }); } @@ -223,7 +223,7 @@ private Query assertOuterBooleanQuery(Query query) { List outerMustClauses = new ArrayList<>(); List outerFilterClauses = new ArrayList<>(); for (BooleanClause clause : outerBooleanQuery.clauses()) { - BooleanClause.Occur occur = clause.getOccur(); + BooleanClause.Occur occur = clause.occur(); if (occur == MUST) { outerMustClauses.add(clause); } else if (occur == FILTER) { @@ -236,7 +236,7 @@ private Query 
assertOuterBooleanQuery(Query query) { assertThat(outerMustClauses.size(), equalTo(1)); assertThat(outerFilterClauses.size(), equalTo(1)); - return outerMustClauses.get(0).getQuery(); + return outerMustClauses.get(0).query(); } @Override diff --git a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java index 081a170aac9f1..bb4464542a422 100644 --- a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java +++ b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java @@ -87,7 +87,7 @@ protected void doExecute(Task task, GetPipelineRequest request, ActionListener { - final int numHits = Math.toIntExact(searchResponse.getHits().getTotalHits().value); + final int numHits = Math.toIntExact(searchResponse.getHits().getTotalHits().value()); final Map pipelineSources = Maps.newMapWithExpectedSize(numHits); final Consumer clearScroll = (response) -> { if (response != null && response.getScrollId() != null) { @@ -148,14 +148,14 @@ private void handleFilteringSearchResponse( ActionListener listener ) { int numberOfHitsSeenSoFar = numberOfHitsSeenPreviously + searchResponse.getHits().getHits().length; - if (numberOfHitsSeenSoFar > searchResponse.getHits().getTotalHits().value) { + if (numberOfHitsSeenSoFar > searchResponse.getHits().getTotalHits().value()) { clearScroll.accept(searchResponse); listener.onFailure( new IllegalStateException( "scrolling returned more hits [" + numberOfHitsSeenSoFar + "] than expected [" - + searchResponse.getHits().getTotalHits().value + + searchResponse.getHits().getTotalHits().value() + "] so bailing out to prevent unbounded " + "memory consumption." 
) @@ -179,7 +179,7 @@ private void handleFilteringSearchResponse( } } - if (numberOfHitsSeenSoFar == searchResponse.getHits().getTotalHits().value) { + if (numberOfHitsSeenSoFar == searchResponse.getHits().getTotalHits().value()) { clearScroll.accept(searchResponse); listener.onResponse(new GetPipelineResponse(pipelineSources)); } else { diff --git a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java index c594c9f553164..216f82552353b 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java @@ -19,6 +19,7 @@ import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.apache.lucene.util.automaton.LevenshteinAutomata; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.logging.DeprecationCategory; @@ -291,7 +292,10 @@ public Query regexpQuery( return new MatchNoDocsQuery(); } - final Automaton automaton = new RegExp(regexp, syntaxFlags, matchFlags).toAutomaton(maxDeterminizedStates); + final Automaton automaton = Operations.determinize( + new RegExp(regexp, syntaxFlags, matchFlags).toAutomaton(), + maxDeterminizedStates + ); final CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton); if (runAutomaton.run(this.value)) { return new MatchAllDocsQuery(); diff --git a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java 
b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java index e52237f4d507e..3a50cc8143485 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java @@ -242,11 +242,8 @@ public int docValueCount() { @Override public long nextOrd() { - if (ordsForThisDoc.hasNext()) { - return ordsForThisDoc.next(); - } else { - return NO_MORE_ORDS; - } + assert ordsForThisDoc.hasNext(); + return ordsForThisDoc.next(); } @Override diff --git a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldTypeTests.java b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldTypeTests.java index c29e4513562fc..04599549cc3cc 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldTypeTests.java +++ b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldTypeTests.java @@ -77,11 +77,7 @@ private CollectionBasedSortedSetDocValues(List docValues) { @Override public long nextOrd() { - currentOrd++; - if (currentOrd >= docValues.size()) { - return NO_MORE_ORDS; - } - return currentOrd; + return ++currentOrd; } @Override diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java index 303b94ec655dc..e8fd0da496bbe 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java +++ 
b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java @@ -651,7 +651,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio List fields = new ArrayList<>(); if (indexed && hasDocValues) { - fields.add(new LongField(fieldType().name(), numericValue)); + fields.add(new LongField(fieldType().name(), numericValue, Field.Store.NO)); } else if (hasDocValues) { fields.add(new SortedNumericDocValuesField(fieldType().name(), numericValue)); } else if (indexed) { diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionEncoder.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionEncoder.java index 00532d95574c0..4f42103bc4541 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionEncoder.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionEncoder.java @@ -13,7 +13,6 @@ import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import java.util.Locale; @@ -213,9 +212,9 @@ static CompiledAutomaton prefixAutomaton(String versionPrefix, boolean caseInsen a = Operations.concatenate(a, Automata.makeAnyBinary()); assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); + a = Operations.determinize(a, 0); - return new CompiledAutomaton(a, null, true, 0, true); + return new CompiledAutomaton(a, false, true, true); } static class EncodedVersion { diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionFieldWildcardQuery.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionFieldWildcardQuery.java index 
387a49a29dc23..1e5ecf19bdf81 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionFieldWildcardQuery.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionFieldWildcardQuery.java @@ -40,11 +40,11 @@ class VersionFieldWildcardQuery extends AutomatonQuery { private static final byte WILDCARD_CHAR = '?'; VersionFieldWildcardQuery(Term term, boolean caseInsensitive) { - super(term, toAutomaton(term, caseInsensitive), Integer.MAX_VALUE, true); + super(term, toAutomaton(term, caseInsensitive), true); } VersionFieldWildcardQuery(Term term, boolean caseInsensitive, RewriteMethod rewriteMethod) { - super(term, toAutomaton(term, caseInsensitive), Integer.MAX_VALUE, true, rewriteMethod); + super(term, toAutomaton(term, caseInsensitive), true, rewriteMethod); } private static Automaton toAutomaton(Term wildcardquery, boolean caseInsensitive) { @@ -114,7 +114,7 @@ private static Automaton toAutomaton(Term wildcardquery, boolean caseInsensitive if (containsPreReleaseSeparator == false) { automata.add(Operations.optional(Automata.makeChar(VersionEncoder.NO_PRERELEASE_SEPARATOR_BYTE))); } - return Operations.concatenate(automata); + return Operations.determinize(Operations.concatenate(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringDocValuesField.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringDocValuesField.java index 17e1d70cbb471..01f0fdb256551 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringDocValuesField.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringDocValuesField.java @@ -47,7 +47,8 @@ public VersionStringDocValuesField(SortedSetDocValues input, String name) { public void setNextDocId(int docId) 
throws IOException { count = 0; if (input.advanceExact(docId)) { - for (long ord = input.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = input.nextOrd()) { + for (int i = 0; i < input.docValueCount(); i++) { + long ord = input.nextOrd(); ords = ArrayUtil.grow(ords, count + 1); ords[count++] = ord; } diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java index b49b4500ce7b7..6bf2917c601ac 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java @@ -187,7 +187,8 @@ public Query regexpQuery( matchFlags, DEFAULT_PROVIDER, maxDeterminizedStates, - method == null ? CONSTANT_SCORE_REWRITE : method + method == null ? CONSTANT_SCORE_REWRITE : method, + true ) { @Override diff --git a/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTests.java b/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTests.java index 94d8a144b0bd6..c89d1f8493b6b 100644 --- a/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTests.java +++ b/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTests.java @@ -117,7 +117,7 @@ public void testPrefixQuery() throws IOException { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "2.1.0-A").caseInsensitive(true)), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[0].getSourceAsMap().get("version")); } ); @@ -134,7 
+134,7 @@ public void testSort() throws IOException { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery()).addSort("version", SortOrder.DESC), response -> { - assertEquals(8, response.getHits().getTotalHits().value); + assertEquals(8, response.getHits().getTotalHits().value()); SearchHit[] hits = response.getHits().getHits(); assertEquals("1.3.567#12", hits[0].getSortValues()[0]); assertEquals("1.2.3alpha", hits[1].getSortValues()[0]); @@ -150,7 +150,7 @@ public void testSort() throws IOException { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery()).addSort("version", SortOrder.ASC), response -> { - assertEquals(8, response.getHits().getTotalHits().value); + assertEquals(8, response.getHits().getTotalHits().value()); var hits = response.getHits().getHits(); assertEquals("1.0.0", hits[0].getSortValues()[0]); assertEquals("1.3.0+build.1234567", hits[1].getSortValues()[0]); @@ -179,7 +179,7 @@ public void testRegexQuery() throws Exception { client().admin().indices().prepareRefresh(indexName).get(); assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", "2.*0")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("2.1.0", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.33.0", response.getHits().getHits()[1].getSourceAsMap().get("version")); }); @@ -187,21 +187,21 @@ public void testRegexQuery() throws Exception { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", "<0-10>.<0-10>.*al.*")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.1.0-alpha.beta", 
response.getHits().getHits()[1].getSourceAsMap().get("version")); } ); assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", "1.[0-9].[0-9].*")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("1.3.0+build.1234567", response.getHits().getHits()[1].getSourceAsMap().get("version")); }); // test case sensitivity / insensitivity assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", ".*alpha.*")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); }); @@ -211,7 +211,7 @@ public void testRegexQuery() throws Exception { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", ".*Alpha.*").caseInsensitive(true)), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); } @@ -234,7 +234,7 @@ public void testFuzzyQuery() throws Exception { client().admin().indices().prepareRefresh(indexName).get(); assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.fuzzyQuery("version", "2.3.0")), response -> { - assertEquals(3, response.getHits().getTotalHits().value); + assertEquals(3, response.getHits().getTotalHits().value()); assertEquals("2.1.0", 
response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.33.0", response.getHits().getHits()[1].getSourceAsMap().get("version")); assertEquals("2.a3.0", response.getHits().getHits()[2].getSourceAsMap().get("version")); @@ -288,7 +288,7 @@ public void testWildcardQuery() throws Exception { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.wildcardQuery("version", "*Alpha*").caseInsensitive(true)), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("1.0.0-alpha.2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); } @@ -297,7 +297,7 @@ public void testWildcardQuery() throws Exception { private void checkWildcardQuery(String indexName, String query, String... expectedResults) { assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.wildcardQuery("version", query)), response -> { - assertEquals(expectedResults.length, response.getHits().getTotalHits().value); + assertEquals(expectedResults.length, response.getHits().getTotalHits().value()); for (int i = 0; i < expectedResults.length; i++) { String expected = expectedResults[i]; Object actual = response.getHits().getHits()[i].getSourceAsMap().get("version"); @@ -321,7 +321,7 @@ public void testStoreMalformed() throws Exception { client().admin().indices().prepareRefresh(indexName).get(); assertResponse(client().prepareSearch(indexName).addDocValueField("version"), response -> { - assertEquals(4, response.getHits().getTotalHits().value); + assertEquals(4, response.getHits().getTotalHits().value()); assertEquals("1", response.getHits().getAt(0).getId()); assertEquals("1.invalid.0", response.getHits().getAt(0).field("version").getValue()); @@ -359,7 +359,7 @@ public void testStoreMalformed() throws Exception { assertResponse( 
client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery()).addSort("version", SortOrder.ASC), response -> { - assertEquals(4, response.getHits().getTotalHits().value); + assertEquals(4, response.getHits().getTotalHits().value()); SearchHit[] hits = response.getHits().getHits(); assertEquals("2.2.0", hits[0].getSortValues()[0]); assertEquals("", hits[1].getSortValues()[0]); @@ -437,36 +437,36 @@ public void testMultiValues() throws Exception { client().admin().indices().prepareRefresh(indexName).get(); assertResponse(client().prepareSearch(indexName).addSort("version", SortOrder.ASC), response -> { - assertEquals(3, response.getHits().getTotalHits().value); + assertEquals(3, response.getHits().getTotalHits().value()); assertEquals("1", response.getHits().getAt(0).getId()); assertEquals("2", response.getHits().getAt(1).getId()); assertEquals("3", response.getHits().getAt(2).getId()); }); assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "3.0.0")), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertEquals("1", response.getHits().getAt(0).getId()); }); assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "4.alpha.0")), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertEquals("2", response.getHits().getAt(0).getId()); }); // range assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").to("1.5.0")), - response -> assertEquals(1, response.getHits().getTotalHits().value) + response -> assertEquals(1, response.getHits().getTotalHits().value()) ); assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("1.5.0")), - response -> assertEquals(3, response.getHits().getTotalHits().value) + response -> assertEquals(3, 
response.getHits().getTotalHits().value()) ); assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("5.0.0").to("6.0.0")), - response -> assertEquals(1, response.getHits().getTotalHits().value) + response -> assertEquals(1, response.getHits().getTotalHits().value()) ); } } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java index 8c245a4543abe..39519dc7931d0 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java @@ -345,7 +345,7 @@ private void testExpiredDeletion(Float customThrottle, int numUnusedState) throw assertResponse( prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()).setFetchSource(false).setTrackTotalHits(true).setSize(10000), stateDocsResponse -> { - assertThat(stateDocsResponse.getHits().getTotalHits().value, greaterThanOrEqualTo(5L)); + assertThat(stateDocsResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(5L)); int nonExistingJobDocsCount = 0; List nonExistingJobExampleIds = new ArrayList<>(); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java index 2e096f3262cb6..9864c88d1405c 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java +++ 
b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java @@ -372,7 +372,7 @@ protected long countForecastDocs(String jobId, String forecastId) { .filter(QueryBuilders.termQuery(Job.ID.getPreferredName(), jobId)) .filter(QueryBuilders.termQuery(Forecast.FORECAST_ID.getPreferredName(), forecastId)) ), - searchResponse -> count.set(searchResponse.getHits().getTotalHits().value) + searchResponse -> count.set(searchResponse.getHits().getTotalHits().value()) ); return count.get(); } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java index 94bc3150cb12e..5f82d996c87fa 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java @@ -77,7 +77,7 @@ public void testPersistJobOnGracefulShutdown_givenTimeAdvancedAfterNoNewData() t ++numStateRecords; } } - assertThat(stateDocsResponse1.getHits().getTotalHits().value, equalTo(2L)); + assertThat(stateDocsResponse1.getHits().getTotalHits().value(), equalTo(2L)); assertThat(numQuantileRecords, equalTo(1)); assertThat(numStateRecords, equalTo(1)); } @@ -117,7 +117,7 @@ public void testPersistJobOnGracefulShutdown_givenTimeAdvancedAfterNoNewData() t } } - assertThat(stateDocsResponse2.getHits().getTotalHits().value, equalTo(3L)); + assertThat(stateDocsResponse2.getHits().getTotalHits().value(), equalTo(3L)); assertThat(numQuantileRecords, equalTo(1)); assertThat(numStateRecords, equalTo(2)); @@ -154,7 +154,7 @@ public void testPersistJobOnGracefulShutdown_givenNoDataAndTimeAdvanced() throws ++numStateRecords; } } - 
assertThat(stateDocsResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(stateDocsResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(numQuantileRecords, equalTo(1)); assertThat(numStateRecords, equalTo(1)); } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java index f5d0b23b437f3..8a6499ec3bb6a 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java @@ -164,7 +164,7 @@ public void testSingleNumericFeatureAndMixedTrainingAndNonTrainingRows() throws + testDocsWithEmptyFeatureImportance + "] test docs with empty feature importance" + " from " - + sourceData.getHits().getTotalHits().value + + sourceData.getHits().getTotalHits().value() + " hits.\n" + badDocuments, trainingDocsWithEmptyFeatureImportance + testDocsWithEmptyFeatureImportance, diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java index 260a5dea0a3c1..388583f6f8656 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java @@ -295,7 +295,7 @@ private Quantiles getQuantiles(String jobId) throws Exception { prepareSearch(".ml-state*").setQuery(QueryBuilders.idsQuery().addIds(Quantiles.documentId(jobId))).setSize(1), response -> 
{ SearchHits hits = response.getHits(); - assertThat(hits.getTotalHits().value, equalTo(1L)); + assertThat(hits.getTotalHits().value(), equalTo(1L)); try ( XContentParser parser = JsonXContent.jsonXContent.createParser( XContentParserConfiguration.EMPTY, diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java index 8fbad7ccd3877..1505d374dfa08 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java @@ -396,7 +396,7 @@ public void testStopOutlierDetectionWithEnoughDocumentsToScroll() throws Excepti } assertResponse(prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true), searchResponse -> { - if (searchResponse.getHits().getTotalHits().value == docCount) { + if (searchResponse.getHits().getTotalHits().value() == docCount) { long seenCount = SearchResponseUtils.getTotalHitsValue( prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true) .setQuery(QueryBuilders.existsQuery("custom_ml.outlier_score")) @@ -404,7 +404,7 @@ public void testStopOutlierDetectionWithEnoughDocumentsToScroll() throws Excepti logger.debug("We stopped during analysis: [{}] < [{}]", seenCount, docCount); assertThat(seenCount, lessThan((long) docCount)); } else { - logger.debug("We stopped during reindexing: [{}] < [{}]", searchResponse.getHits().getTotalHits().value, docCount); + logger.debug("We stopped during reindexing: [{}] < [{}]", searchResponse.getHits().getTotalHits().value(), docCount); } }); diff --git 
a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BucketCorrelationAggregationIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BucketCorrelationAggregationIT.java index c15750de3b336..edc851def4468 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BucketCorrelationAggregationIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BucketCorrelationAggregationIT.java @@ -77,7 +77,7 @@ public void testCountCorrelation() { .setSize(0) .setTrackTotalHits(true), percentilesSearch -> { - long totalHits = percentilesSearch.getHits().getTotalHits().value; + long totalHits = percentilesSearch.getHits().getTotalHits().value(); Percentiles percentiles = percentilesSearch.getAggregations().get("percentiles"); Tuple aggs = buildRangeAggAndSetExpectations( percentiles, diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedCcsIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedCcsIT.java index 8fddfa47c377c..139d1b074c7b2 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedCcsIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedCcsIT.java @@ -192,7 +192,7 @@ private boolean doesLocalAuditMessageExist(String message) { .setQuery(new MatchPhraseQueryBuilder("message", message)) .get(); try { - return response.getHits().getTotalHits().value > 0; + return response.getHits().getTotalHits().value() > 0; } finally { response.decRef(); } diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java index 17fe20c5115ff..dfb960794537b 
100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java @@ -767,7 +767,7 @@ private static DataCounts getDataCountsFromIndex(String jobId) throws IOExceptio prepareSearch().setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN) .setQuery(QueryBuilders.idsQuery().addIds(DataCounts.documentId(jobId))), searchResponse -> { - if (searchResponse.getHits().getTotalHits().value != 1) { + if (searchResponse.getHits().getTotalHits().value() != 1) { setOnce.set(new DataCounts(jobId)); return; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java index a47b67e490851..210973f2601d3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java @@ -189,7 +189,7 @@ private void initChunkedBucketSearcher( ML_ORIGIN, searchRequest, ActionListener.wrap(searchResponse -> { - long totalHits = searchResponse.getHits().getTotalHits().value; + long totalHits = searchResponse.getHits().getTotalHits().value(); if (totalHits > 0) { InternalAggregations aggregations = searchResponse.getAggregations(); Min min = aggregations.get(EARLIEST_TIME); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java index 6aaa1e50f2e8a..d676e6cc9d065 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java @@ -415,7 +415,7 @@ private void checkModelIdAgainstTags(String modelId, ActionListener listen ML_ORIGIN, searchRequest, ActionListener.wrap(response -> { - if (response.getHits().getTotalHits().value > 0) { + if (response.getHits().getTotalHits().value() > 0) { listener.onFailure( ExceptionsHelper.badRequestException(Messages.getMessage(Messages.INFERENCE_MODEL_ID_AND_TAGS_UNIQUE, modelId)) ); @@ -443,7 +443,7 @@ private void checkTagsAgainstModelIds(List tags, ActionListener li ML_ORIGIN, searchRequest, ActionListener.wrap(response -> { - if (response.getHits().getTotalHits().value > 0) { + if (response.getHits().getTotalHits().value() > 0) { listener.onFailure( ExceptionsHelper.badRequestException(Messages.getMessage(Messages.INFERENCE_TAGS_AND_MODEL_IDS_UNIQUE, tags)) ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java index 2ec460a08caf9..759538b4cdc63 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java @@ -433,7 +433,7 @@ private static void checkDestIndexIsEmptyIfExists( TransportSearchAction.TYPE, destEmptySearch, ActionListener.wrap(searchResponse -> { - if (searchResponse.getHits().getTotalHits().value > 0) { + if (searchResponse.getHits().getTotalHits().value() > 0) { listener.onFailure(ExceptionsHelper.badRequestException("dest index [{}] must be empty", destIndex)); } else { listener.onResponse(startContext); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java index f0e03a1e94973..7c41dbd463413 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java @@ -62,7 +62,7 @@ public static DataExtractor.DataSummary getDataSummary(SearchResponse searchResp } else { Long earliestTime = toLongIfFinite((aggregations.get(EARLIEST_TIME)).value()); Long latestTime = toLongIfFinite((aggregations.get(LATEST_TIME)).value()); - long totalHits = searchResponse.getHits().getTotalHits().value; + long totalHits = searchResponse.getHits().getTotalHits().value(); return new DataExtractor.DataSummary(earliestTime, latestTime, totalHits); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java index 20da61a3d6910..7829adb395675 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java @@ -226,7 +226,7 @@ public void findDatafeedIdsForJobIds(Collection jobIds, ActionListenerdelegateFailureAndWrap((delegate, response) -> { Set datafeedIds = new HashSet<>(); // There cannot be more than one datafeed per job - assert response.getHits().getTotalHits().value <= jobIds.size(); + assert response.getHits().getTotalHits().value() <= jobIds.size(); SearchHit[] hits = response.getHits().getHits(); for (SearchHit hit : hits) { @@ -259,7 +259,7 @@ public void findDatafeedsByJobIds( listener.delegateFailureAndWrap((delegate, response) -> { Map datafeedsByJobId = new HashMap<>(); // There cannot be more than one datafeed per job - assert 
response.getHits().getTotalHits().value <= jobIds.size(); + assert response.getHits().getTotalHits().value() <= jobIds.size(); SearchHit[] hits = response.getHits().getHits(); for (SearchHit hit : hits) { DatafeedConfig.Builder builder = parseLenientlyFromSource(hit.getSourceRef()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java index c890ab599c380..315d2249d00cb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java @@ -377,7 +377,7 @@ public DataSummary collectDataSummary() { SearchRequestBuilder searchRequestBuilder = buildDataSummarySearchRequestBuilder(); SearchResponse searchResponse = executeSearchRequest(searchRequestBuilder); try { - long rows = searchResponse.getHits().getTotalHits().value; + long rows = searchResponse.getHits().getTotalHits().value(); LOGGER.debug(() -> format("[%s] Data summary rows [%s]", context.jobId, rows)); return new DataSummary(rows, organicFeatures.length + processedFeatures.length); } finally { @@ -396,7 +396,7 @@ public void collectDataSummaryAsync(ActionListener dataSummaryActio TransportSearchAction.TYPE, searchRequestBuilder.request(), dataSummaryActionListener.delegateFailureAndWrap( - (l, searchResponse) -> l.onResponse(new DataSummary(searchResponse.getHits().getTotalHits().value, numberOfFields)) + (l, searchResponse) -> l.onResponse(new DataSummary(searchResponse.getHits().getTotalHits().value(), numberOfFields)) ) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java index dfcc12d98be41..64cf493028ad1 100644 
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java @@ -169,7 +169,7 @@ private InferenceState restoreInferenceState() { ); try { Max maxIncrementalIdAgg = searchResponse.getAggregations().get(DestinationIndex.INCREMENTAL_ID); - long processedTestDocCount = searchResponse.getHits().getTotalHits().value; + long processedTestDocCount = searchResponse.getHits().getTotalHits().value(); Long lastIncrementalId = processedTestDocCount == 0 ? null : (long) maxIncrementalIdAgg.value(); if (lastIncrementalId != null) { LOGGER.debug( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java index 482e82f9ec303..fdd4bdd120f6a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java @@ -115,7 +115,7 @@ private void searchIfTestDocsExist(ActionListener listener) { ML_ORIGIN, TransportSearchAction.TYPE, searchRequest, - listener.delegateFailureAndWrap((l, searchResponse) -> l.onResponse(searchResponse.getHits().getTotalHits().value > 0)) + listener.delegateFailureAndWrap((l, searchResponse) -> l.onResponse(searchResponse.getHits().getTotalHits().value() > 0)) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java index 3ef2affa5d399..0b3dd573deaae 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java @@ -70,7 +70,7 @@ private TrainTestSplitter createSingleClassSplitter(Regression regression) { regression.getDependentVariable(), regression.getTrainingPercent(), regression.getRandomizeSeed(), - searchResponse.getHits().getTotalHits().value + searchResponse.getHits().getTotalHits().value() ); } finally { searchResponse.decRef(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java index f56c589aea19a..c4396c4f9d2c8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java @@ -196,7 +196,7 @@ private void doSearch( numDocsWritten += searchResponse.getHits().getHits().length; boolean endOfSearch = searchResponse.getHits().getHits().length < searchSize - || searchResponse.getHits().getTotalHits().value == numDocsWritten; + || searchResponse.getHits().getTotalHits().value() == numDocsWritten; if (endOfSearch) { successConsumer.accept(Boolean.TRUE); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java index f493c735d87ea..ff5f37427b18f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java @@ -1008,7 +1008,7 @@ public void expandIds( ML_ORIGIN, searchRequest, ActionListener.wrap(response -> { - long totalHitCount = 
response.getHits().getTotalHits().value + foundResourceIds.size(); + long totalHitCount = response.getHits().getTotalHits().value() + foundResourceIds.size(); Set foundFromDocs = new HashSet<>(); for (SearchHit hit : response.getHits().getHits()) { Map docSource = hit.getSourceAsMap(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java index 8493513f40bd6..df9a187f59616 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java @@ -683,7 +683,7 @@ public void groupExists(String groupId, ActionListener listener) { ML_ORIGIN, searchRequest, ActionListener.wrap( - response -> listener.onResponse(response.getHits().getTotalHits().value > 0), + response -> listener.onResponse(response.getHits().getTotalHits().value() > 0), listener::onFailure ), client::search diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java index b9cc1902b7ab6..0f3abe3ab8c20 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java @@ -352,7 +352,7 @@ public void deleteJobDocuments( } } SearchResponse searchResponse = item.getResponse(); - if (searchResponse.getHits().getTotalHits().value > 0 || indexNames.get()[i].equals(defaultSharedIndex)) { + if (searchResponse.getHits().getTotalHits().value() > 0 || indexNames.get()[i].equals(defaultSharedIndex)) { needToRunDBQTemp = true; } else { indicesToDelete.add(indexNames.get()[i]); diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java index f9e4e62e4e3bc..51b3e0b55d75b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java @@ -870,7 +870,7 @@ public void buckets( throw QueryPage.emptyQueryPage(Bucket.RESULTS_FIELD); } - QueryPage buckets = new QueryPage<>(results, searchResponse.getHits().getTotalHits().value, Bucket.RESULTS_FIELD); + QueryPage buckets = new QueryPage<>(results, searchResponse.getHits().getTotalHits().value(), Bucket.RESULTS_FIELD); if (query.isExpand()) { Iterator bucketsToExpand = buckets.results() @@ -1086,7 +1086,7 @@ public void categoryDefinitions( } QueryPage result = new QueryPage<>( results, - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), CategoryDefinition.RESULTS_FIELD ); handler.accept(result); @@ -1143,7 +1143,7 @@ public void records( } QueryPage queryPage = new QueryPage<>( results, - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), AnomalyRecord.RESULTS_FIELD ); handler.accept(queryPage); @@ -1207,7 +1207,7 @@ public void influencers( } QueryPage result = new QueryPage<>( influencers, - response.getHits().getTotalHits().value, + response.getHits().getTotalHits().value(), Influencer.RESULTS_FIELD ); handler.accept(result); @@ -1375,7 +1375,7 @@ private void modelSnapshots( QueryPage result = new QueryPage<>( results, - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), ModelSnapshot.RESULTS_FIELD ); handler.accept(result); @@ -1411,7 +1411,7 @@ public QueryPage modelPlot(String jobId, int from, int size) { } } - return new QueryPage<>(results, 
searchResponse.getHits().getTotalHits().value, ModelPlot.RESULTS_FIELD); + return new QueryPage<>(results, searchResponse.getHits().getTotalHits().value(), ModelPlot.RESULTS_FIELD); } finally { searchResponse.decRef(); } @@ -1444,7 +1444,7 @@ public QueryPage categorizerStats(String jobId, int from, int } } - return new QueryPage<>(results, searchResponse.getHits().getTotalHits().value, ModelPlot.RESULTS_FIELD); + return new QueryPage<>(results, searchResponse.getHits().getTotalHits().value(), ModelPlot.RESULTS_FIELD); } finally { searchResponse.decRef(); } @@ -1700,7 +1700,7 @@ public void scheduledEvents(ScheduledEventsQueryBuilder query, ActionListener(events, response.getHits().getTotalHits().value, ScheduledEvent.RESULTS_FIELD)); + handler.onResponse(new QueryPage<>(events, response.getHits().getTotalHits().value(), ScheduledEvent.RESULTS_FIELD)); } catch (Exception e) { handler.onFailure(e); } @@ -1901,7 +1901,7 @@ public void calendars(CalendarQueryBuilder queryBuilder, ActionListener(calendars, response.getHits().getTotalHits().value, Calendar.RESULTS_FIELD)); + listener.onResponse(new QueryPage<>(calendars, response.getHits().getTotalHits().value(), Calendar.RESULTS_FIELD)); } catch (Exception e) { listener.onFailure(e); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java index 886c19a65a4d0..194759c026a30 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java @@ -168,7 +168,7 @@ private List findForecastsToDelete(SearchResponse searchResponse) List forecastsToDelete = new ArrayList<>(); SearchHits hits = searchResponse.getHits(); - if (hits.getTotalHits().value > MAX_FORECASTS) { + if (hits.getTotalHits().value() > 
MAX_FORECASTS) { LOGGER.info("More than [{}] forecasts were found. This run will only delete [{}] of them", MAX_FORECASTS, MAX_FORECASTS); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java index 86488a647baa1..ef6087f021e9d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java @@ -111,7 +111,7 @@ private SearchResponse initScroll() { ); SearchResponse searchResponse = client.search(searchRequest).actionGet(); - totalHits = searchResponse.getHits().getTotalHits().value; + totalHits = searchResponse.getHits().getTotalHits().value(); scrollId = searchResponse.getScrollId(); return searchResponse; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/SearchAfterDocumentsIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/SearchAfterDocumentsIterator.java index f63f6e0549179..802bcaf3b342e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/SearchAfterDocumentsIterator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/SearchAfterDocumentsIterator.java @@ -110,7 +110,7 @@ public Deque next() { SearchResponse searchResponse = doSearch(searchAfterFields()); try { if (trackTotalHits && totalHits.get() == 0) { - totalHits.set(searchResponse.getHits().getTotalHits().value); + totalHits.set(searchResponse.getHits().getTotalHits().value()); } return mapHits(searchResponse); } finally { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilderTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilderTests.java index 3d17d8dd23ff6..13cf6d87728a8 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilderTests.java @@ -166,8 +166,8 @@ protected void doAssertLuceneQuery(SparseVectorQueryBuilder queryBuilder, Query Class boostQueryClass = FeatureField.newLinearQuery("", "", 1.0f).getClass(); for (var clause : booleanQuery.clauses()) { - assertEquals(BooleanClause.Occur.SHOULD, clause.getOccur()); - assertThat(clause.getQuery(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); + assertEquals(BooleanClause.Occur.SHOULD, clause.occur()); + assertThat(clause.query(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java index 8da6fc843614e..00d50e0d0d7bb 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java @@ -139,8 +139,8 @@ protected void doAssertLuceneQuery(TextExpansionQueryBuilder queryBuilder, Query Class boostQueryClass = FeatureField.newLinearQuery("", "", 1.0f).getClass(); for (var clause : booleanQuery.clauses()) { - assertEquals(BooleanClause.Occur.SHOULD, clause.getOccur()); - assertThat(clause.getQuery(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); + assertEquals(BooleanClause.Occur.SHOULD, clause.occur()); + assertThat(clause.query(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); } } diff --git 
a/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java b/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java index daea70abd29e3..7ddaa53a59914 100644 --- a/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java +++ b/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java @@ -149,7 +149,7 @@ public void testMonitoringBulk() throws Exception { assertResponse(client().prepareSearch(".monitoring-" + system.getSystem() + "-" + TEMPLATE_VERSION + "-*"), response -> { // exactly 3 results are expected - assertThat("No monitoring documents yet", response.getHits().getTotalHits().value, equalTo(3L)); + assertThat("No monitoring documents yet", response.getHits().getTotalHits().value(), equalTo(3L)); final List> sources = Arrays.stream(response.getHits().getHits()) .map(SearchHit::getSourceAsMap) @@ -165,7 +165,7 @@ public void testMonitoringBulk() throws Exception { assertCheckedResponse(client().prepareSearch(monitoringIndex), response -> { final SearchHits hits = response.getHits(); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat( "Monitoring documents must have the same timestamp", Arrays.stream(hits.getHits()).map(hit -> extractValue("timestamp", hit.getSourceAsMap())).distinct().count(), diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java index 93e055b58ddc3..d68395ef7656f 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java +++ 
b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java @@ -113,7 +113,7 @@ public void testExport() throws Exception { assertResponse( prepareSearch(".monitoring-*"), - response -> assertThat((long) nbDocs, lessThanOrEqualTo(response.getHits().getTotalHits().value)) + response -> assertThat((long) nbDocs, lessThanOrEqualTo(response.getHits().getTotalHits().value())) ); }); @@ -260,7 +260,7 @@ private void checkMonitoringDocs() { DateFormatter dateFormatter = DateFormatter.forPattern(customTimeFormat).withZone(ZoneOffset.UTC); assertResponse(prepareSearch(".monitoring-*").setSize(100), rsp -> { - assertThat(rsp.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(rsp.getHits().getTotalHits().value(), greaterThan(0L)); for (SearchHit hit : rsp.getHits().getHits()) { final Map source = hit.getSourceAsMap(); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java index d6e15ea25c8e1..d382905c1c9c2 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java @@ -293,12 +293,14 @@ private void assertNoWatchesExist() { .query(QueryBuilders.matchQuery("metadata.xpack.cluster_uuid", clusterUUID)); assertResponse(prepareSearch(".watches").setSource(searchSource), response -> { - if (response.getHits().getTotalHits().value > 0) { + if (response.getHits().getTotalHits().value() > 0) { List invalidWatches = new ArrayList<>(); for (SearchHit hit : response.getHits().getHits()) { invalidWatches.add(ObjectPath.eval("metadata.xpack.watch", hit.getSourceAsMap())); } - fail("Found [" + 
response.getHits().getTotalHits().value + "] invalid watches when none were expected: " + invalidWatches); + fail( + "Found [" + response.getHits().getTotalHits().value() + "] invalid watches when none were expected: " + invalidWatches + ); } }); } diff --git a/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java index 803c7f410c41d..71f788727aa23 100644 --- a/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java +++ b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java @@ -97,7 +97,7 @@ public IndexMetadata getSnapshotIndexMetaData(RepositoryData repositoryData, Sna .getAsVersionId( "version", IndexVersion::fromId, - IndexVersion.fromId(randomBoolean() ? 
5000099 : 6000099) + IndexVersion.fromId(randomFrom(5000099, 6000099, 7000099)) ) ) ) diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java index d1455eaa2f1c4..18adebb145f98 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java @@ -196,7 +196,7 @@ static final OldSegmentInfos readCommit(Directory directory, String segmentFileN long generation = generationFromSegmentsFileName(segmentFileName); // System.out.println(Thread.currentThread() + ": SegmentInfos.readCommit " + segmentFileName); - try (ChecksumIndexInput input = directory.openChecksumInput(segmentFileName, IOContext.READONCE)) { + try (ChecksumIndexInput input = directory.openChecksumInput(segmentFileName)) { try { return readCommit(directory, input, generation, minSupportedMajorVersion); } catch (EOFException | NoSuchFileException | FileNotFoundException e) { @@ -305,7 +305,7 @@ private static void parseSegmentInfos(Directory directory, DataInput input, OldS byte[] segmentID = new byte[StringHelper.ID_LENGTH]; input.readBytes(segmentID, 0, segmentID.length); Codec codec = readCodec(input); - SegmentInfo info = codec.segmentInfoFormat().read(directory, segName, segmentID, IOContext.READ); + SegmentInfo info = codec.segmentInfoFormat().read(directory, segName, segmentID, IOContext.DEFAULT); info.setCodec(codec); totalDocs += info.maxDoc(); long delGen = CodecUtil.readBELong(input); diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java index 25b4b685ac50f..3ed8fc26ac937 100644 --- 
a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.lucene.bwc.codecs; -import org.apache.lucene.backward_codecs.lucene70.Lucene70Codec; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FieldInfosFormat; import org.apache.lucene.codecs.FieldsConsumer; @@ -27,7 +26,6 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; -import org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.BWCLucene70Codec; import java.io.IOException; import java.util.ArrayList; @@ -101,6 +99,7 @@ private static FieldInfos filterFields(FieldInfos fieldInfos) { false, fieldInfo.getIndexOptions(), fieldInfo.getDocValuesType(), + fieldInfo.docValuesSkipIndexType(), fieldInfo.getDocValuesGen(), fieldInfo.attributes(), fieldInfo.getPointDimensionCount(), @@ -119,9 +118,7 @@ private static FieldInfos filterFields(FieldInfos fieldInfos) { } public static SegmentInfo wrap(SegmentInfo segmentInfo) { - // special handling for Lucene70Codec (which is currently bundled with Lucene) - // Use BWCLucene70Codec instead as that one extends BWCCodec (similar to all other older codecs) - final Codec codec = segmentInfo.getCodec() instanceof Lucene70Codec ? 
new BWCLucene70Codec() : segmentInfo.getCodec(); + final Codec codec = segmentInfo.getCodec(); final SegmentInfo segmentInfo1 = new SegmentInfo( segmentInfo.dir, // Use Version.LATEST instead of original version, otherwise SegmentCommitInfo will bark when processing (N-1 limitation) diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyDocValuesIterables.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyDocValuesIterables.java index 5a9b1bb252308..c7abed7d69a59 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyDocValuesIterables.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyDocValuesIterables.java @@ -182,10 +182,7 @@ public Number next() { try { if (nextDocID > values.docID()) { if (values.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { - ordCount = 0; - while (values.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) { - ordCount++; - } + ordCount = values.docValueCount(); } } int result; @@ -225,6 +222,7 @@ public Iterator iterator() { return new Iterator() { private boolean nextIsSet; + private int currentIndex = 0; private long nextOrd; private void setNext() { @@ -232,17 +230,22 @@ private void setNext() { if (nextIsSet == false) { if (values.docID() == -1) { values.nextDoc(); + currentIndex = 0; } while (true) { if (values.docID() == DocIdSetIterator.NO_MORE_DOCS) { nextOrd = -1; break; } - nextOrd = values.nextOrd(); - if (nextOrd != -1) { - break; + if (currentIndex < values.docValueCount()) { + nextOrd = values.nextOrd(); + currentIndex++; + if (nextOrd != -1) { + break; + } } values.nextDoc(); + currentIndex = 0; } nextIsSet = true; } diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacySortedSetDocValuesWrapper.java 
b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacySortedSetDocValuesWrapper.java index 21b6818bd5613..80236f3847e12 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacySortedSetDocValuesWrapper.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacySortedSetDocValuesWrapper.java @@ -53,7 +53,7 @@ public int nextDoc() { while (docID < maxDoc) { values.setDocument(docID); ord = values.nextOrd(); - if (ord != NO_MORE_ORDS) { + if (ord != LegacySortedSetDocValues.NO_MORE_ORDS) { return docID; } docID++; @@ -81,7 +81,7 @@ public boolean advanceExact(int target) throws IOException { docID = target; values.setDocument(docID); ord = values.nextOrd(); - return ord != NO_MORE_ORDS; + return ord != LegacySortedSetDocValues.NO_MORE_ORDS; } @Override @@ -92,7 +92,7 @@ public long cost() { @Override public long nextOrd() { long result = ord; - if (result != NO_MORE_ORDS) { + if (result != LegacySortedSetDocValues.NO_MORE_ORDS) { ord = values.nextOrd(); } return result; diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/ForUtil.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/ForUtil.java index a567f25869407..007b398624d56 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/ForUtil.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/ForUtil.java @@ -105,14 +105,14 @@ private static int encodedSize(PackedInts.Format format, int packedIntsVersion, for (int bpv = 1; bpv <= 32; ++bpv) { final FormatAndBits formatAndBits = PackedInts.fastestFormatAndBits(BLOCK_SIZE, bpv, acceptableOverheadRatio); - assert formatAndBits.format.isSupported(formatAndBits.bitsPerValue); - assert 
formatAndBits.bitsPerValue <= 32; - encodedSizes[bpv] = encodedSize(formatAndBits.format, PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue); - encoders[bpv] = PackedInts.getEncoder(formatAndBits.format, PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue); - decoders[bpv] = PackedInts.getDecoder(formatAndBits.format, PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue); + assert formatAndBits.format().isSupported(formatAndBits.bitsPerValue()); + assert formatAndBits.bitsPerValue() <= 32; + encodedSizes[bpv] = encodedSize(formatAndBits.format(), PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue()); + encoders[bpv] = PackedInts.getEncoder(formatAndBits.format(), PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue()); + decoders[bpv] = PackedInts.getDecoder(formatAndBits.format(), PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue()); iterations[bpv] = computeIterations(decoders[bpv]); - out.writeVInt(formatAndBits.format.getId() << 5 | (formatAndBits.bitsPerValue - 1)); + out.writeVInt(formatAndBits.format().getId() << 5 | (formatAndBits.bitsPerValue() - 1)); } } diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java index 83fcb17449100..06002d2d10dee 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java @@ -23,6 +23,7 @@ import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.codecs.FieldInfosFormat; import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; 
import org.apache.lucene.index.FieldInfos; @@ -103,6 +104,7 @@ public FieldInfos read(Directory directory, SegmentInfo segmentInfo, String segm storePayloads, indexOptions, docValuesType, + DocValuesSkipIndexType.NONE, dvGen, attributes, 0, diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesProducer.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesProducer.java index 09147e821d9fb..607d9903abc87 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesProducer.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesProducer.java @@ -28,6 +28,7 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.ImpactsEnum; @@ -1316,6 +1317,11 @@ public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException { } } + @Override + public DocValuesSkipper getSkipper(FieldInfo field) throws IOException { + return null; + } + private SortedSetDocValues getSortedSetWithAddresses(FieldInfo field) throws IOException { final long valueCount = binaries.get(field.name).count; // we keep the byte[]s and list of ords on disk, these could be large diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/MetadataOnlyBKDReader.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/MetadataOnlyBKDReader.java index f3ce3ea0755e1..43203caf571f1 100644 --- 
a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/MetadataOnlyBKDReader.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/MetadataOnlyBKDReader.java @@ -63,14 +63,14 @@ public MetadataOnlyBKDReader(IndexInput metaIn) throws IOException { numLeaves = metaIn.readVInt(); assert numLeaves > 0; - minPackedValue = new byte[config.packedIndexBytesLength]; - maxPackedValue = new byte[config.packedIndexBytesLength]; - - metaIn.readBytes(minPackedValue, 0, config.packedIndexBytesLength); - metaIn.readBytes(maxPackedValue, 0, config.packedIndexBytesLength); - final ArrayUtil.ByteArrayComparator comparator = ArrayUtil.getUnsignedComparator(config.bytesPerDim); - for (int dim = 0; dim < config.numIndexDims; dim++) { - if (comparator.compare(minPackedValue, dim * config.bytesPerDim, maxPackedValue, dim * config.bytesPerDim) > 0) { + minPackedValue = new byte[config.packedIndexBytesLength()]; + maxPackedValue = new byte[config.packedIndexBytesLength()]; + + metaIn.readBytes(minPackedValue, 0, config.packedIndexBytesLength()); + metaIn.readBytes(maxPackedValue, 0, config.packedIndexBytesLength()); + final ArrayUtil.ByteArrayComparator comparator = ArrayUtil.getUnsignedComparator(config.bytesPerDim()); + for (int dim = 0; dim < config.numIndexDims(); dim++) { + if (comparator.compare(minPackedValue, dim * config.bytesPerDim(), maxPackedValue, dim * config.bytesPerDim()) > 0) { throw new CorruptIndexException( "minPackedValue " + new BytesRef(minPackedValue) @@ -104,17 +104,17 @@ public byte[] getMaxPackedValue() { @Override public int getNumDimensions() { - return config.numDims; + return config.numDims(); } @Override public int getNumIndexDimensions() { - return config.numIndexDims; + return config.numIndexDims(); } @Override public int getBytesPerDimension() { - return config.bytesPerDim; + return config.bytesPerDim(); } @Override diff --git 
a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java index 0e689138acd8f..0100a8bd14635 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java @@ -32,7 +32,7 @@ public class BWCLucene70Codec extends BWCCodec { private final LiveDocsFormat liveDocsFormat = new Lucene50LiveDocsFormat(); private final CompoundFormat compoundFormat = new Lucene50CompoundFormat(); private final StoredFieldsFormat storedFieldsFormat; - private final DocValuesFormat defaultDVFormat = DocValuesFormat.forName("Lucene70"); + private final DocValuesFormat defaultDVFormat = new Lucene70DocValuesFormat(); private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() { @Override public DocValuesFormat getDocValuesFormatForField(String field) { @@ -47,7 +47,11 @@ public PostingsFormat getPostingsFormatForField(String field) { }; public BWCLucene70Codec() { - super("BWCLucene70Codec"); + this("BWCLucene70Codec"); + } + + protected BWCLucene70Codec(String name) { + super(name); storedFieldsFormat = new Lucene50StoredFieldsFormat(Lucene50StoredFieldsFormat.Mode.BEST_SPEED); } diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/IndexedDISI.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/IndexedDISI.java new file mode 100644 index 0000000000000..75119247cdb13 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/IndexedDISI.java @@ -0,0 +1,327 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) 
under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2021 Elasticsearch B.V. + */ +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.util.BitSetIterator; +import org.apache.lucene.util.FixedBitSet; +import org.apache.lucene.util.RoaringDocIdSet; + +import java.io.DataInput; +import java.io.IOException; + +/** + * Disk-based implementation of a {@link DocIdSetIterator} which can return the index of the current + * document, i.e. the ordinal of the current document among the list of documents that this iterator + * can return. This is useful to implement sparse doc values by only having to encode values for + * documents that actually have a value. + * + *

      Implementation-wise, this {@link DocIdSetIterator} is inspired of {@link RoaringDocIdSet + * roaring bitmaps} and encodes ranges of {@code 65536} documents independently and picks between 3 + * encodings depending on the density of the range: + * + *

        + *
      • {@code ALL} if the range contains 65536 documents exactly, + *
      • {@code DENSE} if the range contains 4096 documents or more; in that case documents are + * stored in a bit set, + *
      • {@code SPARSE} otherwise, and the lower 16 bits of the doc IDs are stored in a {@link + * DataInput#readShort() short}. + *
      + * + *

      Only ranges that contain at least one value are encoded. + * + *

      This implementation uses 6 bytes per document in the worst-case, which happens in the case + * that all ranges contain exactly one document. + */ +final class IndexedDISI extends DocIdSetIterator { + + static final int MAX_ARRAY_LENGTH = (1 << 12) - 1; + + private static void flush(int block, FixedBitSet buffer, int cardinality, IndexOutput out) throws IOException { + assert block >= 0 && block < 65536; + out.writeShort((short) block); + assert cardinality > 0 && cardinality <= 65536; + out.writeShort((short) (cardinality - 1)); + if (cardinality > MAX_ARRAY_LENGTH) { + if (cardinality != 65536) { // all docs are set + for (long word : buffer.getBits()) { + out.writeLong(word); + } + } + } else { + BitSetIterator it = new BitSetIterator(buffer, cardinality); + for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) { + out.writeShort((short) doc); + } + } + } + + static void writeBitSet(DocIdSetIterator it, IndexOutput out) throws IOException { + int i = 0; + final FixedBitSet buffer = new FixedBitSet(1 << 16); + int prevBlock = -1; + for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) { + final int block = doc >>> 16; + if (prevBlock != -1 && block != prevBlock) { + flush(prevBlock, buffer, i, out); + buffer.clear(0, buffer.length()); + prevBlock = block; + i = 0; + } + buffer.set(doc & 0xFFFF); + i++; + prevBlock = block; + } + if (i > 0) { + flush(prevBlock, buffer, i, out); + buffer.clear(0, buffer.length()); + } + // NO_MORE_DOCS is stored explicitly + buffer.set(DocIdSetIterator.NO_MORE_DOCS & 0xFFFF); + flush(DocIdSetIterator.NO_MORE_DOCS >>> 16, buffer, 1, out); + } + + /** The slice that stores the {@link DocIdSetIterator}. 
*/ + private final IndexInput slice; + + private final long cost; + + IndexedDISI(IndexInput in, long offset, long length, long cost) throws IOException { + this(in.slice("docs", offset, length), cost); + } + + // This constructor allows to pass the slice directly in case it helps reuse + // see eg. Lucene70 norms producer's merge instance + IndexedDISI(IndexInput slice, long cost) throws IOException { + this.slice = slice; + this.cost = cost; + } + + private int block = -1; + private long blockEnd; + private int nextBlockIndex = -1; + Method method; + + private int doc = -1; + private int index = -1; + + // SPARSE variables + boolean exists; + + // DENSE variables + private long word; + private int wordIndex = -1; + // number of one bits encountered so far, including those of `word` + private int numberOfOnes; + + // ALL variables + private int gap; + + @Override + public int docID() { + return doc; + } + + @Override + public int advance(int target) throws IOException { + final int targetBlock = target & 0xFFFF0000; + if (block < targetBlock) { + advanceBlock(targetBlock); + } + if (block == targetBlock) { + if (method.advanceWithinBlock(this, target)) { + return doc; + } + readBlockHeader(); + } + boolean found = method.advanceWithinBlock(this, block); + assert found; + return doc; + } + + public boolean advanceExact(int target) throws IOException { + final int targetBlock = target & 0xFFFF0000; + if (block < targetBlock) { + advanceBlock(targetBlock); + } + boolean found = block == targetBlock && method.advanceExactWithinBlock(this, target); + this.doc = target; + return found; + } + + private void advanceBlock(int targetBlock) throws IOException { + do { + slice.seek(blockEnd); + readBlockHeader(); + } while (block < targetBlock); + } + + private void readBlockHeader() throws IOException { + block = Short.toUnsignedInt(slice.readShort()) << 16; + assert block >= 0; + final int numValues = 1 + Short.toUnsignedInt(slice.readShort()); + index = nextBlockIndex; + 
nextBlockIndex = index + numValues; + if (numValues <= MAX_ARRAY_LENGTH) { + method = Method.SPARSE; + blockEnd = slice.getFilePointer() + (numValues << 1); + } else if (numValues == 65536) { + method = Method.ALL; + blockEnd = slice.getFilePointer(); + gap = block - index - 1; + } else { + method = Method.DENSE; + blockEnd = slice.getFilePointer() + (1 << 13); + wordIndex = -1; + numberOfOnes = index + 1; + } + } + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + public int index() { + return index; + } + + @Override + public long cost() { + return cost; + } + + enum Method { + SPARSE { + @Override + boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException { + final int targetInBlock = target & 0xFFFF; + // TODO: binary search + for (; disi.index < disi.nextBlockIndex;) { + int doc = Short.toUnsignedInt(disi.slice.readShort()); + disi.index++; + if (doc >= targetInBlock) { + disi.doc = disi.block | doc; + disi.exists = true; + return true; + } + } + return false; + } + + @Override + boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException { + final int targetInBlock = target & 0xFFFF; + // TODO: binary search + if (target == disi.doc) { + return disi.exists; + } + for (; disi.index < disi.nextBlockIndex;) { + int doc = Short.toUnsignedInt(disi.slice.readShort()); + disi.index++; + if (doc >= targetInBlock) { + if (doc != targetInBlock) { + disi.index--; + disi.slice.seek(disi.slice.getFilePointer() - Short.BYTES); + break; + } + disi.exists = true; + return true; + } + } + disi.exists = false; + return false; + } + }, + DENSE { + @Override + boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException { + final int targetInBlock = target & 0xFFFF; + final int targetWordIndex = targetInBlock >>> 6; + for (int i = disi.wordIndex + 1; i <= targetWordIndex; ++i) { + disi.word = disi.slice.readLong(); + disi.numberOfOnes += Long.bitCount(disi.word); + } + disi.wordIndex 
= targetWordIndex; + + long leftBits = disi.word >>> target; + if (leftBits != 0L) { + disi.doc = target + Long.numberOfTrailingZeros(leftBits); + disi.index = disi.numberOfOnes - Long.bitCount(leftBits); + return true; + } + + while (++disi.wordIndex < 1024) { + disi.word = disi.slice.readLong(); + if (disi.word != 0) { + disi.index = disi.numberOfOnes; + disi.numberOfOnes += Long.bitCount(disi.word); + disi.doc = disi.block | (disi.wordIndex << 6) | Long.numberOfTrailingZeros(disi.word); + return true; + } + } + return false; + } + + @Override + boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException { + final int targetInBlock = target & 0xFFFF; + final int targetWordIndex = targetInBlock >>> 6; + for (int i = disi.wordIndex + 1; i <= targetWordIndex; ++i) { + disi.word = disi.slice.readLong(); + disi.numberOfOnes += Long.bitCount(disi.word); + } + disi.wordIndex = targetWordIndex; + + long leftBits = disi.word >>> target; + disi.index = disi.numberOfOnes - Long.bitCount(leftBits); + return (leftBits & 1L) != 0; + } + }, + ALL { + @Override + boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException { + disi.doc = target; + disi.index = target - disi.gap; + return true; + } + + @Override + boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException { + disi.index = target - disi.gap; + return true; + } + }; + + /** + * Advance to the first doc from the block that is equal to or greater than {@code target}. + * Return true if there is such a doc and false otherwise. + */ + abstract boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException; + + /** + * Advance the iterator exactly to the position corresponding to the given {@code target} and + * return whether this document exists. 
+ */ + abstract boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException; + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70Codec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70Codec.java new file mode 100644 index 0000000000000..77de24b53069d --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70Codec.java @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +public class Lucene70Codec extends BWCLucene70Codec { + + public Lucene70Codec() { + super("Lucene70"); + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesConsumer.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesConsumer.java new file mode 100644 index 0000000000000..1d35a60235d35 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesConsumer.java @@ -0,0 +1,681 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2021 Elasticsearch B.V. + */ +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +import org.apache.lucene.backward_codecs.packed.LegacyDirectMonotonicWriter; +import org.apache.lucene.backward_codecs.packed.LegacyDirectWriter; +import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil; +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.codecs.DocValuesConsumer; +import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.EmptyDocValuesProducer; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.SegmentWriteState; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.SortedSetSelector; +import org.apache.lucene.store.ByteBuffersDataOutput; +import org.apache.lucene.store.ByteBuffersIndexOutput; +import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.MathUtil; +import org.apache.lucene.util.StringHelper; +import org.elasticsearch.core.IOUtils; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import 
java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70DocValuesFormat.DIRECT_MONOTONIC_BLOCK_SHIFT; +import static org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70DocValuesFormat.NUMERIC_BLOCK_SHIFT; +import static org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70DocValuesFormat.NUMERIC_BLOCK_SIZE; + +/** writer for {@link Lucene70DocValuesFormat} */ +final class Lucene70DocValuesConsumer extends DocValuesConsumer { + + IndexOutput data, meta; + final int maxDoc; + + /** expert: Creates a new writer */ + Lucene70DocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) + throws IOException { + boolean success = false; + try { + String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension); + data = EndiannessReverserUtil.createOutput(state.directory, dataName, state.context); + CodecUtil.writeIndexHeader( + data, + dataCodec, + Lucene70DocValuesFormat.VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension); + meta = EndiannessReverserUtil.createOutput(state.directory, metaName, state.context); + CodecUtil.writeIndexHeader( + meta, + metaCodec, + Lucene70DocValuesFormat.VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + maxDoc = state.segmentInfo.maxDoc(); + success = true; + } finally { + if (success == false) { + IOUtils.closeWhileHandlingException(this); + } + } + } + + @Override + public void close() throws IOException { + boolean success = false; + try { + if (meta != null) { + meta.writeInt(-1); // write EOF marker + CodecUtil.writeFooter(meta); // write checksum + } + if (data != null) { + CodecUtil.writeFooter(data); // write checksum + } + success = true; + } finally { + if (success) { + IOUtils.close(data, meta); + } 
else { + IOUtils.closeWhileHandlingException(data, meta); + } + meta = data = null; + } + } + + @Override + public void addNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + meta.writeByte(Lucene70DocValuesFormat.NUMERIC); + + writeValues(field, new EmptyDocValuesProducer() { + @Override + public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { + return DocValues.singleton(valuesProducer.getNumeric(field)); + } + }); + } + + private static class MinMaxTracker { + long min, max, numValues, spaceInBits; + + MinMaxTracker() { + reset(); + spaceInBits = 0; + } + + private void reset() { + min = Long.MAX_VALUE; + max = Long.MIN_VALUE; + numValues = 0; + } + + /** Accumulate a new value. */ + void update(long v) { + min = Math.min(min, v); + max = Math.max(max, v); + ++numValues; + } + + /** Update the required space. */ + void finish() { + if (max > min) { + spaceInBits += LegacyDirectWriter.unsignedBitsRequired(max - min) * numValues; + } + } + + /** Update space usage and get ready for accumulating values for the next block. */ + void nextBlock() { + finish(); + reset(); + } + } + + private long[] writeValues(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + SortedNumericDocValues values = valuesProducer.getSortedNumeric(field); + int numDocsWithValue = 0; + MinMaxTracker minMax = new MinMaxTracker(); + MinMaxTracker blockMinMax = new MinMaxTracker(); + long gcd = 0; + Set uniqueValues = new HashSet<>(); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + for (int i = 0, count = values.docValueCount(); i < count; ++i) { + long v = values.nextValue(); + + if (gcd != 1) { + if (v < Long.MIN_VALUE / 2 || v > Long.MAX_VALUE / 2) { + // in that case v - minValue might overflow and make the GCD computation return + // wrong results. 
Since these extreme values are unlikely, we just discard + // GCD computation for them + gcd = 1; + } else if (minMax.numValues != 0) { // minValue needs to be set first + gcd = MathUtil.gcd(gcd, v - minMax.min); + } + } + + minMax.update(v); + blockMinMax.update(v); + if (blockMinMax.numValues == NUMERIC_BLOCK_SIZE) { + blockMinMax.nextBlock(); + } + + if (uniqueValues != null && uniqueValues.add(v) && uniqueValues.size() > 256) { + uniqueValues = null; + } + } + + numDocsWithValue++; + } + + minMax.finish(); + blockMinMax.finish(); + + final long numValues = minMax.numValues; + long min = minMax.min; + final long max = minMax.max; + assert blockMinMax.spaceInBits <= minMax.spaceInBits; + + if (numDocsWithValue == 0) { + meta.writeLong(-2); + meta.writeLong(0L); + } else if (numDocsWithValue == maxDoc) { + meta.writeLong(-1); + meta.writeLong(0L); + } else { + long offset = data.getFilePointer(); + meta.writeLong(offset); + values = valuesProducer.getSortedNumeric(field); + IndexedDISI.writeBitSet(values, data); + meta.writeLong(data.getFilePointer() - offset); + } + + meta.writeLong(numValues); + final int numBitsPerValue; + boolean doBlocks = false; + Map encode = null; + if (min >= max) { + numBitsPerValue = 0; + meta.writeInt(-1); + } else { + if (uniqueValues != null + && uniqueValues.size() > 1 + && LegacyDirectWriter.unsignedBitsRequired(uniqueValues.size() - 1) < LegacyDirectWriter.unsignedBitsRequired( + (max - min) / gcd + )) { + numBitsPerValue = LegacyDirectWriter.unsignedBitsRequired(uniqueValues.size() - 1); + final Long[] sortedUniqueValues = uniqueValues.toArray(new Long[0]); + Arrays.sort(sortedUniqueValues); + meta.writeInt(sortedUniqueValues.length); + for (Long v : sortedUniqueValues) { + meta.writeLong(v); + } + encode = new HashMap<>(); + for (int i = 0; i < sortedUniqueValues.length; ++i) { + encode.put(sortedUniqueValues[i], i); + } + min = 0; + gcd = 1; + } else { + uniqueValues = null; + // we do blocks if that appears to save 10+% 
storage + doBlocks = minMax.spaceInBits > 0 && (double) blockMinMax.spaceInBits / minMax.spaceInBits <= 0.9; + if (doBlocks) { + numBitsPerValue = 0xFF; + meta.writeInt(-2 - NUMERIC_BLOCK_SHIFT); + } else { + numBitsPerValue = LegacyDirectWriter.unsignedBitsRequired((max - min) / gcd); + if (gcd == 1 + && min > 0 + && LegacyDirectWriter.unsignedBitsRequired(max) == LegacyDirectWriter.unsignedBitsRequired(max - min)) { + min = 0; + } + meta.writeInt(-1); + } + } + } + + meta.writeByte((byte) numBitsPerValue); + meta.writeLong(min); + meta.writeLong(gcd); + long startOffset = data.getFilePointer(); + meta.writeLong(startOffset); + if (doBlocks) { + writeValuesMultipleBlocks(valuesProducer.getSortedNumeric(field), gcd); + } else if (numBitsPerValue != 0) { + writeValuesSingleBlock(valuesProducer.getSortedNumeric(field), numValues, numBitsPerValue, min, gcd, encode); + } + meta.writeLong(data.getFilePointer() - startOffset); + + return new long[] { numDocsWithValue, numValues }; + } + + private void writeValuesSingleBlock( + SortedNumericDocValues values, + long numValues, + int numBitsPerValue, + long min, + long gcd, + Map encode + ) throws IOException { + LegacyDirectWriter writer = LegacyDirectWriter.getInstance(data, numValues, numBitsPerValue); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + for (int i = 0, count = values.docValueCount(); i < count; ++i) { + long v = values.nextValue(); + if (encode == null) { + writer.add((v - min) / gcd); + } else { + writer.add(encode.get(v)); + } + } + } + writer.finish(); + } + + private void writeValuesMultipleBlocks(SortedNumericDocValues values, long gcd) throws IOException { + final long[] buffer = new long[NUMERIC_BLOCK_SIZE]; + final ByteBuffersDataOutput encodeBuffer = ByteBuffersDataOutput.newResettableInstance(); + int upTo = 0; + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + for (int i = 0, count = 
values.docValueCount(); i < count; ++i) { + buffer[upTo++] = values.nextValue(); + if (upTo == NUMERIC_BLOCK_SIZE) { + writeBlock(buffer, NUMERIC_BLOCK_SIZE, gcd, encodeBuffer); + upTo = 0; + } + } + } + if (upTo > 0) { + writeBlock(buffer, upTo, gcd, encodeBuffer); + } + } + + private void writeBlock(long[] values, int length, long gcd, ByteBuffersDataOutput buffer) throws IOException { + assert length > 0; + long min = values[0]; + long max = values[0]; + for (int i = 1; i < length; ++i) { + final long v = values[i]; + assert Math.floorMod(values[i] - min, gcd) == 0; + min = Math.min(min, v); + max = Math.max(max, v); + } + if (min == max) { + data.writeByte((byte) 0); + data.writeLong(min); + } else { + final int bitsPerValue = LegacyDirectWriter.unsignedBitsRequired(max - min); + buffer.reset(); + assert buffer.size() == 0; + final LegacyDirectWriter w = LegacyDirectWriter.getInstance(buffer, length, bitsPerValue); + for (int i = 0; i < length; ++i) { + w.add((values[i] - min) / gcd); + } + w.finish(); + data.writeByte((byte) bitsPerValue); + data.writeLong(min); + data.writeInt(Math.toIntExact(buffer.size())); + buffer.copyTo(data); + } + } + + @Override + public void addBinaryField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + meta.writeByte(Lucene70DocValuesFormat.BINARY); + + BinaryDocValues values = valuesProducer.getBinary(field); + long start = data.getFilePointer(); + meta.writeLong(start); + int numDocsWithField = 0; + int minLength = Integer.MAX_VALUE; + int maxLength = 0; + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + numDocsWithField++; + BytesRef v = values.binaryValue(); + int length = v.length; + data.writeBytes(v.bytes, v.offset, v.length); + minLength = Math.min(length, minLength); + maxLength = Math.max(length, maxLength); + } + assert numDocsWithField <= maxDoc; + meta.writeLong(data.getFilePointer() - start); + + if 
(numDocsWithField == 0) { + meta.writeLong(-2); + meta.writeLong(0L); + } else if (numDocsWithField == maxDoc) { + meta.writeLong(-1); + meta.writeLong(0L); + } else { + long offset = data.getFilePointer(); + meta.writeLong(offset); + values = valuesProducer.getBinary(field); + IndexedDISI.writeBitSet(values, data); + meta.writeLong(data.getFilePointer() - offset); + } + + meta.writeInt(numDocsWithField); + meta.writeInt(minLength); + meta.writeInt(maxLength); + if (maxLength > minLength) { + start = data.getFilePointer(); + meta.writeLong(start); + meta.writeVInt(DIRECT_MONOTONIC_BLOCK_SHIFT); + + final LegacyDirectMonotonicWriter writer = LegacyDirectMonotonicWriter.getInstance( + meta, + data, + numDocsWithField + 1, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + long addr = 0; + writer.add(addr); + values = valuesProducer.getBinary(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + addr += values.binaryValue().length; + writer.add(addr); + } + writer.finish(); + meta.writeLong(data.getFilePointer() - start); + } + } + + @Override + public void addSortedField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + meta.writeByte(Lucene70DocValuesFormat.SORTED); + doAddSortedField(field, valuesProducer); + } + + private void doAddSortedField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + SortedDocValues values = valuesProducer.getSorted(field); + int numDocsWithField = 0; + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + numDocsWithField++; + } + + if (numDocsWithField == 0) { + meta.writeLong(-2); + meta.writeLong(0L); + } else if (numDocsWithField == maxDoc) { + meta.writeLong(-1); + meta.writeLong(0L); + } else { + long offset = data.getFilePointer(); + meta.writeLong(offset); + values = valuesProducer.getSorted(field); + IndexedDISI.writeBitSet(values, data); + 
meta.writeLong(data.getFilePointer() - offset); + } + + meta.writeInt(numDocsWithField); + if (values.getValueCount() <= 1) { + meta.writeByte((byte) 0); + meta.writeLong(0L); + meta.writeLong(0L); + } else { + int numberOfBitsPerOrd = LegacyDirectWriter.unsignedBitsRequired(values.getValueCount() - 1); + meta.writeByte((byte) numberOfBitsPerOrd); + long start = data.getFilePointer(); + meta.writeLong(start); + LegacyDirectWriter writer = LegacyDirectWriter.getInstance(data, numDocsWithField, numberOfBitsPerOrd); + values = valuesProducer.getSorted(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + writer.add(values.ordValue()); + } + writer.finish(); + meta.writeLong(data.getFilePointer() - start); + } + + addTermsDict(DocValues.singleton(valuesProducer.getSorted(field))); + } + + private void addTermsDict(SortedSetDocValues values) throws IOException { + final long size = values.getValueCount(); + meta.writeVLong(size); + meta.writeInt(Lucene70DocValuesFormat.TERMS_DICT_BLOCK_SHIFT); + + ByteBuffersDataOutput addressBuffer = new ByteBuffersDataOutput(); + ByteBuffersIndexOutput addressIndexOut = new ByteBuffersIndexOutput(addressBuffer, "temp", "temp"); + meta.writeInt(DIRECT_MONOTONIC_BLOCK_SHIFT); + long numBlocks = (size + Lucene70DocValuesFormat.TERMS_DICT_BLOCK_MASK) >>> Lucene70DocValuesFormat.TERMS_DICT_BLOCK_SHIFT; + LegacyDirectMonotonicWriter writer = LegacyDirectMonotonicWriter.getInstance( + meta, + addressIndexOut, + numBlocks, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + + BytesRefBuilder previous = new BytesRefBuilder(); + long ord = 0; + long start = data.getFilePointer(); + int maxLength = 0; + TermsEnum iterator = values.termsEnum(); + for (BytesRef term = iterator.next(); term != null; term = iterator.next()) { + if ((ord & Lucene70DocValuesFormat.TERMS_DICT_BLOCK_MASK) == 0) { + writer.add(data.getFilePointer() - start); + data.writeVInt(term.length); + data.writeBytes(term.bytes, term.offset, 
term.length); + } else { + final int prefixLength = StringHelper.bytesDifference(previous.get(), term); + final int suffixLength = term.length - prefixLength; + assert suffixLength > 0; // terms are unique + + data.writeByte((byte) (Math.min(prefixLength, 15) | (Math.min(15, suffixLength - 1) << 4))); + if (prefixLength >= 15) { + data.writeVInt(prefixLength - 15); + } + if (suffixLength >= 16) { + data.writeVInt(suffixLength - 16); + } + data.writeBytes(term.bytes, term.offset + prefixLength, term.length - prefixLength); + } + maxLength = Math.max(maxLength, term.length); + previous.copyBytes(term); + ++ord; + } + writer.finish(); + meta.writeInt(maxLength); + meta.writeLong(start); + meta.writeLong(data.getFilePointer() - start); + start = data.getFilePointer(); + addressBuffer.copyTo(data); + meta.writeLong(start); + meta.writeLong(data.getFilePointer() - start); + + // Now write the reverse terms index + writeTermsIndex(values); + } + + private void writeTermsIndex(SortedSetDocValues values) throws IOException { + final long size = values.getValueCount(); + meta.writeInt(Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_SHIFT); + long start = data.getFilePointer(); + + long numBlocks = 1L + ((size + Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) + >>> Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_SHIFT); + ByteBuffersDataOutput addressBuffer = new ByteBuffersDataOutput(); + ByteBuffersIndexOutput addressIndexOut = new ByteBuffersIndexOutput(addressBuffer, "temp", "temp"); + LegacyDirectMonotonicWriter writer = LegacyDirectMonotonicWriter.getInstance( + meta, + addressIndexOut, + numBlocks, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + + TermsEnum iterator = values.termsEnum(); + BytesRefBuilder previous = new BytesRefBuilder(); + long offset = 0; + long ord = 0; + for (BytesRef term = iterator.next(); term != null; term = iterator.next()) { + if ((ord & Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) == 0) { + writer.add(offset); + final int 
sortKeyLength; + if (ord == 0) { + // no previous term: no bytes to write + sortKeyLength = 0; + } else { + sortKeyLength = StringHelper.sortKeyLength(previous.get(), term); + } + offset += sortKeyLength; + data.writeBytes(term.bytes, term.offset, sortKeyLength); + } else if ((ord + & Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) == Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) { + previous.copyBytes(term); + } + ++ord; + } + writer.add(offset); + writer.finish(); + meta.writeLong(start); + meta.writeLong(data.getFilePointer() - start); + start = data.getFilePointer(); + addressBuffer.copyTo(data); + meta.writeLong(start); + meta.writeLong(data.getFilePointer() - start); + } + + @Override + public void addSortedNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + meta.writeByte(Lucene70DocValuesFormat.SORTED_NUMERIC); + + long[] stats = writeValues(field, valuesProducer); + int numDocsWithField = Math.toIntExact(stats[0]); + long numValues = stats[1]; + assert numValues >= numDocsWithField; + + meta.writeInt(numDocsWithField); + if (numValues > numDocsWithField) { + long start = data.getFilePointer(); + meta.writeLong(start); + meta.writeVInt(DIRECT_MONOTONIC_BLOCK_SHIFT); + + final LegacyDirectMonotonicWriter addressesWriter = LegacyDirectMonotonicWriter.getInstance( + meta, + data, + numDocsWithField + 1L, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + long addr = 0; + addressesWriter.add(addr); + SortedNumericDocValues values = valuesProducer.getSortedNumeric(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + addr += values.docValueCount(); + addressesWriter.add(addr); + } + addressesWriter.finish(); + meta.writeLong(data.getFilePointer() - start); + } + } + + @Override + public void addSortedSetField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + 
meta.writeByte(Lucene70DocValuesFormat.SORTED_SET); + + SortedSetDocValues values = valuesProducer.getSortedSet(field); + int numDocsWithField = 0; + long numOrds = 0; + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + numDocsWithField++; + numOrds += values.docValueCount(); + } + + if (numDocsWithField == numOrds) { + meta.writeByte((byte) 0); + doAddSortedField(field, new EmptyDocValuesProducer() { + @Override + public SortedDocValues getSorted(FieldInfo field) throws IOException { + return SortedSetSelector.wrap(valuesProducer.getSortedSet(field), SortedSetSelector.Type.MIN); + } + }); + return; + } + meta.writeByte((byte) 1); + + assert numDocsWithField != 0; + if (numDocsWithField == maxDoc) { + meta.writeLong(-1); + meta.writeLong(0L); + } else { + long offset = data.getFilePointer(); + meta.writeLong(offset); + values = valuesProducer.getSortedSet(field); + IndexedDISI.writeBitSet(values, data); + meta.writeLong(data.getFilePointer() - offset); + } + + int numberOfBitsPerOrd = LegacyDirectWriter.unsignedBitsRequired(values.getValueCount() - 1); + meta.writeByte((byte) numberOfBitsPerOrd); + long start = data.getFilePointer(); + meta.writeLong(start); + LegacyDirectWriter writer = LegacyDirectWriter.getInstance(data, numOrds, numberOfBitsPerOrd); + values = valuesProducer.getSortedSet(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + for (int i = 0; i < values.docValueCount(); i++) { + writer.add(values.nextOrd()); + } + } + writer.finish(); + meta.writeLong(data.getFilePointer() - start); + + meta.writeInt(numDocsWithField); + start = data.getFilePointer(); + meta.writeLong(start); + meta.writeVInt(DIRECT_MONOTONIC_BLOCK_SHIFT); + + final LegacyDirectMonotonicWriter addressesWriter = LegacyDirectMonotonicWriter.getInstance( + meta, + data, + numDocsWithField + 1, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + long addr = 0; + addressesWriter.add(addr); + 
values = valuesProducer.getSortedSet(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + values.nextOrd(); + addr += values.docValueCount(); + addressesWriter.add(addr); + } + addressesWriter.finish(); + meta.writeLong(data.getFilePointer() - start); + + addTermsDict(values); + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormat.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormat.java new file mode 100644 index 0000000000000..76fce4cd15c93 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormat.java @@ -0,0 +1,171 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2021 Elasticsearch B.V. 
+ */ +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +import org.apache.lucene.backward_codecs.packed.LegacyDirectWriter; +import org.apache.lucene.codecs.DocValuesConsumer; +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.SegmentReadState; +import org.apache.lucene.index.SegmentWriteState; +import org.apache.lucene.store.DataOutput; +import org.apache.lucene.util.SmallFloat; + +import java.io.IOException; + +/** + * Lucene 7.0 DocValues format. + * + *

      Documents that have a value for the field are encoded in a way that it is always possible to + * know the ordinal of the current document in the set of documents that have a value. For instance, + * say the set of documents that have a value for the field is {1, 5, 6, 11}. When the + * iterator is on 6, it knows that this is the 3rd item of the set. This way, values + * can be stored densely and accessed based on their index at search time. If all documents in a + * segment have a value for the field, the index is the same as the doc ID, so this case is encoded + * implicitly and is very fast at query time. On the other hand if some documents are missing a + * value for the field then the set of documents that have a value is encoded into blocks. All doc + * IDs that share the same upper 16 bits are encoded into the same block with the following + * strategies: + * + *

        + *
      • SPARSE: This strategy is used when a block contains at most 4095 documents. The lower 16 + * bits of doc IDs are stored as {@link DataOutput#writeShort(short) shorts} while the upper + * 16 bits are given by the block ID. + *
      • DENSE: This strategy is used when a block contains between 4096 and 65535 documents. The + * lower bits of doc IDs are stored in a bit set. Advancing is performed using {@link + * Long#numberOfTrailingZeros(long) ntz} operations while the index is computed by + * accumulating the {@link Long#bitCount(long) bit counts} of the visited longs. + *
      • ALL: This strategy is used when a block contains exactly 65536 documents, meaning that the + * block is full. In that case doc IDs do not need to be stored explicitly. This is typically + * faster than both SPARSE and DENSE which is a reason why it is preferable to have all + * documents that have a value for a field using contiguous doc IDs, for instance by using + * {@link IndexWriterConfig#setIndexSort(org.apache.lucene.search.Sort) index sorting}. + *
      + * + *

      Then the five per-document value types (Numeric,Binary,Sorted,SortedSet,SortedNumeric) are + * encoded using the following strategies: + * + *

      {@link DocValuesType#NUMERIC NUMERIC}: + * + *

        + *
      • Delta-compressed: per-document integers written as deltas from the minimum value, + * compressed with bitpacking. For more information, see {@link LegacyDirectWriter}. + *
      • Table-compressed: when the number of unique values is very small (< 256), and when there + * are unused "gaps" in the range of values used (such as {@link SmallFloat}), a lookup table + * is written instead. Each per-document entry is instead the ordinal to this table, and those + * ordinals are compressed with bitpacking ({@link LegacyDirectWriter}). + *
      • GCD-compressed: when all numbers share a common divisor, such as dates, the greatest common + * denominator (GCD) is computed, and quotients are stored using Delta-compressed Numerics. + *
      • Monotonic-compressed: when all numbers are monotonically increasing offsets, they are + * written as blocks of bitpacked integers, encoding the deviation from the expected delta. + *
      • Const-compressed: when there is only one possible value, no per-document data is needed and + * this value is encoded alone. + *
      + * + *

      {@link DocValuesType#BINARY BINARY}: + * + *

        + *
      • Fixed-width Binary: one large concatenated byte[] is written, along with the fixed length. + * Each document's value can be addressed directly with multiplication ({@code docID * + * length}). + *
      • Variable-width Binary: one large concatenated byte[] is written, along with end addresses + * for each document. The addresses are written as Monotonic-compressed numerics. + *
      • Prefix-compressed Binary: values are written in chunks of 16, with the first value written + * completely and other values sharing prefixes. chunk addresses are written as + * Monotonic-compressed numerics. A reverse lookup index is written from a portion of every + * 1024th term. + *
      + * + *

      {@link DocValuesType#SORTED SORTED}: + * + *

        + *
      • Sorted: a mapping of ordinals to deduplicated terms is written as Prefix-compressed Binary, + * along with the per-document ordinals written using one of the numeric strategies above. + *
      + * + *

      {@link DocValuesType#SORTED_SET SORTED_SET}: + * + *

        + *
      • Single: if all documents have 0 or 1 value, then data are written like SORTED. + *
      • SortedSet: a mapping of ordinals to deduplicated terms is written as Binary, an ordinal + * list and per-document index into this list are written using the numeric strategies above. + *
      + * + *

      {@link DocValuesType#SORTED_NUMERIC SORTED_NUMERIC}: + * + *

        + *
      • Single: if all documents have 0 or 1 value, then data are written like NUMERIC. + *
      • SortedNumeric: a value list and per-document index into this list are written using the + * numeric strategies above. + *
      + * + *

      Files: + * + *

        + *
      1. .dvd: DocValues data + *
      2. .dvm: DocValues metadata + *
      + */ +public final class Lucene70DocValuesFormat extends DocValuesFormat { + + /** Sole Constructor */ + public Lucene70DocValuesFormat() { + super("Lucene70"); + } + + @Override + public DocValuesConsumer fieldsConsumer(SegmentWriteState state) throws IOException { + return new Lucene70DocValuesConsumer(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION); + } + + @Override + public DocValuesProducer fieldsProducer(SegmentReadState state) throws IOException { + return new Lucene70DocValuesProducer(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION); + } + + static final String DATA_CODEC = "Lucene70DocValuesData"; + static final String DATA_EXTENSION = "dvd"; + static final String META_CODEC = "Lucene70DocValuesMetadata"; + static final String META_EXTENSION = "dvm"; + static final int VERSION_START = 0; + static final int VERSION_CURRENT = VERSION_START; + + // indicates docvalues type + static final byte NUMERIC = 0; + static final byte BINARY = 1; + static final byte SORTED = 2; + static final byte SORTED_SET = 3; + static final byte SORTED_NUMERIC = 4; + + static final int DIRECT_MONOTONIC_BLOCK_SHIFT = 16; + + static final int NUMERIC_BLOCK_SHIFT = 14; + static final int NUMERIC_BLOCK_SIZE = 1 << NUMERIC_BLOCK_SHIFT; + + static final int TERMS_DICT_BLOCK_SHIFT = 4; + static final int TERMS_DICT_BLOCK_SIZE = 1 << TERMS_DICT_BLOCK_SHIFT; + static final int TERMS_DICT_BLOCK_MASK = TERMS_DICT_BLOCK_SIZE - 1; + + static final int TERMS_DICT_REVERSE_INDEX_SHIFT = 10; + static final int TERMS_DICT_REVERSE_INDEX_SIZE = 1 << TERMS_DICT_REVERSE_INDEX_SHIFT; + static final int TERMS_DICT_REVERSE_INDEX_MASK = TERMS_DICT_REVERSE_INDEX_SIZE - 1; +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesProducer.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesProducer.java new file mode 100644 index 
0000000000000..5164a67c428b3 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesProducer.java @@ -0,0 +1,1461 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2021 Elasticsearch B.V. 
+ */ +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +import org.apache.lucene.backward_codecs.packed.LegacyDirectMonotonicReader; +import org.apache.lucene.backward_codecs.packed.LegacyDirectReader; +import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil; +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.index.BaseTermsEnum; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipper; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.FieldInfos; +import org.apache.lucene.index.ImpactsEnum; +import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.SegmentReadState; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.index.TermsEnum.SeekStatus; +import org.apache.lucene.store.ChecksumIndexInput; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.RandomAccessInput; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.LongValues; +import org.elasticsearch.core.IOUtils; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +/** reader for {@link Lucene70DocValuesFormat} */ +final class Lucene70DocValuesProducer extends DocValuesProducer { + private final Map numerics = new HashMap<>(); + private final Map binaries = new HashMap<>(); + private final Map sorted = new HashMap<>(); + private final Map sortedSets = new HashMap<>(); + private final Map sortedNumerics = new HashMap<>(); + private final IndexInput data; + private final int maxDoc; + + static 
final long NO_MORE_ORDS = -1; + + /** expert: instantiates a new reader */ + Lucene70DocValuesProducer(SegmentReadState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) + throws IOException { + String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension); + this.maxDoc = state.segmentInfo.maxDoc(); + + int version = -1; + + // read in the entries from the metadata file. + try (ChecksumIndexInput in = EndiannessReverserUtil.openChecksumInput(state.directory, metaName, state.context)) { + Throwable priorE = null; + try { + version = CodecUtil.checkIndexHeader( + in, + metaCodec, + Lucene70DocValuesFormat.VERSION_START, + Lucene70DocValuesFormat.VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + readFields(in, state.fieldInfos); + } catch (Throwable exception) { + priorE = exception; + } finally { + CodecUtil.checkFooter(in, priorE); + } + } + + String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension); + this.data = EndiannessReverserUtil.openInput(state.directory, dataName, state.context); + boolean success = false; + try { + final int version2 = CodecUtil.checkIndexHeader( + data, + dataCodec, + Lucene70DocValuesFormat.VERSION_START, + Lucene70DocValuesFormat.VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + if (version != version2) { + throw new CorruptIndexException("Format versions mismatch: meta=" + version + ", data=" + version2, data); + } + + // NOTE: data file is too costly to verify checksum against all the bytes on open, + // but for now we at least verify proper structure of the checksum footer: which looks + // for FOOTER_MAGIC + algorithmID. This is cheap and can detect some forms of corruption + // such as file truncation. 
+ CodecUtil.retrieveChecksum(data); + + success = true; + } finally { + if (success == false) { + IOUtils.closeWhileHandlingException(this.data); + } + } + } + + private void readFields(ChecksumIndexInput meta, FieldInfos infos) throws IOException { + for (int fieldNumber = meta.readInt(); fieldNumber != -1; fieldNumber = meta.readInt()) { + FieldInfo info = infos.fieldInfo(fieldNumber); + if (info == null) { + throw new CorruptIndexException("Invalid field number: " + fieldNumber, meta); + } + byte type = meta.readByte(); + if (type == Lucene70DocValuesFormat.NUMERIC) { + numerics.put(info.name, readNumeric(meta)); + } else if (type == Lucene70DocValuesFormat.BINARY) { + binaries.put(info.name, readBinary(meta)); + } else if (type == Lucene70DocValuesFormat.SORTED) { + sorted.put(info.name, readSorted(meta)); + } else if (type == Lucene70DocValuesFormat.SORTED_SET) { + sortedSets.put(info.name, readSortedSet(meta)); + } else if (type == Lucene70DocValuesFormat.SORTED_NUMERIC) { + sortedNumerics.put(info.name, readSortedNumeric(meta)); + } else { + throw new CorruptIndexException("invalid type: " + type, meta); + } + } + } + + private NumericEntry readNumeric(ChecksumIndexInput meta) throws IOException { + NumericEntry entry = new NumericEntry(); + readNumeric(meta, entry); + return entry; + } + + private void readNumeric(ChecksumIndexInput meta, NumericEntry entry) throws IOException { + entry.docsWithFieldOffset = meta.readLong(); + entry.docsWithFieldLength = meta.readLong(); + entry.numValues = meta.readLong(); + int tableSize = meta.readInt(); + if (tableSize > 256) { + throw new CorruptIndexException("invalid table size: " + tableSize, meta); + } + if (tableSize >= 0) { + entry.table = new long[tableSize]; + for (int i = 0; i < tableSize; ++i) { + entry.table[i] = meta.readLong(); + } + } + if (tableSize < -1) { + entry.blockShift = -2 - tableSize; + } else { + entry.blockShift = -1; + } + entry.bitsPerValue = meta.readByte(); + entry.minValue = 
meta.readLong(); + entry.gcd = meta.readLong(); + entry.valuesOffset = meta.readLong(); + entry.valuesLength = meta.readLong(); + } + + private BinaryEntry readBinary(ChecksumIndexInput meta) throws IOException { + BinaryEntry entry = new BinaryEntry(); + entry.dataOffset = meta.readLong(); + entry.dataLength = meta.readLong(); + entry.docsWithFieldOffset = meta.readLong(); + entry.docsWithFieldLength = meta.readLong(); + entry.numDocsWithField = meta.readInt(); + entry.minLength = meta.readInt(); + entry.maxLength = meta.readInt(); + if (entry.minLength < entry.maxLength) { + entry.addressesOffset = meta.readLong(); + final int blockShift = meta.readVInt(); + entry.addressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, entry.numDocsWithField + 1L, blockShift); + entry.addressesLength = meta.readLong(); + } + return entry; + } + + private SortedEntry readSorted(ChecksumIndexInput meta) throws IOException { + SortedEntry entry = new SortedEntry(); + entry.docsWithFieldOffset = meta.readLong(); + entry.docsWithFieldLength = meta.readLong(); + entry.numDocsWithField = meta.readInt(); + entry.bitsPerValue = meta.readByte(); + entry.ordsOffset = meta.readLong(); + entry.ordsLength = meta.readLong(); + readTermDict(meta, entry); + return entry; + } + + private SortedSetEntry readSortedSet(ChecksumIndexInput meta) throws IOException { + SortedSetEntry entry = new SortedSetEntry(); + byte multiValued = meta.readByte(); + switch (multiValued) { + case 0: // singlevalued + entry.singleValueEntry = readSorted(meta); + return entry; + case 1: // multivalued + break; + default: + throw new CorruptIndexException("Invalid multiValued flag: " + multiValued, meta); + } + entry.docsWithFieldOffset = meta.readLong(); + entry.docsWithFieldLength = meta.readLong(); + entry.bitsPerValue = meta.readByte(); + entry.ordsOffset = meta.readLong(); + entry.ordsLength = meta.readLong(); + entry.numDocsWithField = meta.readInt(); + entry.addressesOffset = meta.readLong(); + final int 
blockShift = meta.readVInt(); + entry.addressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, entry.numDocsWithField + 1, blockShift); + entry.addressesLength = meta.readLong(); + readTermDict(meta, entry); + return entry; + } + + private static void readTermDict(ChecksumIndexInput meta, TermsDictEntry entry) throws IOException { + entry.termsDictSize = meta.readVLong(); + entry.termsDictBlockShift = meta.readInt(); + final int blockShift = meta.readInt(); + final long addressesSize = (entry.termsDictSize + (1L << entry.termsDictBlockShift) - 1) >>> entry.termsDictBlockShift; + entry.termsAddressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, addressesSize, blockShift); + entry.maxTermLength = meta.readInt(); + entry.termsDataOffset = meta.readLong(); + entry.termsDataLength = meta.readLong(); + entry.termsAddressesOffset = meta.readLong(); + entry.termsAddressesLength = meta.readLong(); + entry.termsDictIndexShift = meta.readInt(); + final long indexSize = (entry.termsDictSize + (1L << entry.termsDictIndexShift) - 1) >>> entry.termsDictIndexShift; + entry.termsIndexAddressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, 1 + indexSize, blockShift); + entry.termsIndexOffset = meta.readLong(); + entry.termsIndexLength = meta.readLong(); + entry.termsIndexAddressesOffset = meta.readLong(); + entry.termsIndexAddressesLength = meta.readLong(); + } + + private SortedNumericEntry readSortedNumeric(ChecksumIndexInput meta) throws IOException { + SortedNumericEntry entry = new SortedNumericEntry(); + readNumeric(meta, entry); + entry.numDocsWithField = meta.readInt(); + if (entry.numDocsWithField != entry.numValues) { + entry.addressesOffset = meta.readLong(); + final int blockShift = meta.readVInt(); + entry.addressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, entry.numDocsWithField + 1, blockShift); + entry.addressesLength = meta.readLong(); + } + return entry; + } + + @Override + public void close() throws IOException { + data.close(); + } + + private 
static class NumericEntry { + long[] table; + int blockShift; + byte bitsPerValue; + long docsWithFieldOffset; + long docsWithFieldLength; + long numValues; + long minValue; + long gcd; + long valuesOffset; + long valuesLength; + } + + private static class BinaryEntry { + long dataOffset; + long dataLength; + long docsWithFieldOffset; + long docsWithFieldLength; + int numDocsWithField; + int minLength; + int maxLength; + long addressesOffset; + long addressesLength; + LegacyDirectMonotonicReader.Meta addressesMeta; + } + + private static class TermsDictEntry { + long termsDictSize; + int termsDictBlockShift; + LegacyDirectMonotonicReader.Meta termsAddressesMeta; + int maxTermLength; + long termsDataOffset; + long termsDataLength; + long termsAddressesOffset; + long termsAddressesLength; + int termsDictIndexShift; + LegacyDirectMonotonicReader.Meta termsIndexAddressesMeta; + long termsIndexOffset; + long termsIndexLength; + long termsIndexAddressesOffset; + long termsIndexAddressesLength; + } + + private static class SortedEntry extends TermsDictEntry { + long docsWithFieldOffset; + long docsWithFieldLength; + int numDocsWithField; + byte bitsPerValue; + long ordsOffset; + long ordsLength; + } + + private static class SortedSetEntry extends TermsDictEntry { + SortedEntry singleValueEntry; + long docsWithFieldOffset; + long docsWithFieldLength; + int numDocsWithField; + byte bitsPerValue; + long ordsOffset; + long ordsLength; + LegacyDirectMonotonicReader.Meta addressesMeta; + long addressesOffset; + long addressesLength; + } + + private static class SortedNumericEntry extends NumericEntry { + int numDocsWithField; + LegacyDirectMonotonicReader.Meta addressesMeta; + long addressesOffset; + long addressesLength; + } + + @Override + public NumericDocValues getNumeric(FieldInfo field) throws IOException { + NumericEntry entry = numerics.get(field.name); + return getNumeric(entry); + } + + private abstract static class DenseNumericDocValues extends NumericDocValues { + + 
final int maxDoc; + int doc = -1; + + DenseNumericDocValues(int maxDoc) { + this.maxDoc = maxDoc; + } + + @Override + public int docID() { + return doc; + } + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) { + return doc = NO_MORE_DOCS; + } + return doc = target; + } + + @Override + public boolean advanceExact(int target) { + doc = target; + return true; + } + + @Override + public long cost() { + return maxDoc; + } + } + + private abstract static class SparseNumericDocValues extends NumericDocValues { + + final IndexedDISI disi; + + SparseNumericDocValues(IndexedDISI disi) { + this.disi = disi; + } + + @Override + public int advance(int target) throws IOException { + return disi.advance(target); + } + + @Override + public boolean advanceExact(int target) throws IOException { + return disi.advanceExact(target); + } + + @Override + public int nextDoc() throws IOException { + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + } + + private NumericDocValues getNumeric(NumericEntry entry) throws IOException { + if (entry.docsWithFieldOffset == -2) { + // empty + return DocValues.emptyNumeric(); + } else if (entry.docsWithFieldOffset == -1) { + // dense + if (entry.bitsPerValue == 0) { + return new DenseNumericDocValues(maxDoc) { + @Override + public long longValue() throws IOException { + return entry.minValue; + } + }; + } else { + final RandomAccessInput slice = data.randomAccessSlice(entry.valuesOffset, entry.valuesLength); + if (entry.blockShift >= 0) { + // dense but split into blocks of different bits per value + final int shift = entry.blockShift; + final long mul = entry.gcd; + final int mask = (1 << shift) - 1; + return new DenseNumericDocValues(maxDoc) { + int block = -1; + long delta; + long offset; + long blockEndOffset; + 
LongValues values; + + @Override + public long longValue() throws IOException { + final int block = doc >>> shift; + if (this.block != block) { + int bitsPerValue; + do { + offset = blockEndOffset; + bitsPerValue = slice.readByte(offset++); + delta = slice.readLong(offset); + offset += Long.BYTES; + if (bitsPerValue == 0) { + blockEndOffset = offset; + } else { + final int length = slice.readInt(offset); + offset += Integer.BYTES; + blockEndOffset = offset + length; + } + this.block++; + } while (this.block != block); + values = bitsPerValue == 0 + ? LongValues.ZEROES + : LegacyDirectReader.getInstance(slice, bitsPerValue, offset); + } + return mul * values.get(doc & mask) + delta; + } + }; + } else { + final LongValues values = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + if (entry.table != null) { + final long[] table = entry.table; + return new DenseNumericDocValues(maxDoc) { + @Override + public long longValue() throws IOException { + return table[(int) values.get(doc)]; + } + }; + } else { + final long mul = entry.gcd; + final long delta = entry.minValue; + return new DenseNumericDocValues(maxDoc) { + @Override + public long longValue() throws IOException { + return mul * values.get(doc) + delta; + } + }; + } + } + } + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numValues); + if (entry.bitsPerValue == 0) { + return new SparseNumericDocValues(disi) { + @Override + public long longValue() throws IOException { + return entry.minValue; + } + }; + } else { + final RandomAccessInput slice = data.randomAccessSlice(entry.valuesOffset, entry.valuesLength); + if (entry.blockShift >= 0) { + // sparse and split into blocks of different bits per value + final int shift = entry.blockShift; + final long mul = entry.gcd; + final int mask = (1 << shift) - 1; + return new SparseNumericDocValues(disi) { + int block = -1; + long delta; + long offset; + long blockEndOffset; + 
LongValues values; + + @Override + public long longValue() throws IOException { + final int index = disi.index(); + final int block = index >>> shift; + if (this.block != block) { + int bitsPerValue; + do { + offset = blockEndOffset; + bitsPerValue = slice.readByte(offset++); + delta = slice.readLong(offset); + offset += Long.BYTES; + if (bitsPerValue == 0) { + blockEndOffset = offset; + } else { + final int length = slice.readInt(offset); + offset += Integer.BYTES; + blockEndOffset = offset + length; + } + this.block++; + } while (this.block != block); + values = bitsPerValue == 0 + ? LongValues.ZEROES + : LegacyDirectReader.getInstance(slice, bitsPerValue, offset); + } + return mul * values.get(index & mask) + delta; + } + }; + } else { + final LongValues values = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + if (entry.table != null) { + final long[] table = entry.table; + return new SparseNumericDocValues(disi) { + @Override + public long longValue() throws IOException { + return table[(int) values.get(disi.index())]; + } + }; + } else { + final long mul = entry.gcd; + final long delta = entry.minValue; + return new SparseNumericDocValues(disi) { + @Override + public long longValue() throws IOException { + return mul * values.get(disi.index()) + delta; + } + }; + } + } + } + } + } + + private LongValues getNumericValues(NumericEntry entry) throws IOException { + if (entry.bitsPerValue == 0) { + return new LongValues() { + @Override + public long get(long index) { + return entry.minValue; + } + }; + } else { + final RandomAccessInput slice = data.randomAccessSlice(entry.valuesOffset, entry.valuesLength); + if (entry.blockShift >= 0) { + final int shift = entry.blockShift; + final long mul = entry.gcd; + final long mask = (1L << shift) - 1; + return new LongValues() { + long block = -1; + long delta; + long offset; + long blockEndOffset; + LongValues values; + + @Override + public long get(long index) { + final long block = index >>> shift; + if 
(this.block != block) { + assert block > this.block : "Reading backwards is illegal: " + this.block + " < " + block; + int bitsPerValue; + do { + offset = blockEndOffset; + try { + bitsPerValue = slice.readByte(offset++); + delta = slice.readLong(offset); + offset += Long.BYTES; + if (bitsPerValue == 0) { + blockEndOffset = offset; + } else { + final int length = slice.readInt(offset); + offset += Integer.BYTES; + blockEndOffset = offset + length; + } + } catch (IOException e) { + throw new RuntimeException(e); + } + this.block++; + } while (this.block != block); + values = bitsPerValue == 0 ? LongValues.ZEROES : LegacyDirectReader.getInstance(slice, bitsPerValue, offset); + } + return mul * values.get(index & mask) + delta; + } + }; + } else { + final LongValues values = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + if (entry.table != null) { + final long[] table = entry.table; + return new LongValues() { + @Override + public long get(long index) { + return table[(int) values.get(index)]; + } + }; + } else if (entry.gcd != 1) { + final long gcd = entry.gcd; + final long minValue = entry.minValue; + return new LongValues() { + @Override + public long get(long index) { + return values.get(index) * gcd + minValue; + } + }; + } else if (entry.minValue != 0) { + final long minValue = entry.minValue; + return new LongValues() { + @Override + public long get(long index) { + return values.get(index) + minValue; + } + }; + } else { + return values; + } + } + } + } + + private abstract static class DenseBinaryDocValues extends BinaryDocValues { + + final int maxDoc; + int doc = -1; + + DenseBinaryDocValues(int maxDoc) { + this.maxDoc = maxDoc; + } + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int docID() { + return doc; + } + + @Override + public long cost() { + return maxDoc; + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) { + return doc = 
NO_MORE_DOCS; + } + return doc = target; + } + + @Override + public boolean advanceExact(int target) throws IOException { + doc = target; + return true; + } + } + + private abstract static class SparseBinaryDocValues extends BinaryDocValues { + + final IndexedDISI disi; + + SparseBinaryDocValues(IndexedDISI disi) { + this.disi = disi; + } + + @Override + public int nextDoc() throws IOException { + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + + @Override + public int advance(int target) throws IOException { + return disi.advance(target); + } + + @Override + public boolean advanceExact(int target) throws IOException { + return disi.advanceExact(target); + } + } + + @Override + public BinaryDocValues getBinary(FieldInfo field) throws IOException { + BinaryEntry entry = binaries.get(field.name); + if (entry.docsWithFieldOffset == -2) { + return DocValues.emptyBinary(); + } + + final IndexInput bytesSlice = data.slice("fixed-binary", entry.dataOffset, entry.dataLength); + + if (entry.docsWithFieldOffset == -1) { + // dense + if (entry.minLength == entry.maxLength) { + // fixed length + final int length = entry.maxLength; + return new DenseBinaryDocValues(maxDoc) { + final BytesRef bytes = new BytesRef(new byte[length], 0, length); + + @Override + public BytesRef binaryValue() throws IOException { + bytesSlice.seek((long) doc * length); + bytesSlice.readBytes(bytes.bytes, 0, length); + return bytes; + } + }; + } else { + // variable length + final RandomAccessInput addressesData = this.data.randomAccessSlice(entry.addressesOffset, entry.addressesLength); + final LongValues addresses = LegacyDirectMonotonicReader.getInstance(entry.addressesMeta, addressesData); + return new DenseBinaryDocValues(maxDoc) { + final BytesRef bytes = new BytesRef(new byte[entry.maxLength], 0, entry.maxLength); + + @Override + public BytesRef binaryValue() throws IOException { + long 
startOffset = addresses.get(doc); + bytes.length = (int) (addresses.get(doc + 1L) - startOffset); + bytesSlice.seek(startOffset); + bytesSlice.readBytes(bytes.bytes, 0, bytes.length); + return bytes; + } + }; + } + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField); + if (entry.minLength == entry.maxLength) { + // fixed length + final int length = entry.maxLength; + return new SparseBinaryDocValues(disi) { + final BytesRef bytes = new BytesRef(new byte[length], 0, length); + + @Override + public BytesRef binaryValue() throws IOException { + bytesSlice.seek((long) disi.index() * length); + bytesSlice.readBytes(bytes.bytes, 0, length); + return bytes; + } + }; + } else { + // variable length + final RandomAccessInput addressesData = this.data.randomAccessSlice(entry.addressesOffset, entry.addressesLength); + final LongValues addresses = LegacyDirectMonotonicReader.getInstance(entry.addressesMeta, addressesData); + return new SparseBinaryDocValues(disi) { + final BytesRef bytes = new BytesRef(new byte[entry.maxLength], 0, entry.maxLength); + + @Override + public BytesRef binaryValue() throws IOException { + final int index = disi.index(); + long startOffset = addresses.get(index); + bytes.length = (int) (addresses.get(index + 1L) - startOffset); + bytesSlice.seek(startOffset); + bytesSlice.readBytes(bytes.bytes, 0, bytes.length); + return bytes; + } + }; + } + } + } + + @Override + public SortedDocValues getSorted(FieldInfo field) throws IOException { + SortedEntry entry = sorted.get(field.name); + return getSorted(entry); + } + + private SortedDocValues getSorted(SortedEntry entry) throws IOException { + if (entry.docsWithFieldOffset == -2) { + return DocValues.emptySorted(); + } + + final LongValues ords; + if (entry.bitsPerValue == 0) { + ords = new LongValues() { + @Override + public long get(long index) { + return 0L; + } + }; + } else { + final RandomAccessInput 
slice = data.randomAccessSlice(entry.ordsOffset, entry.ordsLength); + ords = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + } + + if (entry.docsWithFieldOffset == -1) { + // dense + return new BaseSortedDocValues(entry, data) { + + int doc = -1; + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int docID() { + return doc; + } + + @Override + public long cost() { + return maxDoc; + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) { + return doc = NO_MORE_DOCS; + } + return doc = target; + } + + @Override + public boolean advanceExact(int target) { + doc = target; + return true; + } + + @Override + public int ordValue() { + return (int) ords.get(doc); + } + }; + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField); + return new BaseSortedDocValues(entry, data) { + + @Override + public int nextDoc() throws IOException { + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + + @Override + public int advance(int target) throws IOException { + return disi.advance(target); + } + + @Override + public boolean advanceExact(int target) throws IOException { + return disi.advanceExact(target); + } + + @Override + public int ordValue() { + return (int) ords.get(disi.index()); + } + }; + } + } + + private abstract static class BaseSortedDocValues extends SortedDocValues { + + final SortedEntry entry; + final IndexInput data; + final TermsEnum termsEnum; + + BaseSortedDocValues(SortedEntry entry, IndexInput data) throws IOException { + this.entry = entry; + this.data = data; + this.termsEnum = termsEnum(); + } + + @Override + public int getValueCount() { + return Math.toIntExact(entry.termsDictSize); + } + + @Override + public BytesRef lookupOrd(int ord) throws 
IOException { + termsEnum.seekExact(ord); + return termsEnum.term(); + } + + @Override + public int lookupTerm(BytesRef key) throws IOException { + SeekStatus status = termsEnum.seekCeil(key); + switch (status) { + case FOUND: + return Math.toIntExact(termsEnum.ord()); + case NOT_FOUND: + case END: + default: + return Math.toIntExact(-1L - termsEnum.ord()); + } + } + + @Override + public TermsEnum termsEnum() throws IOException { + return new TermsDict(entry, data); + } + } + + private abstract static class BaseSortedSetDocValues extends SortedSetDocValues { + + final SortedSetEntry entry; + final IndexInput data; + final TermsEnum termsEnum; + + BaseSortedSetDocValues(SortedSetEntry entry, IndexInput data) throws IOException { + this.entry = entry; + this.data = data; + this.termsEnum = termsEnum(); + } + + @Override + public long getValueCount() { + return entry.termsDictSize; + } + + @Override + public BytesRef lookupOrd(long ord) throws IOException { + termsEnum.seekExact(ord); + return termsEnum.term(); + } + + @Override + public long lookupTerm(BytesRef key) throws IOException { + SeekStatus status = termsEnum.seekCeil(key); + switch (status) { + case FOUND: + return termsEnum.ord(); + case NOT_FOUND: + case END: + default: + return -1L - termsEnum.ord(); + } + } + + @Override + public TermsEnum termsEnum() throws IOException { + return new TermsDict(entry, data); + } + } + + private static class TermsDict extends BaseTermsEnum { + + final TermsDictEntry entry; + final LongValues blockAddresses; + final IndexInput bytes; + final long blockMask; + final LongValues indexAddresses; + final IndexInput indexBytes; + final BytesRef term; + long ord = -1; + + TermsDict(TermsDictEntry entry, IndexInput data) throws IOException { + this.entry = entry; + RandomAccessInput addressesSlice = data.randomAccessSlice(entry.termsAddressesOffset, entry.termsAddressesLength); + blockAddresses = LegacyDirectMonotonicReader.getInstance(entry.termsAddressesMeta, addressesSlice); + 
bytes = data.slice("terms", entry.termsDataOffset, entry.termsDataLength); + blockMask = (1L << entry.termsDictBlockShift) - 1; + RandomAccessInput indexAddressesSlice = data.randomAccessSlice( + entry.termsIndexAddressesOffset, + entry.termsIndexAddressesLength + ); + indexAddresses = LegacyDirectMonotonicReader.getInstance(entry.termsIndexAddressesMeta, indexAddressesSlice); + indexBytes = data.slice("terms-index", entry.termsIndexOffset, entry.termsIndexLength); + term = new BytesRef(entry.maxTermLength); + } + + @Override + public BytesRef next() throws IOException { + if (++ord >= entry.termsDictSize) { + return null; + } + if ((ord & blockMask) == 0L) { + term.length = bytes.readVInt(); + bytes.readBytes(term.bytes, 0, term.length); + } else { + final int token = Byte.toUnsignedInt(bytes.readByte()); + int prefixLength = token & 0x0F; + int suffixLength = 1 + (token >>> 4); + if (prefixLength == 15) { + prefixLength += bytes.readVInt(); + } + if (suffixLength == 16) { + suffixLength += bytes.readVInt(); + } + term.length = prefixLength + suffixLength; + bytes.readBytes(term.bytes, prefixLength, suffixLength); + } + return term; + } + + @Override + public void seekExact(long ord) throws IOException { + if (ord < 0 || ord >= entry.termsDictSize) { + throw new IndexOutOfBoundsException(); + } + final long blockIndex = ord >>> entry.termsDictBlockShift; + final long blockAddress = blockAddresses.get(blockIndex); + bytes.seek(blockAddress); + this.ord = (blockIndex << entry.termsDictBlockShift) - 1; + do { + next(); + } while (this.ord < ord); + } + + private BytesRef getTermFromIndex(long index) throws IOException { + assert index >= 0 && index <= (entry.termsDictSize - 1) >>> entry.termsDictIndexShift; + final long start = indexAddresses.get(index); + term.length = (int) (indexAddresses.get(index + 1) - start); + indexBytes.seek(start); + indexBytes.readBytes(term.bytes, 0, term.length); + return term; + } + + private long seekTermsIndex(BytesRef text) throws 
IOException { + long lo = 0L; + long hi = (entry.termsDictSize - 1) >>> entry.termsDictIndexShift; + while (lo <= hi) { + final long mid = (lo + hi) >>> 1; + getTermFromIndex(mid); + final int cmp = term.compareTo(text); + if (cmp <= 0) { + lo = mid + 1; + } else { + hi = mid - 1; + } + } + + assert hi < 0 || getTermFromIndex(hi).compareTo(text) <= 0; + assert hi == ((entry.termsDictSize - 1) >>> entry.termsDictIndexShift) || getTermFromIndex(hi + 1).compareTo(text) > 0; + + return hi; + } + + private BytesRef getFirstTermFromBlock(long block) throws IOException { + assert block >= 0 && block <= (entry.termsDictSize - 1) >>> entry.termsDictBlockShift; + final long blockAddress = blockAddresses.get(block); + bytes.seek(blockAddress); + term.length = bytes.readVInt(); + bytes.readBytes(term.bytes, 0, term.length); + return term; + } + + private long seekBlock(BytesRef text) throws IOException { + long index = seekTermsIndex(text); + if (index == -1L) { + return -1L; + } + + long ordLo = index << entry.termsDictIndexShift; + long ordHi = Math.min(entry.termsDictSize, ordLo + (1L << entry.termsDictIndexShift)) - 1L; + + long blockLo = ordLo >>> entry.termsDictBlockShift; + long blockHi = ordHi >>> entry.termsDictBlockShift; + + while (blockLo <= blockHi) { + final long blockMid = (blockLo + blockHi) >>> 1; + getFirstTermFromBlock(blockMid); + final int cmp = term.compareTo(text); + if (cmp <= 0) { + blockLo = blockMid + 1; + } else { + blockHi = blockMid - 1; + } + } + + assert blockHi < 0 || getFirstTermFromBlock(blockHi).compareTo(text) <= 0; + assert blockHi == ((entry.termsDictSize - 1) >>> entry.termsDictBlockShift) + || getFirstTermFromBlock(blockHi + 1).compareTo(text) > 0; + + return blockHi; + } + + @Override + public SeekStatus seekCeil(BytesRef text) throws IOException { + final long block = seekBlock(text); + if (block == -1) { + // before the first term + seekExact(0L); + return SeekStatus.NOT_FOUND; + } + final long blockAddress = 
blockAddresses.get(block); + this.ord = block << entry.termsDictBlockShift; + bytes.seek(blockAddress); + term.length = bytes.readVInt(); + bytes.readBytes(term.bytes, 0, term.length); + while (true) { + int cmp = term.compareTo(text); + if (cmp == 0) { + return SeekStatus.FOUND; + } else if (cmp > 0) { + return SeekStatus.NOT_FOUND; + } + if (next() == null) { + return SeekStatus.END; + } + } + } + + @Override + public BytesRef term() throws IOException { + return term; + } + + @Override + public long ord() throws IOException { + return ord; + } + + @Override + public long totalTermFreq() throws IOException { + return -1L; + } + + @Override + public PostingsEnum postings(PostingsEnum reuse, int flags) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public ImpactsEnum impacts(int flags) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public int docFreq() throws IOException { + throw new UnsupportedOperationException(); + } + } + + @Override + public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { + SortedNumericEntry entry = sortedNumerics.get(field.name); + if (entry.numValues == entry.numDocsWithField) { + return DocValues.singleton(getNumeric(entry)); + } + + final RandomAccessInput addressesInput = data.randomAccessSlice(entry.addressesOffset, entry.addressesLength); + final LongValues addresses = LegacyDirectMonotonicReader.getInstance(entry.addressesMeta, addressesInput); + + final LongValues values = getNumericValues(entry); + + if (entry.docsWithFieldOffset == -1) { + // dense + return new SortedNumericDocValues() { + + int doc = -1; + long start, end; + int count; + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int docID() { + return doc; + } + + @Override + public long cost() { + return maxDoc; + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) 
{ + return doc = NO_MORE_DOCS; + } + start = addresses.get(target); + end = addresses.get(target + 1L); + count = (int) (end - start); + return doc = target; + } + + @Override + public boolean advanceExact(int target) throws IOException { + start = addresses.get(target); + end = addresses.get(target + 1L); + count = (int) (end - start); + doc = target; + return true; + } + + @Override + public long nextValue() throws IOException { + return values.get(start++); + } + + @Override + public int docValueCount() { + return count; + } + }; + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField); + return new SortedNumericDocValues() { + + boolean set; + long start, end; + int count; + + @Override + public int nextDoc() throws IOException { + set = false; + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + + @Override + public int advance(int target) throws IOException { + set = false; + return disi.advance(target); + } + + @Override + public boolean advanceExact(int target) throws IOException { + set = false; + return disi.advanceExact(target); + } + + @Override + public long nextValue() throws IOException { + set(); + return values.get(start++); + } + + @Override + public int docValueCount() { + set(); + return count; + } + + private void set() { + if (set == false) { + final int index = disi.index(); + start = addresses.get(index); + end = addresses.get(index + 1L); + count = (int) (end - start); + set = true; + } + } + }; + } + } + + @Override + public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException { + SortedSetEntry entry = sortedSets.get(field.name); + if (entry.singleValueEntry != null) { + return DocValues.singleton(getSorted(entry.singleValueEntry)); + } + + final RandomAccessInput slice = data.randomAccessSlice(entry.ordsOffset, 
entry.ordsLength); + final LongValues ords = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + + final RandomAccessInput addressesInput = data.randomAccessSlice(entry.addressesOffset, entry.addressesLength); + final LongValues addresses = LegacyDirectMonotonicReader.getInstance(entry.addressesMeta, addressesInput); + + if (entry.docsWithFieldOffset == -1) { + // dense + return new BaseSortedSetDocValues(entry, data) { + + int doc = -1; + long start, end; + int count; + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int docID() { + return doc; + } + + @Override + public long cost() { + return maxDoc; + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) { + return doc = NO_MORE_DOCS; + } + start = addresses.get(target); + end = addresses.get(target + 1L); + count = (int) (end - start); + return doc = target; + } + + @Override + public boolean advanceExact(int target) throws IOException { + start = addresses.get(target); + end = addresses.get(target + 1L); + count = (int) (end - start); + doc = target; + return true; + } + + @Override + public long nextOrd() throws IOException { + if (start == end) { + return NO_MORE_ORDS; + } + return ords.get(start++); + } + + @Override + public int docValueCount() { + return count; + } + }; + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField); + return new BaseSortedSetDocValues(entry, data) { + + boolean set; + long start; + long end = 0; + int count; + + @Override + public int nextDoc() throws IOException { + set = false; + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + + @Override + public int advance(int target) throws IOException { + set = false; + return disi.advance(target); + } + + @Override + public 
boolean advanceExact(int target) throws IOException { + set = false; + return disi.advanceExact(target); + } + + private boolean set() { + if (set == false) { + final int index = disi.index(); + start = addresses.get(index); + end = addresses.get(index + 1L); + count = (int) (end - start); + set = true; + return true; + } + return false; + } + + @Override + public long nextOrd() throws IOException { + if (set()) { + return ords.get(start++); + } else if (start == end) { + return NO_MORE_ORDS; + } else { + return ords.get(start++); + } + } + + @Override + public int docValueCount() { + set(); + return count; + } + }; + } + } + + @Override + public void checkIntegrity() throws IOException { + CodecUtil.checksumEntireFile(data); + } + + @Override + public DocValuesSkipper getSkipper(FieldInfo field) { + return null; + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec index 6e5205d664f2d..0215e9f7ca4ab 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec +++ b/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -6,5 +6,6 @@ # org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.BWCLucene70Codec +org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70Codec org.elasticsearch.xpack.lucene.bwc.codecs.lucene62.Lucene62Codec org.elasticsearch.xpack.lucene.bwc.codecs.lucene60.Lucene60Codec diff --git a/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.DocValuesFormat b/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.DocValuesFormat index 2d46b4bca3d0c..8d24d86982da8 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.DocValuesFormat +++ 
b/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.DocValuesFormat @@ -14,3 +14,4 @@ # limitations under the License. org.elasticsearch.xpack.lucene.bwc.codecs.lucene54.Lucene54DocValuesFormat +org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70DocValuesFormat diff --git a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java index f62ab9fbc4fee..0b72b96b446d4 100644 --- a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java +++ b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java @@ -18,7 +18,7 @@ public class OldCodecsAvailableTests extends ESTestCase { * to the next major Lucene version. */ @UpdateForV9(owner = UpdateForV9.Owner.SEARCH_FOUNDATIONS) - @AwaitsFix(bugUrl = "muted until we add bwc codecs as part of lucene 10 upgrade") + @AwaitsFix(bugUrl = "muted until we add bwc codecs to support 7.x indices in Elasticsearch 9.0") public void testLuceneBWCCodecsAvailable() { assertEquals("Add Lucene BWC codecs for Elasticsearch version 7", 8, Version.CURRENT.major); } diff --git a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/BlockPostingsFormat3Tests.java b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/BlockPostingsFormat3Tests.java index 304f7b0c934fb..59f5e5de1eff7 100644 --- a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/BlockPostingsFormat3Tests.java +++ b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/BlockPostingsFormat3Tests.java @@ -48,7 +48,9 @@ import 
org.apache.lucene.tests.util.TestUtil; import org.apache.lucene.tests.util.automaton.AutomatonTestUtil; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.test.ESTestCase; @@ -187,7 +189,11 @@ public void assertTerms(Terms leftTerms, Terms rightTerms, boolean deep) throws int numIntersections = atLeast(3); for (int i = 0; i < numIntersections; i++) { String re = AutomatonTestUtil.randomRegexp(random()); - CompiledAutomaton automaton = new CompiledAutomaton(new RegExp(re, RegExp.NONE).toAutomaton()); + Automaton determinized = Operations.determinize( + new RegExp(re, RegExp.NONE).toAutomaton(), + Operations.DEFAULT_DETERMINIZE_WORK_LIMIT + ); + CompiledAutomaton automaton = new CompiledAutomaton(determinized); if (automaton.type == CompiledAutomaton.AUTOMATON_TYPE.NORMAL) { // TODO: test start term too TermsEnum leftIntersection = leftTerms.intersect(automaton, null); diff --git a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesFormatTests.java b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesFormatTests.java index c819dca3ec6ff..1a2aca0d63bde 100644 --- a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesFormatTests.java +++ b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesFormatTests.java @@ -10,12 +10,12 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.tests.index.BaseDocValuesFormatTestCase; +import org.apache.lucene.tests.index.LegacyBaseDocValuesFormatTestCase; import 
org.apache.lucene.tests.util.TestUtil; import org.elasticsearch.test.GraalVMThreadsFilter; @ThreadLeakFilters(filters = { GraalVMThreadsFilter.class }) -public class Lucene54DocValuesFormatTests extends BaseDocValuesFormatTestCase { +public class Lucene54DocValuesFormatTests extends LegacyBaseDocValuesFormatTestCase { private final Codec codec = TestUtil.alwaysDocValuesFormat(new Lucene54DocValuesFormat()); diff --git a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormatTests.java b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormatTests.java new file mode 100644 index 0000000000000..ce645feb854d1 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormatTests.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.tests.index.LegacyBaseDocValuesFormatTestCase; +import org.apache.lucene.tests.util.TestUtil; +import org.elasticsearch.test.GraalVMThreadsFilter; + +@ThreadLeakFilters(filters = { GraalVMThreadsFilter.class }) +public class Lucene70DocValuesFormatTests extends LegacyBaseDocValuesFormatTestCase { + + private final Codec codec = TestUtil.alwaysDocValuesFormat(new Lucene70DocValuesFormat()); + + @Override + protected Codec getCodec() { + return codec; + } +} diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java index 48673d2002170..f447f67b4cdd2 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java @@ -179,7 +179,7 @@ private void searchProfilingEvents( .setQuery(request.getQuery()) .setTrackTotalHits(true) .execute(ActionListener.wrap(searchResponse -> { - long sampleCount = searchResponse.getHits().getTotalHits().value; + long sampleCount = searchResponse.getHits().getTotalHits().value(); EventsIndex resampledIndex = mediumDownsampled.getResampledIndex(request.getSampleSize(), sampleCount); log.debug( "User requested [{}] samples, [{}] samples matched in [{}]. 
Picking [{}]", @@ -220,7 +220,7 @@ private void searchGenericEvents( .setPreference(String.valueOf(request.hashCode())) .setQuery(request.getQuery()) .execute(ActionListener.wrap(searchResponse -> { - long sampleCount = searchResponse.getHits().getTotalHits().value; + long sampleCount = searchResponse.getHits().getTotalHits().value(); int requestedSampleCount = request.getSampleSize(); // random sampler aggregation does not support sampling rates between 0.5 and 1.0 -> clamp to 1.0 if (sampleCount <= requestedSampleCount * 2L) { diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java index 9dd46e778fb9a..dbb4cf4dc6856 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java @@ -180,7 +180,7 @@ private void execute(ClusterState state, ActionListener { - boolean hasData = searchResponse.getHits().getTotalHits().value > 0; + boolean hasData = searchResponse.getHits().getTotalHits().value() > 0; listener.onResponse( new GetStatusAction.Response(pluginEnabled, resourceManagementEnabled, resourcesCreated, anyPre891Data, hasData) ); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/LikePattern.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/LikePattern.java index 8eac03d36371e..e4f5810ac89d3 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/LikePattern.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/LikePattern.java @@ -9,7 +9,6 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.WildcardQuery; import 
org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.xpack.ql.util.StringUtils; @@ -51,8 +50,7 @@ public char escape() { @Override public Automaton createAutomaton() { - Automaton automaton = WildcardQuery.toAutomaton(new Term(null, wildcard)); - return MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return WildcardQuery.toAutomaton(new Term(null, wildcard), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java index 528872ca9b4cf..41ae97ec5e4fd 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ql.expression.predicate.regex; import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import java.util.Objects; @@ -21,7 +22,10 @@ public RLikePattern(String regexpPattern) { @Override public Automaton createAutomaton() { - return new RegExp(regexpPattern).toAutomaton(); + return Operations.determinize( + new RegExp(regexpPattern, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(), + Operations.DEFAULT_DETERMINIZE_WORK_LIMIT + ); } @Override diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/WildcardPattern.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/WildcardPattern.java index fd6bd177e4c60..6703f1aeacbb5 100644 --- 
a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/WildcardPattern.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/WildcardPattern.java @@ -9,7 +9,6 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.xpack.ql.util.StringUtils; @@ -39,8 +38,7 @@ public String pattern() { @Override public Automaton createAutomaton() { - Automaton automaton = WildcardQuery.toAutomaton(new Term(null, wildcard)); - return MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return WildcardQuery.toAutomaton(new Term(null, wildcard), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java index b501967524a6b..29c471296b5d1 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java @@ -218,7 +218,7 @@ public void testMultipleOnlyKnn() { .addFetchField("text0") .setSize(19), response -> { - assertEquals(51, response.getHits().getTotalHits().value); + assertEquals(51, response.getHits().getTotalHits().value()); assertEquals(19, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); @@ -355,7 +355,7 @@ public void testBM25AndKnnWithBucketAggregation() { .setSize(11) .addAggregation(AggregationBuilders.terms("sums").field("int")), response -> { - assertEquals(101, response.getHits().getTotalHits().value); + 
assertEquals(101, response.getHits().getTotalHits().value()); assertEquals(11, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); @@ -483,7 +483,7 @@ public void testBM25AndMultipleKnnWithAggregation() { .addAggregation(AggregationBuilders.terms("sums").field("int")) .setStats("search"), response -> { - assertEquals(51, response.getHits().getTotalHits().value); + assertEquals(51, response.getHits().getTotalHits().value()); assertEquals(19, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java index 7269d9c3e5e7f..ed26aa50ffa62 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java @@ -217,7 +217,7 @@ public void testMultipleOnlyKnn() { .addFetchField("text0") .setSize(19), response -> { - assertEquals(51, response.getHits().getTotalHits().value); + assertEquals(51, response.getHits().getTotalHits().value()); assertEquals(19, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); @@ -356,7 +356,7 @@ public void testBM25AndKnnWithBucketAggregation() { .setSize(11) .addAggregation(AggregationBuilders.terms("sums").field("int")), response -> { - assertEquals(101, response.getHits().getTotalHits().value); + assertEquals(101, response.getHits().getTotalHits().value()); assertEquals(11, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); @@ -486,7 +486,7 @@ public void testBM25AndMultipleKnnWithAggregation() { .addAggregation(AggregationBuilders.terms("sums").field("int")) .setStats("search"), response -> { - assertEquals(51, 
response.getHits().getTotalHits().value); + assertEquals(51, response.getHits().getTotalHits().value()); assertEquals(19, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java index edd5e557aadf0..c5978219d94d3 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java @@ -198,8 +198,8 @@ public void testRRFPagination() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getHits().length, lessThanOrEqualTo(size)); for (int k = 0; k < Math.min(size, resp.getHits().getHits().length); k++) { assertThat(resp.getHits().getAt(k).getId(), equalTo(expectedDocIds.get(k + fDocs_to_fetch))); @@ -249,8 +249,8 @@ public void testRRFWithAggs() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); 
assertThat(resp.getHits().getHits().length, equalTo(1)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); @@ -308,8 +308,8 @@ public void testRRFWithCollapse() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getHits().length, equalTo(4)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_6")); @@ -366,8 +366,8 @@ public void testRRFRetrieverWithCollapseAndAggs() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getHits().length, equalTo(4)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_6")); @@ -441,8 +441,8 @@ public void testMultipleRRFRetrievers() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), 
equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_7")); assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_2")); assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_6")); @@ -493,8 +493,8 @@ public void testRRFExplainWithNamedRetrievers() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getHits().length, equalTo(1)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); assertThat(resp.getHits().getAt(0).getExplanation().isMatch(), equalTo(true)); @@ -564,8 +564,8 @@ public void testRRFExplainWithAnotherNestedRRF() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getHits().length, equalTo(1)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_6")); assertThat(resp.getHits().getAt(0).getExplanation().isMatch(), equalTo(true)); @@ -733,14 +733,14 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws ); assertResponse( client().prepareSearch(INDEX).setSource(new SearchSourceBuilder().retriever(rrf)), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, is(4L)) + searchResponse -> 
assertThat(searchResponse.getHits().getTotalHits().value(), is(4L)) ); assertThat(numAsyncCalls.get(), equalTo(2)); // check that we use the rewritten vector to build the explain query assertResponse( client().prepareSearch(INDEX).setSource(new SearchSourceBuilder().retriever(rrf).explain(true)), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, is(4L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), is(4L)) ); assertThat(numAsyncCalls.get(), equalTo(4)); } diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java index 69c61fe3bca1f..b1358f11bf633 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java @@ -167,8 +167,8 @@ public void testRRFRetrieverWithNestedQuery() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(3L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(3L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_6")); assertThat((double) resp.getHits().getAt(0).getScore(), closeTo(0.1742, 1e-4)); assertThat( diff --git a/x-pack/plugin/search-business-rules/src/internalClusterTest/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderIT.java 
b/x-pack/plugin/search-business-rules/src/internalClusterTest/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderIT.java index 2a17a4a1152cf..8df4e3a8dbea5 100644 --- a/x-pack/plugin/search-business-rules/src/internalClusterTest/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderIT.java +++ b/x-pack/plugin/search-business-rules/src/internalClusterTest/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderIT.java @@ -120,7 +120,7 @@ private void assertPinnedPromotions(PinnedQueryBuilder pqb, LinkedHashSet { - long numHits = response.getHits().getTotalHits().value; + long numHits = response.getHits().getTotalHits().value(); assertThat(numHits, lessThanOrEqualTo((long) numRelevantDocs + pins.size())); // Check pins are sorted by increasing score, (unlike organic, there are no duplicate scores) @@ -193,7 +193,7 @@ public void testExhaustiveScoring() throws Exception { private void assertExhaustiveScoring(PinnedQueryBuilder pqb) { assertResponse(prepareSearch().setQuery(pqb).setTrackTotalHits(true).setSearchType(DFS_QUERY_THEN_FETCH), response -> { - long numHits = response.getHits().getTotalHits().value; + long numHits = response.getHits().getTotalHits().value(); assertThat(numHits, equalTo(2L)); }); } diff --git a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreQuery.java b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreQuery.java index d9e65c385c610..2370a3dee6d03 100644 --- a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreQuery.java +++ b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreQuery.java @@ -79,12 +79,10 @@ public Query rewrite(IndexSearcher searcher) throws IOException { */ protected static class CappedBulkScorer extends BulkScorer { final BulkScorer bulkScorer; - final Weight weight; 
final float maxScore; - public CappedBulkScorer(BulkScorer bulkScorer, Weight weight, float maxScore) { + public CappedBulkScorer(BulkScorer bulkScorer, float maxScore) { this.bulkScorer = bulkScorer; - this.weight = weight; this.maxScore = maxScore; } @@ -125,15 +123,6 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo final Weight innerWeight = searcher.createWeight(query, scoreMode, boost); if (scoreMode.needsScores()) { return new CappedScoreWeight(this, innerWeight, maxScore) { - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - final BulkScorer innerScorer = innerWeight.bulkScorer(context); - if (innerScorer == null) { - return null; - } - return new CappedBulkScorer(innerScorer, this, maxScore); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { ScorerSupplier innerScorerSupplier = innerWeight.scorerSupplier(context); @@ -152,7 +141,13 @@ public Scorer get(long leadCost) throws IOException { return innerScorer; } } - return new CappedScorer(innerWeight, innerScorer, maxScore); + return new CappedScorer(innerScorer, maxScore); + } + + @Override + public BulkScorer bulkScorer() throws IOException { + final BulkScorer innerScorer = innerScorerSupplier.bulkScorer(); + return new CappedBulkScorer(innerScorer, maxScore); } @Override @@ -166,15 +161,6 @@ public long cost() { public Matches matches(LeafReaderContext context, int doc) throws IOException { return innerWeight.matches(context, doc); } - - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - ScorerSupplier scorerSupplier = scorerSupplier(context); - if (scorerSupplier == null) { - return null; - } - return scorerSupplier.get(Long.MAX_VALUE); - } }; } else { return innerWeight; diff --git a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreWeight.java 
b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreWeight.java index 6ad3b9ce4ef85..ccc90e8f671a6 100644 --- a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreWeight.java +++ b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreWeight.java @@ -11,6 +11,7 @@ import org.apache.lucene.search.Explanation; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; @@ -36,8 +37,22 @@ public boolean isCacheable(LeafReaderContext ctx) { } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return new CappedScorer(this, innerWeight.scorer(context), maxScore); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier innerScorerSupplier = innerWeight.scorerSupplier(context); + if (innerScorerSupplier == null) { + return null; + } + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + return new CappedScorer(innerScorerSupplier.get(leadCost), maxScore); + } + + @Override + public long cost() { + return innerScorerSupplier.cost(); + } + }; } @Override diff --git a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScorer.java b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScorer.java index 57b2b62b77f6d..67813588ba3be 100644 --- a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScorer.java +++ b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScorer.java @@ -9,15 +9,14 @@ import org.apache.lucene.search.FilterScorer; import 
org.apache.lucene.search.Scorer; -import org.apache.lucene.search.Weight; import java.io.IOException; public class CappedScorer extends FilterScorer { private final float maxScore; - public CappedScorer(Weight weight, Scorer delegate, float maxScore) { - super(delegate, weight); + public CappedScorer(Scorer delegate, float maxScore) { + super(delegate); this.maxScore = maxScore; } diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java index faf41e7e655a8..eab73fbe5ad04 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java @@ -305,7 +305,7 @@ public void testSearchableSnapshotShardsAreSkippedBySearchRequestWithoutQuerying assertThat(newSearchResponse.getSuccessfulShards(), equalTo(totalShards)); assertThat(newSearchResponse.getFailedShards(), equalTo(0)); assertThat(newSearchResponse.getTotalShards(), equalTo(totalShards)); - assertThat(newSearchResponse.getHits().getTotalHits().value, equalTo((long) numDocsWithinRange)); + assertThat(newSearchResponse.getHits().getTotalHits().value(), equalTo((long) numDocsWithinRange)); }); // test with SearchShardsAPI @@ -655,7 +655,7 @@ public void testQueryPhaseIsExecutedInAnAvailableNodeWhenAllShardsCanBeSkipped() assertThat(searchResponse.getFailedShards(), equalTo(indexOutsideSearchRangeShardCount)); assertThat(searchResponse.getSkippedShards(), equalTo(searchableSnapshotShardCount)); assertThat(searchResponse.getTotalShards(), equalTo(totalShards)); - 
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)); }); } @@ -736,7 +736,7 @@ public void testQueryPhaseIsExecutedInAnAvailableNodeWhenAllShardsCanBeSkipped() // a shard that's available in order to construct the search response assertThat(newSearchResponse.getSkippedShards(), equalTo(totalShards - 1)); assertThat(newSearchResponse.getTotalShards(), equalTo(totalShards)); - assertThat(newSearchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(newSearchResponse.getHits().getTotalHits().value(), equalTo(0L)); }); }); @@ -850,7 +850,7 @@ public void testSearchableSnapshotShardsThatHaveMatchingDataAreNotSkippedOnTheCo SearchResponse response = client().search(request).actionGet(); logger.info( "[TEST DEBUG INFO] Search hits: {} Successful shards: {}, failed shards: {}, skipped shards: {}, total shards: {}", - response.getHits().getTotalHits().value, + response.getHits().getTotalHits().value(), response.getSuccessfulShards(), response.getFailedShards(), response.getSkippedShards(), diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRecoverFromSnapshotIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRecoverFromSnapshotIntegTests.java index 7615723860cff..9888afdd16499 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRecoverFromSnapshotIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRecoverFromSnapshotIntegTests.java @@ -79,7 +79,7 @@ public void testSearchableSnapshotRelocationDoNotUseSnapshotBasedRecoveries() th ensureGreen(restoredIndexName); - 
assertHitCount(prepareSearch(restoredIndexName).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(restoredIndexName).setTrackTotalHits(true), totalHits.value()); mockLog.assertAllExpectationsMatched(); } diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRepositoryIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRepositoryIntegTests.java index a3da932398fb1..1e76477378da2 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRepositoryIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRepositoryIntegTests.java @@ -66,7 +66,7 @@ public void testRepositoryUsedBySearchableSnapshotCanBeUpdatedButNotUnregistered Storage storage = randomFrom(Storage.values()); String restoredIndexName = (storage == Storage.FULL_COPY ? "fully-mounted-" : "partially-mounted-") + indexName + '-' + i; mountSnapshot(repositoryName, snapshotName, indexName, restoredIndexName, Settings.EMPTY, storage); - assertHitCount(prepareSearch(restoredIndexName).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(restoredIndexName).setTrackTotalHits(true), totalHits.value()); mountedIndices[i] = restoredIndexName; } @@ -183,7 +183,7 @@ public void testMountIndexWithDifferentDeletionOfSnapshot() throws Exception { ? 
equalTo(Boolean.toString(deleteSnapshot)) : nullValue() ); - assertHitCount(prepareSearch(mounted).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(mounted).setTrackTotalHits(true), totalHits.value()); final String mountedAgain = randomValueOtherThan(mounted, () -> randomAlphaOfLength(10).toLowerCase(Locale.ROOT)); final SnapshotRestoreException exception = expectThrows( @@ -208,7 +208,7 @@ public void testMountIndexWithDifferentDeletionOfSnapshot() throws Exception { ? equalTo(Boolean.toString(deleteSnapshot)) : nullValue() ); - assertHitCount(prepareSearch(mountedAgain).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(mountedAgain).setTrackTotalHits(true), totalHits.value()); assertAcked(indicesAdmin().prepareDelete(mountedAgain)); assertAcked(indicesAdmin().prepareDelete(mounted)); @@ -240,7 +240,7 @@ public void testDeletionOfSnapshotSettingCannotBeUpdated() throws Exception { ? equalTo(Boolean.toString(deleteSnapshot)) : nullValue() ); - assertHitCount(prepareSearch(mounted).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(mounted).setTrackTotalHits(true), totalHits.value()); if (randomBoolean()) { assertAcked(indicesAdmin().prepareClose(mounted)); diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java index 40b7e08936fa3..7eaf5d8f060c6 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java +++ 
b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java @@ -179,7 +179,7 @@ public void testCleanUpAfterIndicesAreDeleted() throws Exception { ) .setSize(0), res -> { - final long remainingEntriesInCache = res.getHits().getTotalHits().value; + final long remainingEntriesInCache = res.getHits().getTotalHits().value(); if (indicesToDelete.contains(mountedIndex)) { assertThat(remainingEntriesInCache, equalTo(0L)); } else if (snapshotId.equals(SNAPSHOT_SNAPSHOT_ID_SETTING.get(indexSettings))) { diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java index a21e3e6beabce..21e67212f1f51 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java @@ -549,7 +549,7 @@ public void run() { try (listeners) { executeSearch(new SearchRequest().source(getSearchSourceBuilder().trackTotalHits(true)), (searchResponse, refs) -> { assert total.get() == 0L; - total.set(searchResponse.getHits().getTotalHits().value); + total.set(searchResponse.getHits().getTotalHits().value()); handleSearchResponse(searchResponse, refs); }); } diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java index da08c6b38819b..a7fb5571995b3 100644 --- 
a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SerialMergeScheduler; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; @@ -173,9 +174,10 @@ long getCacheSize(ShardId shardId, SnapshotId snapshotId, Predicate predic final Bits liveDocs = leafReaderContext.reader().getLiveDocs(); final IntPredicate isLiveDoc = liveDocs == null ? i -> true : liveDocs::get; final DocIdSetIterator docIdSetIterator = scorer.iterator(); + StoredFields storedFields = leafReaderContext.reader().storedFields(); while (docIdSetIterator.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { if (isLiveDoc.test(docIdSetIterator.docID())) { - final Document document = leafReaderContext.reader().document(docIdSetIterator.docID()); + final Document document = storedFields.document(docIdSetIterator.docID()); final String cacheFileId = getValue(document, CACHE_ID_FIELD); if (predicate.test(snapshotCacheDir.resolve(cacheFileId))) { long size = buildCacheFileRanges(document).stream().mapToLong(ByteRange::length).sum(); @@ -423,9 +425,10 @@ static Map loadDocuments(Path directoryPath) throws IOExceptio for (LeafReaderContext leafReaderContext : indexReader.leaves()) { final LeafReader leafReader = leafReaderContext.reader(); final Bits liveDocs = leafReader.getLiveDocs(); + final StoredFields storedFields = leafReader.storedFields(); for (int i = 0; i < leafReader.maxDoc(); i++) { if (liveDocs == null || liveDocs.get(i)) { - final Document document = leafReader.document(i); + final Document document = storedFields.document(i); 
logger.trace("loading document [{}]", document); documents.put(getValue(document, CACHE_ID_FIELD), document); } diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInput.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInput.java index 81cf205c13dd2..4711043fff281 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInput.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInput.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.ReadAdvice; import org.elasticsearch.blobcache.BlobCacheUtils; import org.elasticsearch.blobcache.common.ByteRange; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot.FileInfo; @@ -35,7 +36,7 @@ public class CachedBlobContainerIndexInput extends MetadataCachingIndexInput { * a complete part of the {@link #fileInfo} at once in the cache and should not be * used for anything else than what the {@link #prefetchPart(int, Supplier)} method does. 
*/ - public static final IOContext CACHE_WARMING_CONTEXT = new IOContext(); + public static final IOContext CACHE_WARMING_CONTEXT = new IOContext(IOContext.Context.DEFAULT, null, null, ReadAdvice.NORMAL); private static final Logger logger = LogManager.getLogger(CachedBlobContainerIndexInput.class); diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java index 41121453e41a4..4ee2bf7e65633 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java @@ -348,8 +348,8 @@ public static Tuple randomChecksumBytes(byte[] bytes) throws IOE * uses a different buffer size for them. 
*/ public static IOContext randomIOContext() { - final IOContext ioContext = randomFrom(IOContext.DEFAULT, IOContext.READ, IOContext.READONCE); - assert ioContext.context != IOContext.Context.MERGE; + final IOContext ioContext = randomFrom(IOContext.DEFAULT, IOContext.READONCE); + assert ioContext.context() != IOContext.Context.MERGE; return ioContext; } } diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/InMemoryNoOpCommitDirectoryTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/InMemoryNoOpCommitDirectoryTests.java index c97d6cb4cab08..eab6f1a629f36 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/InMemoryNoOpCommitDirectoryTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/InMemoryNoOpCommitDirectoryTests.java @@ -179,7 +179,7 @@ public void testSupportsNoOpCommits() throws IOException { final TopDocs topDocs = newSearcher(directoryReader).search(new MatchAllDocsQuery(), 1); assertThat(topDocs.totalHits, equalTo(new TotalHits(1L, TotalHits.Relation.EQUAL_TO))); assertThat(topDocs.scoreDocs.length, equalTo(1)); - assertThat(directoryReader.document(topDocs.scoreDocs[0].doc).getField("foo").stringValue(), equalTo("bar")); + assertThat(directoryReader.storedFields().document(topDocs.scoreDocs[0].doc).getField("foo").stringValue(), equalTo("bar")); } try (IndexWriter indexWriter = new IndexWriter(inMemoryNoOpCommitDirectory, new IndexWriterConfig())) { @@ -226,7 +226,7 @@ public void testSupportsDeletes() throws IOException { final TopDocs topDocs = newSearcher(directoryReader).search(new MatchAllDocsQuery(), 1); assertThat(topDocs.totalHits, equalTo(new TotalHits(1L, TotalHits.Relation.EQUAL_TO))); assertThat(topDocs.scoreDocs.length, equalTo(1)); - 
assertThat(directoryReader.document(topDocs.scoreDocs[0].doc).getField("foo").stringValue(), equalTo("bar")); + assertThat(directoryReader.storedFields().document(topDocs.scoreDocs[0].doc).getField("foo").stringValue(), equalTo("bar")); } assertEquals(1, DirectoryReader.listCommits(inMemoryNoOpCommitDirectory).size()); diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java index e65c4a60f89d5..98df96eca7772 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java @@ -401,9 +401,9 @@ public void testChecksumBlobContainerIndexInput() throws Exception { false, // no prewarming in this test because we want to ensure that files are accessed on purpose (directory, snapshotDirectory) -> { for (String fileName : randomSubsetOf(Arrays.asList(snapshotDirectory.listAll()))) { - final long checksum; - try (IndexInput input = directory.openInput(fileName, Store.READONCE_CHECKSUM)) { - checksum = CodecUtil.checksumEntireFile(input); + final long expectedChecksum; + try (IndexInput input = directory.openInput(fileName, IOContext.READONCE)) { + expectedChecksum = CodecUtil.checksumEntireFile(input); } final long snapshotChecksum; @@ -418,9 +418,9 @@ public void testChecksumBlobContainerIndexInput() throws Exception { } assertThat( - "Expected checksum [" + checksum + "] but got [" + snapshotChecksum + ']', + "Expected checksum [" + expectedChecksum + "] but got [" + snapshotChecksum + ']', snapshotChecksum, - equalTo(checksum) + equalTo(expectedChecksum) ); assertThat( "File [" + fileName + "] should have been read from 
heap", diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java index 6ffa09dc1f265..6d9110b564862 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java @@ -224,7 +224,7 @@ protected void assertSearchResponseContainsEmptyResult(Response response) { assertOK(response); SearchResponse searchResponse = SearchResponseUtils.responseAsSearchResponse(response); try { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)); } finally { searchResponse.decRef(); } diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java index 767452e6fcae7..4b994ce82d92f 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java @@ -282,7 +282,7 @@ private void verifyReplicatedDocuments(long numberOfDocs, String... 
indices) thr searchResponse = SearchResponseUtils.parseSearchResponse(parser); } try { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(numberOfDocs)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(numberOfDocs)); assertThat( Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toUnmodifiableSet()), equalTo(Set.of(indices)) diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java index d5e77c1694640..1602a097b1b08 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java @@ -362,7 +362,7 @@ private void verifyReplicatedDocuments(long numberOfDocs, String... 
indices) thr assertOK(response); final SearchResponse searchResponse = SearchResponseUtils.parseSearchResponse(responseAsParser(response)); try { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(numberOfDocs)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(numberOfDocs)); assertThat( Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toUnmodifiableSet()), equalTo(Set.of(indices)) diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityMutualTlsIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityMutualTlsIT.java index 8b18359fb8310..1345e275fab17 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityMutualTlsIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityMutualTlsIT.java @@ -119,7 +119,7 @@ public void testCrossClusterSearch() throws Exception { responseAsParser(performRequestWithRemoteMetricUser(metricSearchRequest)) ); try { - assertThat(metricSearchResponse.getHits().getTotalHits().value, equalTo(4L)); + assertThat(metricSearchResponse.getHits().getTotalHits().value(), equalTo(4L)); assertThat( Arrays.stream(metricSearchResponse.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toSet()), containsInAnyOrder("shared-metrics") diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java index 69331fa448113..4cbd1cab21af9 100644 --- 
a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java @@ -419,7 +419,7 @@ public void testCrossClusterSearch() throws Exception { responseAsParser(performRequestWithRemoteMetricUser(metricSearchRequest)) ); try { - assertThat(metricSearchResponse.getHits().getTotalHits().value, equalTo(4L)); + assertThat(metricSearchResponse.getHits().getTotalHits().value(), equalTo(4L)); assertThat( Arrays.stream(metricSearchResponse.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toSet()), containsInAnyOrder("shared-metrics") diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecuritySpecialUserIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecuritySpecialUserIT.java index 505b82b39b960..53c622898476a 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecuritySpecialUserIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecuritySpecialUserIT.java @@ -230,7 +230,7 @@ public void testAnonymousUserFromQueryClusterWorks() throws Exception { Arrays.stream(searchResponse5.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toList()), containsInAnyOrder(".security-7") ); - assertThat(searchResponse5.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(searchResponse5.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); } finally { searchResponse5.decRef(); } diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTopologyRestIT.java 
b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTopologyRestIT.java index 3871029b3b44b..6fa3ef1b4ef63 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTopologyRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTopologyRestIT.java @@ -134,7 +134,7 @@ public void testCrossClusterScrollWithSniffModeWhenSomeRemoteNodesAreNotDirectly final Request scrollRequest = new Request("GET", "/_search/scroll"); final String scrollId; try { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(6L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(6L)); assertThat(Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getIndex).toList(), contains("shared-metrics")); documentFieldValues.add(searchResponse.getHits().getHits()[0].getSourceAsMap().get("name")); scrollId = searchResponse.getScrollId(); @@ -153,7 +153,7 @@ public void testCrossClusterScrollWithSniffModeWhenSomeRemoteNodesAreNotDirectly responseAsParser(performRequestWithRemoteMetricUser(scrollRequest)) ); try { - assertThat(scrollResponse.getHits().getTotalHits().value, equalTo(6L)); + assertThat(scrollResponse.getHits().getTotalHits().value(), equalTo(6L)); assertThat( Arrays.stream(scrollResponse.getHits().getHits()).map(SearchHit::getIndex).toList(), contains("shared-metrics") diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java index b1a76a4559812..9a1d653132d2d 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java +++ 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java @@ -81,7 +81,7 @@ public void testDateMathExpressionsCanBeAuthorized() throws Exception { assertResponse( client.prepareMultiSearch().add(client.prepareSearch(expression).setQuery(QueryBuilders.matchAllQuery()).request()), - multiSearchResponse -> assertThat(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)) + multiSearchResponse -> assertThat(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)) ); UpdateResponse updateResponse = client.prepareUpdate(expression, response.getId()) diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java index b0572b265a45b..a5f827c2a4b53 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java @@ -437,7 +437,7 @@ private void assertSearchResponse(SearchRequestBuilder requestBuilder, Set assertResponse(prepareSearch("alias" + role), searchResponse2 -> { - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(searchResponse2.getHits().getTotalHits().value)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(searchResponse2.getHits().getTotalHits().value())); for (int hitI = 0; hitI < searchResponse1.getHits().getHits().length; hitI++) { assertThat(searchResponse1.getHits().getAt(hitI).getId(), equalTo(searchResponse2.getHits().getAt(hitI).getId())); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java index c0866fa7ea694..87ca7d279c709 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java @@ -474,13 +474,13 @@ public void testMSearch() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); assertFalse(response.getResponses()[1].isFailure()); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); @@ -495,13 +495,13 @@ public void testMSearch() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + 
assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(2)); assertFalse(response.getResponses()[1].isFailure()); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(2)); @@ -522,7 +522,7 @@ public void testMSearch() throws Exception { ), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(2L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(2L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); @@ -531,7 +531,7 @@ public void testMSearch() throws Exception { assertThat(response.getResponses()[0].getResponse().getHits().getAt(1).getSourceAsMap().get("id"), is(2)); assertFalse(response.getResponses()[1].isFailure()); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(2L)); + 
assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(2L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); @@ -898,7 +898,7 @@ public void testKnnSearch() throws Exception { .addFetchField("field1") .setSize(10), response -> { - assertEquals(5, response.getHits().getTotalHits().value); + assertEquals(5, response.getHits().getTotalHits().value()); assertEquals(5, response.getHits().getHits().length); for (SearchHit hit : response.getHits().getHits()) { assertNotNull(hit.field("field1")); @@ -914,7 +914,7 @@ public void testKnnSearch() throws Exception { .addFetchField("field2") .setSize(10), response -> { - assertEquals(5, response.getHits().getTotalHits().value); + assertEquals(5, response.getHits().getTotalHits().value()); assertEquals(5, response.getHits().getHits().length); for (SearchHit hit : response.getHits().getHits()) { assertNotNull(hit.field("field2")); @@ -929,7 +929,7 @@ public void testKnnSearch() throws Exception { .setQuery(query) .setSize(10), response -> { - assertEquals(10, response.getHits().getTotalHits().value); + assertEquals(10, response.getHits().getTotalHits().value()); assertEquals(10, response.getHits().getHits().length); } ); @@ -1265,7 +1265,7 @@ public void testScroll() throws Exception { .get(); do { assertNoFailures(response); - assertThat(response.getHits().getTotalHits().value, is((long) numVisible)); + assertThat(response.getHits().getTotalHits().value(), is((long) numVisible)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); @@ -1325,7 +1325,7 @@ public void testReaderId() throws Exception { 
.setQuery(termQuery("field1", "value1")) .get(); assertNoFailures(response); - assertThat(response.getHits().getTotalHits().value, is((long) numVisible)); + assertThat(response.getHits().getTotalHits().value(), is((long) numVisible)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java index 34eecd57b53d5..01020a428c318 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java @@ -208,7 +208,7 @@ public void testDuel() throws Exception { prepareSearch("test").addSort("id", SortOrder.ASC) .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("field1", "value"))), expected -> { - assertThat(actual.getHits().getTotalHits().value, equalTo(expected.getHits().getTotalHits().value)); + assertThat(actual.getHits().getTotalHits().value(), equalTo(expected.getHits().getTotalHits().value())); assertThat(actual.getHits().getHits().length, equalTo(expected.getHits().getHits().length)); for (int i = 0; i < actual.getHits().getHits().length; i++) { assertThat(actual.getHits().getAt(i).getId(), equalTo(expected.getHits().getAt(i).getId())); @@ -231,7 +231,7 @@ public void testDuel() throws Exception { prepareSearch("test").addSort("id", SortOrder.ASC) .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("field2", "value"))), expected -> { - assertThat(actual.getHits().getTotalHits().value, equalTo(expected.getHits().getTotalHits().value)); + assertThat(actual.getHits().getTotalHits().value(), 
equalTo(expected.getHits().getTotalHits().value())); assertThat(actual.getHits().getHits().length, equalTo(expected.getHits().getHits().length)); for (int i = 0; i < actual.getHits().getHits().length; i++) { assertThat(actual.getHits().getAt(i).getId(), equalTo(expected.getHits().getAt(i).getId())); @@ -254,7 +254,7 @@ public void testDuel() throws Exception { prepareSearch("test").addSort("id", SortOrder.ASC) .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("field3", "value"))), expected -> { - assertThat(actual.getHits().getTotalHits().value, equalTo(expected.getHits().getTotalHits().value)); + assertThat(actual.getHits().getTotalHits().value(), equalTo(expected.getHits().getTotalHits().value())); assertThat(actual.getHits().getHits().length, equalTo(expected.getHits().getHits().length)); for (int i = 0; i < actual.getHits().getHits().length; i++) { assertThat(actual.getHits().getAt(i).getId(), equalTo(expected.getHits().getAt(i).getId())); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java index bffa53b1f4da6..66c8c0a5b1b52 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java @@ -956,10 +956,10 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); 
assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); } @@ -975,10 +975,10 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); } @@ -993,11 +993,11 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); 
assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1013,9 +1013,9 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(0)); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(0)); } ); @@ -1029,12 +1029,12 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + 
assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1051,12 +1051,12 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); - 
assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1073,12 +1073,12 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1095,11 +1095,11 @@ public void testMSearchApi() throws Exception { 
.add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1132,7 +1132,7 @@ public void testScroll() throws Exception { .get(); do { - assertThat(response.getHits().getTotalHits().value, is((long) numDocs)); + assertThat(response.getHits().getTotalHits().value(), is((long) numDocs)); assertThat(response.getHits().getHits().length, is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); @@ -1191,7 +1191,7 @@ public void testPointInTimeId() throws Exception { .setQuery(constantScoreQuery(termQuery("field1", "value1"))) .setFetchSource(true), response -> { - assertThat(response.getHits().getTotalHits().value, is((long) numDocs)); + assertThat(response.getHits().getTotalHits().value(), is((long) numDocs)); 
assertThat(response.getHits().getHits().length, is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); @@ -1281,7 +1281,7 @@ public void testScrollWithQueryCache() { .setSize(1) .setFetchSource(true) .get(); - assertThat(user2SearchResponse.getHits().getTotalHits().value, is((long) 0)); + assertThat(user2SearchResponse.getHits().getTotalHits().value(), is((long) 0)); assertThat(user2SearchResponse.getHits().getHits().length, is(0)); } else { user2SearchResponse.decRef(); @@ -1289,7 +1289,7 @@ public void testScrollWithQueryCache() { user2SearchResponse = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD)) ).prepareSearchScroll(user2SearchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(10L)).get(); - assertThat(user2SearchResponse.getHits().getTotalHits().value, is((long) 0)); + assertThat(user2SearchResponse.getHits().getTotalHits().value(), is((long) 0)); assertThat(user2SearchResponse.getHits().getHits().length, is(0)); if (randomBoolean()) { // maybe reuse the scroll even if empty @@ -1309,7 +1309,7 @@ public void testScrollWithQueryCache() { .setSize(1) .setFetchSource(true) .get(); - assertThat(user1SearchResponse.getHits().getTotalHits().value, is((long) numDocs)); + assertThat(user1SearchResponse.getHits().getTotalHits().value(), is((long) numDocs)); assertThat(user1SearchResponse.getHits().getHits().length, is(1)); assertThat(user1SearchResponse.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(user1SearchResponse.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); @@ -1319,7 +1319,7 @@ public void testScrollWithQueryCache() { user1SearchResponse = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) 
).prepareSearchScroll(user1SearchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(10L)).get(); - assertThat(user1SearchResponse.getHits().getTotalHits().value, is((long) numDocs)); + assertThat(user1SearchResponse.getHits().getTotalHits().value(), is((long) numDocs)); if (scrolledDocsUser1 < numDocs) { assertThat(user1SearchResponse.getHits().getHits().length, is(1)); assertThat(user1SearchResponse.getHits().getAt(0).getSourceAsMap().size(), is(1)); @@ -2042,7 +2042,7 @@ private void verifyParentChild() { .setQuery(hasChildQuery("child", termQuery("field1", "yellow"), ScoreMode.None)), searchResponse -> { assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); } ); @@ -2061,7 +2061,7 @@ private void verifyParentChild() { .setQuery(hasChildQuery("child", termQuery("alias", "yellow"), ScoreMode.None)), searchResponse -> { assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); } ); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java index d4375d15e6a6d..7d99d5817bdc0 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java @@ -103,20 +103,20 @@ public void testSearchAndMSearch() throws Exception { indexRandom(true, prepareIndex(index).setSource(field, "bar")); 
assertResponse(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), response -> { - final long hits = response.getHits().getTotalHits().value; + final long hits = response.getHits().getTotalHits().value(); assertThat(hits, greaterThan(0L)); assertResponse( client().filterWithHeader( singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)) ).prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), - response2 -> assertEquals(response2.getHits().getTotalHits().value, hits) + response2 -> assertEquals(response2.getHits().getTotalHits().value(), hits) ); final long multiHits; MultiSearchResponse multiSearchResponse = client().prepareMultiSearch() .add(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery())) .get(); try { - multiHits = multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value; + multiHits = multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value(); assertThat(hits, greaterThan(0L)); } finally { multiSearchResponse.decRef(); @@ -125,7 +125,7 @@ public void testSearchAndMSearch() throws Exception { singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)) ).prepareMultiSearch().add(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery())).get(); try { - assertEquals(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value, multiHits); + assertEquals(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value(), multiHits); } finally { multiSearchResponse.decRef(); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java index af54f71779f08..6f8ea0f103a56 100644 --- 
a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java @@ -312,7 +312,7 @@ public void testMultiNamesWorkCorrectly() { assertResponse( userAClient.prepareSearch("alias1").setSize(0), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)) ); final ElasticsearchSecurityException e1 = expectThrows( diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java index 78146e58e91e2..e178f4bf3eb6c 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java @@ -343,7 +343,7 @@ private void testAddUserAndRoleThenAuth(String username, String roleName) { String token = basicAuthHeaderValue(username, new SecureString("s3krit-password")); assertResponse( client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx"), - searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value) + searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value()) ); assertClusterHealthOnlyAuthorizesWhenAnonymousRoleActive(token); @@ -366,7 +366,7 @@ public void testUpdatingUserAndAuthentication() throws Exception { String token = basicAuthHeaderValue("joe", new SecureString("s3krit-password")); assertResponse( client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx"), - 
searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value) + searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value()) ); preparePutUser("joe", "s3krit-password2", hasher, SecuritySettingsSource.TEST_ROLE).get(); @@ -382,7 +382,7 @@ public void testUpdatingUserAndAuthentication() throws Exception { token = basicAuthHeaderValue("joe", new SecureString("s3krit-password2")); assertResponse( client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx"), - searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value) + searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value()) ); } @@ -403,7 +403,7 @@ public void testCreateDeleteAuthenticate() { String token = basicAuthHeaderValue("joe", new SecureString("s3krit-password")); assertResponse( client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx"), - searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value) + searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value()) ); DeleteUserResponse response = new DeleteUserRequestBuilder(client()).username("joe").get(); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java index f34983f7f125c..0acc281dd8440 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java @@ -213,7 +213,7 @@ public void testMultiSearchUnauthorizedIndex() { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - 
assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertTrue(multiSearchResponse.getResponses()[1].isFailure()); Exception exception = multiSearchResponse.getResponses()[1].getFailure(); @@ -231,7 +231,7 @@ public void testMultiSearchUnauthorizedIndex() { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertFalse(multiSearchResponse.getResponses()[1].isFailure()); assertNoSearchHits(multiSearchResponse.getResponses()[1].getResponse()); @@ -249,7 +249,7 @@ public void testMultiSearchMissingUnauthorizedIndex() { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertTrue(multiSearchResponse.getResponses()[1].isFailure()); Exception exception = multiSearchResponse.getResponses()[1].getFailure(); @@ -267,7 +267,7 @@ public void testMultiSearchMissingUnauthorizedIndex() { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, 
greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertFalse(multiSearchResponse.getResponses()[1].isFailure()); assertNoSearchHits(multiSearchResponse.getResponses()[1].getResponse()); @@ -317,7 +317,7 @@ public void testMultiSearchWildcard() { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertNoSearchHits(multiSearchResponse.getResponses()[1].getResponse()); } @@ -336,7 +336,7 @@ public void testMultiSearchWildcard() { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertTrue(multiSearchResponse.getResponses()[1].isFailure()); Exception exception = multiSearchResponse.getResponses()[1].getFailure(); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java index eb7c5e5276c15..a4cadeb953e14 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java +++ 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java @@ -48,13 +48,13 @@ public void testScrollIsPerUser() throws Exception { indexRandom(true, docs); assertResponse(prepareSearch("foo").setScroll(TimeValue.timeValueSeconds(5L)).setQuery(matchAllQuery()).setSize(1), response -> { - assertEquals(numDocs, response.getHits().getTotalHits().value); + assertEquals(numDocs, response.getHits().getTotalHits().value()); assertEquals(1, response.getHits().getHits().length); if (randomBoolean()) { assertResponse( client().prepareSearchScroll(response.getScrollId()).setScroll(TimeValue.timeValueSeconds(5L)), response2 -> { - assertEquals(numDocs, response2.getHits().getTotalHits().value); + assertEquals(numDocs, response2.getHits().getTotalHits().value()); assertEquals(1, response2.getHits().getHits().length); } ); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java index 4b8fbfd41acdf..437fb76351176 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java @@ -856,7 +856,7 @@ private SuggestProfilesResponse.ProfileHit[] doSuggest(Set dataKeys, Str final SuggestProfilesRequest suggestProfilesRequest = new SuggestProfilesRequest(dataKeys, name, 10, hint); final SuggestProfilesResponse suggestProfilesResponse = client().execute(SuggestProfilesAction.INSTANCE, suggestProfilesRequest) .actionGet(); - assertThat(suggestProfilesResponse.getTotalHits().relation, is(TotalHits.Relation.EQUAL_TO)); + assertThat(suggestProfilesResponse.getTotalHits().relation(), is(TotalHits.Relation.EQUAL_TO)); return suggestProfilesResponse.getProfileHits(); 
} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index c4cf3127b897c..03558e72fdca3 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -2254,7 +2254,7 @@ public void queryApiKeys(SearchRequest searchRequest, boolean withLimitedBy, Act TransportSearchAction.TYPE, searchRequest, ActionListener.wrap(searchResponse -> { - long total = searchResponse.getHits().getTotalHits().value; + long total = searchResponse.getHits().getTotalHits().value(); if (total == 0) { logger.debug("No api keys found for query [{}]", searchRequest.source().query()); listener.onResponse(QueryApiKeysResult.EMPTY); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java index d866bd2a9d229..74a9aa7291ba4 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java @@ -179,7 +179,7 @@ public void queryUsers(SearchRequest searchRequest, ActionListener { - final long total = searchResponse.getHits().getTotalHits().value; + final long total = searchResponse.getHits().getTotalHits().value(); if (total == 0) { logger.debug("No users found for query [{}]", searchRequest.source().query()); listener.onResponse(QueryUserResults.EMPTY); @@ -214,7 +214,7 @@ void getUserCount(final ActionListener listener) { .setSize(0) .setTrackTotalHits(true) .request(), - listener.safeMap(response -> response.getHits().getTotalHits().value), + listener.safeMap(response -> 
response.getHits().getTotalHits().value()), client::search ) ); @@ -706,7 +706,7 @@ void getAllReservedUserInfo(ActionListener> listen @Override public void onResponse(SearchResponse searchResponse) { Map userInfos = new HashMap<>(); - assert searchResponse.getHits().getTotalHits().value <= 10 + assert searchResponse.getHits().getTotalHits().value() <= 10 : "there are more than 10 reserved users we need to change this to retrieve them all!"; for (SearchHit searchHit : searchResponse.getHits().getHits()) { Map sourceMap = searchHit.getSourceAsMap(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java index 1c773a6e3963f..fa6187798da25 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java @@ -85,6 +85,7 @@ import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.NamedClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.Privilege; +import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.security.support.StringMatcher; import org.elasticsearch.xpack.core.sql.SqlAsyncActionNames; import org.elasticsearch.xpack.security.action.user.TransportChangePasswordAction; @@ -550,7 +551,7 @@ public void validateIndexPermissionsAreSubset( Automaton existingPermissions = permissionMap.computeIfAbsent(entry.getKey(), role::allowedActionsMatcher); for (String alias : entry.getValue()) { Automaton newNamePermissions = permissionMap.computeIfAbsent(alias, role::allowedActionsMatcher); - if (Operations.subsetOf(newNamePermissions, existingPermissions) == false) { + if (Automatons.subsetOf(newNamePermissions, existingPermissions) == 
false) { listener.onResponse(AuthorizationResult.deny()); return; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/DeprecationRoleDescriptorConsumer.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/DeprecationRoleDescriptorConsumer.java index 40cb3ea4d9864..8ff535f3f6231 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/DeprecationRoleDescriptorConsumer.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/DeprecationRoleDescriptorConsumer.java @@ -10,7 +10,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.logging.DeprecationCategory; @@ -21,6 +20,7 @@ import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; +import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.security.support.StringMatcher; import java.time.ZoneOffset; @@ -195,7 +195,7 @@ private void logDeprecatedPermission(RoleDescriptor roleDescriptor) { index.getName(), i -> IndexPrivilege.get(indexPrivileges).getAutomaton() ); - if (false == Operations.subsetOf(indexPrivilegeAutomaton, aliasPrivilegeAutomaton)) { + if (false == Automatons.subsetOf(indexPrivilegeAutomaton, aliasPrivilegeAutomaton)) { inferiorIndexNames.add(index.getName()); } } else { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index 7c242fb07b681..4ae17a679d205 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -279,7 +279,7 @@ public void queryRoleDescriptors(SearchSourceBuilder searchSourceBuilder, Action TransportSearchAction.TYPE, searchRequest, ActionListener.wrap(searchResponse -> { - long total = searchResponse.getHits().getTotalHits().value; + long total = searchResponse.getHits().getTotalHits().value(); if (total == 0) { logger.debug("No roles found for query [{}]", searchRequest.source().query()); listener.onResponse(QueryRoleResult.EMPTY); @@ -731,28 +731,28 @@ public void onResponse(MultiSearchResponse items) { if (responses[0].isFailure()) { usageStats.put("size", 0); } else { - usageStats.put("size", responses[0].getResponse().getHits().getTotalHits().value); + usageStats.put("size", responses[0].getResponse().getHits().getTotalHits().value()); } if (responses[1].isFailure()) { usageStats.put("fls", false); } else { - usageStats.put("fls", responses[1].getResponse().getHits().getTotalHits().value > 0L); + usageStats.put("fls", responses[1].getResponse().getHits().getTotalHits().value() > 0L); } if (responses[2].isFailure()) { usageStats.put("dls", false); } else { - usageStats.put("dls", responses[2].getResponse().getHits().getTotalHits().value > 0L); + usageStats.put("dls", responses[2].getResponse().getHits().getTotalHits().value() > 0L); } if (responses[3].isFailure()) { usageStats.put("remote_indices", 0); } else { - usageStats.put("remote_indices", responses[3].getResponse().getHits().getTotalHits().value); + usageStats.put("remote_indices", responses[3].getResponse().getHits().getTotalHits().value()); } if (responses[4].isFailure()) { usageStats.put("remote_cluster", 0); } else { - 
usageStats.put("remote_cluster", responses[4].getResponse().getHits().getTotalHits().value); + usageStats.put("remote_cluster", responses[4].getResponse().getHits().getTotalHits().value()); } delegate.onResponse(usageStats); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java index b347ceb833f64..b347c278aae08 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java @@ -414,19 +414,19 @@ public void usageStats(ActionListener> listener) { logger.debug("error on counting total profiles", items[0].getFailure()); usage.put("total", 0L); } else { - usage.put("total", items[0].getResponse().getHits().getTotalHits().value); + usage.put("total", items[0].getResponse().getHits().getTotalHits().value()); } if (items[1].isFailure()) { logger.debug("error on counting enabled profiles", items[0].getFailure()); usage.put("enabled", 0L); } else { - usage.put("enabled", items[1].getResponse().getHits().getTotalHits().value); + usage.put("enabled", items[1].getResponse().getHits().getTotalHits().value()); } if (items[2].isFailure()) { logger.debug("error on counting recent profiles", items[0].getFailure()); usage.put("recent", 0L); } else { - usage.put("recent", items[2].getResponse().getHits().getTotalHits().value); + usage.put("recent", items[2].getResponse().getHits().getTotalHits().value()); } listener.onResponse(usage); }, listener::onFailure) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java index 5ec76a8dc3d01..5cd8cba763d3d 100644 --- 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java @@ -77,8 +77,8 @@ public void migrate(SecurityIndexManager indexManager, Client client, ActionList client.search(countRequest, ActionListener.wrap(response -> { // If there are no roles, skip migration - if (response.getHits().getTotalHits().value > 0) { - logger.info("Preparing to migrate [" + response.getHits().getTotalHits().value + "] roles"); + if (response.getHits().getTotalHits().value() > 0) { + logger.info("Preparing to migrate [" + response.getHits().getTotalHits().value() + "] roles"); updateRolesByQuery(indexManager, client, filterQuery, listener); } else { listener.onResponse(null); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java index 0a2c40d2a257a..a4d9dacd1a63d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; -import org.apache.lucene.util.automaton.MinimizationOperations; +import org.apache.lucene.tests.util.automaton.AutomatonTestUtil; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -137,8 +137,8 @@ public void testParseFile() throws Exception { assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("idx3")); assertThat(group.privilege(), notNullValue()); - assertTrue(Operations.subsetOf(IndexPrivilege.READ.getAutomaton(), group.privilege().getAutomaton())); - 
assertTrue(Operations.subsetOf(IndexPrivilege.WRITE.getAutomaton(), group.privilege().getAutomaton())); + assertTrue(AutomatonTestUtil.subsetOf(IndexPrivilege.READ.getAutomaton(), group.privilege().getAutomaton())); + assertTrue(AutomatonTestUtil.subsetOf(IndexPrivilege.WRITE.getAutomaton(), group.privilege().getAutomaton())); descriptor = roles.get("role1.ab"); assertNotNull(descriptor); @@ -181,9 +181,9 @@ public void testParseFile() throws Exception { assertThat(group.indices()[0], equalTo("/.*_.*/")); assertThat(group.privilege(), notNullValue()); assertTrue( - Operations.sameLanguage( + AutomatonTestUtil.sameLanguage( group.privilege().getAutomaton(), - MinimizationOperations.minimize( + Operations.determinize( Operations.union(IndexPrivilege.READ.getAutomaton(), IndexPrivilege.WRITE.getAutomaton()), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT ) @@ -236,7 +236,7 @@ public void testParseFile() throws Exception { assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("field_idx")); assertThat(group.privilege(), notNullValue()); - assertTrue(Operations.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); + assertTrue(AutomatonTestUtil.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); assertTrue(group.getFieldPermissions().grantsAccessTo("foo")); assertTrue(group.getFieldPermissions().grantsAccessTo("boo")); assertTrue(group.getFieldPermissions().hasFieldLevelSecurity()); @@ -258,7 +258,7 @@ public void testParseFile() throws Exception { assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("query_idx")); assertThat(group.privilege(), notNullValue()); - assertTrue(Operations.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); + assertTrue(AutomatonTestUtil.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); assertFalse(group.getFieldPermissions().hasFieldLevelSecurity()); 
assertThat(group.getQuery(), notNullValue()); @@ -279,7 +279,7 @@ public void testParseFile() throws Exception { assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("query_fields_idx")); assertThat(group.privilege(), notNullValue()); - assertTrue(Operations.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); + assertTrue(AutomatonTestUtil.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); assertTrue(group.getFieldPermissions().grantsAccessTo("foo")); assertTrue(group.getFieldPermissions().grantsAccessTo("boo")); assertTrue(group.getFieldPermissions().hasFieldLevelSecurity()); diff --git a/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java b/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java index d42d45e430627..e5171a7c51650 100644 --- a/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java +++ b/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java @@ -281,7 +281,7 @@ public void testRetentionWhileSnapshotInProgress() throws Exception { completedSnapshotName, Strings.arrayToCommaDelimitedString(resp.getHits().getHits()) ); - assertThat(resp.getHits().getTotalHits().value, equalTo(2L)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(2L)); } ); }); diff --git a/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java b/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java index df8dc54bb7490..405a9926e2e5f 100644 --- 
a/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java +++ b/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java @@ -1595,7 +1595,7 @@ private void assertDocumentsAreEqual(String indexName, int docCount) { int docIdToMatch = randomIntBetween(0, docCount - 1); assertResponse(searchRequestBuilder.setQuery(QueryBuilders.termQuery("field", docIdToMatch)), searchResponse -> { assertThat(searchResponse.getSuccessfulShards(), equalTo(1)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); SearchHit searchHit = searchResponse.getHits().getAt(0); Map source = searchHit.getSourceAsMap(); assertThat(source, is(notNullValue())); @@ -1613,7 +1613,7 @@ private void assertDocumentsAreEqual(String indexName, int docCount) { private void assertSearchResponseContainsAllIndexedDocs(SearchResponse searchResponse, long docCount) { assertThat(searchResponse.getSuccessfulShards(), equalTo(1)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(docCount)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(docCount)); for (int i = 0; i < searchResponse.getHits().getHits().length; i++) { SearchHit searchHit = searchResponse.getHits().getAt(i); Map source = searchHit.getSourceAsMap(); diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java index 3c64d140e2b56..e7b9156d5fb66 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java +++ 
b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java @@ -129,7 +129,7 @@ public void testKnownIssueWithCellLeftOfDatelineTouchingPolygonOnRightOfDateline client().prepareSearch("test").setTrackTotalHits(true).setQuery(queryBuilder), innerResponse -> assertThat( "Bucket " + bucket.getKeyAsString(), - innerResponse.getHits().getTotalHits().value, + innerResponse.getHits().getTotalHits().value(), Matchers.equalTo(bucket.getDocCount()) ) ); @@ -320,7 +320,7 @@ private void assertQuery(List buckets, BiFunction assertThat( "Expected hits at precision " + precision + " for H3 cell " + bucket.getKeyAsString(), - response.getHits().getTotalHits().value, + response.getHits().getTotalHits().value(), Matchers.equalTo(bucket.getDocCount()) ) ); diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java index b4a3a07502abf..b4d7a472591bd 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java @@ -175,7 +175,7 @@ public void testStorePolygonDateLine() throws Exception { indexRandom(true, prepareIndex("test").setId("0").setSource(source, XContentType.JSON)); assertNoFailuresAndResponse(client().prepareSearch("test").setFetchSource(false).addStoredField("shape"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); SearchHit searchHit = response.getHits().getAt(0); assertThat(searchHit.field("shape").getValue(), instanceOf(BytesRef.class)); BytesRef bytesRef = searchHit.field("shape").getValue(); diff --git 
a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java index 1c013aba52261..4f23b6de4c37d 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java @@ -247,7 +247,7 @@ public void testFieldAlias() { assertResponse( client().prepareSearch(INDEX).setQuery(new ShapeQueryBuilder("alias", queryGeometry).relation(ShapeRelation.INTERSECTS)), response -> { - assertTrue(response.getHits().getTotalHits().value > 0); + assertTrue(response.getHits().getTotalHits().value() > 0); } ); } diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryTestCase.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryTestCase.java index 1ac6bf3b6fd31..e26066cd89c50 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryTestCase.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryTestCase.java @@ -112,7 +112,7 @@ public void testIndexPointsFilterRectangle() { client().prepareSearch(defaultIndexName) .setQuery(new ShapeQueryBuilder(defaultFieldName, rectangle).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("1"), equalTo("4"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("1"), equalTo("4"))); @@ -123,7 +123,7 @@ 
public void testIndexPointsFilterRectangle() { assertNoFailuresAndResponse( client().prepareSearch(defaultIndexName).setQuery(new ShapeQueryBuilder(defaultFieldName, rectangle)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("1"), equalTo("4"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("1"), equalTo("4"))); @@ -138,7 +138,7 @@ public void testIndexPointsCircle() { client().prepareSearch(defaultIndexName) .setQuery(new ShapeQueryBuilder(defaultFieldName, circle).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("1"), equalTo("4"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("1"), equalTo("4"))); @@ -154,7 +154,7 @@ public void testIndexPointsPolygon() { .setQuery(new ShapeQueryBuilder(defaultFieldName, polygon).relation(ShapeRelation.INTERSECTS)), response -> { SearchHits searchHits = response.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(2L)); + assertThat(searchHits.getTotalHits().value(), equalTo(2L)); assertThat(searchHits.getAt(0).getId(), anyOf(equalTo("1"), equalTo("4"))); assertThat(searchHits.getAt(1).getId(), anyOf(equalTo("1"), equalTo("4"))); } @@ -175,7 +175,7 @@ public void testIndexPointsMultiPolygon() { client().prepareSearch(defaultIndexName) .setQuery(new ShapeQueryBuilder(defaultFieldName, mp).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); 
assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), not(equalTo("3"))); assertThat(response.getHits().getAt(1).getId(), not(equalTo("3"))); @@ -191,7 +191,7 @@ public void testIndexPointsRectangle() { client().prepareSearch(defaultIndexName) .setQuery(new ShapeQueryBuilder(defaultFieldName, rectangle).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } @@ -232,7 +232,7 @@ public void testIndexPointsIndexedRectangle() throws Exception { .indexedShapePath(indexedShapePath) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java index 66f5597be543e..2713afc149e05 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java @@ -230,8 +230,8 @@ public void testGeoShapeQueryAcrossDateline() throws IOException { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.search(sameShapeQuery, 1).totalHits.value, equalTo(1L)); - assertThat(searcher.search(pointOnDatelineQuery, 1).totalHits.value, equalTo(1L)); + assertThat(searcher.search(sameShapeQuery, 1).totalHits.value(), equalTo(1L)); + 
assertThat(searcher.search(pointOnDatelineQuery, 1).totalHits.value(), equalTo(1L)); } } } @@ -261,8 +261,8 @@ public void testShapeQuery() throws IOException { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.search(sameShapeQuery, 1).totalHits.value, equalTo(1L)); - assertThat(searcher.search(centerPointQuery, 1).totalHits.value, equalTo(1L)); + assertThat(searcher.search(sameShapeQuery, 1).totalHits.value(), equalTo(1L)); + assertThat(searcher.search(centerPointQuery, 1).totalHits.value(), equalTo(1L)); } } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index 06293df4f4559..411a4cda868f0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -223,7 +223,7 @@ protected static void logSearchResponse(SearchResponse response, Logger logger) } var totalHits = response.getHits().getTotalHits(); - var hits = totalHits != null ? "hits " + totalHits.relation + " " + totalHits.value + ", " : ""; + var hits = totalHits != null ? "hits " + totalHits.relation() + " " + totalHits.value() + ", " : ""; logger.trace( "Got search response [{}{} aggregations: [{}], {} failed shards, {} skipped shards, " + "{} successful shards, {} total shards, took {}, timed out [{}]]", @@ -548,7 +548,7 @@ protected List initBucketExtractors(SearchResponse response) { List exts = new ArrayList<>(refs.size()); TotalHits totalHits = response.getHits().getTotalHits(); - ConstantExtractor totalCount = new TotalHitsExtractor(totalHits == null ? -1L : totalHits.value); + ConstantExtractor totalCount = new TotalHitsExtractor(totalHits == null ? 
-1L : totalHits.value()); for (QueryContainer.FieldInfo ref : refs) { exts.add(createExtractor(ref.extraction(), totalCount)); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractor.java index 78976ea7e83c0..cf52a5f5d7126 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractor.java @@ -76,7 +76,7 @@ public Object extract(Bucket bucket) { throw new SqlIllegalArgumentException("Cannot find an aggregation named {}", name); } - if (agg.getHits().getTotalHits() == null || agg.getHits().getTotalHits().value == 0) { + if (agg.getHits().getTotalHits() == null || agg.getHits().getTotalHits().value() == 0) { return null; } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java index 8ee23e38f9ffe..0ba29fef8e06d 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java @@ -123,7 +123,7 @@ protected void masterOperation( Arrays.toString(transformCountSuccess.getShardFailures()) ); } - long totalTransforms = transformCountSuccess.getHits().getTotalHits().value; + long totalTransforms = transformCountSuccess.getHits().getTotalHits().value(); if (totalTransforms == 0) { var usage = new TransformFeatureSetUsage(transformsCountByState, Collections.emptyMap(), new TransformIndexerStats()); listener.onResponse(new XPackUsageFeatureResponse(usage)); diff --git 
a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProvider.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProvider.java index f49d5fc96f3ab..cd06a4cadaa37 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProvider.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProvider.java @@ -84,7 +84,7 @@ public void sourceHasChanged(TransformCheckpoint lastCheckpoint, ActionListener< client, TransportSearchAction.TYPE, searchRequest, - ActionListener.wrap(r -> listener.onResponse(r.getHits().getTotalHits().value > 0L), listener::onFailure) + ActionListener.wrap(r -> listener.onResponse(r.getHits().getTotalHits().value() > 0L), listener::onFailure) ); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java index ffc4b48f9cc30..9d5175922c892 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java @@ -509,7 +509,7 @@ public void expandTransformIds( final ExpandedIdsMatcher requiredMatches = new ExpandedIdsMatcher(idTokens, allowNoMatch); executeAsyncWithOrigin(request, foundConfigsListener.delegateFailureAndWrap((l, searchResponse) -> { - long totalHits = searchResponse.getHits().getTotalHits().value; + long totalHits = searchResponse.getHits().getTotalHits().value(); // important: preserve order Set ids = Sets.newLinkedHashSetWithExpectedSize(searchResponse.getHits().getHits().length); Set configs = 
Sets.newLinkedHashSetWithExpectedSize(searchResponse.getHits().getHits().length); @@ -589,7 +589,7 @@ public void resetTransform(String transformId, ActionListener listener) .trackTotalHitsUpTo(1) ); executeAsyncWithOrigin(TransportSearchAction.TYPE, searchRequest, deleteListener.delegateFailureAndWrap((l, searchResponse) -> { - if (searchResponse.getHits().getTotalHits().value == 0) { + if (searchResponse.getHits().getTotalHits().value() == 0) { listener.onFailure( new ResourceNotFoundException(TransformMessages.getMessage(TransformMessages.REST_UNKNOWN_TRANSFORM, transformId)) ); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java index 23bab56de5ec9..2de810b2b902d 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java @@ -207,7 +207,7 @@ private SearchRequest buildSearchRequestForValidation(String logId, SourceConfig @Override public void getInitialProgressFromResponse(SearchResponse response, ActionListener progressListener) { - progressListener.onResponse(new TransformProgress(response.getHits().getTotalHits().value, 0L, 0L)); + progressListener.onResponse(new TransformProgress(response.getHits().getTotalHits().value(), 0L, 0L)); } } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java index 684e3a085405d..68b31d4f466b6 100644 --- 
a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.transform.transforms.pivot; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.geo.GeoPoint; @@ -560,7 +560,7 @@ static class GeoTileFieldCollector implements FieldCollector { @Override public int getMaxPageSize() { // this collector is limited by indices.query.bool.max_clause_count, default 1024 - return BooleanQuery.getMaxClauseCount(); + return IndexSearcher.getMaxClauseCount(); } @Override diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/WatcherConcreteIndexTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/WatcherConcreteIndexTests.java index 706337768a299..5f7c6490e51f1 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/WatcherConcreteIndexTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/WatcherConcreteIndexTests.java @@ -50,7 +50,7 @@ public void testCanUseAnyConcreteIndexName() throws Exception { assertBusy(() -> { assertResponse( prepareSearch(watchResultsIndex).setTrackTotalHits(true), - searchResponse -> assertThat((int) searchResponse.getHits().getTotalHits().value, greaterThan(0)) + searchResponse -> assertThat((int) searchResponse.getHits().getTotalHits().value(), greaterThan(0)) ); }); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java 
b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java index f1ad29607b5b8..7fa5365afa0ab 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java @@ -120,7 +120,7 @@ private void assertTotalHistoryEntries(String id, long expectedCount) throws Exc assertResponse( prepareSearch(HistoryStoreField.DATA_STREAM + "*").setSize(0) .setSource(new SearchSourceBuilder().query(QueryBuilders.boolQuery().must(termQuery("watch_id", id)))), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, is(oneOf(expectedCount, expectedCount + 1))) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), is(oneOf(expectedCount, expectedCount + 1))) ); }); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java index 60867ba5d4410..4068c534013b9 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java @@ -122,7 +122,7 @@ public void testActionConditionWithHardFailures() throws Exception { ensureGreen(HistoryStoreField.DATA_STREAM); final SearchResponse response = searchHistory(SearchSourceBuilder.searchSource().query(termQuery("watch_id", id))); try { - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); 
searchHitReference.set(response.getHits().getAt(0).asUnpooled()); } finally { response.decRef(); @@ -176,7 +176,7 @@ public void testActionConditionWithFailures() throws Exception { ensureGreen(HistoryStoreField.DATA_STREAM); final SearchResponse response = searchHistory(SearchSourceBuilder.searchSource().query(termQuery("watch_id", id))); try { - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); searchHitReference.set(response.getHits().getAt(0).asUnpooled()); } finally { response.decRef(); @@ -236,7 +236,7 @@ public void testActionCondition() throws Exception { ensureGreen(HistoryStoreField.DATA_STREAM); final SearchResponse response = searchHistory(SearchSourceBuilder.searchSource().query(termQuery("watch_id", id))); try { - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); searchHitReference.set(response.getHits().getAt(0).asUnpooled()); } finally { response.decRef(); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java index 5b7ea39079f28..dac87eaa6f034 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java @@ -105,7 +105,7 @@ public void testEmailFields() throws Exception { ), response -> { assertThat(response, notNullValue()); - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); InternalAggregations aggs = 
response.getAggregations(); assertThat(aggs, notNullValue()); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java index 97347de1ea23e..ffac36846414e 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java @@ -102,7 +102,7 @@ public void testHttpFields() throws Exception { ), response -> { assertThat(response, notNullValue()); - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); InternalAggregations aggs = response.getAggregations(); assertThat(aggs, notNullValue()); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java index 7dde279fb90db..8dec5287ae607 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java @@ -54,7 +54,7 @@ public void testIndexActionFields() throws Exception { ), response -> { assertThat(response, notNullValue()); - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); InternalAggregations aggs = response.getAggregations(); assertThat(aggs, notNullValue()); diff 
--git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java index 567d4acfa45e5..b268caa45f471 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java @@ -72,7 +72,7 @@ public void testHttpFields() throws Exception { ), response -> { assertThat(response, notNullValue()); - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); InternalAggregations aggs = response.getAggregations(); assertThat(aggs, notNullValue()); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java index 5dc537fc259d9..5eaf27e7b2670 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java @@ -377,7 +377,7 @@ protected void assertWatchWithMinimumPerformedActionsCount( lastResponse.set(searchResponse); assertThat( "could not find executed watch record for watch " + watchName, - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(minimumExpectedWatchActionsWithActionPerformed) ); if (assertConditionMet) { @@ -396,7 +396,7 @@ protected void 
assertWatchWithMinimumPerformedActionsCount( } catch (AssertionError error) { SearchResponse searchResponse = lastResponse.get(); try { - logger.info("Found [{}] records for watch [{}]", searchResponse.getHits().getTotalHits().value, watchName); + logger.info("Found [{}] records for watch [{}]", searchResponse.getHits().getTotalHits().value(), watchName); int counter = 1; for (SearchHit hit : searchResponse.getHits().getHits()) { logger.info("hit [{}]=\n {}", counter++, XContentHelper.convertToJson(hit.getSourceRef(), true, true)); @@ -452,7 +452,7 @@ protected void assertWatchWithNoActionNeeded(final String watchName, final long searchResponse -> { lastResponse.set(searchResponse); assertThat( - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(expectedWatchActionsWithNoActionNeeded) ); } @@ -461,7 +461,7 @@ protected void assertWatchWithNoActionNeeded(final String watchName, final long } catch (AssertionError error) { SearchResponse searchResponse = lastResponse.get(); try { - logger.info("Found [{}] records for watch [{}]", searchResponse.getHits().getTotalHits().value, watchName); + logger.info("Found [{}] records for watch [{}]", searchResponse.getHits().getTotalHits().value(), watchName); int counter = 1; for (SearchHit hit : searchResponse.getHits().getHits()) { logger.info("hit [{}]=\n {}", counter++, XContentHelper.convertToJson(hit.getSourceRef(), true, true)); @@ -497,7 +497,7 @@ protected void assertWatchWithMinimumActionsCount(final String watchName, final searchResponse -> { assertThat( "could not find executed watch record", - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(recordCount) ); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java 
b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java index 99640d1ebc3ea..03f1e6cb57eb8 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java @@ -296,8 +296,8 @@ private void assertSingleExecutionAndCompleteWatchHistory(final long numberOfWat AtomicLong successfulWatchExecutions = new AtomicLong(); refresh(); assertResponse(prepareSearch("output"), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, is(greaterThanOrEqualTo(numberOfWatches))); - successfulWatchExecutions.set(searchResponse.getHits().getTotalHits().value); + assertThat(searchResponse.getHits().getTotalHits().value(), is(greaterThanOrEqualTo(numberOfWatches))); + successfulWatchExecutions.set(searchResponse.getHits().getTotalHits().value()); }); // the watch history should contain entries for each triggered watch, which a few have been marked as not executed @@ -378,7 +378,7 @@ public void testWatchRecordSavedTwice() throws Exception { // the actual documents are in the output index refresh(); assertResponse(prepareSearch(HistoryStoreField.DATA_STREAM).setSize(numRecords), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, Matchers.equalTo((long) numRecords)); + assertThat(searchResponse.getHits().getTotalHits().value(), Matchers.equalTo((long) numRecords)); for (int i = 0; i < numRecords; i++) { assertThat(searchResponse.getHits().getAt(i).getSourceAsMap().get("state"), is(ExecutionState.EXECUTED.id())); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java 
index e5f4091ca89eb..f3648580691cb 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java @@ -55,7 +55,7 @@ public void testHistoryOnRejection() throws Exception { assertBusy(() -> { flushAndRefresh(".watcher-history-*"); assertResponse(prepareSearch(".watcher-history-*"), searchResponse -> { - assertThat("Watcher history not found", searchResponse.getHits().getTotalHits().value, greaterThanOrEqualTo(2L)); + assertThat("Watcher history not found", searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(2L)); assertThat( "Did not find watcher history for rejected watch", Arrays.stream(searchResponse.getHits().getHits()) diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java index 7ff293ed9b150..fbb1996a4cf42 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java @@ -69,7 +69,7 @@ public void testThatLoadingWithNonExistingIndexWorks() throws Exception { assertThat(refreshResponse.getStatus(), equalTo(RestStatus.OK)); assertResponse( prepareSearch(".watcher-history*").setSize(0), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, is(greaterThanOrEqualTo(1L))) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), is(greaterThanOrEqualTo(1L))) ); }, 30, TimeUnit.SECONDS); } diff --git 
a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java index 4298f641cbdd2..e12805f3ace09 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java @@ -79,7 +79,7 @@ public void testWatchMetadata() throws Exception { } assertNotNull(searchResponse); try { - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); } finally { searchResponse.decRef(); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java index 92ac91a63e097..2ec6541275d04 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java @@ -139,13 +139,13 @@ public void testScriptTransform() throws Exception { refresh(); assertNoFailuresAndResponse(prepareSearch("output1"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("key3").toString(), equalTo("20")); }); assertNoFailuresAndResponse(prepareSearch("output2"), response -> { - 
assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("key3").toString(), equalTo("20")); }); @@ -184,12 +184,12 @@ public void testSearchTransform() throws Exception { refresh(); assertNoFailuresAndResponse(prepareSearch("output1"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("mytestresult")); }); assertNoFailuresAndResponse(prepareSearch("output2"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("mytestresult")); }); } @@ -223,13 +223,13 @@ public void testChainTransform() throws Exception { refresh(); assertNoFailuresAndResponse(prepareSearch("output1"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("key4").toString(), equalTo("30")); }); assertNoFailuresAndResponse(prepareSearch("output2"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("key4").toString(), equalTo("30")); }); diff --git 
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java index 5389f34212270..0ea9b432d3b0f 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java @@ -340,7 +340,7 @@ private Collection loadWatches(ClusterState clusterState) { throw new ElasticsearchException("Partial response while loading watches"); } - if (response.getHits().getTotalHits().value == 0) { + if (response.getHits().getTotalHits().value() == 0) { return Collections.emptyList(); } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java index 9a165112c41d1..327d345af864e 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java @@ -42,7 +42,6 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.service.ClusterService; @@ -440,7 +439,7 @@ static CharacterRunAutomaton createAutomaton(List whiteListedHosts) { } Automaton whiteListAutomaton = Regex.simpleMatchToAutomaton(whiteListedHosts.toArray(Strings.EMPTY_ARRAY)); - whiteListAutomaton = MinimizationOperations.minimize(whiteListAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + whiteListAutomaton = Operations.determinize(whiteListAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); return new 
CharacterRunAutomaton(whiteListAutomaton); } } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java index 6775dca424bf1..dfa0c47493ed7 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java @@ -156,7 +156,7 @@ public Collection findTriggeredWatches(Collection watches SearchResponse response = null; try { response = client.search(searchRequest).actionGet(defaultSearchTimeout); - logger.debug("trying to find triggered watches for ids {}: found [{}] docs", ids, response.getHits().getTotalHits().value); + logger.debug("trying to find triggered watches for ids {}: found [{}] docs", ids, response.getHits().getTotalHits().value()); while (response.getHits().getHits().length != 0) { for (SearchHit hit : response.getHits()) { Wid wid = new Wid(hit.getId()); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java index 9d6186e9c1c48..e6bd1b0efb95d 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java @@ -91,7 +91,7 @@ SearchInput.Result doExecute(WatchExecutionContext ctx, WatcherSearchTemplateReq try { if (logger.isDebugEnabled()) { - logger.debug("[{}] found [{}] hits", ctx.id(), response.getHits().getTotalHits().value); + logger.debug("[{}] found [{}] hits", ctx.id(), response.getHits().getTotalHits().value()); } final Payload payload; diff --git 
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportQueryWatchesAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportQueryWatchesAction.java index 97ae29a26e68c..358a839e60ea5 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportQueryWatchesAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportQueryWatchesAction.java @@ -98,11 +98,11 @@ SearchRequest createSearchRequest(QueryWatchesAction.Request request) { } void transformResponse(SearchResponse searchResponse, ActionListener listener) { - assert searchResponse.getHits().getTotalHits().relation == TotalHits.Relation.EQUAL_TO; + assert searchResponse.getHits().getTotalHits().relation() == TotalHits.Relation.EQUAL_TO; List items = Arrays.stream(searchResponse.getHits().getHits()) .map(this::transformSearchHit) .toList(); - listener.onResponse(new QueryWatchesAction.Response(searchResponse.getHits().getTotalHits().value, items)); + listener.onResponse(new QueryWatchesAction.Response(searchResponse.getHits().getTotalHits().value(), items)); } QueryWatchesAction.Response.Item transformSearchHit(SearchHit searchHit) { diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedAutomatonQuery.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedAutomatonQuery.java index 608e5f1972373..191775f46cd72 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedAutomatonQuery.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedAutomatonQuery.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import 
org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BytesRef; @@ -69,44 +70,56 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { ByteArrayStreamInput bytes = new ByteArrayStreamInput(); final BinaryDocValues values = DocValues.getBinary(context.reader(), field); - Scorer approxScorer = approxWeight.scorer(context); - if (approxScorer == null) { + ScorerSupplier approxScorerSupplier = approxWeight.scorerSupplier(context); + if (approxScorerSupplier == null) { // No matches to be had return null; } - DocIdSetIterator approxDisi = approxScorer.iterator(); - TwoPhaseIterator twoPhase = new TwoPhaseIterator(approxDisi) { + + return new ScorerSupplier() { @Override - public boolean matches() throws IOException { - if (values.advanceExact(approxDisi.docID()) == false) { - // Can happen when approxQuery resolves to some form of MatchAllDocs expression - return false; - } - BytesRef arrayOfValues = values.binaryValue(); - bytes.reset(arrayOfValues.bytes); - bytes.setPosition(arrayOfValues.offset); - - int size = bytes.readVInt(); - for (int i = 0; i < size; i++) { - int valLength = bytes.readVInt(); - if (bytesMatcher.run(arrayOfValues.bytes, bytes.getPosition(), valLength)) { - return true; + public Scorer get(long leadCost) throws IOException { + Scorer approxScorer = approxScorerSupplier.get(leadCost); + DocIdSetIterator approxDisi = approxScorer.iterator(); + TwoPhaseIterator twoPhase = new TwoPhaseIterator(approxDisi) { + @Override + public boolean matches() throws IOException { + if (values.advanceExact(approxDisi.docID()) == false) { + // Can happen when approxQuery resolves to some form of MatchAllDocs expression + 
return false; + } + BytesRef arrayOfValues = values.binaryValue(); + bytes.reset(arrayOfValues.bytes); + bytes.setPosition(arrayOfValues.offset); + + int size = bytes.readVInt(); + for (int i = 0; i < size; i++) { + int valLength = bytes.readVInt(); + if (bytesMatcher.run(arrayOfValues.bytes, bytes.getPosition(), valLength)) { + return true; + } + bytes.skipBytes(valLength); + } + return false; + } + + @Override + public float matchCost() { + // TODO: how can we compute this? + return 1000f; } - bytes.skipBytes(valLength); - } - return false; + }; + return new ConstantScoreScorer(score(), scoreMode, twoPhase); } @Override - public float matchCost() { - // TODO: how can we compute this? - return 1000f; + public long cost() { + return approxScorerSupplier.cost(); } }; - return new ConstantScoreScorer(this, score(), scoreMode, twoPhase); } @Override diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java index 7784e7ffdda12..f3b01bb898126 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java @@ -38,7 +38,6 @@ import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.ElasticsearchParseException; @@ -349,7 +348,7 @@ public Query wildcardQuery(String wildcardPattern, RewriteMethod method, boolean } Automaton automaton = caseInsensitive ? 
AutomatonQueries.toCaseInsensitiveWildcardAutomaton(new Term(name(), wildcardPattern)) - : WildcardQuery.toAutomaton(new Term(name(), wildcardPattern)); + : WildcardQuery.toAutomaton(new Term(name(), wildcardPattern), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); if (clauseCount > 0) { // We can accelerate execution with the ngram query BooleanQuery approxQuery = rewritten.build(); @@ -379,7 +378,6 @@ public Query regexpQuery( RegExp regExp = new RegExp(value, syntaxFlags, matchFlags); Automaton a = regExp.toAutomaton(); a = Operations.determinize(a, maxDeterminizedStates); - a = MinimizationOperations.minimize(a, maxDeterminizedStates); if (Operations.isTotal(a)) { // Will match all return existsQuery(context); } @@ -390,7 +388,7 @@ public Query regexpQuery( Query approxNgramQuery = rewriteBoolToNgramQuery(approxBooleanQuery); RegExp regex = new RegExp(value, syntaxFlags, matchFlags); - Automaton automaton = regex.toAutomaton(maxDeterminizedStates); + Automaton automaton = Operations.determinize(regex.toAutomaton(), maxDeterminizedStates); // We can accelerate execution with the ngram query return new BinaryDvConfirmedAutomatonQuery(approxNgramQuery, name(), value, automaton); @@ -550,9 +548,9 @@ private Query rewriteBoolToNgramQuery(Query approxQuery) { BooleanQuery.Builder rewritten = new BooleanQuery.Builder(); int clauseCount = 0; for (BooleanClause clause : bq) { - Query q = rewriteBoolToNgramQuery(clause.getQuery()); + Query q = rewriteBoolToNgramQuery(clause.query()); if (q != null) { - if (clause.getOccur().equals(Occur.FILTER)) { + if (clause.occur().equals(Occur.FILTER)) { // Can't drop "should" clauses because it can elevate a sibling optional item // to mandatory (shoulds with 1 clause) causing false negatives // Dropping MUSTs increase false positives which are OK because are verified anyway. 
@@ -561,7 +559,7 @@ private Query rewriteBoolToNgramQuery(Query approxQuery) { break; } } - rewritten.add(q, clause.getOccur()); + rewritten.add(q, clause.occur()); } } return rewritten.build(); diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java index 4b9ccff6f526c..a1a01ebdcc590 100644 --- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java @@ -41,6 +41,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.ByteRunAutomaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.lucene.search.AutomatonQueries; @@ -182,7 +183,7 @@ public void testTooBigKeywordField() throws IOException { Query wildcardFieldQuery = wildcardFieldType.fieldType().wildcardQuery("*a*", null, null); TopDocs wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.INDEXORDER); - assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(1L)); + assertThat(wildcardFieldTopDocs.totalHits.value(), equalTo(1L)); reader.close(); dir.close(); @@ -229,12 +230,12 @@ public void testTooBigQueryField() throws IOException { String queryString = randomABString((IndexSearcher.getMaxClauseCount() * 2) + 1); Query wildcardFieldQuery = wildcardFieldType.fieldType().wildcardQuery(queryString, null, null); TopDocs wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.INDEXORDER); - assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(0L)); + assertThat(wildcardFieldTopDocs.totalHits.value(), equalTo(0L)); // Test regexp query 
wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(queryString, RegExp.ALL, 0, 20000, null, MOCK_CONTEXT); wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.INDEXORDER); - assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(0L)); + assertThat(wildcardFieldTopDocs.totalHits.value(), equalTo(0L)); reader.close(); dir.close(); @@ -271,13 +272,13 @@ public void testTermAndPrefixQueryIgnoreWildcardSyntax() throws IOException { private void expectTermMatch(IndexSearcher searcher, String term, long count) throws IOException { Query q = wildcardFieldType.fieldType().termQuery(term, MOCK_CONTEXT); TopDocs td = searcher.search(q, 10, Sort.RELEVANCE); - assertThat(td.totalHits.value, equalTo(count)); + assertThat(td.totalHits.value(), equalTo(count)); } private void expectPrefixMatch(IndexSearcher searcher, String term, long count) throws IOException { Query q = wildcardFieldType.fieldType().prefixQuery(term, null, MOCK_CONTEXT); TopDocs td = searcher.search(q, 10, Sort.RELEVANCE); - assertThat(td.totalHits.value, equalTo(count)); + assertThat(td.totalHits.value(), equalTo(count)); } public void testSearchResultsVersusKeywordField() throws IOException { @@ -390,8 +391,8 @@ public void testSearchResultsVersusKeywordField() throws IOException { TopDocs wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, values.size() + 1, Sort.RELEVANCE); assertThat( keywordFieldQuery + "\n" + wildcardFieldQuery, - wildcardFieldTopDocs.totalHits.value, - equalTo(kwTopDocs.totalHits.value) + wildcardFieldTopDocs.totalHits.value(), + equalTo(kwTopDocs.totalHits.value()) ); HashSet expectedDocs = new HashSet<>(); @@ -497,7 +498,7 @@ public void testRangeQueryVersusKeywordField() throws IOException { TopDocs kwTopDocs = searcher.search(keywordFieldQuery, 10, Sort.RELEVANCE); TopDocs wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.RELEVANCE); - assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(kwTopDocs.totalHits.value)); + 
assertThat(wildcardFieldTopDocs.totalHits.value(), equalTo(kwTopDocs.totalHits.value())); HashSet expectedDocs = new HashSet<>(); for (ScoreDoc topDoc : kwTopDocs.scoreDocs) { @@ -642,7 +643,7 @@ public void testWildcardAcceleration() throws IOException, ParseException { public void testQueryCachingEquality() throws IOException, ParseException { String pattern = "A*b*B?a"; // Case sensitivity matters when it comes to caching - Automaton caseSensitiveAutomaton = WildcardQuery.toAutomaton(new Term("field", pattern)); + Automaton caseSensitiveAutomaton = WildcardQuery.toAutomaton(new Term("field", pattern), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); Automaton caseInSensitiveAutomaton = AutomatonQueries.toCaseInsensitiveWildcardAutomaton(new Term("field", pattern)); BinaryDvConfirmedAutomatonQuery csQ = new BinaryDvConfirmedAutomatonQuery( new MatchAllDocsQuery(), @@ -660,7 +661,10 @@ public void testQueryCachingEquality() throws IOException, ParseException { assertNotEquals(csQ.hashCode(), ciQ.hashCode()); // Same query should be equal - Automaton caseSensitiveAutomaton2 = WildcardQuery.toAutomaton(new Term("field", pattern)); + Automaton caseSensitiveAutomaton2 = WildcardQuery.toAutomaton( + new Term("field", pattern), + Operations.DEFAULT_DETERMINIZE_WORK_LIMIT + ); BinaryDvConfirmedAutomatonQuery csQ2 = new BinaryDvConfirmedAutomatonQuery( new MatchAllDocsQuery(), "field", @@ -880,11 +884,11 @@ private Query rewriteFiltersToMustsForComparisonPurposes(Query q) { if (q instanceof BooleanQuery bq) { BooleanQuery.Builder result = new BooleanQuery.Builder(); for (BooleanClause cq : bq.clauses()) { - Query rewritten = rewriteFiltersToMustsForComparisonPurposes(cq.getQuery()); - if (cq.getOccur() == Occur.FILTER) { + Query rewritten = rewriteFiltersToMustsForComparisonPurposes(cq.query()); + if (cq.occur() == Occur.FILTER) { result.add(rewritten, Occur.MUST); } else { - result.add(rewritten, cq.getOccur()); + result.add(rewritten, cq.occur()); } } return result.build(); 
@@ -1013,8 +1017,9 @@ protected String convertToRandomRegex(String randomValue) { } // Assert our randomly generated regex actually matches the provided raw input. - RegExp regex = new RegExp(result.toString()); - Automaton automaton = regex.toAutomaton(); + int includeDeprecatedComplement = RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT; + RegExp regex = new RegExp(result.toString(), includeDeprecatedComplement); + Automaton automaton = Operations.determinize(regex.toAutomaton(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); ByteRunAutomaton bytesMatcher = new ByteRunAutomaton(automaton); BytesRef br = new BytesRef(randomValue); assertTrue( diff --git a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java index d61c143098fcb..f502683e42eb2 100644 --- a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java +++ b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java @@ -377,7 +377,7 @@ private void assertDocs( try { logger.info(searchResponse); // check hit count - assertEquals(numDocs, searchResponse.getHits().getTotalHits().value); + assertEquals(numDocs, searchResponse.getHits().getTotalHits().value()); // check that _index is properly set assertTrue(Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getIndex).allMatch(index::equals)); // check that all _ids are there @@ -404,7 +404,7 @@ private void assertDocs( ); try { logger.info(searchResponse); - assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals(1, searchResponse.getHits().getTotalHits().value()); assertEquals(id, searchResponse.getHits().getHits()[0].getId()); assertEquals(sourceForDoc(num), searchResponse.getHits().getHits()[0].getSourceAsString()); } finally { @@ -456,7 +456,7 @@ private void assertDocs( ); try { 
logger.info(searchResponse); - assertEquals(typeCount, searchResponse.getHits().getTotalHits().value); + assertEquals(typeCount, searchResponse.getHits().getTotalHits().value()); for (SearchHit hit : searchResponse.getHits().getHits()) { DocumentField typeField = hit.field("_type"); assertNotNull(typeField); @@ -482,7 +482,7 @@ private void assertDocs( ); try { logger.info(searchResponse); - assertEquals(0, searchResponse.getHits().getTotalHits().value); + assertEquals(0, searchResponse.getHits().getTotalHits().value()); assertEquals(numberOfShards, searchResponse.getSuccessfulShards()); // When all shards are skipped, at least one of them is queried in order to provide a proper search response. assertEquals(numberOfShards - 1, searchResponse.getSkippedShards()); diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java index dddba9b7b0fba..02dc679152bf4 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java @@ -445,7 +445,7 @@ private List getAllTokenIds() throws IOException { final SearchHits searchHits = response.getHits(); assertThat( "Search request used with size parameter that was too small to fetch all tokens.", - searchHits.getTotalHits().value, + searchHits.getTotalHits().value(), lessThanOrEqualTo(searchSize) ); final List tokenIds = Arrays.stream(searchHits.getHits()).map(searchHit -> { From 0a399b572d6927241c0ffecf3a1a3a1b6849c7fb Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Mon, 21 Oct 2024 22:53:49 +1100 Subject: [PATCH 246/449] Mute org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT 
#115213 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index b7323bfc1de18..3b6a0d8cdae3d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -361,6 +361,8 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/115129 - class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT issue: https://github.com/elastic/elasticsearch/issues/115135 +- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT + issue: https://github.com/elastic/elasticsearch/issues/115213 # Examples: # From 6a73f89d31722ede8fa4d71f68fe8187e00b28c7 Mon Sep 17 00:00:00 2001 From: Pawan Kartik Date: Mon, 21 Oct 2024 13:38:46 +0100 Subject: [PATCH 247/449] Use `equals()` to compare `String`-s over operators in `hasRemoteClusters()` (#115154) --- .../java/org/elasticsearch/action/search/SearchResponse.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java index 83ee6c216ad49..041b3ae73c1ee 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java @@ -787,7 +787,8 @@ public boolean hasClusterObjects() { * This will be false for local-cluster (non-CCS) only searches. 
*/ public boolean hasRemoteClusters() { - return total > 1 || clusterInfo.keySet().stream().anyMatch(alias -> alias != RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); + return total > 1 + || clusterInfo.keySet().stream().anyMatch(alias -> alias.equals(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY) == false); } } From 6580bfbd2d514e65b837c8c69b00b615890b9954 Mon Sep 17 00:00:00 2001 From: Jan Kuipers <148754765+jan-elastic@users.noreply.github.com> Date: Mon, 21 Oct 2024 15:12:11 +0200 Subject: [PATCH 248/449] Reduce repeated warning logs from AdaptiveAllocationsScalerService (#115089) --- .../AdaptiveAllocationsScalerService.java | 32 +++++++++++-------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java index 9624d619ff20a..770e890512935 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java @@ -41,6 +41,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentSkipListSet; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Function; @@ -205,13 +206,11 @@ Collection observeDouble(Function scalers; private final Map lastScaleUpTimesMillis; - private volatile Scheduler.Cancellable cancellable; private final AtomicBoolean busy; - private final long scaleToZeroAfterNoRequestsSeconds; - private final Set deploymentIdsWithInFlightScaleFromZeroRequests = new ConcurrentSkipListSet<>(); + private final Map lastWarningMessages = new ConcurrentHashMap<>(); public 
AdaptiveAllocationsScalerService( ThreadPool threadPool, @@ -475,7 +474,8 @@ private ActionListener updateAssigm int numberOfAllocations ) { return ActionListener.wrap(updateResponse -> { - logger.debug("adaptive allocations scaler: scaled [{}] to [{}] allocations.", deploymentId, numberOfAllocations); + lastWarningMessages.remove(deploymentId); + logger.info("adaptive allocations scaler: scaled [{}] to [{}] allocations.", deploymentId, numberOfAllocations); threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) .execute( () -> inferenceAuditor.info( @@ -484,20 +484,24 @@ private ActionListener updateAssigm ) ); }, e -> { - logger.atLevel(Level.WARN) + Level level = e.getMessage().equals(lastWarningMessages.get(deploymentId)) ? Level.DEBUG : Level.WARN; + lastWarningMessages.put(deploymentId, e.getMessage()); + logger.atLevel(level) .withThrowable(e) .log("adaptive allocations scaler: scaling [{}] to [{}] allocations failed.", deploymentId, numberOfAllocations); - threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) - .execute( - () -> inferenceAuditor.warning( - deploymentId, - Strings.format( - "adaptive allocations scaler: scaling [%s] to [%s] allocations failed.", + if (level == Level.WARN) { + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) + .execute( + () -> inferenceAuditor.warning( deploymentId, - numberOfAllocations + Strings.format( + "adaptive allocations scaler: scaling [%s] to [%s] allocations failed.", + deploymentId, + numberOfAllocations + ) ) - ) - ); + ); + } }); } } From e0e34c3f5411daaadcbd470651a492d1d7cfc923 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 22 Oct 2024 00:13:10 +1100 Subject: [PATCH 249/449] Mute org.elasticsearch.xpack.esql.expression.function.scalar.string.ReverseTests testEvaluateInManyThreads {TestCase=} #115227 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 
3b6a0d8cdae3d..96b7d05f662ab 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -363,6 +363,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/115135 - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT issue: https://github.com/elastic/elasticsearch/issues/115213 +- class: org.elasticsearch.xpack.esql.expression.function.scalar.string.ReverseTests + method: testEvaluateInManyThreads {TestCase=} + issue: https://github.com/elastic/elasticsearch/issues/115227 # Examples: # From 2ff0afadae2881524c117bfb84a68e95fc0bc09a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 22 Oct 2024 00:13:25 +1100 Subject: [PATCH 250/449] Mute org.elasticsearch.xpack.esql.expression.function.scalar.string.ReverseTests testEvaluateInManyThreads {TestCase=} #115228 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 96b7d05f662ab..70c5f3ced5273 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -366,6 +366,9 @@ tests: - class: org.elasticsearch.xpack.esql.expression.function.scalar.string.ReverseTests method: testEvaluateInManyThreads {TestCase=} issue: https://github.com/elastic/elasticsearch/issues/115227 +- class: org.elasticsearch.xpack.esql.expression.function.scalar.string.ReverseTests + method: testEvaluateInManyThreads {TestCase=} + issue: https://github.com/elastic/elasticsearch/issues/115228 # Examples: # From 183ad88104ec05ebdc0765a3794b69d2449753d4 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 22 Oct 2024 00:27:36 +1100 Subject: [PATCH 251/449] Mute org.elasticsearch.xpack.test.rest.XPackRestIT test {p0=esql/60_usage/Basic ESQL usage output (telemetry)} #115231 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 70c5f3ced5273..1dda90369ce76 100644 --- a/muted-tests.yml +++ 
b/muted-tests.yml @@ -369,6 +369,9 @@ tests: - class: org.elasticsearch.xpack.esql.expression.function.scalar.string.ReverseTests method: testEvaluateInManyThreads {TestCase=} issue: https://github.com/elastic/elasticsearch/issues/115228 +- class: org.elasticsearch.xpack.test.rest.XPackRestIT + method: test {p0=esql/60_usage/Basic ESQL usage output (telemetry)} + issue: https://github.com/elastic/elasticsearch/issues/115231 # Examples: # From fd43adc78aff661b9a0b2435587f6d4b03009aa8 Mon Sep 17 00:00:00 2001 From: Ioana Tagirta Date: Mon, 21 Oct 2024 15:43:47 +0200 Subject: [PATCH 252/449] ES|QL Add initial support for semantic_text field type (#113920) * Add initial support for semantic_text field type * Update docs/changelog/113920.yaml * More tests and fixes * Use mock inference service * Fix tests * Spotless * Fix mixed-cluster and multi-clusters tests * sort * Attempt another fix for bwc tests * Spotless * Fix merge * Attempt another fix * Don't load the inference-service-test plugin for mixed versions/clusters * Add more tests, address review comments * trivial * revert * post-merge fix block loader * post-merge fix compile * add mixed version testing * whitespace * fix MultiClusterSpecIT * add more fields to mapping * Revert mixed version testing * whitespace --------- Co-authored-by: ChrisHegarty Co-authored-by: Elastic Machine --- docs/changelog/113920.yaml | 5 + .../esql/core/plugin/EsqlCorePlugin.java | 1 + .../xpack/esql/core/type/DataType.java | 12 +- .../esql/qa/mixed/MixedClusterEsqlSpecIT.java | 5 + .../xpack/esql/ccq/MultiClusterSpecIT.java | 5 + .../esql/qa/server/multi-node/build.gradle | 1 + .../xpack/esql/qa/multi_node/EsqlSpecIT.java | 2 +- .../esql/qa/server/single-node/build.gradle | 1 + .../xpack/esql/qa/single_node/EsqlSpecIT.java | 2 +- .../xpack/esql/qa/rest/EsqlSpecTestCase.java | 20 +- .../elasticsearch/xpack/esql/CsvAssert.java | 6 +- .../xpack/esql/CsvTestUtils.java | 1 + .../xpack/esql/CsvTestsDataLoader.java | 132 +++++++++++-- 
.../xpack/esql/EsqlTestUtils.java | 2 +- .../main/resources/mapping-semantic_text.json | 73 ++++++++ .../src/main/resources/semantic_text.csv | 4 + .../src/main/resources/semantic_text.csv-spec | 175 ++++++++++++++++++ .../xpack/esql/action/EsqlCapabilities.java | 6 +- .../xpack/esql/action/PositionToXContent.java | 2 +- .../xpack/esql/action/ResponseValueUtils.java | 2 +- .../esql/planner/LocalExecutionPlanner.java | 2 +- .../xpack/esql/planner/PlannerUtils.java | 2 +- .../esql/action/EsqlQueryResponseTests.java | 4 +- .../scalar/conditional/CaseTests.java | 2 +- .../mapper/SemanticTextFieldMapper.java | 9 + .../test/esql/40_unsupported_types.yml | 49 +++++ 26 files changed, 490 insertions(+), 35 deletions(-) create mode 100644 docs/changelog/113920.yaml create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-semantic_text.json create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/semantic_text.csv create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/semantic_text.csv-spec diff --git a/docs/changelog/113920.yaml b/docs/changelog/113920.yaml new file mode 100644 index 0000000000000..4699ae6d7dd65 --- /dev/null +++ b/docs/changelog/113920.yaml @@ -0,0 +1,5 @@ +pr: 113920 +summary: Add initial support for `semantic_text` field type +area: Search +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java index 639d8ed68d0a3..d84a471815a9a 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java @@ -14,4 +14,5 @@ public class EsqlCorePlugin extends Plugin implements ExtensiblePlugin { public static final FeatureFlag DATE_NANOS_FEATURE_FLAG = new 
FeatureFlag("esql_date_nanos"); + public static final FeatureFlag SEMANTIC_TEXT_FEATURE_FLAG = new FeatureFlag("esql_semantic_text"); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java index 12699ca3ee720..5041c96128a1e 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java @@ -194,7 +194,14 @@ public enum DataType { * inside alongside time-series aggregations. These fields are not parsable from the * mapping and should be hidden from users. */ - PARTIAL_AGG(builder().esType("partial_agg").unknownSize()); + PARTIAL_AGG(builder().esType("partial_agg").unknownSize()), + /** + * String fields that are split into chunks, where each chunk has attached embeddings + * used for semantic search. Generally ESQL only sees {@code semantic_text} fields when + * loaded from the index and ESQL will load these fields as strings without their attached + * chunks or embeddings. + */ + SEMANTIC_TEXT(builder().esType("semantic_text").unknownSize()); /** * Types that are actively being built. These types are not returned @@ -203,7 +210,8 @@ public enum DataType { * check that sending them to a function produces a sane error message. 
*/ public static final Map UNDER_CONSTRUCTION = Map.ofEntries( - Map.entry(DATE_NANOS, EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG) + Map.entry(DATE_NANOS, EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + Map.entry(SEMANTIC_TEXT, EsqlCorePlugin.SEMANTIC_TEXT_FEATURE_FLAG) ); private final String typeName; diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java index d0d6d5fa49c42..0e23b29172c32 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java @@ -86,4 +86,9 @@ protected boolean supportsAsync() { protected boolean enableRoundingDoubleValuesOnAsserting() { return true; } + + @Override + protected boolean supportsInferenceTestService() { + return false; + } } diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index 3e799730f7269..8446ac63f43a1 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -261,4 +261,9 @@ static boolean hasIndexMetadata(String query) { protected boolean enableRoundingDoubleValuesOnAsserting() { return true; } + + @Override + protected boolean supportsInferenceTestService() { + return false; + } } diff --git a/x-pack/plugin/esql/qa/server/multi-node/build.gradle 
b/x-pack/plugin/esql/qa/server/multi-node/build.gradle index 9f8ca78aba81e..2dcc001c4e159 100644 --- a/x-pack/plugin/esql/qa/server/multi-node/build.gradle +++ b/x-pack/plugin/esql/qa/server/multi-node/build.gradle @@ -11,6 +11,7 @@ dependencies { clusterPlugins project(':plugins:mapper-size') clusterPlugins project(':plugins:mapper-murmur3') + clusterPlugins project(':x-pack:plugin:inference:qa:test-service-plugin') } GradleUtils.extendSourceSet(project, "javaRestTest", "yamlRestTest") diff --git a/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlSpecIT.java b/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlSpecIT.java index bda10709ed947..64c113345bd53 100644 --- a/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlSpecIT.java @@ -14,7 +14,7 @@ public class EsqlSpecIT extends EsqlSpecTestCase { @ClassRule - public static ElasticsearchCluster cluster = Clusters.testCluster(spec -> {}); + public static ElasticsearchCluster cluster = Clusters.testCluster(spec -> spec.plugin("inference-service-test")); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/esql/qa/server/single-node/build.gradle b/x-pack/plugin/esql/qa/server/single-node/build.gradle index ab8e3d4b32d9a..a37db5dc245e0 100644 --- a/x-pack/plugin/esql/qa/server/single-node/build.gradle +++ b/x-pack/plugin/esql/qa/server/single-node/build.gradle @@ -22,6 +22,7 @@ dependencies { clusterPlugins project(':plugins:mapper-size') clusterPlugins project(':plugins:mapper-murmur3') + clusterPlugins project(':x-pack:plugin:inference:qa:test-service-plugin') } restResources { diff --git 
a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java index 676fffd553ca8..368eebe808eee 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java @@ -18,7 +18,7 @@ @ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class EsqlSpecIT extends EsqlSpecTestCase { @ClassRule - public static ElasticsearchCluster cluster = Clusters.testCluster(); + public static ElasticsearchCluster cluster = Clusters.testCluster(spec -> spec.plugin("inference-service-test")); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 319e67512c7ac..57f58fc448822 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -65,7 +65,10 @@ import static org.elasticsearch.xpack.esql.CsvTestUtils.ExpectedResults; import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled; import static org.elasticsearch.xpack.esql.CsvTestUtils.loadCsvSpecValues; -import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.CSV_DATASET_MAP; +import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.availableDatasetsForEs; +import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.clusterHasInferenceEndpoint; +import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.createInferenceEndpoint; +import 
static org.elasticsearch.xpack.esql.CsvTestsDataLoader.deleteInferenceEndpoint; import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.loadDataSetIntoEs; import static org.elasticsearch.xpack.esql.EsqlTestUtils.classpathResources; @@ -129,7 +132,11 @@ protected EsqlSpecTestCase( @Before public void setup() throws IOException { - if (indexExists(CSV_DATASET_MAP.keySet().iterator().next()) == false) { + if (supportsInferenceTestService() && clusterHasInferenceEndpoint(client()) == false) { + createInferenceEndpoint(client()); + } + + if (indexExists(availableDatasetsForEs(client()).iterator().next().indexName()) == false) { loadDataSetIntoEs(client()); } } @@ -148,6 +155,8 @@ public static void wipeTestData() throws IOException { throw e; } } + + deleteInferenceEndpoint(client()); } public boolean logResults() { @@ -164,6 +173,9 @@ public final void test() throws Throwable { } protected void shouldSkipTest(String testName) throws IOException { + if (testCase.requiredCapabilities.contains("semantic_text_type")) { + assumeTrue("Inference test service needs to be supported for semantic_text", supportsInferenceTestService()); + } checkCapabilities(adminClient(), testFeatureService, testName, testCase); assumeTrue("Test " + testName + " is not enabled", isEnabled(testName, instructions, Version.CURRENT)); } @@ -207,6 +219,10 @@ protected static void checkCapabilities(RestClient client, TestFeatureService te } } + protected boolean supportsInferenceTestService() { + return true; + } + protected final void doTest() throws Throwable { RequestObjectBuilder builder = new RequestObjectBuilder(randomFrom(XContentType.values())); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java index 36d785c24ab23..1a2aa122c85ca 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java +++ 
b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java @@ -134,7 +134,11 @@ private static void assertMetadata( || expectedType == UNSIGNED_LONG)) { continue; } - if (blockType == Type.KEYWORD && (expectedType == Type.IP || expectedType == Type.VERSION || expectedType == Type.TEXT)) { + if (blockType == Type.KEYWORD + && (expectedType == Type.IP + || expectedType == Type.VERSION + || expectedType == Type.TEXT + || expectedType == Type.SEMANTIC_TEXT)) { // Type.asType translates all bytes references into keywords continue; } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java index eaec6811fbc24..bd8bd0f688837 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java @@ -447,6 +447,7 @@ public enum Type { SCALED_FLOAT(s -> s == null ? 
null : scaledFloat(s, "100"), Double.class), KEYWORD(Object::toString, BytesRef.class), TEXT(Object::toString, BytesRef.class), + SEMANTIC_TEXT(Object::toString, BytesRef.class), IP( StringUtils::parseIP, (l, r) -> l instanceof String maybeIP diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index d63585086f1cd..cf9d66727a900 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -19,6 +19,7 @@ import org.apache.logging.log4j.core.config.plugins.util.PluginManager; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.common.Strings; @@ -36,9 +37,11 @@ import java.net.URI; import java.net.URL; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.esql.CsvTestUtils.COMMA_ESCAPING_REGEX; @@ -81,6 +84,7 @@ public class CsvTestsDataLoader { private static final TestsDataset K8S = new TestsDataset("k8s", "k8s-mappings.json", "k8s.csv").withSetting("k8s-settings.json"); private static final TestsDataset ADDRESSES = new TestsDataset("addresses"); private static final TestsDataset BOOKS = new TestsDataset("books"); + private static final TestsDataset SEMANTIC_TEXT = new TestsDataset("semantic_text").withInferenceEndpoint(true); public static final Map CSV_DATASET_MAP = Map.ofEntries( Map.entry(EMPLOYEES.indexName, EMPLOYEES), @@ 
-112,7 +116,8 @@ public class CsvTestsDataLoader { Map.entry(K8S.indexName, K8S), Map.entry(DISTANCES.indexName, DISTANCES), Map.entry(ADDRESSES.indexName, ADDRESSES), - Map.entry(BOOKS.indexName, BOOKS) + Map.entry(BOOKS.indexName, BOOKS), + Map.entry(SEMANTIC_TEXT.indexName, SEMANTIC_TEXT) ); private static final EnrichConfig LANGUAGES_ENRICH = new EnrichConfig("languages_policy", "enrich-policy-languages.json"); @@ -219,8 +224,13 @@ public static void main(String[] args) throws IOException { } } - private static void loadDataSetIntoEs(RestClient client, IndexCreator indexCreator) throws IOException { - loadDataSetIntoEs(client, LogManager.getLogger(CsvTestsDataLoader.class), indexCreator); + public static Set availableDatasetsForEs(RestClient client) throws IOException { + boolean inferenceEnabled = clusterHasInferenceEndpoint(client); + + return CSV_DATASET_MAP.values() + .stream() + .filter(d -> d.requiresInferenceEndpoint == false || inferenceEnabled) + .collect(Collectors.toCollection(HashSet::new)); } public static void loadDataSetIntoEs(RestClient client) throws IOException { @@ -229,22 +239,61 @@ public static void loadDataSetIntoEs(RestClient client) throws IOException { }); } - public static void loadDataSetIntoEs(RestClient client, Logger logger) throws IOException { - loadDataSetIntoEs(client, logger, (restClient, indexName, indexMapping, indexSettings) -> { - ESRestTestCase.createIndex(restClient, indexName, indexSettings, indexMapping, null); - }); - } + private static void loadDataSetIntoEs(RestClient client, IndexCreator indexCreator) throws IOException { + Logger logger = LogManager.getLogger(CsvTestsDataLoader.class); - private static void loadDataSetIntoEs(RestClient client, Logger logger, IndexCreator indexCreator) throws IOException { - for (var dataset : CSV_DATASET_MAP.values()) { + Set loadedDatasets = new HashSet<>(); + for (var dataset : availableDatasetsForEs(client)) { load(client, dataset, logger, indexCreator); + 
loadedDatasets.add(dataset.indexName); } - forceMerge(client, CSV_DATASET_MAP.keySet(), logger); + forceMerge(client, loadedDatasets, logger); for (var policy : ENRICH_POLICIES) { loadEnrichPolicy(client, policy.policyName, policy.policyFileName, logger); } } + /** The semantic_text mapping type require an inference endpoint that needs to be setup before creating the index. */ + public static void createInferenceEndpoint(RestClient client) throws IOException { + Request request = new Request("PUT", "_inference/sparse_embedding/test_sparse_inference"); + request.setJsonEntity(""" + { + "service": "test_service", + "service_settings": { + "model": "my_model", + "api_key": "abc64" + }, + "task_settings": { + } + } + """); + client.performRequest(request); + } + + public static void deleteInferenceEndpoint(RestClient client) throws IOException { + try { + client.performRequest(new Request("DELETE", "_inference/test_sparse_inference")); + } catch (ResponseException e) { + // 404 here means the endpoint was not created + if (e.getResponse().getStatusLine().getStatusCode() != 404) { + throw e; + } + } + } + + public static boolean clusterHasInferenceEndpoint(RestClient client) throws IOException { + Request request = new Request("GET", "_inference/sparse_embedding/test_sparse_inference"); + try { + client.performRequest(request); + } catch (ResponseException e) { + if (e.getResponse().getStatusLine().getStatusCode() == 404) { + return false; + } + throw e; + } + return true; + } + private static void loadEnrichPolicy(RestClient client, String policyName, String policyFileName, Logger logger) throws IOException { URL policyMapping = CsvTestsDataLoader.class.getResource("/" + policyFileName); if (policyMapping == null) { @@ -511,34 +560,79 @@ public record TestsDataset( String dataFileName, String settingFileName, boolean allowSubFields, - Map typeMapping + Map typeMapping, + boolean requiresInferenceEndpoint ) { public TestsDataset(String indexName, String mappingFileName, 
String dataFileName) { - this(indexName, mappingFileName, dataFileName, null, true, null); + this(indexName, mappingFileName, dataFileName, null, true, null, false); } public TestsDataset(String indexName) { - this(indexName, "mapping-" + indexName + ".json", indexName + ".csv", null, true, null); + this(indexName, "mapping-" + indexName + ".json", indexName + ".csv", null, true, null, false); } public TestsDataset withIndex(String indexName) { - return new TestsDataset(indexName, mappingFileName, dataFileName, settingFileName, allowSubFields, typeMapping); + return new TestsDataset( + indexName, + mappingFileName, + dataFileName, + settingFileName, + allowSubFields, + typeMapping, + requiresInferenceEndpoint + ); } public TestsDataset withData(String dataFileName) { - return new TestsDataset(indexName, mappingFileName, dataFileName, settingFileName, allowSubFields, typeMapping); + return new TestsDataset( + indexName, + mappingFileName, + dataFileName, + settingFileName, + allowSubFields, + typeMapping, + requiresInferenceEndpoint + ); } public TestsDataset withSetting(String settingFileName) { - return new TestsDataset(indexName, mappingFileName, dataFileName, settingFileName, allowSubFields, typeMapping); + return new TestsDataset( + indexName, + mappingFileName, + dataFileName, + settingFileName, + allowSubFields, + typeMapping, + requiresInferenceEndpoint + ); } public TestsDataset noSubfields() { - return new TestsDataset(indexName, mappingFileName, dataFileName, settingFileName, false, typeMapping); + return new TestsDataset( + indexName, + mappingFileName, + dataFileName, + settingFileName, + false, + typeMapping, + requiresInferenceEndpoint + ); } public TestsDataset withTypeMapping(Map typeMapping) { - return new TestsDataset(indexName, mappingFileName, dataFileName, settingFileName, allowSubFields, typeMapping); + return new TestsDataset( + indexName, + mappingFileName, + dataFileName, + settingFileName, + allowSubFields, + typeMapping, + 
requiresInferenceEndpoint + ); + } + + public TestsDataset withInferenceEndpoint(boolean needsInference) { + return new TestsDataset(indexName, mappingFileName, dataFileName, settingFileName, allowSubFields, typeMapping, needsInference); } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index f5bcb37c63e84..d71c66b4c467f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -648,7 +648,7 @@ public static Literal randomLiteral(DataType type) { case KEYWORD -> new BytesRef(randomAlphaOfLength(5)); case IP -> new BytesRef(InetAddressPoint.encode(randomIp(randomBoolean()))); case TIME_DURATION -> Duration.ofMillis(randomLongBetween(-604800000L, 604800000L)); // plus/minus 7 days - case TEXT -> new BytesRef(randomAlphaOfLength(50)); + case TEXT, SEMANTIC_TEXT -> new BytesRef(randomAlphaOfLength(50)); case VERSION -> randomVersion().toBytesRef(); case GEO_POINT -> GEO.asWkb(GeometryTestUtils.randomPoint()); case CARTESIAN_POINT -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint()); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-semantic_text.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-semantic_text.json new file mode 100644 index 0000000000000..b110d6fd4cdd5 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-semantic_text.json @@ -0,0 +1,73 @@ +{ + "properties": { + "semantic_text_field": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_bool": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_cartesian_point": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_cartesian_shape": { 
+ "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_datetime": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_double": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_geopoint": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_geoshape": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_integer": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_ip": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_long": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_unsigned_long": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_version": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_multi_value": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_unicode": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "host" : { + "type" : "keyword" + }, + "description" : { + "type" : "text" + }, + "value": { + "type": "long" + } + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/semantic_text.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/semantic_text.csv new file mode 100644 index 0000000000000..c6de9a208e9a7 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/semantic_text.csv @@ -0,0 +1,4 @@ +_id:keyword,semantic_text_field:semantic_text,st_bool:semantic_text,st_cartesian_point:semantic_text,st_cartesian_shape:semantic_text,st_datetime:semantic_text,st_double:semantic_text,st_geopoint:semantic_text,st_geoshape:semantic_text,st_integer:semantic_text,st_ip:semantic_text,st_long:semantic_text,st_unsigned_long:semantic_text,st_version:semantic_text,st_multi_value:semantic_text,st_unicode:semantic_text,host:keyword,description:text,value:long +1,live 
long and prosper,false,"POINT(4297.11 -1475.53)",,1953-09-02T00:00:00.000Z,5.20128E11,"POINT(42.97109630194 14.7552534413725)","POLYGON ((30 10\, 40 40\, 20 40\, 10 20\, 30 10))",23,1.1.1.1,2147483648,2147483648,1.2.3,["Hello there!", "This is a random value", "for testing purposes"],你吃饭了吗,"host1","some description1",1001 +2,all we have to decide is what to do with the time that is given to us,true,"POINT(7580.93 2272.77)",,2023-09-24T15:57:00.000Z,4541.11,"POINT(37.97109630194 21.7552534413725)","POLYGON ((30 10\, 40 40\, 20 40\, 10 20\, 30 10))",122,1.1.2.1,123,2147483648.2,9.0.0,["nice to meet you", "bye bye!"],["谢谢", "对不起我的中文不好"],"host2","some description2",1002 +3,be excellent to each other,,,,,,,,,,,,,,,"host3","some description3",1003 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/semantic_text.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/semantic_text.csv-spec new file mode 100644 index 0000000000000..683bcdc3f7490 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/semantic_text.csv-spec @@ -0,0 +1,175 @@ +simple +required_capability: semantic_text_type + +FROM semantic_text +| KEEP semantic_text_field +| sort semantic_text_field asc; + +semantic_text_field:semantic_text +all we have to decide is what to do with the time that is given to us +be excellent to each other +live long and prosper +; + +simpleWithUnicode +required_capability: semantic_text_type + +FROM semantic_text +| KEEP st_unicode +| SORT st_unicode +; + +st_unicode:semantic_text +你吃饭了吗 +["谢谢", "对不起我的中文不好"] +null +; + +mvExpand +required_capability: semantic_text_type + +FROM semantic_text METADATA _id +| KEEP _id, st_multi_value +| MV_EXPAND st_multi_value +| SORT st_multi_value +; + +_id:keyword | st_multi_value:semantic_text +1 | Hello there! +1 | This is a random value +2 | bye bye! 
+1 | for testing purposes +2 | nice to meet you +3 | null +; + +withDropAndKeep +required_capability: semantic_text_type + +FROM semantic_text METADATA _id +| KEEP _id, semantic_text_field, st_double +| DROP st_double +| SORT _id +; + +_id:keyword | semantic_text_field:semantic_text +1 | live long and prosper +2 | all we have to decide is what to do with the time that is given to us +3 | be excellent to each other +; + +rename +required_capability: semantic_text_type + +FROM semantic_text METADATA _id +| RENAME semantic_text_field AS my_field +| KEEP _id, my_field +| SORT _id +; + +_id:keyword | my_field:semantic_text +1 | live long and prosper +2 | all we have to decide is what to do with the time that is given to us +3 | be excellent to each other +; + +eval +required_capability: semantic_text_type + +FROM semantic_text METADATA _id +| EVAL my_field = semantic_text_field +| KEEP _id, my_field +| SORT _id +; + +_id:keyword | my_field:semantic_text +1 | live long and prosper +2 | all we have to decide is what to do with the time that is given to us +3 | be excellent to each other +; + +simpleStats +required_capability: semantic_text_type + +FROM semantic_text METADATA _id +| STATS COUNT(*) +; + +COUNT(*):long +3 +; + +statsWithGrouping +required_capability: semantic_text_type + +FROM semantic_text METADATA _id +| STATS COUNT(*) BY st_version +| SORT st_version +; + +COUNT(*):long | st_version:semantic_text +1 | 1.2.3 +1 | 9.0.0 +1 | null +; + +withDropKeepStatsMvExpandRenameSortLimit +required_capability: semantic_text_type + +FROM semantic_text METADATA _id +| KEEP _id, semantic_text_field, st_multi_value +| DROP semantic_text_field +| RENAME st_multi_value AS my_field +| MV_EXPAND my_field +| STATS COUNT(*) BY my_field +| SORT my_field +| LIMIT 3 +; + +COUNT(*):long | my_field:semantic_text +1 | Hello there! +1 | This is a random value +1 | bye bye! 
+; + +simpleWithLongValue +required_capability: semantic_text_type + +FROM semantic_text +| KEEP value, semantic_text_field +| SORT value +; + +value:long | semantic_text_field:semantic_text +1001 | live long and prosper +1002 | all we have to decide is what to do with the time that is given to us +1003 | be excellent to each other +; + +simpleWithText +required_capability: semantic_text_type + +FROM semantic_text METADATA _id +| KEEP description, semantic_text_field +| SORT description +; + +description:text | semantic_text_field:semantic_text +"some description1" | live long and prosper +"some description2" | all we have to decide is what to do with the time that is given to us +"some description3" | be excellent to each other +; + +simpleWithKeyword +required_capability: semantic_text_type + +FROM semantic_text METADATA _id +| KEEP host, semantic_text_field +| SORT host +; + +host:keyword | semantic_text_field:semantic_text +"host1" | live long and prosper +"host2" | all we have to decide is what to do with the time that is given to us +"host3" | be excellent to each other +; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index adfba4c487618..3c39406198da3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -395,7 +395,11 @@ public enum Cap { /** * Adding stats for functions (stack telemetry) */ - FUNCTION_STATS; + FUNCTION_STATS, + /** + * Support for semantic_text field mapping + */ + SEMANTIC_TEXT_TYPE(EsqlCorePlugin.SEMANTIC_TEXT_FEATURE_FLAG); private final boolean snapshotOnly; private final FeatureFlag featureFlag; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java index 0b1bafdab1a99..0def56c70dc35 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java @@ -89,7 +89,7 @@ protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Pa return builder.value(unsignedLongAsNumber(l)); } }; - case KEYWORD, TEXT -> new PositionToXContent(block) { + case KEYWORD, SEMANTIC_TEXT, TEXT -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java index 3b18bda120e2e..49fcc167dce0f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java @@ -114,7 +114,7 @@ private static Object valueAt(DataType dataType, Block block, int offset, BytesR case LONG, COUNTER_LONG -> ((LongBlock) block).getLong(offset); case INTEGER, COUNTER_INTEGER -> ((IntBlock) block).getInt(offset); case DOUBLE, COUNTER_DOUBLE -> ((DoubleBlock) block).getDouble(offset); - case KEYWORD, TEXT -> ((BytesRefBlock) block).getBytesRef(offset, scratch).utf8ToString(); + case KEYWORD, SEMANTIC_TEXT, TEXT -> ((BytesRefBlock) block).getBytesRef(offset, scratch).utf8ToString(); case IP -> { BytesRef val = ((BytesRefBlock) block).getBytesRef(offset, scratch); yield ipToString(val); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index b28c80211c649..dc732258d9fa5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -349,7 +349,7 @@ private PhysicalOperation planTopN(TopNExec topNExec, LocalExecutionPlannerConte elementTypes[channel] = PlannerUtils.toElementType(inverse.get(channel).type()); encoders[channel] = switch (inverse.get(channel).type()) { case IP -> TopNEncoder.IP; - case TEXT, KEYWORD -> TopNEncoder.UTF8; + case TEXT, KEYWORD, SEMANTIC_TEXT -> TopNEncoder.UTF8; case VERSION -> TopNEncoder.VERSION; case BOOLEAN, NULL, BYTE, SHORT, INTEGER, LONG, DOUBLE, FLOAT, HALF_FLOAT, DATETIME, DATE_NANOS, DATE_PERIOD, TIME_DURATION, OBJECT, SCALED_FLOAT, UNSIGNED_LONG, DOC_DATA_TYPE, TSID_DATA_TYPE -> TopNEncoder.DEFAULT_SORTABLE; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index 7beed64dda8cb..7868984d6b6e2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -247,7 +247,7 @@ public static ElementType toElementType(DataType dataType, MappedFieldType.Field case INTEGER, COUNTER_INTEGER -> ElementType.INT; case DOUBLE, COUNTER_DOUBLE -> ElementType.DOUBLE; // unsupported fields are passed through as a BytesRef - case KEYWORD, TEXT, IP, SOURCE, VERSION, UNSUPPORTED -> ElementType.BYTES_REF; + case KEYWORD, TEXT, IP, SOURCE, VERSION, SEMANTIC_TEXT, UNSUPPORTED -> ElementType.BYTES_REF; case NULL -> ElementType.NULL; case BOOLEAN -> ElementType.BOOLEAN; case DOC_DATA_TYPE -> ElementType.DOC; diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index b147cfde21721..27343bf7ce205 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -193,7 +193,7 @@ private Page randomPage(List columns) { case INTEGER, COUNTER_INTEGER -> ((IntBlock.Builder) builder).appendInt(randomInt()); case DOUBLE, COUNTER_DOUBLE -> ((DoubleBlock.Builder) builder).appendDouble(randomDouble()); case KEYWORD -> ((BytesRefBlock.Builder) builder).appendBytesRef(new BytesRef(randomAlphaOfLength(10))); - case TEXT -> ((BytesRefBlock.Builder) builder).appendBytesRef(new BytesRef(randomAlphaOfLength(10000))); + case TEXT, SEMANTIC_TEXT -> ((BytesRefBlock.Builder) builder).appendBytesRef(new BytesRef(randomAlphaOfLength(10000))); case IP -> ((BytesRefBlock.Builder) builder).appendBytesRef( new BytesRef(InetAddressPoint.encode(randomIp(randomBoolean()))) ); @@ -866,7 +866,7 @@ static Page valuesToPage(BlockFactory blockFactory, List columns case LONG, COUNTER_LONG -> ((LongBlock.Builder) builder).appendLong(((Number) value).longValue()); case INTEGER, COUNTER_INTEGER -> ((IntBlock.Builder) builder).appendInt(((Number) value).intValue()); case DOUBLE, COUNTER_DOUBLE -> ((DoubleBlock.Builder) builder).appendDouble(((Number) value).doubleValue()); - case KEYWORD, TEXT -> ((BytesRefBlock.Builder) builder).appendBytesRef(new BytesRef(value.toString())); + case KEYWORD, TEXT, SEMANTIC_TEXT -> ((BytesRefBlock.Builder) builder).appendBytesRef(new BytesRef(value.toString())); case UNSUPPORTED -> ((BytesRefBlock.Builder) builder).appendNull(); case IP -> ((BytesRefBlock.Builder) builder).appendBytesRef(stringToIP(value.toString())); case DATETIME -> { diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java index 9d0d9c3da30a8..db3fce244c9a8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java @@ -59,7 +59,7 @@ public class CaseTests extends AbstractScalarFunctionTestCase { DataType.NULL ).collect(Collectors.toList()); if (Build.current().isSnapshot()) { - t.addAll(DataType.UNDER_CONSTRUCTION.keySet()); + t.add(DataType.DATE_NANOS); } TYPES = unmodifiableList(t); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index ce0b3a099d472..fb18cfb4959c7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -23,6 +23,8 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.mapper.BlockLoader; +import org.elasticsearch.index.mapper.BlockSourceReader; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.DocumentParsingException; import org.elasticsearch.index.mapper.FieldMapper; @@ -611,6 +613,13 @@ private String generateInvalidQueryInferenceResultsMessage(StringBuilder baseMes return baseMessageBuilder.toString(); } + + @Override + public BlockLoader blockLoader(MappedFieldType.BlockLoaderContext blContext) 
{ + SourceValueFetcher fetcher = SourceValueFetcher.toString(blContext.sourcePaths(name().concat(".text"))); + var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); + return new BlockSourceReader.BytesRefsBlockLoader(fetcher, BlockSourceReader.lookupMatchingAll(), sourceMode); + } } /** diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml index e100f30717aef..049895bc9f31a 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml @@ -504,3 +504,52 @@ double nested declared in mapping: # The `nested` field is not visible, nor are any of it's subfields. - match: { columns: [{name: name, type: keyword}] } + +--- +semantic_text declared in mapping: + - requires: + test_runner_features: [ capabilities ] + capabilities: + - method: POST + path: /_query + parameters: [ ] + capabilities: [ semantic_text_type ] + reason: "support for semantic_text type" + - do: + indices.create: + index: test_semantic_text + body: + settings: + number_of_shards: 5 + mappings: + properties: + semantic_text_field: + type: semantic_text + inference_id: my_inference_id + - do: + bulk: + index: test_semantic_text + refresh: true + body: + - { "index": { } } + - { + "semantic_text_field": { + "text": "be excellent to each other", + "inference": { + "inference_id": "my_inference_id", + "model_settings": { + "task_type": "sparse_embedding" + }, + "chunks": [{ "text": "be excellent to each other", "embeddings": { "a": 1,"b": 2 } }] + } + } + } + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test_semantic_text' + - match: { columns: [{name: semantic_text_field, type: semantic_text}] } + - length: { values: 1 } + - match: { 
values.0: ["be excellent to each other"] } From 4f08b7e57876cf87ffccd08f3e2b459800b71cf5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Mon, 21 Oct 2024 16:04:54 +0200 Subject: [PATCH 253/449] Update APM Java Agent to support JDK 23 (#115194) --- docs/changelog/115194.yaml | 7 +++++++ gradle/verification-metadata.xml | 6 +++--- modules/apm/build.gradle | 2 +- 3 files changed, 11 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/115194.yaml diff --git a/docs/changelog/115194.yaml b/docs/changelog/115194.yaml new file mode 100644 index 0000000000000..0b201b9f89aa5 --- /dev/null +++ b/docs/changelog/115194.yaml @@ -0,0 +1,7 @@ +pr: 115194 +summary: Update APM Java Agent to support JDK 23 +area: Infra/Metrics +type: upgrade +issues: + - 115101 + - 115100 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 4d9b96184d07a..e2dfa89c8f3b8 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -69,9 +69,9 @@ - - - + + + diff --git a/modules/apm/build.gradle b/modules/apm/build.gradle index 4c822e44da6f6..b510e2403e933 100644 --- a/modules/apm/build.gradle +++ b/modules/apm/build.gradle @@ -19,7 +19,7 @@ dependencies { implementation "io.opentelemetry:opentelemetry-api:${otelVersion}" implementation "io.opentelemetry:opentelemetry-context:${otelVersion}" implementation "io.opentelemetry:opentelemetry-semconv:${otelSemconvVersion}" - runtimeOnly "co.elastic.apm:elastic-apm-agent:1.44.0" + runtimeOnly "co.elastic.apm:elastic-apm-agent:1.52.0" } tasks.named("dependencyLicenses").configure { From 1cf8d496c80afcba0d32f4b501927fecb10efc23 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Mon, 21 Oct 2024 15:09:20 +0100 Subject: [PATCH 254/449] [ML] Do not create the .inference index as a side effect of calling usage (#115023) The Inference usage API calls GET _inference/_all and because the default configs are persisted on read it causes the creation of the .inference index. 
This action is undesirable and causes test failures by leaking the system index out of the test clean up code. --- muted-tests.yml | 15 ------ .../org/elasticsearch/TransportVersions.java | 1 + .../test/rest/ESRestTestCase.java | 2 - .../action/GetInferenceModelAction.java | 34 +++++++++++- .../integration/ModelRegistryIT.java | 54 +++++++++++++++++-- .../TransportGetInferenceModelAction.java | 5 +- .../action/TransportInferenceUsageAction.java | 2 +- .../inference/registry/ModelRegistry.java | 45 ++++++++++++---- .../action/GetInferenceModelRequestTests.java | 9 +++- 9 files changed, 129 insertions(+), 38 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 1dda90369ce76..1818c55f16fdf 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -297,12 +297,6 @@ tests: - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/rest-api/usage/line_38} issue: https://github.com/elastic/elasticsearch/issues/113694 -- class: org.elasticsearch.xpack.eql.EqlRestIT - method: testIndexWildcardPatterns - issue: https://github.com/elastic/elasticsearch/issues/114749 -- class: org.elasticsearch.xpack.enrich.EnrichIT - method: testEnrichSpecialTypes - issue: https://github.com/elastic/elasticsearch/issues/114773 - class: org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT method: testEveryActionIsEitherOperatorOnlyOrNonOperator issue: https://github.com/elastic/elasticsearch/issues/102992 @@ -312,23 +306,14 @@ tests: - class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityWithApmTracingRestIT method: testTracingCrossCluster issue: https://github.com/elastic/elasticsearch/issues/112731 -- class: org.elasticsearch.xpack.enrich.EnrichIT - method: testImmutablePolicy - issue: https://github.com/elastic/elasticsearch/issues/114839 - class: org.elasticsearch.license.LicensingTests issue: https://github.com/elastic/elasticsearch/issues/114865 -- class: org.elasticsearch.xpack.enrich.EnrichIT - method: 
testDeleteIsCaseSensitive - issue: https://github.com/elastic/elasticsearch/issues/114840 - class: org.elasticsearch.packaging.test.EnrollmentProcessTests method: test20DockerAutoFormCluster issue: https://github.com/elastic/elasticsearch/issues/114885 - class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT method: test {yaml=cluster.stats/30_ccs_stats/cross-cluster search stats search} issue: https://github.com/elastic/elasticsearch/issues/114902 -- class: org.elasticsearch.xpack.enrich.EnrichRestIT - method: test {p0=enrich/40_synthetic_source/enrich documents over _bulk} - issue: https://github.com/elastic/elasticsearch/issues/114825 - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT method: testInferDeploysDefaultElser issue: https://github.com/elastic/elasticsearch/issues/114913 diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index d85990b4ede8c..cde09d33516c9 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -177,6 +177,7 @@ static TransportVersion def(int id) { public static final TransportVersion REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_773_00_0); public static final TransportVersion REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_774_00_0); public static final TransportVersion ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED = def(8_775_00_0); + public static final TransportVersion INFERENCE_DONT_PERSIST_ON_READ = def(8_776_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index e5b23158d4fd4..d17016f850300 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -1121,8 +1121,6 @@ protected static void wipeAllIndices(boolean preserveSecurityIndices) throws IOE if (preserveSecurityIndices) { indexPatterns.add("-.security-*"); } - // always preserve inference index - indexPatterns.add("-.inference"); final Request deleteRequest = new Request("DELETE", Strings.collectionToCommaDelimitedString(indexPatterns)); deleteRequest.addParameter("expand_wildcards", "open,closed,hidden"); final Response response = adminClient().performRequest(deleteRequest); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java index 5a779ada4e182..6e06133509644 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java @@ -34,19 +34,40 @@ public GetInferenceModelAction() { public static class Request extends AcknowledgedRequest { + private static boolean PERSIST_DEFAULT_CONFIGS = true; + private final String inferenceEntityId; private final TaskType taskType; + // Default endpoint configurations are persisted on first read. + // Set to false to avoid persisting on read. + // This setting only applies to GET * requests. 
It has + // no effect when getting a single model + private final boolean persistDefaultConfig; public Request(String inferenceEntityId, TaskType taskType) { super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.inferenceEntityId = Objects.requireNonNull(inferenceEntityId); this.taskType = Objects.requireNonNull(taskType); + this.persistDefaultConfig = PERSIST_DEFAULT_CONFIGS; + } + + public Request(String inferenceEntityId, TaskType taskType, boolean persistDefaultConfig) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + this.inferenceEntityId = Objects.requireNonNull(inferenceEntityId); + this.taskType = Objects.requireNonNull(taskType); + this.persistDefaultConfig = persistDefaultConfig; } public Request(StreamInput in) throws IOException { super(in); this.inferenceEntityId = in.readString(); this.taskType = TaskType.fromStream(in); + if (in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_DONT_PERSIST_ON_READ)) { + this.persistDefaultConfig = in.readBoolean(); + } else { + this.persistDefaultConfig = PERSIST_DEFAULT_CONFIGS; + } + } public String getInferenceEntityId() { @@ -57,11 +78,18 @@ public TaskType getTaskType() { return taskType; } + public boolean isPersistDefaultConfig() { + return persistDefaultConfig; + } + @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(inferenceEntityId); taskType.writeTo(out); + if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_DONT_PERSIST_ON_READ)) { + out.writeBoolean(this.persistDefaultConfig); + } } @Override @@ -69,12 +97,14 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Request request = (Request) o; - return Objects.equals(inferenceEntityId, request.inferenceEntityId) && taskType == request.taskType; + return Objects.equals(inferenceEntityId, request.inferenceEntityId) + && taskType == request.taskType + && 
persistDefaultConfig == request.persistDefaultConfig; } @Override public int hashCode() { - return Objects.hash(inferenceEntityId, taskType); + return Objects.hash(inferenceEntityId, taskType, persistDefaultConfig); } } diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java index e62cdcdc7fd2a..8713511c2f5f2 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.Model; @@ -251,7 +252,7 @@ public void testGetAllModels() throws InterruptedException { } AtomicReference> modelHolder = new AtomicReference<>(); - blockingCall(listener -> modelRegistry.getAllModels(listener), modelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.getAllModels(randomBoolean(), listener), modelHolder, exceptionHolder); assertNull(exceptionHolder.get()); assertThat(modelHolder.get(), hasSize(modelCount)); var getAllModels = modelHolder.get(); @@ -333,14 +334,14 @@ public void testGetAllModels_WithDefaults() throws Exception { } AtomicReference> modelHolder = new AtomicReference<>(); - blockingCall(listener -> modelRegistry.getAllModels(listener), modelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.getAllModels(randomBoolean(), listener), modelHolder, exceptionHolder); 
assertNull(exceptionHolder.get()); assertThat(modelHolder.get(), hasSize(totalModelCount)); var getAllModels = modelHolder.get(); assertReturnModelIsModifiable(modelHolder.get().get(0)); // same result but configs should have been persisted this time - blockingCall(listener -> modelRegistry.getAllModels(listener), modelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.getAllModels(randomBoolean(), listener), modelHolder, exceptionHolder); assertNull(exceptionHolder.get()); assertThat(modelHolder.get(), hasSize(totalModelCount)); @@ -387,7 +388,7 @@ public void testGetAllModels_OnlyDefaults() throws Exception { AtomicReference exceptionHolder = new AtomicReference<>(); AtomicReference> modelHolder = new AtomicReference<>(); - blockingCall(listener -> modelRegistry.getAllModels(listener), modelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.getAllModels(randomBoolean(), listener), modelHolder, exceptionHolder); assertNull(exceptionHolder.get()); assertThat(modelHolder.get(), hasSize(2)); var getAllModels = modelHolder.get(); @@ -405,6 +406,44 @@ public void testGetAllModels_OnlyDefaults() throws Exception { } } + public void testGetAllModels_withDoNotPersist() throws Exception { + int defaultModelCount = 2; + var serviceName = "foo"; + var service = mock(InferenceService.class); + + var defaultConfigs = new ArrayList(); + var defaultIds = new ArrayList(); + for (int i = 0; i < defaultModelCount; i++) { + var id = "default-" + i; + var taskType = randomFrom(TaskType.values()); + defaultConfigs.add(createModel(id, taskType, serviceName)); + defaultIds.add(new InferenceService.DefaultConfigId(id, taskType, service)); + } + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + var listener = (ActionListener>) invocation.getArguments()[0]; + listener.onResponse(defaultConfigs); + return Void.TYPE; + }).when(service).defaultConfigs(any()); + + defaultIds.forEach(modelRegistry::addDefaultIds); + + AtomicReference exceptionHolder = 
new AtomicReference<>(); + AtomicReference> modelHolder = new AtomicReference<>(); + blockingCall(listener -> modelRegistry.getAllModels(false, listener), modelHolder, exceptionHolder); + assertNull(exceptionHolder.get()); + assertThat(modelHolder.get(), hasSize(2)); + + expectThrows(IndexNotFoundException.class, () -> client().admin().indices().prepareGetIndex().addIndices(".inference").get()); + + // this time check the index is created + blockingCall(listener -> modelRegistry.getAllModels(true, listener), modelHolder, exceptionHolder); + assertNull(exceptionHolder.get()); + assertThat(modelHolder.get(), hasSize(2)); + assertInferenceIndexExists(); + } + public void testGet_WithDefaults() throws InterruptedException { var serviceName = "foo"; var service = mock(InferenceService.class); @@ -513,6 +552,12 @@ public void testGetByTaskType_WithDefaults() throws Exception { assertReturnModelIsModifiable(modelHolder.get().get(0)); } + private void assertInferenceIndexExists() { + var indexResponse = client().admin().indices().prepareGetIndex().addIndices(".inference").get(); + assertNotNull(indexResponse.getSettings()); + assertNotNull(indexResponse.getMappings()); + } + @SuppressWarnings("unchecked") private void assertReturnModelIsModifiable(UnparsedModel unparsedModel) { var settings = unparsedModel.settings(); @@ -551,7 +596,6 @@ private Model buildElserModelConfig(String inferenceEntityId, TaskType taskType) ); default -> throw new IllegalArgumentException("task type " + taskType + " is not supported"); }; - } protected void blockingCall(Consumer> function, AtomicReference response, AtomicReference error) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java index 5ee1e40869dbc..edcec45b50a16 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java @@ -69,7 +69,7 @@ protected void doExecute( boolean inferenceEntityIdIsWildCard = Strings.isAllOrWildcard(request.getInferenceEntityId()); if (request.getTaskType() == TaskType.ANY && inferenceEntityIdIsWildCard) { - getAllModels(listener); + getAllModels(request.isPersistDefaultConfig(), listener); } else if (inferenceEntityIdIsWildCard) { getModelsByTaskType(request.getTaskType(), listener); } else { @@ -100,8 +100,9 @@ private void getSingleModel( })); } - private void getAllModels(ActionListener listener) { + private void getAllModels(boolean persistDefaultEndpoints, ActionListener listener) { modelRegistry.getAllModels( + persistDefaultEndpoints, listener.delegateFailureAndWrap((l, models) -> executor.execute(ActionRunnable.supply(l, () -> parseModels(models)))) ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceUsageAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceUsageAction.java index 624afff9f5d11..7b7475efac334 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceUsageAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceUsageAction.java @@ -63,7 +63,7 @@ protected void masterOperation( ClusterState state, ActionListener listener ) { - GetInferenceModelAction.Request getInferenceModelAction = new GetInferenceModelAction.Request("_all", TaskType.ANY); + GetInferenceModelAction.Request getInferenceModelAction = new GetInferenceModelAction.Request("_all", TaskType.ANY, false); client.execute(GetInferenceModelAction.INSTANCE, getInferenceModelAction, 
listener.delegateFailureAndWrap((delegate, response) -> { Map stats = new TreeMap<>(); for (ModelConfigurations model : response.getEndpoints()) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java index 260d4e663dafd..4506a05d58054 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java @@ -69,6 +69,17 @@ import static org.elasticsearch.core.Strings.format; +/** + * Class for persisting and reading inference endpoint configurations. + * Some inference services provide default configurations, the registry is + * made aware of these at start up via {@link #addDefaultIds(InferenceService.DefaultConfigId)}. + * Only the ids and service details are registered at this point + * as the full config definition may not be known at start up. + * The full config is lazily populated on read and persisted to the + * index. This has the effect of creating the backing index on reading + * the configs. {@link #getAllModels(boolean, ActionListener)} has an option + * to not write the default configs to index on read to avoid index creation. 
+ */ public class ModelRegistry { public record ModelConfigMap(Map config, Map secrets) {} @@ -132,7 +143,7 @@ public void getModelWithSecrets(String inferenceEntityId, ActionListener lis if (searchResponse.getHits().getHits().length == 0) { var maybeDefault = idMatchedDefault(inferenceEntityId, defaultConfigIds); if (maybeDefault.isPresent()) { - getDefaultConfig(maybeDefault.get(), listener); + getDefaultConfig(true, maybeDefault.get(), listener); } else { delegate.onFailure(inferenceNotFoundException(inferenceEntityId)); } @@ -199,7 +210,7 @@ public void getModelsByTaskType(TaskType taskType, ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { var modelConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(ModelRegistry::unparsedModelFromMap).toList(); var defaultConfigsForTaskType = taskTypeMatchedDefaults(taskType, defaultConfigIds); - addAllDefaultConfigsIfMissing(modelConfigs, defaultConfigsForTaskType, delegate); + addAllDefaultConfigsIfMissing(true, modelConfigs, defaultConfigsForTaskType, delegate); }); QueryBuilder queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.termsQuery(TASK_TYPE_FIELD, taskType.toString())); @@ -216,13 +227,20 @@ public void getModelsByTaskType(TaskType taskType, ActionListener> listener) { + public void getAllModels(boolean persistDefaultEndpoints, ActionListener> listener) { ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { var foundConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(ModelRegistry::unparsedModelFromMap).toList(); - addAllDefaultConfigsIfMissing(foundConfigs, defaultConfigIds, delegate); + addAllDefaultConfigsIfMissing(persistDefaultEndpoints, foundConfigs, defaultConfigIds, delegate); }); // In theory the index should only contain model config documents @@ -241,6 +259,7 @@ public void getAllModels(ActionListener> listener) { } private void addAllDefaultConfigsIfMissing( + boolean 
persistDefaultEndpoints, List foundConfigs, List matchedDefaults, ActionListener> listener @@ -263,18 +282,26 @@ private void addAllDefaultConfigsIfMissing( ); for (var required : missing) { - getDefaultConfig(required, groupedListener); + getDefaultConfig(persistDefaultEndpoints, required, groupedListener); } } } - private void getDefaultConfig(InferenceService.DefaultConfigId defaultConfig, ActionListener listener) { + private void getDefaultConfig( + boolean persistDefaultEndpoints, + InferenceService.DefaultConfigId defaultConfig, + ActionListener listener + ) { defaultConfig.service().defaultConfigs(listener.delegateFailureAndWrap((delegate, models) -> { boolean foundModel = false; for (var m : models) { if (m.getInferenceEntityId().equals(defaultConfig.inferenceId())) { foundModel = true; - storeDefaultEndpoint(m, () -> listener.onResponse(modelToUnparsedModel(m))); + if (persistDefaultEndpoints) { + storeDefaultEndpoint(m, () -> listener.onResponse(modelToUnparsedModel(m))); + } else { + listener.onResponse(modelToUnparsedModel(m)); + } break; } } @@ -287,7 +314,7 @@ private void getDefaultConfig(InferenceService.DefaultConfigId defaultConfig, Ac })); } - public void storeDefaultEndpoint(Model preconfigured, Runnable runAfter) { + private void storeDefaultEndpoint(Model preconfigured, Runnable runAfter) { var responseListener = ActionListener.wrap(success -> { logger.debug("Added default inference endpoint [{}]", preconfigured.getInferenceEntityId()); }, exception -> { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelRequestTests.java index 93694f167259f..314b3037fdd63 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelRequestTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelRequestTests.java @@ -15,7 +15,7 @@ public class GetInferenceModelRequestTests extends AbstractWireSerializingTestCase { public static GetInferenceModelAction.Request randomTestInstance() { - return new GetInferenceModelAction.Request(randomAlphaOfLength(8), randomFrom(TaskType.values())); + return new GetInferenceModelAction.Request(randomAlphaOfLength(8), randomFrom(TaskType.values()), randomBoolean()); } @Override @@ -30,12 +30,17 @@ protected GetInferenceModelAction.Request createTestInstance() { @Override protected GetInferenceModelAction.Request mutateInstance(GetInferenceModelAction.Request instance) { - return switch (randomIntBetween(0, 1)) { + return switch (randomIntBetween(0, 2)) { case 0 -> new GetInferenceModelAction.Request(instance.getInferenceEntityId() + "foo", instance.getTaskType()); case 1 -> { var nextTaskType = TaskType.values()[(instance.getTaskType().ordinal() + 1) % TaskType.values().length]; yield new GetInferenceModelAction.Request(instance.getInferenceEntityId(), nextTaskType); } + case 2 -> new GetInferenceModelAction.Request( + instance.getInferenceEntityId(), + instance.getTaskType(), + instance.isPersistDefaultConfig() == false + ); default -> throw new UnsupportedOperationException(); }; } From b0e3b79ad27d3131478ad907819f26786818db1a Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Mon, 21 Oct 2024 16:34:04 +0200 Subject: [PATCH 255/449] ES|QL: relax tests on usage stats (#115214) --- .../yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml index e1fd9b0201a35..7d1a4e123299b 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml +++ 
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml @@ -83,4 +83,3 @@ setup: - match: {esql.functions.cos: $functions_cos} - gt: {esql.functions.to_long: $functions_to_long} - match: {esql.functions.coalesce: $functions_coalesce} - - length: {esql.functions: 117} From deef8c7a9bb65b78685d4e5e31a47b1c928924b9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20Fred=C3=A9n?= <109296772+jfreden@users.noreply.github.com> Date: Mon, 21 Oct 2024 16:41:28 +0200 Subject: [PATCH 256/449] [DOCS] Add DLS multi-match limitation (#115003) --- docs/reference/security/limitations.asciidoc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/reference/security/limitations.asciidoc b/docs/reference/security/limitations.asciidoc index 96af0e01c8075..b1bdd8cbbf5d5 100644 --- a/docs/reference/security/limitations.asciidoc +++ b/docs/reference/security/limitations.asciidoc @@ -81,12 +81,13 @@ including the following queries: * A search request cannot be profiled if document level security is enabled. * The <> does not return terms if document level security is enabled. +* The <> query does not support specifying fields using wildcards. NOTE: While document-level security prevents users from viewing restricted documents, it's still possible to write search requests that return aggregate information about the entire index. A user whose access is restricted to specific documents in an index could still learn about field names and terms that only exist in inaccessible -documents, and count how many inaccessible documents contain a given term. +documents, and count how many inaccessible documents contain a given term. 
[discrete] [[alias-limitations]] From 5e761fe4d0733c3cdc6d60c5f955dcd9086c1843 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Mon, 21 Oct 2024 17:43:45 +0300 Subject: [PATCH 257/449] Change backwards test configuration to use trial license (#115226) * Change backwards test configuration to use trial license * unmute --- muted-tests.yml | 2 -- qa/mixed-cluster/build.gradle | 1 + 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 1818c55f16fdf..4dc177ef001fd 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -346,8 +346,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/115129 - class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT issue: https://github.com/elastic/elasticsearch/issues/115135 -- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT - issue: https://github.com/elastic/elasticsearch/issues/115213 - class: org.elasticsearch.xpack.esql.expression.function.scalar.string.ReverseTests method: testEvaluateInManyThreads {TestCase=} issue: https://github.com/elastic/elasticsearch/issues/115227 diff --git a/qa/mixed-cluster/build.gradle b/qa/mixed-cluster/build.gradle index a5b7ae8d703ea..23d7af7603d56 100644 --- a/qa/mixed-cluster/build.gradle +++ b/qa/mixed-cluster/build.gradle @@ -71,6 +71,7 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> numberOfNodes = 4 setting 'path.repo', "${buildDir}/cluster/shared/repo/${baseName}" setting 'xpack.security.enabled', 'false' + setting "xpack.license.self_generated.type", "trial" /* There is a chance we have more master changes than "normal", so to avoid this test from failing, we increase the threshold (as this purpose of this test isn't to test that specific indicator). 
*/ if (bwcVersion.onOrAfter(Version.fromString("8.4.0"))) { From 8c378754ab3ee25725cc08810b04237e5ee022e6 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Mon, 21 Oct 2024 16:07:06 +0100 Subject: [PATCH 258/449] Remove ChunkedToXContentHelper.array method, swap for ChunkedToXContentBuilder (#114319) --- .../cluster/metadata/IndexGraveyard.java | 6 +- .../AbstractAllocationDecision.java | 7 +- .../AllocateUnassignedDecision.java | 6 +- .../routing/allocation/MoveDecision.java | 6 +- .../xcontent/ChunkedToXContentBuilder.java | 22 ++- .../xcontent/ChunkedToXContentHelper.java | 4 - .../org/elasticsearch/health/Diagnosis.java | 58 +++----- .../health/HealthIndicatorResult.java | 45 +++--- .../elasticsearch/ingest/IngestMetadata.java | 6 +- .../PersistentTasksCustomMetadata.java | 10 +- .../org/elasticsearch/script/ScriptStats.java | 38 ++--- .../org/elasticsearch/search/SearchHits.java | 31 ++-- .../health/HealthIndicatorResultTests.java | 6 +- .../results/ChatCompletionResults.java | 4 +- ...nferenceChunkedSparseEmbeddingResults.java | 4 +- ...erenceChunkedTextEmbeddingByteResults.java | 4 +- ...renceChunkedTextEmbeddingFloatResults.java | 4 +- .../InferenceTextEmbeddingByteResults.java | 4 +- .../InferenceTextEmbeddingFloatResults.java | 4 +- .../inference/results/RankedDocsResults.java | 4 +- .../results/SparseEmbeddingResults.java | 4 +- .../compute/operator/DriverProfile.java | 39 +++-- .../action/GetFlamegraphResponse.java | 138 +++++++----------- 23 files changed, 197 insertions(+), 257 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java index 320be8acb0af9..62867b4260bfd 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java @@ -19,7 +19,7 @@ import org.elasticsearch.common.settings.Setting; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.index.Index; import org.elasticsearch.xcontent.ContextParser; import org.elasticsearch.xcontent.ObjectParser; @@ -128,8 +128,8 @@ public boolean containsIndex(final Index index) { } @Override - public Iterator toXContentChunked(ToXContent.Params ignored) { - return ChunkedToXContentHelper.array(TOMBSTONES_FIELD.getPreferredName(), tombstones.iterator()); + public Iterator toXContentChunked(ToXContent.Params params) { + return ChunkedToXContent.builder(params).array(TOMBSTONES_FIELD.getPreferredName(), tombstones.iterator()); } public static IndexGraveyard fromXContent(final XContentParser parser) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AbstractAllocationDecision.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AbstractAllocationDecision.java index 7bb97faa6b2d0..827cc378ef3a9 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AbstractAllocationDecision.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AbstractAllocationDecision.java @@ -12,6 +12,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.routing.allocation.decider.Decision.Type; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -140,7 +141,11 @@ public static Iterator nodeDecisionsToXContentChunked(List toXContentChunked(ToXContent.Params params) { checkDecisionState(); - return Iterators.concat(Iterators.single((builder, p) -> { + return 
ChunkedToXContent.builder(params).append((builder, p) -> { builder.field("can_allocate", getAllocationDecision()); builder.field("allocate_explanation", getExplanation()); if (targetNode != null) { @@ -320,7 +320,7 @@ public Iterator toXContentChunked(ToXContent.Params params ); } return builder; - }), nodeDecisionsToXContentChunked(nodeDecisions)); + }).append(nodeDecisionsToXContentChunked(nodeDecisions)); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java index 891818b8e68f7..5dfac293de491 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java @@ -12,9 +12,9 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.cluster.routing.allocation.decider.Decision.Type; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ToXContent; @@ -260,7 +260,7 @@ public String getExplanation() { @Override public Iterator toXContentChunked(ToXContent.Params params) { checkDecisionState(); - return Iterators.concat(Iterators.single((builder, p) -> { + return ChunkedToXContent.builder(params).append((builder, p) -> { if (targetNode != null) { builder.startObject("target_node"); discoveryNodeToXContent(targetNode, true, builder); @@ -289,7 +289,7 @@ public Iterator toXContentChunked(ToXContent.Params params builder.field("move_explanation", getExplanation()); } return builder; - }), nodeDecisionsToXContentChunked(nodeDecisions)); + 
}).append(nodeDecisionsToXContentChunked(nodeDecisions)); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilder.java b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilder.java index 0102e58c7c1dc..a3141bff7c6e2 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilder.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilder.java @@ -248,7 +248,7 @@ private void endArray() { addChunk((b, p) -> b.endArray()); } - public ChunkedToXContentBuilder array(String name, String... values) { + public ChunkedToXContentBuilder array(String name, String[] values) { addChunk((b, p) -> b.array(name, values)); return this; } @@ -350,6 +350,26 @@ public ChunkedToXContentBuilder field(String name, Long value) { return this; } + public ChunkedToXContentBuilder field(String name, float value) { + addChunk((b, p) -> b.field(name, value)); + return this; + } + + public ChunkedToXContentBuilder field(String name, Float value) { + addChunk((b, p) -> b.field(name, value)); + return this; + } + + public ChunkedToXContentBuilder field(String name, double value) { + addChunk((b, p) -> b.field(name, value)); + return this; + } + + public ChunkedToXContentBuilder field(String name, Double value) { + addChunk((b, p) -> b.field(name, value)); + return this; + } + public ChunkedToXContentBuilder field(String name, String value) { addChunk((b, p) -> b.field(name, value)); return this; diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java index fcbe0ac2b2edb..2e78cc6f516b1 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java @@ -53,10 +53,6 @@ public static Iterator field(String name, String 
value) { return Iterators.single(((builder, params) -> builder.field(name, value))); } - public static Iterator array(String name, Iterator contents) { - return Iterators.concat(ChunkedToXContentHelper.startArray(name), contents, ChunkedToXContentHelper.endArray()); - } - /** * Creates an Iterator of a single ToXContent object that serializes the given object as a single chunk. Just wraps {@link * Iterators#single}, but still useful because it avoids any type ambiguity. diff --git a/server/src/main/java/org/elasticsearch/health/Diagnosis.java b/server/src/main/java/org/elasticsearch/health/Diagnosis.java index 41301e2d52a53..b1af4a1c383da 100644 --- a/server/src/main/java/org/elasticsearch/health/Diagnosis.java +++ b/server/src/main/java/org/elasticsearch/health/Diagnosis.java @@ -10,14 +10,12 @@ package org.elasticsearch.health; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.xcontent.ChunkedToXContent; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContentBuilder; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ToXContent; import java.util.Collection; -import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Objects; @@ -78,22 +76,20 @@ public Resource(Collection nodes) { } @Override - public Iterator toXContentChunked(ToXContent.Params outerParams) { - final Iterator valuesIterator; + public Iterator toXContentChunked(ToXContent.Params params) { + var builder = ChunkedToXContent.builder(params); if (nodes != null) { - valuesIterator = Iterators.map(nodes.iterator(), node -> (builder, params) -> { - builder.startObject(); - builder.field(ID_FIELD, node.getId()); + return builder.array(type.displayValue, nodes.iterator(), node -> (b, p) -> { + b.startObject(); + b.field(ID_FIELD, node.getId()); if (node.getName() != null) { - 
builder.field(NAME_FIELD, node.getName()); + b.field(NAME_FIELD, node.getName()); } - builder.endObject(); - return builder; + return b.endObject(); }); } else { - valuesIterator = Iterators.map(values.iterator(), value -> (builder, params) -> builder.value(value)); + return builder.array(type.displayValue, values.toArray(String[]::new)); } - return ChunkedToXContentHelper.array(type.displayValue, valuesIterator); } @Override @@ -144,30 +140,18 @@ public String getUniqueId() { } @Override - public Iterator toXContentChunked(ToXContent.Params outerParams) { - final Iterator resourcesIterator; - if (affectedResources == null) { - resourcesIterator = Collections.emptyIterator(); - } else { - resourcesIterator = Iterators.flatMap(affectedResources.iterator(), s -> s.toXContentChunked(outerParams)); - } - return Iterators.concat(Iterators.single((ToXContent) (builder, params) -> { - builder.startObject(); - builder.field("id", definition.getUniqueId()); - builder.field("cause", definition.cause); - builder.field("action", definition.action); - builder.field("help_url", definition.helpURL); - - if (affectedResources != null && affectedResources.size() > 0) { - builder.startObject("affected_resources"); - } - return builder; - }), resourcesIterator, Iterators.single((builder, params) -> { - if (affectedResources != null && affectedResources.size() > 0) { - builder.endObject(); + public Iterator toXContentChunked(ToXContent.Params params) { + return ChunkedToXContent.builder(params).object(ob -> { + ob.append((b, p) -> { + b.field("id", definition.getUniqueId()); + b.field("cause", definition.cause); + b.field("action", definition.action); + b.field("help_url", definition.helpURL); + return b; + }); + if (affectedResources != null && affectedResources.isEmpty() == false) { + ob.object("affected_resources", affectedResources.iterator(), ChunkedToXContentBuilder::append); } - builder.endObject(); - return builder; - })); + }); } } diff --git 
a/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java b/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java index 6944ac74c8115..1a84abd9f7c16 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java +++ b/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java @@ -9,11 +9,11 @@ package org.elasticsearch.health; -import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentBuilder; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.xcontent.ToXContent; -import java.util.Collections; import java.util.Iterator; import java.util.List; @@ -26,33 +26,22 @@ public record HealthIndicatorResult( List diagnosisList ) implements ChunkedToXContentObject { @Override - public Iterator toXContentChunked(ToXContent.Params outerParams) { - final Iterator diagnosisIterator; - if (diagnosisList == null) { - diagnosisIterator = Collections.emptyIterator(); - } else { - diagnosisIterator = Iterators.flatMap(diagnosisList.iterator(), s -> s.toXContentChunked(outerParams)); - } - return Iterators.concat(Iterators.single((ToXContent) (builder, params) -> { - builder.startObject(); - builder.field("status", status.xContentValue()); - builder.field("symptom", symptom); - if (details != null && HealthIndicatorDetails.EMPTY.equals(details) == false) { - builder.field("details", details, params); - } - if (impacts != null && impacts.isEmpty() == false) { - builder.field("impacts", impacts); - } - if (diagnosisList != null && diagnosisList.isEmpty() == false) { - builder.startArray("diagnosis"); - } - return builder; - }), diagnosisIterator, Iterators.single((builder, params) -> { + public Iterator toXContentChunked(ToXContent.Params params) { + return ChunkedToXContent.builder(params).object(ob -> { + ob.append((b, p) -> { + b.field("status", 
status.xContentValue()); + b.field("symptom", symptom); + if (details != null && HealthIndicatorDetails.EMPTY.equals(details) == false) { + b.field("details", details, p); + } + if (impacts != null && impacts.isEmpty() == false) { + b.field("impacts", impacts); + } + return b; + }); if (diagnosisList != null && diagnosisList.isEmpty() == false) { - builder.endArray(); + ob.array("diagnosis", diagnosisList.iterator(), ChunkedToXContentBuilder::append); } - builder.endObject(); - return builder; - })); + }); } } diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestMetadata.java b/server/src/main/java/org/elasticsearch/ingest/IngestMetadata.java index 05da1f4784649..316f621e80669 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestMetadata.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestMetadata.java @@ -18,7 +18,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -98,8 +98,8 @@ public static IngestMetadata fromXContent(XContentParser parser) throws IOExcept } @Override - public Iterator toXContentChunked(ToXContent.Params ignored) { - return ChunkedToXContentHelper.array(PIPELINES_FIELD.getPreferredName(), pipelines.values().iterator()); + public Iterator toXContentChunked(ToXContent.Params params) { + return ChunkedToXContent.builder(params).array(PIPELINES_FIELD.getPreferredName(), pipelines.values().iterator()); } @Override diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetadata.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetadata.java index 55753f2827d2a..bde2d55ef2940 100644 --- 
a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetadata.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetadata.java @@ -17,12 +17,11 @@ import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.VersionedNamedWriteable; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ObjectParser; @@ -552,11 +551,8 @@ public static NamedDiff readDiffFrom(StreamInput in) throws IOE } @Override - public Iterator toXContentChunked(ToXContent.Params ignored) { - return Iterators.concat( - Iterators.single((builder, params) -> builder.field("last_allocation_id", lastAllocationId)), - ChunkedToXContentHelper.array("tasks", tasks.values().iterator()) - ); + public Iterator toXContentChunked(ToXContent.Params params) { + return ChunkedToXContent.builder(params).field("last_allocation_id", lastAllocationId).array("tasks", tasks.values().iterator()); } public static Builder builder() { diff --git a/server/src/main/java/org/elasticsearch/script/ScriptStats.java b/server/src/main/java/org/elasticsearch/script/ScriptStats.java index 9ac060ad063a0..f24052ef7e3a9 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptStats.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptStats.java @@ -10,13 +10,11 @@ package org.elasticsearch.script; import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.collect.Iterators; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.ChunkedToXContent; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; @@ -27,7 +25,6 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.common.collect.Iterators.single; import static org.elasticsearch.script.ScriptContextStats.Fields.COMPILATIONS_HISTORY; import static org.elasticsearch.script.ScriptStats.Fields.CACHE_EVICTIONS; import static org.elasticsearch.script.ScriptStats.Fields.COMPILATIONS; @@ -193,28 +190,19 @@ public ScriptCacheStats toScriptCacheStats() { } @Override - public Iterator toXContentChunked(ToXContent.Params outerParams) { - return Iterators.concat( - ChunkedToXContentHelper.startObject(SCRIPT_STATS), - ChunkedToXContentHelper.field(COMPILATIONS, compilations), - ChunkedToXContentHelper.field(CACHE_EVICTIONS, cacheEvictions), - ChunkedToXContentHelper.field(COMPILATION_LIMIT_TRIGGERED, compilationLimitTriggered), - single((builder, params) -> { - if (compilationsHistory != null && compilationsHistory.areTimingsEmpty() == false) { - builder.startObject(COMPILATIONS_HISTORY); - compilationsHistory.toXContent(builder, params); - builder.endObject(); - } - if (cacheEvictionsHistory != null && cacheEvictionsHistory.areTimingsEmpty() == false) { - builder.startObject(COMPILATIONS_HISTORY); - cacheEvictionsHistory.toXContent(builder, params); - builder.endObject(); - } - return builder; - }), - ChunkedToXContentHelper.array(CONTEXTS, contextStats.iterator()), - ChunkedToXContentHelper.endObject() - ); + public Iterator toXContentChunked(ToXContent.Params params) { + return ChunkedToXContent.builder(params).object(SCRIPT_STATS, ob -> { + ob.field(COMPILATIONS, compilations); + 
ob.field(CACHE_EVICTIONS, cacheEvictions); + ob.field(COMPILATION_LIMIT_TRIGGERED, compilationLimitTriggered); + if (compilationsHistory != null && compilationsHistory.areTimingsEmpty() == false) { + ob.xContentObject(COMPILATIONS_HISTORY, compilationsHistory); + } + if (cacheEvictionsHistory != null && cacheEvictionsHistory.areTimingsEmpty() == false) { + ob.xContentObject(COMPILATIONS_HISTORY, cacheEvictionsHistory); + } + ob.array(CONTEXTS, contextStats.iterator()); + }); } static final class Fields { diff --git a/server/src/main/java/org/elasticsearch/search/SearchHits.java b/server/src/main/java/org/elasticsearch/search/SearchHits.java index 896dd7f999949..fe133cbac335d 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHits.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHits.java @@ -18,7 +18,6 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.xcontent.ChunkedToXContent; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.SimpleRefCounted; @@ -285,26 +284,22 @@ public static final class Fields { @Override public Iterator toXContentChunked(ToXContent.Params params) { assert hasReferences(); - return Iterators.concat(Iterators.single((b, p) -> b.startObject(Fields.HITS)), Iterators.single((b, p) -> { - boolean totalHitAsInt = params.paramAsBoolean(RestSearchAction.TOTAL_HITS_AS_INT_PARAM, false); + return ChunkedToXContent.builder(params).object(Fields.HITS, ob -> { + boolean totalHitAsInt = ob.params().paramAsBoolean(RestSearchAction.TOTAL_HITS_AS_INT_PARAM, false); if (totalHitAsInt) { - long total = totalHits == null ? -1 : totalHits.value(); - b.field(Fields.TOTAL, total); + ob.field(Fields.TOTAL, totalHits == null ? 
-1 : totalHits.value()); } else if (totalHits != null) { - b.startObject(Fields.TOTAL); - b.field("value", totalHits.value()); - b.field("relation", totalHits.relation() == Relation.EQUAL_TO ? "eq" : "gte"); - b.endObject(); + ob.append((b, p) -> { + b.startObject(Fields.TOTAL); + b.field("value", totalHits.value()); + b.field("relation", totalHits.relation() == Relation.EQUAL_TO ? "eq" : "gte"); + return b.endObject(); + }); } - return b; - }), Iterators.single((b, p) -> { - if (Float.isNaN(maxScore)) { - b.nullField(Fields.MAX_SCORE); - } else { - b.field(Fields.MAX_SCORE, maxScore); - } - return b; - }), ChunkedToXContentHelper.array(Fields.HITS, Iterators.forArray(hits)), ChunkedToXContentHelper.endObject()); + + ob.field(Fields.MAX_SCORE, Float.isNaN(maxScore) ? null : maxScore); + ob.array(Fields.HITS, Iterators.forArray(hits)); + }); } @Override diff --git a/server/src/test/java/org/elasticsearch/health/HealthIndicatorResultTests.java b/server/src/test/java/org/elasticsearch/health/HealthIndicatorResultTests.java index 91936ea8b9092..cba0dacccd8bd 100644 --- a/server/src/test/java/org/elasticsearch/health/HealthIndicatorResultTests.java +++ b/server/src/test/java/org/elasticsearch/health/HealthIndicatorResultTests.java @@ -198,8 +198,8 @@ public void testChunkCount() { diagnosisList.add(diagnosis2); HealthIndicatorResult result = new HealthIndicatorResult(name, status, symptom, details, impacts, diagnosisList); - // -> each Diagnosis yields 5 chunks => 10 chunks from both diagnosis - // -> HealthIndicatorResult surrounds the diagnosis list by 2 chunks - AbstractChunkedSerializingTestCase.assertChunkCount(result, ignored -> 12); + // -> each Diagnosis yields 6 chunks => 12 chunks from both diagnosis + // -> HealthIndicatorResult surrounds the diagnosis list by 5 chunks + AbstractChunkedSerializingTestCase.assertChunkCount(result, ignored -> (6 * 2) + 5); } } diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java index 902c69cef558e..5c63a60103139 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java @@ -10,7 +10,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.TaskType; @@ -50,7 +50,7 @@ public ChatCompletionResults(StreamInput in) throws IOException { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContentHelper.array(COMPLETION, results.iterator()); + return ChunkedToXContent.builder(params).array(COMPLETION, results.iterator()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedSparseEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedSparseEmbeddingResults.java index 187b186fcd91d..c961050acefdb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedSparseEmbeddingResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedSparseEmbeddingResults.java @@ -10,7 +10,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.xcontent.ToXContent; @@ -79,7 +79,7 @@ public List getChunkedResults() { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContentHelper.array(FIELD_NAME, chunkedResults.iterator()); + return ChunkedToXContent.builder(params).array(FIELD_NAME, chunkedResults.iterator()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingByteResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingByteResults.java index cc245c40c51e3..6bd66664068d5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingByteResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingByteResults.java @@ -12,7 +12,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.xcontent.ToXContent; @@ -64,7 +64,7 @@ public InferenceChunkedTextEmbeddingByteResults(StreamInput in) throws IOExcepti @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContentHelper.array(FIELD_NAME, chunks.iterator()); + return ChunkedToXContent.builder(params).array(FIELD_NAME, 
chunks.iterator()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingFloatResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingFloatResults.java index 4b4d77cd3f043..369f22a807913 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingFloatResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingFloatResults.java @@ -12,7 +12,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.xcontent.ToXContent; @@ -77,7 +77,7 @@ public static InferenceChunkedTextEmbeddingFloatResults ofMlResults(MlChunkedTex @Override public Iterator toXContentChunked(ToXContent.Params params) { // TODO add isTruncated flag - return ChunkedToXContentHelper.array(FIELD_NAME, chunks.iterator()); + return ChunkedToXContent.builder(params).array(FIELD_NAME, chunks.iterator()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingByteResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingByteResults.java index 16dca7b04d526..c1be1ce265f6b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingByteResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingByteResults.java @@ 
-13,7 +13,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xcontent.ToXContent; @@ -62,7 +62,7 @@ public int getFirstEmbeddingSize() { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContentHelper.array(TEXT_EMBEDDING_BYTES, embeddings.iterator()); + return ChunkedToXContent.builder(params).array(TEXT_EMBEDDING_BYTES, embeddings.iterator()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingFloatResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingFloatResults.java index 9f9bdfec7cfae..8551250348928 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingFloatResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingFloatResults.java @@ -14,7 +14,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.TaskType; @@ -103,7 +103,7 @@ public int getFirstEmbeddingSize() { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContentHelper.array(TEXT_EMBEDDING, 
embeddings.iterator()); + return ChunkedToXContent.builder(params).array(TEXT_EMBEDDING, embeddings.iterator()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java index e331cdbc59358..9c764babe33fc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java @@ -11,7 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; @@ -174,7 +174,7 @@ public List getRankedDocs() { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContentHelper.array(RERANK, rankedDocs.iterator()); + return ChunkedToXContent.builder(params).array(RERANK, rankedDocs.iterator()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java index dd8229c604ecb..318a292b47730 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java @@ -12,7 +12,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import 
org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.TaskType; @@ -72,7 +72,7 @@ public static SparseEmbeddingResults of(List results @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContentHelper.array(SPARSE_EMBEDDING, embeddings.iterator()); + return ChunkedToXContent.builder(params).array(SPARSE_EMBEDDING, embeddings.iterator()); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java index a685687e8bfc6..d98613f1817ab 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java @@ -9,11 +9,10 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xcontent.ToXContent; @@ -168,24 +167,24 @@ public DriverSleeps sleeps() { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return Iterators.concat(ChunkedToXContentHelper.startObject(), Iterators.single((b, p) -> { - b.timestampFieldsFromUnixEpochMillis("start_millis", "start", 
startMillis); - b.timestampFieldsFromUnixEpochMillis("stop_millis", "stop", stopMillis); - b.field("took_nanos", tookNanos); - if (b.humanReadable()) { - b.field("took_time", TimeValue.timeValueNanos(tookNanos)); - } - b.field("cpu_nanos", cpuNanos); - if (b.humanReadable()) { - b.field("cpu_time", TimeValue.timeValueNanos(cpuNanos)); - } - b.field("iterations", iterations); - return b; - }), - ChunkedToXContentHelper.array("operators", operators.iterator()), - Iterators.single((b, p) -> b.field("sleeps", sleeps)), - ChunkedToXContentHelper.endObject() - ); + return ChunkedToXContent.builder(params).object(ob -> { + ob.append((b, p) -> { + b.timestampFieldsFromUnixEpochMillis("start_millis", "start", startMillis); + b.timestampFieldsFromUnixEpochMillis("stop_millis", "stop", stopMillis); + b.field("took_nanos", tookNanos); + if (b.humanReadable()) { + b.field("took_time", TimeValue.timeValueNanos(tookNanos)); + } + b.field("cpu_nanos", cpuNanos); + if (b.humanReadable()) { + b.field("cpu_time", TimeValue.timeValueNanos(cpuNanos)); + } + b.field("iterations", iterations); + return b; + }); + ob.array("operators", operators.iterator()); + ob.field("sleeps", sleeps); + }); } @Override diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java index 24f2f287f4cd6..5d32c39e350a5 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java @@ -11,12 +11,13 @@ import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import 
org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; +import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -176,89 +177,56 @@ public long getTotalSamples() { @UpdateForV9(owner = UpdateForV9.Owner.PROFILING) // change casing from Camel Case to Snake Case (requires updates in Kibana as well) @Override public Iterator toXContentChunked(ToXContent.Params params) { - return Iterators.concat( - ChunkedToXContentHelper.startObject(), - ChunkedToXContentHelper.array( - "Edges", - Iterators.flatMap( - edges.iterator(), - perNodeEdges -> Iterators.concat( - ChunkedToXContentHelper.startArray(), - Iterators.map(perNodeEdges.entrySet().iterator(), edge -> (b, p) -> b.value(edge.getValue())), - ChunkedToXContentHelper.endArray() - ) - ) - ), - ChunkedToXContentHelper.array("FileID", Iterators.map(fileIds.iterator(), e -> (b, p) -> b.value(e))), - ChunkedToXContentHelper.array("FrameType", Iterators.map(frameTypes.iterator(), e -> (b, p) -> b.value(e))), - ChunkedToXContentHelper.array("Inline", Iterators.map(inlineFrames.iterator(), e -> (b, p) -> b.value(e))), - ChunkedToXContentHelper.array("ExeFilename", Iterators.map(fileNames.iterator(), e -> (b, p) -> b.value(e))), - ChunkedToXContentHelper.array("AddressOrLine", Iterators.map(addressOrLines.iterator(), e -> (b, p) -> b.value(e))), - ChunkedToXContentHelper.array("FunctionName", Iterators.map(functionNames.iterator(), e -> (b, p) -> b.value(e))), - ChunkedToXContentHelper.singleChunk((b, p) -> { - b.startArray("FunctionOffset"); - for (int functionOffset : functionOffsets) { - b.value(functionOffset); - } - return b.endArray(); - }), - ChunkedToXContentHelper.array("SourceFilename", Iterators.map(sourceFileNames.iterator(), e -> (b, p) -> b.value(e))), - 
ChunkedToXContentHelper.singleChunk((b, p) -> { - b.startArray("SourceLine"); - for (int sourceLine : sourceLines) { - b.value(sourceLine); - } - return b.endArray(); - }), - ChunkedToXContentHelper.singleChunk((b, p) -> { - b.startArray("CountInclusive"); - for (long countInclusive : countInclusive) { - b.value(countInclusive); - } - return b.endArray(); - }), - ChunkedToXContentHelper.singleChunk((b, p) -> { - b.startArray("CountExclusive"); - for (long c : countExclusive) { - b.value(c); - } - return b.endArray(); - }), - ChunkedToXContentHelper.singleChunk((b, p) -> { - b.startArray("AnnualCO2TonsInclusive"); - for (double co2Tons : annualCO2TonsInclusive) { - // write as raw value - we need direct control over the output representation (here: limit to 4 decimal places) - b.rawValue(NumberUtils.doubleToString(co2Tons)); - } - return b.endArray(); - }), - ChunkedToXContentHelper.singleChunk((b, p) -> { - b.startArray("AnnualCO2TonsExclusive"); - for (double co2Tons : annualCO2TonsExclusive) { - b.rawValue(NumberUtils.doubleToString(co2Tons)); - } - return b.endArray(); - }), - ChunkedToXContentHelper.singleChunk((b, p) -> { - b.startArray("AnnualCostsUSDInclusive"); - for (double costs : annualCostsUSDInclusive) { - b.rawValue(NumberUtils.doubleToString(costs)); - } - return b.endArray(); - }), - ChunkedToXContentHelper.singleChunk((b, p) -> { - b.startArray("AnnualCostsUSDExclusive"); - for (double costs : annualCostsUSDExclusive) { - b.rawValue(NumberUtils.doubleToString(costs)); - } - return b.endArray(); - }), - Iterators.single((b, p) -> b.field("Size", size)), - Iterators.single((b, p) -> b.field("SamplingRate", samplingRate)), - Iterators.single((b, p) -> b.field("SelfCPU", selfCPU)), - Iterators.single((b, p) -> b.field("TotalCPU", totalCPU)), - Iterators.single((b, p) -> b.field("TotalSamples", totalSamples)), - ChunkedToXContentHelper.endObject() - ); + return ChunkedToXContent.builder(params).object(ob -> { + ob.array("Edges", edges.iterator(), (eb, 
e) -> eb.array(intValues(e.values()))); + ob.array("FileID", fileIds.toArray(String[]::new)); + ob.array("FrameType", intValues(frameTypes)); + ob.array("Inline", inlineFrames.iterator(), e -> (b, p) -> b.value(e)); + ob.array("ExeFilename", fileNames.toArray(String[]::new)); + ob.array("AddressOrLine", intValues(addressOrLines)); + ob.array("FunctionName", functionNames.toArray(String[]::new)); + ob.array("FunctionOffset", intValues(functionOffsets)); + ob.array("SourceFilename", sourceFileNames.toArray(String[]::new)); + ob.array("SourceLine", intValues(sourceLines)); + ob.array("CountInclusive", longValues(countInclusive)); + ob.array("CountExclusive", longValues(countExclusive)); + ob.array("AnnualCO2TonsInclusive", doubleValues(annualCO2TonsInclusive)); + ob.array("AnnualCO2TonsExclusive", doubleValues(annualCO2TonsExclusive)); + ob.array("AnnualCostsUSDInclusive", doubleValues(annualCostsUSDInclusive)); + ob.array("AnnualCostsUSDExclusive", doubleValues(annualCostsUSDExclusive)); + ob.field("Size", size); + ob.field("SamplingRate", samplingRate); + ob.field("SelfCPU", selfCPU); + ob.field("TotalCPU", totalCPU); + ob.field("TotalSamples", totalSamples); + }); + } + + private static Iterator intValues(Collection values) { + return Iterators.single((b, p) -> { + for (Integer i : values) { + b.value(i); + } + return b; + }); + } + + private static Iterator longValues(Collection values) { + return Iterators.single((b, p) -> { + for (Long l : values) { + b.value(l); + } + return b; + }); + } + + private static Iterator doubleValues(Collection values) { + return Iterators.single((b, p) -> { + for (Double d : values) { + // write as raw value - we need direct control over the output representation (here: limit to 4 decimal places) + b.rawValue(NumberUtils.doubleToString(d)); + } + return b; + }); } } From eae3a426e7e5074f153fd665829303930a5849d3 Mon Sep 17 00:00:00 2001 From: Imad Saddik <79410781+ImadSaddik@users.noreply.github.com> Date: Mon, 21 Oct 2024 16:22:29 
+0100 Subject: [PATCH 259/449] Fixed hyperlink in search.asciidoc (#115156) --- docs/reference/search/search.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/search/search.asciidoc b/docs/reference/search/search.asciidoc index 501d645665a02..2ad407b4ae1e4 100644 --- a/docs/reference/search/search.asciidoc +++ b/docs/reference/search/search.asciidoc @@ -38,7 +38,7 @@ must have the `read` index privilege for the alias's data streams or indices. Allows you to execute a search query and get back search hits that match the query. You can provide search queries using the <> or <>. +query string parameter>> or <>. [[search-search-api-path-params]] ==== {api-path-parms-title} From 9062154462cabf79dd77e24204783868249cfb02 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 21 Oct 2024 12:22:06 -0400 Subject: [PATCH 260/449] ESQL: Fix `REVERSE` with backspace character (#115245) * ESQL: Fix `REVERSE` with backspace character If the text contains a backspace character aka `0x08` aka ctrl-H then we should use the slow reverse path. This is going to be quite rare but our test data is sure good at making rare, fun stuff.
Closes #115228 Closes #115227 Closes #114372 --- docs/changelog/115245.yaml | 8 ++++++++ muted-tests.yml | 6 ------ .../expression/function/scalar/string/Reverse.java | 13 ++++++------- 3 files changed, 14 insertions(+), 13 deletions(-) create mode 100644 docs/changelog/115245.yaml diff --git a/docs/changelog/115245.yaml b/docs/changelog/115245.yaml new file mode 100644 index 0000000000000..294328567c3aa --- /dev/null +++ b/docs/changelog/115245.yaml @@ -0,0 +1,8 @@ +pr: 115245 +summary: "ESQL: Fix `REVERSE` with backspace character" +area: ES|QL +type: bug +issues: + - 114372 + - 115227 + - 115228 diff --git a/muted-tests.yml b/muted-tests.yml index 4dc177ef001fd..4f3ba742d16fa 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -346,12 +346,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/115129 - class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT issue: https://github.com/elastic/elasticsearch/issues/115135 -- class: org.elasticsearch.xpack.esql.expression.function.scalar.string.ReverseTests - method: testEvaluateInManyThreads {TestCase=} - issue: https://github.com/elastic/elasticsearch/issues/115227 -- class: org.elasticsearch.xpack.esql.expression.function.scalar.string.ReverseTests - method: testEvaluateInManyThreads {TestCase=} - issue: https://github.com/elastic/elasticsearch/issues/115228 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=esql/60_usage/Basic ESQL usage output (telemetry)} issue: https://github.com/elastic/elasticsearch/issues/115231 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Reverse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Reverse.java index bf4e47d8d0de4..e161566838cd9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Reverse.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Reverse.java @@ -10,7 +10,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -79,8 +78,6 @@ protected TypeResolution resolveType() { /** * Reverses a unicode string, keeping grapheme clusters together - * @param str - * @return */ public static String reverseStringWithUnicodeCharacters(String str) { BreakIterator boundary = BreakIterator.getCharacterInstance(Locale.ROOT); @@ -100,10 +97,12 @@ public static String reverseStringWithUnicodeCharacters(String str) { return reversed.toString(); } - private static boolean isOneByteUTF8(BytesRef ref) { + private static boolean reverseBytesIsReverseUnicode(BytesRef ref) { int end = ref.offset + ref.length; for (int i = ref.offset; i < end; i++) { - if (ref.bytes[i] < 0) { + if (ref.bytes[i] < 0 // Anything encoded in multibyte utf-8 + || ref.bytes[i] == 0x08 // Backspace + ) { return false; } } @@ -112,13 +111,13 @@ public static String reverseStringWithUnicodeCharacters(String str) { @Evaluator static BytesRef process(BytesRef val) { - if (isOneByteUTF8(val)) { + if (reverseBytesIsReverseUnicode(val)) { // this is the fast path. we know we can just reverse the bytes.
BytesRef reversed = BytesRef.deepCopyOf(val); reverseArray(reversed.bytes, reversed.offset, reversed.length); return reversed; } - return BytesRefs.toBytesRef(reverseStringWithUnicodeCharacters(val.utf8ToString())); + return new BytesRef(reverseStringWithUnicodeCharacters(val.utf8ToString())); } @Override From f5ceafff4f3e987a128fd2ff2edbd3e0443a3c5b Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Mon, 21 Oct 2024 18:30:23 +0200 Subject: [PATCH 261/449] [Build] Fix checkstyle exclusions on windows (#115185) --- .../gradle/util/PlatformUtils.java | 23 +++++++++++++++++++ x-pack/plugin/esql/build.gradle | 7 +++--- x-pack/plugin/kql/build.gradle | 3 ++- 3 files changed, 29 insertions(+), 4 deletions(-) create mode 100644 build-tools/src/main/java/org/elasticsearch/gradle/util/PlatformUtils.java diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/util/PlatformUtils.java b/build-tools/src/main/java/org/elasticsearch/gradle/util/PlatformUtils.java new file mode 100644 index 0000000000000..2f093a19032c8 --- /dev/null +++ b/build-tools/src/main/java/org/elasticsearch/gradle/util/PlatformUtils.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.gradle.util; + +import java.util.stream.Collectors; + +public class PlatformUtils { + + public static String normalize(String input) { + return input.lines() + .map(it -> it.replace('\\', '/')) + .map(it -> it.replaceAll("\\d+\\.\\d\\ds", "0.00s")) + .map(it -> it.replace("file:/./", "file:./")) + .collect(Collectors.joining("\n")); + } +} diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index c8d704cd2b8bf..766d0c0f13892 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -1,6 +1,7 @@ import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask; import org.elasticsearch.gradle.internal.util.SourceDirectoryCommandLineArgumentProvider; +import static org.elasticsearch.gradle.util.PlatformUtils.normalize apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' @@ -56,7 +57,7 @@ def generatedSourceDir = projectDirectory.dir("src/main/generated") tasks.named("compileJava").configure { options.compilerArgumentProviders.add(new SourceDirectoryCommandLineArgumentProvider(generatedSourceDir)) // IntelliJ sticks generated files here and we can't stop it.... 
- exclude { it.file.toString().contains("src/main/generated-src/generated") } + exclude { normalize(it.file.toString()).contains("src/main/generated-src/generated") } } interface Injected { @@ -262,8 +263,8 @@ tasks.register("regen") { tasks.named("spotlessJava") { dependsOn stringTemplates } tasks.named('checkstyleMain').configure { excludes = [ "**/*.java.st" ] - exclude { it.file.toString().contains("src/main/generated-src/generated") } - exclude { it.file.toString().contains("src/main/generated") } + exclude { normalize(it.file.toString()).contains("src/main/generated-src/generated") } + exclude { normalize(it.file.toString()).contains("src/main/generated") } } def prop(Type, type, TYPE, BYTES, Array) { diff --git a/x-pack/plugin/kql/build.gradle b/x-pack/plugin/kql/build.gradle index d1c949834b021..198099329c7c0 100644 --- a/x-pack/plugin/kql/build.gradle +++ b/x-pack/plugin/kql/build.gradle @@ -1,4 +1,5 @@ import org.elasticsearch.gradle.internal.info.BuildParams +import static org.elasticsearch.gradle.util.PlatformUtils.normalize apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' @@ -54,7 +55,7 @@ pluginManager.withPlugin('com.diffplug.spotless') { } } tasks.named('checkstyleMain').configure { - exclude { it.file.toString().contains("src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase") } + exclude { normalize(it.file.toString()).contains("src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase") } } tasks.register("cleanGenerated", Delete) { From 6b6c3670c2da3d8e8f886ae9c5ee0836ef16eb68 Mon Sep 17 00:00:00 2001 From: Sylvain Wallez Date: Mon, 21 Oct 2024 19:48:34 +0200 Subject: [PATCH 262/449] ESQL: Add support for multivalue fields in Arrow output (#114774) --- docs/changelog/114774.yaml | 5 + x-pack/plugin/esql/arrow/build.gradle | 1 + .../xpack/esql/arrow/ArrowResponse.java | 74 +++-- .../xpack/esql/arrow/BlockConverter.java | 214 ++++++++++----- .../xpack/esql/arrow/ArrowResponseTests.java | 252 
+++++++++++++++--- 5 files changed, 431 insertions(+), 115 deletions(-) create mode 100644 docs/changelog/114774.yaml diff --git a/docs/changelog/114774.yaml b/docs/changelog/114774.yaml new file mode 100644 index 0000000000000..1becfe427fda0 --- /dev/null +++ b/docs/changelog/114774.yaml @@ -0,0 +1,5 @@ +pr: 114774 +summary: "ESQL: Add support for multivalue fields in Arrow output" +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql/arrow/build.gradle b/x-pack/plugin/esql/arrow/build.gradle index 20c877a12bf0d..fac0bd0a77452 100644 --- a/x-pack/plugin/esql/arrow/build.gradle +++ b/x-pack/plugin/esql/arrow/build.gradle @@ -26,6 +26,7 @@ dependencies { testImplementation project(':test:framework') testImplementation('org.apache.arrow:arrow-memory-unsafe:16.1.0') + testImplementation("com.fasterxml.jackson.datatype:jackson-datatype-jsr310:${versions.jackson}") } tasks.named("dependencyLicenses").configure { diff --git a/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/ArrowResponse.java b/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/ArrowResponse.java index 7a8328060a390..208d3308d508b 100644 --- a/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/ArrowResponse.java +++ b/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/ArrowResponse.java @@ -17,6 +17,7 @@ import org.apache.arrow.vector.ipc.message.MessageSerializer; import org.apache.arrow.vector.types.Types.MinorType; import org.apache.arrow.vector.types.pojo.Field; +import org.apache.arrow.vector.types.pojo.FieldType; import org.apache.arrow.vector.types.pojo.Schema; import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.ActionListener; @@ -44,6 +45,7 @@ public class ArrowResponse implements ChunkedRestResponseBodyPart, Releasable { public static class Column { private final BlockConverter converter; private final String name; + private boolean multivalued; public 
Column(String esqlType, String name) { this.converter = ESQL_CONVERTERS.get(esqlType); @@ -61,20 +63,24 @@ public Column(String esqlType, String name) { public ArrowResponse(List columns, List pages) { this.columns = columns; + // Find multivalued columns + int colSize = columns.size(); + for (int col = 0; col < colSize; col++) { + for (Page page : pages) { + if (page.getBlock(col).mayHaveMultivaluedFields()) { + columns.get(col).multivalued = true; + break; + } + } + } + currentSegment = new SchemaResponse(this); List rest = new ArrayList<>(pages.size()); - for (int p = 0; p < pages.size(); p++) { - var page = pages.get(p); + + for (Page page : pages) { rest.add(new PageResponse(this, page)); - // Multivalued fields are not supported yet. - for (int b = 0; b < page.getBlockCount(); b++) { - if (page.getBlock(b).mayHaveMultivaluedFields()) { - throw new IllegalArgumentException( - "ES|QL response field [" + columns.get(b).name + "] is multi-valued. This isn't supported yet by the Arrow format" - ); - } - } } + rest.add(new EndResponse(this)); segments = rest.iterator(); } @@ -185,6 +191,9 @@ public void close() {} * @see IPC Streaming Format */ private static class SchemaResponse extends ResponseSegment { + + private static final FieldType LIST_FIELD_TYPE = FieldType.nullable(MinorType.LIST.getType()); + private boolean done = false; SchemaResponse(ArrowResponse response) { @@ -204,7 +213,20 @@ protected void encodeChunk(int sizeHint, RecyclerBytesStreamOutput out) throws I } private Schema arrowSchema() { - return new Schema(response.columns.stream().map(c -> new Field(c.name, c.converter.arrowFieldType(), List.of())).toList()); + return new Schema(response.columns.stream().map(c -> { + var fieldType = c.converter.arrowFieldType(); + if (c.multivalued) { + // A variable-sized list is a vector of offsets and a child vector of values + // See https://arrow.apache.org/docs/format/Columnar.html#variable-size-list-layout + var listType = new FieldType(true, 
LIST_FIELD_TYPE.getType(), null, fieldType.getMetadata()); + // Value vector is non-nullable (ES|QL multivalues cannot contain nulls). + var valueType = new FieldType(false, fieldType.getType(), fieldType.getDictionary(), null); + // The nested vector is named "$data$", following what the Arrow/Java library does. + return new Field(c.name, listType, List.of(new Field("$data$", valueType, null))); + } else { + return new Field(c.name, fieldType, null); + } + }).toList()); } } @@ -257,7 +279,14 @@ protected void encodeChunk(int sizeHint, RecyclerBytesStreamOutput out) throws I @Override public void write(ArrowBuf buffer) throws IOException { - extraPosition += bufWriters.get(bufIdx++).write(out); + var len = bufWriters.get(bufIdx++).write(out); + // Consistency check + if (len != buffer.writerIndex()) { + throw new IllegalStateException( + "Buffer [" + (bufIdx - 1) + "]: wrote [" + len + "] bytes, but expected [" + buffer.writerIndex() + "]" + ); + } + extraPosition += len; } @Override @@ -277,11 +306,26 @@ public long align() throws IOException { // Create Arrow buffers for each of the blocks in this page for (int b = 0; b < page.getBlockCount(); b++) { - var converter = response.columns.get(b).converter; + var column = response.columns.get(b); + var converter = column.converter; Block block = page.getBlock(b); - nodes.add(new ArrowFieldNode(block.getPositionCount(), converter.nullValuesCount(block))); - converter.convert(block, bufs, bufWriters); + if (column.multivalued) { + // List node. + nodes.add(new ArrowFieldNode(block.getPositionCount(), converter.nullValuesCount(block))); + // Value vector, does not contain nulls. 
+ nodes.add(new ArrowFieldNode(BlockConverter.valueCount(block), 0)); + } else { + nodes.add(new ArrowFieldNode(block.getPositionCount(), converter.nullValuesCount(block))); + } + converter.convert(block, column.multivalued, bufs, bufWriters); + } + + // Consistency check + if (bufs.size() != bufWriters.size()) { + throw new IllegalStateException( + "Inconsistent Arrow buffers: [" + bufs.size() + "] buffers and [" + bufWriters.size() + "] writers" + ); } // Create the batch and serialize it diff --git a/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/BlockConverter.java b/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/BlockConverter.java index 0a65792ab8e13..2a305cfdbc503 100644 --- a/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/BlockConverter.java +++ b/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/BlockConverter.java @@ -71,10 +71,11 @@ public interface BufWriter { /** * Convert a block into Arrow buffers. * @param block the ESQL block + * @param multivalued is this column multivalued? This block may not, but some blocks in that column are. 
* @param bufs arrow buffers, used to track sizes * @param bufWriters buffer writers, that will do the actual work of writing the data */ - public abstract void convert(Block block, List bufs, List bufWriters); + public abstract void convert(Block block, boolean multivalued, List bufs, List bufWriters); /** * Conversion of Double blocks @@ -86,28 +87,31 @@ public AsFloat64(String esqlType) { } @Override - public void convert(Block b, List bufs, List bufWriters) { + public void convert(Block b, boolean multivalued, List bufs, List bufWriters) { DoubleBlock block = (DoubleBlock) b; - accumulateVectorValidity(bufs, bufWriters, block); + if (multivalued) { + addListOffsets(bufs, bufWriters, block); + } + accumulateVectorValidity(bufs, bufWriters, block, multivalued); - bufs.add(dummyArrowBuf(vectorLength(block))); + bufs.add(dummyArrowBuf(vectorByteSize(block))); bufWriters.add(out -> { if (block.areAllValuesNull()) { - return BlockConverter.writeZeroes(out, vectorLength(block)); + return BlockConverter.writeZeroes(out, vectorByteSize(block)); } // TODO could we "just" get the memory of the array and dump it? 
- int count = block.getPositionCount(); + int count = BlockConverter.valueCount(block); for (int i = 0; i < count; i++) { out.writeDoubleLE(block.getDouble(i)); } - return vectorLength(block); + return (long) count * Double.BYTES; }); } - private static int vectorLength(DoubleBlock b) { - return Double.BYTES * b.getPositionCount(); + private static int vectorByteSize(DoubleBlock b) { + return Double.BYTES * BlockConverter.valueCount(b); } } @@ -121,28 +125,31 @@ public AsInt32(String esqlType) { } @Override - public void convert(Block b, List bufs, List bufWriters) { + public void convert(Block b, boolean multivalued, List bufs, List bufWriters) { IntBlock block = (IntBlock) b; - accumulateVectorValidity(bufs, bufWriters, block); + if (multivalued) { + addListOffsets(bufs, bufWriters, block); + } + accumulateVectorValidity(bufs, bufWriters, block, multivalued); - bufs.add(dummyArrowBuf(vectorLength(block))); + bufs.add(dummyArrowBuf(vectorByteSize(block))); bufWriters.add(out -> { if (block.areAllValuesNull()) { - return BlockConverter.writeZeroes(out, vectorLength(block)); + return BlockConverter.writeZeroes(out, vectorByteSize(block)); } // TODO could we "just" get the memory of the array and dump it? 
- int count = block.getPositionCount(); + int count = BlockConverter.valueCount(block); for (int i = 0; i < count; i++) { out.writeIntLE(block.getInt(i)); } - return vectorLength(block); + return (long) count * Integer.BYTES; }); } - private static int vectorLength(IntBlock b) { - return Integer.BYTES * b.getPositionCount(); + private static int vectorByteSize(Block b) { + return Integer.BYTES * BlockConverter.valueCount(b); } } @@ -159,27 +166,31 @@ protected AsInt64(String esqlType, Types.MinorType minorType) { } @Override - public void convert(Block b, List bufs, List bufWriters) { + public void convert(Block b, boolean multivalued, List bufs, List bufWriters) { LongBlock block = (LongBlock) b; - accumulateVectorValidity(bufs, bufWriters, block); - bufs.add(dummyArrowBuf(vectorLength(block))); + if (multivalued) { + addListOffsets(bufs, bufWriters, block); + } + accumulateVectorValidity(bufs, bufWriters, block, multivalued); + + bufs.add(dummyArrowBuf(vectorByteSize(block))); bufWriters.add(out -> { if (block.areAllValuesNull()) { - return BlockConverter.writeZeroes(out, vectorLength(block)); + return BlockConverter.writeZeroes(out, vectorByteSize(block)); } // TODO could we "just" get the memory of the array and dump it? 
- int count = block.getPositionCount(); + int count = BlockConverter.valueCount(block); for (int i = 0; i < count; i++) { out.writeLongLE(block.getLong(i)); } - return vectorLength(block); + return (long) count * Long.BYTES; }); } - private static int vectorLength(LongBlock b) { - return Long.BYTES * b.getPositionCount(); + private static int vectorByteSize(LongBlock b) { + return Long.BYTES * BlockConverter.valueCount(b); } } @@ -192,13 +203,17 @@ public AsBoolean(String esqlType) { } @Override - public void convert(Block b, List bufs, List bufWriters) { + public void convert(Block b, boolean multivalued, List bufs, List bufWriters) { BooleanBlock block = (BooleanBlock) b; - accumulateVectorValidity(bufs, bufWriters, block); - bufs.add(dummyArrowBuf(vectorLength(block))); + if (multivalued) { + addListOffsets(bufs, bufWriters, block); + } + accumulateVectorValidity(bufs, bufWriters, block, multivalued); + + bufs.add(dummyArrowBuf(vectorByteSize(block))); bufWriters.add(out -> { - int count = block.getPositionCount(); + int count = BlockConverter.valueCount(block); BitSet bits = new BitSet(); // Only set the bits that are true, writeBitSet will take @@ -215,8 +230,8 @@ public void convert(Block b, List bufs, List bufWriters) { }); } - private static int vectorLength(BooleanBlock b) { - return BlockConverter.bitSetLength(b.getPositionCount()); + private static int vectorByteSize(BooleanBlock b) { + return BlockConverter.bitSetLength(BlockConverter.valueCount(b)); } } @@ -230,27 +245,30 @@ public BytesRefConverter(String esqlType, Types.MinorType minorType) { } @Override - public void convert(Block b, List bufs, List bufWriters) { + public void convert(Block b, boolean multivalued, List bufs, List bufWriters) { BytesRefBlock block = (BytesRefBlock) b; - BlockConverter.accumulateVectorValidity(bufs, bufWriters, block); + if (multivalued) { + addListOffsets(bufs, bufWriters, block); + } + accumulateVectorValidity(bufs, bufWriters, block, multivalued); // Offsets vector 
- bufs.add(dummyArrowBuf(offsetVectorLength(block))); + bufs.add(dummyArrowBuf(offsetvectorByteSize(block))); bufWriters.add(out -> { if (block.areAllValuesNull()) { - var count = block.getPositionCount() + 1; + var count = valueCount(block) + 1; for (int i = 0; i < count; i++) { out.writeIntLE(0); } - return offsetVectorLength(block); + return offsetvectorByteSize(block); } // TODO could we "just" get the memory of the array and dump it? BytesRef scratch = new BytesRef(); int offset = 0; - for (int i = 0; i < block.getPositionCount(); i++) { + for (int i = 0; i < valueCount(block); i++) { out.writeIntLE(offset); // FIXME: add a ByteRefsVector.getLength(position): there are some cases // where getBytesRef will allocate, which isn't needed here. @@ -259,11 +277,11 @@ public void convert(Block b, List bufs, List bufWriters) { offset += v.length; } out.writeIntLE(offset); - return offsetVectorLength(block); + return offsetvectorByteSize(block); }); // Data vector - bufs.add(BlockConverter.dummyArrowBuf(dataVectorLength(block))); + bufs.add(BlockConverter.dummyArrowBuf(dataVectorByteSize(block))); bufWriters.add(out -> { if (block.areAllValuesNull()) { @@ -273,7 +291,7 @@ public void convert(Block b, List bufs, List bufWriters) { // TODO could we "just" get the memory of the array and dump it? 
BytesRef scratch = new BytesRef(); long length = 0; - for (int i = 0; i < block.getPositionCount(); i++) { + for (int i = 0; i < valueCount(block); i++) { BytesRef v = block.getBytesRef(i, scratch); out.write(v.bytes, v.offset, v.length); @@ -283,11 +301,11 @@ public void convert(Block b, List bufs, List bufWriters) { }); } - private static int offsetVectorLength(BytesRefBlock block) { - return Integer.BYTES * (block.getPositionCount() + 1); + private static int offsetvectorByteSize(BytesRefBlock block) { + return Integer.BYTES * (valueCount(block) + 1); } - private int dataVectorLength(BytesRefBlock block) { + private int dataVectorByteSize(BytesRefBlock block) { if (block.areAllValuesNull()) { return 0; } @@ -296,7 +314,7 @@ private int dataVectorLength(BytesRefBlock block) { int length = 0; BytesRef scratch = new BytesRef(); - for (int i = 0; i < block.getPositionCount(); i++) { + for (int i = 0; i < valueCount(block); i++) { BytesRef v = block.getBytesRef(i, scratch); length += v.length; } @@ -323,10 +341,10 @@ public TransformedBytesRef(String esqlType, Types.MinorType minorType, BiFunctio } @Override - public void convert(Block b, List bufs, List bufWriters) { + public void convert(Block b, boolean multivalued, List bufs, List bufWriters) { BytesRefBlock block = (BytesRefBlock) b; try (BytesRefBlock transformed = transformValues(block)) { - super.convert(transformed, bufs, bufWriters); + super.convert(transformed, multivalued, bufs, bufWriters); } } @@ -336,20 +354,40 @@ public void convert(Block b, List bufs, List bufWriters) { private BytesRefBlock transformValues(BytesRefBlock block) { try (BytesRefBlock.Builder builder = block.blockFactory().newBytesRefBlockBuilder(block.getPositionCount())) { BytesRef scratch = new BytesRef(); - for (int i = 0; i < block.getPositionCount(); i++) { - if (block.isNull(i)) { - builder.appendNull(); - } else { - BytesRef bytes = block.getBytesRef(i, scratch); - if (bytes.length != 0) { - bytes = valueConverter.apply(bytes, 
scratch); + if (block.mayHaveMultivaluedFields() == false) { + for (int pos = 0; pos < valueCount(block); pos++) { + if (block.isNull(pos)) { + builder.appendNull(); + } else { + convertAndAppend(builder, block, pos, scratch); + } + } + } else { + for (int pos = 0; pos < block.getPositionCount(); pos++) { + if (block.isNull(pos)) { + builder.appendNull(); + } else { + builder.beginPositionEntry(); + int startPos = block.getFirstValueIndex(pos); + int lastPos = block.getFirstValueIndex(pos + 1); + for (int valuePos = startPos; valuePos < lastPos; valuePos++) { + convertAndAppend(builder, block, valuePos, scratch); + } + builder.endPositionEntry(); } - builder.appendBytesRef(bytes); } } return builder.build(); } } + + private void convertAndAppend(BytesRefBlock.Builder builder, BytesRefBlock block, int position, BytesRef scratch) { + BytesRef bytes = block.getBytesRef(position, scratch); + if (bytes.length != 0) { + bytes = valueConverter.apply(bytes, scratch); + } + builder.appendBytesRef(bytes); + } } public static class AsVarChar extends BytesRefConverter { @@ -370,7 +408,7 @@ public AsNull(String esqlType) { } @Override - public void convert(Block block, List bufs, List bufWriters) { + public void convert(Block block, boolean multivalued, List bufs, List bufWriters) { // Null vector in arrow has no associated buffers // See https://arrow.apache.org/docs/format/Columnar.html#null-layout } @@ -386,15 +424,38 @@ private static int bitSetLength(int totalValues) { return (totalValues + 7) / 8; } - private static void accumulateVectorValidity(List bufs, List bufWriters, Block b) { - bufs.add(dummyArrowBuf(bitSetLength(b.getPositionCount()))); + /** + * Get the value count for a block. For single-valued blocks this is the same as the position count. + * For multivalued blocks, this is the flattened number of items. 
+ */ + static int valueCount(Block block) { + int result = block.getFirstValueIndex(block.getPositionCount()); + + // firstValueIndex is always zero for all-null blocks. + if (result == 0 && block.areAllValuesNull()) { + result = block.getPositionCount(); + } + + return result; + } + + private static void accumulateVectorValidity(List bufs, List bufWriters, Block b, boolean multivalued) { + // If that block is in a multivalued-column, validities are output in the parent Arrow List buffer (values themselves + // do not contain nulls per docvalues limitations). + if (multivalued || b.mayHaveNulls() == false) { + // Arrow IPC allows a compact form for "all true" validities using an empty buffer. + bufs.add(dummyArrowBuf(0)); + bufWriters.add(w -> 0); + return; + } + + int valueCount = b.getPositionCount(); + bufs.add(dummyArrowBuf(bitSetLength(valueCount))); bufWriters.add(out -> { - if (b.mayHaveNulls() == false) { - return writeAllTrueValidity(out, b.getPositionCount()); - } else if (b.areAllValuesNull()) { - return writeAllFalseValidity(out, b.getPositionCount()); + if (b.areAllValuesNull()) { + return writeAllFalseValidity(out, valueCount); } else { - return writeValidities(out, b); + return writeValidities(out, b, valueCount); } }); } @@ -420,10 +481,10 @@ private static long writeAllFalseValidity(RecyclerBytesStreamOutput out, int val return count; } - private static long writeValidities(RecyclerBytesStreamOutput out, Block block) { - int valueCount = block.getPositionCount(); + private static long writeValidities(RecyclerBytesStreamOutput out, Block block, int valueCount) { BitSet bits = new BitSet(valueCount); for (int i = 0; i < block.getPositionCount(); i++) { + // isNull is value indices, not multi-value positions if (block.isNull(i) == false) { bits.set(i); } @@ -449,4 +510,29 @@ private static long writeZeroes(RecyclerBytesStreamOutput out, int byteCount) { } return byteCount; } + + private static void addListOffsets(List bufs, List bufWriters, Block 
block) { + // Add validity buffer + accumulateVectorValidity(bufs, bufWriters, block, false); + + // Add offsets buffer + int bufferLen = Integer.BYTES * (block.getPositionCount() + 1); + + bufs.add(dummyArrowBuf(bufferLen)); + bufWriters.add(out -> { + if (block.mayHaveMultivaluedFields()) { + // '<=' is intentional to write the end position of the last item + for (int i = 0; i <= block.getPositionCount(); i++) { + // TODO could we get the block's firstValueIndexes and dump it? + out.writeIntLE(block.getFirstValueIndex(i)); + } + } else { + for (int i = 0; i <= block.getPositionCount(); i++) { + out.writeIntLE(i); + } + } + + return bufferLen; + }); + } } diff --git a/x-pack/plugin/esql/arrow/src/test/java/org/elasticsearch/xpack/esql/arrow/ArrowResponseTests.java b/x-pack/plugin/esql/arrow/src/test/java/org/elasticsearch/xpack/esql/arrow/ArrowResponseTests.java index cf49b37db2805..b187e49554f8b 100644 --- a/x-pack/plugin/esql/arrow/src/test/java/org/elasticsearch/xpack/esql/arrow/ArrowResponseTests.java +++ b/x-pack/plugin/esql/arrow/src/test/java/org/elasticsearch/xpack/esql/arrow/ArrowResponseTests.java @@ -19,7 +19,10 @@ import org.apache.arrow.vector.VarBinaryVector; import org.apache.arrow.vector.VarCharVector; import org.apache.arrow.vector.VectorSchemaRoot; +import org.apache.arrow.vector.complex.ListVector; +import org.apache.arrow.vector.complex.impl.UnionListWriter; import org.apache.arrow.vector.ipc.ArrowStreamReader; +import org.apache.arrow.vector.ipc.ArrowStreamWriter; import org.apache.arrow.vector.util.VectorSchemaRootAppender; import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.util.BytesRef; @@ -34,7 +37,6 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVectorBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import 
org.elasticsearch.test.ESTestCase; @@ -42,6 +44,8 @@ import org.elasticsearch.xpack.versionfield.Version; import org.junit.AfterClass; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; @@ -75,6 +79,7 @@ public static void afterClass() throws Exception { // Value creation, getters for ESQL and Arrow static final ValueType INTEGER_VALUES = new ValueTypeImpl( + "integer", factory -> factory.newIntBlockBuilder(0), block -> block.appendInt(randomInt()), (block, i, scratch) -> block.getInt(i), @@ -82,6 +87,7 @@ public static void afterClass() throws Exception { ); static final ValueType LONG_VALUES = new ValueTypeImpl( + "long", factory -> factory.newLongBlockBuilder(0), block -> block.appendLong(randomLong()), (block, i, scratch) -> block.getLong(i), @@ -89,6 +95,7 @@ public static void afterClass() throws Exception { ); static final ValueType ULONG_VALUES = new ValueTypeImpl( + "ulong", factory -> factory.newLongBlockBuilder(0), block -> block.appendLong(randomLong()), (block, i, scratch) -> block.getLong(i), @@ -96,6 +103,7 @@ public static void afterClass() throws Exception { ); static final ValueType DATE_VALUES = new ValueTypeImpl( + "date", factory -> factory.newLongBlockBuilder(0), block -> block.appendLong(randomLong()), (block, i, scratch) -> block.getLong(i), @@ -103,6 +111,7 @@ public static void afterClass() throws Exception { ); static final ValueType DOUBLE_VALUES = new ValueTypeImpl( + "double", factory -> factory.newDoubleBlockBuilder(0), block -> block.appendDouble(randomDouble()), (block, i, scratch) -> block.getDouble(i), @@ -110,6 +119,7 @@ public static void afterClass() throws Exception { ); static final ValueType BOOLEAN_VALUES = new ValueTypeImpl( + "boolean", factory -> factory.newBooleanBlockBuilder(0), block -> block.appendBoolean(randomBoolean()), (b, i, s) -> b.getBoolean(i), @@ -117,21 +127,23 @@ public static void 
afterClass() throws Exception { ); static final ValueType TEXT_VALUES = new ValueTypeImpl( + "text", factory -> factory.newBytesRefBlockBuilder(0), - block -> block.appendBytesRef(new BytesRef("🚀" + randomAlphaOfLengthBetween(1, 20))), + block -> block.appendBytesRef(new BytesRef(randomUnicodeOfLengthBetween(1, 20))), (b, i, s) -> b.getBytesRef(i, s).utf8ToString(), (v, i) -> new String(v.get(i), StandardCharsets.UTF_8) ); static final ValueType SOURCE_VALUES = new ValueTypeImpl( + "source", factory -> factory.newBytesRefBlockBuilder(0), - // Use a constant value, conversion is tested separately - block -> block.appendBytesRef(new BytesRef("{\"foo\": 42}")), + block -> block.appendBytesRef(new BytesRef("{\"foo\": " + randomIntBetween(-42, 42) + "}")), (b, i, s) -> b.getBytesRef(i, s).utf8ToString(), (v, i) -> new String(v.get(i), StandardCharsets.UTF_8) ); static final ValueType IP_VALUES = new ValueTypeImpl( + "ip", factory -> factory.newBytesRefBlockBuilder(0), block -> { byte[] addr = InetAddressPoint.encode(randomIp(randomBoolean())); @@ -143,6 +155,7 @@ public static void afterClass() throws Exception { ); static final ValueType BINARY_VALUES = new ValueTypeImpl( + "binary", factory -> factory.newBytesRefBlockBuilder(0), block -> block.appendBytesRef(new BytesRef(randomByteArrayOfLength(randomIntBetween(1, 100)))), BytesRefBlock::getBytesRef, @@ -150,6 +163,7 @@ public static void afterClass() throws Exception { ); static final ValueType VERSION_VALUES = new ValueTypeImpl( + "version", factory -> factory.newBytesRefBlockBuilder(0), block -> block.appendBytesRef(new Version(between(0, 100) + "." + between(0, 100) + "." + between(0, 100)).toBytesRef()), (b, i, s) -> new Version(b.getBytesRef(i, s)).toString(), @@ -157,6 +171,7 @@ public static void afterClass() throws Exception { ); static final ValueType NULL_VALUES = new ValueTypeImpl( + "null", factory -> factory.newBytesRefBlockBuilder(0), Block.Builder::appendNull, (b, i, s) -> b.isNull(i) ? 
null : "non-null in block", @@ -201,9 +216,10 @@ public void testTestHarness() { TestBlock emptyBlock = TestBlock.create(BLOCK_FACTORY, testColumn, Density.Empty, 7); // Test that density works as expected - assertTrue(denseBlock.block instanceof IntVectorBlock); - assertEquals("IntArrayBlock", sparseBlock.block.getClass().getSimpleName()); // non-public class - assertEquals("ConstantNullBlock", emptyBlock.block.getClass().getSimpleName()); + assertFalse(denseBlock.block.mayHaveNulls()); + assertTrue(sparseBlock.block.mayHaveNulls()); + assertFalse(sparseBlock.block.areAllValuesNull()); + assertTrue(emptyBlock.block.areAllValuesNull()); // Test that values iterator scans all pages List pages = Stream.of(denseBlock, sparseBlock, emptyBlock).map(b -> new TestPage(List.of(b))).toList(); @@ -229,7 +245,7 @@ public void testTestHarness() { */ public void testSingleColumn() throws IOException { for (var type : VALUE_TYPES.keySet()) { - TestColumn testColumn = new TestColumn("foo", type, VALUE_TYPES.get(type)); + TestColumn testColumn = new TestColumn("foo", type, VALUE_TYPES.get(type), false); List pages = new ArrayList<>(); for (var density : Density.values()) { @@ -248,7 +264,7 @@ public void testSingleBlock() throws IOException { String type = "text"; Density density = Density.Dense; - TestColumn testColumn = new TestColumn("foo", type, VALUE_TYPES.get(type)); + TestColumn testColumn = new TestColumn("foo", type, VALUE_TYPES.get(type), false); List pages = new ArrayList<>(); TestBlock testBlock = TestBlock.create(BLOCK_FACTORY, testColumn, density, 10); @@ -261,44 +277,156 @@ public void testSingleBlock() throws IOException { } /** - * Test that multivalued arrays are rejected + * Test a multivalued field with fixed size values. 
*/ - public void testMultivaluedField() throws IOException { + public void testMultivaluedInteger() throws IOException { IntBlock.Builder builder = BLOCK_FACTORY.newIntBlockBuilder(0); + builder.beginPositionEntry(); builder.appendInt(42); + builder.appendInt(43); + builder.endPositionEntry(); + + // The multivalue can be null, but a multivalue cannot contain nulls. + // Calling appendNull within a begin/endEntry causes consistency checks to fail in build() + // See also https://github.com/elastic/elasticsearch/issues/114324 builder.appendNull(); + builder.beginPositionEntry(); builder.appendInt(44); builder.appendInt(45); builder.endPositionEntry(); + + // single value builder.appendInt(46); + IntBlock block = builder.build(); + builder.close(); - // Consistency check + // Consistency check. + // AbstractArrayBlock.assertInvariants does some of these consistency checks, but those below + // specifically verify the assumptions on which the conversion to Arrow is built. assertTrue(block.mayHaveMultivaluedFields()); + assertEquals(4, block.getPositionCount()); // counts null entries + assertEquals(5, block.getTotalValueCount()); // nulls aren't counted + + // Value 0 + assertEquals(2, block.getValueCount(0)); assertEquals(0, block.getFirstValueIndex(0)); - assertEquals(1, block.getValueCount(0)); + assertEquals(42, block.getInt(block.getFirstValueIndex(0))); + assertEquals(43, block.getInt(block.getFirstValueIndex(0) + 1)); - // null values still use one position in the array + // Value 1 assertEquals(0, block.getValueCount(1)); - assertEquals(1, block.getFirstValueIndex(1)); - assertTrue(block.isNull(1)); - assertEquals(0, block.getInt(1)); + assertTrue(block.isNull(1)); // This is the position index, not value index + // No value, but still occupies a value slot with zero + assertEquals(2, block.getFirstValueIndex(1)); + assertEquals(0, block.getInt(block.getFirstValueIndex(1))); + assertEquals(3, block.getFirstValueIndex(2)); - assertEquals(2, 
block.getFirstValueIndex(2)); + // Value 2 assertEquals(2, block.getValueCount(2)); - assertEquals(2, block.getFirstValueIndex(2)); + assertEquals(3, block.getFirstValueIndex(2)); + assertEquals(44, block.getInt(block.getFirstValueIndex(2))); assertEquals(45, block.getInt(block.getFirstValueIndex(2) + 1)); - assertEquals(4, block.getFirstValueIndex(3)); + // Value 3 + assertEquals(1, block.getValueCount(3)); + assertEquals(5, block.getFirstValueIndex(3)); + assertEquals(46, block.getInt(block.getFirstValueIndex(3))); - var column = TestColumn.create("some-field", "integer"); - TestCase testCase = new TestCase(List.of(column), List.of(new TestPage(List.of(new TestBlock(column, block, Density.Dense))))); + // End of block + assertEquals(6, block.getFirstValueIndex(4)); - IllegalArgumentException exc = assertThrows(IllegalArgumentException.class, () -> compareEsqlAndArrow(testCase)); + var column = TestColumn.create("some-field", "integer", true); + TestCase testCase = new TestCase(List.of(column), List.of(new TestPage(List.of(TestBlock.create(column, block))))); - assertEquals("ES|QL response field [some-field] is multi-valued. This isn't supported yet by the Arrow format", exc.getMessage()); + compareEsqlAndArrow(testCase); + } + + /** + * Test a multivalued field with variable size values. 
+ */ + public void testMultivalueString() throws IOException { + BytesRefBlock.Builder builder = BLOCK_FACTORY.newBytesRefBlockBuilder(0); + + builder.beginPositionEntry(); + builder.appendBytesRef(new BytesRef("a")); + builder.appendBytesRef(new BytesRef("b")); + builder.endPositionEntry(); + builder.beginPositionEntry(); + builder.appendBytesRef(new BytesRef("c")); + builder.appendBytesRef(new BytesRef("d")); + builder.endPositionEntry(); + + BytesRefBlock block = builder.build(); + builder.close(); + + var column = TestColumn.create("some-field", "text"); + TestCase testCase = new TestCase(List.of(column), List.of(new TestPage(List.of(TestBlock.create(column, block))))); + + compareEsqlAndArrow(testCase); + } + + // Test exercising Arrow's multivalue API + public void testMultiValueArrow() throws IOException { + + byte[] bytes; + + try (ListVector listVector = ListVector.empty("some-field", ALLOCATOR)) { + UnionListWriter writer = listVector.getWriter(); + + writer.startList(); + writer.writeInt(42); // 0x2A + writer.writeInt(43); // 0x2A + writer.endList(); + + writer.startList(); + // Size is zero without a writeNull() + writer.writeNull(); // Adds a null value in that list + writer.endList(); + + writer.startList(); + writer.writeInt(44); // 0x2C + writer.writeInt(45); // 0x2D + writer.endList(); + + writer.startList(); + writer.writeInt(46); // 0x2E + writer.endList(); + + listVector.setValueCount(4); + bytes = getBytes(listVector); + } + + try (var reader = new ArrowStreamReader(new ByteArrayInputStream(bytes), ALLOCATOR)) { + var root = reader.getVectorSchemaRoot(); + reader.loadNextBatch(); + + ListVector listVector = (ListVector) root.getVector("some-field"); + + assertEquals(4, listVector.getValueCount()); + assertEquals(List.of(42, 43), listVector.getObject(0)); + assertEquals(Collections.singletonList((Integer) null), listVector.getObject(1)); + assertEquals(List.of(44, 45), listVector.getObject(2)); + assertEquals(List.of(46), 
listVector.getObject(3)); + } + } + + private static byte[] getBytes(ListVector listVector) throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + var fields = List.of(listVector.getField()); + List vectors = List.of(listVector); + + try ( + VectorSchemaRoot root = new VectorSchemaRoot(fields, vectors); + ArrowStreamWriter arrowWriter = new ArrowStreamWriter(root, null, baos); + ) { + arrowWriter.start(); + arrowWriter.writeBatch(); + arrowWriter.end(); + } + return baos.toByteArray(); } /** @@ -319,10 +447,6 @@ public void testRandomTypesAndSize() throws IOException { .toList(); TestCase testCase = new TestCase(columns, pages); - // System.out.println(testCase); - // for (TestPage page: pages) { - // System.out.println(page); - // } compareEsqlAndArrow(testCase); } @@ -347,8 +471,13 @@ private void compareEsqlAndArrow(TestCase testCase, VectorSchemaRoot root) { var esqlValuesIterator = new EsqlValuesIterator(testCase, i); var arrowValuesIterator = new ArrowValuesIterator(testCase, root, i); + int line = 0; + while (esqlValuesIterator.hasNext() && arrowValuesIterator.hasNext()) { - assertEquals(esqlValuesIterator.next(), arrowValuesIterator.next()); + Object esqlValue = esqlValuesIterator.next(); + Object arrowValue = arrowValuesIterator.next(); + assertEquals(("line " + line), esqlValue, arrowValue); + line++; } // Make sure we entirely consumed both sides. @@ -387,7 +516,6 @@ private VectorSchemaRoot toArrowVectors(TestCase testCase) throws IOException { static class EsqlValuesIterator implements Iterator { private final int fieldPos; private final ValueType type; - private final BytesRef scratch = new BytesRef(); private final Iterator pages; private TestPage page; @@ -412,7 +540,7 @@ public Object next() { throw new NoSuchElementException(); } Block block = page.blocks.get(fieldPos).block; - Object result = block.isNull(position) ? null : type.valueAt(block, position, scratch); + Object result = block.isNull(position) ? 
null : type.valueAt(block, position, new BytesRef()); position++; if (position >= block.getPositionCount()) { position = 0; @@ -475,9 +603,13 @@ public String toString() { } } - record TestColumn(String name, String type, ValueType valueType) { + record TestColumn(String name, String type, ValueType valueType, boolean multivalue) { static TestColumn create(String name, String type) { - return new TestColumn(name, type, VALUE_TYPES.get(type)); + return create(name, type, randomBoolean()); + } + + static TestColumn create(String name, String type, boolean multivalue) { + return new TestColumn(name, type, VALUE_TYPES.get(type), multivalue); } } @@ -498,6 +630,18 @@ public String toString() { record TestBlock(TestColumn column, Block block, Density density) { + static TestBlock create(TestColumn column, Block block) { + Density density; + if (block.areAllValuesNull()) { + density = Density.Empty; + } else if (block.mayHaveNulls()) { + density = Density.Sparse; + } else { + density = Density.Dense; + } + return new TestBlock(column, block, density); + } + static TestBlock create(BlockFactory factory, TestColumn column, int positions) { return create(factory, column, randomFrom(Density.values()), positions); } @@ -517,10 +661,21 @@ static TestBlock create(BlockFactory factory, TestColumn column, Density density start = 2; } for (int i = start; i < positions; i++) { - valueType.addValue(builder, density); + // If multivalued, randomly insert a series of values if the type isn't null (nulls are not allowed in multivalues) + if (column.multivalue && column.valueType != NULL_VALUES && randomBoolean()) { + builder.beginPositionEntry(); + int numEntries = randomIntBetween(2, 5); + for (int j = 0; j < numEntries; j++) { + valueType.addValue(builder, Density.Dense); + } + builder.endPositionEntry(); + } else { + valueType.addValue(builder, density); + } } // Will create an ArrayBlock if there are null values, VectorBlock otherwise block = builder.build(); + 
assertEquals(positions, block.getPositionCount()); } return new TestBlock(column, block, density); } @@ -553,17 +708,20 @@ interface ValueType { public static class ValueTypeImpl implements ValueType { + private final String name; private final Function builderCreator; private final Consumer valueAdder; private final TriFunction blockGetter; private final BiFunction vectorGetter; public ValueTypeImpl( + String name, Function builderCreator, Consumer valueAdder, TriFunction blockGetter, BiFunction vectorGetter ) { + this.name = name; this.builderCreator = builderCreator; this.valueAdder = valueAdder; this.blockGetter = blockGetter; @@ -588,13 +746,35 @@ public void addValue(Block.Builder builder, Density density) { @Override @SuppressWarnings("unchecked") public Object valueAt(Block block, int position, BytesRef scratch) { - return blockGetter.apply((BlockT) block, position, scratch); + // Build the list of values + var values = new ArrayList<>(); + for (int i = block.getFirstValueIndex(position); i < block.getFirstValueIndex(position + 1); i++) { + values.add(blockGetter.apply((BlockT) block, i, scratch)); + scratch = new BytesRef(); // do not overwrite previous value + } + return values.size() == 1 ? values.getFirst() : values; } @Override @SuppressWarnings("unchecked") public Object valueAt(ValueVector arrowVec, int position) { - return vectorGetter.apply((VectorT) arrowVec, position); + if (arrowVec instanceof ListVector listVector) { + var type = listVector.getField().getMetadata().get("elastic:type"); + // Build the list of values + var valueVec = listVector.getDataVector(); + var values = new ArrayList<>(); + for (int i = listVector.getElementStartIndex(position); i < listVector.getElementEndIndex(position); i++) { + values.add(vectorGetter.apply((VectorT) valueVec, i)); + } + return values.size() == 1 ? 
values.getFirst() : values; + } else { + return vectorGetter.apply((VectorT) arrowVec, position); + } + } + + @Override + public String toString() { + return name; } } } From ffcd62e32bd03a1ed52afb06c3510f5b76361683 Mon Sep 17 00:00:00 2001 From: Stanislav Malyshev Date: Mon, 21 Oct 2024 12:01:46 -0600 Subject: [PATCH 263/449] Fix test - times can be 0 sometimes (#115260) --- muted-tests.yml | 6 ------ .../test/cluster.stats/30_ccs_stats.yml | 16 ++++++++-------- 2 files changed, 8 insertions(+), 14 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 4f3ba742d16fa..482966e0f97f9 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -256,9 +256,6 @@ tests: - class: org.elasticsearch.xpack.inference.services.cohere.CohereServiceTests method: testInfer_StreamRequest_ErrorResponse issue: https://github.com/elastic/elasticsearch/issues/114327 -- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT - method: test {yaml=cluster.stats/30_ccs_stats/cross-cluster search stats search} - issue: https://github.com/elastic/elasticsearch/issues/114371 - class: org.elasticsearch.xpack.inference.services.cohere.CohereServiceTests method: testInfer_StreamRequest issue: https://github.com/elastic/elasticsearch/issues/114385 @@ -311,9 +308,6 @@ tests: - class: org.elasticsearch.packaging.test.EnrollmentProcessTests method: test20DockerAutoFormCluster issue: https://github.com/elastic/elasticsearch/issues/114885 -- class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT - method: test {yaml=cluster.stats/30_ccs_stats/cross-cluster search stats search} - issue: https://github.com/elastic/elasticsearch/issues/114902 - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT method: testInferDeploysDefaultElser issue: https://github.com/elastic/elasticsearch/issues/114913 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/30_ccs_stats.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/30_ccs_stats.yml index 955c68634e617..689c58dad31e6 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/30_ccs_stats.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/30_ccs_stats.yml @@ -121,10 +121,10 @@ - is_true: ccs._search.total - is_true: ccs._search.success - exists: ccs._search.skipped - - is_true: ccs._search.took - - is_true: ccs._search.took.max - - is_true: ccs._search.took.avg - - is_true: ccs._search.took.p90 + - exists: ccs._search.took + - exists: ccs._search.took.max + - exists: ccs._search.took.avg + - exists: ccs._search.took.p90 - is_true: ccs._search.took_mrt_true - exists: ccs._search.took_mrt_true.max - exists: ccs._search.took_mrt_true.avg @@ -145,7 +145,7 @@ - gte: {ccs._search.clusters.cluster_two.total: 1} - exists: ccs._search.clusters.cluster_one.skipped - exists: ccs._search.clusters.cluster_two.skipped - - is_true: ccs._search.clusters.cluster_one.took - - is_true: ccs._search.clusters.cluster_one.took.max - - is_true: ccs._search.clusters.cluster_one.took.avg - - is_true: ccs._search.clusters.cluster_one.took.p90 + - exists: ccs._search.clusters.cluster_one.took + - exists: ccs._search.clusters.cluster_one.took.max + - exists: ccs._search.clusters.cluster_one.took.avg + - exists: ccs._search.clusters.cluster_one.took.p90 From 7ceb4d85a942bcc0796033e1394ca9d4ebd430cf Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Mon, 21 Oct 2024 20:10:35 +0200 Subject: [PATCH 264/449] [ESQL] Make sure we built consistent OrdinalBytesRefBlock in BlockHashRandomizedTests (#115081) --- .../blockhash/BlockHashRandomizedTests.java | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java index 76d4caf810eb8..42ac4cf2ff917 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java @@ -475,17 +475,14 @@ public ElementType elementType() { @Override public BasicBlockTests.RandomBlock randomBlock(int positionCount, int maxValuesPerPosition, int dups) { - List> dictionary = new ArrayList<>(); + Map dictionary = new HashMap<>(); + Set keys = dictionary(maxValuesPerPosition); List> values = new ArrayList<>(positionCount); try ( IntBlock.Builder ordinals = TestBlockFactory.getNonBreakingInstance() .newIntBlockBuilder(positionCount * maxValuesPerPosition); BytesRefVector.Builder bytes = TestBlockFactory.getNonBreakingInstance().newBytesRefVectorBuilder(maxValuesPerPosition); ) { - for (String value : dictionary(maxValuesPerPosition)) { - bytes.appendBytesRef(new BytesRef(value)); - dictionary.add(Map.entry(value, dictionary.size())); - } for (int p = 0; p < positionCount; p++) { int valueCount = between(1, maxValuesPerPosition); int dupCount = between(0, dups); @@ -497,10 +494,14 @@ public BasicBlockTests.RandomBlock randomBlock(int positionCount, int maxValuesP ordinals.beginPositionEntry(); } for (int v = 0; v < valueCount; v++) { - Map.Entry value = randomFrom(dictionary); - valuesAtPosition.add(new BytesRef(value.getKey())); - ordinals.appendInt(value.getValue()); - ordsAtPosition.add(value.getValue()); + String key = randomFrom(keys); + int ordinal = dictionary.computeIfAbsent(key, k -> { + bytes.appendBytesRef(new BytesRef(k)); + return dictionary.size(); + }); + valuesAtPosition.add(new BytesRef(key)); + ordinals.appendInt(ordinal); + ordsAtPosition.add(ordinal); } for (int v = 0; v < dupCount; v++) { 
ordinals.appendInt(randomFrom(ordsAtPosition)); From bc57bb02c1f5271d6f20159fafbea0fa7f01de02 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Mon, 21 Oct 2024 20:28:00 +0200 Subject: [PATCH 265/449] Always check the parent breaker with zero bytes in PreallocatedCircuitBreakerService (#115181) PreallocatedCircuitBreakerService will call the parent breaker if the number of bytes passed is zero. --- docs/changelog/115181.yaml | 5 +++++ .../common/breaker/PreallocatedCircuitBreakerService.java | 4 ++-- 2 files changed, 7 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/115181.yaml diff --git a/docs/changelog/115181.yaml b/docs/changelog/115181.yaml new file mode 100644 index 0000000000000..65f59d5ed0add --- /dev/null +++ b/docs/changelog/115181.yaml @@ -0,0 +1,5 @@ +pr: 115181 +summary: Always check the parent breaker with zero bytes in `PreallocatedCircuitBreakerService` +area: Aggregations +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/common/breaker/PreallocatedCircuitBreakerService.java b/server/src/main/java/org/elasticsearch/common/breaker/PreallocatedCircuitBreakerService.java index 9327dbe78077f..e5c9b14cf90fc 100644 --- a/server/src/main/java/org/elasticsearch/common/breaker/PreallocatedCircuitBreakerService.java +++ b/server/src/main/java/org/elasticsearch/common/breaker/PreallocatedCircuitBreakerService.java @@ -109,8 +109,8 @@ public void addEstimateBytesAndMaybeBreak(long bytes, String label) throws Circu if (closed) { throw new IllegalStateException("already closed"); } - if (preallocationUsed == preallocated) { - // Preallocation buffer was full before this request + if (preallocationUsed == preallocated || bytes == 0L) { + // Preallocation buffer was full before this request or we are checking the parent circuit breaker next.addEstimateBytesAndMaybeBreak(bytes, label); return; } From 79de53ae7b84547b00084327ce1ee38d5c7e6097 Mon Sep 17 00:00:00 2001 From: Michael Peterson Date: Mon, 21 Oct 2024 15:13:21 -0400 
Subject: [PATCH 266/449] Unmute recently failing CCQ tests (#115218) --- muted-tests.yml | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 482966e0f97f9..971fc161c4632 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -271,9 +271,6 @@ tests: - class: org.elasticsearch.packaging.test.DockerTests method: test022InstallPluginsFromLocalArchive issue: https://github.com/elastic/elasticsearch/issues/111063 -- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT - method: test {yaml=reference/esql/esql-across-clusters/line_196} - issue: https://github.com/elastic/elasticsearch/issues/114488 - class: org.elasticsearch.gradle.internal.PublishPluginFuncTest issue: https://github.com/elastic/elasticsearch/issues/114492 - class: org.elasticsearch.xpack.inference.DefaultElserIT @@ -332,14 +329,6 @@ tests: - class: org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapperTests method: testBlockLoaderFromRowStrideReaderWithSyntheticSource issue: https://github.com/elastic/elasticsearch/issues/115076 -- class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT - method: test {string.ValuesGrouped} - issue: https://github.com/elastic/elasticsearch/issues/115126 -- class: org.elasticsearch.xpack.esql.action.CrossClustersQueryIT - method: testCCSExecutionOnSearchesWithLimit0 - issue: https://github.com/elastic/elasticsearch/issues/115129 -- class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT - issue: https://github.com/elastic/elasticsearch/issues/115135 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=esql/60_usage/Basic ESQL usage output (telemetry)} issue: https://github.com/elastic/elasticsearch/issues/115231 From 2ff6bb05431ea1525278dcc858e01ce30fb0e15c Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Mon, 21 Oct 2024 17:08:50 -0500 Subject: [PATCH 267/449] Adding support for additional mapping to simulate ingest API (#114742) --- docs/changelog/114742.yaml | 5 + 
.../ingest/apis/simulate-ingest.asciidoc | 13 + .../test/ingest/80_ingest_simulate.yml | 355 ++++++++++++++++++ .../bulk/TransportSimulateBulkActionIT.java | 54 ++- .../org/elasticsearch/TransportVersions.java | 1 + .../action/bulk/BulkFeatures.java | 4 +- .../action/bulk/SimulateBulkRequest.java | 77 ++-- .../bulk/TransportSimulateBulkAction.java | 172 +++++---- .../ingest/RestSimulateIngestAction.java | 17 +- .../action/bulk/SimulateBulkRequestTests.java | 94 ++++- .../TransportSimulateBulkActionTests.java | 8 +- .../ingest/SimulateIngestServiceTests.java | 6 +- 12 files changed, 675 insertions(+), 131 deletions(-) create mode 100644 docs/changelog/114742.yaml diff --git a/docs/changelog/114742.yaml b/docs/changelog/114742.yaml new file mode 100644 index 0000000000000..5bd3dad4400b8 --- /dev/null +++ b/docs/changelog/114742.yaml @@ -0,0 +1,5 @@ +pr: 114742 +summary: Adding support for additional mapping to simulate ingest API +area: Ingest Node +type: enhancement +issues: [] diff --git a/docs/reference/ingest/apis/simulate-ingest.asciidoc b/docs/reference/ingest/apis/simulate-ingest.asciidoc index 1bee03ea3e58a..da591eed7546f 100644 --- a/docs/reference/ingest/apis/simulate-ingest.asciidoc +++ b/docs/reference/ingest/apis/simulate-ingest.asciidoc @@ -108,6 +108,14 @@ POST /_ingest/_simulate "index_patterns": ["my-index-*"], "composed_of": ["component_template_1", "component_template_2"] } + }, + "mapping_addition": { <4> + "dynamic": "strict", + "properties": { + "foo": { + "type": "keyword" + } + } } } ---- @@ -117,6 +125,7 @@ POST /_ingest/_simulate These templates can be used to change the pipeline(s) used, or to modify the mapping that will be used to validate the result. <3> This replaces the existing `my-index-template` index template with the contents given here for the duration of this request. These templates can be used to change the pipeline(s) used, or to modify the mapping that will be used to validate the result. 
+<4> This mapping is merged into the index's final mapping just before validation. It is used only for the duration of this request. [[simulate-ingest-api-request]] ==== {api-request-title} @@ -246,6 +255,10 @@ include::{es-ref-dir}/indices/put-index-template.asciidoc[tag=request-body] ==== +`mapping_addition`:: +(Optional, <>) +Definition of a mapping that will be merged into the index's mapping for validation during the course of this request. + [[simulate-ingest-api-example]] ==== {api-examples-title} diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml index 18eb401aaa0fe..d4aa2f1ad4467 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml @@ -1216,3 +1216,358 @@ setup: - match: { docs.0.doc._source.foo: "FOO" } - match: { docs.0.doc.executed_pipelines: ["foo-pipeline-2"] } - not_exists: docs.0.doc.error + +--- +"Test ingest simulate with mapping addition for data streams": + # In this test, we make sure that when the index template is a data stream template, simulate ingest works the same whether the data + # stream has been created or not -- either way, we expect it to use the template rather than the data stream / index mappings and settings. 
+ + - skip: + features: + - headers + - allowed_warnings + + - requires: + cluster_features: ["simulate.mapping.addition"] + reason: "ingest simulate mapping addition added in 8.16" + + - do: + headers: + Content-Type: application/json + ingest.put_pipeline: + id: "foo-pipeline" + body: > + { + "processors": [ + { + "set": { + "field": "foo", + "value": true + } + } + ] + } + - match: { acknowledged: true } + + - do: + cluster.put_component_template: + name: mappings_template + body: + template: + mappings: + dynamic: strict + properties: + foo: + type: boolean + + - do: + cluster.put_component_template: + name: settings_template + body: + template: + settings: + index: + default_pipeline: "foo-pipeline" + + - do: + allowed_warnings: + - "index template [test-composable-1] has index patterns [foo*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [test-composable-1] will take precedence during new index creation" + indices.put_index_template: + name: test-composable-1 + body: + index_patterns: + - foo* + composed_of: + - mappings_template + - settings_template + + - do: + allowed_warnings: + - "index template [my-template-1] has index patterns [simple-data-stream1] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template-1] will take precedence during new index creation" + indices.put_index_template: + name: my-template-1 + body: + index_patterns: [simple-data-stream1] + composed_of: + - mappings_template + - settings_template + data_stream: {} + + # Here we replace my-template-1 with a substitute version that uses the settings_template_2 and mappings_template_2 templates defined in + # this request, and foo-pipeline-2 defined in this request. 
+ - do: + headers: + Content-Type: application/json + simulate.ingest: + index: simple-data-stream1 + body: > + { + "docs": [ + { + "_id": "asdf", + "_source": { + "@timestamp": 1234, + "foo": false + } + } + ], + "pipeline_substitutions": { + "foo-pipeline-2": { + "processors": [ + { + "set": { + "field": "foo", + "value": "FOO" + } + } + ] + } + }, + "component_template_substitutions": { + "settings_template_2": { + "template": { + "settings": { + "index": { + "default_pipeline": "foo-pipeline-2" + } + } + } + }, + "mappings_template_2": { + "template": { + "mappings": { + "dynamic": "strict", + "properties": { + "foo": { + "type": "integer" + } + } + } + } + } + }, + "index_template_substitutions": { + "my-template-1": { + "index_patterns": ["simple-data-stream1"], + "composed_of": ["settings_template_2", "mappings_template_2"], + "data_stream": {} + } + }, + "mapping_addition": { + "dynamic": "strict", + "properties": { + "foo": { + "type": "keyword" + } + } + } + } + - length: { docs: 1 } + - match: { docs.0.doc._index: "simple-data-stream1" } + - match: { docs.0.doc._source.foo: "FOO" } + - match: { docs.0.doc.executed_pipelines: ["foo-pipeline-2"] } + - not_exists: docs.0.doc.error + + - do: + indices.create_data_stream: + name: simple-data-stream1 + - is_true: acknowledged + + - do: + cluster.health: + wait_for_status: yellow + + # Now that we have created a data stream, run the exact same simulate ingest request to make sure we still get the same result, and that + # the substitutions and additions from the simulate ingest request are used instead of information from the data stream or its backing + # index. 
+ - do: + headers: + Content-Type: application/json + simulate.ingest: + index: simple-data-stream1 + body: > + { + "docs": [ + { + "_id": "asdf", + "_source": { + "@timestamp": 1234, + "foo": false + } + } + ], + "pipeline_substitutions": { + "foo-pipeline-2": { + "processors": [ + { + "set": { + "field": "foo", + "value": "FOO" + } + } + ] + } + }, + "component_template_substitutions": { + "settings_template_2": { + "template": { + "settings": { + "index": { + "default_pipeline": "foo-pipeline-2" + } + } + } + }, + "mappings_template_2": { + "template": { + "mappings": { + "dynamic": "strict", + "properties": { + "foo": { + "type": "integer" + } + } + } + } + } + }, + "index_template_substitutions": { + "my-template-1": { + "index_patterns": ["simple-data-stream1"], + "composed_of": ["settings_template_2", "mappings_template_2"], + "data_stream": {} + } + }, + "mapping_addition": { + "dynamic": "strict", + "properties": { + "foo": { + "type": "keyword" + } + } + } + } + - length: { docs: 1 } + - match: { docs.0.doc._index: "simple-data-stream1" } + - match: { docs.0.doc._source.foo: "FOO" } + - match: { docs.0.doc.executed_pipelines: ["foo-pipeline-2"] } + - not_exists: docs.0.doc.error + +--- +"Test mapping addition works with legacy templates": + # In this test, we make sure that a mapping addition supplied in the simulate ingest request is merged into the mapping coming from a + # legacy (v1) index template, and that this works the same whether or not the target index has already been created. 
+ + - skip: + features: + - headers + - allowed_warnings + + - requires: + cluster_features: ["simulate.mapping.addition"] + reason: "ingest simulate mapping addition added in 8.16" + + - do: + indices.put_template: + name: my-legacy-template + body: + index_patterns: foo-* + settings: + number_of_replicas: 0 + mappings: + dynamic: strict + properties: + foo: + type: integer + bar: + type: boolean + + - do: + headers: + Content-Type: application/json + simulate.ingest: + index: foo-1 + body: > + { + "docs": [ + { + "_id": "asdf", + "_source": { + "foo": 3, + "bar": "not a boolean" + } + } + ] + } + - length: { docs: 1 } + - match: { docs.0.doc._index: "foo-1" } + - match: { docs.0.doc._source.foo: 3 } + - match: { docs.0.doc._source.bar: "not a boolean" } + - match: { docs.0.doc.error.type: "document_parsing_exception" } + + - do: + headers: + Content-Type: application/json + simulate.ingest: + index: foo-1 + body: > + { + "docs": [ + { + "_id": "asdf", + "_source": { + "foo": 3, + "bar": "not a boolean" + } + } + ], + "mapping_addition": { + "dynamic": "strict", + "properties": { + "bar": { + "type": "keyword" + } + } + } + } + - length: { docs: 1 } + - match: { docs.0.doc._index: "foo-1" } + - match: { docs.0.doc._source.foo: 3 } + - match: { docs.0.doc._source.bar: "not a boolean" } + - not_exists: docs.0.doc.error + + - do: + indices.create: + index: foo-1 + - match: { acknowledged: true } + + - do: + headers: + Content-Type: application/json + simulate.ingest: + index: foo-1 + body: > + { + "docs": [ + { + "_id": "asdf", + "_source": { + "foo": 3, + "bar": "not a boolean" + } + } + ], + "mapping_addition": { + "dynamic": "strict", + "properties": { + "bar": { + "type": "keyword" + } + } + } + } + - length: { docs: 1 } + - match: { docs.0.doc._index: "foo-1" } + - match: { docs.0.doc._source.foo: 3 } + - match: { docs.0.doc._source.bar: "not a boolean" } + - not_exists: docs.0.doc.error diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java index cd17c5b345c59..d5d21c548a15d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java @@ -34,6 +34,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.concurrent.TimeUnit; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -59,7 +60,7 @@ public void testMappingValidationIndexExists() { } """; indicesAdmin().create(new CreateIndexRequest(indexName).mapping(mapping)).actionGet(); - BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of()); + BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of(), Map.of()); bulkRequest.add(new IndexRequest(indexName).source(""" { "foo1": "baz" @@ -131,10 +132,10 @@ public void testMappingValidationIndexExistsTemplateSubstitutions() throws IOExc String indexName = "my-index-1"; // First, run before the index is created: - assertMappingsUpdatedFromComponentTemplateSubstitutions(indexName, indexTemplateName); + assertMappingsUpdatedFromSubstitutions(indexName, indexTemplateName); // Now, create the index and make sure the component template substitutions work the same: indicesAdmin().create(new CreateIndexRequest(indexName)).actionGet(); - assertMappingsUpdatedFromComponentTemplateSubstitutions(indexName, indexTemplateName); + assertMappingsUpdatedFromSubstitutions(indexName, indexTemplateName); // Now make sure nothing was actually changed: indicesAdmin().refresh(new RefreshRequest(indexName)).actionGet(); SearchResponse searchResponse = client().search(new SearchRequest(indexName)).actionGet(); @@ -146,7 +147,7 @@ public void 
testMappingValidationIndexExistsTemplateSubstitutions() throws IOExc assertThat(fields.size(), equalTo(1)); } - private void assertMappingsUpdatedFromComponentTemplateSubstitutions(String indexName, String indexTemplateName) { + private void assertMappingsUpdatedFromSubstitutions(String indexName, String indexTemplateName) { IndexRequest indexRequest1 = new IndexRequest(indexName).source(""" { "foo1": "baz" @@ -159,7 +160,7 @@ private void assertMappingsUpdatedFromComponentTemplateSubstitutions(String inde """, XContentType.JSON).id(randomUUID()); { // First we use the original component template, and expect a failure in the second document: - BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of()); + BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of(), Map.of()); bulkRequest.add(indexRequest1); bulkRequest.add(indexRequest2); BulkResponse response = client().execute(new ActionType(SimulateBulkAction.NAME), bulkRequest).actionGet(); @@ -192,6 +193,7 @@ private void assertMappingsUpdatedFromComponentTemplateSubstitutions(String inde ) ) ), + Map.of(), Map.of() ); bulkRequest.add(indexRequest1); @@ -226,7 +228,34 @@ private void assertMappingsUpdatedFromComponentTemplateSubstitutions(String inde ) ) ), - Map.of(indexTemplateName, Map.of("index_patterns", List.of(indexName), "composed_of", List.of("test-component-template-2"))) + Map.of( + indexTemplateName, + Map.of("index_patterns", List.of(indexName), "composed_of", List.of("test-component-template-2")) + ), + Map.of() + ); + bulkRequest.add(indexRequest1); + bulkRequest.add(indexRequest2); + BulkResponse response = client().execute(new ActionType(SimulateBulkAction.NAME), bulkRequest).actionGet(); + assertThat(response.getItems().length, equalTo(2)); + assertThat(response.getItems()[0].getResponse().getResult(), equalTo(DocWriteResponse.Result.CREATED)); + assertNull(((SimulateIndexResponse) response.getItems()[0].getResponse()).getException()); + 
assertThat(response.getItems()[1].getResponse().getResult(), equalTo(DocWriteResponse.Result.CREATED)); + assertNull(((SimulateIndexResponse) response.getItems()[1].getResponse()).getException()); + } + + { + /* + * Now we use a mapping_addition that defines both fields, so we expect no exception: + */ + BulkRequest bulkRequest = new SimulateBulkRequest( + Map.of(), + Map.of(), + Map.of(), + Map.of( + "_doc", + Map.of("dynamic", "strict", "properties", Map.of("foo1", Map.of("type", "text"), "foo3", Map.of("type", "text"))) + ) ); bulkRequest.add(indexRequest1); bulkRequest.add(indexRequest2); @@ -245,7 +274,7 @@ public void testMappingValidationIndexDoesNotExistsNoTemplate() { * mapping-less "random-index-template" created by the parent class), so we expect no mapping validation failure. */ String indexName = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of()); + BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of(), Map.of()); bulkRequest.add(new IndexRequest(indexName).source(""" { "foo1": "baz" @@ -292,7 +321,7 @@ public void testMappingValidationIndexDoesNotExistsV2Template() throws IOExcepti request.indexTemplate(composableIndexTemplate); client().execute(TransportPutComposableIndexTemplateAction.TYPE, request).actionGet(); - BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of()); + BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of(), Map.of()); bulkRequest.add(new IndexRequest(indexName).source(""" { "foo1": "baz" @@ -324,7 +353,7 @@ public void testMappingValidationIndexDoesNotExistsV1Template() { indicesAdmin().putTemplate( new PutIndexTemplateRequest("test-template").patterns(List.of("my-index-*")).mapping("foo1", "type=integer") ).actionGet(); - BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of()); + BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of(), 
Map.of()); bulkRequest.add(new IndexRequest(indexName).source(""" { "foo1": "baz" @@ -378,7 +407,7 @@ public void testMappingValidationIndexDoesNotExistsDataStream() throws IOExcepti client().execute(TransportPutComposableIndexTemplateAction.TYPE, request).actionGet(); { // First, try with no @timestamp to make sure we're picking up data-stream-specific templates - BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of()); + BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of(), Map.of()); bulkRequest.add(new IndexRequest(indexName).source(""" { "foo1": "baz" @@ -389,7 +418,8 @@ public void testMappingValidationIndexDoesNotExistsDataStream() throws IOExcepti "foo3": "baz" } """, XContentType.JSON).id(randomUUID())); - BulkResponse response = client().execute(new ActionType(SimulateBulkAction.NAME), bulkRequest).actionGet(); + BulkResponse response = client().execute(new ActionType(SimulateBulkAction.NAME), bulkRequest) + .actionGet(5, TimeUnit.SECONDS); assertThat(response.getItems().length, equalTo(2)); assertThat(response.getItems()[0].getResponse().getResult(), equalTo(DocWriteResponse.Result.CREATED)); assertThat( @@ -404,7 +434,7 @@ public void testMappingValidationIndexDoesNotExistsDataStream() throws IOExcepti } { // Now with @timestamp - BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of()); + BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of(), Map.of()); bulkRequest.add(new IndexRequest(indexName).source(""" { "@timestamp": "2024-08-27", diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index cde09d33516c9..7e06004e47cfb 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -178,6 +178,7 @@ static TransportVersion def(int id) { public static final TransportVersion 
REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_774_00_0); public static final TransportVersion ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED = def(8_775_00_0); public static final TransportVersion INFERENCE_DONT_PERSIST_ON_READ = def(8_776_00_0); + public static final TransportVersion SIMULATE_MAPPING_ADDITION = def(8_777_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java index 78e603fba9be0..22cf8a2260d87 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java @@ -16,6 +16,7 @@ import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_COMPONENT_TEMPLATE_SUBSTITUTIONS; import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_INDEX_TEMPLATE_SUBSTITUTIONS; +import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_MAPPING_ADDITION; import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_MAPPING_VALIDATION; import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_MAPPING_VALIDATION_TEMPLATES; @@ -25,7 +26,8 @@ public Set getFeatures() { SIMULATE_MAPPING_VALIDATION, SIMULATE_MAPPING_VALIDATION_TEMPLATES, SIMULATE_COMPONENT_TEMPLATE_SUBSTITUTIONS, - SIMULATE_INDEX_TEMPLATE_SUBSTITUTIONS + SIMULATE_INDEX_TEMPLATE_SUBSTITUTIONS, + SIMULATE_MAPPING_ADDITION ); } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/SimulateBulkRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/SimulateBulkRequest.java index 6fa22151396df..cc7fd431d8097 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/SimulateBulkRequest.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/SimulateBulkRequest.java @@ -15,12 +15,12 @@ import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.XContentParserConfiguration; import java.io.IOException; -import java.util.HashMap; import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; /** * This extends BulkRequest with support for providing substitute pipeline definitions, component template definitions, and index template @@ -73,7 +73,8 @@ * } * } * } - * }, + * } + * }, * "index_template_substitutions": { * "my-index-template-1": { * "template": { @@ -84,6 +85,13 @@ * ] * } * } + * }, + * "mapping_addition": { + * "dynamic": "strict", + * "properties": { + * "foo": { + * "type": "keyword" + * } * } * * The pipelineSubstitutions Map held by this class is intended to be the result of XContentHelper.convertToMap(). The top-level keys @@ -94,6 +102,7 @@ public class SimulateBulkRequest extends BulkRequest { private final Map> pipelineSubstitutions; private final Map> componentTemplateSubstitutions; private final Map> indexTemplateSubstitutions; + private final Map mappingAddition; /** * @param pipelineSubstitutions The pipeline definitions that are to be used in place of any pre-existing pipeline definitions with @@ -103,16 +112,23 @@ public class SimulateBulkRequest extends BulkRequest { * component template definitions with the same name. * @param indexTemplateSubstitutions The index template definitions that are to be used in place of any pre-existing * index template definitions with the same name. 
+ * @param mappingAddition A mapping that will be merged into the final index's mapping for mapping validation */ public SimulateBulkRequest( - @Nullable Map> pipelineSubstitutions, - @Nullable Map> componentTemplateSubstitutions, - @Nullable Map> indexTemplateSubstitutions + Map> pipelineSubstitutions, + Map> componentTemplateSubstitutions, + Map> indexTemplateSubstitutions, + Map mappingAddition ) { super(); + Objects.requireNonNull(pipelineSubstitutions); + Objects.requireNonNull(componentTemplateSubstitutions); + Objects.requireNonNull(indexTemplateSubstitutions); + Objects.requireNonNull(mappingAddition); this.pipelineSubstitutions = pipelineSubstitutions; this.componentTemplateSubstitutions = componentTemplateSubstitutions; this.indexTemplateSubstitutions = indexTemplateSubstitutions; + this.mappingAddition = mappingAddition; } @SuppressWarnings("unchecked") @@ -129,6 +145,11 @@ public SimulateBulkRequest(StreamInput in) throws IOException { } else { indexTemplateSubstitutions = Map.of(); } + if (in.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_MAPPING_ADDITION)) { + this.mappingAddition = (Map) in.readGenericValue(); + } else { + mappingAddition = Map.of(); + } } @Override @@ -141,6 +162,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_INDEX_TEMPLATES_SUBSTITUTIONS)) { out.writeGenericValue(indexTemplateSubstitutions); } + if (out.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_MAPPING_ADDITION)) { + out.writeGenericValue(mappingAddition); + } } public Map> getPipelineSubstitutions() { @@ -153,41 +177,39 @@ public boolean isSimulated() { } @Override - public Map getComponentTemplateSubstitutions() throws IOException { - if (componentTemplateSubstitutions == null) { - return Map.of(); - } - Map result = new HashMap<>(componentTemplateSubstitutions.size()); - for (Map.Entry> rawEntry : componentTemplateSubstitutions.entrySet()) { - 
result.put(rawEntry.getKey(), convertRawTemplateToComponentTemplate(rawEntry.getValue())); - } - return result; + public Map getComponentTemplateSubstitutions() { + return componentTemplateSubstitutions.entrySet() + .stream() + .collect(Collectors.toMap(Map.Entry::getKey, entry -> convertRawTemplateToComponentTemplate(entry.getValue()))); } @Override - public Map getIndexTemplateSubstitutions() throws IOException { - if (indexTemplateSubstitutions == null) { - return Map.of(); - } - Map result = new HashMap<>(indexTemplateSubstitutions.size()); - for (Map.Entry> rawEntry : indexTemplateSubstitutions.entrySet()) { - result.put(rawEntry.getKey(), convertRawTemplateToIndexTemplate(rawEntry.getValue())); - } - return result; + public Map getIndexTemplateSubstitutions() { + return indexTemplateSubstitutions.entrySet() + .stream() + .collect(Collectors.toMap(Map.Entry::getKey, entry -> convertRawTemplateToIndexTemplate(entry.getValue()))); + } + + public Map getMappingAddition() { + return mappingAddition; } - private static ComponentTemplate convertRawTemplateToComponentTemplate(Map rawTemplate) throws IOException { + private static ComponentTemplate convertRawTemplateToComponentTemplate(Map rawTemplate) { ComponentTemplate componentTemplate; try (var parser = XContentHelper.mapToXContentParser(XContentParserConfiguration.EMPTY, rawTemplate)) { componentTemplate = ComponentTemplate.parse(parser); + } catch (IOException e) { + throw new RuntimeException(e); } return componentTemplate; } - private static ComposableIndexTemplate convertRawTemplateToIndexTemplate(Map rawTemplate) throws IOException { + private static ComposableIndexTemplate convertRawTemplateToIndexTemplate(Map rawTemplate) { ComposableIndexTemplate indexTemplate; try (var parser = XContentHelper.mapToXContentParser(XContentParserConfiguration.EMPTY, rawTemplate)) { indexTemplate = ComposableIndexTemplate.parse(parser); + } catch (IOException e) { + throw new RuntimeException(e); } return indexTemplate; } 
@@ -197,7 +219,8 @@ public BulkRequest shallowClone() { BulkRequest bulkRequest = new SimulateBulkRequest( pipelineSubstitutions, componentTemplateSubstitutions, - indexTemplateSubstitutions + indexTemplateSubstitutions, + mappingAddition ); bulkRequest.setRefreshPolicy(getRefreshPolicy()); bulkRequest.waitForActiveShards(waitForActiveShards()); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java index d7c555879c00f..0888b70f5399c 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java @@ -26,10 +26,13 @@ import org.elasticsearch.cluster.metadata.MetadataCreateIndexService; import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.Nullable; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexSettingProvider; import org.elasticsearch.index.IndexSettingProviders; @@ -37,6 +40,7 @@ import org.elasticsearch.index.IndexingPressure; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.IndexShard; @@ -50,6 +54,10 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import 
org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; import java.io.IOException; import java.util.HashMap; @@ -75,6 +83,7 @@ public class TransportSimulateBulkAction extends TransportAbstractBulkAction { "simulate.component.template.substitutions" ); public static final NodeFeature SIMULATE_INDEX_TEMPLATE_SUBSTITUTIONS = new NodeFeature("simulate.index.template.substitutions"); + public static final NodeFeature SIMULATE_MAPPING_ADDITION = new NodeFeature("simulate.mapping.addition"); private final IndicesService indicesService; private final NamedXContentRegistry xContentRegistry; private final Set indexSettingProviders; @@ -122,11 +131,17 @@ protected void doInternalExecute( final AtomicArray responses = new AtomicArray<>(bulkRequest.requests.size()); Map componentTemplateSubstitutions = bulkRequest.getComponentTemplateSubstitutions(); Map indexTemplateSubstitutions = bulkRequest.getIndexTemplateSubstitutions(); + Map mappingAddition = ((SimulateBulkRequest) bulkRequest).getMappingAddition(); for (int i = 0; i < bulkRequest.requests.size(); i++) { DocWriteRequest docRequest = bulkRequest.requests.get(i); assert docRequest instanceof IndexRequest : "TransportSimulateBulkAction should only ever be called with IndexRequests"; IndexRequest request = (IndexRequest) docRequest; - Exception mappingValidationException = validateMappings(componentTemplateSubstitutions, indexTemplateSubstitutions, request); + Exception mappingValidationException = validateMappings( + componentTemplateSubstitutions, + indexTemplateSubstitutions, + mappingAddition, + request + ); responses.set( i, BulkItemResponse.success( @@ -159,6 +174,7 @@ protected void doInternalExecute( private Exception validateMappings( Map componentTemplateSubstitutions, Map indexTemplateSubstitutions, + Map 
mappingAddition, IndexRequest request ) { final SourceToParse sourceToParse = new SourceToParse( @@ -174,7 +190,10 @@ private Exception validateMappings( Exception mappingValidationException = null; IndexAbstraction indexAbstraction = state.metadata().getIndicesLookup().get(request.index()); try { - if (indexAbstraction != null && componentTemplateSubstitutions.isEmpty() && indexTemplateSubstitutions.isEmpty()) { + if (indexAbstraction != null + && componentTemplateSubstitutions.isEmpty() + && indexTemplateSubstitutions.isEmpty() + && mappingAddition.isEmpty()) { /* * In this case the index exists and we don't have any component template overrides. So we can just use withTempIndexService * to do the mapping validation, using all the existing logic for validation. @@ -250,36 +269,8 @@ private Exception validateMappings( indexSettingProviders ); CompressedXContent mappings = template.mappings(); - if (mappings != null) { - MappingMetadata mappingMetadata = new MappingMetadata(mappings); - Settings dummySettings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexMetadata.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) - .build(); - final IndexMetadata imd = IndexMetadata.builder(request.index()) - .settings(dummySettings) - .putMapping(mappingMetadata) - .build(); - indicesService.withTempIndexService(imd, indexService -> { - indexService.mapperService().updateMapping(null, imd); - return IndexShard.prepareIndex( - indexService.mapperService(), - sourceToParse, - SequenceNumbers.UNASSIGNED_SEQ_NO, - -1, - -1, - VersionType.INTERNAL, - Engine.Operation.Origin.PRIMARY, - Long.MIN_VALUE, - false, - request.ifSeqNo(), - request.ifPrimaryTerm(), - 0 - ); - }); - } + CompressedXContent mergedMappings = mergeMappings(mappings, mappingAddition); + validateUpdatedMappings(mappings, mergedMappings, request, sourceToParse); } 
else { List matchingTemplates = findV1Templates(simulatedState.metadata(), request.index(), false); final Map mappingsMap = MetadataCreateIndexService.parseV1Mappings( @@ -287,40 +278,8 @@ private Exception validateMappings( matchingTemplates.stream().map(IndexTemplateMetadata::getMappings).collect(toList()), xContentRegistry ); - final CompressedXContent combinedMappings; - if (mappingsMap.isEmpty()) { - combinedMappings = null; - } else { - combinedMappings = new CompressedXContent(mappingsMap); - } - Settings dummySettings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexMetadata.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) - .build(); - MappingMetadata mappingMetadata = combinedMappings == null ? null : new MappingMetadata(combinedMappings); - final IndexMetadata imd = IndexMetadata.builder(request.index()) - .putMapping(mappingMetadata) - .settings(dummySettings) - .build(); - indicesService.withTempIndexService(imd, indexService -> { - indexService.mapperService().updateMapping(null, imd); - return IndexShard.prepareIndex( - indexService.mapperService(), - sourceToParse, - SequenceNumbers.UNASSIGNED_SEQ_NO, - -1, - -1, - VersionType.INTERNAL, - Engine.Operation.Origin.PRIMARY, - Long.MIN_VALUE, - false, - request.ifSeqNo(), - request.ifPrimaryTerm(), - 0 - ); - }); + final CompressedXContent combinedMappings = mergeMappings(new CompressedXContent(mappingsMap), mappingAddition); + validateUpdatedMappings(null, combinedMappings, request, sourceToParse); } } } catch (Exception e) { @@ -329,6 +288,66 @@ private Exception validateMappings( return mappingValidationException; } + /* + * Validates that when updatedMappings are applied + */ + private void validateUpdatedMappings( + @Nullable CompressedXContent originalMappings, + @Nullable CompressedXContent updatedMappings, + IndexRequest request, + SourceToParse 
sourceToParse + ) throws IOException { + if (updatedMappings == null) { + return; // no validation to do + } + Settings dummySettings = Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetadata.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) + .build(); + IndexMetadata.Builder originalIndexMetadataBuilder = IndexMetadata.builder(request.index()).settings(dummySettings); + if (originalMappings != null) { + originalIndexMetadataBuilder.putMapping(new MappingMetadata(originalMappings)); + } + final IndexMetadata originalIndexMetadata = originalIndexMetadataBuilder.build(); + final IndexMetadata updatedIndexMetadata = IndexMetadata.builder(request.index()) + .settings(dummySettings) + .putMapping(new MappingMetadata(updatedMappings)) + .build(); + indicesService.withTempIndexService(originalIndexMetadata, indexService -> { + indexService.mapperService().merge(updatedIndexMetadata, MapperService.MergeReason.MAPPING_UPDATE); + return IndexShard.prepareIndex( + indexService.mapperService(), + sourceToParse, + SequenceNumbers.UNASSIGNED_SEQ_NO, + -1, + -1, + VersionType.INTERNAL, + Engine.Operation.Origin.PRIMARY, + Long.MIN_VALUE, + false, + request.ifSeqNo(), + request.ifPrimaryTerm(), + 0 + ); + }); + } + + private static CompressedXContent mergeMappings(@Nullable CompressedXContent originalMapping, Map mappingAddition) + throws IOException { + Map combinedMappingMap = new HashMap<>(); + if (originalMapping != null) { + combinedMappingMap.putAll(XContentHelper.convertToMap(originalMapping.uncompressed(), true, XContentType.JSON).v2()); + } + XContentHelper.update(combinedMappingMap, mappingAddition, true); + if (combinedMappingMap.isEmpty()) { + return null; + } else { + return convertMappingMapToXContent(combinedMappingMap); + } + } + /* * This overrides TransportSimulateBulkAction's getIngestService to allow us to 
provide an IngestService that handles pipeline * substitutions defined in the request. @@ -344,4 +363,25 @@ protected Boolean resolveFailureStore(String indexName, Metadata metadata, long // A simulate bulk request should not change any persistent state in the system, so we never write to the failure store return null; } + + private static CompressedXContent convertMappingMapToXContent(Map rawAdditionalMapping) throws IOException { + CompressedXContent compressedXContent; + if (rawAdditionalMapping == null || rawAdditionalMapping.isEmpty()) { + compressedXContent = null; + } else { + try (var parser = XContentHelper.mapToXContentParser(XContentParserConfiguration.EMPTY, rawAdditionalMapping)) { + compressedXContent = mappingFromXContent(parser); + } + } + return compressedXContent; + } + + private static CompressedXContent mappingFromXContent(XContentParser parser) throws IOException { + XContentParser.Token token = parser.nextToken(); + if (token == XContentParser.Token.START_OBJECT) { + return new CompressedXContent(Strings.toString(XContentFactory.jsonBuilder().map(parser.mapOrdered()))); + } else { + throw new IllegalArgumentException("Unexpected token: " + token); + } + } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java index 680860332fe74..c825a8198e6e4 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java @@ -74,10 +74,21 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC String defaultPipeline = request.param("pipeline"); Tuple sourceTuple = request.contentOrSourceParam(); Map sourceMap = XContentHelper.convertToMap(sourceTuple.v2(), false, sourceTuple.v1()).v2(); + Map> pipelineSubstitutions = (Map>) sourceMap.remove( + "pipeline_substitutions" 
+ ); + Map> componentTemplateSubstitutions = (Map>) sourceMap.remove( + "component_template_substitutions" + ); + Map> indexTemplateSubstitutions = (Map>) sourceMap.remove( + "index_template_substitutions" + ); + Object mappingAddition = sourceMap.remove("mapping_addition"); SimulateBulkRequest bulkRequest = new SimulateBulkRequest( - (Map>) sourceMap.remove("pipeline_substitutions"), - (Map>) sourceMap.remove("component_template_substitutions"), - (Map>) sourceMap.remove("index_template_substitutions") + pipelineSubstitutions == null ? Map.of() : pipelineSubstitutions, + componentTemplateSubstitutions == null ? Map.of() : componentTemplateSubstitutions, + indexTemplateSubstitutions == null ? Map.of() : indexTemplateSubstitutions, + mappingAddition == null ? Map.of() : Map.of("_doc", mappingAddition) ); BytesReference transformedData = convertToBulkRequestXContentBytes(sourceMap); bulkRequest.add( diff --git a/server/src/test/java/org/elasticsearch/action/bulk/SimulateBulkRequestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/SimulateBulkRequestTests.java index c94e4e46c9ee3..1e651791eb18a 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/SimulateBulkRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/SimulateBulkRequestTests.java @@ -22,32 +22,74 @@ import java.util.List; import java.util.Map; +import static java.util.Map.entry; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; public class SimulateBulkRequestTests extends ESTestCase { public void testSerialization() throws Exception { - testSerialization(getTestPipelineSubstitutions(), getTestComponentTemplateSubstitutions(), getTestIndexTemplateSubstitutions()); - testSerialization(getTestPipelineSubstitutions(), null, null); - testSerialization(getTestPipelineSubstitutions(), getTestComponentTemplateSubstitutions(), null); - testSerialization(getTestPipelineSubstitutions(), null, getTestIndexTemplateSubstitutions()); 
- testSerialization(null, getTestComponentTemplateSubstitutions(), getTestIndexTemplateSubstitutions()); - testSerialization(null, getTestComponentTemplateSubstitutions(), null); - testSerialization(null, null, getTestIndexTemplateSubstitutions()); - testSerialization(null, null, null); - testSerialization(Map.of(), Map.of(), Map.of()); + testSerialization( + getMapOrEmpty(getTestPipelineSubstitutions()), + getMapOrEmpty(getTestComponentTemplateSubstitutions()), + getMapOrEmpty(getTestIndexTemplateSubstitutions()), + getMapOrEmpty(getTestMappingAddition()) + ); + } + + private Map getMapOrEmpty(Map map) { + if (randomBoolean()) { + return map; + } else { + return Map.of(); + } + } + + public void testNullsNotAllowed() { + assertThrows( + NullPointerException.class, + () -> new SimulateBulkRequest( + null, + getTestPipelineSubstitutions(), + getTestComponentTemplateSubstitutions(), + getTestMappingAddition() + ) + ); + assertThrows( + NullPointerException.class, + () -> new SimulateBulkRequest( + getTestPipelineSubstitutions(), + null, + getTestComponentTemplateSubstitutions(), + getTestMappingAddition() + ) + ); + assertThrows( + NullPointerException.class, + () -> new SimulateBulkRequest(getTestPipelineSubstitutions(), getTestPipelineSubstitutions(), null, getTestMappingAddition()) + ); + assertThrows( + NullPointerException.class, + () -> new SimulateBulkRequest( + getTestPipelineSubstitutions(), + getTestPipelineSubstitutions(), + getTestComponentTemplateSubstitutions(), + null + ) + ); } private void testSerialization( Map> pipelineSubstitutions, Map> componentTemplateSubstitutions, - Map> indexTemplateSubstitutions + Map> indexTemplateSubstitutions, + Map mappingAddition ) throws IOException { SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest( pipelineSubstitutions, componentTemplateSubstitutions, - indexTemplateSubstitutions + indexTemplateSubstitutions, + mappingAddition ); /* * Note: SimulateBulkRequest does not implement equals or hashCode, 
so we can't test serialization in the usual way for a @@ -59,7 +101,7 @@ private void testSerialization( @SuppressWarnings({ "unchecked", "rawtypes" }) public void testGetComponentTemplateSubstitutions() throws IOException { - SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of()); + SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of(), Map.of()); assertThat(simulateBulkRequest.getComponentTemplateSubstitutions(), equalTo(Map.of())); String substituteComponentTemplatesString = """ { @@ -93,7 +135,7 @@ public void testGetComponentTemplateSubstitutions() throws IOException { XContentType.JSON ).v2(); Map> substituteComponentTemplates = (Map>) tempMap; - simulateBulkRequest = new SimulateBulkRequest(Map.of(), substituteComponentTemplates, Map.of()); + simulateBulkRequest = new SimulateBulkRequest(Map.of(), substituteComponentTemplates, Map.of(), Map.of()); Map componentTemplateSubstitutions = simulateBulkRequest.getComponentTemplateSubstitutions(); assertThat(componentTemplateSubstitutions.size(), equalTo(2)); assertThat( @@ -118,7 +160,7 @@ public void testGetComponentTemplateSubstitutions() throws IOException { } public void testGetIndexTemplateSubstitutions() throws IOException { - SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of()); + SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of(), Map.of()); assertThat(simulateBulkRequest.getIndexTemplateSubstitutions(), equalTo(Map.of())); String substituteIndexTemplatesString = """ { @@ -154,7 +196,7 @@ public void testGetIndexTemplateSubstitutions() throws IOException { randomBoolean(), XContentType.JSON ).v2(); - simulateBulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), substituteIndexTemplates); + simulateBulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), substituteIndexTemplates, Map.of()); Map indexTemplateSubstitutions = 
simulateBulkRequest.getIndexTemplateSubstitutions(); assertThat(indexTemplateSubstitutions.size(), equalTo(2)); assertThat( @@ -179,7 +221,8 @@ public void testShallowClone() throws IOException { SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest( getTestPipelineSubstitutions(), getTestComponentTemplateSubstitutions(), - getTestIndexTemplateSubstitutions() + getTestIndexTemplateSubstitutions(), + getTestMappingAddition() ); simulateBulkRequest.setRefreshPolicy(randomFrom(WriteRequest.RefreshPolicy.values())); simulateBulkRequest.waitForActiveShards(randomIntBetween(1, 10)); @@ -204,7 +247,6 @@ public void testShallowClone() throws IOException { assertThat(shallowCopy.routing(), equalTo(simulateBulkRequest.routing())); assertThat(shallowCopy.requireAlias(), equalTo(simulateBulkRequest.requireAlias())); assertThat(shallowCopy.requireDataStream(), equalTo(simulateBulkRequest.requireDataStream())); - } private static Map> getTestPipelineSubstitutions() { @@ -248,4 +290,22 @@ private static Map> getTestIndexTemplateSubstitution Map.of("template", Map.of("index_patterns", List.of("foo*", "bar*"), "mappings", Map.of(), "settings", Map.of())) ); } + + private static Map getTestMappingAddition() { + return Map.ofEntries( + entry( + "_doc", + Map.ofEntries( + entry("dynamic", "strict"), + entry( + "properties", + Map.ofEntries( + entry("foo", Map.ofEntries(entry("type", "keyword"))), + entry("bar", Map.ofEntries(entry("type", "boolean"))) + ) + ) + ) + ) + ); + } } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java index 71bc31334920e..63d308e1579f3 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java @@ -135,7 +135,7 @@ public void tearDown() throws Exception { public void 
testIndexData() throws IOException { Task task = mock(Task.class); // unused - BulkRequest bulkRequest = new SimulateBulkRequest(null, null, null); + BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of(), Map.of()); int bulkItemCount = randomIntBetween(0, 200); for (int i = 0; i < bulkItemCount; i++) { Map source = Map.of(randomAlphaOfLength(10), randomAlphaOfLength(5)); @@ -218,7 +218,11 @@ public void testIndexDataWithValidation() throws IOException { * (7) An indexing request to a nonexistent index that matches no templates */ Task task = mock(Task.class); // unused - BulkRequest bulkRequest = new SimulateBulkRequest(null, null, null); + /* + * Here we only add a mapping_addition because if there is no mapping at all TransportSimulateBulkAction skips mapping validation + * altogether, and we need it to run for this test to pass. + */ + BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of(), Map.of("_doc", Map.of("dynamic", "strict"))); int bulkItemCount = randomIntBetween(0, 200); Map indicesMap = new HashMap<>(); Map v1Templates = new HashMap<>(); diff --git a/server/src/test/java/org/elasticsearch/ingest/SimulateIngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/SimulateIngestServiceTests.java index 3b3f5bdc747b5..94b3607bd7608 100644 --- a/server/src/test/java/org/elasticsearch/ingest/SimulateIngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/SimulateIngestServiceTests.java @@ -65,7 +65,7 @@ public void testGetPipeline() { ingestService.innerUpdatePipelines(ingestMetadata); { // First we make sure that if there are no substitutions that we get our original pipeline back: - SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(null, null, null); + SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of(), Map.of()); SimulateIngestService simulateIngestService = new SimulateIngestService(ingestService, 
simulateBulkRequest); Pipeline pipeline = simulateIngestService.getPipeline("pipeline1"); assertThat(pipeline.getProcessors(), contains(transformedMatch(Processor::getType, equalTo("processor1")))); @@ -83,7 +83,7 @@ public void testGetPipeline() { ); pipelineSubstitutions.put("pipeline2", newHashMap("processors", List.of(newHashMap("processor3", Collections.emptyMap())))); - SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(pipelineSubstitutions, null, null); + SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(pipelineSubstitutions, Map.of(), Map.of(), Map.of()); SimulateIngestService simulateIngestService = new SimulateIngestService(ingestService, simulateBulkRequest); Pipeline pipeline1 = simulateIngestService.getPipeline("pipeline1"); assertThat( @@ -103,7 +103,7 @@ public void testGetPipeline() { */ Map> pipelineSubstitutions = new HashMap<>(); pipelineSubstitutions.put("pipeline2", newHashMap("processors", List.of(newHashMap("processor3", Collections.emptyMap())))); - SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(pipelineSubstitutions, null, null); + SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(pipelineSubstitutions, Map.of(), Map.of(), Map.of()); SimulateIngestService simulateIngestService = new SimulateIngestService(ingestService, simulateBulkRequest); Pipeline pipeline1 = simulateIngestService.getPipeline("pipeline1"); assertThat(pipeline1.getProcessors(), contains(transformedMatch(Processor::getType, equalTo("processor1")))); From f6c0a245fd15519990316f1e17e3e30ef0d31662 Mon Sep 17 00:00:00 2001 From: Tim Vernum Date: Tue, 22 Oct 2024 11:20:22 +1100 Subject: [PATCH 268/449] [Test] Add client param indexExists (#115180) This allows us to use the admin client to easily check whether an index exists (that may not be visible to the standard client) --- .../java/org/elasticsearch/test/rest/ESRestTestCase.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index d17016f850300..22f93e6bda61f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -1896,7 +1896,11 @@ protected Map getIndexMappingAsMap(String index) throws IOExcept } protected static boolean indexExists(String index) throws IOException { - Response response = client().performRequest(new Request("HEAD", "/" + index)); + return indexExists(client(), index); + } + + protected static boolean indexExists(RestClient client, String index) throws IOException { + Response response = client.performRequest(new Request("HEAD", "/" + index)); return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode(); } From aff4edd51bcb32c6478056087492c73890f8cf23 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 22 Oct 2024 07:21:18 +0200 Subject: [PATCH 269/449] Don't use a BytesStreamOutput to copy keys in BytesRefBlockHash (#114819) Removes he size limit on BytesStreamOutput when copying keys. 
--- docs/changelog/114819.yaml | 6 +++++ .../blockhash/BytesRefBlockHash.java | 20 +++++------------ .../aggregation/blockhash/X-BlockHash.java.st | 22 +++++-------------- 3 files changed, 16 insertions(+), 32 deletions(-) create mode 100644 docs/changelog/114819.yaml diff --git a/docs/changelog/114819.yaml b/docs/changelog/114819.yaml new file mode 100644 index 0000000000000..f8d03f7024801 --- /dev/null +++ b/docs/changelog/114819.yaml @@ -0,0 +1,6 @@ +pr: 114819 +summary: Don't use a `BytesStreamOutput` to copy keys in `BytesRefBlockHash` +area: EQL +type: bug +issues: + - 114599 diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java index 3c5bf2c18c915..b8ea7658a8247 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java @@ -8,12 +8,9 @@ package org.elasticsearch.compute.aggregation.blockhash; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; -import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.common.util.BytesRefHash; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.SeenGroupIds; @@ -30,8 +27,6 @@ import org.elasticsearch.compute.operator.mvdedupe.MultivalueDedupeInt; import org.elasticsearch.core.ReleasableIterator; -import java.io.IOException; - /** * Maps a {@link BytesRefBlock} column to group ids. 
* This class is generated. Do not edit it. @@ -197,26 +192,21 @@ public BytesRefBlock[] getKeys() { * without and still read from the block. */ // TODO replace with takeBytesRefsOwnership ?! + final BytesRef spare = new BytesRef(); if (seenNull) { try (var builder = blockFactory.newBytesRefBlockBuilder(Math.toIntExact(hash.size() + 1))) { builder.appendNull(); - BytesRef spare = new BytesRef(); for (long i = 0; i < hash.size(); i++) { builder.appendBytesRef(hash.get(i, spare)); } return new BytesRefBlock[] { builder.build() }; } } - - final int size = Math.toIntExact(hash.size()); - try (BytesStreamOutput out = new BytesStreamOutput()) { - hash.getBytesRefs().writeTo(out); - try (StreamInput in = out.bytes().streamInput()) { - return new BytesRefBlock[] { - blockFactory.newBytesRefArrayVector(new BytesRefArray(in, BigArrays.NON_RECYCLING_INSTANCE), size).asBlock() }; + try (var builder = blockFactory.newBytesRefBlockBuilder(Math.toIntExact(hash.size()))) { + for (long i = 0; i < hash.size(); i++) { + builder.appendBytesRef(hash.get(i, spare)); } - } catch (IOException e) { - throw new IllegalStateException(e); + return new BytesRefBlock[] { builder.build() }; } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st index 7c21cff56d7bb..2a3d1143236ac 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st @@ -9,15 +9,10 @@ package org.elasticsearch.compute.aggregation.blockhash; $if(BytesRef)$ import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; $endif$ import org.elasticsearch.common.unit.ByteSizeValue; 
import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; -$if(BytesRef)$ -import org.elasticsearch.common.util.BytesRefArray; -$endif$ import org.elasticsearch.common.util.$Hash$; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.SeenGroupIds; @@ -58,8 +53,6 @@ $endif$ import org.elasticsearch.core.ReleasableIterator; $if(BytesRef)$ -import java.io.IOException; - $else$ import java.util.BitSet; @@ -250,26 +243,21 @@ $if(BytesRef)$ * without and still read from the block. */ // TODO replace with takeBytesRefsOwnership ?! + final BytesRef spare = new BytesRef(); if (seenNull) { try (var builder = blockFactory.newBytesRefBlockBuilder(Math.toIntExact(hash.size() + 1))) { builder.appendNull(); - BytesRef spare = new BytesRef(); for (long i = 0; i < hash.size(); i++) { builder.appendBytesRef(hash.get(i, spare)); } return new BytesRefBlock[] { builder.build() }; } } - - final int size = Math.toIntExact(hash.size()); - try (BytesStreamOutput out = new BytesStreamOutput()) { - hash.getBytesRefs().writeTo(out); - try (StreamInput in = out.bytes().streamInput()) { - return new BytesRefBlock[] { - blockFactory.newBytesRefArrayVector(new BytesRefArray(in, BigArrays.NON_RECYCLING_INSTANCE), size).asBlock() }; + try (var builder = blockFactory.newBytesRefBlockBuilder(Math.toIntExact(hash.size()))) { + for (long i = 0; i < hash.size(); i++) { + builder.appendBytesRef(hash.get(i, spare)); } - } catch (IOException e) { - throw new IllegalStateException(e); + return new BytesRefBlock[] { builder.build() }; } $else$ if (seenNull) { From 013760cf4a408403044dc935122cf521d63dd92a Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 22 Oct 2024 07:37:28 +0200 Subject: [PATCH 270/449] Grow internal arrays when growing the capacity in AbstractHash implementations (#114907) This commit resizes those arrays when incrementing the capacity of the hashes to the maxSize. 
--- .../java/org/elasticsearch/common/util/BytesRefHash.java | 6 +++--- .../main/java/org/elasticsearch/common/util/LongHash.java | 4 ++-- .../java/org/elasticsearch/common/util/LongLongHash.java | 4 ++-- .../compute/aggregation/blockhash/BlockHashTests.java | 2 +- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/util/BytesRefHash.java b/server/src/main/java/org/elasticsearch/common/util/BytesRefHash.java index 208d29edad71d..288462ba3bbcb 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BytesRefHash.java +++ b/server/src/main/java/org/elasticsearch/common/util/BytesRefHash.java @@ -48,7 +48,7 @@ public BytesRefHash(long capacity, float maxLoadFactor, BigArrays bigArrays) { boolean success = false; try { // `super` allocates a big array so we have to `close` if we fail here or we'll leak it. - this.hashes = bigArrays.newIntArray(capacity, false); + this.hashes = bigArrays.newIntArray(maxSize, false); this.bytesRefs = new BytesRefArray(capacity, bigArrays); success = true; } finally { @@ -98,7 +98,7 @@ public BytesRefHash(BytesRefArray bytesRefs, float maxLoadFactor, BigArrays bigA boolean success = false; try { // `super` allocates a big array so we have to `close` if we fail here or we'll leak it. 
- this.hashes = bigArrays.newIntArray(bytesRefs.size() + 1, false); + this.hashes = bigArrays.newIntArray(maxSize, false); this.bytesRefs = BytesRefArray.takeOwnershipOf(bytesRefs); success = true; } finally { @@ -182,7 +182,6 @@ private long set(BytesRef key, int code, long id) { private void append(long id, BytesRef key, int code) { assert size == id; bytesRefs.append(key); - hashes = bigArrays.grow(hashes, id + 1); hashes.set(id, code); } @@ -211,6 +210,7 @@ public long add(BytesRef key, int code) { if (size >= maxSize) { assert size == maxSize; grow(); + hashes = bigArrays.resize(hashes, maxSize); } assert size < maxSize; return set(key, rehash(code), size); diff --git a/server/src/main/java/org/elasticsearch/common/util/LongHash.java b/server/src/main/java/org/elasticsearch/common/util/LongHash.java index 0c681063c50b0..3eeb60e419a19 100644 --- a/server/src/main/java/org/elasticsearch/common/util/LongHash.java +++ b/server/src/main/java/org/elasticsearch/common/util/LongHash.java @@ -33,7 +33,7 @@ public LongHash(long capacity, float maxLoadFactor, BigArrays bigArrays) { super(capacity, maxLoadFactor, bigArrays); try { // `super` allocates a big array so we have to `close` if we fail here or we'll leak it. 
- keys = bigArrays.newLongArray(capacity, false); + keys = bigArrays.newLongArray(maxSize, false); } finally { if (keys == null) { close(); @@ -78,7 +78,6 @@ private long set(long key, long id) { } private void append(long id, long key) { - keys = bigArrays.grow(keys, id + 1); keys.set(id, key); } @@ -102,6 +101,7 @@ public long add(long key) { if (size >= maxSize) { assert size == maxSize; grow(); + keys = bigArrays.resize(keys, maxSize); } assert size < maxSize; return set(key, size); diff --git a/server/src/main/java/org/elasticsearch/common/util/LongLongHash.java b/server/src/main/java/org/elasticsearch/common/util/LongLongHash.java index f7708af59dde2..031794ed9c9c6 100644 --- a/server/src/main/java/org/elasticsearch/common/util/LongLongHash.java +++ b/server/src/main/java/org/elasticsearch/common/util/LongLongHash.java @@ -40,7 +40,7 @@ public LongLongHash(long capacity, float maxLoadFactor, BigArrays bigArrays) { super(capacity, maxLoadFactor, bigArrays); try { // `super` allocates a big array so we have to `close` if we fail here or we'll leak it. 
- keys = bigArrays.newLongArray(2 * capacity, false); + keys = bigArrays.newLongArray(2 * maxSize, false); } finally { if (keys == null) { close(); @@ -99,7 +99,6 @@ private long set(long key1, long key2, long id) { private void append(long id, long key1, long key2) { long keyOffset = 2 * id; - keys = bigArrays.grow(keys, keyOffset + 2); keys.set(keyOffset, key1); keys.set(keyOffset + 1, key2); } @@ -128,6 +127,7 @@ public long add(long key1, long key2) { if (size >= maxSize) { assert size == maxSize; grow(); + keys = bigArrays.resize(keys, maxSize * 2); } assert size < maxSize; return set(key1, key2, size); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java index aeea18e52da0f..088e791348840 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java @@ -1147,7 +1147,7 @@ public void testLongBytesRefHashWithMultiValuedFields() { } else { assertThat( ordsAndKeys.description, - equalTo("BytesRefLongBlockHash{keys=[BytesRefKey[channel=1], LongKey[channel=0]], entries=9, size=491b}") + equalTo("BytesRefLongBlockHash{keys=[BytesRefKey[channel=1], LongKey[channel=0]], entries=9, size=483b}") ); assertOrds( ordsAndKeys.ords, From 185bf683787008495cc417a806675654ecb9e996 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Tue, 22 Oct 2024 08:58:21 +0200 Subject: [PATCH 271/449] Add prefilters only once in the compound and text similarity retrievers (#114983) This change ensures that the prefilters are propagated in the downstream retrievers only once. It also removes the ability to extend `explainQuery` in the compound retriever. This is not needed as the rank docs are now responsible for the explanation.
--- .../retriever/CompoundRetrieverBuilder.java | 18 ++++++------------ .../TextSimilarityRankRetrieverBuilder.java | 19 +------------------ 2 files changed, 7 insertions(+), 30 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java index e994c55e43452..85dabf6eb6465 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java @@ -18,7 +18,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.TransportMultiSearchAction; -import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.search.builder.PointInTimeBuilder; @@ -163,6 +162,11 @@ public final QueryBuilder topDocsQuery() { throw new IllegalStateException("Should not be called, missing a rewrite?"); } + @Override + public final QueryBuilder explainQuery() { + throw new IllegalStateException("Should not be called, missing a rewrite?"); + } + @Override public final void extractToSearchSourceBuilder(SearchSourceBuilder searchSourceBuilder, boolean compoundUsed) { throw new IllegalStateException("Should not be called, missing a rewrite?"); @@ -216,22 +220,12 @@ protected SearchSourceBuilder createSearchSourceBuilder(PointInTimeBuilder pit, .trackTotalHits(false) .storedFields(new StoredFieldsContext(false)) .size(rankWindowSize); + // apply the pre-filters downstream once if (preFilterQueryBuilders.isEmpty() == false) { retrieverBuilder.getPreFilterQueryBuilders().addAll(preFilterQueryBuilders); } retrieverBuilder.extractToSearchSourceBuilder(sourceBuilder, true); - // apply the pre-filters - if 
(preFilterQueryBuilders.size() > 0) { - QueryBuilder query = sourceBuilder.query(); - BoolQueryBuilder newQuery = new BoolQueryBuilder(); - if (query != null) { - newQuery.must(query); - } - preFilterQueryBuilders.forEach(newQuery::filter); - sourceBuilder.query(newQuery); - } - // Record the shard id in the sort result List> sortBuilders = sourceBuilder.sorts() != null ? new ArrayList<>(sourceBuilder.sorts()) : new ArrayList<>(); if (sortBuilders.isEmpty()) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java index 8bccf6e7d1022..342199dc51db8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java @@ -10,7 +10,6 @@ import org.apache.lucene.search.ScoreDoc; import org.elasticsearch.common.ParsingException; import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.search.builder.PointInTimeBuilder; @@ -20,7 +19,6 @@ import org.elasticsearch.search.retriever.CompoundRetrieverBuilder; import org.elasticsearch.search.retriever.RetrieverBuilder; import org.elasticsearch.search.retriever.RetrieverParserContext; -import org.elasticsearch.search.retriever.rankdoc.RankDocsQueryBuilder; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -158,33 +156,18 @@ protected RankDoc[] combineInnerRetrieverResults(List rankResults) { return textSimilarityRankDocs; } 
- @Override - public QueryBuilder explainQuery() { - // the original matching set of the TextSimilarityRank retriever is specified by its nested retriever - return new RankDocsQueryBuilder(rankDocs, new QueryBuilder[] { innerRetrievers.getFirst().retriever().explainQuery() }, true); - } - @Override protected SearchSourceBuilder createSearchSourceBuilder(PointInTimeBuilder pit, RetrieverBuilder retrieverBuilder) { var sourceBuilder = new SearchSourceBuilder().pointInTimeBuilder(pit) .trackTotalHits(false) .storedFields(new StoredFieldsContext(false)) .size(rankWindowSize); + // apply the pre-filters downstream once if (preFilterQueryBuilders.isEmpty() == false) { retrieverBuilder.getPreFilterQueryBuilders().addAll(preFilterQueryBuilders); } retrieverBuilder.extractToSearchSourceBuilder(sourceBuilder, true); - // apply the pre-filters - if (preFilterQueryBuilders.size() > 0) { - QueryBuilder query = sourceBuilder.query(); - BoolQueryBuilder newQuery = new BoolQueryBuilder(); - if (query != null) { - newQuery.must(query); - } - preFilterQueryBuilders.forEach(newQuery::filter); - sourceBuilder.query(newQuery); - } sourceBuilder.rankBuilder( new TextSimilarityRankBuilder(this.field, this.inferenceId, this.inferenceText, this.rankWindowSize, this.minScore) ); From e6e147e93b178391ca001913abac140e99cace78 Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Tue, 22 Oct 2024 10:36:06 +0300 Subject: [PATCH 272/449] Adjust failure store to work with TSDS (#114307) In this PR we add a test and we fix the issues we encountered when we enabled the failure store for TSDS and logsdb. **Logsdb** Logsdb worked out of the box, so we just added the test that indexes with a bulk request a couple of documents and tests how they are ingested. **TSDS** Here it was a bit trickier. We encountered the following issues: - TSDS requires a timestamp to determine the write index of the data stream meaning the failure happens earlier than we have anticipated so far. 
We added a special exception to detect this case and we treat it accordingly. - The template of a TSDS data stream sets certain settings that we do not want to have in the failure store index. We added an allowlist that gets applied before we add the necessary index settings. Furthermore, we added a test case to capture this. --- .../datastreams/TSDBIndexingIT.java | 3 +- .../datastreams/DataStreamFeatures.java | 6 ++ .../test/data_stream/150_tsdb.yml | 101 ++++++++++++++++++ .../190_failure_store_redirection.yml | 2 +- .../CreateIndexClusterStateUpdateRequest.java | 12 +++ .../action/bulk/BulkOperation.java | 7 ++ .../cluster/metadata/DataStream.java | 24 ++++- .../DataStreamFailureStoreDefinition.java | 47 ++++++-- .../MetadataCreateDataStreamService.java | 3 +- .../metadata/MetadataCreateIndexService.java | 7 +- ...DataStreamFailureStoreDefinitionTests.java | 73 +++++++++++++ .../rest-api-spec/test/20_failure_store.yml | 99 +++++++++++++++++ 12 files changed, 369 insertions(+), 15 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinitionTests.java create mode 100644 x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/20_failure_store.yml diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java index a2557a4de6e6d..29ec326548f2b 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java @@ -20,6 +20,7 @@ import org.elasticsearch.action.admin.indices.template.put.PutComponentTemplateAction; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; import org.elasticsearch.action.bulk.BulkRequest; +import 
org.elasticsearch.action.bulk.IndexDocFailureStoreStatus; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; @@ -170,7 +171,7 @@ public void testTimeRanges() throws Exception { var indexRequest = new IndexRequest("k8s").opType(DocWriteRequest.OpType.CREATE); time = randomBoolean() ? endTime : endTime.plusSeconds(randomIntBetween(1, 99)); indexRequest.source(DOC.replace("$time", formatInstant(time)), XContentType.JSON); - expectThrows(IllegalArgumentException.class, () -> client().index(indexRequest).actionGet()); + expectThrows(IndexDocFailureStoreStatus.ExceptionWithFailureStoreStatus.class, () -> client().index(indexRequest).actionGet()); } // Fetch UpdateTimeSeriesRangeService and increment time range of latest backing index: diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java index ab7e590b1631e..f60a3e5c47a7f 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java @@ -26,6 +26,7 @@ public class DataStreamFeatures implements FeatureSpecification { public static final NodeFeature DATA_STREAM_LIFECYCLE = new NodeFeature("data_stream.lifecycle"); + public static final NodeFeature DATA_STREAM_FAILURE_STORE_TSDB_FIX = new NodeFeature("data_stream.failure_store.tsdb_fix"); @Override public Map getHistoricalFeatures() { @@ -41,4 +42,9 @@ public Set getFeatures() { DataStreamGlobalRetention.GLOBAL_RETENTION // Added in 8.14 ); } + + @Override + public Set getTestFeatures() { + return Set.of(DATA_STREAM_FAILURE_STORE_TSDB_FIX); + } } diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml 
b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml index 56f387c016261..de5cf3baa744e 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml @@ -182,6 +182,107 @@ index without timestamp: body: - '{"metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001818691, "rx": 802133794}}}}' +--- +TSDB failures go to failure store: + - requires: + cluster_features: ["data_stream.failure_store.tsdb_fix"] + reason: "tests tsdb failure store fixes in 8.16.0 that catch timestamp errors that happen earlier in the process and redirect them to the failure store." + + - do: + allowed_warnings: + - "index template [my-template2] has index patterns [fs-k8s*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template2] will take precedence during new index creation" + indices.put_index_template: + name: my-template2 + body: + index_patterns: [ "fs-k8s*" ] + data_stream: + failure_store: true + template: + settings: + index: + mode: time_series + number_of_replicas: 1 + number_of_shards: 2 + routing_path: [ metricset, time_series_dimension ] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + properties: + "@timestamp": + type: date + metricset: + type: keyword + time_series_dimension: true + k8s: + properties: + pod: + properties: + uid: + type: keyword + time_series_dimension: true + name: + type: keyword + ip: + type: ip + network: + properties: + tx: + type: long + rx: + type: long + - do: + index: + index: fs-k8s + body: + - '{"metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001818691, "rx": 802133794}}}}' + - match: { result : 
"created"} + - match: { failure_store : "used"} + + - do: + bulk: + refresh: true + body: + - '{ "create": { "_index": "fs-k8s"} }' + - '{"@timestamp":"2021-04-28T01:00:00ZZ", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001818691, "rx": 802133794}}}}' + - '{ "create": { "_index": "k8s"} }' + - '{ "@timestamp": "2021-04-28T01:00:00ZZ", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001818691, "rx": 802133794}}}}' + - '{ "create": { "_index": "fs-k8s"} }' + - '{ "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001818691, "rx": 802133794}}}}' + - '{ "create": { "_index": "fs-k8s"} }' + - '{ "@timestamp":"2000-04-28T01:00:00ZZ", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001818691, "rx": 802133794}}}}' + - '{ "create": { "_index": "k8s"} }' + - '{"metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001818691, "rx": 802133794}}}}' + - '{ "create": { "_index": "k8s"} }' + - '{ "@timestamp":"2000-04-28T01:00:00ZZ", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001818691, "rx": 802133794}}}}' + - is_true: errors + + # Successfully indexed to backing index + - match: { items.0.create._index: '/\.ds-fs-k8s-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { items.0.create.status: 201 } + - is_false: items.0.create.failure_store + - match: { items.1.create._index: '/\.ds-k8s-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { items.1.create.status: 201 } + - is_false: items.1.create.failure_store + + # Successfully indexed to failure store + - match: { 
items.2.create._index: '/\.fs-fs-k8s-(\d{4}\.\d{2}\.\d{2}-)?000002/' } + - match: { items.2.create.status: 201 } + - match: { items.2.create.failure_store: used } + - match: { items.3.create._index: '/\.fs-fs-k8s-(\d{4}\.\d{2}\.\d{2}-)?000002/' } + - match: { items.3.create.status: 201 } + - match: { items.3.create.failure_store: used } + + # Rejected, eligible to go to failure store, but failure store not enabled + - match: { items.4.create._index: 'k8s' } + - match: { items.4.create.status: 400 } + - match: { items.4.create.error.type: timestamp_error } + - match: { items.4.create.failure_store: not_enabled } + - match: { items.4.create._index: 'k8s' } + - match: { items.4.create.status: 400 } + - match: { items.4.create.error.type: timestamp_error } + - match: { items.4.create.failure_store: not_enabled } + --- index without timestamp with pipeline: - do: diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml index cb5578a282dc9..9b5a9dae8bc0a 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml @@ -879,7 +879,7 @@ teardown: # Successfully indexed to backing index - match: { items.0.create._index: '/\.ds-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - match: { items.0.create.status: 201 } - - is_false: items.1.create.failure_store + - is_false: items.0.create.failure_store # Rejected but not eligible to go to failure store - match: { items.1.create._index: '/\.ds-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexClusterStateUpdateRequest.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexClusterStateUpdateRequest.java index 080ebb5951a7a..553f784d23a87 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexClusterStateUpdateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexClusterStateUpdateRequest.java @@ -35,6 +35,7 @@ public class CreateIndexClusterStateUpdateRequest { private ResizeType resizeType; private boolean copySettings; private SystemDataStreamDescriptor systemDataStreamDescriptor; + private boolean isFailureIndex = false; private Settings settings = Settings.EMPTY; @@ -102,6 +103,11 @@ public CreateIndexClusterStateUpdateRequest systemDataStreamDescriptor(SystemDat return this; } + public CreateIndexClusterStateUpdateRequest isFailureIndex(boolean isFailureIndex) { + this.isFailureIndex = isFailureIndex; + return this; + } + public String cause() { return cause; } @@ -168,6 +174,10 @@ public String dataStreamName() { return dataStreamName; } + public boolean isFailureIndex() { + return isFailureIndex; + } + public CreateIndexClusterStateUpdateRequest dataStreamName(String dataStreamName) { this.dataStreamName = dataStreamName; return this; @@ -228,6 +238,8 @@ public String toString() { + systemDataStreamDescriptor + ", matchingTemplate=" + matchingTemplate + + ", isFailureIndex=" + + isFailureIndex + '}'; } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java index 007f274d7f493..130d6286f7e02 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java @@ -320,6 +320,12 @@ private Map> groupRequestsByShards( shard -> new ArrayList<>() ); shardRequests.add(bulkItemRequest); + } catch (DataStream.TimestampError timestampError) { + IndexDocFailureStoreStatus failureStoreStatus = 
processFailure(bulkItemRequest, clusterState, timestampError); + if (IndexDocFailureStoreStatus.USED.equals(failureStoreStatus) == false) { + String name = ia != null ? ia.getName() : docWriteRequest.index(); + addFailureAndDiscardRequest(docWriteRequest, bulkItemRequest.id(), name, timestampError, failureStoreStatus); + } } catch (ElasticsearchParseException | IllegalArgumentException | RoutingMissingException | ResourceNotFoundException e) { String name = ia != null ? ia.getName() : docWriteRequest.index(); var failureStoreStatus = isFailureStoreRequest(docWriteRequest) @@ -545,6 +551,7 @@ private IndexDocFailureStoreStatus processFailure(BulkItemRequest bulkItemReques boolean added = addDocumentToRedirectRequests(bulkItemRequest, cause, failureStoreCandidate.getName()); if (added) { failureStoreMetrics.incrementFailureStore(bulkItemRequest.index(), errorType, FailureStoreMetrics.ErrorLocation.SHARD); + return IndexDocFailureStoreStatus.USED; } else { failureStoreMetrics.incrementRejected( bulkItemRequest.index(), diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index bedf65e1a9c8b..4dcc7c73c280e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -1343,7 +1343,7 @@ public Index getWriteIndex(IndexRequest request, Metadata metadata) { + "]" ) .collect(Collectors.joining()); - throw new IllegalArgumentException( + throw new TimestampError( "the document timestamp [" + timestampAsString + "] is outside of ranges of currently writable indices [" @@ -1405,10 +1405,10 @@ private static Instant getTimeStampFromRaw(Object rawTimestamp) { } else if (rawTimestamp instanceof String sTimestamp) { return DateFormatters.from(TIMESTAMP_FORMATTER.parse(sTimestamp), TIMESTAMP_FORMATTER.locale()).toInstant(); } else { - throw new 
IllegalArgumentException("timestamp [" + rawTimestamp + "] type [" + rawTimestamp.getClass() + "] error"); + throw new TimestampError("timestamp [" + rawTimestamp + "] type [" + rawTimestamp.getClass() + "] error"); } } catch (Exception e) { - throw new IllegalArgumentException("Error get data stream timestamp field: " + e.getMessage(), e); + throw new TimestampError("Error get data stream timestamp field: " + e.getMessage(), e); } } @@ -1432,7 +1432,7 @@ private static Instant getTimestampFromParser(BytesReference source, XContentTyp ); }; } catch (Exception e) { - throw new IllegalArgumentException("Error extracting data stream timestamp field: " + e.getMessage(), e); + throw new TimestampError("Error extracting data stream timestamp field: " + e.getMessage(), e); } } @@ -1741,4 +1741,20 @@ public DataStream build() { ); } } + + /** + * This is a specialised error to capture that a document does not have a valid timestamp + * to index a document. It is mainly applicable for TSDS data streams because they need the timestamp + * to determine the write index. 
+ */ + public static class TimestampError extends IllegalArgumentException { + + public TimestampError(String message, Exception cause) { + super(message, cause); + } + + public TimestampError(String message) { + super(message); + } + } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java index fd3fc1a732acb..7315e9f7a51d3 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java @@ -9,6 +9,7 @@ package org.elasticsearch.cluster.metadata; +import org.elasticsearch.cluster.routing.allocation.DataTier; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -19,6 +20,8 @@ import org.elasticsearch.index.mapper.RoutingFieldMapper; import java.io.IOException; +import java.util.HashSet; +import java.util.Set; /** * A utility class that contains the mappings and settings logic for failure store indices that are a part of data streams. @@ -26,12 +29,30 @@ public class DataStreamFailureStoreDefinition { public static final String FAILURE_STORE_REFRESH_INTERVAL_SETTING_NAME = "data_streams.failure_store.refresh_interval"; + public static final String INDEX_FAILURE_STORE_VERSION_SETTING_NAME = "index.failure_store.version"; public static final Settings DATA_STREAM_FAILURE_STORE_SETTINGS; + // Only a subset of user configurable settings is applicable for a failure index. Here we have an + // allowlist that will filter all other settings out. 
+ public static final Set SUPPORTED_USER_SETTINGS = Set.of( + DataTier.TIER_PREFERENCE, + IndexMetadata.SETTING_INDEX_HIDDEN, + INDEX_FAILURE_STORE_VERSION_SETTING_NAME, + IndexMetadata.SETTING_NUMBER_OF_SHARDS, + IndexMetadata.SETTING_NUMBER_OF_REPLICAS, + IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, + IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), + IndexMetadata.LIFECYCLE_NAME + ); + public static final Set SUPPORTED_USER_SETTINGS_PREFIXES = Set.of( + IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX + ".", + IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_PREFIX + ".", + IndexMetadata.INDEX_ROUTING_EXCLUDE_GROUP_PREFIX + "." + ); public static final CompressedXContent DATA_STREAM_FAILURE_STORE_MAPPING; public static final int FAILURE_STORE_DEFINITION_VERSION = 1; public static final Setting FAILURE_STORE_DEFINITION_VERSION_SETTING = Setting.intSetting( - "index.failure_store.version", + INDEX_FAILURE_STORE_VERSION_SETTING_NAME, 0, Setting.Property.IndexScope ); @@ -40,11 +61,6 @@ public class DataStreamFailureStoreDefinition { DATA_STREAM_FAILURE_STORE_SETTINGS = Settings.builder() // Always start with the hidden settings for a backing index. .put(IndexMetadata.SETTING_INDEX_HIDDEN, true) - // Override any pipeline settings on the failure store to not use any - // specified by the data stream template. Default pipelines are very much - // meant for the backing indices only. - .putNull(IndexSettings.DEFAULT_PIPELINE.getKey()) - .putNull(IndexSettings.FINAL_PIPELINE.getKey()) .put(FAILURE_STORE_DEFINITION_VERSION_SETTING.getKey(), FAILURE_STORE_DEFINITION_VERSION) .build(); @@ -199,4 +215,23 @@ public static Settings.Builder applyFailureStoreSettings(Settings nodeSettings, } return builder; } + + /** + * Removes the unsupported by the failure store settings from the settings provided. 
+ * ATTENTION: This method should be applied BEFORE we set the necessary settings for an index + * @param builder the settings builder that is going to be updated + * @return the original settings builder, with the unsupported settings removed. + */ + public static Settings.Builder filterUserDefinedSettings(Settings.Builder builder) { + if (builder.keys().isEmpty() == false) { + Set existingKeys = new HashSet<>(builder.keys()); + for (String setting : existingKeys) { + if (SUPPORTED_USER_SETTINGS.contains(setting) == false + && SUPPORTED_USER_SETTINGS_PREFIXES.stream().anyMatch(setting::startsWith) == false) { + builder.remove(setting); + } + } + } + return builder; + } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java index 2df9cf706d892..5dbf4da6f376f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java @@ -425,7 +425,8 @@ public static ClusterState createFailureStoreIndex( .nameResolvedInstant(nameResolvedInstant) .performReroute(false) .setMatchingTemplate(template) - .settings(indexSettings); + .settings(indexSettings) + .isFailureIndex(true); try { currentState = metadataCreateIndexService.applyCreateIndexRequest( diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java index 321719475c1f8..3accdd3881c6d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java @@ -983,6 +983,7 @@ static Settings aggregateIndexSettings( final Settings templateAndRequestSettings = 
Settings.builder().put(combinedTemplateSettings).put(request.settings()).build(); final IndexMode templateIndexMode = Optional.of(request) + .filter(r -> r.isFailureIndex() == false) .map(CreateIndexClusterStateUpdateRequest::matchingTemplate) .map(metadata::retrieveIndexModeFromTemplate) .orElse(null); @@ -1038,11 +1039,13 @@ static Settings aggregateIndexSettings( // Finally, we actually add the explicit defaults prior to the template settings and the // request settings, so that the precedence goes: - // Explicit Defaults -> Template -> Request -> Necessary Settings (# of shards, uuid, etc) + // Explicit Defaults -> Template -> Request -> Filter out failure store settings -> Necessary Settings (# of shards, uuid, etc) indexSettingsBuilder.put(additionalIndexSettings.build()); indexSettingsBuilder.put(templateSettings.build()); } - + if (request.isFailureIndex()) { + DataStreamFailureStoreDefinition.filterUserDefinedSettings(indexSettingsBuilder); + } // now, put the request settings, so they override templates indexSettingsBuilder.put(requestSettings.build()); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinitionTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinitionTests.java new file mode 100644 index 0000000000000..38d4031755a55 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinitionTests.java @@ -0,0 +1,73 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.cluster.routing.allocation.DataTier; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.test.ESTestCase; + +import static org.elasticsearch.cluster.metadata.DataStreamFailureStoreDefinition.INDEX_FAILURE_STORE_VERSION_SETTING_NAME; +import static org.hamcrest.Matchers.equalTo; + +public class DataStreamFailureStoreDefinitionTests extends ESTestCase { + + public void testSettingsFiltering() { + // Empty + Settings.Builder builder = Settings.builder(); + Settings.Builder expectedBuilder = Settings.builder(); + assertThat(DataStreamFailureStoreDefinition.filterUserDefinedSettings(builder).keys(), equalTo(expectedBuilder.keys())); + + // All supported settings + builder.put(INDEX_FAILURE_STORE_VERSION_SETTING_NAME, 3) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 2) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(DataTier.TIER_PREFERENCE, "data_cold") + .put(IndexMetadata.SETTING_INDEX_HIDDEN, true) + .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-10") + .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "1s") + .put(IndexMetadata.LIFECYCLE_NAME, "my-policy") + .put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX + "." + randomAlphaOfLength(4), randomAlphaOfLength(4)) + .put(IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_PREFIX + "." + randomAlphaOfLength(4), randomAlphaOfLength(4)) + .put(IndexMetadata.INDEX_ROUTING_EXCLUDE_GROUP_PREFIX + "." 
+ randomAlphaOfLength(4), randomAlphaOfLength(4)); + // We expect no changes + expectedBuilder = Settings.builder().put(builder.build()); + assertThat(DataStreamFailureStoreDefinition.filterUserDefinedSettings(builder).keys(), equalTo(expectedBuilder.keys())); + + // Remove unsupported settings + String randomSetting = randomAlphaOfLength(10); + builder.put(INDEX_FAILURE_STORE_VERSION_SETTING_NAME, 3) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 2) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(DataTier.TIER_PREFERENCE, "data_cold") + .put(IndexMetadata.SETTING_INDEX_HIDDEN, true) + .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-10") + .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "1s") + .put(IndexMetadata.LIFECYCLE_NAME, "my-policy") + .put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX + "." + randomAlphaOfLength(4), randomAlphaOfLength(4)) + .put(IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_PREFIX + "." + randomAlphaOfLength(4), randomAlphaOfLength(4)) + .put(IndexMetadata.INDEX_ROUTING_EXCLUDE_GROUP_PREFIX + "." 
+ randomAlphaOfLength(4), randomAlphaOfLength(4)) + .put(IndexSettings.MODE.getKey(), randomFrom(IndexMode.values())) + .put(randomSetting, randomAlphaOfLength(10)); + // We expect no changes + expectedBuilder = Settings.builder().put(builder.build()); + assertThat( + DataStreamFailureStoreDefinition.filterUserDefinedSettings(builder).keys().size(), + equalTo(expectedBuilder.keys().size() - 2) + ); + assertThat( + DataStreamFailureStoreDefinition.filterUserDefinedSettings(builder).keys().contains(IndexSettings.MODE.getKey()), + equalTo(false) + ); + assertThat(DataStreamFailureStoreDefinition.filterUserDefinedSettings(builder).keys().contains(randomSetting), equalTo(false)); + } + +} diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/20_failure_store.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/20_failure_store.yml new file mode 100644 index 0000000000000..21e4f49fe7af5 --- /dev/null +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/20_failure_store.yml @@ -0,0 +1,99 @@ +--- +teardown: + - do: + indices.delete_data_stream: + name: my-logs-fs + ignore: 404 + + - do: + indices.delete_index_template: + name: template + ignore: 404 + + - do: + indices.delete_data_stream: + name: my-logs-db + ignore: 404 + - do: + indices.delete_index_template: + name: template1 + ignore: 404 + +--- +Test failure store with logsdb: + - requires: + test_runner_features: [ capabilities, allowed_warnings ] + capabilities: + - method: PUT + path: /{index} + capabilities: [ logsdb_index_mode ] + - method: POST + path: /_bulk + capabilities: [ 'failure_store_status' ] + - method: PUT + path: /_bulk + capabilities: [ 'failure_store_status' ] + reason: "Support for 'logsdb' index mode & failure status capability required" + + - do: + allowed_warnings: + - "index template [my-template] has index patterns [my-logs-fs*] matching patterns from existing older templates [global] with patterns (global => [*]); this 
template [my-template] will take precedence during new index creation" + indices.put_index_template: + name: my-template + body: + index_patterns: ["my-logs-fs*"] + data_stream: + failure_store: true + template: + settings: + index: + mode: logsdb + number_of_replicas: 1 + number_of_shards: 2 + - do: + allowed_warnings: + - "index template [my-template2] has index patterns [my-logs-db*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template2] will take precedence during new index creation" + indices.put_index_template: + name: my-template2 + body: + index_patterns: [ "my-logs-db*" ] + data_stream: {} + template: + settings: + index: + mode: logsdb + number_of_replicas: 1 + number_of_shards: 2 + + - do: + bulk: + refresh: true + body: + - '{ "create": { "_index": "my-logs-fs"} }' + - '{"@timestamp":"2019-08-06T12:09:12.375Z", "log.level": "INFO", "message":"Tomcat started on port(s): 8080 (http) with context path ''", "service.name":"spring-petclinic","process.thread.name":"restartedMain","log.logger":"org.springframework.boot.web.embedded.tomcat.TomcatWebServer"}' + - '{ "create": { "_index": "my-logs-db"} }' + - '{ "@timestamp": "2022-01-01", "log.level": "INFO", "message":"Tomcat started on port(s): 8080 (http) with context path ''", "service.name":"spring-petclinic","process.thread.name":"restartedMain","log.logger":"org.springframework.boot.web.embedded.tomcat.TomcatWebServer" }' + - '{ "create": { "_index": "my-logs-fs"} }' + - '{"log.level": "INFO", "message":"Tomcat started on port(s): 8080 (http) with context path ''", "service.name":"spring-petclinic","process.thread.name":"restartedMain","log.logger":"org.springframework.boot.web.embedded.tomcat.TomcatWebServer"}' + - '{ "create": { "_index": "my-logs-db"} }' + - '{"log.level": "INFO", "message":"Tomcat started on port(s): 8080 (http) with context path ''", 
"service.name":"spring-petclinic","process.thread.name":"restartedMain","log.logger":"org.springframework.boot.web.embedded.tomcat.TomcatWebServer"}' + - is_true: errors + + # Successfully indexed to backing index + - match: { items.0.create._index: '/\.ds-my-logs-fs-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { items.0.create.status: 201 } + - is_false: items.0.create.failure_store + - match: { items.1.create._index: '/\.ds-my-logs-db-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { items.1.create.status: 201 } + - is_false: items.1.create.failure_store + + # Successfully indexed to failure store + - match: { items.2.create._index: '/\.fs-my-logs-fs-(\d{4}\.\d{2}\.\d{2}-)?000002/' } + - match: { items.2.create.status: 201 } + - match: { items.2.create.failure_store: used } + + # Rejected, eligible to go to failure store, but failure store not enabled + - match: { items.3.create._index: '/\.ds-my-logs-db-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { items.3.create.status: 400 } + - match: { items.3.create.error.type: document_parsing_exception } + - match: { items.3.create.failure_store: not_enabled } From 477f0cd68b5b9e02dcca0ff04d7f8d9b75c23bc2 Mon Sep 17 00:00:00 2001 From: Pooya Salehi Date: Tue, 22 Oct 2024 09:41:55 +0200 Subject: [PATCH 273/449] Use pattern in wipeAllIndices and unmute testUpgradeMovesRepoToNewMetaVersion (#115232) The inference index added to the delete index call doesn't exist in all 8.x versions. AFAICT, since this is not a pattern, the wipeAllIndices call fails since it is not able to find that index. Using a wildcard instead seems to resolve the issue. 
Closes #114994 --- muted-tests.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 971fc161c4632..d4accd399cace 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -308,9 +308,6 @@ tests: - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT method: testInferDeploysDefaultElser issue: https://github.com/elastic/elasticsearch/issues/114913 -- class: org.elasticsearch.upgrades.MultiVersionRepositoryAccessIT - method: testUpgradeMovesRepoToNewMetaVersion - issue: https://github.com/elastic/elasticsearch/issues/114994 - class: org.elasticsearch.upgrades.MultiVersionRepositoryAccessIT method: testReadOnlyRepo issue: https://github.com/elastic/elasticsearch/issues/114997 From 05fc23a44005bea35035737687bc9758f27d768e Mon Sep 17 00:00:00 2001 From: Pooya Salehi Date: Tue, 22 Oct 2024 10:50:39 +0200 Subject: [PATCH 274/449] Unmute MultiVersionRepositoryAccessIT.testReadOnlyRepo (#115215) I think the issue that was opened for this test failure is not correct, and relates to another failure. Relates https://github.com/elastic/elasticsearch/issues/114999 and https://github.com/elastic/elasticsearch/issues/114997.
--- muted-tests.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index d4accd399cace..1cb8baa96a942 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -308,9 +308,6 @@ tests: - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT method: testInferDeploysDefaultElser issue: https://github.com/elastic/elasticsearch/issues/114913 -- class: org.elasticsearch.upgrades.MultiVersionRepositoryAccessIT - method: testReadOnlyRepo - issue: https://github.com/elastic/elasticsearch/issues/114997 - class: org.elasticsearch.upgrades.MultiVersionRepositoryAccessIT method: testCreateAndRestoreSnapshot issue: https://github.com/elastic/elasticsearch/issues/114998 From f32051f4629f6f1a4192a615ec0cb5e294089fb2 Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Tue, 22 Oct 2024 11:09:19 +0200 Subject: [PATCH 275/449] fix: use the `index.mapping.source.mode` setting instead of the `_source.mode` mapping (#115193) --- docs/reference/mapping/types/binary.asciidoc | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/docs/reference/mapping/types/binary.asciidoc b/docs/reference/mapping/types/binary.asciidoc index 5733a28eb711a..81ba44c954e0a 100644 --- a/docs/reference/mapping/types/binary.asciidoc +++ b/docs/reference/mapping/types/binary.asciidoc @@ -68,8 +68,16 @@ Synthetic source may sort `binary` values in order of their byte representation.
---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "binary": { "type": "binary", "doc_values": true } } From c7f53ff3b639555736e2b6d0864e537cacbc2d59 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 22 Oct 2024 12:30:22 +0100 Subject: [PATCH 276/449] [ML] Dynamically get num allocations for ml node models (#115233) The GET inference API now dynamically updates the num_allocations field with the actual number from the deployed model, which is useful when adaptive allocations are used --- .../inference/InferenceService.java | 4 + .../inference/CreateFromDeploymentIT.java | 63 ++++++++++++++++ .../xpack/inference/InferenceCrudIT.java | 6 ++ .../TransportGetInferenceModelAction.java | 73 +++++++++++++++---- .../ElasticsearchInternalModel.java | 6 +- .../ElasticsearchInternalService.java | 49 ++++++++++++- .../ElasticsearchInternalServiceSettings.java | 10 ++- .../ElserInternalModelTests.java | 30 ++++++++ 8 files changed, 219 insertions(+), 22 deletions(-) create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index d437533a8603d..2c99563955746 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -210,4 +210,8 @@ default List defaultConfigIds() { default void defaultConfigs(ActionListener> defaultsListener) { defaultsListener.onResponse(List.of()); } + + default void updateModelsWithDynamicFields(List model, ActionListener> listener) { + listener.onResponse(model); + } } diff --git
a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java index f81ebc25dc860..0bfb6e9e43b03 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java @@ -109,6 +109,55 @@ public void testModelIdDoesNotMatch() throws IOException { ); } + public void testNumAllocationsIsUpdated() throws IOException { + var modelId = "update_num_allocations"; + var deploymentId = modelId; + + CustomElandModelIT.createMlNodeTextExpansionModel(modelId, client()); + var response = startMlNodeDeploymemnt(modelId, deploymentId); + assertOkOrCreated(response); + + var inferenceId = "test_num_allocations_updated"; + var putModel = putModel(inferenceId, endpointConfig(deploymentId), TaskType.SPARSE_EMBEDDING); + var serviceSettings = putModel.get("service_settings"); + assertThat( + putModel.toString(), + serviceSettings, + is( + Map.of( + "num_allocations", + 1, + "num_threads", + 1, + "model_id", + "update_num_allocations", + "deployment_id", + "update_num_allocations" + ) + ) + ); + + assertOkOrCreated(updateMlNodeDeploymemnt(deploymentId, 2)); + + var updatedServiceSettings = getModel(inferenceId).get("service_settings"); + assertThat( + updatedServiceSettings.toString(), + updatedServiceSettings, + is( + Map.of( + "num_allocations", + 2, + "num_threads", + 1, + "model_id", + "update_num_allocations", + "deployment_id", + "update_num_allocations" + ) + ) + ); + } + private String endpointConfig(String deploymentId) { return Strings.format(""" { @@ -147,6 +196,20 @@ private Response startMlNodeDeploymemnt(String modelId, String deploymentId) thr 
return client().performRequest(request); } + private Response updateMlNodeDeploymemnt(String deploymentId, int numAllocations) throws IOException { + String endPoint = "/_ml/trained_models/" + deploymentId + "/deployment/_update"; + + var body = Strings.format(""" + { + "number_of_allocations": %d + } + """, numAllocations); + + Request request = new Request("POST", endPoint); + request.setJsonEntity(body); + return client().performRequest(request); + } + protected void stopMlNodeDeployment(String deploymentId) throws IOException { String endpoint = "/_ml/trained_models/" + deploymentId + "/deployment/_stop"; Request request = new Request("POST", endpoint); diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index cbc50c361e3b5..37de2caadb475 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -24,6 +24,7 @@ import java.util.stream.Stream; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalToIgnoringCase; import static org.hamcrest.Matchers.hasSize; @@ -326,4 +327,9 @@ public void testSupportedStream() throws Exception { deleteModel(modelId); } } + + public void testGetZeroModels() throws IOException { + var models = getModels("_all", TaskType.RERANK); + assertThat(models, empty()); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java index edcec45b50a16..01e663df4a3ea 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java @@ -9,13 +9,13 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.inference.InferenceServiceRegistry; -import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.injection.guice.Inject; @@ -29,8 +29,11 @@ import org.elasticsearch.xpack.inference.registry.ModelRegistry; import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashMap; import java.util.List; import java.util.concurrent.Executor; +import java.util.stream.Collectors; public class TransportGetInferenceModelAction extends HandledTransportAction< GetInferenceModelAction.Request, @@ -96,39 +99,77 @@ private void getSingleModel( var model = service.get() .parsePersistedConfig(unparsedModel.inferenceEntityId(), unparsedModel.taskType(), unparsedModel.settings()); - delegate.onResponse(new GetInferenceModelAction.Response(List.of(model.getConfigurations()))); + + service.get() + .updateModelsWithDynamicFields( + List.of(model), + delegate.delegateFailureAndWrap( + (l2, updatedModels) -> l2.onResponse( + new 
GetInferenceModelAction.Response( + updatedModels.stream().map(Model::getConfigurations).collect(Collectors.toList()) + ) + ) + ) + ); })); } private void getAllModels(boolean persistDefaultEndpoints, ActionListener listener) { modelRegistry.getAllModels( persistDefaultEndpoints, - listener.delegateFailureAndWrap((l, models) -> executor.execute(ActionRunnable.supply(l, () -> parseModels(models)))) + listener.delegateFailureAndWrap((l, models) -> executor.execute(() -> parseModels(models, listener))) ); } private void getModelsByTaskType(TaskType taskType, ActionListener listener) { modelRegistry.getModelsByTaskType( taskType, - listener.delegateFailureAndWrap((l, models) -> executor.execute(ActionRunnable.supply(l, () -> parseModels(models)))) + listener.delegateFailureAndWrap((l, models) -> executor.execute(() -> parseModels(models, listener))) ); } - private GetInferenceModelAction.Response parseModels(List unparsedModels) { - var parsedModels = new ArrayList(); + private void parseModels(List unparsedModels, ActionListener listener) { + if (unparsedModels.isEmpty()) { + listener.onResponse(new GetInferenceModelAction.Response(List.of())); + return; + } - for (var unparsedModel : unparsedModels) { - var service = serviceRegistry.getService(unparsedModel.service()); - if (service.isEmpty()) { - throw serviceNotFoundException(unparsedModel.service(), unparsedModel.inferenceEntityId()); + var parsedModelsByService = new HashMap>(); + try { + for (var unparsedModel : unparsedModels) { + var service = serviceRegistry.getService(unparsedModel.service()); + if (service.isEmpty()) { + throw serviceNotFoundException(unparsedModel.service(), unparsedModel.inferenceEntityId()); + } + var list = parsedModelsByService.computeIfAbsent(service.get().name(), s -> new ArrayList<>()); + list.add( + service.get() + .parsePersistedConfig(unparsedModel.inferenceEntityId(), unparsedModel.taskType(), unparsedModel.settings()) + ); } - parsedModels.add( - service.get() - 
.parsePersistedConfig(unparsedModel.inferenceEntityId(), unparsedModel.taskType(), unparsedModel.settings()) - .getConfigurations() + + var groupedListener = new GroupedActionListener>( + parsedModelsByService.entrySet().size(), + listener.delegateFailureAndWrap((delegate, listOfListOfModels) -> { + var modifiable = new ArrayList(); + for (var l : listOfListOfModels) { + modifiable.addAll(l); + } + modifiable.sort(Comparator.comparing(Model::getInferenceEntityId)); + delegate.onResponse( + new GetInferenceModelAction.Response(modifiable.stream().map(Model::getConfigurations).collect(Collectors.toList())) + ); + }) ); + + for (var entry : parsedModelsByService.entrySet()) { + serviceRegistry.getService(entry.getKey()) + .get() // must be non-null to get this far + .updateModelsWithDynamicFields(entry.getValue(), groupedListener); + } + } catch (Exception e) { + listener.onFailure(e); } - return new GetInferenceModelAction.Response(parsedModels); } private ElasticsearchStatusException serviceNotFoundException(String service, String inferenceId) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java index d38def8dca47f..8b2969c39b7ba 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java @@ -21,7 +21,7 @@ public abstract class ElasticsearchInternalModel extends Model { - protected final ElasticsearchInternalServiceSettings internalServiceSettings; + protected ElasticsearchInternalServiceSettings internalServiceSettings; public ElasticsearchInternalModel( String inferenceEntityId, @@ -91,6 +91,10 @@ public ElasticsearchInternalServiceSettings 
getServiceSettings() { return (ElasticsearchInternalServiceSettings) super.getServiceSettings(); } + public void updateNumAllocations(Integer numAllocations) { + this.internalServiceSettings.setNumAllocations(numAllocations); + } + @Override public String toString() { return Strings.toString(this.getConfigurations()); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 389a9fa369c21..49919fda9f89d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -32,6 +32,7 @@ import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; +import org.elasticsearch.xpack.core.ml.action.GetDeploymentStatsAction; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsStatsAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; @@ -56,6 +57,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.EnumSet; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; @@ -786,11 +788,50 @@ public List defaultConfigIds() { ); } - /** - * Default configurations that can be out of the box without creating an endpoint first. 
- * @param defaultsListener Config listener - */ @Override + public void updateModelsWithDynamicFields(List models, ActionListener> listener) { + + if (models.isEmpty()) { + listener.onResponse(models); + return; + } + + var modelsByDeploymentIds = new HashMap(); + for (var model : models) { + assert model instanceof ElasticsearchInternalModel; + + if (model instanceof ElasticsearchInternalModel esModel) { + modelsByDeploymentIds.put(esModel.mlNodeDeploymentId(), esModel); + } else { + listener.onFailure( + new ElasticsearchStatusException( + "Cannot update model [{}] as it is not an Elasticsearch service model", + RestStatus.INTERNAL_SERVER_ERROR, + model.getInferenceEntityId() + ) + ); + return; + } + } + + String deploymentIds = String.join(",", modelsByDeploymentIds.keySet()); + client.execute( + GetDeploymentStatsAction.INSTANCE, + new GetDeploymentStatsAction.Request(deploymentIds), + ActionListener.wrap(stats -> { + for (var deploymentStats : stats.getStats().results()) { + var model = modelsByDeploymentIds.get(deploymentStats.getDeploymentId()); + model.updateNumAllocations(deploymentStats.getNumberOfAllocations()); + } + listener.onResponse(new ArrayList<>(modelsByDeploymentIds.values())); + }, e -> { + logger.warn("Get deployment stats failed, cannot update the endpoint's number of allocations", e); + // continue with the original response + listener.onResponse(models); + }) + ); + } + public void defaultConfigs(ActionListener> defaultsListener) { preferredModelVariantFn.accept(defaultsListener.delegateFailureAndWrap((delegate, preferredModelVariant) -> { if (PreferredModelVariant.LINUX_X86_OPTIMIZED.equals(preferredModelVariant)) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java index fedf48fb583a3..962c939146ef2 
100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java @@ -39,7 +39,7 @@ public class ElasticsearchInternalServiceSettings implements ServiceSettings { public static final String DEPLOYMENT_ID = "deployment_id"; public static final String ADAPTIVE_ALLOCATIONS = "adaptive_allocations"; - private final Integer numAllocations; + private Integer numAllocations; private final int numThreads; private final String modelId; private final AdaptiveAllocationsSettings adaptiveAllocationsSettings; @@ -172,6 +172,10 @@ public ElasticsearchInternalServiceSettings(StreamInput in) throws IOException { : null; } + public void setNumAllocations(Integer numAllocations) { + this.numAllocations = numAllocations; + } + @Override public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_ADAPTIVE_ALLOCATIONS)) { @@ -194,6 +198,10 @@ public String modelId() { return modelId; } + public String deloymentId() { + return modelId; + } + public Integer getNumAllocations() { return numAllocations; } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java new file mode 100644 index 0000000000000..96cd42efa42f5 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.elasticsearch; + +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; + +public class ElserInternalModelTests extends ESTestCase { + public void testUpdateNumAllocation() { + var model = new ElserInternalModel( + "foo", + TaskType.SPARSE_EMBEDDING, + ElasticsearchInternalService.NAME, + new ElserInternalServiceSettings(null, 1, "elser", null), + new ElserMlNodeTaskSettings(), + null + ); + + model.updateNumAllocations(1); + assertEquals(1, model.getServiceSettings().getNumAllocations().intValue()); + + model.updateNumAllocations(null); + assertNull(model.getServiceSettings().getNumAllocations()); + } +} From 332c9224f2f7c774dc470c65c20fd2836b9f3ee9 Mon Sep 17 00:00:00 2001 From: Patrick Doyle <810052+prdoyle@users.noreply.github.com> Date: Tue, 22 Oct 2024 09:09:21 -0400 Subject: [PATCH 277/449] onProcessFileChangesException (#115038) --- .../common/file/AbstractFileWatchingService.java | 16 ++++++++++++---- .../service/FileSettingsService.java | 12 +++++++++++- 2 files changed, 23 insertions(+), 5 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/file/AbstractFileWatchingService.java b/server/src/main/java/org/elasticsearch/common/file/AbstractFileWatchingService.java index a900722397edd..41998bf974bf9 100644 --- a/server/src/main/java/org/elasticsearch/common/file/AbstractFileWatchingService.java +++ b/server/src/main/java/org/elasticsearch/common/file/AbstractFileWatchingService.java @@ -313,12 +313,20 @@ void processSettingsOnServiceStartAndNotifyListeners() throws InterruptedExcepti void processSettingsAndNotifyListeners() throws InterruptedException { try { processFileChanges(); - for (var listener : eventListeners) { - listener.watchedFileChanged(); - } } catch (IOException | ExecutionException e) { - logger.error(() -> 
"Error processing watched file: " + watchedFile(), e); + onProcessFileChangesException(e); + return; } + for (var listener : eventListeners) { + listener.watchedFileChanged(); + } + } + + /** + * Called for checked exceptions only. + */ + protected void onProcessFileChangesException(Exception e) { + logger.error(() -> "Error processing watched file: " + watchedFile(), e); } // package private for testing diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java index 811b59465ce76..601fc3c86d98f 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.coordination.FailedToCommitClusterStateException; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.ReservedStateMetadata; import org.elasticsearch.cluster.service.ClusterService; @@ -144,7 +145,16 @@ private void processFileChanges(ReservedStateVersionCheck versionCheck) throws I } @Override - protected void processInitialFileMissing() throws ExecutionException, InterruptedException { + protected void onProcessFileChangesException(Exception e) { + if (e instanceof ExecutionException && e.getCause() instanceof FailedToCommitClusterStateException f) { + logger.error("Unable to commit cluster state", e); + } else { + super.onProcessFileChangesException(e); + } + } + + @Override + protected void processInitialFileMissing() throws ExecutionException, InterruptedException, IOException { PlainActionFuture completion = new PlainActionFuture<>(); logger.info("setting file [{}] not found, initializing [{}] as empty", 
watchedFile(), NAMESPACE); stateService.initEmpty(NAMESPACE, completion); From 003fbc73f6f4913135acf6ce4484e8e8ba032251 Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Tue, 22 Oct 2024 16:26:30 +0300 Subject: [PATCH 278/449] Adding validation for incompatibility of compound retrievers and scroll (#115106) --- .../search/builder/SearchSourceBuilder.java | 2 +- .../retriever/CompoundRetrieverBuilder.java | 10 +++++++--- .../search/retriever/RetrieverBuilder.java | 1 + .../action/search/SearchRequestTests.java | 15 ++++++++++++++- 4 files changed, 23 insertions(+), 5 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 6ceb02f0e797f..9c96319136007 100644 --- a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -2179,7 +2179,7 @@ public ActionRequestValidationException validate( boolean allowPartialSearchResults ) { if (retriever() != null) { - validationException = retriever().validate(this, validationException, allowPartialSearchResults); + validationException = retriever().validate(this, validationException, isScroll, allowPartialSearchResults); List specified = new ArrayList<>(); if (subSearches().isEmpty() == false) { specified.add(QUERY_FIELD.getPreferredName()); diff --git a/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java index 85dabf6eb6465..7373bc5b75049 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java @@ -176,9 +176,10 @@ public final void extractToSearchSourceBuilder(SearchSourceBuilder searchSourceB public 
ActionRequestValidationException validate( SearchSourceBuilder source, ActionRequestValidationException validationException, + boolean isScroll, boolean allowPartialSearchResults ) { - validationException = super.validate(source, validationException, allowPartialSearchResults); + validationException = super.validate(source, validationException, isScroll, allowPartialSearchResults); if (source.size() > rankWindowSize) { validationException = addValidationError( "[" @@ -194,12 +195,15 @@ public ActionRequestValidationException validate( } if (allowPartialSearchResults) { validationException = addValidationError( - "cannot specify a compound retriever and [allow_partial_search_results]", + "cannot specify [" + getName() + "] and [allow_partial_search_results]", validationException ); } + if (isScroll) { + validationException = addValidationError("cannot specify [" + getName() + "] and [scroll]", validationException); + } for (RetrieverSource innerRetriever : innerRetrievers) { - validationException = innerRetriever.retriever().validate(source, validationException, allowPartialSearchResults); + validationException = innerRetriever.retriever().validate(source, validationException, isScroll, allowPartialSearchResults); } return validationException; } diff --git a/server/src/main/java/org/elasticsearch/search/retriever/RetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/RetrieverBuilder.java index 882d44adb79c3..5e36ad0fd4fd6 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/RetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/RetrieverBuilder.java @@ -239,6 +239,7 @@ public RetrieverBuilder rewrite(QueryRewriteContext ctx) throws IOException { public ActionRequestValidationException validate( SearchSourceBuilder source, ActionRequestValidationException validationException, + boolean isScroll, boolean allowPartialSearchResults ) { return validationException; diff --git 
a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java index c6ca97fd5694a..526961d74bf52 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java @@ -291,10 +291,23 @@ public void testValidate() throws IOException { assertNotNull(validationErrors); assertEquals(1, validationErrors.validationErrors().size()); assertEquals( - "cannot specify a compound retriever and [allow_partial_search_results]", + "cannot specify [test_compound_retriever_builder] and [allow_partial_search_results]", validationErrors.validationErrors().get(0) ); } + { + // scroll and compound retriever + SearchRequest searchRequest = createSearchRequest().source( + new SearchSourceBuilder().retriever(new TestCompoundRetrieverBuilder(randomIntBetween(1, 10))) + ); + searchRequest.allowPartialSearchResults(false); + searchRequest.scroll(TimeValue.timeValueMinutes(1)); + searchRequest.requestCache(false); + ActionRequestValidationException validationErrors = searchRequest.validate(); + assertNotNull(validationErrors); + assertEquals(1, validationErrors.validationErrors().size()); + assertEquals("cannot specify [test_compound_retriever_builder] and [scroll]", validationErrors.validationErrors().get(0)); + } { // allow_partial_results and non-compound retriever SearchRequest searchRequest = createSearchRequest().source(new SearchSourceBuilder().retriever(new RetrieverBuilder() { From e3c198a23a5e2f776b079ba27928c2578cadccef Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Tue, 22 Oct 2024 17:02:47 +0300 Subject: [PATCH 279/449] Handle setting merge conflicts for overruling settings providers (#115217) * Handle setting merge conflicts for overruling settings providers * spotless * update TransportSimulateIndexTemplateAction * update 
comment and add test * fix flakiness * fix flakiness --- .../TransportSimulateIndexTemplateAction.java | 17 +- .../metadata/MetadataCreateIndexService.java | 60 +++--- .../index/IndexSettingProvider.java | 11 ++ ...sportSimulateIndexTemplateActionTests.java | 23 ++- .../MetadataCreateIndexServiceTests.java | 175 ++++++++++++++++++ .../index/IndexSettingProviderTests.java | 40 ++-- ...RestIT.java => LogsdbWithBasicRestIT.java} | 71 ++++++- .../SyntheticSourceIndexSettingsProvider.java | 6 + 8 files changed, 362 insertions(+), 41 deletions(-) rename x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/{LogsdbRestIT.java => LogsdbWithBasicRestIT.java} (59%) diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java index 5e3799cd14518..94d9b87467ea8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java @@ -48,6 +48,7 @@ import java.time.Instant; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; @@ -270,6 +271,7 @@ public static Template resolveTemplate( // First apply settings sourced from index settings providers final var now = Instant.now(); Settings.Builder additionalSettings = Settings.builder(); + Set overrulingSettings = new HashSet<>(); for (var provider : indexSettingProviders) { Settings result = provider.getAdditionalIndexSettings( indexName, @@ -283,8 +285,21 @@ public static Template resolveTemplate( MetadataCreateIndexService.validateAdditionalSettings(provider, result, additionalSettings); dummySettings.put(result); additionalSettings.put(result); + if 
(provider.overrulesTemplateAndRequestSettings()) { + overrulingSettings.addAll(result.keySet()); + } } - // Then apply settings resolved from templates: + + if (overrulingSettings.isEmpty() == false) { + // Filter any conflicting settings from overruling providers, to avoid overwriting their values from templates. + final Settings.Builder filtered = Settings.builder().put(templateSettings); + for (String setting : overrulingSettings) { + filtered.remove(setting); + } + templateSettings = filtered.build(); + } + + // Apply settings resolved from templates. dummySettings.put(templateSettings); final IndexMetadata indexMetadata = IndexMetadata.builder(indexName) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java index 3accdd3881c6d..69e3b7b70ff82 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java @@ -992,6 +992,7 @@ static Settings aggregateIndexSettings( // additionalIndexSettings map final Settings.Builder additionalIndexSettings = Settings.builder(); final var resolvedAt = Instant.ofEpochMilli(request.getNameResolvedAt()); + Set overrulingSettings = new HashSet<>(); for (IndexSettingProvider provider : indexSettingProviders) { var newAdditionalSettings = provider.getAdditionalIndexSettings( request.index(), @@ -1004,36 +1005,45 @@ static Settings aggregateIndexSettings( ); validateAdditionalSettings(provider, newAdditionalSettings, additionalIndexSettings); additionalIndexSettings.put(newAdditionalSettings); + if (provider.overrulesTemplateAndRequestSettings()) { + overrulingSettings.addAll(newAdditionalSettings.keySet()); + } } - // For all the explicit settings, we go through the template and request level settings - // and see if either a template or the request has "cancelled out" 
an explicit default - // setting. For example, if a plugin had as an explicit setting: - // "index.mysetting": "blah - // And either a template or create index request had: - // "index.mysetting": null - // We want to remove the explicit setting not only from the explicitly set settings, but - // also from the template and request settings, so that from the newly create index's - // perspective it is as though the setting has not been set at all (using the default - // value). for (String explicitSetting : additionalIndexSettings.keys()) { - if (templateSettings.keys().contains(explicitSetting) && templateSettings.get(explicitSetting) == null) { - logger.debug( - "removing default [{}] setting as it in set to null in a template for [{}] creation", - explicitSetting, - request.index() - ); - additionalIndexSettings.remove(explicitSetting); + if (overrulingSettings.contains(explicitSetting)) { + // Remove any conflicting template and request settings to use the provided values. templateSettings.remove(explicitSetting); - } - if (requestSettings.keys().contains(explicitSetting) && requestSettings.get(explicitSetting) == null) { - logger.debug( - "removing default [{}] setting as it in set to null in the request for [{}] creation", - explicitSetting, - request.index() - ); - additionalIndexSettings.remove(explicitSetting); requestSettings.remove(explicitSetting); + } else { + // For all the explicit settings, we go through the template and request level settings + // and see if either a template or the request has "cancelled out" an explicit default + // setting. 
For example, if a plugin had as an explicit setting: + // "index.mysetting": "blah + // And either a template or create index request had: + // "index.mysetting": null + // We want to remove the explicit setting not only from the explicitly set settings, but + // also from the template and request settings, so that from the newly create index's + // perspective it is as though the setting has not been set at all (using the default + // value). + if (templateSettings.keys().contains(explicitSetting) && templateSettings.get(explicitSetting) == null) { + logger.debug( + "removing default [{}] setting as it is set to null in a template for [{}] creation", + explicitSetting, + request.index() + ); + additionalIndexSettings.remove(explicitSetting); + templateSettings.remove(explicitSetting); + } + if (requestSettings.keys().contains(explicitSetting) && requestSettings.get(explicitSetting) == null) { + logger.debug( + "removing default [{}] setting as it is set to null in the request for [{}] creation", + explicitSetting, + request.index() + ); + additionalIndexSettings.remove(explicitSetting); + requestSettings.remove(explicitSetting); + } } } diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java index 0180d2c8df119..6a553d5dc5440 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java @@ -57,4 +57,15 @@ Settings getAdditionalIndexSettings( record Parameters(CheckedFunction mapperServiceFactory) { } + + /** + * Indicates whether the additional settings that this provider returns can overrule the settings defined in matching template + * or in create index request. + * + * Note that this is not used during index template validation, to avoid overruling template settings that may apply to + * different contexts (e.g. 
the provider is not used, or it returns different setting values). + */ + default boolean overrulesTemplateAndRequestSettings() { + return false; + } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateActionTests.java index 74408b99e92ce..95446149f026b 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateActionTests.java @@ -49,7 +49,9 @@ public void testSettingsProviderIsOverridden() throws Exception { matchingTemplate, ComposableIndexTemplate.builder() .indexPatterns(List.of("test_index*")) - .template(new Template(Settings.builder().put("test-setting", 1).build(), null, null)) + .template( + new Template(Settings.builder().put("test-setting", 1).put("test-setting-2", 2).build(), null, null) + ) .build() ) ) @@ -78,6 +80,24 @@ public Settings getAdditionalIndexSettings( ) { return Settings.builder().put("test-setting", 0).build(); } + }, new IndexSettingProvider() { + @Override + public Settings getAdditionalIndexSettings( + String indexName, + String dataStreamName, + IndexMode templateIndexMode, + Metadata metadata, + Instant resolvedAt, + Settings indexTemplateAndCreateRequestSettings, + List combinedTemplateMappings + ) { + return Settings.builder().put("test-setting-2", 10).build(); + } + + @Override + public boolean overrulesTemplateAndRequestSettings() { + return true; + } }); Template resolvedTemplate = TransportSimulateIndexTemplateAction.resolveTemplate( @@ -92,5 +112,6 @@ public Settings getAdditionalIndexSettings( ); assertThat(resolvedTemplate.settings().getAsInt("test-setting", -1), is(1)); + assertThat(resolvedTemplate.settings().getAsInt("test-setting-2", -1), is(10)); } } 
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java index 05382de49087d..96a74d2e23aad 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -44,8 +44,10 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.IndexSettingProvider; import org.elasticsearch.index.IndexSettingProviders; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; @@ -74,6 +76,7 @@ import org.junit.Before; import java.io.IOException; +import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -691,6 +694,178 @@ public void testAggregateSettingsAppliesSettingsFromTemplatesAndRequest() { assertThat(aggregatedIndexSettings.get("request_setting"), equalTo("value2")); } + public void testAggregateSettingsProviderOverrulesSettingsFromRequest() { + IndexTemplateMetadata templateMetadata = addMatchingTemplate(builder -> { + builder.settings(Settings.builder().put("template_setting", "value1")); + }); + Metadata metadata = new Metadata.Builder().templates(Map.of("template_1", templateMetadata)).build(); + ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); + request.settings(Settings.builder().put("request_setting", "value2").build()); + + Settings aggregatedIndexSettings = aggregateIndexSettings( + clusterState, + request, + templateMetadata.settings(), + null, + null, + Settings.EMPTY, + 
IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, + randomShardLimitService(), + Set.of(new IndexSettingProvider() { + @Override + public Settings getAdditionalIndexSettings( + String indexName, + String dataStreamName, + IndexMode templateIndexMode, + Metadata metadata, + Instant resolvedAt, + Settings indexTemplateAndCreateRequestSettings, + List combinedTemplateMappings + ) { + return Settings.builder().put("request_setting", "overrule_value").put("other_setting", "other_value").build(); + } + + @Override + public boolean overrulesTemplateAndRequestSettings() { + return true; + } + }) + ); + + assertThat(aggregatedIndexSettings.get("template_setting"), equalTo("value1")); + assertThat(aggregatedIndexSettings.get("request_setting"), equalTo("overrule_value")); + assertThat(aggregatedIndexSettings.get("other_setting"), equalTo("other_value")); + } + + public void testAggregateSettingsProviderOverrulesNullFromRequest() { + IndexTemplateMetadata templateMetadata = addMatchingTemplate(builder -> { + builder.settings(Settings.builder().put("template_setting", "value1")); + }); + Metadata metadata = new Metadata.Builder().templates(Map.of("template_1", templateMetadata)).build(); + ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); + request.settings(Settings.builder().putNull("request_setting").build()); + + Settings aggregatedIndexSettings = aggregateIndexSettings( + clusterState, + request, + templateMetadata.settings(), + null, + null, + Settings.EMPTY, + IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, + randomShardLimitService(), + Set.of(new IndexSettingProvider() { + @Override + public Settings getAdditionalIndexSettings( + String indexName, + String dataStreamName, + IndexMode templateIndexMode, + Metadata metadata, + Instant resolvedAt, + Settings indexTemplateAndCreateRequestSettings, + List combinedTemplateMappings + ) { + return Settings.builder().put("request_setting", "overrule_value").put("other_setting", 
"other_value").build(); + } + + @Override + public boolean overrulesTemplateAndRequestSettings() { + return true; + } + }) + ); + + assertThat(aggregatedIndexSettings.get("template_setting"), equalTo("value1")); + assertThat(aggregatedIndexSettings.get("request_setting"), equalTo("overrule_value")); + assertThat(aggregatedIndexSettings.get("other_setting"), equalTo("other_value")); + } + + public void testAggregateSettingsProviderOverrulesSettingsFromTemplates() { + IndexTemplateMetadata templateMetadata = addMatchingTemplate(builder -> { + builder.settings(Settings.builder().put("template_setting", "value1")); + }); + Metadata metadata = new Metadata.Builder().templates(Map.of("template_1", templateMetadata)).build(); + ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); + request.settings(Settings.builder().put("request_setting", "value2").build()); + + Settings aggregatedIndexSettings = aggregateIndexSettings( + clusterState, + request, + templateMetadata.settings(), + null, + null, + Settings.EMPTY, + IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, + randomShardLimitService(), + Set.of(new IndexSettingProvider() { + @Override + public Settings getAdditionalIndexSettings( + String indexName, + String dataStreamName, + IndexMode templateIndexMode, + Metadata metadata, + Instant resolvedAt, + Settings indexTemplateAndCreateRequestSettings, + List combinedTemplateMappings + ) { + return Settings.builder().put("template_setting", "overrule_value").put("other_setting", "other_value").build(); + } + + @Override + public boolean overrulesTemplateAndRequestSettings() { + return true; + } + }) + ); + + assertThat(aggregatedIndexSettings.get("template_setting"), equalTo("overrule_value")); + assertThat(aggregatedIndexSettings.get("request_setting"), equalTo("value2")); + assertThat(aggregatedIndexSettings.get("other_setting"), equalTo("other_value")); + } + + public void testAggregateSettingsProviderOverrulesNullFromTemplates() { 
+ IndexTemplateMetadata templateMetadata = addMatchingTemplate(builder -> { + builder.settings(Settings.builder().putNull("template_setting")); + }); + Metadata metadata = new Metadata.Builder().templates(Map.of("template_1", templateMetadata)).build(); + ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); + request.settings(Settings.builder().put("request_setting", "value2").build()); + + Settings aggregatedIndexSettings = aggregateIndexSettings( + clusterState, + request, + templateMetadata.settings(), + null, + null, + Settings.EMPTY, + IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, + randomShardLimitService(), + Set.of(new IndexSettingProvider() { + @Override + public Settings getAdditionalIndexSettings( + String indexName, + String dataStreamName, + IndexMode templateIndexMode, + Metadata metadata, + Instant resolvedAt, + Settings indexTemplateAndCreateRequestSettings, + List combinedTemplateMappings + ) { + return Settings.builder().put("template_setting", "overrule_value").put("other_setting", "other_value").build(); + } + + @Override + public boolean overrulesTemplateAndRequestSettings() { + return true; + } + }) + ); + + assertThat(aggregatedIndexSettings.get("template_setting"), equalTo("overrule_value")); + assertThat(aggregatedIndexSettings.get("request_setting"), equalTo("value2")); + assertThat(aggregatedIndexSettings.get("other_setting"), equalTo("other_value")); + } + public void testInvalidAliasName() { final String[] invalidAliasNames = new String[] { "-alias1", "+alias2", "_alias3", "a#lias", "al:ias", ".", ".." 
}; String aliasName = randomFrom(invalidAliasNames); diff --git a/server/src/test/java/org/elasticsearch/index/IndexSettingProviderTests.java b/server/src/test/java/org/elasticsearch/index/IndexSettingProviderTests.java index 628de0b047bf5..adac8bf204f3e 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexSettingProviderTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexSettingProviderTests.java @@ -23,15 +23,24 @@ public class IndexSettingProviderTests extends ESSingleNodeTestCase { public void testIndexCreation() throws Exception { - var indexService = createIndex("my-index1"); + Settings settings = Settings.builder().put("index.mapping.depth.limit", 10).build(); + var indexService = createIndex("my-index1", settings); assertFalse(indexService.getIndexSettings().getSettings().hasValue("index.refresh_interval")); + assertEquals("10", indexService.getIndexSettings().getSettings().get("index.mapping.depth.limit")); INDEX_SETTING_PROVIDER1_ENABLED.set(true); - indexService = createIndex("my-index2"); + indexService = createIndex("my-index2", settings); assertTrue(indexService.getIndexSettings().getSettings().hasValue("index.refresh_interval")); + assertEquals("10", indexService.getIndexSettings().getSettings().get("index.mapping.depth.limit")); + INDEX_SETTING_OVERRULING.set(true); + indexService = createIndex("my-index3", settings); + assertTrue(indexService.getIndexSettings().getSettings().hasValue("index.refresh_interval")); + assertEquals("100", indexService.getIndexSettings().getSettings().get("index.mapping.depth.limit")); + + INDEX_SETTING_DEPTH_ENABLED.set(false); INDEX_SETTING_PROVIDER2_ENABLED.set(true); - var e = expectThrows(IllegalArgumentException.class, () -> createIndex("my-index3")); + var e = expectThrows(IllegalArgumentException.class, () -> createIndex("my-index4", settings)); assertEquals( "additional index setting [index.refresh_interval] added by [TestIndexSettingsProvider] is already present", e.getMessage() @@ -47,7 
+56,7 @@ public static class Plugin1 extends Plugin { @Override public Collection getAdditionalIndexSettingProviders(IndexSettingProvider.Parameters parameters) { - return List.of(new TestIndexSettingsProvider("index.refresh_interval", "-1", INDEX_SETTING_PROVIDER1_ENABLED)); + return List.of(new TestIndexSettingsProvider("-1", INDEX_SETTING_PROVIDER1_ENABLED)); } } @@ -56,22 +65,22 @@ public static class Plugin2 extends Plugin { @Override public Collection getAdditionalIndexSettingProviders(IndexSettingProvider.Parameters parameters) { - return List.of(new TestIndexSettingsProvider("index.refresh_interval", "100s", INDEX_SETTING_PROVIDER2_ENABLED)); + return List.of(new TestIndexSettingsProvider("100s", INDEX_SETTING_PROVIDER2_ENABLED)); } } private static final AtomicBoolean INDEX_SETTING_PROVIDER1_ENABLED = new AtomicBoolean(false); private static final AtomicBoolean INDEX_SETTING_PROVIDER2_ENABLED = new AtomicBoolean(false); + private static final AtomicBoolean INDEX_SETTING_DEPTH_ENABLED = new AtomicBoolean(true); + private static final AtomicBoolean INDEX_SETTING_OVERRULING = new AtomicBoolean(false); static class TestIndexSettingsProvider implements IndexSettingProvider { - private final String settingName; - private final String settingValue; + private final String intervalValue; private final AtomicBoolean enabled; - TestIndexSettingsProvider(String settingName, String settingValue, AtomicBoolean enabled) { - this.settingName = settingName; - this.settingValue = settingValue; + TestIndexSettingsProvider(String intervalValue, AtomicBoolean enabled) { + this.intervalValue = intervalValue; this.enabled = enabled; } @@ -86,10 +95,19 @@ public Settings getAdditionalIndexSettings( List combinedTemplateMappings ) { if (enabled.get()) { - return Settings.builder().put(settingName, settingValue).build(); + var builder = Settings.builder().put("index.refresh_interval", intervalValue); + if (INDEX_SETTING_DEPTH_ENABLED.get()) { + 
builder.put("index.mapping.depth.limit", 100); + } + return builder.build(); } else { return Settings.EMPTY; } } + + @Override + public boolean overrulesTemplateAndRequestSettings() { + return INDEX_SETTING_OVERRULING.get(); + } } } diff --git a/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java b/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java similarity index 59% rename from x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java rename to x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java index edecf4eb9669e..f5ac107628d1a 100644 --- a/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java +++ b/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.logsdb; +import org.elasticsearch.client.Request; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.test.cluster.ElasticsearchCluster; @@ -19,7 +20,7 @@ import java.util.List; import java.util.Map; -public class LogsdbRestIT extends ESRestTestCase { +public class LogsdbWithBasicRestIT extends ESRestTestCase { @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() @@ -96,7 +97,7 @@ public void testLogsdbOverrideSyntheticSourceModeInMapping() throws IOException assertEquals(SourceFieldMapper.Mode.STORED.toString(), settings.get("index.mapping.source.mode")); } - public void testLogsdbNoOverrideSyntheticSourceSetting() throws IOException { + public void testLogsdbOverrideSyntheticSourceSetting() throws IOException { final String index = "test-index"; createIndex( index, @@ -104,6 +105,70 @@ public void 
testLogsdbNoOverrideSyntheticSourceSetting() throws IOException { ); var settings = (Map) ((Map) getIndexSettings(index).get(index)).get("settings"); assertEquals("logsdb", settings.get("index.mode")); - assertEquals(SourceFieldMapper.Mode.SYNTHETIC.toString(), settings.get("index.mapping.source.mode")); + assertEquals(SourceFieldMapper.Mode.STORED.toString(), settings.get("index.mapping.source.mode")); + } + + public void testLogsdbOverrideNullSyntheticSourceSetting() throws IOException { + final String index = "test-index"; + createIndex(index, Settings.builder().put("index.mode", "logsdb").putNull("index.mapping.source.mode").build()); + var settings = (Map) ((Map) getIndexSettings(index).get(index)).get("settings"); + assertEquals("logsdb", settings.get("index.mode")); + assertEquals(SourceFieldMapper.Mode.STORED.toString(), settings.get("index.mapping.source.mode")); + } + + public void testLogsdbOverrideSyntheticSourceSettingInTemplate() throws IOException { + var request = new Request("POST", "/_index_template/1"); + request.setJsonEntity(""" + { + "index_patterns": ["test-*"], + "template": { + "settings":{ + "index": { + "mode": "logsdb", + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + } + } + } + """); + assertOK(client().performRequest(request)); + + final String index = "test-index"; + createIndex(index); + var settings = (Map) ((Map) getIndexSettings(index).get(index)).get("settings"); + assertEquals("logsdb", settings.get("index.mode")); + assertEquals(SourceFieldMapper.Mode.STORED.toString(), settings.get("index.mapping.source.mode")); + } + + public void testLogsdbOverrideNullInTemplate() throws IOException { + var request = new Request("POST", "/_index_template/1"); + request.setJsonEntity(""" + { + "index_patterns": ["test-*"], + "template": { + "settings":{ + "index": { + "mode": "logsdb", + "mapping": { + "source": { + "mode": null + } + } + } + } + } + } + """); + assertOK(client().performRequest(request)); + + final String 
index = "test-index"; + createIndex(index); + var settings = (Map) ((Map) getIndexSettings(index).get(index)).get("settings"); + assertEquals("logsdb", settings.get("index.mode")); + assertEquals(SourceFieldMapper.Mode.STORED.toString(), settings.get("index.mapping.source.mode")); } } diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java index f60c941c75a7c..4625fe91294d7 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java @@ -47,6 +47,12 @@ final class SyntheticSourceIndexSettingsProvider implements IndexSettingProvider this.mapperServiceFactory = mapperServiceFactory; } + @Override + public boolean overrulesTemplateAndRequestSettings() { + // Indicates that the provider value takes precedence over any user setting. 
+ return true; + } + @Override public Settings getAdditionalIndexSettings( String indexName, From 485aba8b0221b720f4a2e71efd61c36cc90d8ee1 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 22 Oct 2024 16:21:29 +0200 Subject: [PATCH 280/449] Change some IndexInput to RandomAccessInput in ES87TSDBDocValuesProducer (#115305) --- .../codec/tsdb/ES87TSDBDocValuesProducer.java | 21 +++++++------------ 1 file changed, 8 insertions(+), 13 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java index d5c94de1c6942..a7560ce6f3caf 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java @@ -132,7 +132,7 @@ public BinaryDocValues getBinary(FieldInfo field) throws IOException { return DocValues.emptyBinary(); } - final IndexInput bytesSlice = data.slice("fixed-binary", entry.dataOffset, entry.dataLength); + final RandomAccessInput bytesSlice = data.randomAccessSlice(entry.dataOffset, entry.dataLength); if (entry.docsWithFieldOffset == -1) { // dense @@ -144,8 +144,7 @@ public BinaryDocValues getBinary(FieldInfo field) throws IOException { @Override public BytesRef binaryValue() throws IOException { - bytesSlice.seek((long) doc * length); - bytesSlice.readBytes(bytes.bytes, 0, length); + bytesSlice.readBytes((long) doc * length, bytes.bytes, 0, length); return bytes; } }; @@ -160,8 +159,7 @@ public BytesRef binaryValue() throws IOException { public BytesRef binaryValue() throws IOException { long startOffset = addresses.get(doc); bytes.length = (int) (addresses.get(doc + 1L) - startOffset); - bytesSlice.seek(startOffset); - bytesSlice.readBytes(bytes.bytes, 0, bytes.length); + bytesSlice.readBytes(startOffset, bytes.bytes, 0, bytes.length); return bytes; } }; @@ -184,8 +182,7 @@ public BytesRef 
binaryValue() throws IOException { @Override public BytesRef binaryValue() throws IOException { - bytesSlice.seek((long) disi.index() * length); - bytesSlice.readBytes(bytes.bytes, 0, length); + bytesSlice.readBytes((long) disi.index() * length, bytes.bytes, 0, length); return bytes; } }; @@ -201,8 +198,7 @@ public BytesRef binaryValue() throws IOException { final int index = disi.index(); long startOffset = addresses.get(index); bytes.length = (int) (addresses.get(index + 1L) - startOffset); - bytesSlice.seek(startOffset); - bytesSlice.readBytes(bytes.bytes, 0, bytes.length); + bytesSlice.readBytes(startOffset, bytes.bytes, 0, bytes.length); return bytes; } }; @@ -407,7 +403,7 @@ private static class TermsDict extends BaseTermsEnum { final IndexInput bytes; final long blockMask; final LongValues indexAddresses; - final IndexInput indexBytes; + final RandomAccessInput indexBytes; final BytesRef term; long ord = -1; @@ -427,7 +423,7 @@ private static class TermsDict extends BaseTermsEnum { entry.termsIndexAddressesLength ); indexAddresses = DirectMonotonicReader.getInstance(entry.termsIndexAddressesMeta, indexAddressesSlice); - indexBytes = data.slice("terms-index", entry.termsIndexOffset, entry.termsIndexLength); + indexBytes = data.randomAccessSlice(entry.termsIndexOffset, entry.termsIndexLength); term = new BytesRef(entry.maxTermLength); // add the max term length for the dictionary @@ -485,8 +481,7 @@ private BytesRef getTermFromIndex(long index) throws IOException { assert index >= 0 && index <= (entry.termsDictSize - 1) >>> entry.termsDictIndexShift; final long start = indexAddresses.get(index); term.length = (int) (indexAddresses.get(index + 1) - start); - indexBytes.seek(start); - indexBytes.readBytes(term.bytes, 0, term.length); + indexBytes.readBytes(start, term.bytes, 0, term.length); return term; } From 07374ab600c9ae708eda1f452f8c06774e50183f Mon Sep 17 00:00:00 2001 From: Luke Whiting Date: Tue, 22 Oct 2024 15:27:23 +0100 Subject: [PATCH 281/449] 
#111433 Watch Next Run Interval Resets On Shard Move or Node Restart (#115102) * Switch Watcher scheduler to use last exec time when restarting, moving shards or resuming from stopped. * Add tests for last runtime calculation * Update docs/changelog/115102.yaml * Add counter to watcher job executions to check no additional executions happen during test --- docs/changelog/115102.yaml | 6 + .../engine/TickerScheduleTriggerEngine.java | 40 ++- .../engine/TickerScheduleEngineTests.java | 239 ++++++++++++++++++ 3 files changed, 283 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/115102.yaml diff --git a/docs/changelog/115102.yaml b/docs/changelog/115102.yaml new file mode 100644 index 0000000000000..f679bb6c223a6 --- /dev/null +++ b/docs/changelog/115102.yaml @@ -0,0 +1,6 @@ +pr: 115102 +summary: Watch Next Run Interval Resets On Shard Move or Node Restart +area: Watcher +type: bug +issues: + - 111433 diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleTriggerEngine.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleTriggerEngine.java index ced131640f0ee..cc8d0edf37014 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleTriggerEngine.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleTriggerEngine.java @@ -17,6 +17,8 @@ import org.elasticsearch.xpack.core.watcher.support.WatcherDateTimeUtils; import org.elasticsearch.xpack.core.watcher.trigger.TriggerEvent; import org.elasticsearch.xpack.core.watcher.watch.Watch; +import org.elasticsearch.xpack.core.watcher.watch.WatchStatus; +import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule; import org.elasticsearch.xpack.watcher.trigger.schedule.Schedule; import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleRegistry; import 
org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTrigger; @@ -32,6 +34,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; @@ -67,7 +70,11 @@ public synchronized void start(Collection jobs) { Map startingSchedules = Maps.newMapWithExpectedSize(jobs.size()); for (Watch job : jobs) { if (job.trigger() instanceof ScheduleTrigger trigger) { - startingSchedules.put(job.id(), new ActiveSchedule(job.id(), trigger.getSchedule(), startTime)); + if (trigger.getSchedule() instanceof IntervalSchedule) { + startingSchedules.put(job.id(), new ActiveSchedule(job.id(), trigger.getSchedule(), calculateLastStartTime(job))); + } else { + startingSchedules.put(job.id(), new ActiveSchedule(job.id(), trigger.getSchedule(), startTime)); + } } } // why are we calling putAll() here instead of assigning a brand @@ -108,10 +115,39 @@ public void add(Watch watch) { // watcher indexing listener // this also means that updating an existing watch would not retrigger the schedule time, if it remains the same schedule if (currentSchedule == null || currentSchedule.schedule.equals(trigger.getSchedule()) == false) { - schedules.put(watch.id(), new ActiveSchedule(watch.id(), trigger.getSchedule(), clock.millis())); + if (trigger.getSchedule() instanceof IntervalSchedule) { + schedules.put(watch.id(), new ActiveSchedule(watch.id(), trigger.getSchedule(), calculateLastStartTime(watch))); + } else { + schedules.put(watch.id(), new ActiveSchedule(watch.id(), trigger.getSchedule(), clock.millis())); + } + } } + /** + * Attempts to calculate the epoch millis of the last time the watch was checked, If the watch has never been checked, the timestamp of + * the last state change is used. If the watch has never been checked and has never been in an active state, the current time is used. 
+ * @param job the watch to calculate the last start time for + * @return the epoch millis of the last time the watch was checked or now + */ + private long calculateLastStartTime(Watch job) { + var lastChecked = Optional.ofNullable(job) + .map(Watch::status) + .map(WatchStatus::lastChecked) + .map(ZonedDateTime::toInstant) + .map(Instant::toEpochMilli); + + return lastChecked.orElseGet( + () -> Optional.ofNullable(job) + .map(Watch::status) + .map(WatchStatus::state) + .map(WatchStatus.State::getTimestamp) + .map(ZonedDateTime::toInstant) + .map(Instant::toEpochMilli) + .orElse(clock.millis()) + ); + } + @Override public boolean remove(String jobId) { logger.debug("Removing watch [{}] from engine (engine is running: {})", jobId, isRunning.get()); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleEngineTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleEngineTests.java index 8b7cfa75f9229..9a12b8f394eb2 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleEngineTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleEngineTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.core.watcher.trigger.TriggerEvent; import org.elasticsearch.xpack.core.watcher.watch.ClockMock; import org.elasticsearch.xpack.core.watcher.watch.Watch; +import org.elasticsearch.xpack.core.watcher.watch.WatchStatus; import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.input.none.ExecutableNoneInput; import org.elasticsearch.xpack.watcher.trigger.schedule.Schedule; @@ -283,6 +284,244 @@ public void testAddOnlyWithNewSchedule() { assertThat(engine.getSchedules().get("_id"), not(is(activeSchedule))); } + /** + * This test verifies that a watch with a valid 
lastCheckedTime executes before the interval time to ensure the job resumes waiting + * from the same point it left off before the reallocation / restart + */ + public void testWatchWithLastCheckedTimeExecutesBeforeInitialInterval() throws Exception { + final var firstLatch = new CountDownLatch(1); + final var secondLatch = new CountDownLatch(1); + + Watch watch = new Watch( + "watch", + new ScheduleTrigger(interval("1s")), + new ExecutableNoneInput(), + InternalAlwaysCondition.INSTANCE, + null, + null, + Collections.emptyList(), + null, + new WatchStatus(-1L, null, null, clock.instant().minusMillis(500).atZone(ZoneOffset.UTC), null, null, null), + SequenceNumbers.UNASSIGNED_SEQ_NO, + SequenceNumbers.UNASSIGNED_PRIMARY_TERM + ); + + var watches = Collections.singletonList(watch); + + var runCount = new AtomicInteger(0); + + engine.register(events -> { + for (TriggerEvent ignored : events) { + if (runCount.get() == 0) { + logger.info("job first fire"); + firstLatch.countDown(); + } else { + logger.info("job second fire"); + secondLatch.countDown(); + } + runCount.incrementAndGet(); + } + }); + + engine.start(watches); + advanceClockIfNeeded(clock.instant().plusMillis(510).atZone(ZoneOffset.UTC)); + if (firstLatch.await(3, TimeUnit.SECONDS) == false) { + fail("waiting too long for all watches to be triggered"); + } + + advanceClockIfNeeded(clock.instant().plusMillis(1100).atZone(ZoneOffset.UTC)); + if (secondLatch.await(3, TimeUnit.SECONDS) == false) { + fail("waiting too long for all watches to be triggered"); + } + + assertThat(runCount.get(), is(2)); + + engine.stop(); + } + + /** + * This test verifies that a watch without a lastCheckedTime but with a valid activationTime executes before the interval time to + * ensure the job resumes waiting from the same point it left off before the reallocation / restart + */ + public void testWatchWithNoLastCheckedTimeButHasActivationTimeExecutesBeforeInitialInterval() throws Exception { + final var firstLatch = new 
CountDownLatch(1); + final var secondLatch = new CountDownLatch(1); + + Watch watch = new Watch( + "watch", + new ScheduleTrigger(interval("1s")), + new ExecutableNoneInput(), + InternalAlwaysCondition.INSTANCE, + null, + null, + Collections.emptyList(), + null, + new WatchStatus( + -1L, + new WatchStatus.State(true, clock.instant().minusMillis(500).atZone(ZoneOffset.UTC)), + null, + null, + null, + null, + null + ), + SequenceNumbers.UNASSIGNED_SEQ_NO, + SequenceNumbers.UNASSIGNED_PRIMARY_TERM + ); + + var watches = Collections.singletonList(watch); + + var runCount = new AtomicInteger(0); + + engine.register(events -> { + for (TriggerEvent ignored : events) { + if (runCount.get() == 0) { + logger.info("job first fire"); + firstLatch.countDown(); + } else { + logger.info("job second fire"); + secondLatch.countDown(); + } + runCount.incrementAndGet(); + } + }); + + engine.start(watches); + advanceClockIfNeeded(clock.instant().plusMillis(510).atZone(ZoneOffset.UTC)); + if (firstLatch.await(3, TimeUnit.SECONDS) == false) { + fail("waiting too long for all watches to be triggered"); + } + + advanceClockIfNeeded(clock.instant().plusMillis(1100).atZone(ZoneOffset.UTC)); + if (secondLatch.await(3, TimeUnit.SECONDS) == false) { + fail("waiting too long for all watches to be triggered"); + } + + assertThat(runCount.get(), is(2)); + + engine.stop(); + } + + /** + * This test verifies that a watch added after service start with a lastCheckedTime executes before the interval time to ensure the job + * resumes waiting from the same point it left off before the reallocation / restart + */ + public void testAddWithLastCheckedTimeExecutesBeforeInitialInterval() throws Exception { + final var firstLatch = new CountDownLatch(1); + final var secondLatch = new CountDownLatch(1); + + Watch watch = new Watch( + "watch", + new ScheduleTrigger(interval("1s")), + new ExecutableNoneInput(), + InternalAlwaysCondition.INSTANCE, + null, + null, + Collections.emptyList(), + null, + new 
WatchStatus(-1L, null, null, clock.instant().minusMillis(500).atZone(ZoneOffset.UTC), null, null, null), + SequenceNumbers.UNASSIGNED_SEQ_NO, + SequenceNumbers.UNASSIGNED_PRIMARY_TERM + ); + + var runCount = new AtomicInteger(0); + + engine.register(events -> { + for (TriggerEvent ignored : events) { + if (runCount.get() == 0) { + logger.info("job first fire"); + firstLatch.countDown(); + } else { + logger.info("job second fire"); + secondLatch.countDown(); + } + runCount.incrementAndGet(); + } + }); + + engine.start(Collections.emptyList()); + advanceClockIfNeeded(clock.instant().plusMillis(1100).atZone(ZoneOffset.UTC)); + engine.add(watch); + + advanceClockIfNeeded(clock.instant().plusMillis(510).atZone(ZoneOffset.UTC)); + if (firstLatch.await(3, TimeUnit.SECONDS) == false) { + fail("waiting too long for all watches to be triggered"); + } + + advanceClockIfNeeded(clock.instant().plusMillis(1100).atZone(ZoneOffset.UTC)); + if (secondLatch.await(3, TimeUnit.SECONDS) == false) { + fail("waiting too long for all watches to be triggered"); + } + + assertThat(runCount.get(), is(2)); + + engine.stop(); + } + + /** + * This test verifies that a watch added after service start without a lastCheckedTime but with a valid activationTime executes before + * the interval time to ensure the job resumes waiting from the same point it left off before the reallocation / restart + */ + public void testAddWithNoLastCheckedTimeButHasActivationTimeExecutesBeforeInitialInterval() throws Exception { + final var firstLatch = new CountDownLatch(1); + final var secondLatch = new CountDownLatch(1); + + Watch watch = new Watch( + "watch", + new ScheduleTrigger(interval("1s")), + new ExecutableNoneInput(), + InternalAlwaysCondition.INSTANCE, + null, + null, + Collections.emptyList(), + null, + new WatchStatus( + -1L, + new WatchStatus.State(true, clock.instant().minusMillis(500).atZone(ZoneOffset.UTC)), + null, + null, + null, + null, + null + ), + SequenceNumbers.UNASSIGNED_SEQ_NO, + 
SequenceNumbers.UNASSIGNED_PRIMARY_TERM + ); + + var runCount = new AtomicInteger(0); + + engine.register(events -> { + for (TriggerEvent ignored : events) { + if (runCount.get() == 0) { + logger.info("job first fire"); + firstLatch.countDown(); + } else { + logger.info("job second fire"); + secondLatch.countDown(); + } + runCount.incrementAndGet(); + } + }); + + engine.start(Collections.emptyList()); + advanceClockIfNeeded(clock.instant().plusMillis(1100).atZone(ZoneOffset.UTC)); + engine.add(watch); + + advanceClockIfNeeded(clock.instant().plusMillis(510).atZone(ZoneOffset.UTC)); + if (firstLatch.await(3, TimeUnit.SECONDS) == false) { + fail("waiting too long for all watches to be triggered"); + } + + advanceClockIfNeeded(clock.instant().plusMillis(1100).atZone(ZoneOffset.UTC)); + if (secondLatch.await(3, TimeUnit.SECONDS) == false) { + fail("waiting too long for all watches to be triggered"); + } + + assertThat(runCount.get(), is(2)); + + engine.stop(); + } + private Watch createWatch(String name, Schedule schedule) { return new Watch( name, From 36c45c15dac54e3b318cb0c92b5e737bbf8a788a Mon Sep 17 00:00:00 2001 From: Luke Whiting Date: Tue, 22 Oct 2024 15:27:42 +0100 Subject: [PATCH 282/449] #104233 Allow Watcher Node Allocation Settings (#115251) * Update settings endpoint modified Now accepts index.routing.allocation.* settings but denies changing the allocation setting that keeps watches on data nodes * Get settings endpoint modified Now returns index.routing.allocation.* settings explicitly filters out the `index.routing.allocation.include._tier_preference` setting * Tests for modified endpoints * Update docs --- .../watcher/how-watcher-works.asciidoc | 13 ++- .../put/UpdateWatcherSettingsAction.java | 30 ++++- .../10_watcher_settings.yml | 104 ++++++++++++++++++ .../10_update_watcher_settings.yml | 66 ----------- .../TransportGetWatcherSettingsAction.java | 17 ++- 5 files changed, 149 insertions(+), 81 deletions(-) create mode 100644 
x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/settings_endpoints/10_watcher_settings.yml delete mode 100644 x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/update_settings/10_update_watcher_settings.yml diff --git a/docs/reference/watcher/how-watcher-works.asciidoc b/docs/reference/watcher/how-watcher-works.asciidoc index ed6e49b72e9ce..e34d4f799d99b 100644 --- a/docs/reference/watcher/how-watcher-works.asciidoc +++ b/docs/reference/watcher/how-watcher-works.asciidoc @@ -146,15 +146,18 @@ add, the more distributed the watches can be executed. If you add or remove replicas, all watches need to be reloaded. If a shard is relocated, the primary and all replicas of this particular shard will reload. -Because the watches are executed on the node, where the watch shards are, you can create -dedicated watcher nodes by using shard allocation filtering. +Because the watches are executed on the node, where the watch shards are, you +can create dedicated watcher nodes by using shard allocation filtering. To do this +, configure nodes with a dedicated `node.attr.role: watcher` property. -You could configure nodes with a dedicated `node.attr.role: watcher` property and -then configure the `.watches` index like this: +As the `.watches` index is a system index, you can't use the normal `.watcher/_settings` +endpoint to modify its routing allocation. 
Instead, you can use the following dedicated +endpoint to adjust the allocation of the `.watches` shards to the nodes with the +`watcher` role attribute: [source,console] ------------------------ -PUT .watches/_settings +PUT _watcher/settings { "index.routing.allocation.include.role": "watcher" } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java index 42fc7c196bbcf..7b0bd8a8108e9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java @@ -16,13 +16,13 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.UpdateForV9; import java.io.IOException; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; public class UpdateWatcherSettingsAction extends ActionType { @@ -34,6 +34,16 @@ public class UpdateWatcherSettingsAction extends ActionType ALLOWED_SETTINGS_PREFIXES = Set.of( + IndexMetadata.INDEX_ROUTING_EXCLUDE_GROUP_PREFIX, + IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_PREFIX, + IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX + ); + + public static final Set EXPLICITLY_DENIED_SETTINGS = Set.of( + IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_PREFIX + "._tier_preference" + ); + public UpdateWatcherSettingsAction() { super(NAME); } @@ -79,13 +89,25 @@ public Map settings() { @Override public ActionRequestValidationException validate() { - Set forbiddenSettings = Sets.difference(settings.keySet(), 
ALLOWED_SETTING_KEYS); - if (forbiddenSettings.size() > 0) { + Set forbiddenSettings = settings.keySet() + .stream() + .filter( + setting -> (ALLOWED_SETTING_KEYS.contains(setting) == false + && ALLOWED_SETTINGS_PREFIXES.stream().noneMatch(prefix -> setting.startsWith(prefix + "."))) + || EXPLICITLY_DENIED_SETTINGS.contains(setting) + ) + .collect(Collectors.toSet()); + + if (forbiddenSettings.isEmpty() == false) { return ValidateActions.addValidationError( "illegal settings: " + forbiddenSettings + ", these settings may not be configured. Only the following settings may be configured: " - + ALLOWED_SETTING_KEYS, + + ALLOWED_SETTING_KEYS + + ", " + + ALLOWED_SETTINGS_PREFIXES.stream().map(s -> s + ".*").collect(Collectors.toSet()) + + " excluding the following explicitly denied settings: " + + EXPLICITLY_DENIED_SETTINGS, null ); } diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/settings_endpoints/10_watcher_settings.yml b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/settings_endpoints/10_watcher_settings.yml new file mode 100644 index 0000000000000..f639b4f8f1a77 --- /dev/null +++ b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/settings_endpoints/10_watcher_settings.yml @@ -0,0 +1,104 @@ +--- +setup: + - do: + cluster.health: + wait_for_status: yellow + - do: + watcher.put_watch: + id: "my_watch" + body: > + { + "trigger": { + "schedule": { + "hourly": { + "minute": [ 0, 5 ] + } + } + }, + "input": { + "simple": { + "payload": { + "send": "yes" + } + } + }, + "condition": { + "always": {} + }, + "actions": { + "test_index": { + "index": { + "index": "test" + } + } + } + } + +--- +"Test update and get watch settings api": + - do: + watcher.get_settings: { } + + - match: { index.auto_expand_replicas: "0-1" } + - match: { index.number_of_replicas: "0" } + + - do: + watcher.update_settings: + body: + index.auto_expand_replicas: "0-all" + + - do: 
+ watcher.get_settings: { } + + - match: { index.auto_expand_replicas: "0-all" } + - is_false: index.routing.allocation.include._tier_preference + + - do: + watcher.update_settings: + body: + index.auto_expand_replicas: null + index.number_of_replicas: 1 + + - do: + watcher.get_settings: { } + + - match: { index.number_of_replicas: "1" } +--- +"Test disallowed setting name throws error": + - requires: + test_runner_features: regex + - do: + watcher.update_settings: + body: + index.disallowed_setting: "some_invalid_value" + catch: bad_request + - match: + error: + type: "action_request_validation_exception" + reason: '/illegal settings\: \[index.disallowed_setting\].*/' +--- +"Test allowed prefix setting name": + - do: + watcher.update_settings: + body: + index.routing.allocation.include.role: "watcher" + index.routing.allocation.exclude.role: "noWatcher" + index.routing.allocation.require.role: "mustWatcher" + - do: + watcher.get_settings: { } + - match: { index.routing.allocation.include.role: "watcher" } + - match: { index.routing.allocation.exclude.role: "noWatcher" } + - match: { index.routing.allocation.require.role: "mustWatcher" } +--- +"Test explicitly disallowed prefix setting name throws error": + - requires: + test_runner_features: regex + - do: + watcher.update_settings: + body: + index.routing.allocation.include.disallowed_prefix: "some_invalid_value" + catch: bad_request + - match: + error: + type: "action_request_validation_exception" + reason: '/illegal settings\: \[index.routing.allocation.include.disallowed_prefix\].*/' diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/update_settings/10_update_watcher_settings.yml b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/update_settings/10_update_watcher_settings.yml deleted file mode 100644 index d7478d643a98a..0000000000000 --- 
a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/update_settings/10_update_watcher_settings.yml +++ /dev/null @@ -1,66 +0,0 @@ ---- -setup: - - do: - cluster.health: - wait_for_status: yellow - ---- -"Test update and get watch settings api": - - do: - watcher.put_watch: - id: "my_watch" - body: > - { - "trigger": { - "schedule": { - "hourly": { - "minute": [ 0, 5 ] - } - } - }, - "input": { - "simple": { - "payload": { - "send": "yes" - } - } - }, - "condition": { - "always": {} - }, - "actions": { - "test_index": { - "index": { - "index": "test" - } - } - } - } - - match: { _id: "my_watch" } - - - do: - watcher.get_settings: {} - - - match: { index.auto_expand_replicas: "0-1" } - - match: { index.number_of_replicas: "0" } - - - do: - watcher.update_settings: - body: - index.auto_expand_replicas: "0-all" - - - do: - watcher.get_settings: {} - - - match: { index.auto_expand_replicas: "0-all" } - - - do: - watcher.update_settings: - body: - index.auto_expand_replicas: null - index.number_of_replicas: 1 - - - do: - watcher.get_settings: {} - - - match: { index.number_of_replicas: "1" } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportGetWatcherSettingsAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportGetWatcherSettingsAction.java index 29349735afcd2..2962bffd68b66 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportGetWatcherSettingsAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportGetWatcherSettingsAction.java @@ -23,8 +23,10 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.watcher.transport.actions.put.GetWatcherSettingsAction; -import 
org.elasticsearch.xpack.core.watcher.transport.actions.put.UpdateWatcherSettingsAction; +import static org.elasticsearch.xpack.core.watcher.transport.actions.put.UpdateWatcherSettingsAction.ALLOWED_SETTINGS_PREFIXES; +import static org.elasticsearch.xpack.core.watcher.transport.actions.put.UpdateWatcherSettingsAction.ALLOWED_SETTING_KEYS; +import static org.elasticsearch.xpack.core.watcher.transport.actions.put.UpdateWatcherSettingsAction.EXPLICITLY_DENIED_SETTINGS; import static org.elasticsearch.xpack.watcher.transport.actions.TransportUpdateWatcherSettingsAction.WATCHER_INDEX_NAME; import static org.elasticsearch.xpack.watcher.transport.actions.TransportUpdateWatcherSettingsAction.WATCHER_INDEX_REQUEST; @@ -73,11 +75,14 @@ protected void masterOperation( */ private static Settings filterSettableSettings(Settings settings) { Settings.Builder builder = Settings.builder(); - for (String settingName : UpdateWatcherSettingsAction.ALLOWED_SETTING_KEYS) { - if (settings.hasValue(settingName)) { - builder.put(settingName, settings.get(settingName)); - } - } + settings.keySet() + .stream() + .filter( + setting -> (ALLOWED_SETTING_KEYS.contains(setting) + || ALLOWED_SETTINGS_PREFIXES.stream().anyMatch(prefix -> setting.startsWith(prefix + "."))) + && EXPLICITLY_DENIED_SETTINGS.contains(setting) == false + ) + .forEach(setting -> builder.put(setting, settings.get(setting))); return builder.build(); } From 6855db5b44e957429588dd645165650e970fd001 Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Tue, 22 Oct 2024 16:34:11 +0200 Subject: [PATCH 283/449] Expose cluster-state role mappings in APIs (#114951) This PR exposes operator-defined, cluster-state role mappings in the [Get role mappings API](https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role-mapping.html). Cluster-state role mappings are returned with a reserved suffix `-read-only-operator-mapping`, to disambiguate with native role mappings stored in the security index. 
CS role mappings are also marked with a `_read_only` metadata flag. It's possible to query a CS role mapping using its name both with and without the suffix. CS role mappings can be viewed via the API, but cannot be modified. To clarify this, the PUT and DELETE role mapping endpoints return header warnings if native role mappings that name-clash with CS role mappings are created, modified, or deleted. The PR also prevents the creation of role mappings with names ending in `-read-only-operator-mapping` to ensure that CS role mappings and native role mappings can always be fully disambiguated. Finally, the PR changes how CS role mappings are persisted in cluster-state. CS role mappings are written (and read from disk) in the `XContent` format. This format omits the role mapping's name. This means that if CS role mappings are ever recovered from disk (e.g., during a master-node restart), their names are erased. To address this, this PR changes CS role mapping serialization to persist the name of a mapping in a reserved metadata field, and recover it from metadata during deserialization. This allows us to persist the name without BWC-breaks in role mapping `XContent` format. It also allows us to ensure that role mappings are re-written to cluster state in the new, name-preserving format the first time operator file settings are processed. 
Depends on: https://github.com/elastic/elasticsearch/pull/114295 Relates: ES-9628 --- docs/changelog/114951.yaml | 5 + .../FileSettingsRoleMappingUpgradeIT.java | 7 + .../service/ReservedStateUpdateTask.java | 4 +- .../support/mapper/ExpressionRoleMapping.java | 34 +++ .../security/authz/RoleMappingMetadata.java | 75 ++++- .../SecurityOnTrialLicenseRestTestCase.java | 11 +- .../rolemapping/RoleMappingRestIT.java | 268 ++++++++++++++++++ .../RoleMappingFileSettingsIT.java | 168 ++++++++--- .../FileSettingsRoleMappingsRestartIT.java | 84 ++++-- .../xpack/security/Security.java | 1 + .../ReservedRoleMappingAction.java | 8 +- .../TransportDeleteRoleMappingAction.java | 23 +- .../TransportGetRoleMappingsAction.java | 87 +++++- .../TransportPutRoleMappingAction.java | 20 +- .../mapper/ClusterStateRoleMapper.java | 26 +- .../TransportGetRoleMappingsActionTests.java | 247 +++++++++++++--- .../TransportPutRoleMappingActionTests.java | 43 ++- 17 files changed, 964 insertions(+), 147 deletions(-) create mode 100644 docs/changelog/114951.yaml create mode 100644 x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/rolemapping/RoleMappingRestIT.java diff --git a/docs/changelog/114951.yaml b/docs/changelog/114951.yaml new file mode 100644 index 0000000000000..4d40a063e2b02 --- /dev/null +++ b/docs/changelog/114951.yaml @@ -0,0 +1,5 @@ +pr: 114951 +summary: Expose cluster-state role mappings in APIs +area: Authentication +type: bug +issues: [] diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FileSettingsRoleMappingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FileSettingsRoleMappingUpgradeIT.java index 3275f3e0e136f..b3d4dfc68d399 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FileSettingsRoleMappingUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FileSettingsRoleMappingUpgradeIT.java @@ -25,9 
+25,12 @@ import java.io.IOException; import java.util.List; +import java.util.Map; import java.util.function.Supplier; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; @@ -106,6 +109,10 @@ public void testRoleMappingsAppliedOnUpgrade() throws IOException { ); assertThat(roleMappings, is(not(nullValue()))); assertThat(roleMappings.size(), equalTo(1)); + assertThat(roleMappings, is(instanceOf(Map.class))); + @SuppressWarnings("unchecked") + Map roleMapping = (Map) roleMappings; + assertThat(roleMapping.keySet(), contains("everyone_kibana-read-only-operator-mapping")); } } } diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java index 92e248f160f0f..c85997f72cc78 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java @@ -205,8 +205,8 @@ static boolean checkMetadataVersion( namespace, newVersion, switch (versionCheck) { - case ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION -> "less than"; - case ReservedStateVersionCheck.HIGHER_VERSION_ONLY -> "less than or equal to"; + case HIGHER_OR_SAME_VERSION -> "less than"; + case HIGHER_VERSION_ONLY -> "less than or equal to"; }, currentVersion ) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/ExpressionRoleMapping.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/ExpressionRoleMapping.java index 17088cff8718b..c504ebe56ed45 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/ExpressionRoleMapping.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/ExpressionRoleMapping.java @@ -54,6 +54,18 @@ */ public class ExpressionRoleMapping implements ToXContentObject, Writeable { + /** + * Reserved suffix for read-only operator-defined role mappings. + * This suffix is added to the name of all cluster-state role mappings returned via + * the {@code TransportGetRoleMappingsAction} action. + */ + public static final String READ_ONLY_ROLE_MAPPING_SUFFIX = "-read-only-operator-mapping"; + /** + * Reserved metadata field to mark role mappings as read-only. + * This field is added to the metadata of all cluster-state role mappings returned via + * the {@code TransportGetRoleMappingsAction} action. + */ + public static final String READ_ONLY_ROLE_MAPPING_METADATA_FLAG = "_read_only"; private static final ObjectParser PARSER = new ObjectParser<>("role-mapping", Builder::new); /** @@ -136,6 +148,28 @@ public ExpressionRoleMapping(StreamInput in) throws IOException { this.metadata = in.readGenericMap(); } + public static boolean hasReadOnlySuffix(String name) { + return name.endsWith(READ_ONLY_ROLE_MAPPING_SUFFIX); + } + + public static void validateNoReadOnlySuffix(String name) { + if (hasReadOnlySuffix(name)) { + throw new IllegalArgumentException( + "Invalid mapping name [" + name + "]. [" + READ_ONLY_ROLE_MAPPING_SUFFIX + "] is not an allowed suffix" + ); + } + } + + public static String addReadOnlySuffix(String name) { + return name + READ_ONLY_ROLE_MAPPING_SUFFIX; + } + + public static String removeReadOnlySuffixIfPresent(String name) { + return name.endsWith(READ_ONLY_ROLE_MAPPING_SUFFIX) + ? 
name.substring(0, name.length() - READ_ONLY_ROLE_MAPPING_SUFFIX.length()) + : name; + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(name); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleMappingMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleMappingMetadata.java index b38b33e082382..74c6223b1ebdd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleMappingMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleMappingMetadata.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.core.security.authz; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.AbstractNamedDiffable; @@ -26,8 +28,10 @@ import java.io.IOException; import java.util.Collection; import java.util.EnumSet; +import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashSet; +import java.util.Map; import java.util.Objects; import java.util.Set; @@ -36,7 +40,11 @@ public final class RoleMappingMetadata extends AbstractNamedDiffable implements Metadata.Custom { + private static final Logger logger = LogManager.getLogger(RoleMappingMetadata.class); + public static final String TYPE = "role_mappings"; + public static final String METADATA_NAME_FIELD = "_es_reserved_role_mapping_name"; + public static final String FALLBACK_NAME = "name_not_available_after_deserialization"; @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( @@ -46,12 +54,7 @@ public final class RoleMappingMetadata extends AbstractNamedDiffable ExpressionRoleMapping.parse("name_not_available_after_deserialization", p), - new ParseField(TYPE) - ); + 
PARSER.declareObjectArray(constructorArg(), (p, c) -> parseWithNameFromMetadata(p), new ParseField(TYPE)); } private static final RoleMappingMetadata EMPTY = new RoleMappingMetadata(Set.of()); @@ -153,4 +156,64 @@ public EnumSet context() { // are not persisted. return ALL_CONTEXTS; } + + /** + * Ensures role mapping names are preserved when stored on disk using XContent format, + * which omits names. This method copies the role mapping's name into a reserved metadata field + * during serialization, allowing recovery during deserialization (e.g., after a master-node restart). + * {@link #parseWithNameFromMetadata(XContentParser)} restores the name during parsing. + */ + public static ExpressionRoleMapping copyWithNameInMetadata(ExpressionRoleMapping roleMapping) { + Map metadata = new HashMap<>(roleMapping.getMetadata()); + // note: can't use Maps.copyWith... since these create maps that don't support `null` values in map entries + if (metadata.put(METADATA_NAME_FIELD, roleMapping.getName()) != null) { + logger.error( + "Metadata field [{}] is reserved and will be overwritten with an internal system value. " + + "Rename this field in your role mapping configuration.", + METADATA_NAME_FIELD + ); + } + return new ExpressionRoleMapping( + roleMapping.getName(), + roleMapping.getExpression(), + roleMapping.getRoles(), + roleMapping.getRoleTemplates(), + metadata, + roleMapping.isEnabled() + ); + } + + /** + * If a role mapping does not yet have a name persisted in metadata, it will use a constant fallback name. This method checks if a + * role mapping has the fallback name. + */ + public static boolean hasFallbackName(ExpressionRoleMapping expressionRoleMapping) { + return expressionRoleMapping.getName().equals(FALLBACK_NAME); + } + + /** + * Parse a role mapping from XContent, restoring the name from a reserved metadata field. + * Used to parse a role mapping annotated with its name in metadata via @see {@link #copyWithNameInMetadata(ExpressionRoleMapping)}. 
+ */ + public static ExpressionRoleMapping parseWithNameFromMetadata(XContentParser parser) throws IOException { + ExpressionRoleMapping roleMapping = ExpressionRoleMapping.parse(FALLBACK_NAME, parser); + return new ExpressionRoleMapping( + getNameFromMetadata(roleMapping), + roleMapping.getExpression(), + roleMapping.getRoles(), + roleMapping.getRoleTemplates(), + roleMapping.getMetadata(), + roleMapping.isEnabled() + ); + } + + private static String getNameFromMetadata(ExpressionRoleMapping roleMapping) { + Map metadata = roleMapping.getMetadata(); + if (metadata.containsKey(METADATA_NAME_FIELD) && metadata.get(METADATA_NAME_FIELD) instanceof String name) { + return name; + } else { + // This is valid the first time we recover from cluster-state: the old format metadata won't have a name stored in metadata yet + return FALLBACK_NAME; + } + } } diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java index 1abb9bbb067dc..523f04fb436f4 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java @@ -19,6 +19,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.test.TestSecurityClient; import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.LocalClusterConfigProvider; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.cluster.util.resource.Resource; import org.elasticsearch.test.rest.ESRestTestCase; @@ -41,9 +42,7 @@ public abstract class SecurityOnTrialLicenseRestTestCase extends ESRestTestCase { 
private TestSecurityClient securityClient; - @ClassRule - public static ElasticsearchCluster cluster = ElasticsearchCluster.local() - .nodes(2) + public static LocalClusterConfigProvider commonTrialSecurityClusterConfig = cluster -> cluster.nodes(2) .distribution(DistributionType.DEFAULT) .setting("xpack.ml.enabled", "false") .setting("xpack.license.self_generated.type", "trial") @@ -62,8 +61,10 @@ public abstract class SecurityOnTrialLicenseRestTestCase extends ESRestTestCase .user("admin_user", "admin-password", ROOT_USER_ROLE, true) .user("security_test_user", "security-test-password", "security_test_role", false) .user("x_pack_rest_user", "x-pack-test-password", ROOT_USER_ROLE, true) - .user("cat_test_user", "cat-test-password", "cat_test_role", false) - .build(); + .user("cat_test_user", "cat-test-password", "cat_test_role", false); + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local().apply(commonTrialSecurityClusterConfig).build(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/rolemapping/RoleMappingRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/rolemapping/RoleMappingRestIT.java new file mode 100644 index 0000000000000..51970af4b88a0 --- /dev/null +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/rolemapping/RoleMappingRestIT.java @@ -0,0 +1,268 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.rolemapping; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.util.resource.Resource; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; +import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.FieldExpression; +import org.elasticsearch.xpack.security.SecurityOnTrialLicenseRestTestCase; +import org.junit.ClassRule; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class RoleMappingRestIT extends ESRestTestCase { + private static final String settingsJson = """ + { + "metadata": { + "version": "1", + "compatibility": "8.4.0" + }, + "state": { + "role_mappings": { + "role-mapping-1": { + "enabled": true, + "roles": [ "role_1" ], + "rules": { "field": { "username": "no_user" } }, + "metadata": { + "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", + "_foo": "something", + 
"_es_reserved_role_mapping_name": "ignored" + } + }, + "role-mapping-2": { + "enabled": true, + "roles": [ "role_2" ], + "rules": { "field": { "username": "no_user" } } + }, + "role-mapping-3": { + "enabled": true, + "roles": [ "role_3" ], + "rules": { "field": { "username": "no_user" } }, + "metadata": { + "_read_only" : { "field": 1 }, + "_es_reserved_role_mapping_name": { "still_ignored": true } + } + } + } + } + }"""; + private static final ExpressionRoleMapping clusterStateMapping1 = new ExpressionRoleMapping( + "role-mapping-1-read-only-operator-mapping", + new FieldExpression("username", List.of(new FieldExpression.FieldValue("no_user"))), + List.of("role_1"), + null, + Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something", "_read_only", true), + true + ); + private static final ExpressionRoleMapping clusterStateMapping2 = new ExpressionRoleMapping( + "role-mapping-2-read-only-operator-mapping", + new FieldExpression("username", List.of(new FieldExpression.FieldValue("no_user"))), + List.of("role_2"), + null, + Map.of("_read_only", true), + true + ); + private static final ExpressionRoleMapping clusterStateMapping3 = new ExpressionRoleMapping( + "role-mapping-3-read-only-operator-mapping", + new FieldExpression("username", List.of(new FieldExpression.FieldValue("no_user"))), + List.of("role_3"), + null, + Map.of("_read_only", true), + true + ); + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .apply(SecurityOnTrialLicenseRestTestCase.commonTrialSecurityClusterConfig) + .configFile("operator/settings.json", Resource.fromString(settingsJson)) + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + public void testGetRoleMappings() throws IOException { + expectMappings(List.of(clusterStateMapping1, clusterStateMapping2, clusterStateMapping3)); + expectMappings(List.of(clusterStateMapping1), "role-mapping-1"); + 
expectMappings(List.of(clusterStateMapping1, clusterStateMapping3), "role-mapping-1", "role-mapping-3"); + expectMappings(List.of(clusterStateMapping1), clusterStateMapping1.getName()); + expectMappings(List.of(clusterStateMapping1), clusterStateMapping1.getName(), "role-mapping-1"); + + expect404(() -> getMappings("role-mapping-4")); + expect404(() -> getMappings("role-mapping-4-read-only-operator-mapping")); + + ExpressionRoleMapping nativeMapping1 = expressionRoleMapping("role-mapping-1"); + putMapping(nativeMapping1, createOrUpdateWarning(nativeMapping1.getName())); + + ExpressionRoleMapping nativeMapping4 = expressionRoleMapping("role-mapping-4"); + putMapping(nativeMapping4); + + expectMappings(List.of(clusterStateMapping1, clusterStateMapping2, clusterStateMapping3, nativeMapping1, nativeMapping4)); + expectMappings(List.of(clusterStateMapping1, nativeMapping1), "role-mapping-1"); + expectMappings(List.of(clusterStateMapping1, nativeMapping1), "role-mapping-1", clusterStateMapping1.getName()); + expectMappings(List.of(clusterStateMapping1), clusterStateMapping1.getName()); + expectMappings(List.of(nativeMapping4), "role-mapping-4"); + expectMappings(List.of(nativeMapping4), "role-mapping-4", "role-mapping-4-read-only-operator-mapping"); + } + + public void testPutAndDeleteRoleMappings() throws IOException { + { + var ex = expectThrows( + ResponseException.class, + () -> putMapping(expressionRoleMapping("role-mapping-1-read-only-operator-mapping")) + ); + assertThat( + ex.getMessage(), + containsString( + "Invalid mapping name [role-mapping-1-read-only-operator-mapping]. 
" + + "[-read-only-operator-mapping] is not an allowed suffix" + ) + ); + } + + // Also fails even if a CS role mapping with that name does not exist + { + var ex = expectThrows( + ResponseException.class, + () -> putMapping(expressionRoleMapping("role-mapping-4-read-only-operator-mapping")) + ); + assertThat( + ex.getMessage(), + containsString( + "Invalid mapping name [role-mapping-4-read-only-operator-mapping]. " + + "[-read-only-operator-mapping] is not an allowed suffix" + ) + ); + } + + assertOK(putMapping(expressionRoleMapping("role-mapping-1"), createOrUpdateWarning("role-mapping-1"))); + + assertOK(deleteMapping("role-mapping-1", deletionWarning("role-mapping-1"))); + + // 404 without warnings if no native mapping exists + expect404(() -> deleteMapping("role-mapping-1")); + } + + private static void expect404(ThrowingRunnable clientCall) { + var ex = expectThrows(ResponseException.class, clientCall); + assertThat(ex.getResponse().getStatusLine().getStatusCode(), equalTo(404)); + } + + private static Response putMapping(ExpressionRoleMapping roleMapping) throws IOException { + return putMapping(roleMapping, null); + } + + private static Response putMapping(ExpressionRoleMapping roleMapping, @Nullable String warning) throws IOException { + Request request = new Request("PUT", "/_security/role_mapping/" + roleMapping.getName()); + XContentBuilder xContent = roleMapping.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS); + request.setJsonEntity(BytesReference.bytes(xContent).utf8ToString()); + if (warning != null) { + request.setOptions( + RequestOptions.DEFAULT.toBuilder().setWarningsHandler(warnings -> warnings.equals(List.of(warning)) == false).build() + ); + } + return client().performRequest(request); + } + + private static Response deleteMapping(String name) throws IOException { + return deleteMapping(name, null); + } + + private static Response deleteMapping(String name, @Nullable String warning) throws IOException { + Request request = 
new Request("DELETE", "/_security/role_mapping/" + name); + if (warning != null) { + request.setOptions( + RequestOptions.DEFAULT.toBuilder().setWarningsHandler(warnings -> warnings.equals(List.of(warning)) == false).build() + ); + } + return client().performRequest(request); + } + + private static ExpressionRoleMapping expressionRoleMapping(String name) { + return new ExpressionRoleMapping( + name, + new FieldExpression("username", List.of(new FieldExpression.FieldValue(randomAlphaOfLength(10)))), + List.of(randomAlphaOfLength(5)), + null, + Map.of(), + true + ); + } + + @SuppressWarnings("unchecked") + private static void expectMappings(List expectedMappings, String... requestedMappingNames) throws IOException { + Map map = responseAsMap(getMappings(requestedMappingNames)); + assertThat( + map.keySet(), + containsInAnyOrder(expectedMappings.stream().map(ExpressionRoleMapping::getName).toList().toArray(new String[0])) + ); + List actualMappings = new ArrayList<>(); + for (Map.Entry entry : map.entrySet()) { + XContentParser body = XContentHelper.mapToXContentParser(XContentParserConfiguration.EMPTY, (Map) entry.getValue()); + ExpressionRoleMapping actual = ExpressionRoleMapping.parse(entry.getKey(), body); + actualMappings.add(actual); + } + assertThat(actualMappings, containsInAnyOrder(expectedMappings.toArray(new ExpressionRoleMapping[0]))); + } + + private static Response getMappings(String... 
requestedMappingNames) throws IOException { + return client().performRequest(new Request("GET", "/_security/role_mapping/" + String.join(",", requestedMappingNames))); + } + + @Override + protected Settings restAdminSettings() { + String token = basicAuthHeaderValue("admin_user", new SecureString("admin-password".toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + @Override + protected Settings restClientSettings() { + String token = basicAuthHeaderValue("admin_user", new SecureString("admin-password".toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + private static String createOrUpdateWarning(String mappingName) { + return "A read-only role mapping with the same name [" + + mappingName + + "] has been previously defined in a configuration file. " + + "Both role mappings will be used to determine role assignments."; + } + + private static String deletionWarning(String mappingName) { + return "A read-only role mapping with the same name [" + + mappingName + + "] has previously been defined in a configuration file. 
" + + "The native role mapping was deleted, but the read-only mapping will remain active " + + "and will be used to determine role assignments."; + }; +} diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java index 3b6ffd0698623..fdd854e7a9673 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java @@ -34,6 +34,7 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; +import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; @@ -45,6 +46,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardCopyOption; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; @@ -63,7 +65,6 @@ import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.notNullValue; @@ -271,21 +272,28 @@ private void assertRoleMappingsSaveOK(CountDownLatch savedClusterState, AtomicLo assertThat(resolveRolesFuture.get(), 
containsInAnyOrder("kibana_user", "fleet_user")); } - // the role mappings are not retrievable by the role mapping action (which only accesses "native" i.e. index-based role mappings) - var request = new GetRoleMappingsRequest(); - request.setNames("everyone_kibana", "everyone_fleet"); - var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertFalse(response.hasMappings()); - assertThat(response.mappings(), emptyArray()); - - // role mappings (with the same names) can also be stored in the "native" store - var putRoleMappingResponse = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana")).actionGet(); - assertTrue(putRoleMappingResponse.isCreated()); - putRoleMappingResponse = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet")).actionGet(); - assertTrue(putRoleMappingResponse.isCreated()); + // the role mappings are retrievable by the role mapping action for BWC + assertGetResponseHasMappings(true, "everyone_kibana", "everyone_fleet"); + + // role mappings (with the same names) can be stored in the "native" store + { + PutRoleMappingResponse response = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana")) + .actionGet(); + assertTrue(response.isCreated()); + response = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet")).actionGet(); + assertTrue(response.isCreated()); + } + { + // deleting role mappings that exist in the native store and in cluster-state should result in success + var response = client().execute(DeleteRoleMappingAction.INSTANCE, deleteRequest("everyone_kibana")).actionGet(); + assertTrue(response.isFound()); + response = client().execute(DeleteRoleMappingAction.INSTANCE, deleteRequest("everyone_fleet")).actionGet(); + assertTrue(response.isFound()); + } + } - public void testRoleMappingsApplied() throws Exception { + public void testClusterStateRoleMappingsAddedThenDeleted() throws Exception 
{ ensureGreen(); var savedClusterState = setupClusterStateListener(internalCluster().getMasterName(), "everyone_kibana"); @@ -294,6 +302,12 @@ public void testRoleMappingsApplied() throws Exception { assertRoleMappingsSaveOK(savedClusterState.v1(), savedClusterState.v2()); logger.info("---> cleanup cluster settings..."); + { + // Deleting non-existent native role mappings returns not found even if they exist in config file + var response = client().execute(DeleteRoleMappingAction.INSTANCE, deleteRequest("everyone_kibana")).get(); + assertFalse(response.isFound()); + } + savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName()); writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter); @@ -308,48 +322,96 @@ public void testRoleMappingsApplied() throws Exception { clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()) ); - // native role mappings are not affected by the removal of the cluster-state based ones + // cluster-state role mapping was removed and is not returned in the API anymore { var request = new GetRoleMappingsRequest(); request.setNames("everyone_kibana", "everyone_fleet"); var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertTrue(response.hasMappings()); - assertThat( - Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(), - containsInAnyOrder("everyone_kibana", "everyone_fleet") - ); + assertFalse(response.hasMappings()); } - // and roles are resolved based on the native role mappings + // no role mappings means no roles are resolved for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); userRoleMapper.resolveRoles( new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), resolveRolesFuture ); - 
assertThat(resolveRolesFuture.get(), contains("kibana_user_native")); + assertThat(resolveRolesFuture.get(), empty()); } + } - { - var request = new DeleteRoleMappingRequest(); - request.setName("everyone_kibana"); - var response = client().execute(DeleteRoleMappingAction.INSTANCE, request).get(); - assertTrue(response.isFound()); - request = new DeleteRoleMappingRequest(); - request.setName("everyone_fleet"); - response = client().execute(DeleteRoleMappingAction.INSTANCE, request).get(); - assertTrue(response.isFound()); + public void testGetRoleMappings() throws Exception { + ensureGreen(); + + final List nativeMappings = List.of("everyone_kibana", "_everyone_kibana", "zzz_mapping", "123_mapping"); + for (var mapping : nativeMappings) { + client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest(mapping)).actionGet(); } - // no roles are resolved now, because both native and cluster-state based stores have been cleared - for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { - PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); - userRoleMapper.resolveRoles( - new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), - resolveRolesFuture - ); - assertThat(resolveRolesFuture.get(), empty()); + var savedClusterState = setupClusterStateListener(internalCluster().getMasterName(), "everyone_kibana"); + writeJSONFile(internalCluster().getMasterName(), testJSON, logger, versionCounter); + boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + + var request = new GetRoleMappingsRequest(); + var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); + assertTrue(response.hasMappings()); + assertThat( + Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(), + containsInAnyOrder( + "everyone_kibana", + ExpressionRoleMapping.addReadOnlySuffix("everyone_kibana"), + 
"_everyone_kibana", + ExpressionRoleMapping.addReadOnlySuffix("everyone_fleet"), + "zzz_mapping", + "123_mapping" + ) + ); + + List readOnlyFlags = new ArrayList<>(); + for (ExpressionRoleMapping mapping : response.mappings()) { + boolean isReadOnly = ExpressionRoleMapping.hasReadOnlySuffix(mapping.getName()) + && mapping.getMetadata().get("_read_only") != null; + readOnlyFlags.add(isReadOnly); } + // assert that cluster-state role mappings come last + assertThat(readOnlyFlags, contains(false, false, false, false, true, true)); + + // it's possible to delete overlapping native role mapping + assertTrue(client().execute(DeleteRoleMappingAction.INSTANCE, deleteRequest("everyone_kibana")).actionGet().isFound()); + + // Fetch a specific file based role + request = new GetRoleMappingsRequest(); + request.setNames(ExpressionRoleMapping.addReadOnlySuffix("everyone_kibana")); + response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); + assertTrue(response.hasMappings()); + assertThat( + Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(), + containsInAnyOrder(ExpressionRoleMapping.addReadOnlySuffix("everyone_kibana")) + ); + + savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName()); + writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter); + awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + + final ClusterStateResponse clusterStateResponse = clusterAdmin().state( + new ClusterStateRequest(TEST_REQUEST_TIMEOUT).waitForMetadataVersion(savedClusterState.v2().get()) + ).get(); + + assertNull( + clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()) + ); + + // Make sure remaining native mappings can still be fetched + request = new GetRoleMappingsRequest(); + response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); + 
assertTrue(response.hasMappings()); + assertThat( + Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(), + containsInAnyOrder("_everyone_kibana", "zzz_mapping", "123_mapping") + ); } public static Tuple setupClusterStateListenerForError( @@ -434,11 +496,8 @@ public void testRoleMappingApplyWithSecurityIndexClosed() throws Exception { boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); assertTrue(awaitSuccessful); - // no native role mappings exist - var request = new GetRoleMappingsRequest(); - request.setNames("everyone_kibana", "everyone_fleet"); - var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertFalse(response.hasMappings()); + // even if index is closed, cluster-state role mappings are still returned + assertGetResponseHasMappings(true, "everyone_kibana", "everyone_fleet"); // cluster state settings are also applied var clusterStateResponse = clusterAdmin().state( @@ -477,6 +536,12 @@ public void testRoleMappingApplyWithSecurityIndexClosed() throws Exception { } } + private DeleteRoleMappingRequest deleteRequest(String name) { + var request = new DeleteRoleMappingRequest(); + request.setName(name); + return request; + } + private PutRoleMappingRequest sampleRestRequest(String name) throws Exception { var json = """ { @@ -495,4 +560,19 @@ private PutRoleMappingRequest sampleRestRequest(String name) throws Exception { return new PutRoleMappingRequestBuilder(null).source(name, parser).request(); } } + + private static void assertGetResponseHasMappings(boolean readOnly, String... 
mappings) throws InterruptedException, ExecutionException { + var request = new GetRoleMappingsRequest(); + request.setNames(mappings); + var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); + assertTrue(response.hasMappings()); + assertThat( + Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(), + containsInAnyOrder( + Arrays.stream(mappings) + .map(mapping -> readOnly ? ExpressionRoleMapping.addReadOnlySuffix(mapping) : mapping) + .toArray(String[]::new) + ) + ); + } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsRestartIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsRestartIT.java index 6c6582138ce89..97a5f080cee4e 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsRestartIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsRestartIT.java @@ -30,6 +30,7 @@ import static org.elasticsearch.integration.RoleMappingFileSettingsIT.setupClusterStateListenerForCleanup; import static org.elasticsearch.integration.RoleMappingFileSettingsIT.writeJSONFile; import static org.elasticsearch.integration.RoleMappingFileSettingsIT.writeJSONFileWithoutVersionIncrement; +import static org.elasticsearch.xpack.core.security.authz.RoleMappingMetadata.METADATA_NAME_FIELD; import static org.hamcrest.Matchers.containsInAnyOrder; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) @@ -123,7 +124,7 @@ public void testReservedStatePersistsOnRestart() throws Exception { new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), List.of("kibana_user"), List.of(), - Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something"), + Map.of("uuid", 
"b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something", METADATA_NAME_FIELD, "everyone_kibana_alone"), true ), new ExpressionRoleMapping( @@ -131,7 +132,14 @@ public void testReservedStatePersistsOnRestart() throws Exception { new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), List.of("fleet_user"), List.of(), - Map.of("uuid", "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", "_foo", "something_else"), + Map.of( + "uuid", + "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", + "_foo", + "something_else", + METADATA_NAME_FIELD, + "everyone_fleet_alone" + ), false ) ); @@ -141,26 +149,29 @@ public void testReservedStatePersistsOnRestart() throws Exception { ensureGreen(); awaitFileSettingsWatcher(); - // assert busy to give mappings time to update after restart; otherwise, the role mapping names might be dummy values - // `name_not_available_after_deserialization` - assertBusy( - () -> assertRoleMappingsInClusterState( - new ExpressionRoleMapping( - "everyone_kibana_alone", - new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), - List.of("kibana_user"), - List.of(), - Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something"), - true + assertRoleMappingsInClusterState( + new ExpressionRoleMapping( + "everyone_kibana_alone", + new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), + List.of("kibana_user"), + List.of(), + Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something", METADATA_NAME_FIELD, "everyone_kibana_alone"), + true + ), + new ExpressionRoleMapping( + "everyone_fleet_alone", + new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), + List.of("fleet_user"), + List.of(), + Map.of( + "uuid", + "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", + "_foo", + "something_else", + METADATA_NAME_FIELD, + "everyone_fleet_alone" ), - new ExpressionRoleMapping( - "everyone_fleet_alone", - new FieldExpression("username", List.of(new 
FieldExpression.FieldValue("*"))), - List.of("fleet_user"), - List.of(), - Map.of("uuid", "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", "_foo", "something_else"), - false - ) + false ) ); @@ -197,7 +208,7 @@ public void testFileSettingsReprocessedOnRestartWithoutVersionChange() throws Ex new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), List.of("kibana_user"), List.of(), - Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something"), + Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something", METADATA_NAME_FIELD, "everyone_kibana_alone"), true ), new ExpressionRoleMapping( @@ -205,7 +216,14 @@ public void testFileSettingsReprocessedOnRestartWithoutVersionChange() throws Ex new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), List.of("fleet_user"), List.of(), - Map.of("uuid", "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", "_foo", "something_else"), + Map.of( + "uuid", + "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", + "_foo", + "something_else", + METADATA_NAME_FIELD, + "everyone_fleet_alone" + ), false ) ); @@ -225,7 +243,7 @@ public void testFileSettingsReprocessedOnRestartWithoutVersionChange() throws Ex new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), List.of("kibana_user"), List.of(), - Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something"), + Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something", METADATA_NAME_FIELD, "everyone_kibana_alone"), true ), new ExpressionRoleMapping( @@ -233,7 +251,14 @@ public void testFileSettingsReprocessedOnRestartWithoutVersionChange() throws Ex new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), List.of("fleet_user"), List.of(), - Map.of("uuid", "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", "_foo", "something_else"), + Map.of( + "uuid", + "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", + "_foo", + "something_else", + METADATA_NAME_FIELD, + "everyone_fleet_alone" + ), false ) ); 
@@ -251,7 +276,14 @@ public void testFileSettingsReprocessedOnRestartWithoutVersionChange() throws Ex new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), List.of("kibana_user", "kibana_admin"), List.of(), - Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something"), + Map.of( + "uuid", + "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", + "_foo", + "something", + METADATA_NAME_FIELD, + "everyone_kibana_together" + ), true ) ) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 79a00fa1293bd..8f32bcf7ace8a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -899,6 +899,7 @@ Collection createComponents( components.add(nativeUsersStore); components.add(new PluginComponentBinding<>(NativeRoleMappingStore.class, nativeRoleMappingStore)); components.add(new PluginComponentBinding<>(UserRoleMapper.class, userRoleMapper)); + components.add(clusterStateRoleMapper); components.add(reservedRealm); components.add(realms); this.realms.set(realms); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java index 73d1a1abcdb50..837b475dea68f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java @@ -43,7 +43,7 @@ public String name() { @Override public TransformState transform(Object source, TransformState prevState) throws Exception { @SuppressWarnings("unchecked") - Set roleMappings 
= validate((List) source); + Set roleMappings = validateAndTranslate((List) source); RoleMappingMetadata newRoleMappingMetadata = new RoleMappingMetadata(roleMappings); if (newRoleMappingMetadata.equals(RoleMappingMetadata.getFromClusterState(prevState.state()))) { return prevState; @@ -71,7 +71,7 @@ public List fromXContent(XContentParser parser) throws IO return result; } - private Set validate(List roleMappings) { + private Set validateAndTranslate(List roleMappings) { var exceptions = new ArrayList(); for (var roleMapping : roleMappings) { // File based defined role mappings are allowed to use MetadataUtils.RESERVED_PREFIX @@ -85,6 +85,8 @@ private Set validate(List roleMapp exceptions.forEach(illegalArgumentException::addSuppressed); throw illegalArgumentException; } - return roleMappings.stream().map(PutRoleMappingRequest::getMapping).collect(Collectors.toUnmodifiableSet()); + return roleMappings.stream() + .map(r -> RoleMappingMetadata.copyWithNameInMetadata(r.getMapping())) + .collect(Collectors.toUnmodifiableSet()); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java index 74129facae70a..b1fdf2e90dd46 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.injection.guice.Inject; import 
org.elasticsearch.tasks.Task; @@ -16,17 +17,19 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingAction; import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingResponse; +import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; public class TransportDeleteRoleMappingAction extends HandledTransportAction { - private final NativeRoleMappingStore roleMappingStore; + private final ClusterStateRoleMapper clusterStateRoleMapper; @Inject public TransportDeleteRoleMappingAction( ActionFilters actionFilters, TransportService transportService, - NativeRoleMappingStore roleMappingStore + NativeRoleMappingStore roleMappingStore, + ClusterStateRoleMapper clusterStateRoleMapper ) { super( DeleteRoleMappingAction.NAME, @@ -36,10 +39,24 @@ public TransportDeleteRoleMappingAction( EsExecutors.DIRECT_EXECUTOR_SERVICE ); this.roleMappingStore = roleMappingStore; + this.clusterStateRoleMapper = clusterStateRoleMapper; } @Override protected void doExecute(Task task, DeleteRoleMappingRequest request, ActionListener listener) { - roleMappingStore.deleteRoleMapping(request, listener.safeMap(DeleteRoleMappingResponse::new)); + roleMappingStore.deleteRoleMapping(request, listener.safeMap(found -> { + if (found && clusterStateRoleMapper.hasMapping(request.getName())) { + // Allow to delete a mapping with the same name in the native role mapping store as the file_settings namespace, but + // add a warning header to signal to the caller that this could be a problem. + HeaderWarning.addWarning( + "A read-only role mapping with the same name [" + + request.getName() + + "] has previously been defined in a configuration file. 
" + + "The native role mapping was deleted, but the read-only mapping will remain active " + + "and will be used to determine role assignments." + ); + } + return new DeleteRoleMappingResponse(found); + })); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java index ac0d3177cca09..5f16b095db0ef 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java @@ -6,6 +6,8 @@ */ package org.elasticsearch.xpack.security.action.rolemapping; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; @@ -17,21 +19,31 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsResponse; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; +import org.elasticsearch.xpack.core.security.authz.RoleMappingMetadata; +import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; import java.util.HashSet; +import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; public class TransportGetRoleMappingsAction extends HandledTransportAction { + private static final Logger logger = 
LogManager.getLogger(TransportGetRoleMappingsAction.class); private final NativeRoleMappingStore roleMappingStore; + private final ClusterStateRoleMapper clusterStateRoleMapper; @Inject public TransportGetRoleMappingsAction( ActionFilters actionFilters, TransportService transportService, - NativeRoleMappingStore nativeRoleMappingStore + NativeRoleMappingStore nativeRoleMappingStore, + ClusterStateRoleMapper clusterStateRoleMapper ) { super( GetRoleMappingsAction.NAME, @@ -41,19 +53,84 @@ public TransportGetRoleMappingsAction( EsExecutors.DIRECT_EXECUTOR_SERVICE ); this.roleMappingStore = nativeRoleMappingStore; + this.clusterStateRoleMapper = clusterStateRoleMapper; } @Override protected void doExecute(Task task, final GetRoleMappingsRequest request, final ActionListener listener) { final Set names; if (request.getNames() == null || request.getNames().length == 0) { - names = null; + names = Set.of(); } else { names = new HashSet<>(Arrays.asList(request.getNames())); } - this.roleMappingStore.getRoleMappings(names, ActionListener.wrap(mappings -> { - ExpressionRoleMapping[] array = mappings.toArray(new ExpressionRoleMapping[mappings.size()]); - listener.onResponse(new GetRoleMappingsResponse(array)); + roleMappingStore.getRoleMappings(names, ActionListener.wrap(nativeRoleMappings -> { + final Collection clusterStateRoleMappings = clusterStateRoleMapper.getMappings( + // if the API was queried with a reserved suffix for any of the names, we need to remove it because role mappings are + // stored without it in cluster-state + removeReadOnlySuffixIfPresent(names) + ); + listener.onResponse(buildResponse(clusterStateRoleMappings, nativeRoleMappings)); }, listener::onFailure)); } + + private GetRoleMappingsResponse buildResponse( + Collection clusterStateMappings, + Collection nativeMappings + ) { + Stream translatedClusterStateMappings = clusterStateMappings.stream().filter(roleMapping -> { + if (RoleMappingMetadata.hasFallbackName(roleMapping)) { + logger.warn( + 
"Role mapping retrieved from cluster-state with an ambiguous name. It will be omitted from the API response." + + "This is likely a transient issue during node start-up." + ); + return false; + } + return true; + }).map(this::translateClusterStateMapping); + return new GetRoleMappingsResponse( + Stream.concat(nativeMappings.stream(), translatedClusterStateMappings).toArray(ExpressionRoleMapping[]::new) + ); + } + + private Set removeReadOnlySuffixIfPresent(Set names) { + return names.stream().map(ExpressionRoleMapping::removeReadOnlySuffixIfPresent).collect(Collectors.toSet()); + } + + /** + * Translator method for ensuring unique API names and marking cluster-state role mappings as read-only. + * Role mappings retrieved from cluster-state are surfaced through both the transport and REST layers, + * along with native role mappings. Unlike native role mappings, cluster-state role mappings are + * read-only and cannot be modified via APIs. It is possible for cluster-state and native role mappings + * to have overlapping names. + * + *

+     * <p>
+     * This does the following:
+     * </p>
+     * <ol>
+     *     <li>Appends a reserved suffix to cluster-state role mapping names to avoid conflicts with native role mappings.</li>
+     *     <li>Marks the metadata of cluster-state role mappings with a reserved read-only flag.</li>
+     *     <li>Removes internal metadata flag used in processing (see {@link RoleMappingMetadata#METADATA_NAME_FIELD}).</li>
+     * </ol>
      + */ + private ExpressionRoleMapping translateClusterStateMapping(ExpressionRoleMapping mapping) { + Map metadata = new HashMap<>(mapping.getMetadata()); + if (metadata.put(ExpressionRoleMapping.READ_ONLY_ROLE_MAPPING_METADATA_FLAG, true) != null) { + logger.error( + "Metadata field [{}] is reserved and will be overwritten with an internal system value. " + + "Rename this field in your role mapping configuration.", + ExpressionRoleMapping.READ_ONLY_ROLE_MAPPING_METADATA_FLAG + ); + } + metadata.remove(RoleMappingMetadata.METADATA_NAME_FIELD); + return new ExpressionRoleMapping( + ExpressionRoleMapping.addReadOnlySuffix(mapping.getName()), + mapping.getExpression(), + mapping.getRoles(), + mapping.getRoleTemplates(), + metadata, + mapping.isEnabled() + ); + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java index 82a3b4f000064..682ade925d2ec 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; @@ -16,24 +17,41 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; import 
org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; +import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; +import static org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping.validateNoReadOnlySuffix; + public class TransportPutRoleMappingAction extends HandledTransportAction { private final NativeRoleMappingStore roleMappingStore; + private final ClusterStateRoleMapper clusterStateRoleMapper; @Inject public TransportPutRoleMappingAction( ActionFilters actionFilters, TransportService transportService, - NativeRoleMappingStore roleMappingStore + NativeRoleMappingStore roleMappingStore, + ClusterStateRoleMapper clusterStateRoleMapper ) { super(PutRoleMappingAction.NAME, transportService, actionFilters, PutRoleMappingRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); this.roleMappingStore = roleMappingStore; + this.clusterStateRoleMapper = clusterStateRoleMapper; } @Override protected void doExecute(Task task, final PutRoleMappingRequest request, final ActionListener listener) { + validateNoReadOnlySuffix(request.getName()); + if (clusterStateRoleMapper.hasMapping(request.getName())) { + // Allow to define a mapping with the same name in the native role mapping store as the file_settings namespace, but add a + // warning header to signal to the caller that this could be a problem. + HeaderWarning.addWarning( + "A read-only role mapping with the same name [" + + request.getName() + + "] has been previously defined in a configuration file. " + + "Both role mappings will be used to determine role assignments." 
+ ); + } roleMappingStore.putRoleMapping( request, ActionListener.wrap(created -> listener.onResponse(new PutRoleMappingResponse(created)), listener::onFailure) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapper.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapper.java index 5dea6a938263c..99e3311283920 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapper.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapper.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; import org.elasticsearch.script.ScriptService; import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; @@ -21,6 +22,7 @@ import java.util.Objects; import java.util.Set; +import java.util.stream.Collectors; import static org.elasticsearch.xpack.core.security.SecurityExtension.SecurityComponents; @@ -28,8 +30,7 @@ * A role mapper the reads the role mapping rules (i.e. {@link ExpressionRoleMapping}s) from the cluster state * (i.e. {@link RoleMappingMetadata}). This is not enabled by default. 
*/ -public final class ClusterStateRoleMapper extends AbstractRoleMapperClearRealmCache implements ClusterStateListener { - +public class ClusterStateRoleMapper extends AbstractRoleMapperClearRealmCache implements ClusterStateListener { /** * This setting is never registered by the xpack security plugin - in order to disable the * cluster-state based role mapper another plugin must register it as a boolean setting @@ -81,13 +82,26 @@ public void clusterChanged(ClusterChangedEvent event) { } } - private Set getMappings() { + public boolean hasMapping(String name) { + if (enabled == false) { + return false; + } + return false == getMappings(Set.of(name)).isEmpty(); + } + + public Set getMappings() { + return getMappings(null); + } + + public Set getMappings(@Nullable Set names) { if (enabled == false) { return Set.of(); - } else { - final Set mappings = RoleMappingMetadata.getFromClusterState(clusterService.state()).getRoleMappings(); - logger.trace("Retrieved [{}] mapping(s) from cluster state", mappings.size()); + } + final Set mappings = RoleMappingMetadata.getFromClusterState(clusterService.state()).getRoleMappings(); + logger.trace("Retrieved [{}] mapping(s) from cluster state", mappings.size()); + if (names == null || names.isEmpty()) { return mappings; } + return mappings.stream().filter(roleMapping -> names.contains(roleMapping.getName())).collect(Collectors.toSet()); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java index 6e8698f095d32..010c19e8cc1b1 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java @@ -9,7 
+9,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; @@ -19,21 +18,26 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsResponse; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; +import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; -import org.hamcrest.Matchers; import org.junit.Before; -import java.util.Arrays; +import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.Set; +import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; -import static org.hamcrest.Matchers.arrayContaining; +import static org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping.READ_ONLY_ROLE_MAPPING_METADATA_FLAG; +import static org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping.READ_ONLY_ROLE_MAPPING_SUFFIX; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.notNullValue; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anySet; import static org.mockito.ArgumentMatchers.nullable; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; @@ -42,8 +46,10 @@ public class TransportGetRoleMappingsActionTests extends ESTestCase { private NativeRoleMappingStore store; 
private TransportGetRoleMappingsAction action; - private AtomicReference> namesRef; - private List result; + private AtomicReference> nativeNamesRef; + private AtomicReference> clusterStateNamesRef; + private List nativeMappings; + private Set clusterStateMappings; @SuppressWarnings("unchecked") @Before @@ -58,68 +64,219 @@ public void setupMocks() { null, Collections.emptySet() ); - action = new TransportGetRoleMappingsAction(mock(ActionFilters.class), transportService, store); + ClusterStateRoleMapper clusterStateRoleMapper = mock(); + action = new TransportGetRoleMappingsAction(mock(ActionFilters.class), transportService, store, clusterStateRoleMapper); - namesRef = new AtomicReference<>(null); - result = Collections.emptyList(); + nativeNamesRef = new AtomicReference<>(null); + clusterStateNamesRef = new AtomicReference<>(null); + nativeMappings = Collections.emptyList(); + clusterStateMappings = Collections.emptySet(); + + doAnswer(invocation -> { + Object[] args = invocation.getArguments(); + assert args.length == 1; + clusterStateNamesRef.set((Set) args[0]); + return clusterStateMappings; + }).when(clusterStateRoleMapper).getMappings(anySet()); doAnswer(invocation -> { Object[] args = invocation.getArguments(); assert args.length == 2; - namesRef.set((Set) args[0]); + nativeNamesRef.set((Set) args[0]); ActionListener> listener = (ActionListener>) args[1]; - listener.onResponse(result); + listener.onResponse(nativeMappings); return null; }).when(store).getRoleMappings(nullable(Set.class), any(ActionListener.class)); } - public void testGetSingleRole() throws Exception { - final PlainActionFuture future = new PlainActionFuture<>(); - final GetRoleMappingsRequest request = new GetRoleMappingsRequest(); - request.setNames("everyone"); + public void testGetSingleRoleMappingNativeOnly() throws Exception { + testGetMappings(List.of(mapping("everyone")), Collections.emptySet(), Set.of("everyone"), Set.of("everyone"), "everyone"); + } - final ExpressionRoleMapping 
mapping = mock(ExpressionRoleMapping.class); - result = Collections.singletonList(mapping); - action.doExecute(mock(Task.class), request, future); - assertThat(future.get(), notNullValue()); - assertThat(future.get().mappings(), arrayContaining(mapping)); - assertThat(namesRef.get(), containsInAnyOrder("everyone")); + public void testGetMultipleNamedRoleMappingsNativeOnly() throws Exception { + testGetMappings( + List.of(mapping("admin"), mapping("engineering"), mapping("sales"), mapping("finance")), + Collections.emptySet(), + Set.of("admin", "engineering", "sales", "finance"), + Set.of("admin", "engineering", "sales", "finance"), + "admin", + "engineering", + "sales", + "finance" + ); } - public void testGetMultipleNamedRoles() throws Exception { - final PlainActionFuture future = new PlainActionFuture<>(); - final GetRoleMappingsRequest request = new GetRoleMappingsRequest(); - request.setNames("admin", "engineering", "sales", "finance"); + public void testGetAllRoleMappingsNativeOnly() throws Exception { + testGetMappings( + List.of(mapping("admin"), mapping("engineering"), mapping("sales"), mapping("finance")), + Collections.emptySet(), + Set.of(), + Set.of() + ); + } - final ExpressionRoleMapping mapping1 = mock(ExpressionRoleMapping.class); - final ExpressionRoleMapping mapping2 = mock(ExpressionRoleMapping.class); - final ExpressionRoleMapping mapping3 = mock(ExpressionRoleMapping.class); - result = Arrays.asList(mapping1, mapping2, mapping3); + public void testGetSingleRoleMappingClusterStateOnly() throws Exception { + testGetMappings(List.of(), Set.of(mapping("everyone")), Set.of("everyone"), Set.of("everyone"), "everyone"); + } - action.doExecute(mock(Task.class), request, future); + public void testGetMultipleNamedRoleMappingsClusterStateOnly() throws Exception { + testGetMappings( + List.of(), + Set.of(mapping("admin"), mapping("engineering"), mapping("sales"), mapping("finance")), + Set.of("admin", "engineering", "sales", "finance"), + Set.of("admin", 
"engineering", "sales", "finance"), + "admin", + "engineering", + "sales", + "finance" + ); + } + + public void testGetAllRoleMappingsClusterStateOnly() throws Exception { + testGetMappings( + List.of(), + Set.of(mapping("admin"), mapping("engineering"), mapping("sales"), mapping("finance")), + Set.of(), + Set.of() + ); + } + + public void testGetSingleRoleMappingBoth() throws Exception { + testGetMappings(List.of(mapping("everyone")), Set.of(mapping("everyone")), Set.of("everyone"), Set.of("everyone"), "everyone"); + } + + public void testGetMultipleNamedRoleMappingsBoth() throws Exception { + testGetMappings( + List.of(mapping("admin"), mapping("engineering")), + Set.of(mapping("sales"), mapping("finance")), + Set.of("admin", "engineering", "sales", "finance"), + Set.of("admin", "engineering", "sales", "finance"), + "admin", + "engineering", + "sales", + "finance" + ); + } + + public void testGetAllRoleMappingsClusterBoth() throws Exception { + testGetMappings(List.of(mapping("admin"), mapping("engineering")), Set.of(mapping("admin"), mapping("sales")), Set.of(), Set.of()); + } + + public void testGetSingleRoleMappingQueryWithReadOnlySuffix() throws Exception { + testGetMappings( + List.of(), + Set.of(mapping("everyone")), + // suffix not stripped for native store query + Set.of("everyone" + READ_ONLY_ROLE_MAPPING_SUFFIX), + // suffix is stripped for cluster state store + Set.of("everyone"), + "everyone" + READ_ONLY_ROLE_MAPPING_SUFFIX + ); + + testGetMappings( + List.of(), + Set.of(mapping("everyoneread-only-operator-mapping")), + Set.of( + "everyoneread-only-operator-mapping", + "everyone-read-only-operator-mapping-", + "everyone-read-only-operator-mapping-more" + ), + // suffix that is similar but not the same is not stripped + Set.of( + "everyoneread-only-operator-mapping", + "everyone-read-only-operator-mapping-", + "everyone-read-only-operator-mapping-more" + ), + "everyoneread-only-operator-mapping", + "everyone-read-only-operator-mapping-", + 
"everyone-read-only-operator-mapping-more" + ); + + testGetMappings( + List.of(mapping("everyone")), + Set.of(mapping("everyone")), + // suffix not stripped for native store query + Set.of("everyone" + READ_ONLY_ROLE_MAPPING_SUFFIX, "everyone"), + // suffix is stripped for cluster state store + Set.of("everyone"), + "everyone" + READ_ONLY_ROLE_MAPPING_SUFFIX, + "everyone" + ); + } + + public void testClusterStateRoleMappingWithFallbackNameOmitted() throws ExecutionException, InterruptedException { + testGetMappings( + List.of(), + Set.of(mapping("name_not_available_after_deserialization")), + Set.of(), + Set.of("name_not_available_after_deserialization"), + Set.of("name_not_available_after_deserialization"), + "name_not_available_after_deserialization" + ); - final GetRoleMappingsResponse response = future.get(); - assertThat(response, notNullValue()); - assertThat(response.mappings(), arrayContainingInAnyOrder(mapping1, mapping2, mapping3)); - assertThat(namesRef.get(), containsInAnyOrder("admin", "engineering", "sales", "finance")); + testGetMappings( + List.of(mapping("name_not_available_after_deserialization")), + Set.of(mapping("name_not_available_after_deserialization")), + Set.of(), + Set.of("name_not_available_after_deserialization"), + Set.of("name_not_available_after_deserialization"), + "name_not_available_after_deserialization" + ); + } + + private void testGetMappings( + List returnedNativeMappings, + Set returnedClusterStateMappings, + Set expectedNativeNames, + Set expectedClusterStateNames, + String... 
names + ) throws InterruptedException, ExecutionException { + testGetMappings( + returnedNativeMappings, + returnedClusterStateMappings, + returnedClusterStateMappings.stream().map(this::expectedClusterStateMapping).collect(Collectors.toSet()), + expectedNativeNames, + expectedClusterStateNames, + names + ); } - public void testGetAllRoles() throws Exception { + private void testGetMappings( + List returnedNativeMappings, + Set returnedClusterStateMappings, + Set expectedClusterStateMappings, + Set expectedNativeNames, + Set expectedClusterStateNames, + String... names + ) throws InterruptedException, ExecutionException { final PlainActionFuture future = new PlainActionFuture<>(); final GetRoleMappingsRequest request = new GetRoleMappingsRequest(); - request.setNames(Strings.EMPTY_ARRAY); - - final ExpressionRoleMapping mapping1 = mock(ExpressionRoleMapping.class); - final ExpressionRoleMapping mapping2 = mock(ExpressionRoleMapping.class); - final ExpressionRoleMapping mapping3 = mock(ExpressionRoleMapping.class); - result = Arrays.asList(mapping1, mapping2, mapping3); + request.setNames(names); + nativeMappings = returnedNativeMappings; + clusterStateMappings = returnedClusterStateMappings; action.doExecute(mock(Task.class), request, future); + assertThat(future.get(), notNullValue()); + List combined = new ArrayList<>(returnedNativeMappings); + combined.addAll(expectedClusterStateMappings); + ExpressionRoleMapping[] actualMappings = future.get().mappings(); + assertThat(actualMappings, arrayContainingInAnyOrder(combined.toArray(new ExpressionRoleMapping[0]))); + assertThat(nativeNamesRef.get(), containsInAnyOrder(expectedNativeNames.toArray(new String[0]))); + assertThat(clusterStateNamesRef.get(), containsInAnyOrder(expectedClusterStateNames.toArray(new String[0]))); + } - final GetRoleMappingsResponse response = future.get(); - assertThat(response, notNullValue()); - assertThat(response.mappings(), arrayContainingInAnyOrder(mapping1, mapping2, mapping3)); - 
assertThat(namesRef.get(), Matchers.nullValue(Set.class)); + private ExpressionRoleMapping mapping(String name) { + return new ExpressionRoleMapping(name, null, null, null, Map.of(), true); } + private ExpressionRoleMapping expectedClusterStateMapping(ExpressionRoleMapping mapping) { + return new ExpressionRoleMapping( + mapping.getName() + READ_ONLY_ROLE_MAPPING_SUFFIX, + null, + null, + null, + Map.of(READ_ONLY_ROLE_MAPPING_METADATA_FLAG, true), + true + ); + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java index 6f789a10a3a6c..6d1ac864d20fd 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.FieldExpression; +import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import org.junit.Before; @@ -29,18 +30,21 @@ import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.iterableWithSize; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; +import static 
org.mockito.Mockito.when; public class TransportPutRoleMappingActionTests extends ESTestCase { private NativeRoleMappingStore store; private TransportPutRoleMappingAction action; private AtomicReference requestRef; + private ClusterStateRoleMapper clusterStateRoleMapper; @SuppressWarnings("unchecked") @Before @@ -55,7 +59,9 @@ public void setupMocks() { null, Collections.emptySet() ); - action = new TransportPutRoleMappingAction(mock(ActionFilters.class), transportService, store); + clusterStateRoleMapper = mock(); + when(clusterStateRoleMapper.hasMapping(any())).thenReturn(false); + action = new TransportPutRoleMappingAction(mock(ActionFilters.class), transportService, store, clusterStateRoleMapper); requestRef = new AtomicReference<>(null); @@ -85,6 +91,41 @@ public void testPutValidMapping() throws Exception { assertThat(mapping.getMetadata().get("dumb"), equalTo(true)); } + public void testValidMappingClashingClusterStateMapping() throws Exception { + final FieldExpression expression = new FieldExpression("username", Collections.singletonList(new FieldExpression.FieldValue("*"))); + final PutRoleMappingResponse response = put("anarchy", expression, "superuser", Collections.singletonMap("dumb", true)); + when(clusterStateRoleMapper.hasMapping(any())).thenReturn(true); + + assertThat(response.isCreated(), equalTo(true)); + + final ExpressionRoleMapping mapping = requestRef.get().getMapping(); + assertThat(mapping.getExpression(), is(expression)); + assertThat(mapping.isEnabled(), equalTo(true)); + assertThat(mapping.getName(), equalTo("anarchy")); + assertThat(mapping.getRoles(), iterableWithSize(1)); + assertThat(mapping.getRoles(), contains("superuser")); + assertThat(mapping.getMetadata(), aMapWithSize(1)); + assertThat(mapping.getMetadata().get("dumb"), equalTo(true)); + } + + public void testInvalidSuffix() { + final FieldExpression expression = new FieldExpression("username", Collections.singletonList(new FieldExpression.FieldValue("*"))); + String name = 
ExpressionRoleMapping.addReadOnlySuffix("anarchy"); + final var ex = expectThrows(IllegalArgumentException.class, () -> { + put(name, expression, "superuser", Collections.singletonMap("dumb", true)); + }); + assertThat( + ex.getMessage(), + containsString( + "Invalid mapping name [" + + name + + "]. [" + + ExpressionRoleMapping.READ_ONLY_ROLE_MAPPING_SUFFIX + + "] is not an allowed suffix" + ) + ); + } + private PutRoleMappingResponse put(String name, FieldExpression expression, String role, Map metadata) throws Exception { final PutRoleMappingRequest request = new PutRoleMappingRequest(); From ac7db5d11e771674df5957077acae1e855eb6f0c Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 22 Oct 2024 10:36:40 -0400 Subject: [PATCH 284/449] ESQL: Skip unsupported grapheme cluster test (#115258) (#115321) This skips the test for reversing grapheme clusters if the node doesn't support reversing grapheme clusters. Nodes that are using a jdk before 20 won't support reversing grapheme clusters because they don't have https://bugs.openjdk.org/browse/JDK-8292387 This reworks `EsqlCapabilities` so we can easilly register it only if we're on jdk 20: ``` FN_REVERSE_GRAPHEME_CLUSTERS(Runtime.version().feature() < 20), ``` Closes #114537 Closes #114535 Closes #114536 Closes #114558 Closes #114559 Closes #114560 --- .../src/main/resources/string.csv-spec | 1 + .../xpack/esql/action/EsqlCapabilities.java | 48 ++++++++++--------- .../elasticsearch/xpack/esql/CsvTests.java | 2 +- 3 files changed, 27 insertions(+), 24 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 5313e6630c75d..dd9d519649c01 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -1236,6 +1236,7 @@ off_on_holiday:keyword | back_home_again:keyword reverseGraphemeClusters 
required_capability: fn_reverse +required_capability: fn_reverse_grapheme_clusters ROW message = "áéíóúàèìòùâêîôû😊👍🏽🎉💖कंठाी" | EVAL message_reversed = REVERSE(message); message:keyword | message_reversed:keyword diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 3c39406198da3..df9f14a6ac227 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -32,6 +32,13 @@ public enum Cap { */ FN_REVERSE, + /** + * Support for reversing whole grapheme clusters. This is not supported + * on JDK versions less than 20 which are not supported in ES 9.0.0+ but this + * exists to keep the {@code 8.x} branch similar to the {@code main} branch. + */ + FN_REVERSE_GRAPHEME_CLUSTERS, + /** * Support for function {@code CBRT}. Done in #108574. */ @@ -133,7 +140,7 @@ public enum Cap { * - fixed variable shadowing * - fixed Join.references(), requiring breaking change to Join serialization */ - LOOKUP_V4(true), + LOOKUP_V4(Build.current().isSnapshot()), /** * Support for requesting the "REPEAT" command. @@ -279,7 +286,7 @@ public enum Cap { /** * Support for match operator */ - MATCH_OPERATOR(true), + MATCH_OPERATOR(Build.current().isSnapshot()), /** * Removing support for the {@code META} keyword. @@ -349,7 +356,7 @@ public enum Cap { /** * Supported the text categorization function "CATEGORIZE". */ - CATEGORIZE(true), + CATEGORIZE(Build.current().isSnapshot()), /** * QSTR function @@ -375,7 +382,7 @@ public enum Cap { /** * Support named parameters for field names. */ - NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES(true), + NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES(Build.current().isSnapshot()), /** * Fix sorting not allowed on _source and counters. 
@@ -401,32 +408,22 @@ public enum Cap { */ SEMANTIC_TEXT_TYPE(EsqlCorePlugin.SEMANTIC_TEXT_FEATURE_FLAG); - private final boolean snapshotOnly; - private final FeatureFlag featureFlag; + private final boolean enabled; Cap() { - this(false, null); + this.enabled = true; }; - Cap(boolean snapshotOnly) { - this(snapshotOnly, null); + Cap(boolean enabled) { + this.enabled = enabled; }; Cap(FeatureFlag featureFlag) { - this(false, featureFlag); - } - - Cap(boolean snapshotOnly, FeatureFlag featureFlag) { - assert featureFlag == null || snapshotOnly == false; - this.snapshotOnly = snapshotOnly; - this.featureFlag = featureFlag; + this.enabled = featureFlag.isEnabled(); } public boolean isEnabled() { - if (featureFlag == null) { - return Build.current().isSnapshot() || this.snapshotOnly == false; - } - return featureFlag.isEnabled(); + return enabled; } public String capabilityName() { @@ -434,12 +431,17 @@ public String capabilityName() { } } - public static final Set CAPABILITIES = capabilities(); + public static final Set CAPABILITIES = capabilities(false); - private static Set capabilities() { + /** + * Get a {@link Set} of all capabilities. If the {@code all} parameter is {@code false} + * then only enabled capabilities are returned - otherwise all + * known capabilities are returned. 
+ */ + public static Set capabilities(boolean all) { List caps = new ArrayList<>(); for (Cap cap : Cap.values()) { - if (cap.isEnabled()) { + if (all || cap.isEnabled()) { caps.add(cap.capabilityName()); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index ce072e7b0a438..63233f0c46a0d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -257,7 +257,7 @@ public final void test() throws Throwable { assertThat( "Capability is not included in the enabled list capabilities on a snapshot build. Spelling mistake?", testCase.requiredCapabilities, - everyItem(in(EsqlCapabilities.CAPABILITIES)) + everyItem(in(EsqlCapabilities.capabilities(true))) ); } else { for (EsqlCapabilities.Cap c : EsqlCapabilities.Cap.values()) { From bd54bffab2e421e25e7066b70417d279625fd705 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 22 Oct 2024 16:43:42 +0200 Subject: [PATCH 285/449] Rename methods in o.e.x.c.security.support.Automatons (#114594) Lucene 10 stopped relying in on automaton minimization and moved the underlying Hopcroft algorithm to test code (for reasoning see https://github.com/apache/lucene/pull/528). With the upgrade to Lucene 10 we currently also only determinize automata. The security Automatons utility class currently contains several methods that sound like they would minimize the automaton, but this has changed so this PR also changes the method names accordingly. 
--- .../permission/ApplicationPermission.java | 2 +- .../authz/permission/ClusterPermission.java | 4 ++-- .../authz/permission/FieldPermissions.java | 9 ++++++--- .../permission/FieldPermissionsCache.java | 2 +- .../authz/permission/IndicesPermission.java | 14 ++++++------- .../authz/permission/LimitedRole.java | 2 +- .../authz/privilege/IndexPrivilege.java | 6 +++--- .../core/security/support/Automatons.java | 20 +++++++++---------- 8 files changed, 31 insertions(+), 28 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java index 5ba5c1fd1218a..23c93226d5494 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java @@ -53,7 +53,7 @@ public final class ApplicationPermission { return new PermissionEntry( appPriv, Sets.union(existing.resourceNames, resourceNames), - Automatons.unionAndMinimize(Arrays.asList(existing.resourceAutomaton, patterns)) + Automatons.unionAndDeterminize(Arrays.asList(existing.resourceAutomaton, patterns)) ); } })); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java index 4e608281a7858..5f3da8f73a708 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java @@ -137,7 +137,7 @@ public ClusterPermission build() { } List checks = this.permissionChecks; if (false == actionAutomatons.isEmpty()) { - 
final Automaton mergedAutomaton = Automatons.unionAndMinimize(this.actionAutomatons); + final Automaton mergedAutomaton = Automatons.unionAndDeterminize(this.actionAutomatons); checks = new ArrayList<>(this.permissionChecks.size() + 1); checks.add(new AutomatonPermissionCheck(mergedAutomaton)); checks.addAll(this.permissionChecks); @@ -156,7 +156,7 @@ private static Automaton createAutomaton(Set allowedActionPatterns, Set< } else { final Automaton allowedAutomaton = Automatons.patterns(allowedActionPatterns); final Automaton excludedAutomaton = Automatons.patterns(excludeActionPatterns); - return Automatons.minusAndMinimize(allowedAutomaton, excludedAutomaton); + return Automatons.minusAndDeterminize(allowedAutomaton, excludedAutomaton); } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java index 235d7419d2bf0..ed7bbf9158278 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java @@ -147,7 +147,7 @@ public static Automaton initializePermittedFieldsAutomaton(FieldPermissionsDefin List automatonList = groups.stream() .map(g -> FieldPermissions.buildPermittedFieldsAutomaton(g.getGrantedFields(), g.getExcludedFields())) .collect(Collectors.toList()); - return Automatons.unionAndMinimize(automatonList); + return Automatons.unionAndDeterminize(automatonList); } /** @@ -189,7 +189,7 @@ public static Automaton buildPermittedFieldsAutomaton(final String[] grantedFiel ); } - grantedFieldsAutomaton = Automatons.minusAndMinimize(grantedFieldsAutomaton, deniedFieldsAutomaton); + grantedFieldsAutomaton = Automatons.minusAndDeterminize(grantedFieldsAutomaton, deniedFieldsAutomaton); return grantedFieldsAutomaton; } 
@@ -206,7 +206,10 @@ public static Automaton buildPermittedFieldsAutomaton(final String[] grantedFiel public FieldPermissions limitFieldPermissions(FieldPermissions limitedBy) { if (hasFieldLevelSecurity() && limitedBy != null && limitedBy.hasFieldLevelSecurity()) { // TODO: cache the automaton computation with FieldPermissionsCache - Automaton _permittedFieldsAutomaton = Automatons.intersectAndMinimize(getIncludeAutomaton(), limitedBy.getIncludeAutomaton()); + Automaton _permittedFieldsAutomaton = Automatons.intersectAndDeterminize( + getIncludeAutomaton(), + limitedBy.getIncludeAutomaton() + ); return new FieldPermissions( CollectionUtils.concatLists(fieldPermissionsDefinitions, limitedBy.fieldPermissionsDefinitions), _permittedFieldsAutomaton diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissionsCache.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissionsCache.java index 46261937a0228..a1e14bfde8aa5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissionsCache.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissionsCache.java @@ -107,7 +107,7 @@ FieldPermissions union(Collection fieldPermissionsCollection) List automatonList = fieldPermissionsCollection.stream() .map(FieldPermissions::getIncludeAutomaton) .collect(Collectors.toList()); - return new FieldPermissions(key, Automatons.unionAndMinimize(automatonList)); + return new FieldPermissions(key, Automatons.unionAndDeterminize(automatonList)); }); } catch (ExecutionException e) { throw new ElasticsearchException("unable to compute field permissions", e); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java index 558f8e6f22ac1..cdd5a6f6ff72d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java @@ -283,14 +283,14 @@ public boolean checkResourcePrivileges( for (String forIndexPattern : checkForIndexPatterns) { Automaton checkIndexAutomaton = Automatons.patterns(forIndexPattern); if (false == allowRestrictedIndices && false == isConcreteRestrictedIndex(forIndexPattern)) { - checkIndexAutomaton = Automatons.minusAndMinimize(checkIndexAutomaton, restrictedIndices.getAutomaton()); + checkIndexAutomaton = Automatons.minusAndDeterminize(checkIndexAutomaton, restrictedIndices.getAutomaton()); } if (false == Operations.isEmpty(checkIndexAutomaton)) { Automaton allowedIndexPrivilegesAutomaton = null; for (var indexAndPrivilegeAutomaton : indexGroupAutomatons.entrySet()) { if (Automatons.subsetOf(checkIndexAutomaton, indexAndPrivilegeAutomaton.getValue())) { if (allowedIndexPrivilegesAutomaton != null) { - allowedIndexPrivilegesAutomaton = Automatons.unionAndMinimize( + allowedIndexPrivilegesAutomaton = Automatons.unionAndDeterminize( Arrays.asList(allowedIndexPrivilegesAutomaton, indexAndPrivilegeAutomaton.getKey()) ); } else { @@ -342,7 +342,7 @@ public Automaton allowedActionsMatcher(String index) { automatonList.add(group.privilege.getAutomaton()); } } - return automatonList.isEmpty() ? Automatons.EMPTY : Automatons.unionAndMinimize(automatonList); + return automatonList.isEmpty() ? Automatons.EMPTY : Automatons.unionAndDeterminize(automatonList); } /** @@ -704,7 +704,7 @@ private Map indexGroupAutomatons(boolean combine) { Automaton indexAutomaton = group.getIndexMatcherAutomaton(); allAutomatons.compute( group.privilege().getAutomaton(), - (key, value) -> value == null ? 
indexAutomaton : Automatons.unionAndMinimize(List.of(value, indexAutomaton)) + (key, value) -> value == null ? indexAutomaton : Automatons.unionAndDeterminize(List.of(value, indexAutomaton)) ); if (combine) { List> combinedAutomatons = new ArrayList<>(); @@ -714,7 +714,7 @@ private Map indexGroupAutomatons(boolean combine) { group.privilege().getAutomaton() ); if (Operations.isEmpty(intersectingPrivileges) == false) { - Automaton indexPatternAutomaton = Automatons.unionAndMinimize( + Automaton indexPatternAutomaton = Automatons.unionAndDeterminize( List.of(indexAndPrivilegeAutomatons.getValue(), indexAutomaton) ); combinedAutomatons.add(new Tuple<>(intersectingPrivileges, indexPatternAutomaton)); @@ -723,7 +723,7 @@ private Map indexGroupAutomatons(boolean combine) { combinedAutomatons.forEach( automatons -> allAutomatons.compute( automatons.v1(), - (key, value) -> value == null ? automatons.v2() : Automatons.unionAndMinimize(List.of(value, automatons.v2())) + (key, value) -> value == null ? 
automatons.v2() : Automatons.unionAndDeterminize(List.of(value, automatons.v2())) ) ); } @@ -768,7 +768,7 @@ public Group( this.indexNameMatcher = StringMatcher.of(indices).and(name -> restrictedIndices.isRestricted(name) == false); this.indexNameAutomaton = () -> indexNameAutomatonMemo.computeIfAbsent( indices, - k -> Automatons.minusAndMinimize(Automatons.patterns(indices), restrictedIndices.getAutomaton()) + k -> Automatons.minusAndDeterminize(Automatons.patterns(indices), restrictedIndices.getAutomaton()) ); } this.fieldPermissions = Objects.requireNonNull(fieldPermissions); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRole.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRole.java index ea32ba13ae576..e4d283aba75a3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRole.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRole.java @@ -212,7 +212,7 @@ public IsResourceAuthorizedPredicate allowedIndicesMatcher(String action) { public Automaton allowedActionsMatcher(String index) { final Automaton allowedMatcher = baseRole.allowedActionsMatcher(index); final Automaton limitedByMatcher = limitedByRole.allowedActionsMatcher(index); - return Automatons.intersectAndMinimize(allowedMatcher, limitedByMatcher); + return Automatons.intersectAndDeterminize(allowedMatcher, limitedByMatcher); } /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java index 7174b2f616c2a..f4df99dcefea4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java @@ -57,7 +57,7 @@ import static java.util.Map.entry; import static org.elasticsearch.xpack.core.security.support.Automatons.patterns; -import static org.elasticsearch.xpack.core.security.support.Automatons.unionAndMinimize; +import static org.elasticsearch.xpack.core.security.support.Automatons.unionAndDeterminize; /** * The name of an index related action always being with `indices:` followed by a sequence of slash-separated terms @@ -110,7 +110,7 @@ public final class IndexPrivilege extends Privilege { private static final Automaton DELETE_AUTOMATON = patterns("indices:data/write/delete*", "indices:data/write/bulk*"); private static final Automaton WRITE_AUTOMATON = patterns("indices:data/write/*", TransportAutoPutMappingAction.TYPE.name()); private static final Automaton MONITOR_AUTOMATON = patterns("indices:monitor/*"); - private static final Automaton MANAGE_AUTOMATON = unionAndMinimize( + private static final Automaton MANAGE_AUTOMATON = unionAndDeterminize( Arrays.asList( MONITOR_AUTOMATON, patterns("indices:admin/*", TransportFieldCapabilitiesAction.NAME + "*", GetRollupIndexCapsAction.NAME + "*") @@ -303,7 +303,7 @@ private static IndexPrivilege resolve(Set name) { if (actions.isEmpty() == false) { automata.add(patterns(actions)); } - return new IndexPrivilege(name, unionAndMinimize(automata)); + return new IndexPrivilege(name, unionAndDeterminize(automata)); } static Map values() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java index 201cb4b69e472..d3790ea64ba4b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java @@ -112,7 +112,7 @@ public 
static Automaton patterns(Collection patterns) { private static Automaton buildAutomaton(Collection patterns) { if (patterns.size() == 1) { - return minimize(pattern(patterns.iterator().next())); + return determinize(pattern(patterns.iterator().next())); } final Function, Automaton> build = strings -> { @@ -121,7 +121,7 @@ private static Automaton buildAutomaton(Collection patterns) { final Automaton patternAutomaton = pattern(pattern); automata.add(patternAutomaton); } - return unionAndMinimize(automata); + return unionAndDeterminize(automata); }; // We originally just compiled each automaton separately and then unioned them all. @@ -188,7 +188,7 @@ private static Automaton buildAutomaton(Collection patterns) { if (misc.isEmpty() == false) { automata.add(build.apply(misc)); } - return unionAndMinimize(automata); + return unionAndDeterminize(automata); } /** @@ -277,22 +277,22 @@ static Automaton wildcard(String text) { return Operations.determinize(concatenate(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } - public static Automaton unionAndMinimize(Collection automata) { + public static Automaton unionAndDeterminize(Collection automata) { Automaton res = automata.size() == 1 ? 
automata.iterator().next() : union(automata); - return minimize(res); + return determinize(res); } - public static Automaton minusAndMinimize(Automaton a1, Automaton a2) { + public static Automaton minusAndDeterminize(Automaton a1, Automaton a2) { Automaton res = minus(a1, a2, maxDeterminizedStates); - return minimize(res); + return determinize(res); } - public static Automaton intersectAndMinimize(Automaton a1, Automaton a2) { + public static Automaton intersectAndDeterminize(Automaton a1, Automaton a2) { Automaton res = intersection(a1, a2); - return minimize(res); + return determinize(res); } - private static Automaton minimize(Automaton automaton) { + private static Automaton determinize(Automaton automaton) { return Operations.determinize(automaton, maxDeterminizedStates); } From cf3c77a9c4dc89d2070cd84d12cfc1426632358e Mon Sep 17 00:00:00 2001 From: Dan Rubinstein Date: Tue, 22 Oct 2024 10:57:15 -0400 Subject: [PATCH 286/449] Add upper and lower max chunk size limits to ChunkingSettings (#115130) * Add upper and lower max chunk size limits to ChunkingSettings * Fix ServiceUtils tests --------- Co-authored-by: Elastic Machine --- .../SentenceBoundaryChunkingSettings.java | 6 ++- .../WordBoundaryChunkingSettings.java | 6 ++- .../inference/services/ServiceUtils.java | 26 +++++++++ .../ChunkingSettingsBuilderTests.java | 14 ++--- .../chunking/ChunkingSettingsTests.java | 8 +-- .../SentenceBoundaryChunkerTests.java | 3 +- ...SentenceBoundaryChunkingSettingsTests.java | 9 ++-- .../chunking/WordBoundaryChunkerTests.java | 2 +- .../WordBoundaryChunkingSettingsTests.java | 31 +++-------- .../inference/services/ServiceUtilsTests.java | 54 +++++++++++++++++++ 10 files changed, 117 insertions(+), 42 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettings.java index 
04a07eeb984ec..def52e97666f9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettings.java @@ -29,6 +29,8 @@ public class SentenceBoundaryChunkingSettings implements ChunkingSettings { public static final String NAME = "SentenceBoundaryChunkingSettings"; private static final ChunkingStrategy STRATEGY = ChunkingStrategy.SENTENCE; + private static final int MAX_CHUNK_SIZE_LOWER_LIMIT = 20; + private static final int MAX_CHUNK_SIZE_UPPER_LIMIT = 300; private static final Set VALID_KEYS = Set.of( ChunkingSettingsOptions.STRATEGY.toString(), ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), @@ -62,9 +64,11 @@ public static SentenceBoundaryChunkingSettings fromMap(Map map) ); } - Integer maxChunkSize = ServiceUtils.extractRequiredPositiveInteger( + Integer maxChunkSize = ServiceUtils.extractRequiredPositiveIntegerBetween( map, ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), + MAX_CHUNK_SIZE_LOWER_LIMIT, + MAX_CHUNK_SIZE_UPPER_LIMIT, ModelConfigurations.CHUNKING_SETTINGS, validationException ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkingSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkingSettings.java index 5b91e122b9c80..7fb0fdc91bf72 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkingSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkingSettings.java @@ -28,6 +28,8 @@ public class WordBoundaryChunkingSettings implements ChunkingSettings { public static final String NAME = "WordBoundaryChunkingSettings"; private static final ChunkingStrategy STRATEGY = ChunkingStrategy.WORD; + private static final int 
MAX_CHUNK_SIZE_LOWER_LIMIT = 10; + private static final int MAX_CHUNK_SIZE_UPPER_LIMIT = 300; private static final Set VALID_KEYS = Set.of( ChunkingSettingsOptions.STRATEGY.toString(), ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), @@ -56,9 +58,11 @@ public static WordBoundaryChunkingSettings fromMap(Map map) { ); } - Integer maxChunkSize = ServiceUtils.extractRequiredPositiveInteger( + Integer maxChunkSize = ServiceUtils.extractRequiredPositiveIntegerBetween( map, ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), + MAX_CHUNK_SIZE_LOWER_LIMIT, + MAX_CHUNK_SIZE_UPPER_LIMIT, ModelConfigurations.CHUNKING_SETTINGS, validationException ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java index c0e3c78b12f13..9e7f8712b4087 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java @@ -435,6 +435,32 @@ public static Integer extractRequiredPositiveIntegerLessThanOrEqualToMax( return field; } + public static Integer extractRequiredPositiveIntegerBetween( + Map map, + String settingName, + int minValue, + int maxValue, + String scope, + ValidationException validationException + ) { + Integer field = extractRequiredPositiveInteger(map, settingName, scope, validationException); + + if (field != null && field < minValue) { + validationException.addValidationError( + ServiceUtils.mustBeGreaterThanOrEqualNumberErrorMessage(settingName, scope, field, minValue) + ); + return null; + } + if (field != null && field > maxValue) { + validationException.addValidationError( + ServiceUtils.mustBeLessThanOrEqualNumberErrorMessage(settingName, scope, field, maxValue) + ); + return null; + } + + return field; + } + public static Integer extractOptionalPositiveInteger( Map 
map, String settingName, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilderTests.java index 5b9625073e6c6..235a3730ce4f6 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilderTests.java @@ -38,25 +38,27 @@ public void testValidChunkingSettingsMap() { } private Map, ChunkingSettings> chunkingSettingsMapToChunkingSettings() { - var maxChunkSize = randomNonNegativeInt(); - var overlap = randomIntBetween(1, maxChunkSize / 2); + var maxChunkSizeWordBoundaryChunkingSettings = randomIntBetween(10, 300); + var overlap = randomIntBetween(1, maxChunkSizeWordBoundaryChunkingSettings / 2); + var maxChunkSizeSentenceBoundaryChunkingSettings = randomIntBetween(20, 300); + return Map.of( Map.of( ChunkingSettingsOptions.STRATEGY.toString(), ChunkingStrategy.WORD.toString(), ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), - maxChunkSize, + maxChunkSizeWordBoundaryChunkingSettings, ChunkingSettingsOptions.OVERLAP.toString(), overlap ), - new WordBoundaryChunkingSettings(maxChunkSize, overlap), + new WordBoundaryChunkingSettings(maxChunkSizeWordBoundaryChunkingSettings, overlap), Map.of( ChunkingSettingsOptions.STRATEGY.toString(), ChunkingStrategy.SENTENCE.toString(), ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), - maxChunkSize + maxChunkSizeSentenceBoundaryChunkingSettings ), - new SentenceBoundaryChunkingSettings(maxChunkSize, 1) + new SentenceBoundaryChunkingSettings(maxChunkSizeSentenceBoundaryChunkingSettings, 1) ); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsTests.java index 8373ae93354b1..2832c2f64e0e6 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsTests.java @@ -21,11 +21,11 @@ public static ChunkingSettings createRandomChunkingSettings() { switch (randomStrategy) { case WORD -> { - var maxChunkSize = randomNonNegativeInt(); + var maxChunkSize = randomIntBetween(10, 300); return new WordBoundaryChunkingSettings(maxChunkSize, randomIntBetween(1, maxChunkSize / 2)); } case SENTENCE -> { - return new SentenceBoundaryChunkingSettings(randomNonNegativeInt(), randomBoolean() ? 0 : 1); + return new SentenceBoundaryChunkingSettings(randomIntBetween(20, 300), randomBoolean() ? 0 : 1); } default -> throw new IllegalArgumentException("Unsupported random strategy [" + randomStrategy + "]"); } @@ -38,13 +38,13 @@ public static Map createRandomChunkingSettingsMap() { switch (randomStrategy) { case WORD -> { - var maxChunkSize = randomNonNegativeInt(); + var maxChunkSize = randomIntBetween(10, 300); chunkingSettingsMap.put(ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), maxChunkSize); chunkingSettingsMap.put(ChunkingSettingsOptions.OVERLAP.toString(), randomIntBetween(1, maxChunkSize / 2)); } case SENTENCE -> { - chunkingSettingsMap.put(ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), randomNonNegativeInt()); + chunkingSettingsMap.put(ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), randomIntBetween(20, 300)); } default -> { } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkerTests.java index 5687ebc4dbae7..afce8c57e0350 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkerTests.java @@ -318,7 +318,8 @@ public void testChunkSplitLargeChunkSizesWithChunkingSettings() { } public void testInvalidChunkingSettingsProvided() { - ChunkingSettings chunkingSettings = new WordBoundaryChunkingSettings(randomNonNegativeInt(), randomNonNegativeInt()); + var maxChunkSize = randomIntBetween(10, 300); + ChunkingSettings chunkingSettings = new WordBoundaryChunkingSettings(maxChunkSize, randomIntBetween(1, maxChunkSize / 2)); assertThrows(IllegalArgumentException.class, () -> { new SentenceBoundaryChunker().chunk(TEST_TEXT, chunkingSettings); }); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettingsTests.java index fe97d7eb3af54..47a1a116ba21e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettingsTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.inference.ChunkingStrategy; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.HashMap; @@ -28,14 +27,14 @@ public void testMaxChunkSizeNotProvided() { } public void testInvalidInputsProvided() { - var chunkingSettingsMap = buildChunkingSettingsMap(Optional.of(randomNonNegativeInt())); + var chunkingSettingsMap = buildChunkingSettingsMap(Optional.of(randomIntBetween(20, 300))); chunkingSettingsMap.put(randomAlphaOfLength(10), 
randomNonNegativeInt()); assertThrows(ValidationException.class, () -> { SentenceBoundaryChunkingSettings.fromMap(chunkingSettingsMap); }); } public void testValidInputsProvided() { - int maxChunkSize = randomNonNegativeInt(); + int maxChunkSize = randomIntBetween(20, 300); SentenceBoundaryChunkingSettings settings = SentenceBoundaryChunkingSettings.fromMap( buildChunkingSettingsMap(Optional.of(maxChunkSize)) ); @@ -59,12 +58,12 @@ protected Writeable.Reader instanceReader() { @Override protected SentenceBoundaryChunkingSettings createTestInstance() { - return new SentenceBoundaryChunkingSettings(randomNonNegativeInt(), randomBoolean() ? 0 : 1); + return new SentenceBoundaryChunkingSettings(randomIntBetween(20, 300), randomBoolean() ? 0 : 1); } @Override protected SentenceBoundaryChunkingSettings mutateInstance(SentenceBoundaryChunkingSettings instance) throws IOException { - var chunkSize = randomValueOtherThan(instance.maxChunkSize, ESTestCase::randomNonNegativeInt); + var chunkSize = randomValueOtherThan(instance.maxChunkSize, () -> randomIntBetween(20, 300)); return new SentenceBoundaryChunkingSettings(chunkSize, instance.sentenceOverlap); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkerTests.java index 08c0724f36270..ef643a4b36fdc 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkerTests.java @@ -136,7 +136,7 @@ public void testNumberOfChunksWithWordBoundaryChunkingSettings() { } public void testInvalidChunkingSettingsProvided() { - ChunkingSettings chunkingSettings = new SentenceBoundaryChunkingSettings(randomNonNegativeInt(), 0); + ChunkingSettings chunkingSettings = new 
SentenceBoundaryChunkingSettings(randomIntBetween(20, 300), 0); assertThrows(IllegalArgumentException.class, () -> { new WordBoundaryChunker().chunk(TEST_TEXT, chunkingSettings); }); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkingSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkingSettingsTests.java index c5515f7bf0512..dd91a3c7a947e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkingSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkingSettingsTests.java @@ -14,7 +14,6 @@ import java.io.IOException; import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.Optional; @@ -28,19 +27,20 @@ public void testMaxChunkSizeNotProvided() { public void testOverlapNotProvided() { assertThrows(ValidationException.class, () -> { - WordBoundaryChunkingSettings.fromMap(buildChunkingSettingsMap(Optional.of(randomNonNegativeInt()), Optional.empty())); + WordBoundaryChunkingSettings.fromMap(buildChunkingSettingsMap(Optional.of(randomIntBetween(10, 300)), Optional.empty())); }); } public void testInvalidInputsProvided() { - var chunkingSettingsMap = buildChunkingSettingsMap(Optional.of(randomNonNegativeInt()), Optional.of(randomNonNegativeInt())); + var maxChunkSize = randomIntBetween(10, 300); + var chunkingSettingsMap = buildChunkingSettingsMap(Optional.of(maxChunkSize), Optional.of(randomIntBetween(1, maxChunkSize / 2))); chunkingSettingsMap.put(randomAlphaOfLength(10), randomNonNegativeInt()); assertThrows(ValidationException.class, () -> { WordBoundaryChunkingSettings.fromMap(chunkingSettingsMap); }); } public void testOverlapGreaterThanHalfMaxChunkSize() { - var maxChunkSize = randomNonNegativeInt(); + var maxChunkSize = randomIntBetween(10, 300); var overlap = 
randomIntBetween((maxChunkSize / 2) + 1, maxChunkSize); assertThrows(ValidationException.class, () -> { WordBoundaryChunkingSettings.fromMap(buildChunkingSettingsMap(Optional.of(maxChunkSize), Optional.of(overlap))); @@ -48,7 +48,7 @@ public void testOverlapGreaterThanHalfMaxChunkSize() { } public void testValidInputsProvided() { - int maxChunkSize = randomNonNegativeInt(); + int maxChunkSize = randomIntBetween(10, 300); int overlap = randomIntBetween(1, maxChunkSize / 2); WordBoundaryChunkingSettings settings = WordBoundaryChunkingSettings.fromMap( buildChunkingSettingsMap(Optional.of(maxChunkSize), Optional.of(overlap)) @@ -75,29 +75,14 @@ protected Writeable.Reader instanceReader() { @Override protected WordBoundaryChunkingSettings createTestInstance() { - var maxChunkSize = randomNonNegativeInt(); + var maxChunkSize = randomIntBetween(10, 300); return new WordBoundaryChunkingSettings(maxChunkSize, randomIntBetween(1, maxChunkSize / 2)); } @Override protected WordBoundaryChunkingSettings mutateInstance(WordBoundaryChunkingSettings instance) throws IOException { - var valueToMutate = randomFrom(List.of(ChunkingSettingsOptions.MAX_CHUNK_SIZE, ChunkingSettingsOptions.OVERLAP)); - var maxChunkSize = instance.maxChunkSize; - var overlap = instance.overlap; - - if (valueToMutate.equals(ChunkingSettingsOptions.MAX_CHUNK_SIZE)) { - while (maxChunkSize == instance.maxChunkSize) { - maxChunkSize = randomNonNegativeInt(); - } - - if (overlap > maxChunkSize / 2) { - overlap = randomIntBetween(1, maxChunkSize / 2); - } - } else if (valueToMutate.equals(ChunkingSettingsOptions.OVERLAP)) { - while (overlap == instance.overlap) { - overlap = randomIntBetween(1, maxChunkSize / 2); - } - } + var maxChunkSize = randomValueOtherThan(instance.maxChunkSize, () -> randomIntBetween(10, 300)); + var overlap = randomValueOtherThan(instance.overlap, () -> randomIntBetween(1, maxChunkSize / 2)); return new WordBoundaryChunkingSettings(maxChunkSize, overlap); } diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java index ca48d5427d18b..e3df0f0b5a2e1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java @@ -605,6 +605,60 @@ public void testExtractRequiredPositiveIntegerLessThanOrEqualToMax_AddsErrorWhen assertThat(validation.validationErrors().get(1), is("[scope] does not contain the required setting [not_key]")); } + public void testExtractRequiredPositiveIntegerBetween_ReturnsValueWhenValueIsBetweenMinAndMax() { + var minValue = randomNonNegativeInt(); + var maxValue = randomIntBetween(minValue + 2, minValue + 10); + testExtractRequiredPositiveIntegerBetween_Successful(minValue, maxValue, randomIntBetween(minValue + 1, maxValue - 1)); + } + + public void testExtractRequiredPositiveIntegerBetween_ReturnsValueWhenValueIsEqualToMin() { + var minValue = randomNonNegativeInt(); + var maxValue = randomIntBetween(minValue + 1, minValue + 10); + testExtractRequiredPositiveIntegerBetween_Successful(minValue, maxValue, minValue); + } + + public void testExtractRequiredPositiveIntegerBetween_ReturnsValueWhenValueIsEqualToMax() { + var minValue = randomNonNegativeInt(); + var maxValue = randomIntBetween(minValue + 1, minValue + 10); + testExtractRequiredPositiveIntegerBetween_Successful(minValue, maxValue, maxValue); + } + + private void testExtractRequiredPositiveIntegerBetween_Successful(int minValue, int maxValue, int actualValue) { + var validation = new ValidationException(); + validation.addValidationError("previous error"); + Map map = modifiableMap(Map.of("key", actualValue)); + var parsedInt = ServiceUtils.extractRequiredPositiveIntegerBetween(map, "key", minValue, maxValue, "scope", validation); + + 
assertThat(validation.validationErrors(), hasSize(1)); + assertNotNull(parsedInt); + assertThat(parsedInt, is(actualValue)); + assertTrue(map.isEmpty()); + } + + public void testExtractRequiredIntBetween_AddsErrorForValueBelowMin() { + var minValue = randomNonNegativeInt(); + var maxValue = randomIntBetween(minValue, minValue + 10); + testExtractRequiredIntBetween_Unsuccessful(minValue, maxValue, minValue - 1); + } + + public void testExtractRequiredIntBetween_AddsErrorForValueAboveMax() { + var minValue = randomNonNegativeInt(); + var maxValue = randomIntBetween(minValue, minValue + 10); + testExtractRequiredIntBetween_Unsuccessful(minValue, maxValue, maxValue + 1); + } + + private void testExtractRequiredIntBetween_Unsuccessful(int minValue, int maxValue, int actualValue) { + var validation = new ValidationException(); + validation.addValidationError("previous error"); + Map map = modifiableMap(Map.of("key", actualValue)); + var parsedInt = ServiceUtils.extractRequiredPositiveIntegerBetween(map, "key", minValue, maxValue, "scope", validation); + + assertThat(validation.validationErrors(), hasSize(2)); + assertNull(parsedInt); + assertTrue(map.isEmpty()); + assertThat(validation.validationErrors().get(1), containsString("Invalid value")); + } + public void testExtractOptionalEnum_ReturnsNull_WhenFieldDoesNotExist() { var validation = new ValidationException(); Map map = modifiableMap(Map.of("key", "value")); From aee76c5123df08705eec24bee5cb0158b47267ed Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 23 Oct 2024 02:03:14 +1100 Subject: [PATCH 287/449] Mute org.elasticsearch.xpack.analytics.rate.TimeSeriesRateAggregatorTests org.elasticsearch.xpack.analytics.rate.TimeSeriesRateAggregatorTests #115334 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 1cb8baa96a942..847f9af13801c 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ 
-326,6 +326,8 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=esql/60_usage/Basic ESQL usage output (telemetry)} issue: https://github.com/elastic/elasticsearch/issues/115231 +- class: org.elasticsearch.xpack.analytics.rate.TimeSeriesRateAggregatorTests + issue: https://github.com/elastic/elasticsearch/issues/115334 # Examples: # From 2f695ef1cca152873911ebaf9eaa6abfaee4d1e7 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 22 Oct 2024 17:10:44 +0200 Subject: [PATCH 288/449] Relax condition for H3 bins crossing the dateline (#115290) Just check the for h3 bins crossing the dateline, that the center child relationship returns something different to Disjoint. --- .../aggregations/bucket/geogrid/GeoHexVisitorTests.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexVisitorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexVisitorTests.java index 8e2f713e6ed3e..d9cbbaafa1779 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexVisitorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexVisitorTests.java @@ -83,9 +83,10 @@ private void doTestGeometry(LongFunction h3ToGeometry, boolean hasArea visitor.reset(centerChild); reader.visit(visitor); if (hasArea) { - if (h3CrossesDateline && visitor.getLeftX() > visitor.getRightX()) { - // if both polygons crosses the dateline it cannot be inside due to the polygon splitting technique - assertEquals("failing h3: " + h3, GeoRelation.QUERY_CROSSES, visitor.relation()); + if (h3CrossesDateline) { + // if the h3 crosses the dateline, we might get CROSSES due to the polygon splitting technique. 
We can't + // be sure which one is the correct one, so we just check that it is not DISJOINT + assertNotSame("failing h3: " + h3, GeoRelation.QUERY_DISJOINT, visitor.relation()); } else { assertEquals("failing h3: " + h3, GeoRelation.QUERY_INSIDE, visitor.relation()); } From fab22654bddb364dcf75db15b1d77ee5faba92fd Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 23 Oct 2024 02:15:19 +1100 Subject: [PATCH 289/449] Mute org.elasticsearch.xpack.watcher.trigger.schedule.engine.TickerScheduleEngineTests testAddWithNoLastCheckedTimeButHasActivationTimeExecutesBeforeInitialInterval #115339 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 847f9af13801c..a7fb95691283b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -328,6 +328,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/115231 - class: org.elasticsearch.xpack.analytics.rate.TimeSeriesRateAggregatorTests issue: https://github.com/elastic/elasticsearch/issues/115334 +- class: org.elasticsearch.xpack.watcher.trigger.schedule.engine.TickerScheduleEngineTests + method: testAddWithNoLastCheckedTimeButHasActivationTimeExecutesBeforeInitialInterval + issue: https://github.com/elastic/elasticsearch/issues/115339 # Examples: # From 14c25fed66d80bc1b55106cc48a225f3449e5810 Mon Sep 17 00:00:00 2001 From: Pooya Salehi Date: Tue, 22 Oct 2024 17:25:32 +0200 Subject: [PATCH 290/449] Unmute MultiVersionRepositoryAccessIT testCreateAndRestoreSnapshot (#115326) These are either falsely marked as failure, or all related to a test setup issue that has been fixed. 
closes https://github.com/elastic/elasticsearch/pull/114998 --- muted-tests.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index a7fb95691283b..62140d05c9e4a 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -308,9 +308,6 @@ tests: - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT method: testInferDeploysDefaultElser issue: https://github.com/elastic/elasticsearch/issues/114913 -- class: org.elasticsearch.upgrades.MultiVersionRepositoryAccessIT - method: testCreateAndRestoreSnapshot - issue: https://github.com/elastic/elasticsearch/issues/114998 - class: org.elasticsearch.index.mapper.TextFieldMapperTests method: testBlockLoaderFromRowStrideReaderWithSyntheticSource issue: https://github.com/elastic/elasticsearch/issues/115066 From d9baf6f9db1761b5a777c98e4bac82abf93598c0 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 22 Oct 2024 17:43:31 +0200 Subject: [PATCH 291/449] Revert "Add ResolvedExpression wrapper (#114592)" (#115317) This reverts commit 4c15cc077887d00ecf0e02c39b42cf01874ab6c4. This commit introduced an orders of magnitude regression when searching many shards. 
--- docs/changelog/115317.yaml | 5 + .../TransportClusterSearchShardsAction.java | 3 +- .../indices/resolve/ResolveIndexAction.java | 9 +- .../query/TransportValidateQueryAction.java | 3 +- .../explain/TransportExplainAction.java | 3 +- .../action/search/TransportSearchAction.java | 24 +- .../search/TransportSearchShardsAction.java | 6 +- .../metadata/IndexNameExpressionResolver.java | 196 +++++------- .../elasticsearch/indices/IndicesService.java | 3 +- .../elasticsearch/search/SearchService.java | 3 +- .../indices/resolve/ResolveIndexTests.java | 15 +- .../DateMathExpressionResolverTests.java | 89 +++--- .../cluster/metadata/ExpressionListTests.java | 108 +++---- .../IndexNameExpressionResolverTests.java | 65 ++-- .../WildcardExpressionResolverTests.java | 299 ++++++++---------- .../indices/IndicesServiceTests.java | 34 +- 16 files changed, 361 insertions(+), 504 deletions(-) create mode 100644 docs/changelog/115317.yaml diff --git a/docs/changelog/115317.yaml b/docs/changelog/115317.yaml new file mode 100644 index 0000000000000..153f7a52f0674 --- /dev/null +++ b/docs/changelog/115317.yaml @@ -0,0 +1,5 @@ +pr: 115317 +summary: Revert "Add `ResolvedExpression` wrapper" +area: Indices APIs +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java index b855f2cee7613..9ffef1f178f44 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import 
org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; @@ -85,7 +84,7 @@ protected void masterOperation( String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterState, request); Map> routingMap = indexNameExpressionResolver.resolveSearchRouting(state, request.routing(), request.indices()); Map indicesAndFilters = new HashMap<>(); - Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); + Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); for (String index : concreteIndices) { final AliasFilter aliasFilter = indicesService.buildAliasFilter(clusterState, index, indicesAndAliases); final String[] aliases = indexNameExpressionResolver.indexAliases( diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java index f5c100b7884bb..5c5c71bc002b3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java @@ -25,7 +25,6 @@ import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; @@ -566,8 +565,8 @@ static void resolveIndices( if (names.length == 1 && (Metadata.ALL.equals(names[0]) || 
Regex.isMatchAllPattern(names[0]))) { names = new String[] { "**" }; } - Set resolvedIndexAbstractions = resolver.resolveExpressions(clusterState, indicesOptions, true, names); - for (ResolvedExpression s : resolvedIndexAbstractions) { + Set resolvedIndexAbstractions = resolver.resolveExpressions(clusterState, indicesOptions, true, names); + for (String s : resolvedIndexAbstractions) { enrichIndexAbstraction(clusterState, s, indices, aliases, dataStreams); } indices.sort(Comparator.comparing(ResolvedIndexAbstraction::getName)); @@ -598,12 +597,12 @@ private static void mergeResults( private static void enrichIndexAbstraction( ClusterState clusterState, - ResolvedExpression indexAbstraction, + String indexAbstraction, List indices, List aliases, List dataStreams ) { - IndexAbstraction ia = clusterState.metadata().getIndicesLookup().get(indexAbstraction.resource()); + IndexAbstraction ia = clusterState.metadata().getIndicesLookup().get(indexAbstraction); if (ia != null) { switch (ia.getType()) { case CONCRETE_INDEX -> { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java index e01f364712676..4e9830fe0d14e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java @@ -21,7 +21,6 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import 
org.elasticsearch.cluster.routing.ShardRouting; @@ -134,7 +133,7 @@ protected void doExecute(Task task, ValidateQueryRequest request, ActionListener @Override protected ShardValidateQueryRequest newShardRequest(int numShards, ShardRouting shard, ValidateQueryRequest request) { final ClusterState clusterState = clusterService.state(); - final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); + final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); final AliasFilter aliasFilter = searchService.buildAliasFilter(clusterState, shard.getIndexName(), indicesAndAliases); return new ShardValidateQueryRequest(shard.shardId(), aliasFilter, request); } diff --git a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java index 84c6df7b8a66f..9c82d032014f2 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java +++ b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java @@ -18,7 +18,6 @@ import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.Writeable; @@ -110,7 +109,7 @@ protected boolean resolveIndex(ExplainRequest request) { @Override protected void resolveRequest(ClusterState state, InternalRequest request) { - final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(state, request.request().index()); + final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(state, 
request.request().index()); final AliasFilter aliasFilter = searchService.buildAliasFilter(state, request.concreteIndex(), indicesAndAliases); request.request().filteringAlias(aliasFilter); } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index b5864f64a7824..1645a378446a4 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -37,7 +37,6 @@ import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.GroupShardsIterator; @@ -111,7 +110,6 @@ import java.util.function.BiFunction; import java.util.function.Function; import java.util.function.LongSupplier; -import java.util.stream.Collectors; import static org.elasticsearch.action.search.SearchType.DFS_QUERY_THEN_FETCH; import static org.elasticsearch.action.search.SearchType.QUERY_THEN_FETCH; @@ -205,7 +203,7 @@ public TransportSearchAction( private Map buildPerIndexOriginalIndices( ClusterState clusterState, - Set indicesAndAliases, + Set indicesAndAliases, String[] indices, IndicesOptions indicesOptions ) { @@ -213,9 +211,6 @@ private Map buildPerIndexOriginalIndices( var blocks = clusterState.blocks(); // optimization: mostly we do not have any blocks so there's no point in the expensive per-index checking boolean hasBlocks = blocks.global().isEmpty() == false || blocks.indices().isEmpty() == false; - // Get a distinct set of index abstraction names present from the resolved expressions to help with the 
reverse resolution from - // concrete index to the expression that produced it. - Set indicesAndAliasesResources = indicesAndAliases.stream().map(ResolvedExpression::resource).collect(Collectors.toSet()); for (String index : indices) { if (hasBlocks) { blocks.indexBlockedRaiseException(ClusterBlockLevel.READ, index); @@ -232,8 +227,8 @@ private Map buildPerIndexOriginalIndices( String[] finalIndices = Strings.EMPTY_ARRAY; if (aliases == null || aliases.length == 0 - || indicesAndAliasesResources.contains(index) - || hasDataStreamRef(clusterState, indicesAndAliasesResources, index)) { + || indicesAndAliases.contains(index) + || hasDataStreamRef(clusterState, indicesAndAliases, index)) { finalIndices = new String[] { index }; } if (aliases != null) { @@ -252,11 +247,7 @@ private static boolean hasDataStreamRef(ClusterState clusterState, Set i return indicesAndAliases.contains(ret.getParentDataStream().getName()); } - Map buildIndexAliasFilters( - ClusterState clusterState, - Set indicesAndAliases, - Index[] concreteIndices - ) { + Map buildIndexAliasFilters(ClusterState clusterState, Set indicesAndAliases, Index[] concreteIndices) { final Map aliasFilterMap = new HashMap<>(); for (Index index : concreteIndices) { clusterState.blocks().indexBlockedRaiseException(ClusterBlockLevel.READ, index.getName()); @@ -1246,10 +1237,7 @@ private void executeSearch( } else { final Index[] indices = resolvedIndices.getConcreteLocalIndices(); concreteLocalIndices = Arrays.stream(indices).map(Index::getName).toArray(String[]::new); - final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions( - clusterState, - searchRequest.indices() - ); + final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, searchRequest.indices()); aliasFilter = buildIndexAliasFilters(clusterState, indicesAndAliases, indices); aliasFilter.putAll(remoteAliasMap); localShardIterators = getLocalShardsIterator( @@ -1822,7 +1810,7 @@ List getLocalShardsIterator( 
ClusterState clusterState, SearchRequest searchRequest, String clusterAlias, - Set indicesAndAliases, + Set indicesAndAliases, String[] concreteIndices ) { var routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, searchRequest.routing(), searchRequest.indices()); diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java index b94bd95c93d8a..f418b5617b2a1 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.index.Index; @@ -128,10 +127,7 @@ public void searchShards(Task task, SearchShardsRequest searchShardsRequest, Act searchService.getRewriteContext(timeProvider::absoluteStartMillis, resolvedIndices, null), listener.delegateFailureAndWrap((delegate, searchRequest) -> { Index[] concreteIndices = resolvedIndices.getConcreteLocalIndices(); - final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions( - clusterState, - searchRequest.indices() - ); + final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, searchRequest.indices()); final Map aliasFilters = transportSearchAction.buildIndexAliasFilters( clusterState, indicesAndAliases, diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java 
b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index eaf54034b22e0..2229166a2d779 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -74,15 +74,6 @@ public IndexNameExpressionResolver(ThreadContext threadContext, SystemIndices sy this.systemIndices = Objects.requireNonNull(systemIndices, "System Indices must not be null"); } - /** - * This contains the resolved expression in the form of the resource. - * Soon it will facilitate the index component selector. - * @param resource the resolved resolvedExpression - */ - public record ResolvedExpression(String resource) { - - } - /** * Same as {@link #concreteIndexNames(ClusterState, IndicesOptions, String...)}, but the index expressions and options * are encapsulated in the specified request. @@ -200,9 +191,8 @@ public List dataStreamNames(ClusterState state, IndicesOptions options, getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - final Collection expressions = resolveExpressions(context, indexExpressions); + final Collection expressions = resolveExpressions(context, indexExpressions); return expressions.stream() - .map(ResolvedExpression::resource) .map(x -> state.metadata().getIndicesLookup().get(x)) .filter(Objects::nonNull) .filter(ia -> ia.getType() == Type.DATA_STREAM) @@ -231,11 +221,10 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit getNetNewSystemIndexPredicate() ); - final Collection expressions = resolveExpressions(context, request.index()); + final Collection expressions = resolveExpressions(context, request.index()); if (expressions.size() == 1) { - ResolvedExpression resolvedExpression = expressions.iterator().next(); - IndexAbstraction ia = state.metadata().getIndicesLookup().get(resolvedExpression.resource()); + IndexAbstraction ia = 
state.metadata().getIndicesLookup().get(expressions.iterator().next()); if (ia.getType() == Type.ALIAS) { Index writeIndex = ia.getWriteIndex(); if (writeIndex == null) { @@ -257,14 +246,14 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit } } - protected static Collection resolveExpressions(Context context, String... expressions) { + protected static Collection resolveExpressions(Context context, String... expressions) { if (context.getOptions().expandWildcardExpressions() == false) { if (expressions == null || expressions.length == 0 || expressions.length == 1 && Metadata.ALL.equals(expressions[0])) { return List.of(); } else { return ExplicitResourceNameFilter.filterUnavailable( context, - DateMathExpressionResolver.resolve(context, Arrays.stream(expressions).map(ResolvedExpression::new).toList()) + DateMathExpressionResolver.resolve(context, List.of(expressions)) ); } } else { @@ -275,10 +264,7 @@ protected static Collection resolveExpressions(Context conte } else { return WildcardExpressionResolver.resolve( context, - ExplicitResourceNameFilter.filterUnavailable( - context, - DateMathExpressionResolver.resolve(context, Arrays.stream(expressions).map(ResolvedExpression::new).toList()) - ) + ExplicitResourceNameFilter.filterUnavailable(context, DateMathExpressionResolver.resolve(context, List.of(expressions))) ); } } @@ -353,12 +339,12 @@ String[] concreteIndexNames(Context context, String... indexExpressions) { } Index[] concreteIndices(Context context, String... 
indexExpressions) { - final Collection expressions = resolveExpressions(context, indexExpressions); + final Collection expressions = resolveExpressions(context, indexExpressions); final Set concreteIndicesResult = Sets.newLinkedHashSetWithExpectedSize(expressions.size()); final Map indicesLookup = context.getState().metadata().getIndicesLookup(); - for (ResolvedExpression resolvedExpression : expressions) { - final IndexAbstraction indexAbstraction = indicesLookup.get(resolvedExpression.resource()); + for (String expression : expressions) { + final IndexAbstraction indexAbstraction = indicesLookup.get(expression); assert indexAbstraction != null; if (indexAbstraction.getType() == Type.ALIAS && context.isResolveToWriteIndex()) { Index writeIndex = indexAbstraction.getWriteIndex(); @@ -392,7 +378,7 @@ Index[] concreteIndices(Context context, String... indexExpressions) { throw new IllegalArgumentException( indexAbstraction.getType().getDisplayName() + " [" - + resolvedExpression.resource() + + expression + "] has more than one index associated with it " + Arrays.toString(indexNames) + ", can't execute a single index op" @@ -656,7 +642,7 @@ public Index concreteSingleIndex(ClusterState state, IndicesRequest request) { * Utility method that allows to resolve an index expression to its corresponding single write index. * * @param state the cluster state containing all the data to resolve to expression to a concrete index - * @param request The request that defines how an alias or an index need to be resolved to a concrete index + * @param request The request that defines how the an alias or an index need to be resolved to a concrete index * and the expression that can be resolved to an alias or an index name. 
* @throws IllegalArgumentException if the index resolution does not lead to an index, or leads to more than one index * @return the write index obtained as a result of the index resolution @@ -748,7 +734,7 @@ public static String resolveDateMathExpression(String dateExpression, long time) /** * Resolve an array of expressions to the set of indices and aliases that these expressions match. */ - public Set resolveExpressions(ClusterState state, String... expressions) { + public Set resolveExpressions(ClusterState state, String... expressions) { return resolveExpressions(state, IndicesOptions.lenientExpandOpen(), false, expressions); } @@ -757,7 +743,7 @@ public Set resolveExpressions(ClusterState state, String... * If {@param preserveDataStreams} is {@code true}, datastreams that are covered by the wildcards from the * {@param expressions} are returned as-is, without expanding them further to their respective backing indices. */ - public Set resolveExpressions( + public Set resolveExpressions( ClusterState state, IndicesOptions indicesOptions, boolean preserveDataStreams, @@ -774,10 +760,10 @@ public Set resolveExpressions( getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - Collection resolved = resolveExpressions(context, expressions); - if (resolved instanceof Set) { + Collection resolved = resolveExpressions(context, expressions); + if (resolved instanceof Set) { // unmodifiable without creating a new collection as it might contain many items - return Collections.unmodifiableSet((Set) resolved); + return Collections.unmodifiableSet((Set) resolved); } else { return Set.copyOf(resolved); } @@ -790,7 +776,7 @@ public Set resolveExpressions( * the index itself - null is returned. Returns {@code null} if no filtering is required. * NOTE: The provided expressions must have been resolved already via {@link #resolveExpressions}. 
*/ - public String[] filteringAliases(ClusterState state, String index, Set resolvedExpressions) { + public String[] filteringAliases(ClusterState state, String index, Set resolvedExpressions) { return indexAliases(state, index, AliasMetadata::filteringRequired, DataStreamAlias::filteringRequired, false, resolvedExpressions); } @@ -816,39 +802,39 @@ public String[] indexAliases( Predicate requiredAlias, Predicate requiredDataStreamAlias, boolean skipIdentity, - Set resolvedExpressions + Set resolvedExpressions ) { - if (isAllIndicesExpression(resolvedExpressions)) { + if (isAllIndices(resolvedExpressions)) { return null; } - Set resources = resolvedExpressions.stream().map(ResolvedExpression::resource).collect(Collectors.toSet()); + final IndexMetadata indexMetadata = state.metadata().getIndices().get(index); if (indexMetadata == null) { // Shouldn't happen throw new IndexNotFoundException(index); } - if (skipIdentity == false && resources.contains(index)) { + if (skipIdentity == false && resolvedExpressions.contains(index)) { return null; } IndexAbstraction ia = state.metadata().getIndicesLookup().get(index); DataStream dataStream = ia.getParentDataStream(); if (dataStream != null) { - if (skipIdentity == false && resources.contains(dataStream.getName())) { + if (skipIdentity == false && resolvedExpressions.contains(dataStream.getName())) { // skip the filters when the request targets the data stream name return null; } Map dataStreamAliases = state.metadata().dataStreamAliases(); List aliasesForDataStream; - if (iterateIndexAliases(dataStreamAliases.size(), resources.size())) { + if (iterateIndexAliases(dataStreamAliases.size(), resolvedExpressions.size())) { aliasesForDataStream = dataStreamAliases.values() .stream() - .filter(dataStreamAlias -> resources.contains(dataStreamAlias.getName())) + .filter(dataStreamAlias -> resolvedExpressions.contains(dataStreamAlias.getName())) .filter(dataStreamAlias -> 
dataStreamAlias.getDataStreams().contains(dataStream.getName())) .toList(); } else { - aliasesForDataStream = resources.stream() + aliasesForDataStream = resolvedExpressions.stream() .map(dataStreamAliases::get) .filter(dataStreamAlias -> dataStreamAlias != null && dataStreamAlias.getDataStreams().contains(dataStream.getName())) .toList(); @@ -873,15 +859,18 @@ public String[] indexAliases( } else { final Map indexAliases = indexMetadata.getAliases(); final AliasMetadata[] aliasCandidates; - if (iterateIndexAliases(indexAliases.size(), resources.size())) { + if (iterateIndexAliases(indexAliases.size(), resolvedExpressions.size())) { // faster to iterate indexAliases aliasCandidates = indexAliases.values() .stream() - .filter(aliasMetadata -> resources.contains(aliasMetadata.alias())) + .filter(aliasMetadata -> resolvedExpressions.contains(aliasMetadata.alias())) .toArray(AliasMetadata[]::new); } else { // faster to iterate resolvedExpressions - aliasCandidates = resources.stream().map(indexAliases::get).filter(Objects::nonNull).toArray(AliasMetadata[]::new); + aliasCandidates = resolvedExpressions.stream() + .map(indexAliases::get) + .filter(Objects::nonNull) + .toArray(AliasMetadata[]::new); } List aliases = null; for (AliasMetadata aliasMetadata : aliasCandidates) { @@ -920,7 +909,12 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - final Collection resolvedExpressions = resolveExpressions(context, expressions); + final Collection resolvedExpressions = resolveExpressions(context, expressions); + + // TODO: it appears that this can never be true? 
+ if (isAllIndices(resolvedExpressions)) { + return resolveSearchRoutingAllIndices(state.metadata(), routing); + } Map> routings = null; Set paramRouting = null; @@ -930,8 +924,8 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab paramRouting = Sets.newHashSet(Strings.splitStringByCommaToArray(routing)); } - for (ResolvedExpression resolvedExpression : resolvedExpressions) { - IndexAbstraction indexAbstraction = state.metadata().getIndicesLookup().get(resolvedExpression.resource); + for (String expression : resolvedExpressions) { + IndexAbstraction indexAbstraction = state.metadata().getIndicesLookup().get(expression); if (indexAbstraction != null && indexAbstraction.getType() == Type.ALIAS) { for (Index index : indexAbstraction.getIndices()) { String concreteIndex = index.getName(); @@ -969,7 +963,7 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab } } else { // Index - routings = collectRoutings(routings, paramRouting, norouting, resolvedExpression.resource()); + routings = collectRoutings(routings, paramRouting, norouting, expression); } } @@ -1015,17 +1009,6 @@ public static Map> resolveSearchRoutingAllIndices(Metadata m return null; } - /** - * Identifies whether the array containing index names given as argument refers to all indices - * The empty or null array identifies all indices - * - * @param aliasesOrIndices the array containing index names - * @return true if the provided array maps to all indices, false otherwise - */ - public static boolean isAllIndicesExpression(Collection aliasesOrIndices) { - return isAllIndices(aliasesOrIndices.stream().map(ResolvedExpression::resource).toList()); - } - /** * Identifies whether the array containing index names given as argument refers to all indices * The empty or null array identifies all indices @@ -1266,8 +1249,8 @@ private WildcardExpressionResolver() { * Returns all the indices, datastreams, and aliases, considering the open/closed, system, and hidden context parameters. 
* Depending on the context, returns the names of the datastreams themselves or their backing indices. */ - public static Collection resolveAll(Context context) { - List concreteIndices = resolveEmptyOrTrivialWildcard(context); + public static Collection resolveAll(Context context) { + List concreteIndices = resolveEmptyOrTrivialWildcard(context); if (context.includeDataStreams() == false && context.getOptions().ignoreAliases()) { return concreteIndices; @@ -1282,7 +1265,7 @@ public static Collection resolveAll(Context context) { .filter(ia -> shouldIncludeIfDataStream(ia, context) || shouldIncludeIfAlias(ia, context)) .filter(ia -> ia.isSystem() == false || context.systemIndexAccessPredicate.test(ia.getName())); - Set resolved = expandToOpenClosed(context, ias).collect(Collectors.toSet()); + Set resolved = expandToOpenClosed(context, ias).collect(Collectors.toSet()); resolved.addAll(concreteIndices); return resolved; } @@ -1310,17 +1293,17 @@ private static boolean shouldIncludeIfAlias(IndexAbstraction ia, IndexNameExpres * ultimately returned, instead of the alias or datastream name * */ - public static Collection resolve(Context context, List expressions) { + public static Collection resolve(Context context, List expressions) { ExpressionList expressionList = new ExpressionList(context, expressions); // fast exit if there are no wildcards to evaluate if (expressionList.hasWildcard() == false) { return expressions; } - Set result = new HashSet<>(); + Set result = new HashSet<>(); for (ExpressionList.Expression expression : expressionList) { if (expression.isWildcard()) { Stream matchingResources = matchResourcesToWildcard(context, expression.get()); - Stream matchingOpenClosedNames = expandToOpenClosed(context, matchingResources); + Stream matchingOpenClosedNames = expandToOpenClosed(context, matchingResources); AtomicBoolean emptyWildcardExpansion = new AtomicBoolean(false); if (context.getOptions().allowNoIndices() == false) { emptyWildcardExpansion.set(true); 
@@ -1336,9 +1319,9 @@ public static Collection resolve(Context context, List filterIndicesLookupForSuffixWildcar * Data streams and aliases are interpreted to refer to multiple indices, * then all index resources are filtered by their open/closed status. */ - private static Stream expandToOpenClosed(Context context, Stream resources) { + private static Stream expandToOpenClosed(Context context, Stream resources) { final IndexMetadata.State excludeState = excludeState(context.getOptions()); return resources.flatMap(indexAbstraction -> { if (context.isPreserveAliases() && indexAbstraction.getType() == Type.ALIAS) { - return Stream.of(new ResolvedExpression(indexAbstraction.getName())); + return Stream.of(indexAbstraction.getName()); } else if (context.isPreserveDataStreams() && indexAbstraction.getType() == Type.DATA_STREAM) { - return Stream.of(new ResolvedExpression(indexAbstraction.getName())); + return Stream.of(indexAbstraction.getName()); } else { Stream indicesStateStream = Stream.of(); if (shouldIncludeRegularIndices(context.getOptions())) { @@ -1451,20 +1434,18 @@ private static Stream expandToOpenClosed(Context context, St if (excludeState != null) { indicesStateStream = indicesStateStream.filter(indexMeta -> indexMeta.getState() != excludeState); } - return indicesStateStream.map(indexMeta -> new ResolvedExpression(indexMeta.getIndex().getName())); + return indicesStateStream.map(indexMeta -> indexMeta.getIndex().getName()); } }); } - private static List resolveEmptyOrTrivialWildcard(Context context) { + private static List resolveEmptyOrTrivialWildcard(Context context) { final String[] allIndices = resolveEmptyOrTrivialWildcardToAllIndices(context.getOptions(), context.getState().metadata()); - Stream result; if (context.systemIndexAccessLevel == SystemIndexAccessLevel.ALL) { - result = Arrays.stream(allIndices); + return List.of(allIndices); } else { - result = resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(context, allIndices).stream(); + return 
resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(context, allIndices); } - return result.map(ResolvedExpression::new).toList(); } private static List resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(Context context, String[] allIndices) { @@ -1526,8 +1507,8 @@ private DateMathExpressionResolver() { // utility class } - public static List resolve(Context context, List expressions) { - List result = new ArrayList<>(expressions.size()); + public static List resolve(Context context, List expressions) { + List result = new ArrayList<>(expressions.size()); for (ExpressionList.Expression expression : new ExpressionList(context, expressions)) { result.add(resolveExpression(expression, context::getStartTime)); } @@ -1538,15 +1519,13 @@ static String resolveExpression(String expression) { return resolveExpression(expression, System::currentTimeMillis); } - static ResolvedExpression resolveExpression(ExpressionList.Expression expression, LongSupplier getTime) { - String result; + static String resolveExpression(ExpressionList.Expression expression, LongSupplier getTime) { if (expression.isExclusion()) { // accepts date-math exclusions that are of the form "-<...{}>", i.e. the "-" is outside the "<>" date-math template - result = "-" + resolveExpression(expression.get(), getTime); + return "-" + resolveExpression(expression.get(), getTime); } else { - result = resolveExpression(expression.get(), getTime); + return resolveExpression(expression.get(), getTime); } - return new ResolvedExpression(result); } static String resolveExpression(String expression, LongSupplier getTime) { @@ -1708,26 +1687,25 @@ private ExplicitResourceNameFilter() { * Returns an expression list with "unavailable" (missing or not acceptable) resource names filtered out. * Only explicit resource names are considered for filtering. Wildcard and exclusion expressions are kept in. 
*/ - public static List filterUnavailable(Context context, List expressions) { + public static List filterUnavailable(Context context, List expressions) { ensureRemoteIndicesRequireIgnoreUnavailable(context.getOptions(), expressions); - List result = new ArrayList<>(expressions.size()); + List result = new ArrayList<>(expressions.size()); for (ExpressionList.Expression expression : new ExpressionList(context, expressions)) { validateAliasOrIndex(expression); - if (expression.isWildcard() || expression.isExclusion() || ensureAliasOrIndexExists(context, expression)) { - result.add(expression.resolvedExpression()); + if (expression.isWildcard() || expression.isExclusion() || ensureAliasOrIndexExists(context, expression.get())) { + result.add(expression.expression()); } } return result; } /** - * This returns `true` if the given {@param resolvedExpression} is of a resource that exists. - * Otherwise, it returns `false` if the `ignore_unavailable` option is `true`, or, if `false`, it throws a "not found" type of + * This returns `true` if the given {@param name} is of a resource that exists. + * Otherwise, it returns `false` if the `ignore_unvailable` option is `true`, or, if `false`, it throws a "not found" type of * exception. */ @Nullable - private static boolean ensureAliasOrIndexExists(Context context, ExpressionList.Expression expression) { - String name = expression.get(); + private static boolean ensureAliasOrIndexExists(Context context, String name) { boolean ignoreUnavailable = context.getOptions().ignoreUnavailable(); IndexAbstraction indexAbstraction = context.getState().getMetadata().getIndicesLookup().get(name); if (indexAbstraction == null) { @@ -1759,37 +1737,32 @@ private static boolean ensureAliasOrIndexExists(Context context, ExpressionList. 
} private static void validateAliasOrIndex(ExpressionList.Expression expression) { - if (Strings.isEmpty(expression.resolvedExpression().resource())) { - throw notFoundException(expression.get()); + if (Strings.isEmpty(expression.expression())) { + throw notFoundException(expression.expression()); } // Expressions can not start with an underscore. This is reserved for APIs. If the check gets here, the API // does not exist and the path is interpreted as an expression. If the expression begins with an underscore, // throw a specific error that is different from the [[IndexNotFoundException]], which is typically thrown // if the expression can't be found. - if (expression.resolvedExpression().resource().charAt(0) == '_') { - throw new InvalidIndexNameException(expression.get(), "must not start with '_'."); + if (expression.expression().charAt(0) == '_') { + throw new InvalidIndexNameException(expression.expression(), "must not start with '_'."); } } - private static void ensureRemoteIndicesRequireIgnoreUnavailable( - IndicesOptions options, - List resolvedExpressions - ) { + private static void ensureRemoteIndicesRequireIgnoreUnavailable(IndicesOptions options, List indexExpressions) { if (options.ignoreUnavailable()) { return; } - for (ResolvedExpression resolvedExpression : resolvedExpressions) { - var index = resolvedExpression.resource(); + for (String index : indexExpressions) { if (RemoteClusterAware.isRemoteIndexName(index)) { - failOnRemoteIndicesNotIgnoringUnavailable(resolvedExpressions); + failOnRemoteIndicesNotIgnoringUnavailable(indexExpressions); } } } - private static void failOnRemoteIndicesNotIgnoringUnavailable(List resolvedExpressions) { + private static void failOnRemoteIndicesNotIgnoringUnavailable(List indexExpressions) { List crossClusterIndices = new ArrayList<>(); - for (ResolvedExpression resolvedExpression : resolvedExpressions) { - String index = resolvedExpression.resource(); + for (String index : indexExpressions) { if 
(RemoteClusterAware.isRemoteIndexName(index)) { crossClusterIndices.add(index); } @@ -1807,13 +1780,13 @@ public static final class ExpressionList implements Iterable expressionsList; private final boolean hasWildcard; - public record Expression(ResolvedExpression resolvedExpression, boolean isWildcard, boolean isExclusion) { + public record Expression(String expression, boolean isWildcard, boolean isExclusion) { public String get() { if (isExclusion()) { // drop the leading "-" if exclusion because it is easier for callers to handle it like this - return resolvedExpression().resource().substring(1); + return expression().substring(1); } else { - return resolvedExpression().resource(); + return expression(); } } } @@ -1822,17 +1795,16 @@ public String get() { * Creates the expression iterable that can be used to easily check which expression item is a wildcard or an exclusion (or both). * The {@param context} is used to check if wildcards ought to be considered or not. */ - public ExpressionList(Context context, List resolvedExpressions) { - List expressionsList = new ArrayList<>(resolvedExpressions.size()); + public ExpressionList(Context context, List expressionStrings) { + List expressionsList = new ArrayList<>(expressionStrings.size()); boolean wildcardSeen = false; - for (ResolvedExpression resolvedExpression : resolvedExpressions) { - var expressionString = resolvedExpression.resource(); + for (String expressionString : expressionStrings) { boolean isExclusion = expressionString.startsWith("-") && wildcardSeen; if (context.getOptions().expandWildcardExpressions() && isWildcard(expressionString)) { wildcardSeen = true; - expressionsList.add(new Expression(resolvedExpression, true, isExclusion)); + expressionsList.add(new Expression(expressionString, true, isExclusion)); } else { - expressionsList.add(new Expression(resolvedExpression, false, isExclusion)); + expressionsList.add(new Expression(expressionString, false, isExclusion)); } } this.expressionsList = 
expressionsList; diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 2dc5e7c28ad0b..706f788e8a310 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -38,7 +38,6 @@ import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RecoverySource; @@ -1714,7 +1713,7 @@ interface IndexDeletionAllowedPredicate { IndexSettings indexSettings) -> canDeleteIndexContents(index); private final IndexDeletionAllowedPredicate ALWAYS_TRUE = (Index index, IndexSettings indexSettings) -> true; - public AliasFilter buildAliasFilter(ClusterState state, String index, Set resolvedExpressions) { + public AliasFilter buildAliasFilter(ClusterState state, String index, Set resolvedExpressions) { /* Being static, parseAliasFilter doesn't have access to whatever guts it needs to parse a query. Instead of passing in a bunch * of dependencies we pass in a function that can perform the parsing. 
*/ CheckedFunction filterParser = bytes -> { diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 3a900a8a9b8a6..be96b4e25d841 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.CheckedSupplier; @@ -1619,7 +1618,7 @@ public boolean isForceExecution() { } } - public AliasFilter buildAliasFilter(ClusterState state, String index, Set resolvedExpressions) { + public AliasFilter buildAliasFilter(ClusterState state, String index, Set resolvedExpressions) { return indicesService.buildAliasFilter(state, index, resolvedExpressions); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexTests.java index 1faeabb6acbf7..834bacd9e6a04 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.cluster.metadata.DataStreamTestHelper; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.settings.Settings; @@ -230,19 +229,9 @@ public void testResolveHiddenProperlyWithDateMath() { .metadata(buildMetadata(new Object[][] {}, indices)) .build(); String[] requestedIndex = new String[] { "" }; - Set resolvedIndices = resolver.resolveExpressions( - clusterState, - IndicesOptions.LENIENT_EXPAND_OPEN, - true, - requestedIndex - ); + Set resolvedIndices = resolver.resolveExpressions(clusterState, IndicesOptions.LENIENT_EXPAND_OPEN, true, requestedIndex); assertThat(resolvedIndices.size(), is(1)); - assertThat( - resolvedIndices, - contains( - oneOf(new ResolvedExpression("logs-pgsql-prod-" + todaySuffix), new ResolvedExpression("logs-pgsql-prod-" + tomorrowSuffix)) - ) - ); + assertThat(resolvedIndices, contains(oneOf("logs-pgsql-prod-" + todaySuffix, "logs-pgsql-prod-" + tomorrowSuffix))); } public void testSystemIndexAccess() { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java index fe0b7926229cb..6be5b48f9d723 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.Context; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.DateMathExpressionResolver; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.indices.SystemIndices.SystemIndexAccessLevel; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; @@ -27,6 +26,7 @@ import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Locale; @@ -52,11 
+52,11 @@ private static String formatDate(String pattern, ZonedDateTime zonedDateTime) { public void testNormal() throws Exception { int numIndexExpressions = randomIntBetween(1, 9); - List indexExpressions = new ArrayList<>(numIndexExpressions); + List indexExpressions = new ArrayList<>(numIndexExpressions); for (int i = 0; i < numIndexExpressions; i++) { - indexExpressions.add(new ResolvedExpression(randomAlphaOfLength(10))); + indexExpressions.add(randomAlphaOfLength(10)); } - List result = DateMathExpressionResolver.resolve(context, indexExpressions); + List result = DateMathExpressionResolver.resolve(context, indexExpressions); assertThat(result.size(), equalTo(indexExpressions.size())); for (int i = 0; i < indexExpressions.size(); i++) { assertThat(result.get(i), equalTo(indexExpressions.get(i))); @@ -64,25 +64,25 @@ public void testNormal() throws Exception { } public void testExpression() throws Exception { - List indexExpressions = resolvedExpressions("<.marvel-{now}>", "<.watch_history-{now}>", ""); - List result = DateMathExpressionResolver.resolve(context, indexExpressions); + List indexExpressions = Arrays.asList("<.marvel-{now}>", "<.watch_history-{now}>", ""); + List result = DateMathExpressionResolver.resolve(context, indexExpressions); assertThat(result.size(), equalTo(3)); - assertThat(result.get(0).resource(), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - assertThat(result.get(1).resource(), equalTo(".watch_history-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - assertThat(result.get(2).resource(), equalTo("logstash-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + assertThat(result.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + assertThat(result.get(1), equalTo(".watch_history-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + assertThat(result.get(2), equalTo("logstash-" + 
formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); } public void testExpressionWithWildcardAndExclusions() { - List indexExpressions = resolvedExpressions( + List indexExpressions = Arrays.asList( "<-before-inner-{now}>", "-", "", "<-after-inner-{now}>", "-" ); - List result = DateMathExpressionResolver.resolve(context, indexExpressions); + List result = DateMathExpressionResolver.resolve(context, indexExpressions); assertThat( - result.stream().map(ResolvedExpression::resource).toList(), + result, Matchers.contains( equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))), equalTo("-"), // doesn't evaluate because it doesn't start with "<" and it is not an exclusion @@ -98,7 +98,7 @@ public void testExpressionWithWildcardAndExclusions() { ); result = DateMathExpressionResolver.resolve(noWildcardExpandContext, indexExpressions); assertThat( - result.stream().map(ResolvedExpression::resource).toList(), + result, Matchers.contains( equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))), // doesn't evaluate because it doesn't start with "<" and there can't be exclusions without wildcard expansion @@ -112,24 +112,21 @@ public void testExpressionWithWildcardAndExclusions() { } public void testEmpty() throws Exception { - List result = DateMathExpressionResolver.resolve(context, List.of()); + List result = DateMathExpressionResolver.resolve(context, Collections.emptyList()); assertThat(result.size(), equalTo(0)); } public void testExpression_Static() throws Exception { - List result = DateMathExpressionResolver.resolve(context, resolvedExpressions("<.marvel-test>")); + List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-test>")); assertThat(result.size(), equalTo(1)); - assertThat(result.get(0).resource(), equalTo(".marvel-test")); + assertThat(result.get(0), equalTo(".marvel-test")); } public void testExpression_MultiParts() throws Exception { - List 
result = DateMathExpressionResolver.resolve( - context, - resolvedExpressions("<.text1-{now/d}-text2-{now/M}>") - ); + List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.text1-{now/d}-text2-{now/M}>")); assertThat(result.size(), equalTo(1)); assertThat( - result.get(0).resource(), + result.get(0), equalTo( ".text1-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())) @@ -140,42 +137,33 @@ public void testExpression_MultiParts() throws Exception { } public void testExpression_CustomFormat() throws Exception { - List results = DateMathExpressionResolver.resolve( - context, - resolvedExpressions("<.marvel-{now/d{yyyy.MM.dd}}>") - ); + List results = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{yyyy.MM.dd}}>")); assertThat(results.size(), equalTo(1)); - assertThat(results.get(0).resource(), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + assertThat(results.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); } public void testExpression_EscapeStatic() throws Exception { - List result = DateMathExpressionResolver.resolve(context, resolvedExpressions("<.mar\\{v\\}el-{now/d}>")); + List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.mar\\{v\\}el-{now/d}>")); assertThat(result.size(), equalTo(1)); - assertThat(result.get(0).resource(), equalTo(".mar{v}el-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + assertThat(result.get(0), equalTo(".mar{v}el-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); } public void testExpression_EscapeDateFormat() throws Exception { - List result = DateMathExpressionResolver.resolve( - context, - resolvedExpressions("<.marvel-{now/d{'\\{year\\}'yyyy}}>") - ); + List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{'\\{year\\}'yyyy}}>")); assertThat(result.size(), equalTo(1)); - 
assertThat(result.get(0).resource(), equalTo(".marvel-" + formatDate("'{year}'yyyy", dateFromMillis(context.getStartTime())))); + assertThat(result.get(0), equalTo(".marvel-" + formatDate("'{year}'yyyy", dateFromMillis(context.getStartTime())))); } public void testExpression_MixedArray() throws Exception { - List result = DateMathExpressionResolver.resolve( + List result = DateMathExpressionResolver.resolve( context, - resolvedExpressions("name1", "<.marvel-{now/d}>", "name2", "<.logstash-{now/M{uuuu.MM}}>") + Arrays.asList("name1", "<.marvel-{now/d}>", "name2", "<.logstash-{now/M{uuuu.MM}}>") ); assertThat(result.size(), equalTo(4)); - assertThat(result.get(0).resource(), equalTo("name1")); - assertThat(result.get(1).resource(), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - assertThat(result.get(2).resource(), equalTo("name2")); - assertThat( - result.get(3).resource(), - equalTo(".logstash-" + formatDate("uuuu.MM", dateFromMillis(context.getStartTime()).withDayOfMonth(1))) - ); + assertThat(result.get(0), equalTo("name1")); + assertThat(result.get(1), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + assertThat(result.get(2), equalTo("name2")); + assertThat(result.get(3), equalTo(".logstash-" + formatDate("uuuu.MM", dateFromMillis(context.getStartTime()).withDayOfMonth(1)))); } public void testExpression_CustomTimeZoneInIndexName() throws Exception { @@ -214,19 +202,19 @@ public void testExpression_CustomTimeZoneInIndexName() throws Exception { name -> false, name -> false ); - List results = DateMathExpressionResolver.resolve( + List results = DateMathExpressionResolver.resolve( context, - resolvedExpressions("<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getId() + "}}>") + Arrays.asList("<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getId() + "}}>") ); assertThat(results.size(), equalTo(1)); logger.info("timezone: [{}], now [{}], name: [{}]", timeZone, now, results.get(0)); - 
assertThat(results.get(0).resource(), equalTo(".marvel-" + formatDate("uuuu.MM.dd", now.withZoneSameInstant(timeZone)))); + assertThat(results.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", now.withZoneSameInstant(timeZone)))); } public void testExpressionInvalidUnescaped() throws Exception { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, resolvedExpressions("<.mar}vel-{now/d}>")) + () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.mar}vel-{now/d}>")) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("invalid character at position [")); @@ -235,7 +223,7 @@ public void testExpressionInvalidUnescaped() throws Exception { public void testExpressionInvalidDateMathFormat() throws Exception { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, resolvedExpressions("<.marvel-{now/d{}>")) + () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}>")) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("date math placeholder is open ended")); @@ -244,7 +232,7 @@ public void testExpressionInvalidDateMathFormat() throws Exception { public void testExpressionInvalidEmptyDateMathFormat() throws Exception { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, resolvedExpressions("<.marvel-{now/d{}}>")) + () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}}>")) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("missing date format")); @@ -253,13 +241,10 @@ public void testExpressionInvalidEmptyDateMathFormat() throws Exception { public void testExpressionInvalidOpenEnded() throws Exception { 
Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, resolvedExpressions("<.marvel-{now/d>")) + () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d>")) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("date math placeholder is open ended")); } - private List resolvedExpressions(String... expressions) { - return Arrays.stream(expressions).map(ResolvedExpression::new).toList(); - } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ExpressionListTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ExpressionListTests.java index 1df3bf4132b60..1ca59ff402bd8 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/ExpressionListTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ExpressionListTests.java @@ -13,12 +13,10 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.Context; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ExpressionList; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ExpressionList.Expression; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; -import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.function.Supplier; @@ -41,13 +39,10 @@ public void testEmpty() { public void testExplicitSingleNameExpression() { for (IndicesOptions indicesOptions : List.of(getExpandWildcardsIndicesOptions(), getNoExpandWildcardsIndicesOptions())) { for (String expressionString : List.of("non_wildcard", "-non_exclusion")) { - ExpressionList expressionList = new ExpressionList( - getContextWithOptions(indicesOptions), - resolvedExpressions(expressionString) - ); + ExpressionList 
expressionList = new ExpressionList(getContextWithOptions(indicesOptions), List.of(expressionString)); assertThat(expressionList.hasWildcard(), is(false)); if (randomBoolean()) { - expressionList = new ExpressionList(getContextWithOptions(indicesOptions), resolvedExpressions((expressionString))); + expressionList = new ExpressionList(getContextWithOptions(indicesOptions), List.of(expressionString)); } Iterator expressionIterator = expressionList.iterator(); assertThat(expressionIterator.hasNext(), is(true)); @@ -67,14 +62,11 @@ public void testWildcardSingleExpression() { for (String wildcardTest : List.of("*", "a*", "*b", "a*b", "a-*b", "a*-b", "-*", "-a*", "-*b", "**", "*-*")) { ExpressionList expressionList = new ExpressionList( getContextWithOptions(getExpandWildcardsIndicesOptions()), - resolvedExpressions(wildcardTest) + List.of(wildcardTest) ); assertThat(expressionList.hasWildcard(), is(true)); if (randomBoolean()) { - expressionList = new ExpressionList( - getContextWithOptions(getExpandWildcardsIndicesOptions()), - resolvedExpressions(wildcardTest) - ); + expressionList = new ExpressionList(getContextWithOptions(getExpandWildcardsIndicesOptions()), List.of(wildcardTest)); } Iterator expressionIterator = expressionList.iterator(); assertThat(expressionIterator.hasNext(), is(true)); @@ -90,13 +82,13 @@ public void testWildcardSingleExpression() { } public void testWildcardLongerExpression() { - List onlyExplicits = randomList(7, () -> new ResolvedExpression(randomAlphaOfLengthBetween(0, 5))); - ResolvedExpression wildcard = new ResolvedExpression(randomFrom("*", "*b", "-*", "*-", "c*", "a*b", "**")); - List expressionList = new ArrayList<>(onlyExplicits.size() + 1); + List onlyExplicits = randomList(7, () -> randomAlphaOfLengthBetween(0, 5)); + String wildcard = randomFrom("*", "*b", "-*", "*-", "c*", "a*b", "**"); + List expressionList = new ArrayList<>(onlyExplicits.size() + 1); expressionList.addAll(randomSubsetOf(onlyExplicits)); int wildcardPos = 
expressionList.size(); expressionList.add(wildcard); - for (ResolvedExpression item : onlyExplicits) { + for (String item : onlyExplicits) { if (expressionList.contains(item) == false) { expressionList.add(item); } @@ -114,18 +106,18 @@ public void testWildcardLongerExpression() { } else { assertThat(expression.isWildcard(), is(true)); } - assertThat(expression.get(), is(expressionList.get(i++).resource())); + assertThat(expression.get(), is(expressionList.get(i++))); } } public void testWildcardsNoExclusionExpressions() { - for (List wildcardExpression : List.of( - resolvedExpressions("*"), - resolvedExpressions("a", "*"), - resolvedExpressions("-b", "*c"), - resolvedExpressions("-", "a", "c*"), - resolvedExpressions("*", "a*", "*b"), - resolvedExpressions("-*", "a", "b*") + for (List wildcardExpression : List.of( + List.of("*"), + List.of("a", "*"), + List.of("-b", "*c"), + List.of("-", "a", "c*"), + List.of("*", "a*", "*b"), + List.of("-*", "a", "b*") )) { ExpressionList expressionList = new ExpressionList( getContextWithOptions(getExpandWildcardsIndicesOptions()), @@ -138,25 +130,25 @@ public void testWildcardsNoExclusionExpressions() { int i = 0; for (Expression expression : expressionList) { assertThat(expression.isExclusion(), is(false)); - if (wildcardExpression.get(i).resource().contains("*")) { + if (wildcardExpression.get(i).contains("*")) { assertThat(expression.isWildcard(), is(true)); } else { assertThat(expression.isWildcard(), is(false)); } - assertThat(expression.get(), is(wildcardExpression.get(i++).resource())); + assertThat(expression.get(), is(wildcardExpression.get(i++))); } } } public void testWildcardExpressionNoExpandOptions() { - for (List wildcardExpression : List.of( - resolvedExpressions("*"), - resolvedExpressions("a", "*"), - resolvedExpressions("-b", "*c"), - resolvedExpressions("*d", "-"), - resolvedExpressions("*", "-*"), - resolvedExpressions("-", "a", "c*"), - resolvedExpressions("*", "a*", "*b") + for (List wildcardExpression : 
List.of( + List.of("*"), + List.of("a", "*"), + List.of("-b", "*c"), + List.of("*d", "-"), + List.of("*", "-*"), + List.of("-", "a", "c*"), + List.of("*", "a*", "*b") )) { ExpressionList expressionList = new ExpressionList( getContextWithOptions(getNoExpandWildcardsIndicesOptions()), @@ -170,7 +162,7 @@ public void testWildcardExpressionNoExpandOptions() { for (Expression expression : expressionList) { assertThat(expression.isWildcard(), is(false)); assertThat(expression.isExclusion(), is(false)); - assertThat(expression.get(), is(wildcardExpression.get(i++).resource())); + assertThat(expression.get(), is(wildcardExpression.get(i++))); } } } @@ -180,17 +172,17 @@ public void testSingleExclusionExpression() { int wildcardPos = randomIntBetween(0, 3); String exclusion = randomFrom("-*", "-", "-c*", "-ab", "--"); int exclusionPos = randomIntBetween(wildcardPos + 1, 7); - List exclusionExpression = new ArrayList<>(); + List exclusionExpression = new ArrayList<>(); for (int i = 0; i < wildcardPos; i++) { - exclusionExpression.add(new ResolvedExpression(randomAlphaOfLengthBetween(0, 5))); + exclusionExpression.add(randomAlphaOfLengthBetween(0, 5)); } - exclusionExpression.add(new ResolvedExpression(wildcard)); + exclusionExpression.add(wildcard); for (int i = wildcardPos + 1; i < exclusionPos; i++) { - exclusionExpression.add(new ResolvedExpression(randomAlphaOfLengthBetween(0, 5))); + exclusionExpression.add(randomAlphaOfLengthBetween(0, 5)); } - exclusionExpression.add(new ResolvedExpression(exclusion)); + exclusionExpression.add(exclusion); for (int i = 0; i < randomIntBetween(0, 3); i++) { - exclusionExpression.add(new ResolvedExpression(randomAlphaOfLengthBetween(0, 5))); + exclusionExpression.add(randomAlphaOfLengthBetween(0, 5)); } ExpressionList expressionList = new ExpressionList(getContextWithOptions(getExpandWildcardsIndicesOptions()), exclusionExpression); if (randomBoolean()) { @@ -201,28 +193,28 @@ public void testSingleExclusionExpression() { if (i == 
wildcardPos) { assertThat(expression.isWildcard(), is(true)); assertThat(expression.isExclusion(), is(false)); - assertThat(expression.get(), is(exclusionExpression.get(i++).resource())); + assertThat(expression.get(), is(exclusionExpression.get(i++))); } else if (i == exclusionPos) { assertThat(expression.isExclusion(), is(true)); - assertThat(expression.isWildcard(), is(exclusionExpression.get(i).resource().contains("*"))); - assertThat(expression.get(), is(exclusionExpression.get(i++).resource().substring(1))); + assertThat(expression.isWildcard(), is(exclusionExpression.get(i).contains("*"))); + assertThat(expression.get(), is(exclusionExpression.get(i++).substring(1))); } else { assertThat(expression.isWildcard(), is(false)); assertThat(expression.isExclusion(), is(false)); - assertThat(expression.get(), is(exclusionExpression.get(i++).resource())); + assertThat(expression.get(), is(exclusionExpression.get(i++))); } } } public void testExclusionsExpression() { - for (Tuple, List> exclusionExpression : List.of( - new Tuple<>(resolvedExpressions("-a", "*", "-a"), List.of(false, false, true)), - new Tuple<>(resolvedExpressions("-b*", "c", "-a"), List.of(false, false, true)), - new Tuple<>(resolvedExpressions("*d", "-", "*b"), List.of(false, true, false)), - new Tuple<>(resolvedExpressions("-", "--", "-*", "", "-*"), List.of(false, false, false, false, true)), - new Tuple<>(resolvedExpressions("*-", "-*", "a", "-b"), List.of(false, true, false, true)), - new Tuple<>(resolvedExpressions("a", "-b", "-*", "-b", "*", "-b"), List.of(false, false, false, true, false, true)), - new Tuple<>(resolvedExpressions("-a", "*d", "-a", "-*b", "-b", "--"), List.of(false, false, true, true, true, true)) + for (Tuple, List> exclusionExpression : List.of( + new Tuple<>(List.of("-a", "*", "-a"), List.of(false, false, true)), + new Tuple<>(List.of("-b*", "c", "-a"), List.of(false, false, true)), + new Tuple<>(List.of("*d", "-", "*b"), List.of(false, true, false)), + new 
Tuple<>(List.of("-", "--", "-*", "", "-*"), List.of(false, false, false, false, true)), + new Tuple<>(List.of("*-", "-*", "a", "-b"), List.of(false, true, false, true)), + new Tuple<>(List.of("a", "-b", "-*", "-b", "*", "-b"), List.of(false, false, false, true, false, true)), + new Tuple<>(List.of("-a", "*d", "-a", "-*b", "-b", "--"), List.of(false, false, true, true, true, true)) )) { ExpressionList expressionList = new ExpressionList( getContextWithOptions(getExpandWildcardsIndicesOptions()), @@ -235,11 +227,11 @@ public void testExclusionsExpression() { for (Expression expression : expressionList) { boolean isExclusion = exclusionExpression.v2().get(i); assertThat(expression.isExclusion(), is(isExclusion)); - assertThat(expression.isWildcard(), is(exclusionExpression.v1().get(i).resource().contains("*"))); + assertThat(expression.isWildcard(), is(exclusionExpression.v1().get(i).contains("*"))); if (isExclusion) { - assertThat(expression.get(), is(exclusionExpression.v1().get(i++).resource().substring(1))); + assertThat(expression.get(), is(exclusionExpression.v1().get(i++).substring(1))); } else { - assertThat(expression.get(), is(exclusionExpression.v1().get(i++).resource())); + assertThat(expression.get(), is(exclusionExpression.v1().get(i++))); } } } @@ -314,8 +306,4 @@ private Context getContextWithOptions(IndicesOptions indicesOptions) { when(context.getOptions()).thenReturn(indicesOptions); return context; } - - private List resolvedExpressions(String... 
expressions) { - return Arrays.stream(expressions).map(ResolvedExpression::new).toList(); - } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index da19bd68e288a..d58de5ca65ea0 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata.State; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -1581,27 +1580,16 @@ public void testResolveExpressions() { .put(indexBuilder("test-1").state(State.OPEN).putAlias(AliasMetadata.builder("alias-1"))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); + assertEquals(new HashSet<>(Arrays.asList("alias-0", "alias-1")), indexNameExpressionResolver.resolveExpressions(state, "alias-*")); assertEquals( - Set.of(new ResolvedExpression("alias-0"), new ResolvedExpression("alias-1")), - indexNameExpressionResolver.resolveExpressions(state, "alias-*") - ); - assertEquals( - Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("alias-0"), new ResolvedExpression("alias-1")), + new HashSet<>(Arrays.asList("test-0", "alias-0", "alias-1")), indexNameExpressionResolver.resolveExpressions(state, "test-0", "alias-*") ); assertEquals( - Set.of( - new ResolvedExpression("test-0"), - new ResolvedExpression("test-1"), - new ResolvedExpression("alias-0"), - new ResolvedExpression("alias-1") - ), + new HashSet<>(Arrays.asList("test-0", 
"test-1", "alias-0", "alias-1")), indexNameExpressionResolver.resolveExpressions(state, "test-*", "alias-*") ); - assertEquals( - Set.of(new ResolvedExpression("test-1"), new ResolvedExpression("alias-1")), - indexNameExpressionResolver.resolveExpressions(state, "*-1") - ); + assertEquals(new HashSet<>(Arrays.asList("test-1", "alias-1")), indexNameExpressionResolver.resolveExpressions(state, "*-1")); } public void testFilteringAliases() { @@ -1610,25 +1598,16 @@ public void testFilteringAliases() { .put(indexBuilder("test-1").state(State.OPEN).putAlias(AliasMetadata.builder("alias-1"))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - Set resolvedExpressions = Set.of(new ResolvedExpression("alias-0"), new ResolvedExpression("alias-1")); + Set resolvedExpressions = new HashSet<>(Arrays.asList("alias-0", "alias-1")); String[] strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions); assertArrayEquals(new String[] { "alias-0" }, strings); // concrete index supersedes filtering alias - resolvedExpressions = Set.of( - new ResolvedExpression("test-0"), - new ResolvedExpression("alias-0"), - new ResolvedExpression("alias-1") - ); + resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "alias-0", "alias-1")); strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions); assertNull(strings); - resolvedExpressions = Set.of( - new ResolvedExpression("test-0"), - new ResolvedExpression("test-1"), - new ResolvedExpression("alias-0"), - new ResolvedExpression("alias-1") - ); + resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "test-1", "alias-0", "alias-1")); strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions); assertNull(strings); } @@ -1642,7 +1621,7 @@ public void testIndexAliases() { .putAlias(AliasMetadata.builder("test-alias-non-filtering")) ); ClusterState state = ClusterState.builder(new 
ClusterName("_name")).metadata(mdBuilder).build(); - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "test-*"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "test-*"); String[] strings = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, true, resolvedExpressions); Arrays.sort(strings); @@ -1677,28 +1656,28 @@ public void testIndexAliasesDataStreamAliases() { ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); { // Only resolve aliases with with that refer to dataStreamName1 - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); String index = backingIndex1.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases(state, index, x -> true, x -> true, true, resolvedExpressions); assertThat(result, arrayContainingInAnyOrder("logs_foo", "logs", "logs_bar")); } { // Only resolve aliases with with that refer to dataStreamName2 - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); String index = backingIndex2.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases(state, index, x -> true, x -> true, true, resolvedExpressions); assertThat(result, arrayContainingInAnyOrder("logs_baz", "logs_baz2")); } { // Null is returned, because skipping identity check and resolvedExpressions contains the backing index name - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); String index = backingIndex2.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases(state, index, x -> true, x -> true, false, 
resolvedExpressions); assertThat(result, nullValue()); } { // Null is returned, because the wildcard expands to a list of aliases containing an unfiltered alias for dataStreamName1 - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); String index = backingIndex1.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases( state, @@ -1712,7 +1691,7 @@ public void testIndexAliasesDataStreamAliases() { } { // Null is returned, because an unfiltered alias is targeting the same data stream - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "logs_bar", "logs"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "logs_bar", "logs"); String index = backingIndex1.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases( state, @@ -1726,7 +1705,7 @@ public void testIndexAliasesDataStreamAliases() { } { // The filtered alias is returned because although we target the data stream name, skipIdentity is true - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, dataStreamName1, "logs"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, dataStreamName1, "logs"); String index = backingIndex1.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases( state, @@ -1740,7 +1719,7 @@ public void testIndexAliasesDataStreamAliases() { } { // Null is returned because we target the data stream name and skipIdentity is false - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, dataStreamName1, "logs"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, dataStreamName1, "logs"); String index = backingIndex1.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases( state, @@ -1763,13 +1742,13 @@ public 
void testIndexAliasesSkipIdentity() { ); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - Set resolvedExpressions = Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("test-alias")); + Set resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "test-alias")); String[] aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, false, resolvedExpressions); assertNull(aliases); aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, true, resolvedExpressions); assertArrayEquals(new String[] { "test-alias" }, aliases); - resolvedExpressions = Collections.singleton(new ResolvedExpression("other-alias")); + resolvedExpressions = Collections.singleton("other-alias"); aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, false, resolvedExpressions); assertArrayEquals(new String[] { "other-alias" }, aliases); aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, true, resolvedExpressions); @@ -1790,7 +1769,7 @@ public void testConcreteWriteIndexSuccessful() { x -> true, x -> true, true, - Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("test-alias")) + new HashSet<>(Arrays.asList("test-0", "test-alias")) ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1872,7 +1851,7 @@ public void testConcreteWriteIndexWithWildcardExpansion() { x -> true, x -> true, true, - Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("test-1"), new ResolvedExpression("test-alias")) + new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias")) ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1910,7 +1889,7 @@ public void testConcreteWriteIndexWithNoWriteIndexWithSingleIndex() { x -> true, x -> true, true, - Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("test-alias")) + new 
HashSet<>(Arrays.asList("test-0", "test-alias")) ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1946,7 +1925,7 @@ public void testConcreteWriteIndexWithNoWriteIndexWithMultipleIndices() { x -> true, x -> true, true, - Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("test-1"), new ResolvedExpression("test-alias")) + new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias")) ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1987,7 +1966,7 @@ public void testAliasResolutionNotAllowingMultipleIndices() { x -> true, x -> true, true, - Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("test-1"), new ResolvedExpression("test-alias")) + new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias")) ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java index 25ed5fb2bdab2..982394ca31b1c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata.State; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.indices.SystemIndices.SystemIndexAccessLevel; @@ -21,13 +20,13 @@ import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.List; -import java.util.Set; import java.util.function.Predicate; -import java.util.stream.Collectors; 
import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createBackingIndex; import static org.elasticsearch.common.util.set.Sets.newHashSet; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -51,52 +50,50 @@ public void testConvertWildcardsJustIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testXXX"))), - equalTo(resolvedExpressionsSet("testXXX")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testXXX"))), + equalTo(newHashSet("testXXX")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testXXX", "testYYY"))), - equalTo(resolvedExpressionsSet("testXXX", "testYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "testYYY"))), + equalTo(newHashSet("testXXX", "testYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testXXX", "ku*"))), - equalTo(resolvedExpressionsSet("testXXX", "kuku")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "ku*"))), + equalTo(newHashSet("testXXX", "kuku")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("test*"))), - equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*"))), + equalTo(newHashSet("testXXX", "testXYY", "testYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, 
resolvedExpressions("testX*"))), - equalTo(resolvedExpressionsSet("testXXX", "testXYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + equalTo(newHashSet("testXXX", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testX*", "kuku"))), - equalTo(resolvedExpressionsSet("testXXX", "testXYY", "kuku")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testX*", "kuku"))), + equalTo(newHashSet("testXXX", "testXYY", "kuku")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("*"))), - equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY", "kuku")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*"))), + equalTo(newHashSet("testXXX", "testXYY", "testYYY", "kuku")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("*", "-kuku"))), - equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("*", "-kuku"))), + equalTo(newHashSet("testXXX", "testXYY", "testYYY")) ); assertThat( newHashSet( IndexNameExpressionResolver.WildcardExpressionResolver.resolve( context, - resolvedExpressions("testX*", "-doe", "-testXXX", "-testYYY") + Arrays.asList("testX*", "-doe", "-testXXX", "-testYYY") ) ), - equalTo(resolvedExpressionsSet("testXYY")) + equalTo(newHashSet("testXYY")) ); if (indicesOptions == IndicesOptions.lenientExpandOpen()) { assertThat( - newHashSet( - IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testXXX", "-testXXX")) - ), - equalTo(resolvedExpressionsSet("testXXX", "-testXXX")) + 
newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "-testXXX"))), + equalTo(newHashSet("testXXX", "-testXXX")) ); } else if (indicesOptions == IndicesOptions.strictExpandOpen()) { IndexNotFoundException infe = expectThrows( @@ -106,8 +103,8 @@ public void testConvertWildcardsJustIndicesTests() { assertEquals("-testXXX", infe.getIndex().getName()); } assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testXXX", "-testX*"))), - equalTo(resolvedExpressionsSet("testXXX")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "-testX*"))), + equalTo(newHashSet("testXXX")) ); } @@ -125,24 +122,24 @@ public void testConvertWildcardsTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testYY*", "alias*"))), - equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testYY*", "alias*"))), + equalTo(newHashSet("testXXX", "testXYY", "testYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("-kuku"))), - equalTo(resolvedExpressionsSet("-kuku")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("-kuku"))), + equalTo(newHashSet("-kuku")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("test*", "-testYYY"))), - equalTo(resolvedExpressionsSet("testXXX", "testXYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("test*", "-testYYY"))), + equalTo(newHashSet("testXXX", "testXYY")) ); assertThat( - 
newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testX*", "testYYY"))), - equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testX*", "testYYY"))), + equalTo(newHashSet("testXXX", "testXYY", "testYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testYYY", "testX*"))), - equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testYYY", "testX*"))), + equalTo(newHashSet("testXXX", "testXYY", "testYYY")) ); } @@ -162,8 +159,8 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testX*"))), - equalTo(resolvedExpressionsSet("testXXX", "testXXY", "testXYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + equalTo(newHashSet("testXXX", "testXXY", "testXYY")) ); context = new IndexNameExpressionResolver.Context( state, @@ -171,8 +168,8 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testX*"))), - equalTo(resolvedExpressionsSet("testXYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + equalTo(newHashSet("testXYY")) ); context = new IndexNameExpressionResolver.Context( state, @@ -180,8 +177,8 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - 
newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testX*"))), - equalTo(resolvedExpressionsSet("testXXX", "testXXY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + equalTo(newHashSet("testXXX", "testXXY")) ); context = new IndexNameExpressionResolver.Context( state, @@ -220,27 +217,28 @@ public void testMultipleWildcards() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("test*X*"))), - equalTo(resolvedExpressionsSet("testXXX", "testXXY", "testXYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*X*"))), + equalTo(newHashSet("testXXX", "testXXY", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("test*X*Y"))), - equalTo(resolvedExpressionsSet("testXXY", "testXYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*X*Y"))), + equalTo(newHashSet("testXXY", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("kuku*Y*"))), - equalTo(resolvedExpressionsSet("kukuYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("kuku*Y*"))), + equalTo(newHashSet("kukuYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("*Y*"))), - equalTo(resolvedExpressionsSet("testXXY", "testXYY", "testYYY", "kukuYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*Y*"))), + equalTo(newHashSet("testXXY", "testXYY", "testYYY", "kukuYYY")) ); assertThat( - 
newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("test*Y*X"))).size(), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*Y*X"))) + .size(), equalTo(0) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("*Y*X"))).size(), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*Y*X"))).size(), equalTo(0) ); } @@ -259,11 +257,11 @@ public void testAll() { ); assertThat( newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), - equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) + equalTo(newHashSet("testXXX", "testXYY", "testYYY")) ); assertThat( newHashSet(IndexNameExpressionResolver.resolveExpressions(context, "_all")), - equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) + equalTo(newHashSet("testXXX", "testXYY", "testYYY")) ); IndicesOptions noExpandOptions = IndicesOptions.fromOptions( randomBoolean(), @@ -300,7 +298,7 @@ public void testAllAliases() { IndicesOptions.lenientExpandOpen(), // don't include hidden SystemIndexAccessLevel.NONE ); - assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(Set.of())); + assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(newHashSet())); } { @@ -321,7 +319,7 @@ public void testAllAliases() { ); assertThat( newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), - equalTo(resolvedExpressionsSet("index-visible-alias")) + equalTo(newHashSet("index-visible-alias")) ); } } @@ -364,7 +362,7 @@ public void testAllDataStreams() { assertThat( newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), - 
equalTo(resolvedExpressionsSet(DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis))) + equalTo(newHashSet(DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis))) ); } @@ -387,7 +385,7 @@ public void testAllDataStreams() { NONE ); - assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(Set.of())); + assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(newHashSet())); } } @@ -508,16 +506,16 @@ public void testResolveAliases() { ); { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAndAliasesContext, - resolvedExpressions("foo_a*") + Collections.singletonList("foo_a*") ); - assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_index", "bar_index"))); + assertThat(indices, containsInAnyOrder("foo_index", "bar_index")); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( skipAliasesLenientContext, - resolvedExpressions("foo_a*") + Collections.singletonList("foo_a*") ); assertEquals(0, indices.size()); } @@ -526,45 +524,45 @@ public void testResolveAliases() { IndexNotFoundException.class, () -> IndexNameExpressionResolver.WildcardExpressionResolver.resolve( skipAliasesStrictContext, - resolvedExpressions("foo_a*") + Collections.singletonList("foo_a*") ) ); assertEquals("foo_a*", infe.getIndex().getName()); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAndAliasesContext, - resolvedExpressions("foo*") + Collections.singletonList("foo*") ); - assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_foo", "foo_index", 
"bar_index"))); + assertThat(indices, containsInAnyOrder("foo_foo", "foo_index", "bar_index")); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( skipAliasesLenientContext, - resolvedExpressions("foo*") + Collections.singletonList("foo*") ); - assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_foo", "foo_index"))); + assertThat(indices, containsInAnyOrder("foo_foo", "foo_index")); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( skipAliasesStrictContext, - resolvedExpressions("foo*") + Collections.singletonList("foo*") ); - assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_foo", "foo_index"))); + assertThat(indices, containsInAnyOrder("foo_foo", "foo_index")); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAndAliasesContext, - resolvedExpressions("foo_alias") + Collections.singletonList("foo_alias") ); - assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_alias"))); + assertThat(indices, containsInAnyOrder("foo_alias")); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( skipAliasesLenientContext, - resolvedExpressions("foo_alias") + Collections.singletonList("foo_alias") ); - assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_alias"))); + assertThat(indices, containsInAnyOrder("foo_alias")); } { IllegalArgumentException iae = expectThrows( @@ -583,11 +581,11 @@ public void testResolveAliases() { SystemIndexAccessLevel.NONE ); { - Collection indices = 
IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( noExpandNoAliasesContext, - resolvedExpressions("foo_alias") + List.of("foo_alias") ); - assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_alias"))); + assertThat(indices, containsInAnyOrder("foo_alias")); } IndicesOptions strictNoExpandNoAliasesIndicesOptions = IndicesOptions.fromOptions( false, @@ -656,18 +654,18 @@ public void testResolveDataStreams() { ); // data streams are not included but expression matches the data stream - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAndAliasesContext, - resolvedExpressions("foo_*") + Collections.singletonList("foo_*") ); - assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_index", "foo_foo", "bar_index"))); + assertThat(indices, containsInAnyOrder("foo_index", "foo_foo", "bar_index")); // data streams are not included and expression doesn't match the data steram indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAndAliasesContext, - resolvedExpressions("bar_*") + Collections.singletonList("bar_*") ); - assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("bar_bar", "bar_index"))); + assertThat(indices, containsInAnyOrder("bar_bar", "bar_index")); } { @@ -693,39 +691,35 @@ public void testResolveDataStreams() { ); // data stream's corresponding backing indices are resolved - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAliasesAndDataStreamsContext, - resolvedExpressions("foo_*") + Collections.singletonList("foo_*") ); assertThat( - newHashSet(indices), - equalTo( - resolvedExpressionsSet( - "foo_index", - "bar_index", - "foo_foo", - 
DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), - DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) - ) + indices, + containsInAnyOrder( + "foo_index", + "bar_index", + "foo_foo", + DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), + DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) ) ); // include all wildcard adds the data stream's backing indices indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAliasesAndDataStreamsContext, - resolvedExpressions("*") + Collections.singletonList("*") ); assertThat( - newHashSet(indices), - equalTo( - resolvedExpressionsSet( - "foo_index", - "bar_index", - "foo_foo", - "bar_bar", - DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), - DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) - ) + indices, + containsInAnyOrder( + "foo_index", + "bar_index", + "foo_foo", + "bar_bar", + DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), + DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) ) ); } @@ -754,39 +748,35 @@ public void testResolveDataStreams() { ); // data stream's corresponding backing indices are resolved - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAliasesDataStreamsAndHiddenIndices, - resolvedExpressions("foo_*") + Collections.singletonList("foo_*") ); assertThat( - newHashSet(indices), - equalTo( - resolvedExpressionsSet( - "foo_index", - "bar_index", - "foo_foo", - DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), - DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) - ) + indices, + containsInAnyOrder( + "foo_index", + "bar_index", + "foo_foo", + DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), + DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) ) ); // include all 
wildcard adds the data stream's backing indices indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAliasesDataStreamsAndHiddenIndices, - resolvedExpressions("*") + Collections.singletonList("*") ); assertThat( - newHashSet(indices), - equalTo( - resolvedExpressionsSet( - "foo_index", - "bar_index", - "foo_foo", - "bar_bar", - DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), - DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) - ) + indices, + containsInAnyOrder( + "foo_index", + "bar_index", + "foo_foo", + "bar_bar", + DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), + DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) ) ); } @@ -818,28 +808,16 @@ public void testMatchesConcreteIndicesWildcardAndAliases() { SystemIndexAccessLevel.NONE ); - Collection matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - indicesAndAliasesContext, - List.of(new ResolvedExpression("*")) - ); - assertThat(newHashSet(matches), equalTo(resolvedExpressionsSet("bar_bar", "foo_foo", "foo_index", "bar_index"))); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(onlyIndicesContext, List.of(new ResolvedExpression("*"))); - assertThat(newHashSet(matches), equalTo(resolvedExpressionsSet("bar_bar", "foo_foo", "foo_index", "bar_index"))); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - indicesAndAliasesContext, - List.of(new ResolvedExpression("foo*")) - ); - assertThat(newHashSet(matches), equalTo(resolvedExpressionsSet("foo_foo", "foo_index", "bar_index"))); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - onlyIndicesContext, - List.of(new ResolvedExpression("foo*")) - ); - assertThat(newHashSet(matches), equalTo(resolvedExpressionsSet("foo_foo", "foo_index"))); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - indicesAndAliasesContext, - List.of(new 
ResolvedExpression("foo_alias")) - ); - assertThat(newHashSet(matches), equalTo(resolvedExpressionsSet("foo_alias"))); + Collection matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("*")); + assertThat(matches, containsInAnyOrder("bar_bar", "foo_foo", "foo_index", "bar_index")); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(onlyIndicesContext, List.of("*")); + assertThat(matches, containsInAnyOrder("bar_bar", "foo_foo", "foo_index", "bar_index")); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("foo*")); + assertThat(matches, containsInAnyOrder("foo_foo", "foo_index", "bar_index")); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(onlyIndicesContext, List.of("foo*")); + assertThat(matches, containsInAnyOrder("foo_foo", "foo_index")); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("foo_alias")); + assertThat(matches, containsInAnyOrder("foo_alias")); IllegalArgumentException iae = expectThrows( IllegalArgumentException.class, () -> IndexNameExpressionResolver.resolveExpressions(onlyIndicesContext, "foo_alias") @@ -862,19 +840,8 @@ private static IndexMetadata.Builder indexBuilder(String index) { private static void assertWildcardResolvesToEmpty(IndexNameExpressionResolver.Context context, String wildcardExpression) { IndexNotFoundException infe = expectThrows( IndexNotFoundException.class, - () -> IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - context, - List.of(new ResolvedExpression(wildcardExpression)) - ) + () -> IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, List.of(wildcardExpression)) ); assertEquals(wildcardExpression, infe.getIndex().getName()); } - - private List resolvedExpressions(String... 
expressions) { - return Arrays.stream(expressions).map(ResolvedExpression::new).toList(); - } - - private Set resolvedExpressionsSet(String... expressions) { - return Arrays.stream(expressions).map(ResolvedExpression::new).collect(Collectors.toSet()); - } } diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java index 17975b7d18dd8..36f7355a541c1 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.cluster.metadata.DataStreamTestHelper; import org.elasticsearch.cluster.metadata.IndexGraveyard; import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; @@ -78,7 +77,6 @@ import java.util.Optional; import java.util.Set; import java.util.concurrent.CountDownLatch; -import java.util.stream.Collectors; import java.util.stream.Stream; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; @@ -679,27 +677,27 @@ public void testBuildAliasFilter() { ); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); { - AliasFilter result = indicesService.buildAliasFilter(state, "test-0", resolvedExpressions("test-alias-0")); + AliasFilter result = indicesService.buildAliasFilter(state, "test-0", Set.of("test-alias-0")); assertThat(result.getAliases(), arrayContainingInAnyOrder("test-alias-0")); assertThat(result.getQueryBuilder(), equalTo(QueryBuilders.termQuery("foo", "bar"))); } { - AliasFilter result = indicesService.buildAliasFilter(state, "test-1", resolvedExpressions("test-alias-0")); + AliasFilter result = 
indicesService.buildAliasFilter(state, "test-1", Set.of("test-alias-0")); assertThat(result.getAliases(), arrayContainingInAnyOrder("test-alias-0")); assertThat(result.getQueryBuilder(), equalTo(QueryBuilders.termQuery("foo", "bar"))); } { - AliasFilter result = indicesService.buildAliasFilter(state, "test-0", resolvedExpressions("test-alias-1")); + AliasFilter result = indicesService.buildAliasFilter(state, "test-0", Set.of("test-alias-1")); assertThat(result.getAliases(), arrayContainingInAnyOrder("test-alias-1")); assertThat(result.getQueryBuilder(), equalTo(QueryBuilders.termQuery("foo", "baz"))); } { - AliasFilter result = indicesService.buildAliasFilter(state, "test-1", resolvedExpressions("test-alias-1")); + AliasFilter result = indicesService.buildAliasFilter(state, "test-1", Set.of("test-alias-1")); assertThat(result.getAliases(), arrayContainingInAnyOrder("test-alias-1")); assertThat(result.getQueryBuilder(), equalTo(QueryBuilders.termQuery("foo", "bax"))); } { - AliasFilter result = indicesService.buildAliasFilter(state, "test-0", resolvedExpressions("test-alias-0", "test-alias-1")); + AliasFilter result = indicesService.buildAliasFilter(state, "test-0", Set.of("test-alias-0", "test-alias-1")); assertThat(result.getAliases(), arrayContainingInAnyOrder("test-alias-0", "test-alias-1")); BoolQueryBuilder filter = (BoolQueryBuilder) result.getQueryBuilder(); assertThat(filter.filter(), empty()); @@ -708,7 +706,7 @@ public void testBuildAliasFilter() { assertThat(filter.should(), containsInAnyOrder(QueryBuilders.termQuery("foo", "baz"), QueryBuilders.termQuery("foo", "bar"))); } { - AliasFilter result = indicesService.buildAliasFilter(state, "test-1", resolvedExpressions("test-alias-0", "test-alias-1")); + AliasFilter result = indicesService.buildAliasFilter(state, "test-1", Set.of("test-alias-0", "test-alias-1")); assertThat(result.getAliases(), arrayContainingInAnyOrder("test-alias-0", "test-alias-1")); BoolQueryBuilder filter = (BoolQueryBuilder) 
result.getQueryBuilder(); assertThat(filter.filter(), empty()); @@ -720,7 +718,7 @@ public void testBuildAliasFilter() { AliasFilter result = indicesService.buildAliasFilter( state, "test-0", - resolvedExpressions("test-alias-0", "test-alias-1", "test-alias-non-filtering") + Set.of("test-alias-0", "test-alias-1", "test-alias-non-filtering") ); assertThat(result.getAliases(), emptyArray()); assertThat(result.getQueryBuilder(), nullValue()); @@ -729,7 +727,7 @@ public void testBuildAliasFilter() { AliasFilter result = indicesService.buildAliasFilter( state, "test-1", - resolvedExpressions("test-alias-0", "test-alias-1", "test-alias-non-filtering") + Set.of("test-alias-0", "test-alias-1", "test-alias-non-filtering") ); assertThat(result.getAliases(), emptyArray()); assertThat(result.getQueryBuilder(), nullValue()); @@ -756,19 +754,19 @@ public void testBuildAliasFilterDataStreamAliases() { ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); { String index = backingIndex1.getIndex().getName(); - AliasFilter result = indicesService.buildAliasFilter(state, index, resolvedExpressions("logs_foo")); + AliasFilter result = indicesService.buildAliasFilter(state, index, Set.of("logs_foo")); assertThat(result.getAliases(), arrayContainingInAnyOrder("logs_foo")); assertThat(result.getQueryBuilder(), equalTo(QueryBuilders.termQuery("foo", "bar"))); } { String index = backingIndex2.getIndex().getName(); - AliasFilter result = indicesService.buildAliasFilter(state, index, resolvedExpressions("logs_foo")); + AliasFilter result = indicesService.buildAliasFilter(state, index, Set.of("logs_foo")); assertThat(result.getAliases(), arrayContainingInAnyOrder("logs_foo")); assertThat(result.getQueryBuilder(), equalTo(QueryBuilders.termQuery("foo", "baz"))); } { String index = backingIndex1.getIndex().getName(); - AliasFilter result = indicesService.buildAliasFilter(state, index, resolvedExpressions("logs_foo", "logs")); + AliasFilter result = 
indicesService.buildAliasFilter(state, index, Set.of("logs_foo", "logs")); assertThat(result.getAliases(), arrayContainingInAnyOrder("logs_foo", "logs")); BoolQueryBuilder filter = (BoolQueryBuilder) result.getQueryBuilder(); assertThat(filter.filter(), empty()); @@ -778,7 +776,7 @@ public void testBuildAliasFilterDataStreamAliases() { } { String index = backingIndex2.getIndex().getName(); - AliasFilter result = indicesService.buildAliasFilter(state, index, resolvedExpressions("logs_foo", "logs")); + AliasFilter result = indicesService.buildAliasFilter(state, index, Set.of("logs_foo", "logs")); assertThat(result.getAliases(), arrayContainingInAnyOrder("logs_foo", "logs")); BoolQueryBuilder filter = (BoolQueryBuilder) result.getQueryBuilder(); assertThat(filter.filter(), empty()); @@ -789,13 +787,13 @@ public void testBuildAliasFilterDataStreamAliases() { { // querying an unfiltered and a filtered alias for the same data stream should drop the filters String index = backingIndex1.getIndex().getName(); - AliasFilter result = indicesService.buildAliasFilter(state, index, resolvedExpressions("logs_foo", "logs", "logs_bar")); + AliasFilter result = indicesService.buildAliasFilter(state, index, Set.of("logs_foo", "logs", "logs_bar")); assertThat(result, is(AliasFilter.EMPTY)); } { // similarly, querying the data stream name and a filtered alias should drop the filter String index = backingIndex1.getIndex().getName(); - AliasFilter result = indicesService.buildAliasFilter(state, index, resolvedExpressions("logs", dataStreamName1)); + AliasFilter result = indicesService.buildAliasFilter(state, index, Set.of("logs", dataStreamName1)); assertThat(result, is(AliasFilter.EMPTY)); } } @@ -848,8 +846,4 @@ public void testWithTempIndexServiceHandlesExistingIndex() throws Exception { return null; }); } - - private Set resolvedExpressions(String... 
expressions) { - return Arrays.stream(expressions).map(ResolvedExpression::new).collect(Collectors.toSet()); - } } From 0782efa3c0e417f071fb4820d5a6e913d3043a0a Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Tue, 22 Oct 2024 11:50:17 -0400 Subject: [PATCH 292/449] [ML] Unmute ServerSentEventsRestActionListenerTests (#113382) Relate #113148 Co-authored-by: Elastic Machine --- muted-tests.yml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 62140d05c9e4a..b84e86ddd7f55 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -146,18 +146,12 @@ tests: - class: org.elasticsearch.action.admin.cluster.node.stats.NodeStatsTests method: testChunking issue: https://github.com/elastic/elasticsearch/issues/113139 -- class: org.elasticsearch.xpack.inference.rest.ServerSentEventsRestActionListenerTests - method: testResponse - issue: https://github.com/elastic/elasticsearch/issues/113148 - class: org.elasticsearch.packaging.test.WindowsServiceTests method: test30StartStop issue: https://github.com/elastic/elasticsearch/issues/113160 - class: org.elasticsearch.packaging.test.WindowsServiceTests method: test33JavaChanged issue: https://github.com/elastic/elasticsearch/issues/113177 -- class: org.elasticsearch.xpack.inference.rest.ServerSentEventsRestActionListenerTests - method: testErrorMidStream - issue: https://github.com/elastic/elasticsearch/issues/113179 - class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT method: test {categorize.Categorize SYNC} issue: https://github.com/elastic/elasticsearch/issues/113054 From 9c923db8f61576701a3dfd8b7ee4b0152a37091e Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Tue, 22 Oct 2024 12:15:44 -0400 Subject: [PATCH 293/449] [ESQL] Support date_nanos on functions that take "any" type (#114056) Resolves #109998 For the most part, this is just adding tests. Greater and Least have actual production code changes - notably toEvaluator is modified to map date nanos to the long evaluator. 
This parallels the work done in #113961. I've added CSV tests and unit tests for all the functions listed in the original ticket. --------- Co-authored-by: Elastic Machine --- .../src/main/resources/date_nanos.csv | 1 + .../src/main/resources/date_nanos.csv-spec | 72 ++++++++++++++++++- .../xpack/esql/action/EsqlCapabilities.java | 5 ++ .../function/scalar/conditional/Greatest.java | 8 +-- .../function/scalar/conditional/Least.java | 8 +-- .../function/scalar/multivalue/MvDedupe.java | 2 + .../function/scalar/multivalue/MvSlice.java | 2 + .../function/scalar/multivalue/MvSort.java | 4 +- .../function/scalar/nulls/Coalesce.java | 3 + .../function/AbstractFunctionTestCase.java | 12 ++-- .../scalar/conditional/CaseTests.java | 1 + .../scalar/conditional/GreatestTests.java | 15 ++++ .../scalar/conditional/LeastTests.java | 15 ++++ .../scalar/multivalue/MvDedupeTests.java | 1 + .../scalar/multivalue/MvSliceTests.java | 17 +++++ .../scalar/multivalue/MvSortTests.java | 14 ++++ .../function/scalar/nulls/CoalesceTests.java | 13 ++++ 17 files changed, 177 insertions(+), 16 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv index 83a2f3cb1c281..029c3baf3cbfb 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv @@ -7,3 +7,4 @@ millis:date,nanos:date_nanos,num:long 2023-10-23T12:27:28.948Z,2023-10-23T12:27:28.948000000Z,1698064048948000000 2023-10-23T12:15:03.360Z,2023-10-23T12:15:03.360103847Z,1698063303360103847 1999-10-23T12:15:03.360Z,[2023-03-23T12:15:03.360103847Z, 2023-02-23T13:33:34.937193000Z, 2023-01-23T13:55:01.543123456Z], 0 +1999-10-22T12:15:03.360Z,[2023-03-23T12:15:03.360103847Z, 2023-03-23T12:15:03.360103847Z, 2023-03-23T12:15:03.360103847Z], 0 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec index ad7149b0f742f..515e2c9c6587f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec @@ -38,9 +38,10 @@ nanos:date_nanos mv_min on date nanos required_capability: date_nanos_type -FROM date_nanos | SORT millis ASC | EVAL nanos = MV_MIN(nanos) | KEEP nanos | LIMIT 1; +FROM date_nanos | SORT millis ASC | WHERE millis < "2000-01-01" | EVAL nanos = MV_MIN(nanos) | KEEP nanos; nanos:date_nanos +2023-03-23T12:15:03.360103847Z 2023-01-23T13:55:01.543123456Z ; @@ -56,9 +57,10 @@ ct:integer mv_first on date nanos required_capability: date_nanos_type -FROM date_nanos | SORT millis ASC | EVAL nanos = MV_FIRST(nanos) | KEEP nanos | LIMIT 1; +FROM date_nanos | SORT millis ASC | WHERE millis < "2000-01-01" | EVAL nanos = MV_FIRST(nanos) | KEEP nanos; nanos:date_nanos +2023-03-23T12:15:03.360103847Z 2023-01-23T13:55:01.543123456Z ; @@ -263,3 +265,69 @@ ROW a = TO_DATE_NANOS(null), b = TO_DATE_NANOS(null + 1::long), c = TO_DATE_NANO a:date_nanos | b:date_nanos | c:date_nanos null | null | null ; + +Coalasce date nanos +required_capability: to_date_nanos + +ROW a = COALESCE(null, TO_DATE_NANOS(1698069301543123456)); + +a:date_nanos +2023-10-23T13:55:01.543123456Z +; + +Case date nanos result +required_capability: to_date_nanos + +ROW a = CASE(false, TO_DATE_NANOS(0::long), TO_DATE_NANOS(1698069301543123456)); + +a:date_nanos +2023-10-23T13:55:01.543123456Z +; + +Greatest date nanos +required_capability: least_greatest_for_datenanos + +ROW a = GREATEST(TO_DATE_NANOS("2023-10-23T13:55:01.543123456"), TO_DATE_NANOS("2023-10-23T13:53:55.832987654")); + +a:date_nanos +2023-10-23T13:55:01.543123456Z +; + +Least date nanos +required_capability: least_greatest_for_datenanos + +ROW a = LEAST(TO_DATE_NANOS("2023-10-23T13:55:01.543123456"), TO_DATE_NANOS("2023-10-23T13:53:55.832987654")); + 
+a:date_nanos +2023-10-23T13:53:55.832987654Z +; + +mv_dedup over date nanos +required_capability: date_nanos_type + +FROM date_nanos | WHERE millis < "2000-01-01" | EVAL a = MV_DEDUPE(nanos) | SORT millis DESC | KEEP a; + +a:date_nanos +[2023-01-23T13:55:01.543123456Z, 2023-02-23T13:33:34.937193000Z, 2023-03-23T12:15:03.360103847Z] +2023-03-23T12:15:03.360103847Z +; + +mv_sort over date nanos +required_capability: date_nanos_type + +FROM date_nanos | WHERE millis < "2000-01-01" | EVAL a = MV_SORT(nanos, "asc") | SORT millis DESC | KEEP a; + +a:date_nanos +[2023-01-23T13:55:01.543123456Z, 2023-02-23T13:33:34.937193000Z, 2023-03-23T12:15:03.360103847Z] +[2023-03-23T12:15:03.360103847Z, 2023-03-23T12:15:03.360103847Z, 2023-03-23T12:15:03.360103847Z] +; + +mv_slice over date nanos +required_capability: date_nanos_type + +FROM date_nanos | WHERE millis < "2000-01-01" | EVAL a = MV_SLICE(MV_SORT(nanos, "asc"), 1, 2) | SORT millis DESC | KEEP a; + +a:date_nanos +[2023-02-23T13:33:34.937193000Z, 2023-03-23T12:15:03.360103847Z] +[2023-03-23T12:15:03.360103847Z, 2023-03-23T12:15:03.360103847Z] +; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index df9f14a6ac227..94211e4726a2c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -308,6 +308,11 @@ public enum Cap { */ TO_DATE_NANOS(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + /** + * Support Least and Greatest functions on Date Nanos type + */ + LEAST_GREATEST_FOR_DATENANOS(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + /** * Support for datetime in least and greatest functions */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java index d47ebeab4ca6c..aad2d37d414b8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java @@ -43,7 +43,7 @@ public class Greatest extends EsqlScalarFunction implements OptionalArgument { private DataType dataType; @FunctionInfo( - returnType = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, + returnType = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "text", "version" }, description = "Returns the maximum value from multiple columns. This is similar to <>\n" + "except it is intended to run on multiple columns at once.", note = "When run on `keyword` or `text` fields, this returns the last string in alphabetical order. " @@ -54,12 +54,12 @@ public Greatest( Source source, @Param( name = "first", - type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, + type = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "text", "version" }, description = "First of the columns to evaluate." 
) Expression first, @Param( name = "rest", - type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, + type = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "text", "version" }, description = "The rest of the columns to evaluate.", optional = true ) List rest @@ -152,7 +152,7 @@ public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { if (dataType == DataType.INTEGER) { return new GreatestIntEvaluator.Factory(source(), factories); } - if (dataType == DataType.LONG || dataType == DataType.DATETIME) { + if (dataType == DataType.LONG || dataType == DataType.DATETIME || dataType == DataType.DATE_NANOS) { return new GreatestLongEvaluator.Factory(source(), factories); } if (DataType.isString(dataType) || dataType == DataType.IP || dataType == DataType.VERSION || dataType == DataType.UNSUPPORTED) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java index 81c1419dcf788..70ba9319385f3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java @@ -43,7 +43,7 @@ public class Least extends EsqlScalarFunction implements OptionalArgument { private DataType dataType; @FunctionInfo( - returnType = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, + returnType = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "text", "version" }, description = "Returns the minimum value from multiple columns. 
" + "This is similar to <> except it is intended to run on multiple columns at once.", examples = @Example(file = "math", tag = "least") @@ -52,12 +52,12 @@ public Least( Source source, @Param( name = "first", - type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, + type = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "text", "version" }, description = "First of the columns to evaluate." ) Expression first, @Param( name = "rest", - type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, + type = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "text", "version" }, description = "The rest of the columns to evaluate.", optional = true ) List rest @@ -151,7 +151,7 @@ public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { if (dataType == DataType.INTEGER) { return new LeastIntEvaluator.Factory(source(), factories); } - if (dataType == DataType.LONG || dataType == DataType.DATETIME) { + if (dataType == DataType.LONG || dataType == DataType.DATETIME || dataType == DataType.DATE_NANOS) { return new LeastLongEvaluator.Factory(source(), factories); } if (DataType.isString(dataType) || dataType == DataType.IP || dataType == DataType.VERSION || dataType == DataType.UNSUPPORTED) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java index b17ddddb422ce..34b89b4f78997 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java @@ -38,6 +38,7 @@ public class MvDedupe extends AbstractMultivalueFunction { "cartesian_point", "cartesian_shape", 
"date", + "date_nanos", "double", "geo_point", "geo_shape", @@ -60,6 +61,7 @@ public MvDedupe( "cartesian_point", "cartesian_shape", "date", + "date_nanos", "double", "geo_point", "geo_shape", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java index a829b6f1417b9..ef562c339dfd9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java @@ -59,6 +59,7 @@ public class MvSlice extends EsqlScalarFunction implements OptionalArgument, Eva "cartesian_point", "cartesian_shape", "date", + "date_nanos", "double", "geo_point", "geo_shape", @@ -87,6 +88,7 @@ public MvSlice( "cartesian_point", "cartesian_shape", "date", + "date_nanos", "double", "geo_point", "geo_shape", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java index d9e41233952de..5ca5618bf2a54 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java @@ -69,7 +69,7 @@ public class MvSort extends EsqlScalarFunction implements OptionalArgument, Vali private static final String INVALID_ORDER_ERROR = "Invalid order value in [{}], expected one of [{}, {}] but got [{}]"; @FunctionInfo( - returnType = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, + returnType = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", 
"text", "version" }, description = "Sorts a multivalued field in lexicographical order.", examples = @Example(file = "ints", tag = "mv_sort") ) @@ -77,7 +77,7 @@ public MvSort( Source source, @Param( name = "field", - type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, + type = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "text", "version" }, description = "Multivalue expression. If `null`, the function returns `null`." ) Expression field, @Param( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java index 575bb085c41f7..6b9c8d0da025b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -53,6 +53,7 @@ public class Coalesce extends EsqlScalarFunction implements OptionalArgument { "boolean", "cartesian_point", "cartesian_shape", + "date_nanos", "date", "geo_point", "geo_shape", @@ -73,6 +74,7 @@ public Coalesce( "boolean", "cartesian_point", "cartesian_shape", + "date_nanos", "date", "geo_point", "geo_shape", @@ -90,6 +92,7 @@ public Coalesce( "boolean", "cartesian_point", "cartesian_shape", + "date_nanos", "date", "geo_point", "geo_shape", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 84a41ef040c8e..397c269ae49ff 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -722,17 +722,19 @@ public static void testFunctionInfo() { for (int i = 0; i < args.size() && i < types.size(); i++) { typesFromSignature.get(i).add(types.get(i).esNameIfPossible()); } - returnFromSignature.add(entry.getValue().esNameIfPossible()); + if (DataType.UNDER_CONSTRUCTION.containsKey(entry.getValue()) == false) { + returnFromSignature.add(entry.getValue().esNameIfPossible()); + } } for (int i = 0; i < args.size(); i++) { EsqlFunctionRegistry.ArgSignature arg = args.get(i); Set annotationTypes = Arrays.stream(arg.type()) - .filter(DataType.UNDER_CONSTRUCTION::containsKey) + .filter(t -> DataType.UNDER_CONSTRUCTION.containsKey(DataType.fromNameOrAlias(t)) == false) .collect(Collectors.toCollection(TreeSet::new)); Set signatureTypes = typesFromSignature.get(i) .stream() - .filter(DataType.UNDER_CONSTRUCTION::containsKey) + .filter(t -> DataType.UNDER_CONSTRUCTION.containsKey(DataType.fromNameOrAlias(t)) == false) .collect(Collectors.toCollection(TreeSet::new)); if (signatureTypes.isEmpty()) { log.info("{}: skipping", arg.name()); @@ -746,7 +748,9 @@ public static void testFunctionInfo() { ); } - Set returnTypes = Arrays.stream(description.returnType()).collect(Collectors.toCollection(TreeSet::new)); + Set returnTypes = Arrays.stream(description.returnType()) + .filter(t -> DataType.UNDER_CONSTRUCTION.containsKey(DataType.fromNameOrAlias(t)) == false) + .collect(Collectors.toCollection(TreeSet::new)); assertEquals(returnFromSignature, returnTypes); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java index db3fce244c9a8..fbb7c691b1d94 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java @@ -46,6 +46,7 @@ public class CaseTests extends AbstractScalarFunctionTestCase { DataType.TEXT, DataType.BOOLEAN, DataType.DATETIME, + DataType.DATE_NANOS, DataType.DOUBLE, DataType.INTEGER, DataType.LONG, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestTests.java index 311e3e3d89149..07d6ae34dc1e7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestTests.java @@ -115,6 +115,21 @@ public static Iterable parameters() { ) ) ); + suppliers.add( + new TestCaseSupplier( + "(a, b)", + List.of(DataType.DATE_NANOS, DataType.DATE_NANOS), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(1727877348000123456L, DataType.DATE_NANOS, "a"), + new TestCaseSupplier.TypedData(1727790948000987654L, DataType.DATE_NANOS, "b") + ), + "GreatestLongEvaluator[values=[MvMax[field=Attribute[channel=0]], MvMax[field=Attribute[channel=1]]]]", + DataType.DATE_NANOS, + equalTo(1727877348000123456L) + ) + ) + ); return parameterSuppliersFromTypedData(anyNullIsNull(false, suppliers)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastTests.java index 69842fde90312..d95cc79dd22e0 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastTests.java @@ -114,6 +114,21 @@ public static Iterable parameters() { ) ) ); + suppliers.add( + new TestCaseSupplier( + "(a, b)", + List.of(DataType.DATE_NANOS, DataType.DATE_NANOS), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(1727877348000123456L, DataType.DATE_NANOS, "a"), + new TestCaseSupplier.TypedData(1727790948000987654L, DataType.DATE_NANOS, "b") + ), + "LeastLongEvaluator[values=[MvMin[field=Attribute[channel=0]], MvMin[field=Attribute[channel=1]]]]", + DataType.DATE_NANOS, + equalTo(1727790948000987654L) + ) + ) + ); return parameterSuppliersFromTypedData(anyNullIsNull(false, suppliers)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeTests.java index d8d3b607efcc0..f3b44274f3ade 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeTests.java @@ -39,6 +39,7 @@ public static Iterable parameters() { booleans(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values)); bytesRefs(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values)); dateTimes(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values.mapToObj(Long::valueOf))); + dateNanos(cases, "mv_dedupe", "MvDedupe", DataType.DATE_NANOS, (size, values) -> getMatcher(values.mapToObj(Long::valueOf))); doubles(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values.mapToObj(Double::valueOf))); ints(cases, 
"mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values.mapToObj(Integer::valueOf))); longs(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values.mapToObj(Long::valueOf))); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java index e5bac422805af..859c79090d62f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java @@ -182,6 +182,23 @@ private static void longs(List suppliers) { equalTo(start == end ? field.get(start) : field.subList(start, end + 1)) ); })); + + suppliers.add(new TestCaseSupplier(List.of(DataType.DATE_NANOS, DataType.INTEGER, DataType.INTEGER), () -> { + List field = randomList(1, 10, () -> randomLong()); + int length = field.size(); + int start = randomIntBetween(0, length - 1); + int end = randomIntBetween(start, length - 1); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(field, DataType.DATE_NANOS, "field"), + new TestCaseSupplier.TypedData(start, DataType.INTEGER, "start"), + new TestCaseSupplier.TypedData(end, DataType.INTEGER, "end") + ), + "MvSliceLongEvaluator[field=Attribute[channel=0], start=Attribute[channel=1], end=Attribute[channel=2]]", + DataType.DATE_NANOS, + equalTo(start == end ? 
field.get(start) : field.subList(start, end + 1)) + ); + })); } private static void doubles(List suppliers) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java index d07ed2aeae887..63f538059dddf 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java @@ -110,6 +110,20 @@ private static void longs(List suppliers) { equalTo(field.size() == 1 ? field.iterator().next() : field.stream().sorted(Collections.reverseOrder()).toList()) ); })); + + suppliers.add(new TestCaseSupplier(List.of(DataType.DATE_NANOS, DataType.KEYWORD), () -> { + List field = randomList(1, 10, () -> randomLong()); + BytesRef order = new BytesRef("DESC"); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(field, DataType.DATE_NANOS, "field"), + new TestCaseSupplier.TypedData(order, DataType.KEYWORD, "order").forceLiteral() + ), + "MvSortLong[field=Attribute[channel=0], order=false]", + DataType.DATE_NANOS, + equalTo(field.size() == 1 ? 
field.iterator().next() : field.stream().sorted(Collections.reverseOrder()).toList()) + ); + })); } private static void doubles(List suppliers) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java index c9b6de64e079d..797c99992815e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java @@ -96,6 +96,19 @@ public static Iterable parameters() { equalTo(firstDate == null ? secondDate : firstDate) ); })); + noNullsSuppliers.add(new TestCaseSupplier(List.of(DataType.DATE_NANOS, DataType.DATE_NANOS), () -> { + Long firstDate = randomBoolean() ? null : randomNonNegativeLong(); + Long secondDate = randomNonNegativeLong(); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(firstDate, DataType.DATE_NANOS, "first"), + new TestCaseSupplier.TypedData(secondDate, DataType.DATE_NANOS, "second") + ), + "CoalesceEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]", + DataType.DATE_NANOS, + equalTo(firstDate == null ? 
secondDate : firstDate) + ); + })); List suppliers = new ArrayList<>(noNullsSuppliers); for (TestCaseSupplier s : noNullsSuppliers) { From 157c98360730421d86ff43d774a815d59f6cfb54 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 23 Oct 2024 03:46:37 +1100 Subject: [PATCH 294/449] Mute org.elasticsearch.xpack.watcher.trigger.schedule.engine.TickerScheduleEngineTests testWatchWithLastCheckedTimeExecutesBeforeInitialInterval #115354 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index b84e86ddd7f55..15bd6096c71bd 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -322,6 +322,9 @@ tests: - class: org.elasticsearch.xpack.watcher.trigger.schedule.engine.TickerScheduleEngineTests method: testAddWithNoLastCheckedTimeButHasActivationTimeExecutesBeforeInitialInterval issue: https://github.com/elastic/elasticsearch/issues/115339 +- class: org.elasticsearch.xpack.watcher.trigger.schedule.engine.TickerScheduleEngineTests + method: testWatchWithLastCheckedTimeExecutesBeforeInitialInterval + issue: https://github.com/elastic/elasticsearch/issues/115354 # Examples: # From 57fbbcb9e7f58640b6bd98927307731b118d5000 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Tue, 22 Oct 2024 20:05:48 +0300 Subject: [PATCH 295/449] Fix leak in TimeSeriesRateAggregatorTests (#115345) Fixes #115334 --- muted-tests.yml | 2 -- .../analytics/rate/TimeSeriesRateAggregatorTests.java | 8 ++++++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 15bd6096c71bd..7bad160f8d5ba 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -317,8 +317,6 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=esql/60_usage/Basic ESQL usage output (telemetry)} issue: https://github.com/elastic/elasticsearch/issues/115231 -- class: 
org.elasticsearch.xpack.analytics.rate.TimeSeriesRateAggregatorTests - issue: https://github.com/elastic/elasticsearch/issues/115334 - class: org.elasticsearch.xpack.watcher.trigger.schedule.engine.TickerScheduleEngineTests method: testAddWithNoLastCheckedTimeButHasActivationTimeExecutesBeforeInitialInterval issue: https://github.com/elastic/elasticsearch/issues/115339 diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java index f517c03468bc2..753ce8283afca 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java @@ -156,8 +156,12 @@ public void testNestedWithinAutoDateHistogram() throws IOException { AggTestConfig aggTestConfig = new AggTestConfig(tsBuilder, timeStampField(), counterField("counter_field")) .withSplitLeavesIntoSeperateAggregators(false); expectThrows(IllegalArgumentException.class, () -> testCase(iw -> { - iw.addDocuments(docs(2000, "1", 15, 37, 60, /*reset*/ 14)); - iw.addDocuments(docs(2000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)); + for (Document document : docs(2000, "1", 15, 37, 60, /*reset*/ 14)) { + iw.addDocument(document); + } + for (Document document : docs(2000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)) { + iw.addDocument(document); + } }, verifier, aggTestConfig)); } From 82f2fb554e3a5bb849becb0a0df411b832735451 Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Tue, 22 Oct 2024 13:41:17 -0400 Subject: [PATCH 296/449] fix test to not run when the FF is disabled (#114260) Fixes #113661 Don't run the tests when the feature is disabled. 
Co-authored-by: Elastic Machine --- .../esql/functions/kibana/definition/to_date_nanos.json | 3 ++- muted-tests.yml | 2 -- .../expression/function/scalar/convert/ToDateNanosTests.java | 4 ++++ 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/docs/reference/esql/functions/kibana/definition/to_date_nanos.json b/docs/reference/esql/functions/kibana/definition/to_date_nanos.json index bafbcf2bc2038..07ffe84444f02 100644 --- a/docs/reference/esql/functions/kibana/definition/to_date_nanos.json +++ b/docs/reference/esql/functions/kibana/definition/to_date_nanos.json @@ -5,5 +5,6 @@ "description" : "Converts an input to a nanosecond-resolution date value (aka date_nanos).", "note" : "The range for date nanos is 1970-01-01T00:00:00.000000000Z to 2262-04-11T23:47:16.854775807Z. Additionally, integers cannot be converted into date nanos, as the range of integer nanoseconds only covers about 2 seconds after epoch.", "signatures" : [ ], - "preview" : true + "preview" : true, + "snapshot_only" : false } diff --git a/muted-tests.yml b/muted-tests.yml index 7bad160f8d5ba..316ff67c691a4 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -206,8 +206,6 @@ tests: - class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT method: test {categorize.Categorize SYNC} issue: https://github.com/elastic/elasticsearch/issues/113722 -- class: org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDateNanosTests - issue: https://github.com/elastic/elasticsearch/issues/113661 - class: org.elasticsearch.ingest.geoip.DatabaseNodeServiceIT method: testNonGzippedDatabase issue: https://github.com/elastic/elasticsearch/issues/113821 diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosTests.java index e91a5cc1ebca4..485073d1a91d2 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosTests.java @@ -11,6 +11,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.common.time.DateUtils; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -29,6 +30,9 @@ public ToDateNanosTests(@Name("TestCase") Supplier te @ParametersFactory public static Iterable parameters() { + if (EsqlCapabilities.Cap.TO_DATE_NANOS.isEnabled() == false) { + return List.of(); + } final String read = "Attribute[channel=0]"; final List suppliers = new ArrayList<>(); From f8e931d6b5e4e17ef43ac3b39e4c7c40cbc24111 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 23 Oct 2024 04:43:08 +1100 Subject: [PATCH 297/449] Mute org.elasticsearch.xpack.watcher.trigger.schedule.engine.TickerScheduleEngineTests testAddWithLastCheckedTimeExecutesBeforeInitialInterval #115356 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 316ff67c691a4..6a575c9058363 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -321,6 +321,9 @@ tests: - class: org.elasticsearch.xpack.watcher.trigger.schedule.engine.TickerScheduleEngineTests method: testWatchWithLastCheckedTimeExecutesBeforeInitialInterval issue: https://github.com/elastic/elasticsearch/issues/115354 +- class: org.elasticsearch.xpack.watcher.trigger.schedule.engine.TickerScheduleEngineTests + method: testAddWithLastCheckedTimeExecutesBeforeInitialInterval + issue: https://github.com/elastic/elasticsearch/issues/115356 # Examples: # From 
d65a0309faa63a591845844130ba905088cd4fab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Iv=C3=A1n=20Cea=20Fontenla?= Date: Tue, 22 Oct 2024 19:53:09 +0200 Subject: [PATCH 298/449] Fix AvgTests error on -0.0 avg (#113272) Fixes https://github.com/elastic/elasticsearch/issues/113225 Fixes https://github.com/elastic/elasticsearch/issues/114175 --- muted-tests.yml | 6 - .../function/AbstractAggregationTestCase.java | 30 +- .../function/AbstractFunctionTestCase.java | 30 + .../AbstractScalarFunctionTestCase.java | 11 +- .../function/MultiRowTestCaseSupplier.java | 521 ++++++++---------- .../function/aggregate/AvgTests.java | 38 +- 6 files changed, 276 insertions(+), 360 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 6a575c9058363..325fcbf04ce59 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -164,9 +164,6 @@ tests: - class: org.elasticsearch.packaging.test.WindowsServiceTests method: test80JavaOptsInEnvVar issue: https://github.com/elastic/elasticsearch/issues/113219 -- class: org.elasticsearch.xpack.esql.expression.function.aggregate.AvgTests - method: "testFold {TestCase= #2}" - issue: https://github.com/elastic/elasticsearch/issues/113225 - class: org.elasticsearch.packaging.test.WindowsServiceTests method: test81JavaOptsInJvmOptions issue: https://github.com/elastic/elasticsearch/issues/113313 @@ -236,9 +233,6 @@ tests: - class: org.elasticsearch.xpack.inference.InferenceCrudIT method: testGet issue: https://github.com/elastic/elasticsearch/issues/114135 -- class: org.elasticsearch.xpack.esql.expression.function.aggregate.AvgTests - method: "testFold {TestCase= #7}" - issue: https://github.com/elastic/elasticsearch/issues/114175 - class: org.elasticsearch.xpack.ilm.ExplainLifecycleIT method: testStepInfoPreservedOnAutoRetry issue: https://github.com/elastic/elasticsearch/issues/114220 diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java index 1c917a961a343..db5d8e03458ea 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java @@ -163,15 +163,7 @@ private void aggregateSingleMode(Expression expression) { result = extractResultFromAggregator(aggregator, PlannerUtils.toElementType(testCase.expectedType())); } - assertThat(result, not(equalTo(Double.NaN))); - assert testCase.getMatcher().matches(Double.POSITIVE_INFINITY) == false; - assertThat(result, not(equalTo(Double.POSITIVE_INFINITY))); - assert testCase.getMatcher().matches(Double.NEGATIVE_INFINITY) == false; - assertThat(result, not(equalTo(Double.NEGATIVE_INFINITY))); - assertThat(result, testCase.getMatcher()); - if (testCase.getExpectedWarnings() != null) { - assertWarnings(testCase.getExpectedWarnings()); - } + assertTestCaseResultAndWarnings(result); } private void aggregateGroupingSingleMode(Expression expression) { @@ -263,15 +255,7 @@ private void aggregateWithIntermediates(Expression expression) { result = extractResultFromAggregator(aggregator, PlannerUtils.toElementType(testCase.expectedType())); } - assertThat(result, not(equalTo(Double.NaN))); - assert testCase.getMatcher().matches(Double.POSITIVE_INFINITY) == false; - assertThat(result, not(equalTo(Double.POSITIVE_INFINITY))); - assert testCase.getMatcher().matches(Double.NEGATIVE_INFINITY) == false; - assertThat(result, not(equalTo(Double.NEGATIVE_INFINITY))); - assertThat(result, testCase.getMatcher()); - if (testCase.getExpectedWarnings() != null) { - assertWarnings(testCase.getExpectedWarnings()); - } + assertTestCaseResultAndWarnings(result); } private void evaluate(Expression evaluableExpression) { @@ -288,15 +272,7 @@ private void evaluate(Expression 
evaluableExpression) { if (testCase.expectedType() == DataType.UNSIGNED_LONG && result != null) { result = NumericUtils.unsignedLongAsBigInteger((Long) result); } - assertThat(result, not(equalTo(Double.NaN))); - assert testCase.getMatcher().matches(Double.POSITIVE_INFINITY) == false; - assertThat(result, not(equalTo(Double.POSITIVE_INFINITY))); - assert testCase.getMatcher().matches(Double.NEGATIVE_INFINITY) == false; - assertThat(result, not(equalTo(Double.NEGATIVE_INFINITY))); - assertThat(result, testCase.getMatcher()); - if (testCase.getExpectedWarnings() != null) { - assertWarnings(testCase.getExpectedWarnings()); - } + assertTestCaseResultAndWarnings(result); } private void resolveExpression(Expression expression, Consumer onAggregator, Consumer onEvaluableExpression) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 397c269ae49ff..c05f8e0990b3c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -99,8 +99,10 @@ import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; /** @@ -754,6 +756,34 @@ public static void testFunctionInfo() { assertEquals(returnFromSignature, returnTypes); } + /** + * Asserts the result of a test case matches the expected result and warnings. + *

      + * The {@code result} parameter should be an object as returned by {@link #toJavaObjectUnsignedLongAware}. + *

      + */ + @SuppressWarnings("unchecked") + protected final void assertTestCaseResultAndWarnings(Object result) { + if (result instanceof Iterable) { + var collectionResult = (Iterable) result; + assertThat(collectionResult, not(hasItem(Double.NaN))); + assertThat(collectionResult, not(hasItem(Double.POSITIVE_INFINITY))); + assertThat(collectionResult, not(hasItem(Double.NEGATIVE_INFINITY))); + } + + assert testCase.getMatcher().matches(Double.NaN) == false; + assertThat(result, not(equalTo(Double.NaN))); + assert testCase.getMatcher().matches(Double.POSITIVE_INFINITY) == false; + assertThat(result, not(equalTo(Double.POSITIVE_INFINITY))); + assert testCase.getMatcher().matches(Double.NEGATIVE_INFINITY) == false; + assertThat(result, not(equalTo(Double.NEGATIVE_INFINITY))); + assertThat(result, testCase.getMatcher()); + + if (testCase.getExpectedWarnings() != null) { + assertWarnings(testCase.getExpectedWarnings()); + } + } + protected final void assertTypeResolutionFailure(Expression expression) { assertTrue("expected unresolved", expression.typeResolved().unresolved()); assertThat(expression.typeResolved().message(), equalTo(testCase.getExpectedTypeError())); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java index 85db73901352b..65e8a53fc05c5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java @@ -41,7 +41,6 @@ import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; import static 
org.hamcrest.Matchers.sameInstance; @@ -127,15 +126,7 @@ public final void testEvaluate() { result = toJavaObjectUnsignedLongAware(block, 0); } } - assertThat(result, not(equalTo(Double.NaN))); - assert testCase.getMatcher().matches(Double.POSITIVE_INFINITY) == false; - assertThat(result, not(equalTo(Double.POSITIVE_INFINITY))); - assert testCase.getMatcher().matches(Double.NEGATIVE_INFINITY) == false; - assertThat(result, not(equalTo(Double.NEGATIVE_INFINITY))); - assertThat(result, testCase.getMatcher()); - if (testCase.getExpectedWarnings() != null) { - assertWarnings(testCase.getExpectedWarnings()); - } + assertTestCaseResultAndWarnings(result); } /** diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java index e740533462746..7fe67707a7976 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java @@ -19,6 +19,7 @@ import java.math.BigInteger; import java.util.ArrayList; import java.util.List; +import java.util.function.Supplier; import static org.elasticsearch.test.ESTestCase.randomBoolean; import static org.elasticsearch.test.ESTestCase.randomList; @@ -37,56 +38,36 @@ public static List intCases(int minRows, int maxRows, int min List cases = new ArrayList<>(); if (0 <= max && 0 >= min && includeZero) { - cases.add(new TypedDataSupplier("<0 ints>", () -> randomList(minRows, maxRows, () -> 0), DataType.INTEGER, false, true)); + addSuppliers(cases, minRows, maxRows, "0 int", DataType.INTEGER, () -> 0); } if (max != 0) { - cases.add( - new TypedDataSupplier("<" + max + " ints>", () -> randomList(minRows, maxRows, () -> max), DataType.INTEGER, false, true) - ); + addSuppliers(cases, minRows, maxRows, 
max + " int", DataType.INTEGER, () -> max); } if (min != 0 && min != max) { - cases.add( - new TypedDataSupplier("<" + min + " ints>", () -> randomList(minRows, maxRows, () -> min), DataType.INTEGER, false, true) - ); + addSuppliers(cases, minRows, maxRows, min + " int", DataType.INTEGER, () -> min); } int lower = Math.max(min, 1); int upper = Math.min(max, Integer.MAX_VALUE); if (lower < upper) { - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> ESTestCase.randomIntBetween(lower, upper)), - DataType.INTEGER, - false, - true - ) - ); + addSuppliers(cases, minRows, maxRows, "positive int", DataType.INTEGER, () -> ESTestCase.randomIntBetween(lower, upper)); } int lower1 = Math.max(min, Integer.MIN_VALUE); int upper1 = Math.min(max, -1); if (lower1 < upper1) { - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> ESTestCase.randomIntBetween(lower1, upper1)), - DataType.INTEGER, - false, - true - ) - ); + addSuppliers(cases, minRows, maxRows, "negative int", DataType.INTEGER, () -> ESTestCase.randomIntBetween(lower1, upper1)); } if (min < 0 && max > 0) { - cases.add(new TypedDataSupplier("", () -> randomList(minRows, maxRows, () -> { + addSuppliers(cases, minRows, maxRows, "random int", DataType.INTEGER, () -> { if (includeZero) { return ESTestCase.randomIntBetween(min, max); } return randomBoolean() ? 
ESTestCase.randomIntBetween(min, -1) : ESTestCase.randomIntBetween(1, max); - }), DataType.INTEGER, false, true)); + }); } return cases; @@ -96,56 +77,36 @@ public static List longCases(int minRows, int maxRows, long m List cases = new ArrayList<>(); if (0 <= max && 0 >= min && includeZero) { - cases.add(new TypedDataSupplier("<0 longs>", () -> randomList(minRows, maxRows, () -> 0L), DataType.LONG, false, true)); + addSuppliers(cases, minRows, maxRows, "0 long", DataType.LONG, () -> 0L); } if (max != 0) { - cases.add( - new TypedDataSupplier("<" + max + " longs>", () -> randomList(minRows, maxRows, () -> max), DataType.LONG, false, true) - ); + addSuppliers(cases, minRows, maxRows, max + " long", DataType.LONG, () -> max); } if (min != 0 && min != max) { - cases.add( - new TypedDataSupplier("<" + min + " longs>", () -> randomList(minRows, maxRows, () -> min), DataType.LONG, false, true) - ); + addSuppliers(cases, minRows, maxRows, min + " long", DataType.LONG, () -> min); } long lower = Math.max(min, 1); long upper = Math.min(max, Long.MAX_VALUE); if (lower < upper) { - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> ESTestCase.randomLongBetween(lower, upper)), - DataType.LONG, - false, - true - ) - ); + addSuppliers(cases, minRows, maxRows, "positive long", DataType.LONG, () -> ESTestCase.randomLongBetween(lower, upper)); } long lower1 = Math.max(min, Long.MIN_VALUE); long upper1 = Math.min(max, -1); if (lower1 < upper1) { - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> ESTestCase.randomLongBetween(lower1, upper1)), - DataType.LONG, - false, - true - ) - ); + addSuppliers(cases, minRows, maxRows, "negative long", DataType.LONG, () -> ESTestCase.randomLongBetween(lower1, upper1)); } if (min < 0 && max > 0) { - cases.add(new TypedDataSupplier("", () -> randomList(minRows, maxRows, () -> { + addSuppliers(cases, minRows, maxRows, "random long", DataType.LONG, () -> { if (includeZero) { return 
ESTestCase.randomLongBetween(min, max); } return randomBoolean() ? ESTestCase.randomLongBetween(min, -1) : ESTestCase.randomLongBetween(1, max); - }), DataType.LONG, false, true)); + }); } return cases; @@ -156,29 +117,20 @@ public static List ulongCases(int minRows, int maxRows, BigIn // Zero if (BigInteger.ZERO.compareTo(max) <= 0 && BigInteger.ZERO.compareTo(min) >= 0 && includeZero) { - cases.add( - new TypedDataSupplier( - "<0 unsigned longs>", - () -> randomList(minRows, maxRows, () -> BigInteger.ZERO), - DataType.UNSIGNED_LONG, - false, - true - ) - ); + addSuppliers(cases, minRows, maxRows, "0 unsigned long", DataType.UNSIGNED_LONG, () -> BigInteger.ZERO); } // Small values, less than Long.MAX_VALUE BigInteger lower1 = min.max(BigInteger.ONE); BigInteger upper1 = max.min(BigInteger.valueOf(Long.MAX_VALUE)); if (lower1.compareTo(upper1) < 0) { - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> ESTestCase.randomUnsignedLongBetween(lower1, upper1)), - DataType.UNSIGNED_LONG, - false, - true - ) + addSuppliers( + cases, + minRows, + maxRows, + "small unsigned long", + DataType.UNSIGNED_LONG, + () -> ESTestCase.randomUnsignedLongBetween(lower1, upper1) ); } @@ -186,14 +138,13 @@ public static List ulongCases(int minRows, int maxRows, BigIn BigInteger lower2 = min.max(BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE)); BigInteger upper2 = max.min(ESTestCase.UNSIGNED_LONG_MAX); if (lower2.compareTo(upper2) < 0) { - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> ESTestCase.randomUnsignedLongBetween(lower2, upper2)), - DataType.UNSIGNED_LONG, - false, - true - ) + addSuppliers( + cases, + minRows, + maxRows, + "big unsigned long", + DataType.UNSIGNED_LONG, + () -> ESTestCase.randomUnsignedLongBetween(lower2, upper2) ); } @@ -204,85 +155,77 @@ public static List doubleCases(int minRows, int maxRows, doub List cases = new ArrayList<>(); if (0d <= max && 0d >= min && includeZero) { - 
cases.add(new TypedDataSupplier("<0 doubles>", () -> randomList(minRows, maxRows, () -> 0d), DataType.DOUBLE, false, true)); - cases.add(new TypedDataSupplier("<-0 doubles>", () -> randomList(minRows, maxRows, () -> -0d), DataType.DOUBLE, false, true)); + addSuppliers(cases, minRows, maxRows, "0 double", DataType.DOUBLE, () -> 0d); + addSuppliers(cases, minRows, maxRows, "-0 double", DataType.DOUBLE, () -> -0d); } if (max != 0d) { - cases.add( - new TypedDataSupplier("<" + max + " doubles>", () -> randomList(minRows, maxRows, () -> max), DataType.DOUBLE, false, true) - ); + addSuppliers(cases, minRows, maxRows, max + " double", DataType.DOUBLE, () -> max); } if (min != 0d && min != max) { - cases.add( - new TypedDataSupplier("<" + min + " doubles>", () -> randomList(minRows, maxRows, () -> min), DataType.DOUBLE, false, true) - ); + addSuppliers(cases, minRows, maxRows, min + " double", DataType.DOUBLE, () -> min); } double lower1 = Math.max(min, 0d); double upper1 = Math.min(max, 1d); if (lower1 < upper1) { - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> ESTestCase.randomDoubleBetween(lower1, upper1, true)), - DataType.DOUBLE, - false, - true - ) + addSuppliers( + cases, + minRows, + maxRows, + "small positive double", + DataType.DOUBLE, + () -> ESTestCase.randomDoubleBetween(lower1, upper1, true) ); } double lower2 = Math.max(min, -1d); double upper2 = Math.min(max, 0d); if (lower2 < upper2) { - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> ESTestCase.randomDoubleBetween(lower2, upper2, true)), - DataType.DOUBLE, - false, - true - ) + addSuppliers( + cases, + minRows, + maxRows, + "small negative double", + DataType.DOUBLE, + () -> ESTestCase.randomDoubleBetween(lower2, upper2, true) ); } double lower3 = Math.max(min, 1d); double upper3 = Math.min(max, Double.MAX_VALUE); if (lower3 < upper3) { - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> 
ESTestCase.randomDoubleBetween(lower3, upper3, true)), - DataType.DOUBLE, - false, - true - ) + addSuppliers( + cases, + minRows, + maxRows, + "big positive double", + DataType.DOUBLE, + () -> ESTestCase.randomDoubleBetween(lower3, upper3, true) ); } double lower4 = Math.max(min, -Double.MAX_VALUE); double upper4 = Math.min(max, -1d); if (lower4 < upper4) { - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> ESTestCase.randomDoubleBetween(lower4, upper4, true)), - DataType.DOUBLE, - false, - true - ) + addSuppliers( + cases, + minRows, + maxRows, + "big negative double", + DataType.DOUBLE, + () -> ESTestCase.randomDoubleBetween(lower4, upper4, true) ); } if (min < 0 && max > 0) { - cases.add(new TypedDataSupplier("", () -> randomList(minRows, maxRows, () -> { + addSuppliers(cases, minRows, maxRows, "random double", DataType.DOUBLE, () -> { if (includeZero) { return ESTestCase.randomDoubleBetween(min, max, true); } return randomBoolean() ? ESTestCase.randomDoubleBetween(min, -1, true) : ESTestCase.randomDoubleBetween(1, max, true); - }), DataType.DOUBLE, false, true)); + }); } return cases; @@ -291,149 +234,126 @@ public static List doubleCases(int minRows, int maxRows, doub public static List dateCases(int minRows, int maxRows) { List cases = new ArrayList<>(); - cases.add( - new TypedDataSupplier( - "<1970-01-01T00:00:00Z dates>", - () -> randomList(minRows, maxRows, () -> 0L), - DataType.DATETIME, - false, - true - ) - ); + addSuppliers(cases, minRows, maxRows, "1970-01-01T00:00:00Z date", DataType.DATETIME, () -> 0L); - cases.add( - new TypedDataSupplier( - "", - // 1970-01-01T00:00:00Z - 2286-11-20T17:46:40Z - () -> randomList(minRows, maxRows, () -> ESTestCase.randomLongBetween(0, 10 * (long) 10e11)), - DataType.DATETIME, - false, - true - ) - ); + // 1970-01-01T00:00:00Z - 2286-11-20T17:46:40Z + addSuppliers(cases, minRows, maxRows, "random date", DataType.DATETIME, () -> ESTestCase.randomLongBetween(0, 10 * (long) 10e11)); 
- cases.add( - new TypedDataSupplier( - "", - // 2286-11-20T17:46:40Z - +292278994-08-17T07:12:55.807Z - () -> randomList(minRows, maxRows, () -> ESTestCase.randomLongBetween(10 * (long) 10e11, Long.MAX_VALUE)), - DataType.DATETIME, - false, - true - ) + // 2286-11-20T17:46:40Z - +292278994-08-17T07:12:55.807Z + addSuppliers( + cases, + minRows, + maxRows, + "far future date", + DataType.DATETIME, + () -> ESTestCase.randomLongBetween(10 * (long) 10e11, Long.MAX_VALUE) ); - cases.add( - new TypedDataSupplier( - "", - // very close to +292278994-08-17T07:12:55.807Z, the maximum supported millis since epoch - () -> randomList(minRows, maxRows, () -> ESTestCase.randomLongBetween(Long.MAX_VALUE / 100 * 99, Long.MAX_VALUE)), - DataType.DATETIME, - false, - true - ) + // Very close to +292278994-08-17T07:12:55.807Z, the maximum supported millis since epoch + addSuppliers( + cases, + minRows, + maxRows, + "near the end of time date", + DataType.DATETIME, + () -> ESTestCase.randomLongBetween(Long.MAX_VALUE / 100 * 99, Long.MAX_VALUE) ); return cases; } public static List booleanCases(int minRows, int maxRows) { - return List.of( - new TypedDataSupplier("", () -> randomList(minRows, maxRows, () -> true), DataType.BOOLEAN, false, true), - new TypedDataSupplier("", () -> randomList(minRows, maxRows, () -> false), DataType.BOOLEAN, false, true), - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, ESTestCase::randomBoolean), - DataType.BOOLEAN, - false, - true - ) - ); + List cases = new ArrayList<>(); + + addSuppliers(cases, minRows, maxRows, "true boolean", DataType.BOOLEAN, () -> true); + addSuppliers(cases, minRows, maxRows, "false boolean", DataType.BOOLEAN, () -> false); + addSuppliers(cases, minRows, maxRows, "random boolean", DataType.BOOLEAN, ESTestCase::randomBoolean); + + return cases; } public static List ipCases(int minRows, int maxRows) { - return List.of( - new TypedDataSupplier( - "<127.0.0.1 ips>", - () -> randomList(minRows, maxRows, () -> new 
BytesRef(InetAddressPoint.encode(InetAddresses.forString("127.0.0.1")))), - DataType.IP, - false, - true - ), - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> new BytesRef(InetAddressPoint.encode(ESTestCase.randomIp(true)))), - DataType.IP, - false, - true - ), - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> new BytesRef(InetAddressPoint.encode(ESTestCase.randomIp(false)))), - DataType.IP, - false, - true - ) + List cases = new ArrayList<>(); + + addSuppliers( + cases, + minRows, + maxRows, + "127.0.0.1 ip", + DataType.IP, + () -> new BytesRef(InetAddressPoint.encode(InetAddresses.forString("127.0.0.1"))) + ); + addSuppliers( + cases, + minRows, + maxRows, + "random v4 ip", + DataType.IP, + () -> new BytesRef(InetAddressPoint.encode(ESTestCase.randomIp(true))) + ); + addSuppliers( + cases, + minRows, + maxRows, + "random v6 ip", + DataType.IP, + () -> new BytesRef(InetAddressPoint.encode(ESTestCase.randomIp(false))) ); + + return cases; } public static List versionCases(int minRows, int maxRows) { - return List.of( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> new Version(Integer.toString(ESTestCase.between(0, 100))).toBytesRef()), - DataType.VERSION, - false, - true - ), - new TypedDataSupplier( - "", - () -> randomList( - minRows, - maxRows, - () -> new Version(ESTestCase.between(0, 100) + "." + ESTestCase.between(0, 100)).toBytesRef() - ), - DataType.VERSION, - false, - true - ), - new TypedDataSupplier( - "", - () -> randomList( - minRows, - maxRows, - () -> new Version(ESTestCase.between(0, 100) + "." + ESTestCase.between(0, 100) + "." 
+ ESTestCase.between(0, 100)) - .toBytesRef() - ), - DataType.VERSION, - false, - true - ) + List cases = new ArrayList<>(); + + addSuppliers( + cases, + minRows, + maxRows, + "major version", + DataType.VERSION, + () -> new Version(Integer.toString(ESTestCase.between(0, 100))).toBytesRef() ); + addSuppliers( + cases, + minRows, + maxRows, + "major.minor version", + DataType.VERSION, + () -> new Version(ESTestCase.between(0, 100) + "." + ESTestCase.between(0, 100)).toBytesRef() + ); + addSuppliers( + cases, + minRows, + maxRows, + "major.minor.patch version", + DataType.VERSION, + () -> new Version(ESTestCase.between(0, 100) + "." + ESTestCase.between(0, 100) + "." + ESTestCase.between(0, 100)).toBytesRef() + ); + + return cases; } public static List geoPointCases(int minRows, int maxRows, boolean withAltitude) { List cases = new ArrayList<>(); - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> GEO.asWkb(GeometryTestUtils.randomPoint(false))), - DataType.GEO_POINT, - false, - true - ) + addSuppliers( + cases, + minRows, + maxRows, + "", + DataType.GEO_POINT, + () -> GEO.asWkb(GeometryTestUtils.randomPoint(false)) ); if (withAltitude) { - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> GEO.asWkb(GeometryTestUtils.randomPoint(false))), - DataType.GEO_POINT, - false, - true - ) + addSuppliers( + cases, + minRows, + maxRows, + "", + DataType.GEO_POINT, + () -> GEO.asWkb(GeometryTestUtils.randomPoint(true)) ); } @@ -443,25 +363,23 @@ public static List geoPointCases(int minRows, int maxRows, bo public static List cartesianPointCases(int minRows, int maxRows, boolean withAltitude) { List cases = new ArrayList<>(); - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint(false))), - DataType.CARTESIAN_POINT, - false, - true - ) + addSuppliers( + cases, + minRows, + maxRows, + "", + DataType.CARTESIAN_POINT, + () -> 
CARTESIAN.asWkb(ShapeTestUtils.randomPoint(false)) ); if (withAltitude) { - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint(true))), - DataType.CARTESIAN_POINT, - false, - true - ) + addSuppliers( + cases, + minRows, + maxRows, + "", + DataType.CARTESIAN_POINT, + () -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint(true)) ); } @@ -471,59 +389,64 @@ public static List cartesianPointCases(int minRows, int maxRo public static List stringCases(int minRows, int maxRows, DataType type) { List cases = new ArrayList<>(); - cases.addAll( - List.of( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> new BytesRef("")), - type, - false, - true - ), - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> new BytesRef(ESTestCase.randomAlphaOfLengthBetween(1, 30))), - type, - false, - true - ), - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> new BytesRef(ESTestCase.randomRealisticUnicodeOfLengthBetween(1, 30))), - type, - false, - true - ) - ) + addSuppliers(cases, minRows, maxRows, "empty " + type, type, () -> new BytesRef("")); + addSuppliers( + cases, + minRows, + maxRows, + "short alpha " + type, + type, + () -> new BytesRef(ESTestCase.randomAlphaOfLengthBetween(1, 30)) + ); + addSuppliers( + cases, + minRows, + maxRows, + "short unicode " + type, + type, + () -> new BytesRef(ESTestCase.randomRealisticUnicodeOfLengthBetween(1, 30)) ); if (minRows <= 100) { var longStringsMaxRows = Math.min(maxRows, 100); - cases.addAll( - List.of( - new TypedDataSupplier( - "", - () -> randomList(minRows, longStringsMaxRows, () -> new BytesRef(ESTestCase.randomAlphaOfLengthBetween(300, 1000))), - type, - false, - true - ), - new TypedDataSupplier( - "", - () -> randomList( - minRows, - longStringsMaxRows, - () -> new BytesRef(ESTestCase.randomRealisticUnicodeOfLengthBetween(300, 1000)) - ), - type, - false, - true - ) - ) + addSuppliers( + cases, + 
minRows, + longStringsMaxRows, + "long alpha " + type, + type, + () -> new BytesRef(ESTestCase.randomAlphaOfLengthBetween(300, 1000)) + ); + addSuppliers( + cases, + minRows, + longStringsMaxRows, + "long unicode " + type, + type, + () -> new BytesRef(ESTestCase.randomRealisticUnicodeOfLengthBetween(300, 1000)) ); } return cases; } + + private static void addSuppliers( + List cases, + int minRows, + int maxRows, + String name, + DataType type, + Supplier valueSupplier + ) { + if (minRows <= 1 && maxRows >= 1) { + cases.add(new TypedDataSupplier("", () -> randomList(1, 1, valueSupplier), type, false, true)); + } + + if (maxRows > 1) { + cases.add( + new TypedDataSupplier("<" + name + "s>", () -> randomList(Math.max(2, minRows), maxRows, valueSupplier), type, false, true) + ); + } + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgTests.java index 80737dac1aa58..ac599c7ff05f8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgTests.java @@ -64,25 +64,27 @@ protected Expression build(Source source, List args) { private static TestCaseSupplier makeSupplier(TestCaseSupplier.TypedDataSupplier fieldSupplier) { return new TestCaseSupplier(List.of(fieldSupplier.type()), () -> { var fieldTypedData = fieldSupplier.get(); + var fieldData = fieldTypedData.multiRowData(); - Object expected = switch (fieldTypedData.type().widenSmallNumeric()) { - case INTEGER -> fieldTypedData.multiRowData() - .stream() - .map(v -> (Integer) v) - .collect(Collectors.summarizingInt(Integer::intValue)) - .getAverage(); - case LONG -> fieldTypedData.multiRowData() - .stream() - .map(v -> (Long) v) - .collect(Collectors.summarizingLong(Long::longValue)) - 
.getAverage(); - case DOUBLE -> fieldTypedData.multiRowData() - .stream() - .map(v -> (Double) v) - .collect(Collectors.summarizingDouble(Double::doubleValue)) - .getAverage(); - default -> throw new IllegalStateException("Unexpected value: " + fieldTypedData.type()); - }; + Object expected = null; + + if (fieldData.size() == 1) { + // For single elements, we directly return them to avoid precision issues + expected = ((Number) fieldData.get(0)).doubleValue(); + } else if (fieldData.size() > 1) { + expected = switch (fieldTypedData.type().widenSmallNumeric()) { + case INTEGER -> fieldData.stream() + .map(v -> (Integer) v) + .collect(Collectors.summarizingInt(Integer::intValue)) + .getAverage(); + case LONG -> fieldData.stream().map(v -> (Long) v).collect(Collectors.summarizingLong(Long::longValue)).getAverage(); + case DOUBLE -> fieldData.stream() + .map(v -> (Double) v) + .collect(Collectors.summarizingDouble(Double::doubleValue)) + .getAverage(); + default -> throw new IllegalStateException("Unexpected value: " + fieldTypedData.type()); + }; + } return new TestCaseSupplier.TestCase( List.of(fieldTypedData), From 0ab79db1831255323225dd581c67f33de3cfd084 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 23 Oct 2024 05:42:41 +1100 Subject: [PATCH 299/449] Mute org.elasticsearch.xpack.inference.DefaultEndPointsIT testInferDeploysDefaultE5 #115361 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 325fcbf04ce59..a3fd1fa395f5d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -318,6 +318,9 @@ tests: - class: org.elasticsearch.xpack.watcher.trigger.schedule.engine.TickerScheduleEngineTests method: testAddWithLastCheckedTimeExecutesBeforeInitialInterval issue: https://github.com/elastic/elasticsearch/issues/115356 +- class: org.elasticsearch.xpack.inference.DefaultEndPointsIT + method: testInferDeploysDefaultE5 + issue: 
https://github.com/elastic/elasticsearch/issues/115361 # Examples: # From c9e57700af992ca3e2f3c85825f3ed6be822db2e Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Tue, 22 Oct 2024 16:05:01 -0400 Subject: [PATCH 300/449] Optimize downloader task executor (#115355) --- .../geoip/GeoIpDownloaderTaskExecutor.java | 67 +++++++++++-------- .../ingest/ConfigurationUtils.java | 3 +- 2 files changed, 40 insertions(+), 30 deletions(-) diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java index eacf2e5a2ee57..e4150005ed1ae 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java @@ -43,13 +43,14 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteTransportException; +import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; -import java.util.stream.Collectors; import static org.elasticsearch.ingest.geoip.GeoIpDownloader.DATABASES_INDEX; import static org.elasticsearch.ingest.geoip.GeoIpDownloader.GEOIP_DOWNLOADER; @@ -238,14 +239,11 @@ public void clusterChanged(ClusterChangedEvent event) { } static boolean hasAtLeastOneGeoipProcessor(ClusterState clusterState) { - if (pipelineConfigurationsWithGeoIpProcessor(clusterState, true).isEmpty() == false) { + if (pipelinesWithGeoIpProcessor(clusterState, true).isEmpty() == false) { return true; } - Set checkReferencedPipelines = pipelineConfigurationsWithGeoIpProcessor(clusterState, false).stream() - .map(PipelineConfiguration::getId) - .collect(Collectors.toSet()); - + final Set 
checkReferencedPipelines = pipelinesWithGeoIpProcessor(clusterState, false); if (checkReferencedPipelines.isEmpty()) { return false; } @@ -258,22 +256,24 @@ static boolean hasAtLeastOneGeoipProcessor(ClusterState clusterState) { } /** - * Retrieve list of pipelines that have at least one geoip processor. + * Retrieve the set of pipeline ids that have at least one geoip processor. * @param clusterState Cluster state. * @param downloadDatabaseOnPipelineCreation Filter the list to include only pipeline with the download_database_on_pipeline_creation * matching the param. - * @return A list of {@link PipelineConfiguration} matching criteria. + * @return A set of pipeline ids matching criteria. */ @SuppressWarnings("unchecked") - private static List pipelineConfigurationsWithGeoIpProcessor( - ClusterState clusterState, - boolean downloadDatabaseOnPipelineCreation - ) { - List pipelineDefinitions = IngestService.getPipelines(clusterState); - return pipelineDefinitions.stream().filter(pipelineConfig -> { - List> processors = (List>) pipelineConfig.getConfigAsMap().get(Pipeline.PROCESSORS_KEY); - return hasAtLeastOneGeoipProcessor(processors, downloadDatabaseOnPipelineCreation); - }).toList(); + private static Set pipelinesWithGeoIpProcessor(ClusterState clusterState, boolean downloadDatabaseOnPipelineCreation) { + List configurations = IngestService.getPipelines(clusterState); + Set ids = new HashSet<>(); + // note: this loop is unrolled rather than streaming-style because it's hot enough to show up in a flamegraph + for (PipelineConfiguration configuration : configurations) { + List> processors = (List>) configuration.getConfigAsMap().get(Pipeline.PROCESSORS_KEY); + if (hasAtLeastOneGeoipProcessor(processors, downloadDatabaseOnPipelineCreation)) { + ids.add(configuration.getId()); + } + } + return Collections.unmodifiableSet(ids); } /** @@ -283,7 +283,15 @@ private static List pipelineConfigurationsWithGeoIpProces * @return true if a geoip processor is found in the 
processor list. */ private static boolean hasAtLeastOneGeoipProcessor(List> processors, boolean downloadDatabaseOnPipelineCreation) { - return processors != null && processors.stream().anyMatch(p -> hasAtLeastOneGeoipProcessor(p, downloadDatabaseOnPipelineCreation)); + if (processors != null) { + // note: this loop is unrolled rather than streaming-style because it's hot enough to show up in a flamegraph + for (Map processor : processors) { + if (hasAtLeastOneGeoipProcessor(processor, downloadDatabaseOnPipelineCreation)) { + return true; + } + } + } + return false; } /** @@ -317,7 +325,7 @@ private static boolean hasAtLeastOneGeoipProcessor(Map processor } /** - * Check if a processor config is has an on_failure clause containing at least a geoip processor. + * Check if a processor config has an on_failure clause containing at least a geoip processor. * @param processor Processor config. * @param downloadDatabaseOnPipelineCreation Should the download_database_on_pipeline_creation of the geoip processor be true or false. * @return true if a geoip processor is found in the processor list. 
@@ -327,16 +335,17 @@ private static boolean isProcessorWithOnFailureGeoIpProcessor( Map processor, boolean downloadDatabaseOnPipelineCreation ) { - return processor != null - && processor.values() - .stream() - .anyMatch( - value -> value instanceof Map - && hasAtLeastOneGeoipProcessor( - ((Map>>) value).get("on_failure"), - downloadDatabaseOnPipelineCreation - ) - ); + // note: this loop is unrolled rather than streaming-style because it's hot enough to show up in a flamegraph + for (Object value : processor.values()) { + if (value instanceof Map + && hasAtLeastOneGeoipProcessor( + ((Map>>) value).get("on_failure"), + downloadDatabaseOnPipelineCreation + )) { + return true; + } + } + return false; } /** diff --git a/server/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java b/server/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java index 5059272aa2e23..97a68d9807688 100644 --- a/server/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java +++ b/server/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java @@ -12,6 +12,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; @@ -239,7 +240,7 @@ private static Boolean readBoolean(String processorType, String processorTag, St processorType, processorTag, propertyName, - "property isn't a boolean, but of type [" + value.getClass().getName() + "]" + Strings.format("property isn't a boolean, but of type [%s]", value.getClass().getName()) ); } From 47c4909e6d214c44ff2d0999819d19daf4377a45 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Tue, 22 Oct 2024 16:05:25 -0400 Subject: [PATCH 301/449] Optimize IngestService#resolvePipelinesFromIndexTemplates (#115348) --- 
.../MetadataIndexTemplateService.java | 68 ++++++++++++++----- 1 file changed, 52 insertions(+), 16 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java index ccdfaa5518aee..d6ed28454df96 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java @@ -1200,6 +1200,42 @@ static ClusterState innerPutTemplate( return ClusterState.builder(currentState).metadata(builder).build(); } + /** + * A private, local alternative to elements.stream().anyMatch(predicate) for micro-optimization reasons. + */ + private static boolean anyMatch(final List elements, final Predicate predicate) { + for (T e : elements) { + if (predicate.test(e)) { + return true; + } + } + return false; + } + + /** + * A private, local alternative to elements.stream().noneMatch(predicate) for micro-optimization reasons. + */ + private static boolean noneMatch(final List elements, final Predicate predicate) { + for (T e : elements) { + if (predicate.test(e)) { + return false; + } + } + return true; + } + + /** + * A private, local alternative to elements.stream().filter(predicate).findFirst() for micro-optimization reasons. + */ + private static Optional findFirst(final List elements, final Predicate predicate) { + for (T e : elements) { + if (predicate.test(e)) { + return Optional.of(e); + } + } + return Optional.empty(); + } + /** * Finds index templates whose index pattern matched with the given index name. In the case of * hidden indices, a template with a match all pattern or global template will not be returned. 
@@ -1219,15 +1255,14 @@ public static List findV1Templates(Metadata metadata, Str final List matchedTemplates = new ArrayList<>(); for (IndexTemplateMetadata template : metadata.templates().values()) { if (isHidden == null || isHidden == Boolean.FALSE) { - final boolean matched = template.patterns().stream().anyMatch(patternMatchPredicate); - if (matched) { + if (anyMatch(template.patterns(), patternMatchPredicate)) { matchedTemplates.add(template); } } else { assert isHidden == Boolean.TRUE; - final boolean isNotMatchAllTemplate = template.patterns().stream().noneMatch(Regex::isMatchAllPattern); + final boolean isNotMatchAllTemplate = noneMatch(template.patterns(), Regex::isMatchAllPattern); if (isNotMatchAllTemplate) { - if (template.patterns().stream().anyMatch(patternMatchPredicate)) { + if (anyMatch(template.patterns(), patternMatchPredicate)) { matchedTemplates.add(template); } } @@ -1238,19 +1273,21 @@ public static List findV1Templates(Metadata metadata, Str // this is complex but if the index is not hidden in the create request but is hidden as the result of template application, // then we need to exclude global templates if (isHidden == null) { - final Optional templateWithHiddenSetting = matchedTemplates.stream() - .filter(template -> IndexMetadata.INDEX_HIDDEN_SETTING.exists(template.settings())) - .findFirst(); + final Optional templateWithHiddenSetting = findFirst( + matchedTemplates, + template -> IndexMetadata.INDEX_HIDDEN_SETTING.exists(template.settings()) + ); if (templateWithHiddenSetting.isPresent()) { final boolean templatedIsHidden = IndexMetadata.INDEX_HIDDEN_SETTING.get(templateWithHiddenSetting.get().settings()); if (templatedIsHidden) { // remove the global templates - matchedTemplates.removeIf(current -> current.patterns().stream().anyMatch(Regex::isMatchAllPattern)); + matchedTemplates.removeIf(current -> anyMatch(current.patterns(), Regex::isMatchAllPattern)); } // validate that hidden didn't change - final Optional 
templateWithHiddenSettingPostRemoval = matchedTemplates.stream() - .filter(template -> IndexMetadata.INDEX_HIDDEN_SETTING.exists(template.settings())) - .findFirst(); + final Optional templateWithHiddenSettingPostRemoval = findFirst( + matchedTemplates, + template -> IndexMetadata.INDEX_HIDDEN_SETTING.exists(template.settings()) + ); if (templateWithHiddenSettingPostRemoval.isEmpty() || templateWithHiddenSetting.get() != templateWithHiddenSettingPostRemoval.get()) { throw new IllegalStateException( @@ -1313,14 +1350,13 @@ static List> findV2CandidateTemplates(Met * built with a template that none of its indices match. */ if (isHidden == false || template.getDataStreamTemplate() != null) { - final boolean matched = template.indexPatterns().stream().anyMatch(patternMatchPredicate); - if (matched) { + if (anyMatch(template.indexPatterns(), patternMatchPredicate)) { candidates.add(Tuple.tuple(name, template)); } } else { - final boolean isNotMatchAllTemplate = template.indexPatterns().stream().noneMatch(Regex::isMatchAllPattern); + final boolean isNotMatchAllTemplate = noneMatch(template.indexPatterns(), Regex::isMatchAllPattern); if (isNotMatchAllTemplate) { - if (template.indexPatterns().stream().anyMatch(patternMatchPredicate)) { + if (anyMatch(template.indexPatterns(), patternMatchPredicate)) { candidates.add(Tuple.tuple(name, template)); } } @@ -1334,7 +1370,7 @@ static List> findV2CandidateTemplates(Met // Checks if a global template specifies the `index.hidden` setting. This check is important because a global // template shouldn't specify the `index.hidden` setting, we leave it up to the caller to handle this situation. 
private static boolean isGlobalAndHasIndexHiddenSetting(Metadata metadata, ComposableIndexTemplate template, String templateName) { - return template.indexPatterns().stream().anyMatch(Regex::isMatchAllPattern) + return anyMatch(template.indexPatterns(), Regex::isMatchAllPattern) && IndexMetadata.INDEX_HIDDEN_SETTING.exists(resolveSettings(metadata, templateName)); } From ee51f5fb7673ca10db8aed3a4049e4f7683e857e Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Tue, 22 Oct 2024 16:05:52 -0400 Subject: [PATCH 302/449] Optimize IndexTemplateRegistry#clusterChanged (#115347) --- .../cluster/metadata/ComposableIndexTemplate.java | 13 ++++++++++--- .../xpack/core/ilm/IndexLifecycleMetadata.java | 12 ++++++++---- 2 files changed, 18 insertions(+), 7 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java index 6d1a874e1c72b..ae7cff6312155 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java @@ -28,6 +28,8 @@ import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; @@ -189,9 +191,14 @@ public List getRequiredComponentTemplates() { if (ignoreMissingComponentTemplates == null) { return componentTemplates; } - return componentTemplates.stream() - .filter(componentTemplate -> ignoreMissingComponentTemplates.contains(componentTemplate) == false) - .toList(); + // note: this loop is unrolled rather than streaming-style because it's hot enough to show up in a flamegraph + List required = new ArrayList<>(componentTemplates.size()); + for (String template : componentTemplates) { + if (ignoreMissingComponentTemplates.contains(template) == false) { + 
required.add(template); + } + } + return Collections.unmodifiableList(required); } @Nullable diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java index 3674103eda215..f8cb371687d72 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java @@ -25,6 +25,7 @@ import java.io.IOException; import java.util.Collections; import java.util.EnumSet; +import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -92,10 +93,13 @@ public OperationMode getOperationMode() { } public Map getPolicies() { - return policyMetadatas.values() - .stream() - .map(LifecyclePolicyMetadata::getPolicy) - .collect(Collectors.toMap(LifecyclePolicy::getName, Function.identity())); + // note: this loop is unrolled rather than streaming-style because it's hot enough to show up in a flamegraph + Map policies = new HashMap<>(policyMetadatas.size()); + for (LifecyclePolicyMetadata policyMetadata : policyMetadatas.values()) { + LifecyclePolicy policy = policyMetadata.getPolicy(); + policies.put(policy.getName(), policy); + } + return Collections.unmodifiableMap(policies); } @Override From c34f766a3059ad6e6b055a6a9a34e19d81a83832 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 23 Oct 2024 07:36:51 +1100 Subject: [PATCH 303/449] Mute org.elasticsearch.xpack.watcher.trigger.schedule.engine.TickerScheduleEngineTests testWatchWithNoLastCheckedTimeButHasActivationTimeExecutesBeforeInitialInterval #115368 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index a3fd1fa395f5d..0dc82b311de7f 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -321,6 
+321,9 @@ tests: - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT method: testInferDeploysDefaultE5 issue: https://github.com/elastic/elasticsearch/issues/115361 +- class: org.elasticsearch.xpack.watcher.trigger.schedule.engine.TickerScheduleEngineTests + method: testWatchWithNoLastCheckedTimeButHasActivationTimeExecutesBeforeInitialInterval + issue: https://github.com/elastic/elasticsearch/issues/115368 # Examples: # From ce20a4d8e7059fd6277181e5989f0db77f81e952 Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Tue, 22 Oct 2024 15:37:13 -0500 Subject: [PATCH 304/449] Adding support for simulate ingest mapping adddition for indices with mappings that do not come from templates (#115359) --- docs/changelog/115359.yaml | 6 + .../test/ingest/80_ingest_simulate.yml | 144 +++++++++++++++++- .../action/bulk/BulkFeatures.java | 4 +- .../bulk/TransportSimulateBulkAction.java | 42 ++++- 4 files changed, 186 insertions(+), 10 deletions(-) create mode 100644 docs/changelog/115359.yaml diff --git a/docs/changelog/115359.yaml b/docs/changelog/115359.yaml new file mode 100644 index 0000000000000..65b3086dfc8d0 --- /dev/null +++ b/docs/changelog/115359.yaml @@ -0,0 +1,6 @@ +pr: 115359 +summary: Adding support for simulate ingest mapping adddition for indices with mappings + that do not come from templates +area: Ingest Node +type: enhancement +issues: [] diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml index d4aa2f1ad4467..4d1a62c6f179e 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml @@ -1229,7 +1229,7 @@ setup: - requires: cluster_features: 
["simulate.mapping.addition"] - reason: "ingest simulate mapping addition added in 8.16" + reason: "ingest simulate mapping addition added in 8.17" - do: headers: @@ -1465,7 +1465,7 @@ setup: - requires: cluster_features: ["simulate.mapping.addition"] - reason: "ingest simulate mapping addition added in 8.16" + reason: "ingest simulate mapping addition added in 8.17" - do: indices.put_template: @@ -1571,3 +1571,143 @@ setup: - match: { docs.0.doc._source.foo: 3 } - match: { docs.0.doc._source.bar: "not a boolean" } - not_exists: docs.0.doc.error + +--- +"Test mapping addition works with indices without templates": + # In this test, we make sure that when we have an index that has mapping but was not built with a template, that the additional_mapping + # is merged in with that template. + + - skip: + features: + - headers + - allowed_warnings + + - requires: + cluster_features: ["simulate.support.non.template.mapping"] + reason: "ingest simulate support for indices with mappings that didn't come from templates added in 8.17" + + # First, make sure that validation fails before we create the index (since we are only defining to bar field but trying to index a value + # for foo. + - do: + headers: + Content-Type: application/json + simulate.ingest: + index: foo-1 + body: > + { + "docs": [ + { + "_id": "asdf", + "_source": { + "foo": 3, + "bar": "some text value" + } + } + ], + "mapping_addition": { + "dynamic": "strict", + "properties": { + "bar": { + "type": "keyword" + } + } + } + } + - length: { docs: 1 } + - match: { docs.0.doc._index: "foo-1" } + - match: { docs.0.doc._source.foo: 3 } + - match: { docs.0.doc._source.bar: "some text value" } + - match: { docs.0.doc.error.type: "strict_dynamic_mapping_exception" } + + - do: + indices.create: + index: foo-1 + body: + mappings: + dynamic: strict + properties: + foo: + type: integer + - match: { acknowledged: true } + + # Now make sure that the mapping for the newly-created index is getting picked up. 
Validation fails because it only defined a mapping + # for foo, not for bar. + - do: + headers: + Content-Type: application/json + simulate.ingest: + index: foo-1 + body: > + { + "docs": [ + { + "_id": "asdf", + "_source": { + "foo": 3, + "bar": "some text value" + } + } + ] + } + - length: { docs: 1 } + - match: { docs.0.doc._index: "foo-1" } + - match: { docs.0.doc._source.foo: 3 } + - match: { docs.0.doc._source.bar: "some text value" } + - match: { docs.0.doc.error.type: "strict_dynamic_mapping_exception" } + + # Now we make sure that the index's mapping gets merged with the mapping_addition: + - do: + headers: + Content-Type: application/json + simulate.ingest: + index: foo-1 + body: > + { + "docs": [ + { + "_id": "asdf", + "_source": { + "foo": 3, + "bar": "some text value" + } + } + ], + "mapping_addition": { + "dynamic": "strict", + "properties": { + "bar": { + "type": "keyword" + } + } + } + } + - length: { docs: 1 } + - match: { docs.0.doc._index: "foo-1" } + - match: { docs.0.doc._source.foo: 3 } + - match: { docs.0.doc._source.bar: "some text value" } + - not_exists: docs.0.doc.error + + # This last call to simulate is just making sure that if there are no templates, no index mappings, no substitutions, and no mapping + # addition, then validation does not fail + - do: + headers: + Content-Type: application/json + simulate.ingest: + index: nonexistent + body: > + { + "docs": [ + { + "_id": "asdf", + "_source": { + "foo": 3, + "bar": "some text value" + } + } + ] + } + - length: { docs: 1 } + - match: { docs.0.doc._index: "nonexistent" } + - match: { docs.0.doc._source.foo: 3 } + - match: { docs.0.doc._source.bar: "some text value" } + - not_exists: docs.0.doc.error diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java index 22cf8a2260d87..62a9b88cb6a57 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java +++ 
b/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java @@ -19,6 +19,7 @@ import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_MAPPING_ADDITION; import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_MAPPING_VALIDATION; import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_MAPPING_VALIDATION_TEMPLATES; +import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_SUPPORT_NON_TEMPLATE_MAPPING; public class BulkFeatures implements FeatureSpecification { public Set getFeatures() { @@ -27,7 +28,8 @@ public Set getFeatures() { SIMULATE_MAPPING_VALIDATION_TEMPLATES, SIMULATE_COMPONENT_TEMPLATE_SUBSTITUTIONS, SIMULATE_INDEX_TEMPLATE_SUBSTITUTIONS, - SIMULATE_MAPPING_ADDITION + SIMULATE_MAPPING_ADDITION, + SIMULATE_SUPPORT_NON_TEMPLATE_MAPPING ); } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java index 0888b70f5399c..1353fa78595ef 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java @@ -84,6 +84,7 @@ public class TransportSimulateBulkAction extends TransportAbstractBulkAction { ); public static final NodeFeature SIMULATE_INDEX_TEMPLATE_SUBSTITUTIONS = new NodeFeature("simulate.index.template.substitutions"); public static final NodeFeature SIMULATE_MAPPING_ADDITION = new NodeFeature("simulate.mapping.addition"); + public static final NodeFeature SIMULATE_SUPPORT_NON_TEMPLATE_MAPPING = new NodeFeature("simulate.support.non.template.mapping"); private final IndicesService indicesService; private final NamedXContentRegistry xContentRegistry; private final Set indexSettingProviders; @@ -258,6 +259,10 @@ private Exception validateMappings( String matchingTemplate = 
findV2Template(simulatedState.metadata(), request.index(), false); if (matchingTemplate != null) { + /* + * The index matches a v2 template (including possibly one or more of the substitutions passed in). So we use this + * template, and then possibly apply the mapping addition if it is not null, and validate. + */ final Template template = TransportSimulateIndexTemplateAction.resolveTemplate( matchingTemplate, request.index(), @@ -273,13 +278,36 @@ private Exception validateMappings( validateUpdatedMappings(mappings, mergedMappings, request, sourceToParse); } else { List matchingTemplates = findV1Templates(simulatedState.metadata(), request.index(), false); - final Map mappingsMap = MetadataCreateIndexService.parseV1Mappings( - "{}", - matchingTemplates.stream().map(IndexTemplateMetadata::getMappings).collect(toList()), - xContentRegistry - ); - final CompressedXContent combinedMappings = mergeMappings(new CompressedXContent(mappingsMap), mappingAddition); - validateUpdatedMappings(null, combinedMappings, request, sourceToParse); + if (matchingTemplates.isEmpty() == false) { + /* + * The index matches v1 mappings. These are not compatible with component_template_substitutions or + * index_template_substitutions, but we can apply a mapping_addition. + */ + final Map mappingsMap = MetadataCreateIndexService.parseV1Mappings( + "{}", + matchingTemplates.stream().map(IndexTemplateMetadata::getMappings).collect(toList()), + xContentRegistry + ); + final CompressedXContent combinedMappings = mergeMappings(new CompressedXContent(mappingsMap), mappingAddition); + validateUpdatedMappings(null, combinedMappings, request, sourceToParse); + } else if (indexAbstraction != null && mappingAddition.isEmpty() == false) { + /* + * The index matched no templates of any kind, including the substitutions. But it might have a mapping. So we + * merge in the mapping addition if it exists, and validate. 
+ */ + MappingMetadata mappingFromIndex = clusterService.state().metadata().index(indexAbstraction.getName()).mapping(); + CompressedXContent currentIndexCompressedXContent = mappingFromIndex == null ? null : mappingFromIndex.source(); + CompressedXContent combinedMappings = mergeMappings(currentIndexCompressedXContent, mappingAddition); + validateUpdatedMappings(null, combinedMappings, request, sourceToParse); + } else { + /* + * The index matched no templates and had no mapping of its own. If there were component template substitutions + * or index template substitutions, they didn't match anything. So just apply the mapping addition if it exists, + * and validate. + */ + final CompressedXContent combinedMappings = mergeMappings(null, mappingAddition); + validateUpdatedMappings(null, combinedMappings, request, sourceToParse); + } } } } catch (Exception e) { From 19a35a4592cc250c0b53a76d474ec962187e9309 Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Tue, 22 Oct 2024 17:29:57 -0500 Subject: [PATCH 305/449] Unmuting 80_ingest_simulate method (#115370) It looks like this test was accidentally re-muted. Unmuting again. 
Closes #112575 --- muted-tests.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 0dc82b311de7f..a7f36cdd06d66 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -99,9 +99,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/112424 - class: org.elasticsearch.ingest.geoip.IngestGeoIpClientYamlTestSuiteIT issue: https://github.com/elastic/elasticsearch/issues/111497 -- class: org.elasticsearch.smoketest.SmokeTestIngestWithAllDepsClientYamlTestSuiteIT - method: test {yaml=ingest/80_ingest_simulate/Test ingest simulate with reroute and mapping validation from templates} - issue: https://github.com/elastic/elasticsearch/issues/112575 - class: org.elasticsearch.xpack.security.authc.kerberos.SimpleKdcLdapServerTests method: testClientServiceMutualAuthentication issue: https://github.com/elastic/elasticsearch/issues/112529 From 5c2e28e7521af2b82e2d73a18ea33a1e682c040f Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Tue, 22 Oct 2024 15:42:19 -0700 Subject: [PATCH 306/449] Add tests for migration between source modes in logsdb data stream (#115283) --- .../logsdb/LogsDbSourceModeMigrationIT.java | 290 ++++++++++++++++++ .../logsdb/LogsIndexModeCustomSettingsIT.java | 11 - .../xpack/logsdb/LogsIndexModeRestTestIT.java | 11 + 3 files changed, 301 insertions(+), 11 deletions(-) create mode 100644 x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsDbSourceModeMigrationIT.java diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsDbSourceModeMigrationIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsDbSourceModeMigrationIT.java new file mode 100644 index 0000000000000..adb23567e3933 --- /dev/null +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsDbSourceModeMigrationIT.java @@ -0,0 +1,290 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.logsdb; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.hamcrest.Matchers; +import org.junit.Before; +import org.junit.ClassRule; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.stream.Stream; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.is; + +public class LogsDbSourceModeMigrationIT extends LogsIndexModeRestTestIT { + public static final String INDEX_TEMPLATE = """ + { + "index_patterns": ["my-logs-*-*"], + "priority": 100, + "data_stream": {}, + "composed_of": [ + "my-logs-mapping", + "my-logs-original-source", + "my-logs-migrated-source" + ], + "ignore_missing_component_templates": ["my-logs-original-source", "my-logs-migrated-source"] + } + """; + + public static final String MAPPING_COMPONENT_TEMPLATE = """ + { + "template": { + "settings": { + "index": { + "mode": "logsdb" + } + }, + "mappings": { + "properties": { + "@timestamp": { + "type": "date", + "format": "epoch_millis" + }, + "message": { + "type": 
"text" + }, + "method": { + "type": "keyword" + }, + "hits": { + "type": "long" + } + } + } + } + }"""; + + public static final String STORED_SOURCE_COMPONENT_TEMPLATE = """ + { + "template": { + "settings": { + "index": { + "mapping.source.mode": "stored" + } + } + } + }"""; + + public static final String SYNTHETIC_SOURCE_COMPONENT_TEMPLATE = """ + { + "template": { + "settings": { + "index": { + "mapping.source.mode": "synthetic" + } + } + } + }"""; + + @ClassRule() + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .module("constant-keyword") + .module("data-streams") + .module("mapper-extras") + .module("x-pack-aggregate-metric") + .module("x-pack-stack") + .setting("xpack.security.enabled", "false") + .setting("xpack.otel_data.registry.enabled", "false") + .setting("xpack.license.self_generated.type", "trial") + .setting("cluster.logsdb.enabled", "true") + .setting("stack.templates.enabled", "false") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Before + public void setup() { + client = client(); + } + + private RestClient client; + + public void testSwitchFromStoredToSyntheticSource() throws IOException { + assertOK(putComponentTemplate(client, "my-logs-mapping", MAPPING_COMPONENT_TEMPLATE)); + assertOK(putComponentTemplate(client, "my-logs-original-source", STORED_SOURCE_COMPONENT_TEMPLATE)); + + assertOK(putTemplate(client, "my-logs", INDEX_TEMPLATE)); + assertOK(createDataStream(client, "my-logs-ds-test")); + + var initialSourceMode = (String) getSetting( + client, + getDataStreamBackingIndex(client, "my-logs-ds-test", 0), + "index.mapping.source.mode" + ); + assertThat(initialSourceMode, equalTo("stored")); + var initialIndexMode = (String) getSetting(client, getDataStreamBackingIndex(client, "my-logs-ds-test", 0), "index.mode"); + assertThat(initialIndexMode, equalTo("logsdb")); + + var indexedWithStoredSource = new 
ArrayList(); + var indexedWithSyntheticSource = new ArrayList(); + for (int i = 0; i < 10; i++) { + indexedWithStoredSource.add(generateDoc()); + indexedWithSyntheticSource.add(generateDoc()); + } + + Response storedSourceBulkResponse = bulkIndex(client, "my-logs-ds-test", indexedWithStoredSource, 0); + assertOK(storedSourceBulkResponse); + assertThat(entityAsMap(storedSourceBulkResponse).get("errors"), Matchers.equalTo(false)); + + assertOK(putComponentTemplate(client, "my-logs-migrated-source", SYNTHETIC_SOURCE_COMPONENT_TEMPLATE)); + var rolloverResponse = rolloverDataStream(client, "my-logs-ds-test"); + assertOK(rolloverResponse); + assertThat(entityAsMap(rolloverResponse).get("rolled_over"), is(true)); + + var finalSourceMode = (String) getSetting( + client, + getDataStreamBackingIndex(client, "my-logs-ds-test", 1), + "index.mapping.source.mode" + ); + assertThat(finalSourceMode, equalTo("synthetic")); + + Response syntheticSourceBulkResponse = bulkIndex(client, "my-logs-ds-test", indexedWithSyntheticSource, 10); + assertOK(syntheticSourceBulkResponse); + assertThat(entityAsMap(syntheticSourceBulkResponse).get("errors"), Matchers.equalTo(false)); + + var allDocs = Stream.concat(indexedWithStoredSource.stream(), indexedWithSyntheticSource.stream()).toList(); + + var sourceList = search(new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).size(allDocs.size()), "my-logs-ds-test"); + assertThat(sourceList.size(), equalTo(allDocs.size())); + + for (int i = 0; i < sourceList.size(); i++) { + var expected = XContentHelper.convertToMap(BytesReference.bytes(allDocs.get(i)), false, XContentType.JSON).v2(); + assertThat(sourceList.get(i), equalTo(expected)); + } + } + + public void testSwitchFromSyntheticToStoredSource() throws IOException { + assertOK(putComponentTemplate(client, "my-logs-mapping", MAPPING_COMPONENT_TEMPLATE)); + assertOK(putComponentTemplate(client, "my-logs-original-source", SYNTHETIC_SOURCE_COMPONENT_TEMPLATE)); + + 
assertOK(putTemplate(client, "my-logs", INDEX_TEMPLATE)); + assertOK(createDataStream(client, "my-logs-ds-test")); + + var initialSourceMode = (String) getSetting( + client, + getDataStreamBackingIndex(client, "my-logs-ds-test", 0), + "index.mapping.source.mode" + ); + assertThat(initialSourceMode, equalTo("synthetic")); + var initialIndexMode = (String) getSetting(client, getDataStreamBackingIndex(client, "my-logs-ds-test", 0), "index.mode"); + assertThat(initialIndexMode, equalTo("logsdb")); + + var indexedWithSyntheticSource = new ArrayList(); + var indexedWithStoredSource = new ArrayList(); + for (int i = 0; i < 10; i++) { + indexedWithSyntheticSource.add(generateDoc()); + indexedWithStoredSource.add(generateDoc()); + } + + Response syntheticSourceBulkResponse = bulkIndex(client, "my-logs-ds-test", indexedWithSyntheticSource, 0); + assertOK(syntheticSourceBulkResponse); + assertThat(entityAsMap(syntheticSourceBulkResponse).get("errors"), Matchers.equalTo(false)); + + assertOK(putComponentTemplate(client, "my-logs-migrated-source", STORED_SOURCE_COMPONENT_TEMPLATE)); + var rolloverResponse = rolloverDataStream(client, "my-logs-ds-test"); + assertOK(rolloverResponse); + assertThat(entityAsMap(rolloverResponse).get("rolled_over"), is(true)); + + var finalSourceMode = (String) getSetting( + client, + getDataStreamBackingIndex(client, "my-logs-ds-test", 1), + "index.mapping.source.mode" + ); + assertThat(finalSourceMode, equalTo("stored")); + + Response storedSourceBulkResponse = bulkIndex(client, "my-logs-ds-test", indexedWithStoredSource, 10); + assertOK(storedSourceBulkResponse); + assertThat(entityAsMap(storedSourceBulkResponse).get("errors"), Matchers.equalTo(false)); + + var allDocs = Stream.concat(indexedWithSyntheticSource.stream(), indexedWithStoredSource.stream()).toList(); + + var sourceList = search(new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).size(allDocs.size()), "my-logs-ds-test"); + assertThat(sourceList.size(), 
equalTo(allDocs.size())); + + for (int i = 0; i < sourceList.size(); i++) { + var expected = XContentHelper.convertToMap(BytesReference.bytes(allDocs.get(i)), false, XContentType.JSON).v2(); + assertThat(sourceList.get(i), equalTo(expected)); + } + } + + private static Response bulkIndex(RestClient client, String dataStreamName, List documents, int startId) + throws IOException { + var sb = new StringBuilder(); + int id = startId; + for (var document : documents) { + sb.append(Strings.format("{ \"create\": { \"_id\" : \"%d\" } }", id)).append("\n"); + sb.append(Strings.toString(document)).append("\n"); + id++; + } + + var bulkRequest = new Request("POST", "/" + dataStreamName + "/_bulk"); + bulkRequest.setJsonEntity(sb.toString()); + bulkRequest.addParameter("refresh", "true"); + return client.performRequest(bulkRequest); + } + + @SuppressWarnings("unchecked") + private List> search(SearchSourceBuilder search, String dataStreamName) throws IOException { + var request = new Request("GET", "/" + dataStreamName + "/_search"); + request.setJsonEntity(Strings.toString(search)); + var searchResponse = client.performRequest(request); + assertOK(searchResponse); + + Map searchResponseMap = XContentHelper.convertToMap( + XContentType.JSON.xContent(), + searchResponse.getEntity().getContent(), + false + ); + var hitsMap = (Map) searchResponseMap.get("hits"); + + var hitsList = (List>) hitsMap.get("hits"); + assertThat(hitsList.size(), greaterThan(0)); + + return hitsList.stream() + .sorted(Comparator.comparingInt((Map hit) -> Integer.parseInt((String) hit.get("_id")))) + .map(hit -> (Map) hit.get("_source")) + .toList(); + } + + private static XContentBuilder generateDoc() throws IOException { + var doc = XContentFactory.jsonBuilder(); + doc.startObject(); + { + doc.field("@timestamp", Long.toString(randomMillisUpToYear9999())); + doc.field("message", randomAlphaOfLengthBetween(20, 50)); + doc.field("method", randomAlphaOfLength(3)); + doc.field("hits", randomLong()); + } + 
doc.endObject(); + + return doc; + } +} diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java index c5ccee1d36b72..f529b9fa1db96 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.logsdb; -import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.settings.Settings; @@ -496,16 +495,6 @@ public void testIgnoreAboveSetting() throws IOException { } } - private static Map getMapping(final RestClient client, final String indexName) throws IOException { - final Request request = new Request("GET", "/" + indexName + "/_mapping"); - - Map mappings = ((Map>) entityAsMap(client.performRequest(request)).get(indexName)).get( - "mappings" - ); - - return mappings; - } - private Function> subObject(String key) { return (mapAsObject) -> (Map) ((Map) mapAsObject).get(key); } diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java index dbee5d1d2de8c..cc7f5bdb33871 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java @@ -98,4 +98,15 @@ protected static Response putClusterSetting(final RestClient client, final Strin request.setJsonEntity("{ \"transient\": { \"" + settingName + "\": " + settingValue + " } }"); return 
client.performRequest(request); } + + @SuppressWarnings("unchecked") + protected static Map getMapping(final RestClient client, final String indexName) throws IOException { + final Request request = new Request("GET", "/" + indexName + "/_mapping"); + + Map mappings = ((Map>) entityAsMap(client.performRequest(request)).get(indexName)).get( + "mappings" + ); + + return mappings; + } } From 64281ddf27e5a2df2e57ee00ee32fc8fd91484f0 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 22 Oct 2024 17:36:40 -0700 Subject: [PATCH 307/449] Don't run mixed cluster tests against the current version (#115377) By definition a "mixed cluster" has nodes of two different versions of Elasticsearch. It doesn't make sense to run these tests against the current version of Elasticsearch. --- x-pack/plugin/downsample/qa/mixed-cluster/build.gradle | 4 ++-- x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle | 3 ++- x-pack/plugin/inference/qa/mixed-cluster/build.gradle | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle b/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle index 61aa2927e46de..6b1c7e42c0fde 100644 --- a/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle +++ b/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle @@ -5,7 +5,7 @@ * 2.0. 
*/ -import org.elasticsearch.gradle.Version +import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask @@ -26,7 +26,7 @@ restResources { } def supportedVersion = bwcVersion -> { - return bwcVersion.onOrAfter("8.10.0"); + return bwcVersion.onOrAfter("8.10.0") && bwcVersion != VersionProperties.elasticsearchVersion } BuildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseName -> diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle index e4223f03c3a03..fb47255e8d52e 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle @@ -1,5 +1,6 @@ import org.elasticsearch.gradle.Version +import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.util.GradleUtils import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask @@ -27,7 +28,7 @@ GradleUtils.extendSourceSet(project, "javaRestTest", "yamlRestTest") // ESQL is available in 8.11 or later def supportedVersion = bwcVersion -> { - return bwcVersion.onOrAfter(Version.fromString("8.11.0")); + return bwcVersion.onOrAfter(Version.fromString("8.11.0")) && bwcVersion != VersionProperties.elasticsearchVersion } BuildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseName -> diff --git a/x-pack/plugin/inference/qa/mixed-cluster/build.gradle b/x-pack/plugin/inference/qa/mixed-cluster/build.gradle index 1d5369468b054..0bc4813f25137 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/build.gradle +++ b/x-pack/plugin/inference/qa/mixed-cluster/build.gradle @@ -20,7 +20,7 @@ dependencies { // inference is available in 8.11 or later def supportedVersion = bwcVersion -> { - return bwcVersion.onOrAfter(Version.fromString("8.11.0")); + 
return bwcVersion.onOrAfter(Version.fromString("8.11.0")) && bwcVersion != VersionProperties.elasticsearchVersion } BuildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseName -> From 43b93dcff75fd7f44586546283d6e37b0779d222 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 23 Oct 2024 16:21:23 +1100 Subject: [PATCH 308/449] Mute org.elasticsearch.reservedstate.service.FileSettingsServiceTests testProcessFileChanges #115280 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index a7f36cdd06d66..6bbccf8bb05bb 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -321,6 +321,9 @@ tests: - class: org.elasticsearch.xpack.watcher.trigger.schedule.engine.TickerScheduleEngineTests method: testWatchWithNoLastCheckedTimeButHasActivationTimeExecutesBeforeInitialInterval issue: https://github.com/elastic/elasticsearch/issues/115368 +- class: org.elasticsearch.reservedstate.service.FileSettingsServiceTests + method: testProcessFileChanges + issue: https://github.com/elastic/elasticsearch/issues/115280 # Examples: # From ba7d0954efff88295697b7d6c9809b9f8f0ba636 Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Wed, 23 Oct 2024 07:57:32 +0200 Subject: [PATCH 309/449] Fix synonyms CI tests timeout (#114641) --- muted-tests.yml | 9 --------- .../test/synonyms/10_synonyms_put.yml | 8 ++++++++ .../test/synonyms/110_synonyms_invalid.yml | 5 +++++ .../test/synonyms/20_synonyms_get.yml | 5 ++++- .../test/synonyms/30_synonyms_delete.yml | 4 ++++ .../test/synonyms/40_synonyms_sets_get.yml | 19 ++++++------------- .../test/synonyms/50_synonym_rule_put.yml | 5 ++++- .../test/synonyms/60_synonym_rule_get.yml | 7 ++++--- .../test/synonyms/70_synonym_rule_delete.yml | 5 +++++ .../test/synonyms/80_synonyms_from_index.yml | 6 +++++- .../90_synonyms_reloading_for_synset.yml | 6 +++++- 11 files 
changed, 50 insertions(+), 29 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 6bbccf8bb05bb..ccb387986551c 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -259,15 +259,6 @@ tests: - class: org.elasticsearch.xpack.inference.DefaultElserIT method: testInferCreatesDefaultElser issue: https://github.com/elastic/elasticsearch/issues/114503 -- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT - method: test {p0=synonyms/60_synonym_rule_get/Synonym set not found} - issue: https://github.com/elastic/elasticsearch/issues/114432 -- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT - method: test {p0=synonyms/60_synonym_rule_get/Get a synonym rule} - issue: https://github.com/elastic/elasticsearch/issues/114443 -- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT - method: test {p0=synonyms/60_synonym_rule_get/Synonym rule not found} - issue: https://github.com/elastic/elasticsearch/issues/114444 - class: org.elasticsearch.xpack.inference.integration.ModelRegistryIT method: testGetModel issue: https://github.com/elastic/elasticsearch/issues/114657 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/10_synonyms_put.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/10_synonyms_put.yml index bcd58f3f7bd64..675b98133ce11 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/10_synonyms_put.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/10_synonyms_put.yml @@ -15,6 +15,10 @@ setup: - match: { result: "created" } + - do: + cluster.health: + wait_for_no_initializing_shards: true + - do: synonyms.get_synonym: id: test-update-synonyms @@ -58,6 +62,10 @@ setup: - match: { result: "created" } + - do: + cluster.health: + wait_for_no_initializing_shards: true + - do: synonyms.get_synonym: id: test-empty-synonyms diff --git 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/110_synonyms_invalid.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/110_synonyms_invalid.yml index d3d0a3bb4df70..4e77e10495109 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/110_synonyms_invalid.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/110_synonyms_invalid.yml @@ -11,6 +11,11 @@ setup: synonyms_set: synonyms: "foo => bar, baz" + # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. + - do: + cluster.health: + wait_for_no_initializing_shards: true + - do: indices.create: index: test_index diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/20_synonyms_get.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/20_synonyms_get.yml index 3494f33466ce4..5e6d4ec2341ad 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/20_synonyms_get.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/20_synonyms_get.yml @@ -14,6 +14,10 @@ setup: - synonyms: "test => check" id: "test-id-3" + # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. 
+ - do: + cluster.health: + wait_for_no_initializing_shards: true --- "Get synonyms set": @@ -31,7 +35,6 @@ setup: id: "test-id-2" - synonyms: "test => check" id: "test-id-3" - --- "Get synonyms set - not found": - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/30_synonyms_delete.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/30_synonyms_delete.yml index 351ff4e186d8a..23c907f6a1137 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/30_synonyms_delete.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/30_synonyms_delete.yml @@ -12,6 +12,10 @@ setup: - synonyms: "bye => goodbye" id: "test-id-2" + # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. + - do: + cluster.health: + wait_for_no_initializing_shards: true --- "Delete synonyms set": - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml index 723c41e163eb8..7c145dafd81cd 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml @@ -9,6 +9,12 @@ setup: synonyms_set: - synonyms: "hello, hi" - synonyms: "goodbye, bye" + + # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. 
+ - do: + cluster.health: + wait_for_no_initializing_shards: true + - do: synonyms.put_synonym: id: test-synonyms-1 @@ -23,21 +29,8 @@ setup: body: synonyms_set: - synonyms: "pc, computer" - # set logging to debug for issue: https://github.com/elastic/elasticsearch/issues/102261 - - do: - cluster.put_settings: - body: - persistent: - logger.org.elasticsearch.synonyms: DEBUG --- -teardown: - - do: - cluster.put_settings: - body: - persistent: - logger.org.elasticsearch.synonyms: null ---- "List synonyms set": - do: synonyms.get_synonyms_sets: { } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/50_synonym_rule_put.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/50_synonym_rule_put.yml index f3711bb0774ca..d8611000fe465 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/50_synonym_rule_put.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/50_synonym_rule_put.yml @@ -14,7 +14,10 @@ setup: - synonyms: "test => check" id: "test-id-3" - + # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. + - do: + cluster.health: + wait_for_no_initializing_shards: true --- "Update a synonyms rule": - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/60_synonym_rule_get.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/60_synonym_rule_get.yml index 2a7c8aff89d8e..0c962b51e08cb 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/60_synonym_rule_get.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/60_synonym_rule_get.yml @@ -13,11 +13,12 @@ setup: id: "test-id-2" - synonyms: "test => check" id: "test-id-3" + + # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. 
- do: cluster.health: - index: .synonyms - timeout: 1m - wait_for_status: green + wait_for_no_initializing_shards: true + --- "Get a synonym rule": diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/70_synonym_rule_delete.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/70_synonym_rule_delete.yml index a4853b0b6d414..41ab293158a35 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/70_synonym_rule_delete.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/70_synonym_rule_delete.yml @@ -14,6 +14,11 @@ setup: - synonyms: "test => check" id: "test-id-3" + # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. + - do: + cluster.health: + wait_for_no_initializing_shards: true + --- "Delete synonym rule": - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/80_synonyms_from_index.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/80_synonyms_from_index.yml index 89ad933370e1c..3aba0f0b4b78b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/80_synonyms_from_index.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/80_synonyms_from_index.yml @@ -2,7 +2,6 @@ setup: - requires: cluster_features: ["gte_v8.10.0"] reason: Loading synonyms from index is introduced in 8.10.0 - # Create a new synonyms set - do: synonyms.put_synonym: @@ -14,6 +13,11 @@ setup: - synonyms: "bye => goodbye" id: "synonym-rule-2" + # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. 
+ - do: + cluster.health: + wait_for_no_initializing_shards: true + # Create an index with synonym_filter that uses that synonyms set - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml index dc94b36222402..1ceb5b43b8129 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml @@ -3,7 +3,6 @@ - requires: cluster_features: ["gte_v8.10.0"] reason: Reloading analyzers for specific synonym set is introduced in 8.10.0 - # Create synonyms_set1 - do: synonyms.put_synonym: @@ -26,6 +25,11 @@ - synonyms: "bye => goodbye" id: "synonym-rule-2" + # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. + - do: + cluster.health: + wait_for_no_initializing_shards: true + # Create my_index1 with synonym_filter that uses synonyms_set1 - do: indices.create: From 32dee6aaaeb18a8d6d4f0fee8bbf338e8991650d Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Wed, 23 Oct 2024 09:30:02 +0200 Subject: [PATCH 310/449] [test] Dynamically pick up the upper bound snapshot index version (#114703) Pick an index version between the minimum compatible and latest known version for snapshot testing. 
--- .../snapshots/AbstractSnapshotIntegTestCase.java | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index 8bc81fef2157d..7a72a7bd0daf0 100644 --- a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -34,7 +34,6 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.plugins.Plugin; @@ -366,15 +365,9 @@ protected static Settings.Builder indexSettingsNoReplicas(int shards) { /** * Randomly write an empty snapshot of an older version to an empty repository to simulate an older repository metadata format. */ - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) - // This used to pick an index version from 7.0.0 to 8.9.0. 
The minimum now is 8.0.0 but it's not clear what the upper range should be protected void maybeInitWithOldSnapshotVersion(String repoName, Path repoPath) throws Exception { if (randomBoolean() && randomBoolean()) { - initWithSnapshotVersion( - repoName, - repoPath, - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersions.V_8_9_0) - ); + initWithSnapshotVersion(repoName, repoPath, IndexVersionUtils.randomVersion()); } } From 530d15029eb8ada2cd7cd76ea5be15adbfc0e639 Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Wed, 23 Oct 2024 09:30:16 +0200 Subject: [PATCH 311/449] Remove direct cloning of BytesTransportRequests (#114808) All request handlers should be able to read `BytesTransportRequest` to a class than can copied by re-serializing. Direct copying was only necessary by the legacy `JOIN_VALIDATE_ACTION_NAME` request handler. See #89926 --- .../test/transport/MockTransportService.java | 20 ++----------------- 1 file changed, 2 insertions(+), 18 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index fd376fcd07688..18c591166e720 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -39,7 +39,6 @@ import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.node.Node; import org.elasticsearch.plugins.Plugin; @@ -50,7 +49,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.tasks.MockTaskManager; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.BytesTransportRequest; import 
org.elasticsearch.transport.ClusterConnectionManager; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.ConnectionProfile; @@ -586,13 +584,8 @@ public void sendRequest( // poor mans request cloning... BytesStreamOutput bStream = new BytesStreamOutput(); request.writeTo(bStream); - final TransportRequest clonedRequest; - if (request instanceof BytesTransportRequest) { - clonedRequest = copyRawBytesForBwC(bStream); - } else { - RequestHandlerRegistry reg = MockTransportService.this.getRequestHandler(action); - clonedRequest = reg.newRequest(bStream.bytes().streamInput()); - } + RequestHandlerRegistry reg = MockTransportService.this.getRequestHandler(action); + final TransportRequest clonedRequest = reg.newRequest(bStream.bytes().streamInput()); assert clonedRequest.getClass().equals(MasterNodeRequestHelper.unwrapTermOverride(request).getClass()) : clonedRequest + " vs " + request; @@ -640,15 +633,6 @@ protected void doRun() throws IOException { } } - // Some request handlers read back a BytesTransportRequest - // into a different class that cannot be re-serialized (i.e. JOIN_VALIDATE_ACTION_NAME), - // in those cases we just copy the raw bytes back to a BytesTransportRequest. 
- // This is only needed for the BwC for JOIN_VALIDATE_ACTION_NAME and can be removed in the next major - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) - private static TransportRequest copyRawBytesForBwC(BytesStreamOutput bStream) throws IOException { - return new BytesTransportRequest(bStream.bytes().streamInput()); - } - @Override public void clearCallback() { synchronized (this) { From aa70c41abaaecd94676a019fee464cf71b453f51 Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Wed, 23 Oct 2024 09:30:48 +0200 Subject: [PATCH 312/449] [test] Always assume that the old cluster support replication of closed indices (#114314) Support for replicating closed indices was added in #39506 (7.1.0), we can expect the the cluster always supports replication of closed indices in 8.0/9.0 --- .../elasticsearch/upgrades/FullClusterRestartIT.java | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 73f291da15ead..92a704f793dc2 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.CheckedFunction; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; @@ -1203,15 +1202,8 @@ public void testClosedIndices() throws Exception { closeIndex(index); } - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_INDEXING) // This check can be removed (always assume true) - var 
originalClusterSupportsReplicationOfClosedIndices = oldClusterHasFeature(RestTestLegacyFeatures.REPLICATION_OF_CLOSED_INDICES); - - if (originalClusterSupportsReplicationOfClosedIndices) { - ensureGreenLongWait(index); - assertClosedIndex(index, true); - } else { - assertClosedIndex(index, false); - } + ensureGreenLongWait(index); + assertClosedIndex(index, true); if (isRunningAgainstOldCluster() == false) { openIndex(index); From 387062eb808f2c8a6c0724d1317b57176a60539d Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 23 Oct 2024 10:20:42 +0200 Subject: [PATCH 313/449] Sometimes delegate to SourceLoader in ValueSourceReaderOperator for required stored fields (#115114) If source is required by a block loader then the StoredFieldsSpec that gets populated should be enhanced by SourceLoader#requiredStoredFields(...) in ValuesSourceReaderOperator. Otherwise in case of synthetic source many stored fields aren't loaded, which causes only a subset of _source to be synthesized. For example when unmapped fields exist or field values that exceed configured ignore above will not appear is _source. This happens when field types fallback to a block loader implementation that uses _source. The required field values are then extracted from the source once loaded. This change also reverts the production code changes introduced via #114903. That change only ensured that _ignored_source field was added to the required list of stored fields. In reality more fields could be required. This change is better fix, since it handles also other cases and the SourceLoader implementation indicates which stored fields are needed. 
Closes #115076 --- .../extras/MatchOnlyTextFieldMapper.java | 3 +- .../mapper/extras/ScaledFloatFieldMapper.java | 3 +- muted-tests.yml | 12 --- .../mapper/AbstractGeometryFieldMapper.java | 3 +- .../index/mapper/BlockSourceReader.java | 47 ++++------ .../index/mapper/BooleanFieldMapper.java | 2 +- .../index/mapper/DateFieldMapper.java | 3 +- .../index/mapper/KeywordFieldMapper.java | 3 +- .../index/mapper/NumberFieldMapper.java | 65 ++++--------- .../index/mapper/TextFieldMapper.java | 14 +-- .../index/mapper/BlockSourceReaderTests.java | 2 +- .../index/mapper/TextFieldMapperTests.java | 3 +- .../KeywordFieldSyntheticSourceSupport.java | 5 + .../index/mapper/MapperServiceTestCase.java | 5 + .../index/mapper/MapperTestCase.java | 47 +++++++--- ...xtFieldFamilySyntheticSourceTestSetup.java | 20 +--- .../lucene/ValuesSourceReaderOperator.java | 27 ++++-- .../mapper/SemanticTextFieldMapper.java | 3 +- ..._esql_synthetic_source_disabled_fields.yml | 92 +++++++++++++++++-- .../test/51_esql_synthetic_source.yml | 77 ++++++++++++++++ .../unsignedlong/UnsignedLongFieldMapper.java | 3 +- 21 files changed, 276 insertions(+), 163 deletions(-) diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java index cd252fcff2376..5904169308fab 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java @@ -364,8 +364,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { SourceValueFetcher fetcher = SourceValueFetcher.toString(blContext.sourcePaths(name())); // MatchOnlyText never has norms, so we have to use the field names field BlockSourceReader.LeafIteratorLookup lookup = BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()); 
- var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); - return new BlockSourceReader.BytesRefsBlockLoader(fetcher, lookup, sourceMode); + return new BlockSourceReader.BytesRefsBlockLoader(fetcher, lookup); } @Override diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java index 1f647cb977cf5..b845545133e19 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java @@ -319,8 +319,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { BlockSourceReader.LeafIteratorLookup lookup = isStored() || isIndexed() ? BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) : BlockSourceReader.lookupMatchingAll(); - var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); - return new BlockSourceReader.DoublesBlockLoader(valueFetcher, lookup, sourceMode); + return new BlockSourceReader.DoublesBlockLoader(valueFetcher, lookup); } @Override diff --git a/muted-tests.yml b/muted-tests.yml index ccb387986551c..45b1398df7ace 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -282,18 +282,6 @@ tests: - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT method: testInferDeploysDefaultElser issue: https://github.com/elastic/elasticsearch/issues/114913 -- class: org.elasticsearch.index.mapper.TextFieldMapperTests - method: testBlockLoaderFromRowStrideReaderWithSyntheticSource - issue: https://github.com/elastic/elasticsearch/issues/115066 -- class: org.elasticsearch.index.mapper.TextFieldMapperTests - method: testBlockLoaderFromColumnReaderWithSyntheticSource - issue: https://github.com/elastic/elasticsearch/issues/115073 -- class: 
org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapperTests - method: testBlockLoaderFromColumnReaderWithSyntheticSource - issue: https://github.com/elastic/elasticsearch/issues/115074 -- class: org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapperTests - method: testBlockLoaderFromRowStrideReaderWithSyntheticSource - issue: https://github.com/elastic/elasticsearch/issues/115076 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=esql/60_usage/Basic ESQL usage output (telemetry)} issue: https://github.com/elastic/elasticsearch/issues/115231 diff --git a/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java index 3512989c115ee..c38b5beeb55a0 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java @@ -189,8 +189,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { protected BlockLoader blockLoaderFromSource(BlockLoaderContext blContext) { ValueFetcher fetcher = valueFetcher(blContext.sourcePaths(name()), nullValue, GeometryFormatterFactory.WKB); // TODO consider optimization using BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) - var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); - return new BlockSourceReader.GeometriesBlockLoader(fetcher, BlockSourceReader.lookupMatchingAll(), sourceMode); + return new BlockSourceReader.GeometriesBlockLoader(fetcher, BlockSourceReader.lookupMatchingAll()); } protected abstract Object nullValueAsSource(T nullValue); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java b/server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java index 105943c732a5e..19a1cce746172 100644 --- 
a/server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java @@ -22,7 +22,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import java.util.Set; /** * Loads values from {@code _source}. This whole process is very slow and cast-tastic, @@ -30,14 +29,6 @@ * slow. */ public abstract class BlockSourceReader implements BlockLoader.RowStrideReader { - - // _ignored_source is needed when source mode is synthetic. - static final StoredFieldsSpec NEEDS_SOURCE_AND_IGNORED_SOURCE = new StoredFieldsSpec( - true, - false, - Set.of(IgnoredSourceFieldMapper.NAME) - ); - private final ValueFetcher fetcher; private final List ignoredValues = new ArrayList<>(); private final DocIdSetIterator iter; @@ -100,12 +91,10 @@ public interface LeafIteratorLookup { private abstract static class SourceBlockLoader implements BlockLoader { protected final ValueFetcher fetcher; private final LeafIteratorLookup lookup; - private final SourceFieldMapper.Mode sourceMode; - private SourceBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup, SourceFieldMapper.Mode sourceMode) { + private SourceBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup) { this.fetcher = fetcher; this.lookup = lookup; - this.sourceMode = sourceMode; } @Override @@ -115,7 +104,7 @@ public final ColumnAtATimeReader columnAtATimeReader(LeafReaderContext context) @Override public final StoredFieldsSpec rowStrideStoredFieldSpec() { - return sourceMode == SourceFieldMapper.Mode.SYNTHETIC ? NEEDS_SOURCE_AND_IGNORED_SOURCE : StoredFieldsSpec.NEEDS_SOURCE; + return StoredFieldsSpec.NEEDS_SOURCE; } @Override @@ -151,8 +140,8 @@ public final String toString() { * Load {@code boolean}s from {@code _source}. 
*/ public static class BooleansBlockLoader extends SourceBlockLoader { - public BooleansBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup, SourceFieldMapper.Mode sourceMode) { - super(fetcher, lookup, sourceMode); + public BooleansBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup) { + super(fetcher, lookup); } @Override @@ -191,8 +180,8 @@ public String toString() { * Load {@link BytesRef}s from {@code _source}. */ public static class BytesRefsBlockLoader extends SourceBlockLoader { - public BytesRefsBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup, SourceFieldMapper.Mode sourceMode) { - super(fetcher, lookup, sourceMode); + public BytesRefsBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup) { + super(fetcher, lookup); } @Override @@ -202,7 +191,7 @@ public final Builder builder(BlockFactory factory, int expectedCount) { @Override protected RowStrideReader rowStrideReader(LeafReaderContext context, DocIdSetIterator iter) throws IOException { - return new BytesRefs(fetcher, iter, null); + return new BytesRefs(fetcher, iter); } @Override @@ -212,8 +201,8 @@ protected String name() { } public static class GeometriesBlockLoader extends SourceBlockLoader { - public GeometriesBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup, SourceFieldMapper.Mode sourceMode) { - super(fetcher, lookup, sourceMode); + public GeometriesBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup) { + super(fetcher, lookup); } @Override @@ -223,7 +212,7 @@ public final Builder builder(BlockFactory factory, int expectedCount) { @Override protected RowStrideReader rowStrideReader(LeafReaderContext context, DocIdSetIterator iter) { - return new Geometries(fetcher, iter, null); + return new Geometries(fetcher, iter); } @Override @@ -235,7 +224,7 @@ protected String name() { private static class BytesRefs extends BlockSourceReader { private final BytesRef scratch = new BytesRef(); - BytesRefs(ValueFetcher fetcher, DocIdSetIterator iter, 
SourceFieldMapper.Mode sourceMode) { + BytesRefs(ValueFetcher fetcher, DocIdSetIterator iter) { super(fetcher, iter); } @@ -252,7 +241,7 @@ public String toString() { private static class Geometries extends BlockSourceReader { - Geometries(ValueFetcher fetcher, DocIdSetIterator iter, SourceFieldMapper.Mode sourceMode) { + Geometries(ValueFetcher fetcher, DocIdSetIterator iter) { super(fetcher, iter); } @@ -275,8 +264,8 @@ public String toString() { * Load {@code double}s from {@code _source}. */ public static class DoublesBlockLoader extends SourceBlockLoader { - public DoublesBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup, SourceFieldMapper.Mode sourceMode) { - super(fetcher, lookup, sourceMode); + public DoublesBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup) { + super(fetcher, lookup); } @Override @@ -315,8 +304,8 @@ public String toString() { * Load {@code int}s from {@code _source}. */ public static class IntsBlockLoader extends SourceBlockLoader { - public IntsBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup, SourceFieldMapper.Mode sourceMode) { - super(fetcher, lookup, sourceMode); + public IntsBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup) { + super(fetcher, lookup); } @Override @@ -355,8 +344,8 @@ public String toString() { * Load {@code long}s from {@code _source}. 
*/ public static class LongsBlockLoader extends SourceBlockLoader { - public LongsBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup, SourceFieldMapper.Mode sourceMode) { - super(fetcher, lookup, sourceMode); + public LongsBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup) { + super(fetcher, lookup); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index c2bf9e18bfeec..5aaaf7dce83c9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -314,7 +314,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { BlockSourceReader.LeafIteratorLookup lookup = isIndexed() || isStored() ? BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) : BlockSourceReader.lookupMatchingAll(); - return new BlockSourceReader.BooleansBlockLoader(fetcher, lookup, blContext.indexSettings().getIndexMappingSourceMode()); + return new BlockSourceReader.BooleansBlockLoader(fetcher, lookup); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index d05f0e477db09..87e4ce5f90479 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -793,8 +793,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { BlockSourceReader.LeafIteratorLookup lookup = isStored() || isIndexed() ? 
BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) : BlockSourceReader.lookupMatchingAll(); - var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); - return new BlockSourceReader.LongsBlockLoader(sourceValueFetcher(blContext.sourcePaths(name())), lookup, sourceMode); + return new BlockSourceReader.LongsBlockLoader(sourceValueFetcher(blContext.sourcePaths(name())), lookup); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 802680e7f373e..ecc708bc94614 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -634,8 +634,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { return new BlockStoredFieldsReader.BytesFromBytesRefsBlockLoader(name()); } SourceValueFetcher fetcher = sourceValueFetcher(blContext.sourcePaths(name())); - var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); - return new BlockSourceReader.BytesRefsBlockLoader(fetcher, sourceBlockLoaderLookup(blContext), sourceMode); + return new BlockSourceReader.BytesRefsBlockLoader(fetcher, sourceBlockLoaderLookup(blContext)); } private BlockSourceReader.LeafIteratorLookup sourceBlockLoaderLookup(BlockLoaderContext blContext) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 3608e8ab261c1..55ed1e10428aa 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -462,12 +462,8 @@ BlockLoader blockLoaderFromDocValues(String fieldName) { } @Override - BlockLoader blockLoaderFromSource( - SourceValueFetcher sourceValueFetcher, - BlockSourceReader.LeafIteratorLookup 
lookup, - SourceFieldMapper.Mode sourceMode - ) { - return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup, sourceMode); + BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) { + return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup); } }, FLOAT("float", NumericType.FLOAT) { @@ -650,12 +646,8 @@ BlockLoader blockLoaderFromDocValues(String fieldName) { } @Override - BlockLoader blockLoaderFromSource( - SourceValueFetcher sourceValueFetcher, - BlockSourceReader.LeafIteratorLookup lookup, - SourceFieldMapper.Mode sourceMode - ) { - return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup, sourceMode); + BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) { + return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup); } }, DOUBLE("double", NumericType.DOUBLE) { @@ -804,12 +796,8 @@ BlockLoader blockLoaderFromDocValues(String fieldName) { } @Override - BlockLoader blockLoaderFromSource( - SourceValueFetcher sourceValueFetcher, - BlockSourceReader.LeafIteratorLookup lookup, - SourceFieldMapper.Mode sourceMode - ) { - return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup, sourceMode); + BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) { + return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup); } }, BYTE("byte", NumericType.BYTE) { @@ -921,12 +909,8 @@ BlockLoader blockLoaderFromDocValues(String fieldName) { } @Override - BlockLoader blockLoaderFromSource( - SourceValueFetcher sourceValueFetcher, - BlockSourceReader.LeafIteratorLookup lookup, - SourceFieldMapper.Mode sourceMode - ) { - return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup, sourceMode); + BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, 
BlockSourceReader.LeafIteratorLookup lookup) { + return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup); } private boolean isOutOfRange(Object value) { @@ -1038,12 +1022,8 @@ BlockLoader blockLoaderFromDocValues(String fieldName) { } @Override - BlockLoader blockLoaderFromSource( - SourceValueFetcher sourceValueFetcher, - BlockSourceReader.LeafIteratorLookup lookup, - SourceFieldMapper.Mode sourceMode - ) { - return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup, sourceMode); + BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) { + return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup); } private boolean isOutOfRange(Object value) { @@ -1229,12 +1209,8 @@ BlockLoader blockLoaderFromDocValues(String fieldName) { } @Override - BlockLoader blockLoaderFromSource( - SourceValueFetcher sourceValueFetcher, - BlockSourceReader.LeafIteratorLookup lookup, - SourceFieldMapper.Mode sourceMode - ) { - return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup, sourceMode); + BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) { + return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup); } }, LONG("long", NumericType.LONG) { @@ -1380,12 +1356,8 @@ BlockLoader blockLoaderFromDocValues(String fieldName) { } @Override - BlockLoader blockLoaderFromSource( - SourceValueFetcher sourceValueFetcher, - BlockSourceReader.LeafIteratorLookup lookup, - SourceFieldMapper.Mode sourceMode - ) { - return new BlockSourceReader.LongsBlockLoader(sourceValueFetcher, lookup, sourceMode); + BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) { + return new BlockSourceReader.LongsBlockLoader(sourceValueFetcher, lookup); } private boolean isOutOfRange(Object value) { @@ -1663,11 +1635,7 @@ protected void 
writeValue(XContentBuilder b, long value) throws IOException { abstract BlockLoader blockLoaderFromDocValues(String fieldName); - abstract BlockLoader blockLoaderFromSource( - SourceValueFetcher sourceValueFetcher, - BlockSourceReader.LeafIteratorLookup lookup, - SourceFieldMapper.Mode sourceMode - ); + abstract BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup); } public static class NumberFieldType extends SimpleMappedFieldType { @@ -1806,8 +1774,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { BlockSourceReader.LeafIteratorLookup lookup = isStored() || isIndexed() ? BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) : BlockSourceReader.lookupMatchingAll(); - var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); - return type.blockLoaderFromSource(sourceValueFetcher(blContext.sourcePaths(name())), lookup, sourceMode); + return type.blockLoaderFromSource(sourceValueFetcher(blContext.sourcePaths(name())), lookup); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 3f77edc819602..253f70f4fda47 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -1007,20 +1007,8 @@ protected String delegatingTo() { if (isStored()) { return new BlockStoredFieldsReader.BytesFromStringsBlockLoader(name()); } - if (isSyntheticSource && syntheticSourceDelegate == null) { - /* - * When we're in synthetic source mode we don't currently - * support text fields that are not stored and are not children - * of perfect keyword fields. We'd have to load from the parent - * field and then convert the result to a string. In this case, - * even if we would synthesize the source, the current field - * would be missing. 
- */ - return null; - } SourceValueFetcher fetcher = SourceValueFetcher.toString(blContext.sourcePaths(name())); - var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); - return new BlockSourceReader.BytesRefsBlockLoader(fetcher, blockReaderDisiLookup(blContext), sourceMode); + return new BlockSourceReader.BytesRefsBlockLoader(fetcher, blockReaderDisiLookup(blContext)); } /** diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BlockSourceReaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BlockSourceReaderTests.java index 286be8d12570d..357ada3ad656d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BlockSourceReaderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BlockSourceReaderTests.java @@ -51,7 +51,7 @@ public void testEmptyArray() throws IOException { private void loadBlock(LeafReaderContext ctx, Consumer test) throws IOException { ValueFetcher valueFetcher = SourceValueFetcher.toString(Set.of("field")); BlockSourceReader.LeafIteratorLookup lookup = BlockSourceReader.lookupFromNorms("field"); - BlockLoader loader = new BlockSourceReader.BytesRefsBlockLoader(valueFetcher, lookup, null); + BlockLoader loader = new BlockSourceReader.BytesRefsBlockLoader(valueFetcher, lookup); assertThat(loader.columnAtATimeReader(ctx), nullValue()); BlockLoader.RowStrideReader reader = loader.rowStrideReader(ctx); assertThat(loader.rowStrideStoredFieldSpec(), equalTo(StoredFieldsSpec.NEEDS_SOURCE)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index 86914cfe9ced7..c2375e948fda0 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -1353,6 +1353,7 @@ private void testBlockLoaderFromParent(boolean columnReader, boolean syntheticSo }; MapperService 
mapper = createMapperService(syntheticSource ? syntheticSourceMapping(buildFields) : mapping(buildFields)); BlockReaderSupport blockReaderSupport = getSupportedReaders(mapper, "field.sub"); - testBlockLoader(columnReader, example, blockReaderSupport); + var sourceLoader = mapper.mappingLookup().newSourceLoader(SourceFieldMetrics.NOOP); + testBlockLoader(columnReader, example, blockReaderSupport, sourceLoader); } } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/KeywordFieldSyntheticSourceSupport.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/KeywordFieldSyntheticSourceSupport.java index 0d05c3d0cd77b..502ffdde62e5a 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/KeywordFieldSyntheticSourceSupport.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/KeywordFieldSyntheticSourceSupport.java @@ -37,6 +37,11 @@ public class KeywordFieldSyntheticSourceSupport implements MapperTestCase.Synthe this.docValues = useFallbackSyntheticSource == false || ESTestCase.randomBoolean(); } + @Override + public boolean ignoreAbove() { + return ignoreAbove != null; + } + @Override public boolean preservesExactSource() { // We opt in into fallback synthetic source implementation diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java index 8bc2666bcfe3b..da04f30ff8023 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java @@ -179,6 +179,11 @@ public final MapperService createMapperService(XContentBuilder mappings) throws return createMapperService(getVersion(), mappings); } + public final MapperService createSytheticSourceMapperService(XContentBuilder mappings) throws IOException { + var settings = 
Settings.builder().put("index.mapping.source.mode", "synthetic").build(); + return createMapperService(getVersion(), settings, () -> true, mappings); + } + protected IndexVersion getVersion() { return IndexVersion.current(); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java index 7669ada750c14..c89c0b2e37dd2 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java @@ -49,6 +49,7 @@ import org.elasticsearch.script.ScriptFactory; import org.elasticsearch.script.field.DocValuesScriptFieldFactory; import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.fetch.StoredFieldsSpec; import org.elasticsearch.search.lookup.LeafStoredFieldsLookup; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.Source; @@ -1103,6 +1104,10 @@ default boolean preservesExactSource() { return false; } + default boolean ignoreAbove() { + return false; + } + /** * Examples that should work when source is generated from doc values. 
*/ @@ -1321,15 +1326,12 @@ private BlockLoader getBlockLoader(boolean columnReader) { return mapper.fieldType(loaderFieldName).blockLoader(new MappedFieldType.BlockLoaderContext() { @Override public String indexName() { - return "test_index"; + return mapper.getIndexSettings().getIndex().getName(); } @Override public IndexSettings indexSettings() { - var imd = IndexMetadata.builder(indexName()) - .settings(MapperTestCase.indexSettings(IndexVersion.current(), 1, 1).put(Settings.EMPTY)) - .build(); - return new IndexSettings(imd, Settings.EMPTY); + return mapper.getIndexSettings(); } @Override @@ -1362,9 +1364,19 @@ public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() { private void testBlockLoader(boolean syntheticSource, boolean columnReader) throws IOException { // TODO if we're not using synthetic source use a different sort of example. Or something. - SyntheticSourceExample example = syntheticSourceSupport(false, columnReader).example(5); + var syntheticSourceSupport = syntheticSourceSupport(false, columnReader); + SyntheticSourceExample example = syntheticSourceSupport.example(5); + if (syntheticSource && columnReader == false) { + // The synthetic source testing support can't always handle now the difference between stored and synthetic source mode. + // In case of ignore above, the ignored values are always appended after the valid values + // (both if field has doc values or stored field). While stored source just reads original values (from _source) and there + // is no notion of values that are ignored. + // TODO: fix this by improving block loader support: https://github.com/elastic/elasticsearch/issues/115257 + assumeTrue("inconsistent synthetic source testing support with ignore above", syntheticSourceSupport.ignoreAbove() == false); + } + // TODO: only rely index.mapping.source.mode setting XContentBuilder mapping = syntheticSource ? 
syntheticSourceFieldMapping(example.mapping) : fieldMapping(example.mapping); - MapperService mapper = createMapperService(mapping); + MapperService mapper = syntheticSource ? createSytheticSourceMapperService(mapping) : createMapperService(mapping); BlockReaderSupport blockReaderSupport = getSupportedReaders(mapper, "field"); if (syntheticSource) { // geo_point and point do not yet support synthetic source @@ -1373,11 +1385,16 @@ private void testBlockLoader(boolean syntheticSource, boolean columnReader) thro blockReaderSupport.syntheticSource ); } - testBlockLoader(columnReader, example, blockReaderSupport); + var sourceLoader = mapper.mappingLookup().newSourceLoader(SourceFieldMetrics.NOOP); + testBlockLoader(columnReader, example, blockReaderSupport, sourceLoader); } - protected final void testBlockLoader(boolean columnReader, SyntheticSourceExample example, BlockReaderSupport blockReaderSupport) - throws IOException { + protected final void testBlockLoader( + boolean columnReader, + SyntheticSourceExample example, + BlockReaderSupport blockReaderSupport, + SourceLoader sourceLoader + ) throws IOException { BlockLoader loader = blockReaderSupport.getBlockLoader(columnReader); Function valuesConvert = loadBlockExpected(blockReaderSupport, columnReader); if (valuesConvert == null) { @@ -1404,9 +1421,15 @@ protected final void testBlockLoader(boolean columnReader, SyntheticSourceExampl return; } } else { + StoredFieldsSpec storedFieldsSpec = loader.rowStrideStoredFieldSpec(); + if (storedFieldsSpec.requiresSource()) { + storedFieldsSpec = storedFieldsSpec.merge( + new StoredFieldsSpec(true, storedFieldsSpec.requiresMetadata(), sourceLoader.requiredStoredFields()) + ); + } BlockLoaderStoredFieldsFromLeafLoader storedFieldsLoader = new BlockLoaderStoredFieldsFromLeafLoader( - StoredFieldLoader.fromSpec(loader.rowStrideStoredFieldSpec()).getLoader(ctx, null), - loader.rowStrideStoredFieldSpec().requiresSource() ? 
SourceLoader.FROM_STORED_SOURCE.leaf(ctx.reader(), null) : null + StoredFieldLoader.fromSpec(storedFieldsSpec).getLoader(ctx, null), + storedFieldsSpec.requiresSource() ? sourceLoader.leaf(ctx.reader(), null) : null ); storedFieldsLoader.advanceTo(0); BlockLoader.Builder builder = loader.builder(TestBlock.factory(ctx.reader().numDocs()), 1); diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/TextFieldFamilySyntheticSourceTestSetup.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/TextFieldFamilySyntheticSourceTestSetup.java index b6a031c9ff906..97ded7f9a06f2 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/TextFieldFamilySyntheticSourceTestSetup.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/TextFieldFamilySyntheticSourceTestSetup.java @@ -51,24 +51,9 @@ public static MapperTestCase.BlockReaderSupport getSupportedReaders(MapperServic } public static Function loadBlockExpected(MapperTestCase.BlockReaderSupport blockReaderSupport, boolean columnReader) { - if (nullLoaderExpected(blockReaderSupport.mapper(), blockReaderSupport.loaderFieldName())) { - return null; - } return v -> ((BytesRef) v).utf8ToString(); } - private static boolean nullLoaderExpected(MapperService mapper, String fieldName) { - MappedFieldType type = mapper.fieldType(fieldName); - if (type instanceof TextFieldMapper.TextFieldType t) { - if (t.isSyntheticSource() == false || t.canUseSyntheticSourceDelegateForQuerying() || t.isStored()) { - return false; - } - String parentField = mapper.mappingLookup().parentField(fieldName); - return parentField == null || nullLoaderExpected(mapper, parentField); - } - return false; - } - public static void validateRoundTripReader(String syntheticSource, DirectoryReader reader, DirectoryReader roundTripReader) { // `reader` here is reader of original document and `roundTripReader` reads document // created from synthetic source. 
@@ -98,6 +83,11 @@ private static class TextFieldFamilySyntheticSourceSupport implements MapperTest ); } + @Override + public boolean ignoreAbove() { + return keywordMultiFieldSyntheticSourceSupport.ignoreAbove(); + } + @Override public MapperTestCase.SyntheticSourceExample example(int maxValues) { if (store) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java index ee747d98c26f8..74affb10eaf20 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java @@ -241,6 +241,12 @@ private void loadFromSingleLeaf(Block[] blocks, int shard, int segment, BlockLoa } } + SourceLoader sourceLoader = null; + if (storedFieldsSpec.requiresSource()) { + sourceLoader = shardContexts.get(shard).newSourceLoader.get(); + storedFieldsSpec = storedFieldsSpec.merge(new StoredFieldsSpec(true, false, sourceLoader.requiredStoredFields())); + } + if (rowStrideReaders.isEmpty()) { return; } @@ -259,7 +265,7 @@ private void loadFromSingleLeaf(Block[] blocks, int shard, int segment, BlockLoa } BlockLoaderStoredFieldsFromLeafLoader storedFields = new BlockLoaderStoredFieldsFromLeafLoader( storedFieldLoader.getLoader(ctx, null), - storedFieldsSpec.requiresSource() ? shardContexts.get(shard).newSourceLoader.get().leaf(ctx.reader(), null) : null + sourceLoader != null ? 
sourceLoader.leaf(ctx.reader(), null) : null ); for (int p = 0; p < docs.count(); p++) { int doc = docs.get(p); @@ -381,13 +387,18 @@ private void fieldsMoved(LeafReaderContext ctx, int shard) throws IOException { FieldWork field = fields[f]; rowStride[f] = field.rowStride(ctx); storedFieldsSpec = storedFieldsSpec.merge(field.loader.rowStrideStoredFieldSpec()); - storedFields = new BlockLoaderStoredFieldsFromLeafLoader( - StoredFieldLoader.fromSpec(storedFieldsSpec).getLoader(ctx, null), - storedFieldsSpec.requiresSource() ? shardContexts.get(shard).newSourceLoader.get().leaf(ctx.reader(), null) : null - ); - if (false == storedFieldsSpec.equals(StoredFieldsSpec.NO_REQUIREMENTS)) { - trackStoredFields(storedFieldsSpec, false); - } + } + SourceLoader sourceLoader = null; + if (storedFieldsSpec.requiresSource()) { + sourceLoader = shardContexts.get(shard).newSourceLoader.get(); + storedFieldsSpec = storedFieldsSpec.merge(new StoredFieldsSpec(true, false, sourceLoader.requiredStoredFields())); + } + storedFields = new BlockLoaderStoredFieldsFromLeafLoader( + StoredFieldLoader.fromSpec(storedFieldsSpec).getLoader(ctx, null), + sourceLoader != null ? 
sourceLoader.leaf(ctx.reader(), null) : null + ); + if (false == storedFieldsSpec.equals(StoredFieldsSpec.NO_REQUIREMENTS)) { + trackStoredFields(storedFieldsSpec, false); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index fb18cfb4959c7..4c07516051287 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -617,8 +617,7 @@ private String generateInvalidQueryInferenceResultsMessage(StringBuilder baseMes @Override public BlockLoader blockLoader(MappedFieldType.BlockLoaderContext blContext) { SourceValueFetcher fetcher = SourceValueFetcher.toString(blContext.sourcePaths(name().concat(".text"))); - var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); - return new BlockSourceReader.BytesRefsBlockLoader(fetcher, BlockSourceReader.lookupMatchingAll(), sourceMode); + return new BlockSourceReader.BytesRefsBlockLoader(fetcher, BlockSourceReader.lookupMatchingAll()); } } diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/50_esql_synthetic_source_disabled_fields.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/50_esql_synthetic_source_disabled_fields.yml index 68597afda6c78..bc81d1eb67309 100644 --- a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/50_esql_synthetic_source_disabled_fields.yml +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/50_esql_synthetic_source_disabled_fields.yml @@ -283,7 +283,7 @@ teardown: - match: {values.0.3: "PUT"} - match: {values.0.4: false} - match: {values.0.5: "POINT (-74.006 40.7128)"} - - match: {values.0.6: null} # null is expected, because text fields aren't 
stored in ignored source + - match: {values.0.6: "Do. Or do not. There is no try."} - match: {values.0.7: 102} - do: @@ -296,10 +296,86 @@ teardown: - match: {columns.0.name: "message"} - match: {columns.0.type: "text"} - # null is expected, because text fields aren't stored in ignored source - - match: {values.0.0: null} - - match: {values.1.0: null} - - match: {values.2.0: null} - - match: {values.3.0: null} - - match: {values.4.0: null} - - match: {values.5.0: null} + - match: {values.0.0: "Do. Or do not. There is no try."} + - match: {values.1.0: "I find your lack of faith disturbing."} + - match: {values.2.0: "Wars not make one great."} + - match: {values.3.0: "No, I am your father."} + - match: {values.4.0: "May the force be with you."} + - match: {values.5.0: "That's no moon. It's a space station."} + +--- +"message field with keyword multi-field with ignore_above": + - do: + indices.create: + index: my-index2 + body: + settings: + index: + mode: logsdb + mappings: + properties: + "@timestamp": + type: date + host.name: + type: keyword + store: false + message: + type: text + store: false + fields: + raw: + type: keyword + ignore_above: 3 + + - do: + bulk: + index: my-index2 + refresh: true + body: + - { "index": { } } + - { "@timestamp": "2024-02-12T10:30:00Z", "host.name": "foo", "message": "No, I am your father." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:31:00Z", "host.name": "bar", "message": "Do. Or do not. There is no try." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:32:00Z", "host.name": "foo", "message": "May the force be with you." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:33:00Z", "host.name": "baz", "message": "I find your lack of faith disturbing." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:34:00Z", "host.name": "baz", "message": "Wars not make one great." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:35:00Z", "host.name": "foo", "message": "That's no moon. 
It's a space station." } + + - do: + esql.query: + body: + query: 'FROM my-index2 | SORT host.name, @timestamp | LIMIT 1' + + - match: {columns.0.name: "@timestamp"} + - match: {columns.0.type: "date"} + - match: {columns.1.name: "host.name"} + - match: {columns.1.type: "keyword"} + - match: {columns.2.name: "message"} + - match: {columns.2.type: "text"} + - match: {columns.3.name: "message.raw"} + - match: {columns.3.type: "keyword"} + + - match: {values.0.0: "2024-02-12T10:31:00.000Z"} + - match: {values.0.1: "bar"} + - match: {values.0.2: "Do. Or do not. There is no try."} + # Note that isn't related to synthetic source. For both stored and synthetic source null is returned: +# - match: {values.0.3: "Do. Or do not. There is no try."} + + - do: + esql.query: + body: + query: 'FROM my-index2 | SORT host.name, @timestamp | KEEP message | LIMIT 10' + + - match: {columns.0.name: "message"} + - match: {columns.0.type: "text"} + + - match: {values.0.0: "Do. Or do not. There is no try."} + - match: {values.1.0: "I find your lack of faith disturbing."} + - match: {values.2.0: "Wars not make one great."} + - match: {values.3.0: "No, I am your father."} + - match: {values.4.0: "May the force be with you."} + - match: {values.5.0: "That's no moon. It's a space station."} diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/51_esql_synthetic_source.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/51_esql_synthetic_source.yml index 7e305bda4ef4e..6c840a0cf9d3a 100644 --- a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/51_esql_synthetic_source.yml +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/51_esql_synthetic_source.yml @@ -175,3 +175,80 @@ teardown: - match: {values.3.0: "No, I am your father."} - match: {values.4.0: "May the force be with you."} - match: {values.5.0: "That's no moon. 
It's a space station."} + +--- +"message field with stored keyword multi-field with ignore_above": + - do: + indices.create: + index: my-index2 + body: + settings: + index: + mode: logsdb + mappings: + properties: + "@timestamp": + type: date + host.name: + type: keyword + store: false + message: + type: text + store: false + fields: + raw: + type: keyword + store: true + + - do: + bulk: + index: my-index2 + refresh: true + body: + - { "index": { } } + - { "@timestamp": "2024-02-12T10:30:00Z", "host.name": "foo", "message": "No, I am your father." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:31:00Z", "host.name": "bar", "message": "Do. Or do not. There is no try." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:32:00Z", "host.name": "foo", "message": "May the force be with you." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:33:00Z", "host.name": "baz", "message": "I find your lack of faith disturbing." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:34:00Z", "host.name": "baz", "message": "Wars not make one great." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:35:00Z", "host.name": "foo", "message": "That's no moon. It's a space station." } + + - do: + esql.query: + body: + query: 'FROM my-index2 | SORT host.name, @timestamp | LIMIT 1' + + - match: {columns.0.name: "@timestamp"} + - match: {columns.0.type: "date"} + - match: {columns.1.name: "host.name"} + - match: {columns.1.type: "keyword"} + - match: {columns.2.name: "message"} + - match: {columns.2.type: "text"} + - match: {columns.3.name: "message.raw"} + - match: {columns.3.type: "keyword"} + + - match: {values.0.0: "2024-02-12T10:31:00.000Z"} + - match: {values.0.1: "bar"} + - match: {values.0.2: "Do. Or do not. There is no try."} + - match: {values.0.3: "Do. Or do not. 
There is no try."} + + - do: + esql.query: + body: + query: 'FROM my-index2 | SORT host.name, @timestamp | KEEP message | LIMIT 10' + + - match: {columns.0.name: "message"} + - match: {columns.0.type: "text"} + + - match: {values.0.0: "Do. Or do not. There is no try."} + - match: {values.1.0: "I find your lack of faith disturbing."} + - match: {values.2.0: "Wars not make one great."} + - match: {values.3.0: "No, I am your father."} + - match: {values.4.0: "May the force be with you."} + - match: {values.5.0: "That's no moon. It's a space station."} + diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java index e8fd0da496bbe..b43d87c17e644 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java +++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java @@ -339,8 +339,7 @@ protected Object parseSourceValue(Object value) { BlockSourceReader.LeafIteratorLookup lookup = isStored() || isIndexed() ? 
BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) : BlockSourceReader.lookupMatchingAll(); - var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); - return new BlockSourceReader.LongsBlockLoader(valueFetcher, lookup, sourceMode); + return new BlockSourceReader.LongsBlockLoader(valueFetcher, lookup); } @Override From 291ced7b482ab952f420993f43cacac49a2f9a9e Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Wed, 23 Oct 2024 10:23:32 +0100 Subject: [PATCH 314/449] Change from Version to BuildVersion in PersistedClusterStateService (#115301) --- .../elasticsearch/env/NodeEnvironmentIT.java | 4 ++-- .../org/elasticsearch/env/BuildVersion.java | 1 - .../env/DefaultBuildVersion.java | 2 +- .../org/elasticsearch/env/NodeMetadata.java | 5 ---- .../env/OverrideNodeVersionCommand.java | 6 ++--- .../gateway/PersistedClusterStateService.java | 24 +++++++------------ .../elasticsearch/env/NodeMetadataTests.java | 8 ------- .../env/OverrideNodeVersionCommandTests.java | 18 +++++++------- .../PersistedClusterStateServiceTests.java | 10 +++++--- 9 files changed, 32 insertions(+), 46 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/env/NodeEnvironmentIT.java b/server/src/internalClusterTest/java/org/elasticsearch/env/NodeEnvironmentIT.java index f813932ebe924..ecd5c5af8649f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/env/NodeEnvironmentIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/env/NodeEnvironmentIT.java @@ -123,7 +123,7 @@ public Settings onNodeStopped(String nodeName) { public void testFailsToStartIfDowngraded() { final IllegalStateException illegalStateException = expectThrowsOnRestart( - dataPaths -> PersistedClusterStateService.overrideVersion(NodeMetadataTests.tooNewVersion(), dataPaths) + dataPaths -> PersistedClusterStateService.overrideVersion(NodeMetadataTests.tooNewBuildVersion(), dataPaths) ); assertThat( illegalStateException.getMessage(), @@ -133,7 
+133,7 @@ public void testFailsToStartIfDowngraded() { public void testFailsToStartIfUpgradedTooFar() { final IllegalStateException illegalStateException = expectThrowsOnRestart( - dataPaths -> PersistedClusterStateService.overrideVersion(NodeMetadataTests.tooOldVersion(), dataPaths) + dataPaths -> PersistedClusterStateService.overrideVersion(NodeMetadataTests.tooOldBuildVersion(), dataPaths) ); assertThat( illegalStateException.getMessage(), diff --git a/server/src/main/java/org/elasticsearch/env/BuildVersion.java b/server/src/main/java/org/elasticsearch/env/BuildVersion.java index 0de346249ccbc..42c45a14977eb 100644 --- a/server/src/main/java/org/elasticsearch/env/BuildVersion.java +++ b/server/src/main/java/org/elasticsearch/env/BuildVersion.java @@ -59,7 +59,6 @@ public abstract class BuildVersion { public abstract boolean isFutureVersion(); // temporary - // TODO[wrb]: remove from PersistedClusterStateService // TODO[wrb]: remove from security bootstrap checks @Deprecated public Version toVersion() { diff --git a/server/src/main/java/org/elasticsearch/env/DefaultBuildVersion.java b/server/src/main/java/org/elasticsearch/env/DefaultBuildVersion.java index e0531b5a192a0..dcc5ed3aee3f8 100644 --- a/server/src/main/java/org/elasticsearch/env/DefaultBuildVersion.java +++ b/server/src/main/java/org/elasticsearch/env/DefaultBuildVersion.java @@ -72,6 +72,6 @@ public int hashCode() { @Override public String toString() { - return Version.fromId(versionId).toString(); + return version.toString(); } } diff --git a/server/src/main/java/org/elasticsearch/env/NodeMetadata.java b/server/src/main/java/org/elasticsearch/env/NodeMetadata.java index 6a72a7e7fcda5..5b2ee39c1b622 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeMetadata.java +++ b/server/src/main/java/org/elasticsearch/env/NodeMetadata.java @@ -42,7 +42,6 @@ public final class NodeMetadata { private final IndexVersion oldestIndexVersion; - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // version 
should be non-null in the node metadata from v9 onwards private NodeMetadata( final String nodeId, final BuildVersion buildVersion, @@ -112,11 +111,7 @@ public IndexVersion oldestIndexVersion() { return oldestIndexVersion; } - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) public void verifyUpgradeToCurrentVersion() { - // Enable the following assertion for V9: - // assert (nodeVersion.equals(BuildVersion.empty()) == false) : "version is required in the node metadata from v9 onwards"; - if (nodeVersion.onOrAfterMinimumCompatible() == false) { throw new IllegalStateException( "cannot upgrade a node from version [" diff --git a/server/src/main/java/org/elasticsearch/env/OverrideNodeVersionCommand.java b/server/src/main/java/org/elasticsearch/env/OverrideNodeVersionCommand.java index 96158965cddfe..1ddc8d5b26bd9 100644 --- a/server/src/main/java/org/elasticsearch/env/OverrideNodeVersionCommand.java +++ b/server/src/main/java/org/elasticsearch/env/OverrideNodeVersionCommand.java @@ -74,7 +74,7 @@ protected void processDataPaths(Terminal terminal, Path[] paths, OptionSet optio "found [" + nodeMetadata + "] which is compatible with current version [" - + Version.CURRENT + + BuildVersion.current() + "], so there is no need to override the version checks" ); } catch (IllegalStateException e) { @@ -86,10 +86,10 @@ protected void processDataPaths(Terminal terminal, Path[] paths, OptionSet optio (nodeMetadata.nodeVersion().onOrAfterMinimumCompatible() == false ? 
TOO_OLD_MESSAGE : TOO_NEW_MESSAGE).replace( "V_OLD", nodeMetadata.nodeVersion().toString() - ).replace("V_NEW", nodeMetadata.nodeVersion().toString()).replace("V_CUR", Version.CURRENT.toString()) + ).replace("V_NEW", nodeMetadata.nodeVersion().toString()).replace("V_CUR", BuildVersion.current().toString()) ); - PersistedClusterStateService.overrideVersion(Version.CURRENT, paths); + PersistedClusterStateService.overrideVersion(BuildVersion.current(), paths); terminal.println(SUCCESS_MESSAGE); } diff --git a/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java b/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java index 0c6cf2c8a0761..92b8686700a05 100644 --- a/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java @@ -42,7 +42,6 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.InfoStream; import org.apache.lucene.util.SetOnce; -import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.MappingMetadata; @@ -159,8 +158,6 @@ public class PersistedClusterStateService { public static final int IS_LAST_PAGE = 1; public static final int IS_NOT_LAST_PAGE = 0; private static final int COMMIT_DATA_SIZE = 7; - // We added CLUSTER_UUID_KEY and CLUSTER_UUID_COMMITTED_KEY in 8.8 - private static final int COMMIT_DATA_SIZE_BEFORE_8_8 = 5; private static final MergePolicy NO_MERGE_POLICY = noMergePolicy(); private static final MergePolicy DEFAULT_MERGE_POLICY = defaultMergePolicy(); @@ -350,7 +347,7 @@ public record OnDiskStateMetadata( @Nullable public static NodeMetadata nodeMetadata(Path... 
dataPaths) throws IOException { String nodeId = null; - Version version = null; + BuildVersion version = null; IndexVersion oldestIndexVersion = IndexVersions.ZERO; for (final Path dataPath : dataPaths) { final Path indexPath = dataPath.resolve(METADATA_DIRECTORY_NAME); @@ -367,7 +364,7 @@ public static NodeMetadata nodeMetadata(Path... dataPaths) throws IOException { ); } else if (nodeId == null) { nodeId = thisNodeId; - version = Version.fromId(Integer.parseInt(userData.get(NODE_VERSION_KEY))); + version = BuildVersion.fromVersionId(Integer.parseInt(userData.get(NODE_VERSION_KEY))); if (userData.containsKey(OLDEST_INDEX_VERSION_KEY)) { oldestIndexVersion = IndexVersion.fromId(Integer.parseInt(userData.get(OLDEST_INDEX_VERSION_KEY))); } else { @@ -382,14 +379,13 @@ public static NodeMetadata nodeMetadata(Path... dataPaths) throws IOException { if (nodeId == null) { return null; } - // TODO: remove use of Version here (ES-7343) - return new NodeMetadata(nodeId, BuildVersion.fromVersionId(version.id()), oldestIndexVersion); + return new NodeMetadata(nodeId, version, oldestIndexVersion); } /** * Overrides the version field for the metadata in the given data path */ - public static void overrideVersion(Version newVersion, Path... dataPaths) throws IOException { + public static void overrideVersion(BuildVersion newVersion, Path... dataPaths) throws IOException { for (final Path dataPath : dataPaths) { final Path indexPath = dataPath.resolve(METADATA_DIRECTORY_NAME); if (Files.exists(indexPath)) { @@ -399,7 +395,7 @@ public static void overrideVersion(Version newVersion, Path... 
dataPaths) throws try (IndexWriter indexWriter = createIndexWriter(new NIOFSDirectory(dataPath.resolve(METADATA_DIRECTORY_NAME)), true)) { final Map commitData = new HashMap<>(userData); - commitData.put(NODE_VERSION_KEY, Integer.toString(newVersion.id)); + commitData.put(NODE_VERSION_KEY, Integer.toString(newVersion.id())); commitData.put(OVERRIDDEN_NODE_VERSION_KEY, Boolean.toString(true)); indexWriter.setLiveCommitData(commitData.entrySet()); indexWriter.commit(); @@ -664,11 +660,9 @@ public OnDiskStateMetadata loadOnDiskStateMetadataFromUserData(Map commitData = Maps.newMapWithExpectedSize(COMMIT_DATA_SIZE); commitData.put(CURRENT_TERM_KEY, Long.toString(currentTerm)); commitData.put(LAST_ACCEPTED_VERSION_KEY, Long.toString(lastAcceptedVersion)); - commitData.put(NODE_VERSION_KEY, Integer.toString(Version.CURRENT.id)); + commitData.put(NODE_VERSION_KEY, Integer.toString(BuildVersion.current().id())); commitData.put(OLDEST_INDEX_VERSION_KEY, Integer.toString(oldestIndexVersion.id())); commitData.put(NODE_ID_KEY, nodeId); commitData.put(CLUSTER_UUID_KEY, clusterUUID); diff --git a/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java b/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java index 8bfd4c7c5ac68..22308e15f4845 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java @@ -167,10 +167,6 @@ public void testUpgradeMarksPreviousVersion() { assertThat(nodeMetadata.previousNodeVersion(), equalTo(buildVersion)); } - public static Version tooNewVersion() { - return Version.fromId(between(Version.CURRENT.id + 1, 99999999)); - } - public static IndexVersion tooNewIndexVersion() { return IndexVersion.fromId(between(IndexVersion.current().id() + 1, 99999999)); } @@ -179,10 +175,6 @@ public static BuildVersion tooNewBuildVersion() { return BuildVersion.fromVersionId(between(Version.CURRENT.id() + 1, 99999999)); } - public static Version 
tooOldVersion() { - return Version.fromId(between(1, Version.CURRENT.minimumCompatibilityVersion().id - 1)); - } - public static BuildVersion tooOldBuildVersion() { return BuildVersion.fromVersionId(between(1, Version.CURRENT.minimumCompatibilityVersion().id - 1)); } diff --git a/server/src/test/java/org/elasticsearch/env/OverrideNodeVersionCommandTests.java b/server/src/test/java/org/elasticsearch/env/OverrideNodeVersionCommandTests.java index bf3fc1697aa44..c7614e2d98eed 100644 --- a/server/src/test/java/org/elasticsearch/env/OverrideNodeVersionCommandTests.java +++ b/server/src/test/java/org/elasticsearch/env/OverrideNodeVersionCommandTests.java @@ -96,7 +96,9 @@ public void testFailsOnEmptyPath() { } public void testFailsIfUnnecessary() throws IOException { - final Version nodeVersion = Version.fromId(between(Version.CURRENT.minimumCompatibilityVersion().id, Version.CURRENT.id)); + final BuildVersion nodeVersion = BuildVersion.fromVersionId( + between(Version.CURRENT.minimumCompatibilityVersion().id, Version.CURRENT.id) + ); PersistedClusterStateService.overrideVersion(nodeVersion, dataPaths); final MockTerminal mockTerminal = MockTerminal.create(); final ElasticsearchException elasticsearchException = expectThrows( @@ -107,7 +109,7 @@ public void testFailsIfUnnecessary() throws IOException { elasticsearchException.getMessage(), allOf( containsString("compatible with current version"), - containsString(Version.CURRENT.toString()), + containsString(BuildVersion.current().toString()), containsString(nodeVersion.toString()) ) ); @@ -115,7 +117,7 @@ public void testFailsIfUnnecessary() throws IOException { } public void testWarnsIfTooOld() throws Exception { - final Version nodeVersion = NodeMetadataTests.tooOldVersion(); + final BuildVersion nodeVersion = NodeMetadataTests.tooOldBuildVersion(); PersistedClusterStateService.overrideVersion(nodeVersion, dataPaths); final MockTerminal mockTerminal = MockTerminal.create(); mockTerminal.addTextInput("n"); @@ -137,11 
+139,11 @@ public void testWarnsIfTooOld() throws Exception { expectThrows(IllegalStateException.class, () -> mockTerminal.readText("")); final NodeMetadata nodeMetadata = PersistedClusterStateService.nodeMetadata(dataPaths); - assertThat(nodeMetadata.nodeVersion().toVersion(), equalTo(nodeVersion)); + assertThat(nodeMetadata.nodeVersion(), equalTo(nodeVersion)); } public void testWarnsIfTooNew() throws Exception { - final Version nodeVersion = NodeMetadataTests.tooNewVersion(); + final BuildVersion nodeVersion = NodeMetadataTests.tooNewBuildVersion(); PersistedClusterStateService.overrideVersion(nodeVersion, dataPaths); final MockTerminal mockTerminal = MockTerminal.create(); mockTerminal.addTextInput(randomFrom("yy", "Yy", "n", "yes", "true", "N", "no")); @@ -162,11 +164,11 @@ public void testWarnsIfTooNew() throws Exception { expectThrows(IllegalStateException.class, () -> mockTerminal.readText("")); final NodeMetadata nodeMetadata = PersistedClusterStateService.nodeMetadata(dataPaths); - assertThat(nodeMetadata.nodeVersion().toVersion(), equalTo(nodeVersion)); + assertThat(nodeMetadata.nodeVersion(), equalTo(nodeVersion)); } public void testOverwritesIfTooOld() throws Exception { - final Version nodeVersion = NodeMetadataTests.tooOldVersion(); + final BuildVersion nodeVersion = NodeMetadataTests.tooOldBuildVersion(); PersistedClusterStateService.overrideVersion(nodeVersion, dataPaths); final MockTerminal mockTerminal = MockTerminal.create(); mockTerminal.addTextInput(randomFrom("y", "Y")); @@ -189,7 +191,7 @@ public void testOverwritesIfTooOld() throws Exception { } public void testOverwritesIfTooNew() throws Exception { - final Version nodeVersion = NodeMetadataTests.tooNewVersion(); + final BuildVersion nodeVersion = NodeMetadataTests.tooNewBuildVersion(); PersistedClusterStateService.overrideVersion(nodeVersion, dataPaths); final MockTerminal mockTerminal = MockTerminal.create(); mockTerminal.addTextInput(randomFrom("y", "Y")); diff --git 
a/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java b/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java index 450d123f551c8..4428a7e078510 100644 --- a/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java @@ -54,6 +54,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.env.BuildVersion; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; @@ -1414,14 +1415,17 @@ public void testOverrideLuceneVersion() throws IOException { assertThat(clusterState.metadata().version(), equalTo(version)); } + @UpdateForV9(owner = UpdateForV9.Owner.SEARCH_FOUNDATIONS) + BuildVersion overrideVersion = BuildVersion.fromVersionId(Version.V_8_0_0.id); + NodeMetadata prevMetadata = PersistedClusterStateService.nodeMetadata(persistedClusterStateService.getDataPaths()); assertEquals(BuildVersion.current(), prevMetadata.nodeVersion()); - PersistedClusterStateService.overrideVersion(Version.V_8_0_0, persistedClusterStateService.getDataPaths()); + PersistedClusterStateService.overrideVersion(overrideVersion, persistedClusterStateService.getDataPaths()); NodeMetadata metadata = PersistedClusterStateService.nodeMetadata(persistedClusterStateService.getDataPaths()); - assertEquals(BuildVersion.fromVersionId(Version.V_8_0_0.id()), metadata.nodeVersion()); + assertEquals(overrideVersion, metadata.nodeVersion()); for (Path p : persistedClusterStateService.getDataPaths()) { NodeMetadata individualMetadata = PersistedClusterStateService.nodeMetadata(p); - assertEquals(BuildVersion.fromVersionId(Version.V_8_0_0.id()), individualMetadata.nodeVersion()); + assertEquals(overrideVersion, individualMetadata.nodeVersion()); } } } 
From 0f7ddd5c9878e03f6f0ce6ac6bce58c609e25ff5 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Wed, 23 Oct 2024 13:34:37 +0100 Subject: [PATCH 315/449] [ML] New names for the default inference endpoints (#115395) The new names are .elser-2-elasticsearch and .multilingual-e5-small-elasticsearch --- .../services/elasticsearch/ElasticsearchInternalService.java | 4 ++-- .../elasticsearch/ElasticsearchInternalServiceTests.java | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 49919fda9f89d..6732e5719b897 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -85,8 +85,8 @@ public class ElasticsearchInternalService extends BaseElasticsearchInternalServi ); public static final int EMBEDDING_MAX_BATCH_SIZE = 10; - public static final String DEFAULT_ELSER_ID = ".elser-2"; - public static final String DEFAULT_E5_ID = ".multi-e5-small"; + public static final String DEFAULT_ELSER_ID = ".elser-2-elasticsearch"; + public static final String DEFAULT_E5_ID = ".multilingual-e5-small-elasticsearch"; private static final Logger logger = LogManager.getLogger(ElasticsearchInternalService.class); private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(ElasticsearchInternalService.class); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index b82b8a08f2175..5ec66687752a8 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -1561,8 +1561,8 @@ public void testEmbeddingTypeFromTaskTypeAndSettings() { public void testIsDefaultId() { var service = createService(mock(Client.class)); - assertTrue(service.isDefaultId(".elser-2")); - assertTrue(service.isDefaultId(".multi-e5-small")); + assertTrue(service.isDefaultId(".elser-2-elasticsearch")); + assertTrue(service.isDefaultId(".multilingual-e5-small-elasticsearch")); assertFalse(service.isDefaultId("foo")); } From 91a5a2e6a1eefa64bfbd39db62cb50a478f022fb Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Wed, 23 Oct 2024 15:45:09 +0300 Subject: [PATCH 316/449] Unmute SearchWithMinCompatibleSearchNodeIT tests muted for 7.17 (#115386) --- muted-tests.yml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 45b1398df7ace..c248729b539fd 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -23,12 +23,6 @@ tests: - class: org.elasticsearch.index.store.FsDirectoryFactoryTests method: testPreload issue: https://github.com/elastic/elasticsearch/issues/110211 -- class: org.elasticsearch.backwards.SearchWithMinCompatibleSearchNodeIT - method: testMinVersionAsNewVersion - issue: https://github.com/elastic/elasticsearch/issues/95384 -- class: org.elasticsearch.backwards.SearchWithMinCompatibleSearchNodeIT - method: testCcsMinimizeRoundtripsIsFalse - issue: https://github.com/elastic/elasticsearch/issues/101974 - class: "org.elasticsearch.xpack.searchablesnapshots.FrozenSearchableSnapshotsIntegTests" issue: 
"https://github.com/elastic/elasticsearch/issues/110408" method: "testCreateAndRestorePartialSearchableSnapshot" From 9e95cbd86b3e2009d679c0bdbacb35f1a8cc7e27 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 24 Oct 2024 00:00:13 +1100 Subject: [PATCH 317/449] Mute org.elasticsearch.smoketest.SmokeTestIngestWithAllDepsClientYamlTestSuiteIT test {yaml=ingest/80_ingest_simulate/Test mapping addition works with legacy templates} #115412 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index c248729b539fd..93714e098f677 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -297,6 +297,9 @@ tests: - class: org.elasticsearch.reservedstate.service.FileSettingsServiceTests method: testProcessFileChanges issue: https://github.com/elastic/elasticsearch/issues/115280 +- class: org.elasticsearch.smoketest.SmokeTestIngestWithAllDepsClientYamlTestSuiteIT + method: test {yaml=ingest/80_ingest_simulate/Test mapping addition works with legacy templates} + issue: https://github.com/elastic/elasticsearch/issues/115412 # Examples: # From 728ac0c8a790fd9afc2dfc970d5c2e39c36cedc9 Mon Sep 17 00:00:00 2001 From: Jan Kuipers <148754765+jan-elastic@users.noreply.github.com> Date: Wed, 23 Oct 2024 15:07:04 +0200 Subject: [PATCH 318/449] adaptive allocations: reset time interval with zero requests upon starting an allocation (#115400) --- .../AdaptiveAllocationsScaler.java | 9 ++++-- .../AdaptiveAllocationsScalerService.java | 11 ++++++- .../AdaptiveAllocationsScalerTests.java | 29 +++++++++++++++++++ 3 files changed, 46 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScaler.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScaler.java index bbd63e0d3bfe9..0dec99a9b9bb9 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScaler.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScaler.java @@ -33,6 +33,7 @@ public class AdaptiveAllocationsScaler { private final String deploymentId; private final KalmanFilter1d requestRateEstimator; private final KalmanFilter1d inferenceTimeEstimator; + private final long scaleToZeroAfterNoRequestsSeconds; private double timeWithoutRequestsSeconds; private int numberOfAllocations; @@ -44,10 +45,11 @@ public class AdaptiveAllocationsScaler { private Double lastMeasuredRequestRate; private Double lastMeasuredInferenceTime; private Long lastMeasuredQueueSize; - private long scaleToZeroAfterNoRequestsSeconds; AdaptiveAllocationsScaler(String deploymentId, int numberOfAllocations, long scaleToZeroAfterNoRequestsSeconds) { this.deploymentId = deploymentId; + this.scaleToZeroAfterNoRequestsSeconds = scaleToZeroAfterNoRequestsSeconds; + // A smoothing factor of 100 roughly means the last 100 measurements have an effect // on the estimated values. The sampling time is 10 seconds, so approximately the // last 15 minutes are taken into account. @@ -67,7 +69,6 @@ public class AdaptiveAllocationsScaler { lastMeasuredRequestRate = null; lastMeasuredInferenceTime = null; lastMeasuredQueueSize = null; - this.scaleToZeroAfterNoRequestsSeconds = scaleToZeroAfterNoRequestsSeconds; } void setMinMaxNumberOfAllocations(Integer minNumberOfAllocations, Integer maxNumberOfAllocations) { @@ -117,6 +118,10 @@ void process(AdaptiveAllocationsScalerService.Stats stats, double timeIntervalSe dynamicsChanged = false; } + void resetTimeWithoutRequests() { + timeWithoutRequestsSeconds = 0; + } + double getLoadLower() { double requestRateLower = Math.max(0.0, requestRateEstimator.lower()); double inferenceTimeLower = Math.max(0.0, inferenceTimeEstimator.hasValue() ? 
inferenceTimeEstimator.lower() : 1.0); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java index 770e890512935..16ec3ee9b468c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java @@ -188,7 +188,10 @@ Collection observeDouble(Function Date: Wed, 23 Oct 2024 09:16:13 -0400 Subject: [PATCH 319/449] [ML] Increase default queue_capacity to 10_000 and decrease max queue_capacity to 100_000 (#115041) * Increase default queue capacity and decrease max queue capacity * Update docs/changelog/115041.yaml * Update tests to match new constants --- docs/changelog/115041.yaml | 6 ++++++ .../ml/action/StartTrainedModelDeploymentAction.java | 4 ++-- .../StartTrainedModelDeploymentRequestTests.java | 10 +++++----- 3 files changed, 13 insertions(+), 7 deletions(-) create mode 100644 docs/changelog/115041.yaml diff --git a/docs/changelog/115041.yaml b/docs/changelog/115041.yaml new file mode 100644 index 0000000000000..f4c047c1569ec --- /dev/null +++ b/docs/changelog/115041.yaml @@ -0,0 +1,6 @@ +pr: 115041 +summary: Increase default `queue_capacity` to 10_000 and decrease max `queue_capacity` + to 100_000 +area: Machine Learning +type: enhancement +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java index ca789fee7b744..b298d486c9e03 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java @@ -71,7 +71,7 @@ public class StartTrainedModelDeploymentAction extends ActionType implements ToXCon /** * If the queue is created then we can OOM when we create the queue. */ - private static final int MAX_QUEUE_CAPACITY = 1_000_000; + private static final int MAX_QUEUE_CAPACITY = 100_000; public static final ParseField MODEL_ID = new ParseField("model_id"); public static final ParseField DEPLOYMENT_ID = new ParseField("deployment_id"); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentRequestTests.java index 730d994fc5e35..46fc8a36c2c2b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentRequestTests.java @@ -67,7 +67,7 @@ public static Request createRandom() { request.setNumberOfAllocations(randomIntBetween(1, 8)); } if (randomBoolean()) { - request.setQueueCapacity(randomIntBetween(1, 1000000)); + request.setQueueCapacity(randomIntBetween(1, 100_000)); } if (randomBoolean()) { request.setPriority(randomFrom(Priority.values()).toString()); @@ -168,7 +168,7 @@ public void testValidate_GivenQueueCapacityIsNegative() { public void testValidate_GivenQueueCapacityIsAtLimit() { Request request = createRandom(); - request.setQueueCapacity(1_000_000); + request.setQueueCapacity(100_000); ActionRequestValidationException e = request.validate(); @@ -177,12 +177,12 @@ public void testValidate_GivenQueueCapacityIsAtLimit() { public void testValidate_GivenQueueCapacityIsOverLimit() { Request request = createRandom(); - request.setQueueCapacity(1_000_001); + request.setQueueCapacity(100_001); 
ActionRequestValidationException e = request.validate(); assertThat(e, is(not(nullValue()))); - assertThat(e.getMessage(), containsString("[queue_capacity] must be less than 1000000")); + assertThat(e.getMessage(), containsString("[queue_capacity] must be less than 100000")); } public void testValidate_GivenTimeoutIsNegative() { @@ -234,6 +234,6 @@ public void testDefaults() { assertThat(request.getNumberOfAllocations(), nullValue()); assertThat(request.computeNumberOfAllocations(), equalTo(1)); assertThat(request.getThreadsPerAllocation(), equalTo(1)); - assertThat(request.getQueueCapacity(), equalTo(1024)); + assertThat(request.getQueueCapacity(), equalTo(10_000)); } } From 43a6b3592eda425338ddecd357f11956710e2ca6 Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Wed, 23 Oct 2024 16:29:03 +0300 Subject: [PATCH 320/449] Unmuting RankDocsRetrieverBuilderTests testRewrite (#115403) --- muted-tests.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 93714e098f677..f59ca0c213279 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -242,9 +242,6 @@ tests: - class: org.elasticsearch.xpack.inference.InferenceRestIT method: test {p0=inference/40_semantic_text_query/Query a field that uses the default ELSER 2 endpoint} issue: https://github.com/elastic/elasticsearch/issues/114376 -- class: org.elasticsearch.search.retriever.RankDocsRetrieverBuilderTests - method: testRewrite - issue: https://github.com/elastic/elasticsearch/issues/114467 - class: org.elasticsearch.packaging.test.DockerTests method: test022InstallPluginsFromLocalArchive issue: https://github.com/elastic/elasticsearch/issues/111063 From 98d53352160d6d2c397ea74567ebc690b701973f Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Wed, 23 Oct 2024 15:29:53 +0200 Subject: [PATCH 321/449] ESQL: Disable pushdown of WHERE past STATS (#115308) Fix https://github.com/elastic/elasticsearch/issues/115281 Let's disable the faulty optimization for now and re-introduce it 
later, correctly. --- docs/changelog/115308.yaml | 6 +++ .../src/main/resources/stats.csv-spec | 27 +++++++++++++ .../xpack/esql/action/EsqlCapabilities.java | 7 +++- .../logical/PushDownAndCombineFilters.java | 15 ++----- .../optimizer/LogicalPlanOptimizerTests.java | 40 +++++++++++-------- .../PushDownAndCombineFiltersTests.java | 1 + 6 files changed, 67 insertions(+), 29 deletions(-) create mode 100644 docs/changelog/115308.yaml diff --git a/docs/changelog/115308.yaml b/docs/changelog/115308.yaml new file mode 100644 index 0000000000000..163f0232a3e58 --- /dev/null +++ b/docs/changelog/115308.yaml @@ -0,0 +1,6 @@ +pr: 115308 +summary: "ESQL: Disable pushdown of WHERE past STATS" +area: ES|QL +type: bug +issues: + - 115281 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index ac4351413129e..6d4c596e8d7de 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -2291,6 +2291,33 @@ m:integer |a:double |x:integer 74999 |48249.0 |0 ; +statsWithFilterOnGroups +required_capability: fix_filter_pushdown_past_stats +FROM employees +| STATS v = VALUES(emp_no) by job_positions | WHERE job_positions == "Accountant" | MV_EXPAND v | SORT v +; + +v:integer | job_positions:keyword + 10001 | Accountant + 10012 | Accountant + 10016 | Accountant + 10023 | Accountant + 10025 | Accountant + 10028 | Accountant + 10034 | Accountant + 10037 | Accountant + 10044 | Accountant + 10045 | Accountant + 10050 | Accountant + 10051 | Accountant + 10066 | Accountant + 10081 | Accountant + 10085 | Accountant + 10089 | Accountant + 10092 | Accountant + 10094 | Accountant +; + statsWithFiltering required_capability: per_agg_filtering diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 94211e4726a2c..5157a80022c39 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -411,7 +411,12 @@ public enum Cap { /** * Support for semantic_text field mapping */ - SEMANTIC_TEXT_TYPE(EsqlCorePlugin.SEMANTIC_TEXT_FEATURE_FLAG); + SEMANTIC_TEXT_TYPE(EsqlCorePlugin.SEMANTIC_TEXT_FEATURE_FLAG), + /** + * Fix for an optimization that caused wrong results + * https://github.com/elastic/elasticsearch/issues/115281 + */ + FIX_FILTER_PUSHDOWN_PAST_STATS; private final boolean enabled; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFilters.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFilters.java index ed09d0bc16754..15e49c22a44db 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFilters.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFilters.java @@ -15,8 +15,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; -import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; -import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Filter; @@ -38,18 +36,13 @@ protected LogicalPlan rule(Filter filter) { LogicalPlan child = filter.child(); Expression condition = filter.condition(); + // TODO: Push down past STATS if the 
filter is only on the groups; but take into account how `STATS ... BY field` handles + // multi-values: It seems to be equivalent to `EVAL field = MV_DEDUPE(field) | MV_EXPAND(field) | STATS ... BY field`, where the + // last `STATS ... BY field` can assume that `field` is single-valued (to be checked more thoroughly). + // https://github.com/elastic/elasticsearch/issues/115311 if (child instanceof Filter f) { // combine nodes into a single Filter with updated ANDed condition plan = f.with(Predicates.combineAnd(List.of(f.condition(), condition))); - } else if (child instanceof Aggregate agg) { // TODO: re-evaluate along with multi-value support - // Only push [parts of] a filter past an agg if these/it operates on agg's grouping[s], not output. - plan = maybePushDownPastUnary( - filter, - agg, - e -> e instanceof Attribute && agg.output().contains(e) && agg.groupings().contains(e) == false - || e instanceof AggregateFunction, - NO_OP - ); } else if (child instanceof Eval eval) { // Don't push if Filter (still) contains references to Eval's fields. // Account for simple aliases in the Eval, though - these shouldn't stop us. 
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 8d7c1997f78e3..ff7675504d6ff 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -738,6 +738,7 @@ public void testMultipleCombineLimits() { ); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/115311") public void testSelectivelyPushDownFilterPastRefAgg() { // expected plan: "from test | where emp_no > 1 and emp_no < 3 | stats x = count(1) by emp_no | where x > 7" LogicalPlan plan = optimizedPlan(""" @@ -790,6 +791,7 @@ public void testNoPushDownOrFilterPastAgg() { assertTrue(stats.child() instanceof EsRelation); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/115311") public void testSelectivePushDownComplexFilterPastAgg() { // expected plan: from test | emp_no > 0 | stats x = count(1) by emp_no | where emp_no < 3 or x > 9 LogicalPlan plan = optimizedPlan(""" @@ -1393,13 +1395,15 @@ public void testPushDownLimitThroughMultipleSort_AfterMvExpand2() { } /** + * TODO: Push down the filter correctly https://github.com/elastic/elasticsearch/issues/115311 + * * Expected * Limit[5[INTEGER]] - * \_Aggregate[[first_name{f}#232],[MAX(salary{f}#233) AS max_s, first_name{f}#232]] - * \_Filter[ISNOTNULL(first_name{f}#232)] - * \_MvExpand[first_name{f}#232] - * \_TopN[[Order[emp_no{f}#231,ASC,LAST]],50[INTEGER]] - * \_EsRelation[employees][emp_no{f}#231, first_name{f}#232, salary{f}#233] + * \_Filter[ISNOTNULL(first_name{r}#23)] + * \_Aggregate[STANDARD,[first_name{r}#23],[MAX(salary{f}#18,true[BOOLEAN]) AS max_s, first_name{r}#23]] + * \_MvExpand[first_name{f}#14,first_name{r}#23] + * \_TopN[[Order[emp_no{f}#13,ASC,LAST]],50[INTEGER]] 
+ * \_EsRelation[test][_meta_field{f}#19, emp_no{f}#13, first_name{f}#14, ..] */ public void testDontPushDownLimitPastAggregate_AndMvExpand() { LogicalPlan plan = optimizedPlan(""" @@ -1413,10 +1417,10 @@ public void testDontPushDownLimitPastAggregate_AndMvExpand() { | limit 5"""); var limit = as(plan, Limit.class); + var filter = as(limit.child(), Filter.class); assertThat(limit.limit().fold(), equalTo(5)); - var agg = as(limit.child(), Aggregate.class); - var filter = as(agg.child(), Filter.class); - var mvExp = as(filter.child(), MvExpand.class); + var agg = as(filter.child(), Aggregate.class); + var mvExp = as(agg.child(), MvExpand.class); var topN = as(mvExp.child(), TopN.class); assertThat(topN.limit().fold(), equalTo(50)); assertThat(orderNames(topN), contains("emp_no")); @@ -1424,14 +1428,16 @@ public void testDontPushDownLimitPastAggregate_AndMvExpand() { } /** + * TODO: Push down the filter correctly https://github.com/elastic/elasticsearch/issues/115311 + * * Expected * Limit[5[INTEGER]] - * \_Aggregate[[first_name{f}#262],[MAX(salary{f}#263) AS max_s, first_name{f}#262]] - * \_Filter[ISNOTNULL(first_name{f}#262)] - * \_Limit[50[INTEGER]] - * \_MvExpand[first_name{f}#262] - * \_Limit[50[INTEGER]] - * \_EsRelation[employees][emp_no{f}#261, first_name{f}#262, salary{f}#263] + * \_Filter[ISNOTNULL(first_name{r}#22)] + * \_Aggregate[STANDARD,[first_name{r}#22],[MAX(salary{f}#17,true[BOOLEAN]) AS max_s, first_name{r}#22]] + * \_Limit[50[INTEGER]] + * \_MvExpand[first_name{f}#13,first_name{r}#22] + * \_Limit[50[INTEGER]] + * \_EsRelation[test][_meta_field{f}#18, emp_no{f}#12, first_name{f}#13, ..] 
*/ public void testPushDown_TheRightLimit_PastMvExpand() { LogicalPlan plan = optimizedPlan(""" @@ -1445,9 +1451,9 @@ public void testPushDown_TheRightLimit_PastMvExpand() { var limit = as(plan, Limit.class); assertThat(limit.limit().fold(), equalTo(5)); - var agg = as(limit.child(), Aggregate.class); - var filter = as(agg.child(), Filter.class); - limit = as(filter.child(), Limit.class); + var filter = as(limit.child(), Filter.class); + var agg = as(filter.child(), Aggregate.class); + limit = as(agg.child(), Limit.class); assertThat(limit.limit().fold(), equalTo(50)); var mvExp = as(limit.child(), MvExpand.class); limit = as(mvExp.child(), Limit.class); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFiltersTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFiltersTests.java index 49a738f4f4fa3..e159e5ed0bd7d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFiltersTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFiltersTests.java @@ -213,6 +213,7 @@ public void testPushDownLikeRlikeFilter() { // from ... | where a > 1 | stats count(1) by b | where count(1) >= 3 and b < 2 // => ... 
| where a > 1 and b < 2 | stats count(1) by b | where count(1) >= 3 + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/115311") public void testSelectivelyPushDownFilterPastFunctionAgg() { EsRelation relation = relation(); GreaterThan conditionA = greaterThanOf(getFieldAttribute("a"), ONE); From 1ca39789e27ce4ca4c1f4e88112aa24a87d96649 Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Wed, 23 Oct 2024 17:40:18 +0300 Subject: [PATCH 322/449] Updating error handling for compound retrievers (#115277) --- .../retriever/CompoundRetrieverBuilder.java | 19 +++++- .../xpack/rank/rrf/RRFRetrieverBuilderIT.java | 63 ++++++++++++++++--- 2 files changed, 74 insertions(+), 8 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java index 7373bc5b75049..b15798db95b6f 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java @@ -11,6 +11,8 @@ import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.util.SetOnce; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.search.MultiSearchRequest; @@ -20,6 +22,7 @@ import org.elasticsearch.action.search.TransportMultiSearchAction; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.builder.PointInTimeBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.StoredFieldsContext; @@ -121,10 +124,17 @@ public final RetrieverBuilder rewrite(QueryRewriteContext ctx) 
throws IOExceptio public void onResponse(MultiSearchResponse items) { List topDocs = new ArrayList<>(); List failures = new ArrayList<>(); + // capture the max status code returned by any of the responses + int statusCode = RestStatus.OK.getStatus(); + List retrieversWithFailures = new ArrayList<>(); for (int i = 0; i < items.getResponses().length; i++) { var item = items.getResponses()[i]; if (item.isFailure()) { failures.add(item.getFailure()); + retrieversWithFailures.add(innerRetrievers.get(i).retriever().getName()); + if (ExceptionsHelper.status(item.getFailure()).getStatus() > statusCode) { + statusCode = ExceptionsHelper.status(item.getFailure()).getStatus(); + } } else { assert item.getResponse() != null; var rankDocs = getRankDocs(item.getResponse()); @@ -133,7 +143,14 @@ public void onResponse(MultiSearchResponse items) { } } if (false == failures.isEmpty()) { - IllegalStateException ex = new IllegalStateException("Search failed - some nested retrievers returned errors."); + assert statusCode != RestStatus.OK.getStatus(); + final String errMessage = "[" + + getName() + + "] search failed - retrievers '" + + retrieversWithFailures + + "' returned errors. 
" + + "All failures are attached as suppressed exceptions."; + Exception ex = new ElasticsearchStatusException(errMessage, RestStatus.fromCode(statusCode)); failures.forEach(ex::addSuppressed); listener.onFailure(ex); } else { diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java index c5978219d94d3..37e1807d138aa 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java @@ -8,6 +8,8 @@ package org.elasticsearch.xpack.rank.rrf; import org.apache.lucene.search.TotalHits; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchRequestBuilder; @@ -18,6 +20,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -47,7 +50,6 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -589,11 +591,11 @@ public void testRRFExplainWithAnotherNestedRRF() { }); } - public void testRRFInnerRetrieverSearchError() { + public void testRRFInnerRetrieverAll4xxSearchErrors() { final 
int rankWindowSize = 100; final int rankConstant = 10; SearchSourceBuilder source = new SearchSourceBuilder(); - // this will throw an error during evaluation + // this will throw a 4xx error during evaluation StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder( QueryBuilders.constantScoreQuery(QueryBuilders.rangeQuery(VECTOR_FIELD).gte(10)) ); @@ -615,10 +617,57 @@ public void testRRFInnerRetrieverSearchError() { ) ); SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); - Exception ex = expectThrows(IllegalStateException.class, req::get); - assertThat(ex, instanceOf(IllegalStateException.class)); - assertThat(ex.getMessage(), containsString("Search failed - some nested retrievers returned errors")); - assertThat(ex.getSuppressed().length, greaterThan(0)); + Exception ex = expectThrows(ElasticsearchStatusException.class, req::get); + assertThat(ex, instanceOf(ElasticsearchStatusException.class)); + assertThat( + ex.getMessage(), + containsString( + "[rrf] search failed - retrievers '[standard]' returned errors. All failures are attached as suppressed exceptions." 
+ ) + ); + assertThat(ExceptionsHelper.status(ex), equalTo(RestStatus.BAD_REQUEST)); + assertThat(ex.getSuppressed().length, equalTo(1)); + assertThat(ex.getSuppressed()[0].getCause().getCause(), instanceOf(IllegalArgumentException.class)); + } + + public void testRRFInnerRetrieverMultipleErrorsOne5xx() { + final int rankWindowSize = 100; + final int rankConstant = 10; + SearchSourceBuilder source = new SearchSourceBuilder(); + // this will throw a 4xx error during evaluation + StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder( + QueryBuilders.constantScoreQuery(QueryBuilders.rangeQuery(VECTOR_FIELD).gte(10)) + ); + // this will throw a 5xx error + TestRetrieverBuilder testRetrieverBuilder = new TestRetrieverBuilder("val") { + @Override + public void extractToSearchSourceBuilder(SearchSourceBuilder searchSourceBuilder, boolean compoundUsed) { + searchSourceBuilder.aggregation(AggregationBuilders.avg("some_invalid_param")); + } + }; + source.retriever( + new RRFRetrieverBuilder( + Arrays.asList( + new CompoundRetrieverBuilder.RetrieverSource(standard0, null), + new CompoundRetrieverBuilder.RetrieverSource(testRetrieverBuilder, null) + ), + rankWindowSize, + rankConstant + ) + ); + SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); + Exception ex = expectThrows(ElasticsearchStatusException.class, req::get); + assertThat(ex, instanceOf(ElasticsearchStatusException.class)); + assertThat( + ex.getMessage(), + containsString( + "[rrf] search failed - retrievers '[standard, test]' returned errors. All failures are attached as suppressed exceptions." 
+ ) + ); + assertThat(ExceptionsHelper.status(ex), equalTo(RestStatus.INTERNAL_SERVER_ERROR)); + assertThat(ex.getSuppressed().length, equalTo(2)); + assertThat(ex.getSuppressed()[0].getCause().getCause(), instanceOf(IllegalArgumentException.class)); + assertThat(ex.getSuppressed()[1].getCause().getCause(), instanceOf(IllegalStateException.class)); } public void testRRFInnerRetrieverErrorWhenExtractingToSource() { From c6bd53f21b0a0fde6f51c97545543dbd2a2f7c65 Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Wed, 23 Oct 2024 17:17:38 +0200 Subject: [PATCH 323/449] Fix `FileSettingsRoleMappingUpgradeIT` assertions (#115422) Fixes some faulty assertions in an upgrade test. Test failures only manifest on the 8.16 branch since 9.x does not qualify for these upgrade tests, and the change is not backported to 8.17 yet (unrelated CI failures). I validated this works by running it locally from the 8.16 branch. Resolves: https://github.com/elastic/elasticsearch/issues/115410 Resolves: https://github.com/elastic/elasticsearch/issues/115411 --- .../FileSettingsRoleMappingUpgradeIT.java | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FileSettingsRoleMappingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FileSettingsRoleMappingUpgradeIT.java index b3d4dfc68d399..834d97f755dfb 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FileSettingsRoleMappingUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FileSettingsRoleMappingUpgradeIT.java @@ -25,12 +25,10 @@ import java.io.IOException; import java.util.List; -import java.util.Map; import java.util.function.Supplier; -import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.hasItem; import static 
org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; @@ -104,15 +102,17 @@ public void testRoleMappingsAppliedOnUpgrade() throws IOException { // the nodes have all been upgraded. Check they re-processed the role mappings in the settings file on // upgrade Request clusterStateRequest = new Request("GET", "/_cluster/state/metadata"); - List roleMappings = new XContentTestUtils.JsonMapView(entityAsMap(client().performRequest(clusterStateRequest))).get( - "metadata.role_mappings.role_mappings" + List clusterStateRoleMappings = new XContentTestUtils.JsonMapView( + entityAsMap(client().performRequest(clusterStateRequest)) + ).get("metadata.role_mappings.role_mappings"); + assertThat(clusterStateRoleMappings, is(not(nullValue()))); + assertThat(clusterStateRoleMappings.size(), equalTo(1)); + + assertThat( + entityAsMap(client().performRequest(new Request("GET", "/_security/role_mapping"))).keySet(), + // TODO change this to `contains` once the clean-up migration work is merged + hasItem("everyone_kibana-read-only-operator-mapping") ); - assertThat(roleMappings, is(not(nullValue()))); - assertThat(roleMappings.size(), equalTo(1)); - assertThat(roleMappings, is(instanceOf(Map.class))); - @SuppressWarnings("unchecked") - Map roleMapping = (Map) roleMappings; - assertThat(roleMapping.keySet(), contains("everyone_kibana-read-only-operator-mapping")); } } } From 1c38f87db49e7ea1bc0f3e80d2fe5561b3038caf Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Wed, 23 Oct 2024 11:24:55 -0400 Subject: [PATCH 324/449] Refactor PipelineConfiguration#getVersion (#115423) --- .../ingest/PipelineConfiguration.java | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java b/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java index de172d86b810d..7406ee8837264 100644 --- 
a/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java +++ b/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java @@ -99,18 +99,13 @@ BytesReference getConfig() { } public Integer getVersion() { - var configMap = getConfigAsMap(); - if (configMap.containsKey("version")) { - Object o = configMap.get("version"); - if (o == null) { - return null; - } else if (o instanceof Number number) { - return number.intValue(); - } else { - throw new IllegalStateException("unexpected version type [" + o.getClass().getName() + "]"); - } - } else { + Object o = getConfigAsMap().get("version"); + if (o == null) { return null; + } else if (o instanceof Number number) { + return number.intValue(); + } else { + throw new IllegalStateException("unexpected version type [" + o.getClass().getName() + "]"); } } From 4bbedb8aedb27b338f11a692e421e0a212f39f45 Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Wed, 23 Oct 2024 18:03:58 +0200 Subject: [PATCH 325/449] Fix file settings service test on windows (#115234) Fix unit test on windows: it looks like the replace-existing flag is necessary to avoid AccessDeniedExceptions like this [example failure](https://gradle-enterprise.elastic.co/s/4tjgx5vzblv36/tests/task/:server:test/details/org.elasticsearch.reservedstate.service.FileSettingsServiceTests/testProcessFileChanges?top-execution=1). 
Resolves: https://github.com/elastic/elasticsearch/issues/115280 --- .../service/FileSettingsServiceTests.java | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java index 8ee2754427dda..c0657b5888ad2 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java @@ -250,7 +250,7 @@ public void testProcessFileChanges() throws Exception { fileSettingsService.start(); fileSettingsService.clusterChanged(new ClusterChangedEvent("test", clusterService.state(), ClusterState.EMPTY_STATE)); // second file change; contents still don't matter - writeTestFile(fileSettingsService.watchedFile(), "{}"); + overwriteTestFile(fileSettingsService.watchedFile(), "{}"); // wait for listener to be called (once for initial processing, once for subsequent update) assertTrue(latch.await(20, TimeUnit.SECONDS)); @@ -355,6 +355,12 @@ public void testHandleSnapshotRestoreResetsMetadata() throws Exception { private void writeTestFile(Path path, String contents) throws IOException { Path tempFilePath = createTempFile(); Files.writeString(tempFilePath, contents); - Files.move(tempFilePath, path, StandardCopyOption.ATOMIC_MOVE); + Files.move(tempFilePath, path, StandardCopyOption.ATOMIC_MOVE, StandardCopyOption.REPLACE_EXISTING); + } + + private void overwriteTestFile(Path path, String contents) throws IOException { + Path tempFilePath = createTempFile(); + Files.writeString(tempFilePath, contents); + Files.move(tempFilePath, path, StandardCopyOption.REPLACE_EXISTING); } } From e581ae3482624983eeb4e201299735ed4ebc1066 Mon Sep 17 00:00:00 2001 From: shainaraskas <58563081+shainaraskas@users.noreply.github.com> Date: Wed, 23 Oct 2024 12:57:59 -0400 Subject: 
[PATCH 326/449] Reorder docs sidebar (#115360) --- docs/reference/index.asciidoc | 62 ++++++++++++------- .../release-notes/highlights.asciidoc | 11 ++-- 2 files changed, 46 insertions(+), 27 deletions(-) diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index 7e207146e38e3..18052cfb64e8f 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -6,7 +6,7 @@ include::links.asciidoc[] include::landing-page.asciidoc[] -include::release-notes/highlights.asciidoc[] +// overview / install include::intro.asciidoc[] @@ -14,33 +14,37 @@ include::quickstart/index.asciidoc[] include::setup.asciidoc[] -include::upgrade.asciidoc[] +// search solution -include::index-modules.asciidoc[] +include::search/search-your-data/search-your-data.asciidoc[] -include::mapping.asciidoc[] +include::reranking/index.asciidoc[] -include::analysis.asciidoc[] +// data management + +include::index-modules.asciidoc[] include::indices/index-templates.asciidoc[] -include::data-streams/data-streams.asciidoc[] +include::alias.asciidoc[] -include::ingest.asciidoc[] +include::mapping.asciidoc[] -include::alias.asciidoc[] +include::analysis.asciidoc[] -include::search/search-your-data/search-your-data.asciidoc[] +include::ingest.asciidoc[] -include::reranking/index.asciidoc[] +include::connector/docs/index.asciidoc[] -include::query-dsl.asciidoc[] +include::data-streams/data-streams.asciidoc[] -include::aggregations.asciidoc[] +include::data-management.asciidoc[] -include::geospatial-analysis.asciidoc[] +include::data-rollup-transform.asciidoc[] -include::connector/docs/index.asciidoc[] +// analysis tools + +include::query-dsl.asciidoc[] include::eql/eql.asciidoc[] @@ -50,34 +54,48 @@ include::sql/index.asciidoc[] include::scripting.asciidoc[] -include::data-management.asciidoc[] +include::aggregations.asciidoc[] -include::autoscaling/index.asciidoc[] +include::geospatial-analysis.asciidoc[] + +include::watcher/index.asciidoc[] + +// cluster management 
include::monitoring/index.asciidoc[] -include::data-rollup-transform.asciidoc[] +include::security/index.asciidoc[] + +// production tasks include::high-availability.asciidoc[] +include::how-to.asciidoc[] + +include::autoscaling/index.asciidoc[] + include::snapshot-restore/index.asciidoc[] -include::security/index.asciidoc[] +// reference -include::watcher/index.asciidoc[] +include::rest-api/index.asciidoc[] include::commands/index.asciidoc[] -include::how-to.asciidoc[] - include::troubleshooting.asciidoc[] -include::rest-api/index.asciidoc[] +// upgrades + +include::upgrade.asciidoc[] include::migration/index.asciidoc[] +include::release-notes/highlights.asciidoc[] + include::release-notes.asciidoc[] include::dependencies-versions.asciidoc[] +// etc + include::redirects.asciidoc[] \ No newline at end of file diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc index 81d46b5773877..c3f6fb43f2ffd 100644 --- a/docs/reference/release-notes/highlights.asciidoc +++ b/docs/reference/release-notes/highlights.asciidoc @@ -1,5 +1,6 @@ +[chapter] [[release-highlights]] -== What's new in {minor-version} += What's new in {minor-version} coming::[{minor-version}] @@ -37,7 +38,7 @@ endif::[] [discrete] [[esql_inlinestats]] -=== ESQL: INLINESTATS +== ESQL: INLINESTATS This adds the `INLINESTATS` command to ESQL which performs a STATS and then enriches the results into the output stream. So, this query: @@ -62,7 +63,7 @@ Produces output like: [discrete] [[always_allow_rebalancing_by_default]] -=== Always allow rebalancing by default +== Always allow rebalancing by default In earlier versions of {es} the `cluster.routing.allocation.allow_rebalance` setting defaults to `indices_all_active` which blocks all rebalancing moves while the cluster is in `yellow` or `red` health. This was appropriate for the legacy allocator which might do too many rebalancing moves otherwise. 
Today's allocator has @@ -74,7 +75,7 @@ version 8.16 `allow_rebalance` setting defaults to `always` unless the legacy al [discrete] [[add_global_retention_in_data_stream_lifecycle]] -=== Add global retention in data stream lifecycle +== Add global retention in data stream lifecycle Data stream lifecycle now supports configuring retention on a cluster level, namely global retention. Global retention \nallows us to configure two different retentions: @@ -88,7 +89,7 @@ data stream lifecycle and it allows any data stream \ndata to be deleted after t [discrete] [[enable_zstandard_compression_for_indices_with_index_codec_set_to_best_compression]] -=== Enable ZStandard compression for indices with index.codec set to best_compression +== Enable ZStandard compression for indices with index.codec set to best_compression Before DEFLATE compression was used to compress stored fields in indices with index.codec index setting set to best_compression, with this change ZStandard is used as compression algorithm to stored fields for indices with index.codec index setting set to best_compression. The usage ZStandard results in less storage usage with a From 06a3e1902102a43e4ef9685c22d71c10d4bb280c Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Wed, 23 Oct 2024 10:28:17 -0700 Subject: [PATCH 327/449] Remove LongGCDisruption scheme (#115046) Long GC disruption relies on Thread.resume, which is removed in JDK 23. Tests that use it predate more modern disruption tests. This commit removes gc disruption and the master disruption tests. Note that tests relying on this scheme have already not been running since JDK 20 first deprecated Thread.resume. 
--- .../discovery/MasterDisruptionIT.java | 44 --- .../discovery/StableMasterDisruptionIT.java | 272 -------------- .../IntermittentLongGCDisruption.java | 109 ------ .../test/disruption/LongGCDisruption.java | 350 ------------------ .../disruption/LongGCDisruptionTests.java | 257 ------------- 5 files changed, 1032 deletions(-) delete mode 100644 test/framework/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java delete mode 100644 test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java delete mode 100644 test/framework/src/test/java/org/elasticsearch/test/disruption/LongGCDisruptionTests.java diff --git a/server/src/internalClusterTest/java/org/elasticsearch/discovery/MasterDisruptionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/discovery/MasterDisruptionIT.java index 214fc47222f3a..bf81200509691 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/discovery/MasterDisruptionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/discovery/MasterDisruptionIT.java @@ -21,21 +21,15 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.disruption.BlockMasterServiceOnMaster; -import org.elasticsearch.test.disruption.IntermittentLongGCDisruption; import org.elasticsearch.test.disruption.NetworkDisruption; import org.elasticsearch.test.disruption.NetworkDisruption.TwoPartitions; import org.elasticsearch.test.disruption.ServiceDisruptionScheme; -import org.elasticsearch.test.disruption.SingleNodeDisruption; import org.elasticsearch.xcontent.XContentType; -import java.util.ArrayList; -import java.util.HashSet; import java.util.List; -import java.util.Set; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.not; /** * Tests relating to the loss of the master. 
@@ -43,44 +37,6 @@ @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) public class MasterDisruptionIT extends AbstractDisruptionTestCase { - /** - * Test that cluster recovers from a long GC on master that causes other nodes to elect a new one - */ - public void testMasterNodeGCs() throws Exception { - List nodes = startCluster(3); - // NOTE: this assume must happen after starting the cluster, so that cleanup will have something to cleanup. - assumeFalse("jdk20 removed thread suspend/resume", Runtime.version().feature() >= 20); - - String oldMasterNode = internalCluster().getMasterName(); - // a very long GC, but it's OK as we remove the disruption when it has had an effect - SingleNodeDisruption masterNodeDisruption = new IntermittentLongGCDisruption(random(), oldMasterNode, 100, 200, 30000, 60000); - internalCluster().setDisruptionScheme(masterNodeDisruption); - masterNodeDisruption.startDisrupting(); - - Set oldNonMasterNodesSet = new HashSet<>(nodes); - oldNonMasterNodesSet.remove(oldMasterNode); - - List oldNonMasterNodes = new ArrayList<>(oldNonMasterNodesSet); - - logger.info("waiting for nodes to de-elect master [{}]", oldMasterNode); - for (String node : oldNonMasterNodesSet) { - assertDifferentMaster(node, oldMasterNode); - } - - logger.info("waiting for nodes to elect a new master"); - ensureStableCluster(2, oldNonMasterNodes.get(0)); - - // restore GC - masterNodeDisruption.stopDisrupting(); - final TimeValue waitTime = new TimeValue(DISRUPTION_HEALING_OVERHEAD.millis() + masterNodeDisruption.expectedTimeToHeal().millis()); - ensureStableCluster(3, waitTime, false, oldNonMasterNodes.get(0)); - - // make sure all nodes agree on master - String newMaster = internalCluster().getMasterName(); - assertThat(newMaster, not(equalTo(oldMasterNode))); - assertMaster(newMaster, nodes); - } - /** * This test isolates the master from rest of the cluster, waits for a new master to be elected, restores the partition * and verifies that 
all node agree on the new cluster state diff --git a/server/src/internalClusterTest/java/org/elasticsearch/discovery/StableMasterDisruptionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/discovery/StableMasterDisruptionIT.java index 32c602791cca4..48db23635220c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/discovery/StableMasterDisruptionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/discovery/StableMasterDisruptionIT.java @@ -14,33 +14,26 @@ import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.coordination.CoordinationDiagnosticsService; import org.elasticsearch.cluster.coordination.Coordinator; import org.elasticsearch.cluster.coordination.FollowersChecker; import org.elasticsearch.cluster.coordination.LeaderChecker; import org.elasticsearch.cluster.coordination.MasterHistoryService; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.Tuple; import org.elasticsearch.health.GetHealthAction; import org.elasticsearch.health.HealthStatus; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.disruption.LongGCDisruption; import org.elasticsearch.test.disruption.NetworkDisruption; import 
org.elasticsearch.test.disruption.NetworkDisruption.NetworkLinkDisruptionType; import org.elasticsearch.test.disruption.NetworkDisruption.TwoPartitions; -import org.elasticsearch.test.disruption.SingleNodeDisruption; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; @@ -50,17 +43,12 @@ import org.junit.Before; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; -import java.util.HashMap; import java.util.HashSet; import java.util.List; -import java.util.Map; -import java.util.Objects; import java.util.Set; -import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import static java.util.Collections.singleton; @@ -227,266 +215,6 @@ private void testFollowerCheckerAfterMasterReelection(NetworkLinkDisruptionType ensureStableCluster(3); } - /** - * Tests that emulates a frozen elected master node that unfreezes and pushes its cluster state to other nodes that already are - * following another elected master node. These nodes should reject this cluster state and prevent them from following the stale master. - */ - public void testStaleMasterNotHijackingMajority() throws Exception { - assumeFalse("jdk20 removed thread suspend/resume", Runtime.version().feature() >= 20); - final List nodes = internalCluster().startNodes( - 3, - Settings.builder() - .put(LeaderChecker.LEADER_CHECK_TIMEOUT_SETTING.getKey(), "1s") - .put(Coordinator.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") - .build() - ); - ensureStableCluster(3); - - // Save the current master node as old master node, because that node will get frozen - final String oldMasterNode = internalCluster().getMasterName(); - - // Simulating a painful gc by suspending all threads for a long time on the current elected master node. 
- SingleNodeDisruption masterNodeDisruption = new LongGCDisruption(random(), oldMasterNode); - - // Save the majority side - final List majoritySide = new ArrayList<>(nodes); - majoritySide.remove(oldMasterNode); - - // Keeps track of the previous and current master when a master node transition took place on each node on the majority side: - final Map>> masters = Collections.synchronizedMap(new HashMap<>()); - for (final String node : majoritySide) { - masters.put(node, new ArrayList<>()); - internalCluster().getInstance(ClusterService.class, node).addListener(event -> { - DiscoveryNode previousMaster = event.previousState().nodes().getMasterNode(); - DiscoveryNode currentMaster = event.state().nodes().getMasterNode(); - if (Objects.equals(previousMaster, currentMaster) == false) { - logger.info( - "--> node {} received new cluster state: {} \n and had previous cluster state: {}", - node, - event.state(), - event.previousState() - ); - String previousMasterNodeName = previousMaster != null ? previousMaster.getName() : null; - String currentMasterNodeName = currentMaster != null ? 
currentMaster.getName() : null; - masters.get(node).add(new Tuple<>(previousMasterNodeName, currentMasterNodeName)); - } - }); - } - - final CountDownLatch oldMasterNodeSteppedDown = new CountDownLatch(1); - internalCluster().getInstance(ClusterService.class, oldMasterNode).addListener(event -> { - if (event.state().nodes().getMasterNodeId() == null) { - oldMasterNodeSteppedDown.countDown(); - } - }); - - internalCluster().setDisruptionScheme(masterNodeDisruption); - logger.info("--> freezing node [{}]", oldMasterNode); - masterNodeDisruption.startDisrupting(); - - // Wait for majority side to elect a new master - assertBusy(() -> { - for (final Map.Entry>> entry : masters.entrySet()) { - final List> transitions = entry.getValue(); - assertTrue(entry.getKey() + ": " + transitions, transitions.stream().anyMatch(transition -> transition.v2() != null)); - } - }); - - // The old master node is frozen, but here we submit a cluster state update task that doesn't get executed, but will be queued and - // once the old master node un-freezes it gets executed. The old master node will send this update + the cluster state where it is - // flagged as master to the other nodes that follow the new master. These nodes should ignore this update. 
- internalCluster().getInstance(ClusterService.class, oldMasterNode) - .submitUnbatchedStateUpdateTask("sneaky-update", new ClusterStateUpdateTask(Priority.IMMEDIATE) { - @Override - public ClusterState execute(ClusterState currentState) { - return ClusterState.builder(currentState).build(); - } - - @Override - public void onFailure(Exception e) { - logger.warn("failure [sneaky-update]", e); - } - }); - - // Save the new elected master node - final String newMasterNode = internalCluster().getMasterName(majoritySide.get(0)); - logger.info("--> new detected master node [{}]", newMasterNode); - - // Stop disruption - logger.info("--> unfreezing node [{}]", oldMasterNode); - masterNodeDisruption.stopDisrupting(); - - oldMasterNodeSteppedDown.await(30, TimeUnit.SECONDS); - logger.info("--> [{}] stepped down as master", oldMasterNode); - ensureStableCluster(3); - - assertThat(masters.size(), equalTo(2)); - for (Map.Entry>> entry : masters.entrySet()) { - String nodeName = entry.getKey(); - List> transitions = entry.getValue(); - assertTrue( - "[" + nodeName + "] should not apply state from old master [" + oldMasterNode + "] but it did: " + transitions, - transitions.stream().noneMatch(t -> oldMasterNode.equals(t.v2())) - ); - } - assertGreenMasterStability(internalCluster().client()); - } - - /** - * This helper method creates a 3-node cluster where all nodes are master-eligible, and then simulates a long GC on the master node 5 - * times (forcing another node to be elected master 5 times). It then asserts that the master stability health indicator status is - * YELLOW, and that expectedMasterStabilitySymptomSubstring is contained in the symptom. 
- * @param expectedMasterStabilitySymptomSubstring A string to expect in the master stability health indicator symptom - * @throws Exception - */ - public void testRepeatedMasterChanges(String expectedMasterStabilitySymptomSubstring) throws Exception { - assumeFalse("jdk20 removed thread suspend/resume", Runtime.version().feature() >= 20); - final List nodes = internalCluster().startNodes( - 3, - Settings.builder() - .put(LeaderChecker.LEADER_CHECK_TIMEOUT_SETTING.getKey(), "1s") - .put(Coordinator.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") - .put(CoordinationDiagnosticsService.IDENTITY_CHANGES_THRESHOLD_SETTING.getKey(), 1) - .put(CoordinationDiagnosticsService.NO_MASTER_TRANSITIONS_THRESHOLD_SETTING.getKey(), 100) - .build() - ); - ensureStableCluster(3); - String firstMaster = internalCluster().getMasterName(); - // Force the master to change 2 times: - for (int i = 0; i < 2; i++) { - // Save the current master node as old master node, because that node will get frozen - final String oldMasterNode = internalCluster().getMasterName(); - - // Simulating a painful gc by suspending all threads for a long time on the current elected master node. 
- SingleNodeDisruption masterNodeDisruption = new LongGCDisruption(random(), oldMasterNode); - - // Save the majority side - final List majoritySide = new ArrayList<>(nodes); - majoritySide.remove(oldMasterNode); - - // Keeps track of the previous and current master when a master node transition took place on each node on the majority side: - final Map>> masters = Collections.synchronizedMap(new HashMap<>()); - for (final String node : majoritySide) { - masters.put(node, new ArrayList<>()); - internalCluster().getInstance(ClusterService.class, node).addListener(event -> { - DiscoveryNode previousMaster = event.previousState().nodes().getMasterNode(); - DiscoveryNode currentMaster = event.state().nodes().getMasterNode(); - if (Objects.equals(previousMaster, currentMaster) == false) { - logger.info( - "--> node {} received new cluster state: {} \n and had previous cluster state: {}", - node, - event.state(), - event.previousState() - ); - String previousMasterNodeName = previousMaster != null ? previousMaster.getName() : null; - String currentMasterNodeName = currentMaster != null ? 
currentMaster.getName() : null; - masters.get(node).add(new Tuple<>(previousMasterNodeName, currentMasterNodeName)); - } - }); - } - - final CountDownLatch oldMasterNodeSteppedDown = new CountDownLatch(1); - internalCluster().getInstance(ClusterService.class, oldMasterNode).addListener(event -> { - if (event.state().nodes().getMasterNodeId() == null) { - oldMasterNodeSteppedDown.countDown(); - } - }); - internalCluster().clearDisruptionScheme(); - internalCluster().setDisruptionScheme(masterNodeDisruption); - logger.info("--> freezing node [{}]", oldMasterNode); - masterNodeDisruption.startDisrupting(); - - // Wait for majority side to elect a new master - assertBusy(() -> { - for (final Map.Entry>> entry : masters.entrySet()) { - final List> transitions = entry.getValue(); - assertTrue(entry.getKey() + ": " + transitions, transitions.stream().anyMatch(transition -> transition.v2() != null)); - } - }); - - // Save the new elected master node - final String newMasterNode = internalCluster().getMasterName(majoritySide.get(0)); - logger.info("--> new detected master node [{}]", newMasterNode); - - // Stop disruption - logger.info("--> unfreezing node [{}]", oldMasterNode); - masterNodeDisruption.stopDisrupting(); - - oldMasterNodeSteppedDown.await(30, TimeUnit.SECONDS); - logger.info("--> [{}] stepped down as master", oldMasterNode); - ensureStableCluster(3); - - assertThat(masters.size(), equalTo(2)); - } - List nodeNamesExceptFirstMaster = Arrays.stream(internalCluster().getNodeNames()) - .filter(name -> name.equals(firstMaster) == false) - .toList(); - /* - * It is possible that the first node that became master got re-elected repeatedly. And since it was in a simulated GC when the - * other node(s) were master, it only saw itself as master. So we want to check with another node. 
- */ - Client client = internalCluster().client(randomFrom(nodeNamesExceptFirstMaster)); - assertMasterStability(client, HealthStatus.YELLOW, containsString(expectedMasterStabilitySymptomSubstring)); - } - - public void testRepeatedNullMasterRecognizedAsGreenIfMasterDoesNotKnowItIsUnstable() throws Exception { - assumeFalse("jdk20 removed thread suspend/resume", Runtime.version().feature() >= 20); - /* - * In this test we have a single master-eligible node. We pause it repeatedly (simulating a long GC pause for example) so that - * other nodes decide it is no longer the master. However since there is no other master-eligible node, another node is never - * elected master. And the master node never recognizes that it had a problem. So when we run the master stability check on one - * of the data nodes, it will see that there is a problem (the master has gone null repeatedly), but when it checks with the - * master, the master says everything is fine. So we expect a GREEN status. - */ - final List masterNodes = internalCluster().startMasterOnlyNodes( - 1, - Settings.builder() - .put(LeaderChecker.LEADER_CHECK_TIMEOUT_SETTING.getKey(), "1s") - .put(Coordinator.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") - .put(CoordinationDiagnosticsService.NO_MASTER_TRANSITIONS_THRESHOLD_SETTING.getKey(), 1) - .build() - ); - int nullTransitionsThreshold = 1; - final List dataNodes = internalCluster().startDataOnlyNodes( - 2, - Settings.builder() - .put(LeaderChecker.LEADER_CHECK_TIMEOUT_SETTING.getKey(), "1s") - .put(Coordinator.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") - .put(CoordinationDiagnosticsService.NO_MASTER_TRANSITIONS_THRESHOLD_SETTING.getKey(), nullTransitionsThreshold) - .put(CoordinationDiagnosticsService.NODE_HAS_MASTER_LOOKUP_TIMEFRAME_SETTING.getKey(), new TimeValue(60, TimeUnit.SECONDS)) - .build() - ); - ensureStableCluster(3); - for (int i = 0; i < nullTransitionsThreshold + 1; i++) { - final String masterNode = masterNodes.get(0); - - // Simulating a painful gc by 
suspending all threads for a long time on the current elected master node. - SingleNodeDisruption masterNodeDisruption = new LongGCDisruption(random(), masterNode); - - final CountDownLatch dataNodeMasterSteppedDown = new CountDownLatch(2); - internalCluster().getInstance(ClusterService.class, dataNodes.get(0)).addListener(event -> { - if (event.state().nodes().getMasterNodeId() == null) { - dataNodeMasterSteppedDown.countDown(); - } - }); - internalCluster().getInstance(ClusterService.class, dataNodes.get(1)).addListener(event -> { - if (event.state().nodes().getMasterNodeId() == null) { - dataNodeMasterSteppedDown.countDown(); - } - }); - internalCluster().clearDisruptionScheme(); - internalCluster().setDisruptionScheme(masterNodeDisruption); - logger.info("--> freezing node [{}]", masterNode); - masterNodeDisruption.startDisrupting(); - dataNodeMasterSteppedDown.await(30, TimeUnit.SECONDS); - // Stop disruption - logger.info("--> unfreezing node [{}]", masterNode); - masterNodeDisruption.stopDisrupting(); - ensureStableCluster(3, TimeValue.timeValueSeconds(30), false, randomFrom(dataNodes)); - } - assertGreenMasterStability(internalCluster().client(randomFrom(dataNodes))); - } - public void testNoMasterEligibleNodes() throws Exception { /* * In this test we have a single master-eligible node. We then stop the master. We set the master lookup threshold very low on the diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java deleted file mode 100644 index 9e2f8c931c84a..0000000000000 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ -package org.elasticsearch.test.disruption; - -import org.elasticsearch.core.TimeValue; - -import java.util.HashSet; -import java.util.Random; -import java.util.Set; -import java.util.concurrent.atomic.AtomicInteger; - -/** - * Simulates irregular long gc intervals. - */ -public class IntermittentLongGCDisruption extends LongGCDisruption { - - volatile boolean disrupting; - volatile Thread worker; - - final long intervalBetweenDelaysMin; - final long intervalBetweenDelaysMax; - final long delayDurationMin; - final long delayDurationMax; - - public IntermittentLongGCDisruption( - Random random, - String disruptedNode, - long intervalBetweenDelaysMin, - long intervalBetweenDelaysMax, - long delayDurationMin, - long delayDurationMax - ) { - super(random, disruptedNode); - this.intervalBetweenDelaysMin = intervalBetweenDelaysMin; - this.intervalBetweenDelaysMax = intervalBetweenDelaysMax; - this.delayDurationMin = delayDurationMin; - this.delayDurationMax = delayDurationMax; - } - - static final AtomicInteger thread_ids = new AtomicInteger(); - - @Override - public void startDisrupting() { - disrupting = true; - worker = new Thread(new BackgroundWorker(), "long_gc_simulation_" + thread_ids.incrementAndGet()); - worker.setDaemon(true); - worker.start(); - } - - @Override - public void stopDisrupting() { - if (worker == null) { - return; - } - logger.info("stopping long GCs on [{}]", disruptedNode); - disrupting = false; - worker.interrupt(); - try { - worker.join(2 * (intervalBetweenDelaysMax + delayDurationMax)); - } catch (InterruptedException e) { - logger.info("background thread failed to stop"); - } - worker = null; - } - - 
private void simulateLongGC(final TimeValue duration) throws InterruptedException { - logger.info("node [{}] goes into GC for for [{}]", disruptedNode, duration); - final Set nodeThreads = new HashSet<>(); - try { - while (suspendThreads(nodeThreads)) - ; - if (nodeThreads.isEmpty() == false) { - Thread.sleep(duration.millis()); - } - } finally { - logger.info("node [{}] resumes from GC", disruptedNode); - resumeThreads(nodeThreads); - } - } - - class BackgroundWorker implements Runnable { - - @Override - public void run() { - while (disrupting) { - try { - TimeValue duration = new TimeValue(delayDurationMin + random.nextInt((int) (delayDurationMax - delayDurationMin))); - simulateLongGC(duration); - - duration = new TimeValue( - intervalBetweenDelaysMin + random.nextInt((int) (intervalBetweenDelaysMax - intervalBetweenDelaysMin)) - ); - if (disrupting) { - Thread.sleep(duration.millis()); - } - } catch (InterruptedException e) {} catch (Exception e) { - logger.error("error in background worker", e); - } - } - } - } - -} diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java deleted file mode 100644 index dce9e2600d0a6..0000000000000 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java +++ /dev/null @@ -1,350 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.test.disruption; - -import org.elasticsearch.common.util.concurrent.AbstractRunnable; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.test.InternalTestCluster; - -import java.lang.management.ManagementFactory; -import java.lang.management.ThreadInfo; -import java.lang.management.ThreadMXBean; -import java.util.Arrays; -import java.util.Random; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicReference; -import java.util.regex.Pattern; -import java.util.stream.Collectors; - -/** - * Suspends all threads on the specified node in order to simulate a long gc. - */ -public class LongGCDisruption extends SingleNodeDisruption { - - private static final Pattern[] unsafeClasses = new Pattern[] { - // logging has shared JVM locks; we may suspend a thread and block other nodes from doing their thing - Pattern.compile("logging\\.log4j"), - // security manager is shared across all nodes and it uses synchronized maps internally - Pattern.compile("java\\.lang\\.SecurityManager"), - // SecureRandom instance from SecureRandomHolder class is shared by all nodes - Pattern.compile("java\\.security\\.SecureRandom"), - // Lucene's WindowsFS is shared across nodes and contains some coarse synchronization - Pattern.compile("org\\.apache\\.lucene\\.tests\\.mockfile\\.WindowsFS") }; - - private static final ThreadMXBean threadBean = ManagementFactory.getThreadMXBean(); - - protected final String disruptedNode; - private Set suspendedThreads; - private Thread blockDetectionThread; - - private final AtomicBoolean sawSlowSuspendBug = new AtomicBoolean(false); - - public LongGCDisruption(Random random, String disruptedNode) { - super(random); - this.disruptedNode = disruptedNode; - } - - /** - * 
Checks if during disruption we ran into a known JVM issue that makes {@link Thread#suspend()} calls block for multiple seconds - * was observed. - * @see JDK-8218446 - * @return true if during thread suspending a call to {@link Thread#suspend()} took more than 3s - */ - public boolean sawSlowSuspendBug() { - return sawSlowSuspendBug.get(); - } - - @Override - public synchronized void startDisrupting() { - if (suspendedThreads == null) { - boolean success = false; - try { - suspendedThreads = ConcurrentHashMap.newKeySet(); - - final String currentThreadName = Thread.currentThread().getName(); - assert isDisruptedNodeThread(currentThreadName) == false - : "current thread match pattern. thread name: " + currentThreadName + ", node: " + disruptedNode; - // we spawn a background thread to protect against deadlock which can happen - // if there are shared resources between caller thread and suspended threads - // see unsafeClasses to how to avoid that - final AtomicReference suspendingError = new AtomicReference<>(); - final Thread suspendingThread = new Thread(new AbstractRunnable() { - @Override - public void onFailure(Exception e) { - suspendingError.set(e); - } - - @Override - protected void doRun() throws Exception { - // keep trying to suspend threads, until no new threads are discovered. - while (suspendThreads(suspendedThreads)) { - if (Thread.interrupted()) { - return; - } - } - } - }); - suspendingThread.setName(currentThreadName + "[LongGCDisruption][threadSuspender]"); - suspendingThread.start(); - try { - suspendingThread.join(getSuspendingTimeoutInMillis()); - } catch (InterruptedException e) { - suspendingThread.interrupt(); // best effort to signal suspending - throw new RuntimeException(e); - } - if (suspendingError.get() != null) { - throw new RuntimeException("unknown error while suspending threads", suspendingError.get()); - } - if (suspendingThread.isAlive()) { - logger.warn( - """ - failed to suspend node [{}]'s threads within [{}] millis. 
Suspending thread stack trace: - {} - Threads that weren't suspended: - {}""", - disruptedNode, - getSuspendingTimeoutInMillis(), - stackTrace(suspendingThread.getStackTrace()), - suspendedThreads.stream() - .map(t -> t.getName() + "\n----\n" + stackTrace(t.getStackTrace())) - .collect(Collectors.joining("\n")) - ); - suspendingThread.interrupt(); // best effort; - try { - /* - * We need to join on the suspending thread in case it has suspended a thread that is in a critical section and - * needs to be resumed. - */ - suspendingThread.join(); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - throw new RuntimeException("suspending node threads took too long"); - } - // block detection checks if other threads are blocked waiting on an object that is held by one - // of the threads that was suspended - if (isBlockDetectionSupported()) { - blockDetectionThread = new Thread(new AbstractRunnable() { - @Override - public void onFailure(Exception e) { - if (e instanceof InterruptedException == false) { - throw new AssertionError("unexpected exception in blockDetectionThread", e); - } - } - - @Override - protected void doRun() throws Exception { - while (Thread.currentThread().isInterrupted() == false) { - ThreadInfo[] threadInfos = threadBean.dumpAllThreads(true, true); - for (ThreadInfo threadInfo : threadInfos) { - if (isDisruptedNodeThread(threadInfo.getThreadName()) == false - && threadInfo.getLockOwnerName() != null - && isDisruptedNodeThread(threadInfo.getLockOwnerName())) { - - // find ThreadInfo object of the blocking thread (if available) - ThreadInfo blockingThreadInfo = null; - for (ThreadInfo otherThreadInfo : threadInfos) { - if (otherThreadInfo.getThreadId() == threadInfo.getLockOwnerId()) { - blockingThreadInfo = otherThreadInfo; - break; - } - } - onBlockDetected(threadInfo, blockingThreadInfo); - } - } - Thread.sleep(getBlockDetectionIntervalInMillis()); - } - } - }); - blockDetectionThread.setName(currentThreadName + 
"[LongGCDisruption][blockDetection]"); - blockDetectionThread.start(); - } - success = true; - } finally { - if (success == false) { - stopBlockDetection(); - // resume threads if failed - resumeThreads(suspendedThreads); - suspendedThreads = null; - } - } - } else { - throw new IllegalStateException("can't disrupt twice, call stopDisrupting() first"); - } - } - - public boolean isDisruptedNodeThread(String threadName) { - return threadName.contains("[" + disruptedNode + "]"); - } - - private static String stackTrace(StackTraceElement[] stackTraceElements) { - return Arrays.stream(stackTraceElements).map(Object::toString).collect(Collectors.joining("\n")); - } - - @Override - public synchronized void stopDisrupting() { - stopBlockDetection(); - if (suspendedThreads != null) { - resumeThreads(suspendedThreads); - suspendedThreads = null; - } - } - - private void stopBlockDetection() { - if (blockDetectionThread != null) { - try { - blockDetectionThread.interrupt(); // best effort - blockDetectionThread.join(getSuspendingTimeoutInMillis()); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - blockDetectionThread = null; - } - } - - @Override - public void removeAndEnsureHealthy(InternalTestCluster cluster) { - removeFromCluster(cluster); - ensureNodeCount(cluster); - } - - @Override - public TimeValue expectedTimeToHeal() { - return TimeValue.timeValueMillis(0); - } - - /** - * resolves all threads belonging to given node and suspends them if their current stack trace - * is "safe". Threads are added to nodeThreads if suspended. - * - * returns true if some live threads were found. The caller is expected to call this method - * until no more "live" are found. 
- */ - @SuppressWarnings("deprecation") // suspends/resumes threads intentionally - @SuppressForbidden(reason = "suspends/resumes threads intentionally") - protected boolean suspendThreads(Set nodeThreads) { - Thread[] allThreads = null; - while (allThreads == null) { - allThreads = new Thread[Thread.activeCount()]; - if (Thread.enumerate(allThreads) > allThreads.length) { - // we didn't make enough space, retry - allThreads = null; - } - } - boolean liveThreadsFound = false; - for (Thread thread : allThreads) { - if (thread == null) { - continue; - } - String threadName = thread.getName(); - if (isDisruptedNodeThread(threadName)) { - if (thread.isAlive() && nodeThreads.add(thread)) { - liveThreadsFound = true; - logger.trace("suspending thread [{}]", threadName); - // we assume it is not safe to suspend the thread - boolean safe = false; - try { - /* - * At the bottom of this try-block we will know whether or not it is safe to suspend the thread; we start by - * assuming that it is safe. - */ - boolean definitelySafe = true; - final long startTime = System.nanoTime(); - thread.suspend(); - if (System.nanoTime() - startTime > TimeUnit.SECONDS.toNanos(3L)) { - sawSlowSuspendBug.set(true); - } - // double check the thread is not in a shared resource like logging; if so, let it go and come back - safe: for (StackTraceElement stackElement : thread.getStackTrace()) { - String className = stackElement.getClassName(); - for (Pattern unsafePattern : getUnsafeClasses()) { - if (unsafePattern.matcher(className).find()) { - // it is definitely not safe to suspend the thread - definitelySafe = false; - break safe; - } - } - } - safe = definitelySafe; - } finally { - if (safe == false) { - /* - * Do not log before resuming as we might be interrupted while logging in which case we will throw an - * interrupted exception and never resume the suspended thread that is in a critical section. 
Also, logging - * before resuming makes for confusing log messages if we never hit the resume. - */ - thread.resume(); - logger.trace("resumed thread [{}] as it is in a critical section", threadName); - nodeThreads.remove(thread); - } - } - } - } - } - return liveThreadsFound; - } - - // for testing - protected Pattern[] getUnsafeClasses() { - return unsafeClasses; - } - - // for testing - protected long getSuspendingTimeoutInMillis() { - return TimeValue.timeValueSeconds(30).getMillis(); - } - - public boolean isBlockDetectionSupported() { - return threadBean.isObjectMonitorUsageSupported() && threadBean.isSynchronizerUsageSupported(); - } - - // for testing - protected long getBlockDetectionIntervalInMillis() { - return 3000L; - } - - // for testing - protected void onBlockDetected(ThreadInfo blockedThread, @Nullable ThreadInfo blockingThread) { - String blockedThreadStackTrace = stackTrace(blockedThread.getStackTrace()); - String blockingThreadStackTrace = blockingThread != null ? stackTrace(blockingThread.getStackTrace()) : "not available"; - throw new AssertionError( - "Thread [" - + blockedThread.getThreadName() - + "] is blocked waiting on the resource [" - + blockedThread.getLockInfo() - + "] held by the suspended thread [" - + blockedThread.getLockOwnerName() - + "] of the disrupted node [" - + disruptedNode - + "].\n" - + "Please add this occurrence to the unsafeClasses list in [" - + LongGCDisruption.class.getName() - + "].\n" - + "Stack trace of blocked thread: " - + blockedThreadStackTrace - + "\n" - + "Stack trace of blocking thread: " - + blockingThreadStackTrace - ); - } - - @SuppressWarnings("deprecation") // suspends/resumes threads intentionally - @SuppressForbidden(reason = "suspends/resumes threads intentionally") - protected void resumeThreads(Set threads) { - for (Thread thread : threads) { - thread.resume(); - } - } -} diff --git a/test/framework/src/test/java/org/elasticsearch/test/disruption/LongGCDisruptionTests.java 
b/test/framework/src/test/java/org/elasticsearch/test/disruption/LongGCDisruptionTests.java deleted file mode 100644 index 72ecba8d502f1..0000000000000 --- a/test/framework/src/test/java/org/elasticsearch/test/disruption/LongGCDisruptionTests.java +++ /dev/null @@ -1,257 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ -package org.elasticsearch.test.disruption; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.test.ESTestCase; -import org.junit.BeforeClass; - -import java.lang.management.ThreadInfo; -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.atomic.AtomicReference; -import java.util.concurrent.locks.ReentrantLock; -import java.util.regex.Pattern; - -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; - -public class LongGCDisruptionTests extends ESTestCase { - - static class LockedExecutor { - ReentrantLock lock = new ReentrantLock(); - - public void executeLocked(Runnable r) { - lock.lock(); - try { - r.run(); - } finally { - lock.unlock(); - } - } - } - - @BeforeClass - public static void ignoreJdk20Plus() { - assumeFalse("jdk20 removed thread suspend/resume", Runtime.version().feature() >= 20); - } - - public void testBlockingTimeout() throws Exception { - final String nodeName = "test_node"; - 
LongGCDisruption disruption = new LongGCDisruption(random(), nodeName) { - @Override - protected Pattern[] getUnsafeClasses() { - return new Pattern[] { Pattern.compile(LockedExecutor.class.getSimpleName()) }; - } - - @Override - protected long getSuspendingTimeoutInMillis() { - return 100; - } - }; - final AtomicBoolean stop = new AtomicBoolean(); - final CountDownLatch underLock = new CountDownLatch(1); - final CountDownLatch pauseUnderLock = new CountDownLatch(1); - final LockedExecutor lockedExecutor = new LockedExecutor(); - final AtomicLong ops = new AtomicLong(); - final Thread[] threads = new Thread[10]; - try { - for (int i = 0; i < 10; i++) { - // at least one locked and one none lock thread - final boolean lockedExec = (i < 9 && randomBoolean()) || i == 0; - threads[i] = new Thread(() -> { - while (stop.get() == false) { - if (lockedExec) { - lockedExecutor.executeLocked(() -> { - try { - underLock.countDown(); - ops.incrementAndGet(); - pauseUnderLock.await(); - } catch (InterruptedException e) { - - } - }); - } else { - ops.incrementAndGet(); - } - } - }); - threads[i].setName("[" + nodeName + "][" + i + "]"); - threads[i].start(); - } - // make sure some threads are under lock - underLock.await(); - RuntimeException e = expectThrows(RuntimeException.class, disruption::startDisrupting); - assertThat(e.getMessage(), containsString("suspending node threads took too long")); - } finally { - stop.set(true); - pauseUnderLock.countDown(); - for (final Thread thread : threads) { - thread.join(); - } - } - } - - /** - * Checks that a GC disruption never blocks threads while they are doing something "unsafe" - * but does keep retrying until all threads can be safely paused - */ - public void testNotBlockingUnsafeStackTraces() throws Exception { - final String nodeName = "test_node"; - LongGCDisruption disruption = new LongGCDisruption(random(), nodeName) { - @Override - protected Pattern[] getUnsafeClasses() { - return new Pattern[] { 
Pattern.compile(LockedExecutor.class.getSimpleName()) }; - } - }; - final AtomicBoolean stop = new AtomicBoolean(); - final LockedExecutor lockedExecutor = new LockedExecutor(); - final AtomicLong ops = new AtomicLong(); - final Thread[] threads = new Thread[5]; - final Runnable yieldAndIncrement = () -> { - Thread.yield(); // give some chance to catch this stack trace - ops.incrementAndGet(); - }; - try { - for (int i = 0; i < threads.length; i++) { - threads[i] = new Thread(() -> { - for (int iter = 0; stop.get() == false; iter++) { - if (iter % 2 == 0) { - lockedExecutor.executeLocked(yieldAndIncrement); - } else { - yieldAndIncrement.run(); - } - } - }); - threads[i].setName("[" + nodeName + "][" + i + "]"); - threads[i].start(); - } - // make sure some threads are under lock - try { - disruption.startDisrupting(); - } catch (RuntimeException e) { - if (e.getMessage().contains("suspending node threads took too long") && disruption.sawSlowSuspendBug()) { - return; - } - throw new AssertionError(e); - } - long first = ops.get(); - assertThat(lockedExecutor.lock.isLocked(), equalTo(false)); // no threads should own the lock - Thread.sleep(100); - assertThat(ops.get(), equalTo(first)); - disruption.stopDisrupting(); - assertBusy(() -> assertThat(ops.get(), greaterThan(first))); - } finally { - disruption.stopDisrupting(); - stop.set(true); - for (final Thread thread : threads) { - thread.join(); - } - } - } - - public void testBlockDetection() throws Exception { - final String disruptedNodeName = "disrupted_node"; - final String blockedNodeName = "blocked_node"; - CountDownLatch waitForBlockDetectionResult = new CountDownLatch(1); - AtomicReference blockDetectionResult = new AtomicReference<>(); - LongGCDisruption disruption = new LongGCDisruption(random(), disruptedNodeName) { - @Override - protected Pattern[] getUnsafeClasses() { - return new Pattern[0]; - } - - @Override - protected void onBlockDetected(ThreadInfo blockedThread, @Nullable ThreadInfo 
blockingThread) { - blockDetectionResult.set(blockedThread); - waitForBlockDetectionResult.countDown(); - } - - @Override - protected long getBlockDetectionIntervalInMillis() { - return 10L; - } - }; - if (disruption.isBlockDetectionSupported() == false) { - return; - } - final AtomicBoolean stop = new AtomicBoolean(); - final CountDownLatch underLock = new CountDownLatch(1); - final CountDownLatch pauseUnderLock = new CountDownLatch(1); - final LockedExecutor lockedExecutor = new LockedExecutor(); - final AtomicLong ops = new AtomicLong(); - final List threads = new ArrayList<>(); - try { - for (int i = 0; i < 5; i++) { - // at least one locked and one none lock thread - final boolean lockedExec = (i < 4 && randomBoolean()) || i == 0; - Thread thread = new Thread(() -> { - while (stop.get() == false) { - if (lockedExec) { - lockedExecutor.executeLocked(() -> { - try { - underLock.countDown(); - ops.incrementAndGet(); - pauseUnderLock.await(); - } catch (InterruptedException e) { - - } - }); - } else { - ops.incrementAndGet(); - } - } - }); - - thread.setName("[" + disruptedNodeName + "][" + i + "]"); - threads.add(thread); - thread.start(); - } - - for (int i = 0; i < 5; i++) { - // at least one locked and one none lock thread - final boolean lockedExec = (i < 4 && randomBoolean()) || i == 0; - Thread thread = new Thread(() -> { - while (stop.get() == false) { - if (lockedExec) { - lockedExecutor.executeLocked(() -> { ops.incrementAndGet(); }); - } else { - ops.incrementAndGet(); - } - } - }); - thread.setName("[" + blockedNodeName + "][" + i + "]"); - threads.add(thread); - thread.start(); - } - // make sure some threads of test_node are under lock - underLock.await(); - disruption.startDisrupting(); - assertTrue(waitForBlockDetectionResult.await(30, TimeUnit.SECONDS)); - disruption.stopDisrupting(); - - ThreadInfo threadInfo = blockDetectionResult.get(); - assertNotNull(threadInfo); - assertThat(threadInfo.getThreadName(), containsString("[" + blockedNodeName + 
"]")); - assertThat(threadInfo.getLockOwnerName(), containsString("[" + disruptedNodeName + "]")); - assertThat(threadInfo.getLockInfo().getClassName(), containsString(ReentrantLock.class.getName())); - } finally { - stop.set(true); - pauseUnderLock.countDown(); - for (final Thread thread : threads) { - thread.join(); - } - } - } -} From 7544c88c128e1a5b27e2291c7bcf1a461ecac6bd Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Wed, 23 Oct 2024 10:29:05 -0700 Subject: [PATCH 328/449] Consolidate @Before of rolling upgrade tests (#114677) Multiple @Before methods in junit are run in random order. This commit cosolidates the @Before methods of ParameterizedRollingUpgradeTestCase since the code has interdependencies. closes #114330 --- .../upgrades/ParameterizedRollingUpgradeTestCase.java | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java index e7cff5cca5a92..a20981a119d8f 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java @@ -55,14 +55,13 @@ public static Iterable parameters() { protected abstract ElasticsearchCluster getUpgradeCluster(); @Before - public void extractOldClusterFeatures() { + public void upgradeNode() throws Exception { + // extract old cluster features if (isOldCluster() && oldClusterTestFeatureService == null) { oldClusterTestFeatureService = testFeatureService; } - } - @Before - public void extractOldIndexVersion() throws Exception { + // extract old index version if (oldIndexVersion == null && upgradedNodes.isEmpty()) { IndexVersion indexVersion = null; // these should all be the same version @@ -93,13 +92,11 @@ public void 
extractOldIndexVersion() throws Exception { assertThat("Index version could not be read", indexVersion, notNullValue()); oldIndexVersion = indexVersion; } - } - @Before - public void upgradeNode() throws Exception { // Skip remaining tests if upgrade failed assumeFalse("Cluster upgrade failed", upgradeFailed); + // finally, upgrade node if (upgradedNodes.size() < requestedUpgradedNodes) { closeClients(); // we might be running a specific upgrade test by itself - check previous nodes too From b31e5c9609f65dd09800d8caaceb4c00881a8a8e Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Wed, 23 Oct 2024 13:35:50 -0400 Subject: [PATCH 329/449] Refactor the download_database_on_pipeline_creation checks (#115421) --- .../geoip/GeoIpDownloaderTaskExecutor.java | 4 ++-- .../ingest/geoip/GeoIpProcessor.java | 16 +++++++++++----- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java index e4150005ed1ae..61ca050d91c13 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java @@ -309,14 +309,14 @@ private static boolean hasAtLeastOneGeoipProcessor(Map processor { final Map processorConfig = (Map) processor.get(GEOIP_TYPE); if (processorConfig != null) { - return downloadDatabaseOnPipelineCreation(GEOIP_TYPE, processorConfig, null) == downloadDatabaseOnPipelineCreation; + return downloadDatabaseOnPipelineCreation(processorConfig) == downloadDatabaseOnPipelineCreation; } } { final Map processorConfig = (Map) processor.get(IP_LOCATION_TYPE); if (processorConfig != null) { - return downloadDatabaseOnPipelineCreation(IP_LOCATION_TYPE, processorConfig, null) == downloadDatabaseOnPipelineCreation; + return 
downloadDatabaseOnPipelineCreation(processorConfig) == downloadDatabaseOnPipelineCreation; } } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index 9508bf0346058..f99f8dbe2fdd0 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -238,9 +238,8 @@ public Processor create( boolean ignoreMissing = readBooleanProperty(type, processorTag, config, "ignore_missing", false); boolean firstOnly = readBooleanProperty(type, processorTag, config, "first_only", true); - // Validating the download_database_on_pipeline_creation even if the result - // is not used directly by the factory. - downloadDatabaseOnPipelineCreation(type, config, processorTag); + // validate (and consume) the download_database_on_pipeline_creation property even though the result is not used by the factory + readBooleanProperty(type, processorTag, config, "download_database_on_pipeline_creation", true); // noop, should be removed in 9.0 Object value = config.remove("fallback_to_default_databases"); @@ -319,8 +318,15 @@ public Processor create( ); } - public static boolean downloadDatabaseOnPipelineCreation(String type, Map config, String processorTag) { - return readBooleanProperty(type, processorTag, config, "download_database_on_pipeline_creation", true); + /** + * Get the value of the "download_database_on_pipeline_creation" property from a processor's config map. + *

      + * As with the actual property definition, the default value of the property is 'true'. Unlike the actual + * property definition, this method doesn't consume (that is, config.remove) the property from + * the config map. + */ + public static boolean downloadDatabaseOnPipelineCreation(Map config) { + return (boolean) config.getOrDefault("download_database_on_pipeline_creation", true); } } From 254cedc988f3bb3b6c731cb1c2cb33c4f8bc018a Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Wed, 23 Oct 2024 20:36:44 +0300 Subject: [PATCH 330/449] Separate tests for snapshot and release versions (#115402) --- .../xpack/esql/action/EsqlCapabilities.java | 12 ++- .../rest-api-spec/test/esql/60_usage.yml | 82 ++++++++++++++++++- 2 files changed, 92 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 5157a80022c39..5e336e6759b1e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -416,7 +416,17 @@ public enum Cap { * Fix for an optimization that caused wrong results * https://github.com/elastic/elasticsearch/issues/115281 */ - FIX_FILTER_PUSHDOWN_PAST_STATS; + FIX_FILTER_PUSHDOWN_PAST_STATS, + + /** + * This enables 60_usage.yml "Basic ESQL usage....snapshot" version test. See also the next capability. + */ + SNAPSHOT_TEST_FOR_TELEMETRY(Build.current().isSnapshot()), + + /** + * This enables 60_usage.yml "Basic ESQL usage....non-snapshot" version test. See also the previous capability. 
+ */ + NON_SNAPSHOT_TEST_FOR_TELEMETRY(Build.current().isSnapshot() == false); private final boolean enabled; diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml index 7d1a4e123299b..b51bbdc4d2f87 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml @@ -23,7 +23,86 @@ setup: type: integer --- -"Basic ESQL usage output (telemetry)": +"Basic ESQL usage output (telemetry) snapshot version": + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [ snapshot_test_for_telemetry ] + reason: "Test that should only be executed on snapshot versions" + + - do: {xpack.usage: {}} + - match: { esql.available: true } + - match: { esql.enabled: true } + - length: { esql.features: 15 } + - set: {esql.features.dissect: dissect_counter} + - set: {esql.features.drop: drop_counter} + - set: {esql.features.eval: eval_counter} + - set: {esql.features.enrich: enrich_counter} + - set: {esql.features.from: from_counter} + - set: {esql.features.grok: grok_counter} + - set: {esql.features.keep: keep_counter} + - set: {esql.features.limit: limit_counter} + - set: {esql.features.mv_expand: mv_expand_counter} + - set: {esql.features.rename: rename_counter} + - set: {esql.features.row: row_counter} + - set: {esql.features.show: show_counter} + - set: {esql.features.sort: sort_counter} + - set: {esql.features.stats: stats_counter} + - set: {esql.features.where: where_counter} + - length: { esql.queries: 3 } + - set: {esql.queries.rest.total: rest_total_counter} + - set: {esql.queries.rest.failed: rest_failed_counter} + - set: {esql.queries.kibana.total: kibana_total_counter} + - set: {esql.queries.kibana.failed: kibana_failed_counter} + - set: {esql.queries._all.total: 
all_total_counter} + - set: {esql.queries._all.failed: all_failed_counter} + - set: {esql.functions.max: functions_max} + - set: {esql.functions.min: functions_min} + - set: {esql.functions.cos: functions_cos} + - set: {esql.functions.to_long: functions_to_long} + - set: {esql.functions.coalesce: functions_coalesce} + + - do: + esql.query: + body: + query: 'from test | where data > 2 and to_long(data) > 2 | sort count desc | limit 5 | stats m = max(data)' + + - do: {xpack.usage: {}} + - match: { esql.available: true } + - match: { esql.enabled: true } + - match: {esql.features.dissect: $dissect_counter} + - match: {esql.features.eval: $eval_counter} + - match: {esql.features.grok: $grok_counter} + - gt: {esql.features.limit: $limit_counter} + - gt: {esql.features.sort: $sort_counter} + - gt: {esql.features.stats: $stats_counter} + - gt: {esql.features.where: $where_counter} + - gt: {esql.queries.rest.total: $rest_total_counter} + - match: {esql.queries.rest.failed: $rest_failed_counter} + - match: {esql.queries.kibana.total: $kibana_total_counter} + - match: {esql.queries.kibana.failed: $kibana_failed_counter} + - gt: {esql.queries._all.total: $all_total_counter} + - match: {esql.queries._all.failed: $all_failed_counter} + - gt: {esql.functions.max: $functions_max} + - match: {esql.functions.min: $functions_min} + - match: {esql.functions.cos: $functions_cos} + - gt: {esql.functions.to_long: $functions_to_long} + - match: {esql.functions.coalesce: $functions_coalesce} + - length: {esql.functions: 117} # check the "sister" test below for a likely update to the same esql.functions length check + +--- +"Basic ESQL usage output (telemetry) non-snapshot version": + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [ non_snapshot_test_for_telemetry ] + reason: "Test that should only be executed on release versions" - do: {xpack.usage: {}} - match: { esql.available: true } @@ -83,3 +162,4 
@@ setup: - match: {esql.functions.cos: $functions_cos} - gt: {esql.functions.to_long: $functions_to_long} - match: {esql.functions.coalesce: $functions_coalesce} + - length: {esql.functions: 115} # check the "sister" test above for a likely update to the same esql.functions length check From c851c25568f22455f75f8cbf99eb9b8a7b3f7302 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Wed, 23 Oct 2024 13:41:34 -0400 Subject: [PATCH 331/449] Refactor InferenceProcessorInfoExtractor to avoid ConfigurationUtils (#115425) --- .../InferenceProcessorInfoExtractor.java | 45 +++++++++---------- 1 file changed, 21 insertions(+), 24 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/InferenceProcessorInfoExtractor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/InferenceProcessorInfoExtractor.java index e61342d281c90..83f7832645270 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/InferenceProcessorInfoExtractor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/InferenceProcessorInfoExtractor.java @@ -10,9 +10,7 @@ import org.apache.lucene.util.Counter; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestMetadata; -import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.transport.Transports; import java.util.HashMap; @@ -24,6 +22,7 @@ import java.util.function.Consumer; import static org.elasticsearch.inference.InferenceResults.MODEL_ID_RESULTS_FIELD; +import static org.elasticsearch.ingest.Pipeline.ON_FAILURE_KEY; import static org.elasticsearch.ingest.Pipeline.PROCESSORS_KEY; /** @@ -53,16 +52,10 @@ public static int countInferenceProcessors(ClusterState state) { Counter counter = Counter.newCounter(); ingestMetadata.getPipelines().forEach((pipelineId, configuration) -> { Map configMap = 
configuration.getConfigAsMap(); - List> processorConfigs = ConfigurationUtils.readList(null, null, configMap, PROCESSORS_KEY); + List> processorConfigs = (List>) configMap.get(PROCESSORS_KEY); for (Map processorConfigWithKey : processorConfigs) { for (Map.Entry entry : processorConfigWithKey.entrySet()) { - addModelsAndPipelines( - entry.getKey(), - pipelineId, - (Map) entry.getValue(), - pam -> counter.addAndGet(1), - 0 - ); + addModelsAndPipelines(entry.getKey(), pipelineId, entry.getValue(), pam -> counter.addAndGet(1), 0); } } }); @@ -73,7 +66,6 @@ public static int countInferenceProcessors(ClusterState state) { * @param ingestMetadata The ingestMetadata of current ClusterState * @return The set of model IDs referenced by inference processors */ - @SuppressWarnings("unchecked") public static Set getModelIdsFromInferenceProcessors(IngestMetadata ingestMetadata) { if (ingestMetadata == null) { return Set.of(); @@ -82,7 +74,7 @@ public static Set getModelIdsFromInferenceProcessors(IngestMetadata inge Set modelIds = new LinkedHashSet<>(); ingestMetadata.getPipelines().forEach((pipelineId, configuration) -> { Map configMap = configuration.getConfigAsMap(); - List> processorConfigs = ConfigurationUtils.readList(null, null, configMap, PROCESSORS_KEY); + List> processorConfigs = readList(configMap, PROCESSORS_KEY); for (Map processorConfigWithKey : processorConfigs) { for (Map.Entry entry : processorConfigWithKey.entrySet()) { addModelsAndPipelines(entry.getKey(), pipelineId, entry.getValue(), pam -> modelIds.add(pam.modelIdOrAlias()), 0); @@ -96,7 +88,6 @@ public static Set getModelIdsFromInferenceProcessors(IngestMetadata inge * @param state Current cluster state * @return a map from Model or Deployment IDs or Aliases to each pipeline referencing them. 
*/ - @SuppressWarnings("unchecked") public static Map> pipelineIdsByResource(ClusterState state, Set ids) { assert Transports.assertNotTransportThread("non-trivial nested loops over cluster state structures"); Map> pipelineIdsByModelIds = new HashMap<>(); @@ -110,7 +101,7 @@ public static Map> pipelineIdsByResource(ClusterState state, } ingestMetadata.getPipelines().forEach((pipelineId, configuration) -> { Map configMap = configuration.getConfigAsMap(); - List> processorConfigs = ConfigurationUtils.readList(null, null, configMap, PROCESSORS_KEY); + List> processorConfigs = readList(configMap, PROCESSORS_KEY); for (Map processorConfigWithKey : processorConfigs) { for (Map.Entry entry : processorConfigWithKey.entrySet()) { addModelsAndPipelines(entry.getKey(), pipelineId, entry.getValue(), pam -> { @@ -128,7 +119,6 @@ public static Map> pipelineIdsByResource(ClusterState state, * @param state Current {@link ClusterState} * @return a map from Model or Deployment IDs or Aliases to each pipeline referencing them. 
*/ - @SuppressWarnings("unchecked") public static Set pipelineIdsForResource(ClusterState state, Set ids) { assert Transports.assertNotTransportThread("non-trivial nested loops over cluster state structures"); Set pipelineIds = new HashSet<>(); @@ -142,7 +132,7 @@ public static Set pipelineIdsForResource(ClusterState state, Set } ingestMetadata.getPipelines().forEach((pipelineId, configuration) -> { Map configMap = configuration.getConfigAsMap(); - List> processorConfigs = ConfigurationUtils.readList(null, null, configMap, PROCESSORS_KEY); + List> processorConfigs = readList(configMap, PROCESSORS_KEY); for (Map processorConfigWithKey : processorConfigs) { for (Map.Entry entry : processorConfigWithKey.entrySet()) { addModelsAndPipelines(entry.getKey(), pipelineId, entry.getValue(), pam -> { @@ -188,7 +178,7 @@ private static void addModelsAndPipelines( addModelsAndPipelines( innerProcessorWithName.getKey(), pipelineId, - (Map) innerProcessorWithName.getValue(), + innerProcessorWithName.getValue(), handler, level + 1 ); @@ -196,13 +186,8 @@ private static void addModelsAndPipelines( } return; } - if (processorDefinition instanceof Map definitionMap && definitionMap.containsKey(Pipeline.ON_FAILURE_KEY)) { - List> onFailureConfigs = ConfigurationUtils.readList( - null, - null, - (Map) definitionMap, - Pipeline.ON_FAILURE_KEY - ); + if (processorDefinition instanceof Map definitionMap && definitionMap.containsKey(ON_FAILURE_KEY)) { + List> onFailureConfigs = readList(definitionMap, ON_FAILURE_KEY); onFailureConfigs.stream() .flatMap(map -> map.entrySet().stream()) .forEach(entry -> addModelsAndPipelines(entry.getKey(), pipelineId, entry.getValue(), handler, level + 1)); @@ -211,4 +196,16 @@ private static void addModelsAndPipelines( private record PipelineAndModel(String pipelineId, String modelIdOrAlias) {} + /** + * A local alternative to ConfigurationUtils.readList(...) 
that reads list properties out of the processor configuration map, + * but doesn't rely on mutating the configuration map. + */ + @SuppressWarnings("unchecked") + private static List> readList(Map processorConfig, String key) { + Object val = processorConfig.get(key); + if (val == null) { + throw new IllegalArgumentException("Missing required property [" + key + "]"); + } + return (List>) val; + } } From b6d078908ca7dce354d24f3bcf8db5d604488c14 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Wed, 23 Oct 2024 18:42:02 +0100 Subject: [PATCH 332/449] [ML] Add pathc transport version for change to Get Inference Request (#115250) --- .../src/main/java/org/elasticsearch/TransportVersions.java | 1 + .../core/inference/action/GetInferenceModelAction.java | 6 ++++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 7e06004e47cfb..6d9bf2ac52f2d 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -174,6 +174,7 @@ static TransportVersion def(int id) { public static final TransportVersion ESQL_PER_AGGREGATE_FILTER = def(8_770_00_0); public static final TransportVersion ML_INFERENCE_ATTACH_TO_EXISTSING_DEPLOYMENT = def(8_771_00_0); public static final TransportVersion CONVERT_FAILURE_STORE_OPTIONS_TO_SELECTOR_OPTIONS_INTERNALLY = def(8_772_00_0); + public static final TransportVersion INFERENCE_DONT_PERSIST_ON_READ_BACKPORT_8_16 = def(8_772_00_1); public static final TransportVersion REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_773_00_0); public static final TransportVersion REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_774_00_0); public static final TransportVersion ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED = def(8_775_00_0); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java index 6e06133509644..ea0462d0f103e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java @@ -62,7 +62,8 @@ public Request(StreamInput in) throws IOException { super(in); this.inferenceEntityId = in.readString(); this.taskType = TaskType.fromStream(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_DONT_PERSIST_ON_READ)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_DONT_PERSIST_ON_READ) + || in.getTransportVersion().isPatchFrom(TransportVersions.INFERENCE_DONT_PERSIST_ON_READ_BACKPORT_8_16)) { this.persistDefaultConfig = in.readBoolean(); } else { this.persistDefaultConfig = PERSIST_DEFAULT_CONFIGS; @@ -87,7 +88,8 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(inferenceEntityId); taskType.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_DONT_PERSIST_ON_READ)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_DONT_PERSIST_ON_READ) + || out.getTransportVersion().isPatchFrom(TransportVersions.INFERENCE_DONT_PERSIST_ON_READ_BACKPORT_8_16)) { out.writeBoolean(this.persistDefaultConfig); } } From 57532e7b7fb1348c8abf50b225932cddea5937a9 Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Wed, 23 Oct 2024 19:44:04 +0200 Subject: [PATCH 333/449] [test] Unmute FsDirectoryFactoryTests#testStoreDirectory (#115440) Resolve #110210 --- muted-tests.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index f59ca0c213279..19e8416c396c3 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -17,9 +17,6 @@ tests: - class: "org.elasticsearch.xpack.deprecation.DeprecationHttpIT" issue: 
"https://github.com/elastic/elasticsearch/issues/108628" method: "testDeprecatedSettingsReturnWarnings" -- class: org.elasticsearch.index.store.FsDirectoryFactoryTests - method: testStoreDirectory - issue: https://github.com/elastic/elasticsearch/issues/110210 - class: org.elasticsearch.index.store.FsDirectoryFactoryTests method: testPreload issue: https://github.com/elastic/elasticsearch/issues/110211 From e0acb56086a27c247867fa4b464973ebefef5230 Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Wed, 23 Oct 2024 14:02:47 -0400 Subject: [PATCH 334/449] [ML] Mitigate IOSession timeouts (#115414) We are seeing exceptions ~0.03% of the time in our integration tests: ``` org.apache.http.ConnectionClosedException: Connection closed unexpectedly ``` The `contentDecoder` does not always fully consume the body within `SimpleInputBuffer.consumeContent`. When we return back to Apache, the rest of the body is never delivered, and the IOSession eventually times out and gets cleaned up. During that cleanup process, Apache calls our Consumer with the above exception. If we read 0 bytes and return back immediately, Apache has a better chance to load the rest of the body/footer, and it will call `consumeContent` again. This reduces the exception rate down to ~0.001%. 
Fix #114105 Fix #114232 Fix #114327 Fix #114385 --- docs/changelog/115414.yaml | 9 +++++++ muted-tests.yml | 12 --------- .../http/StreamingHttpResultPublisher.java | 26 +++++++++---------- .../services/InferenceEventsAssertion.java | 10 ++++--- .../anthropic/AnthropicServiceTests.java | 2 -- .../AzureAiStudioServiceTests.java | 2 -- .../azureopenai/AzureOpenAiServiceTests.java | 2 -- .../services/cohere/CohereServiceTests.java | 2 -- .../services/openai/OpenAiServiceTests.java | 2 -- 9 files changed, 27 insertions(+), 40 deletions(-) create mode 100644 docs/changelog/115414.yaml diff --git a/docs/changelog/115414.yaml b/docs/changelog/115414.yaml new file mode 100644 index 0000000000000..7475b765bb30e --- /dev/null +++ b/docs/changelog/115414.yaml @@ -0,0 +1,9 @@ +pr: 115414 +summary: Mitigate IOSession timeouts +area: Machine Learning +type: bug +issues: + - 114385 + - 114327 + - 114105 + - 114232 diff --git a/muted-tests.yml b/muted-tests.yml index 19e8416c396c3..cce16a07e647a 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -215,24 +215,12 @@ tests: - class: org.elasticsearch.xpack.inference.TextEmbeddingCrudIT method: testPutE5Small_withPlatformSpecificVariant issue: https://github.com/elastic/elasticsearch/issues/113950 -- class: org.elasticsearch.xpack.inference.services.openai.OpenAiServiceTests - method: testInfer_StreamRequest_ErrorResponse - issue: https://github.com/elastic/elasticsearch/issues/114105 - class: org.elasticsearch.xpack.inference.InferenceCrudIT method: testGet issue: https://github.com/elastic/elasticsearch/issues/114135 - class: org.elasticsearch.xpack.ilm.ExplainLifecycleIT method: testStepInfoPreservedOnAutoRetry issue: https://github.com/elastic/elasticsearch/issues/114220 -- class: org.elasticsearch.xpack.inference.services.openai.OpenAiServiceTests - method: testInfer_StreamRequest - issue: https://github.com/elastic/elasticsearch/issues/114232 -- class: org.elasticsearch.xpack.inference.services.cohere.CohereServiceTests - 
method: testInfer_StreamRequest_ErrorResponse - issue: https://github.com/elastic/elasticsearch/issues/114327 -- class: org.elasticsearch.xpack.inference.services.cohere.CohereServiceTests - method: testInfer_StreamRequest - issue: https://github.com/elastic/elasticsearch/issues/114385 - class: org.elasticsearch.xpack.inference.InferenceRestIT method: test {p0=inference/30_semantic_text_inference/Calculates embeddings using the default ELSER 2 endpoint} issue: https://github.com/elastic/elasticsearch/issues/114412 diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/StreamingHttpResultPublisher.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/StreamingHttpResultPublisher.java index bf74ca86a969a..0b2268a448c8a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/StreamingHttpResultPublisher.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/StreamingHttpResultPublisher.java @@ -96,11 +96,10 @@ public void consumeContent(ContentDecoder contentDecoder, IOControl ioControl) t try { var consumed = inputBuffer.consumeContent(contentDecoder); - var allBytes = new byte[consumed]; - inputBuffer.read(allBytes); - - // we can have empty bytes, don't bother sending them - if (allBytes.length > 0) { + // we could have read 0 bytes if the body was delayed getting in, we need to return out so apache can load the body/footer + if (consumed > 0) { + var allBytes = new byte[consumed]; + inputBuffer.read(allBytes); queue.offer(() -> { subscriber.onNext(new HttpResult(response, allBytes)); var currentBytesInQueue = bytesInQueue.updateAndGet(current -> Long.max(0, current - allBytes.length)); @@ -111,18 +110,17 @@ public void consumeContent(ContentDecoder contentDecoder, IOControl ioControl) t } } }); - } - // always check if totalByteSize > the configured setting in case the settings change - if 
(bytesInQueue.accumulateAndGet(allBytes.length, Long::sum) >= settings.getMaxResponseSize().getBytes()) { - pauseProducer(ioControl); - } + // always check if totalByteSize > the configured setting in case the settings change + if (bytesInQueue.accumulateAndGet(allBytes.length, Long::sum) >= settings.getMaxResponseSize().getBytes()) { + pauseProducer(ioControl); + } - // always run in case we're waking up from a pause and need to start a new thread - taskRunner.requestNextRun(); + taskRunner.requestNextRun(); - if (listenerCalled.compareAndSet(false, true)) { - listener.onResponse(this); + if (listenerCalled.compareAndSet(false, true)) { + listener.onResponse(this); + } } } finally { inputBuffer.reset(); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/InferenceEventsAssertion.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/InferenceEventsAssertion.java index f23ea2aa414b2..7cfd231be39f3 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/InferenceEventsAssertion.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/InferenceEventsAssertion.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.inference.InferenceServiceResults; -import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentFactory; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; @@ -26,6 +25,7 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Stream; +import static org.elasticsearch.test.ESTestCase.fail; import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; import static org.hamcrest.CoreMatchers.is; @@ -47,7 +47,9 @@ public InferenceEventsAssertion hasFinishedStream() { } public InferenceEventsAssertion hasNoErrors() { - 
MatcherAssert.assertThat("Expected no errors from stream.", error, Matchers.nullValue()); + if (error != null) { + fail(error, "Expected no errors from stream."); + } return this; } @@ -66,7 +68,7 @@ public InferenceEventsAssertion hasErrorWithStatusCode(int statusCode) { } t = t.getCause(); } - ESTestCase.fail(error, "Expected an underlying ElasticsearchStatusException."); + fail(error, "Expected an underlying ElasticsearchStatusException."); return this; } @@ -79,7 +81,7 @@ public InferenceEventsAssertion hasErrorContaining(String message) { } t = t.getCause(); } - ESTestCase.fail(error, "Expected exception to contain string: " + message); + fail(error, "Expected exception to contain string: " + message); return this; } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicServiceTests.java index 8adf75b4c0a81..48277112d9306 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicServiceTests.java @@ -532,7 +532,6 @@ public void testInfer_SendsCompletionRequest() throws IOException { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/114385") public void testInfer_StreamRequest() throws Exception { String responseJson = """ data: {"type": "message_start", "message": {"model": "claude, probably"}} @@ -578,7 +577,6 @@ private InferenceServiceResults streamChatCompletion() throws IOException { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/114385") public void testInfer_StreamRequest_ErrorResponse() throws Exception { String responseJson = """ data: {"type": "error", "error": {"type": "request_too_large", "message": "blah"}} diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java index 44b0d17d9b448..e85edf573ba96 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java @@ -1308,7 +1308,6 @@ public void testInfer_UnauthorisedResponse() throws IOException { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/114385") public void testInfer_StreamRequest() throws Exception { String responseJson = """ data: {\ @@ -1364,7 +1363,6 @@ private InferenceServiceResults streamChatCompletion() throws IOException, URISy } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/114385") public void testInfer_StreamRequest_ErrorResponse() throws Exception { String responseJson = """ { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java index 900b666c0b8fb..3408fc358cac0 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java @@ -1425,7 +1425,6 @@ private void testChunkedInfer(AzureOpenAiEmbeddingsModel model) throws IOExcepti } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/114385") public void testInfer_StreamRequest() throws Exception { String responseJson = """ data: {\ @@ -1484,7 +1483,6 @@ private 
InferenceServiceResults streamChatCompletion() throws IOException, URISy } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/114385") public void testInfer_StreamRequest_ErrorResponse() throws Exception { String responseJson = """ { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index cf114db45619f..758c38166778b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -1635,7 +1635,6 @@ public void testDefaultSimilarity() { assertEquals(SimilarityMeasure.DOT_PRODUCT, CohereService.defaultSimilarity()); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/114385") public void testInfer_StreamRequest() throws Exception { String responseJson = """ {"event_type":"text-generation", "text":"hello"} @@ -1669,7 +1668,6 @@ private InferenceServiceResults streamChatCompletion() throws IOException { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/114385") public void testInfer_StreamRequest_ErrorResponse() throws Exception { String responseJson = """ { "event_type":"stream-end", "finish_reason":"ERROR", "response":{ "text": "how dare you" } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index beba9b1a92477..cf1438b334478 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -1007,7 +1007,6 @@ public void testInfer_SendsRequest() throws IOException { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/114385") public void testInfer_StreamRequest() throws Exception { String responseJson = """ data: {\ @@ -1057,7 +1056,6 @@ private InferenceServiceResults streamChatCompletion() throws IOException { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/114385") public void testInfer_StreamRequest_ErrorResponse() throws Exception { String responseJson = """ { From 9eea83c45f1c800626d83067a9ecc9c1c9ef7e27 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 24 Oct 2024 05:08:01 +1100 Subject: [PATCH 335/449] Mute org.elasticsearch.xpack.security.FileSettingsRoleMappingsRestartIT testFileSettingsReprocessedOnRestartWithoutVersionChange #115450 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index cce16a07e647a..bd0145611237b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -282,6 +282,9 @@ tests: - class: org.elasticsearch.smoketest.SmokeTestIngestWithAllDepsClientYamlTestSuiteIT method: test {yaml=ingest/80_ingest_simulate/Test mapping addition works with legacy templates} issue: https://github.com/elastic/elasticsearch/issues/115412 +- class: org.elasticsearch.xpack.security.FileSettingsRoleMappingsRestartIT + method: testFileSettingsReprocessedOnRestartWithoutVersionChange + issue: https://github.com/elastic/elasticsearch/issues/115450 # Examples: # From 176015d59b71e633822cec21042c6f07c4a7b415 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 23 Oct 2024 12:07:10 -0700 Subject: [PATCH 336/449] Temporarily disable buildkite upload on Windows agents (#115449) --- .../gradle/internal/ElasticsearchBuildCompletePlugin.java | 3 ++- 1 file changed, 2 
insertions(+), 1 deletion(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java index 25ad5bcf89581..7d9537feaea56 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java @@ -15,6 +15,7 @@ import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream; import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream; import org.apache.commons.io.IOUtils; +import org.elasticsearch.gradle.OS; import org.elasticsearch.gradle.util.GradleUtils; import org.gradle.api.Plugin; import org.gradle.api.Project; @@ -61,7 +62,7 @@ public void apply(Project target) { ? System.getenv("BUILD_NUMBER") : System.getenv("BUILDKITE_BUILD_NUMBER"); String performanceTest = System.getenv("BUILD_PERFORMANCE_TEST"); - if (buildNumber != null && performanceTest == null && GradleUtils.isIncludedBuild(target) == false) { + if (buildNumber != null && performanceTest == null && GradleUtils.isIncludedBuild(target) == false && OS.current() != OS.WINDOWS) { File targetFile = calculateTargetFile(target, buildNumber); File projectDir = target.getProjectDir(); File gradleWorkersDir = new File(target.getGradle().getGradleUserHomeDir(), "workers/"); From 60678a1a9caf69177c588f7fc3c4585013e027f2 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 23 Oct 2024 15:10:50 -0400 Subject: [PATCH 337/449] ESQL: Fix filtered grouping on ords (#115312) This fixes filtered aggs when they are grouped on a field with ordinals. This looks like: ``` | STATS max = max(salary) WHERE salary > 0 BY job_positions ``` when the `job_positions` field is a keyword field with doc values. 
In that case we use a faster group-by-segment-ordinals algorithm that needs to be able to merge the results of aggregators from multiple segments. This previously failed with a `ClassCastException` because of a mistake. Also! the group-by-segment-ordinals algorithm wasn't properly releasing the closure used to add inputs, causing a breaker size leak. This wasn't really leaking memory, but leaking *tracking* of memory. Closes #114897 --- docs/changelog/115312.yaml | 6 + .../FilteredGroupingAggregatorFunction.java | 2 +- .../operator/OrdinalsGroupingOperator.java | 4 +- .../FilteredAggregatorFunctionTests.java | 1 - ...lteredGroupingAggregatorFunctionTests.java | 39 +++++- .../GroupingAggregatorFunctionTestCase.java | 7 +- .../src/main/resources/stats.csv-spec | 126 ++++++++++++++++++ .../xpack/esql/action/EsqlCapabilities.java | 5 + 8 files changed, 183 insertions(+), 7 deletions(-) create mode 100644 docs/changelog/115312.yaml diff --git a/docs/changelog/115312.yaml b/docs/changelog/115312.yaml new file mode 100644 index 0000000000000..acf6bbc69c36c --- /dev/null +++ b/docs/changelog/115312.yaml @@ -0,0 +1,6 @@ +pr: 115312 +summary: "ESQL: Fix filtered grouping on ords" +area: ES|QL +type: bug +issues: + - 114897 diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunction.java index 3e38b6d6fe9fa..8d3dbf3164c47 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunction.java @@ -97,7 +97,7 @@ public void addIntermediateInput(int positionOffset, IntVector groupIdVector, Pa @Override public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { - 
next.addIntermediateRowInput(groupId, input, position); + next.addIntermediateRowInput(groupId, ((FilteredGroupingAggregatorFunction) input).next(), position); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java index 5e0e625abb914..7cf47bc7fed1c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java @@ -372,8 +372,8 @@ static final class OrdinalSegmentAggregator implements Releasable, SeenGroupIds } void addInput(IntVector docs, Page page) { + GroupingAggregatorFunction.AddInput[] prepared = new GroupingAggregatorFunction.AddInput[aggregators.size()]; try { - GroupingAggregatorFunction.AddInput[] prepared = new GroupingAggregatorFunction.AddInput[aggregators.size()]; for (int i = 0; i < prepared.length; i++) { prepared[i] = aggregators.get(i).prepareProcessPage(this, page); } @@ -392,7 +392,7 @@ void addInput(IntVector docs, Page page) { } catch (IOException e) { throw new UncheckedIOException(e); } finally { - page.releaseBlocks(); + Releasables.close(page::releaseBlocks, Releasables.wrap(prepared)); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionTests.java index da2c3502144db..35ecced470e01 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionTests.java @@ -27,7 +27,6 @@ public class FilteredAggregatorFunctionTests extends 
AggregatorFunctionTestCase { private final List unclosed = Collections.synchronizedList(new ArrayList<>()); - // TODO some version of this test that applies across all aggs @Override protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { return new FilteredAggregatorFunctionSupplier( diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunctionTests.java index 87cb99bd0709f..26971dc927cd1 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunctionTests.java @@ -11,12 +11,14 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.LongIntBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.core.Releasables; import org.elasticsearch.core.Tuple; import org.junit.After; @@ -31,7 +33,6 @@ public class FilteredGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { private final List unclosed = Collections.synchronizedList(new ArrayList<>()); - // TODO some version of this test that applies across all aggs @Override protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { return new FilteredAggregatorFunctionSupplier( @@ -104,6 +105,42 @@ protected SourceOperator 
simpleInput(BlockFactory blockFactory, int size) { ); } + /** + * Tests {@link GroupingAggregator#addIntermediateRow} by building results using the traditional + * add mechanism and using {@link GroupingAggregator#addIntermediateRow} then asserting that they + * produce the same output. + */ + public void testAddIntermediateRowInput() { + DriverContext ctx = driverContext(); + AggregatorFunctionSupplier supplier = aggregatorFunction(channels(AggregatorMode.SINGLE)); + Block[] results = new Block[2]; + try ( + GroupingAggregatorFunction main = supplier.groupingAggregator(ctx); + GroupingAggregatorFunction leaf = supplier.groupingAggregator(ctx); + SourceOperator source = simpleInput(ctx.blockFactory(), 10); + ) { + Page p; + while ((p = source.getOutput()) != null) { + try ( + IntVector group = ctx.blockFactory().newConstantIntVector(0, p.getPositionCount()); + GroupingAggregatorFunction.AddInput addInput = leaf.prepareProcessPage(null, p) + ) { + addInput.add(0, group); + } finally { + p.releaseBlocks(); + } + } + main.addIntermediateRowInput(0, leaf, 0); + try (IntVector selected = ctx.blockFactory().newConstantIntVector(0, 1)) { + main.evaluateFinal(results, 0, selected, ctx); + leaf.evaluateFinal(results, 1, selected, ctx); + } + assertThat(results[0], equalTo(results[1])); + } finally { + Releasables.close(results); + } + } + @After public void checkUnclosed() { for (Exception tracker : unclosed) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java index 9414e076a26e6..cb190dfffafb9 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java @@ -89,14 +89,17 @@ 
protected final Operator.OperatorFactory simpleWithMode(AggregatorMode mode) { return simpleWithMode(mode, Function.identity()); } + protected List channels(AggregatorMode mode) { + return mode.isInputPartial() ? range(1, 1 + aggregatorIntermediateBlockCount()).boxed().toList() : List.of(1); + } + private Operator.OperatorFactory simpleWithMode( AggregatorMode mode, Function wrap ) { - List channels = mode.isInputPartial() ? range(1, 1 + aggregatorIntermediateBlockCount()).boxed().toList() : List.of(1); int emitChunkSize = between(100, 200); - AggregatorFunctionSupplier supplier = wrap.apply(aggregatorFunction(channels)); + AggregatorFunctionSupplier supplier = wrap.apply(aggregatorFunction(channels(mode))); if (randomBoolean()) { supplier = chunkGroups(emitChunkSize, supplier); } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 6d4c596e8d7de..2dc21a86e6394 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -2529,3 +2529,129 @@ FROM employees | eval x = [1,2,3], y = 5 + 6 | stats m = max(y) by y+1 m:integer | y+1:integer 11 | 12 ; + +filterIsAlwaysTrue +required_capability: per_agg_filtering +FROM employees +| STATS max = max(salary) WHERE salary > 0 +; + +max:integer +74999 +; + +filterIsAlwaysFalse +required_capability: per_agg_filtering +FROM employees +| STATS max = max(salary) WHERE first_name == "" +; + +max:integer +null +; + +filterSometimesMatches +required_capability: per_agg_filtering +FROM employees +| STATS max = max(salary) WHERE first_name IS NULL +; + +max:integer +70011 +; + +groupingFilterIsAlwaysTrue +required_capability: per_agg_filtering +FROM employees +| MV_EXPAND job_positions +| STATS max = max(salary) WHERE salary > 0 BY job_positions = SUBSTRING(job_positions, 1, 1) +| SORT job_positions +| LIMIT 4 +; + +max:integer | 
job_positions:keyword +74970 | A +58121 | B +74999 | D +58715 | H +; + +groupingFilterIsAlwaysFalse +required_capability: per_agg_filtering +FROM employees +| MV_EXPAND job_positions +| STATS max = max(salary) WHERE first_name == "" BY job_positions = SUBSTRING(job_positions, 1, 1) +| SORT job_positions +| LIMIT 4 +; + +max:integer | job_positions:keyword +null | A +null | B +null | D +null | H +; + +groupingFilterSometimesMatches +required_capability: per_agg_filtering +FROM employees +| MV_EXPAND job_positions +| STATS max = max(salary) WHERE first_name IS NULL BY job_positions = SUBSTRING(job_positions, 1, 1) +| SORT job_positions +| LIMIT 4 +; + +max:integer | job_positions:keyword +62233 | A +39878 | B +67492 | D +null | H +; + +groupingByOrdinalsFilterIsAlwaysTrue +required_capability: per_agg_filtering +required_capability: per_agg_filtering_ords +FROM employees +| STATS max = max(salary) WHERE salary > 0 BY job_positions +| SORT job_positions +| LIMIT 4 +; + +max:integer | job_positions:keyword +74970 | Accountant +69904 | Architect +58121 | Business Analyst +74999 | Data Scientist +; + +groupingByOrdinalsFilterIsAlwaysFalse +required_capability: per_agg_filtering +required_capability: per_agg_filtering_ords +FROM employees +| STATS max = max(salary) WHERE first_name == "" BY job_positions +| SORT job_positions +| LIMIT 4 +; + +max:integer | job_positions:keyword +null | Accountant +null | Architect +null | Business Analyst +null | Data Scientist +; + +groupingByOrdinalsFilterSometimesMatches +required_capability: per_agg_filtering +required_capability: per_agg_filtering_ords +FROM employees +| STATS max = max(salary) WHERE first_name IS NULL BY job_positions +| SORT job_positions +| LIMIT 4 +; + +max:integer | job_positions:keyword +39878 | Accountant +62233 | Architect +39878 | Business Analyst +67492 | Data Scientist +; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 5e336e6759b1e..dfca6ab2bf814 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -399,6 +399,11 @@ public enum Cap { */ PER_AGG_FILTERING, + /** + * Fix {@link #PER_AGG_FILTERING} grouped by ordinals. + */ + PER_AGG_FILTERING_ORDS, + /** * Fix for https://github.com/elastic/elasticsearch/issues/114714 */ From c9f995ad92145733900465e8383ab0f38882cda7 Mon Sep 17 00:00:00 2001 From: Ankita Kumar Date: Wed, 23 Oct 2024 16:17:40 -0400 Subject: [PATCH 338/449] Log reindexing failures (#112676) Wait for reindexing tasks to finish during shutdown for an amount of time defined by settings. Also log the number of reindexing tasks still in flight after the wait. --- .../index/reindex/ReindexNodeShutdownIT.java | 139 +++++++++++++ .../common/settings/ClusterSettings.java | 3 + .../java/org/elasticsearch/node/Node.java | 119 +---------- .../elasticsearch/node/NodeConstruction.java | 3 + .../node/ShutdownPrepareService.java | 184 ++++++++++++++++++ 5 files changed, 332 insertions(+), 116 deletions(-) create mode 100644 modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/ReindexNodeShutdownIT.java create mode 100644 server/src/main/java/org/elasticsearch/node/ShutdownPrepareService.java diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/ReindexNodeShutdownIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/ReindexNodeShutdownIT.java new file mode 100644 index 0000000000000..4a001bb2d0969 --- /dev/null +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/ReindexNodeShutdownIT.java @@ -0,0 +1,139 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.node.ShutdownPrepareService; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.reindex.ReindexPlugin; +import org.elasticsearch.tasks.TaskInfo; +import org.elasticsearch.tasks.TaskManager; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.transport.TransportService; + +import java.util.Arrays; +import java.util.Collection; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static org.elasticsearch.node.ShutdownPrepareService.MAXIMUM_REINDEXING_TIMEOUT_SETTING; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; + +/** + * Test that a wait added during shutdown is necessary for a large reindexing task to complete. + * The test works as follows: + * 1. Start a large (reasonably long running) reindexing request on the coordinator-only node. + * 2. Check that the reindexing task appears on the coordinating node + * 3. With a 10s timeout value for MAXIMUM_REINDEXING_TIMEOUT_SETTING, + * wait for the reindexing task to complete before closing the node + * 4. 
Confirm that the reindexing task succeeds with the wait (it will fail without it) + */ +@ESIntegTestCase.ClusterScope(numDataNodes = 0, numClientNodes = 0, scope = ESIntegTestCase.Scope.TEST) +public class ReindexNodeShutdownIT extends ESIntegTestCase { + + protected static final String INDEX = "reindex-shutdown-index"; + protected static final String DEST_INDEX = "dest-index"; + + @Override + protected Collection> nodePlugins() { + return Arrays.asList(ReindexPlugin.class); + } + + protected ReindexRequestBuilder reindex(String nodeName) { + return new ReindexRequestBuilder(internalCluster().client(nodeName)); + } + + public void testReindexWithShutdown() throws Exception { + final String masterNodeName = internalCluster().startMasterOnlyNode(); + final String dataNodeName = internalCluster().startDataOnlyNode(); + + final Settings COORD_SETTINGS = Settings.builder() + .put(MAXIMUM_REINDEXING_TIMEOUT_SETTING.getKey(), TimeValue.timeValueSeconds(10)) + .build(); + final String coordNodeName = internalCluster().startCoordinatingOnlyNode(Settings.EMPTY); + + ensureStableCluster(3); + + int numDocs = 20000; + createIndex(numDocs); + createReindexTaskAndShutdown(coordNodeName); + checkDestinationIndex(dataNodeName, numDocs); + } + + private void createIndex(int numDocs) { + // INDEX will be created on the dataNode + createIndex(INDEX); + + logger.debug("setting up [{}] docs", numDocs); + indexRandom( + true, + false, + true, + IntStream.range(0, numDocs) + .mapToObj(i -> prepareIndex(INDEX).setId(String.valueOf(i)).setSource("n", i)) + .collect(Collectors.toList()) + ); + + // Checks that the all documents have been indexed and correctly counted + assertHitCount(prepareSearch(INDEX).setSize(0).setTrackTotalHits(true), numDocs); + } + + private void createReindexTaskAndShutdown(final String coordNodeName) throws Exception { + AbstractBulkByScrollRequestBuilder builder = reindex(coordNodeName).source(INDEX).destination(DEST_INDEX); + AbstractBulkByScrollRequest 
reindexRequest = builder.request(); + ShutdownPrepareService shutdownPrepareService = internalCluster().getInstance(ShutdownPrepareService.class, coordNodeName); + + TaskManager taskManager = internalCluster().getInstance(TransportService.class, coordNodeName).getTaskManager(); + + // Now execute the reindex action... + ActionListener reindexListener = new ActionListener() { + @Override + public void onResponse(BulkByScrollResponse bulkByScrollResponse) { + assertNull(bulkByScrollResponse.getReasonCancelled()); + logger.debug(bulkByScrollResponse.toString()); + } + + @Override + public void onFailure(Exception e) { + logger.debug("Encounterd " + e.toString()); + fail(e, "Encounterd " + e.toString()); + } + }; + internalCluster().client(coordNodeName).execute(ReindexAction.INSTANCE, reindexRequest, reindexListener); + + // Check for reindex task to appear in the tasks list and Immediately stop coordinating node + waitForTask(ReindexAction.INSTANCE.name(), coordNodeName); + shutdownPrepareService.prepareForShutdown(taskManager); + internalCluster().stopNode(coordNodeName); + } + + // Make sure all documents from the source index have been reindexed into the destination index + private void checkDestinationIndex(String dataNodeName, int numDocs) throws Exception { + assertTrue(indexExists(DEST_INDEX)); + flushAndRefresh(DEST_INDEX); + assertBusy(() -> { assertHitCount(prepareSearch(DEST_INDEX).setSize(0).setTrackTotalHits(true), numDocs); }); + } + + private static void waitForTask(String actionName, String nodeName) throws Exception { + assertBusy(() -> { + ListTasksResponse tasks = clusterAdmin().prepareListTasks(nodeName).setActions(actionName).setDetailed(true).get(); + tasks.rethrowFailures("Find my task"); + for (TaskInfo taskInfo : tasks.getTasks()) { + // Skip tasks with a parent because those are children of the task we want + if (taskInfo.parentTaskId().isSet() == false) return; + } + fail("Couldn't find task after waiting, tasks=" + tasks.getTasks()); + }, 
10, TimeUnit.SECONDS); + } +} diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 8cbacccb915ac..7bb78eabc8727 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -108,6 +108,7 @@ import org.elasticsearch.monitor.process.ProcessService; import org.elasticsearch.node.Node; import org.elasticsearch.node.NodeRoleSettings; +import org.elasticsearch.node.ShutdownPrepareService; import org.elasticsearch.persistent.PersistentTasksClusterService; import org.elasticsearch.persistent.decider.EnableAssignmentDecider; import org.elasticsearch.plugins.PluginsService; @@ -456,6 +457,8 @@ public void apply(Settings value, Settings current, Settings previous) { Environment.PATH_SHARED_DATA_SETTING, NodeEnvironment.NODE_ID_SEED_SETTING, Node.INITIAL_STATE_TIMEOUT_SETTING, + ShutdownPrepareService.MAXIMUM_SHUTDOWN_TIMEOUT_SETTING, + ShutdownPrepareService.MAXIMUM_REINDEXING_TIMEOUT_SETTING, DiscoveryModule.DISCOVERY_TYPE_SETTING, DiscoveryModule.DISCOVERY_SEED_PROVIDERS_SETTING, DiscoveryModule.ELECTION_STRATEGY_SETTING, diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 32a65302922a8..e30f76fdd9414 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -13,10 +13,6 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchTimeoutException; -import org.elasticsearch.action.search.TransportSearchAction; -import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.action.support.RefCountingListener; -import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.bootstrap.BootstrapCheck; 
import org.elasticsearch.bootstrap.BootstrapContext; import org.elasticsearch.client.internal.Client; @@ -82,7 +78,6 @@ import org.elasticsearch.snapshots.SnapshotShardsService; import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.tasks.TaskCancellationService; -import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.tasks.TaskResultsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteClusterPortSettings; @@ -106,18 +101,12 @@ import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; import java.util.function.BiConsumer; import java.util.function.Function; -import java.util.function.Supplier; -import java.util.stream.Collectors; import javax.net.ssl.SNIHostName; -import static org.elasticsearch.core.Strings.format; - /** * A node represent a node within a cluster ({@code cluster.name}). The {@link #client()} can be used * in order to use a {@link Client} to perform actions/operations against the cluster. @@ -161,12 +150,6 @@ public class Node implements Closeable { Property.NodeScope ); - public static final Setting MAXIMUM_SHUTDOWN_TIMEOUT_SETTING = Setting.positiveTimeSetting( - "node.maximum_shutdown_grace_period", - TimeValue.ZERO, - Setting.Property.NodeScope - ); - private final Lifecycle lifecycle = new Lifecycle(); /** @@ -187,6 +170,7 @@ public class Node implements Closeable { private final LocalNodeFactory localNodeFactory; private final NodeService nodeService; private final TerminationHandler terminationHandler; + // for testing final NamedWriteableRegistry namedWriteableRegistry; final NamedXContentRegistry namedXContentRegistry; @@ -606,105 +590,8 @@ public synchronized void close() throws IOException { * logic should use Node Shutdown, see {@link org.elasticsearch.cluster.metadata.NodesShutdownMetadata}. 
*/ public void prepareForClose() { - final var maxTimeout = MAXIMUM_SHUTDOWN_TIMEOUT_SETTING.get(this.settings()); - - record Stopper(String name, SubscribableListener listener) { - boolean isIncomplete() { - return listener().isDone() == false; - } - } - - final var stoppers = new ArrayList(); - final var allStoppersFuture = new PlainActionFuture(); - try (var listeners = new RefCountingListener(allStoppersFuture)) { - final BiConsumer stopperRunner = (name, action) -> { - final var stopper = new Stopper(name, new SubscribableListener<>()); - stoppers.add(stopper); - stopper.listener().addListener(listeners.acquire()); - new Thread(() -> { - try { - action.run(); - } catch (Exception ex) { - logger.warn("unexpected exception in shutdown task [" + stopper.name() + "]", ex); - } finally { - stopper.listener().onResponse(null); - } - }, stopper.name()).start(); - }; - - stopperRunner.accept("http-server-transport-stop", injector.getInstance(HttpServerTransport.class)::close); - stopperRunner.accept("async-search-stop", () -> awaitSearchTasksComplete(maxTimeout)); - if (terminationHandler != null) { - stopperRunner.accept("termination-handler-stop", terminationHandler::handleTermination); - } - } - - final Supplier incompleteStoppersDescriber = () -> stoppers.stream() - .filter(Stopper::isIncomplete) - .map(Stopper::name) - .collect(Collectors.joining(", ", "[", "]")); - - try { - if (TimeValue.ZERO.equals(maxTimeout)) { - allStoppersFuture.get(); - } else { - allStoppersFuture.get(maxTimeout.millis(), TimeUnit.MILLISECONDS); - } - } catch (ExecutionException e) { - assert false : e; // listeners are never completed exceptionally - logger.warn("failed during graceful shutdown tasks", e); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - logger.warn("interrupted while waiting for graceful shutdown tasks: " + incompleteStoppersDescriber.get(), e); - } catch (TimeoutException e) { - logger.warn("timed out while waiting for graceful shutdown 
tasks: " + incompleteStoppersDescriber.get()); - } - } - - private void awaitSearchTasksComplete(TimeValue asyncSearchTimeout) { - TaskManager taskManager = injector.getInstance(TransportService.class).getTaskManager(); - long millisWaited = 0; - while (true) { - long searchTasksRemaining = taskManager.getTasks() - .values() - .stream() - .filter(task -> TransportSearchAction.TYPE.name().equals(task.getAction())) - .count(); - if (searchTasksRemaining == 0) { - logger.debug("all search tasks complete"); - return; - } else { - // Let the system work on those searches for a while. We're on a dedicated thread to manage app shutdown, so we - // literally just want to wait and not take up resources on this thread for now. Poll period chosen to allow short - // response times, but checking the tasks list is relatively expensive, and we don't want to waste CPU time we could - // be spending on finishing those searches. - final TimeValue pollPeriod = TimeValue.timeValueMillis(500); - millisWaited += pollPeriod.millis(); - if (TimeValue.ZERO.equals(asyncSearchTimeout) == false && millisWaited >= asyncSearchTimeout.millis()) { - logger.warn( - format( - "timed out after waiting [%s] for [%d] search tasks to finish", - asyncSearchTimeout.toString(), - searchTasksRemaining - ) - ); - return; - } - logger.debug(format("waiting for [%s] search tasks to finish, next poll in [%s]", searchTasksRemaining, pollPeriod)); - try { - Thread.sleep(pollPeriod.millis()); - } catch (InterruptedException ex) { - logger.warn( - format( - "interrupted while waiting [%s] for [%d] search tasks to finish", - asyncSearchTimeout.toString(), - searchTasksRemaining - ) - ); - return; - } - } - } + injector.getInstance(ShutdownPrepareService.class) + .prepareForShutdown(injector.getInstance(TransportService.class).getTaskManager()); } /** diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 
8e66486329577..7e3991c1df1f4 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -1099,6 +1099,8 @@ private void construct( telemetryProvider.getTracer() ); + final ShutdownPrepareService shutdownPrepareService = new ShutdownPrepareService(settings, httpServerTransport, terminationHandler); + modules.add( loadPersistentTasksService( settingsModule, @@ -1200,6 +1202,7 @@ private void construct( b.bind(CompatibilityVersions.class).toInstance(compatibilityVersions); b.bind(DataStreamAutoShardingService.class).toInstance(dataStreamAutoShardingService); b.bind(FailureStoreMetrics.class).toInstance(failureStoreMetrics); + b.bind(ShutdownPrepareService.class).toInstance(shutdownPrepareService); }); if (ReadinessService.enabled(environment)) { diff --git a/server/src/main/java/org/elasticsearch/node/ShutdownPrepareService.java b/server/src/main/java/org/elasticsearch/node/ShutdownPrepareService.java new file mode 100644 index 0000000000000..ab9537053f45d --- /dev/null +++ b/server/src/main/java/org/elasticsearch/node/ShutdownPrepareService.java @@ -0,0 +1,184 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.node; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.search.TransportSearchAction; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.RefCountingListener; +import org.elasticsearch.action.support.SubscribableListener; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.index.reindex.ReindexAction; +import org.elasticsearch.node.internal.TerminationHandler; +import org.elasticsearch.tasks.TaskManager; + +import java.util.ArrayList; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.function.BiConsumer; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import static org.elasticsearch.core.Strings.format; + +/** + * This class was created to extract out the logic from {@link Node#prepareForClose()} to facilitate testing. + *

      + * Invokes hooks to prepare this node to be closed. This should be called when Elasticsearch receives a request to shut down + * gracefully from the underlying operating system, before system resources are closed. + *

      + * Note that this class is part of infrastructure to react to signals from the operating system - most graceful shutdown + * logic should use Node Shutdown, see {@link org.elasticsearch.cluster.metadata.NodesShutdownMetadata}. + */ +public class ShutdownPrepareService { + + private final Logger logger = LogManager.getLogger(ShutdownPrepareService.class); + private final Settings settings; + private final HttpServerTransport httpServerTransport; + private final TerminationHandler terminationHandler; + private volatile boolean hasBeenShutdown = false; + + public ShutdownPrepareService(Settings settings, HttpServerTransport httpServerTransport, TerminationHandler terminationHandler) { + this.settings = settings; + this.httpServerTransport = httpServerTransport; + this.terminationHandler = terminationHandler; + } + + public static final Setting MAXIMUM_SHUTDOWN_TIMEOUT_SETTING = Setting.positiveTimeSetting( + "node.maximum_shutdown_grace_period", + TimeValue.ZERO, + Setting.Property.NodeScope + ); + + public static final Setting MAXIMUM_REINDEXING_TIMEOUT_SETTING = Setting.positiveTimeSetting( + "node.maximum_reindexing_grace_period", + TimeValue.timeValueSeconds(10), + Setting.Property.NodeScope + ); + + /** + * Invokes hooks to prepare this node to be closed. This should be called when Elasticsearch receives a request to shut down + * gracefully from the underlying operating system, before system resources are closed. This method will block + * until the node is ready to shut down. + *

      + * Note that this class is part of infrastructure to react to signals from the operating system - most graceful shutdown + * logic should use Node Shutdown, see {@link org.elasticsearch.cluster.metadata.NodesShutdownMetadata}. + */ + public void prepareForShutdown(TaskManager taskManager) { + assert hasBeenShutdown == false; + hasBeenShutdown = true; + final var maxTimeout = MAXIMUM_SHUTDOWN_TIMEOUT_SETTING.get(settings); + final var reindexTimeout = MAXIMUM_REINDEXING_TIMEOUT_SETTING.get(settings); + + record Stopper(String name, SubscribableListener listener) { + boolean isIncomplete() { + return listener().isDone() == false; + } + } + + final var stoppers = new ArrayList(); + final var allStoppersFuture = new PlainActionFuture(); + try (var listeners = new RefCountingListener(allStoppersFuture)) { + final BiConsumer stopperRunner = (name, action) -> { + final var stopper = new Stopper(name, new SubscribableListener<>()); + stoppers.add(stopper); + stopper.listener().addListener(listeners.acquire()); + new Thread(() -> { + try { + action.run(); + } catch (Exception ex) { + logger.warn("unexpected exception in shutdown task [" + stopper.name() + "]", ex); + } finally { + stopper.listener().onResponse(null); + } + }, stopper.name()).start(); + }; + + stopperRunner.accept("http-server-transport-stop", httpServerTransport::close); + stopperRunner.accept("async-search-stop", () -> awaitSearchTasksComplete(maxTimeout, taskManager)); + stopperRunner.accept("reindex-stop", () -> awaitReindexTasksComplete(reindexTimeout, taskManager)); + if (terminationHandler != null) { + stopperRunner.accept("termination-handler-stop", terminationHandler::handleTermination); + } + } + + final Supplier incompleteStoppersDescriber = () -> stoppers.stream() + .filter(Stopper::isIncomplete) + .map(Stopper::name) + .collect(Collectors.joining(", ", "[", "]")); + + try { + if (TimeValue.ZERO.equals(maxTimeout)) { + allStoppersFuture.get(); + } else { + 
allStoppersFuture.get(maxTimeout.millis(), TimeUnit.MILLISECONDS); + } + } catch (ExecutionException e) { + assert false : e; // listeners are never completed exceptionally + logger.warn("failed during graceful shutdown tasks", e); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + logger.warn("interrupted while waiting for graceful shutdown tasks: " + incompleteStoppersDescriber.get(), e); + } catch (TimeoutException e) { + logger.warn("timed out while waiting for graceful shutdown tasks: " + incompleteStoppersDescriber.get()); + } + } + + private void awaitTasksComplete(TimeValue timeout, String taskName, TaskManager taskManager) { + long millisWaited = 0; + while (true) { + long tasksRemaining = taskManager.getTasks().values().stream().filter(task -> taskName.equals(task.getAction())).count(); + if (tasksRemaining == 0) { + logger.debug("all " + taskName + " tasks complete"); + return; + } else { + // Let the system work on those tasks for a while. We're on a dedicated thread to manage app shutdown, so we + // literally just want to wait and not take up resources on this thread for now. Poll period chosen to allow short + // response times, but checking the tasks list is relatively expensive, and we don't want to waste CPU time we could + // be spending on finishing those tasks. 
+ final TimeValue pollPeriod = TimeValue.timeValueMillis(500); + millisWaited += pollPeriod.millis(); + if (TimeValue.ZERO.equals(timeout) == false && millisWaited >= timeout.millis()) { + logger.warn( + format("timed out after waiting [%s] for [%d] " + taskName + " tasks to finish", timeout.toString(), tasksRemaining) + ); + return; + } + logger.debug(format("waiting for [%s] " + taskName + " tasks to finish, next poll in [%s]", tasksRemaining, pollPeriod)); + try { + Thread.sleep(pollPeriod.millis()); + } catch (InterruptedException ex) { + logger.warn( + format( + "interrupted while waiting [%s] for [%d] " + taskName + " tasks to finish", + timeout.toString(), + tasksRemaining + ) + ); + return; + } + } + } + } + + private void awaitSearchTasksComplete(TimeValue asyncSearchTimeout, TaskManager taskManager) { + awaitTasksComplete(asyncSearchTimeout, TransportSearchAction.NAME, taskManager); + } + + private void awaitReindexTasksComplete(TimeValue asyncReindexTimeout, TaskManager taskManager) { + awaitTasksComplete(asyncReindexTimeout, ReindexAction.NAME, taskManager); + } + +} From f04bf5c3561f19f30f21ba28419c8e7ed6ed7b3a Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Wed, 23 Oct 2024 13:22:26 -0700 Subject: [PATCH 339/449] Apply workaround for synthetic source of object arrays inside nested objects (#115275) --- rest-api-spec/build.gradle | 1 + .../21_synthetic_source_stored.yml | 11 ++- .../index/mapper/DocumentParser.java | 6 +- .../index/mapper/DocumentParserContext.java | 39 +++++--- .../mapper/IgnoredSourceFieldMapper.java | 3 + .../index/mapper/MapperFeatures.java | 3 +- .../mapper/IgnoredSourceFieldMapperTests.java | 88 +++++++++++++++++++ 7 files changed, 132 insertions(+), 19 deletions(-) diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 7525ff2dc12d2..4bd293f0a8641 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -59,4 +59,5 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task -> 
task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling with vector_operations_count") task.skipTest("indices.sort/10_basic/Index Sort", "warning does not exist for compatibility") task.skipTest("search/330_fetch_fields/Test search rewrite", "warning does not exist for compatibility") + task.skipTest("indices.create/21_synthetic_source_stored/object param - nested object with stored array", "temporary until backported") }) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml index eab51427876aa..6a4e92f694220 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml @@ -319,8 +319,8 @@ object param - nested object array next to other fields: --- object param - nested object with stored array: - requires: - cluster_features: ["mapper.synthetic_source_keep", "mapper.bwc_workaround_9_0"] - reason: requires tracking ignored source + cluster_features: ["mapper.ignored_source.always_store_object_arrays_in_nested", "mapper.bwc_workaround_9_0"] + reason: requires fix to object array handling - do: indices.create: @@ -356,8 +356,11 @@ object param - nested object with stored array: sort: name - match: { hits.total.value: 2 } - match: { hits.hits.0._source.name: A } - - match: { hits.hits.0._source.nested_array_regular.0.b.c: [ 10, 100] } - - match: { hits.hits.0._source.nested_array_regular.1.b.c: [ 20, 200] } + # due to a workaround for #115261 + - match: { hits.hits.0._source.nested_array_regular.0.b.0.c: 10 } + - match: { hits.hits.0._source.nested_array_regular.0.b.1.c: 100 } + - match: { 
hits.hits.0._source.nested_array_regular.1.b.0.c: 20 } + - match: { hits.hits.0._source.nested_array_regular.1.b.1.c: 200 } - match: { hits.hits.1._source.name: B } - match: { hits.hits.1._source.nested_array_stored.0.b.0.c: 10 } - match: { hits.hits.1._source.nested_array_stored.0.b.1.c: 100 } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index bac987a3df96d..1ed0a117ddd89 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -810,8 +810,10 @@ private static void parseNonDynamicArray( boolean objectWithFallbackSyntheticSource = false; if (mapper instanceof ObjectMapper objectMapper) { mode = getSourceKeepMode(context, objectMapper.sourceKeepMode()); - objectWithFallbackSyntheticSource = (mode == Mapper.SourceKeepMode.ALL - || (mode == Mapper.SourceKeepMode.ARRAYS && objectMapper instanceof NestedObjectMapper == false)); + objectWithFallbackSyntheticSource = mode == Mapper.SourceKeepMode.ALL + // Inside nested objects we always store object arrays as a workaround for #115261. + || ((context.inNestedScope() || mode == Mapper.SourceKeepMode.ARRAYS) + && objectMapper instanceof NestedObjectMapper == false); } boolean fieldWithFallbackSyntheticSource = false; boolean fieldWithStoredArraySource = false; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java index ef87ce52fbabf..3b1f1a6d2809a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java @@ -104,6 +104,16 @@ public int get() { } } + /** + * Defines the scope parser is currently in. + * This is used for synthetic source related logic during parsing. 
+ */ + private enum Scope { + SINGLETON, + ARRAY, + NESTED + } + private final MappingLookup mappingLookup; private final MappingParserContext mappingParserContext; private final SourceToParse sourceToParse; @@ -112,7 +122,7 @@ public int get() { private final List ignoredFieldValues; private final List ignoredFieldsMissingValues; private boolean inArrayScopeEnabled; - private boolean inArrayScope; + private Scope currentScope; private final Map> dynamicMappers; private final DynamicMapperSize dynamicMappersSize; @@ -145,7 +155,7 @@ private DocumentParserContext( List ignoredFieldValues, List ignoredFieldsWithNoSource, boolean inArrayScopeEnabled, - boolean inArrayScope, + Scope currentScope, Map> dynamicMappers, Map dynamicObjectMappers, Map> dynamicRuntimeFields, @@ -167,7 +177,7 @@ private DocumentParserContext( this.ignoredFieldValues = ignoredFieldValues; this.ignoredFieldsMissingValues = ignoredFieldsWithNoSource; this.inArrayScopeEnabled = inArrayScopeEnabled; - this.inArrayScope = inArrayScope; + this.currentScope = currentScope; this.dynamicMappers = dynamicMappers; this.dynamicObjectMappers = dynamicObjectMappers; this.dynamicRuntimeFields = dynamicRuntimeFields; @@ -192,7 +202,7 @@ private DocumentParserContext(ObjectMapper parent, ObjectMapper.Dynamic dynamic, in.ignoredFieldValues, in.ignoredFieldsMissingValues, in.inArrayScopeEnabled, - in.inArrayScope, + in.currentScope, in.dynamicMappers, in.dynamicObjectMappers, in.dynamicRuntimeFields, @@ -224,7 +234,7 @@ protected DocumentParserContext( new ArrayList<>(), new ArrayList<>(), mappingParserContext.getIndexSettings().isSyntheticSourceSecondDocParsingPassEnabled(), - false, + Scope.SINGLETON, new HashMap<>(), new HashMap<>(), new HashMap<>(), @@ -335,7 +345,7 @@ public final void deduplicateIgnoredFieldValues(final Set fullNames) { public final DocumentParserContext addIgnoredFieldFromContext(IgnoredSourceFieldMapper.NameValue ignoredFieldWithNoSource) throws IOException { if (canAddIgnoredField()) { 
- if (inArrayScope) { + if (currentScope == Scope.ARRAY) { // The field is an array within an array, store all sub-array elements. ignoredFieldsMissingValues.add(ignoredFieldWithNoSource); return cloneWithRecordedSource(); @@ -379,10 +389,10 @@ public final DocumentParserContext maybeCloneForArray(Mapper mapper) throws IOEx if (canAddIgnoredField() && mapper instanceof ObjectMapper && mapper instanceof NestedObjectMapper == false - && inArrayScope == false + && currentScope != Scope.ARRAY && inArrayScopeEnabled) { DocumentParserContext subcontext = switchParser(parser()); - subcontext.inArrayScope = true; + subcontext.currentScope = Scope.ARRAY; return subcontext; } return this; @@ -673,6 +683,10 @@ public boolean isWithinCopyTo() { return false; } + public boolean inNestedScope() { + return currentScope == Scope.NESTED; + } + public final DocumentParserContext createChildContext(ObjectMapper parent) { return new Wrapper(parent, this); } @@ -716,10 +730,11 @@ public LuceneDocument doc() { return document; } }; - // Disable tracking array scopes for ignored source, as it would be added to the parent doc. - // Nested documents are added to preserve object structure within arrays of objects, so the use - // of ignored source for arrays inside them should be mostly redundant. - cloned.inArrayScope = false; + + cloned.currentScope = Scope.NESTED; + // Disable using second parsing pass since it currently can not determine which parts + // of source belong to which nested document. + // See #115261. 
cloned.inArrayScopeEnabled = false; return cloned; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java index 296c2c5311d9a..70d73fc2ffb9a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java @@ -58,6 +58,9 @@ public class IgnoredSourceFieldMapper extends MetadataFieldMapper { static final NodeFeature TRACK_IGNORED_SOURCE = new NodeFeature("mapper.track_ignored_source"); static final NodeFeature DONT_EXPAND_DOTS_IN_IGNORED_SOURCE = new NodeFeature("mapper.ignored_source.dont_expand_dots"); + static final NodeFeature ALWAYS_STORE_OBJECT_ARRAYS_IN_NESTED_OBJECTS = new NodeFeature( + "mapper.ignored_source.always_store_object_arrays_in_nested" + ); /* Setting to disable encoding and writing values for this field. diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java index 31c89b2fc8ad4..026c7c98d7aeb 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java @@ -62,7 +62,8 @@ public Set getTestFeatures() { return Set.of( RangeFieldMapper.DATE_RANGE_INDEXING_FIX, IgnoredSourceFieldMapper.DONT_EXPAND_DOTS_IN_IGNORED_SOURCE, - SourceFieldMapper.REMOVE_SYNTHETIC_SOURCE_ONLY_VALIDATION + SourceFieldMapper.REMOVE_SYNTHETIC_SOURCE_ONLY_VALIDATION, + IgnoredSourceFieldMapper.ALWAYS_STORE_OBJECT_ARRAYS_IN_NESTED_OBJECTS ); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java index 934744ef3ef96..7a4ce8bcb03fa 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java @@ -962,6 +962,94 @@ public void testArrayWithNestedObjects() throws IOException { {"path":{"to":[{"id":[1,20,3]},{"id":10},{"id":0}]}}""", syntheticSource); } + public void testObjectArrayWithinNestedObjects() throws IOException { + DocumentMapper documentMapper = createMapperService(syntheticSourceMapping(b -> { + b.startObject("path").startObject("properties"); + { + b.startObject("to").field("type", "nested").startObject("properties"); + { + b.startObject("obj").startObject("properties"); + { + b.startObject("id").field("type", "integer").field("synthetic_source_keep", "arrays").endObject(); + } + b.endObject().endObject(); + } + b.endObject().endObject(); + } + b.endObject().endObject(); + })).documentMapper(); + + var syntheticSource = syntheticSource(documentMapper, b -> { + b.startObject("path"); + { + b.startObject("to"); + { + b.startArray("obj"); + { + b.startObject().array("id", 1, 20, 3).endObject(); + b.startObject().field("id", 10).endObject(); + } + b.endArray(); + } + b.endObject(); + } + b.endObject(); + }); + assertEquals(""" + {"path":{"to":{"obj":[{"id":[1,20,3]},{"id":10}]}}}""", syntheticSource); + } + + public void testObjectArrayWithinNestedObjectsArray() throws IOException { + DocumentMapper documentMapper = createMapperService(syntheticSourceMapping(b -> { + b.startObject("path").startObject("properties"); + { + b.startObject("to").field("type", "nested").startObject("properties"); + { + b.startObject("obj").startObject("properties"); + { + b.startObject("id").field("type", "integer").field("synthetic_source_keep", "arrays").endObject(); + } + b.endObject().endObject(); + } + b.endObject().endObject(); + } + b.endObject().endObject(); + })).documentMapper(); + + var syntheticSource = syntheticSource(documentMapper, b -> { + b.startObject("path"); + { + 
b.startArray("to"); + { + b.startObject(); + { + b.startArray("obj"); + { + b.startObject().array("id", 1, 20, 3).endObject(); + b.startObject().field("id", 10).endObject(); + } + b.endArray(); + } + b.endObject(); + b.startObject(); + { + b.startArray("obj"); + { + b.startObject().array("id", 200, 300, 500).endObject(); + b.startObject().field("id", 100).endObject(); + } + b.endArray(); + } + b.endObject(); + } + b.endArray(); + } + b.endObject(); + }); + assertEquals(""" + {"path":{"to":[{"obj":[{"id":[1,20,3]},{"id":10}]},{"obj":[{"id":[200,300,500]},{"id":100}]}]}}""", syntheticSource); + } + public void testArrayWithinArray() throws IOException { DocumentMapper documentMapper = createMapperService(syntheticSourceMapping(b -> { b.startObject("path"); From d8bcbb6bede44334719d1879bf8603425e29a731 Mon Sep 17 00:00:00 2001 From: Paul Tavares <56442535+paul-tavares@users.noreply.github.com> Date: Wed, 23 Oct 2024 16:29:34 -0400 Subject: [PATCH 340/449] [Security Solution] Add `create_index` to `kibana_system` role for Elastic Defend indices (#115241) Adds create_index privilege to the kibana_system role for Elastic Defend internal indices Indices: ``` .logs-endpoint.heartbeat-* .logs-endpoint.diagnostic.collection-* .logs-endpoint.action.responses-* ``` --- docs/changelog/115241.yaml | 6 ++++ .../KibanaOwnedReservedRoleDescriptors.java | 19 +++++++---- .../authz/store/ReservedRolesStoreTests.java | 34 +++++++++---------- 3 files changed, 35 insertions(+), 24 deletions(-) create mode 100644 docs/changelog/115241.yaml diff --git a/docs/changelog/115241.yaml b/docs/changelog/115241.yaml new file mode 100644 index 0000000000000..b7119d7f6aaeb --- /dev/null +++ b/docs/changelog/115241.yaml @@ -0,0 +1,6 @@ +pr: 115241 +summary: "[Security Solution] Add `create_index` to `kibana_system` role for index/DS\ + \ `.logs-endpoint.action.responses-*`" +area: Authorization +type: enhancement +issues: [] diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index 0028508e87f32..5fb753ab55aab 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -152,8 +152,11 @@ static RoleDescriptor kibanaSystem(String name) { // Data telemetry reads mappings, metadata and stats of indices RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("view_index_metadata", "monitor").build(), // Endpoint diagnostic information. Kibana reads from these indices to send - // telemetry - RoleDescriptor.IndicesPrivileges.builder().indices(".logs-endpoint.diagnostic.collection-*").privileges("read").build(), + // telemetry and also creates the index when policies are first created + RoleDescriptor.IndicesPrivileges.builder() + .indices(".logs-endpoint.diagnostic.collection-*") + .privileges("read", "create_index") + .build(), // Fleet secrets. Kibana can only write to this index. RoleDescriptor.IndicesPrivileges.builder() .indices(".fleet-secrets*") @@ -277,17 +280,19 @@ static RoleDescriptor kibanaSystem(String name) { ) .build(), // Endpoint specific action responses. Kibana reads and writes (for third party - // agents) to the index - // to display action responses to the user. + // agents) to the index to display action responses to the user. + // `create_index`: is necessary in order to ensure that the DOT datastream index is + // created by Kibana in order to avoid errors on the Elastic Defend side when streaming + // documents to it. 
RoleDescriptor.IndicesPrivileges.builder() .indices(".logs-endpoint.action.responses-*") - .privileges("auto_configure", "read", "write") + .privileges("auto_configure", "read", "write", "create_index") .build(), // Endpoint specific actions. Kibana reads and writes to this index to track new // actions and display them. RoleDescriptor.IndicesPrivileges.builder() .indices(".logs-endpoint.actions-*") - .privileges("auto_configure", "read", "write") + .privileges("auto_configure", "read", "write", "create_index") .build(), // Legacy Osquery manager specific action responses. Kibana reads from these to // display responses to the user. @@ -475,7 +480,7 @@ static RoleDescriptor kibanaSystem(String name) { RoleDescriptor.IndicesPrivileges.builder().indices(".slo-observability.*").privileges("all").build(), // Endpoint heartbeat. Kibana reads from these to determine metering/billing for // endpoints. - RoleDescriptor.IndicesPrivileges.builder().indices(".logs-endpoint.heartbeat-*").privileges("read").build(), + RoleDescriptor.IndicesPrivileges.builder().indices(".logs-endpoint.heartbeat-*").privileges("read", "create_index").build(), // For connectors telemetry. 
Will be removed once we switched to connectors API RoleDescriptor.IndicesPrivileges.builder().indices(".elastic-connectors*").privileges("read").build() }, null, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index 26b306d6f1334..a71ac6a9b51fd 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -801,7 +801,7 @@ public void testKibanaSystemRole() { assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat( kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), - is(false) + is(true) ); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false)); @@ -949,7 +949,7 @@ public void testKibanaSystemRole() { ); }); - // read-only index for Endpoint and Osquery manager specific action responses + // Elastic Defend internal index for response actions results Arrays.asList(".logs-endpoint.action.responses-" + randomAlphaOfLength(randomIntBetween(0, 13))).forEach((index) -> { final IndexAbstraction indexAbstraction = mockIndexAbstraction(index); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false)); @@ -959,10 +959,7 @@ public void testKibanaSystemRole() { is(false) ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); - assertThat( - 
kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), - is(false) - ); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); @@ -1069,10 +1066,7 @@ public void testKibanaSystemRole() { is(false) ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); - assertThat( - kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), - is(false) - ); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); @@ -1097,10 +1091,7 @@ public void testKibanaSystemRole() { is(false) ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); - assertThat( - kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), - is(false) - ); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); 
assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); @@ -1319,12 +1310,21 @@ public void testKibanaSystemRole() { final boolean isAlsoAutoCreateIndex = indexName.startsWith(".logs-endpoint.actions-") || indexName.startsWith(".logs-endpoint.action.responses-"); + + final boolean isAlsoCreateIndex = indexName.startsWith(".logs-endpoint.actions-") + || indexName.startsWith(".logs-endpoint.action.responses-") + || indexName.startsWith(".logs-endpoint.diagnostic.collection-") + || indexName.startsWith(".logs-endpoint.heartbeat-"); + assertThat( kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), - is(false) + is(isAlsoCreateIndex) + ); + assertThat(kibanaRole.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(indexAbstraction), is(isAlsoCreateIndex)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(CreateDataStreamAction.NAME).test(indexAbstraction), + is(isAlsoCreateIndex) ); - assertThat(kibanaRole.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(indexAbstraction), is(isAlsoAutoCreateIndex)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateDataStreamAction.NAME).test(indexAbstraction), is(false)); assertThat( kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(isAlsoAutoCreateIndex) From 0fc0922ff0aade7189fadda137bb5aa8a0474997 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Wed, 23 Oct 2024 22:10:45 +0100 Subject: [PATCH 341/449] [ML] Fix NPE in Get Deployment Stats (#115404) If a node has been removed from the cluster and the trained model assignment has not been updated the GET stats action can have an inconsistent view where it thinks a model is deployed on the removed node. The bug only affected nodes with failed deployments. 
--- docs/changelog/115404.yaml | 5 +++ .../TransportGetDeploymentStatsAction.java | 2 +- ...ransportGetDeploymentStatsActionTests.java | 39 +++++++++++++++++++ 3 files changed, 45 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/115404.yaml diff --git a/docs/changelog/115404.yaml b/docs/changelog/115404.yaml new file mode 100644 index 0000000000000..e443b152955f3 --- /dev/null +++ b/docs/changelog/115404.yaml @@ -0,0 +1,5 @@ +pr: 115404 +summary: Fix NPE in Get Deployment Stats +area: Machine Learning +type: bug +issues: [] diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDeploymentStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDeploymentStatsAction.java index 980cdc09252cb..9ebc510af4f4d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDeploymentStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDeploymentStatsAction.java @@ -220,7 +220,7 @@ static GetDeploymentStatsAction.Response addFailedRoutes( // add nodes from the failures that were not in the task responses for (var nodeRoutingState : nodeToRoutingStates.entrySet()) { - if (visitedNodes.contains(nodeRoutingState.getKey()) == false) { + if ((visitedNodes.contains(nodeRoutingState.getKey()) == false) && nodes.nodeExists(nodeRoutingState.getKey())) { updatedNodeStats.add( AssignmentStats.NodeStats.forNotStartedState( nodes.get(nodeRoutingState.getKey()), diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetDeploymentStatsActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetDeploymentStatsActionTests.java index 4a66be4a773f5..2490cd8d5ab21 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetDeploymentStatsActionTests.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetDeploymentStatsActionTests.java @@ -148,6 +148,45 @@ public void testAddFailedRoutes_TaskResultIsOverwritten() throws UnknownHostExce assertEquals(RoutingState.FAILED, results.get(0).getNodeStats().get(1).getRoutingState().getState()); } + public void testAddFailedRoutes_MissingNode() throws UnknownHostException { + DiscoveryNodes nodes = buildNodes("node1", "node2"); + var missingNode = DiscoveryNodeUtils.create( + "node3", + new TransportAddress(InetAddress.getByAddress(new byte[] { (byte) 192, (byte) 168, (byte) 0, (byte) 1 }), 9203) + ); + + List nodeStatsList = new ArrayList<>(); + nodeStatsList.add(AssignmentStatsTests.randomNodeStats(nodes.get("node1"))); + nodeStatsList.add(AssignmentStatsTests.randomNodeStats(nodes.get("node2"))); + + var model1 = new AssignmentStats( + "model1", + "deployment1", + randomBoolean() ? null : randomIntBetween(1, 8), + randomBoolean() ? null : randomIntBetween(1, 8), + null, + randomBoolean() ? null : randomIntBetween(1, 10000), + randomBoolean() ? 
null : ByteSizeValue.ofBytes(randomLongBetween(1, 1000000)), + Instant.now(), + nodeStatsList, + randomFrom(Priority.values()) + ); + var response = new GetDeploymentStatsAction.Response(Collections.emptyList(), Collections.emptyList(), List.of(model1), 1); + + // failed state for node 3 conflicts + Map> badRoutes = new HashMap<>(); + Map nodeRoutes = new HashMap<>(); + nodeRoutes.put("node3", new RoutingInfo(1, 1, RoutingState.FAILED, "failed on node3")); + badRoutes.put(createAssignment("model1"), nodeRoutes); + + var modified = TransportGetDeploymentStatsAction.addFailedRoutes(response, badRoutes, nodes); + List results = modified.getStats().results(); + assertThat(results, hasSize(1)); + assertThat(results.get(0).getNodeStats(), hasSize(2)); // 3 + assertEquals("node1", results.get(0).getNodeStats().get(0).getNode().getId()); + assertEquals("node2", results.get(0).getNodeStats().get(1).getNode().getId()); + } + private DiscoveryNodes buildNodes(String... nodeIds) throws UnknownHostException { InetAddress inetAddress = InetAddress.getByAddress(new byte[] { (byte) 192, (byte) 168, (byte) 0, (byte) 1 }); DiscoveryNodes.Builder builder = DiscoveryNodes.builder(); From f2b146ed1c19f3801224c39c67a49800d980bca9 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 23 Oct 2024 17:38:10 -0400 Subject: [PATCH 342/449] ESQL: Fix test muting (#115448) (#115466) Fix the test muting on the test for grapheme clusters - it should only allow the test if we're on the 20+ jvm. 
Closes #114536 --- .../src/test/java/org/elasticsearch/xpack/esql/CsvTests.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 63233f0c46a0d..3119fd4b52153 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -259,6 +259,10 @@ public final void test() throws Throwable { testCase.requiredCapabilities, everyItem(in(EsqlCapabilities.capabilities(true))) ); + assumeTrue( + "Capability not supported in this build", + EsqlCapabilities.capabilities(false).containsAll(testCase.requiredCapabilities) + ); } else { for (EsqlCapabilities.Cap c : EsqlCapabilities.Cap.values()) { if (false == c.isEnabled()) { From 2f64d2037a23de4d9d0da3efb7dca182e066ef48 Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Thu, 24 Oct 2024 00:42:58 +0200 Subject: [PATCH 343/449] [test] Unmute FsDirectoryFactoryTests#testPreload (#115438) Resolve #110211 --- muted-tests.yml | 3 --- .../org/elasticsearch/index/store/FsDirectoryFactoryTests.java | 2 -- 2 files changed, 5 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index bd0145611237b..8b9c3cc6ce712 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -17,9 +17,6 @@ tests: - class: "org.elasticsearch.xpack.deprecation.DeprecationHttpIT" issue: "https://github.com/elastic/elasticsearch/issues/108628" method: "testDeprecatedSettingsReturnWarnings" -- class: org.elasticsearch.index.store.FsDirectoryFactoryTests - method: testPreload - issue: https://github.com/elastic/elasticsearch/issues/110211 - class: "org.elasticsearch.xpack.searchablesnapshots.FrozenSearchableSnapshotsIntegTests" issue: "https://github.com/elastic/elasticsearch/issues/110408" method: "testCreateAndRestorePartialSearchableSnapshot" diff --git 
a/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java b/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java index 38e6ca0be0647..b0a14515f2fbc 100644 --- a/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java +++ b/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java @@ -115,8 +115,6 @@ private void doTestPreload(String... preload) throws IOException { var func = fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory); assertNotEquals(fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory), MMapDirectory.ALL_FILES); assertNotEquals(fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory), MMapDirectory.NO_FILES); - assertTrue(func.test("foo.dvd", newIOContext(random()))); - assertTrue(func.test("foo.tmp", newIOContext(random()))); for (String ext : preload) { assertTrue("ext: " + ext, func.test("foo." + ext, newIOContext(random()))); } From 92ecd36a031de094cda642f14000bea545b01740 Mon Sep 17 00:00:00 2001 From: Fang Xing <155562079+fang-xing-esql@users.noreply.github.com> Date: Wed, 23 Oct 2024 22:00:48 -0400 Subject: [PATCH 344/449] [ES|QL] Simplify syntax of named parameter for identifier and pattern (#115061) * simplify syntax of named parameter for identifier and pattern --- docs/changelog/115061.yaml | 5 + .../xpack/esql/qa/rest/RestEsqlTestCase.java | 23 ++-- .../xpack/esql/action/EsqlCapabilities.java | 12 +- .../xpack/esql/action/RequestXContent.java | 105 +++++++++--------- .../esql/action/EsqlQueryRequestTests.java | 76 +++++++------ .../xpack/esql/analysis/AnalyzerTests.java | 8 +- .../esql/parser/StatementParserTests.java | 8 +- .../rest-api-spec/test/esql/10_basic.yml | 4 +- 8 files changed, 122 insertions(+), 119 deletions(-) create mode 100644 docs/changelog/115061.yaml diff --git a/docs/changelog/115061.yaml b/docs/changelog/115061.yaml new file mode 100644 index 0000000000000..7d40d5ae2629e --- /dev/null +++ b/docs/changelog/115061.yaml @@ -0,0 +1,5 
@@ +pr: 115061 +summary: "[ES|QL] Simplify syntax of named parameter for identifier and pattern" +area: ES|QL +type: bug +issues: [] diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index 2a50988e9e35e..8c52a24231a41 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -672,7 +672,7 @@ public void testErrorMessageForArrayValuesInParams() throws IOException { public void testNamedParamsForIdentifierAndIdentifierPatterns() throws IOException { assumeTrue( "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES.isEnabled() + EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() ); bulkLoadTestData(10); // positive @@ -684,12 +684,9 @@ public void testNamedParamsForIdentifierAndIdentifierPatterns() throws IOExcepti ) ) .params( - "[{\"n1\" : {\"value\" : \"integer\" , \"kind\" : \"identifier\"}}," - + "{\"n2\" : {\"value\" : \"short\" , \"kind\" : \"identifier\"}}, " - + "{\"n3\" : {\"value\" : \"double\" , \"kind\" : \"identifier\"}}," - + "{\"n4\" : {\"value\" : \"boolean\" , \"kind\" : \"identifier\"}}, " - + "{\"n5\" : {\"value\" : \"xx*\" , \"kind\" : \"pattern\"}}, " - + "{\"fn1\" : {\"value\" : \"max\" , \"kind\" : \"identifier\"}}]" + "[{\"n1\" : {\"identifier\" : \"integer\"}}, {\"n2\" : {\"identifier\" : \"short\"}}, " + + "{\"n3\" : {\"identifier\" : \"double\"}}, {\"n4\" : {\"identifier\" : \"boolean\"}}, " + + "{\"n5\" : {\"pattern\" : \"xx*\"}}, {\"fn1\" : {\"identifier\" : \"max\"}}]" ); Map result = runEsql(query); Map colA = Map.of("name", "boolean", "type", "boolean"); @@ -728,10 +725,7 @@ 
public void testNamedParamsForIdentifierAndIdentifierPatterns() throws IOExcepti ResponseException.class, () -> runEsqlSync( requestObjectBuilder().query(format(null, "from {} | {}", testIndexName(), command.getKey())) - .params( - "[{\"n1\" : {\"value\" : \"integer\" , \"kind\" : \"identifier\"}}," - + "{\"n2\" : {\"value\" : \"short\" , \"kind\" : \"identifier\"}}]" - ) + .params("[{\"n1\" : {\"identifier\" : \"integer\"}}, {\"n2\" : {\"identifier\" : \"short\"}}]") ) ); error = re.getMessage(); @@ -751,9 +745,8 @@ public void testNamedParamsForIdentifierAndIdentifierPatterns() throws IOExcepti () -> runEsqlSync( requestObjectBuilder().query(format(null, "from {} | {}", testIndexName(), command.getKey())) .params( - "[{\"n1\" : {\"value\" : \"`n1`\" , \"kind\" : \"identifier\"}}," - + "{\"n2\" : {\"value\" : \"`n2`\" , \"kind\" : \"identifier\"}}, " - + "{\"n3\" : {\"value\" : \"`n3`\" , \"kind\" : \"identifier\"}}]" + "[{\"n1\" : {\"identifier\" : \"`n1`\"}}, {\"n2\" : {\"identifier\" : \"`n2`\"}}, " + + "{\"n3\" : {\"identifier\" : \"`n3`\"}}]" ) ) ); @@ -781,7 +774,7 @@ public void testNamedParamsForIdentifierAndIdentifierPatterns() throws IOExcepti ResponseException.class, () -> runEsqlSync( requestObjectBuilder().query(format(null, "from {} | ?cmd {}", testIndexName(), command.getValue())) - .params("[{\"cmd\" : {\"value\" : \"" + command.getKey() + "\", \"kind\" : \"identifier\"}}]") + .params("[{\"cmd\" : {\"identifier\" : \"" + command.getKey() + "\"}}]") ) ); error = re.getMessage(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index dfca6ab2bf814..f22ad07a4c6f6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -384,11 +384,6 @@ public enum Cap { */ 
DATE_DIFF_YEAR_CALENDARIAL, - /** - * Support named parameters for field names. - */ - NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES(Build.current().isSnapshot()), - /** * Fix sorting not allowed on _source and counters. */ @@ -431,7 +426,12 @@ public enum Cap { /** * This enables 60_usage.yml "Basic ESQL usage....non-snapshot" version test. See also the previous capability. */ - NON_SNAPSHOT_TEST_FOR_TELEMETRY(Build.current().isSnapshot() == false); + NON_SNAPSHOT_TEST_FOR_TELEMETRY(Build.current().isSnapshot() == false), + + /** + * Support simplified syntax for named parameters for field and function names. + */ + NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX(Build.current().isSnapshot()); private final boolean enabled; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java index 71aface993ab9..d8904288523a7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.esql.action; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.xcontent.ObjectParser; @@ -90,19 +89,6 @@ String fields() { private static final ObjectParser SYNC_PARSER = objectParserSync(EsqlQueryRequest::syncEsqlQueryRequest); private static final ObjectParser ASYNC_PARSER = objectParserAsync(EsqlQueryRequest::asyncEsqlQueryRequest); - private enum ParamParsingKey { - VALUE, - KIND - } - - private static final Map paramParsingKeys = Maps.newMapWithExpectedSize(ParamParsingKey.values().length); - - static { - for (ParamParsingKey e : ParamParsingKey.values()) { - paramParsingKeys.put(e.name(), 
e); - } - } - /** Parses a synchronous request. */ static EsqlQueryRequest parseSync(XContentParser parser) { return SYNC_PARSER.apply(parser, null); @@ -180,25 +166,21 @@ private static QueryParams parseParams(XContentParser p) throws IOException { ); } for (Map.Entry entry : param.fields.entrySet()) { - ParserUtils.ParamClassification classification; + ParserUtils.ParamClassification classification = null; + paramValue = null; String paramName = entry.getKey(); checkParamNameValidity(paramName, errors, loc); - if (EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES.isEnabled() - && entry.getValue() instanceof Map values) {// parameter specified as key:value pairs - Map paramElements = Maps.newMapWithExpectedSize(2); - for (Object keyName : values.keySet()) { - ParamParsingKey paramType = checkParamValueKeysValidity(keyName.toString(), errors, loc); - if (paramType != null) { - paramElements.put(paramType, values.get(keyName)); + if (EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() + && entry.getValue() instanceof Map value) {// parameter specified as a key:value pair + checkParamValueSize(paramName, value, loc, errors); + for (Object keyName : value.keySet()) { + classification = getParamClassification(keyName.toString(), errors, loc); + if (classification != null) { + paramValue = value.get(keyName); + checkParamValueValidity(classification, paramValue, loc, errors); } } - paramValue = paramElements.get(ParamParsingKey.VALUE); - if (paramValue == null && values.size() > 1) { // require non-null value for identifier and pattern - errors.add(new XContentParseException(loc, "[" + entry + "] does not have a value specified")); - } - - classification = getClassificationForParam(paramElements, loc, errors); } else {// parameter specifies as a value only paramValue = entry.getValue(); classification = VALUE; @@ -280,12 +262,45 @@ private static void checkParamNameValidity(String name, List paramValue, + 
XContentLocation loc, + List errors + ) { + if (paramValue.size() == 1) { + return; + } + String errorMessage; + if (paramValue.isEmpty()) { + errorMessage = " has no valid param attribute"; + } else { + errorMessage = " has multiple param attributes [" + + paramValue.keySet().stream().map(Object::toString).collect(Collectors.joining(", ")) + + "]"; + } + errors.add( + new XContentParseException( + loc, + "[" + + paramName + + "]" + + errorMessage + + ", only one of " + + Arrays.stream(ParserUtils.ParamClassification.values()) + .map(ParserUtils.ParamClassification::name) + .collect(Collectors.joining(", ")) + + " can be defined in a param" + ) + ); + } + + private static ParserUtils.ParamClassification getParamClassification( String paramKeyName, List errors, XContentLocation loc ) { - ParamParsingKey paramType = paramParsingKeys.get(paramKeyName.toUpperCase(Locale.ROOT)); + ParserUtils.ParamClassification paramType = paramClassifications.get(paramKeyName.toUpperCase(Locale.ROOT)); if (paramType == null) { errors.add( new XContentParseException( @@ -293,38 +308,21 @@ private static ParamParsingKey checkParamValueKeysValidity( "[" + paramKeyName + "] is not a valid param attribute, a valid attribute is any of " - + Arrays.stream(ParamParsingKey.values()).map(ParamParsingKey::name).collect(Collectors.joining(", ")) + + Arrays.stream(ParserUtils.ParamClassification.values()) + .map(ParserUtils.ParamClassification::name) + .collect(Collectors.joining(", ")) ) ); } return paramType; } - private static ParserUtils.ParamClassification getClassificationForParam( - Map paramElements, + private static void checkParamValueValidity( + ParserUtils.ParamClassification classification, + Object value, XContentLocation loc, List errors ) { - Object value = paramElements.get(ParamParsingKey.VALUE); - Object kind = paramElements.get(ParamParsingKey.KIND); - ParserUtils.ParamClassification classification = VALUE; - if (kind != null) { - classification = 
paramClassifications.get(kind.toString().toUpperCase(Locale.ROOT)); - if (classification == null) { - errors.add( - new XContentParseException( - loc, - "[" - + kind - + "] is not a valid param kind, a valid kind is any of " - + Arrays.stream(ParserUtils.ParamClassification.values()) - .map(ParserUtils.ParamClassification::name) - .collect(Collectors.joining(", ")) - ) - ); - } - } - // If a param is an "identifier" or a "pattern", validate it is a string. // If a param is a "pattern", validate it contains *. if (classification == IDENTIFIER || classification == PATTERN) { @@ -345,6 +343,5 @@ private static ParserUtils.ParamClassification getClassificationForParam( ); } } - return classification; } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java index 7deaff6ebe6bb..dcb83dadfcf96 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java @@ -146,7 +146,7 @@ public void testNamedParams() throws IOException { public void testNamedParamsForIdentifiersPatterns() throws IOException { assumeTrue( "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES.isEnabled() + EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() ); String query = randomAlphaOfLengthBetween(1, 100); boolean columnar = randomBoolean(); @@ -154,12 +154,12 @@ public void testNamedParamsForIdentifiersPatterns() throws IOException { QueryBuilder filter = randomQueryBuilder(); String paramsString = """ - ,"params":[ {"n1" : {"value" : "f1", "kind" : "Identifier"}}, - {"n2" : {"value" : "f1*", "Kind" : "identifier"}}, - {"n3" : {"value" : "f.1*", "KIND" : "Pattern"}}, - {"n4" 
: {"value" : "*", "kind" : "pattern"}}, - {"n5" : {"value" : "esql", "kind" : "Value"}}, - {"n_6" : {"value" : "null", "kind" : "identifier"}}, + ,"params":[ {"n1" : {"identifier" : "f1"}}, + {"n2" : {"Identifier" : "f1*"}}, + {"n3" : {"pattern" : "f.1*"}}, + {"n4" : {"Pattern" : "*"}}, + {"n5" : {"Value" : "esql"}}, + {"n_6" : {"identifier" : "null"}}, {"n7_" : {"value" : "f.1.1"}}] }"""; List params = List.of( @@ -262,7 +262,7 @@ public void testInvalidParams() throws IOException { public void testInvalidParamsForIdentifiersPatterns() throws IOException { assumeTrue( "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES.isEnabled() + EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() ); String query = randomAlphaOfLengthBetween(1, 100); boolean columnar = randomBoolean(); @@ -271,13 +271,12 @@ public void testInvalidParamsForIdentifiersPatterns() throws IOException { // invalid named parameter for identifier and identifier pattern String paramsString1 = """ - "params":[ {"n1" : {"v" : "v1"}}, {"n2" : {"value" : "v2", "type" : "identifier"}}, - {"n3" : {"value" : "v3", "kind" : "id" }}, {"n4" : {"value" : "v4", "kind" : true}}, - {"n5" : {"value" : "v5", "kind" : ["identifier", "pattern"]}}, {"n6" : {"value" : "v6", "kind" : 0}}, - {"n7" : {"value" : 1, "kind" : "Identifier"}}, {"n8" : {"value" : true, "kind" : "Pattern"}}, - {"n9" : {"kind" : "identifier"}}, {"n10" : {"v" : "v10", "kind" : "identifier"}}, - {"n11" : {"value" : "v11", "kind" : "pattern"}}, {"n12" : {"value" : ["x", "y"], "kind" : "identifier"}}, - {"n13" : {"value" : "v13", "kind" : "identifier", "type" : "pattern"}}, {"n14" : {"v" : "v14", "kind" : "value"}}]"""; + "params":[{"n1" : {"v" : "v1"}}, {"n2" : {"identifier" : "v2", "pattern" : "v2"}}, + {"n3" : {"identifier" : "v3", "pattern" : "v3"}}, {"n4" : {"pattern" : "v4.1", "value" : "v4.2"}}, + {"n5" : {"value" : 
{"a5" : "v5"}}},{"n6" : {"identifier" : {"a6.1" : "v6.1", "a6.2" : "v6.2"}}}, {"n7" : {}}, + {"n8" : {"value" : ["x", "y"]}}, {"n9" : {"identifier" : ["x", "y"]}}, {"n10" : {"pattern" : ["x*", "y*"]}}, + {"n11" : {"identifier" : 1}}, {"n12" : {"pattern" : true}}, {"n13" : {"identifier" : null}}, {"n14" : {"pattern" : "v14"}}, + {"n15" : {"pattern" : "v15*"}, "n16" : {"identifier" : "v16"}}]"""; String json1 = String.format(Locale.ROOT, """ { %s @@ -291,28 +290,37 @@ public void testInvalidParamsForIdentifiersPatterns() throws IOException { assertThat( e1.getCause().getMessage(), containsString( - "Failed to parse params: [2:16] [v] is not a valid param attribute, a valid attribute is any of VALUE, KIND; " - + "[2:39] [type] is not a valid param attribute, a valid attribute is any of VALUE, KIND; " - + "[3:1] [id] is not a valid param kind, a valid kind is any of VALUE, IDENTIFIER, PATTERN; " - + "[3:44] [true] is not a valid param kind, a valid kind is any of VALUE, IDENTIFIER, PATTERN; " - + "[4:1] [[identifier, pattern]] is not a valid param kind, a valid kind is any of VALUE, IDENTIFIER, PATTERN; " - + "[4:64] [0] is not a valid param kind, a valid kind is any of VALUE, IDENTIFIER, PATTERN; " - + "[5:1] [1] is not a valid value for IDENTIFIER parameter, a valid value for IDENTIFIER parameter is a string; " - + "[5:48] [true] is not a valid value for PATTERN parameter, " + "[2:15] [v] is not a valid param attribute, a valid attribute is any of VALUE, IDENTIFIER, PATTERN; " + + "[2:38] [n2] has multiple param attributes [identifier, pattern], " + + "only one of VALUE, IDENTIFIER, PATTERN can be defined in a param; " + + "[2:38] [v2] is not a valid value for PATTERN parameter, " + "a valid value for PATTERN parameter is a string and contains *; " - + "[6:1] [null] is not a valid value for IDENTIFIER parameter, a valid value for IDENTIFIER parameter is a string; " - + "[6:35] [v] is not a valid param attribute, a valid attribute is any of VALUE, KIND; " - + "[6:35] 
[n10={v=v10, kind=identifier}] does not have a value specified; " - + "[6:35] [null] is not a valid value for IDENTIFIER parameter, " + + "[3:1] [n3] has multiple param attributes [identifier, pattern], " + + "only one of VALUE, IDENTIFIER, PATTERN can be defined in a param; " + + "[3:1] [v3] is not a valid value for PATTERN parameter, " + + "a valid value for PATTERN parameter is a string and contains *; " + + "[3:51] [n4] has multiple param attributes [pattern, value], " + + "only one of VALUE, IDENTIFIER, PATTERN can be defined in a param; " + + "[3:51] [v4.1] is not a valid value for PATTERN parameter, " + + "a valid value for PATTERN parameter is a string and contains *; " + + "[4:1] n5={value={a5=v5}} is not supported as a parameter; " + + "[4:36] [{a6.1=v6.1, a6.2=v6.2}] is not a valid value for IDENTIFIER parameter, " + "a valid value for IDENTIFIER parameter is a string; " - + "[7:1] [v11] is not a valid value for PATTERN parameter, " + + "[4:36] n6={identifier={a6.1=v6.1, a6.2=v6.2}} is not supported as a parameter; " + + "[4:98] [n7] has no valid param attribute, only one of VALUE, IDENTIFIER, PATTERN can be defined in a param; " + + "[5:1] n8={value=[x, y]} is not supported as a parameter; " + + "[5:34] [[x, y]] is not a valid value for IDENTIFIER parameter, a valid value for IDENTIFIER parameter is a string; " + + "[5:34] n9={identifier=[x, y]} is not supported as a parameter; " + + "[5:72] [[x*, y*]] is not a valid value for PATTERN parameter, " + + "a valid value for PATTERN parameter is a string and contains *; " + + "[5:72] n10={pattern=[x*, y*]} is not supported as a parameter; " + + "[6:1] [1] is not a valid value for IDENTIFIER parameter, a valid value for IDENTIFIER parameter is a string; " + + "[6:31] [true] is not a valid value for PATTERN parameter, " + + "a valid value for PATTERN parameter is a string and contains *; " + + "[6:61] [null] is not a valid value for IDENTIFIER parameter, a valid value for IDENTIFIER parameter is a string; " + 
+ "[6:94] [v14] is not a valid value for PATTERN parameter, " + "a valid value for PATTERN parameter is a string and contains *; " - + "[7:50] [[x, y]] is not a valid value for IDENTIFIER parameter," - + " a valid value for IDENTIFIER parameter is a string; " - + "[7:50] n12={kind=identifier, value=[x, y]} is not supported as a parameter; " - + "[8:1] [type] is not a valid param attribute, a valid attribute is any of VALUE, KIND; " - + "[8:73] [v] is not a valid param attribute, a valid attribute is any of VALUE, KIND; " - + "[8:73] [n14={v=v14, kind=value}] does not have a value specified" + + "[7:1] Cannot parse more than one key:value pair as parameter, found [{n16:{identifier=v16}}, {n15:{pattern=v15*}}]" ) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 3048686efbe44..c18f55a651408 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -2087,7 +2087,7 @@ public void testCoalesceWithMixedNumericTypes() { public void testNamedParamsForIdentifiers() { assumeTrue( "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES.isEnabled() + EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() ); assertProjectionWithMapping( """ @@ -2181,7 +2181,7 @@ public void testNamedParamsForIdentifiers() { public void testInvalidNamedParamsForIdentifiers() { assumeTrue( "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES.isEnabled() + EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() ); // missing field assertError( @@ -2254,7 
+2254,7 @@ public void testInvalidNamedParamsForIdentifiers() { public void testNamedParamsForIdentifierPatterns() { assumeTrue( "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES.isEnabled() + EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() ); assertProjectionWithMapping( """ @@ -2288,7 +2288,7 @@ public void testNamedParamsForIdentifierPatterns() { public void testInvalidNamedParamsForIdentifierPatterns() { assumeTrue( "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES.isEnabled() + EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() ); // missing pattern assertError( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 094d301875d8e..8019dbf77ffbf 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -1564,7 +1564,7 @@ public void testIntervalParam() { public void testParamForIdentifier() { assumeTrue( "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES.isEnabled() + EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() ); // field names can appear in eval/where/stats/sort/keep/drop/rename/dissect/grok/enrich/mvexpand // eval, where @@ -1825,7 +1825,7 @@ public void testParamForIdentifier() { public void testParamForIdentifierPattern() { assumeTrue( "named parameters for identifiers and patterns require snapshot build", - 
EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES.isEnabled() + EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() ); // name patterns can appear in keep and drop // all patterns @@ -1918,7 +1918,7 @@ public void testParamForIdentifierPattern() { public void testParamInInvalidPosition() { assumeTrue( "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES.isEnabled() + EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() ); // param for pattern is not supported in eval/where/stats/sort/rename/dissect/grok/enrich/mvexpand // where/stats/sort/dissect/grok are covered in RestEsqlTestCase @@ -1973,7 +1973,7 @@ public void testParamInInvalidPosition() { public void testMissingParam() { assumeTrue( "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES.isEnabled() + EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() ); // cover all processing commands eval/where/stats/sort/rename/dissect/grok/enrich/mvexpand/keep/drop String error = "Unknown query parameter [f1], did you mean [f4]?"; diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml index 4e8f82d507a5f..96145e84ad2cd 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml @@ -430,14 +430,14 @@ setup: - method: POST path: /_query parameters: [ ] - capabilities: [ named_parameter_for_field_and_function_names ] + capabilities: [ named_parameter_for_field_and_function_names_simplified_syntax ] reason: "named or positional parameters for field names" - do: esql.query: body: 
query: 'from test | stats x = count(?f1), y = sum(?f2) by ?f3 | sort ?f3 | keep ?f3, x, y | limit 3' - params: [{"f1" : {"value" : "time", "kind" : "identifier" }}, {"f2" : { "value" : "count", "kind" : "identifier" }}, {"f3" : { "value" : "color", "kind" : "identifier" }}] + params: [{"f1" : {"identifier" : "time"}}, {"f2" : { "identifier" : "count" }}, {"f3" : { "identifier" : "color"}}] - length: {columns: 3} - match: {columns.0.name: "color"} From 541bcf30e5d03944cace8deec24559fc63c8bcb5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Thu, 24 Oct 2024 08:53:12 +0200 Subject: [PATCH 345/449] [DOCS] Documents that ELSER is the default service for `semantic_text` (#114615) Co-authored-by: Mike Pellegrini --- .../mapping/types/semantic-text.asciidoc | 26 ++++++++- .../semantic-search-semantic-text.asciidoc | 57 +++---------------- 2 files changed, 33 insertions(+), 50 deletions(-) diff --git a/docs/reference/mapping/types/semantic-text.asciidoc b/docs/reference/mapping/types/semantic-text.asciidoc index ac23c153e01a3..893e2c6cff8ed 100644 --- a/docs/reference/mapping/types/semantic-text.asciidoc +++ b/docs/reference/mapping/types/semantic-text.asciidoc @@ -13,25 +13,47 @@ Long passages are <> to smaller secti The `semantic_text` field type specifies an inference endpoint identifier that will be used to generate embeddings. You can create the inference endpoint by using the <>. This field type and the <> type make it simpler to perform semantic search on your data. +If you don't specify an inference endpoint, the <> is used by default. Using `semantic_text`, you won't need to specify how to generate embeddings for your data, or how to index it. The {infer} endpoint automatically determines the embedding generation, indexing, and query to use. 
+If you use the ELSER service, you can set up `semantic_text` with the following API request: + [source,console] ------------------------------------------------------------ PUT my-index-000001 +{ + "mappings": { + "properties": { + "inference_field": { + "type": "semantic_text" + } + } + } +} +------------------------------------------------------------ + +NOTE: In Serverless, you must create an {infer} endpoint using the <> and reference it when setting up `semantic_text` even if you use the ELSER service. + +If you use a service other than ELSER, you must create an {infer} endpoint using the <> and reference it when setting up `semantic_text` as the following example demonstrates: + +[source,console] +------------------------------------------------------------ +PUT my-index-000002 { "mappings": { "properties": { "inference_field": { "type": "semantic_text", - "inference_id": "my-elser-endpoint" + "inference_id": "my-openai-endpoint" <1> } } } } ------------------------------------------------------------ // TEST[skip:Requires inference endpoint] +<1> The `inference_id` of the {infer} endpoint to use to generate embeddings. The recommended way to use semantic_text is by having dedicated {infer} endpoints for ingestion and search. 
@@ -40,7 +62,7 @@ After creating dedicated {infer} endpoints for both, you can reference them usin [source,console] ------------------------------------------------------------ -PUT my-index-000002 +PUT my-index-000003 { "mappings": { "properties": { diff --git a/docs/reference/search/search-your-data/semantic-search-semantic-text.asciidoc b/docs/reference/search/search-your-data/semantic-search-semantic-text.asciidoc index 60692c19c184a..f881ca87a92e6 100644 --- a/docs/reference/search/search-your-data/semantic-search-semantic-text.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search-semantic-text.asciidoc @@ -21,45 +21,11 @@ This tutorial uses the <> for demonstra [[semantic-text-requirements]] ==== Requirements -To use the `semantic_text` field type, you must have an {infer} endpoint deployed in -your cluster using the <>. +This tutorial uses the <> for demonstration, which is created automatically as needed. +To use the `semantic_text` field type with an {infer} service other than ELSER, you must create an inference endpoint using the <>. -[discrete] -[[semantic-text-infer-endpoint]] -==== Create the {infer} endpoint - -Create an inference endpoint by using the <>: +NOTE: In Serverless, you must create an {infer} endpoint using the <> and reference it when setting up `semantic_text` even if you use the ELSER service. -[source,console] ------------------------------------------------------------- -PUT _inference/sparse_embedding/my-elser-endpoint <1> -{ - "service": "elser", <2> - "service_settings": { - "adaptive_allocations": { <3> - "enabled": true, - "min_number_of_allocations": 3, - "max_number_of_allocations": 10 - }, - "num_threads": 1 - } -} ------------------------------------------------------------- -// TEST[skip:TBD] -<1> The task type is `sparse_embedding` in the path as the `elser` service will -be used and ELSER creates sparse vectors. The `inference_id` is -`my-elser-endpoint`. -<2> The `elser` service is used in this example. 
-<3> This setting enables and configures {ml-docs}/ml-nlp-auto-scale.html#nlp-model-adaptive-allocations[adaptive allocations]. -Adaptive allocations make it possible for ELSER to automatically scale up or down resources based on the current load on the process. - -[NOTE] -==== -You might see a 502 bad gateway error in the response when using the {kib} Console. -This error usually just reflects a timeout, while the model downloads in the background. -You can check the download progress in the {ml-app} UI. -If using the Python client, you can set the `timeout` parameter to a higher value. -==== [discrete] [[semantic-text-index-mapping]] @@ -75,8 +41,7 @@ PUT semantic-embeddings "mappings": { "properties": { "content": { <1> - "type": "semantic_text", <2> - "inference_id": "my-elser-endpoint" <3> + "type": "semantic_text" <2> } } } @@ -85,18 +50,14 @@ PUT semantic-embeddings // TEST[skip:TBD] <1> The name of the field to contain the generated embeddings. <2> The field to contain the embeddings is a `semantic_text` field. -<3> The `inference_id` is the inference endpoint you created in the previous step. -It will be used to generate the embeddings based on the input text. -Every time you ingest data into the related `semantic_text` field, this endpoint will be used for creating the vector representation of the text. +Since no `inference_id` is provided, the <> is used by default. +To use a different {infer} service, you must create an {infer} endpoint first using the <> and then specify it in the `semantic_text` field mapping using the `inference_id` parameter. [NOTE] ==== -If you're using web crawlers or connectors to generate indices, you have to -<> for these indices to -include the `semantic_text` field. Once the mapping is updated, you'll need to run -a full web crawl or a full connector sync. This ensures that all existing -documents are reprocessed and updated with the new semantic embeddings, -enabling semantic search on the updated data. 
+If you're using web crawlers or connectors to generate indices, you have to <> for these indices to include the `semantic_text` field. +Once the mapping is updated, you'll need to run a full web crawl or a full connector sync. +This ensures that all existing documents are reprocessed and updated with the new semantic embeddings, enabling semantic search on the updated data. ==== From bffaabb6f5c185d6e1003dd08029567ba469fe79 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Thu, 24 Oct 2024 09:19:46 +0200 Subject: [PATCH 346/449] ES|QL: improve docs about escaping for GROK, DISSECT, LIKE, RLIKE (#115320) --- ...ql-process-data-with-dissect-grok.asciidoc | 31 ++++++---- .../functions/kibana/definition/like.json | 2 +- .../functions/kibana/definition/rlike.json | 2 +- .../esql/functions/kibana/docs/like.md | 2 +- .../esql/functions/kibana/docs/rlike.md | 2 +- docs/reference/esql/functions/like.asciidoc | 16 ++++++ docs/reference/esql/functions/rlike.asciidoc | 16 ++++++ .../src/main/resources/docs.csv-spec | 42 +++++++++----- .../src/main/resources/string.csv-spec | 56 +++++++++++++++++++ .../function/scalar/string/RLike.java | 18 +++++- .../function/scalar/string/WildcardLike.java | 18 +++++- 11 files changed, 175 insertions(+), 30 deletions(-) diff --git a/docs/reference/esql/esql-process-data-with-dissect-grok.asciidoc b/docs/reference/esql/esql-process-data-with-dissect-grok.asciidoc index 87748fee4f202..e626e058a4e56 100644 --- a/docs/reference/esql/esql-process-data-with-dissect-grok.asciidoc +++ b/docs/reference/esql/esql-process-data-with-dissect-grok.asciidoc @@ -40,7 +40,7 @@ delimiter-based pattern, and extracts the specified keys as columns. For example, the following pattern: [source,txt] ---- -%{clientip} [%{@timestamp}] %{status} +%{clientip} [%{@timestamp}] %{status} ---- matches a log line of this format: @@ -76,8 +76,8 @@ ignore certain fields, append fields, skip over padding, etc. 
===== Terminology dissect pattern:: -the set of fields and delimiters describing the textual -format. Also known as a dissection. +the set of fields and delimiters describing the textual +format. Also known as a dissection. The dissection is described using a set of `%{}` sections: `%{a} - %{b} - %{c}` @@ -91,14 +91,14 @@ Any set of characters other than `%{`, `'not }'`, or `}` is a delimiter. key:: + -- -the text between the `%{` and `}`, exclusive of the `?`, `+`, `&` prefixes -and the ordinal suffix. +the text between the `%{` and `}`, exclusive of the `?`, `+`, `&` prefixes +and the ordinal suffix. Examples: -* `%{?aaa}` - the key is `aaa` -* `%{+bbb/3}` - the key is `bbb` -* `%{&ccc}` - the key is `ccc` +* `%{?aaa}` - the key is `aaa` +* `%{+bbb/3}` - the key is `bbb` +* `%{&ccc}` - the key is `ccc` -- [[esql-dissect-examples]] @@ -218,7 +218,7 @@ Putting it together as an {esql} query: [source.merge.styled,esql] ---- -include::{esql-specs}/docs.csv-spec[tag=grokWithEscape] +include::{esql-specs}/docs.csv-spec[tag=grokWithEscapeTripleQuotes] ---- `GROK` adds the following columns to the input table: @@ -239,15 +239,24 @@ with a `\`. For example, in the earlier pattern: %{IP:ip} \[%{TIMESTAMP_ISO8601:@timestamp}\] %{GREEDYDATA:status} ---- -In {esql} queries, the backslash character itself is a special character that +In {esql} queries, when using single quotes for strings, the backslash character itself is a special character that needs to be escaped with another `\`. For this example, the corresponding {esql} query becomes: [source.merge.styled,esql] ---- include::{esql-specs}/docs.csv-spec[tag=grokWithEscape] ---- + +For this reason, in general it is more convenient to use triple quotes `"""` for GROK patterns, +that do not require escaping for backslash. + +[source.merge.styled,esql] +---- +include::{esql-specs}/docs.csv-spec[tag=grokWithEscapeTripleQuotes] +---- ==== + [[esql-grok-patterns]] ===== Grok patterns @@ -318,4 +327,4 @@ as the `GROK` command. 
The `GROK` command does not support configuring <>, or <>. The `GROK` command is not subject to <>. -// end::grok-limitations[] \ No newline at end of file +// end::grok-limitations[] diff --git a/docs/reference/esql/functions/kibana/definition/like.json b/docs/reference/esql/functions/kibana/definition/like.json index 97e84e0361fd2..f375c697bd60d 100644 --- a/docs/reference/esql/functions/kibana/definition/like.json +++ b/docs/reference/esql/functions/kibana/definition/like.json @@ -42,7 +42,7 @@ } ], "examples" : [ - "FROM employees\n| WHERE first_name LIKE \"?b*\"\n| KEEP first_name, last_name" + "FROM employees\n| WHERE first_name LIKE \"\"\"?b*\"\"\"\n| KEEP first_name, last_name" ], "preview" : false, "snapshot_only" : false diff --git a/docs/reference/esql/functions/kibana/definition/rlike.json b/docs/reference/esql/functions/kibana/definition/rlike.json index e442bb2c55050..7a328293383bb 100644 --- a/docs/reference/esql/functions/kibana/definition/rlike.json +++ b/docs/reference/esql/functions/kibana/definition/rlike.json @@ -42,7 +42,7 @@ } ], "examples" : [ - "FROM employees\n| WHERE first_name RLIKE \".leja.*\"\n| KEEP first_name, last_name" + "FROM employees\n| WHERE first_name RLIKE \"\"\".leja.*\"\"\"\n| KEEP first_name, last_name" ], "preview" : false, "snapshot_only" : false diff --git a/docs/reference/esql/functions/kibana/docs/like.md b/docs/reference/esql/functions/kibana/docs/like.md index 4c400bdc65479..ea2ac11b6f4b9 100644 --- a/docs/reference/esql/functions/kibana/docs/like.md +++ b/docs/reference/esql/functions/kibana/docs/like.md @@ -15,6 +15,6 @@ The following wildcard characters are supported: ``` FROM employees -| WHERE first_name LIKE "?b*" +| WHERE first_name LIKE """?b*""" | KEEP first_name, last_name ``` diff --git a/docs/reference/esql/functions/kibana/docs/rlike.md b/docs/reference/esql/functions/kibana/docs/rlike.md index ed94553e7e44f..95b57799ffe29 100644 --- a/docs/reference/esql/functions/kibana/docs/rlike.md +++ 
b/docs/reference/esql/functions/kibana/docs/rlike.md @@ -10,6 +10,6 @@ expression. The right-hand side of the operator represents the pattern. ``` FROM employees -| WHERE first_name RLIKE ".leja.*" +| WHERE first_name RLIKE """.leja.*""" | KEEP first_name, last_name ``` diff --git a/docs/reference/esql/functions/like.asciidoc b/docs/reference/esql/functions/like.asciidoc index 2298617be5699..a569896bc3c1e 100644 --- a/docs/reference/esql/functions/like.asciidoc +++ b/docs/reference/esql/functions/like.asciidoc @@ -23,4 +23,20 @@ include::{esql-specs}/docs.csv-spec[tag=like] |=== include::{esql-specs}/docs.csv-spec[tag=like-result] |=== + +Matching the exact characters `*` and `.` will require escaping. +The escape character is backslash `\`. Since also backslash is a special character in string literals, +it will require further escaping. + +[source.merge.styled,esql] +---- +include::{esql-specs}/string.csv-spec[tag=likeEscapingSingleQuotes] +---- + +To reduce the overhead of escaping, we suggest using triple quotes strings `"""` + +[source.merge.styled,esql] +---- +include::{esql-specs}/string.csv-spec[tag=likeEscapingTripleQuotes] +---- // end::body[] diff --git a/docs/reference/esql/functions/rlike.asciidoc b/docs/reference/esql/functions/rlike.asciidoc index 031594ae403da..f6009b2c49528 100644 --- a/docs/reference/esql/functions/rlike.asciidoc +++ b/docs/reference/esql/functions/rlike.asciidoc @@ -18,4 +18,20 @@ include::{esql-specs}/docs.csv-spec[tag=rlike] |=== include::{esql-specs}/docs.csv-spec[tag=rlike-result] |=== + +Matching special characters (eg. `.`, `*`, `(`...) will require escaping. +The escape character is backslash `\`. Since also backslash is a special character in string literals, +it will require further escaping. 
+ +[source.merge.styled,esql] +---- +include::{esql-specs}/string.csv-spec[tag=rlikeEscapingSingleQuotes] +---- + +To reduce the overhead of escaping, we suggest using triple quotes strings `"""` + +[source.merge.styled,esql] +---- +include::{esql-specs}/string.csv-spec[tag=rlikeEscapingTripleQuotes] +---- // end::body[] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec index 15fe6853ae491..a9c5a5214f159 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec @@ -382,7 +382,7 @@ count:long | languages:integer basicGrok // tag::basicGrok[] ROW a = "2023-01-23T12:15:00.000Z 127.0.0.1 some.email@foo.com 42" -| GROK a "%{TIMESTAMP_ISO8601:date} %{IP:ip} %{EMAILADDRESS:email} %{NUMBER:num}" +| GROK a """%{TIMESTAMP_ISO8601:date} %{IP:ip} %{EMAILADDRESS:email} %{NUMBER:num}""" | KEEP date, ip, email, num // end::basicGrok[] ; @@ -396,7 +396,7 @@ date:keyword | ip:keyword | email:keyword | num:keyword grokWithConversionSuffix // tag::grokWithConversionSuffix[] ROW a = "2023-01-23T12:15:00.000Z 127.0.0.1 some.email@foo.com 42" -| GROK a "%{TIMESTAMP_ISO8601:date} %{IP:ip} %{EMAILADDRESS:email} %{NUMBER:num:int}" +| GROK a """%{TIMESTAMP_ISO8601:date} %{IP:ip} %{EMAILADDRESS:email} %{NUMBER:num:int}""" | KEEP date, ip, email, num // end::grokWithConversionSuffix[] ; @@ -410,7 +410,7 @@ date:keyword | ip:keyword | email:keyword | num:integer grokWithToDatetime // tag::grokWithToDatetime[] ROW a = "2023-01-23T12:15:00.000Z 127.0.0.1 some.email@foo.com 42" -| GROK a "%{TIMESTAMP_ISO8601:date} %{IP:ip} %{EMAILADDRESS:email} %{NUMBER:num:int}" +| GROK a """%{TIMESTAMP_ISO8601:date} %{IP:ip} %{EMAILADDRESS:email} %{NUMBER:num:int}""" | KEEP date, ip, email, num | EVAL date = TO_DATETIME(date) // end::grokWithToDatetime[] @@ -436,11 +436,27 @@ ROW a = "1.2.3.4 
[2023-01-23T12:15:00.000Z] Connected" // end::grokWithEscape-result[] ; + +grokWithEscapeTripleQuotes +// tag::grokWithEscapeTripleQuotes[] +ROW a = "1.2.3.4 [2023-01-23T12:15:00.000Z] Connected" +| GROK a """%{IP:ip} \[%{TIMESTAMP_ISO8601:@timestamp}\] %{GREEDYDATA:status}""" +// end::grokWithEscapeTripleQuotes[] +| KEEP @timestamp +; + +// tag::grokWithEscapeTripleQuotes-result[] +@timestamp:keyword +2023-01-23T12:15:00.000Z +// end::grokWithEscapeTripleQuotes-result[] +; + + grokWithDuplicateFieldNames // tag::grokWithDuplicateFieldNames[] FROM addresses | KEEP city.name, zip_code -| GROK zip_code "%{WORD:zip_parts} %{WORD:zip_parts}" +| GROK zip_code """%{WORD:zip_parts} %{WORD:zip_parts}""" // end::grokWithDuplicateFieldNames[] | SORT city.name ; @@ -456,7 +472,7 @@ Tokyo | 100-7014 | null basicDissect // tag::basicDissect[] ROW a = "2023-01-23T12:15:00.000Z - some text - 127.0.0.1" -| DISSECT a "%{date} - %{msg} - %{ip}" +| DISSECT a """%{date} - %{msg} - %{ip}""" | KEEP date, msg, ip // end::basicDissect[] ; @@ -470,7 +486,7 @@ date:keyword | msg:keyword | ip:keyword dissectWithToDatetime // tag::dissectWithToDatetime[] ROW a = "2023-01-23T12:15:00.000Z - some text - 127.0.0.1" -| DISSECT a "%{date} - %{msg} - %{ip}" +| DISSECT a """%{date} - %{msg} - %{ip}""" | KEEP date, msg, ip | EVAL date = TO_DATETIME(date) // end::dissectWithToDatetime[] @@ -485,7 +501,7 @@ some text | 127.0.0.1 | 2023-01-23T12:15:00.000Z dissectRightPaddingModifier // tag::dissectRightPaddingModifier[] ROW message="1998-08-10T17:15:42 WARN" -| DISSECT message "%{ts->} %{level}" +| DISSECT message """%{ts->} %{level}""" // end::dissectRightPaddingModifier[] ; @@ -498,7 +514,7 @@ message:keyword | ts:keyword | level:keyword dissectEmptyRightPaddingModifier#[skip:-8.11.2, reason:Support for empty right padding modifiers introduced in 8.11.2] // tag::dissectEmptyRightPaddingModifier[] ROW message="[1998-08-10T17:15:42] [WARN]" -| DISSECT message "[%{ts}]%{->}[%{level}]" +| DISSECT message 
"""[%{ts}]%{->}[%{level}]""" // end::dissectEmptyRightPaddingModifier[] ; @@ -511,7 +527,7 @@ ROW message="[1998-08-10T17:15:42] [WARN]" dissectAppendModifier // tag::dissectAppendModifier[] ROW message="john jacob jingleheimer schmidt" -| DISSECT message "%{+name} %{+name} %{+name} %{+name}" APPEND_SEPARATOR=" " +| DISSECT message """%{+name} %{+name} %{+name} %{+name}""" APPEND_SEPARATOR=" " // end::dissectAppendModifier[] ; @@ -524,7 +540,7 @@ john jacob jingleheimer schmidt|john jacob jingleheimer schmidt dissectAppendWithOrderModifier // tag::dissectAppendWithOrderModifier[] ROW message="john jacob jingleheimer schmidt" -| DISSECT message "%{+name/2} %{+name/4} %{+name/3} %{+name/1}" APPEND_SEPARATOR="," +| DISSECT message """%{+name/2} %{+name/4} %{+name/3} %{+name/1}""" APPEND_SEPARATOR="," // end::dissectAppendWithOrderModifier[] ; @@ -537,7 +553,7 @@ john jacob jingleheimer schmidt|schmidt,john,jingleheimer,jacob dissectNamedSkipKey // tag::dissectNamedSkipKey[] ROW message="1.2.3.4 - - 30/Apr/1998:22:00:52 +0000" -| DISSECT message "%{clientip} %{?ident} %{?auth} %{@timestamp}" +| DISSECT message """%{clientip} %{?ident} %{?auth} %{@timestamp}""" // end::dissectNamedSkipKey[] ; @@ -550,7 +566,7 @@ message:keyword | clientip:keyword | @timestamp:keyword docsLike // tag::like[] FROM employees -| WHERE first_name LIKE "?b*" +| WHERE first_name LIKE """?b*""" | KEEP first_name, last_name // end::like[] | SORT first_name @@ -566,7 +582,7 @@ Eberhardt |Terkki docsRlike // tag::rlike[] FROM employees -| WHERE first_name RLIKE ".leja.*" +| WHERE first_name RLIKE """.leja.*""" | KEEP first_name, last_name // end::rlike[] ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index dd9d519649c01..00fa2fddb2106 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -1800,3 +1800,59 @@ warning:Line 1:29: java.lang.IllegalArgumentException: single-value function enc x:keyword null ; + + +likeEscapingSingleQuotes +// tag::likeEscapingSingleQuotes[] +ROW message = "foo * bar" +| WHERE message LIKE "foo \\* bar" +// end::likeEscapingSingleQuotes[] +; + +// tag::likeEscapingSingleQuotes-result[] +message:keyword +foo * bar +// end::likeEscapingSingleQuotes-result[] +; + + +likeEscapingTripleQuotes +// tag::likeEscapingTripleQuotes[] +ROW message = "foo * bar" +| WHERE message LIKE """foo \* bar""" +// end::likeEscapingTripleQuotes[] +; + +// tag::likeEscapingTripleQuotes-result[] +message:keyword +foo * bar +// end::likeEscapingTripleQuotes-result[] +; + + +rlikeEscapingSingleQuotes +// tag::rlikeEscapingSingleQuotes[] +ROW message = "foo ( bar" +| WHERE message RLIKE "foo \\( bar" +// end::rlikeEscapingSingleQuotes[] +; + +// tag::rlikeEscapingSingleQuotes-result[] +message:keyword +foo ( bar +// end::rlikeEscapingSingleQuotes-result[] +; + + +rlikeEscapingTripleQuotes +// tag::rlikeEscapingTripleQuotes[] +ROW message = "foo ( bar" +| WHERE message RLIKE """foo \( bar""" +// end::rlikeEscapingTripleQuotes[] +; + +// tag::rlikeEscapingTripleQuotes-result[] +message:keyword +foo ( bar +// end::rlikeEscapingTripleQuotes-result[] +; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java index b46c46c89deba..cd42711177510 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java @@ -33,7 +33,23 @@ public class RLike extends org.elasticsearch.xpack.esql.core.expression.predicat Use `RLIKE` to filter data based on string 
patterns using using <>. `RLIKE` usually acts on a field placed on the left-hand side of the operator, but it can also act on a constant (literal) - expression. The right-hand side of the operator represents the pattern.""", examples = @Example(file = "docs", tag = "rlike")) + expression. The right-hand side of the operator represents the pattern.""", detailedDescription = """ + Matching special characters (eg. `.`, `*`, `(`...) will require escaping. + The escape character is backslash `\\`. Since also backslash is a special character in string literals, + it will require further escaping. + + [source.merge.styled,esql] + ---- + include::{esql-specs}/string.csv-spec[tag=rlikeEscapingSingleQuotes] + ---- + + To reduce the overhead of escaping, we suggest using triple quotes strings `\"\"\"` + + [source.merge.styled,esql] + ---- + include::{esql-specs}/string.csv-spec[tag=rlikeEscapingTripleQuotes] + ---- + """, examples = @Example(file = "docs", tag = "rlike")) public RLike( Source source, @Param(name = "str", type = { "keyword", "text" }, description = "A literal value.") Expression value, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java index 714c4ca04a862..c1b4f20f41795 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java @@ -43,7 +43,23 @@ also act on a constant (literal) expression. The right-hand side of the operator The following wildcard characters are supported: * `*` matches zero or more characters. 
- * `?` matches one character.""", examples = @Example(file = "docs", tag = "like")) + * `?` matches one character.""", detailedDescription = """ + Matching the exact characters `*` and `.` will require escaping. + The escape character is backslash `\\`. Since also backslash is a special character in string literals, + it will require further escaping. + + [source.merge.styled,esql] + ---- + include::{esql-specs}/string.csv-spec[tag=likeEscapingSingleQuotes] + ---- + + To reduce the overhead of escaping, we suggest using triple quotes strings `\"\"\"` + + [source.merge.styled,esql] + ---- + include::{esql-specs}/string.csv-spec[tag=likeEscapingTripleQuotes] + ---- + """, examples = @Example(file = "docs", tag = "like")) public WildcardLike( Source source, @Param(name = "str", type = { "keyword", "text" }, description = "A literal expression.") Expression left, From 6f7bd550b17cbaf7d11acf68d0aacfa1d569f7c8 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Thu, 24 Oct 2024 10:34:12 +0300 Subject: [PATCH 347/449] Use settings from LogsdbIndexModeSettingsProvider in SyntheticSourceIndexSettingsProvider (#115437) * Use settings from LogsdbIndexModeSettingsProvider in SyntheticSourceIndexSettingsProvider * update --- .../xpack/logsdb/LogsdbWithBasicRestIT.java | 32 ++++++++ .../xpack/logsdb/LogsdbRestIT.java | 39 ++++++++-- .../xpack/logsdb/LogsDBPlugin.java | 2 +- .../LogsdbIndexModeSettingsProvider.java | 18 ++--- .../SyntheticSourceIndexSettingsProvider.java | 13 +++- ...heticSourceIndexSettingsProviderTests.java | 75 ++++++++++++++++++- 6 files changed, 159 insertions(+), 20 deletions(-) diff --git a/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java b/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java index f5ac107628d1a..381c83ceee289 100644 --- 
a/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java +++ b/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.logsdb; import org.elasticsearch.client.Request; +import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.test.cluster.ElasticsearchCluster; @@ -171,4 +172,35 @@ public void testLogsdbOverrideNullInTemplate() throws IOException { assertEquals("logsdb", settings.get("index.mode")); assertEquals(SourceFieldMapper.Mode.STORED.toString(), settings.get("index.mapping.source.mode")); } + + public void testLogsdbOverrideDefaultModeForLogsIndex() throws IOException { + Request request = new Request("PUT", "/_cluster/settings"); + request.setJsonEntity("{ \"transient\": { \"cluster.logsdb.enabled\": true } }"); + assertOK(client().performRequest(request)); + + request = new Request("POST", "/_index_template/1"); + request.setJsonEntity(""" + { + "index_patterns": ["logs-test-*"], + "data_stream": { + } + } + """); + assertOK(client().performRequest(request)); + + request = new Request("POST", "/logs-test-foo/_doc"); + request.setJsonEntity(""" + { + "@timestamp": "2020-01-01T00:00:00.000Z", + "host.name": "foo", + "message": "bar" + } + """); + assertOK(client().performRequest(request)); + + String index = DataStream.getDefaultBackingIndexName("logs-test-foo", 1); + var settings = (Map) ((Map) getIndexSettings(index).get(index)).get("settings"); + assertEquals("logsdb", settings.get("index.mode")); + assertEquals(SourceFieldMapper.Mode.STORED.toString(), settings.get("index.mapping.source.mode")); + } } diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java 
b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java index 16759c3292f7a..2bf8b00cf551c 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.logsdb; +import org.elasticsearch.client.Request; +import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; @@ -35,12 +37,6 @@ protected String getTestRestCluster() { } public void testFeatureUsageWithLogsdbIndex() throws IOException { - { - var response = getAsMap("/_license/feature_usage"); - @SuppressWarnings("unchecked") - List> features = (List>) response.get("features"); - assertThat(features, Matchers.empty()); - } { if (randomBoolean()) { createIndex("test-index", Settings.builder().put("index.mode", "logsdb").build()); @@ -81,4 +77,35 @@ public void testFeatureUsageWithLogsdbIndex() throws IOException { } } + public void testLogsdbSourceModeForLogsIndex() throws IOException { + Request request = new Request("PUT", "/_cluster/settings"); + request.setJsonEntity("{ \"transient\": { \"cluster.logsdb.enabled\": true } }"); + assertOK(client().performRequest(request)); + + request = new Request("POST", "/_index_template/1"); + request.setJsonEntity(""" + { + "index_patterns": ["logs-test-*"], + "data_stream": { + } + } + """); + assertOK(client().performRequest(request)); + + request = new Request("POST", "/logs-test-foo/_doc"); + request.setJsonEntity(""" + { + "@timestamp": "2020-01-01T00:00:00.000Z", + "host.name": "foo", + "message": "bar" + } + """); + assertOK(client().performRequest(request)); + + String index = DataStream.getDefaultBackingIndexName("logs-test-foo", 1); + var settings = (Map) ((Map) 
getIndexSettings(index).get(index)).get("settings"); + assertEquals("logsdb", settings.get("index.mode")); + assertNull(settings.get("index.mapping.source.mode")); + } + } diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java index 49a83335671cd..089be0604146f 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java @@ -52,7 +52,7 @@ public Collection getAdditionalIndexSettingProviders(Index return List.of(logsdbIndexModeSettingsProvider); } return List.of( - new SyntheticSourceIndexSettingsProvider(licenseService, parameters.mapperServiceFactory()), + new SyntheticSourceIndexSettingsProvider(licenseService, parameters.mapperServiceFactory(), logsdbIndexModeSettingsProvider), logsdbIndexModeSettingsProvider ); } diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java index ee9d6129dcd54..329cd3bc8a04b 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java @@ -48,19 +48,16 @@ public Settings getAdditionalIndexSettings( final Settings settings, final List combinedTemplateMappings ) { - if (isLogsdbEnabled == false || dataStreamName == null) { - return Settings.EMPTY; - } - - final IndexMode indexMode = resolveIndexMode(settings.get(IndexSettings.MODE.getKey())); - if (indexMode != null) { - return Settings.EMPTY; - } + return getLogsdbModeSetting(dataStreamName, settings); + } - if (matchesLogsPattern(dataStreamName)) { + Settings getLogsdbModeSetting(final String dataStreamName, 
final Settings settings) { + if (isLogsdbEnabled + && dataStreamName != null + && resolveIndexMode(settings.get(IndexSettings.MODE.getKey())) == null + && matchesLogsPattern(dataStreamName)) { return Settings.builder().put("index.mode", IndexMode.LOGSDB.getName()).build(); } - return Settings.EMPTY; } @@ -71,5 +68,4 @@ private static boolean matchesLogsPattern(final String name) { private IndexMode resolveIndexMode(final String mode) { return mode != null ? Enum.valueOf(IndexMode.class, mode.toUpperCase(Locale.ROOT)) : null; } - } diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java index 4625fe91294d7..e7572d6a646e1 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java @@ -38,13 +38,16 @@ final class SyntheticSourceIndexSettingsProvider implements IndexSettingProvider private final SyntheticSourceLicenseService syntheticSourceLicenseService; private final CheckedFunction mapperServiceFactory; + private final LogsdbIndexModeSettingsProvider logsdbIndexModeSettingsProvider; SyntheticSourceIndexSettingsProvider( SyntheticSourceLicenseService syntheticSourceLicenseService, - CheckedFunction mapperServiceFactory + CheckedFunction mapperServiceFactory, + LogsdbIndexModeSettingsProvider logsdbIndexModeSettingsProvider ) { this.syntheticSourceLicenseService = syntheticSourceLicenseService; this.mapperServiceFactory = mapperServiceFactory; + this.logsdbIndexModeSettingsProvider = logsdbIndexModeSettingsProvider; } @Override @@ -63,6 +66,14 @@ public Settings getAdditionalIndexSettings( Settings indexTemplateAndCreateRequestSettings, List combinedTemplateMappings ) { + var logsdbSettings = 
logsdbIndexModeSettingsProvider.getLogsdbModeSetting(dataStreamName, indexTemplateAndCreateRequestSettings); + if (logsdbSettings != Settings.EMPTY) { + indexTemplateAndCreateRequestSettings = Settings.builder() + .put(logsdbSettings) + .put(indexTemplateAndCreateRequestSettings) + .build(); + } + // This index name is used when validating component and index templates, we should skip this check in that case. // (See MetadataIndexTemplateService#validateIndexTemplateV2(...) method) boolean isTemplateValidation = "validate-index-name".equals(indexName); diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java index 362b387726105..2ab77b38b3373 100644 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.license.MockLicenseState; @@ -35,6 +36,10 @@ public class SyntheticSourceIndexSettingsProviderTests extends ESTestCase { private SyntheticSourceLicenseService syntheticSourceLicenseService; private SyntheticSourceIndexSettingsProvider provider; + private static LogsdbIndexModeSettingsProvider getLogsdbIndexModeSettingsProvider(boolean enabled) { + return new LogsdbIndexModeSettingsProvider(Settings.builder().put("cluster.logsdb.enabled", enabled).build()); + } + @Before public void setup() { MockLicenseState licenseState = mock(MockLicenseState.class); @@ -46,7 +51,8 @@ public void 
setup() { provider = new SyntheticSourceIndexSettingsProvider( syntheticSourceLicenseService, - im -> MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), im.getSettings(), im.getIndex().getName()) + im -> MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), im.getSettings(), im.getIndex().getName()), + getLogsdbIndexModeSettingsProvider(false) ); } @@ -310,4 +316,71 @@ public void testGetAdditionalIndexSettingsDowngradeFromSyntheticSource() throws assertThat(result.size(), equalTo(1)); assertEquals(SourceFieldMapper.Mode.STORED, SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.get(result)); } + + public void testGetAdditionalIndexSettingsDowngradeFromSyntheticSourceFileMatch() throws IOException { + syntheticSourceLicenseService.setSyntheticSourceFallback(true); + provider = new SyntheticSourceIndexSettingsProvider( + syntheticSourceLicenseService, + im -> MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), im.getSettings(), im.getIndex().getName()), + getLogsdbIndexModeSettingsProvider(true) + ); + final Settings settings = Settings.EMPTY; + + String dataStreamName = "logs-app1"; + Metadata.Builder mb = Metadata.builder( + DataStreamTestHelper.getClusterStateWithDataStreams( + List.of(Tuple.tuple(dataStreamName, 1)), + List.of(), + Instant.now().toEpochMilli(), + builder().build(), + 1 + ).getMetadata() + ); + Metadata metadata = mb.build(); + Settings result = provider.getAdditionalIndexSettings( + DataStream.getDefaultBackingIndexName(dataStreamName, 2), + dataStreamName, + null, + metadata, + Instant.ofEpochMilli(1L), + settings, + List.of() + ); + assertThat(result.size(), equalTo(0)); + + dataStreamName = "logs-app1-0"; + mb = Metadata.builder( + DataStreamTestHelper.getClusterStateWithDataStreams( + List.of(Tuple.tuple(dataStreamName, 1)), + List.of(), + Instant.now().toEpochMilli(), + builder().build(), + 1 + ).getMetadata() + ); + metadata = mb.build(); + + result = provider.getAdditionalIndexSettings( 
+ DataStream.getDefaultBackingIndexName(dataStreamName, 2), + dataStreamName, + null, + metadata, + Instant.ofEpochMilli(1L), + settings, + List.of() + ); + assertThat(result.size(), equalTo(1)); + assertEquals(SourceFieldMapper.Mode.STORED, SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.get(result)); + + result = provider.getAdditionalIndexSettings( + DataStream.getDefaultBackingIndexName(dataStreamName, 2), + dataStreamName, + null, + metadata, + Instant.ofEpochMilli(1L), + builder().put(IndexSettings.MODE.getKey(), IndexMode.STANDARD.toString()).build(), + List.of() + ); + assertThat(result.size(), equalTo(0)); + } } From cc9a08a7085e87975d780bb1c2f5c698dbe19b4e Mon Sep 17 00:00:00 2001 From: Nick Tindall Date: Thu, 24 Oct 2024 18:43:51 +1100 Subject: [PATCH 348/449] Only publish desired balance gauges on master (#115383) Closes ES-9834 --- docs/changelog/115383.yaml | 5 + .../DesiredBalanceReconcilerMetricsIT.java | 69 ++++++++++ .../allocator/DesiredBalanceMetrics.java | 118 ++++++++++++++++++ .../allocator/DesiredBalanceReconciler.java | 56 +-------- .../DesiredBalanceShardsAllocator.java | 19 +-- .../allocator/DesiredBalanceMetricsTests.java | 116 +++++++++++++++++ .../DesiredBalanceReconcilerTests.java | 7 +- 7 files changed, 326 insertions(+), 64 deletions(-) create mode 100644 docs/changelog/115383.yaml create mode 100644 server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerMetricsIT.java create mode 100644 server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceMetrics.java create mode 100644 server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceMetricsTests.java diff --git a/docs/changelog/115383.yaml b/docs/changelog/115383.yaml new file mode 100644 index 0000000000000..19eadd41c0726 --- /dev/null +++ b/docs/changelog/115383.yaml @@ -0,0 +1,5 @@ +pr: 115383 +summary: Only publish desired balance gauges on master +area: 
Allocation +type: enhancement +issues: [] diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerMetricsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerMetricsIT.java new file mode 100644 index 0000000000000..cb279c93b402e --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerMetricsIT.java @@ -0,0 +1,69 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.cluster.routing.allocation.allocator; + +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.telemetry.TestTelemetryPlugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.hamcrest.Matcher; + +import java.util.Collection; + +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.not; + +public class DesiredBalanceReconcilerMetricsIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return CollectionUtils.appendToCopy(super.nodePlugins(), TestTelemetryPlugin.class); + } + + public void testDesiredBalanceGaugeMetricsAreOnlyPublishedByCurrentMaster() throws Exception { + internalCluster().ensureAtLeastNumDataNodes(2); + prepareCreate("test").setSettings(indexSettings(2, 1)).get(); + ensureGreen(); + + assertOnlyMasterIsPublishingMetrics(); + + // fail over and check again + int numFailOvers = randomIntBetween(1, 3); + for (int i = 0; i < numFailOvers; i++) { + internalCluster().restartNode(internalCluster().getMasterName()); + ensureGreen(); + + assertOnlyMasterIsPublishingMetrics(); + } + } + + private static void assertOnlyMasterIsPublishingMetrics() { + String masterNodeName = internalCluster().getMasterName(); + String[] nodeNames = internalCluster().getNodeNames(); + for (String nodeName : nodeNames) { + assertMetricsAreBeingPublished(nodeName, nodeName.equals(masterNodeName)); + } + } + + private static void assertMetricsAreBeingPublished(String nodeName, boolean shouldBePublishing) { + final TestTelemetryPlugin testTelemetryPlugin = internalCluster().getInstance(PluginsService.class, nodeName) + .filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + testTelemetryPlugin.resetMeter(); + testTelemetryPlugin.collect(); + Matcher> matcher = shouldBePublishing ? 
not(empty()) : empty(); + assertThat(testTelemetryPlugin.getLongGaugeMeasurement(DesiredBalanceMetrics.UNASSIGNED_SHARDS_METRIC_NAME), matcher); + assertThat(testTelemetryPlugin.getLongGaugeMeasurement(DesiredBalanceMetrics.TOTAL_SHARDS_METRIC_NAME), matcher); + assertThat(testTelemetryPlugin.getLongGaugeMeasurement(DesiredBalanceMetrics.UNDESIRED_ALLOCATION_COUNT_METRIC_NAME), matcher); + assertThat(testTelemetryPlugin.getDoubleGaugeMeasurement(DesiredBalanceMetrics.UNDESIRED_ALLOCATION_RATIO_METRIC_NAME), matcher); + } +} diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceMetrics.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceMetrics.java new file mode 100644 index 0000000000000..436f1ac38c0c2 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceMetrics.java @@ -0,0 +1,118 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.cluster.routing.allocation.allocator; + +import org.elasticsearch.telemetry.metric.DoubleWithAttributes; +import org.elasticsearch.telemetry.metric.LongWithAttributes; +import org.elasticsearch.telemetry.metric.MeterRegistry; + +import java.util.List; + +public class DesiredBalanceMetrics { + + public static final DesiredBalanceMetrics NOOP = new DesiredBalanceMetrics(MeterRegistry.NOOP); + public static final String UNASSIGNED_SHARDS_METRIC_NAME = "es.allocator.desired_balance.shards.unassigned.current"; + public static final String TOTAL_SHARDS_METRIC_NAME = "es.allocator.desired_balance.shards.current"; + public static final String UNDESIRED_ALLOCATION_COUNT_METRIC_NAME = "es.allocator.desired_balance.allocations.undesired.current"; + public static final String UNDESIRED_ALLOCATION_RATIO_METRIC_NAME = "es.allocator.desired_balance.allocations.undesired.ratio"; + + private volatile boolean nodeIsMaster = false; + + /** + * Number of unassigned shards during last reconciliation + */ + private volatile long unassignedShards; + /** + * Total number of assigned shards during last reconciliation + */ + private volatile long totalAllocations; + /** + * Number of assigned shards during last reconciliation that are not allocated on desired node and need to be moved + */ + private volatile long undesiredAllocations; + + public void updateMetrics(long unassignedShards, long totalAllocations, long undesiredAllocations) { + this.unassignedShards = unassignedShards; + this.totalAllocations = totalAllocations; + this.undesiredAllocations = undesiredAllocations; + } + + public DesiredBalanceMetrics(MeterRegistry meterRegistry) { + meterRegistry.registerLongsGauge( + UNASSIGNED_SHARDS_METRIC_NAME, + "Current number of unassigned shards", + "{shard}", + this::getUnassignedShardsMetrics + ); + meterRegistry.registerLongsGauge(TOTAL_SHARDS_METRIC_NAME, "Total number of shards", "{shard}", this::getTotalAllocationsMetrics); + 
meterRegistry.registerLongsGauge( + UNDESIRED_ALLOCATION_COUNT_METRIC_NAME, + "Total number of shards allocated on undesired nodes excluding shutting down nodes", + "{shard}", + this::getUndesiredAllocationsMetrics + ); + meterRegistry.registerDoublesGauge( + UNDESIRED_ALLOCATION_RATIO_METRIC_NAME, + "Ratio of undesired allocations to shard count excluding shutting down nodes", + "1", + this::getUndesiredAllocationsRatioMetrics + ); + } + + public void setNodeIsMaster(boolean nodeIsMaster) { + this.nodeIsMaster = nodeIsMaster; + } + + public long unassignedShards() { + return unassignedShards; + } + + public long totalAllocations() { + return totalAllocations; + } + + public long undesiredAllocations() { + return undesiredAllocations; + } + + private List getUnassignedShardsMetrics() { + return getIfPublishing(unassignedShards); + } + + private List getTotalAllocationsMetrics() { + return getIfPublishing(totalAllocations); + } + + private List getUndesiredAllocationsMetrics() { + return getIfPublishing(undesiredAllocations); + } + + private List getIfPublishing(long value) { + if (nodeIsMaster) { + return List.of(new LongWithAttributes(value)); + } + return List.of(); + } + + private List getUndesiredAllocationsRatioMetrics() { + if (nodeIsMaster) { + var total = totalAllocations; + var undesired = undesiredAllocations; + return List.of(new DoubleWithAttributes(total != 0 ? 
(double) undesired / total : 0.0)); + } + return List.of(); + } + + public void zeroAllMetrics() { + unassignedShards = 0; + totalAllocations = 0; + undesiredAllocations = 0; + } +} diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java index da52148919cdb..dced9214a3245 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java @@ -30,10 +30,6 @@ import org.elasticsearch.gateway.PriorityComparator; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.telemetry.metric.DoubleGauge; -import org.elasticsearch.telemetry.metric.DoubleWithAttributes; -import org.elasticsearch.telemetry.metric.LongGaugeMetric; -import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.threadpool.ThreadPool; import java.util.Comparator; @@ -73,23 +69,10 @@ public class DesiredBalanceReconciler { private double undesiredAllocationsLogThreshold; private final NodeAllocationOrdering allocationOrdering = new NodeAllocationOrdering(); private final NodeAllocationOrdering moveOrdering = new NodeAllocationOrdering(); + private final DesiredBalanceMetrics desiredBalanceMetrics; - // stats - /** - * Number of unassigned shards during last reconciliation - */ - protected final LongGaugeMetric unassignedShards; - /** - * Total number of assigned shards during last reconciliation - */ - protected final LongGaugeMetric totalAllocations; - /** - * Number of assigned shards during last reconciliation that are not allocated on desired node and need to be moved - */ - protected final LongGaugeMetric undesiredAllocations; - private final DoubleGauge undesiredAllocationsRatio; - - 
public DesiredBalanceReconciler(ClusterSettings clusterSettings, ThreadPool threadPool, MeterRegistry meterRegistry) { + public DesiredBalanceReconciler(ClusterSettings clusterSettings, ThreadPool threadPool, DesiredBalanceMetrics desiredBalanceMetrics) { + this.desiredBalanceMetrics = desiredBalanceMetrics; this.undesiredAllocationLogInterval = new FrequencyCappedAction( threadPool.relativeTimeInMillisSupplier(), TimeValue.timeValueMinutes(5) @@ -99,35 +82,6 @@ public DesiredBalanceReconciler(ClusterSettings clusterSettings, ThreadPool thre UNDESIRED_ALLOCATIONS_LOG_THRESHOLD_SETTING, value -> this.undesiredAllocationsLogThreshold = value ); - - unassignedShards = LongGaugeMetric.create( - meterRegistry, - "es.allocator.desired_balance.shards.unassigned.current", - "Current number of unassigned shards", - "{shard}" - ); - totalAllocations = LongGaugeMetric.create( - meterRegistry, - "es.allocator.desired_balance.shards.current", - "Total number of shards", - "{shard}" - ); - undesiredAllocations = LongGaugeMetric.create( - meterRegistry, - "es.allocator.desired_balance.allocations.undesired.current", - "Total number of shards allocated on undesired nodes excluding shutting down nodes", - "{shard}" - ); - undesiredAllocationsRatio = meterRegistry.registerDoubleGauge( - "es.allocator.desired_balance.allocations.undesired.ratio", - "Ratio of undesired allocations to shard count excluding shutting down nodes", - "1", - () -> { - var total = totalAllocations.get(); - var undesired = undesiredAllocations.get(); - return new DoubleWithAttributes(total != 0 ? 
(double) undesired / total : 0.0); - } - ); } public void reconcile(DesiredBalance desiredBalance, RoutingAllocation allocation) { @@ -578,9 +532,7 @@ private void balance() { } } - DesiredBalanceReconciler.this.unassignedShards.set(unassignedShards); - DesiredBalanceReconciler.this.undesiredAllocations.set(undesiredAllocationsExcludingShuttingDownNodes); - DesiredBalanceReconciler.this.totalAllocations.set(totalAllocations); + desiredBalanceMetrics.updateMetrics(unassignedShards, totalAllocations, undesiredAllocationsExcludingShuttingDownNodes); maybeLogUndesiredAllocationsWarning(totalAllocations, undesiredAllocationsExcludingShuttingDownNodes, routingNodes.size()); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java index ba16915f2ad2b..4171100191211 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java @@ -64,6 +64,7 @@ public class DesiredBalanceShardsAllocator implements ShardsAllocator { private volatile DesiredBalance currentDesiredBalance = DesiredBalance.INITIAL; private volatile boolean resetCurrentDesiredBalance = false; private final Set processedNodeShutdowns = new HashSet<>(); + private final DesiredBalanceMetrics desiredBalanceMetrics; // stats protected final CounterMetric computationsSubmitted = new CounterMetric(); @@ -104,6 +105,7 @@ public DesiredBalanceShardsAllocator( DesiredBalanceReconcilerAction reconciler, TelemetryProvider telemetryProvider ) { + this.desiredBalanceMetrics = new DesiredBalanceMetrics(telemetryProvider.getMeterRegistry()); this.delegateAllocator = delegateAllocator; this.threadPool = threadPool; this.reconciler = reconciler; @@ -111,7 +113,7 @@ public 
DesiredBalanceShardsAllocator( this.desiredBalanceReconciler = new DesiredBalanceReconciler( clusterService.getClusterSettings(), threadPool, - telemetryProvider.getMeterRegistry() + desiredBalanceMetrics ); this.desiredBalanceComputation = new ContinuousComputation<>(threadPool.generic()) { @@ -168,6 +170,10 @@ public String toString() { if (event.localNodeMaster() == false) { onNoLongerMaster(); } + // Only update on change, to minimise volatile writes + if (event.localNodeMaster() != event.previousState().nodes().isLocalNodeElectedMaster()) { + desiredBalanceMetrics.setNodeIsMaster(event.localNodeMaster()); + } }); } @@ -306,9 +312,9 @@ public DesiredBalanceStats getStats() { computedShardMovements.sum(), cumulativeComputationTime.count(), cumulativeReconciliationTime.count(), - desiredBalanceReconciler.unassignedShards.get(), - desiredBalanceReconciler.totalAllocations.get(), - desiredBalanceReconciler.undesiredAllocations.get() + desiredBalanceMetrics.unassignedShards(), + desiredBalanceMetrics.totalAllocations(), + desiredBalanceMetrics.undesiredAllocations() ); } @@ -318,10 +324,7 @@ private void onNoLongerMaster() { queue.completeAllAsNotMaster(); pendingDesiredBalanceMoves.clear(); desiredBalanceReconciler.clear(); - - desiredBalanceReconciler.unassignedShards.set(0); - desiredBalanceReconciler.totalAllocations.set(0); - desiredBalanceReconciler.undesiredAllocations.set(0); + desiredBalanceMetrics.zeroAllMetrics(); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceMetricsTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceMetricsTests.java new file mode 100644 index 0000000000000..2c642da665051 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceMetricsTests.java @@ -0,0 +1,116 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.cluster.routing.allocation.allocator; + +import org.elasticsearch.telemetry.InstrumentType; +import org.elasticsearch.telemetry.RecordingMeterRegistry; +import org.elasticsearch.telemetry.metric.MeterRegistry; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; + +public class DesiredBalanceMetricsTests extends ESTestCase { + + public void testZeroAllMetrics() { + DesiredBalanceMetrics metrics = new DesiredBalanceMetrics(MeterRegistry.NOOP); + long unassignedShards = randomNonNegativeLong(); + long totalAllocations = randomNonNegativeLong(); + long undesiredAllocations = randomNonNegativeLong(); + metrics.updateMetrics(unassignedShards, totalAllocations, undesiredAllocations); + assertEquals(totalAllocations, metrics.totalAllocations()); + assertEquals(unassignedShards, metrics.unassignedShards()); + assertEquals(undesiredAllocations, metrics.undesiredAllocations()); + metrics.zeroAllMetrics(); + assertEquals(0, metrics.totalAllocations()); + assertEquals(0, metrics.unassignedShards()); + assertEquals(0, metrics.undesiredAllocations()); + } + + public void testMetricsAreOnlyPublishedWhenNodeIsMaster() { + RecordingMeterRegistry meterRegistry = new RecordingMeterRegistry(); + DesiredBalanceMetrics metrics = new DesiredBalanceMetrics(meterRegistry); + + long unassignedShards = randomNonNegativeLong(); + long totalAllocations = randomLongBetween(100, 10000000); + long undesiredAllocations = randomLongBetween(0, totalAllocations); + 
metrics.updateMetrics(unassignedShards, totalAllocations, undesiredAllocations); + + // Collect when not master + meterRegistry.getRecorder().collect(); + assertThat( + meterRegistry.getRecorder() + .getMeasurements(InstrumentType.LONG_GAUGE, DesiredBalanceMetrics.UNDESIRED_ALLOCATION_COUNT_METRIC_NAME), + empty() + ); + assertThat( + meterRegistry.getRecorder().getMeasurements(InstrumentType.LONG_GAUGE, DesiredBalanceMetrics.TOTAL_SHARDS_METRIC_NAME), + empty() + ); + assertThat( + meterRegistry.getRecorder().getMeasurements(InstrumentType.LONG_GAUGE, DesiredBalanceMetrics.UNASSIGNED_SHARDS_METRIC_NAME), + empty() + ); + assertThat( + meterRegistry.getRecorder() + .getMeasurements(InstrumentType.DOUBLE_GAUGE, DesiredBalanceMetrics.UNDESIRED_ALLOCATION_RATIO_METRIC_NAME), + empty() + ); + + // Collect when master + metrics.setNodeIsMaster(true); + meterRegistry.getRecorder().collect(); + assertThat( + meterRegistry.getRecorder() + .getMeasurements(InstrumentType.LONG_GAUGE, DesiredBalanceMetrics.UNDESIRED_ALLOCATION_COUNT_METRIC_NAME) + .getFirst() + .getLong(), + equalTo(undesiredAllocations) + ); + assertThat( + meterRegistry.getRecorder() + .getMeasurements(InstrumentType.LONG_GAUGE, DesiredBalanceMetrics.TOTAL_SHARDS_METRIC_NAME) + .getFirst() + .getLong(), + equalTo(totalAllocations) + ); + assertThat( + meterRegistry.getRecorder() + .getMeasurements(InstrumentType.LONG_GAUGE, DesiredBalanceMetrics.UNASSIGNED_SHARDS_METRIC_NAME) + .getFirst() + .getLong(), + equalTo(unassignedShards) + ); + assertThat( + meterRegistry.getRecorder() + .getMeasurements(InstrumentType.DOUBLE_GAUGE, DesiredBalanceMetrics.UNDESIRED_ALLOCATION_RATIO_METRIC_NAME) + .getFirst() + .getDouble(), + equalTo((double) undesiredAllocations / totalAllocations) + ); + } + + public void testUndesiredAllocationRatioIsZeroWhenTotalShardsIsZero() { + RecordingMeterRegistry meterRegistry = new RecordingMeterRegistry(); + DesiredBalanceMetrics metrics = new DesiredBalanceMetrics(meterRegistry); + 
long unassignedShards = randomNonNegativeLong(); + metrics.updateMetrics(unassignedShards, 0, 0); + + metrics.setNodeIsMaster(true); + meterRegistry.getRecorder().collect(); + assertThat( + meterRegistry.getRecorder() + .getMeasurements(InstrumentType.DOUBLE_GAUGE, DesiredBalanceMetrics.UNDESIRED_ALLOCATION_RATIO_METRIC_NAME) + .getFirst() + .getDouble(), + equalTo(0d) + ); + } +} diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java index 1ae73c9c08137..b5f44ee9e505f 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java @@ -66,7 +66,6 @@ import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotShardSizeInfo; import org.elasticsearch.snapshots.SnapshotsInfoService; -import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLog; import org.elasticsearch.threadpool.ThreadPool; @@ -1215,7 +1214,7 @@ public void testRebalanceDoesNotCauseHotSpots() { var reconciler = new DesiredBalanceReconciler( clusterSettings, new DeterministicTaskQueue().getThreadPool(), - mock(MeterRegistry.class) + DesiredBalanceMetrics.NOOP ); var totalOutgoingMoves = new HashMap(); @@ -1297,7 +1296,7 @@ public void testShouldLogOnTooManyUndesiredAllocations() { final var timeInMillisSupplier = new AtomicLong(); when(threadPool.relativeTimeInMillisSupplier()).thenReturn(timeInMillisSupplier::incrementAndGet); - var reconciler = new DesiredBalanceReconciler(createBuiltInClusterSettings(), threadPool, mock(MeterRegistry.class)); + var reconciler = new DesiredBalanceReconciler(createBuiltInClusterSettings(), threadPool, 
DesiredBalanceMetrics.NOOP); final long initialDelayInMillis = TimeValue.timeValueMinutes(5).getMillis(); timeInMillisSupplier.addAndGet(randomLongBetween(initialDelayInMillis, 2 * initialDelayInMillis)); @@ -1349,7 +1348,7 @@ public void testShouldLogOnTooManyUndesiredAllocations() { private static void reconcile(RoutingAllocation routingAllocation, DesiredBalance desiredBalance) { final var threadPool = mock(ThreadPool.class); when(threadPool.relativeTimeInMillisSupplier()).thenReturn(new AtomicLong()::incrementAndGet); - new DesiredBalanceReconciler(createBuiltInClusterSettings(), threadPool, mock(MeterRegistry.class)).reconcile( + new DesiredBalanceReconciler(createBuiltInClusterSettings(), threadPool, DesiredBalanceMetrics.NOOP).reconcile( desiredBalance, routingAllocation ); From 6e67da52d1d3a26ede8065d9b33ed2173b081eed Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Thu, 24 Oct 2024 10:02:47 +0200 Subject: [PATCH 349/449] =?UTF-8?q?[DOCS=E2=80=93=20Fix=20typoUpdate=20tra?= =?UTF-8?q?inedmodel.asciidoc=20(#115420)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes https://github.com/elastic/elasticsearch/issues/114968 --- docs/reference/cat/trainedmodel.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/cat/trainedmodel.asciidoc b/docs/reference/cat/trainedmodel.asciidoc index 45c87038f5d64..5b20a0b6e842f 100644 --- a/docs/reference/cat/trainedmodel.asciidoc +++ b/docs/reference/cat/trainedmodel.asciidoc @@ -116,7 +116,7 @@ include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v] [source,console] -------------------------------------------------- -GET _cat/ml/trained_models?h=c,o,l,ct,v&v=ture +GET _cat/ml/trained_models?h=c,o,l,ct,v&v=true -------------------------------------------------- // TEST[skip:kibana sample data] From a281d62988f190bb9e25361325e060e0c38d15cc Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: 
Thu, 24 Oct 2024 10:26:16 +0200 Subject: [PATCH 350/449] Remove auto_release_flood_stage_block property check (#114696) There's no option to disable the auto release of the write block when a node exceeds the flood-stage watermark. This property was deprecated in #45274 (8.0) --- .../routing/allocation/DiskThresholdSettings.java | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettings.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettings.java index d1d6a9761a758..57abbb8b8ed94 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettings.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettings.java @@ -19,7 +19,6 @@ import org.elasticsearch.common.unit.RelativeByteSizeValue; import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; import java.io.IOException; import java.util.Iterator; @@ -156,19 +155,6 @@ public class DiskThresholdSettings implements Writeable { private volatile boolean enabled; private volatile TimeValue rerouteInterval; - static { - checkAutoReleaseIndexEnabled(); - } - - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // this check is unnecessary in v9 - private static void checkAutoReleaseIndexEnabled() { - final String AUTO_RELEASE_INDEX_ENABLED_KEY = "es.disk.auto_release_flood_stage_block"; - final String property = System.getProperty(AUTO_RELEASE_INDEX_ENABLED_KEY); - if (property != null) { - throw new IllegalArgumentException("system property [" + AUTO_RELEASE_INDEX_ENABLED_KEY + "] may not be set"); - } - } - public DiskThresholdSettings(Settings settings, ClusterSettings clusterSettings) { setLowWatermark(CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.get(settings)); 
setLowStageMaxHeadroom(CLUSTER_ROUTING_ALLOCATION_LOW_DISK_MAX_HEADROOM_SETTING.get(settings)); From cc6e7415c1d8d145f1ae65d58dbb72c3a439d32e Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Thu, 24 Oct 2024 09:44:45 +0100 Subject: [PATCH 351/449] Remove security bootstrap check that uses Version (#114923) --- .../elasticsearch/common/ReferenceDocs.java | 1 - .../org/elasticsearch/env/BuildVersion.java | 7 -- .../env/DefaultBuildVersion.java | 5 -- .../common/reference-docs-links.txt | 1 - .../xpack/security/Security.java | 12 ++- ...ecurityImplicitBehaviorBootstrapCheck.java | 67 ----------------- ...tyImplicitBehaviorBootstrapCheckTests.java | 73 ------------------- 7 files changed, 5 insertions(+), 161 deletions(-) delete mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityImplicitBehaviorBootstrapCheck.java delete mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityImplicitBehaviorBootstrapCheckTests.java diff --git a/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java b/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java index 43acda1e1ec2d..926056fec3ec8 100644 --- a/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java +++ b/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java @@ -66,7 +66,6 @@ public enum ReferenceDocs { BOOTSTRAP_CHECK_ROLE_MAPPINGS, BOOTSTRAP_CHECK_TLS, BOOTSTRAP_CHECK_TOKEN_SSL, - BOOTSTRAP_CHECK_SECURITY_MINIMAL_SETUP, CONTACT_SUPPORT, UNASSIGNED_SHARDS, EXECUTABLE_JNA_TMPDIR, diff --git a/server/src/main/java/org/elasticsearch/env/BuildVersion.java b/server/src/main/java/org/elasticsearch/env/BuildVersion.java index 42c45a14977eb..3fdf01d7e1bae 100644 --- a/server/src/main/java/org/elasticsearch/env/BuildVersion.java +++ b/server/src/main/java/org/elasticsearch/env/BuildVersion.java @@ -58,13 +58,6 @@ public abstract class BuildVersion { */ public abstract boolean isFutureVersion(); - // temporary - // 
TODO[wrb]: remove from security bootstrap checks - @Deprecated - public Version toVersion() { - return null; - } - /** * Create a {@link BuildVersion} from a version ID number. * diff --git a/server/src/main/java/org/elasticsearch/env/DefaultBuildVersion.java b/server/src/main/java/org/elasticsearch/env/DefaultBuildVersion.java index dcc5ed3aee3f8..f31b34e89c01d 100644 --- a/server/src/main/java/org/elasticsearch/env/DefaultBuildVersion.java +++ b/server/src/main/java/org/elasticsearch/env/DefaultBuildVersion.java @@ -52,11 +52,6 @@ public int id() { return versionId; } - @Override - public Version toVersion() { - return version; - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt b/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt index 3b0816aabf4aa..f9a8237d63717 100644 --- a/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt +++ b/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt @@ -28,7 +28,6 @@ BOOTSTRAP_CHECK_PKI_REALM bootstrap-checks BOOTSTRAP_CHECK_ROLE_MAPPINGS bootstrap-checks-xpack.html#bootstrap-checks-xpack-role-mappings BOOTSTRAP_CHECK_TLS bootstrap-checks-xpack.html#bootstrap-checks-tls BOOTSTRAP_CHECK_TOKEN_SSL bootstrap-checks-xpack.html#bootstrap-checks-xpack-token-ssl -BOOTSTRAP_CHECK_SECURITY_MINIMAL_SETUP security-minimal-setup.html CONTACT_SUPPORT troubleshooting.html#troubleshooting-contact-support UNASSIGNED_SHARDS red-yellow-cluster-status.html EXECUTABLE_JNA_TMPDIR executable-jna-tmpdir.html diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 8f32bcf7ace8a..0b387a738a2c5 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -810,13 +810,11 @@ Collection createComponents( // We need to construct the checks here while the secure settings are still available. // If we wait until #getBoostrapChecks the secure settings will have been cleared/closed. final List checks = new ArrayList<>(); - checks.addAll( - Arrays.asList( - new TokenSSLBootstrapCheck(), - new PkiRealmBootstrapCheck(getSslService()), - new SecurityImplicitBehaviorBootstrapCheck(nodeMetadata, getLicenseService()), - new TransportTLSBootstrapCheck() - ) + Collections.addAll( + checks, + new TokenSSLBootstrapCheck(), + new PkiRealmBootstrapCheck(getSslService()), + new TransportTLSBootstrapCheck() ); checks.addAll(InternalRealms.getBootstrapChecks(settings, environment)); this.bootstrapChecks.set(Collections.unmodifiableList(checks)); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityImplicitBehaviorBootstrapCheck.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityImplicitBehaviorBootstrapCheck.java deleted file mode 100644 index 2d535100d468d..0000000000000 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityImplicitBehaviorBootstrapCheck.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.security; - -import org.elasticsearch.Version; -import org.elasticsearch.bootstrap.BootstrapCheck; -import org.elasticsearch.bootstrap.BootstrapContext; -import org.elasticsearch.common.ReferenceDocs; -import org.elasticsearch.env.NodeMetadata; -import org.elasticsearch.license.ClusterStateLicenseService; -import org.elasticsearch.license.License; -import org.elasticsearch.license.LicenseService; -import org.elasticsearch.xpack.core.XPackSettings; - -public class SecurityImplicitBehaviorBootstrapCheck implements BootstrapCheck { - - private final NodeMetadata nodeMetadata; - private final LicenseService licenseService; - - public SecurityImplicitBehaviorBootstrapCheck(NodeMetadata nodeMetadata, LicenseService licenseService) { - this.nodeMetadata = nodeMetadata; - this.licenseService = licenseService; - } - - @Override - public BootstrapCheckResult check(BootstrapContext context) { - if (nodeMetadata == null) { - return BootstrapCheckResult.success(); - } - if (licenseService instanceof ClusterStateLicenseService clusterStateLicenseService) { - final License license = clusterStateLicenseService.getLicense(context.metadata()); - // TODO[wrb]: Add an "isCurrentMajor" method to BuildVersion? - final Version lastKnownVersion = nodeMetadata.previousNodeVersion().toVersion(); - // pre v7.2.0 nodes have Version.EMPTY and its id is 0, so Version#before handles this successfully - if (lastKnownVersion.before(Version.V_8_0_0) - && XPackSettings.SECURITY_ENABLED.exists(context.settings()) == false - && (license.operationMode() == License.OperationMode.BASIC || license.operationMode() == License.OperationMode.TRIAL)) { - return BootstrapCheckResult.failure( - "The default value for [" - + XPackSettings.SECURITY_ENABLED.getKey() - + "] has changed in the current version. " - + " Security features were implicitly disabled for this node but they would now be enabled, possibly" - + " preventing access to the node. 
" - + "See " - + this.referenceDocs() - + " to configure security, or explicitly disable security by " - + "setting [xpack.security.enabled] to \"false\" in elasticsearch.yml before restarting the node." - ); - } - } - return BootstrapCheckResult.success(); - } - - public boolean alwaysEnforce() { - return true; - } - - @Override - public ReferenceDocs referenceDocs() { - return ReferenceDocs.BOOTSTRAP_CHECK_SECURITY_MINIMAL_SETUP; - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityImplicitBehaviorBootstrapCheckTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityImplicitBehaviorBootstrapCheckTests.java deleted file mode 100644 index 85e8d6dd38125..0000000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityImplicitBehaviorBootstrapCheckTests.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.security; - -import org.elasticsearch.Version; -import org.elasticsearch.bootstrap.BootstrapCheck; -import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.env.BuildVersion; -import org.elasticsearch.env.NodeMetadata; -import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.license.ClusterStateLicenseService; -import org.elasticsearch.license.License; -import org.elasticsearch.license.LicensesMetadata; -import org.elasticsearch.license.TestUtils; -import org.elasticsearch.license.internal.TrialLicenseVersion; -import org.elasticsearch.test.AbstractBootstrapCheckTestCase; -import org.elasticsearch.test.VersionUtils; -import org.elasticsearch.xpack.core.XPackSettings; - -import static org.hamcrest.Matchers.is; -import static org.mockito.Mockito.mock; - -public class SecurityImplicitBehaviorBootstrapCheckTests extends AbstractBootstrapCheckTestCase { - - @UpdateForV9(owner = UpdateForV9.Owner.SECURITY) - @AwaitsFix(bugUrl = "requires updates for version 9.0 bump") - public void testUpgradeFrom8xWithImplicitSecuritySettings() throws Exception { - final BuildVersion previousVersion = toBuildVersion(VersionUtils.randomVersionBetween(random(), Version.V_8_0_0, null)); - NodeMetadata nodeMetadata = new NodeMetadata(randomAlphaOfLength(10), previousVersion, IndexVersion.current()); - nodeMetadata = nodeMetadata.upgradeToCurrentVersion(); - ClusterStateLicenseService licenseService = mock(ClusterStateLicenseService.class); - BootstrapCheck.BootstrapCheckResult result = new SecurityImplicitBehaviorBootstrapCheck(nodeMetadata, licenseService).check( - createTestContext( - Settings.EMPTY, - createLicensesMetadata(TrialLicenseVersion.fromXContent(previousVersion.toString()), randomFrom("basic", "trial")) - ) - ); - assertThat(result.isSuccess(), is(true)); - } - - 
@UpdateForV9(owner = UpdateForV9.Owner.SECURITY) - @AwaitsFix(bugUrl = "requires updates for version 9.0 bump") - public void testUpgradeFrom8xWithExplicitSecuritySettings() throws Exception { - final BuildVersion previousVersion = toBuildVersion(VersionUtils.randomVersionBetween(random(), Version.V_8_0_0, null)); - NodeMetadata nodeMetadata = new NodeMetadata(randomAlphaOfLength(10), previousVersion, IndexVersion.current()); - nodeMetadata = nodeMetadata.upgradeToCurrentVersion(); - ClusterStateLicenseService licenseService = mock(ClusterStateLicenseService.class); - BootstrapCheck.BootstrapCheckResult result = new SecurityImplicitBehaviorBootstrapCheck(nodeMetadata, licenseService).check( - createTestContext( - Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build(), - createLicensesMetadata(TrialLicenseVersion.fromXContent(previousVersion.toString()), randomFrom("basic", "trial")) - ) - ); - assertThat(result.isSuccess(), is(true)); - } - - private Metadata createLicensesMetadata(TrialLicenseVersion era, String licenseMode) throws Exception { - License license = TestUtils.generateSignedLicense(licenseMode, TimeValue.timeValueHours(2)); - return Metadata.builder().putCustom(LicensesMetadata.TYPE, new LicensesMetadata(license, era)).build(); - } - - private static BuildVersion toBuildVersion(Version version) { - return BuildVersion.fromVersionId(version.id()); - } -} From 7599d4cf43a45407bef3d88b2a0f5de8706fb540 Mon Sep 17 00:00:00 2001 From: Nick Tindall Date: Thu, 24 Oct 2024 19:51:52 +1100 Subject: [PATCH 352/449] Use Azure blob batch API to delete blobs in batches (#114566) Closes ES-9777 --- docs/changelog/114566.yaml | 5 + .../repository-azure.asciidoc | 9 + gradle/verification-metadata.xml | 5 + modules/repository-azure/build.gradle | 1 + .../AzureBlobStoreRepositoryMetricsTests.java | 91 +++++++++ .../azure/AzureBlobStoreRepositoryTests.java | 24 ++- .../AzureStorageCleanupThirdPartyTests.java | 5 + .../src/main/java/module-info.java 
| 5 +- .../azure/AzureBlobContainer.java | 2 +- .../repositories/azure/AzureBlobStore.java | 175 ++++++++++-------- .../azure/AzureClientProvider.java | 14 ++ .../repositories/azure/AzureRepository.java | 15 ++ .../azure/AzureBlobContainerStatsTests.java | 10 + .../java/fixture/azure/AzureHttpHandler.java | 101 ++++++++++ 14 files changed, 374 insertions(+), 88 deletions(-) create mode 100644 docs/changelog/114566.yaml diff --git a/docs/changelog/114566.yaml b/docs/changelog/114566.yaml new file mode 100644 index 0000000000000..6007152bb26ca --- /dev/null +++ b/docs/changelog/114566.yaml @@ -0,0 +1,5 @@ +pr: 114566 +summary: Use Azure blob batch API to delete blobs in batches +area: Distributed +type: enhancement +issues: [] diff --git a/docs/reference/snapshot-restore/repository-azure.asciidoc b/docs/reference/snapshot-restore/repository-azure.asciidoc index c361414052e14..0e6e1478cfc55 100644 --- a/docs/reference/snapshot-restore/repository-azure.asciidoc +++ b/docs/reference/snapshot-restore/repository-azure.asciidoc @@ -259,6 +259,15 @@ include::repository-shared-settings.asciidoc[] `primary_only` or `secondary_only`. Defaults to `primary_only`. Note that if you set it to `secondary_only`, it will force `readonly` to true. +`delete_objects_max_size`:: + + (integer) Sets the maximum batch size, between 1 and 256, used for `BlobBatch` requests. Defaults to 256 which is the maximum + number supported by the https://learn.microsoft.com/en-us/rest/api/storageservices/blob-batch#remarks[Azure blob batch API]. + +`max_concurrent_batch_deletes`:: + + (integer) Sets the maximum number of concurrent batch delete requests that will be submitted for any individual bulk delete with `BlobBatch`. Note that the effective number of concurrent deletes is further limited by the Azure client connection and event loop thread limits. Defaults to 10, minimum is 1, maximum is 100. 
+ [[repository-azure-validation]] ==== Repository validation rules diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index e2dfa89c8f3b8..5cfe7adb5ea49 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -144,6 +144,11 @@ + + + + + diff --git a/modules/repository-azure/build.gradle b/modules/repository-azure/build.gradle index eb938f663c810..d011de81f4fb3 100644 --- a/modules/repository-azure/build.gradle +++ b/modules/repository-azure/build.gradle @@ -30,6 +30,7 @@ dependencies { api "com.azure:azure-identity:1.13.2" api "com.azure:azure-json:1.2.0" api "com.azure:azure-storage-blob:12.27.1" + api "com.azure:azure-storage-blob-batch:12.23.1" api "com.azure:azure-storage-common:12.26.1" api "com.azure:azure-storage-internal-avro:12.12.1" api "com.azure:azure-xml:1.1.0" diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java index a9bf0afa37e18..61940be247861 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java @@ -9,14 +9,18 @@ package org.elasticsearch.repositories.azure; +import com.sun.net.httpserver.Headers; import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Randomness; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.OperationPurpose; +import org.elasticsearch.common.bytes.BytesArray; import 
org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.repositories.RepositoriesMetrics; @@ -31,6 +35,7 @@ import java.io.IOException; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Queue; @@ -43,6 +48,7 @@ import java.util.stream.IntStream; import static org.elasticsearch.repositories.azure.AbstractAzureServerTestCase.randomBlobContent; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -225,6 +231,91 @@ public void testRequestTimeIsAccurate() throws IOException { assertThat(recordedRequestTime, lessThanOrEqualTo(elapsedTimeMillis)); } + public void testBatchDeleteFailure() throws IOException { + final int deleteBatchSize = randomIntBetween(1, 30); + final String repositoryName = randomRepositoryName(); + final String repository = createRepository( + repositoryName, + Settings.builder() + .put(repositorySettings(repositoryName)) + .put(AzureRepository.Repository.DELETION_BATCH_SIZE_SETTING.getKey(), deleteBatchSize) + .build(), + true + ); + final String dataNodeName = internalCluster().getNodeNameThat(DiscoveryNode::canContainData); + final BlobContainer container = getBlobContainer(dataNodeName, repository); + + final List blobsToDelete = new ArrayList<>(); + final int numberOfBatches = randomIntBetween(3, 20); + final int numberOfBlobs = numberOfBatches * deleteBatchSize; + final int failedBatches = randomIntBetween(1, numberOfBatches); + for (int i = 0; i < numberOfBlobs; i++) { + byte[] bytes = randomBytes(randomInt(100)); + String blobName = "index-" + randomAlphaOfLength(10); + 
container.writeBlob(randomPurpose(), blobName, new BytesArray(bytes), false); + blobsToDelete.add(blobName); + } + Randomness.shuffle(blobsToDelete); + clearMetrics(dataNodeName); + + // Handler will fail one or more of the batch requests + final RequestHandler failNRequestRequestHandler = createFailNRequestsHandler(failedBatches); + + // Exhaust the retries + IntStream.range(0, (numberOfBatches - failedBatches) + (failedBatches * (MAX_RETRIES + 1))) + .forEach(i -> requestHandlers.offer(failNRequestRequestHandler)); + + logger.info("--> Failing {} of {} batches", failedBatches, numberOfBatches); + + final IOException exception = assertThrows( + IOException.class, + () -> container.deleteBlobsIgnoringIfNotExists(randomPurpose(), blobsToDelete.iterator()) + ); + assertEquals(Math.min(failedBatches, 10), exception.getSuppressed().length); + assertEquals( + (numberOfBatches - failedBatches) + (failedBatches * (MAX_RETRIES + 1L)), + getLongCounterTotal(dataNodeName, RepositoriesMetrics.METRIC_REQUESTS_TOTAL) + ); + assertEquals((failedBatches * (MAX_RETRIES + 1L)), getLongCounterTotal(dataNodeName, RepositoriesMetrics.METRIC_EXCEPTIONS_TOTAL)); + assertEquals(failedBatches * deleteBatchSize, container.listBlobs(randomPurpose()).size()); + } + + private long getLongCounterTotal(String dataNodeName, String metricKey) { + return getTelemetryPlugin(dataNodeName).getLongCounterMeasurement(metricKey) + .stream() + .mapToLong(Measurement::getLong) + .reduce(0L, Long::sum); + } + + /** + * Creates a {@link RequestHandler} that will persistently fail the first numberToFail distinct requests + * it sees. Any other requests are passed through to the delegate. 
+ * + * @param numberToFail The number of requests to fail + * @return the handler + */ + private static RequestHandler createFailNRequestsHandler(int numberToFail) { + final List requestsToFail = new ArrayList<>(numberToFail); + return (exchange, delegate) -> { + final Headers requestHeaders = exchange.getRequestHeaders(); + final String requestId = requestHeaders.get("X-ms-client-request-id").get(0); + boolean failRequest = false; + synchronized (requestsToFail) { + if (requestsToFail.contains(requestId)) { + failRequest = true; + } else if (requestsToFail.size() < numberToFail) { + requestsToFail.add(requestId); + failRequest = true; + } + } + if (failRequest) { + exchange.sendResponseHeaders(500, -1); + } else { + delegate.handle(exchange); + } + }; + } + private void clearMetrics(String discoveryNode) { internalCluster().getInstance(PluginsService.class, discoveryNode) .filterPlugins(TestTelemetryPlugin.class) diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java index 473d91da6e34c..bd21f208faac4 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java @@ -89,7 +89,9 @@ protected Settings repositorySettings(String repoName) { .put(super.repositorySettings(repoName)) .put(AzureRepository.Repository.MAX_SINGLE_PART_UPLOAD_SIZE_SETTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.MB)) .put(AzureRepository.Repository.CONTAINER_SETTING.getKey(), "container") - .put(AzureStorageSettings.ACCOUNT_SETTING.getKey(), "test"); + .put(AzureStorageSettings.ACCOUNT_SETTING.getKey(), "test") + 
.put(AzureRepository.Repository.DELETION_BATCH_SIZE_SETTING.getKey(), randomIntBetween(5, 256)) + .put(AzureRepository.Repository.MAX_CONCURRENT_BATCH_DELETES_SETTING.getKey(), randomIntBetween(1, 10)); if (randomBoolean()) { settingsBuilder.put(AzureRepository.Repository.BASE_PATH_SETTING.getKey(), randomFrom("test", "test/1")); } @@ -249,6 +251,8 @@ protected void maybeTrack(String request, Headers headers) { trackRequest("PutBlockList"); } else if (Regex.simpleMatch("PUT /*/*", request)) { trackRequest("PutBlob"); + } else if (Regex.simpleMatch("POST /*/*?*comp=batch*", request)) { + trackRequest("BlobBatch"); } } @@ -279,10 +283,22 @@ public void testLargeBlobCountDeletion() throws Exception { } public void testDeleteBlobsIgnoringIfNotExists() throws Exception { - try (BlobStore store = newBlobStore()) { + // Test with a smaller batch size here + final int deleteBatchSize = randomIntBetween(1, 30); + final String repositoryName = randomRepositoryName(); + createRepository( + repositoryName, + Settings.builder() + .put(repositorySettings(repositoryName)) + .put(AzureRepository.Repository.DELETION_BATCH_SIZE_SETTING.getKey(), deleteBatchSize) + .build(), + true + ); + try (BlobStore store = newBlobStore(repositoryName)) { final BlobContainer container = store.blobContainer(BlobPath.EMPTY); - List blobsToDelete = new ArrayList<>(); - for (int i = 0; i < 10; i++) { + final int toDeleteCount = randomIntBetween(deleteBatchSize, 3 * deleteBatchSize); + final List blobsToDelete = new ArrayList<>(); + for (int i = 0; i < toDeleteCount; i++) { byte[] bytes = randomBytes(randomInt(100)); String blobName = randomAlphaOfLength(10); container.writeBlob(randomPurpose(), blobName, new BytesArray(bytes), false); diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java 
b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java index abd4f506a0bb3..6d5c17c392141 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java @@ -30,6 +30,8 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.Booleans; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.AbstractThirdPartyRepositoryTestCase; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; @@ -46,6 +48,7 @@ import static org.hamcrest.Matchers.not; public class AzureStorageCleanupThirdPartyTests extends AbstractThirdPartyRepositoryTestCase { + private static final Logger logger = LogManager.getLogger(AzureStorageCleanupThirdPartyTests.class); private static final boolean USE_FIXTURE = Booleans.parseBoolean(System.getProperty("test.azure.fixture", "true")); private static final String AZURE_ACCOUNT = System.getProperty("test.azure.account"); @@ -89,8 +92,10 @@ protected SecureSettings credentials() { MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("azure.client.default.account", System.getProperty("test.azure.account")); if (hasSasToken) { + logger.info("--> Using SAS token authentication"); secureSettings.setString("azure.client.default.sas_token", System.getProperty("test.azure.sas_token")); } else { + logger.info("--> Using key authentication"); secureSettings.setString("azure.client.default.key", System.getProperty("test.azure.key")); } return secureSettings; diff --git a/modules/repository-azure/src/main/java/module-info.java 
b/modules/repository-azure/src/main/java/module-info.java index cd6be56b71543..731f1e0a9986a 100644 --- a/modules/repository-azure/src/main/java/module-info.java +++ b/modules/repository-azure/src/main/java/module-info.java @@ -18,10 +18,7 @@ requires org.apache.logging.log4j; requires org.apache.logging.log4j.core; - requires com.azure.core; requires com.azure.http.netty; - requires com.azure.storage.blob; - requires com.azure.storage.common; requires com.azure.identity; requires io.netty.buffer; @@ -29,7 +26,7 @@ requires io.netty.resolver; requires io.netty.common; - requires reactor.core; requires reactor.netty.core; requires reactor.netty.http; + requires com.azure.storage.blob.batch; } diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java index a3f26424324fa..52bc1ee1399d4 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java +++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java @@ -138,7 +138,7 @@ public void writeMetadataBlob( } @Override - public DeleteResult delete(OperationPurpose purpose) { + public DeleteResult delete(OperationPurpose purpose) throws IOException { return blobStore.deleteBlobDirectory(purpose, keyPath); } diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java index 829868797e38c..3c64bb9f3b830 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java +++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java @@ -25,6 +25,10 @@ import com.azure.storage.blob.BlobContainerClient; import 
com.azure.storage.blob.BlobServiceAsyncClient; import com.azure.storage.blob.BlobServiceClient; +import com.azure.storage.blob.batch.BlobBatch; +import com.azure.storage.blob.batch.BlobBatchAsyncClient; +import com.azure.storage.blob.batch.BlobBatchClientBuilder; +import com.azure.storage.blob.batch.BlobBatchStorageException; import com.azure.storage.blob.models.BlobErrorCode; import com.azure.storage.blob.models.BlobItem; import com.azure.storage.blob.models.BlobItemProperties; @@ -99,6 +103,8 @@ public class AzureBlobStore implements BlobStore { private static final Logger logger = LogManager.getLogger(AzureBlobStore.class); + // See https://learn.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body + public static final int MAX_ELEMENTS_PER_BATCH = 256; private static final long DEFAULT_READ_CHUNK_SIZE = new ByteSizeValue(32, ByteSizeUnit.MB).getBytes(); private static final int DEFAULT_UPLOAD_BUFFERS_SIZE = (int) new ByteSizeValue(64, ByteSizeUnit.KB).getBytes(); @@ -110,6 +116,8 @@ public class AzureBlobStore implements BlobStore { private final String container; private final LocationMode locationMode; private final ByteSizeValue maxSinglePartUploadSize; + private final int deletionBatchSize; + private final int maxConcurrentBatchDeletes; private final RequestMetricsRecorder requestMetricsRecorder; private final AzureClientProvider.RequestMetricsHandler requestMetricsHandler; @@ -129,6 +137,8 @@ public AzureBlobStore( // locationMode is set per repository, not per client this.locationMode = Repository.LOCATION_MODE_SETTING.get(metadata.settings()); this.maxSinglePartUploadSize = Repository.MAX_SINGLE_PART_UPLOAD_SIZE_SETTING.get(metadata.settings()); + this.deletionBatchSize = Repository.DELETION_BATCH_SIZE_SETTING.get(metadata.settings()); + this.maxConcurrentBatchDeletes = Repository.MAX_CONCURRENT_BATCH_DELETES_SETTING.get(metadata.settings()); List requestMatchers = List.of( new RequestMatcher((httpMethod, url) -> httpMethod == 
HttpMethod.HEAD, Operation.GET_BLOB_PROPERTIES), @@ -147,17 +157,14 @@ public AzureBlobStore( && isPutBlockRequest(httpMethod, url) == false && isPutBlockListRequest(httpMethod, url) == false, Operation.PUT_BLOB - ) + ), + new RequestMatcher(AzureBlobStore::isBlobBatch, Operation.BLOB_BATCH) ); this.requestMetricsHandler = (purpose, method, url, metrics) -> { try { URI uri = url.toURI(); String path = uri.getPath() == null ? "" : uri.getPath(); - // Batch delete requests - if (path.contains(container) == false) { - return; - } assert path.contains(container) : uri.toString(); } catch (URISyntaxException ignored) { return; @@ -172,6 +179,10 @@ && isPutBlockListRequest(httpMethod, url) == false, }; } + private static boolean isBlobBatch(HttpMethod method, URL url) { + return method == HttpMethod.POST && url.getQuery() != null && url.getQuery().contains("comp=batch"); + } + private static boolean isListRequest(HttpMethod httpMethod, URL url) { return httpMethod == HttpMethod.GET && url.getQuery() != null && url.getQuery().contains("comp=list"); } @@ -231,95 +242,101 @@ public boolean blobExists(OperationPurpose purpose, String blob) throws IOExcept } } - // number of concurrent blob delete requests to use while bulk deleting - private static final int CONCURRENT_DELETES = 100; - - public DeleteResult deleteBlobDirectory(OperationPurpose purpose, String path) { + public DeleteResult deleteBlobDirectory(OperationPurpose purpose, String path) throws IOException { final AtomicInteger blobsDeleted = new AtomicInteger(0); final AtomicLong bytesDeleted = new AtomicLong(0); SocketAccess.doPrivilegedVoidException(() -> { - final BlobContainerAsyncClient blobContainerAsyncClient = asyncClient(purpose).getBlobContainerAsyncClient(container); + final AzureBlobServiceClient client = getAzureBlobServiceClientClient(purpose); + final BlobContainerAsyncClient blobContainerAsyncClient = client.getAsyncClient().getBlobContainerAsyncClient(container); final ListBlobsOptions options = 
new ListBlobsOptions().setPrefix(path) .setDetails(new BlobListDetails().setRetrieveMetadata(true)); - try { - blobContainerAsyncClient.listBlobs(options, null).flatMap(blobItem -> { - if (blobItem.isPrefix() != null && blobItem.isPrefix()) { - return Mono.empty(); - } else { - final String blobName = blobItem.getName(); - BlobAsyncClient blobAsyncClient = blobContainerAsyncClient.getBlobAsyncClient(blobName); - final Mono deleteTask = getDeleteTask(blobName, blobAsyncClient); - bytesDeleted.addAndGet(blobItem.getProperties().getContentLength()); - blobsDeleted.incrementAndGet(); - return deleteTask; - } - }, CONCURRENT_DELETES).then().block(); - } catch (Exception e) { - filterDeleteExceptionsAndRethrow(e, new IOException("Deleting directory [" + path + "] failed")); - } + final Flux blobsFlux = blobContainerAsyncClient.listBlobs(options).filter(bi -> bi.isPrefix() == false).map(bi -> { + bytesDeleted.addAndGet(bi.getProperties().getContentLength()); + blobsDeleted.incrementAndGet(); + return bi.getName(); + }); + deleteListOfBlobs(client, blobsFlux); }); return new DeleteResult(blobsDeleted.get(), bytesDeleted.get()); } - private static void filterDeleteExceptionsAndRethrow(Exception e, IOException exception) throws IOException { - int suppressedCount = 0; - for (Throwable suppressed : e.getSuppressed()) { - // We're only interested about the blob deletion exceptions and not in the reactor internals exceptions - if (suppressed instanceof IOException) { - exception.addSuppressed(suppressed); - suppressedCount++; - if (suppressedCount > 10) { - break; - } - } + @Override + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) throws IOException { + if (blobNames.hasNext() == false) { + return; + } + SocketAccess.doPrivilegedVoidException( + () -> deleteListOfBlobs( + getAzureBlobServiceClientClient(purpose), + Flux.fromStream(StreamSupport.stream(Spliterators.spliteratorUnknownSize(blobNames, Spliterator.ORDERED), false)) + ) + ); 
+ } + + private void deleteListOfBlobs(AzureBlobServiceClient azureBlobServiceClient, Flux blobNames) throws IOException { + // We need to use a container-scoped BlobBatchClient, so the restype=container parameter + // is sent, and we can support all SAS token types + // See https://learn.microsoft.com/en-us/rest/api/storageservices/blob-batch?tabs=shared-access-signatures#authorization + final BlobBatchAsyncClient batchAsyncClient = new BlobBatchClientBuilder( + azureBlobServiceClient.getAsyncClient().getBlobContainerAsyncClient(container) + ).buildAsyncClient(); + final List errors; + final AtomicInteger errorsCollected = new AtomicInteger(0); + try { + errors = blobNames.buffer(deletionBatchSize).flatMap(blobs -> { + final BlobBatch blobBatch = batchAsyncClient.getBlobBatch(); + blobs.forEach(blob -> blobBatch.deleteBlob(container, blob)); + return batchAsyncClient.submitBatch(blobBatch).then(Mono.empty()).onErrorResume(t -> { + // Ignore errors that are just 404s, send other errors downstream as values + if (AzureBlobStore.isIgnorableBatchDeleteException(t)) { + return Mono.empty(); + } else { + // Propagate the first 10 errors only + if (errorsCollected.getAndIncrement() < 10) { + return Mono.just(t); + } else { + return Mono.empty(); + } + } + }); + }, maxConcurrentBatchDeletes).collectList().block(); + } catch (Exception e) { + throw new IOException("Error deleting batches", e); + } + if (errors.isEmpty() == false) { + final int totalErrorCount = errorsCollected.get(); + final String errorMessage = totalErrorCount > errors.size() + ? "Some errors occurred deleting batches, the first " + + errors.size() + + " are included as suppressed, but the total count was " + + totalErrorCount + : "Some errors occurred deleting batches, all errors included as suppressed"; + final IOException ex = new IOException(errorMessage); + errors.forEach(ex::addSuppressed); + throw ex; } - throw exception; } /** - * {@inheritDoc} - *

      - * Note that in this Azure implementation we issue a series of individual - * delete blob calls rather than aggregating - * deletions into blob batch calls. - * The reason for this is that the blob batch endpoint has limited support for SAS token authentication. + * We can ignore {@link BlobBatchStorageException}s when they are just telling us some of the files were not found * - * @see - * API docs around SAS auth limitations - * @see Java SDK issue - * @see Discussion on implementing PR + * @param exception An exception thrown by batch delete + * @return true if it is safe to ignore, false otherwise */ - @Override - public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobs) { - if (blobs.hasNext() == false) { - return; - } - - BlobServiceAsyncClient asyncClient = asyncClient(purpose); - SocketAccess.doPrivilegedVoidException(() -> { - final BlobContainerAsyncClient blobContainerClient = asyncClient.getBlobContainerAsyncClient(container); - try { - Flux.fromStream(StreamSupport.stream(Spliterators.spliteratorUnknownSize(blobs, Spliterator.ORDERED), false)) - .flatMap(blob -> getDeleteTask(blob, blobContainerClient.getBlobAsyncClient(blob)), CONCURRENT_DELETES) - .then() - .block(); - } catch (Exception e) { - filterDeleteExceptionsAndRethrow(e, new IOException("Unable to delete blobs")); + private static boolean isIgnorableBatchDeleteException(Throwable exception) { + if (exception instanceof BlobBatchStorageException bbse) { + final Iterable batchExceptions = bbse.getBatchExceptions(); + for (BlobStorageException bse : batchExceptions) { + // If any requests failed with something other than a BLOB_NOT_FOUND, it is not ignorable + if (BlobErrorCode.BLOB_NOT_FOUND.equals(bse.getErrorCode()) == false) { + return false; + } } - }); - } - - private static Mono getDeleteTask(String blobName, BlobAsyncClient blobAsyncClient) { - return blobAsyncClient.delete() - // Ignore not found blobs, as it's possible that due to network errors a 
request - // for an already deleted blob is retried, causing an error. - .onErrorResume( - e -> e instanceof BlobStorageException blobStorageException && blobStorageException.getStatusCode() == 404, - throwable -> Mono.empty() - ) - .onErrorMap(throwable -> new IOException("Error deleting blob " + blobName, throwable)); + return true; + } + return false; } public InputStream getInputStream(OperationPurpose purpose, String blob, long position, final @Nullable Long length) { @@ -363,8 +380,7 @@ public Map listBlobsByPrefix(OperationPurpose purpose, Str for (final BlobItem blobItem : containerClient.listBlobsByHierarchy("/", listBlobsOptions, null)) { BlobItemProperties properties = blobItem.getProperties(); - Boolean isPrefix = blobItem.isPrefix(); - if (isPrefix != null && isPrefix) { + if (blobItem.isPrefix()) { continue; } String blobName = blobItem.getName().substring(keyPath.length()); @@ -689,7 +705,8 @@ enum Operation { GET_BLOB_PROPERTIES("GetBlobProperties"), PUT_BLOB("PutBlob"), PUT_BLOCK("PutBlock"), - PUT_BLOCK_LIST("PutBlockList"); + PUT_BLOCK_LIST("PutBlockList"), + BLOB_BATCH("BlobBatch"); private final String key; diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureClientProvider.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureClientProvider.java index 654742c980268..f92bbcbdd716d 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureClientProvider.java +++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureClientProvider.java @@ -317,6 +317,11 @@ private enum RetryMetricsTracker implements HttpPipelinePolicy { @Override public Mono process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) { + if (requestIsPartOfABatch(context)) { + // Batch deletes fire once for each of the constituent requests, and they have a null response. Ignore those, we'll track + // metrics at the bulk level. 
+ return next.process(); + } Optional metricsData = context.getData(RequestMetricsTracker.ES_REQUEST_METRICS_CONTEXT_KEY); if (metricsData.isPresent() == false) { assert false : "No metrics object associated with request " + context.getHttpRequest(); @@ -361,6 +366,11 @@ private RequestMetricsTracker(OperationPurpose purpose, RequestMetricsHandler re @Override public Mono process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) { + if (requestIsPartOfABatch(context)) { + // Batch deletes fire once for each of the constituent requests, and they have a null response. Ignore those, we'll track + // metrics at the bulk level. + return next.process(); + } final RequestMetrics requestMetrics = new RequestMetrics(); context.setData(ES_REQUEST_METRICS_CONTEXT_KEY, requestMetrics); return next.process().doOnSuccess((httpResponse) -> { @@ -389,6 +399,10 @@ public HttpPipelinePosition getPipelinePosition() { } } + private static boolean requestIsPartOfABatch(HttpPipelineCallContext context) { + return context.getData("Batch-Operation-Info").isPresent(); + } + /** * The {@link RequestMetricsTracker} calls this when a request completes */ diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java index 80e662343baee..316db4844e598 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java +++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java @@ -87,6 +87,21 @@ public static final class Repository { DEFAULT_MAX_SINGLE_UPLOAD_SIZE, Property.NodeScope ); + + /** + * The batch size for batched delete requests + */ + static final Setting DELETION_BATCH_SIZE_SETTING = Setting.intSetting( + "delete_objects_max_size", + AzureBlobStore.MAX_ELEMENTS_PER_BATCH, + 1, + AzureBlobStore.MAX_ELEMENTS_PER_BATCH + ); + + /** + * The 
maximum number of concurrent batch deletes + */ + static final Setting MAX_CONCURRENT_BATCH_DELETES_SETTING = Setting.intSetting("max_concurrent_batch_deletes", 10, 1, 100); } private final ByteSizeValue chunkSize; diff --git a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerStatsTests.java b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerStatsTests.java index 1ed01bbadc07e..6730e5c3c81bd 100644 --- a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerStatsTests.java +++ b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerStatsTests.java @@ -18,6 +18,7 @@ import java.io.IOException; import java.nio.ByteBuffer; +import java.util.List; import java.util.Map; public class AzureBlobContainerStatsTests extends AbstractAzureServerTestCase { @@ -47,6 +48,8 @@ public void testOperationPurposeIsReflectedInBlobStoreStats() throws IOException os.write(blobContent); os.flush(); }); + // BLOB_BATCH + blobStore.deleteBlobsIgnoringIfNotExists(purpose, List.of(randomIdentifier(), randomIdentifier(), randomIdentifier()).iterator()); Map stats = blobStore.stats(); String statsMapString = stats.toString(); @@ -55,6 +58,7 @@ public void testOperationPurposeIsReflectedInBlobStoreStats() throws IOException assertEquals(statsMapString, Long.valueOf(1L), stats.get(statsKey(purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES))); assertEquals(statsMapString, Long.valueOf(1L), stats.get(statsKey(purpose, AzureBlobStore.Operation.PUT_BLOCK))); assertEquals(statsMapString, Long.valueOf(1L), stats.get(statsKey(purpose, AzureBlobStore.Operation.PUT_BLOCK_LIST))); + assertEquals(statsMapString, Long.valueOf(1L), stats.get(statsKey(purpose, AzureBlobStore.Operation.BLOB_BATCH))); } public void testOperationPurposeIsNotReflectedInBlobStoreStatsWhenNotServerless() throws IOException { @@ -79,6 +83,11 @@ public void 
testOperationPurposeIsNotReflectedInBlobStoreStatsWhenNotServerless( os.write(blobContent); os.flush(); }); + // BLOB_BATCH + blobStore.deleteBlobsIgnoringIfNotExists( + purpose, + List.of(randomIdentifier(), randomIdentifier(), randomIdentifier()).iterator() + ); } Map stats = blobStore.stats(); @@ -88,6 +97,7 @@ public void testOperationPurposeIsNotReflectedInBlobStoreStatsWhenNotServerless( assertEquals(statsMapString, Long.valueOf(repeatTimes), stats.get(AzureBlobStore.Operation.GET_BLOB_PROPERTIES.getKey())); assertEquals(statsMapString, Long.valueOf(repeatTimes), stats.get(AzureBlobStore.Operation.PUT_BLOCK.getKey())); assertEquals(statsMapString, Long.valueOf(repeatTimes), stats.get(AzureBlobStore.Operation.PUT_BLOCK_LIST.getKey())); + assertEquals(statsMapString, Long.valueOf(repeatTimes), stats.get(AzureBlobStore.Operation.BLOB_BATCH.getKey())); } private static String statsKey(OperationPurpose purpose, AzureBlobStore.Operation operation) { diff --git a/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java b/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java index d8716fd987f3e..92ce04b6bea5b 100644 --- a/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java +++ b/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java @@ -12,6 +12,9 @@ import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; @@ -23,10 +26,14 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; +import java.io.BufferedReader; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStreamReader; import 
java.nio.charset.StandardCharsets; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; @@ -35,6 +42,7 @@ import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.function.Predicate; import java.util.regex.Matcher; @@ -47,6 +55,8 @@ */ @SuppressForbidden(reason = "Uses a HttpServer to emulate an Azure endpoint") public class AzureHttpHandler implements HttpHandler { + private static final Logger logger = LogManager.getLogger(AzureHttpHandler.class); + private final Map blobs; private final String account; private final String container; @@ -264,7 +274,98 @@ public void handle(final HttpExchange exchange) throws IOException { exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length); exchange.getResponseBody().write(response); + } else if (Regex.simpleMatch("POST /" + account + "/" + container + "*restype=container*comp=batch*", request)) { + // Blob Batch (https://learn.microsoft.com/en-us/rest/api/storageservices/blob-batch) + final StringBuilder response = new StringBuilder(); + + try (BufferedReader requestReader = new BufferedReader(new InputStreamReader(exchange.getRequestBody()))) { + final String batchBoundary = requestReader.readLine(); + final String responseBoundary = "batch_" + UUID.randomUUID(); + + String line; + String contentId = null, requestId = null, toDelete = null; + while ((line = requestReader.readLine()) != null) { + if (batchBoundary.equals(line) || (batchBoundary + "--").equals(line)) { + // Found the end of a single request, process it + if (contentId == null || requestId == null || toDelete == null) { + throw new IllegalStateException( + "Missing contentId/requestId/toDelete: " + contentId + "/" + requestId + "/" + toDelete + ); + } + + // Process the deletion + if (blobs.remove("/" + account + 
toDelete) != null) { + final String acceptedPart = Strings.format(""" + --%s + Content-Type: application/http + Content-ID: %s + + HTTP/1.1 202 Accepted + x-ms-delete-type-permanent: true + x-ms-request-id: %s + x-ms-version: 2018-11-09 + + """, responseBoundary, contentId, requestId).replaceAll("\n", "\r\n"); + response.append(acceptedPart); + } else { + final String notFoundBody = Strings.format( + """ + + BlobNotFoundThe specified blob does not exist. + RequestId:%s + Time:%s""", + requestId, + DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now(ZoneId.of("UTC"))) + ); + final String notFoundPart = Strings.format(""" + --%s + Content-Type: application/http + Content-ID: %s + + HTTP/1.1 404 The specified blob does not exist. + x-ms-error-code: BlobNotFound + x-ms-request-id: %s + x-ms-version: 2018-11-09 + Content-Length: %d + Content-Type: application/xml + + %s + """, responseBoundary, contentId, requestId, notFoundBody.length(), notFoundBody) + .replaceAll("\n", "\r\n"); + response.append(notFoundPart); + } + + // Clear the state + toDelete = null; + contentId = null; + requestId = null; + } else if (Regex.simpleMatch("x-ms-client-request-id: *", line)) { + if (requestId != null) { + throw new IllegalStateException("Got multiple request IDs in a single request?"); + } + requestId = line.split("\\s")[1]; + } else if (Regex.simpleMatch("Content-ID: *", line)) { + if (contentId != null) { + throw new IllegalStateException("Got multiple content IDs in a single request?"); + } + contentId = line.split("\\s")[1]; + } else if (Regex.simpleMatch("DELETE /" + container + "/*", line)) { + String blobName = RestUtils.decodeComponent(line.split("(\\s|\\?)")[1]); + if (toDelete != null) { + throw new IllegalStateException("Got multiple deletes in a single request?"); + } + toDelete = blobName; + } + } + response.append("--").append(responseBoundary).append("--\r\n0\r\n"); + // Send the response + exchange.getResponseHeaders().add("Content-Type", 
"multipart/mixed; boundary=" + responseBoundary); + exchange.sendResponseHeaders(RestStatus.ACCEPTED.getStatus(), response.length()); + logger.debug("--> Sending response:\n{}", response); + exchange.getResponseBody().write(response.toString().getBytes(StandardCharsets.UTF_8)); + } } else { + logger.warn("--> Unrecognised request received: {}", request); sendError(exchange, RestStatus.BAD_REQUEST); } } finally { From e0a458441cff9a4242cd93f4c02f06d72f2d63c4 Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Thu, 24 Oct 2024 11:55:54 +0300 Subject: [PATCH 353/449] [Failure store - selector syntax] Introduce the `::*` selector (#115389) **Introduction** > In order to make adoption of failure stores simpler for all users, we are introducing a new syntactical feature to index expression resolution: The selector. > > Selectors, denoted with a :: followed by a recognized suffix will allow users to specify which component of an index abstraction they would like to operate on within an API call. In this case, an index abstraction is a concrete index, data stream, or alias; Any abstraction that can be resolved to a set of indices/shards. We define a component of an index abstraction to be some searchable unit of the index abstraction. > > To start, we will support two components: data and failures. Concrete indices are their own data components, while the data component for index aliases are all of the indices contained therein. For data streams, the data component corresponds to their backing indices. Data stream aliases mirror this, treating all backing indices of the data streams they correspond to as their data component. > > The failure component is only supported by data streams and data stream aliases. The failure component of these abstractions refer to the data streams' failure stores. Indices and index aliases do not have a failure component. For more details and examples see https://github.com/elastic/elasticsearch/pull/113144. 
All this work has been cherry picked from there. **Purpose of this PR** This PR is introducing the `::*` as another selector option and not as a combination of `::data` and `::failure`. The reason for this change is that we need to differentiate between: - `my-index::*` which should resolve to `my-index::data` only and not to `my-index::failures` and - a user explicitly requesting `my-index::data, my-index::failures` which should potentially result in an error. --- .../datastreams/DataStreamsSnapshotsIT.java | 2 +- .../IngestFailureStoreMetricsIT.java | 2 +- .../lifecycle/DataStreamLifecycleService.java | 6 +- .../DataStreamLifecycleServiceTests.java | 8 +- .../org/elasticsearch/TransportVersions.java | 1 + .../admin/indices/get/GetIndexRequest.java | 2 +- .../indices/rollover/RolloverRequest.java | 5 +- .../action/bulk/BulkOperation.java | 2 +- .../action/bulk/TransportBulkAction.java | 2 +- .../datastreams/DataStreamsStatsAction.java | 2 +- .../support/IndexComponentSelector.java | 73 ++++++++-- .../action/support/IndicesOptions.java | 133 ++++++++---------- .../indices/RestRolloverIndexAction.java | 2 +- .../indices/get/GetIndexRequestTests.java | 2 +- .../MetadataRolloverServiceTests.java | 4 +- .../rollover/RolloverRequestTests.java | 14 +- .../support/IndexComponentSelectorTests.java | 41 ++++++ .../action/support/IndicesOptionsTests.java | 18 +-- .../IndexNameExpressionResolverTests.java | 16 +-- .../xpack/core/ilm/RolloverStep.java | 4 +- .../core/ilm/WaitForRolloverReadyStep.java | 4 +- 21 files changed, 197 insertions(+), 146 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/action/support/IndexComponentSelectorTests.java diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java index 212b869c6d933..286ad68896797 100644 --- 
a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java @@ -138,7 +138,7 @@ public void setup() throws Exception { // Initialize the failure store. RolloverRequest rolloverRequest = new RolloverRequest("with-fs", null); rolloverRequest.setIndicesOptions( - IndicesOptions.builder(rolloverRequest.indicesOptions()).selectorOptions(IndicesOptions.SelectorOptions.ONLY_FAILURES).build() + IndicesOptions.builder(rolloverRequest.indicesOptions()).selectorOptions(IndicesOptions.SelectorOptions.FAILURES).build() ); response = client.execute(RolloverAction.INSTANCE, rolloverRequest).get(); assertTrue(response.isAcknowledged()); diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/IngestFailureStoreMetricsIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/IngestFailureStoreMetricsIT.java index 679ad5b000c8f..96def04069e24 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/IngestFailureStoreMetricsIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/IngestFailureStoreMetricsIT.java @@ -195,7 +195,7 @@ public void testRejectionFromFailureStore() throws IOException { // Initialize failure store. 
var rolloverRequest = new RolloverRequest(dataStream, null); rolloverRequest.setIndicesOptions( - IndicesOptions.builder(rolloverRequest.indicesOptions()).selectorOptions(IndicesOptions.SelectorOptions.ONLY_FAILURES).build() + IndicesOptions.builder(rolloverRequest.indicesOptions()).selectorOptions(IndicesOptions.SelectorOptions.FAILURES).build() ); var rolloverResponse = client().execute(RolloverAction.INSTANCE, rolloverRequest).actionGet(); var failureStoreIndex = rolloverResponse.getNewIndex(); diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java index 7bbf7137d290e..7d2828e30d5ab 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java @@ -946,7 +946,7 @@ private Set maybeExecuteForceMerge(ClusterState state, List indice UpdateSettingsRequest updateMergePolicySettingsRequest = new UpdateSettingsRequest(); updateMergePolicySettingsRequest.indicesOptions( IndicesOptions.builder(updateMergePolicySettingsRequest.indicesOptions()) - .selectorOptions(IndicesOptions.SelectorOptions.DATA_AND_FAILURE) + .selectorOptions(IndicesOptions.SelectorOptions.ALL_APPLICABLE) .build() ); updateMergePolicySettingsRequest.indices(indexName); @@ -1408,9 +1408,7 @@ static RolloverRequest getDefaultRolloverRequest( RolloverRequest rolloverRequest = new RolloverRequest(dataStream, null).masterNodeTimeout(TimeValue.MAX_VALUE); if (rolloverFailureStore) { rolloverRequest.setIndicesOptions( - IndicesOptions.builder(rolloverRequest.indicesOptions()) - .selectorOptions(IndicesOptions.SelectorOptions.ONLY_FAILURES) - .build() + 
IndicesOptions.builder(rolloverRequest.indicesOptions()).selectorOptions(IndicesOptions.SelectorOptions.FAILURES).build() ); } rolloverRequest.setConditions(rolloverConfiguration.resolveRolloverConditions(dataRetention)); diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java index d6bf80798764d..698ab427ab040 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java @@ -225,11 +225,11 @@ public void testOperationsExecutedOnce() { assertThat(clientSeenRequests.get(0), instanceOf(RolloverRequest.class)); RolloverRequest rolloverBackingIndexRequest = (RolloverRequest) clientSeenRequests.get(0); assertThat(rolloverBackingIndexRequest.getRolloverTarget(), is(dataStreamName)); - assertThat(rolloverBackingIndexRequest.indicesOptions().selectorOptions(), equalTo(IndicesOptions.SelectorOptions.ONLY_DATA)); + assertThat(rolloverBackingIndexRequest.indicesOptions().selectorOptions(), equalTo(IndicesOptions.SelectorOptions.DATA)); assertThat(clientSeenRequests.get(1), instanceOf(RolloverRequest.class)); RolloverRequest rolloverFailureIndexRequest = (RolloverRequest) clientSeenRequests.get(1); assertThat(rolloverFailureIndexRequest.getRolloverTarget(), is(dataStreamName)); - assertThat(rolloverFailureIndexRequest.indicesOptions().selectorOptions(), equalTo(IndicesOptions.SelectorOptions.ONLY_FAILURES)); + assertThat(rolloverFailureIndexRequest.indicesOptions().selectorOptions(), equalTo(IndicesOptions.SelectorOptions.FAILURES)); List deleteRequests = clientSeenRequests.subList(2, 5) .stream() .map(transportRequest -> (DeleteIndexRequest) transportRequest) @@ -1546,11 +1546,11 @@ public void 
testFailureStoreIsManagedEvenWhenDisabled() { assertThat(clientSeenRequests.get(0), instanceOf(RolloverRequest.class)); RolloverRequest rolloverBackingIndexRequest = (RolloverRequest) clientSeenRequests.get(0); assertThat(rolloverBackingIndexRequest.getRolloverTarget(), is(dataStreamName)); - assertThat(rolloverBackingIndexRequest.indicesOptions().selectorOptions(), equalTo(IndicesOptions.SelectorOptions.ONLY_DATA)); + assertThat(rolloverBackingIndexRequest.indicesOptions().selectorOptions(), equalTo(IndicesOptions.SelectorOptions.DATA)); assertThat(clientSeenRequests.get(1), instanceOf(RolloverRequest.class)); RolloverRequest rolloverFailureIndexRequest = (RolloverRequest) clientSeenRequests.get(1); assertThat(rolloverFailureIndexRequest.getRolloverTarget(), is(dataStreamName)); - assertThat(rolloverFailureIndexRequest.indicesOptions().selectorOptions(), equalTo(IndicesOptions.SelectorOptions.ONLY_FAILURES)); + assertThat(rolloverFailureIndexRequest.indicesOptions().selectorOptions(), equalTo(IndicesOptions.SelectorOptions.FAILURES)); assertThat( ((DeleteIndexRequest) clientSeenRequests.get(2)).indices()[0], is(dataStream.getFailureIndices().getIndices().get(0).getName()) diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 6d9bf2ac52f2d..777ff083f33f8 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -180,6 +180,7 @@ static TransportVersion def(int id) { public static final TransportVersion ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED = def(8_775_00_0); public static final TransportVersion INFERENCE_DONT_PERSIST_ON_READ = def(8_776_00_0); public static final TransportVersion SIMULATE_MAPPING_ADDITION = def(8_777_00_0); + public static final TransportVersion INTRODUCE_ALL_APPLICABLE_SELECTOR = def(8_778_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexRequest.java index 4c5ee08beb192..801dbbdee0858 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexRequest.java @@ -98,7 +98,7 @@ public GetIndexRequest() { super( DataStream.isFailureStoreFeatureFlagEnabled() ? IndicesOptions.builder(IndicesOptions.strictExpandOpen()) - .selectorOptions(IndicesOptions.SelectorOptions.DATA_AND_FAILURE) + .selectorOptions(IndicesOptions.SelectorOptions.ALL_APPLICABLE) .build() : IndicesOptions.strictExpandOpen() ); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java index 5a7f330be50c0..552ce727d4249 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.support.ActiveShardCount; +import org.elasticsearch.action.support.IndexComponentSelector; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.cluster.metadata.DataStream; @@ -124,8 +125,8 @@ public ActionRequestValidationException validate() { ); } - var selectors = indicesOptions.selectorOptions().defaultSelectors(); - if (selectors.size() > 1) { + var selector = indicesOptions.selectorOptions().defaultSelector(); + if (selector == IndexComponentSelector.ALL_APPLICABLE) { validationException = addValidationError( "rollover cannot be 
applied to both regular and failure indices at the same time", validationException diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java index 130d6286f7e02..ce3e189149451 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java @@ -212,7 +212,7 @@ private void rollOverFailureStores(Runnable runnable) { RolloverRequest rolloverRequest = new RolloverRequest(dataStream, null); rolloverRequest.setIndicesOptions( IndicesOptions.builder(rolloverRequest.indicesOptions()) - .selectorOptions(IndicesOptions.SelectorOptions.ONLY_FAILURES) + .selectorOptions(IndicesOptions.SelectorOptions.FAILURES) .build() ); // We are executing a lazy rollover because it is an action specialised for this situation, when we want an diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index a3a73415ec4f6..cef68324e2a45 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -418,7 +418,7 @@ private void rollOverDataStreams( if (targetFailureStore) { rolloverRequest.setIndicesOptions( IndicesOptions.builder(rolloverRequest.indicesOptions()) - .selectorOptions(IndicesOptions.SelectorOptions.ONLY_FAILURES) + .selectorOptions(IndicesOptions.SelectorOptions.FAILURES) .build() ); } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsStatsAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsStatsAction.java index 1c30303915c8e..9266bae439b73 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsStatsAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsStatsAction.java @@ -61,7 +61,7 @@ public Request() { .allowFailureIndices(true) .build() ) - .selectorOptions(IndicesOptions.SelectorOptions.DATA_AND_FAILURE) + .selectorOptions(IndicesOptions.SelectorOptions.ALL_APPLICABLE) .build() ); } diff --git a/server/src/main/java/org/elasticsearch/action/support/IndexComponentSelector.java b/server/src/main/java/org/elasticsearch/action/support/IndexComponentSelector.java index 65b48db8f5cf3..910be151d1bf5 100644 --- a/server/src/main/java/org/elasticsearch/action/support/IndexComponentSelector.java +++ b/server/src/main/java/org/elasticsearch/action/support/IndexComponentSelector.java @@ -9,6 +9,12 @@ package org.elasticsearch.action.support; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Nullable; + +import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -17,33 +23,82 @@ * We define as index components the two different sets of indices a data stream could consist of: * - DATA: represents the backing indices * - FAILURES: represent the failing indices + * - ALL: represents all available in this expression components, meaning if it's a data stream both backing and failure indices and if it's + * an index only the index itself. * Note: An index is its own DATA component, but it cannot have a FAILURE component. 
*/ -public enum IndexComponentSelector { - DATA("data"), - FAILURES("failures"); +public enum IndexComponentSelector implements Writeable { + DATA("data", (byte) 0), + FAILURES("failures", (byte) 1), + ALL_APPLICABLE("*", (byte) 2); private final String key; + private final byte id; - IndexComponentSelector(String key) { + IndexComponentSelector(String key, byte id) { this.key = key; + this.id = id; } public String getKey() { return key; } - private static final Map REGISTRY; + public byte getId() { + return id; + } + + private static final Map KEY_REGISTRY; + private static final Map ID_REGISTRY; static { - Map registry = new HashMap<>(IndexComponentSelector.values().length); + Map keyRegistry = new HashMap<>(IndexComponentSelector.values().length); + for (IndexComponentSelector value : IndexComponentSelector.values()) { + keyRegistry.put(value.getKey(), value); + } + KEY_REGISTRY = Collections.unmodifiableMap(keyRegistry); + Map idRegistry = new HashMap<>(IndexComponentSelector.values().length); for (IndexComponentSelector value : IndexComponentSelector.values()) { - registry.put(value.getKey(), value); + idRegistry.put(value.getId(), value); } - REGISTRY = Collections.unmodifiableMap(registry); + ID_REGISTRY = Collections.unmodifiableMap(idRegistry); } + /** + * Retrieves the respective selector when the suffix key is recognised + * @param key the suffix key, probably parsed from an expression + * @return the selector or null if the key was not recognised. 
+ */ + @Nullable public static IndexComponentSelector getByKey(String key) { - return REGISTRY.get(key); + return KEY_REGISTRY.get(key); + } + + public static IndexComponentSelector read(StreamInput in) throws IOException { + return getById(in.readByte()); + } + + // Visible for testing + static IndexComponentSelector getById(byte id) { + IndexComponentSelector indexComponentSelector = ID_REGISTRY.get(id); + if (indexComponentSelector == null) { + throw new IllegalArgumentException( + "Unknown id of index component selector [" + id + "], available options are: " + ID_REGISTRY + ); + } + return indexComponentSelector; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeByte(id); + } + + public boolean shouldIncludeData() { + return this == ALL_APPLICABLE || this == DATA; + } + + public boolean shouldIncludeFailures() { + return this == ALL_APPLICABLE || this == FAILURES; } } diff --git a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java index 22d019f80837d..85889d8398cb1 100644 --- a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java +++ b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java @@ -421,61 +421,45 @@ public static Builder builder(GatekeeperOptions gatekeeperOptions) { /** * Defines which selectors should be used by default for an index operation in the event that no selectors are provided. 
*/ - public record SelectorOptions(EnumSet defaultSelectors) implements Writeable { + public record SelectorOptions(IndexComponentSelector defaultSelector) implements Writeable { - public static final SelectorOptions DATA_AND_FAILURE = new SelectorOptions( - EnumSet.of(IndexComponentSelector.DATA, IndexComponentSelector.FAILURES) - ); - public static final SelectorOptions ONLY_DATA = new SelectorOptions(EnumSet.of(IndexComponentSelector.DATA)); - public static final SelectorOptions ONLY_FAILURES = new SelectorOptions(EnumSet.of(IndexComponentSelector.FAILURES)); + public static final SelectorOptions ALL_APPLICABLE = new SelectorOptions(IndexComponentSelector.ALL_APPLICABLE); + public static final SelectorOptions DATA = new SelectorOptions(IndexComponentSelector.DATA); + public static final SelectorOptions FAILURES = new SelectorOptions(IndexComponentSelector.FAILURES); /** * Default instance. Uses
      ::data
      as the default selector if none are present in an index expression. */ - public static final SelectorOptions DEFAULT = ONLY_DATA; + public static final SelectorOptions DEFAULT = DATA; public static SelectorOptions read(StreamInput in) throws IOException { - return new SelectorOptions(in.readEnumSet(IndexComponentSelector.class)); + if (in.getTransportVersion().before(TransportVersions.INTRODUCE_ALL_APPLICABLE_SELECTOR)) { + EnumSet set = in.readEnumSet(IndexComponentSelector.class); + if (set.isEmpty() || set.size() == 2) { + assert set.contains(IndexComponentSelector.DATA) && set.contains(IndexComponentSelector.FAILURES) + : "The enum set only supported ::data and ::failures"; + return SelectorOptions.ALL_APPLICABLE; + } else if (set.contains(IndexComponentSelector.DATA)) { + return SelectorOptions.DATA; + } else { + return SelectorOptions.FAILURES; + } + } else { + return new SelectorOptions(IndexComponentSelector.read(in)); + } } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeEnumSet(defaultSelectors); - } - - public static class Builder { - private EnumSet defaultSelectors; - - public Builder() { - this(DEFAULT); - } - - Builder(SelectorOptions options) { - defaultSelectors = EnumSet.copyOf(options.defaultSelectors); - } - - public Builder setDefaultSelectors(IndexComponentSelector first, IndexComponentSelector... 
remaining) { - defaultSelectors = EnumSet.of(first, remaining); - return this; - } - - public Builder setDefaultSelectors(EnumSet defaultSelectors) { - this.defaultSelectors = EnumSet.copyOf(defaultSelectors); - return this; - } - - public SelectorOptions build() { - assert defaultSelectors.isEmpty() != true : "Default selectors cannot be an empty set"; - return new SelectorOptions(EnumSet.copyOf(defaultSelectors)); + if (out.getTransportVersion().before(TransportVersions.INTRODUCE_ALL_APPLICABLE_SELECTOR)) { + switch (defaultSelector) { + case ALL_APPLICABLE -> out.writeEnumSet(EnumSet.of(IndexComponentSelector.DATA, IndexComponentSelector.FAILURES)); + case DATA -> out.writeEnumSet(EnumSet.of(IndexComponentSelector.DATA)); + case FAILURES -> out.writeEnumSet(EnumSet.of(IndexComponentSelector.FAILURES)); + } + } else { + defaultSelector.writeTo(out); } } - - public static Builder builder() { - return new Builder(); - } - - public static Builder builder(SelectorOptions selectorOptions) { - return new Builder(selectorOptions); - } } /** @@ -547,7 +531,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .selectorOptions(SelectorOptions.ONLY_DATA) + .selectorOptions(SelectorOptions.DATA) .build(); public static final IndicesOptions STRICT_EXPAND_OPEN_FAILURE_STORE = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) @@ -566,7 +550,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .selectorOptions(SelectorOptions.DATA_AND_FAILURE) + .selectorOptions(SelectorOptions.ALL_APPLICABLE) .build(); public static final IndicesOptions LENIENT_EXPAND_OPEN = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS) @@ -585,7 +569,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .selectorOptions(SelectorOptions.ONLY_DATA) + .selectorOptions(SelectorOptions.DATA) .build(); public static final 
IndicesOptions LENIENT_EXPAND_OPEN_NO_SELECTORS = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS) @@ -622,7 +606,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .selectorOptions(SelectorOptions.ONLY_DATA) + .selectorOptions(SelectorOptions.DATA) .build(); public static final IndicesOptions LENIENT_EXPAND_OPEN_CLOSED = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS) @@ -641,7 +625,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .selectorOptions(SelectorOptions.ONLY_DATA) + .selectorOptions(SelectorOptions.DATA) .build(); public static final IndicesOptions LENIENT_EXPAND_OPEN_CLOSED_HIDDEN = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS) @@ -655,7 +639,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .selectorOptions(SelectorOptions.ONLY_DATA) + .selectorOptions(SelectorOptions.DATA) .build(); public static final IndicesOptions LENIENT_EXPAND_OPEN_CLOSED_HIDDEN_NO_SELECTOR = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS) @@ -687,7 +671,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .selectorOptions(SelectorOptions.ONLY_DATA) + .selectorOptions(SelectorOptions.DATA) .build(); public static final IndicesOptions STRICT_EXPAND_OPEN_CLOSED_HIDDEN = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) @@ -701,7 +685,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .selectorOptions(SelectorOptions.ONLY_DATA) + .selectorOptions(SelectorOptions.DATA) .build(); public static final IndicesOptions STRICT_EXPAND_OPEN_CLOSED_HIDDEN_NO_SELECTORS = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) @@ -733,7 +717,7 @@ 
private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .selectorOptions(SelectorOptions.DATA_AND_FAILURE) + .selectorOptions(SelectorOptions.ALL_APPLICABLE) .build(); public static final IndicesOptions STRICT_EXPAND_OPEN_CLOSED_HIDDEN_FAILURE_STORE = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) @@ -747,7 +731,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .selectorOptions(SelectorOptions.DATA_AND_FAILURE) + .selectorOptions(SelectorOptions.ALL_APPLICABLE) .build(); public static final IndicesOptions STRICT_EXPAND_OPEN_CLOSED_FAILURE_STORE = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) @@ -766,7 +750,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .selectorOptions(SelectorOptions.DATA_AND_FAILURE) + .selectorOptions(SelectorOptions.ALL_APPLICABLE) .build(); public static final IndicesOptions STRICT_EXPAND_OPEN_FORBID_CLOSED = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) @@ -785,7 +769,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .selectorOptions(SelectorOptions.ONLY_DATA) + .selectorOptions(SelectorOptions.DATA) .build(); public static final IndicesOptions STRICT_EXPAND_OPEN_HIDDEN_FORBID_CLOSED = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) @@ -804,7 +788,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .selectorOptions(SelectorOptions.ONLY_DATA) + .selectorOptions(SelectorOptions.DATA) .build(); public static final IndicesOptions STRICT_EXPAND_OPEN_FORBID_CLOSED_IGNORE_THROTTLED = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) @@ -823,7 +807,7 @@ private enum Option { .allowFailureIndices(true) .allowAliasToMultipleIndices(true) 
) - .selectorOptions(SelectorOptions.ONLY_DATA) + .selectorOptions(SelectorOptions.DATA) .build(); public static final IndicesOptions STRICT_SINGLE_INDEX_NO_EXPAND_FORBID_CLOSED = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) @@ -842,7 +826,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .selectorOptions(SelectorOptions.ONLY_DATA) + .selectorOptions(SelectorOptions.DATA) .build(); public static final IndicesOptions STRICT_NO_EXPAND_FORBID_CLOSED = IndicesOptions.builder() .concreteTargetOptions(ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) @@ -861,7 +845,7 @@ private enum Option { .allowFailureIndices(true) .ignoreThrottled(false) ) - .selectorOptions(SelectorOptions.ONLY_DATA) + .selectorOptions(SelectorOptions.DATA) .build(); /** @@ -919,7 +903,7 @@ public boolean forbidClosedIndices() { } /** - * @return Whether execution on closed indices is allowed. + * @return Whether execution on failure indices is allowed. 
*/ public boolean allowFailureIndices() { return gatekeeperOptions.allowFailureIndices(); @@ -950,14 +934,14 @@ public boolean ignoreThrottled() { * @return whether regular indices (stand-alone or backing indices) will be included in the response */ public boolean includeRegularIndices() { - return selectorOptions().defaultSelectors().contains(IndexComponentSelector.DATA); + return selectorOptions().defaultSelector().shouldIncludeData(); } /** * @return whether failure indices (only supported by certain data streams) will be included in the response */ public boolean includeFailureIndices() { - return selectorOptions().defaultSelectors().contains(IndexComponentSelector.FAILURES); + return selectorOptions().defaultSelector().shouldIncludeFailures(); } public void writeIndicesOptions(StreamOutput out) throws IOException { @@ -1004,7 +988,7 @@ public void writeIndicesOptions(StreamOutput out) throws IOException { out.writeBoolean(includeFailureIndices()); } if (out.getTransportVersion().onOrAfter(TransportVersions.CONVERT_FAILURE_STORE_OPTIONS_TO_SELECTOR_OPTIONS_INTERNALLY)) { - out.writeEnumSet(selectorOptions.defaultSelectors); + selectorOptions.writeTo(out); } } @@ -1032,15 +1016,15 @@ public static IndicesOptions readIndicesOptions(StreamInput in) throws IOExcepti var includeData = in.readBoolean(); var includeFailures = in.readBoolean(); if (includeData && includeFailures) { - selectorOptions = SelectorOptions.DATA_AND_FAILURE; + selectorOptions = SelectorOptions.ALL_APPLICABLE; } else if (includeData) { - selectorOptions = SelectorOptions.ONLY_DATA; + selectorOptions = SelectorOptions.DATA; } else { - selectorOptions = SelectorOptions.ONLY_FAILURES; + selectorOptions = SelectorOptions.FAILURES; } } if (in.getTransportVersion().onOrAfter(TransportVersions.CONVERT_FAILURE_STORE_OPTIONS_TO_SELECTOR_OPTIONS_INTERNALLY)) { - selectorOptions = new SelectorOptions(in.readEnumSet(IndexComponentSelector.class)); + selectorOptions = SelectorOptions.read(in); } return new 
IndicesOptions( options.contains(Option.ALLOW_UNAVAILABLE_CONCRETE_TARGETS) @@ -1099,11 +1083,6 @@ public Builder selectorOptions(SelectorOptions selectorOptions) { return this; } - public Builder selectorOptions(SelectorOptions.Builder selectorOptions) { - this.selectorOptions = selectorOptions.build(); - return this; - } - public IndicesOptions build() { return new IndicesOptions(concreteTargetOptions, wildcardOptions, gatekeeperOptions, selectorOptions); } @@ -1322,9 +1301,9 @@ private static SelectorOptions parseFailureStoreParameters(Object failureStoreVa return defaultOptions; } return switch (failureStoreValue.toString()) { - case INCLUDE_ALL -> SelectorOptions.DATA_AND_FAILURE; - case INCLUDE_ONLY_REGULAR_INDICES -> SelectorOptions.ONLY_DATA; - case INCLUDE_ONLY_FAILURE_INDICES -> SelectorOptions.ONLY_FAILURES; + case INCLUDE_ALL -> SelectorOptions.ALL_APPLICABLE; + case INCLUDE_ONLY_REGULAR_INDICES -> SelectorOptions.DATA; + case INCLUDE_ONLY_FAILURE_INDICES -> SelectorOptions.FAILURES; default -> throw new IllegalArgumentException("No valid " + FAILURE_STORE_QUERY_PARAM + " value [" + failureStoreValue + "]"); }; } @@ -1336,9 +1315,9 @@ public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params par gatekeeperOptions.toXContent(builder, params); if (DataStream.isFailureStoreFeatureFlagEnabled()) { String displayValue; - if (SelectorOptions.DATA_AND_FAILURE.equals(selectorOptions())) { + if (SelectorOptions.ALL_APPLICABLE.equals(selectorOptions())) { displayValue = INCLUDE_ALL; - } else if (SelectorOptions.ONLY_DATA.equals(selectorOptions())) { + } else if (SelectorOptions.DATA.equals(selectorOptions())) { displayValue = INCLUDE_ONLY_REGULAR_INDICES; } else { displayValue = INCLUDE_ONLY_FAILURE_INDICES; diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java index 942844dd1dd16..776302296b1a2 
100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java @@ -69,7 +69,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC if (failureStore) { rolloverIndexRequest.setIndicesOptions( IndicesOptions.builder(rolloverIndexRequest.indicesOptions()) - .selectorOptions(IndicesOptions.SelectorOptions.ONLY_FAILURES) + .selectorOptions(IndicesOptions.SelectorOptions.FAILURES) .build() ); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexRequestTests.java index a75b50e3a88f4..3bbc03a333438 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexRequestTests.java @@ -82,6 +82,6 @@ public void testIndicesOptions() { ); assertThat(getIndexRequest.indicesOptions().wildcardOptions(), equalTo(IndicesOptions.strictExpandOpen().wildcardOptions())); assertThat(getIndexRequest.indicesOptions().gatekeeperOptions(), equalTo(IndicesOptions.strictExpandOpen().gatekeeperOptions())); - assertThat(getIndexRequest.indicesOptions().selectorOptions(), equalTo(IndicesOptions.SelectorOptions.DATA_AND_FAILURE)); + assertThat(getIndexRequest.indicesOptions().selectorOptions(), equalTo(IndicesOptions.SelectorOptions.ALL_APPLICABLE)); } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java index b9fdb13958632..1a30fae1ebc00 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java @@ -754,7 +754,7 @@ public void testValidation() throws Exception { .promoteDataStream(); rolloverTarget = dataStream.getName(); if (dataStream.isFailureStoreEnabled() && randomBoolean()) { - defaultSelectorOptions = IndicesOptions.SelectorOptions.ONLY_FAILURES; + defaultSelectorOptions = IndicesOptions.SelectorOptions.FAILURES; sourceIndexName = dataStream.getFailureStoreWriteIndex().getName(); defaultRolloverIndexName = DataStream.getDefaultFailureStoreName( dataStream.getName(), @@ -815,7 +815,7 @@ public void testValidation() throws Exception { true, null, null, - IndicesOptions.SelectorOptions.ONLY_FAILURES.equals(defaultSelectorOptions) + IndicesOptions.SelectorOptions.FAILURES.equals(defaultSelectorOptions) ); newIndexName = newIndexName == null ? defaultRolloverIndexName : newIndexName; diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java index 08e92c833dc85..f0190790ba001 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; -import org.elasticsearch.action.support.IndexComponentSelector; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -34,9 +33,7 @@ import org.junit.Before; import java.io.IOException; -import java.util.EnumSet; import java.util.Map; -import java.util.Set; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -178,14 
+175,7 @@ public void testSerialize() throws Exception { ); originalRequest.lazy(randomBoolean()); originalRequest.setIndicesOptions( - IndicesOptions.builder(originalRequest.indicesOptions()) - .selectorOptions( - IndicesOptions.SelectorOptions.builder() - .setDefaultSelectors( - EnumSet.copyOf(randomNonEmptySubsetOf(Set.of(IndexComponentSelector.DATA, IndexComponentSelector.FAILURES))) - ) - ) - .build() + IndicesOptions.builder(originalRequest.indicesOptions()).selectorOptions(IndicesOptions.SelectorOptions.ALL_APPLICABLE).build() ); try (BytesStreamOutput out = new BytesStreamOutput()) { @@ -266,7 +256,7 @@ public void testValidation() { RolloverRequest rolloverRequest = new RolloverRequest("alias-index", "new-index-name"); rolloverRequest.setIndicesOptions( IndicesOptions.builder(rolloverRequest.indicesOptions()) - .selectorOptions(IndicesOptions.SelectorOptions.DATA_AND_FAILURE) + .selectorOptions(IndicesOptions.SelectorOptions.ALL_APPLICABLE) .build() ); ActionRequestValidationException validationException = rolloverRequest.validate(); diff --git a/server/src/test/java/org/elasticsearch/action/support/IndexComponentSelectorTests.java b/server/src/test/java/org/elasticsearch/action/support/IndexComponentSelectorTests.java new file mode 100644 index 0000000000000..73d4ab59ce479 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/support/IndexComponentSelectorTests.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.action.support; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class IndexComponentSelectorTests extends ESTestCase { + + public void testIndexComponentSelectorFromKey() { + assertThat(IndexComponentSelector.getByKey("data"), equalTo(IndexComponentSelector.DATA)); + assertThat(IndexComponentSelector.getByKey("failures"), equalTo(IndexComponentSelector.FAILURES)); + assertThat(IndexComponentSelector.getByKey("*"), equalTo(IndexComponentSelector.ALL_APPLICABLE)); + assertThat(IndexComponentSelector.getByKey("d*ta"), nullValue()); + assertThat(IndexComponentSelector.getByKey("_all"), nullValue()); + assertThat(IndexComponentSelector.getByKey("**"), nullValue()); + assertThat(IndexComponentSelector.getByKey("failure"), nullValue()); + } + + public void testIndexComponentSelectorFromId() { + assertThat(IndexComponentSelector.getById((byte) 0), equalTo(IndexComponentSelector.DATA)); + assertThat(IndexComponentSelector.getById((byte) 1), equalTo(IndexComponentSelector.FAILURES)); + assertThat(IndexComponentSelector.getById((byte) 2), equalTo(IndexComponentSelector.ALL_APPLICABLE)); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> IndexComponentSelector.getById((byte) 3)); + assertThat( + exception.getMessage(), + containsString("Unknown id of index component selector [3], available options are: {0=DATA, 1=FAILURES, 2=ALL_APPLICABLE}") + ); + } + +} diff --git a/server/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java b/server/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java index 1784ab863bf1c..de7b43ad091fa 100644 --- a/server/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java @@ -30,11 +30,9 @@ 
import java.util.Arrays; import java.util.Collection; import java.util.Collections; -import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; import static org.hamcrest.CoreMatchers.equalTo; @@ -58,13 +56,7 @@ public void testSerialization() throws Exception { .allowAliasToMultipleIndices(randomBoolean()) .allowClosedIndices(randomBoolean()) ) - .selectorOptions( - IndicesOptions.SelectorOptions.builder() - .setDefaultSelectors( - EnumSet.copyOf(randomNonEmptySubsetOf(Set.of(IndexComponentSelector.DATA, IndexComponentSelector.FAILURES))) - ) - .build() - ) + .selectorOptions(IndicesOptions.SelectorOptions.ALL_APPLICABLE) .build(); BytesStreamOutput output = new BytesStreamOutput(); @@ -350,9 +342,7 @@ public void testToXContent() throws IOException { randomBoolean() ); GatekeeperOptions gatekeeperOptions = new GatekeeperOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()); - IndicesOptions.SelectorOptions selectorOptions = new IndicesOptions.SelectorOptions( - EnumSet.copyOf(randomNonEmptySubsetOf(Set.of(IndexComponentSelector.DATA, IndexComponentSelector.FAILURES))) - ); + IndicesOptions.SelectorOptions selectorOptions = new IndicesOptions.SelectorOptions(randomFrom(IndexComponentSelector.values())); IndicesOptions indicesOptions = new IndicesOptions(concreteTargetOptions, wildcardOptions, gatekeeperOptions, selectorOptions); @@ -370,9 +360,9 @@ public void testToXContent() throws IOException { assertThat(map.get("allow_no_indices"), equalTo(wildcardOptions.allowEmptyExpressions())); assertThat(map.get("ignore_throttled"), equalTo(gatekeeperOptions.ignoreThrottled())); String displayValue; - if (IndicesOptions.SelectorOptions.DATA_AND_FAILURE.equals(selectorOptions)) { + if (IndicesOptions.SelectorOptions.ALL_APPLICABLE.equals(selectorOptions)) { displayValue = "include"; - } else if (IndicesOptions.SelectorOptions.ONLY_DATA.equals(selectorOptions)) { + } else if 
(IndicesOptions.SelectorOptions.DATA.equals(selectorOptions)) { displayValue = "exclude"; } else { displayValue = "only"; diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index d58de5ca65ea0..99470918ce063 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -2737,7 +2737,7 @@ public void testDataStreamsWithFailureStore() { // Test include failure store with an exact data stream name { IndicesOptions indicesOptions = IndicesOptions.builder(IndicesOptions.STRICT_EXPAND_OPEN) - .selectorOptions(IndicesOptions.SelectorOptions.DATA_AND_FAILURE) + .selectorOptions(IndicesOptions.SelectorOptions.ALL_APPLICABLE) .build(); Index[] result = indexNameExpressionResolver.concreteIndices(state, indicesOptions, true, "my-data-stream"); assertThat(result.length, equalTo(4)); @@ -2751,7 +2751,7 @@ public void testDataStreamsWithFailureStore() { // We expect that they will be skipped { IndicesOptions indicesOptions = IndicesOptions.builder(IndicesOptions.STRICT_EXPAND_OPEN) - .selectorOptions(IndicesOptions.SelectorOptions.DATA_AND_FAILURE) + .selectorOptions(IndicesOptions.SelectorOptions.ALL_APPLICABLE) .gatekeeperOptions(IndicesOptions.GatekeeperOptions.builder().allowFailureIndices(false).build()) .concreteTargetOptions(IndicesOptions.ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS) .build(); @@ -2765,7 +2765,7 @@ public void testDataStreamsWithFailureStore() { // We expect an error { IndicesOptions indicesOptions = IndicesOptions.builder(IndicesOptions.STRICT_EXPAND_OPEN) - .selectorOptions(IndicesOptions.SelectorOptions.DATA_AND_FAILURE) + .selectorOptions(IndicesOptions.SelectorOptions.ALL_APPLICABLE) 
.gatekeeperOptions(IndicesOptions.GatekeeperOptions.builder().allowFailureIndices(false).build()) .build(); FailureIndexNotSupportedException failureIndexNotSupportedException = expectThrows( @@ -2781,7 +2781,7 @@ public void testDataStreamsWithFailureStore() { // Test only failure store with an exact data stream name { IndicesOptions indicesOptions = IndicesOptions.builder(IndicesOptions.STRICT_EXPAND_OPEN) - .selectorOptions(IndicesOptions.SelectorOptions.ONLY_FAILURES) + .selectorOptions(IndicesOptions.SelectorOptions.FAILURES) .build(); Index[] result = indexNameExpressionResolver.concreteIndices(state, indicesOptions, true, "my-data-stream"); assertThat(result.length, equalTo(2)); @@ -2808,7 +2808,7 @@ public void testDataStreamsWithFailureStore() { // Test include failure store without any expressions { IndicesOptions indicesOptions = IndicesOptions.builder(IndicesOptions.STRICT_EXPAND_OPEN) - .selectorOptions(IndicesOptions.SelectorOptions.DATA_AND_FAILURE) + .selectorOptions(IndicesOptions.SelectorOptions.ALL_APPLICABLE) .build(); Index[] result = indexNameExpressionResolver.concreteIndices(state, indicesOptions, true); assertThat(result.length, equalTo(5)); @@ -2828,7 +2828,7 @@ public void testDataStreamsWithFailureStore() { // Test only failure store without any expressions { IndicesOptions indicesOptions = IndicesOptions.builder(IndicesOptions.STRICT_EXPAND_OPEN) - .selectorOptions(IndicesOptions.SelectorOptions.ONLY_FAILURES) + .selectorOptions(IndicesOptions.SelectorOptions.FAILURES) .build(); Index[] result = indexNameExpressionResolver.concreteIndices(state, indicesOptions, true); assertThat(result.length, equalTo(2)); @@ -2861,7 +2861,7 @@ public void testDataStreamsWithFailureStore() { // Test include failure store with wildcard expression { IndicesOptions indicesOptions = IndicesOptions.builder(IndicesOptions.STRICT_EXPAND_OPEN) - .selectorOptions(IndicesOptions.SelectorOptions.DATA_AND_FAILURE) + 
.selectorOptions(IndicesOptions.SelectorOptions.ALL_APPLICABLE) .build(); Index[] result = indexNameExpressionResolver.concreteIndices(state, indicesOptions, true, "my-*"); assertThat(result.length, equalTo(5)); @@ -2881,7 +2881,7 @@ public void testDataStreamsWithFailureStore() { // Test only failure store with wildcard expression { IndicesOptions indicesOptions = IndicesOptions.builder(IndicesOptions.STRICT_EXPAND_OPEN) - .selectorOptions(IndicesOptions.SelectorOptions.ONLY_FAILURES) + .selectorOptions(IndicesOptions.SelectorOptions.FAILURES) .build(); Index[] result = indexNameExpressionResolver.concreteIndices(state, indicesOptions, true, "my-*"); assertThat(result.length, equalTo(2)); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/RolloverStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/RolloverStep.java index d648dd1c7edf8..3d140f5a9d764 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/RolloverStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/RolloverStep.java @@ -126,9 +126,7 @@ public void performAction( RolloverRequest rolloverRequest = new RolloverRequest(rolloverTarget, null).masterNodeTimeout(TimeValue.MAX_VALUE); if (targetFailureStore) { rolloverRequest.setIndicesOptions( - IndicesOptions.builder(rolloverRequest.indicesOptions()) - .selectorOptions(IndicesOptions.SelectorOptions.ONLY_FAILURES) - .build() + IndicesOptions.builder(rolloverRequest.indicesOptions()).selectorOptions(IndicesOptions.SelectorOptions.FAILURES).build() ); } // We don't wait for active shards when we perform the rollover because the diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStep.java index 67f65481ef63e..aa20e33a3fbf2 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStep.java @@ -247,9 +247,7 @@ RolloverRequest createRolloverRequest( rolloverRequest.setConditions(applyDefaultConditions(conditions, rolloverOnlyIfHasDocuments)); if (targetFailureStore) { rolloverRequest.setIndicesOptions( - IndicesOptions.builder(rolloverRequest.indicesOptions()) - .selectorOptions(IndicesOptions.SelectorOptions.ONLY_FAILURES) - .build() + IndicesOptions.builder(rolloverRequest.indicesOptions()).selectorOptions(IndicesOptions.SelectorOptions.FAILURES).build() ); } return rolloverRequest; From 8a3540fa74de36f62bafebdb719b22ef88879bf7 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Thu, 24 Oct 2024 11:44:13 +0200 Subject: [PATCH 354/449] [DOCS] Clarify start-local trial license info (#115504) --- README.asciidoc | 2 +- docs/reference/run-elasticsearch-locally.asciidoc | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.asciidoc b/README.asciidoc index 8d3c96c659896..bac6d0ed71752 100644 --- a/README.asciidoc +++ b/README.asciidoc @@ -56,8 +56,8 @@ Quickly set up Elasticsearch and Kibana in Docker for local development or testi - If you're using Microsoft Windows, then install https://learn.microsoft.com/en-us/windows/wsl/install[Windows Subsystem for Linux (WSL)]. ==== Trial license +This setup comes with a one-month trial license that includes all Elastic features. -This setup comes with a one-month trial of the Elastic *Platinum* license. After the trial period, the license reverts to *Free and open - Basic*. Refer to https://www.elastic.co/subscriptions[Elastic subscriptions] for more information. 
diff --git a/docs/reference/run-elasticsearch-locally.asciidoc b/docs/reference/run-elasticsearch-locally.asciidoc index 1a115ae926ea2..03885132e4050 100644 --- a/docs/reference/run-elasticsearch-locally.asciidoc +++ b/docs/reference/run-elasticsearch-locally.asciidoc @@ -20,7 +20,7 @@ Refer to <> for a list of produc Quickly set up {es} and {kib} in Docker for local development or testing, using the https://github.com/elastic/start-local?tab=readme-ov-file#-try-elasticsearch-and-kibana-locally[`start-local` script]. -This setup comes with a one-month trial of the Elastic *Platinum* license. +This setup comes with a one-month trial license that includes all Elastic features. After the trial period, the license reverts to *Free and open - Basic*. Refer to https://www.elastic.co/subscriptions[Elastic subscriptions] for more information. @@ -84,4 +84,4 @@ Learn about customizing the setup, logging, and more. [[local-dev-next-steps]] === Next steps -Use our <> to learn the basics of {es}. \ No newline at end of file +Use our <> to learn the basics of {es}. 
From 031a80d2dc8509d9a48a50261e18f6252c00560b Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Thu, 24 Oct 2024 11:30:33 +0100 Subject: [PATCH 355/449] Use BuildVersion rather than Version for reserved state version (#115406) --- .../settings/LocallyMountedSecrets.java | 4 +-- .../org/elasticsearch/env/BuildVersion.java | 15 +++++++++++ .../env/DefaultBuildVersion.java | 12 +++++---- .../internal/BuildExtension.java | 5 ++++ .../service/ReservedClusterStateService.java | 4 +-- .../service/ReservedStateUpdateTask.java | 5 ++-- .../service/ReservedStateVersion.java | 14 ++++------ .../service/FileSettingsServiceTests.java | 4 +-- .../ReservedClusterStateServiceTests.java | 27 ++++++++++--------- .../ReservedLifecycleStateServiceTests.java | 4 +-- 10 files changed, 57 insertions(+), 37 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/settings/LocallyMountedSecrets.java b/server/src/main/java/org/elasticsearch/common/settings/LocallyMountedSecrets.java index e4f1608a52d15..4a2e1cd92d4da 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/LocallyMountedSecrets.java +++ b/server/src/main/java/org/elasticsearch/common/settings/LocallyMountedSecrets.java @@ -11,11 +11,11 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.TransportVersion; -import org.elasticsearch.Version; import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.env.BuildVersion; import org.elasticsearch.env.Environment; import org.elasticsearch.reservedstate.service.ReservedStateVersion; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -130,7 +130,7 @@ public LocallyMountedSecrets(Environment environment) { throw new IllegalStateException("Error processing secrets file", e); } } else { - secrets.set(new LocalFileSecrets(Map.of(), new 
ReservedStateVersion(-1L, Version.CURRENT))); + secrets.set(new LocalFileSecrets(Map.of(), new ReservedStateVersion(-1L, BuildVersion.current()))); } this.secretsDir = secretsDirPath.toString(); this.secretsFile = secretsFilePath.toString(); diff --git a/server/src/main/java/org/elasticsearch/env/BuildVersion.java b/server/src/main/java/org/elasticsearch/env/BuildVersion.java index 3fdf01d7e1bae..5536b06d4d587 100644 --- a/server/src/main/java/org/elasticsearch/env/BuildVersion.java +++ b/server/src/main/java/org/elasticsearch/env/BuildVersion.java @@ -72,6 +72,16 @@ public static BuildVersion fromVersionId(int versionId) { return CurrentExtensionHolder.BUILD_EXTENSION.fromVersionId(versionId); } + /** + * Create a {@link BuildVersion} from a version string. + * + * @param version A string representation of a version + * @return a version representing a build or release of Elasticsearch + */ + public static BuildVersion fromString(String version) { + return CurrentExtensionHolder.BUILD_EXTENSION.fromString(version); + } + /** * Get the current build version. 
* @@ -110,6 +120,11 @@ public BuildVersion currentBuildVersion() { public BuildVersion fromVersionId(int versionId) { return new DefaultBuildVersion(versionId); } + + @Override + public BuildVersion fromString(String version) { + return new DefaultBuildVersion(version); + } } } diff --git a/server/src/main/java/org/elasticsearch/env/DefaultBuildVersion.java b/server/src/main/java/org/elasticsearch/env/DefaultBuildVersion.java index f31b34e89c01d..9cf0d60719653 100644 --- a/server/src/main/java/org/elasticsearch/env/DefaultBuildVersion.java +++ b/server/src/main/java/org/elasticsearch/env/DefaultBuildVersion.java @@ -28,15 +28,17 @@ final class DefaultBuildVersion extends BuildVersion { public static BuildVersion CURRENT = new DefaultBuildVersion(Version.CURRENT.id()); - private final int versionId; private final Version version; DefaultBuildVersion(int versionId) { assert versionId >= 0 : "Release version IDs must be non-negative integers"; - this.versionId = versionId; this.version = Version.fromId(versionId); } + DefaultBuildVersion(String version) { + this.version = Version.fromString(Objects.requireNonNull(version)); + } + @Override public boolean onOrAfterMinimumCompatible() { return Version.CURRENT.minimumCompatibilityVersion().onOrBefore(version); @@ -49,7 +51,7 @@ public boolean isFutureVersion() { @Override public int id() { - return versionId; + return version.id(); } @Override @@ -57,12 +59,12 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; DefaultBuildVersion that = (DefaultBuildVersion) o; - return versionId == that.versionId; + return version.equals(that.version); } @Override public int hashCode() { - return Objects.hash(versionId); + return Objects.hash(version.id()); } @Override diff --git a/server/src/main/java/org/elasticsearch/internal/BuildExtension.java b/server/src/main/java/org/elasticsearch/internal/BuildExtension.java index a23270cb5550c..427e186bc40cf 100644 --- 
a/server/src/main/java/org/elasticsearch/internal/BuildExtension.java +++ b/server/src/main/java/org/elasticsearch/internal/BuildExtension.java @@ -38,4 +38,9 @@ default boolean hasReleaseVersioning() { * Returns the {@link BuildVersion} for a given version identifier. */ BuildVersion fromVersionId(int versionId); + + /** + * Returns the {@link BuildVersion} for a given version string. + */ + BuildVersion fromString(String version); } diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java index 0c5fa61b29cfe..499b5e6515a8c 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java @@ -11,7 +11,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.ClusterState; @@ -22,6 +21,7 @@ import org.elasticsearch.cluster.service.MasterServiceTaskQueue; import org.elasticsearch.common.Priority; import org.elasticsearch.core.Tuple; +import org.elasticsearch.env.BuildVersion; import org.elasticsearch.reservedstate.ReservedClusterStateHandler; import org.elasticsearch.reservedstate.TransformState; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -158,7 +158,7 @@ public void process( } public void initEmpty(String namespace, ActionListener listener) { - var missingVersion = new ReservedStateVersion(EMPTY_VERSION, Version.CURRENT); + var missingVersion = new ReservedStateVersion(EMPTY_VERSION, BuildVersion.current()); var emptyState = new ReservedStateChunk(Map.of(), missingVersion); updateTaskQueue.submitTask( "empty initial cluster state [" + namespace + "]", diff --git 
a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java index c85997f72cc78..90ae9923910d1 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java @@ -11,7 +11,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.ClusterState; @@ -162,11 +161,11 @@ static boolean checkMetadataVersion( ReservedStateVersion reservedStateVersion, ReservedStateVersionCheck versionCheck ) { - if (Version.CURRENT.before(reservedStateVersion.minCompatibleVersion())) { + if (reservedStateVersion.buildVersion().isFutureVersion()) { logger.warn( () -> format( "Reserved cluster state version [%s] for namespace [%s] is not compatible with this Elasticsearch node", - reservedStateVersion.minCompatibleVersion(), + reservedStateVersion.buildVersion(), namespace ) ); diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateVersion.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateVersion.java index e2a21689b9815..116d470755e1c 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateVersion.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateVersion.java @@ -9,10 +9,10 @@ package org.elasticsearch.reservedstate.service; -import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.env.BuildVersion; import 
org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -23,7 +23,7 @@ * File settings metadata class that holds information about * versioning and Elasticsearch version compatibility */ -public record ReservedStateVersion(Long version, Version compatibleWith) implements Writeable { +public record ReservedStateVersion(Long version, BuildVersion buildVersion) implements Writeable { public static final ParseField VERSION = new ParseField("version"); public static final ParseField COMPATIBILITY = new ParseField("compatibility"); @@ -32,7 +32,7 @@ public record ReservedStateVersion(Long version, Version compatibleWith) impleme "reserved_cluster_state_version_metadata", a -> { Long updateId = Long.parseLong((String) a[0]); - Version minCompatVersion = Version.fromString((String) a[1]); + BuildVersion minCompatVersion = BuildVersion.fromString((String) a[1]); return new ReservedStateVersion(updateId, minCompatVersion); } @@ -47,17 +47,13 @@ public static ReservedStateVersion parse(XContentParser parser) { return PARSER.apply(parser, null); } - public Version minCompatibleVersion() { - return compatibleWith; - } - public static ReservedStateVersion readFrom(StreamInput input) throws IOException { - return new ReservedStateVersion(input.readLong(), Version.readVersion(input)); + return new ReservedStateVersion(input.readLong(), BuildVersion.fromVersionId(input.readVInt())); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeLong(version()); - Version.writeVersion(compatibleWith(), out); + out.writeVInt(buildVersion().id()); } } diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java index c0657b5888ad2..8af36e2f9677e 100644 --- 
a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.reservedstate.service; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; @@ -26,6 +25,7 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.env.BuildVersion; import org.elasticsearch.env.Environment; import org.elasticsearch.reservedstate.action.ReservedClusterSettingsAction; import org.elasticsearch.tasks.TaskManager; @@ -277,7 +277,7 @@ public void testStopWorksInMiddleOfProcessing() throws Exception { throw new RuntimeException(e); } }).start(); - return new ReservedStateChunk(Map.of(), new ReservedStateVersion(1L, Version.CURRENT)); + return new ReservedStateChunk(Map.of(), new ReservedStateVersion(1L, BuildVersion.current())); }).when(controller).parse(any(String.class), any()); doAnswer((Answer) invocation -> { diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedClusterStateServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedClusterStateServiceTests.java index d96387618e6bd..5c7dd6cb346b9 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedClusterStateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedClusterStateServiceTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.reservedstate.service; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -26,6 +25,7 @@ import 
org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Releasable; +import org.elasticsearch.env.BuildVersion; import org.elasticsearch.reservedstate.ReservedClusterStateHandler; import org.elasticsearch.reservedstate.TransformState; import org.elasticsearch.reservedstate.action.ReservedClusterSettingsAction; @@ -396,7 +396,7 @@ public TransformState transform(Object source, TransformState prevState) throws assertTrue(ReservedStateErrorTask.isNewError(null, 1L, ReservedStateVersionCheck.HIGHER_VERSION_ONLY)); assertTrue(ReservedStateErrorTask.isNewError(null, 1L, ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION)); - var chunk = new ReservedStateChunk(Map.of("one", "two", "maker", "three"), new ReservedStateVersion(2L, Version.CURRENT)); + var chunk = new ReservedStateChunk(Map.of("one", "two", "maker", "three"), new ReservedStateVersion(2L, BuildVersion.current())); var orderedHandlers = List.of(exceptionThrower.name(), newStateMaker.name()); // We submit a task with two handler, one will cause an exception, the other will create a new state. 
@@ -456,7 +456,7 @@ public void testCheckMetadataVersion() { ReservedStateUpdateTask task = new ReservedStateUpdateTask( "test", - new ReservedStateChunk(Map.of(), new ReservedStateVersion(124L, Version.CURRENT)), + new ReservedStateChunk(Map.of(), new ReservedStateVersion(124L, BuildVersion.current())), ReservedStateVersionCheck.HIGHER_VERSION_ONLY, Map.of(), List.of(), @@ -466,7 +466,7 @@ public void testCheckMetadataVersion() { assertThat("Cluster state should be modified", task.execute(state), not(sameInstance(state))); task = new ReservedStateUpdateTask( "test", - new ReservedStateChunk(Map.of(), new ReservedStateVersion(124L, Version.CURRENT)), + new ReservedStateChunk(Map.of(), new ReservedStateVersion(124L, BuildVersion.current())), ReservedStateVersionCheck.HIGHER_VERSION_ONLY, Map.of(), List.of(), @@ -477,7 +477,7 @@ public void testCheckMetadataVersion() { task = new ReservedStateUpdateTask( "test", - new ReservedStateChunk(Map.of(), new ReservedStateVersion(123L, Version.CURRENT)), + new ReservedStateChunk(Map.of(), new ReservedStateVersion(123L, BuildVersion.current())), ReservedStateVersionCheck.HIGHER_VERSION_ONLY, Map.of(), List.of(), @@ -487,7 +487,7 @@ public void testCheckMetadataVersion() { assertThat("Cluster state should not be modified", task.execute(state), sameInstance(state)); task = new ReservedStateUpdateTask( "test", - new ReservedStateChunk(Map.of(), new ReservedStateVersion(123L, Version.CURRENT)), + new ReservedStateChunk(Map.of(), new ReservedStateVersion(123L, BuildVersion.current())), ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION, Map.of(), List.of(), @@ -498,7 +498,7 @@ public void testCheckMetadataVersion() { task = new ReservedStateUpdateTask( "test", - new ReservedStateChunk(Map.of(), new ReservedStateVersion(122L, Version.CURRENT)), + new ReservedStateChunk(Map.of(), new ReservedStateVersion(122L, BuildVersion.current())), ReservedStateVersionCheck.HIGHER_VERSION_ONLY, Map.of(), List.of(), @@ -508,7 +508,7 @@ public void 
testCheckMetadataVersion() { assertThat("Cluster state should not be modified", task.execute(state), sameInstance(state)); task = new ReservedStateUpdateTask( "test", - new ReservedStateChunk(Map.of(), new ReservedStateVersion(122L, Version.CURRENT)), + new ReservedStateChunk(Map.of(), new ReservedStateVersion(122L, BuildVersion.current())), ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION, Map.of(), List.of(), @@ -519,7 +519,7 @@ public void testCheckMetadataVersion() { task = new ReservedStateUpdateTask( "test", - new ReservedStateChunk(Map.of(), new ReservedStateVersion(124L, Version.fromId(Version.CURRENT.id + 1))), + new ReservedStateChunk(Map.of(), new ReservedStateVersion(124L, BuildVersion.fromVersionId(BuildVersion.current().id() + 1))), ReservedStateVersionCheck.HIGHER_VERSION_ONLY, Map.of(), List.of(), @@ -529,7 +529,7 @@ public void testCheckMetadataVersion() { assertThat("Cluster state should not be modified", task.execute(state), sameInstance(state)); task = new ReservedStateUpdateTask( "test", - new ReservedStateChunk(Map.of(), new ReservedStateVersion(124L, Version.fromId(Version.CURRENT.id + 1))), + new ReservedStateChunk(Map.of(), new ReservedStateVersion(124L, BuildVersion.fromVersionId(BuildVersion.current().id() + 1))), ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION, Map.of(), List.of(), @@ -627,7 +627,7 @@ public void testCheckAndReportError() { assertNull(controller.checkAndReportError("test", List.of(), null, ReservedStateVersionCheck.HIGHER_VERSION_ONLY)); verify(controller, times(0)).updateErrorState(any()); - var version = new ReservedStateVersion(2L, Version.CURRENT); + var version = new ReservedStateVersion(2L, BuildVersion.current()); var error = controller.checkAndReportError("test", List.of("test error"), version, ReservedStateVersionCheck.HIGHER_VERSION_ONLY); assertThat(error, instanceOf(IllegalStateException.class)); assertThat(error.getMessage(), is("Error processing state change request for test, errors: test error")); @@ 
-659,7 +659,10 @@ public TransformState transform(Object source, TransformState prevState) { Metadata metadata = Metadata.builder().put(operatorMetadata).build(); ClusterState state = ClusterState.builder(new ClusterName("test")).metadata(metadata).build(); - var chunk = new ReservedStateChunk(Map.of("non-state", "two", "maker", "three"), new ReservedStateVersion(2L, Version.CURRENT)); + var chunk = new ReservedStateChunk( + Map.of("non-state", "two", "maker", "three"), + new ReservedStateVersion(2L, BuildVersion.current()) + ); var orderedHandlers = List.of(exceptionThrower.name(), newStateMaker.name()); ClusterService clusterService = mock(ClusterService.class); diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/ReservedLifecycleStateServiceTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/ReservedLifecycleStateServiceTests.java index aab89c6620b52..bcd6026618a05 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/ReservedLifecycleStateServiceTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/ReservedLifecycleStateServiceTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ilm.action; -import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.repositories.reservedstate.ReservedRepositoryAction; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterModule; @@ -22,6 +21,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.env.BuildVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.reservedstate.TransformState; import org.elasticsearch.reservedstate.action.ReservedClusterSettingsAction; @@ -418,7 +418,7 @@ public void testOperatorControllerWithPluginPackage() { ) ) ), - new ReservedStateVersion(123L, Version.CURRENT) + new 
ReservedStateVersion(123L, BuildVersion.current()) ); controller.process("operator", pack, randomFrom(ReservedStateVersionCheck.values()), x::set); From 327f23254a8ea26bb462488b4cd06ab8604acd8e Mon Sep 17 00:00:00 2001 From: Andrei Dan Date: Thu, 24 Oct 2024 11:41:47 +0100 Subject: [PATCH 356/449] Allow for queries on _tier to skip shards during coordinator rewrite (#114990) The `_tier` metadata field was not used on the coordinator when rewriting queries in order to exclude shards that don't match. This led to queries in the following form to continue to report failures even though the only unavailable shards were in the tier that was excluded from search (frozen tier in this example): ``` POST testing/_search { "query": { "bool": { "must_not": [ { "term": { "_tier": "data_frozen" } } ] } } } ``` This PR addresses this by having the queries that can execute on `_tier` (term, match, query string, simple query string, prefix, wildcard) execute a coordinator rewrite to exclude the indices that don't match the `_tier` query **before** attempting to reach out to the shards (shards that might not be available and raise errors).
Fixes #114910 --- docs/changelog/114990.yaml | 6 + .../query/CoordinatorRewriteContext.java | 65 +++++++- .../CoordinatorRewriteContextProvider.java | 9 +- .../index/query/PrefixQueryBuilder.java | 18 ++- .../index/query/QueryRewriteContext.java | 21 +++ .../index/query/TermQueryBuilder.java | 18 ++- .../index/query/TermsQueryBuilder.java | 17 ++- .../index/query/WildcardQueryBuilder.java | 20 ++- .../index/query/PrefixQueryBuilderTests.java | 35 +++++ .../index/query/QueryRewriteContextTests.java | 131 ++++++++++++++++ .../index/query/TermQueryBuilderTests.java | 34 +++++ .../index/query/TermsQueryBuilderTests.java | 33 ++++ .../query/WildcardQueryBuilderTests.java | 34 +++++ .../test/AbstractBuilderTestCase.java | 15 +- .../mapper/DataTierFieldMapper.java | 26 +--- .../core/LocalStateCompositeXPackPlugin.java | 7 +- ...pshotsCanMatchOnCoordinatorIntegTests.java | 143 +++++++++++++++++- 17 files changed, 594 insertions(+), 38 deletions(-) create mode 100644 docs/changelog/114990.yaml create mode 100644 server/src/test/java/org/elasticsearch/index/query/QueryRewriteContextTests.java diff --git a/docs/changelog/114990.yaml b/docs/changelog/114990.yaml new file mode 100644 index 0000000000000..2575942d15bf5 --- /dev/null +++ b/docs/changelog/114990.yaml @@ -0,0 +1,6 @@ +pr: 114990 +summary: Allow for querries on `_tier` to skip shards in the `can_match` phase +area: Search +type: bug +issues: + - 114910 diff --git a/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java index 3e5deeeebae5d..964358610e074 100644 --- a/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java @@ -9,17 +9,23 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.Query; import org.elasticsearch.client.internal.Client; import 
org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.regex.Regex; import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.mapper.ConstantFieldType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.index.mapper.ValueFetcher; import org.elasticsearch.index.shard.IndexLongFieldRange; import org.elasticsearch.indices.DateFieldRangeInfo; import org.elasticsearch.xcontent.XContentParserConfiguration; import java.util.Collections; +import java.util.Map; import java.util.function.LongSupplier; /** @@ -30,20 +36,57 @@ * and skip the shards that don't hold queried data. See IndexMetadata for more details. */ public class CoordinatorRewriteContext extends QueryRewriteContext { + + public static final String TIER_FIELD_NAME = "_tier"; + + private static final ConstantFieldType TIER_FIELD_TYPE = new ConstantFieldType(TIER_FIELD_NAME, Map.of()) { + @Override + public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + throw new UnsupportedOperationException("fetching field values is not supported on the coordinator node"); + } + + @Override + public String typeName() { + return TIER_FIELD_NAME; + } + + @Override + protected boolean matches(String pattern, boolean caseInsensitive, QueryRewriteContext context) { + if (caseInsensitive) { + pattern = Strings.toLowercaseAscii(pattern); + } + + String tierPreference = context.getTierPreference(); + if (tierPreference == null) { + return false; + } + return Regex.simpleMatch(pattern, tierPreference); + } + + @Override + public Query existsQuery(SearchExecutionContext context) { + throw new UnsupportedOperationException("field exists query is not supported on the coordinator node"); + } + }; + private final DateFieldRangeInfo dateFieldRangeInfo; + private final String tier; /** * Context 
for coordinator search rewrites based on time ranges for the @timestamp field and/or 'event.ingested' field + * * @param parserConfig * @param client * @param nowInMillis * @param dateFieldRangeInfo range and field type info for @timestamp and 'event.ingested' + * @param tier the configured data tier (via the _tier_preference setting) for the index */ public CoordinatorRewriteContext( XContentParserConfiguration parserConfig, Client client, LongSupplier nowInMillis, - DateFieldRangeInfo dateFieldRangeInfo + DateFieldRangeInfo dateFieldRangeInfo, + String tier ) { super( parserConfig, @@ -63,10 +106,12 @@ public CoordinatorRewriteContext( null ); this.dateFieldRangeInfo = dateFieldRangeInfo; + this.tier = tier; } /** - * @param fieldName Must be one of DataStream.TIMESTAMP_FIELD_FIELD or IndexMetadata.EVENT_INGESTED_FIELD_NAME + * @param fieldName Must be one of DataStream.TIMESTAMP_FIELD_FIELD, IndexMetadata.EVENT_INGESTED_FIELD_NAME, or + * DataTierFiledMapper.NAME * @return MappedField with type for the field. Returns null if fieldName is not one of the allowed field names. */ @Nullable @@ -75,6 +120,8 @@ public MappedFieldType getFieldType(String fieldName) { return dateFieldRangeInfo.timestampFieldType(); } else if (IndexMetadata.EVENT_INGESTED_FIELD_NAME.equals(fieldName)) { return dateFieldRangeInfo.eventIngestedFieldType(); + } else if (TIER_FIELD_NAME.equals(fieldName)) { + return TIER_FIELD_TYPE; } else { return null; } @@ -99,4 +146,18 @@ public IndexLongFieldRange getFieldRange(String fieldName) { public CoordinatorRewriteContext convertToCoordinatorRewriteContext() { return this; } + + @Override + public String getTierPreference() { + // dominant branch first (tier preference is configured) + return tier.isEmpty() == false ? tier : null; + } + + /** + * We're holding on to the index tier in the context as otherwise we'd need + * to re-parse it from the index settings when evaluating the _tier field. 
+ */ + public String tier() { + return tier; + } } diff --git a/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContextProvider.java b/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContextProvider.java index 67042a98db42a..e48d7699d03ef 100644 --- a/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContextProvider.java +++ b/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContextProvider.java @@ -52,6 +52,12 @@ public CoordinatorRewriteContext getCoordinatorRewriteContext(Index index) { return null; } DateFieldRangeInfo dateFieldRangeInfo = mappingSupplier.apply(index); + // we've now added a coordinator rewrite based on the _tier field so the requirement + // for the timestamps fields to be present is artificial (we could do a coordinator + // rewrite only based on the _tier field) and we might decide to remove this artificial + // limitation to enable coordinator rewrites based on _tier for hot and warm indices + // (currently the _tier coordinator rewrite is only available for mounted and partially mounted + // indices) if (dateFieldRangeInfo == null) { return null; } @@ -74,7 +80,8 @@ public CoordinatorRewriteContext getCoordinatorRewriteContext(Index index) { parserConfig, client, nowInMillis, - new DateFieldRangeInfo(timestampFieldType, timestampRange, dateFieldRangeInfo.eventIngestedFieldType(), eventIngestedRange) + new DateFieldRangeInfo(timestampFieldType, timestampRange, dateFieldRangeInfo.eventIngestedFieldType(), eventIngestedRange), + indexMetadata.getTierPreference().isEmpty() == false ? 
indexMetadata.getTierPreference().getFirst() : "" ); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java index 24817b778a4da..fcf986191da23 100644 --- a/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.ConstantFieldType; import org.elasticsearch.index.mapper.MappedFieldType; @@ -189,11 +190,24 @@ public String getWriteableName() { } @Override - protected QueryBuilder doIndexMetadataRewrite(QueryRewriteContext context) throws IOException { + protected QueryBuilder doIndexMetadataRewrite(QueryRewriteContext context) { MappedFieldType fieldType = context.getFieldType(this.fieldName); if (fieldType == null) { return new MatchNoneQueryBuilder("The \"" + getName() + "\" query is against a field that does not exist"); - } else if (fieldType instanceof ConstantFieldType constantFieldType) { + } + return maybeRewriteBasedOnConstantFields(fieldType, context); + } + + @Override + protected QueryBuilder doCoordinatorRewrite(CoordinatorRewriteContext coordinatorRewriteContext) { + MappedFieldType fieldType = coordinatorRewriteContext.getFieldType(this.fieldName); + // we don't rewrite a null field type to `match_none` on the coordinator because the coordinator has access + // to only a subset of fields see {@link CoordinatorRewriteContext#getFieldType} + return maybeRewriteBasedOnConstantFields(fieldType, coordinatorRewriteContext); + } + + private QueryBuilder maybeRewriteBasedOnConstantFields(@Nullable MappedFieldType fieldType, 
QueryRewriteContext context) { + if (fieldType instanceof ConstantFieldType constantFieldType) { // This logic is correct for all field types, but by only applying it to constant // fields we also have the guarantee that it doesn't perform I/O, which is important // since rewrites might happen on a network thread. diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java index 8808cd79072f6..fce74aa60ab16 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java @@ -11,9 +11,12 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ResolvedIndices; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.routing.allocation.DataTier; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.Index; @@ -407,4 +410,22 @@ public ResolvedIndices getResolvedIndices() { public PointInTimeBuilder getPointInTimeBuilder() { return pit; } + + /** + * Retrieve the first tier preference from the index setting. If the setting is not + * present, then return null. + */ + @Nullable + public String getTierPreference() { + Settings settings = getIndexSettings().getSettings(); + String value = DataTier.TIER_PREFERENCE_SETTING.get(settings); + + if (Strings.hasText(value) == false) { + return null; + } + + // Tier preference can be a comma-delimited list of tiers, ordered by preference + // It was decided we should only test the first of these potentially multiple preferences. 
+ return value.split(",")[0].trim(); + } } diff --git a/server/src/main/java/org/elasticsearch/index/query/TermQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/TermQueryBuilder.java index 2978b3bfbf69c..113f66f3e58de 100644 --- a/server/src/main/java/org/elasticsearch/index/query/TermQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/TermQueryBuilder.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.ConstantFieldType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.xcontent.ParseField; @@ -170,11 +171,24 @@ protected void addExtraXContent(XContentBuilder builder, Params params) throws I } @Override - protected QueryBuilder doIndexMetadataRewrite(QueryRewriteContext context) throws IOException { + protected QueryBuilder doIndexMetadataRewrite(QueryRewriteContext context) { MappedFieldType fieldType = context.getFieldType(this.fieldName); if (fieldType == null) { return new MatchNoneQueryBuilder("The \"" + getName() + "\" query is against a field that does not exist"); - } else if (fieldType instanceof ConstantFieldType constantFieldType) { + } + return maybeRewriteBasedOnConstantFields(fieldType, context); + } + + @Override + protected QueryBuilder doCoordinatorRewrite(CoordinatorRewriteContext coordinatorRewriteContext) { + MappedFieldType fieldType = coordinatorRewriteContext.getFieldType(this.fieldName); + // we don't rewrite a null field type to `match_none` on the coordinator because the coordinator has access + // to only a subset of fields see {@link CoordinatorRewriteContext#getFieldType} + return maybeRewriteBasedOnConstantFields(fieldType, coordinatorRewriteContext); + } + + private QueryBuilder maybeRewriteBasedOnConstantFields(@Nullable MappedFieldType fieldType, 
QueryRewriteContext context) { + if (fieldType instanceof ConstantFieldType constantFieldType) { // This logic is correct for all field types, but by only applying it to constant // fields we also have the guarantee that it doesn't perform I/O, which is important // since rewrites might happen on a network thread. diff --git a/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java index 4035bc02fba79..dec4090a3e6bd 100644 --- a/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java @@ -393,11 +393,24 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws } @Override - protected QueryBuilder doIndexMetadataRewrite(QueryRewriteContext context) throws IOException { + protected QueryBuilder doIndexMetadataRewrite(QueryRewriteContext context) { MappedFieldType fieldType = context.getFieldType(this.fieldName); if (fieldType == null) { return new MatchNoneQueryBuilder("The \"" + getName() + "\" query is against a field that does not exist"); - } else if (fieldType instanceof ConstantFieldType constantFieldType) { + } + return maybeRewriteBasedOnConstantFields(fieldType, context); + } + + @Override + protected QueryBuilder doCoordinatorRewrite(CoordinatorRewriteContext coordinatorRewriteContext) { + MappedFieldType fieldType = coordinatorRewriteContext.getFieldType(this.fieldName); + // we don't rewrite a null field type to `match_none` on the coordinator because the coordinator has access + // to only a subset of fields see {@link CoordinatorRewriteContext#getFieldType} + return maybeRewriteBasedOnConstantFields(fieldType, coordinatorRewriteContext); + } + + private QueryBuilder maybeRewriteBasedOnConstantFields(@Nullable MappedFieldType fieldType, QueryRewriteContext context) { + if (fieldType instanceof ConstantFieldType constantFieldType) { // This 
logic is correct for all field types, but by only applying it to constant // fields we also have the guarantee that it doesn't perform I/O, which is important // since rewrites might happen on a network thread. diff --git a/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java index f287812ebbc10..419195e5e5ba5 100644 --- a/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.ConstantFieldType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.support.QueryParsers; @@ -200,11 +201,24 @@ public static WildcardQueryBuilder fromXContent(XContentParser parser) throws IO } @Override - protected QueryBuilder doIndexMetadataRewrite(QueryRewriteContext context) throws IOException { + protected QueryBuilder doIndexMetadataRewrite(QueryRewriteContext context) { MappedFieldType fieldType = context.getFieldType(this.fieldName); if (fieldType == null) { - return new MatchNoneQueryBuilder("The \"" + getName() + "\" query is against a field that does not exist"); - } else if (fieldType instanceof ConstantFieldType constantFieldType) { + return new MatchNoneQueryBuilder("The \"" + getName() + "\" query is against a field that does not exist"); + } + return maybeRewriteBasedOnConstantFields(fieldType, context); + } + + @Override + protected QueryBuilder doCoordinatorRewrite(CoordinatorRewriteContext coordinatorRewriteContext) { + MappedFieldType fieldType = coordinatorRewriteContext.getFieldType(this.fieldName); + // we don't rewrite a null field type to 
`match_none` on the coordinator because the coordinator has access + // to only a subset of fields see {@link CoordinatorRewriteContext#getFieldType} + return maybeRewriteBasedOnConstantFields(fieldType, coordinatorRewriteContext); + } + + private QueryBuilder maybeRewriteBasedOnConstantFields(@Nullable MappedFieldType fieldType, QueryRewriteContext context) { + if (fieldType instanceof ConstantFieldType constantFieldType) { // This logic is correct for all field types, but by only applying it to constant // fields we also have the guarantee that it doesn't perform I/O, which is important // since rewrites might happen on a network thread. diff --git a/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java index 0260fa2ef4cc8..918815f2a4f77 100644 --- a/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java @@ -17,7 +17,9 @@ import org.apache.lucene.search.Query; import org.elasticsearch.common.ParsingException; import org.elasticsearch.core.Strings; +import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.test.AbstractQueryTestCase; +import org.hamcrest.CoreMatchers; import org.hamcrest.Matchers; import java.io.IOException; @@ -175,4 +177,37 @@ public void testMustRewrite() throws IOException { IllegalStateException e = expectThrows(IllegalStateException.class, () -> queryBuilder.toQuery(context)); assertEquals("Rewrite first", e.getMessage()); } + + public void testCoordinatorTierRewriteToMatchAll() throws IOException { + QueryBuilder query = new PrefixQueryBuilder("_tier", "data_fro"); + final String timestampFieldName = "@timestamp"; + long minTimestamp = 1685714000000L; + long maxTimestamp = 1685715000000L; + final CoordinatorRewriteContext coordinatorRewriteContext = createCoordinatorRewriteContext( + new 
DateFieldMapper.DateFieldType(timestampFieldName), + minTimestamp, + maxTimestamp, + "data_frozen" + ); + + QueryBuilder rewritten = query.rewrite(coordinatorRewriteContext); + assertThat(rewritten, CoreMatchers.instanceOf(MatchAllQueryBuilder.class)); + } + + public void testCoordinatorTierRewriteToMatchNone() throws IOException { + QueryBuilder query = QueryBuilders.boolQuery().mustNot(new PrefixQueryBuilder("_tier", "data_fro")); + final String timestampFieldName = "@timestamp"; + long minTimestamp = 1685714000000L; + long maxTimestamp = 1685715000000L; + final CoordinatorRewriteContext coordinatorRewriteContext = createCoordinatorRewriteContext( + new DateFieldMapper.DateFieldType(timestampFieldName), + minTimestamp, + maxTimestamp, + "data_frozen" + ); + + QueryBuilder rewritten = query.rewrite(coordinatorRewriteContext); + assertThat(rewritten, CoreMatchers.instanceOf(MatchNoneQueryBuilder.class)); + } + } diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryRewriteContextTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryRewriteContextTests.java new file mode 100644 index 0000000000000..0b2a8ab4856b3 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/query/QueryRewriteContextTests.java @@ -0,0 +1,131 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.index.query; + +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.routing.allocation.DataTier; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.indices.DateFieldRangeInfo; +import org.elasticsearch.test.ESTestCase; + +import java.util.Collections; + +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + +public class QueryRewriteContextTests extends ESTestCase { + + public void testGetTierPreference() { + { + // cold->hot tier preference + IndexMetadata metadata = newIndexMeta( + "index", + Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .put(DataTier.TIER_PREFERENCE, "data_cold,data_warm,data_hot") + .build() + ); + QueryRewriteContext context = new QueryRewriteContext( + parserConfig(), + null, + System::currentTimeMillis, + null, + MappingLookup.EMPTY, + Collections.emptyMap(), + new IndexSettings(metadata, Settings.EMPTY), + null, + null, + null, + null, + null, + null, + null, + null + ); + + assertThat(context.getTierPreference(), is("data_cold")); + } + + { + // missing tier preference + IndexMetadata metadata = newIndexMeta( + "index", + Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()).build() + ); + QueryRewriteContext context = new QueryRewriteContext( + parserConfig(), + null, + System::currentTimeMillis, + null, + MappingLookup.EMPTY, + Collections.emptyMap(), + new IndexSettings(metadata, Settings.EMPTY), + null, + null, + null, + null, + null, + null, + null, + null + ); + + assertThat(context.getTierPreference(), is(nullValue())); + } + + { + // coordinator rewrite context + IndexMetadata metadata = newIndexMeta( + "index", + 
Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .put(DataTier.TIER_PREFERENCE, "data_cold,data_warm,data_hot") + .build() + ); + CoordinatorRewriteContext coordinatorRewriteContext = new CoordinatorRewriteContext( + parserConfig(), + null, + System::currentTimeMillis, + new DateFieldRangeInfo(null, null, new DateFieldMapper.DateFieldType(IndexMetadata.EVENT_INGESTED_FIELD_NAME), null), + "data_frozen" + ); + + assertThat(coordinatorRewriteContext.getTierPreference(), is("data_frozen")); + } + { + // coordinator rewrite context empty tier + IndexMetadata metadata = newIndexMeta( + "index", + Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .put(DataTier.TIER_PREFERENCE, "data_cold,data_warm,data_hot") + .build() + ); + CoordinatorRewriteContext coordinatorRewriteContext = new CoordinatorRewriteContext( + parserConfig(), + null, + System::currentTimeMillis, + new DateFieldRangeInfo(null, null, new DateFieldMapper.DateFieldType(IndexMetadata.EVENT_INGESTED_FIELD_NAME), null), + "" + ); + + assertThat(coordinatorRewriteContext.getTierPreference(), is(nullValue())); + } + } + + public static IndexMetadata newIndexMeta(String name, Settings indexSettings) { + return IndexMetadata.builder(name).settings(indexSettings(IndexVersion.current(), 1, 1).put(indexSettings)).build(); + } + +} diff --git a/server/src/test/java/org/elasticsearch/index/query/TermQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermQueryBuilderTests.java index b5cf42cf5df28..bbac216754eed 100644 --- a/server/src/test/java/org/elasticsearch/index/query/TermQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermQueryBuilderTests.java @@ -17,9 +17,11 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.index.mapper.DateFieldMapper; import 
org.elasticsearch.index.mapper.FieldTypeTestCase; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.xcontent.json.JsonStringEncoder; +import org.hamcrest.CoreMatchers; import java.io.IOException; import java.util.Locale; @@ -238,4 +240,36 @@ public void testLongTerm() throws IOException { { "term" : { "foo" : "%s" } }""", longTerm))); assertThat(e.getMessage(), containsString("term starting with [aaaaa")); } + + public void testCoordinatorTierRewriteToMatchAll() throws IOException { + QueryBuilder query = new TermQueryBuilder("_tier", "data_frozen"); + final String timestampFieldName = "@timestamp"; + long minTimestamp = 1685714000000L; + long maxTimestamp = 1685715000000L; + final CoordinatorRewriteContext coordinatorRewriteContext = createCoordinatorRewriteContext( + new DateFieldMapper.DateFieldType(timestampFieldName), + minTimestamp, + maxTimestamp, + "data_frozen" + ); + + QueryBuilder rewritten = query.rewrite(coordinatorRewriteContext); + assertThat(rewritten, CoreMatchers.instanceOf(MatchAllQueryBuilder.class)); + } + + public void testCoordinatorTierRewriteToMatchNone() throws IOException { + QueryBuilder query = QueryBuilders.boolQuery().mustNot(new TermQueryBuilder("_tier", "data_frozen")); + final String timestampFieldName = "@timestamp"; + long minTimestamp = 1685714000000L; + long maxTimestamp = 1685715000000L; + final CoordinatorRewriteContext coordinatorRewriteContext = createCoordinatorRewriteContext( + new DateFieldMapper.DateFieldType(timestampFieldName), + minTimestamp, + maxTimestamp, + "data_frozen" + ); + + QueryBuilder rewritten = query.rewrite(coordinatorRewriteContext); + assertThat(rewritten, CoreMatchers.instanceOf(MatchNoneQueryBuilder.class)); + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java index 1ce69355379de..2faee7bc89eb5 100644 --- 
a/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.index.get.GetResult; +import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.indices.TermsLookup; import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.xcontent.XContentBuilder; @@ -317,6 +318,38 @@ public void testLongTerm() throws IOException { assertThat(e.getMessage(), containsString("term starting with [aaaaa")); } + public void testCoordinatorTierRewriteToMatchAll() throws IOException { + QueryBuilder query = new TermsQueryBuilder("_tier", "data_frozen"); + final String timestampFieldName = "@timestamp"; + long minTimestamp = 1685714000000L; + long maxTimestamp = 1685715000000L; + final CoordinatorRewriteContext coordinatorRewriteContext = createCoordinatorRewriteContext( + new DateFieldMapper.DateFieldType(timestampFieldName), + minTimestamp, + maxTimestamp, + "data_frozen" + ); + + QueryBuilder rewritten = query.rewrite(coordinatorRewriteContext); + assertThat(rewritten, CoreMatchers.instanceOf(MatchAllQueryBuilder.class)); + } + + public void testCoordinatorTierRewriteToMatchNone() throws IOException { + QueryBuilder query = QueryBuilders.boolQuery().mustNot(new TermsQueryBuilder("_tier", "data_frozen")); + final String timestampFieldName = "@timestamp"; + long minTimestamp = 1685714000000L; + long maxTimestamp = 1685715000000L; + final CoordinatorRewriteContext coordinatorRewriteContext = createCoordinatorRewriteContext( + new DateFieldMapper.DateFieldType(timestampFieldName), + minTimestamp, + maxTimestamp, + "data_frozen" + ); + + QueryBuilder rewritten = query.rewrite(coordinatorRewriteContext); + assertThat(rewritten, CoreMatchers.instanceOf(MatchNoneQueryBuilder.class)); + } + @Override 
protected QueryBuilder parseQuery(XContentParser parser) throws IOException { QueryBuilder query = super.parseQuery(parser); diff --git a/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java index 7ee6d75a08736..182bd4d6b5b86 100644 --- a/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java @@ -15,7 +15,9 @@ import org.apache.lucene.search.WildcardQuery; import org.elasticsearch.common.ParsingException; import org.elasticsearch.core.Strings; +import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.test.AbstractQueryTestCase; +import org.hamcrest.CoreMatchers; import java.io.IOException; import java.util.HashMap; @@ -166,4 +168,36 @@ public void testMustRewrite() throws IOException { IllegalStateException e = expectThrows(IllegalStateException.class, () -> queryBuilder.toQuery(context)); assertEquals("Rewrite first", e.getMessage()); } + + public void testCoordinatorTierRewriteToMatchAll() throws IOException { + QueryBuilder query = new WildcardQueryBuilder("_tier", "data_fr*"); + final String timestampFieldName = "@timestamp"; + long minTimestamp = 1685714000000L; + long maxTimestamp = 1685715000000L; + final CoordinatorRewriteContext coordinatorRewriteContext = createCoordinatorRewriteContext( + new DateFieldMapper.DateFieldType(timestampFieldName), + minTimestamp, + maxTimestamp, + "data_frozen" + ); + + QueryBuilder rewritten = query.rewrite(coordinatorRewriteContext); + assertThat(rewritten, CoreMatchers.instanceOf(MatchAllQueryBuilder.class)); + } + + public void testCoordinatorTierRewriteToMatchNone() throws IOException { + QueryBuilder query = QueryBuilders.boolQuery().mustNot(new WildcardQueryBuilder("_tier", "data_fro*")); + final String timestampFieldName = "@timestamp"; + long minTimestamp = 
1685714000000L; + long maxTimestamp = 1685715000000L; + final CoordinatorRewriteContext coordinatorRewriteContext = createCoordinatorRewriteContext( + new DateFieldMapper.DateFieldType(timestampFieldName), + minTimestamp, + maxTimestamp, + "data_frozen" + ); + + QueryBuilder rewritten = query.rewrite(coordinatorRewriteContext); + assertThat(rewritten, CoreMatchers.instanceOf(MatchNoneQueryBuilder.class)); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java index 77ff194e2681d..0543bc7a78f8b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java @@ -342,6 +342,15 @@ protected static CoordinatorRewriteContext createCoordinatorRewriteContext( return serviceHolder.createCoordinatorContext(dateFieldType, min, max); } + protected static CoordinatorRewriteContext createCoordinatorRewriteContext( + DateFieldMapper.DateFieldType dateFieldType, + long min, + long max, + String tier + ) { + return serviceHolder.createCoordinatorContext(dateFieldType, min, max, tier); + } + protected static DataRewriteContext dataRewriteContext() { return serviceHolder.createDataContext(); } @@ -625,13 +634,17 @@ QueryRewriteContext createQueryRewriteContext() { } CoordinatorRewriteContext createCoordinatorContext(DateFieldMapper.DateFieldType dateFieldType, long min, long max) { + return createCoordinatorContext(dateFieldType, min, max, ""); + } + + CoordinatorRewriteContext createCoordinatorContext(DateFieldMapper.DateFieldType dateFieldType, long min, long max, String tier) { DateFieldRangeInfo timestampFieldInfo = new DateFieldRangeInfo( dateFieldType, IndexLongFieldRange.NO_SHARDS.extendWithShardRange(0, 1, ShardLongFieldRange.of(min, max)), dateFieldType, IndexLongFieldRange.NO_SHARDS.extendWithShardRange(0, 1, 
ShardLongFieldRange.of(min, max)) ); - return new CoordinatorRewriteContext(parserConfiguration, this.client, () -> nowInMillis, timestampFieldInfo); + return new CoordinatorRewriteContext(parserConfiguration, this.client, () -> nowInMillis, timestampFieldInfo, tier); } DataRewriteContext createDataContext() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/routing/allocation/mapper/DataTierFieldMapper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/routing/allocation/mapper/DataTierFieldMapper.java index 527f8d1c176ec..0e185a90ed39b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/routing/allocation/mapper/DataTierFieldMapper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/routing/allocation/mapper/DataTierFieldMapper.java @@ -10,10 +10,8 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; -import org.elasticsearch.cluster.routing.allocation.DataTier; import org.elasticsearch.common.Strings; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.ConstantFieldType; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MetadataFieldMapper; @@ -55,7 +53,7 @@ protected boolean matches(String pattern, boolean caseInsensitive, QueryRewriteC pattern = Strings.toLowercaseAscii(pattern); } - String tierPreference = getTierPreference(context); + String tierPreference = context.getTierPreference(); if (tierPreference == null) { return false; } @@ -64,7 +62,7 @@ protected boolean matches(String pattern, boolean caseInsensitive, QueryRewriteC @Override public Query existsQuery(SearchExecutionContext context) { - String tierPreference = getTierPreference(context); + String tierPreference = context.getTierPreference(); if (tierPreference == null) { return new 
MatchNoDocsQuery(); } @@ -77,26 +75,9 @@ public ValueFetcher valueFetcher(SearchExecutionContext context, String format) throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); } - String tierPreference = getTierPreference(context); + String tierPreference = context.getTierPreference(); return tierPreference == null ? ValueFetcher.EMPTY : ValueFetcher.singleton(tierPreference); } - - /** - * Retrieve the first tier preference from the index setting. If the setting is not - * present, then return null. - */ - private static String getTierPreference(QueryRewriteContext context) { - Settings settings = context.getIndexSettings().getSettings(); - String value = DataTier.TIER_PREFERENCE_SETTING.get(settings); - - if (Strings.hasText(value) == false) { - return null; - } - - // Tier preference can be a comma-delimited list of tiers, ordered by preference - // It was decided we should only test the first of these potentially multiple preferences. 
- return value.split(",")[0].trim(); - } } public DataTierFieldMapper() { @@ -107,4 +88,5 @@ public DataTierFieldMapper() { protected String contentType() { return CONTENT_TYPE; } + } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java index 918976c0d3db8..1f2c89c473a62 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java @@ -637,10 +637,15 @@ public Collection getSystemIndexDescriptors(Settings sett @Override public Map getMetadataMappers() { - return filterPlugins(MapperPlugin.class).stream() + Map pluginsMetadataMappers = filterPlugins(MapperPlugin.class).stream() .map(MapperPlugin::getMetadataMappers) .flatMap(map -> map.entrySet().stream()) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + + // the xpack plugin itself exposes a metadata mapper so let's include it as well + Map metadataMappersIncludingXPackPlugin = new HashMap<>(pluginsMetadataMappers); + metadataMappersIncludingXPackPlugin.putAll(super.getMetadataMappers()); + return metadataMappersIncludingXPackPlugin; } @Override diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java index eab73fbe5ad04..ed42d86bc8c49 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java +++ 
b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java @@ -20,14 +20,18 @@ import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.index.Index; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.RangeQueryBuilder; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.index.shard.IndexLongFieldRange; import org.elasticsearch.indices.DateFieldRangeInfo; import org.elasticsearch.indices.IndicesService; @@ -36,6 +40,7 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.NodeRoles; import org.elasticsearch.test.junit.annotations.TestIssueLogging; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.xcontent.XContentFactory; @@ -51,6 +56,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING; +import static org.elasticsearch.cluster.node.DiscoveryNode.getRolesFromSettings; import static org.elasticsearch.index.IndexSettings.INDEX_SOFT_DELETES_SETTING; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; @@ -76,14 +82,24 @@ protected Collection> nodePlugins() { @Override protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { final Settings initialSettings = super.nodeSettings(nodeOrdinal, otherSettings); - if (DiscoveryNode.canContainData(otherSettings)) { + + if (DiscoveryNode.canContainData(otherSettings) + && getRolesFromSettings(otherSettings).stream() + .anyMatch( + nr -> nr.roleName().equals(DiscoveryNodeRole.DATA_FROZEN_NODE_ROLE.roleName()) + || nr.roleName().equals(DiscoveryNodeRole.DATA_ROLE.roleName()) + )) { return Settings.builder() .put(initialSettings) // Have a shared cache of reasonable size available on each node because tests randomize over frozen and cold allocation .put(SharedBlobCacheService.SHARED_CACHE_SIZE_SETTING.getKey(), ByteSizeValue.ofMb(randomLongBetween(1, 10))) .build(); } else { - return initialSettings; + return Settings.builder() + .put(initialSettings) + // Have a shared cache of reasonable size available on each node because tests randomize over frozen and cold allocation + .putNull(SharedBlobCacheService.SHARED_CACHE_SIZE_SETTING.getKey()) + .build(); } } @@ -955,6 +971,129 @@ public void testSearchableSnapshotShardsThatHaveMatchingDataAreNotSkippedOnTheCo } } + public void testCanMatchSkipsPartiallyMountedIndicesWhenFrozenNodesUnavailable() throws Exception { + internalCluster().startMasterOnlyNode(); + internalCluster().startCoordinatingOnlyNode(Settings.EMPTY); + final String dataNodeHoldingRegularIndex = internalCluster().startNode( + NodeRoles.onlyRole(DiscoveryNodeRole.DATA_CONTENT_NODE_ROLE) + ); + final String dataNodeHoldingSearchableSnapshot = internalCluster().startNode( + NodeRoles.onlyRole(DiscoveryNodeRole.DATA_FROZEN_NODE_ROLE) + ); + + final String indexToMountInFrozen = "frozen-" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + final int shardCount = randomIntBetween(2, 3); + 
createIndexWithTimestampAndEventIngested(indexToMountInFrozen, shardCount, Settings.EMPTY); + final int numDocsFrozenIndex = between(350, 1000); + indexRandomDocs(indexToMountInFrozen, numDocsFrozenIndex); + + final String regularIndex = "regular-" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + createIndexWithTimestampAndEventIngested( + regularIndex, + shardCount, + Settings.builder() + .put(INDEX_ROUTING_REQUIRE_GROUP_SETTING.getConcreteSettingForNamespace("_name").getKey(), dataNodeHoldingRegularIndex) + .build() + ); + int numDocsRegularIndex = between(100, 1000); + indexDocumentsWithTimestampAndEventIngestedDates(regularIndex, numDocsRegularIndex, TIMESTAMP_TEMPLATE_WITHIN_RANGE); + + final String repositoryName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + createRepository(repositoryName, "mock"); + + final SnapshotId snapshotId = createSnapshot(repositoryName, "snapshot-1", List.of(indexToMountInFrozen)).snapshotId(); + assertAcked(indicesAdmin().prepareDelete(indexToMountInFrozen)); + + final String partiallyMountedIndex = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + + final MountSearchableSnapshotRequest mountRequest = new MountSearchableSnapshotRequest( + TEST_REQUEST_TIMEOUT, + partiallyMountedIndex, + repositoryName, + snapshotId.getName(), + indexToMountInFrozen, + Settings.EMPTY, + Strings.EMPTY_ARRAY, + false, + MountSearchableSnapshotRequest.Storage.SHARED_CACHE + ); + client().execute(MountSearchableSnapshotAction.INSTANCE, mountRequest).actionGet(); + + ensureGreen(regularIndex, partiallyMountedIndex); + + // Stop the node holding the searchable snapshots, and since we defined + // the index allocation criteria to require the searchable snapshot + // index to be allocated in that node, the shards should remain unassigned + internalCluster().stopNode(dataNodeHoldingSearchableSnapshot); + final IndexMetadata partiallyMountedIndexMetadata = getIndexMetadata(partiallyMountedIndex); + 
waitUntilAllShardsAreUnassigned(partiallyMountedIndexMetadata.getIndex()); + + { + // term query + TermQueryBuilder termQueryBuilder = QueryBuilders.termQuery("_tier", "data_content"); + List indicesToSearch = List.of(regularIndex, partiallyMountedIndex); + SearchRequest request = new SearchRequest().indices(indicesToSearch.toArray(new String[0])) + .source(new SearchSourceBuilder().query(termQueryBuilder)); + + assertResponse(client().search(request), searchResponse -> { + // as we excluded the frozen tier we shouldn't get any failures + assertThat(searchResponse.getFailedShards(), equalTo(0)); + // we should be receiving all the hits from the index that's in the data_content tier + assertNotNull(searchResponse.getHits().getTotalHits()); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo((long) numDocsRegularIndex)); + }); + } + + { + // termS query + TermsQueryBuilder termsQueryBuilder = QueryBuilders.termsQuery("_tier", "data_hot", "data_content"); + List indicesToSearch = List.of(regularIndex, partiallyMountedIndex); + SearchRequest request = new SearchRequest().indices(indicesToSearch.toArray(new String[0])) + .source(new SearchSourceBuilder().query(termsQueryBuilder)); + + assertResponse(client().search(request), searchResponse -> { + // as we excluded the frozen tier we shouldn't get any failures + assertThat(searchResponse.getFailedShards(), equalTo(0)); + // we should be receiving all the hits from the index that's in the data_content tier + assertNotNull(searchResponse.getHits().getTotalHits()); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo((long) numDocsRegularIndex)); + }); + } + + { + // bool term query + BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery().mustNot(QueryBuilders.termQuery("_tier", "data_frozen")); + List indicesToSearch = List.of(regularIndex, partiallyMountedIndex); + SearchRequest request = new SearchRequest().indices(indicesToSearch.toArray(new String[0])) + .source(new 
SearchSourceBuilder().query(boolQueryBuilder)); + + assertResponse(client().search(request), searchResponse -> { + // as we excluded the frozen tier we shouldn't get any failures + assertThat(searchResponse.getFailedShards(), equalTo(0)); + // we should be receiving all the hits from the index that's in the data_content tier + assertNotNull(searchResponse.getHits().getTotalHits()); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo((long) numDocsRegularIndex)); + }); + } + + { + // bool prefix, wildcard + BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery() + .mustNot(randomFrom(QueryBuilders.wildcardQuery("_tier", "dat*ozen"), QueryBuilders.prefixQuery("_tier", "data_fro"))); + List indicesToSearch = List.of(regularIndex, partiallyMountedIndex); + SearchRequest request = new SearchRequest().indices(indicesToSearch.toArray(new String[0])) + .source(new SearchSourceBuilder().query(boolQueryBuilder)); + + assertResponse(client().search(request), searchResponse -> { + // as we excluded the frozen tier we shouldn't get any failures + assertThat(searchResponse.getFailedShards(), equalTo(0)); + // we should be receiving all the hits from the index that's in the data_content tier + assertNotNull(searchResponse.getHits().getTotalHits()); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo((long) numDocsRegularIndex)); + }); + } + } + private void createIndexWithTimestampAndEventIngested(String indexName, int numShards, Settings extraSettings) throws IOException { assertAcked( indicesAdmin().prepareCreate(indexName) From 4fb7a4f1e98cb2934bf1427bb9dba0140a481dd6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Thu, 24 Oct 2024 14:07:06 +0200 Subject: [PATCH 357/449] [DOCS] Improve inference API documentation (#115235) Co-authored-by: David Kyle --- .../inference/inference-apis.asciidoc | 18 ++++ .../inference/service-elasticsearch.asciidoc | 94 +++++++++++++++++-- 
.../inference/service-elser.asciidoc | 3 +- 3 files changed, 104 insertions(+), 11 deletions(-) diff --git a/docs/reference/inference/inference-apis.asciidoc b/docs/reference/inference/inference-apis.asciidoc index b291b464be498..ddcff1abc7dce 100644 --- a/docs/reference/inference/inference-apis.asciidoc +++ b/docs/reference/inference/inference-apis.asciidoc @@ -34,6 +34,24 @@ Elastic –, then create an {infer} endpoint by the <>. Now use <> to perform <> on your data. + +[discrete] +[[default-enpoints]] +=== Default {infer} endpoints + +Your {es} deployment contains some preconfigured {infer} endpoints that make it easier for you to use them when defining `semantic_text` fields or {infer} processors. +The following list contains the default {infer} endpoints listed by `inference_id`: + +* `.elser-2-elasticsearch`: uses the {ml-docs}/ml-nlp-elser.html[ELSER] built-in trained model for `sparse_embedding` tasks (recommended for English language texts) +* `.multilingual-e5-small-elasticsearch`: uses the {ml-docs}/ml-nlp-e5.html[E5] built-in trained model for `text_embedding` tasks (recommended for non-English language texts) + +Use the `inference_id` of the endpoint in a <> field definition or when creating an <>. +The API call will automatically download and deploy the model, which might take a couple of minutes. +Default {infer} endpoints have {ml-docs}/ml-nlp-auto-scale.html#nlp-model-adaptive-allocations[adaptive allocations] enabled. +For these models, the minimum number of allocations is `0`. +If there is no {infer} activity that uses the endpoint, the number of allocations will scale down to `0` automatically after 15 minutes.
+ + include::delete-inference.asciidoc[] include::get-inference.asciidoc[] include::post-inference.asciidoc[] diff --git a/docs/reference/inference/service-elasticsearch.asciidoc b/docs/reference/inference/service-elasticsearch.asciidoc index efa0c78b8356f..259779a12134d 100644 --- a/docs/reference/inference/service-elasticsearch.asciidoc +++ b/docs/reference/inference/service-elasticsearch.asciidoc @@ -1,12 +1,9 @@ [[infer-service-elasticsearch]] === Elasticsearch {infer} service -Creates an {infer} endpoint to perform an {infer} task with the `elasticsearch` -service. +Creates an {infer} endpoint to perform an {infer} task with the `elasticsearch` service. -NOTE: If you use the E5 model through the `elasticsearch` service, the API -request will automatically download and deploy the model if it isn't downloaded -yet. +NOTE: If you use the ELSER or the E5 model through the `elasticsearch` service, the API request will automatically download and deploy the model if it isn't downloaded yet. [discrete] @@ -56,6 +53,11 @@ These settings are specific to the `elasticsearch` service. (Optional, object) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=adaptive-allocation] +`deployment_id`::: +(Optional, string) +The `deployment_id` of an existing trained model deployment. +When `deployment_id` is used, the `model_id` is optional. + `enabled`:::: (Optional, Boolean) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=adaptive-allocation-enabled] @@ -71,7 +73,7 @@ include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=adaptive-allocation-min-number] `model_id`::: (Required, string) The name of the model to use for the {infer} task. -It can be the ID of either a built-in model (for example, `.multilingual-e5-small` for E5) or a text embedding model already +It can be the ID of either a built-in model (for example, `.multilingual-e5-small` for E5) or a text embedding model already {ml-docs}/ml-nlp-import-model.html#ml-nlp-import-script[uploaded through Eland].
`num_allocations`::: @@ -98,15 +100,44 @@ Returns the document instead of only the index. Defaults to `true`. ===== +[discrete] +[[inference-example-elasticsearch-elser]] +==== ELSER via the `elasticsearch` service + +The following example shows how to create an {infer} endpoint called `my-elser-model` to perform a `sparse_embedding` task type. + +The API request below will automatically download the ELSER model if it isn't already downloaded and then deploy the model. + +[source,console] +------------------------------------------------------------ +PUT _inference/sparse_embedding/my-elser-model +{ + "service": "elasticsearch", + "service_settings": { + "adaptive_allocations": { <1> + "enabled": true, + "min_number_of_allocations": 1, + "max_number_of_allocations": 10 + }, + "num_threads": 1, + "model_id": ".elser_model_2" <2> + } +} +------------------------------------------------------------ +// TEST[skip:TBD] +<1> Adaptive allocations will be enabled with the minimum of 1 and the maximum of 10 allocations. +<2> The `model_id` must be the ID of one of the built-in ELSER models. +Valid values are `.elser_model_2` and `.elser_model_2_linux-x86_64`. +For further details, refer to the {ml-docs}/ml-nlp-elser.html[ELSER model documentation]. + + [discrete] [[inference-example-elasticsearch]] ==== E5 via the `elasticsearch` service -The following example shows how to create an {infer} endpoint called -`my-e5-model` to perform a `text_embedding` task type. +The following example shows how to create an {infer} endpoint called `my-e5-model` to perform a `text_embedding` task type. -The API request below will automatically download the E5 model if it isn't -already downloaded and then deploy the model. +The API request below will automatically download the E5 model if it isn't already downloaded and then deploy the model. 
[source,console] ------------------------------------------------------------ @@ -185,3 +216,46 @@ PUT _inference/text_embedding/my-e5-model } ------------------------------------------------------------ // TEST[skip:TBD] + + +[discrete] +[[inference-example-existing-deployment]] +==== Using an existing model deployment with the `elasticsearch` service + +The following example shows how to use an already existing model deployment when creating an {infer} endpoint. + +[source,console] +------------------------------------------------------------ +PUT _inference/sparse_embedding/use_existing_deployment +{ + "service": "elasticsearch", + "service_settings": { + "deployment_id": ".elser_model_2" <1> + } +} +------------------------------------------------------------ +// TEST[skip:TBD] +<1> The `deployment_id` of the already existing model deployment. + +The API response contains the `model_id`, and the threads and allocations settings from the model deployment: + +[source,console-result] +------------------------------------------------------------ +{ + "inference_id": "use_existing_deployment", + "task_type": "sparse_embedding", + "service": "elasticsearch", + "service_settings": { + "num_allocations": 2, + "num_threads": 1, + "model_id": ".elser_model_2", + "deployment_id": ".elser_model_2" + }, + "chunking_settings": { + "strategy": "sentence", + "max_chunk_size": 250, + "sentence_overlap": 1 + } +} +------------------------------------------------------------ +// NOTCONSOLE \ No newline at end of file diff --git a/docs/reference/inference/service-elser.asciidoc b/docs/reference/inference/service-elser.asciidoc index 6afc2a2e3ef65..521fab0375584 100644 --- a/docs/reference/inference/service-elser.asciidoc +++ b/docs/reference/inference/service-elser.asciidoc @@ -2,6 +2,7 @@ === ELSER {infer} service Creates an {infer} endpoint to perform an {infer} task with the `elser` service. +You can also deploy ELSER by using the <>. 
NOTE: The API request will automatically download and deploy the ELSER model if it isn't already downloaded. @@ -128,7 +129,7 @@ If using the Python client, you can set the `timeout` parameter to a higher valu [discrete] [[inference-example-elser-adaptive-allocation]] -==== Setting adaptive allocation for the ELSER service +==== Setting adaptive allocations for the ELSER service NOTE: For more information on how to optimize your ELSER endpoints, refer to {ml-docs}/ml-nlp-elser.html#elser-recommendations[the ELSER recommendations] section in the model documentation. To learn more about model autoscaling, refer to the {ml-docs}/ml-nlp-auto-scale.html[trained model autoscaling] page. From 4e4fe9c3a99faaded41b6c08d98bf8eda6f3ea6b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 24 Oct 2024 23:28:55 +1100 Subject: [PATCH 358/449] Mute org.elasticsearch.xpack.restart.MLModelDeploymentFullClusterRestartIT testDeploymentSurvivesRestart {cluster=UPGRADED} #115528 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 8b9c3cc6ce712..2d5349ed03b48 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -282,6 +282,9 @@ tests: - class: org.elasticsearch.xpack.security.FileSettingsRoleMappingsRestartIT method: testFileSettingsReprocessedOnRestartWithoutVersionChange issue: https://github.com/elastic/elasticsearch/issues/115450 +- class: org.elasticsearch.xpack.restart.MLModelDeploymentFullClusterRestartIT + method: testDeploymentSurvivesRestart {cluster=UPGRADED} + issue: https://github.com/elastic/elasticsearch/issues/115528 # Examples: # From f774d0ee8249fef76182f76d401a97e217c53981 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Thu, 24 Oct 2024 14:58:37 +0200 Subject: [PATCH 359/449] Remove Delivery team as codeowners for gradle build scripts (#115523) --- .github/CODEOWNERS | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/CODEOWNERS 
b/.github/CODEOWNERS index 5b98444c044d2..540da14402192 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -39,7 +39,6 @@ gradle @elastic/es-delivery build-conventions @elastic/es-delivery build-tools @elastic/es-delivery build-tools-internal @elastic/es-delivery -*.gradle @elastic/es-delivery .buildkite @elastic/es-delivery .ci @elastic/es-delivery .idea @elastic/es-delivery From 889d2c346e4ab498875b1bb0aaaee88c54f4c1a2 Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Thu, 24 Oct 2024 09:03:12 -0400 Subject: [PATCH 360/449] [ESQL] Enable "any type" aggregations on Date Nanos (#114438) Resolves #110002 Resolves #110003 Resolves #110005 Enable Values, Count, CountDistinct, Min and Max aggregations on date nanos. In the course of addressing this, I had to make some changes to AggregateMapper where it maps types into string names. I tried to refactor this once before (#110841) but at the time we decided not to go ahead with it. That bit me while working on this, and so I am trying again to refactor it. This time I've made a more localized change, just replacing the cascading if block with a switch. That will cause a compile time failure when future new data types are added, unless they correctly update this section. I've also done a small refactoring on the aggregators themselves, to make the supplier function consistent with the typeResolution. 
--------- Co-authored-by: Elastic Machine --- .../src/main/resources/date_nanos.csv | 1 + .../src/main/resources/date_nanos.csv-spec | 31 ++++++++++++++ .../xpack/esql/action/EsqlCapabilities.java | 5 +++ .../function/aggregate/CountDistinct.java | 40 ++++++++++-------- .../expression/function/aggregate/Max.java | 42 +++++++++---------- .../expression/function/aggregate/Min.java | 42 +++++++++---------- .../expression/function/aggregate/Values.java | 38 +++++++++-------- .../xpack/esql/planner/AggregateMapper.java | 31 ++++++-------- 8 files changed, 131 insertions(+), 99 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv index 029c3baf3cbfb..26b6f055221a6 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv @@ -6,5 +6,6 @@ millis:date,nanos:date_nanos,num:long 2023-10-23T13:33:34.937Z,2023-10-23T13:33:34.937193000Z,1698068014937193000 2023-10-23T12:27:28.948Z,2023-10-23T12:27:28.948000000Z,1698064048948000000 2023-10-23T12:15:03.360Z,2023-10-23T12:15:03.360103847Z,1698063303360103847 +2023-10-23T12:15:03.360Z,2023-10-23T12:15:03.360103847Z,1698063303360103847 1999-10-23T12:15:03.360Z,[2023-03-23T12:15:03.360103847Z, 2023-02-23T13:33:34.937193000Z, 2023-01-23T13:55:01.543123456Z], 0 1999-10-22T12:15:03.360Z,[2023-03-23T12:15:03.360103847Z, 2023-03-23T12:15:03.360103847Z, 2023-03-23T12:15:03.360103847Z], 0 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec index 515e2c9c6587f..d0edc1f07d021 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec @@ -216,6 +216,7 @@ l:long 1698068014937193000 1698064048948000000 1698063303360103847 
+1698063303360103847 ; long to date nanos, index version @@ -231,6 +232,7 @@ d:date_nanos 2023-10-23T13:33:34.937193000Z 2023-10-23T12:27:28.948000000Z 2023-10-23T12:15:03.360103847Z +2023-10-23T12:15:03.360103847Z ; date_nanos to date nanos, index version @@ -246,6 +248,7 @@ d:date_nanos 2023-10-23T13:33:34.937193000Z 2023-10-23T12:27:28.948000000Z 2023-10-23T12:15:03.360103847Z +2023-10-23T12:15:03.360103847Z ; attempt to cast the result of a fold to date nanos @@ -331,3 +334,31 @@ a:date_nanos [2023-02-23T13:33:34.937193000Z, 2023-03-23T12:15:03.360103847Z] [2023-03-23T12:15:03.360103847Z, 2023-03-23T12:15:03.360103847Z] ; + + +Max and Min of date nanos +required_capability: date_nanos_aggregations + +FROM date_nanos | STATS max = MAX(nanos), min = MIN(nanos); + +max:date_nanos | min:date_nanos +2023-10-23T13:55:01.543123456Z | 2023-01-23T13:55:01.543123456Z +; + +Count and count distinct of date nanos +required_capability: date_nanos_aggregations + +FROM date_nanos | WHERE millis > "2020-01-01" | STATS count = COUNT(nanos), count_distinct = COUNT_DISTINCT(nanos); + +count:long | count_distinct:long +8 | 7 +; + +Values aggregation on date nanos +required_capability: date_nanos_aggregations + +FROM date_nanos | WHERE millis > "2020-01-01" | STATS v = MV_SORT(VALUES(nanos), "DESC"); + +v:date_nanos +[2023-10-23T13:55:01.543123456Z, 2023-10-23T13:53:55.832987654Z, 2023-10-23T13:52:55.015787878Z, 2023-10-23T13:51:54.732102837Z, 2023-10-23T13:33:34.937193000Z, 2023-10-23T12:27:28.948000000Z, 2023-10-23T12:15:03.360103847Z] +; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index f22ad07a4c6f6..55236af648236 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -313,6 
+313,11 @@ public enum Cap { */ LEAST_GREATEST_FOR_DATENANOS(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + /** + * support aggregations on date nanos + */ + DATE_NANOS_AGGREGATIONS(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + /** * Support for datetime in least and greatest functions */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java index 756000dfbb187..5ae162f1fbb12 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java @@ -38,6 +38,8 @@ import java.io.IOException; import java.util.List; +import java.util.Map; +import java.util.function.BiFunction; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -53,6 +55,20 @@ public class CountDistinct extends AggregateFunction implements OptionalArgument CountDistinct::new ); + private static final Map, Integer, AggregatorFunctionSupplier>> SUPPLIERS = Map.ofEntries( + // Booleans ignore the precision because there are only two possible values anyway + Map.entry(DataType.BOOLEAN, (inputChannels, precision) -> new CountDistinctBooleanAggregatorFunctionSupplier(inputChannels)), + Map.entry(DataType.LONG, CountDistinctLongAggregatorFunctionSupplier::new), + Map.entry(DataType.DATETIME, CountDistinctLongAggregatorFunctionSupplier::new), + Map.entry(DataType.DATE_NANOS, CountDistinctLongAggregatorFunctionSupplier::new), + Map.entry(DataType.INTEGER, CountDistinctIntAggregatorFunctionSupplier::new), + Map.entry(DataType.DOUBLE, CountDistinctDoubleAggregatorFunctionSupplier::new), + Map.entry(DataType.KEYWORD, 
CountDistinctBytesRefAggregatorFunctionSupplier::new), + Map.entry(DataType.IP, CountDistinctBytesRefAggregatorFunctionSupplier::new), + Map.entry(DataType.VERSION, CountDistinctBytesRefAggregatorFunctionSupplier::new), + Map.entry(DataType.TEXT, CountDistinctBytesRefAggregatorFunctionSupplier::new) + ); + private static final int DEFAULT_PRECISION = 3000; private final Expression precision; @@ -102,7 +118,7 @@ public CountDistinct( Source source, @Param( name = "field", - type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, + type = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "text", "version" }, description = "Column or literal for which to count the number of distinct values." ) Expression field, @Param( @@ -179,7 +195,7 @@ protected TypeResolution resolveType() { .and( isType( field(), - dt -> dt != DataType.UNSIGNED_LONG && dt != DataType.SOURCE, + SUPPLIERS::containsKey, sourceText(), DEFAULT, "any exact type except unsigned_long, _source, or counter types" @@ -196,23 +212,11 @@ protected TypeResolution resolveType() { public AggregatorFunctionSupplier supplier(List inputChannels) { DataType type = field().dataType(); int precision = this.precision == null ? 
DEFAULT_PRECISION : ((Number) this.precision.fold()).intValue(); - if (type == DataType.BOOLEAN) { - // Booleans ignore the precision because there are only two possible values anyway - return new CountDistinctBooleanAggregatorFunctionSupplier(inputChannels); - } - if (type == DataType.DATETIME || type == DataType.LONG) { - return new CountDistinctLongAggregatorFunctionSupplier(inputChannels, precision); - } - if (type == DataType.INTEGER) { - return new CountDistinctIntAggregatorFunctionSupplier(inputChannels, precision); - } - if (type == DataType.DOUBLE) { - return new CountDistinctDoubleAggregatorFunctionSupplier(inputChannels, precision); - } - if (DataType.isString(type) || type == DataType.IP || type == DataType.VERSION) { - return new CountDistinctBytesRefAggregatorFunctionSupplier(inputChannels, precision); + if (SUPPLIERS.containsKey(type) == false) { + // If the type checking did its job, this should never happen + throw EsqlIllegalArgumentException.illegalDataType(type); } - throw EsqlIllegalArgumentException.illegalDataType(type); + return SUPPLIERS.get(type).apply(inputChannels, precision); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java index 6119b2ce58465..ee16193efdccc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java @@ -32,16 +32,28 @@ import java.io.IOException; import java.util.List; +import java.util.Map; +import java.util.function.Function; import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; -import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; -import static 
org.elasticsearch.xpack.esql.core.type.DataType.isRepresentable; -import static org.elasticsearch.xpack.esql.core.type.DataType.isSpatial; public class Max extends AggregateFunction implements ToAggregator, SurrogateExpression { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Max", Max::new); + private static final Map, AggregatorFunctionSupplier>> SUPPLIERS = Map.ofEntries( + Map.entry(DataType.BOOLEAN, MaxBooleanAggregatorFunctionSupplier::new), + Map.entry(DataType.LONG, MaxLongAggregatorFunctionSupplier::new), + Map.entry(DataType.DATETIME, MaxLongAggregatorFunctionSupplier::new), + Map.entry(DataType.DATE_NANOS, MaxLongAggregatorFunctionSupplier::new), + Map.entry(DataType.INTEGER, MaxIntAggregatorFunctionSupplier::new), + Map.entry(DataType.DOUBLE, MaxDoubleAggregatorFunctionSupplier::new), + Map.entry(DataType.IP, MaxIpAggregatorFunctionSupplier::new), + Map.entry(DataType.KEYWORD, MaxBytesRefAggregatorFunctionSupplier::new), + Map.entry(DataType.TEXT, MaxBytesRefAggregatorFunctionSupplier::new), + Map.entry(DataType.VERSION, MaxBytesRefAggregatorFunctionSupplier::new) + ); + @FunctionInfo( returnType = { "boolean", "double", "integer", "long", "date", "ip", "keyword", "text", "long", "version" }, description = "The maximum value of a field.", @@ -98,7 +110,7 @@ public Max replaceChildren(List newChildren) { protected TypeResolution resolveType() { return TypeResolutions.isType( field(), - t -> isRepresentable(t) && t != UNSIGNED_LONG && isSpatial(t) == false, + SUPPLIERS::containsKey, sourceText(), DEFAULT, "representable except unsigned_long and spatial types" @@ -113,25 +125,11 @@ public DataType dataType() { @Override public final AggregatorFunctionSupplier supplier(List inputChannels) { DataType type = field().dataType(); - if (type == DataType.BOOLEAN) { - return new MaxBooleanAggregatorFunctionSupplier(inputChannels); - } - if (type == DataType.LONG || type == DataType.DATETIME) { - return 
new MaxLongAggregatorFunctionSupplier(inputChannels); - } - if (type == DataType.INTEGER) { - return new MaxIntAggregatorFunctionSupplier(inputChannels); - } - if (type == DataType.DOUBLE) { - return new MaxDoubleAggregatorFunctionSupplier(inputChannels); - } - if (type == DataType.IP) { - return new MaxIpAggregatorFunctionSupplier(inputChannels); - } - if (type == DataType.VERSION || DataType.isString(type)) { - return new MaxBytesRefAggregatorFunctionSupplier(inputChannels); + if (SUPPLIERS.containsKey(type) == false) { + // If the type checking did its job, this should never happen + throw EsqlIllegalArgumentException.illegalDataType(type); } - throw EsqlIllegalArgumentException.illegalDataType(type); + return SUPPLIERS.get(type).apply(inputChannels); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java index a1492f79da393..7aaa41ea6ab11 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java @@ -32,16 +32,28 @@ import java.io.IOException; import java.util.List; +import java.util.Map; +import java.util.function.Function; import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; -import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; -import static org.elasticsearch.xpack.esql.core.type.DataType.isRepresentable; -import static org.elasticsearch.xpack.esql.core.type.DataType.isSpatial; public class Min extends AggregateFunction implements ToAggregator, SurrogateExpression { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Min", Min::new); + private static final 
Map, AggregatorFunctionSupplier>> SUPPLIERS = Map.ofEntries( + Map.entry(DataType.BOOLEAN, MinBooleanAggregatorFunctionSupplier::new), + Map.entry(DataType.LONG, MinLongAggregatorFunctionSupplier::new), + Map.entry(DataType.DATETIME, MinLongAggregatorFunctionSupplier::new), + Map.entry(DataType.DATE_NANOS, MinLongAggregatorFunctionSupplier::new), + Map.entry(DataType.INTEGER, MinIntAggregatorFunctionSupplier::new), + Map.entry(DataType.DOUBLE, MinDoubleAggregatorFunctionSupplier::new), + Map.entry(DataType.IP, MinIpAggregatorFunctionSupplier::new), + Map.entry(DataType.VERSION, MinBytesRefAggregatorFunctionSupplier::new), + Map.entry(DataType.KEYWORD, MinBytesRefAggregatorFunctionSupplier::new), + Map.entry(DataType.TEXT, MinBytesRefAggregatorFunctionSupplier::new) + ); + @FunctionInfo( returnType = { "boolean", "double", "integer", "long", "date", "ip", "keyword", "text", "long", "version" }, description = "The minimum value of a field.", @@ -98,7 +110,7 @@ public Min withFilter(Expression filter) { protected TypeResolution resolveType() { return TypeResolutions.isType( field(), - t -> isRepresentable(t) && t != UNSIGNED_LONG && isSpatial(t) == false, + SUPPLIERS::containsKey, sourceText(), DEFAULT, "representable except unsigned_long and spatial types" @@ -113,25 +125,11 @@ public DataType dataType() { @Override public final AggregatorFunctionSupplier supplier(List inputChannels) { DataType type = field().dataType(); - if (type == DataType.BOOLEAN) { - return new MinBooleanAggregatorFunctionSupplier(inputChannels); - } - if (type == DataType.LONG || type == DataType.DATETIME) { - return new MinLongAggregatorFunctionSupplier(inputChannels); - } - if (type == DataType.INTEGER) { - return new MinIntAggregatorFunctionSupplier(inputChannels); - } - if (type == DataType.DOUBLE) { - return new MinDoubleAggregatorFunctionSupplier(inputChannels); - } - if (type == DataType.IP) { - return new MinIpAggregatorFunctionSupplier(inputChannels); - } - if (type == 
DataType.VERSION || DataType.isString(type)) { - return new MinBytesRefAggregatorFunctionSupplier(inputChannels); + if (SUPPLIERS.containsKey(type) == false) { + // If the type checking did its job, this should never happen + throw EsqlIllegalArgumentException.illegalDataType(type); } - throw EsqlIllegalArgumentException.illegalDataType(type); + return SUPPLIERS.get(type).apply(inputChannels); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java index a844b981c95d6..8d576839c3c5c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java @@ -29,14 +29,28 @@ import java.io.IOException; import java.util.List; +import java.util.Map; +import java.util.function.Function; import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; -import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; public class Values extends AggregateFunction implements ToAggregator { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Values", Values::new); + private static final Map, AggregatorFunctionSupplier>> SUPPLIERS = Map.ofEntries( + Map.entry(DataType.INTEGER, ValuesIntAggregatorFunctionSupplier::new), + Map.entry(DataType.LONG, ValuesLongAggregatorFunctionSupplier::new), + Map.entry(DataType.DATETIME, ValuesLongAggregatorFunctionSupplier::new), + Map.entry(DataType.DATE_NANOS, ValuesLongAggregatorFunctionSupplier::new), + Map.entry(DataType.DOUBLE, ValuesDoubleAggregatorFunctionSupplier::new), + Map.entry(DataType.KEYWORD, ValuesBytesRefAggregatorFunctionSupplier::new), + 
Map.entry(DataType.TEXT, ValuesBytesRefAggregatorFunctionSupplier::new), + Map.entry(DataType.IP, ValuesBytesRefAggregatorFunctionSupplier::new), + Map.entry(DataType.VERSION, ValuesBytesRefAggregatorFunctionSupplier::new), + Map.entry(DataType.BOOLEAN, ValuesBooleanAggregatorFunctionSupplier::new) + ); + @FunctionInfo( returnType = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, preview = true, @@ -98,7 +112,7 @@ public DataType dataType() { protected TypeResolution resolveType() { return TypeResolutions.isType( field(), - dt -> DataType.isSpatial(dt) == false && dt != UNSIGNED_LONG, + SUPPLIERS::containsKey, sourceText(), DEFAULT, "any type except unsigned_long and spatial types" @@ -108,22 +122,10 @@ protected TypeResolution resolveType() { @Override public AggregatorFunctionSupplier supplier(List inputChannels) { DataType type = field().dataType(); - if (type == DataType.INTEGER) { - return new ValuesIntAggregatorFunctionSupplier(inputChannels); - } - if (type == DataType.LONG || type == DataType.DATETIME) { - return new ValuesLongAggregatorFunctionSupplier(inputChannels); - } - if (type == DataType.DOUBLE) { - return new ValuesDoubleAggregatorFunctionSupplier(inputChannels); - } - if (DataType.isString(type) || type == DataType.IP || type == DataType.VERSION) { - return new ValuesBytesRefAggregatorFunctionSupplier(inputChannels); - } - if (type == DataType.BOOLEAN) { - return new ValuesBooleanAggregatorFunctionSupplier(inputChannels); + if (SUPPLIERS.containsKey(type) == false) { + // If the type checking did its job, this should never happen + throw EsqlIllegalArgumentException.illegalDataType(type); } - // TODO cartesian_point, geo_point - throw EsqlIllegalArgumentException.illegalDataType(type); + return SUPPLIERS.get(type).apply(inputChannels); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java index c322135198262..3e81c2a2c1101 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java @@ -297,25 +297,18 @@ private static String dataTypeToString(DataType type, Class aggClass) { if (aggClass == Top.class && type.equals(DataType.IP)) { return "Ip"; } - if (type.equals(DataType.BOOLEAN)) { - return "Boolean"; - } else if (type.equals(DataType.INTEGER) || type.equals(DataType.COUNTER_INTEGER)) { - return "Int"; - } else if (type.equals(DataType.LONG) || type.equals(DataType.DATETIME) || type.equals(DataType.COUNTER_LONG)) { - return "Long"; - } else if (type.equals(DataType.DOUBLE) || type.equals(DataType.COUNTER_DOUBLE)) { - return "Double"; - } else if (type.equals(DataType.KEYWORD) - || type.equals(DataType.IP) - || type.equals(DataType.VERSION) - || type.equals(DataType.TEXT)) { - return "BytesRef"; - } else if (type.equals(GEO_POINT)) { - return "GeoPoint"; - } else if (type.equals(CARTESIAN_POINT)) { - return "CartesianPoint"; - } else { + + return switch (type) { + case DataType.BOOLEAN -> "Boolean"; + case DataType.INTEGER, DataType.COUNTER_INTEGER -> "Int"; + case DataType.LONG, DataType.DATETIME, DataType.COUNTER_LONG, DataType.DATE_NANOS -> "Long"; + case DataType.DOUBLE, DataType.COUNTER_DOUBLE -> "Double"; + case DataType.KEYWORD, DataType.IP, DataType.VERSION, DataType.TEXT -> "BytesRef"; + case GEO_POINT -> "GeoPoint"; + case CARTESIAN_POINT -> "CartesianPoint"; + case SEMANTIC_TEXT, UNSUPPORTED, NULL, UNSIGNED_LONG, SHORT, BYTE, FLOAT, HALF_FLOAT, SCALED_FLOAT, OBJECT, SOURCE, DATE_PERIOD, + TIME_DURATION, CARTESIAN_SHAPE, GEO_SHAPE, DOC_DATA_TYPE, TSID_DATA_TYPE, PARTIAL_AGG -> throw new EsqlIllegalArgumentException("illegal agg type: " + type.typeName()); - } + }; } } From 
28715b791a88de6b3f2ccb6b4f097a9881f01007 Mon Sep 17 00:00:00 2001 From: mspielberg <9729801+mspielberg@users.noreply.github.com> Date: Thu, 24 Oct 2024 06:06:39 -0700 Subject: [PATCH 361/449] Add documentation for minimum_should_match (#113043) --- .../reference/query-dsl/terms-set-query.asciidoc | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/docs/reference/query-dsl/terms-set-query.asciidoc b/docs/reference/query-dsl/terms-set-query.asciidoc index 2abfe54d53976..27717af3ac171 100644 --- a/docs/reference/query-dsl/terms-set-query.asciidoc +++ b/docs/reference/query-dsl/terms-set-query.asciidoc @@ -159,12 +159,22 @@ GET /job-candidates/_search `terms`:: + -- -(Required, array of strings) Array of terms you wish to find in the provided +(Required, array) Array of terms you wish to find in the provided ``. To return a document, a required number of terms must exactly match the field values, including whitespace and capitalization. -The required number of matching terms is defined in the -`minimum_should_match_field` or `minimum_should_match_script` parameter. +The required number of matching terms is defined in the `minimum_should_match`, +`minimum_should_match_field` or `minimum_should_match_script` parameters. Exactly +one of these parameters must be provided. +-- + +`minimum_should_match`:: ++ +-- +(Optional) Specification for the number of matching terms required to return +a document. + +For valid values, see <>. 
-- `minimum_should_match_field`:: From 6980fc62531923b68accc204fc25e7dea59760e3 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Thu, 24 Oct 2024 15:11:10 +0200 Subject: [PATCH 362/449] [DOCS] Add text_expansion deprecation usage note (#115529) --- docs/reference/query-dsl/text-expansion-query.asciidoc | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/reference/query-dsl/text-expansion-query.asciidoc b/docs/reference/query-dsl/text-expansion-query.asciidoc index 235a413df686f..5c7bce8c3fcf0 100644 --- a/docs/reference/query-dsl/text-expansion-query.asciidoc +++ b/docs/reference/query-dsl/text-expansion-query.asciidoc @@ -7,6 +7,13 @@ deprecated[8.15.0, This query has been replaced by <>.] +.Deprecation usage note +**** +You can continue using `rank_features` fields with `text_expansion` queries in the current version. +However, if you plan to upgrade, we recommend updating mappings to use the `sparse_vector` field type and <>. +This will allow you to take advantage of the new capabilities and improvements available in newer versions. +**** + The text expansion query uses a {nlp} model to convert the query text into a list of token-weight pairs which are then used in a query against a <> or <> field. From 833f2fb9185072b0f8edcd2576d512ff91810277 Mon Sep 17 00:00:00 2001 From: Stef Nestor <26751266+stefnestor@users.noreply.github.com> Date: Thu, 24 Oct 2024 07:27:23 -0600 Subject: [PATCH 363/449] (Doc+) link video for resolving max shards open (#115480) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 👋 howdy team! @anniegale9538 and my [video](https://www.youtube.com/watch?v=tZKbDegt4-M) demonstrates how to resolve `max shards open` errors as a common support ask. 
--- docs/reference/how-to/size-your-shards.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/how-to/size-your-shards.asciidoc b/docs/reference/how-to/size-your-shards.asciidoc index 8770ec373bb18..86f195d030223 100644 --- a/docs/reference/how-to/size-your-shards.asciidoc +++ b/docs/reference/how-to/size-your-shards.asciidoc @@ -572,7 +572,7 @@ PUT _cluster/settings } ---- -For more information, see <>. +See this https://www.youtube.com/watch?v=tZKbDegt4-M[fixing "max shards open" video] for an example troubleshooting walkthrough. For more information, see <>. [discrete] [[troubleshooting-max-docs-limit]] From e99607b5895880d11b4981279314bcbb6b0fe3a9 Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Thu, 24 Oct 2024 16:29:14 +0300 Subject: [PATCH 364/449] Adding breaking change entry for retrievers (#115399) --- docs/changelog/115399.yaml | 29 +++++++++++++++++++ .../TextSimilarityRankRetrieverBuilder.java | 2 +- .../xpack/rank/rrf/RRFRetrieverBuilder.java | 2 +- 3 files changed, 31 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/115399.yaml diff --git a/docs/changelog/115399.yaml b/docs/changelog/115399.yaml new file mode 100644 index 0000000000000..9f69657a5d167 --- /dev/null +++ b/docs/changelog/115399.yaml @@ -0,0 +1,29 @@ +pr: 115399 +summary: Adding breaking change entry for retrievers +area: Search +type: breaking +issues: [] +breaking: + title: Reworking RRF retriever to be evaluated during rewrite phase + area: REST API + details: |- + In this release (8.16), we have introduced major changes to the retrievers framework + and how they can be evaluated, focusing mainly on compound retrievers + like `rrf` and `text_similarity_reranker`, which allowed us to support full + composability (i.e. any retriever can be nested under any compound retriever), + as well as supporting additional search features like collapsing, explaining, + aggregations, and highlighting. 
+ + To ensure consistency, and given that this rework is not available until 8.16, + `rrf` and `text_similarity_reranker` retriever queries would now + throw an exception in a mixed cluster scenario, where there are nodes + both in current or later (i.e. >= 8.16) and previous ( <= 8.15) versions. + + As part of the rework, we have also removed the `_rank` property from + the responses of an `rrf` retriever. + impact: |- + - Users will not be able to use the `rrf` and `text_similarity_reranker` retrievers in a mixed cluster scenario + with previous releases (i.e. prior to 8.16), and the request will throw an `IllegalArgumentException`. + - `_rank` has now been removed from the output of the `rrf` retrievers so trying to directly parse the field + will throw an exception + notable: false diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java index 342199dc51db8..91b6cdc61afe4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java @@ -81,7 +81,7 @@ public static TextSimilarityRankRetrieverBuilder fromXContent(XContentParser par throw new ParsingException(parser.getTokenLocation(), "unknown retriever [" + TextSimilarityRankBuilder.NAME + "]"); } if (context.clusterSupportsFeature(TEXT_SIMILARITY_RERANKER_COMPOSITION_SUPPORTED) == false) { - throw new UnsupportedOperationException( + throw new IllegalArgumentException( "[text_similarity_reranker] retriever composition feature is not supported by all nodes in the cluster" ); } diff --git 
a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java index c3c9f19cde6ef..792ff4eac3893 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java @@ -83,7 +83,7 @@ public static RRFRetrieverBuilder fromXContent(XContentParser parser, RetrieverP throw new ParsingException(parser.getTokenLocation(), "unknown retriever [" + NAME + "]"); } if (context.clusterSupportsFeature(RRF_RETRIEVER_COMPOSITION_SUPPORTED) == false) { - throw new UnsupportedOperationException("[rrf] retriever composition feature is not supported by all nodes in the cluster"); + throw new IllegalArgumentException("[rrf] retriever composition feature is not supported by all nodes in the cluster"); } if (RRFRankPlugin.RANK_RRF_FEATURE.check(XPackPlugin.getSharedLicenseState()) == false) { throw LicenseUtils.newComplianceException("Reciprocal Rank Fusion (RRF)"); From 28882e86b200e9dfef47e6615bfd993d35f17abd Mon Sep 17 00:00:00 2001 From: Alexey Ivanov Date: Thu, 24 Oct 2024 14:30:32 +0100 Subject: [PATCH 365/449] Report JVM stats for all memory pools (97046) (#115117) This fix allows reporting of all JVM memory pools sizes in JVM stats --- docs/changelog/115117.yaml | 6 ++++++ .../elasticsearch/monitor/jvm/GcNames.java | 15 +++++++++++++- .../elasticsearch/monitor/jvm/JvmStats.java | 5 +---- .../monitor/jvm/JvmStatsTests.java | 20 +++++++++++++++++-- 4 files changed, 39 insertions(+), 7 deletions(-) create mode 100644 docs/changelog/115117.yaml diff --git a/docs/changelog/115117.yaml b/docs/changelog/115117.yaml new file mode 100644 index 0000000000000..de2defcd46afd --- /dev/null +++ b/docs/changelog/115117.yaml @@ -0,0 +1,6 @@ +pr: 115117 +summary: Report JVM stats for all memory pools (97046) +area: Infra/Core 
+type: bug +issues: + - 97046 diff --git a/server/src/main/java/org/elasticsearch/monitor/jvm/GcNames.java b/server/src/main/java/org/elasticsearch/monitor/jvm/GcNames.java index 9db8e8f414d5c..3494204c330c0 100644 --- a/server/src/main/java/org/elasticsearch/monitor/jvm/GcNames.java +++ b/server/src/main/java/org/elasticsearch/monitor/jvm/GcNames.java @@ -15,8 +15,14 @@ public class GcNames { public static final String OLD = "old"; public static final String SURVIVOR = "survivor"; + private GcNames() {} + /** - * Resolves the GC type by its memory pool name ({@link java.lang.management.MemoryPoolMXBean#getName()}. + * Resolves the memory area name by the memory pool name provided by {@link java.lang.management.MemoryPoolMXBean#getName()} + * + * @param poolName the name of the memory pool from {@link java.lang.management.MemoryPoolMXBean} + * @param defaultName the name to return if the pool name does not match any known memory area + * @return memory area name corresponding to the pool name or {@code defaultName} if no match is found */ public static String getByMemoryPoolName(String poolName, String defaultName) { if ("Eden Space".equals(poolName) @@ -40,6 +46,13 @@ public static String getByMemoryPoolName(String poolName, String defaultName) { return defaultName; } + /** + * Resolves the GC type by the GC name provided by {@link java.lang.management.GarbageCollectorMXBean#getName()} + * + * @param gcName the name of the GC from {@link java.lang.management.GarbageCollectorMXBean} + * @param defaultName the name to return if the GC name does not match any known GC type + * @return GC type corresponding to the GC name or {@code defaultName} if no match is found + */ public static String getByGcName(String gcName, String defaultName) { if ("Copy".equals(gcName) || "PS Scavenge".equals(gcName) || "ParNew".equals(gcName) || "G1 Young Generation".equals(gcName)) { return YOUNG; diff --git a/server/src/main/java/org/elasticsearch/monitor/jvm/JvmStats.java 
b/server/src/main/java/org/elasticsearch/monitor/jvm/JvmStats.java index 0a2763474b8df..e6b109207fdf3 100644 --- a/server/src/main/java/org/elasticsearch/monitor/jvm/JvmStats.java +++ b/server/src/main/java/org/elasticsearch/monitor/jvm/JvmStats.java @@ -64,10 +64,7 @@ public static JvmStats jvmStats() { List pools = new ArrayList<>(); for (MemoryPoolMXBean memoryPoolMXBean : memoryPoolMXBeans) { try { - String name = GcNames.getByMemoryPoolName(memoryPoolMXBean.getName(), null); - if (name == null) { // if we can't resolve it, its not interesting.... (Per Gen, Code Cache) - continue; - } + String name = GcNames.getByMemoryPoolName(memoryPoolMXBean.getName(), memoryPoolMXBean.getName()); MemoryUsage usage = memoryPoolMXBean.getUsage(); MemoryUsage peakUsage = memoryPoolMXBean.getPeakUsage(); pools.add( diff --git a/server/src/test/java/org/elasticsearch/monitor/jvm/JvmStatsTests.java b/server/src/test/java/org/elasticsearch/monitor/jvm/JvmStatsTests.java index 12fa776dd7efd..28976d803ff53 100644 --- a/server/src/test/java/org/elasticsearch/monitor/jvm/JvmStatsTests.java +++ b/server/src/test/java/org/elasticsearch/monitor/jvm/JvmStatsTests.java @@ -13,17 +13,22 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.util.Arrays; import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.StreamSupport; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.hasSize; public class JvmStatsTests extends ESTestCase { - public void testJvmStats() throws IOException { + public void testJvmStats() { JvmStats stats = JvmStats.jvmStats(); assertNotNull(stats); assertNotNull(stats.getUptime()); 
@@ -40,6 +45,17 @@ public void testJvmStats() throws IOException { assertNotNull(mem.getHeapUsedPercent()); assertThat(mem.getHeapUsedPercent(), anyOf(equalTo((short) -1), greaterThanOrEqualTo((short) 0))); + // Memory pools + Map memoryPools = StreamSupport.stream(stats.getMem().spliterator(), false) + .collect(Collectors.toMap(JvmStats.MemoryPool::getName, Function.identity())); + assertThat(memoryPools, hasKey(GcNames.YOUNG)); + assertThat(memoryPools, hasKey(GcNames.OLD)); + assertThat(memoryPools, hasKey("Metaspace")); + assertThat(memoryPools.keySet(), hasSize(greaterThan(3))); + for (JvmStats.MemoryPool memoryPool : memoryPools.values()) { + assertThat(memoryPool.getUsed().getBytes(), greaterThan(0L)); + } + // Threads JvmStats.Threads threads = stats.getThreads(); assertNotNull(threads); From 37c7137f39d13ce36785c0bed01f2f058da886f8 Mon Sep 17 00:00:00 2001 From: Gergely Kalapos Date: Thu, 24 Oct 2024 15:49:45 +0200 Subject: [PATCH 366/449] [otel-data] Add more kubernetes aliases (#115429) * Add more kubernetes aliases * Update docs/changelog/115429.yaml * Review feedback --------- Co-authored-by: Elastic Machine --- docs/changelog/115429.yaml | 5 ++ .../semconv-resource-to-ecs@mappings.yaml | 48 +++++++++++++++++++ .../rest-api-spec/test/20_logs_tests.yml | 37 ++++++++++++++ 3 files changed, 90 insertions(+) create mode 100644 docs/changelog/115429.yaml diff --git a/docs/changelog/115429.yaml b/docs/changelog/115429.yaml new file mode 100644 index 0000000000000..ddf3c69183000 --- /dev/null +++ b/docs/changelog/115429.yaml @@ -0,0 +1,5 @@ +pr: 115429 +summary: "[otel-data] Add more kubernetes aliases" +area: Data streams +type: bug +issues: [] diff --git a/x-pack/plugin/otel-data/src/main/resources/component-templates/semconv-resource-to-ecs@mappings.yaml b/x-pack/plugin/otel-data/src/main/resources/component-templates/semconv-resource-to-ecs@mappings.yaml index 6645e7d282520..eb5cd6d37af83 100644 --- 
a/x-pack/plugin/otel-data/src/main/resources/component-templates/semconv-resource-to-ecs@mappings.yaml +++ b/x-pack/plugin/otel-data/src/main/resources/component-templates/semconv-resource-to-ecs@mappings.yaml @@ -56,21 +56,45 @@ template: os.version: type: keyword ignore_above: 1024 + k8s.container.name: + type: keyword + ignore_above: 1024 + k8s.cronjob.name: + type: keyword + ignore_above: 1024 + k8s.daemonset.name: + type: keyword + ignore_above: 1024 k8s.deployment.name: type: keyword ignore_above: 1024 + k8s.job.name: + type: keyword + ignore_above: 1024 k8s.namespace.name: type: keyword ignore_above: 1024 + k8s.node.hostname: + type: keyword + ignore_above: 1024 k8s.node.name: type: keyword ignore_above: 1024 + k8s.node.uid: + type: keyword + ignore_above: 1024 k8s.pod.name: type: keyword ignore_above: 1024 k8s.pod.uid: type: keyword ignore_above: 1024 + k8s.replicaset.name: + type: keyword + ignore_above: 1024 + k8s.statefulset.name: + type: keyword + ignore_above: 1024 service.node.name: type: alias path: resource.attributes.service.instance.id @@ -122,6 +146,30 @@ template: kubernetes.pod.uid: type: alias path: resource.attributes.k8s.pod.uid + kubernetes.container.name: + type: alias + path: resource.attributes.k8s.container.name + kubernetes.cronjob.name: + type: alias + path: resource.attributes.k8s.cronjob.name + kubernetes.job.name: + type: alias + path: resource.attributes.k8s.job.name + kubernetes.statefulset.name: + type: alias + path: resource.attributes.k8s.statefulset.name + kubernetes.daemonset.name: + type: alias + path: resource.attributes.k8s.daemonset.name + kubernetes.replicaset.name: + type: alias + path: resource.attributes.k8s.replicaset.name + kubernetes.node.uid: + type: alias + path: resource.attributes.k8s.node.uid + kubernetes.node.hostname: + type: alias + path: resource.attributes.k8s.node.hostname # Below are non-ECS fields that may be used by Kibana. 
service.language.name: type: alias diff --git a/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_logs_tests.yml b/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_logs_tests.yml index 6bc0cee78be4f..63966e601a3cb 100644 --- a/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_logs_tests.yml +++ b/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_logs_tests.yml @@ -187,3 +187,40 @@ host.name pass-through: - length: { hits.hits: 1 } - match: { hits.hits.0.fields.resource\.attributes\.host\.name: [ "localhost" ] } - match: { hits.hits.0.fields.host\.name: [ "localhost" ] } +--- +"kubernetes.* -> resource.attributes.k8s.* aliases": + - do: + bulk: + index: logs-generic.otel-default + refresh: true + body: + - create: { } + - "@timestamp": 2024-07-18T14:48:33.467654000Z + data_stream: + dataset: generic.otel + namespace: default + resource: + attributes: + k8s.container.name: myContainerName + k8s.cronjob.name: myCronJobName + k8s.job.name: myJobName + k8s.statefulset.name: myStatefulsetName + k8s.daemonset.name: myDaemonsetName + k8s.replicaset.name: myReplicasetName + k8s.node.uid: myNodeUid + k8s.node.hostname: myNodeHostname + - is_false: errors + - do: + search: + index: logs-generic.otel-default + body: + fields: ["kubernetes.container.name", "kubernetes.cronjob.name", "kubernetes.job.name", "kubernetes.statefulset.name", "kubernetes.daemonset.name", "kubernetes.replicaset.name", "kubernetes.node.uid", "kubernetes.node.hostname" ] + - length: { hits.hits: 1 } + - match: { hits.hits.0.fields.kubernetes\.container\.name : ["myContainerName"] } + - match: { hits.hits.0.fields.kubernetes\.cronjob\.name : ["myCronJobName"] } + - match: { hits.hits.0.fields.kubernetes\.job\.name : ["myJobName"] } + - match: { hits.hits.0.fields.kubernetes\.statefulset\.name : ["myStatefulsetName"] } + - match: { hits.hits.0.fields.kubernetes\.daemonset\.name : ["myDaemonsetName"] } + - match: { 
hits.hits.0.fields.kubernetes\.replicaset\.name : ["myReplicasetName"] } + - match: { hits.hits.0.fields.kubernetes\.node\.uid : ["myNodeUid"] } + - match: { hits.hits.0.fields.kubernetes\.node\.hostname : ["myNodeHostname"] } From 31ede8fd284a79e1f62088d9800e59701f42b79a Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Thu, 24 Oct 2024 15:57:49 +0200 Subject: [PATCH 367/449] Update 8.12.0.asciidoc (#115303) (#115546) Fixing confusing format Co-authored-by: Johannes Mahne --- docs/reference/release-notes/8.12.0.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/release-notes/8.12.0.asciidoc b/docs/reference/release-notes/8.12.0.asciidoc index bfa99401f41a2..bd0ae032ef0b9 100644 --- a/docs/reference/release-notes/8.12.0.asciidoc +++ b/docs/reference/release-notes/8.12.0.asciidoc @@ -11,7 +11,7 @@ Also see <>. + When using `int8_hnsw` and the default `confidence_interval` (or any `confidence_interval` less than `1.0`) and when there are deleted documents in the segments, quantiles may fail to build and prevent merging. - ++ This issue is fixed in 8.12.1. 
* When upgrading clusters from version 8.11.4 or earlier, if your cluster contains non-master-eligible nodes, From aae3b3499a7e397bbd2f2cd7df0e218ec3f12caf Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 25 Oct 2024 00:57:55 +1100 Subject: [PATCH 368/449] Mute org.elasticsearch.test.apmintegration.MetricsApmIT testApmIntegration #115415 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 2d5349ed03b48..1ee677b14fea1 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -285,6 +285,9 @@ tests: - class: org.elasticsearch.xpack.restart.MLModelDeploymentFullClusterRestartIT method: testDeploymentSurvivesRestart {cluster=UPGRADED} issue: https://github.com/elastic/elasticsearch/issues/115528 +- class: org.elasticsearch.test.apmintegration.MetricsApmIT + method: testApmIntegration + issue: https://github.com/elastic/elasticsearch/issues/115415 # Examples: # From fffb98ac6c68cc633afbb855f697d514f4185c9b Mon Sep 17 00:00:00 2001 From: David Kyle Date: Thu, 24 Oct 2024 15:12:41 +0100 Subject: [PATCH 369/449] [ML] Set max allocations to 32 in default configs (#115518) --- .../services/elasticsearch/ElasticsearchInternalService.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 6732e5719b897..a0235f74ce511 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -859,7 +859,7 @@ private List defaultConfigs(boolean useLinuxOptimizedModel) { 
null, 1, useLinuxOptimizedModel ? ELSER_V2_MODEL_LINUX_X86 : ELSER_V2_MODEL, - new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 8) + new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 32) ), ElserMlNodeTaskSettings.DEFAULT, null // default chunking settings @@ -872,7 +872,7 @@ private List defaultConfigs(boolean useLinuxOptimizedModel) { null, 1, useLinuxOptimizedModel ? MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86 : MULTILINGUAL_E5_SMALL_MODEL_ID, - new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 8) + new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 32) ), null // default chunking settings ); From 7d829fa51a13b2150ce7c0a08e3f5f66c9ee8bfb Mon Sep 17 00:00:00 2001 From: David Kyle Date: Thu, 24 Oct 2024 15:14:29 +0100 Subject: [PATCH 370/449] [ML] Prevent NPE if model assignment is removed while waiting to start (#115430) --- docs/changelog/115430.yaml | 5 +++++ .../action/TransportStartTrainedModelDeploymentAction.java | 6 +++++- 2 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/115430.yaml diff --git a/docs/changelog/115430.yaml b/docs/changelog/115430.yaml new file mode 100644 index 0000000000000..c2903f7751012 --- /dev/null +++ b/docs/changelog/115430.yaml @@ -0,0 +1,5 @@ +pr: 115430 +summary: Prevent NPE if model assignment is removed while waiting to start +area: Machine Learning +type: bug +issues: [] diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java index 0bda2de2ce9ae..5fd70ce71cd24 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java @@ -671,7 +671,11 @@ public boolean test(ClusterState clusterState) { deploymentId ).orElse(null); if 
(trainedModelAssignment == null) { - // Something weird happened, it should NEVER be null... + // The assignment may be null if it was stopped by another action while waiting + this.exception = new ElasticsearchStatusException( + "Error waiting for the model deployment to start. The trained model assignment was removed while waiting", + RestStatus.BAD_REQUEST + ); logger.trace(() -> format("[%s] assignment was null while waiting for state [%s]", deploymentId, waitForState)); return true; } From 755c392bb22e9046ef79982aba188f3c45193c8b Mon Sep 17 00:00:00 2001 From: Luke Whiting Date: Thu, 24 Oct 2024 15:24:26 +0100 Subject: [PATCH 371/449] Fix for race condition in interval watcher scheduler tests (#115501) --- muted-tests.yml | 12 ------------ .../schedule/engine/TickerScheduleEngineTests.java | 12 ++++-------- 2 files changed, 4 insertions(+), 20 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 1ee677b14fea1..ba816ed5f3a9e 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -258,21 +258,9 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=esql/60_usage/Basic ESQL usage output (telemetry)} issue: https://github.com/elastic/elasticsearch/issues/115231 -- class: org.elasticsearch.xpack.watcher.trigger.schedule.engine.TickerScheduleEngineTests - method: testAddWithNoLastCheckedTimeButHasActivationTimeExecutesBeforeInitialInterval - issue: https://github.com/elastic/elasticsearch/issues/115339 -- class: org.elasticsearch.xpack.watcher.trigger.schedule.engine.TickerScheduleEngineTests - method: testWatchWithLastCheckedTimeExecutesBeforeInitialInterval - issue: https://github.com/elastic/elasticsearch/issues/115354 -- class: org.elasticsearch.xpack.watcher.trigger.schedule.engine.TickerScheduleEngineTests - method: testAddWithLastCheckedTimeExecutesBeforeInitialInterval - issue: https://github.com/elastic/elasticsearch/issues/115356 - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT method: 
testInferDeploysDefaultE5 issue: https://github.com/elastic/elasticsearch/issues/115361 -- class: org.elasticsearch.xpack.watcher.trigger.schedule.engine.TickerScheduleEngineTests - method: testWatchWithNoLastCheckedTimeButHasActivationTimeExecutesBeforeInitialInterval - issue: https://github.com/elastic/elasticsearch/issues/115368 - class: org.elasticsearch.reservedstate.service.FileSettingsServiceTests method: testProcessFileChanges issue: https://github.com/elastic/elasticsearch/issues/115280 diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleEngineTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleEngineTests.java index 9a12b8f394eb2..ef290628c06d5 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleEngineTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleEngineTests.java @@ -312,14 +312,13 @@ public void testWatchWithLastCheckedTimeExecutesBeforeInitialInterval() throws E engine.register(events -> { for (TriggerEvent ignored : events) { - if (runCount.get() == 0) { + if (runCount.getAndIncrement() == 0) { logger.info("job first fire"); firstLatch.countDown(); } else { logger.info("job second fire"); secondLatch.countDown(); } - runCount.incrementAndGet(); } }); @@ -375,14 +374,13 @@ public void testWatchWithNoLastCheckedTimeButHasActivationTimeExecutesBeforeInit engine.register(events -> { for (TriggerEvent ignored : events) { - if (runCount.get() == 0) { + if (runCount.getAndIncrement() == 0) { logger.info("job first fire"); firstLatch.countDown(); } else { logger.info("job second fire"); secondLatch.countDown(); } - runCount.incrementAndGet(); } }); @@ -428,14 +426,13 @@ public void testAddWithLastCheckedTimeExecutesBeforeInitialInterval() throws Exc engine.register(events -> { for 
(TriggerEvent ignored : events) { - if (runCount.get() == 0) { + if (runCount.getAndIncrement() == 0) { logger.info("job first fire"); firstLatch.countDown(); } else { logger.info("job second fire"); secondLatch.countDown(); } - runCount.incrementAndGet(); } }); @@ -492,14 +489,13 @@ public void testAddWithNoLastCheckedTimeButHasActivationTimeExecutesBeforeInitia engine.register(events -> { for (TriggerEvent ignored : events) { - if (runCount.get() == 0) { + if (runCount.getAndIncrement() == 0) { logger.info("job first fire"); firstLatch.countDown(); } else { logger.info("job second fire"); secondLatch.countDown(); } - runCount.incrementAndGet(); } }); From d7a9575d0314adcc65b50c4972c585464c2aefa9 Mon Sep 17 00:00:00 2001 From: Pete Gillin Date: Thu, 24 Oct 2024 15:58:24 +0100 Subject: [PATCH 372/449] Remove deprecated local parameter from alias APIs (#115393) This removes the `local` parameter from the `GET /_alias`, `HEAD /_alias`, and `GET /_cat/aliases` APIs. This option became a no-op and was deprecated in 8.12 by https://github.com/elastic/elasticsearch/pull/101815. We continue to accept the parameter (deprecated, with no other effect) in v8 compatibility mode for `GET /_alias` and `HEAD /_alias`. We don't do this for `GET /_cat/aliases` where the [compatibility policy does not apply](https://github.com/elastic/elasticsearch/blob/main/REST_API_COMPATIBILITY.md#when-not-to-apply). 
--- docs/changelog/115393.yaml | 18 ++++++++++++ docs/reference/cat/alias.asciidoc | 10 +++---- docs/reference/indices/alias-exists.asciidoc | 2 -- docs/reference/indices/get-alias.asciidoc | 2 -- rest-api-spec/build.gradle | 1 + .../rest-api-spec/api/cat.aliases.json | 4 --- .../api/indices.exists_alias.json | 4 --- .../rest-api-spec/api/indices.get_alias.json | 4 --- .../test/cat.aliases/10_basic.yml | 13 --------- .../test/indices.exists_alias/10_basic.yml | 14 --------- .../test/indices.get_alias/10_basic.yml | 29 ------------------- .../admin/indices/RestGetAliasesAction.java | 15 +++++----- .../rest/action/cat/RestAliasAction.java | 22 -------------- 13 files changed, 30 insertions(+), 108 deletions(-) create mode 100644 docs/changelog/115393.yaml diff --git a/docs/changelog/115393.yaml b/docs/changelog/115393.yaml new file mode 100644 index 0000000000000..5cf4e5f64ab34 --- /dev/null +++ b/docs/changelog/115393.yaml @@ -0,0 +1,18 @@ +pr: 115393 +summary: Remove deprecated local attribute from alias APIs +area: Indices APIs +type: breaking +issues: [] +breaking: + title: Remove deprecated local attribute from alias APIs + area: REST API + details: >- + The following APIs no longer accept the `?local` query parameter: + `GET /_alias`, `GET /_aliases`, `GET /_alias/{name}`, + `HEAD /_alias/{name}`, `GET /{index}/_alias`, `HEAD /{index}/_alias`, + `GET /{index}/_alias/{name}`, `HEAD /{index}/_alias/{name}`, + `GET /_cat/aliases`, and `GET /_cat/aliases/{alias}`. This parameter + has been deprecated and ignored since version 8.12. + impact: >- + Cease usage of the `?local` query parameter when calling the listed APIs. + notable: false diff --git a/docs/reference/cat/alias.asciidoc b/docs/reference/cat/alias.asciidoc index 72f949bf11e50..41ac279d3b2f5 100644 --- a/docs/reference/cat/alias.asciidoc +++ b/docs/reference/cat/alias.asciidoc @@ -6,8 +6,8 @@ [IMPORTANT] ==== -cat APIs are only intended for human consumption using the command line or the -{kib} console. 
They are _not_ intended for use by applications. For application +cat APIs are only intended for human consumption using the command line or the +{kib} console. They are _not_ intended for use by applications. For application consumption, use the <>. ==== @@ -45,8 +45,6 @@ include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h] include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help] -include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local] - include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s] include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v] @@ -104,6 +102,6 @@ alias4 test1 - 2 1,2 - This response shows that `alias2` has configured a filter, and specific routing configurations in `alias3` and `alias4`. -If you only want to get information about specific aliases, you can specify -the aliases in comma-delimited format as a URL parameter, e.g., +If you only want to get information about specific aliases, you can specify +the aliases in comma-delimited format as a URL parameter, e.g., /_cat/aliases/alias1,alias2. diff --git a/docs/reference/indices/alias-exists.asciidoc b/docs/reference/indices/alias-exists.asciidoc index f820a95028a0f..d7b3454dcff56 100644 --- a/docs/reference/indices/alias-exists.asciidoc +++ b/docs/reference/indices/alias-exists.asciidoc @@ -52,8 +52,6 @@ Defaults to `all`. (Optional, Boolean) If `false`, requests that include a missing data stream or index in the `` return an error. Defaults to `false`. -include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local] - [[alias-exists-api-response-codes]] ==== {api-response-codes-title} diff --git a/docs/reference/indices/get-alias.asciidoc b/docs/reference/indices/get-alias.asciidoc index 743aaf7aee174..41d62fb70e01b 100644 --- a/docs/reference/indices/get-alias.asciidoc +++ b/docs/reference/indices/get-alias.asciidoc @@ -58,5 +58,3 @@ Defaults to `all`. 
`ignore_unavailable`:: (Optional, Boolean) If `false`, requests that include a missing data stream or index in the `` return an error. Defaults to `false`. - -include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local] diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 4bd293f0a8641..6cc2028bffa39 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -60,4 +60,5 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.skipTest("indices.sort/10_basic/Index Sort", "warning does not exist for compatibility") task.skipTest("search/330_fetch_fields/Test search rewrite", "warning does not exist for compatibility") task.skipTest("indices.create/21_synthetic_source_stored/object param - nested object with stored array", "temporary until backported") + task.skipTest("cat.aliases/10_basic/Deprecated local parameter", "CAT APIs not covered by compatibility policy") }) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.aliases.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.aliases.json index db49daeea372b..d3856b455efd1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.aliases.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.aliases.json @@ -36,10 +36,6 @@ "type":"string", "description":"a short version of the Accept header, e.g. 
json, yaml" }, - "local":{ - "type":"boolean", - "description":"Return local information, do not retrieve the state from master node (default: false)" - }, "h":{ "type":"list", "description":"Comma-separated list of column names to display" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_alias.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_alias.json index b70854fdc3eb2..7d7a9c96c6419 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_alias.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_alias.json @@ -61,10 +61,6 @@ ], "default":"all", "description":"Whether to expand wildcard expression to concrete indices that are open, closed or both." - }, - "local":{ - "type":"boolean", - "description":"Return local information, do not retrieve the state from master node (default: false)" } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_alias.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_alias.json index 0a4e4bb9ed90c..dc02a65adb068 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_alias.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_alias.json @@ -79,10 +79,6 @@ ], "default": "all", "description":"Whether to expand wildcard expression to concrete indices that are open, closed or both." 
- }, - "local":{ - "type":"boolean", - "description":"Return local information, do not retrieve the state from master node (default: false)" } } } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.aliases/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.aliases/10_basic.yml index 2e5234bd1ced1..6118453d7805e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.aliases/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.aliases/10_basic.yml @@ -484,16 +484,3 @@ test_alias \s+ test_index\n my_alias \s+ test_index\n $/ - ---- -"Deprecated local parameter": - - requires: - cluster_features: ["gte_v8.12.0"] - test_runner_features: ["warnings"] - reason: verifying deprecation warnings from 8.12.0 onwards - - - do: - cat.aliases: - local: true - warnings: - - "the [?local=true] query parameter to cat-aliases requests has no effect and will be removed in a future version" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.exists_alias/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.exists_alias/10_basic.yml index bf499de8463bd..a4223c2a983be 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.exists_alias/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.exists_alias/10_basic.yml @@ -34,17 +34,3 @@ name: test_alias - is_false: '' - ---- -"Test indices.exists_alias with local flag": - - skip: - features: ["allowed_warnings"] - - - do: - indices.exists_alias: - name: test_alias - local: true - allowed_warnings: - - "the [?local=true] query parameter to get-aliases requests has no effect and will be removed in a future version" - - - is_false: '' diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_alias/10_basic.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_alias/10_basic.yml index 4f26a69712e83..63ab40f3bf578 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_alias/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_alias/10_basic.yml @@ -289,21 +289,6 @@ setup: index: non-existent name: foo ---- -"Get alias with local flag": - - skip: - features: ["allowed_warnings"] - - - do: - indices.get_alias: - local: true - allowed_warnings: - - "the [?local=true] query parameter to get-aliases requests has no effect and will be removed in a future version" - - - is_true: test_index - - - is_true: test_index_2 - --- "Get alias against closed indices": - skip: @@ -329,17 +314,3 @@ setup: - is_true: test_index - is_false: test_index_2 - - ---- -"Deprecated local parameter": - - requires: - cluster_features: "gte_v8.12.0" - test_runner_features: ["warnings"] - reason: verifying deprecation warnings from 8.12.0 onwards - - - do: - indices.get_alias: - local: true - warnings: - - "the [?local=true] query parameter to get-aliases requests has no effect and will be removed in a future version" diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAliasesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAliasesAction.java index 7780ae08ac0ff..dfe501f29ce2e 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAliasesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAliasesAction.java @@ -22,7 +22,7 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import 
org.elasticsearch.rest.RestResponse; @@ -52,7 +52,7 @@ @ServerlessScope(Scope.PUBLIC) public class RestGetAliasesAction extends BaseRestHandler { - @UpdateForV9(owner = UpdateForV9.Owner.DATA_MANAGEMENT) // reject the deprecated ?local parameter + @UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT) // remove the BWC support for the deprecated ?local parameter private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(RestGetAliasesAction.class); @Override @@ -199,8 +199,7 @@ static RestResponse buildRestResponse( } @Override - @UpdateForV9(owner = UpdateForV9.Owner.DATA_MANAGEMENT) - // v7 REST API no longer exists: eliminate ref to RestApiVersion.V_7; reject local parameter in v9 too? + @UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT) // remove the BWC support for the deprecated ?local parameter public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { // The TransportGetAliasesAction was improved do the same post processing as is happening here. // We can't remove this logic yet to support mixed clusters. 
We should be able to remove this logic here @@ -213,10 +212,10 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC getAliasesRequest.indices(indices); getAliasesRequest.indicesOptions(IndicesOptions.fromRequest(request, getAliasesRequest.indicesOptions())); - if (request.hasParam("local")) { - // consume this param just for validation - final var localParam = request.paramAsBoolean("local", false); - if (request.getRestApiVersion() != RestApiVersion.V_7) { + if (request.getRestApiVersion() == RestApiVersion.V_8) { + if (request.hasParam("local")) { + // consume this param just for validation when in BWC mode for V_8 + final var localParam = request.paramAsBoolean("local", false); DEPRECATION_LOGGER.critical( DeprecationCategory.API, "get-aliases-local", diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestAliasAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestAliasAction.java index 191746b421c98..6aa0b1c865682 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestAliasAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestAliasAction.java @@ -15,10 +15,6 @@ import org.elasticsearch.cluster.metadata.AliasMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; -import org.elasticsearch.common.logging.DeprecationCategory; -import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.Scope; @@ -34,8 +30,6 @@ @ServerlessScope(Scope.PUBLIC) public class RestAliasAction extends AbstractCatAction { - private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(RestAliasAction.class); - @Override public List routes() { return List.of(new Route(GET, "/_cat/aliases"), new Route(GET, 
"/_cat/aliases/{alias}")); @@ -52,27 +46,11 @@ public boolean allowSystemIndexAccessByDefault() { } @Override - @UpdateForV9(owner = UpdateForV9.Owner.DATA_MANAGEMENT) - // v7 REST API no longer exists: eliminate ref to RestApiVersion.V_7; reject local parameter in v9 too? protected RestChannelConsumer doCatRequest(final RestRequest request, final NodeClient client) { final GetAliasesRequest getAliasesRequest = request.hasParam("alias") ? new GetAliasesRequest(Strings.commaDelimitedListToStringArray(request.param("alias"))) : new GetAliasesRequest(); getAliasesRequest.indicesOptions(IndicesOptions.fromRequest(request, getAliasesRequest.indicesOptions())); - - if (request.hasParam("local")) { - // consume this param just for validation - final var localParam = request.paramAsBoolean("local", false); - if (request.getRestApiVersion() != RestApiVersion.V_7) { - DEPRECATION_LOGGER.critical( - DeprecationCategory.API, - "cat-aliases-local", - "the [?local={}] query parameter to cat-aliases requests has no effect and will be removed in a future version", - localParam - ); - } - } - return channel -> new RestCancellableNodeClient(client, request.getHttpChannel()).admin() .indices() .getAliases(getAliasesRequest, new RestResponseListener<>(channel) { From d8a3fc22cde255dc9b7456ba1009bb8b45b7407d Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Thu, 24 Oct 2024 16:59:10 +0200 Subject: [PATCH 373/449] [test] Don't test any 7.x snapshots in `testLogicallyEquivalentSnapshotIsUsedEvenIfFilesAreDifferent` (#114821) Don't test any 7.x snapshots, keep using any 8,x compatible snapshot and Lucene version. Originally added in 8.0 (#77420) for testing peer recoveries using snapshots. 
Co-authored-by: Yang Wang Co-authored-by: Elastic Machine --- .../SnapshotsRecoveryPlannerServiceTests.java | 20 ++----------------- 1 file changed, 2 insertions(+), 18 deletions(-) diff --git a/x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java b/x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java index b082698254d17..6e7f2d82cfb1d 100644 --- a/x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java +++ b/x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java @@ -26,15 +26,12 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.IOUtils; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.store.StoreFileMetadata; -import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.recovery.plan.ShardRecoveryPlan; import org.elasticsearch.indices.recovery.plan.ShardSnapshot; import org.elasticsearch.indices.recovery.plan.ShardSnapshotsService; @@ -63,7 +60,6 @@ import static org.elasticsearch.common.util.CollectionUtils.iterableAsArrayList; import static org.elasticsearch.index.engine.Engine.ES_VERSION; import static org.elasticsearch.index.engine.Engine.HISTORY_UUID_KEY; -import static 
org.elasticsearch.test.index.IndexVersionUtils.randomVersionBetween; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -203,8 +199,6 @@ public void fetchLatestSnapshotsForShard(ShardId shardId, ActionListener { boolean shareFilesWithSource = randomBoolean(); @@ -217,18 +211,8 @@ public void testLogicallyEquivalentSnapshotIsUsedEvenIfFilesAreDifferent() throw final IndexVersion snapshotVersion; final Version luceneVersion; if (compatibleVersion) { - snapshotVersion = randomBoolean() ? null : IndexVersionUtils.randomCompatibleVersion(random()); - // If snapshotVersion is not present, - // then lucene version must be < RecoverySettings.SEQ_NO_SNAPSHOT_RECOVERIES_SUPPORTED_VERSION - if (snapshotVersion == null) { - luceneVersion = randomVersionBetween( - random(), - IndexVersions.V_7_0_0, - RecoverySettings.SNAPSHOT_RECOVERIES_SUPPORTED_INDEX_VERSION - ).luceneVersion(); - } else { - luceneVersion = IndexVersionUtils.randomCompatibleVersion(random()).luceneVersion(); - } + snapshotVersion = IndexVersionUtils.randomCompatibleVersion(random()); + luceneVersion = snapshotVersion.luceneVersion(); } else { snapshotVersion = IndexVersion.fromId(Integer.MAX_VALUE); luceneVersion = org.apache.lucene.util.Version.parse("255.255.255"); From f5d3c7c3d8bff7b91430c42d66550613e2716387 Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Thu, 24 Oct 2024 17:09:34 +0200 Subject: [PATCH 374/449] Remove legacy join validation transport protocol (#114571) We introduced a new join validation protocol in #85380 (8.3), the legacy protocol can be removed in 9.0 Remove assertion that we run a version after 8.3.0 --- .../coordination/JoinValidationService.java | 57 ++----------------- .../coordination/ValidateJoinRequest.java | 21 ++----- 2 files changed, 12 insertions(+), 66 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinValidationService.java 
b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinValidationService.java index 34d59c9860aba..7de7fd4d92d1b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinValidationService.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinValidationService.java @@ -13,7 +13,6 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.cluster.ClusterState; @@ -31,7 +30,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.env.Environment; import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.threadpool.ThreadPool; @@ -46,7 +44,6 @@ import java.io.IOException; import java.util.Collection; import java.util.HashMap; -import java.util.Locale; import java.util.Map; import java.util.Queue; import java.util.concurrent.ConcurrentLinkedQueue; @@ -162,55 +159,14 @@ public void validateJoin(DiscoveryNode discoveryNode, ActionListener liste return; } - if (connection.getTransportVersion().onOrAfter(TransportVersions.V_8_3_0)) { - if (executeRefs.tryIncRef()) { - try { - execute(new JoinValidation(discoveryNode, connection, listener)); - } finally { - executeRefs.decRef(); - } - } else { - listener.onFailure(new NodeClosedException(transportService.getLocalNode())); + if (executeRefs.tryIncRef()) { + try { + execute(new JoinValidation(discoveryNode, connection, listener)); + } finally { + executeRefs.decRef(); } } else { - legacyValidateJoin(discoveryNode, listener, connection); - } - } - - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) - private void legacyValidateJoin(DiscoveryNode discoveryNode, 
ActionListener listener, Transport.Connection connection) { - final var responseHandler = TransportResponseHandler.empty(responseExecutor, listener.delegateResponse((l, e) -> { - logger.warn(() -> "failed to validate incoming join request from node [" + discoveryNode + "]", e); - listener.onFailure( - new IllegalStateException( - String.format( - Locale.ROOT, - "failure when sending a join validation request from [%s] to [%s]", - transportService.getLocalNode().descriptionWithoutAttributes(), - discoveryNode.descriptionWithoutAttributes() - ), - e - ) - ); - })); - final var clusterState = clusterStateSupplier.get(); - if (clusterState != null) { - assert clusterState.nodes().isLocalNodeElectedMaster(); - transportService.sendRequest( - connection, - JOIN_VALIDATE_ACTION_NAME, - new ValidateJoinRequest(clusterState), - REQUEST_OPTIONS, - responseHandler - ); - } else { - transportService.sendRequest( - connection, - JoinHelper.JOIN_PING_ACTION_NAME, - new JoinHelper.JoinPingRequest(), - REQUEST_OPTIONS, - responseHandler - ); + listener.onFailure(new NodeClosedException(transportService.getLocalNode())); } } @@ -341,7 +297,6 @@ private class JoinValidation extends ActionRunnable { @Override protected void doRun() { - assert connection.getTransportVersion().onOrAfter(TransportVersions.V_8_3_0) : discoveryNode.getVersion(); // NB these things never run concurrently to each other, or to the cache cleaner (see IMPLEMENTATION NOTES above) so it is safe // to do these (non-atomic) things to the (unsynchronized) statesByVersion map. 
var transportVersion = connection.getTransportVersion(); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/ValidateJoinRequest.java b/server/src/main/java/org/elasticsearch/cluster/coordination/ValidateJoinRequest.java index 1d99f28e62582..c81e4877196b3 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/ValidateJoinRequest.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/ValidateJoinRequest.java @@ -9,7 +9,6 @@ package org.elasticsearch.cluster.coordination; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.bytes.BytesReference; @@ -29,19 +28,12 @@ public class ValidateJoinRequest extends TransportRequest { public ValidateJoinRequest(StreamInput in) throws IOException { super(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_3_0)) { - // recent versions send a BytesTransportRequest containing a compressed representation of the state - final var bytes = in.readReleasableBytesReference(); - final var version = in.getTransportVersion(); - final var namedWriteableRegistry = in.namedWriteableRegistry(); - this.stateSupplier = () -> readCompressed(version, bytes, namedWriteableRegistry); - this.refCounted = bytes; - } else { - // older versions just contain the bare state - final var state = ClusterState.readFrom(in, null); - this.stateSupplier = () -> state; - this.refCounted = null; - } + // recent versions send a BytesTransportRequest containing a compressed representation of the state + final var bytes = in.readReleasableBytesReference(); + final var version = in.getTransportVersion(); + final var namedWriteableRegistry = in.namedWriteableRegistry(); + this.stateSupplier = () -> readCompressed(version, bytes, namedWriteableRegistry); + this.refCounted = bytes; } private static ClusterState readCompressed( @@ -68,7 +60,6 
@@ public ValidateJoinRequest(ClusterState state) { @Override public void writeTo(StreamOutput out) throws IOException { - assert out.getTransportVersion().before(TransportVersions.V_8_3_0); super.writeTo(out); stateSupplier.get().writeTo(out); } From 2ddd08aff7bea3a4ef1e4aea28d2ae63518902a1 Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Thu, 24 Oct 2024 10:10:36 -0500 Subject: [PATCH 375/449] Fixing ingest simulate yaml rest test when there is a global legacy template (#115559) The ingest simulate yaml rest test `Test mapping addition works with indices without templates` tests what happens when an index has a mapping but matches no template at all. However, randomly and rarely a global match-all legacy template is applied to the cluster. When this happens, the assumptions for the test fail since the index matches a template. This PR removes that global legacy template so that the test works as intended. Closes #115412 Closes #115472 --- muted-tests.yml | 3 --- .../rest-api-spec/test/ingest/80_ingest_simulate.yml | 7 +++++++ 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index ba816ed5f3a9e..8c90f73f475e6 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -264,9 +264,6 @@ tests: - class: org.elasticsearch.reservedstate.service.FileSettingsServiceTests method: testProcessFileChanges issue: https://github.com/elastic/elasticsearch/issues/115280 -- class: org.elasticsearch.smoketest.SmokeTestIngestWithAllDepsClientYamlTestSuiteIT - method: test {yaml=ingest/80_ingest_simulate/Test mapping addition works with legacy templates} - issue: https://github.com/elastic/elasticsearch/issues/115412 - class: org.elasticsearch.xpack.security.FileSettingsRoleMappingsRestartIT method: testFileSettingsReprocessedOnRestartWithoutVersionChange issue: https://github.com/elastic/elasticsearch/issues/115450 diff --git 
a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml index 4d1a62c6f179e..7ed5ad3154151 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml @@ -1586,6 +1586,13 @@ setup: cluster_features: ["simulate.support.non.template.mapping"] reason: "ingest simulate support for indices with mappings that didn't come from templates added in 8.17" + # A global match-everything legacy template is added to the cluster sometimes (rarely). We have to get rid of this template if it exists + # because this test is making sure we get correct behavior when an index matches *no* template: + - do: + indices.delete_template: + name: '*' + ignore: 404 + # First, make sure that validation fails before we create the index (since we are only defining to bar field but trying to index a value # for foo. 
- do: From 79be69a5f87da015e6105a84537c590ae68c197b Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Thu, 24 Oct 2024 18:22:13 +0300 Subject: [PATCH 376/449] Ignore _field_names warning in testRollupAfterRestart (#115563) --- .../org/elasticsearch/xpack/restart/FullClusterRestartIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index c57e5653d1279..a56ddaabe8280 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -435,6 +435,7 @@ public void testRollupAfterRestart() throws Exception { final Request bulkRequest = new Request("POST", "/_bulk"); bulkRequest.setJsonEntity(bulk.toString()); + bulkRequest.setOptions(RequestOptions.DEFAULT.toBuilder().setWarningsHandler(fieldNamesFieldOk())); client().performRequest(bulkRequest); // create the rollup job From fb6c729858b443956ba41c68495a5de084ffa73d Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 24 Oct 2024 08:47:52 -0700 Subject: [PATCH 377/449] Guard blob store local directory creation with doPrivileged (#115459) The blob store may be triggered to create a local directory while in a reduced privilege context. This commit guards the creation of directories with doPrivileged. 
--- docs/changelog/115459.yaml | 5 +++++ .../common/blobstore/fs/FsBlobStore.java | 15 ++++++++++----- 2 files changed, 15 insertions(+), 5 deletions(-) create mode 100644 docs/changelog/115459.yaml diff --git a/docs/changelog/115459.yaml b/docs/changelog/115459.yaml new file mode 100644 index 0000000000000..b20a8f765c084 --- /dev/null +++ b/docs/changelog/115459.yaml @@ -0,0 +1,5 @@ +pr: 115459 +summary: Guard blob store local directory creation with `doPrivileged` +area: Infra/Core +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java index c4240672239fa..53e3b4b4796dc 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java @@ -19,6 +19,8 @@ import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; +import java.security.AccessController; +import java.security.PrivilegedAction; import java.util.Iterator; import java.util.List; @@ -56,11 +58,14 @@ public int bufferSizeInBytes() { public BlobContainer blobContainer(BlobPath path) { Path f = buildPath(path); if (readOnly == false) { - try { - Files.createDirectories(f); - } catch (IOException ex) { - throw new ElasticsearchException("failed to create blob container", ex); - } + AccessController.doPrivileged((PrivilegedAction) () -> { + try { + Files.createDirectories(f); + } catch (IOException ex) { + throw new ElasticsearchException("failed to create blob container", ex); + } + return null; + }); } return new FsBlobContainer(this, path, f); } From 482d2aced5f888d548a755e0fe20fc6f83125d11 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Thu, 24 Oct 2024 17:58:36 +0200 Subject: [PATCH 378/449] Remove unused elasticsearch cloud docker image (#115357) --- .../gradle/internal/DockerBase.java | 3 --- distribution/docker/build.gradle | 25 
+++---------------- .../cloud-docker-aarch64-export/build.gradle | 2 -- .../docker/cloud-docker-export/build.gradle | 2 -- .../build.gradle | 2 -- .../wolfi-ess-docker-export/build.gradle | 2 -- .../packaging/test/DockerTests.java | 11 +++----- .../test/KeystoreManagementTests.java | 5 +--- .../packaging/test/PackagingTestCase.java | 6 ++--- .../packaging/util/Distribution.java | 5 +--- .../packaging/util/docker/Docker.java | 2 +- .../packaging/util/docker/DockerRun.java | 1 - settings.gradle | 2 -- 13 files changed, 12 insertions(+), 56 deletions(-) delete mode 100644 distribution/docker/cloud-docker-aarch64-export/build.gradle delete mode 100644 distribution/docker/cloud-docker-export/build.gradle delete mode 100644 distribution/docker/wolfi-ess-docker-aarch64-export/build.gradle delete mode 100644 distribution/docker/wolfi-ess-docker-export/build.gradle diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java index fb52daf7e164f..0535f0bdc3cc8 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java @@ -21,9 +21,6 @@ public enum DockerBase { // The Iron Bank base image is UBI (albeit hardened), but we are required to parameterize the Docker build IRON_BANK("${BASE_REGISTRY}/${BASE_IMAGE}:${BASE_TAG}", "-ironbank", "yum"), - // Base image with extras for Cloud - CLOUD("ubuntu:20.04", "-cloud", "apt-get"), - // Chainguard based wolfi image with latest jdk // This is usually updated via renovatebot // spotless:off diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index e40ac68bbacf4..788e836f8f045 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -288,20 +288,6 @@ void addBuildDockerContextTask(Architecture architecture, DockerBase base) { } 
} - if (base == DockerBase.CLOUD) { - // If we're performing a release build, but `build.id` hasn't been set, we can - // infer that we're not at the Docker building stage of the build, and therefore - // we should skip the beats part of the build. - String buildId = providers.systemProperty('build.id').getOrNull() - boolean includeBeats = VersionProperties.isElasticsearchSnapshot() == true || buildId != null || useDra - - if (includeBeats) { - from configurations.getByName("filebeat_${architecture.classifier}") - from configurations.getByName("metricbeat_${architecture.classifier}") - } - // For some reason, the artifact name can differ depending on what repository we used. - rename ~/((?:file|metric)beat)-.*\.tar\.gz$/, "\$1-${VersionProperties.elasticsearch}.tar.gz" - } Provider serviceProvider = GradleUtils.getBuildService( project.gradle.sharedServices, DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME @@ -381,7 +367,7 @@ private static List generateTags(DockerBase base, Architecture architect String image = "elasticsearch${base.suffix}" String namespace = 'elasticsearch' - if (base == DockerBase.CLOUD || base == DockerBase.CLOUD_ESS) { + if (base == base == DockerBase.CLOUD_ESS) { namespace += '-ci' } @@ -439,7 +425,7 @@ void addBuildDockerImageTask(Architecture architecture, DockerBase base) { } - if (base != DockerBase.IRON_BANK && base != DockerBase.CLOUD && base != DockerBase.CLOUD_ESS) { + if (base != DockerBase.IRON_BANK && base != DockerBase.CLOUD_ESS) { tasks.named("assemble").configure { dependsOn(buildDockerImageTask) } @@ -548,10 +534,6 @@ subprojects { Project subProject -> base = DockerBase.IRON_BANK } else if (subProject.name.contains('cloud-ess-')) { base = DockerBase.CLOUD_ESS - } else if (subProject.name.contains('cloud-')) { - base = DockerBase.CLOUD - } else if (subProject.name.contains('wolfi-ess')) { - base = DockerBase.WOLFI_ESS } else if (subProject.name.contains('wolfi-')) { base = DockerBase.WOLFI } @@ -559,10 +541,9 @@ subprojects { 
Project subProject -> final String arch = architecture == Architecture.AARCH64 ? '-aarch64' : '' final String extension = base == DockerBase.UBI ? 'ubi.tar' : (base == DockerBase.IRON_BANK ? 'ironbank.tar' : - (base == DockerBase.CLOUD ? 'cloud.tar' : (base == DockerBase.CLOUD_ESS ? 'cloud-ess.tar' : (base == DockerBase.WOLFI ? 'wolfi.tar' : - 'docker.tar')))) + 'docker.tar'))) final String artifactName = "elasticsearch${arch}${base.suffix}_test" final String exportTaskName = taskName("export", architecture, base, 'DockerImage') diff --git a/distribution/docker/cloud-docker-aarch64-export/build.gradle b/distribution/docker/cloud-docker-aarch64-export/build.gradle deleted file mode 100644 index 537b5a093683e..0000000000000 --- a/distribution/docker/cloud-docker-aarch64-export/build.gradle +++ /dev/null @@ -1,2 +0,0 @@ -// This file is intentionally blank. All configuration of the -// export is done in the parent project. diff --git a/distribution/docker/cloud-docker-export/build.gradle b/distribution/docker/cloud-docker-export/build.gradle deleted file mode 100644 index 537b5a093683e..0000000000000 --- a/distribution/docker/cloud-docker-export/build.gradle +++ /dev/null @@ -1,2 +0,0 @@ -// This file is intentionally blank. All configuration of the -// export is done in the parent project. diff --git a/distribution/docker/wolfi-ess-docker-aarch64-export/build.gradle b/distribution/docker/wolfi-ess-docker-aarch64-export/build.gradle deleted file mode 100644 index 537b5a093683e..0000000000000 --- a/distribution/docker/wolfi-ess-docker-aarch64-export/build.gradle +++ /dev/null @@ -1,2 +0,0 @@ -// This file is intentionally blank. All configuration of the -// export is done in the parent project. 
diff --git a/distribution/docker/wolfi-ess-docker-export/build.gradle b/distribution/docker/wolfi-ess-docker-export/build.gradle deleted file mode 100644 index 537b5a093683e..0000000000000 --- a/distribution/docker/wolfi-ess-docker-export/build.gradle +++ /dev/null @@ -1,2 +0,0 @@ -// This file is intentionally blank. All configuration of the -// export is done in the parent project. diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java index 4ca97bff42333..8cb8354eb5d71 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java @@ -169,10 +169,7 @@ public void test012SecurityCanBeDisabled() throws Exception { * Checks that no plugins are initially active. */ public void test020PluginsListWithNoPlugins() { - assumeTrue( - "Only applies to non-Cloud images", - distribution.packaging != Packaging.DOCKER_CLOUD && distribution().packaging != Packaging.DOCKER_CLOUD_ESS - ); + assumeTrue("Only applies to non-Cloud images", distribution().packaging != Packaging.DOCKER_CLOUD_ESS); final Installation.Executables bin = installation.executables(); final Result r = sh.run(bin.pluginTool + " list"); @@ -1116,8 +1113,8 @@ public void test170DefaultShellIsBash() { */ public void test171AdditionalCliOptionsAreForwarded() throws Exception { assumeTrue( - "Does not apply to Cloud and Cloud ESS images, because they don't use the default entrypoint", - distribution.packaging != Packaging.DOCKER_CLOUD && distribution().packaging != Packaging.DOCKER_CLOUD_ESS + "Does not apply to Cloud ESS images, because they don't use the default entrypoint", + distribution().packaging != Packaging.DOCKER_CLOUD_ESS ); runContainer(distribution(), builder().runArgs("bin/elasticsearch", "-Ecluster.name=kimchy").envVar("ELASTIC_PASSWORD", PASSWORD)); @@ -1204,7 +1201,7 @@ public 
void test310IronBankImageHasNoAdditionalLabels() throws Exception { * Check that the Cloud image contains the required Beats */ public void test400CloudImageBundlesBeats() { - assumeTrue(distribution.packaging == Packaging.DOCKER_CLOUD || distribution.packaging == Packaging.DOCKER_CLOUD_ESS); + assumeTrue(distribution.packaging == Packaging.DOCKER_CLOUD_ESS); final List contents = listContents("/opt"); assertThat("Expected beats in /opt", contents, hasItems("filebeat", "metricbeat")); diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java index a988a446f561f..02e1ce35764cf 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java @@ -436,10 +436,7 @@ private void verifyKeystorePermissions() { switch (distribution.packaging) { case TAR, ZIP -> assertThat(keystore, file(File, ARCHIVE_OWNER, ARCHIVE_OWNER, p660)); case DEB, RPM -> assertThat(keystore, file(File, "root", "elasticsearch", p660)); - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> assertThat( - keystore, - DockerFileMatcher.file(p660) - ); + case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> assertThat(keystore, DockerFileMatcher.file(p660)); default -> throw new IllegalStateException("Unknown Elasticsearch packaging type."); } } diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java index 644990105f60f..b4a00ca56924a 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java @@ -245,7 +245,7 @@ protected static void 
install() throws Exception { installation = Packages.installPackage(sh, distribution); Packages.verifyPackageInstallation(installation, distribution, sh); } - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> { + case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> { installation = Docker.runContainer(distribution); Docker.verifyContainerInstallation(installation); } @@ -335,7 +335,6 @@ public Shell.Result runElasticsearchStartCommand(String password, boolean daemon case DOCKER: case DOCKER_UBI: case DOCKER_IRON_BANK: - case DOCKER_CLOUD: case DOCKER_CLOUD_ESS: case DOCKER_WOLFI: // nothing, "installing" docker image is running it @@ -358,7 +357,6 @@ public void stopElasticsearch() throws Exception { case DOCKER: case DOCKER_UBI: case DOCKER_IRON_BANK: - case DOCKER_CLOUD: case DOCKER_CLOUD_ESS: case DOCKER_WOLFI: // nothing, "installing" docker image is running it @@ -373,7 +371,7 @@ public void awaitElasticsearchStartup(Shell.Result result) throws Exception { switch (distribution.packaging) { case TAR, ZIP -> Archives.assertElasticsearchStarted(installation); case DEB, RPM -> Packages.assertElasticsearchStarted(sh, installation); - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> Docker.waitForElasticsearchToStart(); + case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> Docker.waitForElasticsearchToStart(); default -> throw new IllegalStateException("Unknown Elasticsearch packaging type."); } } diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java index 05cef4a0818ba..11b8324384631 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java @@ -33,8 +33,6 @@ public Distribution(Path path) { this.packaging 
= Packaging.DOCKER_UBI; } else if (filename.endsWith(".ironbank.tar")) { this.packaging = Packaging.DOCKER_IRON_BANK; - } else if (filename.endsWith(".cloud.tar")) { - this.packaging = Packaging.DOCKER_CLOUD; } else if (filename.endsWith(".cloud-ess.tar")) { this.packaging = Packaging.DOCKER_CLOUD_ESS; } else if (filename.endsWith(".wolfi.tar")) { @@ -63,7 +61,7 @@ public boolean isPackage() { */ public boolean isDocker() { return switch (packaging) { - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> true; + case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> true; default -> false; }; } @@ -77,7 +75,6 @@ public enum Packaging { DOCKER(".docker.tar", Platforms.isDocker()), DOCKER_UBI(".ubi.tar", Platforms.isDocker()), DOCKER_IRON_BANK(".ironbank.tar", Platforms.isDocker()), - DOCKER_CLOUD(".cloud.tar", Platforms.isDocker()), DOCKER_CLOUD_ESS(".cloud-ess.tar", Platforms.isDocker()), DOCKER_WOLFI(".wolfi.tar", Platforms.isDocker()); diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java index c38eaa58f0552..0cd2823080b9b 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java @@ -532,7 +532,7 @@ public static void verifyContainerInstallation(Installation es) { ) ); - if (es.distribution.packaging == Packaging.DOCKER_CLOUD || es.distribution.packaging == Packaging.DOCKER_CLOUD_ESS) { + if (es.distribution.packaging == Packaging.DOCKER_CLOUD_ESS) { verifyCloudContainerInstallation(es); } } diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java index e562e7591564e..e3eac23d3ecce 100644 --- 
a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java @@ -165,7 +165,6 @@ public static String getImageName(Distribution distribution) { case DOCKER -> ""; case DOCKER_UBI -> "-ubi"; case DOCKER_IRON_BANK -> "-ironbank"; - case DOCKER_CLOUD -> "-cloud"; case DOCKER_CLOUD_ESS -> "-cloud-ess"; case DOCKER_WOLFI -> "-wolfi"; default -> throw new IllegalStateException("Unexpected distribution packaging type: " + distribution.packaging); diff --git a/settings.gradle b/settings.gradle index a95a46a3569d7..39453e8d0935a 100644 --- a/settings.gradle +++ b/settings.gradle @@ -63,8 +63,6 @@ List projects = [ 'distribution:archives:linux-aarch64-tar', 'distribution:archives:linux-tar', 'distribution:docker', - 'distribution:docker:cloud-docker-export', - 'distribution:docker:cloud-docker-aarch64-export', 'distribution:docker:cloud-ess-docker-export', 'distribution:docker:cloud-ess-docker-aarch64-export', 'distribution:docker:docker-aarch64-export', From d500daf2e16bb3b6fb4bdde49bbf9d93b7fec25b Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Thu, 24 Oct 2024 18:02:11 +0200 Subject: [PATCH 379/449] [DOCS][101] Add BYO vectors ingestion tutorial (#115112) --- docs/reference/images/semantic-options.svg | 62 ++++++++ .../search-your-data/ingest-vectors.asciidoc | 141 ++++++++++++++++++ .../search-your-data/semantic-search.asciidoc | 3 + 3 files changed, 206 insertions(+) create mode 100644 docs/reference/images/semantic-options.svg create mode 100644 docs/reference/search/search-your-data/ingest-vectors.asciidoc diff --git a/docs/reference/images/semantic-options.svg b/docs/reference/images/semantic-options.svg new file mode 100644 index 0000000000000..3bedf5307357e --- /dev/null +++ b/docs/reference/images/semantic-options.svg @@ -0,0 +1,62 @@ + + + + Elasticsearch semantic search workflows + + + + + + 
semantic_text + (Recommended) + + + + Inference API + + + + Model Deployment + + + Complexity: Low + Complexity: Medium + Complexity: High + + + + + + Create Inference Endpoint + + + Define Index Mapping + + + + Create Inference Endpoint + + + Configure Model Settings + + + Define Index Mapping + + + Setup Ingest Pipeline + + + + Select NLP Model + + + Deploy with Eland Client + + + Define Index Mapping + + + Setup Ingest Pipeline + + + diff --git a/docs/reference/search/search-your-data/ingest-vectors.asciidoc b/docs/reference/search/search-your-data/ingest-vectors.asciidoc new file mode 100644 index 0000000000000..f288293d2b03a --- /dev/null +++ b/docs/reference/search/search-your-data/ingest-vectors.asciidoc @@ -0,0 +1,141 @@ +[[bring-your-own-vectors]] +=== Bring your own dense vector embeddings to {es} +++++ +Bring your own dense vectors +++++ + +This tutorial demonstrates how to index documents that already have dense vector embeddings into {es}. +You'll also learn the syntax for searching these documents using a `knn` query. + +You'll find links at the end of this tutorial for more information about deploying a text embedding model in {es}, so you can generate embeddings for queries on the fly. + +[TIP] +==== +This is an advanced use case. +Refer to <> for an overview of your options for semantic search with {es}. +==== + +[discrete] +[[bring-your-own-vectors-create-index]] +=== Step 1: Create an index with `dense_vector` mapping + +Each document in our simple dataset will have: + +* A review: stored in a `review_text` field +* An embedding of that review: stored in a `review_vector` field +** The `review_vector` field is defined as a <> data type. + +[TIP] +==== +The `dense_vector` type automatically uses `int8_hnsw` quantization by default to reduce the memory footprint required when searching float vectors. +Learn more about balancing performance and accuracy in <>. 
+==== + +[source,console] +---- +PUT /amazon-reviews +{ + "mappings": { + "properties": { + "review_vector": { + "type": "dense_vector", + "dims": 8, <1> + "index": true, <2> + "similarity": "cosine" <3> + }, + "review_text": { + "type": "text" + } + } + } +} +---- +// TEST SETUP +<1> The `dims` parameter must match the length of the embedding vector. Here we're using a simple 8-dimensional embedding for readability. If not specified, `dims` will be dynamically calculated based on the first indexed document. +<2> The `index` parameter is set to `true` to enable the use of the `knn` query. +<3> The `similarity` parameter defines the similarity function used to compare the query vector to the document vectors. `cosine` is the default similarity function for `dense_vector` fields in {es}. + +[discrete] +[[bring-your-own-vectors-index-documents]] +=== Step 2: Index documents with embeddings + +[discrete] +==== Index a single document + +First, index a single document to understand the document structure. + +[source,console] +---- +PUT /amazon-reviews/_doc/1 +{ + "review_text": "This product is lifechanging! I'm telling all my friends about it.", + "review_vector": [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8] <1> +} +---- +// TEST +<1> The size of the `review_vector` array is 8, matching the `dims` count specified in the mapping. + +[discrete] +==== Bulk index multiple documents + +In a production scenario, you'll want to index many documents at once using the <>. + +Here's an example of indexing multiple documents in a single `_bulk` request. + +[source,console] +---- +POST /_bulk +{ "index": { "_index": "amazon-reviews", "_id": "2" } } +{ "review_text": "This product is amazing! I love it.", "review_vector": [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8] } +{ "index": { "_index": "amazon-reviews", "_id": "3" } } +{ "review_text": "This product is terrible. 
I hate it.", "review_vector": [0.8, 0.7, 0.6, 0.5, 0.4, 0.3, 0.2, 0.1] } +{ "index": { "_index": "amazon-reviews", "_id": "4" } } +{ "review_text": "This product is great. I can do anything with it.", "review_vector": [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8] } +{ "index": { "_index": "amazon-reviews", "_id": "5" } } +{ "review_text": "This product has ruined my life and the lives of my family and friends.", "review_vector": [0.8, 0.7, 0.6, 0.5, 0.4, 0.3, 0.2, 0.1] } +---- +// TEST[continued] + +[discrete] +[[bring-your-own-vectors-search-documents]] +=== Step 3: Search documents with embeddings + +Now you can query these document vectors using a <>. +`knn` is a type of vector search, which finds the `k` most similar documents to a query vector. +Here we're simply using a raw vector for the query text, for demonstration purposes. + +[source,console] +---- +POST /amazon-reviews/_search +{ + "retriever": { + "knn": { + "field": "review_vector", + "query_vector": [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8], <1> + "k": 2, <2> + "num_candidates": 5 <3> + } + } +} +---- +// TEST[skip:flakeyknnerror] +<1> In this simple example, we're sending a raw vector as the query text. In a real-world scenario, you'll need to generate vectors for queries using an embedding model. +<2> The `k` parameter specifies the number of results to return. +<3> The `num_candidates` parameter is optional. It limits the number of candidates returned by the search node. This can improve performance and reduce costs. + +[discrete] +[[bring-your-own-vectors-learn-more]] +=== Learn more + +In this simple example, we're sending a raw vector for the query text. +In a real-world scenario you won't know the query text ahead of time. +You'll need to generate query vectors, on the fly, using the same embedding model that generated the document vectors. + +For this you'll need to deploy a text embedding model in {es} and use the <>. 
Alternatively, you can generate vectors client-side and send them directly with the search request. + +Learn how to <> for semantic search. + +[TIP] +==== +If you're just getting started with vector search in {es}, refer to <>. +==== diff --git a/docs/reference/search/search-your-data/semantic-search.asciidoc b/docs/reference/search/search-your-data/semantic-search.asciidoc index 0ef8591e42b5d..e0fb8415fee18 100644 --- a/docs/reference/search/search-your-data/semantic-search.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search.asciidoc @@ -8,6 +8,8 @@ Using an NLP model enables you to extract text embeddings out of text. Embeddings are vectors that provide a numeric representation of a text. Pieces of content with similar meaning have similar representations. +image::images/semantic-options.svg[Overview of semantic search workflows in {es}] + You have several options for using NLP models in the {stack}: * use the `semantic_text` workflow (recommended) @@ -109,3 +111,4 @@ include::semantic-search-inference.asciidoc[] include::semantic-search-elser.asciidoc[] include::cohere-es.asciidoc[] include::semantic-search-deploy-model.asciidoc[] +include::ingest-vectors.asciidoc[] From a270ee3f9c3e0dcfdd2874d8f64b9612098ddaf3 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 25 Oct 2024 03:05:08 +1100 Subject: [PATCH 380/449] Mute org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT test {yaml=reference/esql/esql-across-clusters/line_197} #115575 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 8c90f73f475e6..ab5d686a041c1 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -273,6 +273,9 @@ tests: - class: org.elasticsearch.test.apmintegration.MetricsApmIT method: testApmIntegration issue: https://github.com/elastic/elasticsearch/issues/115415 +- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT + method: test 
{yaml=reference/esql/esql-across-clusters/line_197} + issue: https://github.com/elastic/elasticsearch/issues/115575 # Examples: # From c64226c3503b458c3285064d95528932d324177d Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Thu, 24 Oct 2024 18:19:14 +0200 Subject: [PATCH 381/449] Don't return or accept `node_version` in the Desired Nodes API (#114580) It was deprecated in #104209 (8.13) and shouldn't be set or returned in 9.0 The Desired Nodes API is an internal API, and users shouldn't depend on its backward compatibility. --- .../upgrades/DesiredNodesUpgradeIT.java | 13 +-- rest-api-spec/build.gradle | 2 + .../test/cluster.desired_nodes/10_basic.yml | 95 ------------------- .../cluster/metadata/DesiredNode.java | 77 +-------------- .../metadata/DesiredNodeWithStatus.java | 5 +- .../cluster/RestUpdateDesiredNodesAction.java | 12 --- 6 files changed, 13 insertions(+), 191 deletions(-) diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java index e0d1e7aafa637..17618d5439d48 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java @@ -11,7 +11,6 @@ import com.carrotsearch.randomizedtesting.annotations.Name; -import org.elasticsearch.Build; import org.elasticsearch.action.admin.cluster.desirednodes.UpdateDesiredNodesRequest; import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; @@ -82,8 +81,7 @@ private void assertDesiredNodesUpdatedWithRoundedUpFloatsAreIdempotent() throws Settings.builder().put(NODE_NAME_SETTING.getKey(), nodeName).build(), 1238.49922909, ByteSizeValue.ofGb(32), - ByteSizeValue.ofGb(128), - clusterHasFeature(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED) ? 
null : Build.current().version() + ByteSizeValue.ofGb(128) ) ) .toList(); @@ -153,8 +151,7 @@ private void addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(int ve Settings.builder().put(NODE_NAME_SETTING.getKey(), nodeName).build(), processorsPrecision == ProcessorsPrecision.DOUBLE ? randomDoubleProcessorCount() : 0.5f, ByteSizeValue.ofGb(randomIntBetween(10, 24)), - ByteSizeValue.ofGb(randomIntBetween(128, 256)), - clusterHasFeature(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED) ? null : Build.current().version() + ByteSizeValue.ofGb(randomIntBetween(128, 256)) ) ) .toList(); @@ -167,8 +164,7 @@ private void addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(int ve Settings.builder().put(NODE_NAME_SETTING.getKey(), nodeName).build(), new DesiredNode.ProcessorsRange(minProcessors, minProcessors + randomIntBetween(10, 20)), ByteSizeValue.ofGb(randomIntBetween(10, 24)), - ByteSizeValue.ofGb(randomIntBetween(128, 256)), - clusterHasFeature(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED) ? null : Build.current().version() + ByteSizeValue.ofGb(randomIntBetween(128, 256)) ); }).toList(); } @@ -182,8 +178,7 @@ private void addClusterNodesToDesiredNodesWithIntegerProcessors(int version) thr Settings.builder().put(NODE_NAME_SETTING.getKey(), nodeName).build(), randomIntBetween(1, 24), ByteSizeValue.ofGb(randomIntBetween(10, 24)), - ByteSizeValue.ofGb(randomIntBetween(128, 256)), - clusterHasFeature(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED) ? 
null : Build.current().version() + ByteSizeValue.ofGb(randomIntBetween(128, 256)) ) ) .toList(); diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 6cc2028bffa39..1a398f79085e7 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -61,4 +61,6 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.skipTest("search/330_fetch_fields/Test search rewrite", "warning does not exist for compatibility") task.skipTest("indices.create/21_synthetic_source_stored/object param - nested object with stored array", "temporary until backported") task.skipTest("cat.aliases/10_basic/Deprecated local parameter", "CAT APIs not covered by compatibility policy") + task.skipTest("cluster.desired_nodes/10_basic/Test delete desired nodes with node_version generates a warning", "node_version warning is removed in 9.0") + task.skipTest("cluster.desired_nodes/10_basic/Test update desired nodes with node_version generates a warning", "node_version warning is removed in 9.0") }) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/10_basic.yml index 1d1aa524ffb21..a45146a4e147a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/10_basic.yml @@ -59,61 +59,6 @@ teardown: - contains: { nodes: { settings: { node: { name: "instance-000187" } }, processors: 8.5, memory: "64gb", storage: "128gb" } } - contains: { nodes: { settings: { node: { name: "instance-000188" } }, processors: 16.0, memory: "128gb", storage: "1tb" } } --- -"Test update desired nodes with node_version generates a warning": - - skip: - reason: "contains is a newly added assertion" - features: ["contains", "allowed_warnings"] - - do: - cluster.state: {} - - # Get master node id - - set: { 
master_node: master } - - - do: - nodes.info: {} - - set: { nodes.$master.version: es_version } - - - do: - _internal.update_desired_nodes: - history_id: "test" - version: 1 - body: - nodes: - - { settings: { "node.name": "instance-000187" }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } - allowed_warnings: - - "[version removal] Specifying node_version in desired nodes requests is deprecated." - - match: { replaced_existing_history_id: false } - - - do: - _internal.get_desired_nodes: {} - - match: - $body: - history_id: "test" - version: 1 - nodes: - - { settings: { node: { name: "instance-000187" } }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } - - - do: - _internal.update_desired_nodes: - history_id: "test" - version: 2 - body: - nodes: - - { settings: { "node.name": "instance-000187" }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } - - { settings: { "node.name": "instance-000188" }, processors: 16.0, memory: "128gb", storage: "1tb", node_version: $es_version } - allowed_warnings: - - "[version removal] Specifying node_version in desired nodes requests is deprecated." 
- - match: { replaced_existing_history_id: false } - - - do: - _internal.get_desired_nodes: {} - - - match: { history_id: "test" } - - match: { version: 2 } - - length: { nodes: 2 } - - contains: { nodes: { settings: { node: { name: "instance-000187" } }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } } - - contains: { nodes: { settings: { node: { name: "instance-000188" } }, processors: 16.0, memory: "128gb", storage: "1tb", node_version: $es_version } } ---- "Test update move to a new history id": - skip: reason: "contains is a newly added assertion" @@ -199,46 +144,6 @@ teardown: _internal.get_desired_nodes: {} - match: { status: 404 } --- -"Test delete desired nodes with node_version generates a warning": - - skip: - features: allowed_warnings - - do: - cluster.state: {} - - - set: { master_node: master } - - - do: - nodes.info: {} - - set: { nodes.$master.version: es_version } - - - do: - _internal.update_desired_nodes: - history_id: "test" - version: 1 - body: - nodes: - - { settings: { "node.external_id": "instance-000187" }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } - allowed_warnings: - - "[version removal] Specifying node_version in desired nodes requests is deprecated." 
- - match: { replaced_existing_history_id: false } - - - do: - _internal.get_desired_nodes: {} - - match: - $body: - history_id: "test" - version: 1 - nodes: - - { settings: { node: { external_id: "instance-000187" } }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } - - - do: - _internal.delete_desired_nodes: {} - - - do: - catch: missing - _internal.get_desired_nodes: {} - - match: { status: 404 } ---- "Test update desired nodes is idempotent": - skip: reason: "contains is a newly added assertion" diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java index fb8559b19d81d..fe72a59565cf6 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java @@ -14,7 +14,6 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -22,7 +21,6 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.Processors; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ObjectParser; @@ -38,7 +36,6 @@ import java.util.Set; import java.util.TreeSet; import java.util.function.Predicate; -import java.util.regex.Pattern; import static java.lang.String.format; import static org.elasticsearch.node.Node.NODE_EXTERNAL_ID_SETTING; @@ -58,8 +55,6 @@ public final class DesiredNode implements Writeable, ToXContentObject, Comparabl private static final ParseField 
PROCESSORS_RANGE_FIELD = new ParseField("processors_range"); private static final ParseField MEMORY_FIELD = new ParseField("memory"); private static final ParseField STORAGE_FIELD = new ParseField("storage"); - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // Remove deprecated field - private static final ParseField VERSION_FIELD = new ParseField("node_version"); public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "desired_node", @@ -69,8 +64,7 @@ public final class DesiredNode implements Writeable, ToXContentObject, Comparabl (Processors) args[1], (ProcessorsRange) args[2], (ByteSizeValue) args[3], - (ByteSizeValue) args[4], - (String) args[5] + (ByteSizeValue) args[4] ) ); @@ -104,12 +98,6 @@ static void configureParser(ConstructingObjectParser parser) { STORAGE_FIELD, ObjectParser.ValueType.STRING ); - parser.declareField( - ConstructingObjectParser.optionalConstructorArg(), - (p, c) -> p.text(), - VERSION_FIELD, - ObjectParser.ValueType.STRING - ); } private final Settings settings; @@ -118,21 +106,9 @@ static void configureParser(ConstructingObjectParser parser) { private final ByteSizeValue memory; private final ByteSizeValue storage; - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // Remove deprecated version field - private final String version; private final String externalId; private final Set roles; - @Deprecated - public DesiredNode(Settings settings, ProcessorsRange processorsRange, ByteSizeValue memory, ByteSizeValue storage, String version) { - this(settings, null, processorsRange, memory, storage, version); - } - - @Deprecated - public DesiredNode(Settings settings, double processors, ByteSizeValue memory, ByteSizeValue storage, String version) { - this(settings, Processors.of(processors), null, memory, storage, version); - } - public DesiredNode(Settings settings, ProcessorsRange processorsRange, ByteSizeValue memory, ByteSizeValue storage) { this(settings, null, 
processorsRange, memory, storage); } @@ -142,17 +118,6 @@ public DesiredNode(Settings settings, double processors, ByteSizeValue memory, B } DesiredNode(Settings settings, Processors processors, ProcessorsRange processorsRange, ByteSizeValue memory, ByteSizeValue storage) { - this(settings, processors, processorsRange, memory, storage, null); - } - - DesiredNode( - Settings settings, - Processors processors, - ProcessorsRange processorsRange, - ByteSizeValue memory, - ByteSizeValue storage, - @Deprecated String version - ) { assert settings != null; assert memory != null; assert storage != null; @@ -186,7 +151,6 @@ public DesiredNode(Settings settings, double processors, ByteSizeValue memory, B this.processorsRange = processorsRange; this.memory = memory; this.storage = storage; - this.version = version; this.externalId = NODE_EXTERNAL_ID_SETTING.get(settings); this.roles = Collections.unmodifiableSortedSet(new TreeSet<>(DiscoveryNode.getRolesFromSettings(settings))); } @@ -210,19 +174,7 @@ public static DesiredNode readFrom(StreamInput in) throws IOException { } else { version = Version.readVersion(in).toString(); } - return new DesiredNode(settings, processors, processorsRange, memory, storage, version); - } - - private static final Pattern SEMANTIC_VERSION_PATTERN = Pattern.compile("^(\\d+\\.\\d+\\.\\d+)\\D?.*"); - - private static Version parseLegacyVersion(String version) { - if (version != null) { - var semanticVersionMatcher = SEMANTIC_VERSION_PATTERN.matcher(version); - if (semanticVersionMatcher.matches()) { - return Version.fromString(semanticVersionMatcher.group(1)); - } - } - return null; + return new DesiredNode(settings, processors, processorsRange, memory, storage); } @Override @@ -239,15 +191,9 @@ public void writeTo(StreamOutput out) throws IOException { memory.writeTo(out); storage.writeTo(out); if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { - out.writeOptionalString(version); + out.writeOptionalString(null); } else { - 
Version parsedVersion = parseLegacyVersion(version); - if (version == null) { - // Some node is from before we made the version field not required. If so, fill in with the current node version. - Version.writeVersion(Version.CURRENT, out); - } else { - Version.writeVersion(parsedVersion, out); - } + Version.writeVersion(Version.CURRENT, out); } } @@ -275,14 +221,6 @@ public void toInnerXContent(XContentBuilder builder, Params params) throws IOExc } builder.field(MEMORY_FIELD.getPreferredName(), memory); builder.field(STORAGE_FIELD.getPreferredName(), storage); - addDeprecatedVersionField(builder); - } - - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // Remove deprecated field from response - private void addDeprecatedVersionField(XContentBuilder builder) throws IOException { - if (version != null) { - builder.field(VERSION_FIELD.getPreferredName(), version); - } } public boolean hasMasterRole() { @@ -366,7 +304,6 @@ private boolean equalsWithoutProcessorsSpecification(DesiredNode that) { return Objects.equals(settings, that.settings) && Objects.equals(memory, that.memory) && Objects.equals(storage, that.storage) - && Objects.equals(version, that.version) && Objects.equals(externalId, that.externalId) && Objects.equals(roles, that.roles); } @@ -379,7 +316,7 @@ public boolean equalsWithProcessorsCloseTo(DesiredNode that) { @Override public int hashCode() { - return Objects.hash(settings, processors, processorsRange, memory, storage, version, externalId, roles); + return Objects.hash(settings, processors, processorsRange, memory, storage, externalId, roles); } @Override @@ -408,10 +345,6 @@ public String toString() { + '}'; } - public boolean hasVersion() { - return Strings.isNullOrBlank(version) == false; - } - public record ProcessorsRange(Processors min, @Nullable Processors max) implements Writeable, ToXContentObject { private static final ParseField MIN_FIELD = new ParseField("min"); diff --git 
a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNodeWithStatus.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNodeWithStatus.java index 7b89406be9aa0..606309adf205c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNodeWithStatus.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNodeWithStatus.java @@ -44,13 +44,12 @@ public record DesiredNodeWithStatus(DesiredNode desiredNode, Status status) (Processors) args[1], (DesiredNode.ProcessorsRange) args[2], (ByteSizeValue) args[3], - (ByteSizeValue) args[4], - (String) args[5] + (ByteSizeValue) args[4] ), // An unknown status is expected during upgrades to versions >= STATUS_TRACKING_SUPPORT_VERSION // the desired node status would be populated when a node in the newer version is elected as // master, the desired nodes status update happens in NodeJoinExecutor. - args[6] == null ? Status.PENDING : (Status) args[6] + args[5] == null ? Status.PENDING : (Status) args[5] ) ); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestUpdateDesiredNodesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestUpdateDesiredNodesAction.java index ec8bb6285bdd4..b8e1fa0c836a3 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestUpdateDesiredNodesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestUpdateDesiredNodesAction.java @@ -12,13 +12,11 @@ import org.elasticsearch.action.admin.cluster.desirednodes.UpdateDesiredNodesAction; import org.elasticsearch.action.admin.cluster.desirednodes.UpdateDesiredNodesRequest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.cluster.metadata.DesiredNode; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import 
org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xcontent.XContentParseException; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; @@ -67,16 +65,6 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli ); } - if (clusterSupportsFeature.test(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED)) { - if (updateDesiredNodesRequest.getNodes().stream().anyMatch(DesiredNode::hasVersion)) { - deprecationLogger.compatibleCritical("desired_nodes_version", VERSION_DEPRECATION_MESSAGE); - } - } else { - if (updateDesiredNodesRequest.getNodes().stream().anyMatch(n -> n.hasVersion() == false)) { - throw new XContentParseException("[node_version] field is required and must have a valid value"); - } - } - return restChannel -> client.execute( UpdateDesiredNodesAction.INSTANCE, updateDesiredNodesRequest, From ebec1a2fe2bc2b9fc40401074dbbb0dbcdc800bd Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Thu, 24 Oct 2024 18:25:38 +0200 Subject: [PATCH 382/449] Improve Logsdb docs including default values (#115205) This PR adds detailed documentation for `logsdb` mode, covering several key aspects of its default behavior and configuration options. It includes: - default settings for index sorting (`index.sort.field`, `index.sort.order`, etc.). - usage of synthetic `_source` by default. - information about specialized codecs and how users can override them. - default behavior for `ignore_malformed` and `ignore_above` settings, including precedence rules. - explanation of how fields without `doc_values` are handled and what we do if they are missing. 
--- docs/reference/data-streams/logs.asciidoc | 180 +++++++++++++++++++++- 1 file changed, 172 insertions(+), 8 deletions(-) diff --git a/docs/reference/data-streams/logs.asciidoc b/docs/reference/data-streams/logs.asciidoc index e870289bcf7be..6bb98684544a3 100644 --- a/docs/reference/data-streams/logs.asciidoc +++ b/docs/reference/data-streams/logs.asciidoc @@ -8,14 +8,6 @@ A logs data stream is a data stream type that stores log data more efficiently. In benchmarks, log data stored in a logs data stream used ~2.5 times less disk space than a regular data stream. The exact impact will vary depending on your data set. -The following features are enabled in a logs data stream: - -* <>, which omits storing the `_source` field. When the document source is requested, it is synthesized from document fields upon retrieval. - -* Index sorting. This yields a lower storage footprint. By default indices are sorted by `host.name` and `@timestamp` fields at index time. - -* More space efficient compression for fields with <> enabled. - [discrete] [[how-to-use-logsds]] === Create a logs data stream @@ -50,3 +42,175 @@ DELETE _index_template/my-index-template ---- // TEST[continued] //// + +[[logsdb-default-settings]] + +[discrete] +[[logsdb-synthtic-source]] +=== Synthetic source + +By default, `logsdb` mode uses <>, which omits storing the original `_source` +field and synthesizes it from doc values or stored fields upon document retrieval. Synthetic source comes with a few +restrictions which you can read more about in the <> section dedicated to it. + +NOTE: When dealing with multi-value fields, the `index.mapping.synthetic_source_keep` setting controls how field values +are preserved for <> reconstruction. In `logsdb`, the default value is `arrays`, +which retains both duplicate values and the order of entries but not necessarily the exact structure when it comes to +array elements or objects. Preserving duplicates and ordering could be critical for some log fields. 
This could be the +case, for instance, for DNS A records, HTTP headers, or log entries that represent sequential or repeated events. + +For more details on this setting and ways to refine or bypass it, check out <>. + +[discrete] +[[logsdb-sort-settings]] +=== Index sort settings + +The following settings are applied by default when using the `logsdb` mode for index sorting: + +* `index.sort.field`: `["host.name", "@timestamp"]` + In `logsdb` mode, indices are sorted by `host.name` and `@timestamp` fields by default. For data streams, the + `@timestamp` field is automatically injected if it is not present. + +* `index.sort.order`: `["desc", "desc"]` + The default sort order for both fields is descending (`desc`), prioritizing the latest data. + +* `index.sort.mode`: `["min", "min"]` + The default sort mode is `min`, ensuring that indices are sorted by the minimum value of multi-value fields. + +* `index.sort.missing`: `["_first", "_first"]` + Missing values are sorted to appear first (`_first`) in `logsdb` index mode. + +`logsdb` index mode allows users to override the default sort settings. For instance, users can specify their own fields +and order for sorting by modifying the `index.sort.field` and `index.sort.order`. + +When using default sort settings, the `host.name` field is automatically injected into the mappings of the +index as a `keyword` field to ensure that sorting can be applied. This guarantees that logs are efficiently sorted and +retrieved based on the `host.name` and `@timestamp` fields. + +NOTE: If `subobjects` is set to `true` (which is the default), the `host.name` field will be mapped as an object field +named `host`, containing a `name` child field of type `keyword`. On the other hand, if `subobjects` is set to `false`, +a single `host.name` field will be mapped as a `keyword` field. + +Once an index is created, the sort settings are immutable and cannot be modified. 
To apply different sort settings, +a new index must be created with the desired configuration. For data streams, this can be achieved by means of an index +rollover after updating relevant (component) templates. + +If the default sort settings are not suitable for your use case, consider modifying them. Keep in mind that sort +settings can influence indexing throughput, query latency, and may affect compression efficiency due to the way data +is organized after sorting. For more details, refer to our documentation on +<>. + +NOTE: For <>, the `@timestamp` field is automatically injected if not already present. +However, if custom sort settings are applied, the `@timestamp` field is injected into the mappings, but it is not +automatically added to the list of sort fields. + +[discrete] +[[logsdb-specialized-codecs]] +=== Specialized codecs + +`logsdb` index mode uses the `best_compression` <> by default, which applies {wikipedia}/Zstd[ZSTD] +compression to stored fields. Users are allowed to override it and switch to the `default` codec for faster compression +at the expense of a slightly larger storage footprint. + +`logsdb` index mode also adopts specialized codecs for numeric doc values that are crafted to optimize storage usage. +Users can rely on these specialized codecs being applied by default when using `logsdb` index mode. + +Doc values encoding for numeric fields in `logsdb` follows a static sequence of codecs, applying each one in the +following order: delta encoding, offset encoding, Greatest Common Divisor (GCD) encoding, and finally Frame Of Reference +(FOR) encoding. The decision to apply each encoding is based on heuristics determined by the data distribution. +For example, before applying delta encoding, the algorithm checks if the data is monotonically non-decreasing or +non-increasing. If the data fits this pattern, delta encoding is applied; otherwise, the next encoding is considered. 
+ +The encoding is specific to each Lucene segment and is also re-applied at segment merging time. The merged Lucene segment +may use a different encoding compared to the original Lucene segments, based on the characteristics of the merged data. + +The following methods are applied sequentially: + +* **Delta encoding**: + a compression method that stores the difference between consecutive values instead of the actual values. + +* **Offset encoding**: + a compression method that stores the difference from a base value rather than between consecutive values. + +* **Greatest Common Divisor (GCD) encoding**: + a compression method that finds the greatest common divisor of a set of values and stores the differences + as multiples of the GCD. + +* **Frame Of Reference (FOR) encoding**: + a compression method that determines the smallest number of bits required to encode a block of values and uses + bit-packing to fit such values into larger 64-bit blocks. + +For keyword fields, **Run Length Encoding (RLE)** is applied to the ordinals, which represent positions in the Lucene +segment-level keyword dictionary. This compression is used when multiple consecutive documents share the same keyword. + +[discrete] +[[logsdb-ignored-settings]] +=== `ignore_malformed`, `ignore_above`, `ignore_dynamic_beyond_limit` + +By default, `logsdb` index mode sets `ignore_malformed` to `true`. This setting allows documents with malformed fields +to be indexed without causing indexing failures, ensuring that log data ingestion continues smoothly even when some +fields contain invalid or improperly formatted data. + +Users can override this setting by setting `index.mapping.ignore_malformed` to `false`. However, this is not recommended +as it might result in documents with malformed fields being rejected and not indexed at all. 
+ +In `logsdb` index mode, the `index.mapping.ignore_above` setting is applied by default at the index level to ensure +efficient storage and indexing of large keyword fields. The index-level default for `ignore_above` is set to 8191 +**characters**. If using UTF-8 encoding, this results in a limit of 32764 bytes, depending on character encoding. +The mapping-level `ignore_above` setting still takes precedence. If a specific field has an `ignore_above` value +defined in its mapping, that value will override the index-level `index.mapping.ignore_above` value. This default +behavior helps to optimize indexing performance by preventing excessively large string values from being indexed, while +still allowing users to customize the limit, overriding it at the mapping level or changing the index level default +setting. + +In `logsdb` index mode, the setting `index.mapping.total_fields.ignore_dynamic_beyond_limit` is set to `true` by +default. This allows dynamically mapped fields to be added on top of statically defined fields without causing document +rejection, even after the total number of fields exceeds the limit defined by `index.mapping.total_fields.limit`. The +`index.mapping.total_fields.limit` setting specifies the maximum number of fields an index can have (static, dynamic +and runtime). When the limit is reached, new dynamically mapped fields will be ignored instead of failing the document +indexing, ensuring continued log ingestion without errors. + +NOTE: When automatically injected, `host.name` and `@timestamp` contribute to the limit of mapped fields. When +`host.name` is mapped with `subobjects: true` it consists of two fields. When +`host.name` is mapped with +`subobjects: false` it only consists of one field. 
+ +[discrete] +[[logsdb-nodocvalue-fields]] +=== Fields without doc values + +When `logsdb` index mode uses synthetic `_source`, and `doc_values` are disabled for a field in the mapping, +Elasticsearch may set the `store` setting to `true` for that field as a last resort option to ensure that the field's +data is still available for reconstructing the document’s source when retrieving it via +<>. + +For example, this happens with text fields when `store` is `false` and there is no suitable multi-field available to +reconstruct the original value in <>. + +This automatic adjustment allows synthetic source to work correctly, even when doc values are not enabled for certain +fields. + +[discrete] +[[logsdb-settings-summary]] +=== LogsDB settings summary + +The following is a summary of key settings that apply when using `logsdb` index mode in Elasticsearch: + +* **`index.mode`**: `"logsdb"` + +* **`index.mapping.synthetic_source_keep`**: `"arrays"` + +* **`index.sort.field`**: `["host.name", "@timestamp"]` + +* **`index.sort.order`**: `["desc", "desc"]` + +* **`index.sort.mode`**: `["min", "min"]` + +* **`index.sort.missing`**: `["_first", "_first"]` + +* **`index.codec`**: `"best_compression"` + +* **`index.mapping.ignore_malformed`**: `true` + +* **`index.mapping.ignore_above`**: `8191` + +* **`index.mapping.total_fields.ignore_dynamic_beyond_limit`**: `true` From 160faa2dfc8c590dcb398487b79eb51eb84f8f44 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 24 Oct 2024 09:29:34 -0700 Subject: [PATCH 383/449] Re-enable threadpool blocking in Kibana system index test (#112569) KibanaThreadPoolIT checks the Kibana system user can write (using the system read/write threadpools) even when the normal read/write threadpools are blocked. This commit re-enables a key part of the test which was disabled. 
closes #107625 --- .../kibana/KibanaThreadPoolIT.java | 21 +++++++++++-------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/modules/kibana/src/internalClusterTest/java/org/elasticsearch/kibana/KibanaThreadPoolIT.java b/modules/kibana/src/internalClusterTest/java/org/elasticsearch/kibana/KibanaThreadPoolIT.java index 61bd31fea3455..553e4696af316 100644 --- a/modules/kibana/src/internalClusterTest/java/org/elasticsearch/kibana/KibanaThreadPoolIT.java +++ b/modules/kibana/src/internalClusterTest/java/org/elasticsearch/kibana/KibanaThreadPoolIT.java @@ -12,6 +12,8 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.settings.Settings; @@ -37,6 +39,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.startsWith; /** @@ -150,15 +153,15 @@ private void assertThreadPoolsBlocked() { new Thread(() -> expectThrows(EsRejectedExecutionException.class, () -> getFuture.actionGet(SAFE_AWAIT_TIMEOUT))).start(); // intentionally commented out this test until https://github.com/elastic/elasticsearch/issues/97916 is fixed - // var e3 = expectThrows( - // SearchPhaseExecutionException.class, - // () -> client().prepareSearch(USER_INDEX) - // .setQuery(QueryBuilders.matchAllQuery()) - // // Request times out if max concurrent shard requests is set to 1 - // .setMaxConcurrentShardRequests(usually() ? 
SearchRequest.DEFAULT_MAX_CONCURRENT_SHARD_REQUESTS : randomIntBetween(2, 10)) - // .get() - // ); - // assertThat(e3.getMessage(), containsString("all shards failed")); + var e3 = expectThrows( + SearchPhaseExecutionException.class, + () -> client().prepareSearch(USER_INDEX) + .setQuery(QueryBuilders.matchAllQuery()) + // Request times out if max concurrent shard requests is set to 1 + .setMaxConcurrentShardRequests(usually() ? SearchRequest.DEFAULT_MAX_CONCURRENT_SHARD_REQUESTS : randomIntBetween(2, 10)) + .get() + ); + assertThat(e3.getMessage(), containsString("all shards failed")); } protected void runWithBlockedThreadPools(Runnable runnable) throws Exception { From 5c1a3ada8ae7a790dfd8460c76c6a341d9d42b7a Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Thu, 24 Oct 2024 19:37:02 +0300 Subject: [PATCH 384/449] Propagate root subobjects setting to downsample indexes (#115358) * Propagate root subobjects setting to downsample indexes * exclude tests from rest compat * remove subobjects propagation --- x-pack/plugin/downsample/qa/rest/build.gradle | 14 + .../downsample/DownsampleWithBasicRestIT.java | 40 ++ .../test/downsample/10_basic.yml | 466 +++++++++--------- 3 files changed, 292 insertions(+), 228 deletions(-) create mode 100644 x-pack/plugin/downsample/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleWithBasicRestIT.java diff --git a/x-pack/plugin/downsample/qa/rest/build.gradle b/x-pack/plugin/downsample/qa/rest/build.gradle index ba5ac7b0c7317..5142632a36006 100644 --- a/x-pack/plugin/downsample/qa/rest/build.gradle +++ b/x-pack/plugin/downsample/qa/rest/build.gradle @@ -32,6 +32,20 @@ tasks.named('yamlRestTest') { tasks.named('yamlRestCompatTest') { usesDefaultDistribution() } +tasks.named("yamlRestCompatTestTransform").configure ({ task -> + task.skipTest("downsample/10_basic/Downsample index with empty dimension on routing path", "Skip until pr/115358 gets backported") + 
task.skipTest("downsample/10_basic/Downsample histogram as label", "Skip until pr/115358 gets backported") + task.skipTest("downsample/10_basic/Downsample date timestamp field using strict_date_optional_time_nanos format", + "Skip until pr/115358 gets backported") + task.skipTest("downsample/10_basic/Downsample a downsampled index", "Skip until pr/115358 gets backported") + task.skipTest("downsample/10_basic/Downsample date_nanos timestamp field using custom format", "Skip until pr/115358 gets backported") + task.skipTest("downsample/10_basic/Downsample using coarse grained timestamp", "Skip until pr/115358 gets backported") + task.skipTest("downsample/10_basic/Downsample label with ignore_above", "Skip until pr/115358 gets backported") + task.skipTest("downsample/10_basic/Downsample object field", "Skip until pr/115358 gets backported") + task.skipTest("downsample/10_basic/Downsample empty and missing labels", "Skip until pr/115358 gets backported") + task.skipTest("downsample/10_basic/Downsample index", "Skip until pr/115358 gets backported") + task.skipTest("downsample/10_basic/Downsample index with empty dimension", "Skip until pr/115358 gets backported") +}) if (BuildParams.inFipsJvm){ // This test cluster is using a BASIC license and FIPS 140 mode is not supported in BASIC tasks.named("yamlRestTest").configure{enabled = false } diff --git a/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleWithBasicRestIT.java b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleWithBasicRestIT.java new file mode 100644 index 0000000000000..8f75e76315844 --- /dev/null +++ b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleWithBasicRestIT.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.downsample; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; + +public class DownsampleWithBasicRestIT extends ESClientYamlSuiteTestCase { + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.security.enabled", "false") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + public DownsampleWithBasicRestIT(final ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws Exception { + return ESClientYamlSuiteTestCase.createParameters(); + } + +} diff --git a/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml index 0bcd35cc69038..fa3560bec516e 100644 --- a/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml +++ b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml @@ -16,6 +16,7 @@ setup: start_time: 2021-04-28T00:00:00Z end_time: 2021-04-29T00:00:00Z mappings: + subobjects: false properties: "@timestamp": type: date @@ -106,6 +107,7 @@ setup: start_time: 2021-04-28T00:00:00Z end_time: 2021-04-29T00:00:00Z mappings: + subobjects: false properties: "@timestamp": type: date @@ -172,6 +174,7 @@ setup: 
start_time: 2021-04-28T00:00:00Z end_time: 2021-04-29T00:00:00Z mappings: + subobjects: false properties: "@timestamp": type: date @@ -237,6 +240,7 @@ setup: start_time: 2021-04-28T00:00:00Z end_time: 2021-04-29T00:00:00Z mappings: + subobjects: false properties: "@timestamp": type: date @@ -318,29 +322,29 @@ setup: - length: { hits.hits: 4 } - match: { hits.hits.0._source._doc_count: 2 } - - match: { hits.hits.0._source.k8s.pod.uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 } + - match: { hits.hits.0._source.k8s\.pod\.uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 } - match: { hits.hits.0._source.metricset: pod } - match: { hits.hits.0._source.@timestamp: 2021-04-28T18:00:00.000Z } - - match: { hits.hits.0._source.k8s.pod.multi-counter: 0 } - - match: { hits.hits.0._source.k8s.pod.scaled-counter: 0.00 } - - match: { hits.hits.0._source.k8s.pod.multi-gauge.min: 100 } - - match: { hits.hits.0._source.k8s.pod.multi-gauge.max: 102 } - - match: { hits.hits.0._source.k8s.pod.multi-gauge.sum: 607 } - - match: { hits.hits.0._source.k8s.pod.multi-gauge.value_count: 6 } - - match: { hits.hits.0._source.k8s.pod.scaled-gauge.min: 100.0 } - - match: { hits.hits.0._source.k8s.pod.scaled-gauge.max: 101.0 } - - match: { hits.hits.0._source.k8s.pod.scaled-gauge.sum: 201.0 } - - match: { hits.hits.0._source.k8s.pod.scaled-gauge.value_count: 2 } - - match: { hits.hits.0._source.k8s.pod.network.tx.min: 1434521831 } - - match: { hits.hits.0._source.k8s.pod.network.tx.max: 1434577921 } - - match: { hits.hits.0._source.k8s.pod.network.tx.value_count: 2 } - - match: { hits.hits.0._source.k8s.pod.ip: "10.10.55.56" } - - match: { hits.hits.0._source.k8s.pod.created_at: "2021-04-28T19:43:00.000Z" } - - match: { hits.hits.0._source.k8s.pod.number_of_containers: 1 } - - match: { hits.hits.0._source.k8s.pod.tags: ["backend", "test", "us-west2"] } - - match: { hits.hits.0._source.k8s.pod.values: [1, 1, 2] } - - is_false: hits.hits.0._source.k8s.pod.running + - match: { 
hits.hits.0._source.k8s\.pod\.multi-counter: 0 } + - match: { hits.hits.0._source.k8s\.pod\.scaled-counter: 0.00 } + - match: { hits.hits.0._source.k8s\.pod\.multi-gauge.min: 100 } + - match: { hits.hits.0._source.k8s\.pod\.multi-gauge.max: 102 } + - match: { hits.hits.0._source.k8s\.pod\.multi-gauge.sum: 607 } + - match: { hits.hits.0._source.k8s\.pod\.multi-gauge.value_count: 6 } + - match: { hits.hits.0._source.k8s\.pod\.scaled-gauge.min: 100.0 } + - match: { hits.hits.0._source.k8s\.pod\.scaled-gauge.max: 101.0 } + - match: { hits.hits.0._source.k8s\.pod\.scaled-gauge.sum: 201.0 } + - match: { hits.hits.0._source.k8s\.pod\.scaled-gauge.value_count: 2 } + - match: { hits.hits.0._source.k8s\.pod\.network\.tx.min: 1434521831 } + - match: { hits.hits.0._source.k8s\.pod\.network\.tx.max: 1434577921 } + - match: { hits.hits.0._source.k8s\.pod\.network\.tx.value_count: 2 } + - match: { hits.hits.0._source.k8s\.pod\.ip: "10.10.55.56" } + - match: { hits.hits.0._source.k8s\.pod\.created_at: "2021-04-28T19:43:00.000Z" } + - match: { hits.hits.0._source.k8s\.pod\.number_of_containers: 1 } + - match: { hits.hits.0._source.k8s\.pod\.tags: ["backend", "test", "us-west2"] } + - match: { hits.hits.0._source.k8s\.pod\.values: [1, 1, 2] } + - is_false: hits.hits.0._source.k8s\.pod\.running # Assert rollup index settings - do: @@ -362,21 +366,21 @@ setup: - match: { test-downsample.mappings.properties.@timestamp.type: date } - match: { test-downsample.mappings.properties.@timestamp.meta.fixed_interval: 1h } - match: { test-downsample.mappings.properties.@timestamp.meta.time_zone: UTC } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-gauge.type: aggregate_metric_double } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-gauge.metrics: [ "min", "max", "sum", "value_count" ] } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-gauge.default_metric: max } - - match: { 
test-downsample.mappings.properties.k8s.properties.pod.properties.multi-gauge.time_series_metric: gauge } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-counter.type: long } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-counter.time_series_metric: counter } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-counter.type: scaled_float } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-counter.scaling_factor: 100 } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-counter.time_series_metric: counter } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-gauge.type: aggregate_metric_double } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-gauge.metrics: [ "min", "max", "sum", "value_count" ] } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-gauge.default_metric: max } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-gauge.time_series_metric: gauge } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.uid.type: keyword } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.uid.time_series_dimension: true } + - match: { test-downsample.mappings.properties.k8s\.pod\.multi-gauge.type: aggregate_metric_double } + - match: { test-downsample.mappings.properties.k8s\.pod\.multi-gauge.metrics: [ "min", "max", "sum", "value_count" ] } + - match: { test-downsample.mappings.properties.k8s\.pod\.multi-gauge.default_metric: max } + - match: { test-downsample.mappings.properties.k8s\.pod\.multi-gauge.time_series_metric: gauge } + - match: { test-downsample.mappings.properties.k8s\.pod\.multi-counter.type: long } + - match: { test-downsample.mappings.properties.k8s\.pod\.multi-counter.time_series_metric: 
counter } + - match: { test-downsample.mappings.properties.k8s\.pod\.scaled-counter.type: scaled_float } + - match: { test-downsample.mappings.properties.k8s\.pod\.scaled-counter.scaling_factor: 100 } + - match: { test-downsample.mappings.properties.k8s\.pod\.scaled-counter.time_series_metric: counter } + - match: { test-downsample.mappings.properties.k8s\.pod\.scaled-gauge.type: aggregate_metric_double } + - match: { test-downsample.mappings.properties.k8s\.pod\.scaled-gauge.metrics: [ "min", "max", "sum", "value_count" ] } + - match: { test-downsample.mappings.properties.k8s\.pod\.scaled-gauge.default_metric: max } + - match: { test-downsample.mappings.properties.k8s\.pod\.scaled-gauge.time_series_metric: gauge } + - match: { test-downsample.mappings.properties.k8s\.pod\.uid.type: keyword } + - match: { test-downsample.mappings.properties.k8s\.pod\.uid.time_series_dimension: true } # Assert source index has not been deleted @@ -763,18 +767,18 @@ setup: - match: { test-downsample-2.mappings.properties.@timestamp.type: date } - match: { test-downsample-2.mappings.properties.@timestamp.meta.fixed_interval: 2h } - match: { test-downsample-2.mappings.properties.@timestamp.meta.time_zone: UTC } - - match: { test-downsample-2.mappings.properties.k8s.properties.pod.properties.multi-gauge.type: aggregate_metric_double } - - match: { test-downsample-2.mappings.properties.k8s.properties.pod.properties.multi-gauge.metrics: [ "min", "max", "sum", "value_count" ] } - - match: { test-downsample-2.mappings.properties.k8s.properties.pod.properties.multi-gauge.default_metric: max } - - match: { test-downsample-2.mappings.properties.k8s.properties.pod.properties.multi-gauge.time_series_metric: gauge } - - match: { test-downsample-2.mappings.properties.k8s.properties.pod.properties.multi-counter.type: long } - - match: { test-downsample-2.mappings.properties.k8s.properties.pod.properties.multi-counter.time_series_metric: counter } - - match: { 
test-downsample-2.mappings.properties.k8s.properties.pod.properties.uid.type: keyword } - - match: { test-downsample-2.mappings.properties.k8s.properties.pod.properties.uid.time_series_dimension: true } - - match: { test-downsample-2.mappings.properties.k8s.properties.pod.properties.network.properties.tx.type: aggregate_metric_double } - - match: { test-downsample-2.mappings.properties.k8s.properties.pod.properties.network.properties.tx.metrics: [ "min", "max", "sum", "value_count" ] } - - match: { test-downsample-2.mappings.properties.k8s.properties.pod.properties.network.properties.tx.default_metric: max } - - match: { test-downsample-2.mappings.properties.k8s.properties.pod.properties.network.properties.tx.time_series_metric: gauge } + - match: { test-downsample-2.mappings.properties.k8s\.pod\.multi-gauge.type: aggregate_metric_double } + - match: { test-downsample-2.mappings.properties.k8s\.pod\.multi-gauge.metrics: [ "min", "max", "sum", "value_count" ] } + - match: { test-downsample-2.mappings.properties.k8s\.pod\.multi-gauge.default_metric: max } + - match: { test-downsample-2.mappings.properties.k8s\.pod\.multi-gauge.time_series_metric: gauge } + - match: { test-downsample-2.mappings.properties.k8s\.pod\.multi-counter.type: long } + - match: { test-downsample-2.mappings.properties.k8s\.pod\.multi-counter.time_series_metric: counter } + - match: { test-downsample-2.mappings.properties.k8s\.pod\.uid.type: keyword } + - match: { test-downsample-2.mappings.properties.k8s\.pod\.uid.time_series_dimension: true } + - match: { test-downsample-2.mappings.properties.k8s\.pod\.network\.tx.type: aggregate_metric_double } + - match: { test-downsample-2.mappings.properties.k8s\.pod\.network\.tx.metrics: [ "min", "max", "sum", "value_count" ] } + - match: { test-downsample-2.mappings.properties.k8s\.pod\.network\.tx.default_metric: max } + - match: { test-downsample-2.mappings.properties.k8s\.pod\.network\.tx.time_series_metric: gauge } - do: search: @@ -784,29 +788,29 @@ 
setup: - length: { hits.hits: 3 } - match: { hits.hits.0._source._doc_count: 4 } - - match: { hits.hits.0._source.k8s.pod.uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 } + - match: { hits.hits.0._source.k8s\.pod\.uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 } - match: { hits.hits.0._source.metricset: pod } - match: { hits.hits.0._source.@timestamp: 2021-04-28T18:00:00.000Z } - - match: { hits.hits.0._source.k8s.pod.multi-counter: 76 } - - match: { hits.hits.0._source.k8s.pod.multi-gauge.min: 95.0 } - - match: { hits.hits.0._source.k8s.pod.multi-gauge.max: 110.0 } - - match: { hits.hits.0._source.k8s.pod.multi-gauge.sum: 1209.0 } - - match: { hits.hits.0._source.k8s.pod.multi-gauge.value_count: 12 } - - match: { hits.hits.0._source.k8s.pod.network.tx.min: 1434521831 } - - match: { hits.hits.0._source.k8s.pod.network.tx.max: 1434595272 } - - match: { hits.hits.0._source.k8s.pod.network.tx.value_count: 4 } - - match: { hits.hits.0._source.k8s.pod.ip: "10.10.55.120" } - - match: { hits.hits.0._source.k8s.pod.created_at: "2021-04-28T19:45:00.000Z" } - - match: { hits.hits.0._source.k8s.pod.number_of_containers: 1 } - - match: { hits.hits.0._source.k8s.pod.tags: [ "backend", "test", "us-west1" ] } - - match: { hits.hits.0._source.k8s.pod.values: [ 1, 2, 3 ] } - - - match: { hits.hits.1._source.k8s.pod.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } + - match: { hits.hits.0._source.k8s\.pod\.multi-counter: 76 } + - match: { hits.hits.0._source.k8s\.pod\.multi-gauge.min: 95.0 } + - match: { hits.hits.0._source.k8s\.pod\.multi-gauge.max: 110.0 } + - match: { hits.hits.0._source.k8s\.pod\.multi-gauge.sum: 1209.0 } + - match: { hits.hits.0._source.k8s\.pod\.multi-gauge.value_count: 12 } + - match: { hits.hits.0._source.k8s\.pod\.network\.tx.min: 1434521831 } + - match: { hits.hits.0._source.k8s\.pod\.network\.tx.max: 1434595272 } + - match: { hits.hits.0._source.k8s\.pod\.network\.tx.value_count: 4 } + - match: { hits.hits.0._source.k8s\.pod\.ip: "10.10.55.120" } + - match: { 
hits.hits.0._source.k8s\.pod\.created_at: "2021-04-28T19:45:00.000Z" } + - match: { hits.hits.0._source.k8s\.pod\.number_of_containers: 1 } + - match: { hits.hits.0._source.k8s\.pod\.tags: [ "backend", "test", "us-west1" ] } + - match: { hits.hits.0._source.k8s\.pod\.values: [ 1, 2, 3 ] } + + - match: { hits.hits.1._source.k8s\.pod\.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } - match: { hits.hits.1._source.metricset: pod } - match: { hits.hits.1._source.@timestamp: 2021-04-28T18:00:00.000Z } - match: { hits.hits.1._source._doc_count: 2 } - - match: { hits.hits.2._source.k8s.pod.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } + - match: { hits.hits.2._source.k8s\.pod\.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } - match: { hits.hits.2._source.metricset: pod } - match: { hits.hits.2._source.@timestamp: 2021-04-28T20:00:00.000Z } - match: { hits.hits.2._source._doc_count: 2 } @@ -890,16 +894,16 @@ setup: - match: { test-downsample-histogram.mappings.properties.@timestamp.type: date } - match: { test-downsample-histogram.mappings.properties.@timestamp.meta.fixed_interval: 1h } - match: { test-downsample-histogram.mappings.properties.@timestamp.meta.time_zone: UTC } - - match: { test-downsample-histogram.mappings.properties.k8s.properties.pod.properties.latency.type: histogram } - - match: { test-downsample-histogram.mappings.properties.k8s.properties.pod.properties.latency.time_series_metric: null } - - match: { test-downsample-histogram.mappings.properties.k8s.properties.pod.properties.empty-histogram.type: histogram } - - match: { test-downsample-histogram.mappings.properties.k8s.properties.pod.properties.empty-histogram.time_series_metric: null } - - match: { test-downsample-histogram.mappings.properties.k8s.properties.pod.properties.uid.type: keyword } - - match: { test-downsample-histogram.mappings.properties.k8s.properties.pod.properties.uid.time_series_dimension: true } - - match: { 
test-downsample-histogram.mappings.properties.k8s.properties.pod.properties.network.properties.tx.type: aggregate_metric_double } - - match: { test-downsample-histogram.mappings.properties.k8s.properties.pod.properties.network.properties.tx.metrics: [ "min", "max", "sum", "value_count" ] } - - match: { test-downsample-histogram.mappings.properties.k8s.properties.pod.properties.network.properties.tx.default_metric: max } - - match: { test-downsample-histogram.mappings.properties.k8s.properties.pod.properties.network.properties.tx.time_series_metric: gauge } + - match: { test-downsample-histogram.mappings.properties.k8s\.pod\.latency.type: histogram } + - match: { test-downsample-histogram.mappings.properties.k8s\.pod\.latency.time_series_metric: null } + - match: { test-downsample-histogram.mappings.properties.k8s\.pod\.empty-histogram.type: histogram } + - match: { test-downsample-histogram.mappings.properties.k8s\.pod\.empty-histogram.time_series_metric: null } + - match: { test-downsample-histogram.mappings.properties.k8s\.pod\.uid.type: keyword } + - match: { test-downsample-histogram.mappings.properties.k8s\.pod\.uid.time_series_dimension: true } + - match: { test-downsample-histogram.mappings.properties.k8s\.pod\.network\.tx.type: aggregate_metric_double } + - match: { test-downsample-histogram.mappings.properties.k8s\.pod\.network\.tx.metrics: [ "min", "max", "sum", "value_count" ] } + - match: { test-downsample-histogram.mappings.properties.k8s\.pod\.network\.tx.default_metric: max } + - match: { test-downsample-histogram.mappings.properties.k8s\.pod\.network\.tx.time_series_metric: gauge } - do: search: @@ -910,64 +914,64 @@ setup: - length: { hits.hits: 4 } - match: { hits.hits.0._source._doc_count: 2 } - - match: { hits.hits.0._source.k8s.pod.uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 } + - match: { hits.hits.0._source.k8s\.pod\.uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 } - match: { hits.hits.0._source.metricset: pod } - match: { 
hits.hits.0._source.@timestamp: 2021-04-28T18:00:00.000Z } - - length: { hits.hits.0._source.k8s.pod.latency.counts: 4 } - - match: { hits.hits.0._source.k8s.pod.latency.counts.0: 2 } - - match: { hits.hits.0._source.k8s.pod.latency.counts.1: 2 } - - match: { hits.hits.0._source.k8s.pod.latency.counts.2: 8 } - - match: { hits.hits.0._source.k8s.pod.latency.counts.3: 8 } - - length: { hits.hits.0._source.k8s.pod.latency.values: 4 } - - match: { hits.hits.0._source.k8s.pod.latency.values.0: 1.0 } - - match: { hits.hits.0._source.k8s.pod.latency.values.1: 10.0 } - - match: { hits.hits.0._source.k8s.pod.latency.values.2: 100.0 } - - match: { hits.hits.0._source.k8s.pod.latency.values.3: 1000.0 } + - length: { hits.hits.0._source.k8s\.pod\.latency.counts: 4 } + - match: { hits.hits.0._source.k8s\.pod\.latency.counts.0: 2 } + - match: { hits.hits.0._source.k8s\.pod\.latency.counts.1: 2 } + - match: { hits.hits.0._source.k8s\.pod\.latency.counts.2: 8 } + - match: { hits.hits.0._source.k8s\.pod\.latency.counts.3: 8 } + - length: { hits.hits.0._source.k8s\.pod\.latency.values: 4 } + - match: { hits.hits.0._source.k8s\.pod\.latency.values.0: 1.0 } + - match: { hits.hits.0._source.k8s\.pod\.latency.values.1: 10.0 } + - match: { hits.hits.0._source.k8s\.pod\.latency.values.2: 100.0 } + - match: { hits.hits.0._source.k8s\.pod\.latency.values.3: 1000.0 } - match: { hits.hits.1._source._doc_count: 1 } - - match: { hits.hits.1._source.k8s.pod.uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 } + - match: { hits.hits.1._source.k8s\.pod\.uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 } - match: { hits.hits.1._source.metricset: pod } - match: { hits.hits.1._source.@timestamp: 2021-04-28T19:00:00.000Z } - - length: { hits.hits.1._source.k8s.pod.latency.counts: 4 } - - match: { hits.hits.1._source.k8s.pod.latency.counts.0: 4 } - - match: { hits.hits.1._source.k8s.pod.latency.counts.1: 5 } - - match: { hits.hits.1._source.k8s.pod.latency.counts.2: 4 } - - match: { 
hits.hits.1._source.k8s.pod.latency.counts.3: 13 } - - length: { hits.hits.1._source.k8s.pod.latency.values: 4 } - - match: { hits.hits.1._source.k8s.pod.latency.values.0: 1.0 } - - match: { hits.hits.1._source.k8s.pod.latency.values.1: 10.0 } - - match: { hits.hits.1._source.k8s.pod.latency.values.2: 100.0 } - - match: { hits.hits.1._source.k8s.pod.latency.values.3: 1000.0 } + - length: { hits.hits.1._source.k8s\.pod\.latency.counts: 4 } + - match: { hits.hits.1._source.k8s\.pod\.latency.counts.0: 4 } + - match: { hits.hits.1._source.k8s\.pod\.latency.counts.1: 5 } + - match: { hits.hits.1._source.k8s\.pod\.latency.counts.2: 4 } + - match: { hits.hits.1._source.k8s\.pod\.latency.counts.3: 13 } + - length: { hits.hits.1._source.k8s\.pod\.latency.values: 4 } + - match: { hits.hits.1._source.k8s\.pod\.latency.values.0: 1.0 } + - match: { hits.hits.1._source.k8s\.pod\.latency.values.1: 10.0 } + - match: { hits.hits.1._source.k8s\.pod\.latency.values.2: 100.0 } + - match: { hits.hits.1._source.k8s\.pod\.latency.values.3: 1000.0 } - match: { hits.hits.2._source._doc_count: 2 } - - match: { hits.hits.2._source.k8s.pod.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } + - match: { hits.hits.2._source.k8s\.pod\.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } - match: { hits.hits.2._source.metricset: pod } - match: { hits.hits.2._source.@timestamp: 2021-04-28T18:00:00.000Z } - - length: { hits.hits.2._source.k8s.pod.latency.counts: 4 } - - match: { hits.hits.2._source.k8s.pod.latency.counts.0: 8 } - - match: { hits.hits.2._source.k8s.pod.latency.counts.1: 7 } - - match: { hits.hits.2._source.k8s.pod.latency.counts.2: 10 } - - match: { hits.hits.2._source.k8s.pod.latency.counts.3: 12 } - - length: { hits.hits.2._source.k8s.pod.latency.values: 4 } - - match: { hits.hits.2._source.k8s.pod.latency.values.0: 1.0 } - - match: { hits.hits.2._source.k8s.pod.latency.values.1: 2.0 } - - match: { hits.hits.2._source.k8s.pod.latency.values.2: 5.0 } - - match: { 
hits.hits.2._source.k8s.pod.latency.values.3: 10.0 } + - length: { hits.hits.2._source.k8s\.pod\.latency.counts: 4 } + - match: { hits.hits.2._source.k8s\.pod\.latency.counts.0: 8 } + - match: { hits.hits.2._source.k8s\.pod\.latency.counts.1: 7 } + - match: { hits.hits.2._source.k8s\.pod\.latency.counts.2: 10 } + - match: { hits.hits.2._source.k8s\.pod\.latency.counts.3: 12 } + - length: { hits.hits.2._source.k8s\.pod\.latency.values: 4 } + - match: { hits.hits.2._source.k8s\.pod\.latency.values.0: 1.0 } + - match: { hits.hits.2._source.k8s\.pod\.latency.values.1: 2.0 } + - match: { hits.hits.2._source.k8s\.pod\.latency.values.2: 5.0 } + - match: { hits.hits.2._source.k8s\.pod\.latency.values.3: 10.0 } - match: { hits.hits.3._source._doc_count: 2 } - - match: { hits.hits.3._source.k8s.pod.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } + - match: { hits.hits.3._source.k8s\.pod\.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } - match: { hits.hits.3._source.metricset: pod } - match: { hits.hits.3._source.@timestamp: 2021-04-28T19:00:00.000Z } - - length: { hits.hits.3._source.k8s.pod.latency.counts: 4 } - - match: { hits.hits.3._source.k8s.pod.latency.counts.0: 7 } - - match: { hits.hits.3._source.k8s.pod.latency.counts.1: 15 } - - match: { hits.hits.3._source.k8s.pod.latency.counts.2: 10 } - - match: { hits.hits.3._source.k8s.pod.latency.counts.3: 10 } - - length: { hits.hits.3._source.k8s.pod.latency.values: 4 } - - match: { hits.hits.3._source.k8s.pod.latency.values.0: 1.0 } - - match: { hits.hits.3._source.k8s.pod.latency.values.1: 2.0 } - - match: { hits.hits.3._source.k8s.pod.latency.values.2: 5.0 } - - match: { hits.hits.3._source.k8s.pod.latency.values.3: 10.0 } + - length: { hits.hits.3._source.k8s\.pod\.latency.counts: 4 } + - match: { hits.hits.3._source.k8s\.pod\.latency.counts.0: 7 } + - match: { hits.hits.3._source.k8s\.pod\.latency.counts.1: 15 } + - match: { hits.hits.3._source.k8s\.pod\.latency.counts.2: 10 } + - match: { 
hits.hits.3._source.k8s\.pod\.latency.counts.3: 10 } + - length: { hits.hits.3._source.k8s\.pod\.latency.values: 4 } + - match: { hits.hits.3._source.k8s\.pod\.latency.values.0: 1.0 } + - match: { hits.hits.3._source.k8s\.pod\.latency.values.1: 2.0 } + - match: { hits.hits.3._source.k8s\.pod\.latency.values.2: 5.0 } + - match: { hits.hits.3._source.k8s\.pod\.latency.values.3: 10.0 } --- "Downsample date_nanos timestamp field using custom format": @@ -988,6 +992,7 @@ setup: start_time: 2023-02-23T00:00:00Z end_time: 2023-02-24T00:00:00Z mappings: + subobjects: false properties: "@timestamp": type: date_nanos @@ -1048,19 +1053,19 @@ setup: - length: { hits.hits: 2 } - match: { hits.hits.0._source._doc_count: 3 } - - match: { hits.hits.0._source.k8s.pod.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } + - match: { hits.hits.0._source.k8s\.pod\.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } - match: { hits.hits.0._source.metricset: pod } - match: { hits.hits.0._source.@timestamp: 2023-02-23T12:00:00.000000000Z } - - match: { hits.hits.0._source.k8s.pod.value.min: 8.0 } - - match: { hits.hits.0._source.k8s.pod.value.max: 12.0 } - - match: { hits.hits.0._source.k8s.pod.value.sum: 30.0 } + - match: { hits.hits.0._source.k8s\.pod\.value.min: 8.0 } + - match: { hits.hits.0._source.k8s\.pod\.value.max: 12.0 } + - match: { hits.hits.0._source.k8s\.pod\.value.sum: 30.0 } - match: { hits.hits.1._source._doc_count: 1 } - - match: { hits.hits.1._source.k8s.pod.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } + - match: { hits.hits.1._source.k8s\.pod\.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } - match: { hits.hits.1._source.metricset: pod } - match: { hits.hits.1._source.@timestamp: 2023-02-23T13:00:00.000000000Z } - - match: { hits.hits.1._source.k8s.pod.value.min: 9.0 } - - match: { hits.hits.1._source.k8s.pod.value.max: 9.0 } - - match: { hits.hits.1._source.k8s.pod.value.sum: 9.0 } + - match: { hits.hits.1._source.k8s\.pod\.value.min: 9.0 } + - match: { 
hits.hits.1._source.k8s\.pod\.value.max: 9.0 } + - match: { hits.hits.1._source.k8s\.pod\.value.sum: 9.0 } - do: indices.get_mapping: @@ -1090,6 +1095,7 @@ setup: start_time: 2023-02-23T00:00:00Z end_time: 2023-02-24T00:00:00Z mappings: + subobjects: false properties: "@timestamp": type: date @@ -1150,19 +1156,19 @@ setup: - length: { hits.hits: 2 } - match: { hits.hits.0._source._doc_count: 3 } - - match: { hits.hits.0._source.k8s.pod.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } + - match: { hits.hits.0._source.k8s\.pod\.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } - match: { hits.hits.0._source.metricset: pod } - match: { hits.hits.0._source.@timestamp: 2023-02-23T12:00:00.000Z } - - match: { hits.hits.0._source.k8s.pod.value.min: 8.0 } - - match: { hits.hits.0._source.k8s.pod.value.max: 12.0 } - - match: { hits.hits.0._source.k8s.pod.value.sum: 30.0 } + - match: { hits.hits.0._source.k8s\.pod\.value.min: 8.0 } + - match: { hits.hits.0._source.k8s\.pod\.value.max: 12.0 } + - match: { hits.hits.0._source.k8s\.pod\.value.sum: 30.0 } - match: { hits.hits.1._source._doc_count: 1 } - - match: { hits.hits.1._source.k8s.pod.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } + - match: { hits.hits.1._source.k8s\.pod\.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } - match: { hits.hits.1._source.metricset: pod } - match: { hits.hits.1._source.@timestamp: 2023-02-23T13:00:00.000Z } - - match: { hits.hits.1._source.k8s.pod.value.min: 9.0 } - - match: { hits.hits.1._source.k8s.pod.value.max: 9.0 } - - match: { hits.hits.1._source.k8s.pod.value.sum: 9.0 } + - match: { hits.hits.1._source.k8s\.pod\.value.min: 9.0 } + - match: { hits.hits.1._source.k8s\.pod\.value.max: 9.0 } + - match: { hits.hits.1._source.k8s\.pod\.value.sum: 9.0 } - do: indices.get_mapping: @@ -1192,6 +1198,7 @@ setup: start_time: 2023-02-23T00:00:00Z end_time: 2023-02-27T00:00:00Z mappings: + subobjects: false properties: "@timestamp": type: date @@ -1251,33 +1258,33 @@ setup: - length: { hits.hits: 4 } - match: { 
hits.hits.0._source._doc_count: 1 } - - match: { hits.hits.0._source.k8s.pod.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } + - match: { hits.hits.0._source.k8s\.pod\.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } - match: { hits.hits.0._source.metricset: pod } - match: { hits.hits.0._source.@timestamp: 2023-02-23 } - - match: { hits.hits.0._source.k8s.pod.value.min: 10.0 } - - match: { hits.hits.0._source.k8s.pod.value.max: 10.0 } - - match: { hits.hits.0._source.k8s.pod.value.sum: 10.0 } + - match: { hits.hits.0._source.k8s\.pod\.value.min: 10.0 } + - match: { hits.hits.0._source.k8s\.pod\.value.max: 10.0 } + - match: { hits.hits.0._source.k8s\.pod\.value.sum: 10.0 } - match: { hits.hits.1._source._doc_count: 1 } - - match: { hits.hits.1._source.k8s.pod.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } + - match: { hits.hits.1._source.k8s\.pod\.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } - match: { hits.hits.1._source.metricset: pod } - match: { hits.hits.1._source.@timestamp: 2023-02-24 } - - match: { hits.hits.1._source.k8s.pod.value.min: 12.0 } - - match: { hits.hits.1._source.k8s.pod.value.max: 12.0 } - - match: { hits.hits.1._source.k8s.pod.value.sum: 12.0 } + - match: { hits.hits.1._source.k8s\.pod\.value.min: 12.0 } + - match: { hits.hits.1._source.k8s\.pod\.value.max: 12.0 } + - match: { hits.hits.1._source.k8s\.pod\.value.sum: 12.0 } - match: { hits.hits.2._source._doc_count: 1 } - - match: { hits.hits.2._source.k8s.pod.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } + - match: { hits.hits.2._source.k8s\.pod\.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } - match: { hits.hits.2._source.metricset: pod } - match: { hits.hits.2._source.@timestamp: 2023-02-25 } - - match: { hits.hits.2._source.k8s.pod.value.min: 8.0 } - - match: { hits.hits.2._source.k8s.pod.value.max: 8.0 } - - match: { hits.hits.2._source.k8s.pod.value.sum: 8.0 } + - match: { hits.hits.2._source.k8s\.pod\.value.min: 8.0 } + - match: { hits.hits.2._source.k8s\.pod\.value.max: 8.0 } + - match: { 
hits.hits.2._source.k8s\.pod\.value.sum: 8.0 } - match: { hits.hits.3._source._doc_count: 1 } - - match: { hits.hits.3._source.k8s.pod.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } + - match: { hits.hits.3._source.k8s\.pod\.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } - match: { hits.hits.3._source.metricset: pod } - match: { hits.hits.3._source.@timestamp: 2023-02-26 } - - match: { hits.hits.3._source.k8s.pod.value.min: 9.0 } - - match: { hits.hits.3._source.k8s.pod.value.max: 9.0 } - - match: { hits.hits.3._source.k8s.pod.value.sum: 9.0 } + - match: { hits.hits.3._source.k8s\.pod\.value.min: 9.0 } + - match: { hits.hits.3._source.k8s\.pod\.value.max: 9.0 } + - match: { hits.hits.3._source.k8s\.pod\.value.sum: 9.0 } --- "Downsample object field": @@ -1304,48 +1311,48 @@ setup: - length: { hits.hits: 4 } - match: { hits.hits.0._source._doc_count: 2 } - - match: { hits.hits.0._source.k8s.pod.uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 } + - match: { hits.hits.0._source.k8s\.pod\.uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 } - match: { hits.hits.0._source.metricset: pod } - match: { hits.hits.0._source.@timestamp: "2021-04-28T18:00:00.000Z" } - - match: { hits.hits.0._source.k8s.pod.name: "dog" } - - match: { hits.hits.0._source.k8s.pod.value.min: 9.0 } - - match: { hits.hits.0._source.k8s.pod.value.max: 16.0 } - - match: { hits.hits.0._source.k8s.pod.value.sum: 25.0 } - - match: { hits.hits.0._source.k8s.pod.agent.id: "second" } - - match: { hits.hits.0._source.k8s.pod.agent.version: "2.1.7" } + - match: { hits.hits.0._source.k8s\.pod\.name: "dog" } + - match: { hits.hits.0._source.k8s\.pod\.value.min: 9.0 } + - match: { hits.hits.0._source.k8s\.pod\.value.max: 16.0 } + - match: { hits.hits.0._source.k8s\.pod\.value.sum: 25.0 } + - match: { hits.hits.0._source.k8s\.pod\.agent\.id: "second" } + - match: { hits.hits.0._source.k8s\.pod\.agent\.version: "2.1.7" } - match: { hits.hits.1._source._doc_count: 2 } - - match: { hits.hits.1._source.k8s.pod.uid: 
df3145b3-0563-4d3b-a0f7-897eb2876ea9 } + - match: { hits.hits.1._source.k8s\.pod\.uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 } - match: { hits.hits.1._source.metricset: pod } - match: { hits.hits.1._source.@timestamp: "2021-04-28T19:00:00.000Z" } - - match: { hits.hits.1._source.k8s.pod.name: "dog" } - - match: { hits.hits.1._source.k8s.pod.value.min: 17.0 } - - match: { hits.hits.1._source.k8s.pod.value.max: 25.0 } - - match: { hits.hits.1._source.k8s.pod.value.sum: 42.0 } - - match: { hits.hits.1._source.k8s.pod.agent.id: "second" } - - match: { hits.hits.1._source.k8s.pod.agent.version: "2.1.7" } + - match: { hits.hits.1._source.k8s\.pod\.name: "dog" } + - match: { hits.hits.1._source.k8s\.pod\.value.min: 17.0 } + - match: { hits.hits.1._source.k8s\.pod\.value.max: 25.0 } + - match: { hits.hits.1._source.k8s\.pod\.value.sum: 42.0 } + - match: { hits.hits.1._source.k8s\.pod\.agent\.id: "second" } + - match: { hits.hits.1._source.k8s\.pod\.agent\.version: "2.1.7" } - match: { hits.hits.2._source._doc_count: 2 } - - match: { hits.hits.2._source.k8s.pod.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } + - match: { hits.hits.2._source.k8s\.pod\.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } - match: { hits.hits.2._source.metricset: pod } - match: { hits.hits.2._source.@timestamp: "2021-04-28T18:00:00.000Z" } - - match: { hits.hits.2._source.k8s.pod.name: "cat" } - - match: { hits.hits.2._source.k8s.pod.value.min: 10.0 } - - match: { hits.hits.2._source.k8s.pod.value.max: 20.0 } - - match: { hits.hits.2._source.k8s.pod.value.sum: 30.0 } - - match: { hits.hits.2._source.k8s.pod.agent.id: "first" } - - match: { hits.hits.2._source.k8s.pod.agent.version: "2.0.4" } + - match: { hits.hits.2._source.k8s\.pod\.name: "cat" } + - match: { hits.hits.2._source.k8s\.pod\.value.min: 10.0 } + - match: { hits.hits.2._source.k8s\.pod\.value.max: 20.0 } + - match: { hits.hits.2._source.k8s\.pod\.value.sum: 30.0 } + - match: { hits.hits.2._source.k8s\.pod\.agent\.id: "first" } + - match: { 
hits.hits.2._source.k8s\.pod\.agent\.version: "2.0.4" } - match: { hits.hits.3._source._doc_count: 2 } - - match: { hits.hits.3._source.k8s.pod.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } + - match: { hits.hits.3._source.k8s\.pod\.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } - match: { hits.hits.3._source.metricset: pod } - match: { hits.hits.3._source.@timestamp: "2021-04-28T20:00:00.000Z" } - - match: { hits.hits.3._source.k8s.pod.name: "cat" } - - match: { hits.hits.3._source.k8s.pod.value.min: 12.0 } - - match: { hits.hits.3._source.k8s.pod.value.max: 15.0 } - - match: { hits.hits.3._source.k8s.pod.value.sum: 27.0 } - - match: { hits.hits.3._source.k8s.pod.agent.id: "first" } - - match: { hits.hits.3._source.k8s.pod.agent.version: "2.0.4" } + - match: { hits.hits.3._source.k8s\.pod\.name: "cat" } + - match: { hits.hits.3._source.k8s\.pod\.value.min: 12.0 } + - match: { hits.hits.3._source.k8s\.pod\.value.max: 15.0 } + - match: { hits.hits.3._source.k8s\.pod\.value.sum: 27.0 } + - match: { hits.hits.3._source.k8s\.pod\.agent\.id: "first" } + - match: { hits.hits.3._source.k8s\.pod\.agent\.version: "2.0.4" } --- "Downsample empty and missing labels": @@ -1372,40 +1379,40 @@ setup: - length: { hits.hits: 3 } - match: { hits.hits.2._source._doc_count: 4 } - - match: { hits.hits.2._source.k8s.pod.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } + - match: { hits.hits.2._source.k8s\.pod\.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } - match: { hits.hits.2._source.metricset: pod } - match: { hits.hits.2._source.@timestamp: "2021-04-28T18:00:00.000Z" } - - match: { hits.hits.2._source.k8s.pod.name: "cat" } - - match: { hits.hits.2._source.k8s.pod.value.min: 10.0 } - - match: { hits.hits.2._source.k8s.pod.value.max: 40.0 } - - match: { hits.hits.2._source.k8s.pod.value.sum: 100.0 } - - match: { hits.hits.2._source.k8s.pod.value.value_count: 4 } - - match: { hits.hits.2._source.k8s.pod.label: "abc" } - - match: { hits.hits.2._source.k8s.pod.unmapped: "abc" } + - match: { 
hits.hits.2._source.k8s\.pod\.name: "cat" } + - match: { hits.hits.2._source.k8s\.pod\.value.min: 10.0 } + - match: { hits.hits.2._source.k8s\.pod\.value.max: 40.0 } + - match: { hits.hits.2._source.k8s\.pod\.value.sum: 100.0 } + - match: { hits.hits.2._source.k8s\.pod\.value.value_count: 4 } + - match: { hits.hits.2._source.k8s\.pod\.label: "abc" } + - match: { hits.hits.2._source.k8s\.pod\.unmapped: "abc" } - match: { hits.hits.1._source._doc_count: 4 } - - match: { hits.hits.1._source.k8s.pod.uid: 947e4ced-1786-4e53-9e0c-5c447e9597ab } + - match: { hits.hits.1._source.k8s\.pod\.uid: 947e4ced-1786-4e53-9e0c-5c447e9597ab } - match: { hits.hits.1._source.metricset: pod } - match: { hits.hits.1._source.@timestamp: "2021-04-28T18:00:00.000Z" } - - match: { hits.hits.1._source.k8s.pod.name: "cat" } - - match: { hits.hits.1._source.k8s.pod.value.min: 10.0 } - - match: { hits.hits.1._source.k8s.pod.value.max: 40.0 } - - match: { hits.hits.1._source.k8s.pod.value.sum: 100.0 } - - match: { hits.hits.1._source.k8s.pod.value.value_count: 4 } - - match: { hits.hits.1._source.k8s.pod.label: null } - - match: { hits.hits.1._source.k8s.pod.unmapped: null } + - match: { hits.hits.1._source.k8s\.pod\.name: "cat" } + - match: { hits.hits.1._source.k8s\.pod\.value.min: 10.0 } + - match: { hits.hits.1._source.k8s\.pod\.value.max: 40.0 } + - match: { hits.hits.1._source.k8s\.pod\.value.sum: 100.0 } + - match: { hits.hits.1._source.k8s\.pod\.value.value_count: 4 } + - match: { hits.hits.1._source.k8s\.pod\.label: null } + - match: { hits.hits.1._source.k8s\.pod\.unmapped: null } - match: { hits.hits.0._source._doc_count: 4 } - - match: { hits.hits.0._source.k8s.pod.uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 } + - match: { hits.hits.0._source.k8s\.pod\.uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 } - match: { hits.hits.0._source.metricset: pod } - match: { hits.hits.0._source.@timestamp: "2021-04-28T18:00:00.000Z" } - - match: { hits.hits.0._source.k8s.pod.name: "dog" } - - match: { 
hits.hits.0._source.k8s.pod.value.min: 10.0 } - - match: { hits.hits.0._source.k8s.pod.value.max: 40.0 } - - match: { hits.hits.0._source.k8s.pod.value.sum: 100.0 } - - match: { hits.hits.0._source.k8s.pod.value.value_count: 4 } - - match: { hits.hits.0._source.k8s.pod.label: "xyz" } - - match: { hits.hits.0._source.k8s.pod.unmapped: "xyz" } + - match: { hits.hits.0._source.k8s\.pod\.name: "dog" } + - match: { hits.hits.0._source.k8s\.pod\.value.min: 10.0 } + - match: { hits.hits.0._source.k8s\.pod\.value.max: 40.0 } + - match: { hits.hits.0._source.k8s\.pod\.value.sum: 100.0 } + - match: { hits.hits.0._source.k8s\.pod\.value.value_count: 4 } + - match: { hits.hits.0._source.k8s\.pod\.label: "xyz" } + - match: { hits.hits.0._source.k8s\.pod\.unmapped: "xyz" } --- @@ -1427,6 +1434,7 @@ setup: start_time: 2021-04-28T00:00:00Z end_time: 2021-04-29T00:00:00Z mappings: + subobjects: false properties: "@timestamp": type: date @@ -1495,45 +1503,45 @@ setup: - match: { hits.hits.0._source._doc_count: 2 } - match: { hits.hits.0._source.metricset: pod } - - match: { hits.hits.0._source.k8s.pod.name: dog } - - match: { hits.hits.0._source.k8s.pod.value: 20 } - - match: { hits.hits.0._source.k8s.pod.uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 } - - match: { hits.hits.0._source.k8s.pod.label: foo } + - match: { hits.hits.0._source.k8s\.pod\.name: dog } + - match: { hits.hits.0._source.k8s\.pod\.value: 20 } + - match: { hits.hits.0._source.k8s\.pod\.uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 } + - match: { hits.hits.0._source.k8s\.pod\.label: foo } - match: { hits.hits.0._source.@timestamp: 2021-04-28T18:00:00.000Z } - match: { hits.hits.1._source._doc_count: 2 } - match: { hits.hits.1._source.metricset: pod } - - match: { hits.hits.1._source.k8s.pod.name: fox } - - match: { hits.hits.1._source.k8s.pod.value: 20 } - - match: { hits.hits.1._source.k8s.pod.uid: 7393ef8e-489c-11ee-be56-0242ac120002 } - - match: { hits.hits.1._source.k8s.pod.label: bar } + - match: { 
hits.hits.1._source.k8s\.pod\.name: fox } + - match: { hits.hits.1._source.k8s\.pod\.value: 20 } + - match: { hits.hits.1._source.k8s\.pod\.uid: 7393ef8e-489c-11ee-be56-0242ac120002 } + - match: { hits.hits.1._source.k8s\.pod\.label: bar } - match: { hits.hits.1._source.@timestamp: 2021-04-28T18:00:00.000Z } - match: { hits.hits.2._source._doc_count: 2 } - match: { hits.hits.2._source.metricset: pod } - - match: { hits.hits.2._source.k8s.pod.name: cat } - - match: { hits.hits.2._source.k8s.pod.value: 20 } - - match: { hits.hits.2._source.k8s.pod.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } + - match: { hits.hits.2._source.k8s\.pod\.name: cat } + - match: { hits.hits.2._source.k8s\.pod\.value: 20 } + - match: { hits.hits.2._source.k8s\.pod\.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } # NOTE: when downsampling a label field we propagate the last (most-recent timestamp-wise) non-null value, # ignoring/skipping null values. Here the last document has a value that hits ignore_above ("foofoo") and, # as a result, we propagate the value of the previous document ("foo") - - match: { hits.hits.2._source.k8s.pod.label: foo } + - match: { hits.hits.2._source.k8s\.pod\.label: foo } - match: { hits.hits.2._source.@timestamp: 2021-04-28T18:00:00.000Z } - match: { hits.hits.3._source._doc_count: 2 } - match: { hits.hits.3._source.metricset: pod } - - match: { hits.hits.3._source.k8s.pod.name: cow } - - match: { hits.hits.3._source.k8s.pod.value: 20 } - - match: { hits.hits.3._source.k8s.pod.uid: a81ef23a-489c-11ee-be56-0242ac120005 } - - match: { hits.hits.3._source.k8s.pod.label: null } + - match: { hits.hits.3._source.k8s\.pod\.name: cow } + - match: { hits.hits.3._source.k8s\.pod\.value: 20 } + - match: { hits.hits.3._source.k8s\.pod\.uid: a81ef23a-489c-11ee-be56-0242ac120005 } + - match: { hits.hits.3._source.k8s\.pod\.label: null } - match: { hits.hits.3._source.@timestamp: 2021-04-28T18:00:00.000Z } - do: indices.get_mapping: index: test-downsample-label-ignore-above - - 
match: { test-downsample-label-ignore-above.mappings.properties.k8s.properties.pod.properties.label.type: keyword } - - match: { test-downsample-label-ignore-above.mappings.properties.k8s.properties.pod.properties.label.ignore_above: 3 } + - match: { test-downsample-label-ignore-above.mappings.properties.k8s\.pod\.label.type: keyword } + - match: { test-downsample-label-ignore-above.mappings.properties.k8s\.pod\.label.ignore_above: 3 } --- "Downsample index with empty dimension": @@ -1555,6 +1563,7 @@ setup: start_time: 2021-04-28T00:00:00Z end_time: 2021-04-29T00:00:00Z mappings: + subobjects: false properties: "@timestamp": type: date @@ -1612,11 +1621,11 @@ setup: - length: { hits.hits: 2 } - match: { hits.hits.0._source._doc_count: 3 } - - match: { hits.hits.0._source.k8s.pod.name: cat } - - match: { hits.hits.0._source.k8s.pod.empty: null } + - match: { hits.hits.0._source.k8s\.pod\.name: cat } + - match: { hits.hits.0._source.k8s\.pod\.empty: null } - match: { hits.hits.1._source._doc_count: 1 } - - match: { hits.hits.1._source.k8s.pod.name: cat } - - match: { hits.hits.1._source.k8s.pod.empty: "" } + - match: { hits.hits.1._source.k8s\.pod\.name: cat } + - match: { hits.hits.1._source.k8s\.pod\.empty: "" } --- "Downsample index with empty dimension on routing path": @@ -1638,6 +1647,7 @@ setup: start_time: 2021-04-28T00:00:00Z end_time: 2021-04-29T00:00:00Z mappings: + subobjects: false properties: "@timestamp": type: date @@ -1695,8 +1705,8 @@ setup: - length: { hits.hits: 2 } - match: { hits.hits.0._source._doc_count: 3 } - - match: { hits.hits.0._source.k8s.pod.name: cat } - - match: { hits.hits.0._source.k8s.pod.empty: null } + - match: { hits.hits.0._source.k8s\.pod\.name: cat } + - match: { hits.hits.0._source.k8s\.pod\.empty: null } - match: { hits.hits.1._source._doc_count: 1 } - - match: { hits.hits.1._source.k8s.pod.name: cat } - - match: { hits.hits.1._source.k8s.pod.empty: "" } + - match: { hits.hits.1._source.k8s\.pod\.name: cat } + - match: { 
hits.hits.1._source.k8s\.pod\.empty: "" } From 97ed0a93bb75d0f920c976527f4f5fc0b6065beb Mon Sep 17 00:00:00 2001 From: shainaraskas <58563081+shainaraskas@users.noreply.github.com> Date: Thu, 24 Oct 2024 13:26:15 -0400 Subject: [PATCH 385/449] Make a minor change to trigger release note process (#113975) * changelog entry --- docs/changelog/113975.yaml | 19 +++++++++++++++++++ docs/reference/mapping/params/format.asciidoc | 4 ++-- 2 files changed, 21 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/113975.yaml diff --git a/docs/changelog/113975.yaml b/docs/changelog/113975.yaml new file mode 100644 index 0000000000000..632ba038271bb --- /dev/null +++ b/docs/changelog/113975.yaml @@ -0,0 +1,19 @@ +pr: 113975 +summary: JDK locale database change +area: Mapping +type: breaking +issues: [] +breaking: + title: JDK locale database change + area: Mapping + details: | + {es} 8.16 changes the version of the JDK that is included from version 22 to version 23. This changes the locale database that is used by Elasticsearch from the COMPAT database to the CLDR database. This change can cause significant differences to the textual date formats accepted by Elasticsearch, and to calculated week-dates. + + If you run {es} 8.16 on JDK version 22 or below, it will use the COMPAT locale database to match the behavior of 8.15. However, starting with {es} 9.0, {es} will use the CLDR database regardless of JDK version it is run on. + impact: | + This affects you if you use custom date formats using textual or week-date field specifiers. If you use date fields or calculated week-dates that change between the COMPAT and CLDR databases, then this change will cause Elasticsearch to reject previously valid date fields as invalid data. You might need to modify your ingest or output integration code to account for the differences between these two JDK versions. 
+ + Starting in version 8.15.2, Elasticsearch will log deprecation warnings if you are using date format specifiers that might change on upgrading to JDK 23. These warnings are visible in Kibana. + + For detailed guidance, refer to <> and the https://ela.st/jdk-23-locales[Elastic blog]. + notable: true diff --git a/docs/reference/mapping/params/format.asciidoc b/docs/reference/mapping/params/format.asciidoc index 943e8fb879ff3..6c82b04eb5fe5 100644 --- a/docs/reference/mapping/params/format.asciidoc +++ b/docs/reference/mapping/params/format.asciidoc @@ -34,13 +34,13 @@ down to the nearest day. Completely customizable date formats are supported. The syntax for these is explained in https://docs.oracle.com/en/java/javase/21/docs/api/java.base/java/time/format/DateTimeFormatter.html[DateTimeFormatter docs]. -Note that whilst the built-in formats for week dates use the ISO definition of weekyears, +Note that while the built-in formats for week dates use the ISO definition of weekyears, custom formatters using the `Y`, `W`, or `w` field specifiers use the JDK locale definition of weekyears. This can result in different values between the built-in formats and custom formats for week dates. 
[[built-in-date-formats]] -==== Built In Formats +==== Built-in formats Most of the below formats have a `strict` companion format, which means that year, month and day parts of the month must use respectively 4, 2 and 2 digits From e951984831cc499f5f13efee0d6283ee8957f295 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 24 Oct 2024 11:40:59 -0700 Subject: [PATCH 386/449] Reenable CacheFileTests (#115582) The test issue was fixed by #110807 closes #110801 --- muted-tests.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index ab5d686a041c1..827a604cd6a19 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -23,9 +23,6 @@ tests: - class: org.elasticsearch.xpack.security.authz.store.NativePrivilegeStoreCacheTests method: testPopulationOfCacheWhenLoadingPrivilegesForAllApplications issue: https://github.com/elastic/elasticsearch/issues/110789 -- class: org.elasticsearch.xpack.searchablesnapshots.cache.common.CacheFileTests - method: testCacheFileCreatedAsSparseFile - issue: https://github.com/elastic/elasticsearch/issues/110801 - class: org.elasticsearch.nativeaccess.VectorSystemPropertyTests method: testSystemPropertyDisabled issue: https://github.com/elastic/elasticsearch/issues/110949 From ad9c5a0a0640f62f763f63682f7e321c4d68ab41 Mon Sep 17 00:00:00 2001 From: Pawan Kartik Date: Thu, 24 Oct 2024 20:15:17 +0100 Subject: [PATCH 387/449] Correctly update search status for a nonexistent local index (#115138) * fix: correctly update search status for a nonexistent local index * Check for cluster existence before updation * Remove unnecessary `println` * Address review comment: add an explanatory code comment * Further clarify code comment --- .../search/ccs/CrossClusterSearchIT.java | 64 +++++++++++++++++++ .../action/search/TransportSearchAction.java | 23 +++++++ 2 files changed, 87 insertions(+) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java index 5233a0cd564ef..5984e1acc89af 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java @@ -755,6 +755,70 @@ public void testNegativeRemoteIndexNameThrows() { assertNotNull(ee.getCause()); } + public void testClusterDetailsWhenLocalClusterHasNoMatchingIndex() throws Exception { + Map testClusterInfo = setupTwoClusters(); + String remoteIndex = (String) testClusterInfo.get("remote.index"); + int remoteNumShards = (Integer) testClusterInfo.get("remote.num_shards"); + + SearchRequest searchRequest = new SearchRequest("nomatch*", REMOTE_CLUSTER + ":" + remoteIndex); + if (randomBoolean()) { + searchRequest = searchRequest.scroll(TimeValue.timeValueMinutes(1)); + } + + searchRequest.allowPartialSearchResults(false); + if (randomBoolean()) { + searchRequest.setBatchedReduceSize(randomIntBetween(3, 20)); + } + + boolean minimizeRoundtrips = false; + searchRequest.setCcsMinimizeRoundtrips(minimizeRoundtrips); + + boolean dfs = randomBoolean(); + if (dfs) { + searchRequest.searchType(SearchType.DFS_QUERY_THEN_FETCH); + } + + if (randomBoolean()) { + searchRequest.setPreFilterShardSize(1); + } + + searchRequest.source(new SearchSourceBuilder().query(new MatchAllQueryBuilder()).size(10)); + assertResponse(client(LOCAL_CLUSTER).search(searchRequest), response -> { + assertNotNull(response); + + Clusters clusters = response.getClusters(); + assertFalse("search cluster results should BE successful", clusters.hasPartialResults()); + assertThat(clusters.getTotal(), equalTo(2)); + assertThat(clusters.getClusterStateCount(Cluster.Status.SUCCESSFUL), equalTo(2)); + assertThat(clusters.getClusterStateCount(Cluster.Status.SKIPPED), equalTo(0)); + assertThat(clusters.getClusterStateCount(Cluster.Status.RUNNING), equalTo(0)); + 
assertThat(clusters.getClusterStateCount(Cluster.Status.PARTIAL), equalTo(0)); + assertThat(clusters.getClusterStateCount(Cluster.Status.FAILED), equalTo(0)); + + Cluster localClusterSearchInfo = clusters.getCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); + assertNotNull(localClusterSearchInfo); + assertThat(localClusterSearchInfo.getStatus(), equalTo(Cluster.Status.SUCCESSFUL)); + assertThat(localClusterSearchInfo.getIndexExpression(), equalTo("nomatch*")); + assertThat(localClusterSearchInfo.getTotalShards(), equalTo(0)); + assertThat(localClusterSearchInfo.getSuccessfulShards(), equalTo(0)); + assertThat(localClusterSearchInfo.getSkippedShards(), equalTo(0)); + assertThat(localClusterSearchInfo.getFailedShards(), equalTo(0)); + assertThat(localClusterSearchInfo.getFailures().size(), equalTo(0)); + assertThat(localClusterSearchInfo.getTook().millis(), equalTo(0L)); + + Cluster remoteClusterSearchInfo = clusters.getCluster(REMOTE_CLUSTER); + assertNotNull(remoteClusterSearchInfo); + assertThat(remoteClusterSearchInfo.getStatus(), equalTo(Cluster.Status.SUCCESSFUL)); + assertThat(remoteClusterSearchInfo.getIndexExpression(), equalTo(remoteIndex)); + assertThat(remoteClusterSearchInfo.getTotalShards(), equalTo(remoteNumShards)); + assertThat(remoteClusterSearchInfo.getSuccessfulShards(), equalTo(remoteNumShards)); + assertThat(remoteClusterSearchInfo.getSkippedShards(), equalTo(0)); + assertThat(remoteClusterSearchInfo.getFailedShards(), equalTo(0)); + assertThat(remoteClusterSearchInfo.getFailures().size(), equalTo(0)); + assertThat(remoteClusterSearchInfo.getTook().millis(), greaterThan(0L)); + }); + } + private static void assertOneFailedShard(Cluster cluster, int totalShards) { assertNotNull(cluster); assertThat(cluster.getStatus(), equalTo(Cluster.Status.PARTIAL)); diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 
1645a378446a4..302c3e243a1f6 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -1247,6 +1247,29 @@ private void executeSearch( indicesAndAliases, concreteLocalIndices ); + + // localShardIterators is empty since there are no matching indices. In such cases, + // we update the local cluster's status from RUNNING to SUCCESSFUL right away. Before + // we attempt to do that, we must ensure that the local cluster was specified in the user's + // search request. This is done by trying to fetch the local cluster via getCluster() and + // checking for a non-null return value. If the local cluster was never specified, its status + // update can be skipped. + if (localShardIterators.isEmpty() + && clusters != SearchResponse.Clusters.EMPTY + && clusters.getCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY) != null) { + clusters.swapCluster( + RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, + (alias, v) -> new SearchResponse.Cluster.Builder(v).setStatus(SearchResponse.Cluster.Status.SUCCESSFUL) + .setTotalShards(0) + .setSuccessfulShards(0) + .setSkippedShards(0) + .setFailedShards(0) + .setFailures(Collections.emptyList()) + .setTook(TimeValue.timeValueMillis(0)) + .setTimedOut(false) + .build() + ); + } } final GroupShardsIterator shardIterators = mergeShardsIterators(localShardIterators, remoteShardIterators); From 79cfcec065311165f7d491d164e99fed6c5cbeb9 Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Thu, 24 Oct 2024 21:26:02 +0200 Subject: [PATCH 388/449] Clarify the null check for retention leases (#114979) `MetadataStateFormat.FORMAT.loadLatestState` can actually return null when the state directory hasn't been initialized yet, so we have to keep the null check when loading retention leases during the initialization of the engine. 
See #39359 --- .../java/org/elasticsearch/gateway/MetadataStateFormat.java | 2 ++ .../org/elasticsearch/index/seqno/ReplicationTracker.java | 5 ++--- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/gateway/MetadataStateFormat.java b/server/src/main/java/org/elasticsearch/gateway/MetadataStateFormat.java index 30b8d72b83f4c..3e68ec5243f5f 100644 --- a/server/src/main/java/org/elasticsearch/gateway/MetadataStateFormat.java +++ b/server/src/main/java/org/elasticsearch/gateway/MetadataStateFormat.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.lucene.store.InputStreamIndexInput; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; import org.elasticsearch.transport.Transports; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -485,6 +486,7 @@ public Tuple loadLatestStateWithGeneration(Logger logger, NamedXContent * @param dataLocations the data-locations to try. * @return the latest state or null if no state was found. */ + @Nullable public T loadLatestState(Logger logger, NamedXContentRegistry namedXContentRegistry, Path... 
dataLocations) throws IOException { return loadLatestStateWithGeneration(logger, namedXContentRegistry, dataLocations).v1(); } diff --git a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java index e67e878fd3827..f1e3ac270d959 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java @@ -22,7 +22,6 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.gateway.WriteStateException; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersions; @@ -471,9 +470,9 @@ public RetentionLeases loadRetentionLeases(final Path path) throws IOException { return emptyIfNull(retentionLeases); } - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_INDEXING) private static RetentionLeases emptyIfNull(RetentionLeases retentionLeases) { - // we expect never to see a null in 8.x, so adjust this to throw an exception from v9 onwards. + // `MetadataStateFormat.FORMAT.loadLatestState` can actually return null when the state directory + // on a node hasn't been initialized yet return retentionLeases == null ? RetentionLeases.EMPTY : retentionLeases; } From b4edc3ddab0ea910582c0dd0091ed5b147048280 Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Thu, 24 Oct 2024 21:26:23 +0200 Subject: [PATCH 389/449] Remove loading on-disk cluster metadata from the manifest file (#114698) Since metadata storage was moved to Lucene in #50907 (7.16.0), we shouldn't encounter any on-disk global metadata files, so we can remove support for loading them. 
--- .../gateway/GatewayIndexStateIT.java | 60 -------- .../gateway/GatewayMetaState.java | 13 -- .../gateway/MetaStateService.java | 119 +--------------- .../java/org/elasticsearch/node/Node.java | 1 - .../gateway/MetaStateServiceTests.java | 132 ------------------ .../gateway/MockGatewayMetaState.java | 8 -- 6 files changed, 1 insertion(+), 332 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/gateway/GatewayIndexStateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/gateway/GatewayIndexStateIT.java index 00bd350fe2b84..cdd5a52e048bd 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/gateway/GatewayIndexStateIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/gateway/GatewayIndexStateIT.java @@ -17,7 +17,6 @@ import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.coordination.CoordinationMetadata; import org.elasticsearch.cluster.metadata.IndexGraveyard; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.MappingMetadata; @@ -27,14 +26,9 @@ import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.UnassignedInfo; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.core.IOUtils; -import org.elasticsearch.env.BuildVersion; import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.env.NodeMetadata; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.indices.IndexClosedException; @@ -46,13 +40,8 @@ import org.elasticsearch.xcontent.XContentFactory; import 
java.io.IOException; -import java.nio.file.Path; import java.util.List; -import java.util.Map; import java.util.concurrent.TimeUnit; -import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.Stream; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; @@ -60,7 +49,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.notNullValue; @@ -545,52 +533,4 @@ public void testArchiveBrokenClusterSettings() throws Exception { assertHitCount(prepareSearch().setQuery(matchAllQuery()), 1L); } - public void testHalfDeletedIndexImport() throws Exception { - // It's possible for a 6.x node to add a tombstone for an index but not actually delete the index metadata from disk since that - // deletion is slightly deferred and may race against the node being shut down; if you upgrade to 7.x when in this state then the - // node won't start. 
- - final String nodeName = internalCluster().startNode(); - createIndex("test", 1, 0); - ensureGreen("test"); - - final Metadata metadata = internalCluster().getInstance(ClusterService.class).state().metadata(); - final Path[] paths = internalCluster().getInstance(NodeEnvironment.class).nodeDataPaths(); - final String nodeId = clusterAdmin().prepareNodesInfo(nodeName).clear().get().getNodes().get(0).getNode().getId(); - - writeBrokenMeta(nodeEnvironment -> { - for (final Path path : paths) { - IOUtils.rm(path.resolve(PersistedClusterStateService.METADATA_DIRECTORY_NAME)); - } - MetaStateWriterUtils.writeGlobalState( - nodeEnvironment, - "test", - Metadata.builder(metadata) - // we remove the manifest file, resetting the term and making this look like an upgrade from 6.x, so must also reset the - // term in the coordination metadata - .coordinationMetadata(CoordinationMetadata.builder(metadata.coordinationMetadata()).term(0L).build()) - // add a tombstone but do not delete the index metadata from disk - .putCustom(IndexGraveyard.TYPE, IndexGraveyard.builder().addTombstone(metadata.index("test").getIndex()).build()) - .build() - ); - NodeMetadata.FORMAT.writeAndCleanup(new NodeMetadata(nodeId, BuildVersion.current(), metadata.oldestIndexVersion()), paths); - }); - - ensureGreen(); - - assertBusy(() -> assertThat(internalCluster().getInstance(NodeEnvironment.class).availableIndexFolders(), empty())); - } - - private void writeBrokenMeta(CheckedConsumer writer) throws Exception { - Map nodeEnvironments = Stream.of(internalCluster().getNodeNames()) - .collect(Collectors.toMap(Function.identity(), nodeName -> internalCluster().getInstance(NodeEnvironment.class, nodeName))); - internalCluster().fullRestart(new RestartCallback() { - @Override - public Settings onNodeStopped(String nodeName) throws Exception { - final NodeEnvironment nodeEnvironment = nodeEnvironments.get(nodeName); - writer.accept(nodeEnvironment); - return super.onNodeStopped(nodeName); - } - }); - } } 
diff --git a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java index c863a5bac973a..a7baca59e1857 100644 --- a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java +++ b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java @@ -23,7 +23,6 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexMetadataVerifier; import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; -import org.elasticsearch.cluster.metadata.Manifest; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; @@ -33,8 +32,6 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor; import org.elasticsearch.core.IOUtils; -import org.elasticsearch.core.Tuple; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.env.BuildVersion; import org.elasticsearch.env.NodeMetadata; import org.elasticsearch.index.IndexVersions; @@ -185,16 +182,6 @@ private PersistedState createOnDiskPersistedState( long lastAcceptedVersion = onDiskState.lastAcceptedVersion; long currentTerm = onDiskState.currentTerm; - if (onDiskState.empty()) { - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // legacy metadata loader is not needed anymore from v9 onwards - final Tuple legacyState = metaStateService.loadFullState(); - if (legacyState.v1().isEmpty() == false) { - metadata = legacyState.v2(); - lastAcceptedVersion = legacyState.v1().clusterStateVersion(); - currentTerm = legacyState.v1().currentTerm(); - } - } - PersistedState persistedState = null; boolean success = false; try { diff --git a/server/src/main/java/org/elasticsearch/gateway/MetaStateService.java b/server/src/main/java/org/elasticsearch/gateway/MetaStateService.java index 
4260ef51a3976..5f07deff31eea 100644 --- a/server/src/main/java/org/elasticsearch/gateway/MetaStateService.java +++ b/server/src/main/java/org/elasticsearch/gateway/MetaStateService.java @@ -12,22 +12,17 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexGraveyard; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Manifest; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.Tuple; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; import org.elasticsearch.xcontent.NamedXContentRegistry; import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; -import java.util.Map; import java.util.function.Predicate; /** @@ -45,118 +40,6 @@ public MetaStateService(NodeEnvironment nodeEnv, NamedXContentRegistry namedXCon this.namedXContentRegistry = namedXContentRegistry; } - /** - * Loads the full state, which includes both the global state and all the indices meta data.
      - * When loading, manifest file is consulted (represented by {@link Manifest} class), to load proper generations.
      - * If there is no manifest file on disk, this method fallbacks to BWC mode, where latest generation of global and indices - * metadata is loaded. Please note that currently there is no way to distinguish between manifest file being removed and manifest - * file was not yet created. It means that this method always fallbacks to BWC mode, if there is no manifest file. - * - * @return tuple of {@link Manifest} and {@link Metadata} with global metadata and indices metadata. If there is no state on disk, - * meta state with globalGeneration -1 and empty meta data is returned. - * @throws IOException if some IOException when loading files occurs or there is no metadata referenced by manifest file. - */ - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) - public Tuple loadFullState() throws IOException { - final Manifest manifest = Manifest.FORMAT.loadLatestState(logger, namedXContentRegistry, nodeEnv.nodeDataPaths()); - if (manifest == null) { - return loadFullStateBWC(); - } - - final Metadata.Builder metadataBuilder; - if (manifest.isGlobalGenerationMissing()) { - metadataBuilder = Metadata.builder(); - } else { - final Metadata globalMetadata = Metadata.FORMAT.loadGeneration( - logger, - namedXContentRegistry, - manifest.globalGeneration(), - nodeEnv.nodeDataPaths() - ); - if (globalMetadata != null) { - metadataBuilder = Metadata.builder(globalMetadata); - } else { - throw new IOException("failed to find global metadata [generation: " + manifest.globalGeneration() + "]"); - } - } - - for (Map.Entry entry : manifest.indexGenerations().entrySet()) { - final Index index = entry.getKey(); - final long generation = entry.getValue(); - final String indexFolderName = index.getUUID(); - final IndexMetadata indexMetadata = IndexMetadata.FORMAT.loadGeneration( - logger, - namedXContentRegistry, - generation, - nodeEnv.resolveIndexFolder(indexFolderName) - ); - if (indexMetadata != null) { - metadataBuilder.put(indexMetadata, false); - } else { - throw 
new IOException( - "failed to find metadata for existing index " - + index.getName() - + " [location: " - + indexFolderName - + ", generation: " - + generation - + "]" - ); - } - } - - return new Tuple<>(manifest, metadataBuilder.build()); - } - - /** - * "Manifest-less" BWC version of loading metadata from disk. See also {@link #loadFullState()} - */ - private Tuple loadFullStateBWC() throws IOException { - Map indices = new HashMap<>(); - Metadata.Builder metadataBuilder; - - Tuple metadataAndGeneration = Metadata.FORMAT.loadLatestStateWithGeneration( - logger, - namedXContentRegistry, - nodeEnv.nodeDataPaths() - ); - Metadata globalMetadata = metadataAndGeneration.v1(); - long globalStateGeneration = metadataAndGeneration.v2(); - - final IndexGraveyard indexGraveyard; - if (globalMetadata != null) { - metadataBuilder = Metadata.builder(globalMetadata); - indexGraveyard = globalMetadata.custom(IndexGraveyard.TYPE); - } else { - metadataBuilder = Metadata.builder(); - indexGraveyard = IndexGraveyard.builder().build(); - } - - for (String indexFolderName : nodeEnv.availableIndexFolders()) { - Tuple indexMetadataAndGeneration = IndexMetadata.FORMAT.loadLatestStateWithGeneration( - logger, - namedXContentRegistry, - nodeEnv.resolveIndexFolder(indexFolderName) - ); - IndexMetadata indexMetadata = indexMetadataAndGeneration.v1(); - long generation = indexMetadataAndGeneration.v2(); - if (indexMetadata != null) { - if (indexGraveyard.containsIndex(indexMetadata.getIndex())) { - logger.debug("[{}] found metadata for deleted index [{}]", indexFolderName, indexMetadata.getIndex()); - // this index folder is cleared up when state is recovered - } else { - indices.put(indexMetadata.getIndex(), generation); - metadataBuilder.put(indexMetadata, false); - } - } else { - logger.debug("[{}] failed to find metadata for existing index location", indexFolderName); - } - } - - Manifest manifest = Manifest.unknownCurrentTermAndVersion(globalStateGeneration, indices); - return new 
Tuple<>(manifest, metadataBuilder.build()); - } - /** * Loads the index state for the provided index name, returning null if doesn't exists. */ @@ -193,7 +76,7 @@ List loadIndicesStates(Predicate excludeIndexPathIdsPredi } /** - * Loads the global state, *without* index state, see {@link #loadFullState()} for that. + * Loads the global state, *without* index state */ Metadata loadGlobalState() throws IOException { return Metadata.FORMAT.loadLatestState(logger, namedXContentRegistry, nodeEnv.nodeDataPaths()); diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index e30f76fdd9414..ec4a534fc883b 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -325,7 +325,6 @@ public Node start() throws NodeValidationException { // TODO: Do not expect that the legacy metadata file is always present https://github.com/elastic/elasticsearch/issues/95211 if (Assertions.ENABLED && DiscoveryNode.isStateless(settings()) == false) { try { - assert injector.getInstance(MetaStateService.class).loadFullState().v1().isEmpty(); final NodeMetadata nodeMetadata = NodeMetadata.FORMAT.loadLatestState( logger, NamedXContentRegistry.EMPTY, diff --git a/server/src/test/java/org/elasticsearch/gateway/MetaStateServiceTests.java b/server/src/test/java/org/elasticsearch/gateway/MetaStateServiceTests.java index 40c4e064216f1..1bbab8bf782bd 100644 --- a/server/src/test/java/org/elasticsearch/gateway/MetaStateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/MetaStateServiceTests.java @@ -9,21 +9,15 @@ package org.elasticsearch.gateway; import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.Manifest; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.Tuple; import 
org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; -import java.util.HashMap; - import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.nullValue; public class MetaStateServiceTests extends ESTestCase { @@ -75,130 +69,4 @@ public void testWriteGlobalStateWithIndexAndNoIndexIsLoaded() throws Exception { assertThat(metaStateService.loadGlobalState().persistentSettings(), equalTo(metadata.persistentSettings())); assertThat(metaStateService.loadGlobalState().hasIndex("test1"), equalTo(false)); } - - public void testLoadFullStateBWC() throws Exception { - IndexMetadata indexMetadata = indexMetadata("test1"); - Metadata metadata = Metadata.builder() - .persistentSettings(Settings.builder().put("test1", "value1").build()) - .put(indexMetadata, true) - .build(); - - long globalGeneration = MetaStateWriterUtils.writeGlobalState(env, "test_write", metadata); - long indexGeneration = MetaStateWriterUtils.writeIndex(env, "test_write", indexMetadata); - - Tuple manifestAndMetadata = metaStateService.loadFullState(); - Manifest manifest = manifestAndMetadata.v1(); - assertThat(manifest.globalGeneration(), equalTo(globalGeneration)); - assertThat(manifest.indexGenerations(), hasKey(indexMetadata.getIndex())); - assertThat(manifest.indexGenerations().get(indexMetadata.getIndex()), equalTo(indexGeneration)); - - Metadata loadedMetadata = manifestAndMetadata.v2(); - assertThat(loadedMetadata.persistentSettings(), equalTo(metadata.persistentSettings())); - assertThat(loadedMetadata.hasIndex("test1"), equalTo(true)); - assertThat(loadedMetadata.index("test1"), equalTo(indexMetadata)); - } - - public void testLoadEmptyStateNoManifest() throws IOException { - Tuple manifestAndMetadata = metaStateService.loadFullState(); - - Manifest manifest = manifestAndMetadata.v1(); - 
assertTrue(manifest.isEmpty()); - - Metadata metadata = manifestAndMetadata.v2(); - assertTrue(Metadata.isGlobalStateEquals(metadata, Metadata.EMPTY_METADATA)); - } - - public void testLoadEmptyStateWithManifest() throws IOException { - Manifest manifest = Manifest.empty(); - MetaStateWriterUtils.writeManifestAndCleanup(env, "test", manifest); - - Tuple manifestAndMetadata = metaStateService.loadFullState(); - assertTrue(manifestAndMetadata.v1().isEmpty()); - Metadata metadata = manifestAndMetadata.v2(); - assertTrue(Metadata.isGlobalStateEquals(metadata, Metadata.EMPTY_METADATA)); - } - - public void testLoadFullStateMissingGlobalMetadata() throws IOException { - IndexMetadata index = indexMetadata("test1"); - long indexGeneration = MetaStateWriterUtils.writeIndex(env, "test", index); - Manifest manifest = new Manifest( - randomNonNegativeLong(), - randomNonNegativeLong(), - Manifest.empty().globalGeneration(), - new HashMap() { - { - put(index.getIndex(), indexGeneration); - } - } - ); - assertTrue(manifest.isGlobalGenerationMissing()); - MetaStateWriterUtils.writeManifestAndCleanup(env, "test", manifest); - - Tuple manifestAndMetadata = metaStateService.loadFullState(); - assertThat(manifestAndMetadata.v1(), equalTo(manifest)); - Metadata loadedMetadata = manifestAndMetadata.v2(); - assertTrue(Metadata.isGlobalStateEquals(loadedMetadata, Metadata.EMPTY_METADATA)); - assertThat(loadedMetadata.hasIndex("test1"), equalTo(true)); - assertThat(loadedMetadata.index("test1"), equalTo(index)); - } - - public void testLoadFullStateAndUpdateAndClean() throws IOException { - IndexMetadata index = indexMetadata("test1"); - Metadata metadata = Metadata.builder() - .persistentSettings(Settings.builder().put("test1", "value1").build()) - .put(index, true) - .build(); - - long globalGeneration = MetaStateWriterUtils.writeGlobalState(env, "first global state write", metadata); - long indexGeneration = MetaStateWriterUtils.writeIndex(env, "first index state write", index); - - 
Manifest manifest = new Manifest(randomNonNegativeLong(), randomNonNegativeLong(), globalGeneration, new HashMap() { - { - put(index.getIndex(), indexGeneration); - } - }); - MetaStateWriterUtils.writeManifestAndCleanup(env, "first manifest write", manifest); - - Metadata newMetadata = Metadata.builder() - .persistentSettings(Settings.builder().put("test1", "value2").build()) - .put(index, true) - .build(); - globalGeneration = MetaStateWriterUtils.writeGlobalState(env, "second global state write", newMetadata); - - Tuple manifestAndMetadata = metaStateService.loadFullState(); - assertThat(manifestAndMetadata.v1(), equalTo(manifest)); - - Metadata loadedMetadata = manifestAndMetadata.v2(); - assertThat(loadedMetadata.persistentSettings(), equalTo(metadata.persistentSettings())); - assertThat(loadedMetadata.hasIndex("test1"), equalTo(true)); - assertThat(loadedMetadata.index("test1"), equalTo(index)); - - manifest = new Manifest(randomNonNegativeLong(), randomNonNegativeLong(), globalGeneration, new HashMap() { - { - put(index.getIndex(), indexGeneration); - } - }); - - MetaStateWriterUtils.writeManifestAndCleanup(env, "second manifest write", manifest); - Metadata.FORMAT.cleanupOldFiles(globalGeneration, env.nodeDataPaths()); - IndexMetadata.FORMAT.cleanupOldFiles(indexGeneration, env.indexPaths(index.getIndex())); - - manifestAndMetadata = metaStateService.loadFullState(); - assertThat(manifestAndMetadata.v1(), equalTo(manifest)); - - loadedMetadata = manifestAndMetadata.v2(); - assertThat(loadedMetadata.persistentSettings(), equalTo(newMetadata.persistentSettings())); - assertThat(loadedMetadata.hasIndex("test1"), equalTo(true)); - assertThat(loadedMetadata.index("test1"), equalTo(index)); - - if (randomBoolean()) { - metaStateService.unreferenceAll(); - } else { - metaStateService.deleteAll(); - } - manifestAndMetadata = metaStateService.loadFullState(); - assertTrue(manifestAndMetadata.v1().isEmpty()); - metadata = manifestAndMetadata.v2(); - 
assertTrue(Metadata.isGlobalStateEquals(metadata, Metadata.EMPTY_METADATA)); - } } diff --git a/test/framework/src/main/java/org/elasticsearch/gateway/MockGatewayMetaState.java b/test/framework/src/main/java/org/elasticsearch/gateway/MockGatewayMetaState.java index d03396f9b53b3..64b468226e509 100644 --- a/test/framework/src/main/java/org/elasticsearch/gateway/MockGatewayMetaState.java +++ b/test/framework/src/main/java/org/elasticsearch/gateway/MockGatewayMetaState.java @@ -11,7 +11,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadataVerifier; -import org.elasticsearch.cluster.metadata.Manifest; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; @@ -19,14 +18,12 @@ import org.elasticsearch.cluster.version.CompatibilityVersionsUtils; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.Tuple; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.plugins.MetadataUpgrader; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.NamedXContentRegistry; -import java.io.IOException; import java.util.List; import static org.mockito.Mockito.mock; @@ -70,11 +67,6 @@ public void start(Settings settings, NodeEnvironment nodeEnvironment, NamedXCont new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) ); final MetaStateService metaStateService = mock(MetaStateService.class); - try { - when(metaStateService.loadFullState()).thenReturn(new Tuple<>(Manifest.empty(), Metadata.builder().build())); - } catch (IOException e) { - throw new AssertionError(e); - } start( settings, transportService, From e789039dfa8fee60dc2615c3876295ff7c6f3b01 Mon Sep 17 00:00:00 2001 From: Stanislav Malyshev Date: Thu, 24 Oct 2024 
13:58:49 -0600 Subject: [PATCH 390/449] Fixing remote ENRICH by pushing the Enrich inside FragmentExec (#114665) * Fixing remote ENRICH by pushing the Enrich inside FragmentExec * Improve handling of more complex cases such as several enriches --- docs/changelog/114665.yaml | 6 ++ .../esql/action/CrossClustersEnrichIT.java | 102 ++++++++++++++++-- .../xpack/esql/analysis/Verifier.java | 7 -- .../xpack/esql/planner/Mapper.java | 42 ++++++++ .../optimizer/PhysicalPlanOptimizerTests.java | 63 +++++++++-- 5 files changed, 195 insertions(+), 25 deletions(-) create mode 100644 docs/changelog/114665.yaml diff --git a/docs/changelog/114665.yaml b/docs/changelog/114665.yaml new file mode 100644 index 0000000000000..b90bb799bd896 --- /dev/null +++ b/docs/changelog/114665.yaml @@ -0,0 +1,6 @@ +pr: 114665 +summary: Fixing remote ENRICH by pushing the Enrich inside `FragmentExec` +area: ES|QL +type: bug +issues: + - 105095 diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java index 7d8bb738098d3..e8e9f45694e9c 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java @@ -47,6 +47,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.Comparator; import java.util.List; import java.util.Locale; import java.util.Map; @@ -469,27 +470,112 @@ public void testEnrichRemoteWithVendor() { } } + public void testEnrichRemoteWithVendorNoSort() { + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + boolean responseExpectMeta = includeCCSMetadata.v2(); + + for (Enrich.Mode hostMode : List.of(Enrich.Mode.ANY, 
Enrich.Mode.REMOTE)) { + var query = String.format(Locale.ROOT, """ + FROM *:events,events + | LIMIT 100 + | eval ip= TO_STR(host) + | %s + | %s + | stats c = COUNT(*) by vendor + """, enrichHosts(hostMode), enrichVendors(Enrich.Mode.REMOTE)); + try (EsqlQueryResponse resp = runQuery(query, requestIncludeMeta)) { + var values = getValuesList(resp); + values.sort(Comparator.comparing(o -> (String) o.get(1), Comparator.nullsLast(Comparator.naturalOrder()))); + assertThat( + values, + equalTo( + List.of( + List.of(6L, "Apple"), + List.of(7L, "Microsoft"), + List.of(1L, "Redhat"), + List.of(2L, "Samsung"), + List.of(1L, "Sony"), + List.of(2L, "Suse"), + Arrays.asList(3L, (String) null) + ) + ) + ); + EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); + assertThat(executionInfo.clusterAliases(), equalTo(Set.of("", "c1", "c2"))); + assertCCSExecutionInfoDetails(executionInfo); + } + } + } + public void testTopNThenEnrichRemote() { + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + boolean responseExpectMeta = includeCCSMetadata.v2(); + String query = String.format(Locale.ROOT, """ FROM *:events,events | eval ip= TO_STR(host) - | SORT ip + | SORT timestamp, user, ip | LIMIT 5 - | %s + | %s | KEEP host, timestamp, user, os """, enrichHosts(Enrich.Mode.REMOTE)); - var error = expectThrows(VerificationException.class, () -> runQuery(query, randomBoolean()).close()); - assertThat(error.getMessage(), containsString("ENRICH with remote policy can't be executed after LIMIT")); + try (EsqlQueryResponse resp = runQuery(query, requestIncludeMeta)) { + assertThat( + getValuesList(resp), + equalTo( + List.of( + List.of("192.168.1.2", 1L, "andres", "Windows"), + List.of("192.168.1.3", 1L, "matthew", "MacOS"), + Arrays.asList("192.168.1.25", 1L, "park", (String) null), + List.of("192.168.1.5", 2L, "akio", "Android"), + 
List.of("192.168.1.6", 2L, "sergio", "iOS") + ) + ) + ); + EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); + assertThat(executionInfo.clusterAliases(), equalTo(Set.of("", "c1", "c2"))); + assertCCSExecutionInfoDetails(executionInfo); + } } public void testLimitThenEnrichRemote() { + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + boolean responseExpectMeta = includeCCSMetadata.v2(); + String query = String.format(Locale.ROOT, """ FROM *:events,events - | LIMIT 10 + | LIMIT 25 | eval ip= TO_STR(host) - | %s + | %s | KEEP host, timestamp, user, os """, enrichHosts(Enrich.Mode.REMOTE)); - var error = expectThrows(VerificationException.class, () -> runQuery(query, randomBoolean()).close()); - assertThat(error.getMessage(), containsString("ENRICH with remote policy can't be executed after LIMIT")); + try (EsqlQueryResponse resp = runQuery(query, requestIncludeMeta)) { + var values = getValuesList(resp); + values.sort( + Comparator.comparingLong((List o) -> (Long) o.get(1)) + .thenComparing(o -> (String) o.get(0)) + .thenComparing(o -> (String) o.get(2)) + ); + assertThat( + values.subList(0, 5), + equalTo( + List.of( + List.of("192.168.1.2", 1L, "andres", "Windows"), + Arrays.asList("192.168.1.25", 1L, "park", (String) null), + List.of("192.168.1.3", 1L, "matthew", "MacOS"), + List.of("192.168.1.5", 2L, "akio", "Android"), + List.of("192.168.1.5", 2L, "simon", "Android") + ) + ) + ); + EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); + assertThat(executionInfo.clusterAliases(), equalTo(Set.of("", "c1", "c2"))); + assertCCSExecutionInfoDetails(executionInfo); + } } public void testAggThenEnrichRemote() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index e2717cd9af0d1..fbaf43467a2e7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -609,22 +609,15 @@ private static void checkForSortableDataTypes(LogicalPlan p, Set localF */ private static void checkRemoteEnrich(LogicalPlan plan, Set failures) { boolean[] agg = { false }; - boolean[] limit = { false }; boolean[] enrichCoord = { false }; plan.forEachUp(UnaryPlan.class, u -> { - if (u instanceof Limit) { - limit[0] = true; // TODO: Make Limit then enrich_remote work - } if (u instanceof Aggregate) { agg[0] = true; } else if (u instanceof Enrich enrich && enrich.mode() == Enrich.Mode.COORDINATOR) { enrichCoord[0] = true; } if (u instanceof Enrich enrich && enrich.mode() == Enrich.Mode.REMOTE) { - if (limit[0]) { - failures.add(fail(enrich, "ENRICH with remote policy can't be executed after LIMIT")); - } if (agg[0]) { failures.add(fail(enrich, "ENRICH with remote policy can't be executed after STATS")); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java index e571be54692c4..152c492a34433 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java @@ -52,8 +52,10 @@ import org.elasticsearch.xpack.esql.plan.physical.RowExec; import org.elasticsearch.xpack.esql.plan.physical.ShowExec; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; +import org.elasticsearch.xpack.esql.plan.physical.UnaryExec; import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; /** *

      This class is part of the planner

      @@ -104,6 +106,46 @@ public PhysicalPlan map(LogicalPlan p) { // // Unary Plan // + if (localMode == false && p instanceof Enrich enrich && enrich.mode() == Enrich.Mode.REMOTE) { + // When we have remote enrich, we want to put it under FragmentExec, so it would be executed remotely. + // We're only going to do it on the coordinator node. + // The way we're going to do it is as follows: + // 1. Locate FragmentExec in the tree. If we have no FragmentExec, we won't do anything. + // 2. Put this Enrich under it, removing everything that was below it previously. + // 3. Above FragmentExec, we should deal with pipeline breakers, since pipeline ops already are supposed to go under + // FragmentExec. + // 4. Aggregates can't appear here since the plan should have errored out if we have aggregate inside remote Enrich. + // 5. So we should be keeping: LimitExec, ExchangeExec, OrderExec, TopNExec (actually OrderExec probably can't happen anyway). + + var child = map(enrich.child()); + AtomicBoolean hasFragment = new AtomicBoolean(false); + + var childTransformed = child.transformUp((f) -> { + // Once we reached FragmentExec, we stuff our Enrich under it + if (f instanceof FragmentExec) { + hasFragment.set(true); + return new FragmentExec(p); + } + if (f instanceof EnrichExec enrichExec) { + // It can only be ANY because COORDINATOR would have errored out earlier, and REMOTE should be under FragmentExec + assert enrichExec.mode() == Enrich.Mode.ANY : "enrich must be in ANY mode here"; + return enrichExec.child(); + } + if (f instanceof UnaryExec unaryExec) { + if (f instanceof LimitExec || f instanceof ExchangeExec || f instanceof OrderExec || f instanceof TopNExec) { + return f; + } else { + return unaryExec.child(); + } + } + // Currently, it's either UnaryExec or LeafExec. Leaf will either resolve to FragmentExec or we'll ignore it. 
+ return f; + }); + + if (hasFragment.get()) { + return childTransformed; + } + } if (p instanceof UnaryPlan ua) { var child = map(ua.child()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 964039268e30d..961c70acada7b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -172,7 +172,7 @@ import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; -// @TestLogging(value = "org.elasticsearch.xpack.esql:TRACE", reason = "debug") +// @TestLogging(value = "org.elasticsearch.xpack.esql:DEBUG", reason = "debug") public class PhysicalPlanOptimizerTests extends ESTestCase { private static final String PARAM_FORMATTING = "%1$s"; @@ -5851,14 +5851,14 @@ public void testEnrichBeforeLimit() { | EVAL employee_id = to_str(emp_no) | ENRICH _remote:departments | LIMIT 10"""); - var enrich = as(plan, EnrichExec.class); - assertThat(enrich.mode(), equalTo(Enrich.Mode.REMOTE)); - assertThat(enrich.concreteIndices(), equalTo(Map.of("cluster_1", ".enrich-departments-2"))); - var eval = as(enrich.child(), EvalExec.class); - var finalLimit = as(eval.child(), LimitExec.class); + var finalLimit = as(plan, LimitExec.class); var exchange = as(finalLimit.child(), ExchangeExec.class); var fragment = as(exchange.child(), FragmentExec.class); - var partialLimit = as(fragment.fragment(), Limit.class); + var enrich = as(fragment.fragment(), Enrich.class); + assertThat(enrich.mode(), equalTo(Enrich.Mode.REMOTE)); + assertThat(enrich.concreteIndices(), equalTo(Map.of("cluster_1", ".enrich-departments-2"))); + var evalFragment = as(enrich.child(), Eval.class); + var partialLimit = as(evalFragment.child(), 
Limit.class); as(partialLimit.child(), EsRelation.class); } } @@ -5901,13 +5901,21 @@ public void testLimitThenEnrich() { } public void testLimitThenEnrichRemote() { - var error = expectThrows(VerificationException.class, () -> physicalPlan(""" + var plan = physicalPlan(""" FROM test | LIMIT 10 | EVAL employee_id = to_str(emp_no) | ENRICH _remote:departments - """)); - assertThat(error.getMessage(), containsString("line 4:3: ENRICH with remote policy can't be executed after LIMIT")); + """); + var finalLimit = as(plan, LimitExec.class); + var exchange = as(finalLimit.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var enrich = as(fragment.fragment(), Enrich.class); + assertThat(enrich.mode(), equalTo(Enrich.Mode.REMOTE)); + assertThat(enrich.concreteIndices(), equalTo(Map.of("cluster_1", ".enrich-departments-2"))); + var evalFragment = as(enrich.child(), Eval.class); + var partialLimit = as(evalFragment.child(), Limit.class); + as(partialLimit.child(), EsRelation.class); } public void testEnrichBeforeTopN() { @@ -5961,6 +5969,23 @@ public void testEnrichBeforeTopN() { var eval = as(enrich.child(), Eval.class); as(eval.child(), EsRelation.class); } + { + var plan = physicalPlan(""" + FROM test + | EVAL employee_id = to_str(emp_no) + | ENRICH _remote:departments + | SORT department + | LIMIT 10"""); + var topN = as(plan, TopNExec.class); + var exchange = as(topN.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var partialTopN = as(fragment.fragment(), TopN.class); + var enrich = as(partialTopN.child(), Enrich.class); + assertThat(enrich.mode(), equalTo(Enrich.Mode.REMOTE)); + assertThat(enrich.concreteIndices(), equalTo(Map.of("cluster_1", ".enrich-departments-2"))); + var eval = as(enrich.child(), Eval.class); + as(eval.child(), EsRelation.class); + } } public void testEnrichAfterTopN() { @@ -6000,6 +6025,24 @@ public void testEnrichAfterTopN() { var partialTopN = 
as(fragment.fragment(), TopN.class); as(partialTopN.child(), EsRelation.class); } + { + var plan = physicalPlan(""" + FROM test + | SORT emp_no + | LIMIT 10 + | EVAL employee_id = to_str(emp_no) + | ENRICH _remote:departments + """); + var topN = as(plan, TopNExec.class); + var exchange = as(topN.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var enrich = as(fragment.fragment(), Enrich.class); + assertThat(enrich.mode(), equalTo(Enrich.Mode.REMOTE)); + assertThat(enrich.concreteIndices(), equalTo(Map.of("cluster_1", ".enrich-departments-2"))); + var evalFragment = as(enrich.child(), Eval.class); + var partialTopN = as(evalFragment.child(), TopN.class); + as(partialTopN.child(), EsRelation.class); + } } public void testManyEnrich() { From cade0021736d69f66db4bc73c022258833c3ff38 Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Thu, 24 Oct 2024 15:34:27 -0500 Subject: [PATCH 391/449] Fixing ingest simulate yaml rest test when global legacy template is present (#115586) Sometimes the test framework adds a global legacy template. When this happens, a test that is using another legacy template to create an index emits a warning since the index matches two legacy templates. This PR allows that warning. 
--- .../resources/rest-api-spec/test/ingest/80_ingest_simulate.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml index 7ed5ad3154151..2d3fa6b568381 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml @@ -1537,6 +1537,8 @@ setup: - not_exists: docs.0.doc.error - do: + allowed_warnings: + - "index [foo-1] matches multiple legacy templates [global, my-legacy-template], composable templates will only match a single template" indices.create: index: foo-1 - match: { acknowledged: true } From d1c7e9886f483f2865b7780ce0ba44689fae622e Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Thu, 24 Oct 2024 21:43:22 +0100 Subject: [PATCH 392/449] Update BlobCacheBufferedIndexInput::readVLong to correctly handle negative long values (#115594) --- docs/changelog/115594.yaml | 6 ++++++ .../blobcache/common/BlobCacheBufferedIndexInput.java | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/115594.yaml diff --git a/docs/changelog/115594.yaml b/docs/changelog/115594.yaml new file mode 100644 index 0000000000000..91a6089dfb3ce --- /dev/null +++ b/docs/changelog/115594.yaml @@ -0,0 +1,6 @@ +pr: 115594 +summary: Update `BlobCacheBufferedIndexInput::readVLong` to correctly handle negative + long values +area: Search +type: bug +issues: [] diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java 
b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java index 16645e7523c36..7e7e954d1fa72 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java @@ -175,7 +175,7 @@ public final int readVInt() throws IOException { @Override public final long readVLong() throws IOException { - if (9 <= buffer.remaining()) { + if (10 <= buffer.remaining()) { return ByteBufferStreamInput.readVLong(buffer); } else { return super.readVLong(); From f444c86f857db0f82f528d217bf0da6f5b9308c5 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Thu, 24 Oct 2024 13:47:20 -0700 Subject: [PATCH 393/449] Add lookup index mode (#115143) This change introduces a new index mode, lookup, for indices intended for lookup operations in ES|QL. Lookup indices must have a single shard and be replicated to all data nodes by default. Aside from these requirements, they function as standard indices. Documentation will be added later when the lookup operator in ES|QL is implemented. 
--- .../test/indices.create/10_basic.yml | 67 ++++++ .../index/LookupIndexModeIT.java | 219 ++++++++++++++++++ .../org/elasticsearch/TransportVersions.java | 1 + .../metadata/MetadataCreateIndexService.java | 16 +- .../org/elasticsearch/index/IndexMode.java | 115 ++++++++- .../monitor/metrics/IndicesMetrics.java | 2 +- .../elasticsearch/node/NodeConstruction.java | 10 +- .../indices/CreateIndexCapabilities.java | 7 +- .../index/mapper/MapperServiceTestCase.java | 2 +- .../index/engine/FollowingEngineTests.java | 3 + 10 files changed, 436 insertions(+), 6 deletions(-) create mode 100644 server/src/internalClusterTest/java/org/elasticsearch/index/LookupIndexModeIT.java diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/10_basic.yml index 8242b7cdd29e7..31d127b80c844 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/10_basic.yml @@ -149,3 +149,70 @@ indices.exists_alias: name: logs_2022-12-31 - is_true: '' + +--- +"Create lookup index": + - requires: + test_runner_features: [ capabilities ] + capabilities: + - method: PUT + path: /{index} + capabilities: [ lookup_index_mode ] + reason: "Support for 'lookup' index mode capability required" + - do: + indices.create: + index: "test_lookup" + body: + settings: + index.mode: lookup + + - do: + indices.get_settings: + index: test_lookup + + - match: { test_lookup.settings.index.number_of_shards: "1"} + - match: { test_lookup.settings.index.auto_expand_replicas: "0-all"} + +--- +"Create lookup index with one shard": + - requires: + test_runner_features: [ capabilities ] + capabilities: + - method: PUT + path: /{index} + capabilities: [ lookup_index_mode ] + reason: "Support for 'lookup' index mode capability required" + - do: + indices.create: + index: "test_lookup" + 
body: + settings: + index: + mode: lookup + number_of_shards: 1 + + - do: + indices.get_settings: + index: test_lookup + + - match: { test_lookup.settings.index.number_of_shards: "1"} + - match: { test_lookup.settings.index.auto_expand_replicas: "0-all"} + +--- +"Create lookup index with two shards": + - requires: + test_runner_features: [ capabilities ] + capabilities: + - method: PUT + path: /{index} + capabilities: [ lookup_index_mode ] + reason: "Support for 'lookup' index mode capability required" + - do: + catch: /illegal_argument_exception/ + indices.create: + index: test_lookup + body: + settings: + index.mode: lookup + index.number_of_shards: 2 + diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/LookupIndexModeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/LookupIndexModeIT.java new file mode 100644 index 0000000000000..f294d4a2e7943 --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/LookupIndexModeIT.java @@ -0,0 +1,219 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.index; + +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; +import org.elasticsearch.action.admin.indices.shrink.ResizeAction; +import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; +import org.elasticsearch.action.admin.indices.shrink.ResizeType; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesIndexResponse; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.test.ESIntegTestCase; + +import java.util.Map; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; + +public class LookupIndexModeIT extends ESIntegTestCase { + + @Override + protected int numberOfShards() { + return 1; + } + + public void testBasic() { + internalCluster().ensureAtLeastNumDataNodes(1); + Settings.Builder lookupSettings = Settings.builder().put("index.mode", "lookup"); + if (randomBoolean()) { + lookupSettings.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1); + } + CreateIndexRequest createRequest = new CreateIndexRequest("hosts"); + createRequest.settings(lookupSettings); + createRequest.simpleMapping("ip", "type=ip", "os", "type=keyword"); + assertAcked(client().admin().indices().execute(TransportCreateIndexAction.TYPE, createRequest)); + Settings settings = client().admin().indices().prepareGetSettings("hosts").get().getIndexToSettings().get("hosts"); + assertThat(settings.get("index.mode"), equalTo("lookup")); + 
assertThat(settings.get("index.auto_expand_replicas"), equalTo("0-all")); + Map allHosts = Map.of( + "192.168.1.2", + "Windows", + "192.168.1.3", + "MacOS", + "192.168.1.4", + "Linux", + "192.168.1.5", + "Android", + "192.168.1.6", + "iOS", + "192.168.1.7", + "Windows", + "192.168.1.8", + "MacOS", + "192.168.1.9", + "Linux", + "192.168.1.10", + "Linux", + "192.168.1.11", + "Windows" + ); + for (Map.Entry e : allHosts.entrySet()) { + client().prepareIndex("hosts").setSource("ip", e.getKey(), "os", e.getValue()).get(); + } + refresh("hosts"); + assertAcked(client().admin().indices().prepareCreate("events").setSettings(Settings.builder().put("index.mode", "logsdb")).get()); + int numDocs = between(1, 10); + for (int i = 0; i < numDocs; i++) { + String ip = randomFrom(allHosts.keySet()); + String message = randomFrom("login", "logout", "shutdown", "restart"); + client().prepareIndex("events").setSource("@timestamp", "2024-01-01", "ip", ip, "message", message).get(); + } + refresh("events"); + // _search + { + SearchResponse resp = prepareSearch("events", "hosts").setQuery(new MatchQueryBuilder("_index_mode", "lookup")) + .setSize(10000) + .get(); + for (SearchHit hit : resp.getHits()) { + assertThat(hit.getIndex(), equalTo("hosts")); + } + assertHitCount(resp, allHosts.size()); + resp.decRef(); + } + // field_caps + { + FieldCapabilitiesRequest request = new FieldCapabilitiesRequest(); + request.indices("events", "hosts"); + request.fields("*"); + request.setMergeResults(false); + request.indexFilter(new MatchQueryBuilder("_index_mode", "lookup")); + var resp = client().fieldCaps(request).actionGet(); + assertThat(resp.getIndexResponses(), hasSize(1)); + FieldCapabilitiesIndexResponse indexResponse = resp.getIndexResponses().getFirst(); + assertThat(indexResponse.getIndexMode(), equalTo(IndexMode.LOOKUP)); + assertThat(indexResponse.getIndexName(), equalTo("hosts")); + } + } + + public void testRejectMoreThanOneShard() { + int numberOfShards = between(2, 5); + 
IllegalArgumentException error = expectThrows(IllegalArgumentException.class, () -> { + client().admin() + .indices() + .prepareCreate("hosts") + .setSettings(Settings.builder().put("index.mode", "lookup").put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numberOfShards)) + .setMapping("ip", "type=ip", "os", "type=keyword") + .get(); + }); + assertThat( + error.getMessage(), + equalTo("index with [lookup] mode must have [index.number_of_shards] set to 1 or unset; provided " + numberOfShards) + ); + } + + public void testResizeLookupIndex() { + Settings.Builder createSettings = Settings.builder().put("index.mode", "lookup"); + if (randomBoolean()) { + createSettings.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1); + } + CreateIndexRequest createIndexRequest = new CreateIndexRequest("lookup-1").settings(createSettings); + assertAcked(client().admin().indices().execute(TransportCreateIndexAction.TYPE, createIndexRequest)); + client().admin().indices().prepareAddBlock(IndexMetadata.APIBlock.WRITE, "lookup-1").get(); + + ResizeRequest clone = new ResizeRequest("lookup-2", "lookup-1"); + clone.setResizeType(ResizeType.CLONE); + assertAcked(client().admin().indices().execute(ResizeAction.INSTANCE, clone).actionGet()); + Settings settings = client().admin().indices().prepareGetSettings("lookup-2").get().getIndexToSettings().get("lookup-2"); + assertThat(settings.get("index.mode"), equalTo("lookup")); + assertThat(settings.get("index.number_of_shards"), equalTo("1")); + assertThat(settings.get("index.auto_expand_replicas"), equalTo("0-all")); + + ResizeRequest split = new ResizeRequest("lookup-3", "lookup-1"); + split.setResizeType(ResizeType.SPLIT); + split.getTargetIndexRequest().settings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 3)); + IllegalArgumentException error = expectThrows( + IllegalArgumentException.class, + () -> client().admin().indices().execute(ResizeAction.INSTANCE, split).actionGet() + ); + assertThat( + error.getMessage(), + equalTo("index 
with [lookup] mode must have [index.number_of_shards] set to 1 or unset; provided 3") + ); + } + + public void testResizeRegularIndexToLookup() { + String dataNode = internalCluster().startDataOnlyNode(); + assertAcked( + client().admin() + .indices() + .prepareCreate("regular-1") + .setSettings( + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 2) + .put("index.routing.allocation.require._name", dataNode) + ) + .setMapping("ip", "type=ip", "os", "type=keyword") + .get() + ); + client().admin().indices().prepareAddBlock(IndexMetadata.APIBlock.WRITE, "regular-1").get(); + client().admin() + .indices() + .prepareUpdateSettings("regular-1") + .setSettings(Settings.builder().put("index.number_of_replicas", 0)) + .get(); + + ResizeRequest clone = new ResizeRequest("lookup-3", "regular-1"); + clone.setResizeType(ResizeType.CLONE); + clone.getTargetIndexRequest().settings(Settings.builder().put("index.mode", "lookup")); + IllegalArgumentException error = expectThrows( + IllegalArgumentException.class, + () -> client().admin().indices().execute(ResizeAction.INSTANCE, clone).actionGet() + ); + assertThat( + error.getMessage(), + equalTo("index with [lookup] mode must have [index.number_of_shards] set to 1 or unset; provided 2") + ); + + ResizeRequest shrink = new ResizeRequest("lookup-4", "regular-1"); + shrink.setResizeType(ResizeType.SHRINK); + shrink.getTargetIndexRequest() + .settings(Settings.builder().put("index.mode", "lookup").put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)); + + error = expectThrows( + IllegalArgumentException.class, + () -> client().admin().indices().execute(ResizeAction.INSTANCE, shrink).actionGet() + ); + assertThat(error.getMessage(), equalTo("can't change index.mode of index [regular-1] from [standard] to [lookup]")); + } + + public void testDoNotOverrideAutoExpandReplicas() { + internalCluster().ensureAtLeastNumDataNodes(1); + Settings.Builder createSettings = Settings.builder().put("index.mode", "lookup"); + if 
(randomBoolean()) { + createSettings.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1); + } + createSettings.put("index.auto_expand_replicas", "3-5"); + CreateIndexRequest createRequest = new CreateIndexRequest("hosts"); + createRequest.settings(createSettings); + createRequest.simpleMapping("ip", "type=ip", "os", "type=keyword"); + assertAcked(client().admin().indices().execute(TransportCreateIndexAction.TYPE, createRequest)); + Settings settings = client().admin().indices().prepareGetSettings("hosts").get().getIndexToSettings().get("hosts"); + assertThat(settings.get("index.mode"), equalTo("lookup")); + assertThat(settings.get("index.auto_expand_replicas"), equalTo("3-5")); + } +} diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 777ff083f33f8..25bb792d827a9 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -181,6 +181,7 @@ static TransportVersion def(int id) { public static final TransportVersion INFERENCE_DONT_PERSIST_ON_READ = def(8_776_00_0); public static final TransportVersion SIMULATE_MAPPING_ADDITION = def(8_777_00_0); public static final TransportVersion INTRODUCE_ALL_APPLICABLE_SELECTOR = def(8_778_00_0); + public static final TransportVersion INDEX_MODE_LOOKUP = def(8_779_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java index 69e3b7b70ff82..ed029db54bf06 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java @@ -308,7 +308,12 @@ private void onlyCreateIndex( final CreateIndexClusterStateUpdateRequest request, final ActionListener listener ) { - normalizeRequestSetting(request); + try { + normalizeRequestSetting(request); + } catch (Exception e) { + listener.onFailure(e); + return; + } var delegate = new AllocationActionListener<>(listener, threadPool.getThreadContext()); submitUnbatchedTask( @@ -1599,6 +1604,15 @@ static IndexMetadata validateResize( // of if the source shards are divisible by the number of target shards IndexMetadata.getRoutingFactor(sourceMetadata.getNumberOfShards(), INDEX_NUMBER_OF_SHARDS_SETTING.get(targetIndexSettings)); } + if (targetIndexSettings.hasValue(IndexSettings.MODE.getKey())) { + IndexMode oldMode = Objects.requireNonNullElse(sourceMetadata.getIndexMode(), IndexMode.STANDARD); + IndexMode newMode = IndexSettings.MODE.get(targetIndexSettings); + if (newMode != oldMode) { + throw new IllegalArgumentException( + "can't change index.mode of index [" + sourceIndex + "] from [" + oldMode + "] to [" + newMode + "]" + ); + } + } return sourceMetadata; } diff --git a/server/src/main/java/org/elasticsearch/index/IndexMode.java b/server/src/main/java/org/elasticsearch/index/IndexMode.java index 75ec67f26dd3a..e6339344b6e5f 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexMode.java +++ b/server/src/main/java/org/elasticsearch/index/IndexMode.java @@ -9,7 +9,9 @@ package org.elasticsearch.index; +import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.metadata.IndexMetadata; +import 
org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.MetadataCreateDataStreamService; import org.elasticsearch.cluster.routing.IndexRouting; import org.elasticsearch.common.compress.CompressedXContent; @@ -37,8 +39,10 @@ import org.elasticsearch.index.mapper.TsidExtractingIdFieldMapper; import java.io.IOException; +import java.time.Instant; import java.util.Arrays; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.function.BooleanSupplier; @@ -308,6 +312,78 @@ public SourceFieldMapper.Mode defaultSourceMode() { public String getDefaultCodec() { return CodecService.BEST_COMPRESSION_CODEC; } + }, + LOOKUP("lookup") { + @Override + void validateWithOtherSettings(Map, Object> settings) { + final Integer providedNumberOfShards = (Integer) settings.get(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING); + if (providedNumberOfShards != null && providedNumberOfShards != 1) { + throw new IllegalArgumentException( + "index with [lookup] mode must have [index.number_of_shards] set to 1 or unset; provided " + providedNumberOfShards + ); + } + } + + @Override + public void validateMapping(MappingLookup lookup) {}; + + @Override + public void validateAlias(@Nullable String indexRouting, @Nullable String searchRouting) {} + + @Override + public void validateTimestampFieldMapping(boolean isDataStream, MappingLookup mappingLookup) { + + } + + @Override + public CompressedXContent getDefaultMapping(final IndexSettings indexSettings) { + return null; + } + + @Override + public TimestampBounds getTimestampBound(IndexMetadata indexMetadata) { + return null; + } + + @Override + public MetadataFieldMapper timeSeriesIdFieldMapper() { + // non time-series indices must not have a TimeSeriesIdFieldMapper + return null; + } + + @Override + public MetadataFieldMapper timeSeriesRoutingHashFieldMapper() { + // non time-series indices must not have a TimeSeriesRoutingIdFieldMapper + return null; + } + + 
@Override + public IdFieldMapper idFieldMapperWithoutFieldData() { + return ProvidedIdFieldMapper.NO_FIELD_DATA; + } + + @Override + public IdFieldMapper buildIdFieldMapper(BooleanSupplier fieldDataEnabled) { + return new ProvidedIdFieldMapper(fieldDataEnabled); + } + + @Override + public DocumentDimensions buildDocumentDimensions(IndexSettings settings) { + return DocumentDimensions.Noop.INSTANCE; + } + + @Override + public boolean shouldValidateTimestamp() { + return false; + } + + @Override + public void validateSourceFieldMapper(SourceFieldMapper sourceFieldMapper) {} + + @Override + public SourceFieldMapper.Mode defaultSourceMode() { + return SourceFieldMapper.Mode.STORED; + } }; private static final String HOST_NAME = "host.name"; @@ -370,6 +446,7 @@ private static CompressedXContent createDefaultMapping(boolean includeHostName) static final List> VALIDATE_WITH_SETTINGS = List.copyOf( Stream.concat( Stream.of( + IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING, IndexMetadata.INDEX_ROUTING_PARTITION_SIZE_SETTING, IndexMetadata.INDEX_ROUTING_PATH, IndexSettings.TIME_SERIES_START_TIME, @@ -476,11 +553,12 @@ public static IndexMode fromString(String value) { case "standard" -> IndexMode.STANDARD; case "time_series" -> IndexMode.TIME_SERIES; case "logsdb" -> IndexMode.LOGSDB; + case "lookup" -> IndexMode.LOOKUP; default -> throw new IllegalArgumentException( "[" + value + "] is an invalid index mode, valid modes are: [" - + Arrays.stream(IndexMode.values()).map(IndexMode::toString).collect(Collectors.joining()) + + Arrays.stream(IndexMode.values()).map(IndexMode::toString).collect(Collectors.joining(",")) + "]" ); }; @@ -492,6 +570,7 @@ public static IndexMode readFrom(StreamInput in) throws IOException { case 0 -> STANDARD; case 1 -> TIME_SERIES; case 2 -> LOGSDB; + case 3 -> LOOKUP; default -> throw new IllegalStateException("unexpected index mode [" + mode + "]"); }; } @@ -501,6 +580,7 @@ public static void writeTo(IndexMode indexMode, StreamOutput out) throws 
IOExcep case STANDARD -> 0; case TIME_SERIES -> 1; case LOGSDB -> 2; + case LOOKUP -> out.getTransportVersion().onOrAfter(TransportVersions.INDEX_MODE_LOOKUP) ? 3 : 0; }; out.writeByte((byte) code); } @@ -509,4 +589,37 @@ public static void writeTo(IndexMode indexMode, StreamOutput out) throws IOExcep public String toString() { return getName(); } + + /** + * A built-in index setting provider that supplies additional index settings based on the index mode. + * Currently, only the lookup index mode provides non-empty additional settings. + */ + public static final class IndexModeSettingsProvider implements IndexSettingProvider { + @Override + public Settings getAdditionalIndexSettings( + String indexName, + String dataStreamName, + IndexMode templateIndexMode, + Metadata metadata, + Instant resolvedAt, + Settings indexTemplateAndCreateRequestSettings, + List combinedTemplateMappings + ) { + IndexMode indexMode = templateIndexMode; + if (indexMode == null) { + String modeName = indexTemplateAndCreateRequestSettings.get(IndexSettings.MODE.getKey()); + if (modeName != null) { + indexMode = IndexMode.valueOf(modeName.toUpperCase(Locale.ROOT)); + } + } + if (indexMode == LOOKUP) { + return Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-all") + .build(); + } else { + return Settings.EMPTY; + } + } + } } diff --git a/server/src/main/java/org/elasticsearch/monitor/metrics/IndicesMetrics.java b/server/src/main/java/org/elasticsearch/monitor/metrics/IndicesMetrics.java index 11df8710fad6c..ba67bc03e1441 100644 --- a/server/src/main/java/org/elasticsearch/monitor/metrics/IndicesMetrics.java +++ b/server/src/main/java/org/elasticsearch/monitor/metrics/IndicesMetrics.java @@ -55,7 +55,7 @@ public IndicesMetrics(MeterRegistry meterRegistry, IndicesService indicesService } private static List registerAsyncMetrics(MeterRegistry registry, IndicesStatsCache cache) { - final int TOTAL_METRICS = 36; + final int 
TOTAL_METRICS = 48; List metrics = new ArrayList<>(TOTAL_METRICS); for (IndexMode indexMode : IndexMode.values()) { String name = indexMode.getName(); diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 7e3991c1df1f4..784e02059823b 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -80,6 +80,7 @@ import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; @@ -108,6 +109,7 @@ import org.elasticsearch.health.node.tracker.RepositoriesHealthTracker; import org.elasticsearch.health.stats.HealthApiStats; import org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettingProvider; import org.elasticsearch.index.IndexSettingProviders; import org.elasticsearch.index.IndexingPressure; @@ -820,7 +822,10 @@ private void construct( final var parameters = new IndexSettingProvider.Parameters(indicesService::createIndexMapperServiceForValidation); IndexSettingProviders indexSettingProviders = new IndexSettingProviders( - pluginsService.flatMap(p -> p.getAdditionalIndexSettingProviders(parameters)).collect(Collectors.toSet()) + Sets.union( + builtinIndexSettingProviders(), + pluginsService.flatMap(p -> p.getAdditionalIndexSettingProviders(parameters)).collect(Collectors.toSet()) + ) ); final ShardLimitValidator shardLimitValidator = new ShardLimitValidator(settings, clusterService); @@ -1656,4 +1661,7 @@ private Module loadPersistentTasksService( }; } + private Set builtinIndexSettingProviders() { + return Set.of(new 
IndexMode.IndexModeSettingsProvider()); + } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/CreateIndexCapabilities.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/CreateIndexCapabilities.java index 899486399af6b..900a352d42f30 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/CreateIndexCapabilities.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/CreateIndexCapabilities.java @@ -21,5 +21,10 @@ public class CreateIndexCapabilities { */ private static final String LOGSDB_INDEX_MODE_CAPABILITY = "logsdb_index_mode"; - public static Set CAPABILITIES = Set.of(LOGSDB_INDEX_MODE_CAPABILITY); + /** + * Support lookup index mode + */ + private static final String LOOKUP_INDEX_MODE_CAPABILITY = "lookup_index_mode"; + + public static Set CAPABILITIES = Set.of(LOGSDB_INDEX_MODE_CAPABILITY, LOOKUP_INDEX_MODE_CAPABILITY); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java index da04f30ff8023..3960aa5a91cc5 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java @@ -142,7 +142,7 @@ protected static String randomIndexOptions() { protected final DocumentMapper createDocumentMapper(XContentBuilder mappings, IndexMode indexMode) throws IOException { return switch (indexMode) { - case STANDARD -> createDocumentMapper(mappings); + case STANDARD, LOOKUP -> createDocumentMapper(mappings); case TIME_SERIES -> createTimeSeriesModeDocumentMapper(mappings); case LOGSDB -> createLogsModeDocumentMapper(mappings); }; diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java 
b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java index 478a0d08d6612..150eddf039cec 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java @@ -700,6 +700,9 @@ public void testProcessOnceOnPrimary() throws Exception { case LOGSDB: settingsBuilder.put("index.mode", IndexMode.LOGSDB.getName()); break; + case LOOKUP: + settingsBuilder.put("index.mode", IndexMode.LOOKUP.getName()); + break; default: throw new UnsupportedOperationException("Unknown index mode [" + indexMode + "]"); } From 057062bcae2b935294d3b9e91cdffdecd2a34208 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 25 Oct 2024 08:09:46 +1100 Subject: [PATCH 394/449] Mute org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT test {yaml=cluster.stats/30_ccs_stats/cross-cluster search stats search} #115600 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 827a604cd6a19..4af02859d88d4 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -273,6 +273,9 @@ tests: - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/esql/esql-across-clusters/line_197} issue: https://github.com/elastic/elasticsearch/issues/115575 +- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT + method: test {yaml=cluster.stats/30_ccs_stats/cross-cluster search stats search} + issue: https://github.com/elastic/elasticsearch/issues/115600 # Examples: # From d5265bef572eaa87cc07b861ad00c74f8a955fbf Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Thu, 24 Oct 2024 23:17:06 +0200 Subject: [PATCH 395/449] Replace IndexNameExpressionResolver.ExpressionList with imperative logic (#115487) The approach taken by `ExpressionList` 
becomes very expensive for large numbers of indices/datastreams. It implies that large lists of concrete names (as they are passed down from the transport layer via e.g. security) are copied at least twice during iteration. Removing the intermediary list and inlining the logic brings down the latency of searches targeting many shards/indices at once and allows for subsequent optimizations. The removed tests appear redundant as they tested an implementation detail of the IndexNameExpressionResolver which itself is well covered by its own tests. --- .../metadata/IndexNameExpressionResolver.java | 186 +++++------ .../cluster/metadata/ExpressionListTests.java | 309 ------------------ 2 files changed, 85 insertions(+), 410 deletions(-) delete mode 100644 server/src/test/java/org/elasticsearch/cluster/metadata/ExpressionListTests.java diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index 2229166a2d779..39499253c8790 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -48,7 +48,6 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; @@ -253,7 +252,7 @@ protected static Collection resolveExpressions(Context context, String.. } else { return ExplicitResourceNameFilter.filterUnavailable( context, - DateMathExpressionResolver.resolve(context, List.of(expressions)) + DateMathExpressionResolver.resolve(context, Arrays.asList(expressions)) ); } } else { @@ -264,7 +263,10 @@ protected static Collection resolveExpressions(Context context, String.. 
} else { return WildcardExpressionResolver.resolve( context, - ExplicitResourceNameFilter.filterUnavailable(context, DateMathExpressionResolver.resolve(context, List.of(expressions))) + ExplicitResourceNameFilter.filterUnavailable( + context, + DateMathExpressionResolver.resolve(context, Arrays.asList(expressions)) + ) ); } } @@ -1294,34 +1296,51 @@ private static boolean shouldIncludeIfAlias(IndexAbstraction ia, IndexNameExpres * */ public static Collection resolve(Context context, List expressions) { - ExpressionList expressionList = new ExpressionList(context, expressions); // fast exit if there are no wildcards to evaluate - if (expressionList.hasWildcard() == false) { + if (context.getOptions().expandWildcardExpressions() == false) { + return expressions; + } + int firstWildcardIndex = 0; + for (; firstWildcardIndex < expressions.size(); firstWildcardIndex++) { + String expression = expressions.get(firstWildcardIndex); + if (isWildcard(expression)) { + break; + } + } + if (firstWildcardIndex == expressions.size()) { return expressions; } Set result = new HashSet<>(); - for (ExpressionList.Expression expression : expressionList) { - if (expression.isWildcard()) { - Stream matchingResources = matchResourcesToWildcard(context, expression.get()); + for (int i = 0; i < firstWildcardIndex; i++) { + result.add(expressions.get(i)); + } + AtomicBoolean emptyWildcardExpansion = context.getOptions().allowNoIndices() ? null : new AtomicBoolean(); + for (int i = firstWildcardIndex; i < expressions.size(); i++) { + String expression = expressions.get(i); + boolean isExclusion = i > firstWildcardIndex && expression.charAt(0) == '-'; + if (i == firstWildcardIndex || isWildcard(expression)) { + Stream matchingResources = matchResourcesToWildcard( + context, + isExclusion ? 
expression.substring(1) : expression + ); Stream matchingOpenClosedNames = expandToOpenClosed(context, matchingResources); - AtomicBoolean emptyWildcardExpansion = new AtomicBoolean(false); - if (context.getOptions().allowNoIndices() == false) { + if (emptyWildcardExpansion != null) { emptyWildcardExpansion.set(true); matchingOpenClosedNames = matchingOpenClosedNames.peek(x -> emptyWildcardExpansion.set(false)); } - if (expression.isExclusion()) { - matchingOpenClosedNames.forEachOrdered(result::remove); + if (isExclusion) { + matchingOpenClosedNames.forEach(result::remove); } else { - matchingOpenClosedNames.forEachOrdered(result::add); + matchingOpenClosedNames.forEach(result::add); } - if (emptyWildcardExpansion.get()) { - throw notFoundException(expression.get()); + if (emptyWildcardExpansion != null && emptyWildcardExpansion.get()) { + throw notFoundException(expression); } } else { - if (expression.isExclusion()) { - result.remove(expression.get()); + if (isExclusion) { + result.remove(expression.substring(1)); } else { - result.add(expression.get()); + result.add(expression); } } } @@ -1507,27 +1526,35 @@ private DateMathExpressionResolver() { // utility class } + /** + * Resolves date math expressions. If this is a noop the given {@code expressions} list is returned without copying. + * As a result callers of this method should not mutate the returned list. Mutating it may come with unexpected side effects. 
+ */ public static List resolve(Context context, List expressions) { - List result = new ArrayList<>(expressions.size()); - for (ExpressionList.Expression expression : new ExpressionList(context, expressions)) { - result.add(resolveExpression(expression, context::getStartTime)); + boolean wildcardSeen = false; + final boolean expandWildcards = context.getOptions().expandWildcardExpressions(); + String[] result = null; + for (int i = 0, n = expressions.size(); i < n; i++) { + String expression = expressions.get(i); + // accepts date-math exclusions that are of the form "-<...{}>", i.e. the "-" is outside the "<>" date-math template + boolean isExclusion = wildcardSeen && expression.startsWith("-"); + wildcardSeen = wildcardSeen || (expandWildcards && isWildcard(expression)); + String toResolve = isExclusion ? expression.substring(1) : expression; + String resolved = resolveExpression(toResolve, context::getStartTime); + if (toResolve != resolved) { + if (result == null) { + result = expressions.toArray(Strings.EMPTY_ARRAY); + } + result[i] = isExclusion ? "-" + resolved : resolved; + } } - return result; + return result == null ? expressions : Arrays.asList(result); } static String resolveExpression(String expression) { return resolveExpression(expression, System::currentTimeMillis); } - static String resolveExpression(ExpressionList.Expression expression, LongSupplier getTime) { - if (expression.isExclusion()) { - // accepts date-math exclusions that are of the form "-<...{}>", i.e. 
the "-" is outside the "<>" date-math template - return "-" + resolveExpression(expression.get(), getTime); - } else { - return resolveExpression(expression.get(), getTime); - } - } - static String resolveExpression(String expression, LongSupplier getTime) { if (expression.startsWith(EXPRESSION_LEFT_BOUND) == false || expression.endsWith(EXPRESSION_RIGHT_BOUND) == false) { return expression; @@ -1689,14 +1716,35 @@ private ExplicitResourceNameFilter() { */ public static List filterUnavailable(Context context, List expressions) { ensureRemoteIndicesRequireIgnoreUnavailable(context.getOptions(), expressions); - List result = new ArrayList<>(expressions.size()); - for (ExpressionList.Expression expression : new ExpressionList(context, expressions)) { - validateAliasOrIndex(expression); - if (expression.isWildcard() || expression.isExclusion() || ensureAliasOrIndexExists(context, expression.get())) { - result.add(expression.expression()); + final boolean expandWildcards = context.getOptions().expandWildcardExpressions(); + boolean wildcardSeen = false; + List result = null; + for (int i = 0; i < expressions.size(); i++) { + String expression = expressions.get(i); + if (Strings.isEmpty(expression)) { + throw notFoundException(expression); + } + // Expressions can not start with an underscore. This is reserved for APIs. If the check gets here, the API + // does not exist and the path is interpreted as an expression. If the expression begins with an underscore, + // throw a specific error that is different from the [[IndexNotFoundException]], which is typically thrown + // if the expression can't be found. 
+ if (expression.charAt(0) == '_') { + throw new InvalidIndexNameException(expression, "must not start with '_'."); + } + final boolean isWildcard = expandWildcards && isWildcard(expression); + if (isWildcard || (wildcardSeen && expression.charAt(0) == '-') || ensureAliasOrIndexExists(context, expression)) { + if (result != null) { + result.add(expression); + } + } else { + if (result == null) { + result = new ArrayList<>(expressions.size() - 1); + result.addAll(expressions.subList(0, i)); + } } + wildcardSeen |= isWildcard; } - return result; + return result == null ? expressions : result; } /** @@ -1736,19 +1784,6 @@ private static boolean ensureAliasOrIndexExists(Context context, String name) { return true; } - private static void validateAliasOrIndex(ExpressionList.Expression expression) { - if (Strings.isEmpty(expression.expression())) { - throw notFoundException(expression.expression()); - } - // Expressions can not start with an underscore. This is reserved for APIs. If the check gets here, the API - // does not exist and the path is interpreted as an expression. If the expression begins with an underscore, - // throw a specific error that is different from the [[IndexNotFoundException]], which is typically thrown - // if the expression can't be found. - if (expression.expression().charAt(0) == '_') { - throw new InvalidIndexNameException(expression.expression(), "must not start with '_'."); - } - } - private static void ensureRemoteIndicesRequireIgnoreUnavailable(IndicesOptions options, List indexExpressions) { if (options.ignoreUnavailable()) { return; @@ -1773,57 +1808,6 @@ private static void failOnRemoteIndicesNotIgnoringUnavailable(List index } } - /** - * Used to iterate expression lists and work out which expression item is a wildcard or an exclusion. 
- */ - public static final class ExpressionList implements Iterable { - private final List expressionsList; - private final boolean hasWildcard; - - public record Expression(String expression, boolean isWildcard, boolean isExclusion) { - public String get() { - if (isExclusion()) { - // drop the leading "-" if exclusion because it is easier for callers to handle it like this - return expression().substring(1); - } else { - return expression(); - } - } - } - - /** - * Creates the expression iterable that can be used to easily check which expression item is a wildcard or an exclusion (or both). - * The {@param context} is used to check if wildcards ought to be considered or not. - */ - public ExpressionList(Context context, List expressionStrings) { - List expressionsList = new ArrayList<>(expressionStrings.size()); - boolean wildcardSeen = false; - for (String expressionString : expressionStrings) { - boolean isExclusion = expressionString.startsWith("-") && wildcardSeen; - if (context.getOptions().expandWildcardExpressions() && isWildcard(expressionString)) { - wildcardSeen = true; - expressionsList.add(new Expression(expressionString, true, isExclusion)); - } else { - expressionsList.add(new Expression(expressionString, false, isExclusion)); - } - } - this.expressionsList = expressionsList; - this.hasWildcard = wildcardSeen; - } - - /** - * Returns {@code true} if the expression contains any wildcard and the options allow wildcard expansion - */ - public boolean hasWildcard() { - return this.hasWildcard; - } - - @Override - public Iterator iterator() { - return expressionsList.iterator(); - } - } - /** * This is a context for the DateMathExpressionResolver which does not require {@code IndicesOptions} or {@code ClusterState} * since it uses only the start time to resolve expressions. 
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ExpressionListTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ExpressionListTests.java deleted file mode 100644 index 1ca59ff402bd8..0000000000000 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/ExpressionListTests.java +++ /dev/null @@ -1,309 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.cluster.metadata; - -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.Context; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ExpressionList; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ExpressionList.Expression; -import org.elasticsearch.core.Tuple; -import org.elasticsearch.test.ESTestCase; - -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.function.Supplier; - -import static org.hamcrest.Matchers.is; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class ExpressionListTests extends ESTestCase { - - public void testEmpty() { - ExpressionList expressionList = new ExpressionList(getContextWithOptions(getExpandWildcardsIndicesOptions()), List.of()); - assertThat(expressionList.iterator().hasNext(), is(false)); - assertThat(expressionList.hasWildcard(), is(false)); - expressionList = new ExpressionList(getContextWithOptions(getNoExpandWildcardsIndicesOptions()), List.of()); - 
assertThat(expressionList.iterator().hasNext(), is(false)); - assertThat(expressionList.hasWildcard(), is(false)); - } - - public void testExplicitSingleNameExpression() { - for (IndicesOptions indicesOptions : List.of(getExpandWildcardsIndicesOptions(), getNoExpandWildcardsIndicesOptions())) { - for (String expressionString : List.of("non_wildcard", "-non_exclusion")) { - ExpressionList expressionList = new ExpressionList(getContextWithOptions(indicesOptions), List.of(expressionString)); - assertThat(expressionList.hasWildcard(), is(false)); - if (randomBoolean()) { - expressionList = new ExpressionList(getContextWithOptions(indicesOptions), List.of(expressionString)); - } - Iterator expressionIterator = expressionList.iterator(); - assertThat(expressionIterator.hasNext(), is(true)); - if (randomBoolean()) { - expressionIterator = expressionList.iterator(); - } - Expression expression = expressionIterator.next(); - assertThat(expression.isExclusion(), is(false)); - assertThat(expression.isWildcard(), is(false)); - assertThat(expression.get(), is(expressionString)); - assertThat(expressionIterator.hasNext(), is(false)); - } - } - } - - public void testWildcardSingleExpression() { - for (String wildcardTest : List.of("*", "a*", "*b", "a*b", "a-*b", "a*-b", "-*", "-a*", "-*b", "**", "*-*")) { - ExpressionList expressionList = new ExpressionList( - getContextWithOptions(getExpandWildcardsIndicesOptions()), - List.of(wildcardTest) - ); - assertThat(expressionList.hasWildcard(), is(true)); - if (randomBoolean()) { - expressionList = new ExpressionList(getContextWithOptions(getExpandWildcardsIndicesOptions()), List.of(wildcardTest)); - } - Iterator expressionIterator = expressionList.iterator(); - assertThat(expressionIterator.hasNext(), is(true)); - if (randomBoolean()) { - expressionIterator = expressionList.iterator(); - } - Expression expression = expressionIterator.next(); - assertThat(expression.isExclusion(), is(false)); - assertThat(expression.isWildcard(), 
is(true)); - assertThat(expression.get(), is(wildcardTest)); - assertThat(expressionIterator.hasNext(), is(false)); - } - } - - public void testWildcardLongerExpression() { - List onlyExplicits = randomList(7, () -> randomAlphaOfLengthBetween(0, 5)); - String wildcard = randomFrom("*", "*b", "-*", "*-", "c*", "a*b", "**"); - List expressionList = new ArrayList<>(onlyExplicits.size() + 1); - expressionList.addAll(randomSubsetOf(onlyExplicits)); - int wildcardPos = expressionList.size(); - expressionList.add(wildcard); - for (String item : onlyExplicits) { - if (expressionList.contains(item) == false) { - expressionList.add(item); - } - } - ExpressionList expressionIterable = new ExpressionList(getContextWithOptions(getExpandWildcardsIndicesOptions()), expressionList); - assertThat(expressionIterable.hasWildcard(), is(true)); - if (randomBoolean()) { - expressionIterable = new ExpressionList(getContextWithOptions(getExpandWildcardsIndicesOptions()), expressionList); - } - int i = 0; - for (Expression expression : expressionIterable) { - assertThat(expression.isExclusion(), is(false)); - if (i != wildcardPos) { - assertThat(expression.isWildcard(), is(false)); - } else { - assertThat(expression.isWildcard(), is(true)); - } - assertThat(expression.get(), is(expressionList.get(i++))); - } - } - - public void testWildcardsNoExclusionExpressions() { - for (List wildcardExpression : List.of( - List.of("*"), - List.of("a", "*"), - List.of("-b", "*c"), - List.of("-", "a", "c*"), - List.of("*", "a*", "*b"), - List.of("-*", "a", "b*") - )) { - ExpressionList expressionList = new ExpressionList( - getContextWithOptions(getExpandWildcardsIndicesOptions()), - wildcardExpression - ); - assertThat(expressionList.hasWildcard(), is(true)); - if (randomBoolean()) { - expressionList = new ExpressionList(getContextWithOptions(getExpandWildcardsIndicesOptions()), wildcardExpression); - } - int i = 0; - for (Expression expression : expressionList) { - assertThat(expression.isExclusion(), 
is(false)); - if (wildcardExpression.get(i).contains("*")) { - assertThat(expression.isWildcard(), is(true)); - } else { - assertThat(expression.isWildcard(), is(false)); - } - assertThat(expression.get(), is(wildcardExpression.get(i++))); - } - } - } - - public void testWildcardExpressionNoExpandOptions() { - for (List wildcardExpression : List.of( - List.of("*"), - List.of("a", "*"), - List.of("-b", "*c"), - List.of("*d", "-"), - List.of("*", "-*"), - List.of("-", "a", "c*"), - List.of("*", "a*", "*b") - )) { - ExpressionList expressionList = new ExpressionList( - getContextWithOptions(getNoExpandWildcardsIndicesOptions()), - wildcardExpression - ); - assertThat(expressionList.hasWildcard(), is(false)); - if (randomBoolean()) { - expressionList = new ExpressionList(getContextWithOptions(getNoExpandWildcardsIndicesOptions()), wildcardExpression); - } - int i = 0; - for (Expression expression : expressionList) { - assertThat(expression.isWildcard(), is(false)); - assertThat(expression.isExclusion(), is(false)); - assertThat(expression.get(), is(wildcardExpression.get(i++))); - } - } - } - - public void testSingleExclusionExpression() { - String wildcard = randomFrom("*", "*b", "-*", "*-", "c*", "a*b", "**", "*-*"); - int wildcardPos = randomIntBetween(0, 3); - String exclusion = randomFrom("-*", "-", "-c*", "-ab", "--"); - int exclusionPos = randomIntBetween(wildcardPos + 1, 7); - List exclusionExpression = new ArrayList<>(); - for (int i = 0; i < wildcardPos; i++) { - exclusionExpression.add(randomAlphaOfLengthBetween(0, 5)); - } - exclusionExpression.add(wildcard); - for (int i = wildcardPos + 1; i < exclusionPos; i++) { - exclusionExpression.add(randomAlphaOfLengthBetween(0, 5)); - } - exclusionExpression.add(exclusion); - for (int i = 0; i < randomIntBetween(0, 3); i++) { - exclusionExpression.add(randomAlphaOfLengthBetween(0, 5)); - } - ExpressionList expressionList = new ExpressionList(getContextWithOptions(getExpandWildcardsIndicesOptions()), 
exclusionExpression); - if (randomBoolean()) { - assertThat(expressionList.hasWildcard(), is(true)); - } - int i = 0; - for (Expression expression : expressionList) { - if (i == wildcardPos) { - assertThat(expression.isWildcard(), is(true)); - assertThat(expression.isExclusion(), is(false)); - assertThat(expression.get(), is(exclusionExpression.get(i++))); - } else if (i == exclusionPos) { - assertThat(expression.isExclusion(), is(true)); - assertThat(expression.isWildcard(), is(exclusionExpression.get(i).contains("*"))); - assertThat(expression.get(), is(exclusionExpression.get(i++).substring(1))); - } else { - assertThat(expression.isWildcard(), is(false)); - assertThat(expression.isExclusion(), is(false)); - assertThat(expression.get(), is(exclusionExpression.get(i++))); - } - } - } - - public void testExclusionsExpression() { - for (Tuple, List> exclusionExpression : List.of( - new Tuple<>(List.of("-a", "*", "-a"), List.of(false, false, true)), - new Tuple<>(List.of("-b*", "c", "-a"), List.of(false, false, true)), - new Tuple<>(List.of("*d", "-", "*b"), List.of(false, true, false)), - new Tuple<>(List.of("-", "--", "-*", "", "-*"), List.of(false, false, false, false, true)), - new Tuple<>(List.of("*-", "-*", "a", "-b"), List.of(false, true, false, true)), - new Tuple<>(List.of("a", "-b", "-*", "-b", "*", "-b"), List.of(false, false, false, true, false, true)), - new Tuple<>(List.of("-a", "*d", "-a", "-*b", "-b", "--"), List.of(false, false, true, true, true, true)) - )) { - ExpressionList expressionList = new ExpressionList( - getContextWithOptions(getExpandWildcardsIndicesOptions()), - exclusionExpression.v1() - ); - if (randomBoolean()) { - assertThat(expressionList.hasWildcard(), is(true)); - } - int i = 0; - for (Expression expression : expressionList) { - boolean isExclusion = exclusionExpression.v2().get(i); - assertThat(expression.isExclusion(), is(isExclusion)); - assertThat(expression.isWildcard(), is(exclusionExpression.v1().get(i).contains("*"))); - 
if (isExclusion) { - assertThat(expression.get(), is(exclusionExpression.v1().get(i++).substring(1))); - } else { - assertThat(expression.get(), is(exclusionExpression.v1().get(i++))); - } - } - } - } - - private IndicesOptions getExpandWildcardsToOpenOnlyIndicesOptions() { - return IndicesOptions.fromOptions( - randomBoolean(), - randomBoolean(), - true, - false, - randomBoolean(), - randomBoolean(), - randomBoolean(), - randomBoolean(), - randomBoolean() - ); - } - - private IndicesOptions getExpandWildcardsToCloseOnlyIndicesOptions() { - return IndicesOptions.fromOptions( - randomBoolean(), - randomBoolean(), - false, - true, - randomBoolean(), - randomBoolean(), - randomBoolean(), - randomBoolean(), - randomBoolean() - ); - } - - private IndicesOptions getExpandWildcardsToOpenCloseIndicesOptions() { - return IndicesOptions.fromOptions( - randomBoolean(), - randomBoolean(), - true, - true, - randomBoolean(), - randomBoolean(), - randomBoolean(), - randomBoolean(), - randomBoolean() - ); - } - - private IndicesOptions getExpandWildcardsIndicesOptions() { - return ESTestCase.>randomFrom( - this::getExpandWildcardsToOpenOnlyIndicesOptions, - this::getExpandWildcardsToCloseOnlyIndicesOptions, - this::getExpandWildcardsToOpenCloseIndicesOptions - ).get(); - } - - private IndicesOptions getNoExpandWildcardsIndicesOptions() { - return IndicesOptions.fromOptions( - randomBoolean(), - randomBoolean(), - false, - false, - randomBoolean(), - randomBoolean(), - randomBoolean(), - randomBoolean(), - randomBoolean() - ); - } - - private Context getContextWithOptions(IndicesOptions indicesOptions) { - Context context = mock(Context.class); - when(context.getOptions()).thenReturn(indicesOptions); - return context; - } -} From b2ab9df1a9ff71442ad8d695ec15fcf8b72e133d Mon Sep 17 00:00:00 2001 From: David Kyle Date: Thu, 24 Oct 2024 22:33:56 +0100 Subject: [PATCH 396/449] [ML] Fix timeout attaching to missing deployment (#115517) Fixes a timeout in the Inference API where if 
connecting to an existing deployment and that deployment does not exist the listener was not called. --- .../xpack/inference/CreateFromDeploymentIT.java | 8 ++++++++ .../ElasticsearchInternalService.java | 14 +++++++------- 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java index 0bfb6e9e43b03..273b16d295a3d 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java @@ -109,6 +109,14 @@ public void testModelIdDoesNotMatch() throws IOException { ); } + public void testDeploymentDoesNotExist() { + var deploymentId = "missing_deployment"; + + var inferenceId = "inference_on_missing_deployment"; + var e = expectThrows(ResponseException.class, () -> putModel(inferenceId, endpointConfig(deploymentId), TaskType.SPARSE_EMBEDDING)); + assertThat(e.getMessage(), containsString("Cannot find deployment [missing_deployment]")); + } + public void testNumAllocationsIsUpdated() throws IOException { var modelId = "update_num_allocations"; var deploymentId = modelId; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index a0235f74ce511..fec690199d97d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -34,7 +34,6 @@ import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; import org.elasticsearch.xpack.core.ml.action.GetDeploymentStatsAction; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; -import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsStatsAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentStats; @@ -913,7 +912,7 @@ private void validateAgainstDeployment( listener.onFailure( new ElasticsearchStatusException( "Deployment [{}] uses model [{}] which does not match the model [{}] in the request.", - RestStatus.BAD_REQUEST, // TODO better message + RestStatus.BAD_REQUEST, deploymentId, response.get().getModelId(), modelId @@ -933,21 +932,22 @@ private void validateAgainstDeployment( checkTaskTypeForMlNodeModel(response.get().getModelId(), taskType, l.delegateFailureAndWrap((l2, compatibleTaskType) -> { l2.onResponse(updatedSettings); })); + } else { + listener.onFailure(new ElasticsearchStatusException("Cannot find deployment [{}]", RestStatus.NOT_FOUND, deploymentId)); } })); } private void getDeployment(String deploymentId, ActionListener> listener) { client.execute( - GetTrainedModelsStatsAction.INSTANCE, - new GetTrainedModelsStatsAction.Request(deploymentId), + GetDeploymentStatsAction.INSTANCE, + new GetDeploymentStatsAction.Request(deploymentId), listener.delegateFailureAndWrap((l, response) -> { l.onResponse( - response.getResources() + response.getStats() .results() .stream() - .filter(s -> s.getDeploymentStats() != null && s.getDeploymentStats().getDeploymentId().equals(deploymentId)) - .map(GetTrainedModelsStatsAction.Response.TrainedModelStats::getDeploymentStats) + .filter(s -> 
s.getDeploymentId() != null && s.getDeploymentId().equals(deploymentId)) .findFirst() ); }) From c556a293c384b92a9ef71ec37bd49fb143300236 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 25 Oct 2024 08:50:44 +1100 Subject: [PATCH 397/449] Mute org.elasticsearch.test.rest.ClientYamlTestSuiteIT test {yaml=indices.create/10_basic/Create lookup index} #115605 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 4af02859d88d4..084bf27d6a11b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -276,6 +276,9 @@ tests: - class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT method: test {yaml=cluster.stats/30_ccs_stats/cross-cluster search stats search} issue: https://github.com/elastic/elasticsearch/issues/115600 +- class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT + method: test {yaml=indices.create/10_basic/Create lookup index} + issue: https://github.com/elastic/elasticsearch/issues/115605 # Examples: # From 5714b989fabcf944fb719f31200661789e0824f2 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Thu, 24 Oct 2024 16:58:41 -0700 Subject: [PATCH 398/449] Do not run lookup index YAML with two shards (#115608) We can randomly inject a global template that defaults to 2 shards instead of 1. This causes the lookup index YAML tests to fail. 
To avoid this, the change requires specifying the default_shards setting for these tests --- .../resources/rest-api-spec/test/indices.create/10_basic.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/10_basic.yml index 31d127b80c844..d0e1759073e1b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/10_basic.yml @@ -153,7 +153,7 @@ --- "Create lookup index": - requires: - test_runner_features: [ capabilities ] + test_runner_features: [ capabilities, default_shards ] capabilities: - method: PUT path: /{index} @@ -176,7 +176,7 @@ --- "Create lookup index with one shard": - requires: - test_runner_features: [ capabilities ] + test_runner_features: [ capabilities, default_shards ] capabilities: - method: PUT path: /{index} From bbd887a66a1330188047825799dc8368dbd56ba8 Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Fri, 25 Oct 2024 07:45:40 +0200 Subject: [PATCH 399/449] Identify system threads using a Thread subclass (#113562) --- .../common/util/concurrent/EsExecutors.java | 35 ++++++++++--- .../DefaultBuiltInExecutorBuilders.java | 12 +++-- .../threadpool/ExecutorBuilder.java | 7 ++- .../threadpool/FixedExecutorBuilder.java | 49 +++++++++++++++++-- .../threadpool/ScalingExecutorBuilder.java | 4 +- .../util/concurrent/EsExecutorsTests.java | 8 ++- 6 files changed, 98 insertions(+), 17 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java index b10db7d4d1dd3..9120576815bac 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java +++ 
b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java @@ -326,16 +326,25 @@ public static String executorName(Thread thread) { } public static ThreadFactory daemonThreadFactory(Settings settings, String namePrefix) { - return daemonThreadFactory(threadName(settings, namePrefix)); + return createDaemonThreadFactory(threadName(settings, namePrefix), false); } public static ThreadFactory daemonThreadFactory(String nodeName, String namePrefix) { + return daemonThreadFactory(nodeName, namePrefix, false); + } + + public static ThreadFactory daemonThreadFactory(String nodeName, String namePrefix, boolean isSystemThread) { assert nodeName != null && false == nodeName.isEmpty(); - return daemonThreadFactory(threadName(nodeName, namePrefix)); + return createDaemonThreadFactory(threadName(nodeName, namePrefix), isSystemThread); } - public static ThreadFactory daemonThreadFactory(String namePrefix) { - return new EsThreadFactory(namePrefix); + public static ThreadFactory daemonThreadFactory(String name) { + assert name != null && name.isEmpty() == false; + return createDaemonThreadFactory(name, false); + } + + private static ThreadFactory createDaemonThreadFactory(String namePrefix, boolean isSystemThread) { + return new EsThreadFactory(namePrefix, isSystemThread); } static class EsThreadFactory implements ThreadFactory { @@ -343,22 +352,36 @@ static class EsThreadFactory implements ThreadFactory { final ThreadGroup group; final AtomicInteger threadNumber = new AtomicInteger(1); final String namePrefix; + final boolean isSystem; - EsThreadFactory(String namePrefix) { + EsThreadFactory(String namePrefix, boolean isSystem) { this.namePrefix = namePrefix; SecurityManager s = System.getSecurityManager(); group = (s != null) ? 
s.getThreadGroup() : Thread.currentThread().getThreadGroup(); + this.isSystem = isSystem; } @Override public Thread newThread(Runnable r) { return AccessController.doPrivileged((PrivilegedAction) () -> { - Thread t = new Thread(group, r, namePrefix + "[T#" + threadNumber.getAndIncrement() + "]", 0); + Thread t = new EsThread(group, r, namePrefix + "[T#" + threadNumber.getAndIncrement() + "]", 0, isSystem); t.setDaemon(true); return t; }); } + } + public static class EsThread extends Thread { + private final boolean isSystem; + + EsThread(ThreadGroup group, Runnable target, String name, long stackSize, boolean isSystem) { + super(group, target, name, stackSize); + this.isSystem = isSystem; + } + + public boolean isSystem() { + return isSystem; + } } /** diff --git a/server/src/main/java/org/elasticsearch/threadpool/DefaultBuiltInExecutorBuilders.java b/server/src/main/java/org/elasticsearch/threadpool/DefaultBuiltInExecutorBuilders.java index c3a24d012c013..a97d22a976631 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/DefaultBuiltInExecutorBuilders.java +++ b/server/src/main/java/org/elasticsearch/threadpool/DefaultBuiltInExecutorBuilders.java @@ -170,7 +170,8 @@ public Map getBuilders(Settings settings, int allocated ThreadPool.Names.SYSTEM_READ, halfProcMaxAt5, 2000, - EsExecutors.TaskTrackingConfig.DO_NOT_TRACK + EsExecutors.TaskTrackingConfig.DO_NOT_TRACK, + true ) ); result.put( @@ -180,7 +181,8 @@ public Map getBuilders(Settings settings, int allocated ThreadPool.Names.SYSTEM_WRITE, halfProcMaxAt5, 1000, - new EsExecutors.TaskTrackingConfig(true, indexAutoscalingEWMA) + new EsExecutors.TaskTrackingConfig(true, indexAutoscalingEWMA), + true ) ); result.put( @@ -190,7 +192,8 @@ public Map getBuilders(Settings settings, int allocated ThreadPool.Names.SYSTEM_CRITICAL_READ, halfProcMaxAt5, 2000, - EsExecutors.TaskTrackingConfig.DO_NOT_TRACK + EsExecutors.TaskTrackingConfig.DO_NOT_TRACK, + true ) ); result.put( @@ -200,7 +203,8 @@ public Map 
getBuilders(Settings settings, int allocated ThreadPool.Names.SYSTEM_CRITICAL_WRITE, halfProcMaxAt5, 1500, - new EsExecutors.TaskTrackingConfig(true, indexAutoscalingEWMA) + new EsExecutors.TaskTrackingConfig(true, indexAutoscalingEWMA), + true ) ); return unmodifiableMap(result); diff --git a/server/src/main/java/org/elasticsearch/threadpool/ExecutorBuilder.java b/server/src/main/java/org/elasticsearch/threadpool/ExecutorBuilder.java index 2337d51d07571..c259feb1c978e 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ExecutorBuilder.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ExecutorBuilder.java @@ -24,9 +24,11 @@ public abstract class ExecutorBuilder { private final String name; + private final boolean isSystemThread; - public ExecutorBuilder(String name) { + public ExecutorBuilder(String name, boolean isSystemThread) { this.name = name; + this.isSystemThread = isSystemThread; } protected String name() { @@ -90,4 +92,7 @@ abstract static class ExecutorSettings { } + public boolean isSystemThread() { + return isSystemThread; + } } diff --git a/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java b/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java index 07db563da39a1..9c723f241f1d0 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java +++ b/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java @@ -51,7 +51,28 @@ public final class FixedExecutorBuilder extends ExecutorBuilder( sizeKey, @@ -102,7 +145,7 @@ FixedExecutorSettings getSettings(Settings settings) { ThreadPool.ExecutorHolder build(final FixedExecutorSettings settings, final ThreadContext threadContext) { int size = settings.size; int queueSize = settings.queueSize; - final ThreadFactory threadFactory = EsExecutors.daemonThreadFactory(EsExecutors.threadName(settings.nodeName, name())); + final ThreadFactory threadFactory = EsExecutors.daemonThreadFactory(settings.nodeName, 
name(), isSystemThread()); final ExecutorService executor = EsExecutors.newFixed( settings.nodeName + "/" + name(), size, diff --git a/server/src/main/java/org/elasticsearch/threadpool/ScalingExecutorBuilder.java b/server/src/main/java/org/elasticsearch/threadpool/ScalingExecutorBuilder.java index a31f940cdb2dc..1017d41a77444 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ScalingExecutorBuilder.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ScalingExecutorBuilder.java @@ -104,7 +104,7 @@ public ScalingExecutorBuilder( final String prefix, final EsExecutors.TaskTrackingConfig trackingConfig ) { - super(name); + super(name, false); this.coreSetting = Setting.intSetting(settingsKey(prefix, "core"), core, Setting.Property.NodeScope); this.maxSetting = Setting.intSetting(settingsKey(prefix, "max"), max, Setting.Property.NodeScope); this.keepAliveSetting = Setting.timeSetting(settingsKey(prefix, "keep_alive"), keepAlive, Setting.Property.NodeScope); @@ -131,7 +131,7 @@ ThreadPool.ExecutorHolder build(final ScalingExecutorSettings settings, final Th int core = settings.core; int max = settings.max; final ThreadPool.Info info = new ThreadPool.Info(name(), ThreadPool.ThreadPoolType.SCALING, core, max, keepAlive, null); - final ThreadFactory threadFactory = EsExecutors.daemonThreadFactory(EsExecutors.threadName(settings.nodeName, name())); + final ThreadFactory threadFactory = EsExecutors.daemonThreadFactory(settings.nodeName, name()); ExecutorService executor; executor = EsExecutors.newScaling( settings.nodeName + "/" + name(), diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java b/server/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java index bdfec9dfaa630..2867c9e007937 100644 --- a/server/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java @@ -635,15 +635,19 
@@ public void testParseExecutorName() throws InterruptedException { final var executorName = randomAlphaOfLength(10); final String nodeName = rarely() ? null : randomIdentifier(); final ThreadFactory threadFactory; + final boolean isSystem; if (nodeName == null) { + isSystem = false; threadFactory = EsExecutors.daemonThreadFactory(Settings.EMPTY, executorName); } else if (randomBoolean()) { + isSystem = false; threadFactory = EsExecutors.daemonThreadFactory( Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), nodeName).build(), executorName ); } else { - threadFactory = EsExecutors.daemonThreadFactory(nodeName, executorName); + isSystem = randomBoolean(); + threadFactory = EsExecutors.daemonThreadFactory(nodeName, executorName, isSystem); } final var thread = threadFactory.newThread(() -> {}); @@ -652,6 +656,8 @@ public void testParseExecutorName() throws InterruptedException { assertThat(EsExecutors.executorName(thread), equalTo(executorName)); assertThat(EsExecutors.executorName("TEST-" + thread.getName()), is(nullValue())); assertThat(EsExecutors.executorName("LuceneTestCase" + thread.getName()), is(nullValue())); + assertThat(EsExecutors.executorName("LuceneTestCase" + thread.getName()), is(nullValue())); + assertThat(((EsExecutors.EsThread) thread).isSystem(), equalTo(isSystem)); } finally { thread.join(); } From 7f573c6c28fb42e89d8bb76d6764dc681c239e06 Mon Sep 17 00:00:00 2001 From: Matteo Piergiovanni <134913285+piergm@users.noreply.github.com> Date: Fri, 25 Oct 2024 08:50:05 +0200 Subject: [PATCH 400/449] Only aggregations require at least one shard request (#115314) * unskipping shards only when aggs * Update docs/changelog/115314.yaml * fixed more tests * null check for searchRequest.source() --- docs/changelog/115314.yaml | 5 +++ .../datastreams/TSDBIndexingIT.java | 2 +- .../org/elasticsearch/search/CCSDuelIT.java | 4 ++- .../test/multi_cluster/70_skip_shards.yml | 12 +++---- .../multi_cluster/90_index_name_query.yml | 4 +-- 
.../search/ccs/CrossClusterSearchIT.java | 4 +-- .../search/profile/query/QueryProfilerIT.java | 6 +++- .../search/stats/FieldUsageStatsIT.java | 12 ++++--- .../action/search/TransportSearchAction.java | 4 ++- .../search/CrossClusterAsyncSearchIT.java | 32 +++++++++++++------ .../mapper/SearchIdleTests.java | 10 ++---- .../rrf/RRFRankCoordinatorCanMatchIT.java | 5 +-- .../rank/rrf/RRFRankShardCanMatchIT.java | 5 +-- ...pshotsCanMatchOnCoordinatorIntegTests.java | 12 +++---- .../checkpoint/TransformCCSCanMatchIT.java | 6 ++-- .../oldrepos/OldRepositoryAccessIT.java | 3 +- 16 files changed, 70 insertions(+), 56 deletions(-) create mode 100644 docs/changelog/115314.yaml diff --git a/docs/changelog/115314.yaml b/docs/changelog/115314.yaml new file mode 100644 index 0000000000000..76ac12d58fcf3 --- /dev/null +++ b/docs/changelog/115314.yaml @@ -0,0 +1,5 @@ +pr: 115314 +summary: Only aggregations require at least one shard request +area: Search +type: enhancement +issues: [] diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java index 29ec326548f2b..aad68660d2e4d 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java @@ -412,7 +412,7 @@ public void testSkippingShards() throws Exception { assertResponse(client().search(searchRequest), searchResponse -> { ElasticsearchAssertions.assertNoSearchHits(searchResponse); assertThat(searchResponse.getTotalShards(), equalTo(2)); - assertThat(searchResponse.getSkippedShards(), equalTo(1)); + assertThat(searchResponse.getSkippedShards(), equalTo(2)); assertThat(searchResponse.getSuccessfulShards(), equalTo(2)); }); } diff --git a/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java 
b/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java index 5dde1d664402f..79cdc1047aec9 100644 --- a/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java +++ b/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java @@ -43,6 +43,7 @@ import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; +import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; @@ -580,13 +581,14 @@ public void testSortByField() throws Exception { public void testSortByFieldOneClusterHasNoResults() throws Exception { assumeMultiClusterSetup(); - // set to a value greater than the number of shards to avoid differences due to the skipping of shards + // setting aggs to avoid differences due to the skipping of shards when matching none SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); boolean onlyRemote = randomBoolean(); sourceBuilder.query(new TermQueryBuilder("_index", onlyRemote ? 
REMOTE_INDEX_NAME : INDEX_NAME)); sourceBuilder.sort("type.keyword", SortOrder.ASC); sourceBuilder.sort("creationDate", SortOrder.DESC); sourceBuilder.sort("user.keyword", SortOrder.ASC); + sourceBuilder.aggregation(AggregationBuilders.max("max").field("creationDate")); CheckedConsumer responseChecker = response -> { assertHits(response); int size = response.evaluateArraySize("hits.hits"); diff --git a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/70_skip_shards.yml b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/70_skip_shards.yml index 92ae11c712b25..f392ae6d09413 100644 --- a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/70_skip_shards.yml +++ b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/70_skip_shards.yml @@ -166,8 +166,7 @@ - match: { hits.total.value: 0 } - match: { _shards.total: 2 } - match: { _shards.successful: 2 } - # When all shards are skipped current logic returns 1 to produce a valid search result - - match: { _shards.skipped : 1} + - match: { _shards.skipped : 2} - match: { _shards.failed: 0 } # check that skipped when we don't match the alias with a terms query @@ -183,8 +182,7 @@ - match: { hits.total.value: 0 } - match: { _shards.total: 2 } - match: { _shards.successful: 2 } - # When all shards are skipped current logic returns 1 to produce a valid search result - - match: { _shards.skipped : 1} + - match: { _shards.skipped : 2} - match: { _shards.failed: 0 } # check that skipped when we don't match the alias with a prefix query @@ -200,8 +198,7 @@ - match: { hits.total.value: 0 } - match: { _shards.total: 2 } - match: { _shards.successful: 2 } - # When all shards are skipped current logic returns 1 to produce a valid search result - - match: { _shards.skipped : 1} + - match: { _shards.skipped : 2} - match: { _shards.failed: 0 } # check that skipped when we don't match the alias with a wildcard query @@ -217,7 +214,6 @@ - 
match: { hits.total.value: 0 } - match: { _shards.total: 2 } - match: { _shards.successful: 2 } - # When all shards are skipped current logic returns 1 to produce a valid search result - - match: { _shards.skipped : 1} + - match: { _shards.skipped : 2} - match: { _shards.failed: 0 } diff --git a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/90_index_name_query.yml b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/90_index_name_query.yml index a60a1b0d812ee..be2ce033b123c 100644 --- a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/90_index_name_query.yml +++ b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/90_index_name_query.yml @@ -81,7 +81,7 @@ teardown: - match: { hits.total.value: 0 } - match: { _shards.total: 2 } - match: { _shards.successful: 2 } - - match: { _shards.skipped : 1} + - match: { _shards.skipped : 2} - match: { _shards.failed: 0 } - do: @@ -98,5 +98,5 @@ teardown: - match: { hits.total.value: 0 } - match: { _shards.total: 2 } - match: { _shards.successful: 2 } - - match: { _shards.skipped : 1} + - match: { _shards.skipped : 2} - match: { _shards.failed: 0 } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java index 5984e1acc89af..63eece88a53fc 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java @@ -214,7 +214,7 @@ public void testCCSClusterDetailsWhereAllShardsSkippedInCanMatch() throws Except // with DFS_QUERY_THEN_FETCH, the local shards are never skipped assertThat(localClusterSearchInfo.getSkippedShards(), equalTo(0)); } else { - assertThat(localClusterSearchInfo.getSkippedShards(), equalTo(localNumShards - 1)); + 
assertThat(localClusterSearchInfo.getSkippedShards(), equalTo(localNumShards)); } assertThat(localClusterSearchInfo.getFailedShards(), equalTo(0)); assertThat(localClusterSearchInfo.getFailures().size(), equalTo(0)); @@ -224,7 +224,7 @@ public void testCCSClusterDetailsWhereAllShardsSkippedInCanMatch() throws Except assertThat(remoteClusterSearchInfo.getTotalShards(), equalTo(remoteNumShards)); assertThat(remoteClusterSearchInfo.getSuccessfulShards(), equalTo(remoteNumShards)); if (clusters.isCcsMinimizeRoundtrips()) { - assertThat(remoteClusterSearchInfo.getSkippedShards(), equalTo(remoteNumShards - 1)); + assertThat(remoteClusterSearchInfo.getSkippedShards(), equalTo(remoteNumShards)); } else { assertThat(remoteClusterSearchInfo.getSkippedShards(), equalTo(remoteNumShards)); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java index e6cd89c09b979..0c1012c520dac 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java @@ -68,7 +68,11 @@ public void testProfileQuery() throws Exception { prepareSearch().setQuery(q).setTrackTotalHits(true).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH), response -> { assertNotNull("Profile response element should not be null", response.getProfileResults()); - assertThat("Profile response should not be an empty array", response.getProfileResults().size(), not(0)); + if (response.getSkippedShards() == response.getSuccessfulShards()) { + assertEquals(0, response.getProfileResults().size()); + } else { + assertThat("Profile response should not be an empty array", response.getProfileResults().size(), not(0)); + } for (Map.Entry shard : response.getProfileResults().entrySet()) { for (QueryProfileShardResult searchProfiles 
: shard.getValue().getQueryProfileResults()) { for (ProfileResult result : searchProfiles.getQueryResults()) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java index 140afd6b269b3..3d5120226ebed 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java @@ -158,11 +158,15 @@ public void testFieldUsageStats() throws ExecutionException, InterruptedExceptio assertTrue(stats.hasField("date_field")); assertEquals(Set.of(UsageContext.POINTS), stats.get("date_field").keySet()); - // can_match does not enter search stats - // there is a special case though where we have no hit but we need to get at least one search response in order - // to produce a valid search result with all the aggs etc., so we hit one of the two shards + + long expectedShards = 2L * numShards; + if (numShards == 1) { + // with 1 shard and setPreFilterShardSize(1) we don't perform can_match phase but instead directly query the shard + expectedShards += 1; + } + assertEquals( - (2 * numShards) + 1, + expectedShards, indicesAdmin().prepareStats("test") .clear() .setSearch(true) diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 302c3e243a1f6..8f718972c2eaa 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -1458,6 +1458,8 @@ public SearchPhase newSearchPhase( SearchResponse.Clusters clusters ) { if (preFilter) { + // only for aggs we need to contact shards even if there are no matches + boolean requireAtLeastOneMatch = searchRequest.source() != null && 
searchRequest.source().aggregations() != null; return new CanMatchPreFilterSearchPhase( logger, searchTransportService, @@ -1469,7 +1471,7 @@ public SearchPhase newSearchPhase( shardIterators, timeProvider, task, - true, + requireAtLeastOneMatch, searchService.getCoordinatorRewriteContextProvider(timeProvider::absoluteStartMillis), listener.delegateFailureAndWrap( (l, iters) -> newSearchPhase( diff --git a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java index 9d83f88a043e2..3cd8778069d0c 100644 --- a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java +++ b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java @@ -274,6 +274,8 @@ public void testCCSClusterDetailsWhereAllShardsSkippedInCanMatch() throws Except boolean dfs = randomBoolean(); if (dfs) { request.getSearchRequest().searchType(SearchType.DFS_QUERY_THEN_FETCH); + } else { + request.getSearchRequest().searchType(SearchType.QUERY_THEN_FETCH); } RangeQueryBuilder rangeQueryBuilder = new RangeQueryBuilder("@timestamp").from(100).to(2000); request.getSearchRequest().source(new SearchSourceBuilder().query(rangeQueryBuilder).size(10)); @@ -288,20 +290,30 @@ public void testCCSClusterDetailsWhereAllShardsSkippedInCanMatch() throws Except assertTrue(response.isRunning()); SearchResponse.Clusters clusters = response.getSearchResponse().getClusters(); assertThat(clusters.getTotal(), equalTo(2)); - assertTrue("search cluster results should be marked as partial", clusters.hasPartialResults()); - + if (dfs) { + assertTrue("search cluster results should be marked as partial", clusters.hasPartialResults()); + } else { + assertFalse( + "search cluster results should not be marked as partial as all shards are 
skipped", + clusters.hasPartialResults() + ); + } SearchResponse.Cluster localClusterSearchInfo = clusters.getCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); assertNotNull(localClusterSearchInfo); - assertThat(localClusterSearchInfo.getStatus(), equalTo(SearchResponse.Cluster.Status.RUNNING)); + if (dfs) { + assertThat(localClusterSearchInfo.getStatus(), equalTo(SearchResponse.Cluster.Status.RUNNING)); + } else { + assertThat(localClusterSearchInfo.getStatus(), equalTo(SearchResponse.Cluster.Status.SUCCESSFUL)); + } SearchResponse.Cluster remoteClusterSearchInfo = clusters.getCluster(REMOTE_CLUSTER); assertNotNull(remoteClusterSearchInfo); - assertThat(localClusterSearchInfo.getStatus(), equalTo(SearchResponse.Cluster.Status.RUNNING)); } finally { response.decRef(); } - - SearchListenerPlugin.waitSearchStarted(); + if (dfs) { + SearchListenerPlugin.waitSearchStarted(); + } SearchListenerPlugin.allowQueryPhase(); waitForSearchTasksToFinish(); @@ -331,7 +343,7 @@ public void testCCSClusterDetailsWhereAllShardsSkippedInCanMatch() throws Except // no skipped shards locally when DFS_QUERY_THEN_FETCH is used assertThat(localClusterSearchInfo.getSkippedShards(), equalTo(0)); } else { - assertThat(localClusterSearchInfo.getSkippedShards(), equalTo(localNumShards - 1)); + assertThat(localClusterSearchInfo.getSkippedShards(), equalTo(localNumShards)); } assertThat(localClusterSearchInfo.getFailedShards(), equalTo(0)); assertThat(localClusterSearchInfo.getFailures().size(), equalTo(0)); @@ -341,7 +353,7 @@ public void testCCSClusterDetailsWhereAllShardsSkippedInCanMatch() throws Except assertThat(remoteClusterSearchInfo.getTotalShards(), equalTo(remoteNumShards)); assertThat(remoteClusterSearchInfo.getSuccessfulShards(), equalTo(remoteNumShards)); if (minimizeRoundtrips) { - assertThat(remoteClusterSearchInfo.getSkippedShards(), equalTo(remoteNumShards - 1)); + assertThat(remoteClusterSearchInfo.getSkippedShards(), equalTo(remoteNumShards)); } else { 
assertThat(remoteClusterSearchInfo.getSkippedShards(), equalTo(remoteNumShards)); } @@ -377,7 +389,7 @@ public void testCCSClusterDetailsWhereAllShardsSkippedInCanMatch() throws Except // no skipped shards locally when DFS_QUERY_THEN_FETCH is used assertThat(localClusterSearchInfo.getSkippedShards(), equalTo(0)); } else { - assertThat(localClusterSearchInfo.getSkippedShards(), equalTo(localNumShards - 1)); + assertThat(localClusterSearchInfo.getSkippedShards(), equalTo(localNumShards)); } assertThat(localClusterSearchInfo.getFailedShards(), equalTo(0)); assertThat(localClusterSearchInfo.getFailures().size(), equalTo(0)); @@ -387,7 +399,7 @@ public void testCCSClusterDetailsWhereAllShardsSkippedInCanMatch() throws Except assertThat(remoteClusterSearchInfo.getTotalShards(), equalTo(remoteNumShards)); assertThat(remoteClusterSearchInfo.getSuccessfulShards(), equalTo(remoteNumShards)); if (minimizeRoundtrips) { - assertThat(remoteClusterSearchInfo.getSkippedShards(), equalTo(remoteNumShards - 1)); + assertThat(remoteClusterSearchInfo.getSkippedShards(), equalTo(remoteNumShards)); } else { assertThat(remoteClusterSearchInfo.getSkippedShards(), equalTo(remoteNumShards)); } diff --git a/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/SearchIdleTests.java b/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/SearchIdleTests.java index 2da4e2802bdbe..9eb792428537b 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/SearchIdleTests.java +++ b/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/SearchIdleTests.java @@ -42,7 +42,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; public class 
SearchIdleTests extends ESSingleNodeTestCase { @@ -133,8 +132,7 @@ public void testSearchIdleConstantKeywordMatchNoIndex() throws InterruptedExcept // WHEN assertResponse(search("test*", "constant_keyword", randomAlphaOfLength(5), 5), searchResponse -> { assertEquals(RestStatus.OK, searchResponse.status()); - // NOTE: we need an empty result from at least one shard - assertEquals(idleIndexShardsCount + activeIndexShardsCount - 1, searchResponse.getSkippedShards()); + assertEquals(idleIndexShardsCount + activeIndexShardsCount, searchResponse.getSkippedShards()); assertEquals(0, searchResponse.getFailedShards()); assertEquals(0, searchResponse.getHits().getHits().length); }); @@ -144,12 +142,8 @@ public void testSearchIdleConstantKeywordMatchNoIndex() throws InterruptedExcept assertIdleShardsRefreshStats(beforeStatsResponse, afterStatsResponse); - // If no shards match the can match phase then at least one shard gets queries for an empty response. - // However, this affects the search idle stats. 
List active = Arrays.stream(afterStatsResponse.getShards()).filter(s -> s.isSearchIdle() == false).toList(); - assertThat(active, hasSize(1)); - assertThat(active.get(0).getShardRouting().getIndexName(), equalTo("test1")); - assertThat(active.get(0).getShardRouting().id(), equalTo(0)); + assertThat(active, hasSize(0)); } public void testSearchIdleConstantKeywordMatchOneIndex() throws InterruptedException { diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankCoordinatorCanMatchIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankCoordinatorCanMatchIT.java index 445aeaa375e11..467668f008b04 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankCoordinatorCanMatchIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankCoordinatorCanMatchIT.java @@ -10,6 +10,7 @@ import org.apache.lucene.document.LongPoint; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.PointValues; +import org.apache.lucene.search.TotalHits; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.Strings; import org.elasticsearch.index.IndexSettings; @@ -206,10 +207,10 @@ public void testCanMatchCoordinator() throws Exception { ) .setSize(5), response -> { - assertNull(response.getHits().getTotalHits()); + assertEquals(new TotalHits(0, TotalHits.Relation.EQUAL_TO), response.getHits().getTotalHits()); assertEquals(0, response.getHits().getHits().length); assertEquals(5, response.getSuccessfulShards()); - assertEquals(4, response.getSkippedShards()); + assertEquals(5, response.getSkippedShards()); } ); diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankShardCanMatchIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankShardCanMatchIT.java index 
084ccc88bee33..09fe8d1b7ad6e 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankShardCanMatchIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankShardCanMatchIT.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.rank.rrf; +import org.apache.lucene.search.TotalHits; import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.search.SearchType; @@ -199,10 +200,10 @@ public void testCanMatchShard() throws IOException { ) .setSize(5), response -> { - assertNull(response.getHits().getTotalHits()); + assertEquals(new TotalHits(0, TotalHits.Relation.EQUAL_TO), response.getHits().getTotalHits()); assertEquals(0, response.getHits().getHits().length); assertEquals(5, response.getSuccessfulShards()); - assertEquals(4, response.getSkippedShards()); + assertEquals(5, response.getSkippedShards()); } ); diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java index ed42d86bc8c49..259d38b1fe8ee 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java @@ -384,11 +384,9 @@ public void testSearchableSnapshotShardsAreSkippedBySearchRequestWithoutQuerying } } else { assertResponse(client().search(request), newSearchResponse -> { - // When all shards are skipped, at least one of them should be queried in order to - // provide a proper search response. 
- assertThat(newSearchResponse.getSkippedShards(), equalTo(indexOutsideSearchRangeShardCount - 1)); - assertThat(newSearchResponse.getSuccessfulShards(), equalTo(indexOutsideSearchRangeShardCount - 1)); - assertThat(newSearchResponse.getFailedShards(), equalTo(1)); + assertThat(newSearchResponse.getSkippedShards(), equalTo(indexOutsideSearchRangeShardCount)); + assertThat(newSearchResponse.getSuccessfulShards(), equalTo(indexOutsideSearchRangeShardCount)); + assertThat(newSearchResponse.getFailedShards(), equalTo(0)); assertThat(newSearchResponse.getTotalShards(), equalTo(indexOutsideSearchRangeShardCount)); }); @@ -748,9 +746,7 @@ public void testQueryPhaseIsExecutedInAnAvailableNodeWhenAllShardsCanBeSkipped() // All the regular index searches succeeded assertThat(newSearchResponse.getSuccessfulShards(), equalTo(totalShards)); assertThat(newSearchResponse.getFailedShards(), equalTo(0)); - // We have to query at least one node to construct a valid response, and we pick - // a shard that's available in order to construct the search response - assertThat(newSearchResponse.getSkippedShards(), equalTo(totalShards - 1)); + assertThat(newSearchResponse.getSkippedShards(), equalTo(totalShards)); assertThat(newSearchResponse.getTotalShards(), equalTo(totalShards)); assertThat(newSearchResponse.getHits().getTotalHits().value(), equalTo(0L)); }); diff --git a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCCSCanMatchIT.java b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCCSCanMatchIT.java index a7f7b5bd3edda..208da4177fd4c 100644 --- a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCCSCanMatchIT.java +++ b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCCSCanMatchIT.java @@ -197,15 +197,13 @@ public void 
testSearchAction_RangeQueryThatMatchesNoShards() throws ExecutionExc QueryBuilders.rangeQuery("@timestamp").from(100_000_000), // This query matches no documents true, 0, - // All but 2 shards are skipped. TBH I don't know why this 2 shards are not skipped - oldLocalNumShards + newLocalNumShards + oldRemoteNumShards + newRemoteNumShards - 2 + oldLocalNumShards + newLocalNumShards + oldRemoteNumShards + newRemoteNumShards ); testSearchAction( QueryBuilders.rangeQuery("@timestamp").from(100_000_000), // This query matches no documents false, 0, - // All but 1 shards are skipped. TBH I don't know why this 1 shard is not skipped - oldLocalNumShards + newLocalNumShards + oldRemoteNumShards + newRemoteNumShards - 1 + oldLocalNumShards + newLocalNumShards + oldRemoteNumShards + newRemoteNumShards ); } diff --git a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java index f502683e42eb2..30ec6630b9618 100644 --- a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java +++ b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java @@ -484,8 +484,7 @@ private void assertDocs( logger.info(searchResponse); assertEquals(0, searchResponse.getHits().getTotalHits().value()); assertEquals(numberOfShards, searchResponse.getSuccessfulShards()); - // When all shards are skipped, at least one of them is queried in order to provide a proper search response. 
- assertEquals(numberOfShards - 1, searchResponse.getSkippedShards()); + assertEquals(numberOfShards, searchResponse.getSkippedShards()); } finally { searchResponse.decRef(); } From a0c1df0d0c4ecdb39d05186f96c4ae976fde4f3e Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 25 Oct 2024 08:51:00 +0200 Subject: [PATCH 401/449] Speedup Query Phase Merging (#113355) Reducing contention and context switching in merging for the query phase by avoiding respinning the merge task repeatedly, removing things that don't need synchronization from the synchronized blocks and merging repeated loops over the same query result arrays. --- .../search/QueryPhaseResultConsumer.java | 395 +++++++++--------- .../action/search/SearchPhaseController.java | 45 +- 2 files changed, 218 insertions(+), 222 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/search/QueryPhaseResultConsumer.java b/server/src/main/java/org/elasticsearch/action/search/QueryPhaseResultConsumer.java index 89411ac302b10..6c654d9235ec2 100644 --- a/server/src/main/java/org/elasticsearch/action/search/QueryPhaseResultConsumer.java +++ b/server/src/main/java/org/elasticsearch/action/search/QueryPhaseResultConsumer.java @@ -19,7 +19,6 @@ import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.core.Releasable; -import org.elasticsearch.core.Releasables; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.SearchShardTarget; @@ -121,26 +120,50 @@ public void consumeResult(SearchPhaseResult result, Runnable next) { public SearchPhaseController.ReducedQueryPhase reduce() throws Exception { if (pendingMerges.hasPendingMerges()) { throw new AssertionError("partial reduce in-flight"); - } else if (pendingMerges.hasFailure()) { - throw pendingMerges.getFailure(); + } + Exception failure = pendingMerges.failure.get(); + if (failure 
!= null) { + throw failure; } // ensure consistent ordering pendingMerges.sortBuffer(); - final TopDocsStats topDocsStats = pendingMerges.consumeTopDocsStats(); - final List topDocsList = pendingMerges.consumeTopDocs(); + final TopDocsStats topDocsStats = pendingMerges.topDocsStats; + final int resultSize = pendingMerges.buffer.size() + (pendingMerges.mergeResult == null ? 0 : 1); + final List topDocsList = hasTopDocs ? new ArrayList<>(resultSize) : null; + final List> aggsList = hasAggs ? new ArrayList<>(resultSize) : null; + synchronized (pendingMerges) { + if (pendingMerges.mergeResult != null) { + if (topDocsList != null) { + topDocsList.add(pendingMerges.mergeResult.reducedTopDocs); + } + if (aggsList != null) { + aggsList.add(DelayableWriteable.referencing(pendingMerges.mergeResult.reducedAggs)); + } + } + for (QuerySearchResult result : pendingMerges.buffer) { + topDocsStats.add(result.topDocs(), result.searchTimedOut(), result.terminatedEarly()); + if (topDocsList != null) { + TopDocsAndMaxScore topDocs = result.consumeTopDocs(); + setShardIndex(topDocs.topDocs, result.getShardIndex()); + topDocsList.add(topDocs.topDocs); + } + if (aggsList != null) { + aggsList.add(result.getAggs()); + } + } + } SearchPhaseController.ReducedQueryPhase reducePhase; long breakerSize = pendingMerges.circuitBreakerBytes; try { - final List> aggsList = pendingMerges.getAggs(); - if (hasAggs) { + if (aggsList != null) { // Add an estimate of the final reduce size breakerSize = pendingMerges.addEstimateAndMaybeBreak(PendingMerges.estimateRamBytesUsedForReduce(breakerSize)); } reducePhase = SearchPhaseController.reducedQueryPhase( results.asList(), aggsList, - topDocsList, + topDocsList == null ? 
Collections.emptyList() : topDocsList, topDocsStats, pendingMerges.numReducePhases, false, @@ -183,65 +206,59 @@ private MergeResult partialReduce( // ensure consistent ordering Arrays.sort(toConsume, RESULT_COMPARATOR); - for (QuerySearchResult result : toConsume) { - topDocsStats.add(result.topDocs(), result.searchTimedOut(), result.terminatedEarly()); - } - + final List processedShards = new ArrayList<>(emptyResults); final TopDocs newTopDocs; + final InternalAggregations newAggs; + final List> aggsList; + final int resultSetSize = toConsume.length + (lastMerge != null ? 1 : 0); + if (hasAggs) { + aggsList = new ArrayList<>(resultSetSize); + if (lastMerge != null) { + aggsList.add(DelayableWriteable.referencing(lastMerge.reducedAggs)); + } + } else { + aggsList = null; + } + List topDocsList; if (hasTopDocs) { - List topDocsList = new ArrayList<>(); + topDocsList = new ArrayList<>(resultSetSize); if (lastMerge != null) { topDocsList.add(lastMerge.reducedTopDocs); } - for (QuerySearchResult result : toConsume) { - TopDocsAndMaxScore topDocs = result.consumeTopDocs(); - setShardIndex(topDocs.topDocs, result.getShardIndex()); - topDocsList.add(topDocs.topDocs); - } - newTopDocs = mergeTopDocs( - topDocsList, - // we have to merge here in the same way we collect on a shard - topNSize, - 0 - ); } else { - newTopDocs = null; + topDocsList = null; } - - final InternalAggregations newAggs; - if (hasAggs) { - try { - final List> aggsList = new ArrayList<>(); - if (lastMerge != null) { - aggsList.add(DelayableWriteable.referencing(lastMerge.reducedAggs)); - } - for (QuerySearchResult result : toConsume) { + try { + for (QuerySearchResult result : toConsume) { + topDocsStats.add(result.topDocs(), result.searchTimedOut(), result.terminatedEarly()); + SearchShardTarget target = result.getSearchShardTarget(); + processedShards.add(new SearchShard(target.getClusterAlias(), target.getShardId())); + if (aggsList != null) { aggsList.add(result.getAggs()); } - newAggs = 
InternalAggregations.topLevelReduceDelayable(aggsList, aggReduceContextBuilder.forPartialReduction()); - } finally { - for (QuerySearchResult result : toConsume) { - result.releaseAggs(); + if (topDocsList != null) { + TopDocsAndMaxScore topDocs = result.consumeTopDocs(); + setShardIndex(topDocs.topDocs, result.getShardIndex()); + topDocsList.add(topDocs.topDocs); } } - } else { - newAggs = null; + // we have to merge here in the same way we collect on a shard + newTopDocs = topDocsList == null ? null : mergeTopDocs(topDocsList, topNSize, 0); + newAggs = aggsList == null + ? null + : InternalAggregations.topLevelReduceDelayable(aggsList, aggReduceContextBuilder.forPartialReduction()); + } finally { + releaseAggs(toConsume); } - List processedShards = new ArrayList<>(emptyResults); if (lastMerge != null) { processedShards.addAll(lastMerge.processedShards); } - for (QuerySearchResult result : toConsume) { - SearchShardTarget target = result.getSearchShardTarget(); - processedShards.add(new SearchShard(target.getClusterAlias(), target.getShardId())); - } if (progressListener != SearchProgressListener.NOOP) { progressListener.notifyPartialReduce(processedShards, topDocsStats.getTotalHits(), newAggs, numReducePhases); } // we leave the results un-serialized because serializing is slow but we compute the serialized // size as an estimate of the memory used by the newly reduced aggregations. - long serializedSize = hasAggs ? DelayableWriteable.getSerializedSize(newAggs) : 0; - return new MergeResult(processedShards, newTopDocs, newAggs, hasAggs ? serializedSize : 0); + return new MergeResult(processedShards, newTopDocs, newAggs, newAggs != null ? 
DelayableWriteable.getSerializedSize(newAggs) : 0); } public int getNumReducePhases() { @@ -274,11 +291,7 @@ private class PendingMerges implements Releasable { @Override public synchronized void close() { - if (hasFailure()) { - assert circuitBreakerBytes == 0; - } else { - assert circuitBreakerBytes >= 0; - } + assert assertFailureAndBreakerConsistent(); releaseBuffer(); circuitBreaker.addWithoutBreaking(-circuitBreakerBytes); @@ -290,8 +303,14 @@ public synchronized void close() { } } - synchronized Exception getFailure() { - return failure.get(); + private boolean assertFailureAndBreakerConsistent() { + boolean hasFailure = failure.get() != null; + if (hasFailure) { + assert circuitBreakerBytes == 0; + } else { + assert circuitBreakerBytes >= 0; + } + return true; } boolean hasFailure() { @@ -342,56 +361,71 @@ static long estimateRamBytesUsedForReduce(long size) { } public void consume(QuerySearchResult result, Runnable next) { - boolean executeNextImmediately = true; - synchronized (this) { - if (hasFailure() || result.isNull()) { - result.consumeAll(); - if (result.isNull()) { - SearchShardTarget target = result.getSearchShardTarget(); - emptyResults.add(new SearchShard(target.getClusterAlias(), target.getShardId())); - } - } else { - if (hasAggs) { - long aggsSize = ramBytesUsedQueryResult(result); - try { - addEstimateAndMaybeBreak(aggsSize); - } catch (Exception exc) { - result.releaseAggs(); - releaseBuffer(); - onMergeFailure(exc); - next.run(); - return; + if (hasFailure()) { + result.consumeAll(); + next.run(); + } else if (result.isNull()) { + result.consumeAll(); + SearchShardTarget target = result.getSearchShardTarget(); + SearchShard searchShard = new SearchShard(target.getClusterAlias(), target.getShardId()); + synchronized (this) { + emptyResults.add(searchShard); + } + next.run(); + } else { + final long aggsSize = ramBytesUsedQueryResult(result); + boolean executeNextImmediately = true; + boolean hasFailure = false; + synchronized (this) { + if 
(hasFailure()) { + hasFailure = true; + } else { + if (hasAggs) { + try { + addEstimateAndMaybeBreak(aggsSize); + } catch (Exception exc) { + releaseBuffer(); + onMergeFailure(exc); + hasFailure = true; + } + } + if (hasFailure == false) { + aggsCurrentBufferSize += aggsSize; + // add one if a partial merge is pending + int size = buffer.size() + (hasPartialReduce ? 1 : 0); + if (size >= batchReduceSize) { + hasPartialReduce = true; + executeNextImmediately = false; + QuerySearchResult[] clone = buffer.toArray(QuerySearchResult[]::new); + MergeTask task = new MergeTask(clone, aggsCurrentBufferSize, new ArrayList<>(emptyResults), next); + aggsCurrentBufferSize = 0; + buffer.clear(); + emptyResults.clear(); + queue.add(task); + tryExecuteNext(); + } + buffer.add(result); } - aggsCurrentBufferSize += aggsSize; - } - // add one if a partial merge is pending - int size = buffer.size() + (hasPartialReduce ? 1 : 0); - if (size >= batchReduceSize) { - hasPartialReduce = true; - executeNextImmediately = false; - QuerySearchResult[] clone = buffer.toArray(QuerySearchResult[]::new); - MergeTask task = new MergeTask(clone, aggsCurrentBufferSize, new ArrayList<>(emptyResults), next); - aggsCurrentBufferSize = 0; - buffer.clear(); - emptyResults.clear(); - queue.add(task); - tryExecuteNext(); } - buffer.add(result); } - } - if (executeNextImmediately) { - next.run(); + if (hasFailure) { + result.consumeAll(); + } + if (executeNextImmediately) { + next.run(); + } } } private void releaseBuffer() { - buffer.forEach(QuerySearchResult::releaseAggs); + for (QuerySearchResult querySearchResult : buffer) { + querySearchResult.releaseAggs(); + } buffer.clear(); } private synchronized void onMergeFailure(Exception exc) { - if (hasFailure()) { + if (failure.compareAndSet(null, exc) == false) { assert circuitBreakerBytes == 0; return; } @@ -401,79 +435,89 @@ private synchronized void onMergeFailure(Exception exc) { circuitBreaker.addWithoutBreaking(-circuitBreakerBytes); 
circuitBreakerBytes = 0; } - failure.compareAndSet(null, exc); - final List toCancels = new ArrayList<>(); - toCancels.add(() -> onPartialMergeFailure.accept(exc)); + onPartialMergeFailure.accept(exc); final MergeTask task = runningTask.getAndSet(null); if (task != null) { - toCancels.add(task::cancel); + task.cancel(); } MergeTask mergeTask; while ((mergeTask = queue.pollFirst()) != null) { - toCancels.add(mergeTask::cancel); + mergeTask.cancel(); } mergeResult = null; - Releasables.close(toCancels); - } - - private void onAfterMerge(MergeTask task, MergeResult newResult, long estimatedSize) { - synchronized (this) { - if (hasFailure()) { - return; - } - runningTask.compareAndSet(task, null); - mergeResult = newResult; - if (hasAggs) { - // Update the circuit breaker to remove the size of the source aggregations - // and replace the estimation with the serialized size of the newly reduced result. - long newSize = mergeResult.estimatedSize - estimatedSize; - addWithoutBreaking(newSize); - logger.trace( - "aggs partial reduction [{}->{}] max [{}]", - estimatedSize, - mergeResult.estimatedSize, - maxAggsCurrentBufferSize - ); - } - task.consumeListener(); - } } private void tryExecuteNext() { final MergeTask task; synchronized (this) { - if (queue.isEmpty() || hasFailure() || runningTask.get() != null) { + if (hasFailure() || runningTask.get() != null) { return; } task = queue.poll(); - runningTask.compareAndSet(null, task); + runningTask.set(task); + } + if (task == null) { + return; } executor.execute(new AbstractRunnable() { @Override protected void doRun() { - final MergeResult thisMergeResult = mergeResult; - long estimatedTotalSize = (thisMergeResult != null ? 
thisMergeResult.estimatedSize : 0) + task.aggsBufferSize; - final MergeResult newMerge; - final QuerySearchResult[] toConsume = task.consumeBuffer(); - if (toConsume == null) { - return; - } - try { - long estimatedMergeSize = estimateRamBytesUsedForReduce(estimatedTotalSize); - addEstimateAndMaybeBreak(estimatedMergeSize); - estimatedTotalSize += estimatedMergeSize; - ++numReducePhases; - newMerge = partialReduce(toConsume, task.emptyResults, topDocsStats, thisMergeResult, numReducePhases); - } catch (Exception t) { - for (QuerySearchResult result : toConsume) { - result.releaseAggs(); + MergeTask mergeTask = task; + QuerySearchResult[] toConsume = mergeTask.consumeBuffer(); + while (mergeTask != null) { + final MergeResult thisMergeResult = mergeResult; + long estimatedTotalSize = (thisMergeResult != null ? thisMergeResult.estimatedSize : 0) + mergeTask.aggsBufferSize; + final MergeResult newMerge; + try { + long estimatedMergeSize = estimateRamBytesUsedForReduce(estimatedTotalSize); + addEstimateAndMaybeBreak(estimatedMergeSize); + estimatedTotalSize += estimatedMergeSize; + ++numReducePhases; + newMerge = partialReduce(toConsume, mergeTask.emptyResults, topDocsStats, thisMergeResult, numReducePhases); + } catch (Exception t) { + QueryPhaseResultConsumer.releaseAggs(toConsume); + onMergeFailure(t); + return; + } + synchronized (QueryPhaseResultConsumer.this) { + if (hasFailure()) { + return; + } + mergeResult = newMerge; + if (hasAggs) { + // Update the circuit breaker to remove the size of the source aggregations + // and replace the estimation with the serialized size of the newly reduced result. 
+ long newSize = mergeResult.estimatedSize - estimatedTotalSize; + addWithoutBreaking(newSize); + if (logger.isTraceEnabled()) { + logger.trace( + "aggs partial reduction [{}->{}] max [{}]", + estimatedTotalSize, + mergeResult.estimatedSize, + maxAggsCurrentBufferSize + ); + } + } + } + Runnable r = mergeTask.consumeListener(); + synchronized (QueryPhaseResultConsumer.this) { + while (true) { + mergeTask = queue.poll(); + runningTask.set(mergeTask); + if (mergeTask == null) { + break; + } + toConsume = mergeTask.consumeBuffer(); + if (toConsume != null) { + break; + } + } + } + if (r != null) { + r.run(); } - onMergeFailure(t); - return; } - onAfterMerge(task, newMerge, estimatedTotalSize); - tryExecuteNext(); } @Override @@ -483,43 +527,6 @@ public void onFailure(Exception exc) { }); } - public synchronized TopDocsStats consumeTopDocsStats() { - for (QuerySearchResult result : buffer) { - topDocsStats.add(result.topDocs(), result.searchTimedOut(), result.terminatedEarly()); - } - return topDocsStats; - } - - public synchronized List consumeTopDocs() { - if (hasTopDocs == false) { - return Collections.emptyList(); - } - List topDocsList = new ArrayList<>(); - if (mergeResult != null) { - topDocsList.add(mergeResult.reducedTopDocs); - } - for (QuerySearchResult result : buffer) { - TopDocsAndMaxScore topDocs = result.consumeTopDocs(); - setShardIndex(topDocs.topDocs, result.getShardIndex()); - topDocsList.add(topDocs.topDocs); - } - return topDocsList; - } - - public synchronized List> getAggs() { - if (hasAggs == false) { - return Collections.emptyList(); - } - List> aggsList = new ArrayList<>(); - if (mergeResult != null) { - aggsList.add(DelayableWriteable.referencing(mergeResult.reducedAggs)); - } - for (QuerySearchResult result : buffer) { - aggsList.add(result.getAggs()); - } - return aggsList; - } - public synchronized void releaseAggs() { if (hasAggs) { for (QuerySearchResult result : buffer) { @@ -529,6 +536,12 @@ public synchronized void releaseAggs() { } 
} + private static void releaseAggs(QuerySearchResult... toConsume) { + for (QuerySearchResult result : toConsume) { + result.releaseAggs(); + } + } + private record MergeResult( List processedShards, TopDocs reducedTopDocs, @@ -555,21 +568,21 @@ public synchronized QuerySearchResult[] consumeBuffer() { return toRet; } - public void consumeListener() { - if (next != null) { - next.run(); - next = null; - } + public synchronized Runnable consumeListener() { + Runnable n = next; + next = null; + return n; } - public synchronized void cancel() { + public void cancel() { QuerySearchResult[] buffer = consumeBuffer(); if (buffer != null) { - for (QuerySearchResult result : buffer) { - result.releaseAggs(); - } + releaseAggs(buffer); + } + Runnable next = consumeListener(); + if (next != null) { + next.run(); } - consumeListener(); } } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index ca9c4ab44c423..b118c2560925e 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.core.Nullable; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.lucene.grouping.TopFieldGroups; import org.elasticsearch.search.DocValueFormat; @@ -190,7 +191,7 @@ public static List mergeKnnResults(SearchRequest request, List topDocs, + final List topDocs, int from, int size, List reducedCompletionSuggestions @@ -233,22 +234,22 @@ static SortedTopDocs sortDocs( return new SortedTopDocs(scoreDocs, isSortedByField, sortFields, groupField, groupValues, numSuggestDocs); } - static TopDocs mergeTopDocs(Collection 
results, int topN, int from) { + static TopDocs mergeTopDocs(List results, int topN, int from) { if (results.isEmpty()) { return null; } - final TopDocs topDocs = results.stream().findFirst().get(); + final TopDocs topDocs = results.getFirst(); final TopDocs mergedTopDocs; final int numShards = results.size(); if (numShards == 1 && from == 0) { // only one shard and no pagination we can just return the topDocs as we got them. return topDocs; } else if (topDocs instanceof TopFieldGroups firstTopDocs) { final Sort sort = new Sort(firstTopDocs.fields); - final TopFieldGroups[] shardTopDocs = results.toArray(new TopFieldGroups[numShards]); + final TopFieldGroups[] shardTopDocs = results.toArray(new TopFieldGroups[0]); mergedTopDocs = TopFieldGroups.merge(sort, from, topN, shardTopDocs, false); } else if (topDocs instanceof TopFieldDocs firstTopDocs) { final Sort sort = checkSameSortTypes(results, firstTopDocs.fields); - final TopFieldDocs[] shardTopDocs = results.toArray(new TopFieldDocs[numShards]); + final TopFieldDocs[] shardTopDocs = results.toArray(new TopFieldDocs[0]); mergedTopDocs = TopDocs.merge(sort, from, topN, shardTopDocs); } else { final TopDocs[] shardTopDocs = results.toArray(new TopDocs[numShards]); @@ -524,17 +525,7 @@ public AggregationReduceContext forFinalReduction() { topDocs.add(td.topDocs); } } - return reducedQueryPhase( - queryResults, - Collections.emptyList(), - topDocs, - topDocsStats, - 0, - true, - aggReduceContextBuilder, - null, - true - ); + return reducedQueryPhase(queryResults, null, topDocs, topDocsStats, 0, true, aggReduceContextBuilder, null, true); } /** @@ -548,7 +539,7 @@ public AggregationReduceContext forFinalReduction() { */ static ReducedQueryPhase reducedQueryPhase( Collection queryResults, - List> bufferedAggs, + @Nullable List> bufferedAggs, List bufferedTopDocs, TopDocsStats topDocsStats, int numReducePhases, @@ -642,7 +633,12 @@ static ReducedQueryPhase reducedQueryPhase( reducedSuggest = new 
Suggest(Suggest.reduce(groupedSuggestions)); reducedCompletionSuggestions = reducedSuggest.filter(CompletionSuggestion.class); } - final InternalAggregations aggregations = reduceAggs(aggReduceContextBuilder, performFinalReduce, bufferedAggs); + final InternalAggregations aggregations = bufferedAggs == null + ? null + : InternalAggregations.topLevelReduceDelayable( + bufferedAggs, + performFinalReduce ? aggReduceContextBuilder.forFinalReduction() : aggReduceContextBuilder.forPartialReduction() + ); final SearchProfileResultsBuilder profileBuilder = profileShardResults.isEmpty() ? null : new SearchProfileResultsBuilder(profileShardResults); @@ -681,19 +677,6 @@ static ReducedQueryPhase reducedQueryPhase( ); } - private static InternalAggregations reduceAggs( - AggregationReduceContext.Builder aggReduceContextBuilder, - boolean performFinalReduce, - List> toReduce - ) { - return toReduce.isEmpty() - ? null - : InternalAggregations.topLevelReduceDelayable( - toReduce, - performFinalReduce ? aggReduceContextBuilder.forFinalReduction() : aggReduceContextBuilder.forPartialReduction() - ); - } - /** * Checks that query results from all shards have consistent unsigned_long format. * Sort queries on a field that has long type in one index, and unsigned_long in another index From a02f68217a5bfb226fbcd3b26cfc2b125806be94 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 25 Oct 2024 08:53:27 +0200 Subject: [PATCH 402/449] Lazy initialize HttpRouteStatsTracker in MethodHandlers (#114107) We use about 1M for the route stats tracker instances per ES instance. Making this lazy init should come at a trivial overhead and in fact makes the computation of the node stats cheaper by saving spurious sums on 0-valued long adders. 
--- .../elasticsearch/http/HttpRouteStats.java | 2 + .../elasticsearch/rest/MethodHandlers.java | 42 ++++++++++++++----- .../elasticsearch/rest/RestController.java | 25 +++++------ 3 files changed, 46 insertions(+), 23 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/http/HttpRouteStats.java b/server/src/main/java/org/elasticsearch/http/HttpRouteStats.java index 5be1ae9312c46..a15b929fd3c1b 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpRouteStats.java +++ b/server/src/main/java/org/elasticsearch/http/HttpRouteStats.java @@ -49,6 +49,8 @@ public record HttpRouteStats( long[] responseTimeHistogram ) implements Writeable, ToXContentObject { + public static final HttpRouteStats EMPTY = new HttpRouteStats(0, 0, new long[0], 0, 0, new long[0], new long[0]); + public HttpRouteStats(StreamInput in) throws IOException { this(in.readVLong(), in.readVLong(), in.readVLongArray(), in.readVLong(), in.readVLong(), in.readVLongArray(), in.readVLongArray()); } diff --git a/server/src/main/java/org/elasticsearch/rest/MethodHandlers.java b/server/src/main/java/org/elasticsearch/rest/MethodHandlers.java index a947ddce2b9f3..2f53f48f9ae5b 100644 --- a/server/src/main/java/org/elasticsearch/rest/MethodHandlers.java +++ b/server/src/main/java/org/elasticsearch/rest/MethodHandlers.java @@ -13,6 +13,8 @@ import org.elasticsearch.http.HttpRouteStats; import org.elasticsearch.http.HttpRouteStatsTracker; +import java.lang.invoke.MethodHandles; +import java.lang.invoke.VarHandle; import java.util.EnumMap; import java.util.Map; import java.util.Set; @@ -25,7 +27,18 @@ final class MethodHandlers { private final String path; private final Map> methodHandlers; - private final HttpRouteStatsTracker statsTracker = new HttpRouteStatsTracker(); + @SuppressWarnings("unused") // only accessed via #STATS_TRACKER_HANDLE, lazy initialized because instances consume non-trivial heap + private volatile HttpRouteStatsTracker statsTracker; + + private static final VarHandle 
STATS_TRACKER_HANDLE; + + static { + try { + STATS_TRACKER_HANDLE = MethodHandles.lookup().findVarHandle(MethodHandlers.class, "statsTracker", HttpRouteStatsTracker.class); + } catch (NoSuchFieldException | IllegalAccessException e) { + throw new ExceptionInInitializerError(e); + } + } MethodHandlers(String path) { this.path = path; @@ -73,19 +86,26 @@ Set getValidMethods() { return methodHandlers.keySet(); } - public void addRequestStats(int contentLength) { - statsTracker.addRequestStats(contentLength); - } - - public void addResponseStats(long contentLength) { - statsTracker.addResponseStats(contentLength); + public HttpRouteStats getStats() { + var tracker = existingStatsTracker(); + if (tracker == null) { + return HttpRouteStats.EMPTY; + } + return tracker.getStats(); } - public void addResponseTime(long timeMillis) { - statsTracker.addResponseTime(timeMillis); + public HttpRouteStatsTracker statsTracker() { + var tracker = existingStatsTracker(); + if (tracker == null) { + var newTracker = new HttpRouteStatsTracker(); + if ((tracker = (HttpRouteStatsTracker) STATS_TRACKER_HANDLE.compareAndExchange(this, null, newTracker)) == null) { + tracker = newTracker; + } + } + return tracker; } - public HttpRouteStats getStats() { - return statsTracker.getStats(); + private HttpRouteStatsTracker existingStatsTracker() { + return (HttpRouteStatsTracker) STATS_TRACKER_HANDLE.getAcquire(this); } } diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index c2064fdd931de..7446ec5bb6717 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -36,6 +36,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.http.HttpHeadersValidationException; import org.elasticsearch.http.HttpRouteStats; +import org.elasticsearch.http.HttpRouteStatsTracker; import 
org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.rest.RestHandler.Route; @@ -879,7 +880,7 @@ public void sendResponse(RestResponse response) { private static final class ResourceHandlingHttpChannel extends DelegatingRestChannel { private final CircuitBreakerService circuitBreakerService; private final int contentLength; - private final MethodHandlers methodHandlers; + private final HttpRouteStatsTracker statsTracker; private final long startTime; private final AtomicBoolean closed = new AtomicBoolean(); @@ -892,7 +893,7 @@ private static final class ResourceHandlingHttpChannel extends DelegatingRestCha super(delegate); this.circuitBreakerService = circuitBreakerService; this.contentLength = contentLength; - this.methodHandlers = methodHandlers; + this.statsTracker = methodHandlers.statsTracker(); this.startTime = rawRelativeTimeInMillis(); } @@ -901,12 +902,12 @@ public void sendResponse(RestResponse response) { boolean success = false; try { close(); - methodHandlers.addRequestStats(contentLength); - methodHandlers.addResponseTime(rawRelativeTimeInMillis() - startTime); + statsTracker.addRequestStats(contentLength); + statsTracker.addResponseTime(rawRelativeTimeInMillis() - startTime); if (response.isChunked() == false) { - methodHandlers.addResponseStats(response.content().length()); + statsTracker.addResponseStats(response.content().length()); } else { - final var responseLengthRecorder = new ResponseLengthRecorder(methodHandlers); + final var responseLengthRecorder = new ResponseLengthRecorder(statsTracker); final var headers = response.getHeaders(); response = RestResponse.chunked( response.status(), @@ -941,11 +942,11 @@ private void close() { } } - private static class ResponseLengthRecorder extends AtomicReference implements Releasable { + private static class ResponseLengthRecorder extends AtomicReference implements Releasable { private long responseLength; - private 
ResponseLengthRecorder(MethodHandlers methodHandlers) { - super(methodHandlers); + private ResponseLengthRecorder(HttpRouteStatsTracker routeStatsTracker) { + super(routeStatsTracker); } @Override @@ -953,11 +954,11 @@ public void close() { // closed just before sending the last chunk, and also when the whole RestResponse is closed since the client might abort the // connection before we send the last chunk, in which case we won't have recorded the response in the // stats yet; thus we need run-once semantics here: - final var methodHandlers = getAndSet(null); - if (methodHandlers != null) { + final var routeStatsTracker = getAndSet(null); + if (routeStatsTracker != null) { // if we started sending chunks then we're closed on the transport worker, no need for sync assert responseLength == 0L || Transports.assertTransportThread(); - methodHandlers.addResponseStats(responseLength); + routeStatsTracker.addResponseStats(responseLength); } } From ca4009e29823ae3eaaad26b75d8bb47ade5e218c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Fri, 25 Oct 2024 09:13:18 +0200 Subject: [PATCH 403/449] [DOCS] Adds stream inference API docs (#115333) Co-authored-by: Pat Whelan --- .../inference/inference-apis.asciidoc | 2 + .../inference/stream-inference.asciidoc | 122 ++++++++++++++++++ 2 files changed, 124 insertions(+) create mode 100644 docs/reference/inference/stream-inference.asciidoc diff --git a/docs/reference/inference/inference-apis.asciidoc b/docs/reference/inference/inference-apis.asciidoc index ddcff1abc7dce..1206cb02ba89a 100644 --- a/docs/reference/inference/inference-apis.asciidoc +++ b/docs/reference/inference/inference-apis.asciidoc @@ -19,6 +19,7 @@ the following APIs to manage {infer} models and perform {infer}: * <> * <> * <> +* <> * <> [[inference-landscape]] @@ -56,6 +57,7 @@ include::delete-inference.asciidoc[] include::get-inference.asciidoc[] include::post-inference.asciidoc[] include::put-inference.asciidoc[] 
+include::stream-inference.asciidoc[] include::update-inference.asciidoc[] include::service-alibabacloud-ai-search.asciidoc[] include::service-amazon-bedrock.asciidoc[] diff --git a/docs/reference/inference/stream-inference.asciidoc b/docs/reference/inference/stream-inference.asciidoc new file mode 100644 index 0000000000000..e66acd630cb3e --- /dev/null +++ b/docs/reference/inference/stream-inference.asciidoc @@ -0,0 +1,122 @@ +[role="xpack"] +[[stream-inference-api]] +=== Stream inference API + +Streams a chat completion response. + +IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI, Anthropic, Watsonx.ai, or Hugging Face. +For built-in models and models uploaded through Eland, the {infer} APIs offer an alternative way to use and manage trained models. +However, if you do not plan to use the {infer} APIs to use these models or if you want to use non-NLP models, use the <>. + + +[discrete] +[[stream-inference-api-request]] +==== {api-request-title} + +`POST /_inference//_stream` + +`POST /_inference///_stream` + + +[discrete] +[[stream-inference-api-prereqs]] +==== {api-prereq-title} + +* Requires the `monitor_inference` <> +(the built-in `inference_admin` and `inference_user` roles grant this privilege) +* You must use a client that supports streaming. + + +[discrete] +[[stream-inference-api-desc]] +==== {api-description-title} + +The stream {infer} API enables real-time responses for completion tasks by delivering answers incrementally, reducing response times during computation. +It only works with the `completion` task type. + + +[discrete] +[[stream-inference-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) +The unique identifier of the {infer} endpoint. + + +``:: +(Optional, string) +The type of {infer} task that the model performs. 
+ + +[discrete] +[[stream-inference-api-request-body]] +==== {api-request-body-title} + +`input`:: +(Required, string or array of strings) +The text on which you want to perform the {infer} task. +`input` can be a single string or an array. ++ +-- +[NOTE] +==== +Inference endpoints for the `completion` task type currently only support a +single string as input. +==== +-- + + +[discrete] +[[stream-inference-api-example]] +==== {api-examples-title} + +The following example performs a completion on the example question with streaming. + + +[source,console] +------------------------------------------------------------ +POST _inference/completion/openai-completion/_stream +{ + "input": "What is Elastic?" +} +------------------------------------------------------------ +// TEST[skip:TBD] + + +The API returns the following response: + + +[source,txt] +------------------------------------------------------------ +event: message +data: { + "completion":[{ + "delta":"Elastic" + }] +} + +event: message +data: { + "completion":[{ + "delta":" is" + }, + { + "delta":" a" + } + ] +} + +event: message +data: { + "completion":[{ + "delta":" software" + }, + { + "delta":" company" + }] +} + +(...) 
+------------------------------------------------------------ +// NOTCONSOLE From 6688fe225584cfa8d12ebb5e56662918a593f690 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Fri, 25 Oct 2024 10:26:12 +0300 Subject: [PATCH 404/449] Remove excluded tests from rest compat (#115617) --- x-pack/plugin/downsample/qa/rest/build.gradle | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/x-pack/plugin/downsample/qa/rest/build.gradle b/x-pack/plugin/downsample/qa/rest/build.gradle index 5142632a36006..ba5ac7b0c7317 100644 --- a/x-pack/plugin/downsample/qa/rest/build.gradle +++ b/x-pack/plugin/downsample/qa/rest/build.gradle @@ -32,20 +32,6 @@ tasks.named('yamlRestTest') { tasks.named('yamlRestCompatTest') { usesDefaultDistribution() } -tasks.named("yamlRestCompatTestTransform").configure ({ task -> - task.skipTest("downsample/10_basic/Downsample index with empty dimension on routing path", "Skip until pr/115358 gets backported") - task.skipTest("downsample/10_basic/Downsample histogram as label", "Skip until pr/115358 gets backported") - task.skipTest("downsample/10_basic/Downsample date timestamp field using strict_date_optional_time_nanos format", - "Skip until pr/115358 gets backported") - task.skipTest("downsample/10_basic/Downsample a downsampled index", "Skip until pr/115358 gets backported") - task.skipTest("downsample/10_basic/Downsample date_nanos timestamp field using custom format", "Skip until pr/115358 gets backported") - task.skipTest("downsample/10_basic/Downsample using coarse grained timestamp", "Skip until pr/115358 gets backported") - task.skipTest("downsample/10_basic/Downsample label with ignore_above", "Skip until pr/115358 gets backported") - task.skipTest("downsample/10_basic/Downsample object field", "Skip until pr/115358 gets backported") - task.skipTest("downsample/10_basic/Downsample empty and missing labels", "Skip until pr/115358 gets backported") - 
task.skipTest("downsample/10_basic/Downsample index", "Skip until pr/115358 gets backported") - task.skipTest("downsample/10_basic/Downsample index with empty dimension", "Skip until pr/115358 gets backported") -}) if (BuildParams.inFipsJvm){ // This test cluster is using a BASIC license and FIPS 140 mode is not supported in BASIC tasks.named("yamlRestTest").configure{enabled = false } From e7897bdeff7f4ec76e8a0801c86f5dea11cacabd Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Fri, 25 Oct 2024 09:57:12 +0200 Subject: [PATCH 405/449] Return `_ignored_source` as a top level array field (#115328) This PR introduces a fix for the `fields` and `stored_fields` APIs and the way `_ignored_source` field is handled: 1. **Return `_ignored_source` as a top-level array metadata field**: - The `_ignored_source` field is now returned as a top-level array in the metadata as done with other metadata fields. 2. **Return `_ignored_source` as an array of values**: - Even when there is only a single ignored field, `_ignored_source` will now be returned as an array of values. This is done to be consistent with how the `_ignored` field is returned. Without this fix, we would return the `_ignored_source` field twice, as a top-level field and as part of the `fields` array. Also, without this fix, we would only return a single value instead of all ignored field values. 
--- .../mapper/IgnoredSourceFieldMapper.java | 3 + .../index/mapper/MapperFeatures.java | 1 + .../org/elasticsearch/search/SearchHit.java | 3 +- .../fetch/subphase/FetchFieldsPhase.java | 25 ++- .../index/get/DocumentFieldTests.java | 5 +- .../search/SearchResponseUtils.java | 3 +- .../rest-api-spec/test/20_ignored_source.yml | 158 +++++++++++++++++- 7 files changed, 182 insertions(+), 16 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java index 70d73fc2ffb9a..7e2bebfd403cb 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java @@ -58,6 +58,9 @@ public class IgnoredSourceFieldMapper extends MetadataFieldMapper { static final NodeFeature TRACK_IGNORED_SOURCE = new NodeFeature("mapper.track_ignored_source"); static final NodeFeature DONT_EXPAND_DOTS_IN_IGNORED_SOURCE = new NodeFeature("mapper.ignored_source.dont_expand_dots"); + static final NodeFeature IGNORED_SOURCE_AS_TOP_LEVEL_METADATA_ARRAY_FIELD = new NodeFeature( + "mapper.ignored_source_as_top_level_metadata_array_field" + ); static final NodeFeature ALWAYS_STORE_OBJECT_ARRAYS_IN_NESTED_OBJECTS = new NodeFeature( "mapper.ignored_source.always_store_object_arrays_in_nested" ); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java index 026c7c98d7aeb..a5f173afffba2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java @@ -63,6 +63,7 @@ public Set getTestFeatures() { RangeFieldMapper.DATE_RANGE_INDEXING_FIX, IgnoredSourceFieldMapper.DONT_EXPAND_DOTS_IN_IGNORED_SOURCE, SourceFieldMapper.REMOVE_SYNTHETIC_SOURCE_ONLY_VALIDATION, + 
IgnoredSourceFieldMapper.IGNORED_SOURCE_AS_TOP_LEVEL_METADATA_ARRAY_FIELD, IgnoredSourceFieldMapper.ALWAYS_STORE_OBJECT_ARRAYS_IN_NESTED_OBJECTS ); } diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java index 1611c95d99df4..98f7c92d9997a 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java @@ -29,6 +29,7 @@ import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.SimpleRefCounted; import org.elasticsearch.index.mapper.IgnoredFieldMapper; +import org.elasticsearch.index.mapper.IgnoredSourceFieldMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.seqno.SequenceNumbers; @@ -847,7 +848,7 @@ public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) t } // _ignored is the only multi-valued meta field // TODO: can we avoid having an exception here? - if (field.getName().equals(IgnoredFieldMapper.NAME)) { + if (IgnoredFieldMapper.NAME.equals(field.getName()) || IgnoredSourceFieldMapper.NAME.equals(field.getName())) { builder.field(field.getName(), field.getValues()); } else { builder.field(field.getName(), field.getValue()); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java index 03bfbd40d97be..e0cb5a668b4ab 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java @@ -57,13 +57,28 @@ public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) { return null; } + // NOTE: FieldFetcher for non-metadata fields, as well as `_id` and `_source`. 
+ // We need to retain `_id` and `_source` here to correctly populate the `StoredFieldSpecs` created by the + // `FieldFetcher` constructor. final SearchExecutionContext searchExecutionContext = fetchContext.getSearchExecutionContext(); - final FieldFetcher fieldFetcher = fetchFieldsContext == null ? null - : fetchFieldsContext.fields() == null ? null - : fetchFieldsContext.fields().isEmpty() ? null - : FieldFetcher.create(searchExecutionContext, fetchFieldsContext.fields()); + final FieldFetcher fieldFetcher = (fetchFieldsContext == null + || fetchFieldsContext.fields() == null + || fetchFieldsContext.fields().isEmpty()) + ? null + : FieldFetcher.create( + searchExecutionContext, + fetchFieldsContext.fields() + .stream() + .filter( + fieldAndFormat -> (searchExecutionContext.isMetadataField(fieldAndFormat.field) == false + || searchExecutionContext.getFieldType(fieldAndFormat.field).isStored() == false + || IdFieldMapper.NAME.equals(fieldAndFormat.field) + || SourceFieldMapper.NAME.equals(fieldAndFormat.field)) + ) + .toList() + ); - // NOTE: Collect stored metadata fields requested via `fields` (in FetchFieldsContext`) like for instance the _ignored source field + // NOTE: Collect stored metadata fields requested via `fields` (in FetchFieldsContext) like for instance the _ignored source field final Set fetchContextMetadataFields = new HashSet<>(); if (fetchFieldsContext != null && fetchFieldsContext.fields() != null && fetchFieldsContext.fields().isEmpty() == false) { for (final FieldAndFormat fieldAndFormat : fetchFieldsContext.fields()) { diff --git a/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java b/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java index 76426e9df83d8..8a27c3545a110 100644 --- a/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java +++ b/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java @@ -14,6 +14,7 @@ import 
org.elasticsearch.common.document.DocumentField; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.mapper.IgnoredFieldMapper; +import org.elasticsearch.index.mapper.IgnoredSourceFieldMapper; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.RandomObjects; @@ -122,7 +123,7 @@ public static Tuple randomDocumentField( if (isMetafield) { String metaField = randomValueOtherThanMany(excludeMetaFieldFilter, () -> randomFrom(IndicesModule.getBuiltInMetadataFields())); DocumentField documentField; - if (metaField.equals(IgnoredFieldMapper.NAME)) { + if (IgnoredFieldMapper.NAME.equals(metaField) || IgnoredSourceFieldMapper.NAME.equals(metaField)) { int numValues = randomIntBetween(1, 3); List ignoredFields = new ArrayList<>(numValues); for (int i = 0; i < numValues; i++) { @@ -130,7 +131,7 @@ public static Tuple randomDocumentField( } documentField = new DocumentField(metaField, ignoredFields); } else { - // meta fields are single value only, besides _ignored + // meta fields are single value only, besides _ignored and _ignored_source documentField = new DocumentField(metaField, Collections.singletonList(randomAlphaOfLengthBetween(3, 10))); } return Tuple.tuple(documentField, documentField); diff --git a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java index df1ea6b756405..b0edbb829df2a 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java @@ -29,6 +29,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.mapper.IgnoredFieldMapper; +import org.elasticsearch.index.mapper.IgnoredSourceFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; import 
org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; @@ -83,7 +84,7 @@ public enum SearchResponseUtils { SearchHit.METADATA_FIELDS, v -> new HashMap() ); - if (fieldName.equals(IgnoredFieldMapper.NAME)) { + if (IgnoredFieldMapper.NAME.equals(fieldName) || IgnoredSourceFieldMapper.NAME.equals(fieldName)) { fieldMap.put(fieldName, new DocumentField(fieldName, (List) fieldValue)); } else { fieldMap.put(fieldName, new DocumentField(fieldName, Collections.singletonList(fieldValue))); diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/20_ignored_source.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/20_ignored_source.yml index c54edb0994860..2f111d579ebb1 100644 --- a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/20_ignored_source.yml +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/20_ignored_source.yml @@ -27,6 +27,10 @@ setup: --- "fetch stored fields wildcard": + - requires: + cluster_features: [ mapper.ignored_source_as_top_level_metadata_array_field ] + reason: requires returning the _ignored_source field as a top level array metadata field + - do: headers: Content-Type: application/yaml @@ -40,6 +44,10 @@ setup: --- "fetch fields wildcard": + - requires: + cluster_features: [ mapper.ignored_source_as_top_level_metadata_array_field ] + reason: requires returning the _ignored_source field as a top level array metadata field + - do: headers: Content-Type: application/yaml @@ -53,6 +61,10 @@ setup: --- "fetch stored fields by name": + - requires: + cluster_features: [ mapper.ignored_source_as_top_level_metadata_array_field ] + reason: requires returning the _ignored_source field as a top level array metadata field + - do: headers: Content-Type: application/yaml @@ -62,10 +74,14 @@ setup: stored_fields: [ _ignored_source ] - match: { hits.total.value: 1 } - - match: { hits.hits.0._ignored_source: !!binary 
"BgAAAG9iamVjdHktLS0KbmFtZTogImZvbyIKdmFsdWU6IDEwCg==" } + - match: { hits.hits.0._ignored_source.0: !!binary "BgAAAG9iamVjdHktLS0KbmFtZTogImZvbyIKdmFsdWU6IDEwCg==" } --- "fetch fields by name": + - requires: + cluster_features: [ mapper.ignored_source_as_top_level_metadata_array_field ] + reason: requires returning the _ignored_source field as a top level array metadata field + - do: headers: Content-Type: application/yaml @@ -75,10 +91,14 @@ setup: fields: [ _ignored_source ] - match: { hits.total.value: 1 } - - match: { hits.hits.0._ignored_source: !!binary "BgAAAG9iamVjdHktLS0KbmFtZTogImZvbyIKdmFsdWU6IDEwCg==" } + - match: { hits.hits.0._ignored_source.0: !!binary "BgAAAG9iamVjdHktLS0KbmFtZTogImZvbyIKdmFsdWU6IDEwCg==" } --- "fields and stored fields combination": + - requires: + cluster_features: [ mapper.ignored_source_as_top_level_metadata_array_field ] + reason: requires returning the _ignored_source field as a top level array metadata field + - do: headers: Content-Type: application/yaml @@ -92,10 +112,14 @@ setup: - match: { hits.total.value: 1 } - match: { hits.hits.0.fields.object: null } - - match: { hits.hits.0._ignored_source: !!binary "BgAAAG9iamVjdHktLS0KbmFtZTogImZvbyIKdmFsdWU6IDEwCg==" } + - match: { hits.hits.0._ignored_source.0: !!binary "BgAAAG9iamVjdHktLS0KbmFtZTogImZvbyIKdmFsdWU6IDEwCg==" } --- "wildcard fields and stored fields combination": + - requires: + cluster_features: [ mapper.ignored_source_as_top_level_metadata_array_field ] + reason: requires returning the _ignored_source field as a top level array metadata field + - do: search: index: test @@ -108,6 +132,10 @@ setup: --- "fields with ignored source in stored fields": + - requires: + cluster_features: [ mapper.ignored_source_as_top_level_metadata_array_field ] + reason: requires returning the _ignored_source field as a top level array metadata field + - do: headers: Content-Type: application/yaml @@ -118,11 +146,15 @@ setup: fields: [ object ] - match: { hits.total.value: 1 } - - 
match: { hits.hits.0._ignored_source: !!binary "BgAAAG9iamVjdHktLS0KbmFtZTogImZvbyIKdmFsdWU6IDEwCg==" } + - match: { hits.hits.0._ignored_source.0: !!binary "BgAAAG9iamVjdHktLS0KbmFtZTogImZvbyIKdmFsdWU6IDEwCg==" } - match: { hits.hits.0.fields: null } --- "fields with ignored source in fields": + - requires: + cluster_features: [ mapper.ignored_source_as_top_level_metadata_array_field ] + reason: requires returning the _ignored_source field as a top level array metadata field + - do: headers: Content-Type: application/yaml @@ -133,10 +165,14 @@ setup: fields: [ _ignored_source ] - match: { hits.total.value: 1 } - - match: { hits.hits.0._ignored_source: !!binary "BgAAAG9iamVjdHktLS0KbmFtZTogImZvbyIKdmFsdWU6IDEwCg==" } + - match: { hits.hits.0._ignored_source.0: !!binary "BgAAAG9iamVjdHktLS0KbmFtZTogImZvbyIKdmFsdWU6IDEwCg==" } --- "ignored source via fields and wildcard stored fields": + - requires: + cluster_features: [ mapper.ignored_source_as_top_level_metadata_array_field ] + reason: requires returning the _ignored_source field as a top level array metadata field + - do: headers: Content-Type: application/yaml @@ -147,10 +183,14 @@ setup: fields: [ _ignored_source ] - match: { hits.total.value: 1 } - - match: { hits.hits.0._ignored_source: !!binary "BgAAAG9iamVjdHktLS0KbmFtZTogImZvbyIKdmFsdWU6IDEwCg==" } + - match: { hits.hits.0._ignored_source.0: !!binary "BgAAAG9iamVjdHktLS0KbmFtZTogImZvbyIKdmFsdWU6IDEwCg==" } --- "wildcard fields and ignored source via stored fields": + - requires: + cluster_features: [ mapper.ignored_source_as_top_level_metadata_array_field ] + reason: requires returning the _ignored_source field as a top level array metadata field + - do: headers: Content-Type: application/yaml @@ -161,4 +201,108 @@ setup: fields: [ "*" ] - match: { hits.total.value: 1 } - - match: { hits.hits.0._ignored_source: !!binary "BgAAAG9iamVjdHktLS0KbmFtZTogImZvbyIKdmFsdWU6IDEwCg==" } + - match: { hits.hits.0._ignored_source.0: !!binary 
"BgAAAG9iamVjdHktLS0KbmFtZTogImZvbyIKdmFsdWU6IDEwCg==" } + +--- +ignored source array via fields: + - requires: + cluster_features: [mapper.ignored_source_as_top_level_metadata_array_field] + reason: requires returning the _ignored_source field as a top level array metadata field + + - do: + indices.create: + index: test-dynamic-fields + body: + settings: + index: + mapping: + source: + mode: synthetic + total_fields: + ignore_dynamic_beyond_limit: true + limit: 1 # only `name` static mapping is allowed + mappings: + properties: + name: + type: keyword + + - do: + bulk: + index: test-dynamic-fields + refresh: true + body: + - '{ "index": { } }' + - '{ "name": "foo", "value": 1, "id": "f5t7-66gt" }' + - match: { errors: false } + + - do: + headers: + Content-Type: application/yaml + search: + index: test-dynamic-fields + body: + fields: [ "_ignored_source" ] + query: + match_all: {} + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._source.name: "foo" } + - match: { hits.hits.0._source.value: 1 } + - match: { hits.hits.0._source.id: "f5t7-66gt" } + - match: { hits.hits.0._ignored: [ "id", "value" ]} + - length: { hits.hits.0._ignored_source: 2 } + - match: { hits.hits.0._ignored_source.0: !!binary "AgAAAGlkU2Y1dDctNjZndA==" } # `id` field + - match: { hits.hits.0._ignored_source.1: !!binary "BQAAAHZhbHVlSQEAAAA=" } # `value` field + +--- +ignored source array via stored_fields: + - requires: + cluster_features: [mapper.ignored_source_as_top_level_metadata_array_field] + reason: requires returning the _ignored_source field as a top level array metadata field + + - do: + indices.create: + index: test-dynamic-stored-fields + body: + settings: + index: + mapping: + source: + mode: synthetic + total_fields: + ignore_dynamic_beyond_limit: true + limit: 1 # only `name` static mapping is allowed + mappings: + properties: + name: + type: keyword + + - do: + bulk: + index: test-dynamic-stored-fields + refresh: true + body: + - '{ "index": { } }' + - '{ "name": 
"foo", "value": 1, "id": "f5t7-66gt" }' + - match: { errors: false } + + - do: + headers: + Content-Type: application/yaml + search: + index: test-dynamic-stored-fields + body: + # NOTE: when using synthetic source `_source` field needs to be explicitly requested via `stored_fields`, + # a wildcard request would not include it. + stored_fields: [ "_ignored_source", "_source" ] + query: + match_all: {} + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._source.name: "foo" } + - match: { hits.hits.0._source.value: 1 } + - match: { hits.hits.0._source.id: "f5t7-66gt" } + - match: { hits.hits.0._ignored: [ "id", "value" ]} + - length: { hits.hits.0._ignored_source: 2 } + - match: { hits.hits.0._ignored_source.0: !!binary "AgAAAGlkU2Y1dDctNjZndA==" } # `id` field + - match: { hits.hits.0._ignored_source.1: !!binary "BQAAAHZhbHVlSQEAAAA=" } # `value` field From 3d307e0d7867116585dccfb335e0cab0c192bdb9 Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Fri, 25 Oct 2024 10:09:53 +0200 Subject: [PATCH 406/449] Don't return TEXT type for functions that take TEXT (#114334) Always return `KEYWORD` for functions that previously returned `TEXT`, because any change to the value, no matter how small, is enough to render meaningless the original analyzer associated with the `TEXT` field value. In principle, if the attribute is no longer the original `FieldAttribute`, it can no longer claim to have the type `TEXT`. This has been done for all functions: conversion functions, aggregating functions, multi-value functions. There were several that already produced `KEYWORD` for `TEXT` input (eg. ToString, FromBase64 and ToBase64, MvZip, ToLower, ToUpper, DateFormat, Concat, Left, Repeat, Replace, Right, Split, Substring), but many others that incorrectly claimed to produce `TEXT`, while this was really a false claim. This PR makes that now strict, and includes changes to the functions' units tests to disallow the tests to expect any functions output to be `TEXT`. 
One side effect of this change is that methods that take multiple parameters that require all of them to have the same type, will now treat TEXT and KEYWORD the same. This was already the case for functions like `Concat`, but is now also the case for `Greatest`, `Least`, `Case`, `Coalesce` and `MvAppend`. An associated change is that the type casting operator `::text` has been entirely removed. It used to map onto the `ToString` function which returned type KEYWORD, and so `::text` really produced a `KEYWORD`, which is a lie, or at least a `bug`, which is now fixed. Should we ever wish to actually produce real `TEXT`, we might love the fact that this operator has been freed up for future use (although it seems likely that function will require parameters to specify the analyzer, so might never be an operator again). ### Backwards compatibility issues: This is a change that will fail BWC tests, since we have many tests that assert on TEXT output to functions. For this reason we needed to block two scenarios: * We used the capability `functions_never_emit_text` to prevent 7 csv-spec tests and 2 yaml tests from being run against older versions that still emit text. * We used `skipTest` to also block those two yaml tests from being run against the latest build, but using older yaml files downloaded (as far back as 8.14). In all cases the change observed in these tests was simply the results columns no longer having `text` type, and instead being `keyword`. 
--------- Co-authored-by: Luigi Dell'Aquila --- docs/changelog/114334.yaml | 7 +++ .../functions/kibana/definition/case.json | 52 +++++++++++++++- .../functions/kibana/definition/coalesce.json | 4 +- .../functions/kibana/definition/greatest.json | 4 +- .../functions/kibana/definition/least.json | 4 +- .../functions/kibana/definition/ltrim.json | 2 +- .../esql/functions/kibana/definition/max.json | 2 +- .../esql/functions/kibana/definition/min.json | 2 +- .../kibana/definition/mv_append.json | 2 +- .../kibana/definition/mv_dedupe.json | 2 +- .../functions/kibana/definition/mv_first.json | 2 +- .../functions/kibana/definition/mv_last.json | 2 +- .../functions/kibana/definition/mv_max.json | 2 +- .../functions/kibana/definition/mv_min.json | 2 +- .../functions/kibana/definition/mv_slice.json | 2 +- .../functions/kibana/definition/mv_sort.json | 2 +- .../functions/kibana/definition/reverse.json | 2 +- .../functions/kibana/definition/rtrim.json | 2 +- .../functions/kibana/definition/to_lower.json | 2 +- .../functions/kibana/definition/to_upper.json | 2 +- .../esql/functions/kibana/definition/top.json | 2 +- .../functions/kibana/definition/trim.json | 2 +- .../functions/kibana/definition/values.json | 2 +- .../esql/functions/kibana/inline_cast.json | 1 - .../esql/functions/types/case.asciidoc | 6 +- .../esql/functions/types/coalesce.asciidoc | 4 +- .../esql/functions/types/greatest.asciidoc | 4 +- .../esql/functions/types/least.asciidoc | 4 +- .../esql/functions/types/ltrim.asciidoc | 2 +- .../esql/functions/types/max.asciidoc | 2 +- .../esql/functions/types/min.asciidoc | 2 +- .../esql/functions/types/mv_append.asciidoc | 2 +- .../esql/functions/types/mv_dedupe.asciidoc | 2 +- .../esql/functions/types/mv_first.asciidoc | 2 +- .../esql/functions/types/mv_last.asciidoc | 2 +- .../esql/functions/types/mv_max.asciidoc | 2 +- .../esql/functions/types/mv_min.asciidoc | 2 +- .../esql/functions/types/mv_slice.asciidoc | 2 +- .../esql/functions/types/mv_sort.asciidoc | 2 +- 
.../esql/functions/types/reverse.asciidoc | 2 +- .../esql/functions/types/rtrim.asciidoc | 2 +- .../esql/functions/types/to_lower.asciidoc | 2 +- .../esql/functions/types/to_upper.asciidoc | 2 +- .../esql/functions/types/top.asciidoc | 2 +- .../esql/functions/types/trim.asciidoc | 2 +- .../esql/functions/types/values.asciidoc | 2 +- x-pack/plugin/build.gradle | 2 + .../xpack/esql/core/type/DataType.java | 4 ++ .../src/main/resources/convert.csv-spec | 6 +- .../src/main/resources/stats.csv-spec | 14 +++-- .../src/main/resources/stats_top.csv-spec | 6 +- .../src/main/resources/string.csv-spec | 3 +- .../xpack/esql/action/EsqlCapabilities.java | 5 ++ .../expression/function/aggregate/Max.java | 4 +- .../expression/function/aggregate/Min.java | 4 +- .../expression/function/aggregate/Top.java | 4 +- .../expression/function/aggregate/Values.java | 4 +- .../function/scalar/UnaryScalarFunction.java | 2 +- .../function/scalar/conditional/Case.java | 5 +- .../function/scalar/conditional/Greatest.java | 6 +- .../function/scalar/conditional/Least.java | 6 +- .../function/scalar/multivalue/MvAppend.java | 7 +-- .../function/scalar/multivalue/MvDedupe.java | 1 - .../function/scalar/multivalue/MvFirst.java | 1 - .../function/scalar/multivalue/MvLast.java | 1 - .../function/scalar/multivalue/MvMax.java | 2 +- .../function/scalar/multivalue/MvMin.java | 2 +- .../function/scalar/multivalue/MvSlice.java | 3 +- .../function/scalar/multivalue/MvSort.java | 4 +- .../function/scalar/nulls/Coalesce.java | 5 +- .../function/scalar/string/LTrim.java | 2 +- .../function/scalar/string/RTrim.java | 2 +- .../function/scalar/string/Reverse.java | 2 +- .../function/scalar/string/ToLower.java | 4 +- .../function/scalar/string/ToUpper.java | 4 +- .../function/scalar/string/Trim.java | 2 +- .../esql/type/EsqlDataTypeConverter.java | 1 - .../xpack/esql/analysis/ParsingTests.java | 3 - .../expression/function/TestCaseSupplier.java | 2 +- .../function/aggregate/MaxTests.java | 2 +- 
.../function/aggregate/MinTests.java | 2 +- .../scalar/conditional/CaseTests.java | 59 ++++++++++++++++++- .../function/scalar/string/ToLowerTests.java | 2 +- .../function/scalar/string/ToUpperTests.java | 2 +- .../rest-api-spec/test/esql/80_text.yml | 24 ++++++-- 85 files changed, 253 insertions(+), 123 deletions(-) create mode 100644 docs/changelog/114334.yaml diff --git a/docs/changelog/114334.yaml b/docs/changelog/114334.yaml new file mode 100644 index 0000000000000..d0fefe40c6970 --- /dev/null +++ b/docs/changelog/114334.yaml @@ -0,0 +1,7 @@ +pr: 114334 +summary: Don't return TEXT type for functions that take TEXT +area: ES|QL +type: bug +issues: + - 111537 + - 114333 diff --git a/docs/reference/esql/functions/kibana/definition/case.json b/docs/reference/esql/functions/kibana/definition/case.json index 1cf2c6ce7a579..bf498f690551c 100644 --- a/docs/reference/esql/functions/kibana/definition/case.json +++ b/docs/reference/esql/functions/kibana/definition/case.json @@ -424,6 +424,30 @@ "variadic" : true, "returnType" : "keyword" }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "keyword", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + }, + { + "name" : "elseValue", + "type" : "text", + "optional" : true, + "description" : "The value that's returned when no condition evaluates to `true`." + } + ], + "variadic" : true, + "returnType" : "keyword" + }, { "params" : [ { @@ -482,7 +506,31 @@ } ], "variadic" : true, - "returnType" : "text" + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." 
+ }, + { + "name" : "trueValue", + "type" : "text", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + }, + { + "name" : "elseValue", + "type" : "keyword", + "optional" : true, + "description" : "The value that's returned when no condition evaluates to `true`." + } + ], + "variadic" : true, + "returnType" : "keyword" }, { "params" : [ @@ -506,7 +554,7 @@ } ], "variadic" : true, - "returnType" : "text" + "returnType" : "keyword" }, { "params" : [ diff --git a/docs/reference/esql/functions/kibana/definition/coalesce.json b/docs/reference/esql/functions/kibana/definition/coalesce.json index 9ebc5a97229cd..7f49195190951 100644 --- a/docs/reference/esql/functions/kibana/definition/coalesce.json +++ b/docs/reference/esql/functions/kibana/definition/coalesce.json @@ -242,7 +242,7 @@ } ], "variadic" : true, - "returnType" : "text" + "returnType" : "keyword" }, { "params" : [ @@ -260,7 +260,7 @@ } ], "variadic" : true, - "returnType" : "text" + "returnType" : "keyword" }, { "params" : [ diff --git a/docs/reference/esql/functions/kibana/definition/greatest.json b/docs/reference/esql/functions/kibana/definition/greatest.json index 2818a5ac56339..eebb4fad1eb1d 100644 --- a/docs/reference/esql/functions/kibana/definition/greatest.json +++ b/docs/reference/esql/functions/kibana/definition/greatest.json @@ -189,7 +189,7 @@ } ], "variadic" : true, - "returnType" : "text" + "returnType" : "keyword" }, { "params" : [ @@ -207,7 +207,7 @@ } ], "variadic" : true, - "returnType" : "text" + "returnType" : "keyword" }, { "params" : [ diff --git a/docs/reference/esql/functions/kibana/definition/least.json b/docs/reference/esql/functions/kibana/definition/least.json index 7b545896f4ddc..02fa58f92eaef 100644 --- a/docs/reference/esql/functions/kibana/definition/least.json +++ b/docs/reference/esql/functions/kibana/definition/least.json @@ -188,7 
+188,7 @@ } ], "variadic" : true, - "returnType" : "text" + "returnType" : "keyword" }, { "params" : [ @@ -206,7 +206,7 @@ } ], "variadic" : true, - "returnType" : "text" + "returnType" : "keyword" }, { "params" : [ diff --git a/docs/reference/esql/functions/kibana/definition/ltrim.json b/docs/reference/esql/functions/kibana/definition/ltrim.json index e85c2d42dedee..6d992b9db7b2c 100644 --- a/docs/reference/esql/functions/kibana/definition/ltrim.json +++ b/docs/reference/esql/functions/kibana/definition/ltrim.json @@ -26,7 +26,7 @@ } ], "variadic" : false, - "returnType" : "text" + "returnType" : "keyword" } ], "examples" : [ diff --git a/docs/reference/esql/functions/kibana/definition/max.json b/docs/reference/esql/functions/kibana/definition/max.json index 09ca95a0afeff..45fd26571b091 100644 --- a/docs/reference/esql/functions/kibana/definition/max.json +++ b/docs/reference/esql/functions/kibana/definition/max.json @@ -98,7 +98,7 @@ } ], "variadic" : false, - "returnType" : "text" + "returnType" : "keyword" }, { "params" : [ diff --git a/docs/reference/esql/functions/kibana/definition/min.json b/docs/reference/esql/functions/kibana/definition/min.json index 3e87b3e9038e1..ae71fba049dbe 100644 --- a/docs/reference/esql/functions/kibana/definition/min.json +++ b/docs/reference/esql/functions/kibana/definition/min.json @@ -98,7 +98,7 @@ } ], "variadic" : false, - "returnType" : "text" + "returnType" : "keyword" }, { "params" : [ diff --git a/docs/reference/esql/functions/kibana/definition/mv_append.json b/docs/reference/esql/functions/kibana/definition/mv_append.json index c14a3337a25a7..81c1b777be498 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_append.json +++ b/docs/reference/esql/functions/kibana/definition/mv_append.json @@ -218,7 +218,7 @@ } ], "variadic" : false, - "returnType" : "text" + "returnType" : "keyword" }, { "params" : [ diff --git a/docs/reference/esql/functions/kibana/definition/mv_dedupe.json 
b/docs/reference/esql/functions/kibana/definition/mv_dedupe.json index 9bb0935c6a5de..bfca58bc3e140 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_dedupe.json +++ b/docs/reference/esql/functions/kibana/definition/mv_dedupe.json @@ -147,7 +147,7 @@ } ], "variadic" : false, - "returnType" : "text" + "returnType" : "keyword" }, { "params" : [ diff --git a/docs/reference/esql/functions/kibana/definition/mv_first.json b/docs/reference/esql/functions/kibana/definition/mv_first.json index 80e761faafab9..a2b6358023e4b 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_first.json +++ b/docs/reference/esql/functions/kibana/definition/mv_first.json @@ -146,7 +146,7 @@ } ], "variadic" : false, - "returnType" : "text" + "returnType" : "keyword" }, { "params" : [ diff --git a/docs/reference/esql/functions/kibana/definition/mv_last.json b/docs/reference/esql/functions/kibana/definition/mv_last.json index fb16400f86e62..b6dc268af5305 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_last.json +++ b/docs/reference/esql/functions/kibana/definition/mv_last.json @@ -146,7 +146,7 @@ } ], "variadic" : false, - "returnType" : "text" + "returnType" : "keyword" }, { "params" : [ diff --git a/docs/reference/esql/functions/kibana/definition/mv_max.json b/docs/reference/esql/functions/kibana/definition/mv_max.json index 17cdae8a3d39c..27d2b010dc02c 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_max.json +++ b/docs/reference/esql/functions/kibana/definition/mv_max.json @@ -98,7 +98,7 @@ } ], "variadic" : false, - "returnType" : "text" + "returnType" : "keyword" }, { "params" : [ diff --git a/docs/reference/esql/functions/kibana/definition/mv_min.json b/docs/reference/esql/functions/kibana/definition/mv_min.json index 3718a0f6e1de5..410e97335687f 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_min.json +++ b/docs/reference/esql/functions/kibana/definition/mv_min.json @@ -98,7 +98,7 @@ } ], "variadic" : false, - 
"returnType" : "text" + "returnType" : "keyword" }, { "params" : [ diff --git a/docs/reference/esql/functions/kibana/definition/mv_slice.json b/docs/reference/esql/functions/kibana/definition/mv_slice.json index 399a6145b040e..dbbfe0ffb5a78 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_slice.json +++ b/docs/reference/esql/functions/kibana/definition/mv_slice.json @@ -290,7 +290,7 @@ } ], "variadic" : false, - "returnType" : "text" + "returnType" : "keyword" }, { "params" : [ diff --git a/docs/reference/esql/functions/kibana/definition/mv_sort.json b/docs/reference/esql/functions/kibana/definition/mv_sort.json index c78ade7c8a94f..4cb255fb0afcb 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_sort.json +++ b/docs/reference/esql/functions/kibana/definition/mv_sort.json @@ -146,7 +146,7 @@ } ], "variadic" : false, - "returnType" : "text" + "returnType" : "keyword" }, { "params" : [ diff --git a/docs/reference/esql/functions/kibana/definition/reverse.json b/docs/reference/esql/functions/kibana/definition/reverse.json index 1b222691530f2..0652d9cfa6b15 100644 --- a/docs/reference/esql/functions/kibana/definition/reverse.json +++ b/docs/reference/esql/functions/kibana/definition/reverse.json @@ -26,7 +26,7 @@ } ], "variadic" : false, - "returnType" : "text" + "returnType" : "keyword" } ], "examples" : [ diff --git a/docs/reference/esql/functions/kibana/definition/rtrim.json b/docs/reference/esql/functions/kibana/definition/rtrim.json index 028f442de9632..9c8a7578ed789 100644 --- a/docs/reference/esql/functions/kibana/definition/rtrim.json +++ b/docs/reference/esql/functions/kibana/definition/rtrim.json @@ -26,7 +26,7 @@ } ], "variadic" : false, - "returnType" : "text" + "returnType" : "keyword" } ], "examples" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_lower.json b/docs/reference/esql/functions/kibana/definition/to_lower.json index f9b49a29a8c7d..07bb057fe080d 100644 --- 
a/docs/reference/esql/functions/kibana/definition/to_lower.json +++ b/docs/reference/esql/functions/kibana/definition/to_lower.json @@ -26,7 +26,7 @@ } ], "variadic" : false, - "returnType" : "text" + "returnType" : "keyword" } ], "examples" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_upper.json b/docs/reference/esql/functions/kibana/definition/to_upper.json index edf36a982f56b..caa9d563b08b1 100644 --- a/docs/reference/esql/functions/kibana/definition/to_upper.json +++ b/docs/reference/esql/functions/kibana/definition/to_upper.json @@ -26,7 +26,7 @@ } ], "variadic" : false, - "returnType" : "text" + "returnType" : "keyword" } ], "examples" : [ diff --git a/docs/reference/esql/functions/kibana/definition/top.json b/docs/reference/esql/functions/kibana/definition/top.json index 7fa4ff123eec7..82bd80636152c 100644 --- a/docs/reference/esql/functions/kibana/definition/top.json +++ b/docs/reference/esql/functions/kibana/definition/top.json @@ -194,7 +194,7 @@ } ], "variadic" : false, - "returnType" : "text" + "returnType" : "keyword" } ], "examples" : [ diff --git a/docs/reference/esql/functions/kibana/definition/trim.json b/docs/reference/esql/functions/kibana/definition/trim.json index 6edf13e588e62..45805b3bfb054 100644 --- a/docs/reference/esql/functions/kibana/definition/trim.json +++ b/docs/reference/esql/functions/kibana/definition/trim.json @@ -26,7 +26,7 @@ } ], "variadic" : false, - "returnType" : "text" + "returnType" : "keyword" } ], "examples" : [ diff --git a/docs/reference/esql/functions/kibana/definition/values.json b/docs/reference/esql/functions/kibana/definition/values.json index e289173d9d989..ae69febd4f755 100644 --- a/docs/reference/esql/functions/kibana/definition/values.json +++ b/docs/reference/esql/functions/kibana/definition/values.json @@ -98,7 +98,7 @@ } ], "variadic" : false, - "returnType" : "text" + "returnType" : "keyword" }, { "params" : [ diff --git a/docs/reference/esql/functions/kibana/inline_cast.json 
b/docs/reference/esql/functions/kibana/inline_cast.json index 81a1966773238..9f663c8d0d6a3 100644 --- a/docs/reference/esql/functions/kibana/inline_cast.json +++ b/docs/reference/esql/functions/kibana/inline_cast.json @@ -15,7 +15,6 @@ "keyword" : "to_string", "long" : "to_long", "string" : "to_string", - "text" : "to_string", "time_duration" : "to_timeduration", "unsigned_long" : "to_unsigned_long", "version" : "to_version" diff --git a/docs/reference/esql/functions/types/case.asciidoc b/docs/reference/esql/functions/types/case.asciidoc index e8aa3eaf5daae..c6fb6a091e9d0 100644 --- a/docs/reference/esql/functions/types/case.asciidoc +++ b/docs/reference/esql/functions/types/case.asciidoc @@ -24,11 +24,13 @@ boolean | integer | | integer boolean | ip | ip | ip boolean | ip | | ip boolean | keyword | keyword | keyword +boolean | keyword | text | keyword boolean | keyword | | keyword boolean | long | long | long boolean | long | | long -boolean | text | text | text -boolean | text | | text +boolean | text | keyword | keyword +boolean | text | text | keyword +boolean | text | | keyword boolean | unsigned_long | unsigned_long | unsigned_long boolean | unsigned_long | | unsigned_long boolean | version | version | version diff --git a/docs/reference/esql/functions/types/coalesce.asciidoc b/docs/reference/esql/functions/types/coalesce.asciidoc index 368a12db0dca4..23a249494e0a2 100644 --- a/docs/reference/esql/functions/types/coalesce.asciidoc +++ b/docs/reference/esql/functions/types/coalesce.asciidoc @@ -19,7 +19,7 @@ keyword | keyword | keyword keyword | | keyword long | long | long long | | long -text | text | text -text | | text +text | text | keyword +text | | keyword version | version | version |=== diff --git a/docs/reference/esql/functions/types/greatest.asciidoc b/docs/reference/esql/functions/types/greatest.asciidoc index 1454bbb6f81c1..7df77a6991315 100644 --- a/docs/reference/esql/functions/types/greatest.asciidoc +++ 
b/docs/reference/esql/functions/types/greatest.asciidoc @@ -16,7 +16,7 @@ keyword | keyword | keyword keyword | | keyword long | long | long long | | long -text | text | text -text | | text +text | text | keyword +text | | keyword version | version | version |=== diff --git a/docs/reference/esql/functions/types/least.asciidoc b/docs/reference/esql/functions/types/least.asciidoc index 1454bbb6f81c1..7df77a6991315 100644 --- a/docs/reference/esql/functions/types/least.asciidoc +++ b/docs/reference/esql/functions/types/least.asciidoc @@ -16,7 +16,7 @@ keyword | keyword | keyword keyword | | keyword long | long | long long | | long -text | text | text -text | | text +text | text | keyword +text | | keyword version | version | version |=== diff --git a/docs/reference/esql/functions/types/ltrim.asciidoc b/docs/reference/esql/functions/types/ltrim.asciidoc index 41d60049d59b8..1ba0e98ec8f09 100644 --- a/docs/reference/esql/functions/types/ltrim.asciidoc +++ b/docs/reference/esql/functions/types/ltrim.asciidoc @@ -6,5 +6,5 @@ |=== string | result keyword | keyword -text | text +text | keyword |=== diff --git a/docs/reference/esql/functions/types/max.asciidoc b/docs/reference/esql/functions/types/max.asciidoc index 35ce5811e0cd0..564fb8dc3bfb0 100644 --- a/docs/reference/esql/functions/types/max.asciidoc +++ b/docs/reference/esql/functions/types/max.asciidoc @@ -12,6 +12,6 @@ integer | integer ip | ip keyword | keyword long | long -text | text +text | keyword version | version |=== diff --git a/docs/reference/esql/functions/types/min.asciidoc b/docs/reference/esql/functions/types/min.asciidoc index 35ce5811e0cd0..564fb8dc3bfb0 100644 --- a/docs/reference/esql/functions/types/min.asciidoc +++ b/docs/reference/esql/functions/types/min.asciidoc @@ -12,6 +12,6 @@ integer | integer ip | ip keyword | keyword long | long -text | text +text | keyword version | version |=== diff --git a/docs/reference/esql/functions/types/mv_append.asciidoc 
b/docs/reference/esql/functions/types/mv_append.asciidoc index a1894e429ae82..05f9ff6b19f9e 100644 --- a/docs/reference/esql/functions/types/mv_append.asciidoc +++ b/docs/reference/esql/functions/types/mv_append.asciidoc @@ -16,6 +16,6 @@ integer | integer | integer ip | ip | ip keyword | keyword | keyword long | long | long -text | text | text +text | text | keyword version | version | version |=== diff --git a/docs/reference/esql/functions/types/mv_dedupe.asciidoc b/docs/reference/esql/functions/types/mv_dedupe.asciidoc index 68e546451c8cb..976de79bb0910 100644 --- a/docs/reference/esql/functions/types/mv_dedupe.asciidoc +++ b/docs/reference/esql/functions/types/mv_dedupe.asciidoc @@ -16,6 +16,6 @@ integer | integer ip | ip keyword | keyword long | long -text | text +text | keyword version | version |=== diff --git a/docs/reference/esql/functions/types/mv_first.asciidoc b/docs/reference/esql/functions/types/mv_first.asciidoc index 35633544d99a0..47736e76d1db4 100644 --- a/docs/reference/esql/functions/types/mv_first.asciidoc +++ b/docs/reference/esql/functions/types/mv_first.asciidoc @@ -16,7 +16,7 @@ integer | integer ip | ip keyword | keyword long | long -text | text +text | keyword unsigned_long | unsigned_long version | version |=== diff --git a/docs/reference/esql/functions/types/mv_last.asciidoc b/docs/reference/esql/functions/types/mv_last.asciidoc index 35633544d99a0..47736e76d1db4 100644 --- a/docs/reference/esql/functions/types/mv_last.asciidoc +++ b/docs/reference/esql/functions/types/mv_last.asciidoc @@ -16,7 +16,7 @@ integer | integer ip | ip keyword | keyword long | long -text | text +text | keyword unsigned_long | unsigned_long version | version |=== diff --git a/docs/reference/esql/functions/types/mv_max.asciidoc b/docs/reference/esql/functions/types/mv_max.asciidoc index 8ea36aebbad37..d4e014554c86c 100644 --- a/docs/reference/esql/functions/types/mv_max.asciidoc +++ b/docs/reference/esql/functions/types/mv_max.asciidoc @@ -12,7 +12,7 @@ integer 
| integer ip | ip keyword | keyword long | long -text | text +text | keyword unsigned_long | unsigned_long version | version |=== diff --git a/docs/reference/esql/functions/types/mv_min.asciidoc b/docs/reference/esql/functions/types/mv_min.asciidoc index 8ea36aebbad37..d4e014554c86c 100644 --- a/docs/reference/esql/functions/types/mv_min.asciidoc +++ b/docs/reference/esql/functions/types/mv_min.asciidoc @@ -12,7 +12,7 @@ integer | integer ip | ip keyword | keyword long | long -text | text +text | keyword unsigned_long | unsigned_long version | version |=== diff --git a/docs/reference/esql/functions/types/mv_slice.asciidoc b/docs/reference/esql/functions/types/mv_slice.asciidoc index 0a9dc073370c7..60c1f6315a599 100644 --- a/docs/reference/esql/functions/types/mv_slice.asciidoc +++ b/docs/reference/esql/functions/types/mv_slice.asciidoc @@ -16,6 +16,6 @@ integer | integer | integer | integer ip | integer | integer | ip keyword | integer | integer | keyword long | integer | integer | long -text | integer | integer | text +text | integer | integer | keyword version | integer | integer | version |=== diff --git a/docs/reference/esql/functions/types/mv_sort.asciidoc b/docs/reference/esql/functions/types/mv_sort.asciidoc index 93965187482ac..c21ea5983945e 100644 --- a/docs/reference/esql/functions/types/mv_sort.asciidoc +++ b/docs/reference/esql/functions/types/mv_sort.asciidoc @@ -12,6 +12,6 @@ integer | keyword | integer ip | keyword | ip keyword | keyword | keyword long | keyword | long -text | keyword | text +text | keyword | keyword version | keyword | version |=== diff --git a/docs/reference/esql/functions/types/reverse.asciidoc b/docs/reference/esql/functions/types/reverse.asciidoc index 974066d225bca..9e5dc1c477316 100644 --- a/docs/reference/esql/functions/types/reverse.asciidoc +++ b/docs/reference/esql/functions/types/reverse.asciidoc @@ -6,5 +6,5 @@ |=== str | result keyword | keyword -text | text +text | keyword |=== diff --git 
a/docs/reference/esql/functions/types/rtrim.asciidoc b/docs/reference/esql/functions/types/rtrim.asciidoc index 41d60049d59b8..1ba0e98ec8f09 100644 --- a/docs/reference/esql/functions/types/rtrim.asciidoc +++ b/docs/reference/esql/functions/types/rtrim.asciidoc @@ -6,5 +6,5 @@ |=== string | result keyword | keyword -text | text +text | keyword |=== diff --git a/docs/reference/esql/functions/types/to_lower.asciidoc b/docs/reference/esql/functions/types/to_lower.asciidoc index 974066d225bca..9e5dc1c477316 100644 --- a/docs/reference/esql/functions/types/to_lower.asciidoc +++ b/docs/reference/esql/functions/types/to_lower.asciidoc @@ -6,5 +6,5 @@ |=== str | result keyword | keyword -text | text +text | keyword |=== diff --git a/docs/reference/esql/functions/types/to_upper.asciidoc b/docs/reference/esql/functions/types/to_upper.asciidoc index 974066d225bca..9e5dc1c477316 100644 --- a/docs/reference/esql/functions/types/to_upper.asciidoc +++ b/docs/reference/esql/functions/types/to_upper.asciidoc @@ -6,5 +6,5 @@ |=== str | result keyword | keyword -text | text +text | keyword |=== diff --git a/docs/reference/esql/functions/types/top.asciidoc b/docs/reference/esql/functions/types/top.asciidoc index 25d7962a27252..699bc7b10ce84 100644 --- a/docs/reference/esql/functions/types/top.asciidoc +++ b/docs/reference/esql/functions/types/top.asciidoc @@ -12,5 +12,5 @@ integer | integer | keyword | integer ip | integer | keyword | ip keyword | integer | keyword | keyword long | integer | keyword | long -text | integer | keyword | text +text | integer | keyword | keyword |=== diff --git a/docs/reference/esql/functions/types/trim.asciidoc b/docs/reference/esql/functions/types/trim.asciidoc index 41d60049d59b8..1ba0e98ec8f09 100644 --- a/docs/reference/esql/functions/types/trim.asciidoc +++ b/docs/reference/esql/functions/types/trim.asciidoc @@ -6,5 +6,5 @@ |=== string | result keyword | keyword -text | text +text | keyword |=== diff --git 
a/docs/reference/esql/functions/types/values.asciidoc b/docs/reference/esql/functions/types/values.asciidoc index 35ce5811e0cd0..564fb8dc3bfb0 100644 --- a/docs/reference/esql/functions/types/values.asciidoc +++ b/docs/reference/esql/functions/types/values.asciidoc @@ -12,6 +12,6 @@ integer | integer ip | ip keyword | keyword long | long -text | text +text | keyword version | version |=== diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index 8297ef5161fb0..cf6a8f51d1b81 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -84,5 +84,7 @@ tasks.named("yamlRestCompatTestTransform").configure({ task -> task.skipTest("security/10_forbidden/Test bulk response with invalid credentials", "warning does not exist for compatibility") task.skipTest("inference/inference_crud/Test get all", "Assertions on number of inference models break due to default configs") task.skipTest("esql/60_usage/Basic ESQL usage output (telemetry)", "The telemetry output changed. We dropped a column. That's safe.") + task.skipTest("esql/80_text/reverse text", "The output type changed from TEXT to KEYWORD.") + task.skipTest("esql/80_text/values function", "The output type changed from TEXT to KEYWORD.") }) diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java index 5041c96128a1e..1b1eff8a07b1d 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java @@ -584,6 +584,10 @@ static Builder builder() { return new Builder(); } + public DataType noText() { + return this == TEXT ? KEYWORD : this; + } + /** * Named parameters with default values. It's just easier to do this with * a builder in java.... 
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/convert.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/convert.csv-spec index 1397965145a1a..49960d1b5b0f3 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/convert.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/convert.csv-spec @@ -58,11 +58,11 @@ ROW zero="0"::double convertToString required_capability: casting_operator -ROW one=1::keyword, two=2::text, three=3::string +ROW one=1::keyword, two=2::double, three=3::string ; - one:keyword | two:keyword | three:keyword -1 |2 |3 +one:keyword | two:double | three:keyword +1 | 2.0 | 3 ; convertToDatetime diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 2dc21a86e6394..80ba18b85a004 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -131,17 +131,19 @@ OPQS | OPQS | OPQS | ___ | small maxOfText required_capability: agg_max_min_string_support +required_capability: functions_never_emit_text from airports | eval x = name | where scalerank >= 9 | stats max(name), a = max(name), b = max(x); -max(name):text | a:text | b:text -Zaporozhye Int'l | Zaporozhye Int'l | Zaporozhye Int'l +max(name):keyword | a:keyword | b:keyword +Zaporozhye Int'l | Zaporozhye Int'l | Zaporozhye Int'l ; maxOfTextGrouping required_capability: agg_max_min_string_support +required_capability: functions_never_emit_text from airports | eval x = name | where scalerank >= 9 @@ -149,7 +151,7 @@ from airports | sort type asc | limit 4; -max(name):text | a:text | b:text | type:keyword +max(name):keyword| a:keyword | b:keyword | type:keyword Cheongju Int'l | Cheongju Int'l | Cheongju Int'l | major Zaporozhye Int'l | Zaporozhye Int'l | Zaporozhye Int'l | mid Zaporozhye Int'l | Zaporozhye Int'l | Zaporozhye Int'l | military @@ 
-211,17 +213,19 @@ LUH | LUH | LUH | ___ | small minOfText required_capability: agg_max_min_string_support +required_capability: functions_never_emit_text from airports | eval x = name | where scalerank >= 9 | stats min(name), a = min(name), b = min(x); -min(name):text | a:text | b:text +min(name):keyword | a:keyword | b:keyword Abdul Rachman Saleh | Abdul Rachman Saleh | Abdul Rachman Saleh ; minOfTextGrouping required_capability: agg_max_min_string_support +required_capability: functions_never_emit_text from airports | eval x = name | where scalerank >= 9 @@ -229,7 +233,7 @@ from airports | sort type asc | limit 4; -min(name):text | a:text | b:text | type:keyword +min(name):keyword | a:keyword | b:keyword | type:keyword Chandigarh Int'l | Chandigarh Int'l | Chandigarh Int'l | major Abdul Rachman Saleh | Abdul Rachman Saleh | Abdul Rachman Saleh | mid Abdul Rachman Saleh | Abdul Rachman Saleh | Abdul Rachman Saleh | military diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_top.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_top.csv-spec index 80d11425c5bb6..6eebb2f4d19da 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_top.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_top.csv-spec @@ -263,6 +263,7 @@ FROM employees topText required_capability: agg_top required_capability: agg_top_string_support +required_capability: functions_never_emit_text # we don't need MATCH, but the loader for books.csv is busted in CsvTests required_capability: match_operator @@ -273,13 +274,14 @@ FROM books calc = TOP(calc, 3, "asc"), evil = TOP(CASE(year < 1980, title, author), 3, "desc"); -title:text | calc:keyword | evil:text +title:keyword | calc:keyword | evil:keyword [Worlds of Exile and Illusion: Three Complete Novels of the Hainish Series in One Volume--Rocannon's World, Planet of Exile, City of Illusions, Woman-The Full Story: A Dynamic Celebration of Freedoms, Winter notes on 
summer impressions] | ["'Bria", "Gent", "HE UN"] | [William Faulkner, William Faulkner, William Faulkner] ; topTextGrouping required_capability: agg_top required_capability: agg_top_string_support +required_capability: functions_never_emit_text # we don't need MATCH, but the loader for books.csv is busted in CsvTests required_capability: match_operator @@ -293,7 +295,7 @@ FROM books | SORT author | LIMIT 3; - title:text | calc:keyword | evil:text | author:text + title:keyword | calc:keyword | evil:keyword | author:text A Tolkien Compass: Including J. R. R. Tolkien's Guide to the Names in The Lord of the Rings | Tolk | A Tolkien Compass: Including J. R. R. Tolkien's Guide to the Names in The Lord of the Rings | Agnes Perkins The Lord of the Rings Poster Collection: Six Paintings by Alan Lee (No. 1) | he Lo | [J. R. R. Tolkien, Alan Lee] | Alan Lee A Gentle Creature and Other Stories: White Nights, A Gentle Creature, and The Dream of a Ridiculous Man (The World's Classics) | Gent | [W. J. 
Leatherbarrow, Fyodor Dostoevsky, Alan Myers] | Alan Myers diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 00fa2fddb2106..305b8f3d8011e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -1289,6 +1289,7 @@ x:integer | y:string reverseWithTextFields required_capability: fn_reverse +required_capability: functions_never_emit_text FROM books | EVAL title_reversed = REVERSE(title), author_reversed_twice = REVERSE(REVERSE(author)), eq = author_reversed_twice == author | KEEP title, title_reversed, author, author_reversed_twice, eq, book_no @@ -1296,7 +1297,7 @@ FROM books | WHERE book_no IN ("1211", "1463") | LIMIT 2; -title:text | title_reversed:text | author:text | author_reversed_twice:text | eq:boolean | book_no:keyword +title:text | title_reversed:keyword | author:text | author_reversed_twice:keyword | eq:boolean | book_no:keyword The brothers Karamazov | vozamaraK srehtorb ehT | Fyodor Dostoevsky | Fyodor Dostoevsky | true | 1211 Realms of Tolkien: Images of Middle-earth | htrae-elddiM fo segamI :neikloT fo smlaeR | J. R. R. Tolkien | J. R. R. Tolkien | true | 1463 ; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 55236af648236..196a864db2c15 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -74,6 +74,11 @@ public enum Cap { */ FN_SUBSTRING_EMPTY_NULL, + /** + * All functions that take TEXT should never emit TEXT, only KEYWORD. #114334 + */ + FUNCTIONS_NEVER_EMIT_TEXT, + /** * Support for the {@code INLINESTATS} syntax. 
*/ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java index ee16193efdccc..ac2d4ff3cbc43 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java @@ -55,7 +55,7 @@ public class Max extends AggregateFunction implements ToAggregator, SurrogateExp ); @FunctionInfo( - returnType = { "boolean", "double", "integer", "long", "date", "ip", "keyword", "text", "long", "version" }, + returnType = { "boolean", "double", "integer", "long", "date", "ip", "keyword", "long", "version" }, description = "The maximum value of a field.", isAggregation = true, examples = { @@ -119,7 +119,7 @@ protected TypeResolution resolveType() { @Override public DataType dataType() { - return field().dataType(); + return field().dataType().noText(); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java index 7aaa41ea6ab11..a5fc8196847b7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java @@ -55,7 +55,7 @@ public class Min extends AggregateFunction implements ToAggregator, SurrogateExp ); @FunctionInfo( - returnType = { "boolean", "double", "integer", "long", "date", "ip", "keyword", "text", "long", "version" }, + returnType = { "boolean", "double", "integer", "long", "date", "ip", "keyword", "long", "version" }, description = "The minimum value of a field.", isAggregation = true, examples = { @@ -119,7 +119,7 @@ protected TypeResolution 
resolveType() { @Override public DataType dataType() { - return field().dataType(); + return field().dataType().noText(); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java index 4f81e0a897f9c..e0a7da806b3ac 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java @@ -51,7 +51,7 @@ public class Top extends AggregateFunction implements ToAggregator, SurrogateExp private static final String ORDER_DESC = "DESC"; @FunctionInfo( - returnType = { "boolean", "double", "integer", "long", "date", "ip", "keyword", "text" }, + returnType = { "boolean", "double", "integer", "long", "date", "ip", "keyword" }, description = "Collects the top values for a field. Includes repeated values.", isAggregation = true, examples = @Example(file = "stats_top", tag = "top") @@ -175,7 +175,7 @@ protected TypeResolution resolveType() { @Override public DataType dataType() { - return field().dataType(); + return field().dataType().noText(); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java index 8d576839c3c5c..111eab051719b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java @@ -52,7 +52,7 @@ public class Values extends AggregateFunction implements ToAggregator { ); @FunctionInfo( - returnType = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, + returnType = { 
"boolean", "date", "double", "integer", "ip", "keyword", "long", "version" }, preview = true, description = "Returns all values in a group as a multivalued field. The order of the returned values isn't guaranteed. " + "If you need the values returned in order use <>.", @@ -105,7 +105,7 @@ public Values withFilter(Expression filter) { @Override public DataType dataType() { - return field().dataType(); + return field().dataType().noText(); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java index 4d34033286f52..53b51f16d4183 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java @@ -164,6 +164,6 @@ public final Expression field() { @Override public DataType dataType() { - return field.dataType(); + return field.dataType().noText(); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java index d833a796cbecc..824f02ca7ccbb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java @@ -73,7 +73,6 @@ ConditionEvaluatorSupplier toEvaluator(ToEvaluator toEvaluator) { "ip", "keyword", "long", - "text", "unsigned_long", "version" }, description = """ @@ -195,12 +194,12 @@ protected TypeResolution resolveType() { private TypeResolution resolveValueType(Expression value, int position) { if (dataType == null || dataType == NULL) { 
- dataType = value.dataType(); + dataType = value.dataType().noText(); return TypeResolution.TYPE_RESOLVED; } return TypeResolutions.isType( value, - t -> t == dataType, + t -> t.noText() == dataType, sourceText(), TypeResolutions.ParamOrdinal.fromIndex(position), dataType.typeName() diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java index aad2d37d414b8..abc2ea85198fa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java @@ -43,7 +43,7 @@ public class Greatest extends EsqlScalarFunction implements OptionalArgument { private DataType dataType; @FunctionInfo( - returnType = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "text", "version" }, + returnType = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "version" }, description = "Returns the maximum value from multiple columns. This is similar to <>\n" + "except it is intended to run on multiple columns at once.", note = "When run on `keyword` or `text` fields, this returns the last string in alphabetical order. 
" @@ -104,12 +104,12 @@ protected TypeResolution resolveType() { for (int position = 0; position < children().size(); position++) { Expression child = children().get(position); if (dataType == null || dataType == NULL) { - dataType = child.dataType(); + dataType = child.dataType().noText(); continue; } TypeResolution resolution = TypeResolutions.isType( child, - t -> t == dataType, + t -> t.noText() == dataType, sourceText(), TypeResolutions.ParamOrdinal.fromIndex(position), dataType.typeName() diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java index 70ba9319385f3..a49fff0aa888b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java @@ -43,7 +43,7 @@ public class Least extends EsqlScalarFunction implements OptionalArgument { private DataType dataType; @FunctionInfo( - returnType = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "text", "version" }, + returnType = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "version" }, description = "Returns the minimum value from multiple columns. 
" + "This is similar to <> except it is intended to run on multiple columns at once.", examples = @Example(file = "math", tag = "least") @@ -102,12 +102,12 @@ protected TypeResolution resolveType() { for (int position = 0; position < children().size(); position++) { Expression child = children().get(position); if (dataType == null || dataType == NULL) { - dataType = child.dataType(); + dataType = child.dataType().noText(); continue; } TypeResolution resolution = TypeResolutions.isType( child, - t -> t == dataType, + t -> t.noText() == dataType, sourceText(), TypeResolutions.ParamOrdinal.fromIndex(position), dataType.typeName() diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppend.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppend.java index 72d96a86d31eb..bcd6f4c30bf8a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppend.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppend.java @@ -62,7 +62,6 @@ public class MvAppend extends EsqlScalarFunction implements EvaluatorMapper { "ip", "keyword", "long", - "text", "version" }, description = "Concatenates values of two multi-value fields." 
) @@ -134,12 +133,12 @@ protected TypeResolution resolveType() { if (resolution.unresolved()) { return resolution; } - dataType = field1.dataType(); + dataType = field1.dataType().noText(); if (dataType == DataType.NULL) { - dataType = field2.dataType(); + dataType = field2.dataType().noText(); return isType(field2, DataType::isRepresentable, sourceText(), SECOND, "representable"); } - return isType(field2, t -> t == dataType, sourceText(), SECOND, dataType.typeName()); + return isType(field2, t -> t.noText() == dataType, sourceText(), SECOND, dataType.typeName()); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java index 34b89b4f78997..9a2b041fafeb6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java @@ -46,7 +46,6 @@ public class MvDedupe extends AbstractMultivalueFunction { "ip", "keyword", "long", - "text", "version" }, description = "Remove duplicate values from a multivalued field.", note = "`MV_DEDUPE` may, but won't always, sort the values in the column.", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java index d5d203e7bb3d1..957c74883ffdf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java @@ -53,7 +53,6 @@ public class MvFirst extends AbstractMultivalueFunction { "ip", "keyword", "long", 
- "text", "unsigned_long", "version" }, description = """ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java index 21487f14817cd..fedbc1934d1be 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java @@ -53,7 +53,6 @@ public class MvLast extends AbstractMultivalueFunction { "ip", "keyword", "long", - "text", "unsigned_long", "version" }, description = """ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java index 6a53c652d3420..5386a9e3ef763 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java @@ -36,7 +36,7 @@ public class MvMax extends AbstractMultivalueFunction { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "MvMax", MvMax::new); @FunctionInfo( - returnType = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" }, + returnType = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "unsigned_long", "version" }, description = "Converts a multivalued expression into a single valued column containing the maximum value.", examples = { @Example(file = "math", tag = "mv_max"), diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java index 4cc83c99b2c08..a2b3c53f322ba 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java @@ -36,7 +36,7 @@ public class MvMin extends AbstractMultivalueFunction { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "MvMin", MvMin::new); @FunctionInfo( - returnType = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" }, + returnType = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "unsigned_long", "version" }, description = "Converts a multivalued expression into a single valued column containing the minimum value.", examples = { @Example(file = "math", tag = "mv_min"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java index ef562c339dfd9..f4f9679dc3704 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java @@ -67,7 +67,6 @@ public class MvSlice extends EsqlScalarFunction implements OptionalArgument, Eva "ip", "keyword", "long", - "text", "version" }, description = """ Returns a subset of the multivalued field using the start and end index values. 
@@ -240,7 +239,7 @@ protected NodeInfo info() { @Override public DataType dataType() { - return field.dataType(); + return field.dataType().noText(); } static int adjustIndex(int oldOffset, int fieldValueCount, int first) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java index 5ca5618bf2a54..2286a1357ced8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java @@ -69,7 +69,7 @@ public class MvSort extends EsqlScalarFunction implements OptionalArgument, Vali private static final String INVALID_ORDER_ERROR = "Invalid order value in [{}], expected one of [{}, {}] but got [{}]"; @FunctionInfo( - returnType = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "text", "version" }, + returnType = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "version" }, description = "Sorts a multivalued field in lexicographical order.", examples = @Example(file = "ints", tag = "mv_sort") ) @@ -226,7 +226,7 @@ protected NodeInfo info() { @Override public DataType dataType() { - return field.dataType(); + return field.dataType().noText(); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java index 6b9c8d0da025b..52686430ca5b5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -61,7 
+61,6 @@ public class Coalesce extends EsqlScalarFunction implements OptionalArgument { "ip", "keyword", "long", - "text", "version" }, description = "Returns the first of its arguments that is not null. If all arguments are null, it returns `null`.", examples = { @Example(file = "null", tag = "coalesce") } @@ -145,12 +144,12 @@ protected TypeResolution resolveType() { for (int position = 0; position < children().size(); position++) { if (dataType == null || dataType == NULL) { - dataType = children().get(position).dataType(); + dataType = children().get(position).dataType().noText(); continue; } TypeResolution resolution = TypeResolutions.isType( children().get(position), - t -> t == dataType, + t -> t.noText() == dataType, sourceText(), TypeResolutions.ParamOrdinal.fromIndex(position), dataType.typeName() diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java index 8a4a5f4d841a5..0b7233f10b454 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java @@ -34,7 +34,7 @@ public class LTrim extends UnaryScalarFunction { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "LTrim", LTrim::new); @FunctionInfo( - returnType = { "keyword", "text" }, + returnType = { "keyword" }, description = "Removes leading whitespaces from a string.", examples = @Example(file = "string", tag = "ltrim") ) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java index b79e1adf99a20..80809a444f5e8 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java @@ -34,7 +34,7 @@ public class RTrim extends UnaryScalarFunction { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "RTrim", RTrim::new); @FunctionInfo( - returnType = { "keyword", "text" }, + returnType = { "keyword" }, description = "Removes trailing whitespaces from a string.", examples = @Example(file = "string", tag = "rtrim") ) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Reverse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Reverse.java index e161566838cd9..02787999f24f7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Reverse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Reverse.java @@ -37,7 +37,7 @@ public class Reverse extends UnaryScalarFunction { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Reverse", Reverse::new); @FunctionInfo( - returnType = { "keyword", "text" }, + returnType = { "keyword" }, description = "Returns a new string representing the input string in reverse order.", examples = { @Example(file = "string", tag = "reverse"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLower.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLower.java index c475469488d7b..5f2bbcde52166 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLower.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLower.java @@ -39,7 +39,7 @@ public class ToLower extends EsqlConfigurationFunction { private final Expression field; @FunctionInfo( - returnType = { "keyword", "text" }, + returnType = { "keyword" }, description = "Returns a new string representing the input string converted to lower case.", examples = @Example(file = "string", tag = "to_lower") ) @@ -72,7 +72,7 @@ public String getWriteableName() { @Override public DataType dataType() { - return field.dataType(); + return DataType.KEYWORD; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpper.java index 1b5084a7916ef..7fdd5e39f96f3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpper.java @@ -39,7 +39,7 @@ public class ToUpper extends EsqlConfigurationFunction { private final Expression field; @FunctionInfo( - returnType = { "keyword", "text" }, + returnType = { "keyword" }, description = "Returns a new string representing the input string converted to upper case.", examples = @Example(file = "string", tag = "to_upper") ) @@ -72,7 +72,7 @@ public String getWriteableName() { @Override public DataType dataType() { - return field.dataType(); + return DataType.KEYWORD; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java index 1fe7529caa2da..ef0afc3a4e6cb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java @@ -34,7 +34,7 @@ public final class Trim extends UnaryScalarFunction { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Trim", Trim::new); @FunctionInfo( - returnType = { "keyword", "text" }, + returnType = { "keyword" }, description = "Removes leading and trailing whitespaces from a string.", examples = @Example(file = "string", tag = "trim") ) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java index edc3081a33681..05a658ec411f3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java @@ -117,7 +117,6 @@ public class EsqlDataTypeConverter { entry(LONG, ToLong::new), // ToRadians, typeless entry(KEYWORD, ToString::new), - entry(TEXT, ToString::new), entry(UNSIGNED_LONG, ToUnsignedLong::new), entry(VERSION, ToVersion::new), entry(DATE_PERIOD, ToDatePeriod::new), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java index 8867e7425a92e..3cafd42b731f6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java @@ -88,9 +88,6 @@ public void testInlineCast() throws IOException { Collections.sort(namesAndAliases); for (String nameOrAlias : namesAndAliases) { DataType expectedType = DataType.fromNameOrAlias(nameOrAlias); - if (expectedType == DataType.TEXT) { - expectedType = DataType.KEYWORD; - } if 
(EsqlDataTypeConverter.converterFunctionFactory(expectedType) == null) { continue; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index 2ba175657b6c2..c12e0a8684ba9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -1435,7 +1435,7 @@ public static TestCase typeError(List data, String expectedTypeError) this.source = Source.EMPTY; this.data = data; this.evaluatorToString = evaluatorToString; - this.expectedType = expectedType; + this.expectedType = expectedType == null ? null : expectedType.noText(); @SuppressWarnings("unchecked") Matcher downcast = (Matcher) matcher; this.matcher = downcast; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java index ce2bf7e262ae9..9756804a1ec0f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java @@ -128,7 +128,7 @@ public static Iterable parameters() { return new TestCaseSupplier.TestCase( List.of(TestCaseSupplier.TypedData.multiRow(List.of(value), DataType.TEXT, "field")), "Max[field=Attribute[channel=0]]", - DataType.TEXT, + DataType.KEYWORD, equalTo(value) ); }), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java index 7250072cd2003..171181496c889 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java @@ -128,7 +128,7 @@ public static Iterable parameters() { return new TestCaseSupplier.TestCase( List.of(TestCaseSupplier.TypedData.multiRow(List.of(value), DataType.TEXT, "field")), "Min[field=Attribute[channel=0]]", - DataType.TEXT, + DataType.KEYWORD, equalTo(value) ); }), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java index fbb7c691b1d94..51b1c72c6e287 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java @@ -151,6 +151,33 @@ private static void twoAndThreeArgs( return testCase(type, typedData, lhsOrRhs ? lhs : rhs, toStringMatcher(1, false), false, null, addWarnings(warnings)); }) ); + if (type.noText() == DataType.KEYWORD) { + DataType otherType = type == DataType.KEYWORD ? DataType.TEXT : DataType.KEYWORD; + suppliers.add( + new TestCaseSupplier( + TestCaseSupplier.nameFrom(Arrays.asList(cond, type, otherType)), + List.of(DataType.BOOLEAN, type, otherType), + () -> { + Object lhs = randomLiteral(type).value(); + Object rhs = randomLiteral(otherType).value(); + List typedData = List.of( + cond(cond, "cond"), + new TestCaseSupplier.TypedData(lhs, type, "lhs"), + new TestCaseSupplier.TypedData(rhs, otherType, "rhs") + ); + return testCase( + type, + typedData, + lhsOrRhs ? 
lhs : rhs, + toStringMatcher(1, false), + false, + null, + addWarnings(warnings) + ); + } + ) + ); + } if (lhsOrRhs) { suppliers.add( new TestCaseSupplier( @@ -222,7 +249,6 @@ private static void twoAndThreeArgs( ) ); } - suppliers.add( new TestCaseSupplier( "partial foldable " + TestCaseSupplier.nameFrom(Arrays.asList(cond, type, type)), @@ -292,6 +318,33 @@ private static void twoAndThreeArgs( } ) ); + if (type.noText() == DataType.KEYWORD) { + DataType otherType = type == DataType.KEYWORD ? DataType.TEXT : DataType.KEYWORD; + suppliers.add( + new TestCaseSupplier( + TestCaseSupplier.nameFrom(Arrays.asList(DataType.NULL, type, otherType)), + List.of(DataType.NULL, type, otherType), + () -> { + Object lhs = randomLiteral(type).value(); + Object rhs = randomLiteral(otherType).value(); + List typedData = List.of( + new TestCaseSupplier.TypedData(null, DataType.NULL, "cond"), + new TestCaseSupplier.TypedData(lhs, type, "lhs"), + new TestCaseSupplier.TypedData(rhs, otherType, "rhs") + ); + return testCase( + type, + typedData, + lhsOrRhs ? 
lhs : rhs, + startsWith("CaseEagerEvaluator[conditions=[ConditionEvaluator[condition="), + false, + null, + addWarnings(warnings) + ); + } + ) + ); + } } suppliers.add( new TestCaseSupplier( @@ -804,7 +857,7 @@ private static String typeErrorMessage(boolean includeOrdinal, List ty if (types.get(0) != DataType.BOOLEAN && types.get(0) != DataType.NULL) { return typeErrorMessage(includeOrdinal, types, 0, "boolean"); } - DataType mainType = types.get(1); + DataType mainType = types.get(1).noText(); for (int i = 2; i < types.size(); i++) { if (i % 2 == 0 && i != types.size() - 1) { // condition @@ -813,7 +866,7 @@ private static String typeErrorMessage(boolean includeOrdinal, List ty } } else { // value - if (types.get(i) != mainType) { + if (types.get(i).noText() != mainType) { return typeErrorMessage(includeOrdinal, types, i, mainType.typeName()); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerTests.java index 7af1c180fd7b9..1f564ecb87f1e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerTests.java @@ -47,7 +47,7 @@ public static Iterable parameters() { suppliers.add(supplier("text unicode", DataType.TEXT, () -> randomUnicodeOfLengthBetween(1, 10))); // add null as parameter - return parameterSuppliersFromTypedDataWithDefaultChecks(false, suppliers, (v, p) -> "string"); + return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "string"); } public void testRandomLocale() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperTests.java index c8bbe03bde411..7c136c3bb83c2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperTests.java @@ -47,7 +47,7 @@ public static Iterable parameters() { suppliers.add(supplier("text unicode", DataType.TEXT, () -> randomUnicodeOfLengthBetween(1, 10))); // add null as parameter - return parameterSuppliersFromTypedDataWithDefaultChecks(false, suppliers, (v, p) -> "string"); + return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "string"); } public void testRandomLocale() { diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml index 88ef03a22d70c..55bd39bdd73cc 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml @@ -392,7 +392,7 @@ setup: - method: POST path: /_query parameters: [method, path, parameters, capabilities] - capabilities: [fn_reverse] + capabilities: [fn_reverse, functions_never_emit_text] reason: "reverse not yet added" - do: allowed_warnings_regex: @@ -402,10 +402,10 @@ setup: query: 'FROM test | SORT name | EVAL job_reversed = REVERSE(job), tag_reversed = REVERSE(tag) | KEEP job_reversed, tag_reversed' - match: { columns.0.name: "job_reversed" } - - match: { columns.0.type: "text" } + - match: { columns.0.type: "keyword" } - match: { columns.1.name: "tag_reversed" } - - match: { columns.1.type: "text" } + - match: { columns.1.type: "keyword" } - length: { values: 2 } - match: { values.0: [ "rotceriD TI", "rab oof" ] } @@ -573,7 +573,6 @@ setup: body: query: 'FROM test | STATS job = VALUES(job) | 
EVAL job = MV_SORT(job) | LIMIT 1' - match: { columns.0.name: "job" } - - match: { columns.0.type: "text" } - length: { values: 1 } - match: { values.0: [ [ "IT Director", "Payroll Specialist" ] ] } @@ -592,7 +591,22 @@ setup: - match: { columns.0.name: "tag" } - match: { columns.0.type: "text" } - match: { columns.1.name: "job" } - - match: { columns.1.type: "text" } - length: { values: 2 } - match: { values.0: [ "baz", [ "Other", "Payroll Specialist" ] ] } - match: { values.1: [ "foo bar", "IT Director" ] } + +--- +"remove text typecast": + - requires: + capabilities: + - method: POST + path: /_query + parameters: [ method, path, parameters, capabilities ] + capabilities: [ functions_never_emit_text ] + reason: "Disabling ::text was done in 8.17 as part of removing all possibilities to emit text" + + - do: + catch: /Unsupported conversion to type \[TEXT\]/ + esql.query: + body: + query: 'FROM test | EVAL tag = name::text | KEEP name' From 16f61b460033baed6e7ae725fad96860d7a7f5e5 Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Fri, 25 Oct 2024 10:15:56 +0200 Subject: [PATCH 407/449] Increase assert timeout for DeprecationHttpIT to reduce risk of failing when test cluster is slow to warm up (fixes #115179) (#115621) --- .../xpack/deprecation/DeprecationHttpIT.java | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java b/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java index 3fb9573dd7b62..4a17c2abbd797 100644 --- a/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java +++ b/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java @@ -121,7 +121,7 @@ public void testDeprecatedSettingsReturnWarnings() throws Exception { List> documents = 
DeprecationTestUtils.getIndexedDeprecations(client(), xOpaqueId()); logger.warn(documents); assertThat(documents, hasSize(2)); - }, 30, TimeUnit.SECONDS); + }, 45, TimeUnit.SECONDS); } finally { Response response = cleanupSettings(); List warningHeaders = getWarningHeaders(response.getHeaders()); @@ -245,7 +245,7 @@ private void doTestDeprecationWarningsAppearInHeaders(String xOpaqueId) throws E var documents = DeprecationTestUtils.getIndexedDeprecations(client(), xOpaqueId); logger.warn(documents); assertThat(documents, hasSize(headerMatchers.size())); - }, 30, TimeUnit.SECONDS); + }, 45, TimeUnit.SECONDS); } public void testDeprecationRouteThrottling() throws Exception { @@ -275,7 +275,7 @@ public void testDeprecationRouteThrottling() throws Exception { ) ) ); - }, 30, TimeUnit.SECONDS); + }, 45, TimeUnit.SECONDS); } @@ -303,7 +303,7 @@ public void testDisableDeprecationLogIndexing() throws Exception { ) ) ); - }, 30, TimeUnit.SECONDS); + }, 45, TimeUnit.SECONDS); } finally { configureWriteDeprecationLogsToIndex(null); } @@ -369,7 +369,7 @@ public void testDeprecationMessagesCanBeIndexed() throws Exception { ) ) ); - }, 30, TimeUnit.SECONDS); + }, 45, TimeUnit.SECONDS); } @@ -414,7 +414,7 @@ public void testDeprecationCriticalWarnMessagesCanBeIndexed() throws Exception { ) ) ); - }, 30, TimeUnit.SECONDS); + }, 45, TimeUnit.SECONDS); } @@ -473,7 +473,7 @@ public void testDeprecationWarnMessagesCanBeIndexed() throws Exception { ) ) ); - }, 30, TimeUnit.SECONDS); + }, 45, TimeUnit.SECONDS); } @@ -504,7 +504,7 @@ public void testDeprecateAndKeep() throws Exception { ) ) ); - }, 30, TimeUnit.SECONDS); + }, 45, TimeUnit.SECONDS); } public void testReplacesInCurrentVersion() throws Exception { @@ -534,7 +534,7 @@ public void testReplacesInCurrentVersion() throws Exception { ) ) ); - }, 30, TimeUnit.SECONDS); + }, 45, TimeUnit.SECONDS); } public void testReplacesInCompatibleVersion() throws Exception { @@ -579,7 +579,7 @@ public void testReplacesInCompatibleVersion() 
throws Exception { ) ) ); - }, 30, TimeUnit.SECONDS); + }, 45, TimeUnit.SECONDS); } /** @@ -649,7 +649,7 @@ public void testCompatibleMessagesCanBeIndexed() throws Exception { ) ) ); - }, 30, TimeUnit.SECONDS); + }, 45, TimeUnit.SECONDS); } @@ -690,7 +690,7 @@ public void testDeprecationIndexingCacheReset() throws Exception { ) ) ); - }, 30, TimeUnit.SECONDS); + }, 45, TimeUnit.SECONDS); } From 9394e88c0f00e58e6b49e7607fb70bde119e4e1e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Fri, 25 Oct 2024 10:18:01 +0200 Subject: [PATCH 408/449] [DOCS] Updates inference processor docs. (#115566) --- docs/reference/ingest/processors/inference.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/ingest/processors/inference.asciidoc b/docs/reference/ingest/processors/inference.asciidoc index 4699f634afe37..9c6f0592a1d91 100644 --- a/docs/reference/ingest/processors/inference.asciidoc +++ b/docs/reference/ingest/processors/inference.asciidoc @@ -16,7 +16,7 @@ ingested in the pipeline. [options="header"] |====== | Name | Required | Default | Description -| `model_id` . | yes | - | (String) The ID or alias for the trained model, or the ID of the deployment. +| `model_id` . | yes | - | (String) An inference ID, a model deployment ID, a trained model ID or an alias. | `input_output` | no | - | (List) Input fields for {infer} and output (destination) fields for the {infer} results. This option is incompatible with the `target_field` and `field_map` options. | `target_field` | no | `ml.inference.` | (String) Field added to incoming documents to contain results objects. | `field_map` | no | If defined the model's default field map | (Object) Maps the document field names to the known field names of the model. This mapping takes precedence over any default mappings provided in the model configuration. 
From 11401a35d41c723e98c0dcc09f4874c9c842d349 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 25 Oct 2024 19:45:39 +1100 Subject: [PATCH 409/449] Mute org.elasticsearch.oldrepos.OldRepositoryAccessIT testOldRepoAccess #115631 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 084bf27d6a11b..5c94c0aff60b6 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -279,6 +279,9 @@ tests: - class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT method: test {yaml=indices.create/10_basic/Create lookup index} issue: https://github.com/elastic/elasticsearch/issues/115605 +- class: org.elasticsearch.oldrepos.OldRepositoryAccessIT + method: testOldRepoAccess + issue: https://github.com/elastic/elasticsearch/issues/115631 # Examples: # From 452ca351d3d0887db96c124dd83bb755e6e5894f Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Fri, 25 Oct 2024 11:19:31 +0200 Subject: [PATCH 410/449] [DOCS] Test trivial commit (#115579) (#115628) (cherry picked from commit e642dd84815ea476d1e7b99f26f65cb5099d4e39) --- .../search-your-data/search-application-overview.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/search/search-your-data/search-application-overview.asciidoc b/docs/reference/search/search-your-data/search-application-overview.asciidoc index e12b55911740b..13cc97bb8aeab 100644 --- a/docs/reference/search/search-your-data/search-application-overview.asciidoc +++ b/docs/reference/search/search-your-data/search-application-overview.asciidoc @@ -74,7 +74,7 @@ To create a new search application in {kib}: . Name your search application. . Select *Create*. -Your search application should now be available in the list of search applications. +Your search application should now be available in the list. 
//[.screenshot] // image::../../images/search-applications/search-applications-create.png[Create search application screen] From b83042aa432776e4e1bcfe5c3c2f17ff2467a5e5 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 25 Oct 2024 20:30:37 +1100 Subject: [PATCH 411/449] Mute org.elasticsearch.xpack.esql.analysis.AnalyzerTests testMvAppendValidation #115636 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 5c94c0aff60b6..4869b669f6220 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -282,6 +282,9 @@ tests: - class: org.elasticsearch.oldrepos.OldRepositoryAccessIT method: testOldRepoAccess issue: https://github.com/elastic/elasticsearch/issues/115631 +- class: org.elasticsearch.xpack.esql.analysis.AnalyzerTests + method: testMvAppendValidation + issue: https://github.com/elastic/elasticsearch/issues/115636 # Examples: # From f1de84b51cf753e2bd1e381c0a6858797229b233 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Fri, 25 Oct 2024 11:39:52 +0200 Subject: [PATCH 412/449] [DOCS] Fix casing in servicenow docs config (#115634) --- .../connector/docs/connectors-servicenow.asciidoc | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/reference/connector/docs/connectors-servicenow.asciidoc b/docs/reference/connector/docs/connectors-servicenow.asciidoc index 089a3b405d8a5..a02c418f11d74 100644 --- a/docs/reference/connector/docs/connectors-servicenow.asciidoc +++ b/docs/reference/connector/docs/connectors-servicenow.asciidoc @@ -81,7 +81,7 @@ Comma-separated list of services to fetch data from ServiceNow. 
If the value is - link:https://docs.servicenow.com/bundle/tokyo-it-service-management/page/product/incident-management/concept/c_IncidentManagement.html[Incident] - link:https://docs.servicenow.com/bundle/tokyo-servicenow-platform/page/use/service-catalog-requests/task/t_AddNewRequestItems.html[Requested Item] - link:https://docs.servicenow.com/bundle/tokyo-customer-service-management/page/product/customer-service-management/task/t_SearchTheKnowledgeBase.html[Knowledge] -- link:https://docs.servicenow.com/bundle/tokyo-it-service-management/page/product/change-management/task/t_CreateAChange.html[Change Request] +- link:https://docs.servicenow.com/bundle/tokyo-it-service-management/page/product/change-management/task/t_CreateAChange.html[Change request] + [NOTE] ==== @@ -89,7 +89,7 @@ If you have configured a custom service, the `*` value will not fetch data from ==== Default value is `*`. Examples: + - - `User, Incident, Requested Item, Knowledge, Change Request` + - `User, Incident, Requested Item, Knowledge, Change request` - `*` Enable document level security:: @@ -139,7 +139,7 @@ For default services, connectors use the following roles to find users who have | Knowledge | `admin`, `knowledge`, `knowledge_manager`, `knowledge_admin` -| Change Request | `admin`, `sn_change_read`, `itil` +| Change request | `admin`, `sn_change_read`, `itil` |=== For services other than these defaults, the connector iterates over access controls with `read` operations and finds the respective roles for those services. @@ -305,7 +305,7 @@ Comma-separated list of services to fetch data from ServiceNow. 
If the value is - link:https://docs.servicenow.com/bundle/tokyo-it-service-management/page/product/incident-management/concept/c_IncidentManagement.html[Incident] - link:https://docs.servicenow.com/bundle/tokyo-servicenow-platform/page/use/service-catalog-requests/task/t_AddNewRequestItems.html[Requested Item] - link:https://docs.servicenow.com/bundle/tokyo-customer-service-management/page/product/customer-service-management/task/t_SearchTheKnowledgeBase.html[Knowledge] -- link:https://docs.servicenow.com/bundle/tokyo-it-service-management/page/product/change-management/task/t_CreateAChange.html[Change Request] +- link:https://docs.servicenow.com/bundle/tokyo-it-service-management/page/product/change-management/task/t_CreateAChange.html[Change request] + [NOTE] ==== @@ -313,7 +313,7 @@ If you have configured a custom service, the `*` value will not fetch data from ==== Default value is `*`. Examples: + - - `User, Incident, Requested Item, Knowledge, Change Request` + - `User, Incident, Requested Item, Knowledge, Change request` - `*` `retry_count`:: @@ -374,7 +374,7 @@ For default services, connectors use the following roles to find users who have | Knowledge | `admin`, `knowledge`, `knowledge_manager`, `knowledge_admin` -| Change Request | `admin`, `sn_change_read`, `itil` +| Change request | `admin`, `sn_change_read`, `itil` |=== For services other than these defaults, the connector iterates over access controls with `read` operations and finds the respective roles for those services. From 2d854768bc98b34bd4ea8217aced2e1d95140aef Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 25 Oct 2024 12:37:38 +0200 Subject: [PATCH 413/449] Optimize threading in AbstractSearchAsyncAction (#113230) Forking when an action completes on the current thread is needlessly heavy handed in preventing stack-overflows. Also, we don't need locking/synchronization to deal with a worker-count + queue length problem. 
Both of these allow for non-trivial optimization even in the current execution model, also this change helps with moving to a more efficient execution model by saving needless forking to the search pool in particular. -> refactored the code to never fork but instead avoid stack-depth issues through use of a `SubscribableListener` -> replaced our home brew queue and semaphore combination by JDK primitives which saves blocking synchronization on task start and completion. --- .../search/AbstractSearchAsyncAction.java | 220 ++++++++---------- 1 file changed, 94 insertions(+), 126 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index 0c585c705dcd0..cf25c5730d341 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -20,14 +20,13 @@ import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.search.TransportSearchAction.SearchTimeProvider; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.AtomicArray; -import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.shard.ShardId; @@ -43,7 +42,6 @@ import org.elasticsearch.tasks.TaskCancelledException; import 
org.elasticsearch.transport.Transport; -import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -51,9 +49,12 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Executor; +import java.util.concurrent.LinkedTransferQueue; +import java.util.concurrent.Semaphore; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiFunction; +import java.util.function.Consumer; import java.util.stream.Collectors; import static org.elasticsearch.core.Strings.format; @@ -238,7 +239,12 @@ public final void run() { assert shardRoutings.skip() == false; assert shardIndexMap.containsKey(shardRoutings); int shardIndex = shardIndexMap.get(shardRoutings); - performPhaseOnShard(shardIndex, shardRoutings, shardRoutings.nextOrNull()); + final SearchShardTarget routing = shardRoutings.nextOrNull(); + if (routing == null) { + failOnUnavailable(shardIndex, shardRoutings); + } else { + performPhaseOnShard(shardIndex, shardRoutings, routing); + } } } } @@ -258,7 +264,7 @@ private static boolean assertExecuteOnStartThread() { int index = 0; assert stackTraceElements[index++].getMethodName().equals("getStackTrace"); assert stackTraceElements[index++].getMethodName().equals("assertExecuteOnStartThread"); - assert stackTraceElements[index++].getMethodName().equals("performPhaseOnShard"); + assert stackTraceElements[index++].getMethodName().equals("failOnUnavailable"); if (stackTraceElements[index].getMethodName().equals("performPhaseOnShard")) { assert stackTraceElements[index].getClassName().endsWith("CanMatchPreFilterSearchPhase"); index++; @@ -277,65 +283,53 @@ private static boolean assertExecuteOnStartThread() { } protected void performPhaseOnShard(final int shardIndex, final SearchShardIterator shardIt, final SearchShardTarget shard) { - /* - * We capture the thread that this phase is starting on. 
When we are called back after executing the phase, we are either on the - * same thread (because we never went async, or the same thread was selected from the thread pool) or a different thread. If we - * continue on the same thread in the case that we never went async and this happens repeatedly we will end up recursing deeply and - * could stack overflow. To prevent this, we fork if we are called back on the same thread that execution started on and otherwise - * we can continue (cf. InitialSearchPhase#maybeFork). - */ - if (shard == null) { - assert assertExecuteOnStartThread(); - SearchShardTarget unassignedShard = new SearchShardTarget(null, shardIt.shardId(), shardIt.getClusterAlias()); - onShardFailure(shardIndex, unassignedShard, shardIt, new NoShardAvailableActionException(shardIt.shardId())); + if (throttleConcurrentRequests) { + var pendingExecutions = pendingExecutionsPerNode.computeIfAbsent( + shard.getNodeId(), + n -> new PendingExecutions(maxConcurrentRequestsPerNode) + ); + pendingExecutions.submit(l -> doPerformPhaseOnShard(shardIndex, shardIt, shard, l)); } else { - final PendingExecutions pendingExecutions = throttleConcurrentRequests - ? 
pendingExecutionsPerNode.computeIfAbsent(shard.getNodeId(), n -> new PendingExecutions(maxConcurrentRequestsPerNode)) - : null; - Runnable r = () -> { - final Thread thread = Thread.currentThread(); - try { - executePhaseOnShard(shardIt, shard, new SearchActionListener<>(shard, shardIndex) { - @Override - public void innerOnResponse(Result result) { - try { - onShardResult(result, shardIt); - } catch (Exception exc) { - onShardFailure(shardIndex, shard, shardIt, exc); - } finally { - executeNext(pendingExecutions, thread); - } - } + doPerformPhaseOnShard(shardIndex, shardIt, shard, () -> {}); + } + } - @Override - public void onFailure(Exception t) { - try { - onShardFailure(shardIndex, shard, shardIt, t); - } finally { - executeNext(pendingExecutions, thread); - } - } - }); - } catch (final Exception e) { - try { - /* - * It is possible to run into connection exceptions here because we are getting the connection early and might - * run into nodes that are not connected. In this case, on shard failure will move us to the next shard copy. 
- */ - fork(() -> onShardFailure(shardIndex, shard, shardIt, e)); - } finally { - executeNext(pendingExecutions, thread); + private void doPerformPhaseOnShard(int shardIndex, SearchShardIterator shardIt, SearchShardTarget shard, Releasable releasable) { + try { + executePhaseOnShard(shardIt, shard, new SearchActionListener<>(shard, shardIndex) { + @Override + public void innerOnResponse(Result result) { + try (releasable) { + onShardResult(result, shardIt); + } catch (Exception exc) { + onShardFailure(shardIndex, shard, shardIt, exc); } } - }; - if (throttleConcurrentRequests) { - pendingExecutions.tryRun(r); - } else { - r.run(); + + @Override + public void onFailure(Exception e) { + try (releasable) { + onShardFailure(shardIndex, shard, shardIt, e); + } + } + }); + } catch (final Exception e) { + /* + * It is possible to run into connection exceptions here because we are getting the connection early and might + * run into nodes that are not connected. In this case, on shard failure will move us to the next shard copy. + */ + try (releasable) { + onShardFailure(shardIndex, shard, shardIt, e); } } } + private void failOnUnavailable(int shardIndex, SearchShardIterator shardIt) { + assert assertExecuteOnStartThread(); + SearchShardTarget unassignedShard = new SearchShardTarget(null, shardIt.shardId(), shardIt.getClusterAlias()); + onShardFailure(shardIndex, unassignedShard, shardIt, new NoShardAvailableActionException(shardIt.shardId())); + } + /** * Sends the request to the actual shard. 
* @param shardIt the shards iterator @@ -348,34 +342,6 @@ protected abstract void executePhaseOnShard( SearchActionListener listener ); - protected void fork(final Runnable runnable) { - executor.execute(new AbstractRunnable() { - @Override - public void onFailure(Exception e) { - logger.error(() -> "unexpected error during [" + task + "]", e); - assert false : e; - } - - @Override - public void onRejection(Exception e) { - // avoid leaks during node shutdown by executing on the current thread if the executor shuts down - assert e instanceof EsRejectedExecutionException esre && esre.isExecutorShutdown() : e; - doRun(); - } - - @Override - protected void doRun() { - runnable.run(); - } - - @Override - public boolean isForceExecution() { - // we can not allow a stuffed queue to reject execution here - return true; - } - }); - } - @Override public final void executeNextPhase(SearchPhase currentPhase, SearchPhase nextPhase) { /* This is the main search phase transition where we move to the next phase. If all shards @@ -794,61 +760,63 @@ protected final ShardSearchRequest buildShardSearchRequest(SearchShardIterator s */ protected abstract SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context); - private void executeNext(PendingExecutions pendingExecutions, Thread originalThread) { - executeNext(pendingExecutions == null ? 
null : pendingExecutions.finishAndGetNext(), originalThread); - } - - void executeNext(Runnable runnable, Thread originalThread) { - if (runnable != null) { - assert throttleConcurrentRequests; - if (originalThread == Thread.currentThread()) { - fork(runnable); - } else { - runnable.run(); - } - } - } - private static final class PendingExecutions { - private final int permits; - private int permitsTaken = 0; - private final ArrayDeque queue = new ArrayDeque<>(); + private final Semaphore semaphore; + private final LinkedTransferQueue> queue = new LinkedTransferQueue<>(); PendingExecutions(int permits) { assert permits > 0 : "not enough permits: " + permits; - this.permits = permits; + semaphore = new Semaphore(permits); } - Runnable finishAndGetNext() { - synchronized (this) { - permitsTaken--; - assert permitsTaken >= 0 : "illegal taken permits: " + permitsTaken; + void submit(Consumer task) { + if (semaphore.tryAcquire()) { + executeAndRelease(task); + } else { + queue.add(task); + if (semaphore.tryAcquire()) { + task = pollNextTaskOrReleasePermit(); + if (task != null) { + executeAndRelease(task); + } + } } - return tryQueue(null); + } - void tryRun(Runnable runnable) { - Runnable r = tryQueue(runnable); - if (r != null) { - r.run(); + private void executeAndRelease(Consumer task) { + while (task != null) { + final SubscribableListener onDone = new SubscribableListener<>(); + task.accept(() -> onDone.onResponse(null)); + if (onDone.isDone()) { + // keep going on the current thread, no need to fork + task = pollNextTaskOrReleasePermit(); + } else { + onDone.addListener(new ActionListener<>() { + @Override + public void onResponse(Void unused) { + final Consumer nextTask = pollNextTaskOrReleasePermit(); + if (nextTask != null) { + executeAndRelease(nextTask); + } + } + + @Override + public void onFailure(Exception e) { + assert false : e; + } + }); + return; + } } } - private synchronized Runnable tryQueue(Runnable runnable) { - Runnable toExecute = null; - if 
(permitsTaken < permits) { - permitsTaken++; - toExecute = runnable; - if (toExecute == null) { // only poll if we don't have anything to execute - toExecute = queue.poll(); - } - if (toExecute == null) { - permitsTaken--; - } - } else if (runnable != null) { - queue.add(runnable); + private Consumer pollNextTaskOrReleasePermit() { + var task = queue.poll(); + if (task == null) { + semaphore.release(); } - return toExecute; + return task; } } } From 13e67bdd0803914ac75ec13853828fec1b42d4a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aur=C3=A9lien=20FOUCRET?= Date: Fri, 25 Oct 2024 12:43:13 +0200 Subject: [PATCH 414/449] Refactoring of the KQL grammar. (#115632) --- x-pack/plugin/kql/src/main/antlr/KqlBase.g4 | 95 +- .../plugin/kql/src/main/antlr/KqlBase.tokens | 31 +- .../kql/src/main/antlr/KqlBaseLexer.tokens | 31 +- .../xpack/kql/parser/KqlBase.interp | 28 +- .../xpack/kql/parser/KqlBaseBaseListener.java | 56 +- .../xpack/kql/parser/KqlBaseBaseVisitor.java | 30 +- .../xpack/kql/parser/KqlBaseLexer.interp | 22 +- .../xpack/kql/parser/KqlBaseLexer.java | 255 ++--- .../xpack/kql/parser/KqlBaseListener.java | 84 +- .../xpack/kql/parser/KqlBaseParser.java | 1010 ++++++++++------- .../xpack/kql/parser/KqlBaseVisitor.java | 44 +- .../kql/src/test/resources/supported-queries | 9 + .../src/test/resources/unsupported-queries | 8 - 13 files changed, 900 insertions(+), 803 deletions(-) diff --git a/x-pack/plugin/kql/src/main/antlr/KqlBase.g4 b/x-pack/plugin/kql/src/main/antlr/KqlBase.g4 index cffa2db9f959a..dbf7c1979796a 100644 --- a/x-pack/plugin/kql/src/main/antlr/KqlBase.g4 +++ b/x-pack/plugin/kql/src/main/antlr/KqlBase.g4 @@ -26,70 +26,68 @@ topLevelQuery ; query - : query (AND | OR) query #booleanQuery - | NOT subQuery=simpleQuery #notQuery - | simpleQuery #defaultQuery + : query operator=(AND | OR) query #booleanQuery + | NOT subQuery=simpleQuery #notQuery + | simpleQuery #defaultQuery ; simpleQuery : nestedQuery - | expression | parenthesizedQuery - ; - -expression - : 
fieldTermQuery - | fieldRangeQuery + | matchAllQuery + | existsQuery + | rangeQuery + | fieldQuery + | fieldLessQuery ; nestedQuery : fieldName COLON LEFT_CURLY_BRACKET query RIGHT_CURLY_BRACKET ; -parenthesizedQuery: - LEFT_PARENTHESIS query RIGHT_PARENTHESIS; - -fieldRangeQuery - : fieldName operator=OP_COMPARE rangeQueryValue +matchAllQuery + : (WILDCARD COLON)? WILDCARD ; -fieldTermQuery - : (fieldName COLON)? termQueryValue +parenthesizedQuery + : LEFT_PARENTHESIS query RIGHT_PARENTHESIS ; -fieldName - : wildcardExpression - | unquotedLiteralExpression - | quotedStringExpression +rangeQuery + : fieldName operator=(OP_LESS|OP_LESS_EQ|OP_MORE|OP_MORE_EQ) rangeQueryValue ; rangeQueryValue - : unquotedLiteralExpression - | quotedStringExpression - ; - -termQueryValue - : wildcardExpression - | quotedStringExpression - | termValue=unquotedLiteralExpression - | groupingTermExpression; + : (UNQUOTED_LITERAL|WILDCARD)+ + | QUOTED_STRING + ; -groupingTermExpression - : LEFT_PARENTHESIS unquotedLiteralExpression RIGHT_PARENTHESIS +existsQuery + :fieldName COLON WILDCARD ; -unquotedLiteralExpression - : UNQUOTED_LITERAL+ +fieldQuery + : fieldName COLON fieldQueryValue + | fieldName COLON LEFT_PARENTHESIS fieldQueryValue RIGHT_PARENTHESIS ; -quotedStringExpression - : QUOTED_STRING +fieldLessQuery + : fieldQueryValue + | LEFT_PARENTHESIS fieldQueryValue RIGHT_PARENTHESIS ; -wildcardExpression - : WILDCARD -; +fieldQueryValue + : (AND|OR)? (UNQUOTED_LITERAL | WILDCARD )+ + | (UNQUOTED_LITERAL | WILDCARD )+ (AND|OR)? 
+ | (NOT|AND|OR) + | QUOTED_STRING + ; +fieldName + : value=UNQUOTED_LITERAL+ + | value=QUOTED_STRING + | value=WILDCARD + ; DEFAULT_SKIP: WHITESPACE -> skip; @@ -98,31 +96,34 @@ OR: 'or'; NOT: 'not'; COLON: ':'; -OP_COMPARE: OP_LESS | OP_MORE | OP_LESS_EQ | OP_MORE_EQ; +OP_LESS: '<'; +OP_LESS_EQ: '<='; +OP_MORE: '>'; +OP_MORE_EQ: '>='; LEFT_PARENTHESIS: '('; RIGHT_PARENTHESIS: ')'; LEFT_CURLY_BRACKET: '{'; RIGHT_CURLY_BRACKET: '}'; -UNQUOTED_LITERAL: WILDCARD* UNQUOTED_LITERAL_CHAR+ WILDCARD*; +UNQUOTED_LITERAL: UNQUOTED_LITERAL_CHAR+; QUOTED_STRING: '"'QUOTED_CHAR*'"'; -WILDCARD: WILDCARD_CHAR+; +WILDCARD: WILDCARD_CHAR; fragment WILDCARD_CHAR: '*'; -fragment OP_LESS: '<'; -fragment OP_LESS_EQ: '<='; -fragment OP_MORE: '>'; -fragment OP_MORE_EQ: '>='; fragment UNQUOTED_LITERAL_CHAR + : WILDCARD_CHAR* UNQUOTED_LITERAL_BASE_CHAR WILDCARD_CHAR* + | WILDCARD_CHAR WILDCARD_CHAR+ + ; + +fragment UNQUOTED_LITERAL_BASE_CHAR : ESCAPED_WHITESPACE | ESCAPED_SPECIAL_CHAR | ESCAPE_UNICODE_SEQUENCE | '\\' (AND | OR | NOT) - | WILDCARD_CHAR UNQUOTED_LITERAL_CHAR | NON_SPECIAL_CHAR ; @@ -135,7 +136,7 @@ fragment QUOTED_CHAR fragment WHITESPACE: [ \t\n\r\u3000]; fragment ESCAPED_WHITESPACE: '\\r' | '\\t' | '\\n'; -fragment NON_SPECIAL_CHAR: ~[ \\():<>"*{}]; +fragment NON_SPECIAL_CHAR: ~[ \n\r\t\u3000\\():<>"*{}]; fragment ESCAPED_SPECIAL_CHAR: '\\'[ \\():<>"*{}]; fragment ESCAPED_QUOTE: '\\"'; diff --git a/x-pack/plugin/kql/src/main/antlr/KqlBase.tokens b/x-pack/plugin/kql/src/main/antlr/KqlBase.tokens index 268ae0613b9f0..f26b6b9c3da55 100644 --- a/x-pack/plugin/kql/src/main/antlr/KqlBase.tokens +++ b/x-pack/plugin/kql/src/main/antlr/KqlBase.tokens @@ -3,19 +3,26 @@ AND=2 OR=3 NOT=4 COLON=5 -OP_COMPARE=6 -LEFT_PARENTHESIS=7 -RIGHT_PARENTHESIS=8 -LEFT_CURLY_BRACKET=9 -RIGHT_CURLY_BRACKET=10 -UNQUOTED_LITERAL=11 -QUOTED_STRING=12 -WILDCARD=13 +OP_LESS=6 +OP_LESS_EQ=7 +OP_MORE=8 +OP_MORE_EQ=9 +LEFT_PARENTHESIS=10 +RIGHT_PARENTHESIS=11 +LEFT_CURLY_BRACKET=12 +RIGHT_CURLY_BRACKET=13 
+UNQUOTED_LITERAL=14 +QUOTED_STRING=15 +WILDCARD=16 'and'=2 'or'=3 'not'=4 ':'=5 -'('=7 -')'=8 -'{'=9 -'}'=10 +'<'=6 +'<='=7 +'>'=8 +'>='=9 +'('=10 +')'=11 +'{'=12 +'}'=13 diff --git a/x-pack/plugin/kql/src/main/antlr/KqlBaseLexer.tokens b/x-pack/plugin/kql/src/main/antlr/KqlBaseLexer.tokens index 268ae0613b9f0..f26b6b9c3da55 100644 --- a/x-pack/plugin/kql/src/main/antlr/KqlBaseLexer.tokens +++ b/x-pack/plugin/kql/src/main/antlr/KqlBaseLexer.tokens @@ -3,19 +3,26 @@ AND=2 OR=3 NOT=4 COLON=5 -OP_COMPARE=6 -LEFT_PARENTHESIS=7 -RIGHT_PARENTHESIS=8 -LEFT_CURLY_BRACKET=9 -RIGHT_CURLY_BRACKET=10 -UNQUOTED_LITERAL=11 -QUOTED_STRING=12 -WILDCARD=13 +OP_LESS=6 +OP_LESS_EQ=7 +OP_MORE=8 +OP_MORE_EQ=9 +LEFT_PARENTHESIS=10 +RIGHT_PARENTHESIS=11 +LEFT_CURLY_BRACKET=12 +RIGHT_CURLY_BRACKET=13 +UNQUOTED_LITERAL=14 +QUOTED_STRING=15 +WILDCARD=16 'and'=2 'or'=3 'not'=4 ':'=5 -'('=7 -')'=8 -'{'=9 -'}'=10 +'<'=6 +'<='=7 +'>'=8 +'>='=9 +'('=10 +')'=11 +'{'=12 +'}'=13 diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp index 1954195b52363..111cac6d641b9 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp @@ -5,7 +5,10 @@ null 'or' 'not' ':' -null +'<' +'<=' +'>' +'>=' '(' ')' '{' @@ -21,7 +24,10 @@ AND OR NOT COLON -OP_COMPARE +OP_LESS +OP_LESS_EQ +OP_MORE +OP_MORE_EQ LEFT_PARENTHESIS RIGHT_PARENTHESIS LEFT_CURLY_BRACKET @@ -34,19 +40,17 @@ rule names: topLevelQuery query simpleQuery -expression nestedQuery +matchAllQuery parenthesizedQuery -fieldRangeQuery -fieldTermQuery -fieldName +rangeQuery rangeQueryValue -termQueryValue -groupingTermExpression -unquotedLiteralExpression -quotedStringExpression -wildcardExpression +existsQuery +fieldQuery +fieldLessQuery +fieldQueryValue +fieldName atn: -[4, 1, 13, 108, 2, 0, 7, 0, 2, 1, 7, 1, 
2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 1, 0, 3, 0, 32, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 40, 8, 1, 1, 1, 1, 1, 1, 1, 5, 1, 45, 8, 1, 10, 1, 12, 1, 48, 9, 1, 1, 2, 1, 2, 1, 2, 3, 2, 53, 8, 2, 1, 3, 1, 3, 3, 3, 57, 8, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 3, 7, 76, 8, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 3, 8, 83, 8, 8, 1, 9, 1, 9, 3, 9, 87, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 93, 8, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 4, 12, 100, 8, 12, 11, 12, 12, 12, 101, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 0, 1, 2, 15, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 0, 1, 1, 0, 2, 3, 106, 0, 31, 1, 0, 0, 0, 2, 39, 1, 0, 0, 0, 4, 52, 1, 0, 0, 0, 6, 56, 1, 0, 0, 0, 8, 58, 1, 0, 0, 0, 10, 64, 1, 0, 0, 0, 12, 68, 1, 0, 0, 0, 14, 75, 1, 0, 0, 0, 16, 82, 1, 0, 0, 0, 18, 86, 1, 0, 0, 0, 20, 92, 1, 0, 0, 0, 22, 94, 1, 0, 0, 0, 24, 99, 1, 0, 0, 0, 26, 103, 1, 0, 0, 0, 28, 105, 1, 0, 0, 0, 30, 32, 3, 2, 1, 0, 31, 30, 1, 0, 0, 0, 31, 32, 1, 0, 0, 0, 32, 33, 1, 0, 0, 0, 33, 34, 5, 0, 0, 1, 34, 1, 1, 0, 0, 0, 35, 36, 6, 1, -1, 0, 36, 37, 5, 4, 0, 0, 37, 40, 3, 4, 2, 0, 38, 40, 3, 4, 2, 0, 39, 35, 1, 0, 0, 0, 39, 38, 1, 0, 0, 0, 40, 46, 1, 0, 0, 0, 41, 42, 10, 3, 0, 0, 42, 43, 7, 0, 0, 0, 43, 45, 3, 2, 1, 4, 44, 41, 1, 0, 0, 0, 45, 48, 1, 0, 0, 0, 46, 44, 1, 0, 0, 0, 46, 47, 1, 0, 0, 0, 47, 3, 1, 0, 0, 0, 48, 46, 1, 0, 0, 0, 49, 53, 3, 8, 4, 0, 50, 53, 3, 6, 3, 0, 51, 53, 3, 10, 5, 0, 52, 49, 1, 0, 0, 0, 52, 50, 1, 0, 0, 0, 52, 51, 1, 0, 0, 0, 53, 5, 1, 0, 0, 0, 54, 57, 3, 14, 7, 0, 55, 57, 3, 12, 6, 0, 56, 54, 1, 0, 0, 0, 56, 55, 1, 0, 0, 0, 57, 7, 1, 0, 0, 0, 58, 59, 3, 16, 8, 0, 59, 60, 5, 5, 0, 0, 60, 61, 5, 9, 0, 0, 61, 62, 3, 2, 1, 0, 62, 63, 5, 10, 0, 0, 63, 9, 1, 0, 0, 0, 64, 65, 5, 7, 0, 0, 65, 66, 3, 2, 1, 0, 66, 67, 5, 8, 0, 0, 67, 11, 1, 0, 0, 0, 68, 69, 3, 16, 8, 0, 
69, 70, 5, 6, 0, 0, 70, 71, 3, 18, 9, 0, 71, 13, 1, 0, 0, 0, 72, 73, 3, 16, 8, 0, 73, 74, 5, 5, 0, 0, 74, 76, 1, 0, 0, 0, 75, 72, 1, 0, 0, 0, 75, 76, 1, 0, 0, 0, 76, 77, 1, 0, 0, 0, 77, 78, 3, 20, 10, 0, 78, 15, 1, 0, 0, 0, 79, 83, 3, 28, 14, 0, 80, 83, 3, 24, 12, 0, 81, 83, 3, 26, 13, 0, 82, 79, 1, 0, 0, 0, 82, 80, 1, 0, 0, 0, 82, 81, 1, 0, 0, 0, 83, 17, 1, 0, 0, 0, 84, 87, 3, 24, 12, 0, 85, 87, 3, 26, 13, 0, 86, 84, 1, 0, 0, 0, 86, 85, 1, 0, 0, 0, 87, 19, 1, 0, 0, 0, 88, 93, 3, 28, 14, 0, 89, 93, 3, 26, 13, 0, 90, 93, 3, 24, 12, 0, 91, 93, 3, 22, 11, 0, 92, 88, 1, 0, 0, 0, 92, 89, 1, 0, 0, 0, 92, 90, 1, 0, 0, 0, 92, 91, 1, 0, 0, 0, 93, 21, 1, 0, 0, 0, 94, 95, 5, 7, 0, 0, 95, 96, 3, 24, 12, 0, 96, 97, 5, 8, 0, 0, 97, 23, 1, 0, 0, 0, 98, 100, 5, 11, 0, 0, 99, 98, 1, 0, 0, 0, 100, 101, 1, 0, 0, 0, 101, 99, 1, 0, 0, 0, 101, 102, 1, 0, 0, 0, 102, 25, 1, 0, 0, 0, 103, 104, 5, 12, 0, 0, 104, 27, 1, 0, 0, 0, 105, 106, 5, 13, 0, 0, 106, 29, 1, 0, 0, 0, 10, 31, 39, 46, 52, 56, 75, 82, 86, 92, 101] \ No newline at end of file +[4, 1, 16, 135, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 1, 0, 3, 0, 28, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 36, 8, 1, 1, 1, 1, 1, 1, 1, 5, 1, 41, 8, 1, 10, 1, 12, 1, 44, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 53, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 3, 4, 63, 8, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 4, 7, 76, 8, 7, 11, 7, 12, 7, 77, 1, 7, 3, 7, 81, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 97, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 104, 8, 10, 1, 11, 3, 11, 107, 8, 11, 1, 11, 4, 11, 110, 8, 11, 11, 11, 12, 11, 111, 1, 11, 4, 11, 115, 8, 11, 11, 11, 12, 11, 116, 1, 11, 3, 11, 120, 8, 11, 1, 11, 1, 11, 3, 11, 124, 8, 11, 1, 12, 4, 12, 127, 8, 12, 11, 12, 12, 12, 128, 1, 12, 1, 12, 3, 12, 133, 8, 
12, 1, 12, 0, 1, 2, 13, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 0, 4, 1, 0, 2, 3, 1, 0, 6, 9, 2, 0, 14, 14, 16, 16, 1, 0, 2, 4, 145, 0, 27, 1, 0, 0, 0, 2, 35, 1, 0, 0, 0, 4, 52, 1, 0, 0, 0, 6, 54, 1, 0, 0, 0, 8, 62, 1, 0, 0, 0, 10, 66, 1, 0, 0, 0, 12, 70, 1, 0, 0, 0, 14, 80, 1, 0, 0, 0, 16, 82, 1, 0, 0, 0, 18, 96, 1, 0, 0, 0, 20, 103, 1, 0, 0, 0, 22, 123, 1, 0, 0, 0, 24, 132, 1, 0, 0, 0, 26, 28, 3, 2, 1, 0, 27, 26, 1, 0, 0, 0, 27, 28, 1, 0, 0, 0, 28, 29, 1, 0, 0, 0, 29, 30, 5, 0, 0, 1, 30, 1, 1, 0, 0, 0, 31, 32, 6, 1, -1, 0, 32, 33, 5, 4, 0, 0, 33, 36, 3, 4, 2, 0, 34, 36, 3, 4, 2, 0, 35, 31, 1, 0, 0, 0, 35, 34, 1, 0, 0, 0, 36, 42, 1, 0, 0, 0, 37, 38, 10, 3, 0, 0, 38, 39, 7, 0, 0, 0, 39, 41, 3, 2, 1, 3, 40, 37, 1, 0, 0, 0, 41, 44, 1, 0, 0, 0, 42, 40, 1, 0, 0, 0, 42, 43, 1, 0, 0, 0, 43, 3, 1, 0, 0, 0, 44, 42, 1, 0, 0, 0, 45, 53, 3, 6, 3, 0, 46, 53, 3, 10, 5, 0, 47, 53, 3, 8, 4, 0, 48, 53, 3, 16, 8, 0, 49, 53, 3, 12, 6, 0, 50, 53, 3, 18, 9, 0, 51, 53, 3, 20, 10, 0, 52, 45, 1, 0, 0, 0, 52, 46, 1, 0, 0, 0, 52, 47, 1, 0, 0, 0, 52, 48, 1, 0, 0, 0, 52, 49, 1, 0, 0, 0, 52, 50, 1, 0, 0, 0, 52, 51, 1, 0, 0, 0, 53, 5, 1, 0, 0, 0, 54, 55, 3, 24, 12, 0, 55, 56, 5, 5, 0, 0, 56, 57, 5, 12, 0, 0, 57, 58, 3, 2, 1, 0, 58, 59, 5, 13, 0, 0, 59, 7, 1, 0, 0, 0, 60, 61, 5, 16, 0, 0, 61, 63, 5, 5, 0, 0, 62, 60, 1, 0, 0, 0, 62, 63, 1, 0, 0, 0, 63, 64, 1, 0, 0, 0, 64, 65, 5, 16, 0, 0, 65, 9, 1, 0, 0, 0, 66, 67, 5, 10, 0, 0, 67, 68, 3, 2, 1, 0, 68, 69, 5, 11, 0, 0, 69, 11, 1, 0, 0, 0, 70, 71, 3, 24, 12, 0, 71, 72, 7, 1, 0, 0, 72, 73, 3, 14, 7, 0, 73, 13, 1, 0, 0, 0, 74, 76, 7, 2, 0, 0, 75, 74, 1, 0, 0, 0, 76, 77, 1, 0, 0, 0, 77, 75, 1, 0, 0, 0, 77, 78, 1, 0, 0, 0, 78, 81, 1, 0, 0, 0, 79, 81, 5, 15, 0, 0, 80, 75, 1, 0, 0, 0, 80, 79, 1, 0, 0, 0, 81, 15, 1, 0, 0, 0, 82, 83, 3, 24, 12, 0, 83, 84, 5, 5, 0, 0, 84, 85, 5, 16, 0, 0, 85, 17, 1, 0, 0, 0, 86, 87, 3, 24, 12, 0, 87, 88, 5, 5, 0, 0, 88, 89, 3, 22, 11, 0, 89, 97, 1, 0, 0, 0, 90, 91, 3, 24, 12, 0, 91, 92, 5, 5, 0, 0, 92, 93, 5, 10, 0, 
0, 93, 94, 3, 22, 11, 0, 94, 95, 5, 11, 0, 0, 95, 97, 1, 0, 0, 0, 96, 86, 1, 0, 0, 0, 96, 90, 1, 0, 0, 0, 97, 19, 1, 0, 0, 0, 98, 104, 3, 22, 11, 0, 99, 100, 5, 10, 0, 0, 100, 101, 3, 22, 11, 0, 101, 102, 5, 11, 0, 0, 102, 104, 1, 0, 0, 0, 103, 98, 1, 0, 0, 0, 103, 99, 1, 0, 0, 0, 104, 21, 1, 0, 0, 0, 105, 107, 7, 0, 0, 0, 106, 105, 1, 0, 0, 0, 106, 107, 1, 0, 0, 0, 107, 109, 1, 0, 0, 0, 108, 110, 7, 2, 0, 0, 109, 108, 1, 0, 0, 0, 110, 111, 1, 0, 0, 0, 111, 109, 1, 0, 0, 0, 111, 112, 1, 0, 0, 0, 112, 124, 1, 0, 0, 0, 113, 115, 7, 2, 0, 0, 114, 113, 1, 0, 0, 0, 115, 116, 1, 0, 0, 0, 116, 114, 1, 0, 0, 0, 116, 117, 1, 0, 0, 0, 117, 119, 1, 0, 0, 0, 118, 120, 7, 0, 0, 0, 119, 118, 1, 0, 0, 0, 119, 120, 1, 0, 0, 0, 120, 124, 1, 0, 0, 0, 121, 124, 7, 3, 0, 0, 122, 124, 5, 15, 0, 0, 123, 106, 1, 0, 0, 0, 123, 114, 1, 0, 0, 0, 123, 121, 1, 0, 0, 0, 123, 122, 1, 0, 0, 0, 124, 23, 1, 0, 0, 0, 125, 127, 5, 14, 0, 0, 126, 125, 1, 0, 0, 0, 127, 128, 1, 0, 0, 0, 128, 126, 1, 0, 0, 0, 128, 129, 1, 0, 0, 0, 129, 133, 1, 0, 0, 0, 130, 133, 5, 15, 0, 0, 131, 133, 5, 16, 0, 0, 132, 126, 1, 0, 0, 0, 132, 130, 1, 0, 0, 0, 132, 131, 1, 0, 0, 0, 133, 25, 1, 0, 0, 0, 16, 27, 35, 42, 52, 62, 77, 80, 96, 103, 106, 111, 116, 119, 123, 128, 132] \ No newline at end of file diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseListener.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseListener.java index 1b4282b5dbbea..426af7f7115b9 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseListener.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseListener.java @@ -80,18 +80,6 @@ class KqlBaseBaseListener implements KqlBaseListener { *

      The default implementation does nothing.

      */ @Override public void exitSimpleQuery(KqlBaseParser.SimpleQueryContext ctx) { } - /** - * {@inheritDoc} - * - *

      The default implementation does nothing.

      - */ - @Override public void enterExpression(KqlBaseParser.ExpressionContext ctx) { } - /** - * {@inheritDoc} - * - *

      The default implementation does nothing.

      - */ - @Override public void exitExpression(KqlBaseParser.ExpressionContext ctx) { } /** * {@inheritDoc} * @@ -109,49 +97,37 @@ class KqlBaseBaseListener implements KqlBaseListener { * *

      The default implementation does nothing.

      */ - @Override public void enterParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx) { } - /** - * {@inheritDoc} - * - *

      The default implementation does nothing.

      - */ - @Override public void exitParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx) { } - /** - * {@inheritDoc} - * - *

      The default implementation does nothing.

      - */ - @Override public void enterFieldRangeQuery(KqlBaseParser.FieldRangeQueryContext ctx) { } + @Override public void enterMatchAllQuery(KqlBaseParser.MatchAllQueryContext ctx) { } /** * {@inheritDoc} * *

      The default implementation does nothing.

      */ - @Override public void exitFieldRangeQuery(KqlBaseParser.FieldRangeQueryContext ctx) { } + @Override public void exitMatchAllQuery(KqlBaseParser.MatchAllQueryContext ctx) { } /** * {@inheritDoc} * *

      The default implementation does nothing.

      */ - @Override public void enterFieldTermQuery(KqlBaseParser.FieldTermQueryContext ctx) { } + @Override public void enterParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx) { } /** * {@inheritDoc} * *

      The default implementation does nothing.

      */ - @Override public void exitFieldTermQuery(KqlBaseParser.FieldTermQueryContext ctx) { } + @Override public void exitParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx) { } /** * {@inheritDoc} * *

      The default implementation does nothing.

      */ - @Override public void enterFieldName(KqlBaseParser.FieldNameContext ctx) { } + @Override public void enterRangeQuery(KqlBaseParser.RangeQueryContext ctx) { } /** * {@inheritDoc} * *

      The default implementation does nothing.

      */ - @Override public void exitFieldName(KqlBaseParser.FieldNameContext ctx) { } + @Override public void exitRangeQuery(KqlBaseParser.RangeQueryContext ctx) { } /** * {@inheritDoc} * @@ -169,61 +145,61 @@ class KqlBaseBaseListener implements KqlBaseListener { * *

      The default implementation does nothing.

      */ - @Override public void enterTermQueryValue(KqlBaseParser.TermQueryValueContext ctx) { } + @Override public void enterExistsQuery(KqlBaseParser.ExistsQueryContext ctx) { } /** * {@inheritDoc} * *

      The default implementation does nothing.

      */ - @Override public void exitTermQueryValue(KqlBaseParser.TermQueryValueContext ctx) { } + @Override public void exitExistsQuery(KqlBaseParser.ExistsQueryContext ctx) { } /** * {@inheritDoc} * *

      The default implementation does nothing.

      */ - @Override public void enterGroupingTermExpression(KqlBaseParser.GroupingTermExpressionContext ctx) { } + @Override public void enterFieldQuery(KqlBaseParser.FieldQueryContext ctx) { } /** * {@inheritDoc} * *

      The default implementation does nothing.

      */ - @Override public void exitGroupingTermExpression(KqlBaseParser.GroupingTermExpressionContext ctx) { } + @Override public void exitFieldQuery(KqlBaseParser.FieldQueryContext ctx) { } /** * {@inheritDoc} * *

      The default implementation does nothing.

      */ - @Override public void enterUnquotedLiteralExpression(KqlBaseParser.UnquotedLiteralExpressionContext ctx) { } + @Override public void enterFieldLessQuery(KqlBaseParser.FieldLessQueryContext ctx) { } /** * {@inheritDoc} * *

      The default implementation does nothing.

      */ - @Override public void exitUnquotedLiteralExpression(KqlBaseParser.UnquotedLiteralExpressionContext ctx) { } + @Override public void exitFieldLessQuery(KqlBaseParser.FieldLessQueryContext ctx) { } /** * {@inheritDoc} * *

      The default implementation does nothing.

      */ - @Override public void enterQuotedStringExpression(KqlBaseParser.QuotedStringExpressionContext ctx) { } + @Override public void enterFieldQueryValue(KqlBaseParser.FieldQueryValueContext ctx) { } /** * {@inheritDoc} * *

      The default implementation does nothing.

      */ - @Override public void exitQuotedStringExpression(KqlBaseParser.QuotedStringExpressionContext ctx) { } + @Override public void exitFieldQueryValue(KqlBaseParser.FieldQueryValueContext ctx) { } /** * {@inheritDoc} * *

      The default implementation does nothing.

      */ - @Override public void enterWildcardExpression(KqlBaseParser.WildcardExpressionContext ctx) { } + @Override public void enterFieldName(KqlBaseParser.FieldNameContext ctx) { } /** * {@inheritDoc} * *

      The default implementation does nothing.

      */ - @Override public void exitWildcardExpression(KqlBaseParser.WildcardExpressionContext ctx) { } + @Override public void exitFieldName(KqlBaseParser.FieldNameContext ctx) { } /** * {@inheritDoc} diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseVisitor.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseVisitor.java index 09cd668804154..cf1f2b3972823 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseVisitor.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseVisitor.java @@ -55,13 +55,6 @@ class KqlBaseBaseVisitor extends AbstractParseTreeVisitor implements KqlBa * {@link #visitChildren} on {@code ctx}.

      */ @Override public T visitSimpleQuery(KqlBaseParser.SimpleQueryContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

      The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.

      - */ - @Override public T visitExpression(KqlBaseParser.ExpressionContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -75,28 +68,21 @@ class KqlBaseBaseVisitor extends AbstractParseTreeVisitor implements KqlBa *

      The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

      */ - @Override public T visitParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

      The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.

      - */ - @Override public T visitFieldRangeQuery(KqlBaseParser.FieldRangeQueryContext ctx) { return visitChildren(ctx); } + @Override public T visitMatchAllQuery(KqlBaseParser.MatchAllQueryContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *

      The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

      */ - @Override public T visitFieldTermQuery(KqlBaseParser.FieldTermQueryContext ctx) { return visitChildren(ctx); } + @Override public T visitParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *

      The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

      */ - @Override public T visitFieldName(KqlBaseParser.FieldNameContext ctx) { return visitChildren(ctx); } + @Override public T visitRangeQuery(KqlBaseParser.RangeQueryContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -110,33 +96,33 @@ class KqlBaseBaseVisitor extends AbstractParseTreeVisitor implements KqlBa *

      The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

      */ - @Override public T visitTermQueryValue(KqlBaseParser.TermQueryValueContext ctx) { return visitChildren(ctx); } + @Override public T visitExistsQuery(KqlBaseParser.ExistsQueryContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *

      The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

      */ - @Override public T visitGroupingTermExpression(KqlBaseParser.GroupingTermExpressionContext ctx) { return visitChildren(ctx); } + @Override public T visitFieldQuery(KqlBaseParser.FieldQueryContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *

      The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

      */ - @Override public T visitUnquotedLiteralExpression(KqlBaseParser.UnquotedLiteralExpressionContext ctx) { return visitChildren(ctx); } + @Override public T visitFieldLessQuery(KqlBaseParser.FieldLessQueryContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *

      The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

      */ - @Override public T visitQuotedStringExpression(KqlBaseParser.QuotedStringExpressionContext ctx) { return visitChildren(ctx); } + @Override public T visitFieldQueryValue(KqlBaseParser.FieldQueryValueContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *

      The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

      */ - @Override public T visitWildcardExpression(KqlBaseParser.WildcardExpressionContext ctx) { return visitChildren(ctx); } + @Override public T visitFieldName(KqlBaseParser.FieldNameContext ctx) { return visitChildren(ctx); } } diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseLexer.interp b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseLexer.interp index d178df5fcbc88..f9afe07af3b40 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseLexer.interp +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseLexer.interp @@ -5,7 +5,10 @@ null 'or' 'not' ':' -null +'<' +'<=' +'>' +'>=' '(' ')' '{' @@ -21,7 +24,10 @@ AND OR NOT COLON -OP_COMPARE +OP_LESS +OP_LESS_EQ +OP_MORE +OP_MORE_EQ LEFT_PARENTHESIS RIGHT_PARENTHESIS LEFT_CURLY_BRACKET @@ -36,7 +42,10 @@ AND OR NOT COLON -OP_COMPARE +OP_LESS +OP_LESS_EQ +OP_MORE +OP_MORE_EQ LEFT_PARENTHESIS RIGHT_PARENTHESIS LEFT_CURLY_BRACKET @@ -45,11 +54,8 @@ UNQUOTED_LITERAL QUOTED_STRING WILDCARD WILDCARD_CHAR -OP_LESS -OP_LESS_EQ -OP_MORE -OP_MORE_EQ UNQUOTED_LITERAL_CHAR +UNQUOTED_LITERAL_BASE_CHAR QUOTED_CHAR WHITESPACE ESCAPED_WHITESPACE @@ -68,4 +74,4 @@ mode names: DEFAULT_MODE atn: -[4, 0, 13, 181, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 79, 8, 5, 1, 6, 1, 6, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 10, 5, 10, 90, 8, 10, 10, 10, 12, 10, 93, 9, 10, 1, 10, 4, 10, 96, 8, 10, 11, 10, 12, 10, 97, 1, 10, 5, 10, 101, 8, 10, 10, 10, 12, 10, 
104, 9, 10, 1, 11, 1, 11, 5, 11, 108, 8, 11, 10, 11, 12, 11, 111, 9, 11, 1, 11, 1, 11, 1, 12, 4, 12, 116, 8, 12, 11, 12, 12, 12, 117, 1, 13, 1, 13, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 139, 8, 18, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 145, 8, 18, 1, 19, 1, 19, 1, 19, 1, 19, 3, 19, 151, 8, 19, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 3, 21, 161, 8, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 0, 0, 28, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 0, 29, 0, 31, 0, 33, 0, 35, 0, 37, 0, 39, 0, 41, 0, 43, 0, 45, 0, 47, 0, 49, 0, 51, 0, 53, 0, 55, 0, 1, 0, 11, 2, 0, 65, 65, 97, 97, 2, 0, 78, 78, 110, 110, 2, 0, 68, 68, 100, 100, 2, 0, 79, 79, 111, 111, 2, 0, 82, 82, 114, 114, 2, 0, 84, 84, 116, 116, 1, 0, 34, 34, 4, 0, 9, 10, 13, 13, 32, 32, 12288, 12288, 9, 0, 32, 32, 34, 34, 40, 42, 58, 58, 60, 60, 62, 62, 92, 92, 123, 123, 125, 125, 2, 0, 85, 85, 117, 117, 3, 0, 48, 57, 65, 70, 97, 102, 185, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 1, 57, 1, 0, 0, 0, 3, 61, 1, 0, 0, 0, 5, 65, 1, 0, 0, 0, 7, 68, 1, 0, 0, 0, 9, 72, 1, 0, 0, 0, 11, 78, 1, 0, 0, 0, 13, 80, 1, 0, 0, 0, 15, 82, 1, 0, 0, 0, 17, 84, 1, 0, 0, 0, 19, 86, 1, 0, 0, 0, 21, 91, 1, 0, 0, 0, 23, 105, 1, 0, 0, 0, 25, 115, 1, 0, 0, 0, 27, 119, 1, 0, 0, 0, 29, 121, 1, 0, 0, 0, 31, 123, 1, 0, 0, 0, 33, 126, 1, 0, 0, 0, 35, 128, 1, 0, 0, 0, 37, 144, 1, 0, 0, 0, 39, 150, 1, 0, 0, 0, 41, 152, 1, 0, 0, 0, 43, 160, 1, 0, 0, 0, 45, 162, 1, 0, 0, 0, 47, 164, 1, 0, 0, 0, 49, 167, 1, 0, 0, 0, 51, 170, 1, 0, 0, 0, 53, 173, 1, 0, 0, 0, 55, 179, 1, 0, 0, 0, 57, 58, 3, 41, 20, 0, 58, 59, 1, 0, 0, 0, 
59, 60, 6, 0, 0, 0, 60, 2, 1, 0, 0, 0, 61, 62, 7, 0, 0, 0, 62, 63, 7, 1, 0, 0, 63, 64, 7, 2, 0, 0, 64, 4, 1, 0, 0, 0, 65, 66, 7, 3, 0, 0, 66, 67, 7, 4, 0, 0, 67, 6, 1, 0, 0, 0, 68, 69, 7, 1, 0, 0, 69, 70, 7, 3, 0, 0, 70, 71, 7, 5, 0, 0, 71, 8, 1, 0, 0, 0, 72, 73, 5, 58, 0, 0, 73, 10, 1, 0, 0, 0, 74, 79, 3, 29, 14, 0, 75, 79, 3, 33, 16, 0, 76, 79, 3, 31, 15, 0, 77, 79, 3, 35, 17, 0, 78, 74, 1, 0, 0, 0, 78, 75, 1, 0, 0, 0, 78, 76, 1, 0, 0, 0, 78, 77, 1, 0, 0, 0, 79, 12, 1, 0, 0, 0, 80, 81, 5, 40, 0, 0, 81, 14, 1, 0, 0, 0, 82, 83, 5, 41, 0, 0, 83, 16, 1, 0, 0, 0, 84, 85, 5, 123, 0, 0, 85, 18, 1, 0, 0, 0, 86, 87, 5, 125, 0, 0, 87, 20, 1, 0, 0, 0, 88, 90, 3, 25, 12, 0, 89, 88, 1, 0, 0, 0, 90, 93, 1, 0, 0, 0, 91, 89, 1, 0, 0, 0, 91, 92, 1, 0, 0, 0, 92, 95, 1, 0, 0, 0, 93, 91, 1, 0, 0, 0, 94, 96, 3, 37, 18, 0, 95, 94, 1, 0, 0, 0, 96, 97, 1, 0, 0, 0, 97, 95, 1, 0, 0, 0, 97, 98, 1, 0, 0, 0, 98, 102, 1, 0, 0, 0, 99, 101, 3, 25, 12, 0, 100, 99, 1, 0, 0, 0, 101, 104, 1, 0, 0, 0, 102, 100, 1, 0, 0, 0, 102, 103, 1, 0, 0, 0, 103, 22, 1, 0, 0, 0, 104, 102, 1, 0, 0, 0, 105, 109, 5, 34, 0, 0, 106, 108, 3, 39, 19, 0, 107, 106, 1, 0, 0, 0, 108, 111, 1, 0, 0, 0, 109, 107, 1, 0, 0, 0, 109, 110, 1, 0, 0, 0, 110, 112, 1, 0, 0, 0, 111, 109, 1, 0, 0, 0, 112, 113, 5, 34, 0, 0, 113, 24, 1, 0, 0, 0, 114, 116, 3, 27, 13, 0, 115, 114, 1, 0, 0, 0, 116, 117, 1, 0, 0, 0, 117, 115, 1, 0, 0, 0, 117, 118, 1, 0, 0, 0, 118, 26, 1, 0, 0, 0, 119, 120, 5, 42, 0, 0, 120, 28, 1, 0, 0, 0, 121, 122, 5, 60, 0, 0, 122, 30, 1, 0, 0, 0, 123, 124, 5, 60, 0, 0, 124, 125, 5, 61, 0, 0, 125, 32, 1, 0, 0, 0, 126, 127, 5, 62, 0, 0, 127, 34, 1, 0, 0, 0, 128, 129, 5, 62, 0, 0, 129, 130, 5, 61, 0, 0, 130, 36, 1, 0, 0, 0, 131, 145, 3, 43, 21, 0, 132, 145, 3, 47, 23, 0, 133, 145, 3, 51, 25, 0, 134, 138, 5, 92, 0, 0, 135, 139, 3, 3, 1, 0, 136, 139, 3, 5, 2, 0, 137, 139, 3, 7, 3, 0, 138, 135, 1, 0, 0, 0, 138, 136, 1, 0, 0, 0, 138, 137, 1, 0, 0, 0, 139, 145, 1, 0, 0, 0, 140, 141, 3, 27, 13, 0, 141, 142, 3, 37, 18, 0, 142, 145, 
1, 0, 0, 0, 143, 145, 3, 45, 22, 0, 144, 131, 1, 0, 0, 0, 144, 132, 1, 0, 0, 0, 144, 133, 1, 0, 0, 0, 144, 134, 1, 0, 0, 0, 144, 140, 1, 0, 0, 0, 144, 143, 1, 0, 0, 0, 145, 38, 1, 0, 0, 0, 146, 151, 3, 43, 21, 0, 147, 151, 3, 51, 25, 0, 148, 151, 3, 49, 24, 0, 149, 151, 8, 6, 0, 0, 150, 146, 1, 0, 0, 0, 150, 147, 1, 0, 0, 0, 150, 148, 1, 0, 0, 0, 150, 149, 1, 0, 0, 0, 151, 40, 1, 0, 0, 0, 152, 153, 7, 7, 0, 0, 153, 42, 1, 0, 0, 0, 154, 155, 5, 92, 0, 0, 155, 161, 7, 4, 0, 0, 156, 157, 5, 92, 0, 0, 157, 161, 7, 5, 0, 0, 158, 159, 5, 92, 0, 0, 159, 161, 7, 1, 0, 0, 160, 154, 1, 0, 0, 0, 160, 156, 1, 0, 0, 0, 160, 158, 1, 0, 0, 0, 161, 44, 1, 0, 0, 0, 162, 163, 8, 8, 0, 0, 163, 46, 1, 0, 0, 0, 164, 165, 5, 92, 0, 0, 165, 166, 7, 8, 0, 0, 166, 48, 1, 0, 0, 0, 167, 168, 5, 92, 0, 0, 168, 169, 5, 34, 0, 0, 169, 50, 1, 0, 0, 0, 170, 171, 5, 92, 0, 0, 171, 172, 3, 53, 26, 0, 172, 52, 1, 0, 0, 0, 173, 174, 7, 9, 0, 0, 174, 175, 3, 55, 27, 0, 175, 176, 3, 55, 27, 0, 176, 177, 3, 55, 27, 0, 177, 178, 3, 55, 27, 0, 178, 54, 1, 0, 0, 0, 179, 180, 7, 10, 0, 0, 180, 56, 1, 0, 0, 0, 11, 0, 78, 91, 97, 102, 109, 117, 138, 144, 150, 160, 1, 6, 0, 0] \ No newline at end of file +[4, 0, 16, 178, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 4, 13, 94, 8, 13, 11, 13, 12, 13, 95, 1, 14, 1, 14, 5, 14, 100, 8, 14, 10, 14, 12, 14, 103, 9, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 17, 5, 17, 112, 8, 17, 10, 17, 12, 17, 115, 9, 17, 1, 17, 
1, 17, 5, 17, 119, 8, 17, 10, 17, 12, 17, 122, 9, 17, 1, 17, 1, 17, 4, 17, 126, 8, 17, 11, 17, 12, 17, 127, 3, 17, 130, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 139, 8, 18, 1, 18, 3, 18, 142, 8, 18, 1, 19, 1, 19, 1, 19, 1, 19, 3, 19, 148, 8, 19, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 3, 21, 158, 8, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 0, 0, 28, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15, 31, 16, 33, 0, 35, 0, 37, 0, 39, 0, 41, 0, 43, 0, 45, 0, 47, 0, 49, 0, 51, 0, 53, 0, 55, 0, 1, 0, 12, 2, 0, 65, 65, 97, 97, 2, 0, 78, 78, 110, 110, 2, 0, 68, 68, 100, 100, 2, 0, 79, 79, 111, 111, 2, 0, 82, 82, 114, 114, 2, 0, 84, 84, 116, 116, 1, 0, 34, 34, 4, 0, 9, 10, 13, 13, 32, 32, 12288, 12288, 12, 0, 9, 10, 13, 13, 32, 32, 34, 34, 40, 42, 58, 58, 60, 60, 62, 62, 92, 92, 123, 123, 125, 125, 12288, 12288, 9, 0, 32, 32, 34, 34, 40, 42, 58, 58, 60, 60, 62, 62, 92, 92, 123, 123, 125, 125, 2, 0, 85, 85, 117, 117, 3, 0, 48, 57, 65, 70, 97, 102, 182, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 1, 57, 1, 0, 0, 0, 3, 61, 1, 0, 0, 0, 5, 65, 1, 0, 0, 0, 7, 68, 1, 0, 0, 0, 9, 72, 1, 0, 0, 0, 11, 74, 1, 0, 0, 0, 13, 76, 1, 0, 0, 0, 15, 79, 1, 0, 0, 0, 17, 81, 1, 0, 0, 0, 19, 84, 1, 0, 0, 0, 21, 86, 1, 0, 0, 0, 23, 88, 1, 0, 0, 0, 25, 90, 1, 0, 0, 0, 27, 93, 1, 0, 0, 0, 29, 97, 1, 0, 0, 0, 31, 106, 1, 0, 0, 0, 33, 108, 1, 0, 0, 0, 35, 129, 1, 0, 0, 0, 37, 141, 1, 0, 0, 0, 39, 147, 1, 0, 0, 0, 41, 149, 1, 0, 0, 0, 43, 157, 1, 0, 0, 0, 45, 159, 1, 0, 0, 0, 47, 161, 1, 0, 0, 0, 49, 164, 1, 0, 0, 0, 51, 167, 1, 0, 0, 0, 53, 170, 1, 0, 0, 0, 55, 
176, 1, 0, 0, 0, 57, 58, 3, 41, 20, 0, 58, 59, 1, 0, 0, 0, 59, 60, 6, 0, 0, 0, 60, 2, 1, 0, 0, 0, 61, 62, 7, 0, 0, 0, 62, 63, 7, 1, 0, 0, 63, 64, 7, 2, 0, 0, 64, 4, 1, 0, 0, 0, 65, 66, 7, 3, 0, 0, 66, 67, 7, 4, 0, 0, 67, 6, 1, 0, 0, 0, 68, 69, 7, 1, 0, 0, 69, 70, 7, 3, 0, 0, 70, 71, 7, 5, 0, 0, 71, 8, 1, 0, 0, 0, 72, 73, 5, 58, 0, 0, 73, 10, 1, 0, 0, 0, 74, 75, 5, 60, 0, 0, 75, 12, 1, 0, 0, 0, 76, 77, 5, 60, 0, 0, 77, 78, 5, 61, 0, 0, 78, 14, 1, 0, 0, 0, 79, 80, 5, 62, 0, 0, 80, 16, 1, 0, 0, 0, 81, 82, 5, 62, 0, 0, 82, 83, 5, 61, 0, 0, 83, 18, 1, 0, 0, 0, 84, 85, 5, 40, 0, 0, 85, 20, 1, 0, 0, 0, 86, 87, 5, 41, 0, 0, 87, 22, 1, 0, 0, 0, 88, 89, 5, 123, 0, 0, 89, 24, 1, 0, 0, 0, 90, 91, 5, 125, 0, 0, 91, 26, 1, 0, 0, 0, 92, 94, 3, 35, 17, 0, 93, 92, 1, 0, 0, 0, 94, 95, 1, 0, 0, 0, 95, 93, 1, 0, 0, 0, 95, 96, 1, 0, 0, 0, 96, 28, 1, 0, 0, 0, 97, 101, 5, 34, 0, 0, 98, 100, 3, 39, 19, 0, 99, 98, 1, 0, 0, 0, 100, 103, 1, 0, 0, 0, 101, 99, 1, 0, 0, 0, 101, 102, 1, 0, 0, 0, 102, 104, 1, 0, 0, 0, 103, 101, 1, 0, 0, 0, 104, 105, 5, 34, 0, 0, 105, 30, 1, 0, 0, 0, 106, 107, 3, 33, 16, 0, 107, 32, 1, 0, 0, 0, 108, 109, 5, 42, 0, 0, 109, 34, 1, 0, 0, 0, 110, 112, 3, 33, 16, 0, 111, 110, 1, 0, 0, 0, 112, 115, 1, 0, 0, 0, 113, 111, 1, 0, 0, 0, 113, 114, 1, 0, 0, 0, 114, 116, 1, 0, 0, 0, 115, 113, 1, 0, 0, 0, 116, 120, 3, 37, 18, 0, 117, 119, 3, 33, 16, 0, 118, 117, 1, 0, 0, 0, 119, 122, 1, 0, 0, 0, 120, 118, 1, 0, 0, 0, 120, 121, 1, 0, 0, 0, 121, 130, 1, 0, 0, 0, 122, 120, 1, 0, 0, 0, 123, 125, 3, 33, 16, 0, 124, 126, 3, 33, 16, 0, 125, 124, 1, 0, 0, 0, 126, 127, 1, 0, 0, 0, 127, 125, 1, 0, 0, 0, 127, 128, 1, 0, 0, 0, 128, 130, 1, 0, 0, 0, 129, 113, 1, 0, 0, 0, 129, 123, 1, 0, 0, 0, 130, 36, 1, 0, 0, 0, 131, 142, 3, 43, 21, 0, 132, 142, 3, 47, 23, 0, 133, 142, 3, 51, 25, 0, 134, 138, 5, 92, 0, 0, 135, 139, 3, 3, 1, 0, 136, 139, 3, 5, 2, 0, 137, 139, 3, 7, 3, 0, 138, 135, 1, 0, 0, 0, 138, 136, 1, 0, 0, 0, 138, 137, 1, 0, 0, 0, 139, 142, 1, 0, 0, 0, 140, 142, 3, 45, 22, 0, 141, 131, 
1, 0, 0, 0, 141, 132, 1, 0, 0, 0, 141, 133, 1, 0, 0, 0, 141, 134, 1, 0, 0, 0, 141, 140, 1, 0, 0, 0, 142, 38, 1, 0, 0, 0, 143, 148, 3, 43, 21, 0, 144, 148, 3, 51, 25, 0, 145, 148, 3, 49, 24, 0, 146, 148, 8, 6, 0, 0, 147, 143, 1, 0, 0, 0, 147, 144, 1, 0, 0, 0, 147, 145, 1, 0, 0, 0, 147, 146, 1, 0, 0, 0, 148, 40, 1, 0, 0, 0, 149, 150, 7, 7, 0, 0, 150, 42, 1, 0, 0, 0, 151, 152, 5, 92, 0, 0, 152, 158, 7, 4, 0, 0, 153, 154, 5, 92, 0, 0, 154, 158, 7, 5, 0, 0, 155, 156, 5, 92, 0, 0, 156, 158, 7, 1, 0, 0, 157, 151, 1, 0, 0, 0, 157, 153, 1, 0, 0, 0, 157, 155, 1, 0, 0, 0, 158, 44, 1, 0, 0, 0, 159, 160, 8, 8, 0, 0, 160, 46, 1, 0, 0, 0, 161, 162, 5, 92, 0, 0, 162, 163, 7, 9, 0, 0, 163, 48, 1, 0, 0, 0, 164, 165, 5, 92, 0, 0, 165, 166, 5, 34, 0, 0, 166, 50, 1, 0, 0, 0, 167, 168, 5, 92, 0, 0, 168, 169, 3, 53, 26, 0, 169, 52, 1, 0, 0, 0, 170, 171, 7, 10, 0, 0, 171, 172, 3, 55, 27, 0, 172, 173, 3, 55, 27, 0, 173, 174, 3, 55, 27, 0, 174, 175, 3, 55, 27, 0, 175, 54, 1, 0, 0, 0, 176, 177, 7, 11, 0, 0, 177, 56, 1, 0, 0, 0, 11, 0, 95, 101, 113, 120, 127, 129, 138, 141, 147, 157, 1, 6, 0, 0] \ No newline at end of file diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseLexer.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseLexer.java index b397a412d5e8e..f9353afd6e114 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseLexer.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseLexer.java @@ -25,9 +25,9 @@ class KqlBaseLexer extends Lexer { protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int - DEFAULT_SKIP=1, AND=2, OR=3, NOT=4, COLON=5, OP_COMPARE=6, LEFT_PARENTHESIS=7, - RIGHT_PARENTHESIS=8, LEFT_CURLY_BRACKET=9, RIGHT_CURLY_BRACKET=10, UNQUOTED_LITERAL=11, - QUOTED_STRING=12, WILDCARD=13; + DEFAULT_SKIP=1, AND=2, OR=3, NOT=4, COLON=5, OP_LESS=6, OP_LESS_EQ=7, + OP_MORE=8, OP_MORE_EQ=9, 
LEFT_PARENTHESIS=10, RIGHT_PARENTHESIS=11, LEFT_CURLY_BRACKET=12, + RIGHT_CURLY_BRACKET=13, UNQUOTED_LITERAL=14, QUOTED_STRING=15, WILDCARD=16; public static String[] channelNames = { "DEFAULT_TOKEN_CHANNEL", "HIDDEN" }; @@ -38,28 +38,29 @@ class KqlBaseLexer extends Lexer { private static String[] makeRuleNames() { return new String[] { - "DEFAULT_SKIP", "AND", "OR", "NOT", "COLON", "OP_COMPARE", "LEFT_PARENTHESIS", - "RIGHT_PARENTHESIS", "LEFT_CURLY_BRACKET", "RIGHT_CURLY_BRACKET", "UNQUOTED_LITERAL", - "QUOTED_STRING", "WILDCARD", "WILDCARD_CHAR", "OP_LESS", "OP_LESS_EQ", - "OP_MORE", "OP_MORE_EQ", "UNQUOTED_LITERAL_CHAR", "QUOTED_CHAR", "WHITESPACE", - "ESCAPED_WHITESPACE", "NON_SPECIAL_CHAR", "ESCAPED_SPECIAL_CHAR", "ESCAPED_QUOTE", - "ESCAPE_UNICODE_SEQUENCE", "UNICODE_SEQUENCE", "HEX_DIGIT" + "DEFAULT_SKIP", "AND", "OR", "NOT", "COLON", "OP_LESS", "OP_LESS_EQ", + "OP_MORE", "OP_MORE_EQ", "LEFT_PARENTHESIS", "RIGHT_PARENTHESIS", "LEFT_CURLY_BRACKET", + "RIGHT_CURLY_BRACKET", "UNQUOTED_LITERAL", "QUOTED_STRING", "WILDCARD", + "WILDCARD_CHAR", "UNQUOTED_LITERAL_CHAR", "UNQUOTED_LITERAL_BASE_CHAR", + "QUOTED_CHAR", "WHITESPACE", "ESCAPED_WHITESPACE", "NON_SPECIAL_CHAR", + "ESCAPED_SPECIAL_CHAR", "ESCAPED_QUOTE", "ESCAPE_UNICODE_SEQUENCE", "UNICODE_SEQUENCE", + "HEX_DIGIT" }; } public static final String[] ruleNames = makeRuleNames(); private static String[] makeLiteralNames() { return new String[] { - null, null, "'and'", "'or'", "'not'", "':'", null, "'('", "')'", "'{'", - "'}'" + null, null, "'and'", "'or'", "'not'", "':'", "'<'", "'<='", "'>'", "'>='", + "'('", "')'", "'{'", "'}'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); private static String[] makeSymbolicNames() { return new String[] { - null, "DEFAULT_SKIP", "AND", "OR", "NOT", "COLON", "OP_COMPARE", "LEFT_PARENTHESIS", - "RIGHT_PARENTHESIS", "LEFT_CURLY_BRACKET", "RIGHT_CURLY_BRACKET", "UNQUOTED_LITERAL", - "QUOTED_STRING", "WILDCARD" + null, "DEFAULT_SKIP", "AND", "OR", 
"NOT", "COLON", "OP_LESS", "OP_LESS_EQ", + "OP_MORE", "OP_MORE_EQ", "LEFT_PARENTHESIS", "RIGHT_PARENTHESIS", "LEFT_CURLY_BRACKET", + "RIGHT_CURLY_BRACKET", "UNQUOTED_LITERAL", "QUOTED_STRING", "WILDCARD" }; } private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); @@ -121,119 +122,119 @@ public KqlBaseLexer(CharStream input) { public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\u0004\u0000\r\u00b5\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002\u0001"+ - "\u0007\u0001\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004"+ - "\u0007\u0004\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007"+ - "\u0007\u0007\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b"+ - "\u0007\u000b\u0002\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0002"+ - "\u000f\u0007\u000f\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011\u0002"+ - "\u0012\u0007\u0012\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014\u0002"+ - "\u0015\u0007\u0015\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017\u0002"+ - "\u0018\u0007\u0018\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a\u0002"+ - "\u001b\u0007\u001b\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001"+ - "\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005O\b"+ - "\u0005\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\b\u0001\b"+ - "\u0001\t\u0001\t\u0001\n\u0005\nZ\b\n\n\n\f\n]\t\n\u0001\n\u0004\n`\b"+ - "\n\u000b\n\f\na\u0001\n\u0005\ne\b\n\n\n\f\nh\t\n\u0001\u000b\u0001\u000b"+ - "\u0005\u000bl\b\u000b\n\u000b\f\u000bo\t\u000b\u0001\u000b\u0001\u000b"+ - "\u0001\f\u0004\ft\b\f\u000b\f\f\fu\u0001\r\u0001\r\u0001\u000e\u0001\u000e"+ - "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0011"+ - "\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012"+ - 
"\u0001\u0012\u0001\u0012\u0001\u0012\u0003\u0012\u008b\b\u0012\u0001\u0012"+ - "\u0001\u0012\u0001\u0012\u0001\u0012\u0003\u0012\u0091\b\u0012\u0001\u0013"+ - "\u0001\u0013\u0001\u0013\u0001\u0013\u0003\u0013\u0097\b\u0013\u0001\u0014"+ - "\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015"+ - "\u0001\u0015\u0003\u0015\u00a1\b\u0015\u0001\u0016\u0001\u0016\u0001\u0017"+ - "\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0019"+ - "\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a"+ - "\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0000\u0000\u001c\u0001"+ - "\u0001\u0003\u0002\u0005\u0003\u0007\u0004\t\u0005\u000b\u0006\r\u0007"+ - "\u000f\b\u0011\t\u0013\n\u0015\u000b\u0017\f\u0019\r\u001b\u0000\u001d"+ - "\u0000\u001f\u0000!\u0000#\u0000%\u0000\'\u0000)\u0000+\u0000-\u0000/"+ - "\u00001\u00003\u00005\u00007\u0000\u0001\u0000\u000b\u0002\u0000AAaa\u0002"+ - "\u0000NNnn\u0002\u0000DDdd\u0002\u0000OOoo\u0002\u0000RRrr\u0002\u0000"+ - "TTtt\u0001\u0000\"\"\u0004\u0000\t\n\r\r \u3000\u3000\t\u0000 \"\"("+ - "*::<<>>\\\\{{}}\u0002\u0000UUuu\u0003\u000009AFaf\u00b9\u0000\u0001\u0001"+ - "\u0000\u0000\u0000\u0000\u0003\u0001\u0000\u0000\u0000\u0000\u0005\u0001"+ - "\u0000\u0000\u0000\u0000\u0007\u0001\u0000\u0000\u0000\u0000\t\u0001\u0000"+ - "\u0000\u0000\u0000\u000b\u0001\u0000\u0000\u0000\u0000\r\u0001\u0000\u0000"+ - "\u0000\u0000\u000f\u0001\u0000\u0000\u0000\u0000\u0011\u0001\u0000\u0000"+ - "\u0000\u0000\u0013\u0001\u0000\u0000\u0000\u0000\u0015\u0001\u0000\u0000"+ - "\u0000\u0000\u0017\u0001\u0000\u0000\u0000\u0000\u0019\u0001\u0000\u0000"+ - "\u0000\u00019\u0001\u0000\u0000\u0000\u0003=\u0001\u0000\u0000\u0000\u0005"+ - "A\u0001\u0000\u0000\u0000\u0007D\u0001\u0000\u0000\u0000\tH\u0001\u0000"+ - "\u0000\u0000\u000bN\u0001\u0000\u0000\u0000\rP\u0001\u0000\u0000\u0000"+ - "\u000fR\u0001\u0000\u0000\u0000\u0011T\u0001\u0000\u0000\u0000\u0013V"+ - 
"\u0001\u0000\u0000\u0000\u0015[\u0001\u0000\u0000\u0000\u0017i\u0001\u0000"+ - "\u0000\u0000\u0019s\u0001\u0000\u0000\u0000\u001bw\u0001\u0000\u0000\u0000"+ - "\u001dy\u0001\u0000\u0000\u0000\u001f{\u0001\u0000\u0000\u0000!~\u0001"+ - "\u0000\u0000\u0000#\u0080\u0001\u0000\u0000\u0000%\u0090\u0001\u0000\u0000"+ - "\u0000\'\u0096\u0001\u0000\u0000\u0000)\u0098\u0001\u0000\u0000\u0000"+ - "+\u00a0\u0001\u0000\u0000\u0000-\u00a2\u0001\u0000\u0000\u0000/\u00a4"+ - "\u0001\u0000\u0000\u00001\u00a7\u0001\u0000\u0000\u00003\u00aa\u0001\u0000"+ - "\u0000\u00005\u00ad\u0001\u0000\u0000\u00007\u00b3\u0001\u0000\u0000\u0000"+ - "9:\u0003)\u0014\u0000:;\u0001\u0000\u0000\u0000;<\u0006\u0000\u0000\u0000"+ - "<\u0002\u0001\u0000\u0000\u0000=>\u0007\u0000\u0000\u0000>?\u0007\u0001"+ - "\u0000\u0000?@\u0007\u0002\u0000\u0000@\u0004\u0001\u0000\u0000\u0000"+ - "AB\u0007\u0003\u0000\u0000BC\u0007\u0004\u0000\u0000C\u0006\u0001\u0000"+ - "\u0000\u0000DE\u0007\u0001\u0000\u0000EF\u0007\u0003\u0000\u0000FG\u0007"+ - "\u0005\u0000\u0000G\b\u0001\u0000\u0000\u0000HI\u0005:\u0000\u0000I\n"+ - "\u0001\u0000\u0000\u0000JO\u0003\u001d\u000e\u0000KO\u0003!\u0010\u0000"+ - "LO\u0003\u001f\u000f\u0000MO\u0003#\u0011\u0000NJ\u0001\u0000\u0000\u0000"+ - "NK\u0001\u0000\u0000\u0000NL\u0001\u0000\u0000\u0000NM\u0001\u0000\u0000"+ - "\u0000O\f\u0001\u0000\u0000\u0000PQ\u0005(\u0000\u0000Q\u000e\u0001\u0000"+ - "\u0000\u0000RS\u0005)\u0000\u0000S\u0010\u0001\u0000\u0000\u0000TU\u0005"+ - "{\u0000\u0000U\u0012\u0001\u0000\u0000\u0000VW\u0005}\u0000\u0000W\u0014"+ - "\u0001\u0000\u0000\u0000XZ\u0003\u0019\f\u0000YX\u0001\u0000\u0000\u0000"+ - "Z]\u0001\u0000\u0000\u0000[Y\u0001\u0000\u0000\u0000[\\\u0001\u0000\u0000"+ - "\u0000\\_\u0001\u0000\u0000\u0000][\u0001\u0000\u0000\u0000^`\u0003%\u0012"+ - "\u0000_^\u0001\u0000\u0000\u0000`a\u0001\u0000\u0000\u0000a_\u0001\u0000"+ - "\u0000\u0000ab\u0001\u0000\u0000\u0000bf\u0001\u0000\u0000\u0000ce\u0003"+ - 
"\u0019\f\u0000dc\u0001\u0000\u0000\u0000eh\u0001\u0000\u0000\u0000fd\u0001"+ - "\u0000\u0000\u0000fg\u0001\u0000\u0000\u0000g\u0016\u0001\u0000\u0000"+ - "\u0000hf\u0001\u0000\u0000\u0000im\u0005\"\u0000\u0000jl\u0003\'\u0013"+ - "\u0000kj\u0001\u0000\u0000\u0000lo\u0001\u0000\u0000\u0000mk\u0001\u0000"+ - "\u0000\u0000mn\u0001\u0000\u0000\u0000np\u0001\u0000\u0000\u0000om\u0001"+ - "\u0000\u0000\u0000pq\u0005\"\u0000\u0000q\u0018\u0001\u0000\u0000\u0000"+ - "rt\u0003\u001b\r\u0000sr\u0001\u0000\u0000\u0000tu\u0001\u0000\u0000\u0000"+ - "us\u0001\u0000\u0000\u0000uv\u0001\u0000\u0000\u0000v\u001a\u0001\u0000"+ - "\u0000\u0000wx\u0005*\u0000\u0000x\u001c\u0001\u0000\u0000\u0000yz\u0005"+ - "<\u0000\u0000z\u001e\u0001\u0000\u0000\u0000{|\u0005<\u0000\u0000|}\u0005"+ - "=\u0000\u0000} \u0001\u0000\u0000\u0000~\u007f\u0005>\u0000\u0000\u007f"+ - "\"\u0001\u0000\u0000\u0000\u0080\u0081\u0005>\u0000\u0000\u0081\u0082"+ - "\u0005=\u0000\u0000\u0082$\u0001\u0000\u0000\u0000\u0083\u0091\u0003+"+ - "\u0015\u0000\u0084\u0091\u0003/\u0017\u0000\u0085\u0091\u00033\u0019\u0000"+ - "\u0086\u008a\u0005\\\u0000\u0000\u0087\u008b\u0003\u0003\u0001\u0000\u0088"+ - "\u008b\u0003\u0005\u0002\u0000\u0089\u008b\u0003\u0007\u0003\u0000\u008a"+ - "\u0087\u0001\u0000\u0000\u0000\u008a\u0088\u0001\u0000\u0000\u0000\u008a"+ - "\u0089\u0001\u0000\u0000\u0000\u008b\u0091\u0001\u0000\u0000\u0000\u008c"+ - "\u008d\u0003\u001b\r\u0000\u008d\u008e\u0003%\u0012\u0000\u008e\u0091"+ - "\u0001\u0000\u0000\u0000\u008f\u0091\u0003-\u0016\u0000\u0090\u0083\u0001"+ - "\u0000\u0000\u0000\u0090\u0084\u0001\u0000\u0000\u0000\u0090\u0085\u0001"+ - "\u0000\u0000\u0000\u0090\u0086\u0001\u0000\u0000\u0000\u0090\u008c\u0001"+ - "\u0000\u0000\u0000\u0090\u008f\u0001\u0000\u0000\u0000\u0091&\u0001\u0000"+ - "\u0000\u0000\u0092\u0097\u0003+\u0015\u0000\u0093\u0097\u00033\u0019\u0000"+ - "\u0094\u0097\u00031\u0018\u0000\u0095\u0097\b\u0006\u0000\u0000\u0096"+ - 
"\u0092\u0001\u0000\u0000\u0000\u0096\u0093\u0001\u0000\u0000\u0000\u0096"+ - "\u0094\u0001\u0000\u0000\u0000\u0096\u0095\u0001\u0000\u0000\u0000\u0097"+ - "(\u0001\u0000\u0000\u0000\u0098\u0099\u0007\u0007\u0000\u0000\u0099*\u0001"+ - "\u0000\u0000\u0000\u009a\u009b\u0005\\\u0000\u0000\u009b\u00a1\u0007\u0004"+ - "\u0000\u0000\u009c\u009d\u0005\\\u0000\u0000\u009d\u00a1\u0007\u0005\u0000"+ - "\u0000\u009e\u009f\u0005\\\u0000\u0000\u009f\u00a1\u0007\u0001\u0000\u0000"+ - "\u00a0\u009a\u0001\u0000\u0000\u0000\u00a0\u009c\u0001\u0000\u0000\u0000"+ - "\u00a0\u009e\u0001\u0000\u0000\u0000\u00a1,\u0001\u0000\u0000\u0000\u00a2"+ - "\u00a3\b\b\u0000\u0000\u00a3.\u0001\u0000\u0000\u0000\u00a4\u00a5\u0005"+ - "\\\u0000\u0000\u00a5\u00a6\u0007\b\u0000\u0000\u00a60\u0001\u0000\u0000"+ - "\u0000\u00a7\u00a8\u0005\\\u0000\u0000\u00a8\u00a9\u0005\"\u0000\u0000"+ - "\u00a92\u0001\u0000\u0000\u0000\u00aa\u00ab\u0005\\\u0000\u0000\u00ab"+ - "\u00ac\u00035\u001a\u0000\u00ac4\u0001\u0000\u0000\u0000\u00ad\u00ae\u0007"+ - "\t\u0000\u0000\u00ae\u00af\u00037\u001b\u0000\u00af\u00b0\u00037\u001b"+ - "\u0000\u00b0\u00b1\u00037\u001b\u0000\u00b1\u00b2\u00037\u001b\u0000\u00b2"+ - "6\u0001\u0000\u0000\u0000\u00b3\u00b4\u0007\n\u0000\u0000\u00b48\u0001"+ - "\u0000\u0000\u0000\u000b\u0000N[afmu\u008a\u0090\u0096\u00a0\u0001\u0006"+ - "\u0000\u0000"; + "\u0004\u0000\u0010\u00b2\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002"+ + "\u0001\u0007\u0001\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002"+ + "\u0004\u0007\u0004\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002"+ + "\u0007\u0007\u0007\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002"+ + "\u000b\u0007\u000b\u0002\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e"+ + "\u0002\u000f\u0007\u000f\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011"+ + "\u0002\u0012\u0007\u0012\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014"+ + "\u0002\u0015\u0007\u0015\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017"+ + 
"\u0002\u0018\u0007\u0018\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a"+ + "\u0002\u001b\u0007\u001b\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002"+ + "\u0001\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004"+ + "\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006"+ + "\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001"+ + "\n\u0001\n\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\r\u0004\r^\b"+ + "\r\u000b\r\f\r_\u0001\u000e\u0001\u000e\u0005\u000ed\b\u000e\n\u000e\f"+ + "\u000eg\t\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001"+ + "\u0010\u0001\u0010\u0001\u0011\u0005\u0011p\b\u0011\n\u0011\f\u0011s\t"+ + "\u0011\u0001\u0011\u0001\u0011\u0005\u0011w\b\u0011\n\u0011\f\u0011z\t"+ + "\u0011\u0001\u0011\u0001\u0011\u0004\u0011~\b\u0011\u000b\u0011\f\u0011"+ + "\u007f\u0003\u0011\u0082\b\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001"+ + "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0003\u0012\u008b\b\u0012\u0001"+ + "\u0012\u0003\u0012\u008e\b\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001"+ + "\u0013\u0003\u0013\u0094\b\u0013\u0001\u0014\u0001\u0014\u0001\u0015\u0001"+ + "\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0003\u0015\u009e"+ + "\b\u0015\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ + "\u0018\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001"+ + "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001"+ + "\u001b\u0001\u001b\u0000\u0000\u001c\u0001\u0001\u0003\u0002\u0005\u0003"+ + "\u0007\u0004\t\u0005\u000b\u0006\r\u0007\u000f\b\u0011\t\u0013\n\u0015"+ + "\u000b\u0017\f\u0019\r\u001b\u000e\u001d\u000f\u001f\u0010!\u0000#\u0000"+ + "%\u0000\'\u0000)\u0000+\u0000-\u0000/\u00001\u00003\u00005\u00007\u0000"+ + "\u0001\u0000\f\u0002\u0000AAaa\u0002\u0000NNnn\u0002\u0000DDdd\u0002\u0000"+ + 
"OOoo\u0002\u0000RRrr\u0002\u0000TTtt\u0001\u0000\"\"\u0004\u0000\t\n\r"+ + "\r \u3000\u3000\f\u0000\t\n\r\r \"\"(*::<<>>\\\\{{}}\u3000\u3000\t\u0000"+ + " \"\"(*::<<>>\\\\{{}}\u0002\u0000UUuu\u0003\u000009AFaf\u00b6\u0000\u0001"+ + "\u0001\u0000\u0000\u0000\u0000\u0003\u0001\u0000\u0000\u0000\u0000\u0005"+ + "\u0001\u0000\u0000\u0000\u0000\u0007\u0001\u0000\u0000\u0000\u0000\t\u0001"+ + "\u0000\u0000\u0000\u0000\u000b\u0001\u0000\u0000\u0000\u0000\r\u0001\u0000"+ + "\u0000\u0000\u0000\u000f\u0001\u0000\u0000\u0000\u0000\u0011\u0001\u0000"+ + "\u0000\u0000\u0000\u0013\u0001\u0000\u0000\u0000\u0000\u0015\u0001\u0000"+ + "\u0000\u0000\u0000\u0017\u0001\u0000\u0000\u0000\u0000\u0019\u0001\u0000"+ + "\u0000\u0000\u0000\u001b\u0001\u0000\u0000\u0000\u0000\u001d\u0001\u0000"+ + "\u0000\u0000\u0000\u001f\u0001\u0000\u0000\u0000\u00019\u0001\u0000\u0000"+ + "\u0000\u0003=\u0001\u0000\u0000\u0000\u0005A\u0001\u0000\u0000\u0000\u0007"+ + "D\u0001\u0000\u0000\u0000\tH\u0001\u0000\u0000\u0000\u000bJ\u0001\u0000"+ + "\u0000\u0000\rL\u0001\u0000\u0000\u0000\u000fO\u0001\u0000\u0000\u0000"+ + "\u0011Q\u0001\u0000\u0000\u0000\u0013T\u0001\u0000\u0000\u0000\u0015V"+ + "\u0001\u0000\u0000\u0000\u0017X\u0001\u0000\u0000\u0000\u0019Z\u0001\u0000"+ + "\u0000\u0000\u001b]\u0001\u0000\u0000\u0000\u001da\u0001\u0000\u0000\u0000"+ + "\u001fj\u0001\u0000\u0000\u0000!l\u0001\u0000\u0000\u0000#\u0081\u0001"+ + "\u0000\u0000\u0000%\u008d\u0001\u0000\u0000\u0000\'\u0093\u0001\u0000"+ + "\u0000\u0000)\u0095\u0001\u0000\u0000\u0000+\u009d\u0001\u0000\u0000\u0000"+ + "-\u009f\u0001\u0000\u0000\u0000/\u00a1\u0001\u0000\u0000\u00001\u00a4"+ + "\u0001\u0000\u0000\u00003\u00a7\u0001\u0000\u0000\u00005\u00aa\u0001\u0000"+ + "\u0000\u00007\u00b0\u0001\u0000\u0000\u00009:\u0003)\u0014\u0000:;\u0001"+ + "\u0000\u0000\u0000;<\u0006\u0000\u0000\u0000<\u0002\u0001\u0000\u0000"+ + "\u0000=>\u0007\u0000\u0000\u0000>?\u0007\u0001\u0000\u0000?@\u0007\u0002"+ + 
"\u0000\u0000@\u0004\u0001\u0000\u0000\u0000AB\u0007\u0003\u0000\u0000"+ + "BC\u0007\u0004\u0000\u0000C\u0006\u0001\u0000\u0000\u0000DE\u0007\u0001"+ + "\u0000\u0000EF\u0007\u0003\u0000\u0000FG\u0007\u0005\u0000\u0000G\b\u0001"+ + "\u0000\u0000\u0000HI\u0005:\u0000\u0000I\n\u0001\u0000\u0000\u0000JK\u0005"+ + "<\u0000\u0000K\f\u0001\u0000\u0000\u0000LM\u0005<\u0000\u0000MN\u0005"+ + "=\u0000\u0000N\u000e\u0001\u0000\u0000\u0000OP\u0005>\u0000\u0000P\u0010"+ + "\u0001\u0000\u0000\u0000QR\u0005>\u0000\u0000RS\u0005=\u0000\u0000S\u0012"+ + "\u0001\u0000\u0000\u0000TU\u0005(\u0000\u0000U\u0014\u0001\u0000\u0000"+ + "\u0000VW\u0005)\u0000\u0000W\u0016\u0001\u0000\u0000\u0000XY\u0005{\u0000"+ + "\u0000Y\u0018\u0001\u0000\u0000\u0000Z[\u0005}\u0000\u0000[\u001a\u0001"+ + "\u0000\u0000\u0000\\^\u0003#\u0011\u0000]\\\u0001\u0000\u0000\u0000^_"+ + "\u0001\u0000\u0000\u0000_]\u0001\u0000\u0000\u0000_`\u0001\u0000\u0000"+ + "\u0000`\u001c\u0001\u0000\u0000\u0000ae\u0005\"\u0000\u0000bd\u0003\'"+ + "\u0013\u0000cb\u0001\u0000\u0000\u0000dg\u0001\u0000\u0000\u0000ec\u0001"+ + "\u0000\u0000\u0000ef\u0001\u0000\u0000\u0000fh\u0001\u0000\u0000\u0000"+ + "ge\u0001\u0000\u0000\u0000hi\u0005\"\u0000\u0000i\u001e\u0001\u0000\u0000"+ + "\u0000jk\u0003!\u0010\u0000k \u0001\u0000\u0000\u0000lm\u0005*\u0000\u0000"+ + "m\"\u0001\u0000\u0000\u0000np\u0003!\u0010\u0000on\u0001\u0000\u0000\u0000"+ + "ps\u0001\u0000\u0000\u0000qo\u0001\u0000\u0000\u0000qr\u0001\u0000\u0000"+ + "\u0000rt\u0001\u0000\u0000\u0000sq\u0001\u0000\u0000\u0000tx\u0003%\u0012"+ + "\u0000uw\u0003!\u0010\u0000vu\u0001\u0000\u0000\u0000wz\u0001\u0000\u0000"+ + "\u0000xv\u0001\u0000\u0000\u0000xy\u0001\u0000\u0000\u0000y\u0082\u0001"+ + "\u0000\u0000\u0000zx\u0001\u0000\u0000\u0000{}\u0003!\u0010\u0000|~\u0003"+ + "!\u0010\u0000}|\u0001\u0000\u0000\u0000~\u007f\u0001\u0000\u0000\u0000"+ + "\u007f}\u0001\u0000\u0000\u0000\u007f\u0080\u0001\u0000\u0000\u0000\u0080"+ + 
"\u0082\u0001\u0000\u0000\u0000\u0081q\u0001\u0000\u0000\u0000\u0081{\u0001"+ + "\u0000\u0000\u0000\u0082$\u0001\u0000\u0000\u0000\u0083\u008e\u0003+\u0015"+ + "\u0000\u0084\u008e\u0003/\u0017\u0000\u0085\u008e\u00033\u0019\u0000\u0086"+ + "\u008a\u0005\\\u0000\u0000\u0087\u008b\u0003\u0003\u0001\u0000\u0088\u008b"+ + "\u0003\u0005\u0002\u0000\u0089\u008b\u0003\u0007\u0003\u0000\u008a\u0087"+ + "\u0001\u0000\u0000\u0000\u008a\u0088\u0001\u0000\u0000\u0000\u008a\u0089"+ + "\u0001\u0000\u0000\u0000\u008b\u008e\u0001\u0000\u0000\u0000\u008c\u008e"+ + "\u0003-\u0016\u0000\u008d\u0083\u0001\u0000\u0000\u0000\u008d\u0084\u0001"+ + "\u0000\u0000\u0000\u008d\u0085\u0001\u0000\u0000\u0000\u008d\u0086\u0001"+ + "\u0000\u0000\u0000\u008d\u008c\u0001\u0000\u0000\u0000\u008e&\u0001\u0000"+ + "\u0000\u0000\u008f\u0094\u0003+\u0015\u0000\u0090\u0094\u00033\u0019\u0000"+ + "\u0091\u0094\u00031\u0018\u0000\u0092\u0094\b\u0006\u0000\u0000\u0093"+ + "\u008f\u0001\u0000\u0000\u0000\u0093\u0090\u0001\u0000\u0000\u0000\u0093"+ + "\u0091\u0001\u0000\u0000\u0000\u0093\u0092\u0001\u0000\u0000\u0000\u0094"+ + "(\u0001\u0000\u0000\u0000\u0095\u0096\u0007\u0007\u0000\u0000\u0096*\u0001"+ + "\u0000\u0000\u0000\u0097\u0098\u0005\\\u0000\u0000\u0098\u009e\u0007\u0004"+ + "\u0000\u0000\u0099\u009a\u0005\\\u0000\u0000\u009a\u009e\u0007\u0005\u0000"+ + "\u0000\u009b\u009c\u0005\\\u0000\u0000\u009c\u009e\u0007\u0001\u0000\u0000"+ + "\u009d\u0097\u0001\u0000\u0000\u0000\u009d\u0099\u0001\u0000\u0000\u0000"+ + "\u009d\u009b\u0001\u0000\u0000\u0000\u009e,\u0001\u0000\u0000\u0000\u009f"+ + "\u00a0\b\b\u0000\u0000\u00a0.\u0001\u0000\u0000\u0000\u00a1\u00a2\u0005"+ + "\\\u0000\u0000\u00a2\u00a3\u0007\t\u0000\u0000\u00a30\u0001\u0000\u0000"+ + "\u0000\u00a4\u00a5\u0005\\\u0000\u0000\u00a5\u00a6\u0005\"\u0000\u0000"+ + "\u00a62\u0001\u0000\u0000\u0000\u00a7\u00a8\u0005\\\u0000\u0000\u00a8"+ + "\u00a9\u00035\u001a\u0000\u00a94\u0001\u0000\u0000\u0000\u00aa\u00ab\u0007"+ + 
"\n\u0000\u0000\u00ab\u00ac\u00037\u001b\u0000\u00ac\u00ad\u00037\u001b"+ + "\u0000\u00ad\u00ae\u00037\u001b\u0000\u00ae\u00af\u00037\u001b\u0000\u00af"+ + "6\u0001\u0000\u0000\u0000\u00b0\u00b1\u0007\u000b\u0000\u0000\u00b18\u0001"+ + "\u0000\u0000\u0000\u000b\u0000_eqx\u007f\u0081\u008a\u008d\u0093\u009d"+ + "\u0001\u0006\u0000\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseListener.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseListener.java index bce2044fa8175..505569dbde58d 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseListener.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseListener.java @@ -71,16 +71,6 @@ interface KqlBaseListener extends ParseTreeListener { * @param ctx the parse tree */ void exitSimpleQuery(KqlBaseParser.SimpleQueryContext ctx); - /** - * Enter a parse tree produced by {@link KqlBaseParser#expression}. - * @param ctx the parse tree - */ - void enterExpression(KqlBaseParser.ExpressionContext ctx); - /** - * Exit a parse tree produced by {@link KqlBaseParser#expression}. - * @param ctx the parse tree - */ - void exitExpression(KqlBaseParser.ExpressionContext ctx); /** * Enter a parse tree produced by {@link KqlBaseParser#nestedQuery}. * @param ctx the parse tree @@ -92,45 +82,35 @@ interface KqlBaseListener extends ParseTreeListener { */ void exitNestedQuery(KqlBaseParser.NestedQueryContext ctx); /** - * Enter a parse tree produced by {@link KqlBaseParser#parenthesizedQuery}. - * @param ctx the parse tree - */ - void enterParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx); - /** - * Exit a parse tree produced by {@link KqlBaseParser#parenthesizedQuery}. 
- * @param ctx the parse tree - */ - void exitParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx); - /** - * Enter a parse tree produced by {@link KqlBaseParser#fieldRangeQuery}. + * Enter a parse tree produced by {@link KqlBaseParser#matchAllQuery}. * @param ctx the parse tree */ - void enterFieldRangeQuery(KqlBaseParser.FieldRangeQueryContext ctx); + void enterMatchAllQuery(KqlBaseParser.MatchAllQueryContext ctx); /** - * Exit a parse tree produced by {@link KqlBaseParser#fieldRangeQuery}. + * Exit a parse tree produced by {@link KqlBaseParser#matchAllQuery}. * @param ctx the parse tree */ - void exitFieldRangeQuery(KqlBaseParser.FieldRangeQueryContext ctx); + void exitMatchAllQuery(KqlBaseParser.MatchAllQueryContext ctx); /** - * Enter a parse tree produced by {@link KqlBaseParser#fieldTermQuery}. + * Enter a parse tree produced by {@link KqlBaseParser#parenthesizedQuery}. * @param ctx the parse tree */ - void enterFieldTermQuery(KqlBaseParser.FieldTermQueryContext ctx); + void enterParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx); /** - * Exit a parse tree produced by {@link KqlBaseParser#fieldTermQuery}. + * Exit a parse tree produced by {@link KqlBaseParser#parenthesizedQuery}. * @param ctx the parse tree */ - void exitFieldTermQuery(KqlBaseParser.FieldTermQueryContext ctx); + void exitParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx); /** - * Enter a parse tree produced by {@link KqlBaseParser#fieldName}. + * Enter a parse tree produced by {@link KqlBaseParser#rangeQuery}. * @param ctx the parse tree */ - void enterFieldName(KqlBaseParser.FieldNameContext ctx); + void enterRangeQuery(KqlBaseParser.RangeQueryContext ctx); /** - * Exit a parse tree produced by {@link KqlBaseParser#fieldName}. + * Exit a parse tree produced by {@link KqlBaseParser#rangeQuery}. 
* @param ctx the parse tree */ - void exitFieldName(KqlBaseParser.FieldNameContext ctx); + void exitRangeQuery(KqlBaseParser.RangeQueryContext ctx); /** * Enter a parse tree produced by {@link KqlBaseParser#rangeQueryValue}. * @param ctx the parse tree @@ -142,53 +122,53 @@ interface KqlBaseListener extends ParseTreeListener { */ void exitRangeQueryValue(KqlBaseParser.RangeQueryValueContext ctx); /** - * Enter a parse tree produced by {@link KqlBaseParser#termQueryValue}. + * Enter a parse tree produced by {@link KqlBaseParser#existsQuery}. * @param ctx the parse tree */ - void enterTermQueryValue(KqlBaseParser.TermQueryValueContext ctx); + void enterExistsQuery(KqlBaseParser.ExistsQueryContext ctx); /** - * Exit a parse tree produced by {@link KqlBaseParser#termQueryValue}. + * Exit a parse tree produced by {@link KqlBaseParser#existsQuery}. * @param ctx the parse tree */ - void exitTermQueryValue(KqlBaseParser.TermQueryValueContext ctx); + void exitExistsQuery(KqlBaseParser.ExistsQueryContext ctx); /** - * Enter a parse tree produced by {@link KqlBaseParser#groupingTermExpression}. + * Enter a parse tree produced by {@link KqlBaseParser#fieldQuery}. * @param ctx the parse tree */ - void enterGroupingTermExpression(KqlBaseParser.GroupingTermExpressionContext ctx); + void enterFieldQuery(KqlBaseParser.FieldQueryContext ctx); /** - * Exit a parse tree produced by {@link KqlBaseParser#groupingTermExpression}. + * Exit a parse tree produced by {@link KqlBaseParser#fieldQuery}. * @param ctx the parse tree */ - void exitGroupingTermExpression(KqlBaseParser.GroupingTermExpressionContext ctx); + void exitFieldQuery(KqlBaseParser.FieldQueryContext ctx); /** - * Enter a parse tree produced by {@link KqlBaseParser#unquotedLiteralExpression}. + * Enter a parse tree produced by {@link KqlBaseParser#fieldLessQuery}. 
* @param ctx the parse tree */ - void enterUnquotedLiteralExpression(KqlBaseParser.UnquotedLiteralExpressionContext ctx); + void enterFieldLessQuery(KqlBaseParser.FieldLessQueryContext ctx); /** - * Exit a parse tree produced by {@link KqlBaseParser#unquotedLiteralExpression}. + * Exit a parse tree produced by {@link KqlBaseParser#fieldLessQuery}. * @param ctx the parse tree */ - void exitUnquotedLiteralExpression(KqlBaseParser.UnquotedLiteralExpressionContext ctx); + void exitFieldLessQuery(KqlBaseParser.FieldLessQueryContext ctx); /** - * Enter a parse tree produced by {@link KqlBaseParser#quotedStringExpression}. + * Enter a parse tree produced by {@link KqlBaseParser#fieldQueryValue}. * @param ctx the parse tree */ - void enterQuotedStringExpression(KqlBaseParser.QuotedStringExpressionContext ctx); + void enterFieldQueryValue(KqlBaseParser.FieldQueryValueContext ctx); /** - * Exit a parse tree produced by {@link KqlBaseParser#quotedStringExpression}. + * Exit a parse tree produced by {@link KqlBaseParser#fieldQueryValue}. * @param ctx the parse tree */ - void exitQuotedStringExpression(KqlBaseParser.QuotedStringExpressionContext ctx); + void exitFieldQueryValue(KqlBaseParser.FieldQueryValueContext ctx); /** - * Enter a parse tree produced by {@link KqlBaseParser#wildcardExpression}. + * Enter a parse tree produced by {@link KqlBaseParser#fieldName}. * @param ctx the parse tree */ - void enterWildcardExpression(KqlBaseParser.WildcardExpressionContext ctx); + void enterFieldName(KqlBaseParser.FieldNameContext ctx); /** - * Exit a parse tree produced by {@link KqlBaseParser#wildcardExpression}. + * Exit a parse tree produced by {@link KqlBaseParser#fieldName}. 
* @param ctx the parse tree */ - void exitWildcardExpression(KqlBaseParser.WildcardExpressionContext ctx); + void exitFieldName(KqlBaseParser.FieldNameContext ctx); } diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java index 3bd9cc4104d2c..3ee44e389a371 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java @@ -25,37 +25,35 @@ class KqlBaseParser extends Parser { protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int - DEFAULT_SKIP=1, AND=2, OR=3, NOT=4, COLON=5, OP_COMPARE=6, LEFT_PARENTHESIS=7, - RIGHT_PARENTHESIS=8, LEFT_CURLY_BRACKET=9, RIGHT_CURLY_BRACKET=10, UNQUOTED_LITERAL=11, - QUOTED_STRING=12, WILDCARD=13; + DEFAULT_SKIP=1, AND=2, OR=3, NOT=4, COLON=5, OP_LESS=6, OP_LESS_EQ=7, + OP_MORE=8, OP_MORE_EQ=9, LEFT_PARENTHESIS=10, RIGHT_PARENTHESIS=11, LEFT_CURLY_BRACKET=12, + RIGHT_CURLY_BRACKET=13, UNQUOTED_LITERAL=14, QUOTED_STRING=15, WILDCARD=16; public static final int - RULE_topLevelQuery = 0, RULE_query = 1, RULE_simpleQuery = 2, RULE_expression = 3, - RULE_nestedQuery = 4, RULE_parenthesizedQuery = 5, RULE_fieldRangeQuery = 6, - RULE_fieldTermQuery = 7, RULE_fieldName = 8, RULE_rangeQueryValue = 9, - RULE_termQueryValue = 10, RULE_groupingTermExpression = 11, RULE_unquotedLiteralExpression = 12, - RULE_quotedStringExpression = 13, RULE_wildcardExpression = 14; + RULE_topLevelQuery = 0, RULE_query = 1, RULE_simpleQuery = 2, RULE_nestedQuery = 3, + RULE_matchAllQuery = 4, RULE_parenthesizedQuery = 5, RULE_rangeQuery = 6, + RULE_rangeQueryValue = 7, RULE_existsQuery = 8, RULE_fieldQuery = 9, RULE_fieldLessQuery = 10, + RULE_fieldQueryValue = 11, RULE_fieldName = 12; private static String[] makeRuleNames() { return new 
String[] { - "topLevelQuery", "query", "simpleQuery", "expression", "nestedQuery", - "parenthesizedQuery", "fieldRangeQuery", "fieldTermQuery", "fieldName", - "rangeQueryValue", "termQueryValue", "groupingTermExpression", "unquotedLiteralExpression", - "quotedStringExpression", "wildcardExpression" + "topLevelQuery", "query", "simpleQuery", "nestedQuery", "matchAllQuery", + "parenthesizedQuery", "rangeQuery", "rangeQueryValue", "existsQuery", + "fieldQuery", "fieldLessQuery", "fieldQueryValue", "fieldName" }; } public static final String[] ruleNames = makeRuleNames(); private static String[] makeLiteralNames() { return new String[] { - null, null, "'and'", "'or'", "'not'", "':'", null, "'('", "')'", "'{'", - "'}'" + null, null, "'and'", "'or'", "'not'", "':'", "'<'", "'<='", "'>'", "'>='", + "'('", "')'", "'{'", "'}'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); private static String[] makeSymbolicNames() { return new String[] { - null, "DEFAULT_SKIP", "AND", "OR", "NOT", "COLON", "OP_COMPARE", "LEFT_PARENTHESIS", - "RIGHT_PARENTHESIS", "LEFT_CURLY_BRACKET", "RIGHT_CURLY_BRACKET", "UNQUOTED_LITERAL", - "QUOTED_STRING", "WILDCARD" + null, "DEFAULT_SKIP", "AND", "OR", "NOT", "COLON", "OP_LESS", "OP_LESS_EQ", + "OP_MORE", "OP_MORE_EQ", "LEFT_PARENTHESIS", "RIGHT_PARENTHESIS", "LEFT_CURLY_BRACKET", + "RIGHT_CURLY_BRACKET", "UNQUOTED_LITERAL", "QUOTED_STRING", "WILDCARD" }; } private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); @@ -141,17 +139,17 @@ public final TopLevelQueryContext topLevelQuery() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(31); + setState(27); _errHandler.sync(this); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 14480L) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 115740L) != 0)) { { - setState(30); + setState(26); query(0); } } - setState(33); + setState(29); match(EOF); } } @@ -202,6 +200,7 @@ public T accept(ParseTreeVisitor visitor) { } 
@SuppressWarnings("CheckReturnValue") public static class BooleanQueryContext extends QueryContext { + public Token operator; public List query() { return getRuleContexts(QueryContext.class); } @@ -262,38 +261,33 @@ private QueryContext query(int _p) throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(39); + setState(35); _errHandler.sync(this); - switch (_input.LA(1)) { - case NOT: + switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) { + case 1: { _localctx = new NotQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(36); + setState(32); match(NOT); - setState(37); + setState(33); ((NotQueryContext)_localctx).subQuery = simpleQuery(); } break; - case LEFT_PARENTHESIS: - case UNQUOTED_LITERAL: - case QUOTED_STRING: - case WILDCARD: + case 2: { _localctx = new DefaultQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(38); + setState(34); simpleQuery(); } break; - default: - throw new NoViableAltException(this); } _ctx.stop = _input.LT(-1); - setState(46); + setState(42); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,2,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -304,24 +298,25 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new BooleanQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(41); + setState(37); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(42); + setState(38); + ((BooleanQueryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==AND || _la==OR) ) { - _errHandler.recoverInline(this); + ((BooleanQueryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { if ( _input.LA(1)==Token.EOF ) matchedEOF = true; _errHandler.reportMatch(this); consume(); } - setState(43); - query(4); + setState(39); + 
query(3); } } } - setState(48); + setState(44); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,2,_ctx); } @@ -343,12 +338,24 @@ public static class SimpleQueryContext extends ParserRuleContext { public NestedQueryContext nestedQuery() { return getRuleContext(NestedQueryContext.class,0); } - public ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); - } public ParenthesizedQueryContext parenthesizedQuery() { return getRuleContext(ParenthesizedQueryContext.class,0); } + public MatchAllQueryContext matchAllQuery() { + return getRuleContext(MatchAllQueryContext.class,0); + } + public ExistsQueryContext existsQuery() { + return getRuleContext(ExistsQueryContext.class,0); + } + public RangeQueryContext rangeQuery() { + return getRuleContext(RangeQueryContext.class,0); + } + public FieldQueryContext fieldQuery() { + return getRuleContext(FieldQueryContext.class,0); + } + public FieldLessQueryContext fieldLessQuery() { + return getRuleContext(FieldLessQueryContext.class,0); + } public SimpleQueryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @@ -378,83 +385,50 @@ public final SimpleQueryContext simpleQuery() throws RecognitionException { case 1: enterOuterAlt(_localctx, 1); { - setState(49); + setState(45); nestedQuery(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(50); - expression(); + setState(46); + parenthesizedQuery(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(51); - parenthesizedQuery(); + setState(47); + matchAllQuery(); } break; - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - @SuppressWarnings("CheckReturnValue") - public static class ExpressionContext extends ParserRuleContext { - public FieldTermQueryContext fieldTermQuery() { - return getRuleContext(FieldTermQueryContext.class,0); 
- } - public FieldRangeQueryContext fieldRangeQuery() { - return getRuleContext(FieldRangeQueryContext.class,0); - } - public ExpressionContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_expression; } - @Override - public void enterRule(ParseTreeListener listener) { - if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterExpression(this); - } - @Override - public void exitRule(ParseTreeListener listener) { - if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitExpression(this); - } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitExpression(this); - else return visitor.visitChildren(this); - } - } - - public final ExpressionContext expression() throws RecognitionException { - ExpressionContext _localctx = new ExpressionContext(_ctx, getState()); - enterRule(_localctx, 6, RULE_expression); - try { - setState(56); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,4,_ctx) ) { - case 1: - enterOuterAlt(_localctx, 1); + case 4: + enterOuterAlt(_localctx, 4); { - setState(54); - fieldTermQuery(); + setState(48); + existsQuery(); } break; - case 2: - enterOuterAlt(_localctx, 2); + case 5: + enterOuterAlt(_localctx, 5); { - setState(55); - fieldRangeQuery(); + setState(49); + rangeQuery(); + } + break; + case 6: + enterOuterAlt(_localctx, 6); + { + setState(50); + fieldQuery(); + } + break; + case 7: + enterOuterAlt(_localctx, 7); + { + setState(51); + fieldLessQuery(); } break; } @@ -502,19 +476,19 @@ public T accept(ParseTreeVisitor visitor) { public final NestedQueryContext nestedQuery() throws RecognitionException { NestedQueryContext _localctx = new NestedQueryContext(_ctx, getState()); - enterRule(_localctx, 8, RULE_nestedQuery); + enterRule(_localctx, 6, RULE_nestedQuery); try { enterOuterAlt(_localctx, 1); { - 
setState(58); + setState(54); fieldName(); - setState(59); + setState(55); match(COLON); - setState(60); + setState(56); match(LEFT_CURLY_BRACKET); - setState(61); + setState(57); query(0); - setState(62); + setState(58); match(RIGHT_CURLY_BRACKET); } } @@ -530,43 +504,51 @@ public final NestedQueryContext nestedQuery() throws RecognitionException { } @SuppressWarnings("CheckReturnValue") - public static class ParenthesizedQueryContext extends ParserRuleContext { - public TerminalNode LEFT_PARENTHESIS() { return getToken(KqlBaseParser.LEFT_PARENTHESIS, 0); } - public QueryContext query() { - return getRuleContext(QueryContext.class,0); + public static class MatchAllQueryContext extends ParserRuleContext { + public List WILDCARD() { return getTokens(KqlBaseParser.WILDCARD); } + public TerminalNode WILDCARD(int i) { + return getToken(KqlBaseParser.WILDCARD, i); } - public TerminalNode RIGHT_PARENTHESIS() { return getToken(KqlBaseParser.RIGHT_PARENTHESIS, 0); } - public ParenthesizedQueryContext(ParserRuleContext parent, int invokingState) { + public TerminalNode COLON() { return getToken(KqlBaseParser.COLON, 0); } + public MatchAllQueryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } - @Override public int getRuleIndex() { return RULE_parenthesizedQuery; } + @Override public int getRuleIndex() { return RULE_matchAllQuery; } @Override public void enterRule(ParseTreeListener listener) { - if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterParenthesizedQuery(this); + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterMatchAllQuery(this); } @Override public void exitRule(ParseTreeListener listener) { - if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitParenthesizedQuery(this); + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitMatchAllQuery(this); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof 
KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitParenthesizedQuery(this); + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitMatchAllQuery(this); else return visitor.visitChildren(this); } } - public final ParenthesizedQueryContext parenthesizedQuery() throws RecognitionException { - ParenthesizedQueryContext _localctx = new ParenthesizedQueryContext(_ctx, getState()); - enterRule(_localctx, 10, RULE_parenthesizedQuery); + public final MatchAllQueryContext matchAllQuery() throws RecognitionException { + MatchAllQueryContext _localctx = new MatchAllQueryContext(_ctx, getState()); + enterRule(_localctx, 8, RULE_matchAllQuery); try { enterOuterAlt(_localctx, 1); { + setState(62); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,4,_ctx) ) { + case 1: + { + setState(60); + match(WILDCARD); + setState(61); + match(COLON); + } + break; + } setState(64); - match(LEFT_PARENTHESIS); - setState(65); - query(0); - setState(66); - match(RIGHT_PARENTHESIS); + match(WILDCARD); } } catch (RecognitionException re) { @@ -581,46 +563,43 @@ public final ParenthesizedQueryContext parenthesizedQuery() throws RecognitionEx } @SuppressWarnings("CheckReturnValue") - public static class FieldRangeQueryContext extends ParserRuleContext { - public Token operator; - public FieldNameContext fieldName() { - return getRuleContext(FieldNameContext.class,0); - } - public RangeQueryValueContext rangeQueryValue() { - return getRuleContext(RangeQueryValueContext.class,0); + public static class ParenthesizedQueryContext extends ParserRuleContext { + public TerminalNode LEFT_PARENTHESIS() { return getToken(KqlBaseParser.LEFT_PARENTHESIS, 0); } + public QueryContext query() { + return getRuleContext(QueryContext.class,0); } - public TerminalNode OP_COMPARE() { return getToken(KqlBaseParser.OP_COMPARE, 0); } - public FieldRangeQueryContext(ParserRuleContext parent, int invokingState) { + public TerminalNode RIGHT_PARENTHESIS() { return 
getToken(KqlBaseParser.RIGHT_PARENTHESIS, 0); } + public ParenthesizedQueryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } - @Override public int getRuleIndex() { return RULE_fieldRangeQuery; } + @Override public int getRuleIndex() { return RULE_parenthesizedQuery; } @Override public void enterRule(ParseTreeListener listener) { - if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterFieldRangeQuery(this); + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterParenthesizedQuery(this); } @Override public void exitRule(ParseTreeListener listener) { - if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitFieldRangeQuery(this); + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitParenthesizedQuery(this); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitFieldRangeQuery(this); + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitParenthesizedQuery(this); else return visitor.visitChildren(this); } } - public final FieldRangeQueryContext fieldRangeQuery() throws RecognitionException { - FieldRangeQueryContext _localctx = new FieldRangeQueryContext(_ctx, getState()); - enterRule(_localctx, 12, RULE_fieldRangeQuery); + public final ParenthesizedQueryContext parenthesizedQuery() throws RecognitionException { + ParenthesizedQueryContext _localctx = new ParenthesizedQueryContext(_ctx, getState()); + enterRule(_localctx, 10, RULE_parenthesizedQuery); try { enterOuterAlt(_localctx, 1); { + setState(66); + match(LEFT_PARENTHESIS); + setState(67); + query(0); setState(68); - fieldName(); - setState(69); - ((FieldRangeQueryContext)_localctx).operator = match(OP_COMPARE); - setState(70); - rangeQueryValue(); + match(RIGHT_PARENTHESIS); } } catch (RecognitionException re) { @@ -635,53 +614,59 @@ public final FieldRangeQueryContext 
fieldRangeQuery() throws RecognitionExceptio } @SuppressWarnings("CheckReturnValue") - public static class FieldTermQueryContext extends ParserRuleContext { - public TermQueryValueContext termQueryValue() { - return getRuleContext(TermQueryValueContext.class,0); - } + public static class RangeQueryContext extends ParserRuleContext { + public Token operator; public FieldNameContext fieldName() { return getRuleContext(FieldNameContext.class,0); } - public TerminalNode COLON() { return getToken(KqlBaseParser.COLON, 0); } - public FieldTermQueryContext(ParserRuleContext parent, int invokingState) { + public RangeQueryValueContext rangeQueryValue() { + return getRuleContext(RangeQueryValueContext.class,0); + } + public TerminalNode OP_LESS() { return getToken(KqlBaseParser.OP_LESS, 0); } + public TerminalNode OP_LESS_EQ() { return getToken(KqlBaseParser.OP_LESS_EQ, 0); } + public TerminalNode OP_MORE() { return getToken(KqlBaseParser.OP_MORE, 0); } + public TerminalNode OP_MORE_EQ() { return getToken(KqlBaseParser.OP_MORE_EQ, 0); } + public RangeQueryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } - @Override public int getRuleIndex() { return RULE_fieldTermQuery; } + @Override public int getRuleIndex() { return RULE_rangeQuery; } @Override public void enterRule(ParseTreeListener listener) { - if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterFieldTermQuery(this); + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterRangeQuery(this); } @Override public void exitRule(ParseTreeListener listener) { - if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitFieldTermQuery(this); + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitRangeQuery(this); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitFieldTermQuery(this); + if ( visitor instanceof KqlBaseVisitor 
) return ((KqlBaseVisitor)visitor).visitRangeQuery(this); else return visitor.visitChildren(this); } } - public final FieldTermQueryContext fieldTermQuery() throws RecognitionException { - FieldTermQueryContext _localctx = new FieldTermQueryContext(_ctx, getState()); - enterRule(_localctx, 14, RULE_fieldTermQuery); + public final RangeQueryContext rangeQuery() throws RecognitionException { + RangeQueryContext _localctx = new RangeQueryContext(_ctx, getState()); + enterRule(_localctx, 12, RULE_rangeQuery); + int _la; try { enterOuterAlt(_localctx, 1); { - setState(75); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,5,_ctx) ) { - case 1: - { - setState(72); - fieldName(); - setState(73); - match(COLON); - } - break; + setState(70); + fieldName(); + setState(71); + ((RangeQueryContext)_localctx).operator = _input.LT(1); + _la = _input.LA(1); + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 960L) != 0)) ) { + ((RangeQueryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); + consume(); } - setState(77); - termQueryValue(); + setState(72); + rangeQueryValue(); } } catch (RecognitionException re) { @@ -696,61 +681,83 @@ public final FieldTermQueryContext fieldTermQuery() throws RecognitionException } @SuppressWarnings("CheckReturnValue") - public static class FieldNameContext extends ParserRuleContext { - public WildcardExpressionContext wildcardExpression() { - return getRuleContext(WildcardExpressionContext.class,0); - } - public UnquotedLiteralExpressionContext unquotedLiteralExpression() { - return getRuleContext(UnquotedLiteralExpressionContext.class,0); + public static class RangeQueryValueContext extends ParserRuleContext { + public List UNQUOTED_LITERAL() { return getTokens(KqlBaseParser.UNQUOTED_LITERAL); } + public TerminalNode UNQUOTED_LITERAL(int i) { + return getToken(KqlBaseParser.UNQUOTED_LITERAL, i); } - public 
QuotedStringExpressionContext quotedStringExpression() { - return getRuleContext(QuotedStringExpressionContext.class,0); + public List WILDCARD() { return getTokens(KqlBaseParser.WILDCARD); } + public TerminalNode WILDCARD(int i) { + return getToken(KqlBaseParser.WILDCARD, i); } - public FieldNameContext(ParserRuleContext parent, int invokingState) { + public TerminalNode QUOTED_STRING() { return getToken(KqlBaseParser.QUOTED_STRING, 0); } + public RangeQueryValueContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } - @Override public int getRuleIndex() { return RULE_fieldName; } + @Override public int getRuleIndex() { return RULE_rangeQueryValue; } @Override public void enterRule(ParseTreeListener listener) { - if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterFieldName(this); + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterRangeQueryValue(this); } @Override public void exitRule(ParseTreeListener listener) { - if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitFieldName(this); + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitRangeQueryValue(this); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitFieldName(this); + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitRangeQueryValue(this); else return visitor.visitChildren(this); } } - public final FieldNameContext fieldName() throws RecognitionException { - FieldNameContext _localctx = new FieldNameContext(_ctx, getState()); - enterRule(_localctx, 16, RULE_fieldName); + public final RangeQueryValueContext rangeQueryValue() throws RecognitionException { + RangeQueryValueContext _localctx = new RangeQueryValueContext(_ctx, getState()); + enterRule(_localctx, 14, RULE_rangeQueryValue); + int _la; try { - setState(82); + int _alt; + setState(80); 
_errHandler.sync(this); switch (_input.LA(1)) { + case UNQUOTED_LITERAL: case WILDCARD: enterOuterAlt(_localctx, 1); { - setState(79); - wildcardExpression(); - } - break; - case UNQUOTED_LITERAL: - enterOuterAlt(_localctx, 2); - { - setState(80); - unquotedLiteralExpression(); + setState(75); + _errHandler.sync(this); + _alt = 1; + do { + switch (_alt) { + case 1: + { + { + setState(74); + _la = _input.LA(1); + if ( !(_la==UNQUOTED_LITERAL || _la==WILDCARD) ) { + _errHandler.recoverInline(this); + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); + consume(); + } + } + } + break; + default: + throw new NoViableAltException(this); + } + setState(77); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,5,_ctx); + } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); } break; case QUOTED_STRING: - enterOuterAlt(_localctx, 3); + enterOuterAlt(_localctx, 2); { - setState(81); - quotedStringExpression(); + setState(79); + match(QUOTED_STRING); } break; default: @@ -769,55 +776,43 @@ public final FieldNameContext fieldName() throws RecognitionException { } @SuppressWarnings("CheckReturnValue") - public static class RangeQueryValueContext extends ParserRuleContext { - public UnquotedLiteralExpressionContext unquotedLiteralExpression() { - return getRuleContext(UnquotedLiteralExpressionContext.class,0); - } - public QuotedStringExpressionContext quotedStringExpression() { - return getRuleContext(QuotedStringExpressionContext.class,0); + public static class ExistsQueryContext extends ParserRuleContext { + public FieldNameContext fieldName() { + return getRuleContext(FieldNameContext.class,0); } - public RangeQueryValueContext(ParserRuleContext parent, int invokingState) { + public TerminalNode COLON() { return getToken(KqlBaseParser.COLON, 0); } + public TerminalNode WILDCARD() { return getToken(KqlBaseParser.WILDCARD, 0); } + public ExistsQueryContext(ParserRuleContext parent, int 
invokingState) { super(parent, invokingState); } - @Override public int getRuleIndex() { return RULE_rangeQueryValue; } + @Override public int getRuleIndex() { return RULE_existsQuery; } @Override public void enterRule(ParseTreeListener listener) { - if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterRangeQueryValue(this); + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterExistsQuery(this); } @Override public void exitRule(ParseTreeListener listener) { - if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitRangeQueryValue(this); + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitExistsQuery(this); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitRangeQueryValue(this); + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitExistsQuery(this); else return visitor.visitChildren(this); } } - public final RangeQueryValueContext rangeQueryValue() throws RecognitionException { - RangeQueryValueContext _localctx = new RangeQueryValueContext(_ctx, getState()); - enterRule(_localctx, 18, RULE_rangeQueryValue); + public final ExistsQueryContext existsQuery() throws RecognitionException { + ExistsQueryContext _localctx = new ExistsQueryContext(_ctx, getState()); + enterRule(_localctx, 16, RULE_existsQuery); try { - setState(86); - _errHandler.sync(this); - switch (_input.LA(1)) { - case UNQUOTED_LITERAL: - enterOuterAlt(_localctx, 1); - { - setState(84); - unquotedLiteralExpression(); - } - break; - case QUOTED_STRING: - enterOuterAlt(_localctx, 2); - { - setState(85); - quotedStringExpression(); - } - break; - default: - throw new NoViableAltException(this); + enterOuterAlt(_localctx, 1); + { + setState(82); + fieldName(); + setState(83); + match(COLON); + setState(84); + match(WILDCARD); } } catch (RecognitionException re) { @@ -832,76 +827,68 @@ public final 
RangeQueryValueContext rangeQueryValue() throws RecognitionExceptio } @SuppressWarnings("CheckReturnValue") - public static class TermQueryValueContext extends ParserRuleContext { - public UnquotedLiteralExpressionContext termValue; - public WildcardExpressionContext wildcardExpression() { - return getRuleContext(WildcardExpressionContext.class,0); - } - public QuotedStringExpressionContext quotedStringExpression() { - return getRuleContext(QuotedStringExpressionContext.class,0); - } - public UnquotedLiteralExpressionContext unquotedLiteralExpression() { - return getRuleContext(UnquotedLiteralExpressionContext.class,0); + public static class FieldQueryContext extends ParserRuleContext { + public FieldNameContext fieldName() { + return getRuleContext(FieldNameContext.class,0); } - public GroupingTermExpressionContext groupingTermExpression() { - return getRuleContext(GroupingTermExpressionContext.class,0); + public TerminalNode COLON() { return getToken(KqlBaseParser.COLON, 0); } + public FieldQueryValueContext fieldQueryValue() { + return getRuleContext(FieldQueryValueContext.class,0); } - public TermQueryValueContext(ParserRuleContext parent, int invokingState) { + public TerminalNode LEFT_PARENTHESIS() { return getToken(KqlBaseParser.LEFT_PARENTHESIS, 0); } + public TerminalNode RIGHT_PARENTHESIS() { return getToken(KqlBaseParser.RIGHT_PARENTHESIS, 0); } + public FieldQueryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } - @Override public int getRuleIndex() { return RULE_termQueryValue; } + @Override public int getRuleIndex() { return RULE_fieldQuery; } @Override public void enterRule(ParseTreeListener listener) { - if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterTermQueryValue(this); + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterFieldQuery(this); } @Override public void exitRule(ParseTreeListener listener) { - if ( listener instanceof KqlBaseListener ) 
((KqlBaseListener)listener).exitTermQueryValue(this); + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitFieldQuery(this); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitTermQueryValue(this); + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitFieldQuery(this); else return visitor.visitChildren(this); } } - public final TermQueryValueContext termQueryValue() throws RecognitionException { - TermQueryValueContext _localctx = new TermQueryValueContext(_ctx, getState()); - enterRule(_localctx, 20, RULE_termQueryValue); + public final FieldQueryContext fieldQuery() throws RecognitionException { + FieldQueryContext _localctx = new FieldQueryContext(_ctx, getState()); + enterRule(_localctx, 18, RULE_fieldQuery); try { - setState(92); + setState(96); _errHandler.sync(this); - switch (_input.LA(1)) { - case WILDCARD: + switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { + case 1: enterOuterAlt(_localctx, 1); { + setState(86); + fieldName(); + setState(87); + match(COLON); setState(88); - wildcardExpression(); + fieldQueryValue(); } break; - case QUOTED_STRING: + case 2: enterOuterAlt(_localctx, 2); { - setState(89); - quotedStringExpression(); - } - break; - case UNQUOTED_LITERAL: - enterOuterAlt(_localctx, 3); - { setState(90); - ((TermQueryValueContext)_localctx).termValue = unquotedLiteralExpression(); - } - break; - case LEFT_PARENTHESIS: - enterOuterAlt(_localctx, 4); - { + fieldName(); setState(91); - groupingTermExpression(); + match(COLON); + setState(92); + match(LEFT_PARENTHESIS); + setState(93); + fieldQueryValue(); + setState(94); + match(RIGHT_PARENTHESIS); } break; - default: - throw new NoViableAltException(this); } } catch (RecognitionException re) { @@ -916,43 +903,63 @@ public final TermQueryValueContext termQueryValue() throws RecognitionException } @SuppressWarnings("CheckReturnValue") - public static 
class GroupingTermExpressionContext extends ParserRuleContext { - public TerminalNode LEFT_PARENTHESIS() { return getToken(KqlBaseParser.LEFT_PARENTHESIS, 0); } - public UnquotedLiteralExpressionContext unquotedLiteralExpression() { - return getRuleContext(UnquotedLiteralExpressionContext.class,0); + public static class FieldLessQueryContext extends ParserRuleContext { + public FieldQueryValueContext fieldQueryValue() { + return getRuleContext(FieldQueryValueContext.class,0); } + public TerminalNode LEFT_PARENTHESIS() { return getToken(KqlBaseParser.LEFT_PARENTHESIS, 0); } public TerminalNode RIGHT_PARENTHESIS() { return getToken(KqlBaseParser.RIGHT_PARENTHESIS, 0); } - public GroupingTermExpressionContext(ParserRuleContext parent, int invokingState) { + public FieldLessQueryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } - @Override public int getRuleIndex() { return RULE_groupingTermExpression; } + @Override public int getRuleIndex() { return RULE_fieldLessQuery; } @Override public void enterRule(ParseTreeListener listener) { - if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterGroupingTermExpression(this); + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterFieldLessQuery(this); } @Override public void exitRule(ParseTreeListener listener) { - if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitGroupingTermExpression(this); + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitFieldLessQuery(this); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitGroupingTermExpression(this); + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitFieldLessQuery(this); else return visitor.visitChildren(this); } } - public final GroupingTermExpressionContext groupingTermExpression() throws RecognitionException { - 
GroupingTermExpressionContext _localctx = new GroupingTermExpressionContext(_ctx, getState()); - enterRule(_localctx, 22, RULE_groupingTermExpression); + public final FieldLessQueryContext fieldLessQuery() throws RecognitionException { + FieldLessQueryContext _localctx = new FieldLessQueryContext(_ctx, getState()); + enterRule(_localctx, 20, RULE_fieldLessQuery); try { - enterOuterAlt(_localctx, 1); - { - setState(94); - match(LEFT_PARENTHESIS); - setState(95); - unquotedLiteralExpression(); - setState(96); - match(RIGHT_PARENTHESIS); + setState(103); + _errHandler.sync(this); + switch (_input.LA(1)) { + case AND: + case OR: + case NOT: + case UNQUOTED_LITERAL: + case QUOTED_STRING: + case WILDCARD: + enterOuterAlt(_localctx, 1); + { + setState(98); + fieldQueryValue(); + } + break; + case LEFT_PARENTHESIS: + enterOuterAlt(_localctx, 2); + { + setState(99); + match(LEFT_PARENTHESIS); + setState(100); + fieldQueryValue(); + setState(101); + match(RIGHT_PARENTHESIS); + } + break; + default: + throw new NoViableAltException(this); } } catch (RecognitionException re) { @@ -967,57 +974,171 @@ public final GroupingTermExpressionContext groupingTermExpression() throws Recog } @SuppressWarnings("CheckReturnValue") - public static class UnquotedLiteralExpressionContext extends ParserRuleContext { + public static class FieldQueryValueContext extends ParserRuleContext { + public TerminalNode AND() { return getToken(KqlBaseParser.AND, 0); } + public TerminalNode OR() { return getToken(KqlBaseParser.OR, 0); } public List UNQUOTED_LITERAL() { return getTokens(KqlBaseParser.UNQUOTED_LITERAL); } public TerminalNode UNQUOTED_LITERAL(int i) { return getToken(KqlBaseParser.UNQUOTED_LITERAL, i); } - public UnquotedLiteralExpressionContext(ParserRuleContext parent, int invokingState) { + public List WILDCARD() { return getTokens(KqlBaseParser.WILDCARD); } + public TerminalNode WILDCARD(int i) { + return getToken(KqlBaseParser.WILDCARD, i); + } + public TerminalNode NOT() { return 
getToken(KqlBaseParser.NOT, 0); } + public TerminalNode QUOTED_STRING() { return getToken(KqlBaseParser.QUOTED_STRING, 0); } + public FieldQueryValueContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } - @Override public int getRuleIndex() { return RULE_unquotedLiteralExpression; } + @Override public int getRuleIndex() { return RULE_fieldQueryValue; } @Override public void enterRule(ParseTreeListener listener) { - if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterUnquotedLiteralExpression(this); + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterFieldQueryValue(this); } @Override public void exitRule(ParseTreeListener listener) { - if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitUnquotedLiteralExpression(this); + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitFieldQueryValue(this); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitUnquotedLiteralExpression(this); + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitFieldQueryValue(this); else return visitor.visitChildren(this); } } - public final UnquotedLiteralExpressionContext unquotedLiteralExpression() throws RecognitionException { - UnquotedLiteralExpressionContext _localctx = new UnquotedLiteralExpressionContext(_ctx, getState()); - enterRule(_localctx, 24, RULE_unquotedLiteralExpression); + public final FieldQueryValueContext fieldQueryValue() throws RecognitionException { + FieldQueryValueContext _localctx = new FieldQueryValueContext(_ctx, getState()); + enterRule(_localctx, 22, RULE_fieldQueryValue); + int _la; try { int _alt; - enterOuterAlt(_localctx, 1); - { - setState(99); + setState(123); _errHandler.sync(this); - _alt = 1; - do { - switch (_alt) { - case 1: + switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { + case 1: + 
enterOuterAlt(_localctx, 1); + { + setState(106); + _errHandler.sync(this); + _la = _input.LA(1); + if (_la==AND || _la==OR) { { + setState(105); + _la = _input.LA(1); + if ( !(_la==AND || _la==OR) ) { + _errHandler.recoverInline(this); + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); + consume(); + } + } + } + + setState(109); + _errHandler.sync(this); + _alt = 1; + do { + switch (_alt) { + case 1: + { + { + setState(108); + _la = _input.LA(1); + if ( !(_la==UNQUOTED_LITERAL || _la==WILDCARD) ) { + _errHandler.recoverInline(this); + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); + consume(); + } + } + } + break; + default: + throw new NoViableAltException(this); + } + setState(111); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,10,_ctx); + } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); + } + break; + case 2: + enterOuterAlt(_localctx, 2); + { + setState(114); + _errHandler.sync(this); + _alt = 1; + do { + switch (_alt) { + case 1: + { + { + setState(113); + _la = _input.LA(1); + if ( !(_la==UNQUOTED_LITERAL || _la==WILDCARD) ) { + _errHandler.recoverInline(this); + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); + consume(); + } + } + } + break; + default: + throw new NoViableAltException(this); + } + setState(116); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,11,_ctx); + } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); + setState(119); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { + case 1: { - setState(98); - match(UNQUOTED_LITERAL); + setState(118); + _la = _input.LA(1); + if ( !(_la==AND || _la==OR) ) { + _errHandler.recoverInline(this); + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); + consume(); } } break; 
- default: - throw new NoViableAltException(this); } - setState(101); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,9,_ctx); - } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); + } + break; + case 3: + enterOuterAlt(_localctx, 3); + { + setState(121); + _la = _input.LA(1); + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 28L) != 0)) ) { + _errHandler.recoverInline(this); + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); + consume(); + } + } + break; + case 4: + enterOuterAlt(_localctx, 4); + { + setState(122); + match(QUOTED_STRING); + } + break; } } catch (RecognitionException re) { @@ -1032,78 +1153,76 @@ public final UnquotedLiteralExpressionContext unquotedLiteralExpression() throws } @SuppressWarnings("CheckReturnValue") - public static class QuotedStringExpressionContext extends ParserRuleContext { - public TerminalNode QUOTED_STRING() { return getToken(KqlBaseParser.QUOTED_STRING, 0); } - public QuotedStringExpressionContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_quotedStringExpression; } - @Override - public void enterRule(ParseTreeListener listener) { - if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterQuotedStringExpression(this); - } - @Override - public void exitRule(ParseTreeListener listener) { - if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitQuotedStringExpression(this); - } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitQuotedStringExpression(this); - else return visitor.visitChildren(this); - } - } - - public final QuotedStringExpressionContext quotedStringExpression() throws RecognitionException { - QuotedStringExpressionContext _localctx = new QuotedStringExpressionContext(_ctx, getState()); - 
enterRule(_localctx, 26, RULE_quotedStringExpression); - try { - enterOuterAlt(_localctx, 1); - { - setState(103); - match(QUOTED_STRING); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); + public static class FieldNameContext extends ParserRuleContext { + public Token value; + public List UNQUOTED_LITERAL() { return getTokens(KqlBaseParser.UNQUOTED_LITERAL); } + public TerminalNode UNQUOTED_LITERAL(int i) { + return getToken(KqlBaseParser.UNQUOTED_LITERAL, i); } - return _localctx; - } - - @SuppressWarnings("CheckReturnValue") - public static class WildcardExpressionContext extends ParserRuleContext { + public TerminalNode QUOTED_STRING() { return getToken(KqlBaseParser.QUOTED_STRING, 0); } public TerminalNode WILDCARD() { return getToken(KqlBaseParser.WILDCARD, 0); } - public WildcardExpressionContext(ParserRuleContext parent, int invokingState) { + public FieldNameContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } - @Override public int getRuleIndex() { return RULE_wildcardExpression; } + @Override public int getRuleIndex() { return RULE_fieldName; } @Override public void enterRule(ParseTreeListener listener) { - if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterWildcardExpression(this); + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterFieldName(this); } @Override public void exitRule(ParseTreeListener listener) { - if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitWildcardExpression(this); + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitFieldName(this); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitWildcardExpression(this); + if ( visitor instanceof KqlBaseVisitor ) return 
((KqlBaseVisitor)visitor).visitFieldName(this); else return visitor.visitChildren(this); } } - public final WildcardExpressionContext wildcardExpression() throws RecognitionException { - WildcardExpressionContext _localctx = new WildcardExpressionContext(_ctx, getState()); - enterRule(_localctx, 28, RULE_wildcardExpression); + public final FieldNameContext fieldName() throws RecognitionException { + FieldNameContext _localctx = new FieldNameContext(_ctx, getState()); + enterRule(_localctx, 24, RULE_fieldName); + int _la; try { - enterOuterAlt(_localctx, 1); - { - setState(105); - match(WILDCARD); + setState(132); + _errHandler.sync(this); + switch (_input.LA(1)) { + case UNQUOTED_LITERAL: + enterOuterAlt(_localctx, 1); + { + setState(126); + _errHandler.sync(this); + _la = _input.LA(1); + do { + { + { + setState(125); + ((FieldNameContext)_localctx).value = match(UNQUOTED_LITERAL); + } + } + setState(128); + _errHandler.sync(this); + _la = _input.LA(1); + } while ( _la==UNQUOTED_LITERAL ); + } + break; + case QUOTED_STRING: + enterOuterAlt(_localctx, 2); + { + setState(130); + ((FieldNameContext)_localctx).value = match(QUOTED_STRING); + } + break; + case WILDCARD: + enterOuterAlt(_localctx, 3); + { + setState(131); + ((FieldNameContext)_localctx).value = match(WILDCARD); + } + break; + default: + throw new NoViableAltException(this); } } catch (RecognitionException re) { @@ -1133,65 +1252,86 @@ private boolean query_sempred(QueryContext _localctx, int predIndex) { } public static final String _serializedATN = - "\u0004\u0001\rl\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ - "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ - "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ - "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ - "\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0001\u0000\u0003\u0000"+ - " \b\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001"+ 
- "\u0001\u0001\u0003\u0001(\b\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0005\u0001-\b\u0001\n\u0001\f\u00010\t\u0001\u0001\u0002\u0001\u0002"+ - "\u0001\u0002\u0003\u00025\b\u0002\u0001\u0003\u0001\u0003\u0003\u0003"+ - "9\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+ - "\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006"+ - "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007"+ - "\u0003\u0007L\b\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b"+ - "\u0003\bS\b\b\u0001\t\u0001\t\u0003\tW\b\t\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0003\n]\b\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ - "\f\u0004\fd\b\f\u000b\f\f\fe\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001"+ - "\u000e\u0000\u0001\u0002\u000f\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ - "\u0012\u0014\u0016\u0018\u001a\u001c\u0000\u0001\u0001\u0000\u0002\u0003"+ - "j\u0000\u001f\u0001\u0000\u0000\u0000\u0002\'\u0001\u0000\u0000\u0000"+ - "\u00044\u0001\u0000\u0000\u0000\u00068\u0001\u0000\u0000\u0000\b:\u0001"+ - "\u0000\u0000\u0000\n@\u0001\u0000\u0000\u0000\fD\u0001\u0000\u0000\u0000"+ - "\u000eK\u0001\u0000\u0000\u0000\u0010R\u0001\u0000\u0000\u0000\u0012V"+ - "\u0001\u0000\u0000\u0000\u0014\\\u0001\u0000\u0000\u0000\u0016^\u0001"+ - "\u0000\u0000\u0000\u0018c\u0001\u0000\u0000\u0000\u001ag\u0001\u0000\u0000"+ - "\u0000\u001ci\u0001\u0000\u0000\u0000\u001e \u0003\u0002\u0001\u0000\u001f"+ - "\u001e\u0001\u0000\u0000\u0000\u001f \u0001\u0000\u0000\u0000 !\u0001"+ - "\u0000\u0000\u0000!\"\u0005\u0000\u0000\u0001\"\u0001\u0001\u0000\u0000"+ - "\u0000#$\u0006\u0001\uffff\uffff\u0000$%\u0005\u0004\u0000\u0000%(\u0003"+ - "\u0004\u0002\u0000&(\u0003\u0004\u0002\u0000\'#\u0001\u0000\u0000\u0000"+ - "\'&\u0001\u0000\u0000\u0000(.\u0001\u0000\u0000\u0000)*\n\u0003\u0000"+ - "\u0000*+\u0007\u0000\u0000\u0000+-\u0003\u0002\u0001\u0004,)\u0001\u0000"+ - "\u0000\u0000-0\u0001\u0000\u0000\u0000.,\u0001\u0000\u0000\u0000./\u0001"+ 
- "\u0000\u0000\u0000/\u0003\u0001\u0000\u0000\u00000.\u0001\u0000\u0000"+ - "\u000015\u0003\b\u0004\u000025\u0003\u0006\u0003\u000035\u0003\n\u0005"+ - "\u000041\u0001\u0000\u0000\u000042\u0001\u0000\u0000\u000043\u0001\u0000"+ - "\u0000\u00005\u0005\u0001\u0000\u0000\u000069\u0003\u000e\u0007\u0000"+ - "79\u0003\f\u0006\u000086\u0001\u0000\u0000\u000087\u0001\u0000\u0000\u0000"+ - "9\u0007\u0001\u0000\u0000\u0000:;\u0003\u0010\b\u0000;<\u0005\u0005\u0000"+ - "\u0000<=\u0005\t\u0000\u0000=>\u0003\u0002\u0001\u0000>?\u0005\n\u0000"+ - "\u0000?\t\u0001\u0000\u0000\u0000@A\u0005\u0007\u0000\u0000AB\u0003\u0002"+ - "\u0001\u0000BC\u0005\b\u0000\u0000C\u000b\u0001\u0000\u0000\u0000DE\u0003"+ - "\u0010\b\u0000EF\u0005\u0006\u0000\u0000FG\u0003\u0012\t\u0000G\r\u0001"+ - "\u0000\u0000\u0000HI\u0003\u0010\b\u0000IJ\u0005\u0005\u0000\u0000JL\u0001"+ - "\u0000\u0000\u0000KH\u0001\u0000\u0000\u0000KL\u0001\u0000\u0000\u0000"+ - "LM\u0001\u0000\u0000\u0000MN\u0003\u0014\n\u0000N\u000f\u0001\u0000\u0000"+ - "\u0000OS\u0003\u001c\u000e\u0000PS\u0003\u0018\f\u0000QS\u0003\u001a\r"+ - "\u0000RO\u0001\u0000\u0000\u0000RP\u0001\u0000\u0000\u0000RQ\u0001\u0000"+ - "\u0000\u0000S\u0011\u0001\u0000\u0000\u0000TW\u0003\u0018\f\u0000UW\u0003"+ - "\u001a\r\u0000VT\u0001\u0000\u0000\u0000VU\u0001\u0000\u0000\u0000W\u0013"+ - "\u0001\u0000\u0000\u0000X]\u0003\u001c\u000e\u0000Y]\u0003\u001a\r\u0000"+ - "Z]\u0003\u0018\f\u0000[]\u0003\u0016\u000b\u0000\\X\u0001\u0000\u0000"+ - "\u0000\\Y\u0001\u0000\u0000\u0000\\Z\u0001\u0000\u0000\u0000\\[\u0001"+ - "\u0000\u0000\u0000]\u0015\u0001\u0000\u0000\u0000^_\u0005\u0007\u0000"+ - "\u0000_`\u0003\u0018\f\u0000`a\u0005\b\u0000\u0000a\u0017\u0001\u0000"+ - "\u0000\u0000bd\u0005\u000b\u0000\u0000cb\u0001\u0000\u0000\u0000de\u0001"+ - "\u0000\u0000\u0000ec\u0001\u0000\u0000\u0000ef\u0001\u0000\u0000\u0000"+ - "f\u0019\u0001\u0000\u0000\u0000gh\u0005\f\u0000\u0000h\u001b\u0001\u0000"+ - 
"\u0000\u0000ij\u0005\r\u0000\u0000j\u001d\u0001\u0000\u0000\u0000\n\u001f"+ - "\'.48KRV\\e"; + "\u0004\u0001\u0010\u0087\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ + "\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004"+ + "\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007"+ + "\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b"+ + "\u0002\f\u0007\f\u0001\u0000\u0003\u0000\u001c\b\u0000\u0001\u0000\u0001"+ + "\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0003\u0001$\b"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001)\b\u0001\n\u0001"+ + "\f\u0001,\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0003\u00025\b\u0002\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001"+ + "\u0004\u0003\u0004?\b\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ + "\u0006\u0001\u0007\u0004\u0007L\b\u0007\u000b\u0007\f\u0007M\u0001\u0007"+ + "\u0003\u0007Q\b\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t"+ + "\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0003"+ + "\ta\b\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0003\nh\b\n\u0001\u000b"+ + "\u0003\u000bk\b\u000b\u0001\u000b\u0004\u000bn\b\u000b\u000b\u000b\f\u000b"+ + "o\u0001\u000b\u0004\u000bs\b\u000b\u000b\u000b\f\u000bt\u0001\u000b\u0003"+ + "\u000bx\b\u000b\u0001\u000b\u0001\u000b\u0003\u000b|\b\u000b\u0001\f\u0004"+ + "\f\u007f\b\f\u000b\f\f\f\u0080\u0001\f\u0001\f\u0003\f\u0085\b\f\u0001"+ + "\f\u0000\u0001\u0002\r\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012"+ + "\u0014\u0016\u0018\u0000\u0004\u0001\u0000\u0002\u0003\u0001\u0000\u0006"+ + "\t\u0002\u0000\u000e\u000e\u0010\u0010\u0001\u0000\u0002\u0004\u0091\u0000"+ + "\u001b\u0001\u0000\u0000\u0000\u0002#\u0001\u0000\u0000\u0000\u00044\u0001"+ + 
"\u0000\u0000\u0000\u00066\u0001\u0000\u0000\u0000\b>\u0001\u0000\u0000"+ + "\u0000\nB\u0001\u0000\u0000\u0000\fF\u0001\u0000\u0000\u0000\u000eP\u0001"+ + "\u0000\u0000\u0000\u0010R\u0001\u0000\u0000\u0000\u0012`\u0001\u0000\u0000"+ + "\u0000\u0014g\u0001\u0000\u0000\u0000\u0016{\u0001\u0000\u0000\u0000\u0018"+ + "\u0084\u0001\u0000\u0000\u0000\u001a\u001c\u0003\u0002\u0001\u0000\u001b"+ + "\u001a\u0001\u0000\u0000\u0000\u001b\u001c\u0001\u0000\u0000\u0000\u001c"+ + "\u001d\u0001\u0000\u0000\u0000\u001d\u001e\u0005\u0000\u0000\u0001\u001e"+ + "\u0001\u0001\u0000\u0000\u0000\u001f \u0006\u0001\uffff\uffff\u0000 !"+ + "\u0005\u0004\u0000\u0000!$\u0003\u0004\u0002\u0000\"$\u0003\u0004\u0002"+ + "\u0000#\u001f\u0001\u0000\u0000\u0000#\"\u0001\u0000\u0000\u0000$*\u0001"+ + "\u0000\u0000\u0000%&\n\u0003\u0000\u0000&\'\u0007\u0000\u0000\u0000\'"+ + ")\u0003\u0002\u0001\u0003(%\u0001\u0000\u0000\u0000),\u0001\u0000\u0000"+ + "\u0000*(\u0001\u0000\u0000\u0000*+\u0001\u0000\u0000\u0000+\u0003\u0001"+ + "\u0000\u0000\u0000,*\u0001\u0000\u0000\u0000-5\u0003\u0006\u0003\u0000"+ + ".5\u0003\n\u0005\u0000/5\u0003\b\u0004\u000005\u0003\u0010\b\u000015\u0003"+ + "\f\u0006\u000025\u0003\u0012\t\u000035\u0003\u0014\n\u00004-\u0001\u0000"+ + "\u0000\u00004.\u0001\u0000\u0000\u00004/\u0001\u0000\u0000\u000040\u0001"+ + "\u0000\u0000\u000041\u0001\u0000\u0000\u000042\u0001\u0000\u0000\u0000"+ + "43\u0001\u0000\u0000\u00005\u0005\u0001\u0000\u0000\u000067\u0003\u0018"+ + "\f\u000078\u0005\u0005\u0000\u000089\u0005\f\u0000\u00009:\u0003\u0002"+ + "\u0001\u0000:;\u0005\r\u0000\u0000;\u0007\u0001\u0000\u0000\u0000<=\u0005"+ + "\u0010\u0000\u0000=?\u0005\u0005\u0000\u0000><\u0001\u0000\u0000\u0000"+ + ">?\u0001\u0000\u0000\u0000?@\u0001\u0000\u0000\u0000@A\u0005\u0010\u0000"+ + "\u0000A\t\u0001\u0000\u0000\u0000BC\u0005\n\u0000\u0000CD\u0003\u0002"+ + "\u0001\u0000DE\u0005\u000b\u0000\u0000E\u000b\u0001\u0000\u0000\u0000"+ + 
"FG\u0003\u0018\f\u0000GH\u0007\u0001\u0000\u0000HI\u0003\u000e\u0007\u0000"+ + "I\r\u0001\u0000\u0000\u0000JL\u0007\u0002\u0000\u0000KJ\u0001\u0000\u0000"+ + "\u0000LM\u0001\u0000\u0000\u0000MK\u0001\u0000\u0000\u0000MN\u0001\u0000"+ + "\u0000\u0000NQ\u0001\u0000\u0000\u0000OQ\u0005\u000f\u0000\u0000PK\u0001"+ + "\u0000\u0000\u0000PO\u0001\u0000\u0000\u0000Q\u000f\u0001\u0000\u0000"+ + "\u0000RS\u0003\u0018\f\u0000ST\u0005\u0005\u0000\u0000TU\u0005\u0010\u0000"+ + "\u0000U\u0011\u0001\u0000\u0000\u0000VW\u0003\u0018\f\u0000WX\u0005\u0005"+ + "\u0000\u0000XY\u0003\u0016\u000b\u0000Ya\u0001\u0000\u0000\u0000Z[\u0003"+ + "\u0018\f\u0000[\\\u0005\u0005\u0000\u0000\\]\u0005\n\u0000\u0000]^\u0003"+ + "\u0016\u000b\u0000^_\u0005\u000b\u0000\u0000_a\u0001\u0000\u0000\u0000"+ + "`V\u0001\u0000\u0000\u0000`Z\u0001\u0000\u0000\u0000a\u0013\u0001\u0000"+ + "\u0000\u0000bh\u0003\u0016\u000b\u0000cd\u0005\n\u0000\u0000de\u0003\u0016"+ + "\u000b\u0000ef\u0005\u000b\u0000\u0000fh\u0001\u0000\u0000\u0000gb\u0001"+ + "\u0000\u0000\u0000gc\u0001\u0000\u0000\u0000h\u0015\u0001\u0000\u0000"+ + "\u0000ik\u0007\u0000\u0000\u0000ji\u0001\u0000\u0000\u0000jk\u0001\u0000"+ + "\u0000\u0000km\u0001\u0000\u0000\u0000ln\u0007\u0002\u0000\u0000ml\u0001"+ + "\u0000\u0000\u0000no\u0001\u0000\u0000\u0000om\u0001\u0000\u0000\u0000"+ + "op\u0001\u0000\u0000\u0000p|\u0001\u0000\u0000\u0000qs\u0007\u0002\u0000"+ + "\u0000rq\u0001\u0000\u0000\u0000st\u0001\u0000\u0000\u0000tr\u0001\u0000"+ + "\u0000\u0000tu\u0001\u0000\u0000\u0000uw\u0001\u0000\u0000\u0000vx\u0007"+ + "\u0000\u0000\u0000wv\u0001\u0000\u0000\u0000wx\u0001\u0000\u0000\u0000"+ + "x|\u0001\u0000\u0000\u0000y|\u0007\u0003\u0000\u0000z|\u0005\u000f\u0000"+ + "\u0000{j\u0001\u0000\u0000\u0000{r\u0001\u0000\u0000\u0000{y\u0001\u0000"+ + "\u0000\u0000{z\u0001\u0000\u0000\u0000|\u0017\u0001\u0000\u0000\u0000"+ + "}\u007f\u0005\u000e\u0000\u0000~}\u0001\u0000\u0000\u0000\u007f\u0080"+ + 
"\u0001\u0000\u0000\u0000\u0080~\u0001\u0000\u0000\u0000\u0080\u0081\u0001"+ + "\u0000\u0000\u0000\u0081\u0085\u0001\u0000\u0000\u0000\u0082\u0085\u0005"+ + "\u000f\u0000\u0000\u0083\u0085\u0005\u0010\u0000\u0000\u0084~\u0001\u0000"+ + "\u0000\u0000\u0084\u0082\u0001\u0000\u0000\u0000\u0084\u0083\u0001\u0000"+ + "\u0000\u0000\u0085\u0019\u0001\u0000\u0000\u0000\u0010\u001b#*4>MP`gj"+ + "otw{\u0080\u0084"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseVisitor.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseVisitor.java index 55fa21f0e899d..67253e4364190 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseVisitor.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseVisitor.java @@ -51,12 +51,6 @@ interface KqlBaseVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitSimpleQuery(KqlBaseParser.SimpleQueryContext ctx); - /** - * Visit a parse tree produced by {@link KqlBaseParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitExpression(KqlBaseParser.ExpressionContext ctx); /** * Visit a parse tree produced by {@link KqlBaseParser#nestedQuery}. * @param ctx the parse tree @@ -64,29 +58,23 @@ interface KqlBaseVisitor extends ParseTreeVisitor { */ T visitNestedQuery(KqlBaseParser.NestedQueryContext ctx); /** - * Visit a parse tree produced by {@link KqlBaseParser#parenthesizedQuery}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx); - /** - * Visit a parse tree produced by {@link KqlBaseParser#fieldRangeQuery}. + * Visit a parse tree produced by {@link KqlBaseParser#matchAllQuery}. 
* @param ctx the parse tree * @return the visitor result */ - T visitFieldRangeQuery(KqlBaseParser.FieldRangeQueryContext ctx); + T visitMatchAllQuery(KqlBaseParser.MatchAllQueryContext ctx); /** - * Visit a parse tree produced by {@link KqlBaseParser#fieldTermQuery}. + * Visit a parse tree produced by {@link KqlBaseParser#parenthesizedQuery}. * @param ctx the parse tree * @return the visitor result */ - T visitFieldTermQuery(KqlBaseParser.FieldTermQueryContext ctx); + T visitParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx); /** - * Visit a parse tree produced by {@link KqlBaseParser#fieldName}. + * Visit a parse tree produced by {@link KqlBaseParser#rangeQuery}. * @param ctx the parse tree * @return the visitor result */ - T visitFieldName(KqlBaseParser.FieldNameContext ctx); + T visitRangeQuery(KqlBaseParser.RangeQueryContext ctx); /** * Visit a parse tree produced by {@link KqlBaseParser#rangeQueryValue}. * @param ctx the parse tree @@ -94,33 +82,33 @@ interface KqlBaseVisitor extends ParseTreeVisitor { */ T visitRangeQueryValue(KqlBaseParser.RangeQueryValueContext ctx); /** - * Visit a parse tree produced by {@link KqlBaseParser#termQueryValue}. + * Visit a parse tree produced by {@link KqlBaseParser#existsQuery}. * @param ctx the parse tree * @return the visitor result */ - T visitTermQueryValue(KqlBaseParser.TermQueryValueContext ctx); + T visitExistsQuery(KqlBaseParser.ExistsQueryContext ctx); /** - * Visit a parse tree produced by {@link KqlBaseParser#groupingTermExpression}. + * Visit a parse tree produced by {@link KqlBaseParser#fieldQuery}. * @param ctx the parse tree * @return the visitor result */ - T visitGroupingTermExpression(KqlBaseParser.GroupingTermExpressionContext ctx); + T visitFieldQuery(KqlBaseParser.FieldQueryContext ctx); /** - * Visit a parse tree produced by {@link KqlBaseParser#unquotedLiteralExpression}. + * Visit a parse tree produced by {@link KqlBaseParser#fieldLessQuery}. 
* @param ctx the parse tree * @return the visitor result */ - T visitUnquotedLiteralExpression(KqlBaseParser.UnquotedLiteralExpressionContext ctx); + T visitFieldLessQuery(KqlBaseParser.FieldLessQueryContext ctx); /** - * Visit a parse tree produced by {@link KqlBaseParser#quotedStringExpression}. + * Visit a parse tree produced by {@link KqlBaseParser#fieldQueryValue}. * @param ctx the parse tree * @return the visitor result */ - T visitQuotedStringExpression(KqlBaseParser.QuotedStringExpressionContext ctx); + T visitFieldQueryValue(KqlBaseParser.FieldQueryValueContext ctx); /** - * Visit a parse tree produced by {@link KqlBaseParser#wildcardExpression}. + * Visit a parse tree produced by {@link KqlBaseParser#fieldName}. * @param ctx the parse tree * @return the visitor result */ - T visitWildcardExpression(KqlBaseParser.WildcardExpressionContext ctx); + T visitFieldName(KqlBaseParser.FieldNameContext ctx); } diff --git a/x-pack/plugin/kql/src/test/resources/supported-queries b/x-pack/plugin/kql/src/test/resources/supported-queries index d750f16149112..d9378cf9041c2 100644 --- a/x-pack/plugin/kql/src/test/resources/supported-queries +++ b/x-pack/plugin/kql/src/test/resources/supported-queries @@ -68,6 +68,15 @@ foo_field:foo AND (foo_field:foo bar OR foo bar) foo_field:foo AND (foo_field:foo bar OR foo bar) foo_field:foo OR (foo_field:foo bar OR foo bar) +foo:AND +foo:OR +foo:NOT +foo AND +foo OR +AND foo +OR foo +NOT + // Nested queries nested_field: { NOT foo } nested_field: { NOT foo bar } diff --git a/x-pack/plugin/kql/src/test/resources/unsupported-queries b/x-pack/plugin/kql/src/test/resources/unsupported-queries index 545b03576b331..97a26f16db141 100644 --- a/x-pack/plugin/kql/src/test/resources/unsupported-queries +++ b/x-pack/plugin/kql/src/test/resources/unsupported-queries @@ -16,14 +16,6 @@ NOT (foo_field:foo AND) foo_field:foo bar foo_field: "foo bar foo_field: foo bar" - -// Invalid boolean queries -foo AND -AND foo -foo OR -OR foo -NOT foo: - // 
Can't nest grouping terms parentheses foo_field:(foo (bar)) From 999274c003cc165c2634cbf94b9bad354239e22d Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 25 Oct 2024 12:43:28 +0200 Subject: [PATCH 415/449] Cleanup HotThreadsIT (example of test cleanup) (#115601) Just a quick example of how to save quite a few lines of code and make a test easier to reason about. --- .../action/admin/HotThreadsIT.java | 54 ++++++------------- 1 file changed, 15 insertions(+), 39 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java index 20c10c3d8c1f9..8c80cee58f46c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java @@ -10,7 +10,7 @@ import org.apache.logging.log4j.Level; import org.apache.lucene.util.Constants; -import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.cluster.node.hotthreads.NodeHotThreads; import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequest; import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsResponse; @@ -26,15 +26,14 @@ import org.hamcrest.Matcher; import java.util.Map; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.ExecutionException; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.CoreMatchers.equalTo; -import 
static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; @@ -44,11 +43,10 @@ public class HotThreadsIT extends ESIntegTestCase { - public void testHotThreadsDontFail() throws InterruptedException { + public void testHotThreadsDontFail() throws InterruptedException, ExecutionException { // This test just checks if nothing crashes or gets stuck etc. createIndex("test"); final int iters = scaledRandomIntBetween(2, 20); - final AtomicBoolean hasErrors = new AtomicBoolean(false); for (int i = 0; i < iters; i++) { final NodesHotThreadsRequest request = new NodesHotThreadsRequest( Strings.EMPTY_ARRAY, @@ -67,36 +65,7 @@ public void testHotThreadsDontFail() throws InterruptedException { randomBoolean() ) ); - final CountDownLatch latch = new CountDownLatch(1); - client().execute(TransportNodesHotThreadsAction.TYPE, request, new ActionListener<>() { - @Override - public void onResponse(NodesHotThreadsResponse nodeHotThreads) { - boolean success = false; - try { - assertThat(nodeHotThreads, notNullValue()); - Map nodesMap = nodeHotThreads.getNodesMap(); - assertThat(nodeHotThreads.failures(), empty()); - assertThat(nodesMap.size(), equalTo(cluster().size())); - for (NodeHotThreads ht : nodeHotThreads.getNodes()) { - assertNotNull(ht.getHotThreads()); - } - success = true; - } finally { - if (success == false) { - hasErrors.set(true); - } - latch.countDown(); - } - } - - @Override - public void onFailure(Exception e) { - logger.error("FAILED", e); - hasErrors.set(true); - latch.countDown(); - fail(); - } - }); + final ActionFuture hotThreadsFuture = client().execute(TransportNodesHotThreadsAction.TYPE, request); indexRandom( true, @@ -105,7 +74,7 @@ public void onFailure(Exception e) { prepareIndex("test").setId("3").setSource("field1", "value3") ); ensureSearchable(); - while (latch.getCount() > 0) { + while (hotThreadsFuture.isDone() == false) { 
assertHitCount( prepareSearch().setQuery(matchAllQuery()) .setPostFilter( @@ -115,8 +84,15 @@ public void onFailure(Exception e) { 3L ); } - safeAwait(latch); - assertThat(hasErrors.get(), is(false)); + assertResponse(hotThreadsFuture, nodeHotThreads -> { + assertThat(nodeHotThreads, notNullValue()); + Map nodesMap = nodeHotThreads.getNodesMap(); + assertThat(nodeHotThreads.failures(), empty()); + assertThat(nodesMap.size(), equalTo(cluster().size())); + for (NodeHotThreads ht : nodeHotThreads.getNodes()) { + assertNotNull(ht.getHotThreads()); + } + }); } } From e3523c159106255a96b8c00339f6c565b69c266a Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Fri, 25 Oct 2024 13:01:41 +0200 Subject: [PATCH 416/449] [DOCS] Fix link syntax in connectors-API-tutorial.asciidoc (#115635) --- docs/reference/connector/docs/connectors-API-tutorial.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/connector/docs/connectors-API-tutorial.asciidoc b/docs/reference/connector/docs/connectors-API-tutorial.asciidoc index 5275f82de1b1f..4118c564e4759 100644 --- a/docs/reference/connector/docs/connectors-API-tutorial.asciidoc +++ b/docs/reference/connector/docs/connectors-API-tutorial.asciidoc @@ -367,7 +367,7 @@ Refer to the individual connectors-references,connector references for these con ==== We're using a self-managed connector in this tutorial. To use these APIs with an Elastic managed connector, there's some extra setup for API keys. -Refer to native-connectors-manage-API-keys for details. +Refer to <> for details. ==== We're now ready to sync our PostgreSQL data to {es}. 
From 6e0bdbec0ade4af2b5d130aee6bf9e76a64f0e19 Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Fri, 25 Oct 2024 13:38:35 +0200 Subject: [PATCH 417/449] Fixed flaky test after PR that disallows functions to return TEXT (#115633) * Fixed flaky test after PR that disallows functions to return TEXT * Also ignore TEXT/KEYWORD combinations because they are now valid * Unmute the test --- muted-tests.yml | 3 --- .../elasticsearch/xpack/esql/analysis/AnalyzerTests.java | 8 +++++++- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 4869b669f6220..5c94c0aff60b6 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -282,9 +282,6 @@ tests: - class: org.elasticsearch.oldrepos.OldRepositoryAccessIT method: testOldRepoAccess issue: https://github.com/elastic/elasticsearch/issues/115631 -- class: org.elasticsearch.xpack.esql.analysis.AnalyzerTests - method: testMvAppendValidation - issue: https://github.com/elastic/elasticsearch/issues/115636 # Examples: # diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index c18f55a651408..b86935dcd03da 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -56,6 +56,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -1879,6 +1880,11 @@ public void testMvAppendValidation() { Supplier supplier = () -> randomInt(fields.length - 1); int first = supplier.get(); int second = randomValueOtherThan(first, supplier); + Function noText = (type) -> type.equals("text") ? 
"keyword" : type; + assumeTrue( + "Ignore tests with TEXT and KEYWORD combinations because they are now valid", + noText.apply(fields[first][0]).equals(noText.apply(fields[second][0])) == false + ); String signature = "mv_append(" + fields[first][0] + ", " + fields[second][0] + ")"; verifyUnsupported( @@ -1886,7 +1892,7 @@ public void testMvAppendValidation() { "second argument of [" + signature + "] must be [" - + fields[first][1] + + noText.apply(fields[first][1]) + "], found value [" + fields[second][0] + "] type [" From aabbc840a56e70b1d6b0f1221e3aaf9c4a54c08e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 25 Oct 2024 23:22:55 +1100 Subject: [PATCH 418/449] Mute org.elasticsearch.xpack.search.CrossClusterAsyncSearchIT testCCSClusterDetailsWhereAllShardsSkippedInCanMatch #115652 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 5c94c0aff60b6..f10c214be26bf 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -282,6 +282,9 @@ tests: - class: org.elasticsearch.oldrepos.OldRepositoryAccessIT method: testOldRepoAccess issue: https://github.com/elastic/elasticsearch/issues/115631 +- class: org.elasticsearch.xpack.search.CrossClusterAsyncSearchIT + method: testCCSClusterDetailsWhereAllShardsSkippedInCanMatch + issue: https://github.com/elastic/elasticsearch/issues/115652 # Examples: # From 9adbebb123c875765624ca1fe85f4aeb118287fb Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Fri, 25 Oct 2024 09:16:54 -0400 Subject: [PATCH 419/449] [ML] Fix streaming IT (#115543) Fix #113430 --- muted-tests.yml | 3 --- .../org/elasticsearch/xpack/inference/InferenceCrudIT.java | 3 +-- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index f10c214be26bf..20879ed327781 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -164,9 +164,6 @@ tests: - class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT 
method: test {categorize.Categorize} issue: https://github.com/elastic/elasticsearch/issues/113428 -- class: org.elasticsearch.xpack.inference.InferenceCrudIT - method: testSupportedStream - issue: https://github.com/elastic/elasticsearch/issues/113430 - class: org.elasticsearch.integration.KibanaUserRoleIntegTests method: testFieldMappings issue: https://github.com/elastic/elasticsearch/issues/113592 diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index 37de2caadb475..53c82219e2f12 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -307,8 +307,7 @@ public void testSupportedStream() throws Exception { assertEquals(modelId, singleModel.get("inference_id")); assertEquals(TaskType.COMPLETION.toString(), singleModel.get("task_type")); - var input = IntStream.range(1, randomInt(10)).mapToObj(i -> randomAlphaOfLength(10)).toList(); - + var input = IntStream.range(1, 2 + randomInt(8)).mapToObj(i -> randomAlphaOfLength(10)).toList(); try { var events = streamInferOnMockService(modelId, TaskType.COMPLETION, input); From 6cec96cc1e208e50518949edf3b1840ddd012dd1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 25 Oct 2024 15:44:59 +0200 Subject: [PATCH 420/449] Fix TimeSeriesRateAggregatorTests file leak (#115278) With Lucene 10, IndexWriter requires a parent document field in order to use index sorting with document blocks. This lead to different IAE and file leaks in this test which are fixed by adapting the corresponding location in the test setup. 
--- .../search/aggregations/AggregatorTestCase.java | 2 ++ .../rate/TimeSeriesRateAggregatorTests.java | 15 ++++++++------- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 5f64d123c1bed..d6709b00b4dbb 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -81,6 +81,7 @@ import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.cache.query.DisabledQueryCache; import org.elasticsearch.index.cache.query.TrivialQueryCachingPolicy; +import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; @@ -749,6 +750,7 @@ protected void tes new SortField(TimeSeriesIdFieldMapper.NAME, SortField.Type.STRING, false), new SortedNumericSortField(DataStreamTimestampFieldMapper.DEFAULT_PATH, SortField.Type.LONG, true) ); + config.setParentField(Engine.ROOT_DOC_FIELD_NAME); config.setIndexSort(sort); } RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory, config); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java index 753ce8283afca..3c7a18de536bc 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java @@ -42,6 +42,7 @@ import static 
org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.startsWith; public class TimeSeriesRateAggregatorTests extends AggregatorTestCase { @@ -155,14 +156,14 @@ public void testNestedWithinAutoDateHistogram() throws IOException { AggTestConfig aggTestConfig = new AggTestConfig(tsBuilder, timeStampField(), counterField("counter_field")) .withSplitLeavesIntoSeperateAggregators(false); - expectThrows(IllegalArgumentException.class, () -> testCase(iw -> { - for (Document document : docs(2000, "1", 15, 37, 60, /*reset*/ 14)) { - iw.addDocument(document); - } - for (Document document : docs(2000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)) { - iw.addDocument(document); - } + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> testCase(iw -> { + iw.addDocuments(docs(2000, "1", 15, 37, 60, /*reset*/ 14)); + iw.addDocuments(docs(2000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)); }, verifier, aggTestConfig)); + assertThat( + e.getMessage(), + startsWith("Wrapping a time-series rate aggregation within a DeferableBucketAggregator is not supported.") + ); } private List docs(long startTimestamp, String dim, long... 
values) throws IOException { From d4ac705d57ff19685703738883c595392547e399 Mon Sep 17 00:00:00 2001 From: John Wagster Date: Fri, 25 Oct 2024 09:26:51 -0500 Subject: [PATCH 421/449] [CI] MixedClusterClientYamlTestSuiteIT test {p0=range/20_synthetic_source/Date range} failing - Removed Old `Date range` test because it's not longer validating useful code (#114057) unmuting test and removing bwc test to get mixedClusterTest working --- muted-tests.yml | 3 - .../test/range/20_synthetic_source.yml | 134 ------------------ 2 files changed, 137 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 20879ed327781..70f29016d8475 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -191,9 +191,6 @@ tests: - class: org.elasticsearch.threadpool.SimpleThreadPoolIT method: testThreadPoolMetrics issue: https://github.com/elastic/elasticsearch/issues/108320 -- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT - method: test {p0=range/20_synthetic_source/Date range} - issue: https://github.com/elastic/elasticsearch/issues/113874 - class: org.elasticsearch.kibana.KibanaThreadPoolIT method: testBlockedThreadPoolsRejectUserRequests issue: https://github.com/elastic/elasticsearch/issues/113939 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/20_synthetic_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/20_synthetic_source.yml index cc92b52e0887a..de20f82f8ba2f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/20_synthetic_source.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/20_synthetic_source.yml @@ -525,140 +525,6 @@ setup: _source: ip_range: { "gte": "2001:db8::", "lte": null } ---- -"Date range": - - skip: - cluster_features: ["mapper.range.date_range_indexing_fix"] - reason: "tests prior to rounding fixes in 8.16.0 that caused non-intuitive indexing and query because ranges were assumed to always index with 0's as the default such as 
when time is missing 00:00:00.000 time was assumed but for lte indexing and query missing time should be 23:59:59.999 as per docs here: https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-range-query.html" - - - do: - index: - index: synthetic_source_test - id: "1" - body: { "date_range" : { "gte": "2017-09-01", "lte": "2017-09-05" } } - - - do: - index: - index: synthetic_source_test - id: "2" - body: { "date_range" : { "gt": "2017-09-01", "lte": "2017-09-03" } } - - - do: - index: - index: synthetic_source_test - id: "3" - body: { "date_range" : [ { "gte": "2017-09-04", "lt": "2017-09-05" } ] } - - - do: - index: - index: synthetic_source_test - id: "4" - body: { "date_range" : [ { "gt": "2017-09-04", "lt": "2017-09-08" }, { "gt": "2017-09-04", "lt": "2017-09-07" } ] } - - - do: - index: - index: synthetic_source_test - id: "5" - body: { "date_range" : { "gte": 1504224000000, "lte": 1504569600000 } } - - - do: - index: - index: synthetic_source_test - id: "6" - body: { "date_range" : { "gte": "2017-09-01T10:20:30.123Z", "lte": "2017-09-05T03:04:05.789Z" } } - - - do: - index: - index: synthetic_source_test - id: "7" - body: { "date_range" : null } - - - do: - index: - index: synthetic_source_test - id: "8" - body: { "date_range": { "gte": null, "lte": "2017-09-05" } } - - - do: - index: - index: synthetic_source_test - id: "9" - body: { "date_range": { "gte": "2017-09-05" } } - - - do: - indices.refresh: {} - - - do: - get: - index: synthetic_source_test - id: "1" - - match: - _source: - date_range: { "gte": "2017-09-01T00:00:00.000Z", "lte": "2017-09-05T00:00:00.000Z" } - - - do: - get: - index: synthetic_source_test - id: "2" - - match: - _source: - date_range: { "gte": "2017-09-01T00:00:00.001Z", "lte": "2017-09-03T00:00:00.000Z" } - - - do: - get: - index: synthetic_source_test - id: "3" - - match: - _source: - date_range: { "gte": "2017-09-04T00:00:00.000Z", "lte": "2017-09-04T23:59:59.999Z" } - - - do: - get: - index: 
synthetic_source_test - id: "4" - - match: - _source: - date_range: [ { "gte": "2017-09-04T00:00:00.001Z", "lte": "2017-09-06T23:59:59.999Z" }, { "gte": "2017-09-04T00:00:00.001Z", "lte": "2017-09-07T23:59:59.999Z" } ] - - - do: - get: - index: synthetic_source_test - id: "5" - - match: - _source: - date_range: { "gte": "2017-09-01T00:00:00.000Z", "lte": "2017-09-05T00:00:00.000Z" } - - - do: - get: - index: synthetic_source_test - id: "6" - - match: - _source: - date_range: { "gte": "2017-09-01T10:20:30.123Z", "lte": "2017-09-05T03:04:05.789Z" } - - - do: - get: - index: synthetic_source_test - id: "7" - - match: - _source: {} - - - do: - get: - index: synthetic_source_test - id: "8" - - match: - _source: - date_range: { "gte": null, "lte": "2017-09-05T00:00:00.000Z" } - - - do: - get: - index: synthetic_source_test - id: "9" - - match: - _source: - date_range: { "gte": "2017-09-05T00:00:00.000Z", "lte": null } - --- "Date range Rounding Fixes": - requires: From 0b94bb8a75076213f7ac7029d21a57ca2ebf93a1 Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Fri, 25 Oct 2024 16:51:02 +0200 Subject: [PATCH 422/449] Slightly more generous assertions for Cartesian tests (#115658) --- .../elasticsearch/lucene/spatial/CentroidCalculatorTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/lucene/spatial/CentroidCalculatorTests.java b/server/src/test/java/org/elasticsearch/lucene/spatial/CentroidCalculatorTests.java index 8216d092bd683..caf4494986f6d 100644 --- a/server/src/test/java/org/elasticsearch/lucene/spatial/CentroidCalculatorTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/spatial/CentroidCalculatorTests.java @@ -428,7 +428,7 @@ private Matcher matchDouble(double value) { // Most data (notably geo data) has values within bounds, and an absolute delta makes more sense. double delta = (value > 1e28 || value < -1e28) ? Math.abs(value / 1e6) : (value > 1e20 || value < -1e20) ? 
Math.abs(value / 1e10) - : (value > 1e9 || value < -1e9) ? Math.abs(value / 1e15) + : (value > 1e8 || value < -1e8) ? Math.abs(value / 1e15) : DELTA; return closeTo(value, delta); } From bb4a444edef301f263210209636ee46fb9d32d80 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Fri, 25 Oct 2024 17:17:30 +0200 Subject: [PATCH 423/449] [Gradle] Fix packaging tests after removing cloud docker image (#115654) --- .../InternalDistributionDownloadPlugin.java | 3 --- ...kerCloudElasticsearchDistributionType.java | 27 ------------------- ...nternalElasticsearchDistributionTypes.java | 2 -- .../internal/test/DistroTestPlugin.java | 2 -- 4 files changed, 34 deletions(-) delete mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/DockerCloudElasticsearchDistributionType.java diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java index 19309fe2da8a3..0bf4bcb33c23b 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java @@ -172,9 +172,6 @@ private static String distributionProjectName(ElasticsearchDistribution distribu if (distribution.getType() == InternalElasticsearchDistributionTypes.DOCKER_IRONBANK) { return projectName + "ironbank-docker" + archString + "-export"; } - if (distribution.getType() == InternalElasticsearchDistributionTypes.DOCKER_CLOUD) { - return projectName + "cloud-docker" + archString + "-export"; - } if (distribution.getType() == InternalElasticsearchDistributionTypes.DOCKER_CLOUD_ESS) { return projectName + "cloud-ess-docker" + archString + "-export"; } diff --git 
a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/DockerCloudElasticsearchDistributionType.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/DockerCloudElasticsearchDistributionType.java deleted file mode 100644 index eb522dbcad5e2..0000000000000 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/DockerCloudElasticsearchDistributionType.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.gradle.internal.distribution; - -import org.elasticsearch.gradle.ElasticsearchDistributionType; - -public class DockerCloudElasticsearchDistributionType implements ElasticsearchDistributionType { - - DockerCloudElasticsearchDistributionType() {} - - @Override - public String getName() { - return "dockerCloud"; - } - - @Override - public boolean isDocker() { - return true; - } -} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java index ba0e76b3f5b99..8f0951da86b88 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java @@ -19,7 +19,6 @@ public class 
InternalElasticsearchDistributionTypes { public static ElasticsearchDistributionType DOCKER = new DockerElasticsearchDistributionType(); public static ElasticsearchDistributionType DOCKER_UBI = new DockerUbiElasticsearchDistributionType(); public static ElasticsearchDistributionType DOCKER_IRONBANK = new DockerIronBankElasticsearchDistributionType(); - public static ElasticsearchDistributionType DOCKER_CLOUD = new DockerCloudElasticsearchDistributionType(); public static ElasticsearchDistributionType DOCKER_CLOUD_ESS = new DockerCloudEssElasticsearchDistributionType(); public static ElasticsearchDistributionType DOCKER_WOLFI = new DockerWolfiElasticsearchDistributionType(); @@ -29,7 +28,6 @@ public class InternalElasticsearchDistributionTypes { DOCKER, DOCKER_UBI, DOCKER_IRONBANK, - DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI ); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java index 77ab9557eac33..8e7884888b63b 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java @@ -49,7 +49,6 @@ import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.ALL_INTERNAL; import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DEB; import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER; -import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_CLOUD; import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_CLOUD_ESS; import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_IRONBANK; import 
static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_UBI; @@ -149,7 +148,6 @@ private static Map> lifecycleTask lifecyleTasks.put(DOCKER, project.getTasks().register(taskPrefix + ".docker")); lifecyleTasks.put(DOCKER_UBI, project.getTasks().register(taskPrefix + ".docker-ubi")); lifecyleTasks.put(DOCKER_IRONBANK, project.getTasks().register(taskPrefix + ".docker-ironbank")); - lifecyleTasks.put(DOCKER_CLOUD, project.getTasks().register(taskPrefix + ".docker-cloud")); lifecyleTasks.put(DOCKER_CLOUD_ESS, project.getTasks().register(taskPrefix + ".docker-cloud-ess")); lifecyleTasks.put(DOCKER_WOLFI, project.getTasks().register(taskPrefix + ".docker-wolfi")); lifecyleTasks.put(ARCHIVE, project.getTasks().register(taskPrefix + ".archives")); From e82e6af50517e35400d5438a3932a5b6b478b8d5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Fri, 25 Oct 2024 17:35:48 +0200 Subject: [PATCH 424/449] [DOCS] Documents configurable chunking (#115300) Co-authored-by: David Kyle --- .../inference/inference-apis.asciidoc | 62 ++++++++++++++++++- .../inference/inference-shared.asciidoc | 34 +++++++++- .../service-alibabacloud-ai-search.asciidoc | 21 ++++++- .../inference/service-amazon-bedrock.asciidoc | 20 ++++++ .../inference/service-anthropic.asciidoc | 20 ++++++ .../service-azure-ai-studio.asciidoc | 20 ++++++ .../inference/service-azure-openai.asciidoc | 20 ++++++ .../inference/service-cohere.asciidoc | 20 ++++++ .../inference/service-elasticsearch.asciidoc | 20 ++++++ .../inference/service-elser.asciidoc | 20 ++++++ .../service-google-ai-studio.asciidoc | 20 ++++++ .../service-google-vertex-ai.asciidoc | 20 ++++++ .../inference/service-hugging-face.asciidoc | 20 ++++++ .../inference/service-mistral.asciidoc | 20 ++++++ .../inference/service-openai.asciidoc | 20 ++++++ 15 files changed, 354 insertions(+), 3 deletions(-) diff --git a/docs/reference/inference/inference-apis.asciidoc 
b/docs/reference/inference/inference-apis.asciidoc index 1206cb02ba89a..38afc7c416f18 100644 --- a/docs/reference/inference/inference-apis.asciidoc +++ b/docs/reference/inference/inference-apis.asciidoc @@ -35,7 +35,6 @@ Elastic –, then create an {infer} endpoint by the <>. Now use <> to perform <> on your data. - [discrete] [[default-enpoints]] === Default {infer} endpoints @@ -53,6 +52,67 @@ For these models, the minimum number of allocations is `0`. If there is no {infer} activity that uses the endpoint, the number of allocations will scale down to `0` automatically after 15 minutes. +[discrete] +[[infer-chunking-config]] +=== Configuring chunking + +{infer-cap} endpoints have a limit on the amount of text they can process at once, determined by the model's input capacity. +Chunking is the process of splitting the input text into pieces that remain within these limits. +It occurs when ingesting documents into <>. +Chunking also helps produce sections that are digestible for humans. +Returning a long document in search results is less useful than providing the most relevant chunk of text. + +Each chunk will include the text subpassage and the corresponding embedding generated from it. + +By default, documents are split into sentences and grouped in sections up to 250 words with 1 sentence overlap so that each chunk shares a sentence with the previous chunk. +Overlapping ensures continuity and prevents vital contextual information in the input text from being lost by a hard break. + +{es} uses the https://unicode-org.github.io/icu-docs/[ICU4J] library to detect word and sentence boundaries for chunking. +https://unicode-org.github.io/icu/userguide/boundaryanalysis/#word-boundary[Word boundaries] are identified by following a series of rules, not just the presence of a whitespace character. +For written languages that do not use whitespace, such as Chinese or Japanese, dictionary lookups are used to detect word boundaries.
+ + +[discrete] +==== Chunking strategies + +Two strategies are available for chunking: `sentence` and `word`. + +The `sentence` strategy splits the input text at sentence boundaries. +Each chunk contains one or more complete sentences ensuring that the integrity of sentence-level context is preserved, except when a sentence causes a chunk to exceed a word count of `max_chunk_size`, in which case it will be split across chunks. +The `sentence_overlap` option defines the number of sentences from the previous chunk to include in the current chunk which is either `0` or `1`. + +The `word` strategy splits the input text on individual words up to the `max_chunk_size` limit. +The `overlap` option is the number of words from the previous chunk to include in the current chunk. + +The default chunking strategy is `sentence`. + +NOTE: The default chunking strategy for {infer} endpoints created before 8.16 is `word`. + + +[discrete] +==== Example of configuring the chunking behavior + +The following example creates an {infer} endpoint with the `elasticsearch` service that deploys the ELSER model by default and configures the chunking behavior. 
+ +[source,console] +------------------------------------------------------------ +PUT _inference/sparse_embedding/small_chunk_size +{ + "service": "elasticsearch", + "service_settings": { + "num_allocations": 1, + "num_threads": 1 + }, + "chunking_settings": { + "strategy": "sentence", + "max_chunk_size": 100, + "sentence_overlap": 0 + } +} +------------------------------------------------------------ +// TEST[skip:TBD] + + include::delete-inference.asciidoc[] include::get-inference.asciidoc[] include::post-inference.asciidoc[] diff --git a/docs/reference/inference/inference-shared.asciidoc b/docs/reference/inference/inference-shared.asciidoc index 2eafa3434e89e..da497c6581e5d 100644 --- a/docs/reference/inference/inference-shared.asciidoc +++ b/docs/reference/inference/inference-shared.asciidoc @@ -31,4 +31,36 @@ end::task-settings[] tag::task-type[] The type of the {infer} task that the model will perform. -end::task-type[] \ No newline at end of file +end::task-type[] + +tag::chunking-settings[] +Chunking configuration object. +Refer to <> to learn more about chunking. +end::chunking-settings[] + +tag::chunking-settings-max-chunking-size[] +Specifies the maximum size of a chunk in words. +Defaults to `250`. +This value cannot be higher than `300` or lower than `20` (for `sentence` strategy) or `10` (for `word` strategy). +end::chunking-settings-max-chunking-size[] + +tag::chunking-settings-overlap[] +Only for `word` chunking strategy. +Specifies the number of overlapping words for chunks. +Defaults to `100`. +This value cannot be higher than the half of `max_chunking_size`. +end::chunking-settings-overlap[] + +tag::chunking-settings-sentence-overlap[] +Only for `sentence` chunking strategy. +Specifies the number of overlapping sentences for chunks. +It can be either `1` or `0`. +Defaults to `1`. +end::chunking-settings-sentence-overlap[] + +tag::chunking-settings-strategy[] +Specifies the chunking strategy. +It could be either `sentence` or `word`.
+end::chunking-settings-strategy[] + + diff --git a/docs/reference/inference/service-alibabacloud-ai-search.asciidoc b/docs/reference/inference/service-alibabacloud-ai-search.asciidoc index 0607b56b528ea..c3ff40a39cd86 100644 --- a/docs/reference/inference/service-alibabacloud-ai-search.asciidoc +++ b/docs/reference/inference/service-alibabacloud-ai-search.asciidoc @@ -34,6 +34,26 @@ Available task types: [[infer-service-alibabacloud-ai-search-api-request-body]] ==== {api-request-body-title} +`chunking_settings`:: +(Optional, object) +include::inference-shared.asciidoc[tag=chunking-settings] + +`max_chunking_size`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] + +`overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-overlap] + +`sentence_overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap] + +`strategy`::: +(Optional, string) +include::inference-shared.asciidoc[tag=chunking-settings-strategy] + `service`:: (Required, string) The type of service supported for the specified task type. 
In this case, @@ -108,7 +128,6 @@ To modify this, set the `requests_per_minute` setting of this object in your ser include::inference-shared.asciidoc[tag=request-per-minute-example] -- - `task_settings`:: (Optional, object) include::inference-shared.asciidoc[tag=task-settings] diff --git a/docs/reference/inference/service-amazon-bedrock.asciidoc b/docs/reference/inference/service-amazon-bedrock.asciidoc index dbffd5c26fbcc..761777e32f8e0 100644 --- a/docs/reference/inference/service-amazon-bedrock.asciidoc +++ b/docs/reference/inference/service-amazon-bedrock.asciidoc @@ -32,6 +32,26 @@ Available task types: [[infer-service-amazon-bedrock-api-request-body]] ==== {api-request-body-title} +`chunking_settings`:: +(Optional, object) +include::inference-shared.asciidoc[tag=chunking-settings] + +`max_chunking_size`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] + +`overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-overlap] + +`sentence_overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap] + +`strategy`::: +(Optional, string) +include::inference-shared.asciidoc[tag=chunking-settings-strategy] + `service`:: (Required, string) The type of service supported for the specified task type. 
In this case, diff --git a/docs/reference/inference/service-anthropic.asciidoc b/docs/reference/inference/service-anthropic.asciidoc index 41419db7a6069..7fb3d1d5bea34 100644 --- a/docs/reference/inference/service-anthropic.asciidoc +++ b/docs/reference/inference/service-anthropic.asciidoc @@ -32,6 +32,26 @@ Available task types: [[infer-service-anthropic-api-request-body]] ==== {api-request-body-title} +`chunking_settings`:: +(Optional, object) +include::inference-shared.asciidoc[tag=chunking-settings] + +`max_chunking_size`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] + +`overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-overlap] + +`sentence_overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap] + +`strategy`::: +(Optional, string) +include::inference-shared.asciidoc[tag=chunking-settings-strategy] + `service`:: (Required, string) The type of service supported for the specified task type. 
In this case, diff --git a/docs/reference/inference/service-azure-ai-studio.asciidoc b/docs/reference/inference/service-azure-ai-studio.asciidoc index 0d711a0d6171f..dd13a3e59aae5 100644 --- a/docs/reference/inference/service-azure-ai-studio.asciidoc +++ b/docs/reference/inference/service-azure-ai-studio.asciidoc @@ -33,6 +33,26 @@ Available task types: [[infer-service-azure-ai-studio-api-request-body]] ==== {api-request-body-title} +`chunking_settings`:: +(Optional, object) +include::inference-shared.asciidoc[tag=chunking-settings] + +`max_chunking_size`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] + +`overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-overlap] + +`sentence_overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap] + +`strategy`::: +(Optional, string) +include::inference-shared.asciidoc[tag=chunking-settings-strategy] + `service`:: (Required, string) The type of service supported for the specified task type. 
In this case, diff --git a/docs/reference/inference/service-azure-openai.asciidoc b/docs/reference/inference/service-azure-openai.asciidoc index 6f03c5966d9e6..b134e2b687f6c 100644 --- a/docs/reference/inference/service-azure-openai.asciidoc +++ b/docs/reference/inference/service-azure-openai.asciidoc @@ -33,6 +33,26 @@ Available task types: [[infer-service-azure-openai-api-request-body]] ==== {api-request-body-title} +`chunking_settings`:: +(Optional, object) +include::inference-shared.asciidoc[tag=chunking-settings] + +`max_chunking_size`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] + +`overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-overlap] + +`sentence_overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap] + +`strategy`::: +(Optional, string) +include::inference-shared.asciidoc[tag=chunking-settings-strategy] + `service`:: (Required, string) The type of service supported for the specified task type. 
In this case, diff --git a/docs/reference/inference/service-cohere.asciidoc b/docs/reference/inference/service-cohere.asciidoc index 84eae6e880617..1a815e3c45f36 100644 --- a/docs/reference/inference/service-cohere.asciidoc +++ b/docs/reference/inference/service-cohere.asciidoc @@ -34,6 +34,26 @@ Available task types: [[infer-service-cohere-api-request-body]] ==== {api-request-body-title} +`chunking_settings`:: +(Optional, object) +include::inference-shared.asciidoc[tag=chunking-settings] + +`max_chunking_size`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] + +`overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-overlap] + +`sentence_overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap] + +`strategy`::: +(Optional, string) +include::inference-shared.asciidoc[tag=chunking-settings-strategy] + `service`:: (Required, string) The type of service supported for the specified task type. 
In this case, diff --git a/docs/reference/inference/service-elasticsearch.asciidoc b/docs/reference/inference/service-elasticsearch.asciidoc index 259779a12134d..0103b425faefe 100644 --- a/docs/reference/inference/service-elasticsearch.asciidoc +++ b/docs/reference/inference/service-elasticsearch.asciidoc @@ -36,6 +36,26 @@ Available task types: [[infer-service-elasticsearch-api-request-body]] ==== {api-request-body-title} +`chunking_settings`:: +(Optional, object) +include::inference-shared.asciidoc[tag=chunking-settings] + +`max_chunking_size`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] + +`overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-overlap] + +`sentence_overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap] + +`strategy`::: +(Optional, string) +include::inference-shared.asciidoc[tag=chunking-settings-strategy] + `service`:: (Required, string) The type of service supported for the specified task type. 
In this case, diff --git a/docs/reference/inference/service-elser.asciidoc b/docs/reference/inference/service-elser.asciidoc index 521fab0375584..273d743e47a4b 100644 --- a/docs/reference/inference/service-elser.asciidoc +++ b/docs/reference/inference/service-elser.asciidoc @@ -36,6 +36,26 @@ Available task types: [[infer-service-elser-api-request-body]] ==== {api-request-body-title} +`chunking_settings`:: +(Optional, object) +include::inference-shared.asciidoc[tag=chunking-settings] + +`max_chunking_size`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] + +`overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-overlap] + +`sentence_overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap] + +`strategy`::: +(Optional, string) +include::inference-shared.asciidoc[tag=chunking-settings-strategy] + `service`:: (Required, string) The type of service supported for the specified task type. 
In this case, diff --git a/docs/reference/inference/service-google-ai-studio.asciidoc b/docs/reference/inference/service-google-ai-studio.asciidoc index 25aa89cd49110..738fce3d53e9b 100644 --- a/docs/reference/inference/service-google-ai-studio.asciidoc +++ b/docs/reference/inference/service-google-ai-studio.asciidoc @@ -33,6 +33,26 @@ Available task types: [[infer-service-google-ai-studio-api-request-body]] ==== {api-request-body-title} +`chunking_settings`:: +(Optional, object) +include::inference-shared.asciidoc[tag=chunking-settings] + +`max_chunking_size`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] + +`overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-overlap] + +`sentence_overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap] + +`strategy`::: +(Optional, string) +include::inference-shared.asciidoc[tag=chunking-settings-strategy] + `service`:: (Required, string) The type of service supported for the specified task type. 
In this case, diff --git a/docs/reference/inference/service-google-vertex-ai.asciidoc b/docs/reference/inference/service-google-vertex-ai.asciidoc index 640553ab74626..34e14e05e072a 100644 --- a/docs/reference/inference/service-google-vertex-ai.asciidoc +++ b/docs/reference/inference/service-google-vertex-ai.asciidoc @@ -33,6 +33,26 @@ Available task types: [[infer-service-google-vertex-ai-api-request-body]] ==== {api-request-body-title} +`chunking_settings`:: +(Optional, object) +include::inference-shared.asciidoc[tag=chunking-settings] + +`max_chunking_size`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] + +`overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-overlap] + +`sentence_overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap] + +`strategy`::: +(Optional, string) +include::inference-shared.asciidoc[tag=chunking-settings-strategy] + `service`:: (Required, string) The type of service supported for the specified task type. 
In this case, diff --git a/docs/reference/inference/service-hugging-face.asciidoc b/docs/reference/inference/service-hugging-face.asciidoc index 177a15177d21f..6d8667351a6b4 100644 --- a/docs/reference/inference/service-hugging-face.asciidoc +++ b/docs/reference/inference/service-hugging-face.asciidoc @@ -32,6 +32,26 @@ Available task types: [[infer-service-hugging-face-api-request-body]] ==== {api-request-body-title} +`chunking_settings`:: +(Optional, object) +include::inference-shared.asciidoc[tag=chunking-settings] + +`max_chunking_size`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] + +`overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-overlap] + +`sentence_overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap] + +`strategy`::: +(Optional, string) +include::inference-shared.asciidoc[tag=chunking-settings-strategy] + `service`:: (Required, string) The type of service supported for the specified task type. 
In this case, diff --git a/docs/reference/inference/service-mistral.asciidoc b/docs/reference/inference/service-mistral.asciidoc index 077e610191705..244381d107161 100644 --- a/docs/reference/inference/service-mistral.asciidoc +++ b/docs/reference/inference/service-mistral.asciidoc @@ -32,6 +32,26 @@ Available task types: [[infer-service-mistral-api-request-body]] ==== {api-request-body-title} +`chunking_settings`:: +(Optional, object) +include::inference-shared.asciidoc[tag=chunking-settings] + +`max_chunking_size`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] + +`overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-overlap] + +`sentence_overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap] + +`strategy`::: +(Optional, string) +include::inference-shared.asciidoc[tag=chunking-settings-strategy] + `service`:: (Required, string) The type of service supported for the specified task type. 
In this case, diff --git a/docs/reference/inference/service-openai.asciidoc b/docs/reference/inference/service-openai.asciidoc index 075e76dc7d741..21643133553e1 100644 --- a/docs/reference/inference/service-openai.asciidoc +++ b/docs/reference/inference/service-openai.asciidoc @@ -33,6 +33,26 @@ Available task types: [[infer-service-openai-api-request-body]] ==== {api-request-body-title} +`chunking_settings`:: +(Optional, object) +include::inference-shared.asciidoc[tag=chunking-settings] + +`max_chunking_size`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] + +`overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-overlap] + +`sentence_overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap] + +`strategy`::: +(Optional, string) +include::inference-shared.asciidoc[tag=chunking-settings-strategy] + `service`:: (Required, string) The type of service supported for the specified task type. 
In this case, From 3b5bd62467456db5eaefbf7cd72ce324eb7dbb49 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Fri, 25 Oct 2024 09:06:11 -0700 Subject: [PATCH 425/449] Add tests for license changes while using data streams (#115478) --- .../logsdb/DataStreamLicenceDowngradeIT.java | 489 ++++++++++++++++++ .../logsdb/DataStreamLicenseChangeIT.java | 107 ++++ .../logsdb/DataStreamLicenseUpgradeIT.java | 487 +++++++++++++++++ 3 files changed, 1083 insertions(+) create mode 100644 x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/DataStreamLicenceDowngradeIT.java create mode 100644 x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/DataStreamLicenseChangeIT.java create mode 100644 x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/DataStreamLicenseUpgradeIT.java diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/DataStreamLicenceDowngradeIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/DataStreamLicenceDowngradeIT.java new file mode 100644 index 0000000000000..f004189098c43 --- /dev/null +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/DataStreamLicenceDowngradeIT.java @@ -0,0 +1,489 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.logsdb; + +import org.elasticsearch.index.mapper.SourceFieldMapper; + +import java.io.IOException; +import java.util.List; + +public class DataStreamLicenceDowngradeIT extends DataStreamLicenseChangeIT { + @Override + protected void applyInitialLicense() throws IOException { + startTrial(); + } + + @Override + protected void licenseChange() throws IOException { + startBasic(); + } + + @Override + protected List cases() { + return List.of(new TestCase() { + @Override + public String dataStreamName() { + return "logs-test-regular"; + } + + @Override + public String indexMode() { + return "logsdb"; + } + + @Override + public void prepareDataStream() throws IOException { + assertOK(createDataStream(client(), dataStreamName())); + } + + @Override + public void rollover() throws IOException { + rolloverDataStream(client(), dataStreamName()); + } + + @Override + public SourceFieldMapper.Mode initialMode() { + return SourceFieldMapper.Mode.SYNTHETIC; + } + + @Override + public SourceFieldMapper.Mode finalMode() { + return SourceFieldMapper.Mode.STORED; + } + }, new TestCase() { + private static final String sourceModeOverride = """ + { + "template": { + "settings": { + "index": { + "mapping.source.mode": "SYNTHETIC" + } + } + } + }"""; + + @Override + public String dataStreamName() { + return "logs-test-explicit-synthetic"; + } + + @Override + public String indexMode() { + return "logsdb"; + } + + @Override + public void prepareDataStream() throws IOException { + assertOK(putComponentTemplate(client(), "logs@custom", sourceModeOverride)); + assertOK(createDataStream(client(), dataStreamName())); + assertOK(removeComponentTemplate(client(), "logs@custom")); + } + + @Override + public void rollover() throws IOException { + assertOK(putComponentTemplate(client(), "logs@custom", sourceModeOverride)); + rolloverDataStream(client(), dataStreamName()); + assertOK(removeComponentTemplate(client(), "logs@custom")); + } + + @Override + public 
SourceFieldMapper.Mode initialMode() { + return SourceFieldMapper.Mode.SYNTHETIC; + } + + @Override + public SourceFieldMapper.Mode finalMode() { + return SourceFieldMapper.Mode.STORED; + } + }, new TestCase() { + private static final String sourceModeOverride = """ + { + "template": { + "settings": { + "index": { + "mapping.source.mode": "STORED" + } + } + } + }"""; + + @Override + public String dataStreamName() { + return "logs-test-explicit-stored"; + } + + @Override + public String indexMode() { + return "logsdb"; + } + + @Override + public void prepareDataStream() throws IOException { + assertOK(putComponentTemplate(client(), "logs@custom", sourceModeOverride)); + assertOK(createDataStream(client(), dataStreamName())); + assertOK(removeComponentTemplate(client(), "logs@custom")); + } + + @Override + public void rollover() throws IOException { + assertOK(putComponentTemplate(client(), "logs@custom", sourceModeOverride)); + rolloverDataStream(client(), dataStreamName()); + assertOK(removeComponentTemplate(client(), "logs@custom")); + } + + @Override + public SourceFieldMapper.Mode initialMode() { + return SourceFieldMapper.Mode.STORED; + } + + @Override + public SourceFieldMapper.Mode finalMode() { + return SourceFieldMapper.Mode.STORED; + } + }, new TestCase() { + @Override + public String dataStreamName() { + return "tsdb-test-regular"; + } + + @Override + public String indexMode() { + return "time_series"; + } + + @Override + public void prepareDataStream() throws IOException { + var componentTemplate = """ + { + "template": { + "settings": { + "index": { + "mode": "time_series", + "routing_path": ["dim"] + } + }, + "mappings": { + "properties": { + "dim": { + "type": "keyword", + "time_series_dimension": true + } + } + } + } + } + """; + assertOK(putComponentTemplate(client(), "tsdb-test-regular-component", componentTemplate)); + + var template = """ + { + "index_patterns": ["tsdb-test-regular"], + "priority": 100, + "data_stream": {}, + "composed_of": 
["tsdb-test-regular-component"] + } + """; + + putTemplate(client(), "tsdb-test-regular-template", template); + assertOK(createDataStream(client(), dataStreamName())); + } + + @Override + public void rollover() throws IOException { + rolloverDataStream(client(), dataStreamName()); + } + + @Override + public SourceFieldMapper.Mode initialMode() { + return SourceFieldMapper.Mode.SYNTHETIC; + } + + @Override + public SourceFieldMapper.Mode finalMode() { + return SourceFieldMapper.Mode.STORED; + } + }, new TestCase() { + @Override + public String dataStreamName() { + return "tsdb-test-synthetic"; + } + + @Override + public String indexMode() { + return "time_series"; + } + + @Override + public void prepareDataStream() throws IOException { + var componentTemplate = """ + { + "template": { + "settings": { + "index": { + "mode": "time_series", + "routing_path": ["dim"], + "mapping.source.mode": "SYNTHETIC" + } + }, + "mappings": { + "properties": { + "dim": { + "type": "keyword", + "time_series_dimension": true + } + } + } + } + } + """; + assertOK(putComponentTemplate(client(), "tsdb-test-synthetic-component", componentTemplate)); + + var template = """ + { + "index_patterns": ["tsdb-test-synthetic"], + "priority": 100, + "data_stream": {}, + "composed_of": ["tsdb-test-synthetic-component"] + } + """; + + putTemplate(client(), "tsdb-test-synthetic-template", template); + assertOK(createDataStream(client(), dataStreamName())); + } + + @Override + public void rollover() throws IOException { + rolloverDataStream(client(), dataStreamName()); + } + + @Override + public SourceFieldMapper.Mode initialMode() { + return SourceFieldMapper.Mode.SYNTHETIC; + } + + @Override + public SourceFieldMapper.Mode finalMode() { + return SourceFieldMapper.Mode.STORED; + } + }, new TestCase() { + @Override + public String dataStreamName() { + return "tsdb-test-stored"; + } + + @Override + public String indexMode() { + return "time_series"; + } + + @Override + public void prepareDataStream() 
throws IOException { + var componentTemplate = """ + { + "template": { + "settings": { + "index": { + "mode": "time_series", + "routing_path": ["dim"], + "mapping.source.mode": "STORED" + } + }, + "mappings": { + "properties": { + "dim": { + "type": "keyword", + "time_series_dimension": true + } + } + } + } + } + """; + assertOK(putComponentTemplate(client(), "tsdb-test-stored-component", componentTemplate)); + + var template = """ + { + "index_patterns": ["tsdb-test-stored"], + "priority": 100, + "data_stream": {}, + "composed_of": ["tsdb-test-stored-component"] + } + """; + + putTemplate(client(), "tsdb-test-stored-template", template); + assertOK(createDataStream(client(), dataStreamName())); + } + + @Override + public void rollover() throws IOException { + rolloverDataStream(client(), dataStreamName()); + } + + @Override + public SourceFieldMapper.Mode initialMode() { + return SourceFieldMapper.Mode.STORED; + } + + @Override + public SourceFieldMapper.Mode finalMode() { + return SourceFieldMapper.Mode.STORED; + } + }, + + new TestCase() { + @Override + public String dataStreamName() { + return "standard"; + } + + @Override + public String indexMode() { + return "standard"; + } + + @Override + public void prepareDataStream() throws IOException { + var template = """ + { + "index_patterns": ["standard"], + "priority": 100, + "data_stream": {}, + "composed_of": [] + } + """; + + putTemplate(client(), "standard-template", template); + assertOK(createDataStream(client(), dataStreamName())); + } + + @Override + public void rollover() throws IOException { + rolloverDataStream(client(), dataStreamName()); + } + + @Override + public SourceFieldMapper.Mode initialMode() { + return SourceFieldMapper.Mode.STORED; + } + + @Override + public SourceFieldMapper.Mode finalMode() { + return SourceFieldMapper.Mode.STORED; + } + }, + new TestCase() { + @Override + public String dataStreamName() { + return "standard-synthetic"; + } + + @Override + public String indexMode() { + 
return "standard"; + } + + @Override + public void prepareDataStream() throws IOException { + var componentTemplate = """ + { + "template": { + "settings": { + "index": { + "mapping.source.mode": "SYNTHETIC" + } + } + } + } + """; + assertOK(putComponentTemplate(client(), "standard-synthetic-component", componentTemplate)); + + var template = """ + { + "index_patterns": ["standard-synthetic"], + "priority": 100, + "data_stream": {}, + "composed_of": ["standard-synthetic-component"] + } + """; + + putTemplate(client(), "standard-synthetic-template", template); + assertOK(createDataStream(client(), dataStreamName())); + } + + @Override + public void rollover() throws IOException { + rolloverDataStream(client(), dataStreamName()); + } + + @Override + public SourceFieldMapper.Mode initialMode() { + return SourceFieldMapper.Mode.SYNTHETIC; + } + + @Override + public SourceFieldMapper.Mode finalMode() { + return SourceFieldMapper.Mode.STORED; + } + }, + new TestCase() { + @Override + public String dataStreamName() { + return "standard-stored"; + } + + @Override + public String indexMode() { + return "standard"; + } + + @Override + public void prepareDataStream() throws IOException { + var componentTemplate = """ + { + "template": { + "settings": { + "index": { + "mapping.source.mode": "STORED" + } + } + } + } + """; + assertOK(putComponentTemplate(client(), "standard-stored-component", componentTemplate)); + + var template = """ + { + "index_patterns": ["standard-stored"], + "priority": 100, + "data_stream": {}, + "composed_of": ["standard-stored-component"] + } + """; + + putTemplate(client(), "standard-stored-template", template); + assertOK(createDataStream(client(), dataStreamName())); + } + + @Override + public void rollover() throws IOException { + rolloverDataStream(client(), dataStreamName()); + } + + @Override + public SourceFieldMapper.Mode initialMode() { + return SourceFieldMapper.Mode.STORED; + } + + @Override + public SourceFieldMapper.Mode finalMode() { + 
return SourceFieldMapper.Mode.STORED; + } + } + ); + } +} diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/DataStreamLicenseChangeIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/DataStreamLicenseChangeIT.java new file mode 100644 index 0000000000000..b84c982766e4b --- /dev/null +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/DataStreamLicenseChangeIT.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.logsdb; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.index.mapper.SourceFieldMapper; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.junit.ClassRule; + +import java.io.IOException; +import java.util.List; + +public abstract class DataStreamLicenseChangeIT extends LogsIndexModeRestTestIT { + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .module("data-streams") + .module("x-pack-stack") + .setting("cluster.logsdb.enabled", "true") + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "basic") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + protected interface TestCase { + String dataStreamName(); + + void prepareDataStream() throws IOException; + + String indexMode(); + + SourceFieldMapper.Mode initialMode(); + + SourceFieldMapper.Mode finalMode(); + + void rollover() throws IOException; + } + + protected 
abstract void licenseChange() throws IOException; + + protected abstract void applyInitialLicense() throws IOException; + + protected abstract List cases(); + + public void testLicenseChange() throws IOException { + applyInitialLicense(); + + for (var testCase : cases()) { + testCase.prepareDataStream(); + + var indexMode = (String) getSetting(client(), getDataStreamBackingIndex(client(), testCase.dataStreamName(), 0), "index.mode"); + assertEquals(testCase.indexMode(), indexMode); + + var sourceMode = (String) getSetting( + client(), + getDataStreamBackingIndex(client(), testCase.dataStreamName(), 0), + "index.mapping.source.mode" + ); + assertEquals(testCase.initialMode().toString(), sourceMode); + } + + licenseChange(); + + for (var testCase : cases()) { + testCase.rollover(); + + var indexMode = (String) getSetting(client(), getDataStreamBackingIndex(client(), testCase.dataStreamName(), 1), "index.mode"); + assertEquals(testCase.indexMode(), indexMode); + + var sourceMode = (String) getSetting( + client(), + getDataStreamBackingIndex(client(), testCase.dataStreamName(), 1), + "index.mapping.source.mode" + ); + assertEquals(testCase.finalMode().toString(), sourceMode); + } + } + + protected static void startBasic() throws IOException { + Request startTrial = new Request("POST", "/_license/start_basic"); + startTrial.addParameter("acknowledge", "true"); + assertOK(client().performRequest(startTrial)); + } + + protected static void startTrial() throws IOException { + Request startTrial = new Request("POST", "/_license/start_trial"); + startTrial.addParameter("acknowledge", "true"); + assertOK(client().performRequest(startTrial)); + } + + protected static Response removeComponentTemplate(final RestClient client, final String componentTemplate) throws IOException { + final Request request = new Request("DELETE", "/_component_template/" + componentTemplate); + return client.performRequest(request); + } +} diff --git 
a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/DataStreamLicenseUpgradeIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/DataStreamLicenseUpgradeIT.java new file mode 100644 index 0000000000000..bce43ca046523 --- /dev/null +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/DataStreamLicenseUpgradeIT.java @@ -0,0 +1,487 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.logsdb; + +import org.elasticsearch.index.mapper.SourceFieldMapper; + +import java.io.IOException; +import java.util.List; + +public class DataStreamLicenseUpgradeIT extends DataStreamLicenseChangeIT { + @Override + protected void applyInitialLicense() {} + + @Override + protected void licenseChange() throws IOException { + startTrial(); + } + + @Override + protected List cases() { + return List.of(new TestCase() { + @Override + public String dataStreamName() { + return "logs-test-regular"; + } + + @Override + public String indexMode() { + return "logsdb"; + } + + @Override + public void prepareDataStream() throws IOException { + assertOK(createDataStream(client(), dataStreamName())); + } + + @Override + public void rollover() throws IOException { + rolloverDataStream(client(), dataStreamName()); + } + + @Override + public SourceFieldMapper.Mode initialMode() { + return SourceFieldMapper.Mode.STORED; + } + + @Override + public SourceFieldMapper.Mode finalMode() { + return SourceFieldMapper.Mode.SYNTHETIC; + } + }, new TestCase() { + private static final String sourceModeOverride = """ + { + "template": { + "settings": { + "index": { + "mapping.source.mode": "SYNTHETIC" + } + } + } + }"""; + + @Override + public String dataStreamName() { + return 
"logs-test-explicit-synthetic"; + } + + @Override + public String indexMode() { + return "logsdb"; + } + + @Override + public void prepareDataStream() throws IOException { + assertOK(putComponentTemplate(client(), "logs@custom", sourceModeOverride)); + assertOK(createDataStream(client(), dataStreamName())); + assertOK(removeComponentTemplate(client(), "logs@custom")); + } + + @Override + public void rollover() throws IOException { + assertOK(putComponentTemplate(client(), "logs@custom", sourceModeOverride)); + rolloverDataStream(client(), dataStreamName()); + assertOK(removeComponentTemplate(client(), "logs@custom")); + } + + @Override + public SourceFieldMapper.Mode initialMode() { + return SourceFieldMapper.Mode.STORED; + } + + @Override + public SourceFieldMapper.Mode finalMode() { + return SourceFieldMapper.Mode.SYNTHETIC; + } + }, new TestCase() { + private static final String sourceModeOverride = """ + { + "template": { + "settings": { + "index": { + "mapping.source.mode": "STORED" + } + } + } + }"""; + + @Override + public String dataStreamName() { + return "logs-test-explicit-stored"; + } + + @Override + public String indexMode() { + return "logsdb"; + } + + @Override + public void prepareDataStream() throws IOException { + assertOK(putComponentTemplate(client(), "logs@custom", sourceModeOverride)); + assertOK(createDataStream(client(), dataStreamName())); + assertOK(removeComponentTemplate(client(), "logs@custom")); + } + + @Override + public void rollover() throws IOException { + assertOK(putComponentTemplate(client(), "logs@custom", sourceModeOverride)); + rolloverDataStream(client(), dataStreamName()); + assertOK(removeComponentTemplate(client(), "logs@custom")); + } + + @Override + public SourceFieldMapper.Mode initialMode() { + return SourceFieldMapper.Mode.STORED; + } + + @Override + public SourceFieldMapper.Mode finalMode() { + return SourceFieldMapper.Mode.STORED; + } + }, new TestCase() { + @Override + public String dataStreamName() { + return 
"tsdb-test-regular"; + } + + @Override + public String indexMode() { + return "time_series"; + } + + @Override + public void prepareDataStream() throws IOException { + var componentTemplate = """ + { + "template": { + "settings": { + "index": { + "mode": "time_series", + "routing_path": ["dim"] + } + }, + "mappings": { + "properties": { + "dim": { + "type": "keyword", + "time_series_dimension": true + } + } + } + } + } + """; + assertOK(putComponentTemplate(client(), "tsdb-test-regular-component", componentTemplate)); + + var template = """ + { + "index_patterns": ["tsdb-test-regular"], + "priority": 100, + "data_stream": {}, + "composed_of": ["tsdb-test-regular-component"] + } + """; + + putTemplate(client(), "tsdb-test-regular-template", template); + assertOK(createDataStream(client(), dataStreamName())); + } + + @Override + public void rollover() throws IOException { + rolloverDataStream(client(), dataStreamName()); + } + + @Override + public SourceFieldMapper.Mode initialMode() { + return SourceFieldMapper.Mode.STORED; + } + + @Override + public SourceFieldMapper.Mode finalMode() { + return SourceFieldMapper.Mode.SYNTHETIC; + } + }, new TestCase() { + @Override + public String dataStreamName() { + return "tsdb-test-synthetic"; + } + + @Override + public String indexMode() { + return "time_series"; + } + + @Override + public void prepareDataStream() throws IOException { + var componentTemplate = """ + { + "template": { + "settings": { + "index": { + "mode": "time_series", + "routing_path": ["dim"], + "mapping.source.mode": "SYNTHETIC" + } + }, + "mappings": { + "properties": { + "dim": { + "type": "keyword", + "time_series_dimension": true + } + } + } + } + } + """; + assertOK(putComponentTemplate(client(), "tsdb-test-synthetic-component", componentTemplate)); + + var template = """ + { + "index_patterns": ["tsdb-test-synthetic"], + "priority": 100, + "data_stream": {}, + "composed_of": ["tsdb-test-synthetic-component"] + } + """; + + putTemplate(client(), 
"tsdb-test-synthetic-template", template); + assertOK(createDataStream(client(), dataStreamName())); + } + + @Override + public void rollover() throws IOException { + rolloverDataStream(client(), dataStreamName()); + } + + @Override + public SourceFieldMapper.Mode initialMode() { + return SourceFieldMapper.Mode.STORED; + } + + @Override + public SourceFieldMapper.Mode finalMode() { + return SourceFieldMapper.Mode.SYNTHETIC; + } + }, new TestCase() { + @Override + public String dataStreamName() { + return "tsdb-test-stored"; + } + + @Override + public String indexMode() { + return "time_series"; + } + + @Override + public void prepareDataStream() throws IOException { + var componentTemplate = """ + { + "template": { + "settings": { + "index": { + "mode": "time_series", + "routing_path": ["dim"], + "mapping.source.mode": "STORED" + } + }, + "mappings": { + "properties": { + "dim": { + "type": "keyword", + "time_series_dimension": true + } + } + } + } + } + """; + assertOK(putComponentTemplate(client(), "tsdb-test-stored-component", componentTemplate)); + + var template = """ + { + "index_patterns": ["tsdb-test-stored"], + "priority": 100, + "data_stream": {}, + "composed_of": ["tsdb-test-stored-component"] + } + """; + + putTemplate(client(), "tsdb-test-stored-template", template); + assertOK(createDataStream(client(), dataStreamName())); + } + + @Override + public void rollover() throws IOException { + rolloverDataStream(client(), dataStreamName()); + } + + @Override + public SourceFieldMapper.Mode initialMode() { + return SourceFieldMapper.Mode.STORED; + } + + @Override + public SourceFieldMapper.Mode finalMode() { + return SourceFieldMapper.Mode.STORED; + } + }, + + new TestCase() { + @Override + public String dataStreamName() { + return "standard"; + } + + @Override + public String indexMode() { + return "standard"; + } + + @Override + public void prepareDataStream() throws IOException { + var template = """ + { + "index_patterns": ["standard"], + "priority": 
100, + "data_stream": {}, + "composed_of": [] + } + """; + + putTemplate(client(), "standard-template", template); + assertOK(createDataStream(client(), dataStreamName())); + } + + @Override + public void rollover() throws IOException { + rolloverDataStream(client(), dataStreamName()); + } + + @Override + public SourceFieldMapper.Mode initialMode() { + return SourceFieldMapper.Mode.STORED; + } + + @Override + public SourceFieldMapper.Mode finalMode() { + return SourceFieldMapper.Mode.STORED; + } + }, + new TestCase() { + @Override + public String dataStreamName() { + return "standard-synthetic"; + } + + @Override + public String indexMode() { + return "standard"; + } + + @Override + public void prepareDataStream() throws IOException { + var componentTemplate = """ + { + "template": { + "settings": { + "index": { + "mapping.source.mode": "SYNTHETIC" + } + } + } + } + """; + assertOK(putComponentTemplate(client(), "standard-synthetic-component", componentTemplate)); + + var template = """ + { + "index_patterns": ["standard-synthetic"], + "priority": 100, + "data_stream": {}, + "composed_of": ["standard-synthetic-component"] + } + """; + + putTemplate(client(), "standard-synthetic-template", template); + assertOK(createDataStream(client(), dataStreamName())); + } + + @Override + public void rollover() throws IOException { + rolloverDataStream(client(), dataStreamName()); + } + + @Override + public SourceFieldMapper.Mode initialMode() { + return SourceFieldMapper.Mode.STORED; + } + + @Override + public SourceFieldMapper.Mode finalMode() { + return SourceFieldMapper.Mode.SYNTHETIC; + } + }, + new TestCase() { + @Override + public String dataStreamName() { + return "standard-stored"; + } + + @Override + public String indexMode() { + return "standard"; + } + + @Override + public void prepareDataStream() throws IOException { + var componentTemplate = """ + { + "template": { + "settings": { + "index": { + "mapping.source.mode": "STORED" + } + } + } + } + """; + 
assertOK(putComponentTemplate(client(), "standard-stored-component", componentTemplate)); + + var template = """ + { + "index_patterns": ["standard-stored"], + "priority": 100, + "data_stream": {}, + "composed_of": ["standard-stored-component"] + } + """; + + putTemplate(client(), "standard-stored-template", template); + assertOK(createDataStream(client(), dataStreamName())); + } + + @Override + public void rollover() throws IOException { + rolloverDataStream(client(), dataStreamName()); + } + + @Override + public SourceFieldMapper.Mode initialMode() { + return SourceFieldMapper.Mode.STORED; + } + + @Override + public SourceFieldMapper.Mode finalMode() { + return SourceFieldMapper.Mode.STORED; + } + } + ); + } +} From 29d1d9e6e034df4a40f5357e03c30d0cccb51afc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aur=C3=A9lien=20FOUCRET?= Date: Fri, 25 Oct 2024 19:13:34 +0200 Subject: [PATCH 426/449] Implement string parsing for the KQL parser. (#115662) --- .../xpack/kql/parser/ParserUtils.java | 254 ++++++++++++++++ .../xpack/kql/parser/ParserUtilsTests.java | 280 ++++++++++++++++++ 2 files changed, 534 insertions(+) create mode 100644 x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/ParserUtils.java create mode 100644 x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/ParserUtilsTests.java diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/ParserUtils.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/ParserUtils.java new file mode 100644 index 0000000000000..f996a953ea7f7 --- /dev/null +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/ParserUtils.java @@ -0,0 +1,254 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.kql.parser; + +import org.antlr.v4.runtime.ParserRuleContext; +import org.antlr.v4.runtime.Token; +import org.antlr.v4.runtime.tree.ParseTree; +import org.antlr.v4.runtime.tree.ParseTreeVisitor; +import org.antlr.v4.runtime.tree.TerminalNode; +import org.apache.logging.log4j.util.Strings; +import org.apache.lucene.queryparser.classic.QueryParser; + +import java.util.ArrayList; +import java.util.List; + +/** + * Utility class for parsing and processing KQL expressions. + * Provides methods for type-safe parsing, text extraction, and string escaping/unescaping. + */ +public final class ParserUtils { + + private static final String UNQUOTED_LITERAL_TERM_DELIMITER = " "; + private static final char ESCAPE_CHAR = '\\'; + private static final char QUOTE_CHAR = '"'; + private static final char WILDCARD_CHAR = '*'; + + private ParserUtils() { + throw new UnsupportedOperationException("No need to instantiate this class"); + } + + /** + * Performs type-safe parsing using the provided visitor. + * + * @param visitor The visitor to use to do the parsing + * @param ctx The parser tree context to visit + * @param type The expected return type class + * @return The parsed result, casted to the expected type + */ + @SuppressWarnings("unchecked") + public static T typedParsing(ParseTreeVisitor visitor, ParserRuleContext ctx, Class type) { + Object result = ctx.accept(visitor); + + if (type.isInstance(result)) { + return (T) result; + } + + throw new KqlParsingException( + "Invalid query '{}'[{}] given; expected {} but found {}", + ctx.start.getLine(), + ctx.start.getCharPositionInLine(), + ctx.getText(), + ctx.getClass().getSimpleName(), + type.getSimpleName(), + (result != null ? result.getClass().getSimpleName() : "null") + ); + } + + /** + * Extracts text from a parser tree context by joining all terminal nodes with a space delimiter. 
+ * + * @param ctx The parser tree context + * + * @return The extracted text + */ + public static String extractText(ParserRuleContext ctx) { + return String.join(UNQUOTED_LITERAL_TERM_DELIMITER, extractTextTokens(ctx)); + } + + /** + * Checks if the given context contains any unescaped wildcard characters. + * + * @param ctx The tree context to check + * @return true if wildcards are present, false otherwise + */ + public static boolean hasWildcard(ParserRuleContext ctx) { + return ctx.children.stream().anyMatch(childNode -> { + if (childNode instanceof TerminalNode terminalNode) { + Token token = terminalNode.getSymbol(); + return switch (token.getType()) { + case KqlBaseParser.WILDCARD -> true; + case KqlBaseParser.UNQUOTED_LITERAL -> token.getText().matches("[^\\\\]*[*].*"); + default -> false; + }; + } + + return false; + }); + } + + /** + * Escapes special characters in a query string for use in Lucene queries. + * + * @param queryText The query text to escape + * @param preserveWildcards If true, does not escape wildcard characters (*) + * @return The escaped query string + */ + public static String escapeLuceneQueryString(String queryText, boolean preserveWildcards) { + if (preserveWildcards) { + StringBuilder escapedQuery = new StringBuilder(queryText.length()); + StringBuilder subpart = new StringBuilder(queryText.length()); + + for (char currentChar : queryText.toCharArray()) { + if (currentChar == WILDCARD_CHAR) { + escapedQuery.append(QueryParser.escape(subpart.toString())).append(currentChar); + subpart.setLength(0); + } else { + subpart.append(currentChar); + } + } + + return escapedQuery.append(QueryParser.escape(subpart.toString())).toString(); + } + + return QueryParser.escape(queryText); + } + + private static List extractTextTokens(ParserRuleContext ctx) { + assert ctx.children != null; + List textTokens = new ArrayList<>(ctx.children.size()); + + for (ParseTree currentNode : ctx.children) { + if (currentNode instanceof TerminalNode 
terminalNode) { + textTokens.add(extractText(terminalNode)); + } else { + throw new KqlParsingException("Unable to extract text from ctx", ctx.start.getLine(), ctx.start.getCharPositionInLine()); + } + } + + return textTokens; + } + + private static String extractText(TerminalNode node) { + if (node.getSymbol().getType() == KqlBaseParser.QUOTED_STRING) { + return unescapeQuotedString(node); + } else if (node.getSymbol().getType() == KqlBaseParser.UNQUOTED_LITERAL) { + return unescapeUnquotedLiteral(node); + } + + return node.getText(); + } + + private static String unescapeQuotedString(TerminalNode ctx) { + String inputText = ctx.getText(); + + assert inputText.length() >= 2 && inputText.charAt(0) == QUOTE_CHAR && inputText.charAt(inputText.length() - 1) == QUOTE_CHAR; + StringBuilder sb = new StringBuilder(); + + for (int i = 1; i < inputText.length() - 1;) { + char currentChar = inputText.charAt(i++); + if (currentChar == ESCAPE_CHAR && i + 1 < inputText.length()) { + currentChar = inputText.charAt(i++); + switch (currentChar) { + case 't' -> sb.append('\t'); + case 'n' -> sb.append('\n'); + case 'r' -> sb.append('\r'); + case 'u' -> i = handleUnicodeSequemce(ctx, sb, inputText, i); + case QUOTE_CHAR -> sb.append('\"'); + case ESCAPE_CHAR -> sb.append(ESCAPE_CHAR); + default -> sb.append(ESCAPE_CHAR).append(currentChar); + } + } else { + sb.append(currentChar); + } + } + + return sb.toString(); + } + + private static String unescapeUnquotedLiteral(TerminalNode ctx) { + String inputText = ctx.getText(); + + if (inputText == null || inputText.isEmpty()) { + return inputText; + } + StringBuilder sb = new StringBuilder(inputText.length()); + + for (int i = 0; i < inputText.length();) { + char currentChar = inputText.charAt(i++); + if (currentChar == ESCAPE_CHAR && i < inputText.length()) { + if (isEscapedKeywordSequence(inputText, i)) { + String sequence = handleKeywordSequence(inputText, i); + sb.append(sequence); + i += sequence.length(); + } else { + currentChar = 
inputText.charAt(i++); + switch (currentChar) { + case 't' -> sb.append('\t'); + case 'n' -> sb.append('\n'); + case 'r' -> sb.append('\r'); + case 'u' -> i = handleUnicodeSequemce(ctx, sb, inputText, i); + case QUOTE_CHAR -> sb.append('\"'); + case ESCAPE_CHAR -> sb.append(ESCAPE_CHAR); + case '(', ')', ':', '<', '>', '*', '{', '}' -> sb.append(currentChar); + default -> sb.append(ESCAPE_CHAR).append(currentChar); + } + } + } else { + sb.append(currentChar); + } + } + + return sb.toString(); + } + + private static boolean isEscapedKeywordSequence(String input, int startIndex) { + if (startIndex + 1 >= input.length()) { + return false; + } + String remaining = Strings.toRootLowerCase(input.substring(startIndex)); + return remaining.startsWith("and") || remaining.startsWith("or") || remaining.startsWith("not"); + } + + private static String handleKeywordSequence(String input, int startIndex) { + String remaining = input.substring(startIndex); + if (Strings.toRootLowerCase(remaining).startsWith("and")) return remaining.substring(0, 3); + if (Strings.toRootLowerCase(remaining).startsWith("or")) return remaining.substring(0, 2); + if (Strings.toRootLowerCase(remaining).startsWith("not")) return remaining.substring(0, 3); + return ""; + } + + private static int handleUnicodeSequemce(TerminalNode ctx, StringBuilder sb, String text, int startIdx) { + int endIdx = startIdx + 4; + String hex = text.substring(startIdx, endIdx); + + try { + int code = Integer.parseInt(hex, 16); + + if (code >= 0xD800 && code <= 0xDFFF) { + // U+D800—U+DFFF can only be used as surrogate pairs and are not valid character codes. 
+ throw new KqlParsingException( + "Invalid unicode character code, [{}] is a surrogate code", + ctx.getSymbol().getLine(), + ctx.getSymbol().getCharPositionInLine() + startIdx, + hex + ); + } + sb.append(String.valueOf(Character.toChars(code))); + } catch (IllegalArgumentException e) { + throw new KqlParsingException( + "Invalid unicode character code [{}]", + ctx.getSymbol().getLine(), + ctx.getSymbol().getCharPositionInLine() + startIdx, + hex + ); + } + + return endIdx; + } +} diff --git a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/ParserUtilsTests.java b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/ParserUtilsTests.java new file mode 100644 index 0000000000000..05474bcedd4c8 --- /dev/null +++ b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/ParserUtilsTests.java @@ -0,0 +1,280 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.kql.parser; + +import org.antlr.v4.runtime.ParserRuleContext; +import org.antlr.v4.runtime.Token; +import org.antlr.v4.runtime.tree.ParseTree; +import org.antlr.v4.runtime.tree.TerminalNode; +import org.antlr.v4.runtime.tree.TerminalNodeImpl; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.stream.Stream; + +import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.kql.parser.KqlBaseParser.QUOTED_STRING; +import static org.elasticsearch.xpack.kql.parser.KqlBaseParser.UNQUOTED_LITERAL; +import static org.elasticsearch.xpack.kql.parser.KqlBaseParser.WILDCARD; +import static org.elasticsearch.xpack.kql.parser.ParserUtils.escapeLuceneQueryString; +import static org.elasticsearch.xpack.kql.parser.ParserUtils.extractText; +import static org.elasticsearch.xpack.kql.parser.ParserUtils.hasWildcard; +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class ParserUtilsTests extends ESTestCase { + + public void testExtractTestWithQuotedString() { + // General case + assertThat(extractText(parserRuleContext(quotedStringNode("foo"))), equalTo("foo")); + + // Empty string + assertThat(extractText(parserRuleContext(quotedStringNode(""))), equalTo("")); + + // Whitespaces are preserved + assertThat(extractText(parserRuleContext(quotedStringNode(" foo bar "))), equalTo(" foo bar ")); + + // Quoted string does not need escaping for KQL keywords (and, or, ...) + assertThat(extractText(parserRuleContext(quotedStringNode("not foo and bar or baz"))), equalTo("not foo and bar or baz")); + + // Quoted string does not need escaping for KQL special chars (e.g: '{', ':', ...) 
+ assertThat(extractText(parserRuleContext(quotedStringNode("foo*:'\u3000{(})"))), equalTo("foo*:'\u3000{(})")); + + // Escaped characters handling + assertThat(extractText(parserRuleContext(quotedStringNode("\\\\"))), equalTo("\\")); + assertThat(extractText(parserRuleContext(quotedStringNode("foo\\\\bar"))), equalTo("foo\\bar")); + assertThat(extractText(parserRuleContext(quotedStringNode("foo\\\\"))), equalTo("foo\\")); + assertThat(extractText(parserRuleContext(quotedStringNode("\\\\foo"))), equalTo("\\foo")); + + assertThat(extractText(parserRuleContext(quotedStringNode("\\\""))), equalTo("\"")); + assertThat(extractText(parserRuleContext(quotedStringNode("foo\\\"bar"))), equalTo("foo\"bar")); + assertThat(extractText(parserRuleContext(quotedStringNode("foo\\\""))), equalTo("foo\"")); + assertThat(extractText(parserRuleContext(quotedStringNode("\\\"foo"))), equalTo("\"foo")); + + assertThat(extractText(parserRuleContext(quotedStringNode("\\t"))), equalTo("\t")); + assertThat(extractText(parserRuleContext(quotedStringNode("foo\\tbar"))), equalTo("foo\tbar")); + assertThat(extractText(parserRuleContext(quotedStringNode("foo\\t"))), equalTo("foo\t")); + assertThat(extractText(parserRuleContext(quotedStringNode("\\tfoo"))), equalTo("\tfoo")); + + assertThat(extractText(parserRuleContext(quotedStringNode("\\n"))), equalTo("\n")); + assertThat(extractText(parserRuleContext(quotedStringNode("foo\\nbar"))), equalTo("foo\nbar")); + assertThat(extractText(parserRuleContext(quotedStringNode("foo\\n"))), equalTo("foo\n")); + assertThat(extractText(parserRuleContext(quotedStringNode("\\nfoo"))), equalTo("\nfoo")); + + assertThat(extractText(parserRuleContext(quotedStringNode("\\r"))), equalTo("\r")); + assertThat(extractText(parserRuleContext(quotedStringNode("foo\\rbar"))), equalTo("foo\rbar")); + assertThat(extractText(parserRuleContext(quotedStringNode("foo\\r"))), equalTo("foo\r")); + assertThat(extractText(parserRuleContext(quotedStringNode("\\rfoo"))), 
equalTo("\rfoo")); + + // Unicode characters handling (\u0041 is 'A') + assertThat(extractText(parserRuleContext(quotedStringNode(format("\\u0041")))), equalTo("A")); + assertThat(extractText(parserRuleContext(quotedStringNode(format("foo\\u0041bar")))), equalTo("fooAbar")); + assertThat(extractText(parserRuleContext(quotedStringNode(format("foo\\u0041")))), equalTo("fooA")); + assertThat(extractText(parserRuleContext(quotedStringNode(format("\\u0041foo")))), equalTo("Afoo")); + } + + public void testExtractTestWithUnquotedLiteral() { + // General case + assertThat(extractText(parserRuleContext(literalNode("foo"))), equalTo("foo")); + + // KQL keywords unescaping + assertThat(extractText(parserRuleContext(literalNode("\\not foo \\and bar \\or baz"))), equalTo("not foo and bar or baz")); + assertThat( + extractText(parserRuleContext(literalNode("\\\\not foo \\\\and bar \\\\or baz"))), + equalTo("\\not foo \\and bar \\or baz") + ); + + // Escaped characters handling + assertThat(extractText(parserRuleContext(literalNode("\\\\"))), equalTo("\\")); + assertThat(extractText(parserRuleContext(literalNode("foo\\\\bar"))), equalTo("foo\\bar")); + assertThat(extractText(parserRuleContext(literalNode("foo\\\\"))), equalTo("foo\\")); + assertThat(extractText(parserRuleContext(literalNode("\\\\foo"))), equalTo("\\foo")); + + assertThat(extractText(parserRuleContext(literalNode("\\\""))), equalTo("\"")); + assertThat(extractText(parserRuleContext(literalNode("foo\\\"bar"))), equalTo("foo\"bar")); + assertThat(extractText(parserRuleContext(literalNode("foo\\\""))), equalTo("foo\"")); + assertThat(extractText(parserRuleContext(literalNode("\\\"foo"))), equalTo("\"foo")); + + assertThat(extractText(parserRuleContext(literalNode("\\t"))), equalTo("\t")); + assertThat(extractText(parserRuleContext(literalNode("foo\\tbar"))), equalTo("foo\tbar")); + assertThat(extractText(parserRuleContext(literalNode("foo\\t"))), equalTo("foo\t")); + 
assertThat(extractText(parserRuleContext(literalNode("\\tfoo"))), equalTo("\tfoo")); + + assertThat(extractText(parserRuleContext(literalNode("\\n"))), equalTo("\n")); + assertThat(extractText(parserRuleContext(literalNode("foo\\nbar"))), equalTo("foo\nbar")); + assertThat(extractText(parserRuleContext(literalNode("foo\\n"))), equalTo("foo\n")); + assertThat(extractText(parserRuleContext(literalNode("\\nfoo"))), equalTo("\nfoo")); + + assertThat(extractText(parserRuleContext(literalNode("\\r"))), equalTo("\r")); + assertThat(extractText(parserRuleContext(literalNode("foo\\rbar"))), equalTo("foo\rbar")); + assertThat(extractText(parserRuleContext(literalNode("foo\\r"))), equalTo("foo\r")); + assertThat(extractText(parserRuleContext(literalNode("\\rfoo"))), equalTo("\rfoo")); + + for (String escapedChar : List.of("(", ")", ":", "<", ">", "*", "{", "}")) { + assertThat(extractText(parserRuleContext(literalNode(format("\\%s", escapedChar)))), equalTo(escapedChar)); + assertThat( + extractText(parserRuleContext(literalNode(format("foo\\%sbar", escapedChar)))), + equalTo(format("foo%sbar", escapedChar)) + ); + assertThat(extractText(parserRuleContext(literalNode(format("foo\\%s", escapedChar)))), equalTo(format("foo%s", escapedChar))); + assertThat(extractText(parserRuleContext(literalNode(format("\\%sfoo", escapedChar)))), equalTo(format("%sfoo", escapedChar))); + } + + // Unicode characters handling (\u0041 is 'A') + assertThat(extractText(parserRuleContext(literalNode(format("\\u0041")))), equalTo("A")); + assertThat(extractText(parserRuleContext(literalNode(format("foo\\u0041bar")))), equalTo("fooAbar")); + assertThat(extractText(parserRuleContext(literalNode(format("foo\\u0041")))), equalTo("fooA")); + assertThat(extractText(parserRuleContext(literalNode(format("\\u0041foo")))), equalTo("Afoo")); + } + + public void testHasWildcard() { + // No children + assertFalse(hasWildcard(parserRuleContext(List.of()))); + + // Lone wildcard + 
assertTrue(hasWildcard(parserRuleContext(wildcardNode()))); + assertTrue(hasWildcard(parserRuleContext(randomTextNodeListWithNode(wildcardNode())))); + + // All children are literals + assertFalse(hasWildcard(parserRuleContext(randomList(1, randomIntBetween(1, 100), ParserUtilsTests::randomLiteralNode)))); + + // Quoted string + assertFalse(hasWildcard(parserRuleContext(randomQuotedStringNode()))); + + // Literal node containing the wildcard character + assertTrue(hasWildcard(parserRuleContext(literalNode("f*oo")))); + assertTrue(hasWildcard(parserRuleContext(literalNode("*foo")))); + assertTrue(hasWildcard(parserRuleContext(literalNode("foo*")))); + + // Literal node containing the wildcard characters (escaped) + assertFalse(hasWildcard(parserRuleContext(literalNode("f\\*oo")))); + assertFalse(hasWildcard(parserRuleContext(literalNode("\\*foo")))); + assertFalse(hasWildcard(parserRuleContext(literalNode("foo\\*")))); + } + + public void testUnquotedLiteralInvalidUnicodeCodeParsing() { + { + // Invalid unicode digit (G) + ParserRuleContext ctx = parserRuleContext(literalNode("\\u0G41")); + KqlParsingException e = assertThrows(KqlParsingException.class, () -> extractText(ctx)); + assertThat(e.getMessage(), equalTo("line 0:3: Invalid unicode character code [0G41]")); + } + + { + // U+D800—U+DFFF can only be used as surrogate pairs and are not valid character codes. 
+ ParserRuleContext ctx = parserRuleContext(literalNode("\\uD900")); + KqlParsingException e = assertThrows(KqlParsingException.class, () -> extractText(ctx)); + assertThat(e.getMessage(), equalTo("line 0:3: Invalid unicode character code, [D900] is a surrogate code")); + } + } + + public void testQuotedStringInvalidUnicodeCodeParsing() { + { + // Invalid unicode digit (G) + ParserRuleContext ctx = parserRuleContext(quotedStringNode("\\u0G41")); + KqlParsingException e = assertThrows(KqlParsingException.class, () -> extractText(ctx)); + assertThat(e.getMessage(), equalTo("line 0:4: Invalid unicode character code [0G41]")); + } + + { + // U+D800—U+DFFF can only be used as surrogate pairs and are not valid character codes. + ParserRuleContext ctx = parserRuleContext(quotedStringNode("\\uD900")); + KqlParsingException e = assertThrows(KqlParsingException.class, () -> extractText(ctx)); + assertThat(e.getMessage(), equalTo("line 0:4: Invalid unicode character code, [D900] is a surrogate code")); + } + } + + public void testEscapeLuceneQueryString() { + // Quotes + assertThat(escapeLuceneQueryString("\"The Pink Panther\"", randomBoolean()), equalTo("\\\"The Pink Panther\\\"")); + + // Escape chars + assertThat(escapeLuceneQueryString("The Pink \\ Panther", randomBoolean()), equalTo("The Pink \\\\ Panther")); + + // Field operations + assertThat(escapeLuceneQueryString("title:Do it right", randomBoolean()), equalTo("title\\:Do it right")); + assertThat(escapeLuceneQueryString("title:(pink panther)", randomBoolean()), equalTo("title\\:\\(pink panther\\)")); + assertThat(escapeLuceneQueryString("title:-pink", randomBoolean()), equalTo("title\\:\\-pink")); + assertThat(escapeLuceneQueryString("title:+pink", randomBoolean()), equalTo("title\\:\\+pink")); + assertThat(escapeLuceneQueryString("title:pink~", randomBoolean()), equalTo("title\\:pink\\~")); + assertThat(escapeLuceneQueryString("title:pink~3.5", randomBoolean()), equalTo("title\\:pink\\~3.5")); + 
assertThat(escapeLuceneQueryString("title:pink panther^4", randomBoolean()), equalTo("title\\:pink panther\\^4")); + assertThat(escapeLuceneQueryString("rating:[0 TO 5]", randomBoolean()), equalTo("rating\\:\\[0 TO 5\\]")); + assertThat(escapeLuceneQueryString("rating:{0 TO 5}", randomBoolean()), equalTo("rating\\:\\{0 TO 5\\}")); + + // Boolean operators + assertThat(escapeLuceneQueryString("foo || bar", randomBoolean()), equalTo("foo \\|\\| bar")); + assertThat(escapeLuceneQueryString("foo && bar", randomBoolean()), equalTo("foo \\&\\& bar")); + assertThat(escapeLuceneQueryString("!foo", randomBoolean()), equalTo("\\!foo")); + + // Wildcards: + assertThat(escapeLuceneQueryString("te?t", randomBoolean()), equalTo("te\\?t")); + assertThat(escapeLuceneQueryString("foo*", true), equalTo("foo*")); + assertThat(escapeLuceneQueryString("*foo", true), equalTo("*foo")); + assertThat(escapeLuceneQueryString("foo * bar", true), equalTo("foo * bar")); + assertThat(escapeLuceneQueryString("foo*", false), equalTo("foo\\*")); + } + + private static ParserRuleContext parserRuleContext(ParseTree child) { + return parserRuleContext(List.of(child)); + } + + private static ParserRuleContext parserRuleContext(List children) { + ParserRuleContext ctx = new ParserRuleContext(null, randomInt()); + ctx.children = children; + return ctx; + } + + private static TerminalNode terminalNode(int type, String text) { + Token symbol = mock(Token.class); + when(symbol.getType()).thenReturn(type); + when(symbol.getText()).thenReturn(text); + when(symbol.getLine()).thenReturn(0); + when(symbol.getCharPositionInLine()).thenReturn(0); + return new TerminalNodeImpl(symbol); + } + + private static List randomTextNodeListWithNode(TerminalNode node) { + List nodes = new ArrayList<>( + Stream.concat(Stream.generate(ParserUtilsTests::randomTextNode).limit(100), Stream.of(node)).toList() + ); + Collections.shuffle(nodes, random()); + return nodes; + } + + private static TerminalNode randomTextNode() { + 
return switch (randomInt() % 3) { + case 0 -> wildcardNode(); + case 1 -> randomQuotedStringNode(); + default -> randomLiteralNode(); + }; + } + + private static TerminalNode quotedStringNode(String quotedStringText) { + return terminalNode(QUOTED_STRING, "\"" + quotedStringText + "\""); + } + + private static TerminalNode randomQuotedStringNode() { + return quotedStringNode(randomIdentifier()); + } + + private static TerminalNode literalNode(String literalText) { + return terminalNode(UNQUOTED_LITERAL, literalText); + } + + private static TerminalNode randomLiteralNode() { + return terminalNode(UNQUOTED_LITERAL, randomIdentifier()); + } + + private static TerminalNode wildcardNode() { + return terminalNode(WILDCARD, "*"); + } +} From 3294c679eb9b2fee0d6aca529da3fe2e2745db18 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Fri, 25 Oct 2024 13:57:43 -0400 Subject: [PATCH 427/449] Fix test mute 115605 (#115659) * Unmuting test issue #115605 * fixing --------- Co-authored-by: Elastic Machine --- muted-tests.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 70f29016d8475..abd483c1bc67e 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -270,9 +270,6 @@ tests: - class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT method: test {yaml=cluster.stats/30_ccs_stats/cross-cluster search stats search} issue: https://github.com/elastic/elasticsearch/issues/115600 -- class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT - method: test {yaml=indices.create/10_basic/Create lookup index} - issue: https://github.com/elastic/elasticsearch/issues/115605 - class: org.elasticsearch.oldrepos.OldRepositoryAccessIT method: testOldRepoAccess issue: https://github.com/elastic/elasticsearch/issues/115631 From 679ef122091e825401993530e38dcabe26f375bf Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 26 Oct 2024 05:31:10 +1100 Subject: [PATCH 
428/449] Mute org.elasticsearch.index.get.GetResultTests testToAndFromXContent #115688 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index abd483c1bc67e..f92795c18e2d2 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -276,6 +276,9 @@ tests: - class: org.elasticsearch.xpack.search.CrossClusterAsyncSearchIT method: testCCSClusterDetailsWhereAllShardsSkippedInCanMatch issue: https://github.com/elastic/elasticsearch/issues/115652 +- class: org.elasticsearch.index.get.GetResultTests + method: testToAndFromXContent + issue: https://github.com/elastic/elasticsearch/issues/115688 # Examples: # From 8240f409f943b1f2f865a4704213cdb79b047881 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 26 Oct 2024 05:31:46 +1100 Subject: [PATCH 429/449] Mute org.elasticsearch.action.update.UpdateResponseTests testToAndFromXContent #115689 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index f92795c18e2d2..91644c9af70ca 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -279,6 +279,9 @@ tests: - class: org.elasticsearch.index.get.GetResultTests method: testToAndFromXContent issue: https://github.com/elastic/elasticsearch/issues/115688 +- class: org.elasticsearch.action.update.UpdateResponseTests + method: testToAndFromXContent + issue: https://github.com/elastic/elasticsearch/issues/115689 # Examples: # From 68316f7d17da89a11695e62561b4b0e099c0c3ff Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Fri, 25 Oct 2024 20:52:06 +0200 Subject: [PATCH 430/449] Remove metering from ingest service to occur afterwards when parsing the final document (#114895) --- ...eteringParserDecoratorWithPipelinesIT.java | 137 ------------------ .../XContentMeteringParserDecoratorIT.java | 11 +- .../org/elasticsearch/TransportVersions.java | 1 + .../action/index/IndexRequest.java | 81 +++-------- 
.../action/update/UpdateHelper.java | 17 +-- .../index/mapper/DocumentParser.java | 2 +- .../index/mapper/ParsedDocument.java | 21 +-- .../index/mapper/SourceToParse.java | 2 +- .../elasticsearch/ingest/IngestService.java | 16 +- .../elasticsearch/node/NodeConstruction.java | 9 +- .../internal/DocumentParsingProvider.java | 4 +- .../XContentMeteringParserDecorator.java | 8 +- .../bulk/TransportShardBulkActionTests.java | 32 ++-- .../ingest/ReservedPipelineActionTests.java | 2 - .../action/update/UpdateRequestTests.java | 5 +- .../index/IndexingSlowLogTests.java | 12 +- .../index/engine/InternalEngineTests.java | 4 +- .../index/shard/RefreshListenersTests.java | 4 +- .../index/translog/TranslogTests.java | 4 +- .../ingest/IngestServiceTests.java | 77 ---------- .../ingest/SimulateIngestServiceTests.java | 2 - .../snapshots/SnapshotResiliencyTests.java | 3 +- .../index/engine/EngineTestCase.java | 3 +- ...sportGetTrainedModelsStatsActionTests.java | 2 - .../authz/AuthorizationServiceTests.java | 3 +- 25 files changed, 87 insertions(+), 375 deletions(-) delete mode 100644 modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecoratorWithPipelinesIT.java diff --git a/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecoratorWithPipelinesIT.java b/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecoratorWithPipelinesIT.java deleted file mode 100644 index 3547b3f9910ad..0000000000000 --- a/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecoratorWithPipelinesIT.java +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.plugins.internal; - -import org.elasticsearch.action.DocWriteRequest; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.ingest.common.IngestCommonPlugin; -import org.elasticsearch.plugins.IngestPlugin; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.xcontent.FilterXContentParserWrapper; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import java.util.concurrent.atomic.AtomicLong; - -import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.Matchers.equalTo; - -@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST) -public class XContentMeteringParserDecoratorWithPipelinesIT extends ESIntegTestCase { - - private static String TEST_INDEX_NAME = "test-index-name"; - // the assertions are done in plugin which is static and will be created by ES server. 
- // hence a static flag to make sure it is indeed used - public static volatile boolean hasWrappedParser; - public static AtomicLong providedFixedSize = new AtomicLong(); - - public void testDocumentIsReportedWithPipelines() throws Exception { - hasWrappedParser = false; - // pipeline adding fields, changing destination is not affecting reporting - putJsonPipeline("pipeline", """ - { - "processors": [ - { - "set": { - "field": "my-text-field", - "value": "xxxx" - } - }, - { - "set": { - "field": "my-boolean-field", - "value": true - } - } - ] - } - """); - - client().index( - new IndexRequest(TEST_INDEX_NAME).setPipeline("pipeline") - .id("1") - .source(jsonBuilder().startObject().field("test", "I am sam i am").endObject()) - ).actionGet(); - assertBusy(() -> { - // ingest node has used an observer that was counting #map operations - // and passed that info to newFixedSize observer in TransportShardBulkAction - assertTrue(hasWrappedParser); - assertThat(providedFixedSize.get(), equalTo(1L)); - }); - } - - @Override - protected Collection> nodePlugins() { - return List.of(TestDocumentParsingProviderPlugin.class, IngestCommonPlugin.class); - } - - public static class TestDocumentParsingProviderPlugin extends Plugin implements DocumentParsingProviderPlugin, IngestPlugin { - - public TestDocumentParsingProviderPlugin() {} - - @Override - public DocumentParsingProvider getDocumentParsingProvider() { - // returns a static instance, because we want to assert that the wrapping is called only once - return new DocumentParsingProvider() { - @Override - public XContentMeteringParserDecorator newMeteringParserDecorator(DocWriteRequest request) { - if (request instanceof IndexRequest indexRequest && indexRequest.getNormalisedBytesParsed() > 0) { - long normalisedBytesParsed = indexRequest.getNormalisedBytesParsed(); - providedFixedSize.set(normalisedBytesParsed); - return new TestXContentMeteringParserDecorator(normalisedBytesParsed); - } - return new 
TestXContentMeteringParserDecorator(0L); - } - - @Override - public DocumentSizeReporter newDocumentSizeReporter( - String indexName, - MapperService mapperService, - DocumentSizeAccumulator documentSizeAccumulator - ) { - return DocumentSizeReporter.EMPTY_INSTANCE; - } - }; - } - } - - public static class TestXContentMeteringParserDecorator implements XContentMeteringParserDecorator { - long mapCounter = 0; - - public TestXContentMeteringParserDecorator(long mapCounter) { - this.mapCounter = mapCounter; - } - - @Override - public XContentParser decorate(XContentParser xContentParser) { - hasWrappedParser = true; - return new FilterXContentParserWrapper(xContentParser) { - - @Override - public Map map() throws IOException { - mapCounter++; - return super.map(); - } - }; - } - - @Override - public ParsedDocument.DocumentSize meteredDocumentSize() { - return new ParsedDocument.DocumentSize(mapCounter, 0); - } - } - -} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecoratorIT.java b/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecoratorIT.java index f11c145f71f23..f70667b91aec8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecoratorIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecoratorIT.java @@ -9,7 +9,6 @@ package org.elasticsearch.plugins.internal; -import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.EngineFactory; @@ -126,7 +125,7 @@ public TestDocumentParsingProviderPlugin() {} public DocumentParsingProvider getDocumentParsingProvider() { return new DocumentParsingProvider() { @Override - public XContentMeteringParserDecorator newMeteringParserDecorator(DocWriteRequest request) { + public 
XContentMeteringParserDecorator newMeteringParserDecorator(IndexRequest request) { return new TestXContentMeteringParserDecorator(0L); } @@ -152,8 +151,8 @@ public TestDocumentSizeReporter(String indexName) { @Override public void onIndexingCompleted(ParsedDocument parsedDocument) { - long delta = parsedDocument.getNormalizedSize().ingestedBytes(); - if (delta > 0) { + long delta = parsedDocument.getNormalizedSize(); + if (delta > XContentMeteringParserDecorator.UNKNOWN_SIZE) { COUNTER.addAndGet(delta); } assertThat(indexName, equalTo(TEST_INDEX_NAME)); @@ -181,8 +180,8 @@ public Token nextToken() throws IOException { } @Override - public ParsedDocument.DocumentSize meteredDocumentSize() { - return new ParsedDocument.DocumentSize(counter, counter); + public long meteredDocumentSize() { + return counter; } } } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 25bb792d827a9..3986ea4b97254 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -182,6 +182,7 @@ static TransportVersion def(int id) { public static final TransportVersion SIMULATE_MAPPING_ADDITION = def(8_777_00_0); public static final TransportVersion INTRODUCE_ALL_APPLICABLE_SELECTOR = def(8_778_00_0); public static final TransportVersion INDEX_MODE_LOOKUP = def(8_779_00_0); + public static final TransportVersion INDEX_REQUEST_REMOVE_METERING = def(8_780_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java index b98f5d87ee232..d0785a60dd0f5 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -146,9 +146,6 @@ public class IndexRequest extends ReplicatedWriteRequest implement * rawTimestamp field is used on the coordinate node, it doesn't need to be serialised. */ private Object rawTimestamp; - private long normalisedBytesParsed = -1; - private boolean originatesFromUpdateByScript; - private boolean originatesFromUpdateByDoc; public IndexRequest(StreamInput in) throws IOException { this(null, in); @@ -183,7 +180,7 @@ public IndexRequest(@Nullable ShardId shardId, StreamInput in) throws IOExceptio dynamicTemplates = in.readMap(StreamInput::readString); if (in.getTransportVersion().onOrAfter(PIPELINES_HAVE_RUN_FIELD_ADDED) && in.getTransportVersion().before(TransportVersions.V_8_13_0)) { - in.readBoolean(); + in.readBoolean(); // obsolete, prior to tracking normalisedBytesParsed } if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { this.listExecutedPipelines = in.readBoolean(); @@ -196,21 +193,20 @@ public IndexRequest(@Nullable ShardId shardId, StreamInput in) throws IOExceptio } if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { requireDataStream = in.readBoolean(); - normalisedBytesParsed = in.readZLong(); } else { requireDataStream = false; } - if (in.getTransportVersion().onOrAfter(TransportVersions.INDEX_REQUEST_UPDATE_BY_SCRIPT_ORIGIN)) { - originatesFromUpdateByScript = in.readBoolean(); - } else { - originatesFromUpdateByScript = false; - } - - if (in.getTransportVersion().onOrAfter(TransportVersions.INDEX_REQUEST_UPDATE_BY_DOC_ORIGIN)) { - originatesFromUpdateByDoc = in.readBoolean(); - } else { - originatesFromUpdateByDoc = false; + if 
(in.getTransportVersion().before(TransportVersions.INDEX_REQUEST_REMOVE_METERING)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { + in.readZLong(); // obsolete normalisedBytesParsed + } + if (in.getTransportVersion().onOrAfter(TransportVersions.INDEX_REQUEST_UPDATE_BY_SCRIPT_ORIGIN)) { + in.readBoolean(); // obsolete originatesFromUpdateByScript + } + if (in.getTransportVersion().onOrAfter(TransportVersions.INDEX_REQUEST_UPDATE_BY_DOC_ORIGIN)) { + in.readBoolean(); // obsolete originatesFromUpdateByDoc + } } } @@ -759,7 +755,7 @@ private void writeBody(StreamOutput out) throws IOException { out.writeMap(dynamicTemplates, StreamOutput::writeString); if (out.getTransportVersion().onOrAfter(PIPELINES_HAVE_RUN_FIELD_ADDED) && out.getTransportVersion().before(TransportVersions.V_8_13_0)) { - out.writeBoolean(normalisedBytesParsed != -1L); + out.writeBoolean(false); // obsolete, prior to tracking normalisedBytesParsed } if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { out.writeBoolean(listExecutedPipelines); @@ -770,15 +766,18 @@ private void writeBody(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(requireDataStream); - out.writeZLong(normalisedBytesParsed); - } - - if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_REQUEST_UPDATE_BY_SCRIPT_ORIGIN)) { - out.writeBoolean(originatesFromUpdateByScript); } - if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_REQUEST_UPDATE_BY_DOC_ORIGIN)) { - out.writeBoolean(originatesFromUpdateByDoc); + if (out.getTransportVersion().before(TransportVersions.INDEX_REQUEST_REMOVE_METERING)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { + out.writeZLong(-1); // obsolete normalisedBytesParsed + } + if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_REQUEST_UPDATE_BY_SCRIPT_ORIGIN)) { + out.writeBoolean(false); // obsolete originatesFromUpdateByScript 
+ } + if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_REQUEST_UPDATE_BY_DOC_ORIGIN)) { + out.writeBoolean(false); // obsolete originatesFromUpdateByDoc + } } } @@ -928,24 +927,6 @@ public void setRawTimestamp(Object rawTimestamp) { this.rawTimestamp = rawTimestamp; } - /** - * Returns a number of bytes observed when parsing a document in earlier stages of ingestion (like update/ingest service) - * Defaults to -1 when a document size was not observed in earlier stages. - * @return a number of bytes observed - */ - public long getNormalisedBytesParsed() { - return normalisedBytesParsed; - } - - /** - * Sets number of bytes observed by a DocumentSizeObserver - * @return an index request - */ - public IndexRequest setNormalisedBytesParsed(long normalisedBytesParsed) { - this.normalisedBytesParsed = normalisedBytesParsed; - return this; - } - /** * Adds the pipeline to the list of executed pipelines, if listExecutedPipelines is true * @@ -976,22 +957,4 @@ public List getExecutedPipelines() { return Collections.unmodifiableList(executedPipelines); } } - - public IndexRequest setOriginatesFromUpdateByScript(boolean originatesFromUpdateByScript) { - this.originatesFromUpdateByScript = originatesFromUpdateByScript; - return this; - } - - public boolean originatesFromUpdateByScript() { - return originatesFromUpdateByScript; - } - - public boolean originatesFromUpdateByDoc() { - return originatesFromUpdateByDoc; - } - - public IndexRequest setOriginatesFromUpdateByDoc(boolean originatesFromUpdateByDoc) { - this.originatesFromUpdateByDoc = originatesFromUpdateByDoc; - return this; - } } diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index 212b99ca140d3..d32e102b2e18b 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -28,8 +28,7 @@ import 
org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.plugins.internal.DocumentParsingProvider; -import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator; +import org.elasticsearch.plugins.internal.XContentParserDecorator; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.UpdateCtxMap; @@ -51,11 +50,9 @@ public class UpdateHelper { private static final Logger logger = LogManager.getLogger(UpdateHelper.class); private final ScriptService scriptService; - private final DocumentParsingProvider documentParsingProvider; - public UpdateHelper(ScriptService scriptService, DocumentParsingProvider documentParsingProvider) { + public UpdateHelper(ScriptService scriptService) { this.scriptService = scriptService; - this.documentParsingProvider = documentParsingProvider; } /** @@ -183,14 +180,13 @@ static String calculateRouting(GetResult getResult, @Nullable IndexRequest updat Result prepareUpdateIndexRequest(IndexShard indexShard, UpdateRequest request, GetResult getResult, boolean detectNoop) { final IndexRequest currentRequest = request.doc(); final String routing = calculateRouting(getResult, currentRequest); - final XContentMeteringParserDecorator meteringParserDecorator = documentParsingProvider.newMeteringParserDecorator(request); final Tuple> sourceAndContent = XContentHelper.convertToMap(getResult.internalSourceRef(), true); final XContentType updateSourceContentType = sourceAndContent.v1(); final Map updatedSourceAsMap = sourceAndContent.v2(); final boolean noop = XContentHelper.update( updatedSourceAsMap, - currentRequest.sourceAsMap(meteringParserDecorator), + currentRequest.sourceAsMap(XContentParserDecorator.NOOP), detectNoop ) == false; @@ -228,9 +224,7 @@ Result prepareUpdateIndexRequest(IndexShard indexShard, UpdateRequest request, G 
.setIfPrimaryTerm(getResult.getPrimaryTerm()) .waitForActiveShards(request.waitForActiveShards()) .timeout(request.timeout()) - .setRefreshPolicy(request.getRefreshPolicy()) - .setOriginatesFromUpdateByDoc(true); - finalIndexRequest.setNormalisedBytesParsed(meteringParserDecorator.meteredDocumentSize().ingestedBytes()); + .setRefreshPolicy(request.getRefreshPolicy()); return new Result(finalIndexRequest, DocWriteResponse.Result.UPDATED, updatedSourceAsMap, updateSourceContentType); } } @@ -272,8 +266,7 @@ Result prepareUpdateScriptRequest(IndexShard indexShard, UpdateRequest request, .setIfPrimaryTerm(getResult.getPrimaryTerm()) .waitForActiveShards(request.waitForActiveShards()) .timeout(request.timeout()) - .setRefreshPolicy(request.getRefreshPolicy()) - .setOriginatesFromUpdateByScript(true); + .setRefreshPolicy(request.getRefreshPolicy()); return new Result(indexRequest, DocWriteResponse.Result.UPDATED, updatedSourceAsMap, updateSourceContentType); } case DELETE -> { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 1ed0a117ddd89..bde9b0fb8a4ab 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -80,7 +80,7 @@ public ParsedDocument parseDocument(SourceToParse source, MappingLookup mappingL final RootDocumentParserContext context; final XContentType xContentType = source.getXContentType(); - XContentMeteringParserDecorator meteringParserDecorator = source.getDocumentSizeObserver(); + XContentMeteringParserDecorator meteringParserDecorator = source.getMeteringParserDecorator(); try ( XContentParser parser = meteringParserDecorator.decorate( XContentHelper.createParser(parserConfiguration, source.source(), xContentType) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java 
b/server/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java index b1d882f04de54..f2ddf38fe4357 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.index.mapper.MapperService.MergeReason; +import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator; import org.elasticsearch.xcontent.XContentType; import java.util.Collections; @@ -24,6 +25,7 @@ * The result of parsing a document. */ public class ParsedDocument { + private final Field version; private final String id; @@ -33,7 +35,7 @@ public class ParsedDocument { private final List documents; - private final DocumentSize normalizedSize; + private final long normalizedSize; private BytesReference source; private XContentType xContentType; @@ -61,7 +63,7 @@ public static ParsedDocument noopTombstone(String reason) { new BytesArray("{}"), XContentType.JSON, null, - DocumentSize.UNKNOWN + XContentMeteringParserDecorator.UNKNOWN_SIZE ); } @@ -86,7 +88,7 @@ public static ParsedDocument deleteTombstone(String id) { new BytesArray("{}"), XContentType.JSON, null, - DocumentSize.UNKNOWN + XContentMeteringParserDecorator.UNKNOWN_SIZE ); } @@ -99,7 +101,7 @@ public ParsedDocument( BytesReference source, XContentType xContentType, Mapping dynamicMappingsUpdate, - DocumentSize normalizedSize + long normalizedSize ) { this.version = version; this.seqID = seqID; @@ -178,16 +180,7 @@ public String documentDescription() { return "id"; } - public DocumentSize getNormalizedSize() { + public long getNormalizedSize() { return normalizedSize; } - - /** - * Normalized ingested and stored size of a document. 
- * @param ingestedBytes ingest size of the document - * @param storedBytes stored retained size of the document - */ - public record DocumentSize(long ingestedBytes, long storedBytes) { - public static final DocumentSize UNKNOWN = new DocumentSize(-1, -1); - } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java index a8cb03c223833..879e0fe785df2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java @@ -91,7 +91,7 @@ public XContentType getXContentType() { return this.xContentType; } - public XContentMeteringParserDecorator getDocumentSizeObserver() { + public XContentMeteringParserDecorator getMeteringParserDecorator() { return meteringParserDecorator; } } diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index 99ff44a3cd135..b5ac54b018e46 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -68,8 +68,6 @@ import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.node.ReportingService; import org.elasticsearch.plugins.IngestPlugin; -import org.elasticsearch.plugins.internal.DocumentParsingProvider; -import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator; import org.elasticsearch.plugins.internal.XContentParserDecorator; import org.elasticsearch.script.ScriptService; import org.elasticsearch.threadpool.Scheduler; @@ -121,7 +119,6 @@ public class IngestService implements ClusterStateApplier, ReportingService taskQueue; private final ClusterService clusterService; private final ScriptService scriptService; - private final DocumentParsingProvider documentParsingProvider; private final Map processorFactories; // Ideally this 
should be in IngestMetadata class, but we don't have the processor factories around there. // We know of all the processor factories when a node with all its plugin have been initialized. Also some @@ -204,12 +201,10 @@ public IngestService( List ingestPlugins, Client client, MatcherWatchdog matcherWatchdog, - DocumentParsingProvider documentParsingProvider, FailureStoreMetrics failureStoreMetrics ) { this.clusterService = clusterService; this.scriptService = scriptService; - this.documentParsingProvider = documentParsingProvider; this.processorFactories = processorFactories( ingestPlugins, new Processor.Parameters( @@ -238,7 +233,6 @@ public IngestService( IngestService(IngestService ingestService) { this.clusterService = ingestService.clusterService; this.scriptService = ingestService.scriptService; - this.documentParsingProvider = ingestService.documentParsingProvider; this.processorFactories = ingestService.processorFactories; this.threadPool = ingestService.threadPool; this.taskQueue = ingestService.taskQueue; @@ -776,10 +770,7 @@ protected void doRun() { } final int slot = i; final Releasable ref = refs.acquire(); - final XContentMeteringParserDecorator meteringParserDecorator = documentParsingProvider.newMeteringParserDecorator( - indexRequest - ); - final IngestDocument ingestDocument = newIngestDocument(indexRequest, meteringParserDecorator); + final IngestDocument ingestDocument = newIngestDocument(indexRequest); final org.elasticsearch.script.Metadata originalDocumentMetadata = ingestDocument.getMetadata().clone(); // the document listener gives us three-way logic: a document can fail processing (1), or it can // be successfully processed. a successfully processed document can be kept (2) or dropped (3). 
@@ -820,7 +811,6 @@ public void onFailure(Exception e) { ); executePipelines(pipelines, indexRequest, ingestDocument, resolveFailureStore, documentListener); - indexRequest.setNormalisedBytesParsed(meteringParserDecorator.meteredDocumentSize().ingestedBytes()); assert actionRequest.index() != null; i++; @@ -1159,14 +1149,14 @@ static String getProcessorName(Processor processor) { /** * Builds a new ingest document from the passed-in index request. */ - private static IngestDocument newIngestDocument(final IndexRequest request, XContentParserDecorator parserDecorator) { + private static IngestDocument newIngestDocument(final IndexRequest request) { return new IngestDocument( request.index(), request.id(), request.version(), request.routing(), request.versionType(), - request.sourceAsMap(parserDecorator) + request.sourceAsMap(XContentParserDecorator.NOOP) ); } diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 784e02059823b..0a88a202ac8d3 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -285,7 +285,7 @@ static NodeConstruction prepareConstruction( ScriptService scriptService = constructor.createScriptService(settingsModule, threadPool, serviceProvider); - constructor.createUpdateHelper(documentParsingProvider, scriptService); + constructor.createUpdateHelper(scriptService); constructor.construct( threadPool, @@ -643,10 +643,10 @@ private DataStreamGlobalRetentionSettings createDataStreamServicesAndGlobalReten return dataStreamGlobalRetentionSettings; } - private UpdateHelper createUpdateHelper(DocumentParsingProvider documentParsingProvider, ScriptService scriptService) { - UpdateHelper updateHelper = new UpdateHelper(scriptService, documentParsingProvider); + private UpdateHelper createUpdateHelper(ScriptService scriptService) { + UpdateHelper updateHelper = new 
UpdateHelper(scriptService); - modules.add(b -> { b.bind(UpdateHelper.class).toInstance(new UpdateHelper(scriptService, documentParsingProvider)); }); + modules.add(b -> b.bind(UpdateHelper.class).toInstance(updateHelper)); return updateHelper; } @@ -701,7 +701,6 @@ private void construct( pluginsService.filterPlugins(IngestPlugin.class).toList(), client, IngestService.createGrokThreadWatchdog(environment, threadPool), - documentParsingProvider, failureStoreMetrics ); diff --git a/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java b/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java index e1613caf9deac..9df7fd4c3bd43 100644 --- a/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java +++ b/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java @@ -9,7 +9,7 @@ package org.elasticsearch.plugins.internal; -import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.index.mapper.MapperService; /** @@ -40,7 +40,7 @@ default DocumentSizeAccumulator createDocumentSizeAccumulator() { /** * @return an observer */ - default XContentMeteringParserDecorator newMeteringParserDecorator(DocWriteRequest request) { + default XContentMeteringParserDecorator newMeteringParserDecorator(IndexRequest request) { return XContentMeteringParserDecorator.NOOP; } } diff --git a/server/src/main/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecorator.java b/server/src/main/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecorator.java index e3b4415edcc01..6ccdac19acb91 100644 --- a/server/src/main/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecorator.java +++ b/server/src/main/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecorator.java @@ -9,17 +9,17 @@ package org.elasticsearch.plugins.internal; -import 
org.elasticsearch.index.mapper.ParsedDocument.DocumentSize; import org.elasticsearch.xcontent.XContentParser; public interface XContentMeteringParserDecorator extends XContentParserDecorator { + long UNKNOWN_SIZE = -1; /** * a default noop implementation */ XContentMeteringParserDecorator NOOP = new XContentMeteringParserDecorator() { @Override - public DocumentSize meteredDocumentSize() { - return DocumentSize.UNKNOWN; + public long meteredDocumentSize() { + return UNKNOWN_SIZE; } @Override @@ -28,5 +28,5 @@ public XContentParser decorate(XContentParser xContentParser) { } }; - DocumentSize meteredDocumentSize(); + long meteredDocumentSize(); } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java index 35ef892da59a2..b389e33993b9b 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java @@ -49,11 +49,11 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.plugins.internal.DocumentParsingProvider; +import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool.Names; -import org.mockito.ArgumentCaptor; import org.mockito.MockingDetails; import org.mockito.Mockito; import org.mockito.stubbing.Stubbing; @@ -114,13 +114,18 @@ public void testExecuteBulkIndexRequest() throws Exception { BulkItemRequest[] items = new BulkItemRequest[1]; boolean create = randomBoolean(); - DocWriteRequest writeRequest = new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE).create(create); + IndexRequest writeRequest = new 
IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE).create(create); BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest); items[0] = primaryRequest; BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items); randomlySetIgnoredPrimaryResponse(primaryRequest); + DocumentParsingProvider documentParsingProvider = mock(); + XContentMeteringParserDecorator parserDecorator = mock(); + when(documentParsingProvider.newMeteringParserDecorator(any())).thenReturn(parserDecorator); + when(parserDecorator.decorate(any())).then(i -> i.getArgument(0)); + BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard); TransportShardBulkAction.executeBulkItemRequest( context, @@ -129,7 +134,7 @@ public void testExecuteBulkIndexRequest() throws Exception { new NoopMappingUpdatePerformer(), (listener, mappingVersion) -> {}, ASSERTING_DONE_LISTENER, - DocumentParsingProvider.EMPTY_INSTANCE + documentParsingProvider ); assertFalse(context.hasMoreOperationsToExecute()); @@ -185,6 +190,8 @@ public void testExecuteBulkIndexRequest() throws Exception { assertThat(failure.getStatus(), equalTo(RestStatus.CONFLICT)); assertThat(replicaRequest, equalTo(primaryRequest)); + verify(documentParsingProvider).newMeteringParserDecorator(any()); + verify(parserDecorator).decorate(any()); // Assert that the document count is still 1 assertDocCount(shard, 1); @@ -600,9 +607,7 @@ public void testUpdateRequestWithConflictFailure() throws Exception { .retryOnConflict(retries); BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest); - IndexRequest updateResponse = new IndexRequest("index").id("id") - .source(Requests.INDEX_CONTENT_TYPE, "field", "value") - .setNormalisedBytesParsed(0);// let's pretend this was modified by a script + IndexRequest updateResponse = new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"); DocumentParsingProvider 
documentParsingProvider = mock(DocumentParsingProvider.class); Exception err = new VersionConflictEngineException(shardId, "id", "I'm conflicted <(;_;)>"); @@ -655,11 +660,7 @@ public void testUpdateRequestWithConflictFailure() throws Exception { assertThat(failure.getCause(), equalTo(err)); assertThat(failure.getStatus(), equalTo(RestStatus.CONFLICT)); - // we have set 0 value on normalisedBytesParsed on the IndexRequest, like it happens with updates by script. - ArgumentCaptor argument = ArgumentCaptor.forClass(IndexRequest.class); - verify(documentParsingProvider, times(retries + 1)).newMeteringParserDecorator(argument.capture()); - IndexRequest value = argument.getValue(); - assertThat(value.getNormalisedBytesParsed(), equalTo(0L)); + verify(documentParsingProvider, times(retries + 1)).newMeteringParserDecorator(any()); } @SuppressWarnings("unchecked") @@ -668,9 +669,7 @@ public void testUpdateRequestWithSuccess() throws Exception { DocWriteRequest writeRequest = new UpdateRequest("index", "id").doc(Requests.INDEX_CONTENT_TYPE, "field", "value"); BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest); - IndexRequest updateResponse = new IndexRequest("index").id("id") - .source(Requests.INDEX_CONTENT_TYPE, "field", "value") - .setNormalisedBytesParsed(100L); + IndexRequest updateResponse = new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"); DocumentParsingProvider documentParsingProvider = mock(DocumentParsingProvider.class); boolean created = randomBoolean(); @@ -721,10 +720,7 @@ public void testUpdateRequestWithSuccess() throws Exception { assertThat(response.status(), equalTo(created ? 
RestStatus.CREATED : RestStatus.OK)); assertThat(response.getSeqNo(), equalTo(13L)); - ArgumentCaptor argument = ArgumentCaptor.forClass(IndexRequest.class); - verify(documentParsingProvider, times(1)).newMeteringParserDecorator(argument.capture()); - IndexRequest value = argument.getValue(); - assertThat(value.getNormalisedBytesParsed(), equalTo(100L)); + verify(documentParsingProvider).newMeteringParserDecorator(updateResponse); } public void testUpdateWithDelete() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java b/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java index 9729b653ae3d2..331f754d437a7 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.ProcessorInfo; import org.elasticsearch.plugins.IngestPlugin; -import org.elasticsearch.plugins.internal.DocumentParsingProvider; import org.elasticsearch.reservedstate.TransformState; import org.elasticsearch.reservedstate.service.FileSettingsService; import org.elasticsearch.reservedstate.service.ReservedClusterStateService; @@ -94,7 +93,6 @@ public void setup() { Collections.singletonList(DUMMY_PLUGIN), client, null, - DocumentParsingProvider.EMPTY_INSTANCE, FailureStoreMetrics.NOOP ); Map factories = ingestService.getProcessorFactories(); diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java index d8960bd902ac5..0cc2dcf38e8ff 100644 --- a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.index.mapper.MappingLookup; 
import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.plugins.internal.DocumentParsingProvider; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptEngine; @@ -121,7 +120,7 @@ public void setUp() throws Exception { final MockScriptEngine engine = new MockScriptEngine("mock", scripts, Collections.emptyMap()); Map engines = Collections.singletonMap(engine.getType(), engine); ScriptService scriptService = new ScriptService(baseSettings, engines, ScriptModule.CORE_CONTEXTS, () -> 1L); - updateHelper = new UpdateHelper(scriptService, DocumentParsingProvider.EMPTY_INSTANCE); + updateHelper = new UpdateHelper(scriptService); } @SuppressWarnings("unchecked") @@ -594,7 +593,7 @@ public void testNoopDetection() throws Exception { try (var parser = createParser(JsonXContent.jsonXContent, new BytesArray("{\"doc\": {\"body\": \"foo\"}}"))) { request = new UpdateRequest("test", "1").fromXContent(parser); } - UpdateHelper updateHelper = new UpdateHelper(mock(ScriptService.class), DocumentParsingProvider.EMPTY_INSTANCE); + UpdateHelper updateHelper = new UpdateHelper(mock(ScriptService.class)); UpdateHelper.Result result = updateHelper.prepareUpdateIndexRequest(indexShard, request, getResult, true); assertThat(result.action(), instanceOf(UpdateResponse.class)); diff --git a/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java b/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java index 753602e73a30a..c626be7983c46 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java @@ -28,10 +28,10 @@ import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineTestCase; import org.elasticsearch.index.mapper.ParsedDocument; -import 
org.elasticsearch.index.mapper.ParsedDocument.DocumentSize; import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentParseException; import org.elasticsearch.xcontent.XContentType; @@ -217,7 +217,7 @@ public void testSlowLogMessageHasJsonFields() throws IOException { source, XContentType.JSON, null, - DocumentSize.UNKNOWN + XContentMeteringParserDecorator.UNKNOWN_SIZE ); Index index = new Index("foo", "123"); // Turning off document logging doesn't log source[] @@ -246,7 +246,7 @@ public void testSlowLogMessageHasAdditionalFields() throws IOException { source, XContentType.JSON, null, - DocumentSize.UNKNOWN + XContentMeteringParserDecorator.UNKNOWN_SIZE ); Index index = new Index("foo", "123"); // Turning off document logging doesn't log source[] @@ -276,7 +276,7 @@ public void testEmptyRoutingField() throws IOException { source, XContentType.JSON, null, - DocumentSize.UNKNOWN + XContentMeteringParserDecorator.UNKNOWN_SIZE ); Index index = new Index("foo", "123"); @@ -295,7 +295,7 @@ public void testSlowLogParsedDocumentPrinterSourceToLog() throws IOException { source, XContentType.JSON, null, - DocumentSize.UNKNOWN + XContentMeteringParserDecorator.UNKNOWN_SIZE ); Index index = new Index("foo", "123"); // Turning off document logging doesn't log source[] @@ -327,7 +327,7 @@ public void testSlowLogParsedDocumentPrinterSourceToLog() throws IOException { source, XContentType.JSON, null, - DocumentSize.UNKNOWN + XContentMeteringParserDecorator.UNKNOWN_SIZE ); final XContentParseException e = expectThrows( diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 21aefd893de70..bba1fa338559f 100644 --- 
a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -109,7 +109,6 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.mapper.ParsedDocument.DocumentSize; import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.Uid; @@ -132,6 +131,7 @@ import org.elasticsearch.index.translog.TranslogOperationsUtils; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.indices.recovery.RecoverySettings; +import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.threadpool.ThreadPool; @@ -5522,7 +5522,7 @@ public void testSeqNoGenerator() throws IOException { source, XContentType.JSON, null, - DocumentSize.UNKNOWN + XContentMeteringParserDecorator.UNKNOWN_SIZE ); final Engine.Index index = new Engine.Index( diff --git a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java index ccf0bbebcc354..9e7f5fbbce1a3 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java @@ -45,7 +45,6 @@ import org.elasticsearch.index.mapper.LuceneDocument; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.mapper.ParsedDocument.DocumentSize; import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.mapper.Uid; import 
org.elasticsearch.index.seqno.RetentionLeases; @@ -54,6 +53,7 @@ import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator; import org.elasticsearch.test.DummyShardLock; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; @@ -567,7 +567,7 @@ private Engine.IndexResult index(String id, String testFieldValue) throws IOExce source, XContentType.JSON, null, - DocumentSize.UNKNOWN + XContentMeteringParserDecorator.UNKNOWN_SIZE ); Engine.Index index = new Engine.Index(uid, engine.config().getPrimaryTermSupplier().getAsLong(), doc); return engine.index(index); diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index d0cabd609158b..97f49df41d099 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -56,7 +56,6 @@ import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.LuceneDocument; import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.mapper.ParsedDocument.DocumentSize; import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.seqno.LocalCheckpointTracker; @@ -64,6 +63,7 @@ import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.translog.Translog.Location; +import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.TransportVersionUtils; @@ -3395,7 +3395,7 @@ public void 
testTranslogOpSerialization() throws Exception { B_1, XContentType.JSON, null, - DocumentSize.UNKNOWN + XContentMeteringParserDecorator.UNKNOWN_SIZE ); Engine.Index eIndex = new Engine.Index( diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java index d83fdbd5dd46b..b3ddc313eaf3a 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java @@ -54,10 +54,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.VersionType; -import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.plugins.IngestPlugin; -import org.elasticsearch.plugins.internal.DocumentParsingProvider; -import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptModule; @@ -68,7 +65,6 @@ import org.elasticsearch.test.MockLog; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.cbor.CborXContent; import org.junit.Before; @@ -157,7 +153,6 @@ public void testIngestPlugin() { List.of(DUMMY_PLUGIN), client, null, - DocumentParsingProvider.EMPTY_INSTANCE, FailureStoreMetrics.NOOP ); Map factories = ingestService.getProcessorFactories(); @@ -178,7 +173,6 @@ public void testIngestPluginDuplicate() { List.of(DUMMY_PLUGIN, DUMMY_PLUGIN), client, null, - DocumentParsingProvider.EMPTY_INSTANCE, FailureStoreMetrics.NOOP ) ); @@ -196,7 +190,6 @@ public void testExecuteIndexPipelineDoesNotExist() { List.of(DUMMY_PLUGIN), client, null, - DocumentParsingProvider.EMPTY_INSTANCE, FailureStoreMetrics.NOOP ); final 
IndexRequest indexRequest = new IndexRequest("_index").id("_id") @@ -1194,66 +1187,6 @@ public void testExecuteBulkPipelineDoesNotExist() { verify(completionHandler, times(1)).accept(Thread.currentThread(), null); } - public void testExecuteBulkRequestCallsDocumentSizeObserver() { - /* - * This test makes sure that for both insert and upsert requests, when we call executeBulkRequest DocumentSizeObserver is - * called using a non-null index name. - */ - AtomicInteger wrappedObserverWasUsed = new AtomicInteger(0); - AtomicInteger parsedValueWasUsed = new AtomicInteger(0); - DocumentParsingProvider documentParsingProvider = new DocumentParsingProvider() { - @Override - public XContentMeteringParserDecorator newMeteringParserDecorator(DocWriteRequest request) { - return new XContentMeteringParserDecorator() { - @Override - public ParsedDocument.DocumentSize meteredDocumentSize() { - parsedValueWasUsed.incrementAndGet(); - return new ParsedDocument.DocumentSize(0, 0); - } - - @Override - public XContentParser decorate(XContentParser xContentParser) { - wrappedObserverWasUsed.incrementAndGet(); - return xContentParser; - } - }; - } - }; - IngestService ingestService = createWithProcessors( - Map.of("mock", (factories, tag, description, config) -> mockCompoundProcessor()), - documentParsingProvider - ); - - PutPipelineRequest putRequest = putJsonPipelineRequest("_id", "{\"processors\": [{\"mock\" : {}}]}"); - ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty - ClusterState previousClusterState = clusterState; - clusterState = executePut(putRequest, clusterState); - ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); - - BulkRequest bulkRequest = new BulkRequest(); - UpdateRequest updateRequest = new UpdateRequest("_index", "_id1").upsert("{}", "{}"); - updateRequest.upsertRequest().setPipeline("_id"); - bulkRequest.add(updateRequest); - IndexRequest indexRequest = new 
IndexRequest("_index").id("_id1").source(Map.of()).setPipeline("_id1"); - bulkRequest.add(indexRequest); - @SuppressWarnings("unchecked") - BiConsumer failureHandler = mock(BiConsumer.class); - @SuppressWarnings("unchecked") - final BiConsumer completionHandler = mock(BiConsumer.class); - ingestService.executeBulkRequest( - bulkRequest.numberOfActions(), - bulkRequest.requests(), - indexReq -> {}, - (s) -> false, - (slot, targetIndex, e) -> fail("Should not be redirecting failures"), - failureHandler, - completionHandler, - EsExecutors.DIRECT_EXECUTOR_SERVICE - ); - assertThat(wrappedObserverWasUsed.get(), equalTo(2)); - assertThat(parsedValueWasUsed.get(), equalTo(2)); - } - public void testExecuteSuccess() { IngestService ingestService = createWithProcessors( Map.of("mock", (factories, tag, description, config) -> mockCompoundProcessor()) @@ -2271,7 +2204,6 @@ public Map getProcessors(Processor.Parameters paramet List.of(testPlugin), client, null, - DocumentParsingProvider.EMPTY_INSTANCE, FailureStoreMetrics.NOOP ); ingestService.addIngestClusterStateListener(ingestClusterStateListener); @@ -2611,7 +2543,6 @@ private void testUpdatingPipeline(String pipelineString) throws Exception { List.of(DUMMY_PLUGIN), client, null, - DocumentParsingProvider.EMPTY_INSTANCE, FailureStoreMetrics.NOOP ); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, clusterState)); @@ -2921,13 +2852,6 @@ private static IngestService createWithProcessors() { } private static IngestService createWithProcessors(Map processors) { - return createWithProcessors(processors, DocumentParsingProvider.EMPTY_INSTANCE); - } - - private static IngestService createWithProcessors( - Map processors, - DocumentParsingProvider documentParsingProvider - ) { Client client = mock(Client.class); ThreadPool threadPool = mock(ThreadPool.class); when(threadPool.generic()).thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE); @@ -2946,7 +2870,6 @@ public Map getProcessors(final Processor.Parameters 
p }), client, null, - documentParsingProvider, FailureStoreMetrics.NOOP ); if (randomBoolean()) { diff --git a/server/src/test/java/org/elasticsearch/ingest/SimulateIngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/SimulateIngestServiceTests.java index 94b3607bd7608..e8115e7266176 100644 --- a/server/src/test/java/org/elasticsearch/ingest/SimulateIngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/SimulateIngestServiceTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.plugins.IngestPlugin; -import org.elasticsearch.plugins.internal.DocumentParsingProvider; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; @@ -132,7 +131,6 @@ public Map getProcessors(final Processor.Parameters p List.of(ingestPlugin), client, null, - DocumentParsingProvider.EMPTY_INSTANCE, FailureStoreMetrics.NOOP ); } diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index c46d98fe1cd8b..e0363d84ea4d2 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -2405,7 +2405,6 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { Collections.emptyList(), client, null, - DocumentParsingProvider.EMPTY_INSTANCE, FailureStoreMetrics.NOOP ), mockFeatureService, @@ -2425,7 +2424,7 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { threadPool, shardStateAction, mappingUpdatedAction, - new UpdateHelper(scriptService, DocumentParsingProvider.EMPTY_INSTANCE), + new UpdateHelper(scriptService), actionFilters, indexingMemoryLimits, EmptySystemIndices.INSTANCE, diff --git 
a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index 4713adf6cf01d..87c566d543d0f 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -100,6 +100,7 @@ import org.elasticsearch.index.translog.TranslogDeletionPolicy; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator; import org.elasticsearch.test.DummyShardLock; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; @@ -428,7 +429,7 @@ protected static ParsedDocument testParsedDocument( source, XContentType.JSON, mappingUpdate, - ParsedDocument.DocumentSize.UNKNOWN + XContentMeteringParserDecorator.UNKNOWN_SIZE ); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java index 7e88cad88dcec..bb973bf4359e8 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.ingest.Processor; import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.plugins.IngestPlugin; -import org.elasticsearch.plugins.internal.DocumentParsingProvider; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.MachineLearningField; @@ -139,7 +138,6 @@ public void setUpVariables() { 
Collections.singletonList(SKINNY_INGEST_PLUGIN), client, null, - DocumentParsingProvider.EMPTY_INSTANCE, FailureStoreMetrics.NOOP ); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index 5710b031494bf..c2e9a92e45353 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -115,7 +115,6 @@ import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.plugins.internal.DocumentParsingProvider; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; @@ -1580,7 +1579,7 @@ public void testDenialErrorMessagesForBulkIngest() throws Exception { TransportShardBulkAction.performOnPrimary( request, indexShard, - new UpdateHelper(mock(ScriptService.class), DocumentParsingProvider.EMPTY_INSTANCE), + new UpdateHelper(mock(ScriptService.class)), System::currentTimeMillis, mappingUpdater, waitForMappingUpdate, From 5e98251bdab337beb218892d30529a7290f4e5a3 Mon Sep 17 00:00:00 2001 From: Mike Pellegrini Date: Fri, 25 Oct 2024 14:57:46 -0400 Subject: [PATCH 431/449] Remove "Use ELSER By Default For Semantic Text" Changelog Entry (#115686) --- docs/changelog/113563.yaml | 5 ----- 1 file changed, 5 deletions(-) delete mode 100644 docs/changelog/113563.yaml diff --git a/docs/changelog/113563.yaml b/docs/changelog/113563.yaml deleted file mode 100644 index 48484ead99d77..0000000000000 --- a/docs/changelog/113563.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 113563 -summary: Use ELSER By Default For Semantic Text 
-area: Mapping -type: enhancement -issues: [] From ca193bb923bf2a42df06dd39c8ad50068842879a Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Fri, 25 Oct 2024 14:56:13 -0500 Subject: [PATCH 432/449] Adding additional checks for IPInfo results (#115481) --- .../geoip/IpinfoIpDataLookupsTests.java | 113 ++++++++++++++++-- 1 file changed, 106 insertions(+), 7 deletions(-) diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java index d0cdc5a3e1b5e..11aa123824d18 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java @@ -14,8 +14,10 @@ import com.maxmind.db.Reader; import org.apache.lucene.util.Constants; +import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.core.Strings; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.test.ESTestCase; import org.junit.After; @@ -113,7 +115,10 @@ public void testAsnFree() { entry("asn", 16625L), entry("network", "23.32.184.0/21"), entry("domain", "akamai.com") - ) + ), + Map.ofEntries(entry("name", "organization_name"), entry("asn", "asn"), entry("network", "network"), entry("domain", "domain")), + Set.of("ip"), + Set.of() ); } @@ -133,7 +138,17 @@ public void testAsnStandard() { entry("domain", "tpx.com"), entry("type", "hosting"), entry("country_iso_code", "US") - ) + ), + Map.ofEntries( + entry("name", "organization_name"), + entry("asn", "asn"), + entry("network", "network"), + entry("domain", "domain"), + entry("country", "country_iso_code"), + entry("type", "type") + ), + Set.of("ip"), + Set.of() ); } @@ -188,7 +203,16 @@ public void testCountryFree() { entry("country_iso_code", 
"IE"), entry("continent_name", "Europe"), entry("continent_code", "EU") - ) + ), + Map.ofEntries( + entry("continent_name", "continent_name"), + entry("continent", "continent_code"), + entry("country", "country_iso_code"), + entry("country_name", "country_name"), + entry("type", "type") + ), + Set.of("ip"), + Set.of("network") ); } @@ -208,7 +232,18 @@ public void testGeolocationStandard() { entry("timezone", "Europe/London"), entry("postal_code", "E1W"), entry("location", Map.of("lat", 51.50853, "lon", -0.12574)) - ) + ), + Map.ofEntries( + entry("country", "country_iso_code"), + entry("region", "region_name"), + entry("city", "city_name"), + entry("timezone", "timezone"), + entry("postal_code", "postal_code"), + entry("lat", "location"), + entry("lng", "location") + ), + Set.of("ip", "location"), + Set.of("geoname_id", "region_code") ); } @@ -266,7 +301,16 @@ public void testPrivacyDetectionStandard() { entry("relay", false), entry("tor", false), entry("vpn", true) - ) + ), + Map.ofEntries( + entry("hosting", "hosting"), + entry("proxy", "proxy"), + entry("relay", "relay"), + entry("tor", "tor"), + entry("vpn", "vpn") + ), + Set.of("ip"), + Set.of("network", "service") ); } @@ -286,7 +330,17 @@ public void testPrivacyDetectionStandardNonEmptyService() { entry("relay", false), entry("tor", false), entry("vpn", true) - ) + ), + Map.ofEntries( + entry("hosting", "hosting"), + entry("proxy", "proxy"), + entry("service", "service"), + entry("relay", "relay"), + entry("tor", "tor"), + entry("vpn", "vpn") + ), + Set.of("ip"), + Set.of("network") ); } @@ -438,7 +492,15 @@ private static File pathToFile(Path databasePath) { return databasePath.toFile(); } - private void assertExpectedLookupResults(String databaseName, String ip, IpDataLookup lookup, Map expected) { + private void assertExpectedLookupResults( + String databaseName, + String ip, + IpDataLookup lookup, + Map expected, + Map keyMappings, + Set knownAdditionalKeys, + Set knownMissingKeys + ) { try 
(DatabaseReaderLazyLoader loader = loader(databaseName)) { Map actual = lookup.getData(loader, ip); assertThat( @@ -449,6 +511,7 @@ private void assertExpectedLookupResults(String databaseName, String ip, IpDataL for (Map.Entry entry : expected.entrySet()) { assertThat("Unexpected value for key [" + entry.getKey() + "]", actual.get(entry.getKey()), equalTo(entry.getValue())); } + assertActualResultsMatchReader(actual, databaseName, ip, keyMappings, knownAdditionalKeys, knownMissingKeys); } catch (AssertionError e) { fail(e, "Assert failed for database [%s] with address [%s]", databaseName, ip); } catch (Exception e) { @@ -456,6 +519,42 @@ private void assertExpectedLookupResults(String databaseName, String ip, IpDataL } } + private void assertActualResultsMatchReader( + Map actual, + String databaseName, + String ip, + Map keyMappings, + Set knownAdditionalKeys, + Set knownMissingKeys + ) throws IOException { + Path databasePath = tmpDir.resolve(databaseName); + try (Reader reader = new Reader(pathToFile(databasePath))) { + @SuppressWarnings("unchecked") + Map data = reader.get(InetAddresses.forString(ip), Map.class); + for (String key : data.keySet()) { + if (keyMappings.containsKey(key)) { + assertTrue( + Strings.format( + "The reader returned key [%s] that is expected to map to key [%s], but [%s] did not appear in the " + + "actual data", + key, + keyMappings.get(key), + keyMappings.get(key) + ), + actual.containsKey(keyMappings.get(key)) + ); + } else if (knownMissingKeys.contains(key) == false) { + fail(null, "The reader returned unexpected key [%s]", key); + } + } + for (String key : actual.keySet()) { + if (keyMappings.containsValue(key) == false && knownAdditionalKeys.contains(key) == false) { + fail(null, "Unexpected key [%s] in results", key); + } + } + } + } + private DatabaseReaderLazyLoader loader(final String databaseName) { Path path = tmpDir.resolve(databaseName); copyDatabase("ipinfo/" + databaseName, path); // the ipinfo databases are prefixed on 
the test classpath From 1ed7ff50a9874491648926ebfe754204298bb939 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 26 Oct 2024 07:22:31 +1100 Subject: [PATCH 433/449] Mute org.elasticsearch.xpack.shutdown.NodeShutdownIT testStalledShardMigrationProperlyDetected #115697 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 91644c9af70ca..be2c8d03c3931 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -282,6 +282,9 @@ tests: - class: org.elasticsearch.action.update.UpdateResponseTests method: testToAndFromXContent issue: https://github.com/elastic/elasticsearch/issues/115689 +- class: org.elasticsearch.xpack.shutdown.NodeShutdownIT + method: testStalledShardMigrationProperlyDetected + issue: https://github.com/elastic/elasticsearch/issues/115697 # Examples: # From 9fffd2962ed8e6e53ba4ab60b5bcf54f2d73aeaa Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Fri, 25 Oct 2024 13:25:46 -0700 Subject: [PATCH 434/449] Add challenge tests for logsdb that utilize stored source (#115606) --- .../xpack/logsdb/qa/DataGenerationHelper.java | 1 + ...ndexedIntoStandardModeChallengeRestIT.java | 30 ------------ ...ndexedIntoStoredSourceChallengeRestIT.java | 48 ++++++++++++++++++ ...bVersusReindexedLogsDbChallengeRestIT.java | 30 ------------ ...VersusLogsStoredSourceChallengeRestIT.java | 22 +++++++++ ...bVersusReindexedLogsDbChallengeRestIT.java | 49 +++++++++++++++++++ 6 files changed, 120 insertions(+), 60 deletions(-) create mode 100644 x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusReindexedIntoStoredSourceChallengeRestIT.java create mode 100644 x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsStoredSourceChallengeRestIT.java create mode 100644 
x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StoredSourceLogsDbVersusReindexedLogsDbChallengeRestIT.java diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/DataGenerationHelper.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/DataGenerationHelper.java index c03e8aea9c2ac..8a5bb8d12cd3d 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/DataGenerationHelper.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/DataGenerationHelper.java @@ -89,6 +89,7 @@ void standardMapping(XContentBuilder builder) throws IOException { } void logsDbSettings(Settings.Builder builder) { + builder.put("index.mode", "logsdb"); if (keepArraySource) { builder.put(Mapper.SYNTHETIC_SOURCE_KEEP_INDEX_SETTING.getKey(), "arrays"); } diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT.java index 0329f7723a108..d9abdc2cde446 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT.java @@ -7,17 +7,10 @@ package org.elasticsearch.xpack.logsdb.qa; -import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; -import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; -import java.util.List; -import java.util.Locale; - -import static org.hamcrest.Matchers.equalTo; /** * This test compares behavior of a logsdb 
data stream and a standard index mode data stream @@ -52,27 +45,4 @@ public void baselineMappings(XContentBuilder builder) throws IOException { public void contenderMappings(XContentBuilder builder) throws IOException { dataGenerationHelper.standardMapping(builder); } - - @Override - public Response indexContenderDocuments(CheckedSupplier, IOException> documentsSupplier) throws IOException { - var reindexRequest = new Request("POST", "/_reindex?refresh=true"); - reindexRequest.setJsonEntity(String.format(Locale.ROOT, """ - { - "source": { - "index": "%s" - }, - "dest": { - "index": "%s", - "op_type": "create" - } - } - """, getBaselineDataStreamName(), getContenderDataStreamName())); - var response = client.performRequest(reindexRequest); - assertOK(response); - - var body = entityAsMap(response); - assertThat("encountered failures when performing reindex:\n " + body, body.get("failures"), equalTo(List.of())); - - return response; - } } diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusReindexedIntoStoredSourceChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusReindexedIntoStoredSourceChallengeRestIT.java new file mode 100644 index 0000000000000..776a6faf7fa07 --- /dev/null +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusReindexedIntoStoredSourceChallengeRestIT.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.logsdb.qa; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; + +/** + * This test compares behavior of a standard mode data stream and a logsdb data stream using stored source. + * There should be no differences between such two data streams. + */ +public class LogsDbVersusReindexedIntoStoredSourceChallengeRestIT extends ReindexChallengeRestIT { + public String getBaselineDataStreamName() { + return "logs-apache-baseline"; + } + + public String getContenderDataStreamName() { + return "logs-apache-reindexed"; + } + + @Override + public void baselineSettings(Settings.Builder builder) { + dataGenerationHelper.logsDbSettings(builder); + } + + @Override + public void contenderSettings(Settings.Builder builder) { + dataGenerationHelper.logsDbSettings(builder); + builder.put("index.mapping.source.mode", "stored"); + } + + @Override + public void baselineMappings(XContentBuilder builder) throws IOException { + dataGenerationHelper.logsDbMapping(builder); + } + + @Override + public void contenderMappings(XContentBuilder builder) throws IOException { + dataGenerationHelper.logsDbMapping(builder); + } +} diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusReindexedLogsDbChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusReindexedLogsDbChallengeRestIT.java index 1c425cf30907b..8b00c647b5dd0 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusReindexedLogsDbChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusReindexedLogsDbChallengeRestIT.java @@ -7,17 +7,10 @@ package org.elasticsearch.xpack.logsdb.qa; -import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; -import 
org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; -import java.util.List; -import java.util.Locale; - -import static org.hamcrest.Matchers.equalTo; /** * This test compares behavior of a logsdb data stream and a data stream containing @@ -52,27 +45,4 @@ public void baselineMappings(XContentBuilder builder) throws IOException { public void contenderMappings(XContentBuilder builder) throws IOException { dataGenerationHelper.logsDbMapping(builder); } - - @Override - public Response indexContenderDocuments(CheckedSupplier, IOException> documentsSupplier) throws IOException { - var reindexRequest = new Request("POST", "/_reindex?refresh=true"); - reindexRequest.setJsonEntity(String.format(Locale.ROOT, """ - { - "source": { - "index": "%s" - }, - "dest": { - "index": "%s", - "op_type": "create" - } - } - """, getBaselineDataStreamName(), getContenderDataStreamName())); - var response = client.performRequest(reindexRequest); - assertOK(response); - - var body = entityAsMap(response); - assertThat("encountered failures when performing reindex:\n " + body, body.get("failures"), equalTo(List.of())); - - return response; - } } diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsStoredSourceChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsStoredSourceChallengeRestIT.java new file mode 100644 index 0000000000000..2f018b7dc0b38 --- /dev/null +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsStoredSourceChallengeRestIT.java @@ -0,0 +1,22 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.logsdb.qa; + +import org.elasticsearch.common.settings.Settings; + +/** + * This test compares behavior of a standard mode data stream and a logsdb data stream using stored source. + * There should be no differences between such two data streams. + */ +public class StandardVersusLogsStoredSourceChallengeRestIT extends StandardVersusLogsIndexModeRandomDataChallengeRestIT { + @Override + public void contenderSettings(Settings.Builder builder) { + super.contenderSettings(builder); + builder.put("index.mapping.source.mode", "stored"); + } +} diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StoredSourceLogsDbVersusReindexedLogsDbChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StoredSourceLogsDbVersusReindexedLogsDbChallengeRestIT.java new file mode 100644 index 0000000000000..a0672daafb243 --- /dev/null +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StoredSourceLogsDbVersusReindexedLogsDbChallengeRestIT.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.logsdb.qa; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; + +/** + * This test compares behavior of a logsdb data stream using stored source and a logsdb data stream + * containing data reindexed from initial data stream. + * There should be no differences between such two data streams. 
+ */ +public class StoredSourceLogsDbVersusReindexedLogsDbChallengeRestIT extends ReindexChallengeRestIT { + public String getBaselineDataStreamName() { + return "logs-apache-baseline"; + } + + public String getContenderDataStreamName() { + return "logs-apache-reindexed"; + } + + @Override + public void baselineSettings(Settings.Builder builder) { + dataGenerationHelper.logsDbSettings(builder); + builder.put("index.mapping.source.mode", "stored"); + } + + @Override + public void contenderSettings(Settings.Builder builder) { + dataGenerationHelper.logsDbSettings(builder); + } + + @Override + public void baselineMappings(XContentBuilder builder) throws IOException { + dataGenerationHelper.logsDbMapping(builder); + } + + @Override + public void contenderMappings(XContentBuilder builder) throws IOException { + dataGenerationHelper.logsDbMapping(builder); + } +} From 38a19bfa2e4918db8ee64e8ca9a2c2c11ef82051 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 26 Oct 2024 07:32:02 +1100 Subject: [PATCH 435/449] Mute org.elasticsearch.index.get.GetResultTests testToAndFromXContentEmbedded #115657 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index be2c8d03c3931..7675bcc4f2a28 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -285,6 +285,9 @@ tests: - class: org.elasticsearch.xpack.shutdown.NodeShutdownIT method: testStalledShardMigrationProperlyDetected issue: https://github.com/elastic/elasticsearch/issues/115697 +- class: org.elasticsearch.index.get.GetResultTests + method: testToAndFromXContentEmbedded + issue: https://github.com/elastic/elasticsearch/issues/115657 # Examples: # From 83578872d949879e93675c9e6b037072b0209ba4 Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Fri, 25 Oct 2024 15:33:54 -0500 Subject: [PATCH 436/449] Fixing DatabaseNodeServiceIT testNonGzippedDatabase and testGzippedDatabase race condition (#115463) 
Co-authored-by: Joe Gallo --- .../ingest/geoip/DatabaseNodeServiceIT.java | 28 +++++++++++++------ muted-tests.yml | 6 ---- 2 files changed, 20 insertions(+), 14 deletions(-) diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceIT.java index 786f091e0c024..7331afdbf585a 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceIT.java @@ -46,15 +46,21 @@ public class DatabaseNodeServiceIT extends AbstractGeoIpIT { public void testNonGzippedDatabase() throws Exception { String databaseType = "GeoLite2-Country"; String databaseFileName = databaseType + ".mmdb"; - // making the dabase name unique so we know we're not using another one: + // making the database name unique so we know we're not using another one: String databaseName = randomAlphaOfLength(20) + "-" + databaseFileName; byte[] mmdbBytes = getBytesForFile(databaseFileName); final DatabaseNodeService databaseNodeService = internalCluster().getInstance(DatabaseNodeService.class); assertNull(databaseNodeService.getDatabase(databaseName)); int numChunks = indexData(databaseName, mmdbBytes); - retrieveDatabase(databaseNodeService, databaseName, mmdbBytes, numChunks); - assertBusy(() -> assertNotNull(databaseNodeService.getDatabase(databaseName))); - assertValidDatabase(databaseNodeService, databaseName, databaseType); + /* + * If DatabaseNodeService::checkDatabases runs it will sometimes (rarely) remove the database we are using in this test while we + * are trying to assert things about it. So if it does then we 'just' try again. 
+ */ + assertBusy(() -> { + retrieveDatabase(databaseNodeService, databaseName, mmdbBytes, numChunks); + assertNotNull(databaseNodeService.getDatabase(databaseName)); + assertValidDatabase(databaseNodeService, databaseName, databaseType); + }); } /* @@ -64,16 +70,22 @@ public void testNonGzippedDatabase() throws Exception { public void testGzippedDatabase() throws Exception { String databaseType = "GeoLite2-Country"; String databaseFileName = databaseType + ".mmdb"; - // making the dabase name unique so we know we're not using another one: + // making the database name unique so we know we're not using another one: String databaseName = randomAlphaOfLength(20) + "-" + databaseFileName; byte[] mmdbBytes = getBytesForFile(databaseFileName); byte[] gzipBytes = gzipFileBytes(databaseName, mmdbBytes); final DatabaseNodeService databaseNodeService = internalCluster().getInstance(DatabaseNodeService.class); assertNull(databaseNodeService.getDatabase(databaseName)); int numChunks = indexData(databaseName, gzipBytes); - retrieveDatabase(databaseNodeService, databaseName, gzipBytes, numChunks); - assertBusy(() -> assertNotNull(databaseNodeService.getDatabase(databaseName))); - assertValidDatabase(databaseNodeService, databaseName, databaseType); + /* + * If DatabaseNodeService::checkDatabases runs it will sometimes (rarely) remove the database we are using in this test while we + * are trying to assert things about it. So if it does then we 'just' try again. 
+ */ + assertBusy(() -> { + retrieveDatabase(databaseNodeService, databaseName, gzipBytes, numChunks); + assertNotNull(databaseNodeService.getDatabase(databaseName)); + assertValidDatabase(databaseNodeService, databaseName, databaseType); + }); } /* diff --git a/muted-tests.yml b/muted-tests.yml index 7675bcc4f2a28..fad1304d73059 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -182,12 +182,6 @@ tests: - class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT method: test {categorize.Categorize SYNC} issue: https://github.com/elastic/elasticsearch/issues/113722 -- class: org.elasticsearch.ingest.geoip.DatabaseNodeServiceIT - method: testNonGzippedDatabase - issue: https://github.com/elastic/elasticsearch/issues/113821 -- class: org.elasticsearch.ingest.geoip.DatabaseNodeServiceIT - method: testGzippedDatabase - issue: https://github.com/elastic/elasticsearch/issues/113752 - class: org.elasticsearch.threadpool.SimpleThreadPoolIT method: testThreadPoolMetrics issue: https://github.com/elastic/elasticsearch/issues/108320 From d9c776468dca054aab44512def67e51a116e1a20 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Fri, 25 Oct 2024 14:28:35 -0700 Subject: [PATCH 437/449] Enable preview features on native modules during IntelliJ import (#115698) --- .../src/main/groovy/elasticsearch.ide.gradle | 35 ++++++++++++++++++- 1 file changed, 34 insertions(+), 1 deletion(-) diff --git a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle index d3209ff27ce06..67878181a005d 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle @@ -122,6 +122,36 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') { .findAll { it != null } } + // force IntelliJ to generate *.iml files for each imported module + tasks.register("enableExternalConfiguration") { + group = 'ide' + description = 'Enable 
per-module *.iml files' + + doLast { + modifyXml('.idea/misc.xml') {xml -> + def externalStorageConfig = xml.component.find { it.'@name' == 'ExternalStorageConfigurationManager' } + if (externalStorageConfig) { + xml.remove(externalStorageConfig) + } + } + } + } + + // modifies the idea module config to enable preview features on 'elasticsearch-native' module + tasks.register("enablePreviewFeatures") { + group = 'ide' + description = 'Enables preview features on native library module' + dependsOn tasks.named("enableExternalConfiguration") + + doLast { + ['main', 'test'].each { sourceSet -> + modifyXml(".idea/modules/libs/native/elasticsearch.libs.elasticsearch-native.${sourceSet}.iml") { xml -> + xml.component.find { it.'@name' == 'NewModuleRootManager' }?.'@LANGUAGE_LEVEL' = 'JDK_21_PREVIEW' + } + } + } + } + tasks.register('buildDependencyArtifacts') { group = 'ide' description = 'Builds artifacts needed as dependency for IDE modules' @@ -149,7 +179,10 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') { testRunner = 'choose_per_test' } taskTriggers { - afterSync tasks.named('configureIdeCheckstyle'), tasks.named('configureIdeaGradleJvm'), tasks.named('buildDependencyArtifacts') + afterSync tasks.named('configureIdeCheckstyle'), + tasks.named('configureIdeaGradleJvm'), + tasks.named('buildDependencyArtifacts'), + tasks.named('enablePreviewFeatures') } encodings { encoding = 'UTF-8' From d887d8e045bde0a5c8c1165dd05942c96c9c048e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 26 Oct 2024 08:52:15 +1100 Subject: [PATCH 438/449] Mute org.elasticsearch.xpack.spatial.search.GeoGridAggAndQueryConsistencyIT testGeoShapeGeoHash #115664 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index fad1304d73059..c0c716f2e26cf 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -282,6 +282,9 @@ tests: - class: 
org.elasticsearch.index.get.GetResultTests method: testToAndFromXContentEmbedded issue: https://github.com/elastic/elasticsearch/issues/115657 +- class: org.elasticsearch.xpack.spatial.search.GeoGridAggAndQueryConsistencyIT + method: testGeoShapeGeoHash + issue: https://github.com/elastic/elasticsearch/issues/115664 # Examples: # From 9b951cd92ed1fb7bb242d286bef6c5ba72dfc730 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 26 Oct 2024 16:47:29 +1100 Subject: [PATCH 439/449] Mute org.elasticsearch.xpack.inference.InferenceCrudIT testSupportedStream #113430 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index c0c716f2e26cf..97a4864e57f8a 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -285,6 +285,9 @@ tests: - class: org.elasticsearch.xpack.spatial.search.GeoGridAggAndQueryConsistencyIT method: testGeoShapeGeoHash issue: https://github.com/elastic/elasticsearch/issues/115664 +- class: org.elasticsearch.xpack.inference.InferenceCrudIT + method: testSupportedStream + issue: https://github.com/elastic/elasticsearch/issues/113430 # Examples: # From 2b3d41ac2771180e4c034bb5ecd565ea30fa1f87 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sun, 27 Oct 2024 00:51:56 +1100 Subject: [PATCH 440/449] Mute org.elasticsearch.xpack.spatial.search.GeoGridAggAndQueryConsistencyIT testGeoShapeGeoTile #115717 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 97a4864e57f8a..3a59af6234038 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -288,6 +288,9 @@ tests: - class: org.elasticsearch.xpack.inference.InferenceCrudIT method: testSupportedStream issue: https://github.com/elastic/elasticsearch/issues/113430 +- class: org.elasticsearch.xpack.spatial.search.GeoGridAggAndQueryConsistencyIT + method: testGeoShapeGeoTile + 
issue: https://github.com/elastic/elasticsearch/issues/115717 # Examples: # From 2f2ddad00492fcac8fbfc272607a8db91d279385 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Sat, 26 Oct 2024 06:55:49 -0700 Subject: [PATCH 441/449] Improve error message for unparseable numeric settings (#115609) When a numeric setting is too large or too small such that it can't be parsed at all, the error message is the same as for garbage values. This commit improves the error message in these cases to be the same as for normal bounds checks. closes #115080 --- .../common/settings/Setting.java | 58 ++++++++++++++++--- .../common/settings/SettingTests.java | 34 +++++++++++ 2 files changed, 84 insertions(+), 8 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index a0b6e665042d0..aec9c108d898d 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -34,6 +34,7 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; +import java.math.BigInteger; import java.time.Instant; import java.util.Arrays; import java.util.Collections; @@ -1485,27 +1486,68 @@ public static int parseInt(String s, int minValue, int maxValue, String key) { } public static int parseInt(String s, int minValue, int maxValue, String key, boolean isFiltered) { - int value = Integer.parseInt(s); + int value; + try { + value = Integer.parseInt(s); + } catch (NumberFormatException e) { + // check if value is a number or garbage + try { + var bi = new BigInteger(s); + // it's a number, so check which bound it is outside + if (bi.compareTo(BigInteger.valueOf(minValue)) < 0) { + throw newNumericBoundsException(s, key, isFiltered, ">=", minValue); + } else { + throw newNumericBoundsException(s, key, isFiltered, "<=", maxValue); + } + } catch (NumberFormatException e2) { + throw e; // it's 
garbage, use the original exception + } + } if (value < minValue) { - String err = "Failed to parse value" + (isFiltered ? "" : " [" + s + "]") + " for setting [" + key + "] must be >= " + minValue; - throw new IllegalArgumentException(err); + throw newNumericBoundsException(s, key, isFiltered, ">=", minValue); } if (value > maxValue) { - String err = "Failed to parse value" + (isFiltered ? "" : " [" + s + "]") + " for setting [" + key + "] must be <= " + maxValue; - throw new IllegalArgumentException(err); + throw newNumericBoundsException(s, key, isFiltered, "<=", maxValue); } return value; } static long parseLong(String s, long minValue, String key, boolean isFiltered) { - long value = Long.parseLong(s); + long value; + try { + value = Long.parseLong(s); + } catch (NumberFormatException e) { + // check if value is a number or garbage + try { + var bi = new BigInteger(s); + // it's a number, so check which bound it is outside + if (bi.compareTo(BigInteger.valueOf(minValue)) < 0) { + throw newNumericBoundsException(s, key, isFiltered, ">=", minValue); + } else { + throw newNumericBoundsException(s, key, isFiltered, "<=", Long.MAX_VALUE); + } + } catch (NumberFormatException e2) { + throw e; // it's garbage, use the original exception + } + } if (value < minValue) { - String err = "Failed to parse value" + (isFiltered ? "" : " [" + s + "]") + " for setting [" + key + "] must be >= " + minValue; - throw new IllegalArgumentException(err); + throw newNumericBoundsException(s, key, isFiltered, ">=", minValue); } return value; } + private static IllegalArgumentException newNumericBoundsException(String s, String key, boolean isFiltered, String type, long bound) { + String err = "Failed to parse value" + + (isFiltered ? "" : " [" + s + "]") + + " for setting [" + + key + + "] must be " + + type + + " " + + bound; + throw new IllegalArgumentException(err); + } + public static Setting intSetting(String key, int defaultValue, Property... 
properties) { return intSetting(key, defaultValue, Integer.MIN_VALUE, properties); } diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java index ba78ea5cf08a6..75f5045c5fbb6 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -1522,4 +1522,38 @@ public void testDeprecationPropertyValidation() { () -> Setting.boolSetting("a.bool.setting", true, Property.DeprecatedWarning, Property.IndexSettingDeprecatedInV7AndRemovedInV8) ); } + + public void testIntSettingBounds() { + Setting setting = Setting.intSetting("int.setting", 0, Integer.MIN_VALUE, Integer.MAX_VALUE); + var e = expectThrows( + IllegalArgumentException.class, + () -> setting.get(Settings.builder().put("int.setting", "2147483648").build()) + ); + assertThat(e.getMessage(), equalTo("Failed to parse value [2147483648] for setting [int.setting] must be <= 2147483647")); + var e2 = expectThrows( + IllegalArgumentException.class, + () -> setting.get(Settings.builder().put("int.setting", "-2147483649").build()) + ); + assertThat(e2.getMessage(), equalTo("Failed to parse value [-2147483649] for setting [int.setting] must be >= -2147483648")); + } + + public void testLongSettingBounds() { + Setting setting = Setting.longSetting("long.setting", 0, Long.MIN_VALUE); + var e = expectThrows( + IllegalArgumentException.class, + () -> setting.get(Settings.builder().put("long.setting", "9223372036854775808").build()) + ); + assertThat( + e.getMessage(), + equalTo("Failed to parse value [9223372036854775808] for setting [long.setting] must be <= 9223372036854775807") + ); + var e2 = expectThrows( + IllegalArgumentException.class, + () -> setting.get(Settings.builder().put("long.setting", "-9223372036854775809").build()) + ); + assertThat( + e2.getMessage(), + equalTo("Failed to parse value 
[-9223372036854775809] for setting [long.setting] must be >= -9223372036854775808") + ); + } } From 06cdd11193a5c551ce75edd9713a52d389144f4c Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Mon, 28 Oct 2024 10:50:11 +1100 Subject: [PATCH 442/449] Retry on 403 for S3 put in certain environments (#115486) This PR configures a new retry condition for s3 client so that it retries on 403 for operations such as PUT in certain environments. Note that 403 is already retried for GET due to S3RetryingInputStream. Resolves: ES-9321 --- .../s3/S3BlobStoreRepositoryMetricsTests.java | 48 +++++++++++++++++++ .../repositories/s3/S3Service.java | 32 ++++++++++++- .../s3/AwsS3ServiceImplTests.java | 4 +- .../s3/S3ClientSettingsTests.java | 14 ++++-- .../repositories/s3/S3ServiceTests.java | 33 ++++++++++--- 5 files changed, 118 insertions(+), 13 deletions(-) diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryMetricsTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryMetricsTests.java index 21f42bf9eb99c..b1c5d707220af 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryMetricsTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryMetricsTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Strings; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.plugins.PluginsService; @@ -53,11 +54,13 @@ import static org.elasticsearch.repositories.RepositoriesMetrics.METRIC_THROTTLES_TOTAL; import static org.elasticsearch.repositories.RepositoriesMetrics.METRIC_UNSUCCESSFUL_OPERATIONS_TOTAL; import static 
org.elasticsearch.repositories.s3.S3RepositoriesMetrics.METRIC_DELETE_RETRIES_HISTOGRAM; +import static org.elasticsearch.rest.RestStatus.FORBIDDEN; import static org.elasticsearch.rest.RestStatus.INTERNAL_SERVER_ERROR; import static org.elasticsearch.rest.RestStatus.NOT_FOUND; import static org.elasticsearch.rest.RestStatus.REQUESTED_RANGE_NOT_SATISFIED; import static org.elasticsearch.rest.RestStatus.SERVICE_UNAVAILABLE; import static org.elasticsearch.rest.RestStatus.TOO_MANY_REQUESTS; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -320,6 +323,51 @@ public void testRetrySnapshotDeleteMetricsWhenRetriesExhausted() { assertThat(longHistogramMeasurement.get(0).getLong(), equalTo(3L)); } + public void testPutDoesNotRetryOn403InStateful() { + final Settings settings = internalCluster().getInstance(Settings.class); + assertThat(DiscoveryNode.isStateless(settings), equalTo(false)); + + final String repository = createRepository(randomRepositoryName()); + final String dataNodeName = internalCluster().getNodeNameThat(DiscoveryNode::canContainData); + final TestTelemetryPlugin plugin = getPlugin(dataNodeName); + // Exclude snapshot related purpose to avoid trigger assertions for cross-checking purpose and blob names + final OperationPurpose purpose = randomFrom( + OperationPurpose.REPOSITORY_ANALYSIS, + OperationPurpose.CLUSTER_STATE, + OperationPurpose.INDICES, + OperationPurpose.TRANSLOG + ); + final BlobContainer blobContainer = getBlobContainer(dataNodeName, repository); + final String blobName = randomIdentifier(); + + plugin.resetMeter(); + addErrorStatus(new S3ErrorResponse(FORBIDDEN, Strings.format(""" + + + InvalidAccessKeyId + The AWS Access Key Id you provided does not exist in our records. 
+ %s + """, randomUUID()))); + + final var exception = expectThrows(IOException.class, () -> { + if (randomBoolean()) { + blobContainer.writeBlob(purpose, blobName, new BytesArray("blob"), randomBoolean()); + } else { + blobContainer.writeMetadataBlob( + purpose, + blobName, + randomBoolean(), + randomBoolean(), + outputStream -> outputStream.write("blob".getBytes()) + ); + } + }); + assertThat(exception.getCause().getMessage(), containsString("InvalidAccessKeyId")); + + assertThat(getLongCounterValue(plugin, METRIC_REQUESTS_TOTAL, Operation.PUT_OBJECT), equalTo(1L)); + assertThat(getLongCounterValue(plugin, METRIC_EXCEPTIONS_TOTAL, Operation.PUT_OBJECT), equalTo(1L)); + } + private void addErrorStatus(RestStatus... statuses) { errorResponseQueue.addAll(Arrays.stream(statuses).map(S3ErrorResponse::new).toList()); } diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java index 9042234de6f50..36eb1d61e21d7 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java @@ -9,6 +9,7 @@ package org.elasticsearch.repositories.s3; +import com.amazonaws.AmazonServiceException; import com.amazonaws.ClientConfiguration; import com.amazonaws.SDKGlobalConfiguration; import com.amazonaws.auth.AWSCredentials; @@ -20,6 +21,8 @@ import com.amazonaws.auth.STSAssumeRoleWithWebIdentitySessionCredentialsProvider; import com.amazonaws.client.builder.AwsClientBuilder; import com.amazonaws.http.IdleConnectionReaper; +import com.amazonaws.retry.PredefinedRetryPolicies; +import com.amazonaws.retry.RetryPolicy; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3ClientBuilder; import com.amazonaws.services.s3.internal.Constants; @@ -27,6 +30,7 @@ import 
com.amazonaws.services.securitytoken.AWSSecurityTokenServiceClient; import com.amazonaws.services.securitytoken.AWSSecurityTokenServiceClientBuilder; +import org.apache.http.HttpStatus; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; @@ -193,7 +197,10 @@ AmazonS3 buildClient(final S3ClientSettings clientSettings) { protected AmazonS3ClientBuilder buildClientBuilder(S3ClientSettings clientSettings) { final AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard(); builder.withCredentials(buildCredentials(LOGGER, clientSettings, webIdentityTokenCredentialsProvider)); - builder.withClientConfiguration(buildConfiguration(clientSettings)); + final ClientConfiguration clientConfiguration = buildConfiguration(clientSettings, isStateless); + assert (isStateless == false && clientConfiguration.getRetryPolicy() == PredefinedRetryPolicies.DEFAULT) + || (isStateless && clientConfiguration.getRetryPolicy() == RETRYABLE_403_RETRY_POLICY) : "invalid retry policy configuration"; + builder.withClientConfiguration(clientConfiguration); String endpoint = Strings.hasLength(clientSettings.endpoint) ? clientSettings.endpoint : Constants.S3_HOSTNAME; if ((endpoint.startsWith("http://") || endpoint.startsWith("https://")) == false) { @@ -223,7 +230,7 @@ protected AmazonS3ClientBuilder buildClientBuilder(S3ClientSettings clientSettin } // pkg private for tests - static ClientConfiguration buildConfiguration(S3ClientSettings clientSettings) { + static ClientConfiguration buildConfiguration(S3ClientSettings clientSettings, boolean isStateless) { final ClientConfiguration clientConfiguration = new ClientConfiguration(); // the response metadata cache is only there for diagnostics purposes, // but can force objects from every response to the old generation. 
@@ -248,6 +255,10 @@ static ClientConfiguration buildConfiguration(S3ClientSettings clientSettings) { clientConfiguration.setUseThrottleRetries(clientSettings.throttleRetries); clientConfiguration.setSocketTimeout(clientSettings.readTimeoutMillis); + if (isStateless) { + clientConfiguration.setRetryPolicy(RETRYABLE_403_RETRY_POLICY); + } + return clientConfiguration; } @@ -504,4 +515,21 @@ interface SystemEnvironment { interface JvmEnvironment { String getProperty(String key, String defaultValue); } + + static final RetryPolicy RETRYABLE_403_RETRY_POLICY = RetryPolicy.builder() + .withRetryCondition((originalRequest, exception, retriesAttempted) -> { + if (PredefinedRetryPolicies.DEFAULT_RETRY_CONDITION.shouldRetry(originalRequest, exception, retriesAttempted)) { + return true; + } + if (exception instanceof AmazonServiceException ase) { + return ase.getStatusCode() == HttpStatus.SC_FORBIDDEN && "InvalidAccessKeyId".equals(ase.getErrorCode()); + } + return false; + }) + .withBackoffStrategy(PredefinedRetryPolicies.DEFAULT_BACKOFF_STRATEGY) + .withMaxErrorRetry(PredefinedRetryPolicies.DEFAULT_MAX_ERROR_RETRY) + .withHonorMaxErrorRetryInClientConfig(true) + .withHonorDefaultMaxErrorRetryInRetryMode(true) + .withHonorDefaultBackoffStrategyInRetryMode(true) + .build(); } diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AwsS3ServiceImplTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AwsS3ServiceImplTests.java index 0aac0ba898f97..43f606135291d 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AwsS3ServiceImplTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AwsS3ServiceImplTests.java @@ -17,6 +17,7 @@ import com.amazonaws.auth.AWSStaticCredentialsProvider; import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.auth.EC2ContainerCredentialsProviderWrapper; +import com.amazonaws.retry.PredefinedRetryPolicies; import 
org.apache.logging.log4j.Logger; import org.apache.logging.log4j.util.Supplier; @@ -211,7 +212,7 @@ private void launchAWSConfigurationTest( ) { final S3ClientSettings clientSettings = S3ClientSettings.getClientSettings(settings, "default"); - final ClientConfiguration configuration = S3Service.buildConfiguration(clientSettings); + final ClientConfiguration configuration = S3Service.buildConfiguration(clientSettings, false); assertThat(configuration.getResponseMetadataCacheSize(), is(0)); assertThat(configuration.getProtocol(), is(expectedProtocol)); @@ -222,6 +223,7 @@ private void launchAWSConfigurationTest( assertThat(configuration.getMaxErrorRetry(), is(expectedMaxRetries)); assertThat(configuration.useThrottledRetries(), is(expectedUseThrottleRetries)); assertThat(configuration.getSocketTimeout(), is(expectedReadTimeout)); + assertThat(configuration.getRetryPolicy(), is(PredefinedRetryPolicies.DEFAULT)); } public void testEndpointSetting() { diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java index ddc7a1851c663..288ac1bb3c534 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java @@ -194,9 +194,9 @@ public void testSignerOverrideCanBeSet() { ); assertThat(settings.get("default").region, is("")); assertThat(settings.get("other").signerOverride, is(signerOverride)); - ClientConfiguration defaultConfiguration = S3Service.buildConfiguration(settings.get("default")); + ClientConfiguration defaultConfiguration = S3Service.buildConfiguration(settings.get("default"), false); assertThat(defaultConfiguration.getSignerOverride(), nullValue()); - ClientConfiguration configuration = S3Service.buildConfiguration(settings.get("other")); + ClientConfiguration 
configuration = S3Service.buildConfiguration(settings.get("other"), false); assertThat(configuration.getSignerOverride(), is(signerOverride)); } @@ -207,12 +207,18 @@ public void testMaxConnectionsCanBeSet() { ); assertThat(settings.get("default").maxConnections, is(ClientConfiguration.DEFAULT_MAX_CONNECTIONS)); assertThat(settings.get("other").maxConnections, is(maxConnections)); - ClientConfiguration defaultConfiguration = S3Service.buildConfiguration(settings.get("default")); + ClientConfiguration defaultConfiguration = S3Service.buildConfiguration(settings.get("default"), false); assertThat(defaultConfiguration.getMaxConnections(), is(ClientConfiguration.DEFAULT_MAX_CONNECTIONS)); - ClientConfiguration configuration = S3Service.buildConfiguration(settings.get("other")); + ClientConfiguration configuration = S3Service.buildConfiguration(settings.get("other"), false); assertThat(configuration.getMaxConnections(), is(maxConnections)); // the default appears in the docs so let's make sure it doesn't change: assertEquals(50, ClientConfiguration.DEFAULT_MAX_CONNECTIONS); } + + public void testStatelessDefaultRetryPolicy() { + final var s3ClientSettings = S3ClientSettings.load(Settings.EMPTY).get("default"); + final var clientConfiguration = S3Service.buildConfiguration(s3ClientSettings, true); + assertThat(clientConfiguration.getRetryPolicy(), is(S3Service.RETRYABLE_403_RETRY_POLICY)); + } } diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ServiceTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ServiceTests.java index 7bfaf56127fc7..afe1bb1a03c76 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ServiceTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ServiceTests.java @@ -8,23 +8,23 @@ */ package org.elasticsearch.repositories.s3; +import com.amazonaws.AmazonWebServiceRequest; +import 
com.amazonaws.services.s3.model.AmazonS3Exception; + import org.elasticsearch.cluster.metadata.RepositoryMetadata; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.watcher.ResourceWatcherService; -import org.mockito.Mockito; import java.io.IOException; +import static org.mockito.Mockito.mock; + public class S3ServiceTests extends ESTestCase { public void testCachedClientsAreReleased() throws IOException { - final S3Service s3Service = new S3Service( - Mockito.mock(Environment.class), - Settings.EMPTY, - Mockito.mock(ResourceWatcherService.class) - ); + final S3Service s3Service = new S3Service(mock(Environment.class), Settings.EMPTY, mock(ResourceWatcherService.class)); final Settings settings = Settings.builder().put("endpoint", "http://first").build(); final RepositoryMetadata metadata1 = new RepositoryMetadata("first", "s3", settings); final RepositoryMetadata metadata2 = new RepositoryMetadata("second", "s3", settings); @@ -41,4 +41,25 @@ public void testCachedClientsAreReleased() throws IOException { final S3ClientSettings clientSettingsReloaded = s3Service.settings(metadata1); assertNotSame(clientSettings, clientSettingsReloaded); } + + public void testRetryOn403RetryPolicy() { + final AmazonS3Exception e = new AmazonS3Exception("error"); + e.setStatusCode(403); + e.setErrorCode("InvalidAccessKeyId"); + + // Retry on 403 invalid access key id + assertTrue( + S3Service.RETRYABLE_403_RETRY_POLICY.getRetryCondition().shouldRetry(mock(AmazonWebServiceRequest.class), e, between(0, 9)) + ); + + // Not retry if not 403 or not invalid access key id + if (randomBoolean()) { + e.setStatusCode(randomValueOtherThan(403, () -> between(0, 600))); + } else { + e.setErrorCode(randomAlphaOfLength(10)); + } + assertFalse( + S3Service.RETRYABLE_403_RETRY_POLICY.getRetryCondition().shouldRetry(mock(AmazonWebServiceRequest.class), e, between(0, 9)) + ); + } } From 
5fb1e23f45f71bff1176f939b9f30f942f39cd96 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 28 Oct 2024 06:07:27 +0000 Subject: [PATCH 443/449] Clarify status of response to voting config API (#115714) These APIs return no body, just a status code. This commit clarifies that in the docs. Closes #115462 --- .../cluster/voting-exclusions.asciidoc | 22 +++++++++++-------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/docs/reference/cluster/voting-exclusions.asciidoc b/docs/reference/cluster/voting-exclusions.asciidoc index e5b8544a16554..55587a7010f8f 100644 --- a/docs/reference/cluster/voting-exclusions.asciidoc +++ b/docs/reference/cluster/voting-exclusions.asciidoc @@ -7,7 +7,6 @@ Adds or removes master-eligible nodes from the <>. - [[voting-config-exclusions-api-request]] ==== {api-request-title} @@ -28,7 +27,7 @@ users can use this API. [[voting-config-exclusions-api-desc]] ==== {api-description-title} - + By default, if there are more than three master-eligible nodes in the cluster and you remove fewer than half of the master-eligible nodes in the cluster at once, the <> automatically @@ -50,14 +49,19 @@ use `DELETE /_cluster/voting_config_exclusions?wait_for_removal=false` to clear the voting configuration exclusions without waiting for the nodes to leave the cluster. -If the API fails, you can safely retry it. Only a successful response -guarantees that the node has been removed from the voting configuration and -will not be reinstated. +A response to `POST /_cluster/voting_config_exclusions` with an HTTP status +code of `200 OK` guarantees that the node has been removed from the voting +configuration and will not be reinstated until the voting configuration +exclusions are cleared by calling `DELETE /_cluster/voting_config_exclusions`. +If the call to `POST /_cluster/voting_config_exclusions` fails or returns a +response with an HTTP status code other than `200 OK` then the node may not +have been removed from the voting configuration. 
In that case, you may safely +retry the call. NOTE: Voting exclusions are required only when you remove at least half of the master-eligible nodes from a cluster in a short time period. They are not -required when removing master-ineligible nodes or fewer than half of the -master-eligible nodes. +required when removing master-ineligible nodes or when removing fewer than half +of the master-eligible nodes. For more information, see <>. @@ -94,7 +98,7 @@ list. Defaults to `true`, meaning that all excluded nodes must be removed from the cluster before this API takes any action. If set to `false` then the voting configuration exclusions list is cleared even if some excluded nodes are still in the cluster. Only applies to the `DELETE` form of this API. - + [[voting-config-exclusions-api-example]] ==== {api-examples-title} @@ -102,7 +106,7 @@ Adds nodes named `nodeName1` and `nodeName2` to the voting configuration exclusions list: [source,console] --------------------------------------------------- +-------------------------------------------------- POST /_cluster/voting_config_exclusions?node_names=nodeName1,nodeName2 -------------------------------------------------- From 98cd34f3fde14cec9c5d5ac2507d4fdc55e89288 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 28 Oct 2024 08:34:48 +0100 Subject: [PATCH 444/449] Add more tsdb and logsdb rolling upgrade indexing tests. (#115639) The main difference between other rolling upgrade tests is that these tests index more data while performing the rolling upgrade and no rollover is performed during rolling upgrade. For example this makes it more likely for merging to happen, which could uncover bwc bugs. Note that currently both test suites start trial license so that synthetic source gets used. 
--- .../resources/checkstyle_suppressions.xml | 2 + .../LogsIndexModeRollingUpgradeIT.java | 4 +- .../LogsdbIndexingRollingUpgradeIT.java | 253 ++++++++++++++++++ .../org/elasticsearch/upgrades/TsdbIT.java | 4 +- .../TsdbIndexingRollingUpgradeIT.java | 187 +++++++++++++ 5 files changed, 446 insertions(+), 4 deletions(-) create mode 100644 qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsdbIndexingRollingUpgradeIT.java create mode 100644 qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIndexingRollingUpgradeIT.java diff --git a/build-tools-internal/src/main/resources/checkstyle_suppressions.xml b/build-tools-internal/src/main/resources/checkstyle_suppressions.xml index fd01993951959..5fdfebf6849e7 100644 --- a/build-tools-internal/src/main/resources/checkstyle_suppressions.xml +++ b/build-tools-internal/src/main/resources/checkstyle_suppressions.xml @@ -35,6 +35,8 @@ + + diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java index ba79de4ab6cd1..8c369ebc9950d 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java @@ -171,7 +171,7 @@ public void testLogsIndexing() throws IOException { } } - private static void enableLogsdbByDefault() throws IOException { + static void enableLogsdbByDefault() throws IOException { var request = new Request("PUT", "/_cluster/settings"); request.setJsonEntity(""" { @@ -214,7 +214,7 @@ private static Request rolloverDataStream(final RestClient client, final String } @SuppressWarnings("unchecked") - private static String getWriteBackingIndex(final RestClient client, final String dataStreamName, int backingIndex) throws IOException { + static String 
getWriteBackingIndex(final RestClient client, final String dataStreamName, int backingIndex) throws IOException { final Request request = new Request("GET", "_data_stream/" + dataStreamName); final List dataStreams = (List) entityAsMap(client.performRequest(request)).get("data_streams"); final Map dataStream = (Map) dataStreams.get(0); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsdbIndexingRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsdbIndexingRollingUpgradeIT.java new file mode 100644 index 0000000000000..9bdc43543e331 --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsdbIndexingRollingUpgradeIT.java @@ -0,0 +1,253 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.test.rest.ObjectPath; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; +import java.io.InputStream; +import java.time.Instant; +import java.util.Map; + +import static org.elasticsearch.upgrades.LogsIndexModeRollingUpgradeIT.enableLogsdbByDefault; +import static org.elasticsearch.upgrades.LogsIndexModeRollingUpgradeIT.getWriteBackingIndex; +import static org.elasticsearch.upgrades.TsdbIT.formatInstant; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.notNullValue; + +public class LogsdbIndexingRollingUpgradeIT extends AbstractRollingUpgradeTestCase { + + static String BULK_ITEM_TEMPLATE = + """ + {"@timestamp": "$now", "host.name": "$host", "method": "$method", "ip": "$ip", "message": "$message", "length": $length, "factor": $factor} + """; + + private static final String TEMPLATE = """ + { + "mappings": { + "properties": { + "@timestamp" : { + "type": "date" + }, + "method": { + "type": "keyword" + }, + "message": { + "type": "text" + }, + "ip": { + "type": "ip" + }, + "length": { + "type": "long" + }, + "factor": { + "type": "double" + } + } + } + }"""; + + public LogsdbIndexingRollingUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + public void testIndexing() throws Exception { + String dataStreamName = "logs-bwc-test"; + if (isOldCluster()) { + startTrial(); + enableLogsdbByDefault(); + createTemplate(dataStreamName, "3", TEMPLATE); + + Instant startTime = Instant.now().minusSeconds(60 * 60); + bulkIndex(dataStreamName, 4, 
1024, startTime); + + String firstBackingIndex = getWriteBackingIndex(client(), dataStreamName, 0); + var settings = (Map) getIndexSettingsWithDefaults(firstBackingIndex).get(firstBackingIndex); + assertThat(((Map) settings.get("settings")).get("index.mode"), equalTo("logsdb")); + assertThat(((Map) settings.get("defaults")).get("index.mapping.source.mode"), equalTo("SYNTHETIC")); + + ensureGreen(dataStreamName); + search(dataStreamName); + query(dataStreamName); + } else if (isMixedCluster()) { + Instant startTime = Instant.now().minusSeconds(60 * 30); + bulkIndex(dataStreamName, 4, 1024, startTime); + + ensureGreen(dataStreamName); + search(dataStreamName); + query(dataStreamName); + } else if (isUpgradedCluster()) { + ensureGreen(dataStreamName); + Instant startTime = Instant.now(); + bulkIndex(dataStreamName, 4, 1024, startTime); + search(dataStreamName); + query(dataStreamName); + + var forceMergeRequest = new Request("POST", "/" + dataStreamName + "/_forcemerge"); + forceMergeRequest.addParameter("max_num_segments", "1"); + assertOK(client().performRequest(forceMergeRequest)); + + ensureGreen(dataStreamName); + search(dataStreamName); + query(dataStreamName); + } + } + + static void createTemplate(String dataStreamName, String id, String template) throws IOException { + final String INDEX_TEMPLATE = """ + { + "index_patterns": ["$DATASTREAM"], + "template": $TEMPLATE, + "data_stream": { + } + }"""; + var putIndexTemplateRequest = new Request("POST", "/_index_template/" + id); + putIndexTemplateRequest.setJsonEntity(INDEX_TEMPLATE.replace("$TEMPLATE", template).replace("$DATASTREAM", dataStreamName)); + assertOK(client().performRequest(putIndexTemplateRequest)); + } + + static void bulkIndex(String dataStreamName, int numRequest, int numDocs, Instant startTime) throws Exception { + for (int i = 0; i < numRequest; i++) { + var bulkRequest = new Request("POST", "/" + dataStreamName + "/_bulk"); + StringBuilder requestBody = new StringBuilder(); + for (int j = 0; 
j < numDocs; j++) { + String hostName = "host" + j % 50; // Not realistic, but makes asserting search / query response easier. + String methodName = "method" + j % 5; + String ip = NetworkAddress.format(randomIp(true)); + String message = randomAlphaOfLength(128); + long length = randomLong(); + double factor = randomDouble(); + + requestBody.append("{\"create\": {}}"); + requestBody.append('\n'); + requestBody.append( + BULK_ITEM_TEMPLATE.replace("$now", formatInstant(startTime)) + .replace("$host", hostName) + .replace("$method", methodName) + .replace("$ip", ip) + .replace("$message", message) + .replace("$length", Long.toString(length)) + .replace("$factor", Double.toString(factor)) + ); + requestBody.append('\n'); + + startTime = startTime.plusMillis(1); + } + bulkRequest.setJsonEntity(requestBody.toString()); + bulkRequest.addParameter("refresh", "true"); + var response = client().performRequest(bulkRequest); + assertOK(response); + var responseBody = entityAsMap(response); + assertThat("errors in response:\n " + responseBody, responseBody.get("errors"), equalTo(false)); + } + } + + void search(String dataStreamName) throws Exception { + var searchRequest = new Request("POST", "/" + dataStreamName + "/_search"); + searchRequest.addParameter("pretty", "true"); + searchRequest.setJsonEntity(""" + { + "size": 0, + "aggs": { + "host_name": { + "terms": { + "field": "host.name", + "order": { "_key": "asc" } + }, + "aggs": { + "max_length": { + "max": { + "field": "length" + } + }, + "max_factor": { + "max": { + "field": "factor" + } + } + } + } + } + } + """); + var response = client().performRequest(searchRequest); + assertOK(response); + var responseBody = entityAsMap(response); + + Integer totalCount = ObjectPath.evaluate(responseBody, "hits.total.value"); + assertThat(totalCount, greaterThanOrEqualTo(4096)); + String key = ObjectPath.evaluate(responseBody, "aggregations.host_name.buckets.0.key"); + assertThat(key, equalTo("host0")); + Integer docCount = 
ObjectPath.evaluate(responseBody, "aggregations.host_name.buckets.0.doc_count"); + assertThat(docCount, greaterThan(0)); + Double maxTx = ObjectPath.evaluate(responseBody, "aggregations.host_name.buckets.0.max_length.value"); + assertThat(maxTx, notNullValue()); + Double maxRx = ObjectPath.evaluate(responseBody, "aggregations.host_name.buckets.0.max_factor.value"); + assertThat(maxRx, notNullValue()); + } + + void query(String dataStreamName) throws Exception { + var queryRequest = new Request("POST", "/_query"); + queryRequest.addParameter("pretty", "true"); + queryRequest.setJsonEntity(""" + { + "query": "FROM $ds | STATS max(length), max(factor) BY host.name | SORT host.name | LIMIT 5" + } + """.replace("$ds", dataStreamName)); + var response = client().performRequest(queryRequest); + assertOK(response); + var responseBody = entityAsMap(response); + + String column1 = ObjectPath.evaluate(responseBody, "columns.0.name"); + String column2 = ObjectPath.evaluate(responseBody, "columns.1.name"); + String column3 = ObjectPath.evaluate(responseBody, "columns.2.name"); + assertThat(column1, equalTo("max(length)")); + assertThat(column2, equalTo("max(factor)")); + assertThat(column3, equalTo("host.name")); + + String key = ObjectPath.evaluate(responseBody, "values.0.2"); + assertThat(key, equalTo("host0")); + Long maxRx = ObjectPath.evaluate(responseBody, "values.0.0"); + assertThat(maxRx, notNullValue()); + Double maxTx = ObjectPath.evaluate(responseBody, "values.0.1"); + assertThat(maxTx, notNullValue()); + } + + protected static void startTrial() throws IOException { + Request startTrial = new Request("POST", "/_license/start_trial"); + startTrial.addParameter("acknowledge", "true"); + assertOK(client().performRequest(startTrial)); + } + + static Map getIndexSettingsWithDefaults(String index) throws IOException { + Request request = new Request("GET", "/" + index + "/_settings"); + request.addParameter("flat_settings", "true"); + 
request.addParameter("include_defaults", "true"); + Response response = client().performRequest(request); + try (InputStream is = response.getEntity().getContent()) { + return XContentHelper.convertToMap( + XContentType.fromMediaType(response.getEntity().getContentType().getValue()).xContent(), + is, + true + ); + } + } + +} diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java index 9e3030d510266..6744c84f29d0f 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java @@ -33,7 +33,7 @@ public TsdbIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); } - private static final String TEMPLATE = """ + static final String TEMPLATE = """ { "settings":{ "index": { @@ -289,7 +289,7 @@ private static void assertSearch(String dataStreamName, int expectedHitCount) th assertThat(ObjectPath.evaluate(responseBody, "hits.total.value"), equalTo(expectedHitCount)); } - private static String formatInstant(Instant instant) { + static String formatInstant(Instant instant) { return DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(instant); } diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIndexingRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIndexingRollingUpgradeIT.java new file mode 100644 index 0000000000000..1ac919ea57001 --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIndexingRollingUpgradeIT.java @@ -0,0 +1,187 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.client.Request; +import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.test.rest.ObjectPath; + +import java.time.Instant; +import java.util.Map; + +import static org.elasticsearch.upgrades.LogsIndexModeRollingUpgradeIT.getWriteBackingIndex; +import static org.elasticsearch.upgrades.LogsdbIndexingRollingUpgradeIT.*; +import static org.elasticsearch.upgrades.TsdbIT.TEMPLATE; +import static org.elasticsearch.upgrades.TsdbIT.formatInstant; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.notNullValue; + +public class TsdbIndexingRollingUpgradeIT extends AbstractRollingUpgradeTestCase { + + static String BULK_ITEM_TEMPLATE = + """ + {"@timestamp": "$now", "metricset": "pod", "k8s": {"pod": {"name": "$name", "uid":"$uid", "ip": "$ip", "network": {"tx": $tx, "rx": $rx}}}} + """; + + public TsdbIndexingRollingUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + public void testIndexing() throws Exception { + String dataStreamName = "k9s"; + if (isOldCluster()) { + startTrial(); + createTemplate(dataStreamName, "2", TEMPLATE); + + Instant startTime = Instant.now().minusSeconds(60 * 60); + bulkIndex(dataStreamName, 4, 1024, startTime); + + String firstBackingIndex = getWriteBackingIndex(client(), dataStreamName, 0); + var settings = (Map) 
getIndexSettingsWithDefaults(firstBackingIndex).get(firstBackingIndex); + assertThat(((Map) settings.get("settings")).get("index.mode"), equalTo("time_series")); + assertThat(((Map) settings.get("defaults")).get("index.mapping.source.mode"), equalTo("SYNTHETIC")); + + ensureGreen(dataStreamName); + search(dataStreamName); + query(dataStreamName); + } else if (isMixedCluster()) { + Instant startTime = Instant.now().minusSeconds(60 * 30); + bulkIndex(dataStreamName, 4, 1024, startTime); + + ensureGreen(dataStreamName); + search(dataStreamName); + query(dataStreamName); + } else if (isUpgradedCluster()) { + ensureGreen(dataStreamName); + Instant startTime = Instant.now(); + bulkIndex(dataStreamName, 4, 1024, startTime); + search(dataStreamName); + query(dataStreamName); + + var forceMergeRequest = new Request("POST", "/" + dataStreamName + "/_forcemerge"); + forceMergeRequest.addParameter("max_num_segments", "1"); + assertOK(client().performRequest(forceMergeRequest)); + + ensureGreen(dataStreamName); + search(dataStreamName); + query(dataStreamName); + } + } + + static void bulkIndex(String dataStreamName, int numRequest, int numDocs, Instant startTime) throws Exception { + for (int i = 0; i < numRequest; i++) { + var bulkRequest = new Request("POST", "/" + dataStreamName + "/_bulk"); + StringBuilder requestBody = new StringBuilder(); + for (int j = 0; j < numDocs; j++) { + String podName = "pod" + j % 5; // Not realistic, but makes asserting search / query response easier. 
+ String podUid = randomUUID(); + String podIp = NetworkAddress.format(randomIp(true)); + long podTx = randomLong(); + long podRx = randomLong(); + + requestBody.append("{\"create\": {}}"); + requestBody.append('\n'); + requestBody.append( + BULK_ITEM_TEMPLATE.replace("$now", formatInstant(startTime)) + .replace("$name", podName) + .replace("$uid", podUid) + .replace("$ip", podIp) + .replace("$tx", Long.toString(podTx)) + .replace("$rx", Long.toString(podRx)) + ); + requestBody.append('\n'); + + startTime = startTime.plusMillis(1); + } + bulkRequest.setJsonEntity(requestBody.toString()); + bulkRequest.addParameter("refresh", "true"); + var response = client().performRequest(bulkRequest); + assertOK(response); + var responseBody = entityAsMap(response); + assertThat("errors in response:\n " + responseBody, responseBody.get("errors"), equalTo(false)); + } + } + + void search(String dataStreamName) throws Exception { + var searchRequest = new Request("POST", "/" + dataStreamName + "/_search"); + searchRequest.addParameter("pretty", "true"); + searchRequest.setJsonEntity(""" + { + "size": 0, + "aggs": { + "pod_name": { + "terms": { + "field": "k8s.pod.name", + "order": { "_key": "asc" } + }, + "aggs": { + "max_tx": { + "max": { + "field": "k8s.pod.network.tx" + } + }, + "max_rx": { + "max": { + "field": "k8s.pod.network.rx" + } + } + } + } + } + } + """); + var response = client().performRequest(searchRequest); + assertOK(response); + var responseBody = entityAsMap(response); + + Integer totalCount = ObjectPath.evaluate(responseBody, "hits.total.value"); + assertThat(totalCount, greaterThanOrEqualTo(4096)); + String key = ObjectPath.evaluate(responseBody, "aggregations.pod_name.buckets.0.key"); + assertThat(key, equalTo("pod0")); + Integer docCount = ObjectPath.evaluate(responseBody, "aggregations.pod_name.buckets.0.doc_count"); + assertThat(docCount, greaterThan(0)); + Double maxTx = ObjectPath.evaluate(responseBody, "aggregations.pod_name.buckets.0.max_tx.value"); + 
assertThat(maxTx, notNullValue()); + Double maxRx = ObjectPath.evaluate(responseBody, "aggregations.pod_name.buckets.0.max_rx.value"); + assertThat(maxRx, notNullValue()); + } + + void query(String dataStreamName) throws Exception { + var queryRequest = new Request("POST", "/_query"); + queryRequest.addParameter("pretty", "true"); + queryRequest.setJsonEntity(""" + { + "query": "FROM $ds | STATS max(k8s.pod.network.rx), max(k8s.pod.network.tx) BY k8s.pod.name | SORT k8s.pod.name | LIMIT 5" + } + """.replace("$ds", dataStreamName)); + var response = client().performRequest(queryRequest); + assertOK(response); + var responseBody = entityAsMap(response); + + String column1 = ObjectPath.evaluate(responseBody, "columns.0.name"); + String column2 = ObjectPath.evaluate(responseBody, "columns.1.name"); + String column3 = ObjectPath.evaluate(responseBody, "columns.2.name"); + assertThat(column1, equalTo("max(k8s.pod.network.rx)")); + assertThat(column2, equalTo("max(k8s.pod.network.tx)")); + assertThat(column3, equalTo("k8s.pod.name")); + + String key = ObjectPath.evaluate(responseBody, "values.0.2"); + assertThat(key, equalTo("pod0")); + Long maxRx = ObjectPath.evaluate(responseBody, "values.0.0"); + assertThat(maxRx, notNullValue()); + Long maxTx = ObjectPath.evaluate(responseBody, "values.0.1"); + assertThat(maxTx, notNullValue()); + } + +} From ef85d0a53f1f58a63359b63933fc1e147167d42f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Mon, 28 Oct 2024 09:31:19 +0100 Subject: [PATCH 445/449] Avoid double instrumentation via class annotation (#115398) --- .../impl/InstrumenterImpl.java | 96 ++++++++++++--- .../impl/InstrumenterTests.java | 112 +++++++++++++++--- 2 files changed, 177 insertions(+), 31 deletions(-) diff --git a/distribution/tools/entitlement-agent/impl/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java 
b/distribution/tools/entitlement-agent/impl/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java index 81c120ddcd6d1..7c2e1645ada83 100644 --- a/distribution/tools/entitlement-agent/impl/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java +++ b/distribution/tools/entitlement-agent/impl/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java @@ -15,8 +15,10 @@ import org.objectweb.asm.ClassReader; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.ClassWriter; +import org.objectweb.asm.FieldVisitor; import org.objectweb.asm.MethodVisitor; import org.objectweb.asm.Opcodes; +import org.objectweb.asm.RecordComponentVisitor; import org.objectweb.asm.Type; import java.io.IOException; @@ -73,7 +75,13 @@ public byte[] instrumentClass(String className, byte[] classfileBuffer) { } class EntitlementClassVisitor extends ClassVisitor { - final String className; + + private static final String ENTITLEMENT_ANNOTATION = "EntitlementInstrumented"; + + private final String className; + + private boolean isAnnotationPresent; + private boolean annotationNeeded = true; EntitlementClassVisitor(int api, ClassVisitor classVisitor, String className) { super(api, classVisitor); @@ -85,25 +93,85 @@ public void visit(int version, int access, String name, String signature, String super.visit(version, access, name + classNameSuffix, signature, superName, interfaces); } + @Override + public AnnotationVisitor visitAnnotation(String descriptor, boolean visible) { + if (visible && descriptor.equals(ENTITLEMENT_ANNOTATION)) { + isAnnotationPresent = true; + annotationNeeded = false; + } + return cv.visitAnnotation(descriptor, visible); + } + + @Override + public void visitNestMember(String nestMember) { + addClassAnnotationIfNeeded(); + super.visitNestMember(nestMember); + } + + @Override + public void visitPermittedSubclass(String permittedSubclass) { + addClassAnnotationIfNeeded(); + 
super.visitPermittedSubclass(permittedSubclass); + } + + @Override + public void visitInnerClass(String name, String outerName, String innerName, int access) { + addClassAnnotationIfNeeded(); + super.visitInnerClass(name, outerName, innerName, access); + } + + @Override + public FieldVisitor visitField(int access, String name, String descriptor, String signature, Object value) { + addClassAnnotationIfNeeded(); + return super.visitField(access, name, descriptor, signature, value); + } + + @Override + public RecordComponentVisitor visitRecordComponent(String name, String descriptor, String signature) { + addClassAnnotationIfNeeded(); + return super.visitRecordComponent(name, descriptor, signature); + } + @Override public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) { + addClassAnnotationIfNeeded(); var mv = super.visitMethod(access, name, descriptor, signature, exceptions); - boolean isStatic = (access & ACC_STATIC) != 0; - var key = new MethodKey( - className, - name, - Stream.of(Type.getArgumentTypes(descriptor)).map(Type::getInternalName).toList(), - isStatic - ); - var instrumentationMethod = instrumentationMethods.get(key); - if (instrumentationMethod != null) { - // LOGGER.debug("Will instrument method {}", key); - return new EntitlementMethodVisitor(Opcodes.ASM9, mv, isStatic, descriptor, instrumentationMethod); - } else { - // LOGGER.trace("Will not instrument method {}", key); + if (isAnnotationPresent == false) { + boolean isStatic = (access & ACC_STATIC) != 0; + var key = new MethodKey( + className, + name, + Stream.of(Type.getArgumentTypes(descriptor)).map(Type::getInternalName).toList(), + isStatic + ); + var instrumentationMethod = instrumentationMethods.get(key); + if (instrumentationMethod != null) { + // LOGGER.debug("Will instrument method {}", key); + return new EntitlementMethodVisitor(Opcodes.ASM9, mv, isStatic, descriptor, instrumentationMethod); + } else { + // LOGGER.trace("Will 
not instrument method {}", key); + } } return mv; } + + /** + * A class annotation can be added via visitAnnotation; we need to call visitAnnotation after all other visitAnnotation + * calls (in case one of them detects our annotation is already present), but before any other subsequent visit* method is called + * (up to visitMethod -- if no visitMethod is called, there is nothing to instrument). + * This includes visitNestMember, visitPermittedSubclass, visitInnerClass, visitField, visitRecordComponent and, of course, + * visitMethod (see {@link ClassVisitor} javadoc). + */ + private void addClassAnnotationIfNeeded() { + if (annotationNeeded) { + // logger.debug("Adding {} annotation", ENTITLEMENT_ANNOTATION); + AnnotationVisitor av = cv.visitAnnotation(ENTITLEMENT_ANNOTATION, true); + if (av != null) { + av.visitEnd(); + } + annotationNeeded = false; + } + } } static class EntitlementMethodVisitor extends MethodVisitor { diff --git a/distribution/tools/entitlement-agent/impl/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java b/distribution/tools/entitlement-agent/impl/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java index e807ecee4f103..f05c7ccae62e6 100644 --- a/distribution/tools/entitlement-agent/impl/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java +++ b/distribution/tools/entitlement-agent/impl/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java @@ -9,20 +9,24 @@ package org.elasticsearch.entitlement.instrumentation.impl; +import org.elasticsearch.common.Strings; import org.elasticsearch.entitlement.api.EntitlementChecks; import org.elasticsearch.entitlement.api.EntitlementProvider; import org.elasticsearch.entitlement.instrumentation.InstrumentationService; -import org.elasticsearch.entitlement.instrumentation.MethodKey; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; 
import org.elasticsearch.test.ESTestCase; import org.junit.Before; +import org.objectweb.asm.Type; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; -import java.util.Map; +import java.util.Arrays; +import java.util.stream.Collectors; import static org.elasticsearch.entitlement.instrumentation.impl.ASMUtils.bytecode2text; +import static org.elasticsearch.entitlement.instrumentation.impl.InstrumenterImpl.getClassFileInfo; +import static org.hamcrest.Matchers.is; /** * This tests {@link InstrumenterImpl} in isolation, without a java agent. @@ -60,6 +64,10 @@ public static class ClassToInstrument implements Testable { public static void systemExit(int status) { assertEquals(123, status); } + + public static void anotherSystemExit(int status) { + assertEquals(123, status); + } } static final class TestException extends RuntimeException {} @@ -76,8 +84,11 @@ public static class TestEntitlementManager implements EntitlementChecks { */ volatile boolean isActive; + int checkSystemExitCallCount = 0; + @Override public void checkSystemExit(Class callerClass, int status) { + checkSystemExitCallCount++; assertSame(InstrumenterTests.class, callerClass); assertEquals(123, status); throwIfActive(); @@ -90,18 +101,11 @@ private void throwIfActive() { } } - public void test() throws Exception { - // This test doesn't replace ClassToInstrument in-place but instead loads a separate - // class ClassToInstrument_NEW that contains the instrumentation. Because of this, - // we need to configure the Transformer to use a MethodKey and instrumentationMethod - // with slightly different signatures (using the common interface Testable) which - // is not what would happen when it's run by the agent. 
- - MethodKey k1 = instrumentationService.methodKeyForTarget(ClassToInstrument.class.getMethod("systemExit", int.class)); - Method v1 = EntitlementChecks.class.getMethod("checkSystemExit", Class.class, int.class); - var instrumenter = new InstrumenterImpl("_NEW", Map.of(k1, v1)); + public void testClassIsInstrumented() throws Exception { + var classToInstrument = ClassToInstrument.class; + var instrumenter = createInstrumenter(classToInstrument, "systemExit"); - byte[] newBytecode = instrumenter.instrumentClassFile(ClassToInstrument.class).bytecodes(); + byte[] newBytecode = instrumenter.instrumentClassFile(classToInstrument).bytecodes(); if (logger.isTraceEnabled()) { logger.trace("Bytecode after instrumentation:\n{}", bytecode2text(newBytecode)); @@ -112,22 +116,96 @@ public void test() throws Exception { newBytecode ); + getTestChecks().isActive = false; + // Before checking is active, nothing should throw - callStaticSystemExit(newClass, 123); + callStaticMethod(newClass, "systemExit", 123); getTestChecks().isActive = true; // After checking is activated, everything should throw - assertThrows(TestException.class, () -> callStaticSystemExit(newClass, 123)); + assertThrows(TestException.class, () -> callStaticMethod(newClass, "systemExit", 123)); + } + + public void testClassIsNotInstrumentedTwice() throws Exception { + var classToInstrument = ClassToInstrument.class; + var instrumenter = createInstrumenter(classToInstrument, "systemExit"); + + InstrumenterImpl.ClassFileInfo initial = getClassFileInfo(classToInstrument); + var internalClassName = Type.getInternalName(classToInstrument); + + byte[] instrumentedBytecode = instrumenter.instrumentClass(internalClassName, initial.bytecodes()); + byte[] instrumentedTwiceBytecode = instrumenter.instrumentClass(internalClassName, instrumentedBytecode); + + logger.trace(() -> Strings.format("Bytecode after 1st instrumentation:\n%s", bytecode2text(instrumentedBytecode))); + logger.trace(() -> Strings.format("Bytecode 
after 2nd instrumentation:\n%s", bytecode2text(instrumentedTwiceBytecode))); + + Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( + ClassToInstrument.class.getName() + "_NEW_NEW", + instrumentedTwiceBytecode + ); + + getTestChecks().isActive = true; + getTestChecks().checkSystemExitCallCount = 0; + + assertThrows(TestException.class, () -> callStaticMethod(newClass, "systemExit", 123)); + assertThat(getTestChecks().checkSystemExitCallCount, is(1)); + } + + public void testClassAllMethodsAreInstrumentedFirstPass() throws Exception { + var classToInstrument = ClassToInstrument.class; + var instrumenter = createInstrumenter(classToInstrument, "systemExit", "anotherSystemExit"); + + InstrumenterImpl.ClassFileInfo initial = getClassFileInfo(classToInstrument); + var internalClassName = Type.getInternalName(classToInstrument); + + byte[] instrumentedBytecode = instrumenter.instrumentClass(internalClassName, initial.bytecodes()); + byte[] instrumentedTwiceBytecode = instrumenter.instrumentClass(internalClassName, instrumentedBytecode); + + logger.trace(() -> Strings.format("Bytecode after 1st instrumentation:\n%s", bytecode2text(instrumentedBytecode))); + logger.trace(() -> Strings.format("Bytecode after 2nd instrumentation:\n%s", bytecode2text(instrumentedTwiceBytecode))); + + Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( + ClassToInstrument.class.getName() + "_NEW_NEW", + instrumentedTwiceBytecode + ); + + getTestChecks().isActive = true; + getTestChecks().checkSystemExitCallCount = 0; + + assertThrows(TestException.class, () -> callStaticMethod(newClass, "systemExit", 123)); + assertThat(getTestChecks().checkSystemExitCallCount, is(1)); + + assertThrows(TestException.class, () -> callStaticMethod(newClass, "anotherSystemExit", 123)); + assertThat(getTestChecks().checkSystemExitCallCount, is(2)); + } + + /** This test doesn't replace ClassToInstrument in-place but instead loads a separate + * 
class ClassToInstrument_NEW that contains the instrumentation. Because of this, + * we need to configure the Transformer to use a MethodKey and instrumentationMethod + * with slightly different signatures (using the common interface Testable) which + * is not what would happen when it's run by the agent. + */ + private InstrumenterImpl createInstrumenter(Class classToInstrument, String... methodNames) throws NoSuchMethodException { + Method v1 = EntitlementChecks.class.getMethod("checkSystemExit", Class.class, int.class); + var methods = Arrays.stream(methodNames).map(name -> { + try { + return instrumentationService.methodKeyForTarget(classToInstrument.getMethod(name, int.class)); + } catch (NoSuchMethodException e) { + throw new RuntimeException(e); + } + }).collect(Collectors.toUnmodifiableMap(name -> name, name -> v1)); + + return new InstrumenterImpl("_NEW", methods); } /** * Calling a static method of a dynamically loaded class is significantly more cumbersome * than calling a virtual method. 
*/ - private static void callStaticSystemExit(Class c, int status) throws NoSuchMethodException, IllegalAccessException { + private static void callStaticMethod(Class c, String methodName, int status) throws NoSuchMethodException, IllegalAccessException { try { - c.getMethod("systemExit", int.class).invoke(null, status); + c.getMethod(methodName, int.class).invoke(null, status); } catch (InvocationTargetException e) { Throwable cause = e.getCause(); if (cause instanceof TestException n) { From c4c33ff359b99e855306542d6cc077661e21383d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Mon, 28 Oct 2024 09:33:14 +0100 Subject: [PATCH 446/449] Fix NPE on plugin sync (#115640) --- .../plugins/cli/SyncPluginsAction.java | 8 ++++---- .../plugins/cli/SyncPluginsActionTests.java | 16 ++++++++++++++++ docs/changelog/115640.yaml | 6 ++++++ 3 files changed, 26 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/115640.yaml diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsAction.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsAction.java index 5394cb8f3d79b..d6d0619422770 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsAction.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsAction.java @@ -25,6 +25,7 @@ import java.nio.file.Path; import java.util.ArrayList; import java.util.Comparator; +import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Locale; @@ -60,7 +61,7 @@ public SyncPluginsAction(Terminal terminal, Environment env) { * @throws UserException if a plugins config file is found. 
*/ public static void ensureNoConfigFile(Environment env) throws UserException { - final Path pluginsConfig = env.configFile().resolve("elasticsearch-plugins.yml"); + final Path pluginsConfig = env.configFile().resolve(ELASTICSEARCH_PLUGINS_YML); if (Files.exists(pluginsConfig)) { throw new UserException( ExitCodes.USAGE, @@ -207,9 +208,8 @@ private List getPluginsToUpgrade( Optional cachedPluginsConfig, List existingPlugins ) { - final Map cachedPluginIdToLocation = cachedPluginsConfig.map( - config -> config.getPlugins().stream().collect(Collectors.toMap(InstallablePlugin::getId, InstallablePlugin::getLocation)) - ).orElse(Map.of()); + final Map cachedPluginIdToLocation = new HashMap<>(); + cachedPluginsConfig.ifPresent(config -> config.getPlugins().forEach(p -> cachedPluginIdToLocation.put(p.getId(), p.getLocation()))); return pluginsToMaybeUpgrade.stream().filter(eachPlugin -> { final String eachPluginId = eachPlugin.getId(); diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/SyncPluginsActionTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/SyncPluginsActionTests.java index 8ef44c8862e84..2d2336428a0a5 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/SyncPluginsActionTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/SyncPluginsActionTests.java @@ -157,6 +157,22 @@ public void test_getPluginChanges_withOfficialPluginToUpgrade_returnsPluginToUpg assertThat(pluginChanges.upgrade.get(0).getId(), equalTo("analysis-icu")); } + /** + * Check that when there is an official plugin in the config file and in the cached config, then we + * calculate that the plugin does not need to be upgraded. 
+ */ + public void test_getPluginChanges_withOfficialPluginCachedConfigAndNoChanges_returnsNoChanges() throws Exception { + createPlugin("analysis-icu"); + config.setPlugins(List.of(new InstallablePlugin("analysis-icu"))); + + final PluginsConfig cachedConfig = new PluginsConfig(); + cachedConfig.setPlugins(List.of(new InstallablePlugin("analysis-icu"))); + + final PluginChanges pluginChanges = action.getPluginChanges(config, Optional.of(cachedConfig)); + + assertThat(pluginChanges.isEmpty(), is(true)); + } + /** * Check that if an unofficial plugins' location has not changed in the cached config, then we * calculate that the plugin does not need to be upgraded. diff --git a/docs/changelog/115640.yaml b/docs/changelog/115640.yaml new file mode 100644 index 0000000000000..5c4a943a9697d --- /dev/null +++ b/docs/changelog/115640.yaml @@ -0,0 +1,6 @@ +pr: 115640 +summary: Fix NPE on plugin sync +area: Infra/CLI +type: bug +issues: + - 114818 From 918a9cc35ada3a348f0bd4ed24e7ab6f836d468e Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Mon, 28 Oct 2024 09:47:48 +0000 Subject: [PATCH 447/449] Make some chunked xcontent more efficient (#115512) --- .../org/elasticsearch/action/bulk/BulkResponse.java | 12 ++++++------ .../common/xcontent/ChunkedToXContentBuilder.java | 8 ++------ 2 files changed, 8 insertions(+), 12 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java index 88a9fb56b8edb..ec7a08007de93 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java @@ -158,13 +158,13 @@ public void writeTo(StreamOutput out) throws IOException { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).object(ob -> { - ob.field(ERRORS, hasFailures()); - ob.field(TOOK, tookInMillis); + return 
ChunkedToXContent.builder(params).object(ob -> ob.append((b, p) -> { + b.field(ERRORS, hasFailures()); + b.field(TOOK, tookInMillis); if (ingestTookInMillis != BulkResponse.NO_INGEST_TOOK) { - ob.field(INGEST_TOOK, ingestTookInMillis); + b.field(INGEST_TOOK, ingestTookInMillis); } - ob.array(ITEMS, Iterators.forArray(responses)); - }); + return b; + }).array(ITEMS, Iterators.forArray(responses))); } } diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilder.java b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilder.java index a3141bff7c6e2..a3243ef3865a7 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilder.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilder.java @@ -58,9 +58,7 @@ private void endObject() { * Creates an object, with the specified {@code contents} */ public ChunkedToXContentBuilder xContentObject(ToXContent contents) { - startObject(); - append(contents); - endObject(); + addChunk((b, p) -> contents.toXContent(b.startObject(), p).endObject()); return this; } @@ -68,9 +66,7 @@ public ChunkedToXContentBuilder xContentObject(ToXContent contents) { * Creates an object named {@code name}, with the specified {@code contents} */ public ChunkedToXContentBuilder xContentObject(String name, ToXContent contents) { - startObject(name); - append(contents); - endObject(); + addChunk((b, p) -> contents.toXContent(b.startObject(name), p).endObject()); return this; } From 0d8d8bd39282dd146ead0e8000a96d31587127de Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Mon, 28 Oct 2024 11:05:44 +0100 Subject: [PATCH 448/449] [DOCS] Add search and filtering tutorial/quickstart, edit filtering page (#114353) --- .../query-dsl/query_filter_context.asciidoc | 47 +- .../full-text-filtering-tutorial.asciidoc | 626 ++++++++++++++++++ docs/reference/quickstart/index.asciidoc | 4 +- 3 files 
changed, 662 insertions(+), 15 deletions(-) create mode 100644 docs/reference/quickstart/full-text-filtering-tutorial.asciidoc diff --git a/docs/reference/query-dsl/query_filter_context.asciidoc b/docs/reference/query-dsl/query_filter_context.asciidoc index 78e1549644aa6..1fd75ef0e841d 100644 --- a/docs/reference/query-dsl/query_filter_context.asciidoc +++ b/docs/reference/query-dsl/query_filter_context.asciidoc @@ -29,26 +29,45 @@ parameter, such as the `query` parameter in the [discrete] [[filter-context]] === Filter context -In a filter context, a query clause answers the question ``__Does this -document match this query clause?__'' The answer is a simple Yes or No -- no -scores are calculated. Filter context is mostly used for filtering structured -data, e.g. -* __Does this +timestamp+ fall into the range 2015 to 2016?__ -* __Is the +status+ field set to ++"published"++__? +A filter answers the binary question “Does this document match this query clause?”. The answer is simply "yes" or "no". +Filtering has several benefits: -Frequently used filters will be cached automatically by Elasticsearch, to -speed up performance. +. *Simple binary logic*: In a filter context, a query clause determines document matches based on a yes/no criterion, without score calculation. +. *Performance*: Because they don't compute relevance scores, filters execute faster than queries. +. *Caching*: {es} automatically caches frequently used filters, speeding up subsequent search performance. +. *Resource efficiency*: Filters consume less CPU resources compared to full-text queries. +. *Query combination*: Filters can be combined with scored queries to refine result sets efficiently. -Filter context is in effect whenever a query clause is passed to a `filter` -parameter, such as the `filter` or `must_not` parameters in the -<> query, the `filter` parameter in the -<> query, or the -<> aggregation. 
+Filters are particularly effective for querying structured data and implementing "must have" criteria in complex searches. + +Structured data refers to information that is highly organized and formatted in a predefined manner. In the context of Elasticsearch, this typically includes: + +* Numeric fields (integers, floating-point numbers) +* Dates and timestamps +* Boolean values +* Keyword fields (exact match strings) +* Geo-points and geo-shapes + +Unlike full-text fields, structured data has a consistent, predictable format, making it ideal for precise filtering operations. + +Common filter applications include: + +* Date range checks: for example is the `timestamp` field between 2015 and 2016 +* Specific field value checks: for example is the `status` field equal to "published" or is the `author` field equal to "John Doe" + +Filter context applies when a query clause is passed to a `filter` parameter, such as: + +* `filter` or `must_not` parameters in <> queries +* `filter` parameter in <> queries +* <> aggregations + +Filters optimize query performance and efficiency, especially for structured data queries and when combined with full-text searches. [discrete] [[query-filter-context-ex]] === Example of query and filter contexts + Below is an example of query clauses being used in query and filter context in the `search` API. This query will match documents where all of the following conditions are met: @@ -93,4 +112,4 @@ significand's precision will be converted to floats with loss of precision. TIP: Use query clauses in query context for conditions which should affect the score of matching documents (i.e. how well does the document match), and use -all other query clauses in filter context. \ No newline at end of file +all other query clauses in filter context. 
diff --git a/docs/reference/quickstart/full-text-filtering-tutorial.asciidoc b/docs/reference/quickstart/full-text-filtering-tutorial.asciidoc new file mode 100644 index 0000000000000..46cadc19f2547 --- /dev/null +++ b/docs/reference/quickstart/full-text-filtering-tutorial.asciidoc @@ -0,0 +1,626 @@ +[[full-text-filter-tutorial]] +== Basic full-text search and filtering in {es} +++++ +Basics: Full-text search and filtering +++++ + +This is a hands-on introduction to the basics of full-text search with {es}, also known as _lexical search_, using the <> and <>. +You'll also learn how to filter data, to narrow down search results based on exact criteria. + +In this scenario, we're implementing a search function for a cooking blog. +The blog contains recipes with various attributes including textual content, categorical data, and numerical ratings. + +The goal is to create search queries that enable users to: + +* Find recipes based on ingredients they want to use or avoid +* Discover dishes suitable for their dietary needs +* Find highly-rated recipes in specific categories +* Find recent recipes from their favorite authors + +To achieve these goals we'll use different Elasticsearch queries to perform full-text search, apply filters, and combine multiple search criteria. 
+ +[discrete] +[[full-text-filter-tutorial-create-index]] +=== Step 1: Create an index + +Create the `cooking_blog` index to get started: + +[source,console] +---- +PUT /cooking_blog +---- +// TESTSETUP + +Now define the mappings for the index: + +[source,console] +---- +PUT /cooking_blog/_mapping +{ + "properties": { + "title": { + "type": "text", + "analyzer": "standard", <1> + "fields": { <2> + "keyword": { + "type": "keyword", + "ignore_above": 256 <3> + } + } + }, + "description": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "author": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "date": { + "type": "date", + "format": "yyyy-MM-dd" + }, + "category": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "tags": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "rating": { + "type": "float" + } + } +} +---- +// TEST +<1> The `standard` analyzer is used by default for `text` fields if an `analyzer` isn't specified. It's included here for demonstration purposes. +<2> <> are used here to index `text` fields as both `text` and `keyword` <>. This enables both full-text search and exact matching/filtering on the same field. +Note that if you used <>, these multi-fields would be created automatically. +<3> The <> prevents indexing values longer than 256 characters in the `keyword` field. Again this is the default value, but it's included here for demonstration purposes. +It helps to save disk space and avoid potential issues with Lucene's term byte-length limit. + +[TIP] +==== +Full-text search is powered by <>. +Text analysis normalizes and standardizes text data so it can be efficiently stored in an inverted index and searched in near real-time. +Analysis happens at both <>. +This tutorial won't cover analysis in detail, but it's important to understand how text is processed to create effective search queries. 
+==== + +[discrete] +[[full-text-filter-tutorial-index-data]] +=== Step 2: Add sample blog posts to your index + +Now you'll need to index some example blog posts using the <>. +Note that `text` fields are analyzed and multi-fields are generated at index time. + +[source,console] +---- +POST /cooking_blog/_bulk?refresh=wait_for +{"index":{"_id":"1"}} +{"title":"Perfect Pancakes: A Fluffy Breakfast Delight","description":"Learn the secrets to making the fluffiest pancakes, so amazing you won't believe your tastebuds. This recipe uses buttermilk and a special folding technique to create light, airy pancakes that are perfect for lazy Sunday mornings.","author":"Maria Rodriguez","date":"2023-05-01","category":"Breakfast","tags":["pancakes","breakfast","easy recipes"],"rating":4.8} +{"index":{"_id":"2"}} +{"title":"Spicy Thai Green Curry: A Vegetarian Adventure","description":"Dive into the flavors of Thailand with this vibrant green curry. Packed with vegetables and aromatic herbs, this dish is both healthy and satisfying. Don't worry about the heat - you can easily adjust the spice level to your liking.","author":"Liam Chen","date":"2023-05-05","category":"Main Course","tags":["thai","vegetarian","curry","spicy"],"rating":4.6} +{"index":{"_id":"3"}} +{"title":"Classic Beef Stroganoff: A Creamy Comfort Food","description":"Indulge in this rich and creamy beef stroganoff. Tender strips of beef in a savory mushroom sauce, served over a bed of egg noodles. It's the ultimate comfort food for chilly evenings.","author":"Emma Watson","date":"2023-05-10","category":"Main Course","tags":["beef","pasta","comfort food"],"rating":4.7} +{"index":{"_id":"4"}} +{"title":"Vegan Chocolate Avocado Mousse","description":"Discover the magic of avocado in this rich, vegan chocolate mousse. 
Creamy, indulgent, and secretly healthy, it's the perfect guilt-free dessert for chocolate lovers.","author":"Alex Green","date":"2023-05-15","category":"Dessert","tags":["vegan","chocolate","avocado","healthy dessert"],"rating":4.5} +{"index":{"_id":"5"}} +{"title":"Crispy Oven-Fried Chicken","description":"Get that perfect crunch without the deep fryer! This oven-fried chicken recipe delivers crispy, juicy results every time. A healthier take on the classic comfort food.","author":"Maria Rodriguez","date":"2023-05-20","category":"Main Course","tags":["chicken","oven-fried","healthy"],"rating":4.9} +---- +// TEST[continued] + +[discrete] +[[full-text-filter-tutorial-match-query]] +=== Step 3: Perform basic full-text searches + +Full-text search involves executing text-based queries across one or more document fields. +These queries calculate a relevance score for each matching document, based on how closely the document's content aligns with the search terms. +{es} offers various query types, each with its own method for matching text and <>. + +[discrete] +==== `match` query + +The <> query is the standard query for full-text, or "lexical", search. +The query text will be analyzed according to the analyzer configuration specified on each field (or at query time). + +First, search the `description` field for "fluffy pancakes": + +[source,console] +---- +GET /cooking_blog/_search +{ + "query": { + "match": { + "description": { + "query": "fluffy pancakes" <1> + } + } + } +} +---- +// TEST[continued] +<1> By default, the `match` query uses `OR` logic between the resulting tokens. This means it will match documents that contain either "fluffy" or "pancakes", or both, in the description field. + +At search time, {es} defaults to the analyzer defined in the field mapping. In this example, we're using the `standard` analyzer. Using a different analyzer at search time is an <>. 
+ +.Example response +[%collapsible] +============== +[source,console-result] +---- +{ + "took": 0, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { <1> + "total": { + "value": 1, + "relation": "eq" + }, + "max_score": 1.8378843, <2> + "hits": [ + { + "_index": "cooking_blog", + "_id": "1", + "_score": 1.8378843, <3> + "_source": { + "title": "Perfect Pancakes: A Fluffy Breakfast Delight", <4> + "description": "Learn the secrets to making the fluffiest pancakes, so amazing you won't believe your tastebuds. This recipe uses buttermilk and a special folding technique to create light, airy pancakes that are perfect for lazy Sunday mornings.", <5> + "author": "Maria Rodriguez", + "date": "2023-05-01", + "category": "Breakfast", + "tags": [ + "pancakes", + "breakfast", + "easy recipes" + ], + "rating": 4.8 + } + } + ] + } +} +---- +// TESTRESPONSE[s/"took": 0/"took": "$body.took"/] +// TESTRESPONSE[s/"total": 1/"total": $body._shards.total/] +// TESTRESPONSE[s/"successful": 1/"successful": $body._shards.successful/] +// TESTRESPONSE[s/"value": 1/"value": $body.hits.total.value/] +// TESTRESPONSE[s/"max_score": 1.8378843/"max_score": $body.hits.max_score/] +// TESTRESPONSE[s/"_score": 1.8378843/"_score": $body.hits.hits.0._score/] +<1> The `hits` object contains the total number of matching documents and their relation to the total. Refer to <> for more details about the `hits` object. +<2> `max_score` is the highest relevance score among all matching documents. In this example, we only have one matching document. +<3> `_score` is the relevance score for a specific document, indicating how well it matches the query. Higher scores indicate better matches. In this example the `max_score` is the same as the `_score`, as there is only one matching document. +<4> The title contains both "Fluffy" and "Pancakes", matching our search terms exactly. 
+<5> The description includes "fluffiest" and "pancakes", further contributing to the document's relevance due to the analysis process. +============== + +[discrete] +==== Require all terms in a match query + +Specify the `and` operator to require both terms in the `description` field. +This stricter search returns _zero hits_ on our sample data, as no document contains both "fluffy" and "pancakes" in the description. + +[source,console] +---- +GET /cooking_blog/_search +{ + "query": { + "match": { + "description": { + "query": "fluffy pancakes", + "operator": "and" + } + } + } +} +---- +// TEST[continued] + +.Example response +[%collapsible] +============== +[source,console-result] +---- +{ + "took": 0, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 0, + "relation": "eq" + }, + "max_score": null, + "hits": [] + } +} +---- +// TESTRESPONSE[s/"took": 0/"took": "$body.took"/] +============== + +[discrete] +==== Specify a minimum number of terms to match + +Use the <> parameter to specify the minimum number of terms a document should have to be included in the search results. + +Search the title field to match at least 2 of the 3 terms: "fluffy", "pancakes", or "breakfast". +This is useful for improving relevance while allowing some flexibility. + +[source,console] +---- +GET /cooking_blog/_search +{ + "query": { + "match": { + "title": { + "query": "fluffy pancakes breakfast", + "minimum_should_match": 2 + } + } + } +} +---- +// TEST[continued] + +[discrete] +[[full-text-filter-tutorial-multi-match]] +=== Step 4: Search across multiple fields at once + +When users enter a search query, they often don't know (or care) whether their search terms appear in a specific field. +A <> query allows searching across multiple fields simultaneously. 
+ +Let's start with a basic `multi_match` query: + +[source,console] +---- +GET /cooking_blog/_search +{ + "query": { + "multi_match": { + "query": "vegetarian curry", + "fields": ["title", "description", "tags"] + } + } +} +---- +// TEST[continued] + +This query searches for "vegetarian curry" across the title, description, and tags fields. Each field is treated with equal importance. + +However, in many cases, matches in certain fields (like the title) might be more relevant than others. We can adjust the importance of each field using field boosting: + +[source,console] +---- +GET /cooking_blog/_search +{ + "query": { + "multi_match": { + "query": "vegetarian curry", + "fields": ["title^3", "description^2", "tags"] <1> + } + } +} +---- +// TEST[continued] +<1> The `^` syntax applies a boost to specific fields: ++ +* `title^3`: The title field is 3 times more important than an unboosted field +* `description^2`: The description is 2 times more important +* `tags`: No boost applied (equivalent to `^1`) ++ +These boosts help tune relevance, prioritizing matches in the title over the description, and matches in the description over tags. + +Learn more about fields and per-field boosting in the <> reference. + +.Example response +[%collapsible] +============== +[source,console-result] +---- +{ + "took": 0, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 1, + "relation": "eq" + }, + "max_score": 7.546015, + "hits": [ + { + "_index": "cooking_blog", + "_id": "2", + "_score": 7.546015, + "_source": { + "title": "Spicy Thai Green Curry: A Vegetarian Adventure", <1> + "description": "Dive into the flavors of Thailand with this vibrant green curry. Packed with vegetables and aromatic herbs, this dish is both healthy and satisfying. 
Don't worry about the heat - you can easily adjust the spice level to your liking.", <2> + "author": "Liam Chen", + "date": "2023-05-05", + "category": "Main Course", + "tags": [ + "thai", + "vegetarian", + "curry", + "spicy" + ], <3> + "rating": 4.6 + } + } + ] + } +} +---- +// TESTRESPONSE[s/"took": 0/"took": "$body.took"/] +// TESTRESPONSE[s/"_score": 7.546015/"_score": $body.hits.hits.0._score/] +// TESTRESPONSE[s/"max_score": 7.546015/"max_score": $body.hits.max_score/] +<1> The title contains "Vegetarian" and "Curry", which matches our search terms. The title field has the highest boost (^3), contributing significantly to this document's relevance score. +<2> The description contains "curry" and related terms like "vegetables", further increasing the document's relevance. +<3> The tags include both "vegetarian" and "curry", providing an exact match for our search terms, albeit with no boost. + +This result demonstrates how the `multi_match` query with field boosts helps users find relevant recipes across multiple fields. +Even though the exact phrase "vegetarian curry" doesn't appear in any single field, the combination of matches across fields produces a highly relevant result. +============== + +[TIP] +==== +The `multi_match` query is often recommended over a single `match` query for most text search use cases, as it provides more flexibility and better matches user expectations. +==== + +[discrete] +[[full-text-filter-tutorial-filtering]] +=== Step 5: Filter and find exact matches + +<> allows you to narrow down your search results based on exact criteria. +Unlike full-text searches, filters are binary (yes/no) and do not affect the relevance score. +Filters execute faster than queries because excluded results don't need to be scored. + +This <> query will return only blog posts in the "Breakfast" category. 
+ +[source,console] +---- +GET /cooking_blog/_search +{ + "query": { + "bool": { + "filter": [ + { "term": { "category.keyword": "Breakfast" } } <1> + ] + } + } +} +---- +// TEST[continued] +<1> Note the use of `category.keyword` here. This refers to the <> multi-field of the `category` field, ensuring an exact, case-sensitive match. + +[TIP] +==== +The `.keyword` suffix accesses the unanalyzed version of a field, enabling exact, case-sensitive matching. This works in two scenarios: + +1. *When using dynamic mapping for text fields*. Elasticsearch automatically creates a `.keyword` sub-field. +2. *When text fields are explicitly mapped with a `.keyword` sub-field*. For example, we explicitly mapped the `category` field in <> of this tutorial. +==== + +[discrete] +[[full-text-filter-tutorial-range-query]] +==== Search for posts within a date range + +Often users want to find content published within a specific time frame. +A <> query finds documents that fall within numeric or date ranges. + +[source,console] +---- +GET /cooking_blog/_search +{ + "query": { + "range": { + "date": { + "gte": "2023-05-01", <1> + "lte": "2023-05-31" <2> + } + } + } +} +---- +// TEST[continued] +<1> Greater than or equal to May 1, 2023. +<2> Less than or equal to May 31, 2023. + +[discrete] +[[full-text-filter-tutorial-term-query]] +==== Find exact matches + +Sometimes users want to search for exact terms to eliminate ambiguity in their search results. +A <> query searches for an exact term in a field without analyzing it. +Exact, case-sensitive matches on specific terms are often referred to as "keyword" searches. + +Here you'll search for the author "Maria Rodriguez" in the `author.keyword` field. + +[source,console] +---- +GET /cooking_blog/_search +{ + "query": { + "term": { + "author.keyword": "Maria Rodriguez" <1> + } + } +} +---- +// TEST[continued] +<1> The `term` query has zero flexibility. 
For example, here the queries `maria` or `maria rodriguez` would have zero hits, due to case sensitivity. + +[TIP] +==== +Avoid using the `term` query for <> because they are transformed by the analysis process. +==== + +[discrete] +[[full-text-filter-tutorial-complex-bool]] +=== Step 6: Combine multiple search criteria + +A <> query allows you to combine multiple query clauses to create sophisticated searches. +In this tutorial scenario it's useful for when users have complex requirements for finding recipes. + +Let's create a query that addresses the following user needs: + +* Must be a vegetarian main course +* Should contain "curry" or "spicy" in the title or description +* Must not be a dessert +* Must have a rating of at least 4.5 +* Should prefer recipes published in the last month + +[source,console] +---- +GET /cooking_blog/_search +{ + "query": { + "bool": { + "must": [ + { + "term": { + "category.keyword": "Main Course" + } + }, + { + "term": { + "tags": "vegetarian" + } + }, + { + "range": { + "rating": { + "gte": 4.5 + } + } + } + ], + "should": [ + { + "multi_match": { + "query": "curry spicy", + "fields": ["title^2", "description"] + } + }, + { + "range": { + "date": { + "gte": "now-1M/d" + } + } + } + ], + "must_not": [ <1> + { + "term": { + "category.keyword": "Dessert" + } + } + ] + } + } +} +---- +// TEST[continued] +<1> The `must_not` clause excludes documents that match the specified criteria. This is a powerful tool for filtering out unwanted results. 
+ +.Example response +[%collapsible] +============== +[source,console-result] +---- +{ + "took": 1, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 1, + "relation": "eq" + }, + "max_score": 7.9835095, + "hits": [ + { + "_index": "cooking_blog", + "_id": "2", + "_score": 7.9835095, + "_source": { + "title": "Spicy Thai Green Curry: A Vegetarian Adventure", <1> + "description": "Dive into the flavors of Thailand with this vibrant green curry. Packed with vegetables and aromatic herbs, this dish is both healthy and satisfying. Don't worry about the heat - you can easily adjust the spice level to your liking.", <2> + "author": "Liam Chen", + "date": "2023-05-05", + "category": "Main Course", <3> + "tags": [ <4> + "thai", + "vegetarian", <5> + "curry", + "spicy" + ], + "rating": 4.6 <6> + } + } + ] + } +} +---- +// TESTRESPONSE[s/"took": 1/"took": "$body.took"/] +<1> The title contains "Spicy" and "Curry", matching our should condition. With the default <> behavior, this field contributes most to the relevance score. +<2> While the description also contains matching terms, only the best matching field's score is used by default. +<3> The recipe was published within the last month, satisfying our recency preference. +<4> The "Main Course" category matches our `must` condition. +<5> The "vegetarian" tag satisfies another `must` condition, while "curry" and "spicy" tags align with our `should` preferences. +<6> The rating of 4.6 meets our minimum rating requirement of 4.5. +============== + +[discrete] +[[full-text-filter-tutorial-learn-more]] +=== Learn more + +This tutorial introduced the basics of full-text search and filtering in {es}. +Building a real-world search experience requires understanding many more advanced concepts and techniques. +Here are some resources once you're ready to dive deeper: + +* <>: Understand all your options for searching and analyzing data in {es}. 
+* <>: Understand how text is processed for full-text search. +* <>: Learn about more advanced search techniques using the `_search` API, including semantic search. + + diff --git a/docs/reference/quickstart/index.asciidoc b/docs/reference/quickstart/index.asciidoc index 2d9114882254f..ed4c128392994 100644 --- a/docs/reference/quickstart/index.asciidoc +++ b/docs/reference/quickstart/index.asciidoc @@ -15,7 +15,8 @@ Get started <> , or see our <>. Learn about indices, documents, and mappings, and perform a basic search. +* <>. Learn about indices, documents, and mappings, and perform a basic search using the Query DSL. +* <>. Learn about different options for querying data, including full-text search and filtering, using the Query DSL. [discrete] [[quickstart-python-links]] @@ -27,3 +28,4 @@ If you're interested in using {es} with Python, check out Elastic Search Labs: * https://www.elastic.co/search-labs/tutorials/search-tutorial/welcome[Tutorial]: This walks you through building a complete search solution with {es} from the ground up using Flask. 
include::getting-started.asciidoc[] +include::full-text-filtering-tutorial.asciidoc[] From 78ccd2a4a216c25c6bf75833295a9f2d423fc19d Mon Sep 17 00:00:00 2001 From: Souradip Poddar <49103513+SouradipPoddar@users.noreply.github.com> Date: Mon, 28 Oct 2024 15:48:34 +0530 Subject: [PATCH 449/449] 112274 converted cpu stats to support unsigned 64 bit number (#114681) --- docs/changelog/114681.yaml | 6 ++ .../org/elasticsearch/TransportVersions.java | 1 + .../common/io/stream/StreamOutput.java | 7 +++ .../org/elasticsearch/monitor/os/OsProbe.java | 48 ++++++++------- .../org/elasticsearch/monitor/os/OsStats.java | 61 +++++++++++++------ .../cluster/node/stats/NodeStatsTests.java | 9 ++- .../monitor/os/OsProbeTests.java | 24 ++++---- .../monitor/os/OsStatsTests.java | 11 +++- .../node/NodeStatsMonitoringDocTests.java | 11 +++- 9 files changed, 119 insertions(+), 59 deletions(-) create mode 100644 docs/changelog/114681.yaml diff --git a/docs/changelog/114681.yaml b/docs/changelog/114681.yaml new file mode 100644 index 0000000000000..2a9901114e56f --- /dev/null +++ b/docs/changelog/114681.yaml @@ -0,0 +1,6 @@ +pr: 114681 +summary: "Support for unsigned 64 bit numbers in Cpu stats" +area: Infra/Core +type: enhancement +issues: + - 112274 diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 3986ea4b97254..9454c27dd787c 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -183,6 +183,7 @@ static TransportVersion def(int id) { public static final TransportVersion INTRODUCE_ALL_APPLICABLE_SELECTOR = def(8_778_00_0); public static final TransportVersion INDEX_MODE_LOOKUP = def(8_779_00_0); public static final TransportVersion INDEX_REQUEST_REMOVE_METERING = def(8_780_00_0); + public static final TransportVersion CPU_STAT_STRING_PARSING = def(8_781_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java index c449065a953e2..d724e5ea25ca6 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java @@ -1234,4 +1234,11 @@ public void writeMissingWriteable(Class ignored) throws public void writeMissingString() throws IOException { writeBoolean(false); } + + /** + * Write a {@link BigInteger} to the stream + */ + public void writeBigInteger(BigInteger bigInteger) throws IOException { + writeString(bigInteger.toString()); + } } diff --git a/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java b/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java index 799264d8392b1..06ab6a6eee410 100644 --- a/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java +++ b/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java @@ -22,6 +22,7 @@ import java.lang.management.OperatingSystemMXBean; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; +import java.math.BigInteger; import java.nio.file.Files; import java.nio.file.Path; import java.util.Collections; @@ -341,8 +342,8 @@ List readProcSelfCgroup() throws IOException { * @return the total CPU time in nanoseconds * @throws IOException if an I/O exception occurs reading {@code cpuacct.usage} for the control group */ - private long getCgroupCpuAcctUsageNanos(final String controlGroup) throws IOException { - return Long.parseLong(readSysFsCgroupCpuAcctCpuAcctUsage(controlGroup)); + private BigInteger getCgroupCpuAcctUsageNanos(final String controlGroup) throws IOException { + return new BigInteger(readSysFsCgroupCpuAcctCpuAcctUsage(controlGroup)); } /** @@ -435,21 +436,22 @@ String readSysFsCgroupCpuAcctCpuAcctCfsQuota(final String controlGroup) throws I * @throws IOException if an I/O 
exception occurs reading {@code cpu.stat} for the control group */ private OsStats.Cgroup.CpuStat getCgroupCpuAcctCpuStat(final String controlGroup) throws IOException { + final var SENTINEL_VALUE = BigInteger.valueOf(-1); final List lines = readSysFsCgroupCpuAcctCpuStat(controlGroup); - long numberOfPeriods = -1; - long numberOfTimesThrottled = -1; - long timeThrottledNanos = -1; + var numberOfPeriods = SENTINEL_VALUE; + var numberOfTimesThrottled = SENTINEL_VALUE; + var timeThrottledNanos = SENTINEL_VALUE; for (final String line : lines) { final String[] fields = line.split("\\s+"); switch (fields[0]) { - case "nr_periods" -> numberOfPeriods = Long.parseLong(fields[1]); - case "nr_throttled" -> numberOfTimesThrottled = Long.parseLong(fields[1]); - case "throttled_time" -> timeThrottledNanos = Long.parseLong(fields[1]); + case "nr_periods" -> numberOfPeriods = new BigInteger(fields[1]); + case "nr_throttled" -> numberOfTimesThrottled = new BigInteger(fields[1]); + case "throttled_time" -> timeThrottledNanos = new BigInteger(fields[1]); } } - assert numberOfPeriods != -1; - assert numberOfTimesThrottled != -1; - assert timeThrottledNanos != -1; + assert numberOfPeriods.equals(SENTINEL_VALUE) == false; + assert numberOfTimesThrottled.equals(SENTINEL_VALUE) == false; + assert timeThrottledNanos.equals(SENTINEL_VALUE) == false; return new OsStats.Cgroup.CpuStat(numberOfPeriods, numberOfTimesThrottled, timeThrottledNanos); } @@ -635,28 +637,30 @@ boolean areCgroupStatsAvailable() throws IOException { * @throws IOException if an I/O exception occurs reading {@code cpu.stat} for the control group */ @SuppressForbidden(reason = "Uses PathUtils.get to generate meaningful assertion messages") - private Map getCgroupV2CpuStats(String controlGroup) throws IOException { + private Map getCgroupV2CpuStats(String controlGroup) throws IOException { final List lines = readCgroupV2CpuStats(controlGroup); - final Map stats = new HashMap<>(); + final Map stats = new HashMap<>(); + 
final BigInteger SENTINEL_VALUE = BigInteger.valueOf(-1); for (String line : lines) { String[] parts = line.split("\\s+"); assert parts.length == 2 : "Corrupt cpu.stat line: [" + line + "]"; - stats.put(parts[0], Long.parseLong(parts[1])); + stats.put(parts[0], new BigInteger(parts[1])); } final List expectedKeys = List.of("system_usec", "usage_usec", "user_usec"); expectedKeys.forEach(key -> { assert stats.containsKey(key) : "[" + key + "] missing from " + PathUtils.get("/sys/fs/cgroup", controlGroup, "cpu.stat"); - assert stats.get(key) != -1 : stats.get(key); + assert stats.get(key).compareTo(SENTINEL_VALUE) != 0 : stats.get(key).toString(); }); final List optionalKeys = List.of("nr_periods", "nr_throttled", "throttled_usec"); optionalKeys.forEach(key -> { if (stats.containsKey(key) == false) { - stats.put(key, 0L); + stats.put(key, BigInteger.ZERO); } - assert stats.get(key) != -1L : "[" + key + "] in " + PathUtils.get("/sys/fs/cgroup", controlGroup, "cpu.stat") + " is -1"; + assert stats.get(key).compareTo(SENTINEL_VALUE) != 0 + : "[" + key + "] in " + PathUtils.get("/sys/fs/cgroup", controlGroup, "cpu.stat") + " is -1"; }); return stats; @@ -682,7 +686,7 @@ private OsStats.Cgroup getCgroup() { assert controllerMap.isEmpty() == false; final String cpuAcctControlGroup; - final long cgroupCpuAcctUsageNanos; + final BigInteger cgroupCpuAcctUsageNanos; final long cgroupCpuAcctCpuCfsPeriodMicros; final long cgroupCpuAcctCpuCfsQuotaMicros; final String cpuControlGroup; @@ -696,9 +700,11 @@ private OsStats.Cgroup getCgroup() { cpuControlGroup = cpuAcctControlGroup = memoryControlGroup = controllerMap.get(""); // `cpuacct` was merged with `cpu` in v2 - final Map cpuStatsMap = getCgroupV2CpuStats(cpuControlGroup); + final Map cpuStatsMap = getCgroupV2CpuStats(cpuControlGroup); - cgroupCpuAcctUsageNanos = cpuStatsMap.get("usage_usec") * 1000; // convert from micros to nanos + final BigInteger THOUSAND = BigInteger.valueOf(1000); + + cgroupCpuAcctUsageNanos = 
cpuStatsMap.get("usage_usec").multiply(THOUSAND); // convert from micros to nanos long[] cpuLimits = getCgroupV2CpuLimit(cpuControlGroup); cgroupCpuAcctCpuCfsQuotaMicros = cpuLimits[0]; @@ -707,7 +713,7 @@ private OsStats.Cgroup getCgroup() { cpuStat = new OsStats.Cgroup.CpuStat( cpuStatsMap.get("nr_periods"), cpuStatsMap.get("nr_throttled"), - cpuStatsMap.get("throttled_usec") * 1000 + cpuStatsMap.get("throttled_usec").multiply(THOUSAND) ); cgroupMemoryLimitInBytes = getCgroupV2MemoryLimitInBytes(memoryControlGroup); diff --git a/server/src/main/java/org/elasticsearch/monitor/os/OsStats.java b/server/src/main/java/org/elasticsearch/monitor/os/OsStats.java index 7a2f46668f610..6c1ba2dfbe63a 100644 --- a/server/src/main/java/org/elasticsearch/monitor/os/OsStats.java +++ b/server/src/main/java/org/elasticsearch/monitor/os/OsStats.java @@ -20,6 +20,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.math.BigInteger; import java.util.Arrays; import java.util.Objects; @@ -362,7 +363,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public static class Cgroup implements Writeable, ToXContentFragment { private final String cpuAcctControlGroup; - private final long cpuAcctUsageNanos; + private final BigInteger cpuAcctUsageNanos; private final String cpuControlGroup; private final long cpuCfsPeriodMicros; private final long cpuCfsQuotaMicros; @@ -387,7 +388,7 @@ public String getCpuAcctControlGroup() { * * @return the total CPU time in nanoseconds */ - public long getCpuAcctUsageNanos() { + public BigInteger getCpuAcctUsageNanos() { return cpuAcctUsageNanos; } @@ -465,7 +466,7 @@ public String getMemoryUsageInBytes() { public Cgroup( final String cpuAcctControlGroup, - final long cpuAcctUsageNanos, + final BigInteger cpuAcctUsageNanos, final String cpuControlGroup, final long cpuCfsPeriodMicros, final long cpuCfsQuotaMicros, @@ -487,7 +488,11 @@ public Cgroup( Cgroup(final StreamInput in) 
throws IOException { cpuAcctControlGroup = in.readString(); - cpuAcctUsageNanos = in.readLong(); + if (in.getTransportVersion().onOrAfter(TransportVersions.CPU_STAT_STRING_PARSING)) { + cpuAcctUsageNanos = in.readBigInteger(); + } else { + cpuAcctUsageNanos = BigInteger.valueOf(in.readLong()); + } cpuControlGroup = in.readString(); cpuCfsPeriodMicros = in.readLong(); cpuCfsQuotaMicros = in.readLong(); @@ -500,7 +505,11 @@ public Cgroup( @Override public void writeTo(final StreamOutput out) throws IOException { out.writeString(cpuAcctControlGroup); - out.writeLong(cpuAcctUsageNanos); + if (out.getTransportVersion().onOrAfter(TransportVersions.CPU_STAT_STRING_PARSING)) { + out.writeBigInteger(cpuAcctUsageNanos); + } else { + out.writeLong(cpuAcctUsageNanos.longValue()); + } out.writeString(cpuControlGroup); out.writeLong(cpuCfsPeriodMicros); out.writeLong(cpuCfsQuotaMicros); @@ -551,9 +560,9 @@ public XContentBuilder toXContent(final XContentBuilder builder, final Params pa */ public static class CpuStat implements Writeable, ToXContentFragment { - private final long numberOfElapsedPeriods; - private final long numberOfTimesThrottled; - private final long timeThrottledNanos; + private final BigInteger numberOfElapsedPeriods; + private final BigInteger numberOfTimesThrottled; + private final BigInteger timeThrottledNanos; /** * The number of elapsed periods. 
@@ -561,7 +570,7 @@ public static class CpuStat implements Writeable, ToXContentFragment { * @return the number of elapsed periods as measured by * {@code cpu.cfs_period_us} */ - public long getNumberOfElapsedPeriods() { + public BigInteger getNumberOfElapsedPeriods() { return numberOfElapsedPeriods; } @@ -571,7 +580,7 @@ public long getNumberOfElapsedPeriods() { * * @return the number of times */ - public long getNumberOfTimesThrottled() { + public BigInteger getNumberOfTimesThrottled() { return numberOfTimesThrottled; } @@ -581,27 +590,43 @@ public long getNumberOfTimesThrottled() { * * @return the total time in nanoseconds */ - public long getTimeThrottledNanos() { + public BigInteger getTimeThrottledNanos() { return timeThrottledNanos; } - public CpuStat(final long numberOfElapsedPeriods, final long numberOfTimesThrottled, final long timeThrottledNanos) { + public CpuStat( + final BigInteger numberOfElapsedPeriods, + final BigInteger numberOfTimesThrottled, + final BigInteger timeThrottledNanos + ) { this.numberOfElapsedPeriods = numberOfElapsedPeriods; this.numberOfTimesThrottled = numberOfTimesThrottled; this.timeThrottledNanos = timeThrottledNanos; } CpuStat(final StreamInput in) throws IOException { - numberOfElapsedPeriods = in.readLong(); - numberOfTimesThrottled = in.readLong(); - timeThrottledNanos = in.readLong(); + if (in.getTransportVersion().onOrAfter(TransportVersions.CPU_STAT_STRING_PARSING)) { + numberOfElapsedPeriods = in.readBigInteger(); + numberOfTimesThrottled = in.readBigInteger(); + timeThrottledNanos = in.readBigInteger(); + } else { + numberOfElapsedPeriods = BigInteger.valueOf(in.readLong()); + numberOfTimesThrottled = BigInteger.valueOf(in.readLong()); + timeThrottledNanos = BigInteger.valueOf(in.readLong()); + } } @Override public void writeTo(final StreamOutput out) throws IOException { - out.writeLong(numberOfElapsedPeriods); - out.writeLong(numberOfTimesThrottled); - out.writeLong(timeThrottledNanos); + if 
(out.getTransportVersion().onOrAfter(TransportVersions.CPU_STAT_STRING_PARSING)) { + out.writeBigInteger(numberOfElapsedPeriods); + out.writeBigInteger(numberOfTimesThrottled); + out.writeBigInteger(timeThrottledNanos); + } else { + out.writeLong(numberOfElapsedPeriods.longValue()); + out.writeLong(numberOfTimesThrottled.longValue()); + out.writeLong(timeThrottledNanos.longValue()); + } } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java index 77d00f0e5a068..b5f61d5b798fa 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java @@ -85,6 +85,7 @@ import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; +import java.math.BigInteger; import java.nio.file.Path; import java.util.ArrayList; import java.util.Collections; @@ -709,11 +710,15 @@ public static NodeStats createNodeStats() { new OsStats.Swap(swapTotal, randomLongBetween(0, swapTotal)), new OsStats.Cgroup( randomAlphaOfLength(8), - randomNonNegativeLong(), + randomUnsignedLongBetween(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE).multiply(BigInteger.TWO)), randomAlphaOfLength(8), randomNonNegativeLong(), randomNonNegativeLong(), - new OsStats.Cgroup.CpuStat(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()), + new OsStats.Cgroup.CpuStat( + randomUnsignedLongBetween(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE).multiply(BigInteger.TWO)), + randomUnsignedLongBetween(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE).multiply(BigInteger.TWO)), + randomUnsignedLongBetween(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE).multiply(BigInteger.TWO)) + ), randomAlphaOfLength(8), Long.toString(randomNonNegativeLong()), Long.toString(randomNonNegativeLong()) diff --git 
a/server/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java b/server/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java index 220f4336fc444..aad78881a8a13 100644 --- a/server/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java +++ b/server/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java @@ -136,12 +136,12 @@ public void testOsStats() { if (Constants.LINUX) { if (stats.getCgroup() != null) { assertThat(stats.getCgroup().getCpuAcctControlGroup(), notNullValue()); - assertThat(stats.getCgroup().getCpuAcctUsageNanos(), greaterThan(0L)); + assertThat(stats.getCgroup().getCpuAcctUsageNanos(), greaterThan(BigInteger.ZERO)); assertThat(stats.getCgroup().getCpuCfsQuotaMicros(), anyOf(equalTo(-1L), greaterThanOrEqualTo(0L))); assertThat(stats.getCgroup().getCpuCfsPeriodMicros(), greaterThanOrEqualTo(0L)); - assertThat(stats.getCgroup().getCpuStat().getNumberOfElapsedPeriods(), greaterThanOrEqualTo(0L)); - assertThat(stats.getCgroup().getCpuStat().getNumberOfTimesThrottled(), greaterThanOrEqualTo(0L)); - assertThat(stats.getCgroup().getCpuStat().getTimeThrottledNanos(), greaterThanOrEqualTo(0L)); + assertThat(stats.getCgroup().getCpuStat().getNumberOfElapsedPeriods(), greaterThanOrEqualTo(BigInteger.ZERO)); + assertThat(stats.getCgroup().getCpuStat().getNumberOfTimesThrottled(), greaterThanOrEqualTo(BigInteger.ZERO)); + assertThat(stats.getCgroup().getCpuStat().getTimeThrottledNanos(), greaterThanOrEqualTo(BigInteger.ZERO)); // These could be null if transported from a node running an older version, but shouldn't be null on the current node assertThat(stats.getCgroup().getMemoryControlGroup(), notNullValue()); String memoryLimitInBytes = stats.getCgroup().getMemoryLimitInBytes(); @@ -191,26 +191,26 @@ public void testCgroupProbe() { case 1 -> { assertNotNull(cgroup); assertThat(cgroup.getCpuAcctControlGroup(), equalTo("/" + hierarchy)); - assertThat(cgroup.getCpuAcctUsageNanos(), equalTo(364869866063112L)); + 
assertThat(cgroup.getCpuAcctUsageNanos(), equalTo(new BigInteger("364869866063112"))); assertThat(cgroup.getCpuControlGroup(), equalTo("/" + hierarchy)); assertThat(cgroup.getCpuCfsPeriodMicros(), equalTo(100000L)); assertThat(cgroup.getCpuCfsQuotaMicros(), equalTo(50000L)); - assertThat(cgroup.getCpuStat().getNumberOfElapsedPeriods(), equalTo(17992L)); - assertThat(cgroup.getCpuStat().getNumberOfTimesThrottled(), equalTo(1311L)); - assertThat(cgroup.getCpuStat().getTimeThrottledNanos(), equalTo(139298645489L)); + assertThat(cgroup.getCpuStat().getNumberOfElapsedPeriods(), equalTo(BigInteger.valueOf(17992))); + assertThat(cgroup.getCpuStat().getNumberOfTimesThrottled(), equalTo(BigInteger.valueOf(1311))); + assertThat(cgroup.getCpuStat().getTimeThrottledNanos(), equalTo(new BigInteger("139298645489"))); assertThat(cgroup.getMemoryLimitInBytes(), equalTo("18446744073709551615")); assertThat(cgroup.getMemoryUsageInBytes(), equalTo("4796416")); } case 2 -> { assertNotNull(cgroup); assertThat(cgroup.getCpuAcctControlGroup(), equalTo("/" + hierarchy)); - assertThat(cgroup.getCpuAcctUsageNanos(), equalTo(364869866063000L)); + assertThat(cgroup.getCpuAcctUsageNanos(), equalTo(new BigInteger("364869866063000"))); assertThat(cgroup.getCpuControlGroup(), equalTo("/" + hierarchy)); assertThat(cgroup.getCpuCfsPeriodMicros(), equalTo(100000L)); assertThat(cgroup.getCpuCfsQuotaMicros(), equalTo(50000L)); - assertThat(cgroup.getCpuStat().getNumberOfElapsedPeriods(), equalTo(17992L)); - assertThat(cgroup.getCpuStat().getNumberOfTimesThrottled(), equalTo(1311L)); - assertThat(cgroup.getCpuStat().getTimeThrottledNanos(), equalTo(139298645000L)); + assertThat(cgroup.getCpuStat().getNumberOfElapsedPeriods(), equalTo(BigInteger.valueOf(17992))); + assertThat(cgroup.getCpuStat().getNumberOfTimesThrottled(), equalTo(BigInteger.valueOf(1311))); + assertThat(cgroup.getCpuStat().getTimeThrottledNanos(), equalTo(new BigInteger("139298645000"))); assertThat(cgroup.getMemoryLimitInBytes(), 
equalTo("18446744073709551615")); assertThat(cgroup.getMemoryUsageInBytes(), equalTo("4796416")); } diff --git a/server/src/test/java/org/elasticsearch/monitor/os/OsStatsTests.java b/server/src/test/java/org/elasticsearch/monitor/os/OsStatsTests.java index 4c53067ca123a..2146e47febe9c 100644 --- a/server/src/test/java/org/elasticsearch/monitor/os/OsStatsTests.java +++ b/server/src/test/java/org/elasticsearch/monitor/os/OsStatsTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.test.ESTestCase; import java.io.IOException; +import java.math.BigInteger; import static org.hamcrest.Matchers.equalTo; @@ -21,7 +22,7 @@ public class OsStatsTests extends ESTestCase { public void testSerialization() throws IOException { int numLoadAverages = randomIntBetween(1, 5); - double loadAverages[] = new double[numLoadAverages]; + double[] loadAverages = new double[numLoadAverages]; for (int i = 0; i < loadAverages.length; i++) { loadAverages[i] = randomDouble(); } @@ -32,11 +33,15 @@ public void testSerialization() throws IOException { OsStats.Swap swap = new OsStats.Swap(swapTotal, randomLongBetween(0, swapTotal)); OsStats.Cgroup cgroup = new OsStats.Cgroup( randomAlphaOfLength(8), - randomNonNegativeLong(), + randomUnsignedLongBetween(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE)), randomAlphaOfLength(8), randomNonNegativeLong(), randomNonNegativeLong(), - new OsStats.Cgroup.CpuStat(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()), + new OsStats.Cgroup.CpuStat( + randomUnsignedLongBetween(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE).multiply(BigInteger.TWO)), + randomUnsignedLongBetween(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE).multiply(BigInteger.TWO)), + randomUnsignedLongBetween(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE).multiply(BigInteger.TWO)) + ), randomAlphaOfLength(8), Long.toString(randomNonNegativeLong()), Long.toString(randomNonNegativeLong()) diff --git 
a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsMonitoringDocTests.java index da23f27e1357e..3d7f843358646 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsMonitoringDocTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsMonitoringDocTests.java @@ -37,6 +37,7 @@ import org.junit.Before; import java.io.IOException; +import java.math.BigInteger; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -227,7 +228,7 @@ public void testToXContent() throws IOException { "stat": { "number_of_elapsed_periods": 39, "number_of_times_throttled": 40, - "time_throttled_nanos": 41 + "time_throttled_nanos": 9223372036854775848 } }, "memory": { @@ -393,10 +394,14 @@ private static NodeStats mockNodeStats() { // Os final OsStats.Cpu osCpu = new OsStats.Cpu((short) no, new double[] { ++iota, ++iota, ++iota }); - final OsStats.Cgroup.CpuStat osCpuStat = new OsStats.Cgroup.CpuStat(++iota, ++iota, ++iota); + final OsStats.Cgroup.CpuStat osCpuStat = new OsStats.Cgroup.CpuStat( + BigInteger.valueOf(++iota), + BigInteger.valueOf(++iota), + BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.valueOf(++iota)) + ); final OsStats.Cgroup osCgroup = new OsStats.Cgroup( "_cpu_acct_ctrl_group", - ++iota, + BigInteger.valueOf(++iota), "_cpu_ctrl_group", ++iota, ++iota,